#!/usr/bin/env python3
import sys
def usage():
print("Usage")
print(" tohex.py 0x123")
print(" tohex.py 0b101")
print(" tohex.py 12345")
print("")
exit(0)
def printDEC(parsed):
print("DEC %i" % parsed)
def printHEX(parsed):
print('HEX 0x%x' % parsed)
def printBIN(parsed):
print('BIN 0b{:b}'.format(parsed))
if len(sys.argv) != 2:
usage()
arg = sys.argv[1]
if(arg.startswith("0x")):
parsed = int(arg, 16)
elif(arg.startswith("0b")):
parsed = int(arg, 2)
else:
parsed = int(arg)
printDEC(parsed)
printHEX(parsed)
printBIN(parsed)
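# Example invocation (illustrative; assumes the file is saved as tohex.py and is executable):
#   $ ./tohex.py 0x123
#   DEC 291
#   HEX 0x123
#   BIN 0b100100011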
|
vo=['a','e','i','o','u']
n=input()
if n.lower() in vo:  # case-insensitive vowel check
print("vowel")
else:
print("consonant")
|
# pylint: disable=protected-access
"""
Test the wrappers for the C API.
"""
import os
import pytest
from ..clib.core import load_libgmt, _check_libgmt, create_session, \
destroy_session, call_module, get_constant
from ..clib.context_manager import LibGMT
from ..clib.utils import clib_extension
from ..exceptions import GMTCLibError, GMTOSError, GMTCLibNotFoundError
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
def test_load_libgmt():
"Test that loading libgmt works and doesn't crash."
load_libgmt()
def test_load_libgmt_fail():
"Test that loading fails when given a bad library path."
with pytest.raises(GMTCLibNotFoundError):
load_libgmt('some/wrong/path/libgmt')
def test_check_libgmt():
"Make sure check_libgmt fails when given a bogus library"
with pytest.raises(GMTCLibError):
_check_libgmt(dict())
def test_clib_extension():
"Make sure we get the correct extension for different OS names"
for linux in ['linux', 'linux2', 'linux3']:
assert clib_extension(linux) == 'so'
assert clib_extension('darwin') == 'dylib'
with pytest.raises(GMTOSError):
clib_extension('meh')
def test_constant():
"Test that I can get correct constants from the C lib"
lib = load_libgmt()
assert get_constant('GMT_SESSION_EXTERNAL', lib) != -99999
assert get_constant('GMT_MODULE_CMD', lib) != -99999
assert get_constant('GMT_PAD_DEFAULT', lib) != -99999
with pytest.raises(GMTCLibError):
get_constant('A_WHOLE_LOT_OF_JUNK', lib)
def test_clib_session_management():
"Test that create and destroy session are called without errors"
lib = load_libgmt()
session1 = create_session(session_name='test_session1', libgmt=lib)
assert session1 is not None
session2 = create_session(session_name='test_session2', libgmt=lib)
assert session2 is not None
assert session2 != session1
destroy_session(session1, libgmt=lib)
destroy_session(session2, libgmt=lib)
def test_destroy_session_fails():
"Fail to destroy session when given bad input"
lib = load_libgmt()
with pytest.raises(GMTCLibError):
destroy_session(None, lib)
def test_call_module():
"Run a command to see if call_module works"
data_fname = os.path.join(TEST_DATA_DIR, 'points.txt')
out_fname = 'test_call_module.txt'
lib = load_libgmt()
session = create_session('test_call_module', lib)
call_module(session, 'gmtinfo', '{} -C ->{}'.format(data_fname, out_fname),
lib)
destroy_session(session, lib)
assert os.path.exists(out_fname)
with open(out_fname) as out_file:
output = out_file.read().strip().replace('\t', ' ')
assert output == '11.5309 61.7074 -2.9289 7.8648 0.1412 0.9338'
os.remove(out_fname)
def test_call_module_fails():
"Fails when given bad input"
lib = load_libgmt()
session = create_session('test_call_module_fails', lib)
with pytest.raises(GMTCLibError):
call_module(session, 'meh', '', lib)
destroy_session(session, lib)
def test_call_module_no_session():
"Fails when not in a session"
lib = load_libgmt()
with pytest.raises(GMTCLibError):
call_module(None, 'gmtdefaults', '', lib)
def test_context_manager():
"Test the LibGMT context manager"
with LibGMT() as lib:
lib.get_constant('GMT_SESSION_EXTERNAL')
lib.call_module('psbasemap', '-R0/1/0/1 -JX6i -Bafg')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__version__ = '1.0.1'
from sanic.log import logger
from sanic.request import Request
from .specification.create_hw_command_specification import (
create_hw_command_element_query, create_hw_command_element_rent_query
)
from .specification.get_hw_command_specification import (
get_hw_command_list_query, get_hw_command_list_count_query, get_hw_command_state_by_hw_module_id_query
)
__all__ = [
# SERVICES WORKING ON USER HW COMMAND TABLE
'get_user_hw_command_list', 'get_user_hw_command_list_count', 'create_user_hw_command_element',
'create_user_hw_command_element_rent', 'get_user_hw_command_state_by_traceable_object_id'
]
async def get_user_hw_command_list(
request: Request,
user_id: int = 0,
action_name: str = '',
action_type: str = '',
state: str = '',
limit: int = 0,
offset: int = 0) -> list:
""" Get get hw_command list ordered by id desc.
:param request:
:param user_id:
:param action_name:
:param action_type:
:param state:
:param limit:
:param offset:
:return:
"""
ret_val = []
query_str = get_hw_command_list_query
try:
if limit > 0:
query_str += ' ORDER BY uhc.id DESC LIMIT $5 OFFSET $6;'
async with request.app.pg.acquire() as connection:
rows = await connection.fetch(
query_str, user_id, action_name, action_type, state, limit, offset)
else:
query_str += ' ORDER BY uhc.id DESC;'
async with request.app.pg.acquire() as connection:
rows = await connection.fetch(query_str, user_id, action_name, action_type, state)
if rows is not None:
ret_val = [dict(x) for x in rows]
print(ret_val)
except Exception as gclerr:
logger.error('get_user_hw_command_list service erred with: {}'.format(gclerr))
return ret_val
async def get_user_hw_command_list_count(
request: Request,
user_id: int = 0,
action_name: str = '',
action_type: str = '',
state: str = '') -> int:
"""Get hw command list count.
:param request:
:param user_id:
:param action_name:
:param action_type:
:param state:
:return:
"""
ret_val = 0
query_str = get_hw_command_list_count_query
try:
async with request.app.pg.acquire() as connection:
row = await connection.fetchval(query_str, user_id, action_name, action_type, state)
if row is not None:
ret_val = row
except Exception as gclcerr:
logger.error('get_user_hw_command_list_count service erred with: {}'.format(gclcerr))
return ret_val
async def get_user_hw_command_state_by_traceable_object_id(
request: Request,
        traceable_object_id: int = 0) -> dict:
    """Get hw command state by traceable object id.
:param request:
:param traceable_object_id:
:return:
"""
    ret_val = {}
query_str = get_hw_command_state_by_hw_module_id_query
try:
async with request.app.pg.acquire() as connection:
row = await connection.fetchrow(query_str, traceable_object_id)
if row is not None:
ret_val = dict(row)
except Exception as gclcerr:
logger.error('get_user_hw_command_state_by_traceable_object_id service erred with: {}'.format(gclcerr))
return ret_val
async def create_user_hw_command_element(
request: Request,
user_id: object = None,
hw_action_id: object = None,
proto_field: str = '',
field_type: str = '',
value: str = '',
state: str = 'pending',
traceable_object_id: int = 0,
hw_module_id: int = 0,
ack_message: bool = True,
active: bool = True) -> dict:
""" Create user hw command element
:param request:
:param user_id:
:param hw_action_id:
:param proto_field:
:param field_type:
:param value:
:param state:
:param traceable_object_id:
:param hw_module_id:
:param ack_message:
:param active:
:return:
"""
    ret_val = {}
query_str = create_hw_command_element_query
try:
async with request.app.pg.acquire() as connection:
row = await connection.fetchrow(
query_str, user_id, hw_action_id, proto_field, field_type, value,
state, traceable_object_id, hw_module_id, ack_message, active)
if row is not None:
ret_val = dict(row)
except Exception as gclerr:
logger.error('create_user_hw_command_element service erred with: {}'.format(gclerr))
return ret_val
async def create_user_hw_command_element_rent(
request: Request,
user_id: object = None,
hw_action_id: object = None,
proto_field: str = '',
field_type: str = '',
value: str = '',
state: str = 'pending',
traceable_object_id: int = 0,
hw_module_id: int = 0,
ack_message: bool = True,
date_from: object = None,
date_to: object = None,
active: bool = True) -> dict:
""" Create user hw command element
:param request:
:param user_id:
:param hw_action_id:
:param proto_field:
:param field_type:
:param value:
:param state:
:param traceable_object_id:
:param hw_module_id:
:param ack_message:
:param date_from:
:param date_to:
:param active:
:return:
"""
    ret_val = {}
query_str = create_hw_command_element_rent_query
try:
async with request.app.pg.acquire() as connection:
row = await connection.fetchrow(
query_str, user_id, hw_action_id, proto_field, field_type, value,
state, traceable_object_id, hw_module_id, ack_message, date_from, date_to, active)
if row is not None:
ret_val = dict(row)
except Exception as gclerr:
logger.error('create_user_hw_command_element_rent service erred with: {}'.format(gclerr))
return ret_val
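# Usage sketch from a Sanic handler (illustrative; assumes request.app.pg is an asyncpg
# pool configured elsewhere, and the route and handler names are placeholders):
#
#   from sanic.response import json
#
#   @app.route('/hw-commands')
#   async def list_hw_commands(request):
#       rows = await get_user_hw_command_list(request, user_id=1, limit=10, offset=0)
#       return json(rows)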
|
from direct.distributed.DistributedObjectAI import DistributedObjectAI
from direct.directnotify import DirectNotifyGlobal
class DistributedJollyRogerAI(DistributedObjectAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedJollyRogerAI')
def __init__(self, air):
DistributedObjectAI.__init__(self, air)
|
import unittest
from katas.kyu_7.descending_order import Descending_Order
class DescendingOrderTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(Descending_Order(0), 0)
def test_equals_2(self):
self.assertEqual(Descending_Order(15), 51)
def test_equals_3(self):
self.assertEqual(Descending_Order(123456789), 987654321)
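# A minimal sketch of the function these tests exercise, assuming the usual kyu-7 kata
# behaviour (rearrange the digits of a non-negative integer in descending order); the
# actual katas.kyu_7.descending_order implementation may differ:
#
#   def Descending_Order(num):
#       return int(''.join(sorted(str(num), reverse=True)))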
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Parameter
from torch.autograd import Variable
"""
Desc:
compute MarginCosineProduct
Date:
2019/05/13
Author:
Jesse
Contact:
majie1@sensetime.com
"""
def cosine_sim(x1, x2, dim=1, eps=1e-8):
ip = torch.mm(x1, x2.t())
w1 = torch.norm(x1, 2, dim)
w2 = torch.norm(x2, 2, dim)
return ip / torch.ger(w1,w2).clamp(min=eps)
class MarginCosineProduct(nn.Module):
r"""large margin cosine distance 的实现 :
Args:
in_features: size of each input sample
out_features: size of each output sample
s: norm of input feature
m: margin
"""
def __init__(self, in_features, out_features, s=13.10, m=0.40):
super(MarginCosineProduct, self).__init__()
self.in_features = in_features
self.out_features = out_features
self.s = s
self.m = m
self.weight = Parameter(torch.Tensor(out_features, in_features))
nn.init.xavier_uniform_(self.weight)
# forward propagation
def forward(self, input, label):
# --------------------------- cos(theta) & phi(theta) ---------------------------
# cosine = F.linear(F.normalize(input), F.normalize(self.weight))
cosine = cosine_sim(input, self.weight)
one_hot = torch.zeros_like(cosine)
one_hot.scatter_(1, label.view(-1, 1), 1.0)
output = self.s * (cosine - one_hot * self.m)
return output
def __repr__(self):
return self.__class__.__name__ + '(' \
+ 'in_features=' + str(self.in_features) \
+ ', out_features=' + str(self.out_features) \
+ ', s=' + str(self.s) \
+ ', m=' + str(self.m) + ')'
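# A minimal usage sketch (illustrative only; the feature dimension, class count and
# batch size below are arbitrary placeholders, not values from the original project):
if __name__ == '__main__':
    layer = MarginCosineProduct(in_features=512, out_features=10)
    feats = torch.randn(4, 512)               # a batch of 4 embedding vectors
    labels = torch.randint(0, 10, (4,))       # ground-truth class indices
    logits = layer(feats, labels)             # shape (4, 10); typically fed to CrossEntropyLoss
    print(logits.shape)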
|
from direct.showbase.DirectObject import DirectObject
from bsp.leveleditor.ui.HistoryPanel import HistoryPanel
class ActionEntry:
def __init__(self, desc, action):
self.desc = desc
self.action = action
def do(self):
self.action.do()
def undo(self):
self.action.undo()
def modifiesState(self):
return self.action.modifiesState()
def cleanup(self):
self.action.cleanup()
self.action = None
self.desc = None
class ActionManager(DirectObject):
def __init__(self, doc):
DirectObject.__init__(self)
self.doc = doc
self.historyIndex = -1
self.savedIndex = -1
self.stateChangeIndex = -1
self.history = []
self.panel = HistoryPanel.getGlobalPtr()
self.accept('documentActivated', self.__onDocActivated)
def __onDocActivated(self, doc):
if doc == self.doc:
self.panel.setDoc(doc)
def cleanup(self):
self.doc = None
self.historyIndex = None
self.savedIndex = None
self.stateChangeIndex = None
for action in self.history:
action.cleanup()
self.history = None
def getCurrentStateChangeIndex(self):
if self.historyIndex == -1:
return -1
# Search back from the current history index to find the most recent state.
for i in range(self.historyIndex + 1):
idx = self.historyIndex - i
action = self.history[idx]
if action.modifiesState():
return idx
return -1
def documentSaved(self):
self.savedIndex = self.stateChangeIndex
def updateSaveStatus(self):
if self.stateChangeIndex != self.savedIndex:
self.doc.markUnsaved()
else:
self.doc.markSaved()
def moveToIndex(self, index):
if index >= len(self.history) or index < -1 or index == self.historyIndex:
return
if self.historyIndex > index:
for i in range(self.historyIndex - index):
self.undo(True)
else:
for i in range(index - self.historyIndex):
self.redo(True)
self.updateSaveStatus()
base.statusBar.showMessage("Move history index")
self.panel.updateHistoryIndex()
def undo(self, bulk = False):
# Anything to undo?
if len(self.history) == 0 or self.historyIndex < 0:
# Nope.
return
# Get at the current action and undo it
action = self.history[self.historyIndex]
action.undo()
# Move the history index back
self.historyIndex -= 1
if action.modifiesState():
self.stateChangeIndex = self.getCurrentStateChangeIndex()
if not bulk:
self.updateSaveStatus()
if not bulk:
base.statusBar.showMessage("Undo %s" % action.desc)
self.panel.updateHistoryIndex()
def redo(self, bulk = False):
# Anything to redo?
numActions = len(self.history)
if numActions == 0 or self.historyIndex >= numActions - 1:
return
# Redo the next action
self.historyIndex += 1
action = self.history[self.historyIndex]
action.do()
if action.modifiesState():
self.stateChangeIndex = self.getCurrentStateChangeIndex()
if not bulk:
self.updateSaveStatus()
if not bulk:
base.statusBar.showMessage("Redo %s" % action.desc)
self.panel.updateHistoryIndex()
def performAction(self, description, action):
# We are overriding everything after the current history index.
# If the history index is not at the end of the list,
# shave off everything from the current index to the end of the list.
if self.historyIndex < len(self.history) - 1:
first = self.historyIndex + 1
last = len(self.history) - 1
            for i in range(first, last + 1):  # include the final overridden entry
other = self.history[i]
other.cleanup()
del self.history[first:]
if self.savedIndex > self.historyIndex:
# If the saved index is ahead of the history index, the
# saved index is now invalid since those actions have
# been deleted.
self.savedIndex = -1
action.do()
self.history.append(ActionEntry(description, action))
self.historyIndex += 1
if action.modifiesState():
self.stateChangeIndex = self.getCurrentStateChangeIndex()
self.updateSaveStatus()
base.statusBar.showMessage(description)
self.panel.updateList()
|
from flask import request
from sqlalchemy import desc
from flask_restful import Resource, marshal_with
from ..fields import Fields
from app.models.models import SaleGroup, db
sale_group_fields = Fields().sale_group_fields()
class SaleGroupListAPI(Resource):
@marshal_with(sale_group_fields)
def get(self):
user_id = request.args.get("user")
query = SaleGroup.query
if user_id:
query = query.filter_by(user_id=user_id)
query = query.order_by(desc(SaleGroup.created_at))
return query.all()
@marshal_with(sale_group_fields)
def post(self):
return {}
class SaleGroupAPI(Resource):
@marshal_with(sale_group_fields)
def get(self, id):
sale_group = SaleGroup.query.get(id)
return sale_group
@marshal_with(sale_group_fields)
def delete(self, id):
sale_group = SaleGroup.query.get(id)
db.session.delete(sale_group)
db.session.commit()
sale_groups = SaleGroup.query.all()
return sale_groups
@marshal_with(sale_group_fields)
def put(self, id):
return {}
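# Registration sketch (illustrative; the Api instance, app object and URL rules are
# assumptions, not part of this module):
#
#   from flask_restful import Api
#   api = Api(app)
#   api.add_resource(SaleGroupListAPI, '/sale_groups')
#   api.add_resource(SaleGroupAPI, '/sale_groups/<int:id>')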
|
from datetime import datetime, timedelta
from django import forms
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.utils import timezone
from django.shortcuts import get_object_or_404, render
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView,
)
from .models import Task, Event, Routine, TimeSlot
from .scheduler import *
from .statistics import generate_overall_stats, generate_specific_stats
class IndexView(ListView):
# Locate the HTML template
template_name = "tasks/index.html"
# Name the data for use in the template
context_object_name = "task_list"
def get_queryset(self):
# Get all the tasks
return Task.objects.order_by("due_date")
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
# Get the done and todo tasks separately
context["todo_tasks"] = Task.objects.filter(done=False).order_by("due_date")
context["done_tasks"] = Task.objects.filter(done=True).order_by("due_date")
return context
class StatisticsView(ListView):
template_name = "tasks/statistics.html"
context_object_name = "recent_tasks"
def get_queryset(self):
# Get 5 most recent completed tasks
tasks = Task.objects.filter(done=True).order_by("-completion_time")[:5]
# Run the stats generator for them
generate_specific_stats(tasks)
# Return them
# return tasks
return Task.objects.filter(done=True).order_by("-completion_time")[:5]
def get_context_data(self, **kwargs):
context = super(StatisticsView, self).get_context_data(**kwargs)
# Generate the overall statistics dictionary
overall_stats = generate_overall_stats()
# Copy the stats into the context dictionary
for i in overall_stats:
context[i] = overall_stats[i]
return context
class EventView(ListView):
template_name = "tasks/event_index.html"
context_object_name = "event_list"
def get_queryset(self):
# Get the events in order of their date
return Event.objects.order_by("date")
def get_context_data(self, **kwargs):
context = super(EventView, self).get_context_data(**kwargs)
# Get today's events in time order
context["events_today"] = Event.objects.filter(date=datetime.today()).order_by(
"start_time"
)
# Likewise for today's routine events
context["routine_today"] = Routine.objects.filter(
day=datetime.today().weekday()
).order_by("start_time")
# Likeise for all events and routine events
context["events"] = Event.objects.order_by("date", "start_time")
context["routine"] = Routine.objects.order_by("day", "start_time")
return context
class ScheduleView(ListView):
template_name = "tasks/schedule.html"
context_object_name = "time_slots"
def get_queryset(self):
# Run the scheduler and get the TimeSlots it creates
update_schedule(datetime.today().weekday())
return TimeSlot.objects.filter(date=timezone.now().date()).order_by(
"start_time"
)
class TaskDetail(DetailView):
model = Task
template_name = "tasks/task_detail.html"
    # Provide a method to mark the task as done
    def task_done(self):
        self.get_object().mark_done()
    # Provide a method to mark the task as todo
    def task_todo(self):
        self.get_object().mark_todo()
    # Provide a method to alter the time
    def task_time_alter(self, mins):
        # Convert the inputted integer to a timedelta of that many minutes
        time_delta = timedelta(minutes=mins)
        # Run the task time-alteration method
        self.get_object().alter_time_spent(time_delta)
class EventDetail(DetailView):
model = Event
template_name = "tasks/event_detail.html"
class RoutineDetail(DetailView):
model = Routine
template_name = "tasks/routine_detail.html"
class TaskCreate(CreateView):
model = Task
# The fields the user is allowed to set when creating a task
fields = [
"title",
"description",
"due_date",
"due_time",
"time_estimate",
"priority",
]
# Provide specialised input for the due date and time
due_date = forms.DateField(widget=forms.SelectDateWidget(attrs={"type": "date"}))
due_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
class EventCreate(CreateView):
model = Event
fields = ["title", "date", "start_time", "end_time"]
date = forms.DateField(widget=forms.SelectDateWidget(attrs={"type": "date"}))
start_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
end_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
class RoutineCreate(CreateView):
model = Routine
fields = ["title", "day", "start_time", "end_time"]
start_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
end_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
class TaskUpdate(UpdateView):
model = Task
fields = [
"title",
"description",
"due_date",
"due_time",
"time_estimate",
"priority",
]
due_date = forms.DateField(widget=forms.SelectDateWidget(attrs={"type": "date"}))
due_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
template_name = "tasks/task_update_form.html"
class EventUpdate(UpdateView):
model = Event
fields = ["title", "date", "start_time", "end_time"]
date = forms.DateField(widget=forms.SelectDateWidget(attrs={"type": "date"}))
start_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
end_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
template_name = "tasks/event_update_form.html"
class RoutineUpdate(UpdateView):
model = Routine
fields = ["title", "day", "start_time", "end_time"]
start_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
end_time = forms.TimeField(widget=forms.TimeInput(attrs={"type": "time"}))
template_name = "tasks/routine_update_form.html"
class TaskDelete(DeleteView):
model = Task
# Return to the index on a successful completion
success_url = reverse_lazy("tasks:index")
class EventDelete(DeleteView):
model = Event
success_url = reverse_lazy("tasks:event_index")
class RoutineDelete(DeleteView):
model = Routine
success_url = reverse_lazy("tasks:event_index")
def mark_task_done(request, task_id):
task = get_object_or_404(Task, pk=task_id)
link = request.META.get("HTTP_REFERER", "/")
task.mark_done()
task.save()
return HttpResponseRedirect(link)
def mark_task_todo(request, task_id):
task = get_object_or_404(Task, pk=task_id)
link = request.META.get("HTTP_REFERER", "/")
task.mark_todo()
task.save()
return HttpResponseRedirect(link)
def change_time_spent(request, task_id):
task = get_object_or_404(Task, pk=task_id)
link = request.META.get("HTTP_REFERER", "/")
time = int(request.POST["input"])
tdelta = timedelta(minutes=time)
task.alter_time_spent(tdelta)
task.save()
return HttpResponseRedirect(link)
|
import numpy as np
sigmoid_range = 34.538776394910684
def sigmoid(x):
return 1.0 / (1.0 + np.exp(-np.clip(x, -sigmoid_range, sigmoid_range)))
def derivative_sigmoid(o):
return o * (1.0 - o)
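# Quick sanity check (illustrative): derivative_sigmoid expects the *output* of sigmoid,
# i.e. sigmoid'(x) = o * (1 - o) with o = sigmoid(x).
#   o = sigmoid(0.0)          # 0.5
#   derivative_sigmoid(o)     # 0.25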
# Three-layer neural network
class ThreeLayerNetwork:
    # Constructor
def __init__(self, inodes, hnodes, onodes, lr):
        # Number of nodes in each layer
self.inodes = inodes
self.hnodes = hnodes
self.onodes = onodes
self.e_o = []
self.e_h = []
self.o_i = []
self.x_h = []
self.o_h = []
self.x_o = []
self.o_o = []
        # Learning rate
self.lr = lr
        # Initialize the weights
self.w_ih = np.random.normal(0.0, 1.0, (self.hnodes, self.inodes))
self.w_ho = np.random.normal(0.0, 1.0, (self.onodes, self.hnodes))
        # Activation function and its derivative
self.af = sigmoid
self.daf = derivative_sigmoid
    # Backpropagation
def backprop(self, idata, tdata):
        # Convert to column vectors
o_i = np.array(idata, ndmin=2).T
t = np.array(tdata, ndmin=2).T
        # Hidden layer
x_h = np.dot(self.w_ih, o_i)
o_h = self.af(x_h)
        # Output layer
x_o = np.dot(self.w_ho, o_h)
o_o = self.af(x_o)
self.o_i.append(o_i)
        # Hidden layer
self.x_h.append(x_h)
self.o_h.append(o_h)
        # Output layer
self.x_o.append(x_o)
self.o_o.append(o_o)
        # Compute the errors
e_o = (t - o_o)
e_h = np.dot(self.w_ho.T, e_o)
self.e_o.append(e_o)
self.e_h.append(e_h)
def calc_weight(self, data, d):
N = len(data)
temp_ho = 0
temp_ih = 0
for n in range(N):
            # Compute the errors
e_o = (np.array(d[n], ndmin=2).T - self.o_o[n])
e_h = np.dot(self.w_ho.T, e_o)
# print(e_o)
# print(self.daf(self.o_o[n]))
# print(e_o * self.daf(self.o_o[n]))
# print(self.o_h[n].T)
# print(self.o_o[n])
temp_ho += np.dot((e_o * self.daf(self.o_o[n])), self.o_h[n].T)
temp_ih += np.dot((e_h * self.daf(self.o_h[n])), self.o_i[n].T)
        # Update w(2)_kj
        # Update w(1)_ji
        # Update the weights
# print(temp_ih)
self.w_ho += self.lr * temp_ho/N
self.w_ih += self.lr * temp_ih/N
    # Forward propagation
def feedforward(self, idata):
        # Convert the input list to a column vector
o_i = np.array(idata, ndmin=2).T
        # Hidden layer
x_h = np.dot(self.w_ih, o_i)
o_h = self.af(x_h)
        # Output layer
x_o = np.dot(self.w_ho, o_h)
o_o = self.af(x_o)
return o_o
def calc_error(self, data, d):
N = len(data)
result = 0
for n in range(N):
temp_k = 0
for k in range(self.onodes):
temp_k += (d[n][k]-self.o_o[n][k, 0])**2
result += temp_k
return result
if __name__ == '__main__':
    # Parameters
inodes = 2
hnodes = 2
onodes = 2
lr = 1
    # Initialize the neural network
nn = ThreeLayerNetwork(inodes, hnodes, onodes, lr)
class1 = np.loadtxt('./class1.csv', delimiter=',')
class2 = np.loadtxt('./class2.csv', delimiter=',')
data = np.concatenate((class1, class2), axis=0)
d_class1 = np.array([[1, 0] for i in range(len(class1))])
d_class2 = np.array([[0, 1] for i in range(len(class2))])
d = np.concatenate((d_class1, d_class2), axis=0)
    # Training
epoch = 500
for e in range(epoch):
print('#epoch ', e)
data_size = len(data)
for n in range(data_size):
nn.backprop(data[n], d[n])
nn.calc_weight(data, d)
print(nn.calc_error(data, d))
test = np.loadtxt('./test.csv', delimiter=',')
for n in range(len(test)):
print(nn.feedforward(test[n]))
|
n = int(input("Enter the length of the sequence: ")) # Do not change this line
num1 = 0
num2 = 0
num3 = 1
for i in range(n):
temp3 = num3
num3 = num1 + num2 + num3
if i > 1:
num1 = num2
num2 = temp3
print(num3)
# Hand trace of the first terms:
# 1 - 1+0+0
# 2 - 1+1+0
# 3 - 2+1+0
# 4 - 3+2+1
|
from django.db.models import Manager
from django.core.exceptions import ObjectDoesNotExist
from django.apps import apps
from rest_framework.exceptions import ValidationError, NotFound
from ..common.utils import on_time
class OrderManager(Manager):
@on_time
def place_order(self, **model_attributes):
"""
Place a new order for a given option and perform the respective validations
"""
option_id = model_attributes.pop('option_id')
try:
Option = apps.get_model('menu.Option')
option = Option.objects.get(pk=option_id)
except ObjectDoesNotExist:
raise NotFound(
{'detail': f'There\'s no any option with the id "{option_id}" in this menu'})
else:
if not option.menu.is_available():
raise ValidationError(
{'detail': f'The menu "{option.menu.name}" is not available'})
Menu = apps.get_model('menu.Menu')
user = model_attributes.get('user')
if not Menu.objects.has_ordered(menu=option.menu, user=user):
return self.model.objects.create(option=option, **model_attributes)
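# Usage sketch (illustrative; assumes this manager is attached to an Order model as
# `objects` and that the caller supplies validated keyword arguments):
#
#   order = Order.objects.place_order(option_id=42, user=request.user)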
|
from threading import Thread
import random, time
class Producer(Thread):
def __init__(self, queue, condition):
super(Producer, self).__init__()
self.queue = queue
self.condition = condition
def run(self):
nums = range(5)
while True:
self.condition.acquire()
num = random.choice(nums)
print "Produced", num
self.queue.append(num)
self.condition.notify()
self.condition.release()
time.sleep(random.random())
|
"""Factories for core application."""
import factory
from django.contrib.auth.models import Group
from . import models
class PermissionFactory(factory.django.DjangoModelFactory):
"""A base factory to handle permissions."""
class Meta(object):
abstract = True
@factory.post_generation
def set_permission(self, create, extracted, **kwargs):
if not create:
return
self.post_create(models.User.objects.filter(is_superuser=True).first())
class GroupFactory(factory.django.DjangoModelFactory):
"""A factory to create Group instances."""
class Meta(object):
model = Group
name = "DefaultGroup"
class UserFactory(PermissionFactory):
"""A factory to create User instances."""
class Meta(object):
model = models.User
django_get_or_create = ("username", )
email = factory.LazyAttribute(lambda a: a.username)
password = "{PLAIN}toto"
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for group in extracted:
self.groups.add(Group.objects.get(name=group))
class LogFactory(factory.django.DjangoModelFactory):
"""Factory for Log."""
class Meta(object):
model = models.Log
logger = "modoboa.admin"
message = "A message"
level = "info"
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 24 15:58:02 2017
@author: dgratz
"""
import numpy as np
from glob import glob
from readFile import readFile
import re
from ParameterSensitivity import ParamSensetivity
import matplotlib.pyplot as plt
from calcSync import calcTimeSync, calcSyncVarLen
import xml.etree.ElementTree as ET
pvars = list(readFile('D:/synchrony-data/2SAN1RandLogNormal/0/cell_0_0_dss0_pvars.tsv').keys())
pvars.append('cond')
#datadir = 'D:/synchrony-data/2SAN1RandLogNormal/'
#datadir = 'D:/synchrony-data/2SAN1RandLogNormalT2_038/'
#datadir = 'D:/synchrony-data/2SAN1RandLogNormalT4_0038/'
datadir = 'D:/synchrony-data/2SAN1RandLogNormalManyCond/'
filesPvars = glob(datadir+'*/*_pvars.tsv')
numData = len(filesPvars)//2
pvarsVals = np.zeros((2,numData,len(pvars)))
s = re.compile('/')
u = re.compile('_')
def getCond(file):
simvars = s.split(file)
simvars[-1] = 'simvars.xml'
simvars = '/'.join(simvars)
tree = ET.parse(simvars)
root = tree.getroot()
cond = root[1][0][0][1][1].text
return float(cond)
for file in filesPvars:
file = file.replace('\\','/')
temp = readFile(file)
fnames = s.split(file)
uparts = u.split(fnames[-1])
(row,col) = tuple(map(lambda x: int(x),filter(lambda x: x.isdigit(),uparts)))
num = int(fnames[-2])
for i,pvar in enumerate(pvars):
if pvar == 'cond':
pvarsVals[col,num,i] = getCond(file)
else:
pvarsVals[col,num,i] = temp[pvar]
props = ['vOld/peak','vOld/cl','vOld/min','vOld/maxt','caI/peak','caI/min','vOld/ddr']
filesProps = glob(datadir+'*/*dss0.tsv')
propsVals = np.zeros((2,numData,len(props)))
for file in filesProps:
file = file.replace('\\','/')
temp = readFile(file)
fnames = s.split(file)
uparts = u.split(fnames[-1])
(row,col) = tuple(map(lambda x: int(x),filter(lambda x: x.isdigit(),uparts)))
num = int(fnames[-2])
for i,prop in enumerate(props):
propsVals[col,num,i] = temp['cell'+str(row)+'_'+str(col)+'/'+prop]
'''
coefs = ParamSensetivity(pvarsVals[1,:,:],propsVals[1,:,:])
for i in range(coefs.shape[1]):
plt.figure()
plt.bar(range(len(pvars)),coefs[:,i],tick_label=pvars)
plt.title(props[i])
'''
'''
Synchrony Params Sens
'''
filesProps = glob(datadir+'*/*dt0.tsv')
dtPropsVals = np.zeros((1,2,numData,2),dtype='object')
bad = set()
for file in filesProps:
file = file.replace('\\','/')
temp = readFile(file)
fnames = s.split(file)
uparts = u.split(fnames[-1])
(row,col) = tuple(map(lambda x: int(x),filter(lambda x: x.isdigit(),uparts)))
num = int(fnames[-2])
dtPropsVals[0,col,num,0] = temp['cell'+str(row)+'_'+str(col)+'/vOld/peak']
dtPropsVals[0,col,num,1] = temp['cell'+str(row)+'_'+str(col)+'/vOld/maxt']
cls = temp['cell'+str(row)+'_'+str(col)+'/vOld/cl']
if np.std(cls[-10:-1]) > 20:
bad.add(num)
propsValsSync = np.zeros((numData,2))
for i in range(numData):
times,syncT,syncV = calcSyncVarLen(dtPropsVals[:,:,i,0],dtPropsVals[:,:,i,1])
propsValsSync[i,0] = np.nanmean(syncT)
propsValsSync[i,1] = np.nanmean(syncV)
# if propsValsSync[i,0] > 50:
# bad.add(i)
syncs = ['syncT','syncV']
bad = list(bad)
gpvarsVals = np.delete(pvarsVals,bad,axis=1)
gpropsValsSync = np.delete(propsValsSync,bad,axis=0)
syncCoefs = ParamSensetivity(gpvarsVals[1,:,:],gpropsValsSync)
for i in range(syncCoefs.shape[1]):
plt.figure()
plt.bar(range(len(pvars)),syncCoefs[:,i],tick_label=pvars)
plt.title(syncs[i]+' '+u.split(s.split(datadir)[-2])[-1])
|
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.patches import Rectangle
from skimage import io
def load_image(fname):
return io.imread(fname) / 256.
class Dataset:
def __init__(self, X, Y):
self.X = X
self.Y = Y
self._epochs_completed = 0
self._index_in_epoch = 0
self._num_examples = X.shape[0]
def next_batch(self, batch_size=20):
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
self._epochs_completed += 1
# Shuffle the data
perm = np.arange(self._num_examples)
np.random.shuffle(perm)
self.X = self.X[perm]
self.Y = self.Y[perm]
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self.X[start:end], self.Y[start:end]
def epoch_completed(self):
return self._epochs_completed
def show_image(image, labels):
rect = Rectangle((labels[0], labels[1]), labels[2]-labels[0], labels[3]-labels[1], edgecolor='r', fill=False)
plt.imshow(image)
gca = plt.gca()
gca.add_patch(rect)
def plot_images(images, labels):
fig = plt.figure()
plt.gray()
for i in range(min(9, images.shape[0])):
fig.add_subplot(3, 3, i+1)
show_image(images[i], labels[i])
plt.show()
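# A minimal usage sketch (illustrative; random arrays stand in for real images and boxes):
if __name__ == '__main__':
    X = np.random.rand(100, 64, 64)        # 100 grayscale images
    Y = np.random.rand(100, 4) * 64        # bounding boxes as [x1, y1, x2, y2]
    ds = Dataset(X, Y)
    batch_x, batch_y = ds.next_batch(20)   # reshuffles and restarts once the epoch is used up
    plot_images(batch_x, batch_y)          # shows up to 9 images with their boxes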
|
class Dagger():
damage = 3
def __init__(self, damage):
self.damage = damage
def get_damage(self):
return self.damage
class Longsword():
damage = 5
def __init__(self, damage):
self.damage = damage
def get_damage(self):
return self.damage
class Scimitar():
damage = 4
def __init__(self, damage):
self.damage = damage
def get_damage(self):
return self.damage
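# Usage sketch (illustrative):
#   weapons = [Dagger(3), Scimitar(4), Longsword(5)]
#   total_damage = sum(w.get_damage() for w in weapons)   # 12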
|
#!/usr/bin/env python
# Python script created by Lucas Hale
# Standard Python libraries
from __future__ import (absolute_import, print_function,
division, unicode_literals)
import os
import sys
import glob
import uuid
import shutil
from copy import deepcopy
# http://www.numpy.org/
import numpy as np
# http://pandas.pydata.org/
import pandas as pd
# https://github.com/usnistgov/DataModelDict
from DataModelDict import DataModelDict as DM
# https://github.com/usnistgov/atomman
import atomman as am
import atomman.lammps as lmp
import atomman.unitconvert as uc
# https://github.com/usnistgov/iprPy
import iprPy
# Define calc_style and record_style
calc_style = 'dislocation_Peierls_Nabarro_stress'
record_style = 'calculation_dislocation_Peierls_Nabarro_stress'
def main(*args):
"""Main function called when script is executed directly."""
# Read input script
with open(args[0]) as f:
input_dict = iprPy.tools.parseinput(f, singularkeys=singularkeys())
# Open database
dbase = iprPy.database_fromdict(input_dict)
# Pull out run_directory
run_directory = input_dict.pop('run_directory')
# Call prepare
prepare(dbase, run_directory, **input_dict)
def prepare(dbase, run_directory, **kwargs):
"""
High-throughput prepare function for the calculation.
Parameters
----------
dbase : iprPy.Database
The database to access and create new records for.
run_directory : str
The path to the local directory where the prepared calculation
instances will be placed.
**kwargs
Arbitrary keyword arguments.
"""
# Initialize Calculation instance
calculation = iprPy.Calculation(calc_style)
# Build record_df
record_df = dbase.get_records_df(style=record_style, full=False, flat=True)
# Build defect model record dictionary and df (single dbase access)
defect_record_dict = {}
defect_record_df = []
for defect_record in dbase.iget_records(style='dislocation_monopole'):
defect_record_dict[defect_record.name] = defect_record
defect_record_df.append(defect_record.todict())
defect_record_df = pd.DataFrame(defect_record_df)
# Limit by defect name
if 'dislocation_name' in kwargs:
defect_names = iprPy.tools.aslist(kwargs['dislocation_name'])
defect_selection = defect_record_df.id.isin(defect_names)
defect_record_df = defect_record_df[defect_selection]
# Get parent records
if 'parent_records' in kwargs:
parent_records = kwargs['parent_records']
else:
parent_records = iprPy.prepare.icalculations(dbase,
record_style = 'calculation_dislocation_Peierls_Nabarro',
symbol = kwargs.get('symbol_name', None),
family = kwargs.get('family_name', None),
potential = kwargs.get('potential_name', None))
load_record_dict = {}
gamma_record_dict = {}
# Loop over parent records
for parent_record in parent_records:
parent_dict = parent_record.todict()
# Get load_record from dbase only once per file
load_record_key = os.path.splitext(parent_dict['load_file'])[0]
if load_record_key in load_record_dict:
load_record = load_record_dict[load_record_key]
else:
load_record = dbase.get_record(name=load_record_key)
load_record_dict[load_record_key] = load_record
# Get gamma_record from dbase only once per file
gamma_record_key = parent_dict['gammasurface_calc_key']
if gamma_record_key in gamma_record_dict:
gamma_record = gamma_record_dict[gamma_record_key]
else:
gamma_record = dbase.get_record(name=gamma_record_key)
gamma_record_dict[gamma_record_key] = gamma_record
# Loop over defect model records with family name matching parent record
matches = defect_record_df['family'] == parent_dict['family']
defect_keys = defect_record_df[matches].id.tolist()
for defect_key in defect_keys:
defect_record = defect_record_dict[defect_key]
# Create calc_key
calc_key = str(uuid.uuid4())
# Define values for calc_*.in file
calc_dict = {}
calc_dict['load_file'] = load_record.name+'.xml'
calc_dict['load_style'] = 'system_model'
calc_dict['load_content'] = load_record.content
calc_dict['load_options'] = parent_dict['load_options']
calc_dict['dislocation_model'] = defect_record.name+'.xml'
calc_dict['dislocation_content'] = defect_record.content
calc_dict['gammasurface_model'] = gamma_record.name+'.xml'
calc_dict['gammasurface_content'] = gamma_record.content
calc_dict['peierlsnabarro_model'] = parent_record.name+'.xml'
calc_dict['peierlsnabarro_content'] = parent_record.content
for key in singularkeys():
if key in kwargs:
calc_dict[key] = kwargs[key]
# Build incomplete record
input_dict = {}
for key in calc_dict:
if calc_dict[key] != '':
input_dict[key] = deepcopy(calc_dict[key])
calculation.process_input(input_dict, calc_key, build=False)
model = iprPy.buildmodel(record_style,
'calc_' + calc_style,
input_dict)
new_record = iprPy.Record(name=calc_key,
content=model.xml(),
style=record_style)
# Check if record is new
if new_record.isnew(record_df=record_df):
# Assign '' to any unassigned keys
for key in unusedkeys()+singularkeys()+multikeys():
if key not in calc_dict:
calc_dict[key] = ''
# Add record to database
dbase.add_record(record=new_record)
# Generate calculation folder
calc_directory = os.path.join(run_directory, calc_key)
os.makedirs(calc_directory)
# Save inputfile to calculation folder
inputfile = iprPy.tools.filltemplate(calculation.template, calc_dict, '<', '>')
with open(os.path.join(calc_directory, 'calc_' + calc_style + '.in'), 'w') as f:
f.write(inputfile)
# Add calculation files to calculation folder
for calc_file in calculation.files:
shutil.copy(calc_file, calc_directory)
# Add load record file to calculation folder
with open(os.path.join(calc_directory,load_record.name+'.xml'), 'w') as f:
f.write(load_record.content)
# Add gamma record file to calculation folder
with open(os.path.join(calc_directory, gamma_record.name+'.xml'), 'w') as f:
f.write(gamma_record.content)
# Add parent record file to calculation folder
with open(os.path.join(calc_directory, parent_record.name+'.xml'), 'w') as f:
f.write(parent_record.content)
# Add defect record file to calculation folder
with open(os.path.join(calc_directory, defect_record.name+'.xml'), 'w') as f:
f.write(defect_record.content)
def unusedkeys():
"""
The calculation input parameters that are not prepare input parameters.
Returns
-------
list of str
The list of input parameter keys ignored by prepare.
"""
return [
'load_file',
'load_style',
'load_options',
'symbols',
'box_parameters',
'dislocation_model',
'gammasurface_model',
'peierlsnabarro_model',
]
def singularkeys():
"""
The prepare input parameters that can be assigned only one value.
Returns
-------
list of str
The list of input parameter keys that are limited to singular values.
"""
return [
'database',
'run_directory',
'delta_tau_xy',
'delta_tau_yy',
'delta_tau_yz',
'minimize_style',
'minimize_options',
'tausteps',
'fullstress',
'cdiffstress',
'length_unit',
'pressure_unit',
'energy_unit',
'force_unit',
]
def multikeys():
"""
The prepare input parameters that can be assigned multiple values.
Returns
-------
list of str
The list of input parameter keys that can have multiple values.
"""
return [
'potential_name',
'symbol_name',
'family_name',
'dislocation_name',
]
if __name__ == '__main__':
main(*sys.argv[1:])
|
#!/usr/bin/env python
import argparse
import yaml
import os
import io
import sys
import shutil
current_dir = os.path.dirname(os.path.realpath(__file__))
os.chdir(current_dir)
# consts
NEW_LINE = "\n"
NEW_LINE_1 = "\n "
CMD_CONJUCTION_1 = " && \\\n "
def generateImageDir(name, image, config):
dir = "images/" + name
if not os.path.exists(dir):
os.makedirs(dir)
for f in os.listdir(dir):
if f != "README.md":
os.remove(os.path.join(dir, f))
    print(dir)
generateDockerfile(dir, name, image, config)
def generateDockerfile(dir, name, image, config):
packages = config["packages"]
before_install_scripts = []
ppa = []
apt_get_packages = []
after_install_scripts = []
env = []
copy_files = []
if "env" in image:
for e in image["env"]:
env.append("ENV " + e)
for pkg_ref in image["packages"]:
if pkg_ref in packages:
pkg = packages[pkg_ref]
else:
print("not found package: " + pkg_ref)
exit(1)
# print pkg
if "apt-get" in pkg:
apt_get = pkg["apt-get"]
apt_get_packages.append(apt_get["name"])
if "ppa" in apt_get:
ppa.append("add-apt-repository -y ppa:" + apt_get["ppa"])
if "before_install" in pkg:
before_install = pkg["before_install"]
if "script" in before_install:
before_install_scripts.append(before_install["script"])
if "file" in before_install:
# copy_files.append(before_install["file"])
# before_install_scripts.append("/image/" + before_install["file"])
print("not support execute script file before install, script files have not been added.")
exit(1)
if "after_install" in pkg:
after_install = pkg["after_install"]
if "script" in after_install:
after_install_scripts.append(after_install["script"])
if "file" in after_install:
copy_files.append(after_install["file"])
after_install_scripts.append("/image/" + after_install["file"])
# print(before_install_scripts)
# print(apt_get_packages)
# print(after_install_scripts)
content = "FROM ubuntu:14.04" + NEW_LINE
content += NEW_LINE
if env:
content += NEW_LINE.join(env) + NEW_LINE
content += NEW_LINE
content += "RUN export DEBIAN_FRONTEND=noninteractive" + CMD_CONJUCTION_1 + \
"apt-get update" + CMD_CONJUCTION_1 + \
"apt-get install -y software-properties-common python-software-properties curl wget telnet make git zip unzip build-essential" + NEW_LINE
content += NEW_LINE
content += "RUN export DEBIAN_FRONTEND=noninteractive"
if before_install_scripts:
content += CMD_CONJUCTION_1 + CMD_CONJUCTION_1.join(before_install_scripts)
if ppa:
content += CMD_CONJUCTION_1 + CMD_CONJUCTION_1.join(ppa)
if apt_get_packages:
content += CMD_CONJUCTION_1 + "apt-get update && apt-get install -y " + " ".join(apt_get_packages)
content += NEW_LINE
content += NEW_LINE
if copy_files:
content += "ADD . /image" + NEW_LINE
content += NEW_LINE
if after_install_scripts:
content += "RUN export DEBIAN_FRONTEND=noninteractive" + CMD_CONJUCTION_1 + \
CMD_CONJUCTION_1.join(after_install_scripts)
content += NEW_LINE
# print("# Dockerfile\n" + content)
with open(dir + "/Dockerfile", "w") as writer:
writer.write(content)
for f in copy_files:
shutil.copy("scripts/" + f, dir + "/" + f)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="generate dockerfiles",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog='''
examples:
./generate.py generate files for all images
./generate.py --image nodejs generate files for nodejs
''')
parser.add_argument("-i", "--image", nargs='+', help="image name")
args = parser.parse_args()
with open("config.yml", "r") as stream:
config = yaml.load(stream)
images = config["images"]
# packages = config["packages"]
if not args.image:
        for name, image in images.items():
generateImageDir(name, image, config)
else:
for name in args.image:
if name in images:
generateImageDir(name, images[name], config)
else:
print("not found image: " + name)
exit(1)
print("success!")
|
def mean(mylist):
the_mean=sum(mylist)/len(mylist)
return the_mean
# print(mean([2,4,6,8]))
print(type(mean), type(sum))
|
# Scratch doc
import pandas as pd
import numpy as np
import geopandas as gpd
from shapely.geometry import Point, LineString
from geopy.distance import geodesic
from sodapy import Socrata
# Function to calculate distance between from_location and to_location
def get_dist(row):
# Get point tuples.
from_loc = Point(float(row['from_latitude']),float(row['from_longitude']))
to_loc = Point(float(row['to_latitude']),float(row['to_longitude']))
# Create line string.
st_path = LineString([(from_loc.x,from_loc.y), (to_loc.x,to_loc.y)])
    return st_path.length  # straight-line length in coordinate degrees, not metres or miles
# Function using geopy to get distance (geodesic)
def get_mi(row):
# Get tuples
from_loc = (float(row['from_latitude']), float(row['from_longitude']))
to_loc = (float(row['to_latitude']), float(row['to_longitude']))
# Get distance
dist = geodesic(from_loc, to_loc).miles
return dist
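# A minimal usage sketch (illustrative coordinates; a dict or pandas Series works as `row`):
if __name__ == '__main__':
    row = {'from_latitude': '41.88', 'from_longitude': '-87.63',
           'to_latitude': '41.97', 'to_longitude': '-87.65'}
    print(get_mi(row))   # geodesic distance in miles (roughly 6 for these two points)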
|
import os
import time
import logging
import threading
import paho.mqtt.client as mqtt
from queue import Queue
class mqttwrapper(threading.Thread):
def __init__(self,config,logger):
threading.Thread.__init__(self)
print(logger)
self._rootLogger = logger
_libName = str(__name__.rsplit('.', 1)[-1])
print(_libName)
self._log = logging.getLogger(logger + '.' + _libName + '.' + self.__class__.__name__)
# print(logger + '.'+ _libName + '.' + self.__class__.__name__)
self._log.debug('Create MQTT Wrapper Object')
self._stateConnection = False
def construct(self):
self._log.debug('Methode: construct ()')
self._mqttc = mqtt.Client(str(os.getpid()), clean_session=True)
self._mqttc.reconnect_delay_set(min_delay=10, max_delay=120)
self._mqttc.enable_logger(self._rootLogger)
self._mqttc.on_message = self.on_message
self._mqttc.on_connect = self.on_connect
self._mqttc.on_publish = self.on_publish
self._mqttc.on_subscribe = self.on_subscribe
self._mqttc.on_disconnect = self.on_disconnect
return True
def connect(self,host, port=1883, keepalive=60, bind_address=""):
self._log.debug('Methode: connect(%s, %d, %d)'%(host,port,keepalive))
_retry = 5
for _try in range(_retry):
self._mqttc.connect_async(host, port, keepalive, bind_address)
time.sleep(3)
if self._stateConnection:
                self._log.info('MQTT client connected to %s', host)
self._mqttc.loop_start()
return True
else:
self._log.info('MQTT failed to connect, try again %d',_try)
time.sleep(10)
        self._log.error('Failed to connect to %s', host)
return False
def on_connect(self, client, userdata, flags, rc):
self._log.debug('Methode: on_connect(%s, %s, %s , %s'%(client,userdata,flags,rc))
if rc == mqtt.CONNACK_ACCEPTED:
self._log.info('MQTT connected')
self._stateConnection = True
else:
self._log.error('MQTT failed to connect: {}'.format(rc))
self._stateConnection = False
# print(self._state)
return True
def run(self):
print('run thread')
self.construct()
self.connect('192.168.20.205')
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('simpleExample')
config1 = {'HOST': '192.168.1.107', 'PUBLISH': '/TEST', 'SUBSCRIBE': '/TEST/', 'CONFIG': '/TEST2/'}
mqttc = mqttwrapper(config1,'simpleExample')
mqttc.start()
|
import os
import sys
import asyncio
import string
import random
import time
import fnmatch
class random_creation():
def __init__(self, path,):
self.path = path
def string_generator(self, size,):
chars = string.ascii_uppercase + string.ascii_lowercase
return ''.join(random.choice(chars) for _ in range(size))
# data = string_generator(10)
    def send(self, name,):
        self.name = name
        # Generate a fresh random string and append it to <path>/<name>.txt
        data = self.string_generator(10)
        with open(f'{self.path}/{name}.txt', 'a+') as f:
            f.write(data + "\n")
# await asyncio.sleep(5)
if __name__ == '__main__':
list = sys.argv[2:]
path = os.path.realpath(sys.argv[1])
print(list)
h = random_creation(path)
# data = h.string_generator(10)
for i in list:
h.send(i)
# f = open(f'{path}/{i}.txt', 'a+')
# f.write(data +"\n")
# async def send():
# while True:
# for i in list:
# f = open(f'{path}/{i}.txt', 'a+')
# f.write(data +"\n")
# await asyncio.sleep(5)
# # time.sleep(5)
# for file_name in os.listdir('some_directory/'):
# if fnmatch.fnmatch(file_name, '*.txt'):
# print(file_name)
|
#!/usr/bin/python
# -*- coding: cp936 -*-
import sqlite3
import xlwt
from SQLiteQuery.capitalQuery import *
from SQLiteDataProcessing.userDayATradeUtility import *
'''
prerequisite: run getsheet2()
'''
class accountCapital:
def generateAccountCapitalExcelFromSQLite(self):
with sqlite3.connect('C:\sqlite\db\hxdata.db') as db:
workbookdes = xlwt.Workbook()
dst = workbookdes.add_sheet('accoutCapital')
cq = capitalQuery()
#return: dictionary: {effectivekhcode: effectivetradedate}
effectiveATradeUsersDict = userDayATradeUtility().geteffectiveATradeUsersAndDates()
            # fetch all accounts from the newaccount table
sqStatement = 'SELECT newaccount.khdate, newaccount.khcode, newaccount.usrnameshort, newaccount.usrname,\
newaccount.khusrmobile, newaccount.lddepid, newaccount.lddepname,\
newaccount.marketperid, newaccount.qdbm, newaccount.tjrsj, newaccount.marketdepid,\
newaccount.marketpername, newaccount.marketpertype, newaccount.marketpermobile, newaccount.marketdepname,\
newaccount.isLeftMarketPer\
FROM newaccount'
            # write the header row
dst.write(0, 0, '开户时间') # A
dst.write(0, 1, '交易账号') # B
dst.write(0, 2, '客户简称') # C
dst.write(0, 3, '客户名称') # D
dst.write(0, 4, '开户手机号') # E
            # temporary requirement
dst.write(0, 5, '7月31日合计资产余额') # E
dst.write(0, 6, '落地营业部代码') # G
dst.write(0, 7, '落地营业部名称') # H
dst.write(0, 8, '营销人员编码') # I
dst.write(0, 9, '营销人员名称') # J
dst.write(0, 10, '营销人员类别') # K
dst.write(0, 11, '营销人员手机号') # L
dst.write(0, 12, '营销营业部代码') # M
dst.write(0, 13, '营销营业部名称') # N
dst.write(0, 14, '营销人员变更前原值') # O
row = 1
for khdate, khcode, usrnameshort, usrname, \
khusrmobile, lddepid, lddepname, \
marketperid, qdbm, tjrsj, marketdepid, \
marketpername, marketpertype, marketpermobile, marketdepname , isLeftMarketPer\
in db.execute(sqStatement):
if str(khcode).strip() in effectiveATradeUsersDict:
#if '398000010171' in effectiveATradeUsersDict:
dst.write(row, 0, str(khdate))
dst.write(row, 1, str(khcode))
dst.write(row, 2, str(usrnameshort))
dst.write(row, 3, str(usrname))
dst.write(row, 4, str(khusrmobile))
                    # total asset balance on July 31
                    # look up the July 31 total asset balance for this khcode; only customers with a balance > 0 that day have data
# print(khcode)
zzc = cq.getZZCbyKHCodeAndDate(khcode, 20190731)
if zzc is not None:
#print(cq.getZZCbyKHCodeAndDate(khcode, 20190731)[0])
dst.write(row, 5, cq.getZZCbyKHCodeAndDate(khcode, 20190731)[0])
else:
dst.write(row, 5, '')
dst.write(row, 6, str(lddepid))
dst.write(row, 7, str(lddepname))
if str(khcode).strip() != '395000010066' and str(khcode).strip() != '395000010065' and str(
khcode).strip() != '398000010900':
dst.write(row, 8, str(marketperid))
dst.write(row, 9, str(marketpername))
dst.write(row, 10, str(marketpertype))
dst.write(row, 11, str(marketpermobile))
dst.write(row, 12, str(marketdepid))
                        dst.write(row, 13, str(marketdepname))  # marketing department name
                        # leavedMarketPeriOriginalId is either an 8-digit id or None
if isLeftMarketPer == 1 or isLeftMarketPer == 2:
                            # this marketing person has already left the company
dst.write(row, 14, '离职')
else:
# None
dst.write(row, 14, '')
else:
if str(khcode).strip() == '395000010066':
dst.write(row, 8, "39708036")
dst.write(row, 9, "陈凌")
dst.write(row, 10, "经纪人")
dst.write(row, 11, "15659100118")
dst.write(row, 12, "3970")
dst.write(row, 13, "3970 南平解放路证券营业部")
if str(khcode).strip() == '395000010065':
dst.write(row, 8, "31901042")
dst.write(row, 9, "李靖")
dst.write(row, 10, "财富管理师")
dst.write(row, 11, "13072940875")
dst.write(row, 12, "3190")
dst.write(row, 13, "3190 西安分公司")
if str(khcode).strip() == '398000010900':
dst.write(row, 8, "37809097")
dst.write(row, 9, "张多佳")
dst.write(row, 10, "财富管理师")
dst.write(row, 11, "18247130746")
dst.write(row, 12, "3780")
dst.write(row, 13, "3780 呼和浩特中山西路证券营业部")
row = row + 1
workbookdes.save('../output/effectiveATradeAccountCapital.xls')
# generate excel
a = accountCapital()
a.generateAccountCapitalExcelFromSQLite()
|
from random import shuffle
def start_situation(number_of_players, include_multicolor):
# check if given input is an integer and value 2-5
try:
number = int(number_of_players)
except ValueError:
print("error: please enter an integer for number of players")
return
else:
if number not in range(2, 6):
print("error: please select 2 to 5 players")
return
deck = [] # create an empty deck of cards
colors = ['green', 'blue', 'yellow', 'white', 'red']
numbers = [1, 1, 1, 2, 2, 3, 3, 4, 4, 5]
if include_multicolor: # if the game is with extra color, add it to the list
colors.append('multi')
cards_on_table = [] # current situation on the board
for c in colors: cards_on_table.append([0, c]) # add the initial cards
cards_needed = [] # current situation on the board
for c in colors: cards_needed.append([1, c]) # add the cards needed
# determine the number of cards that each player receives
    if number > 3:
number_of_cards = 4
else:
number_of_cards = 5
for color in colors:
for number in numbers:
deck.append([number, color]) # fill up the deck with the necessary cards
shuffle(deck) # shuffle the deck, this will be the card order for the rest of the game
    # the 'situation' is a list with one dict per player, each with two keys: 'cards' and 'information'
    situation = []
    for i in range(number): situation.append({'cards': [], 'information': []})
    # deal each player's opening hand
    for player in range(number):
for card in range(number_of_cards):
situation[player]['cards'].append(deck.pop())
return situation, deck, cards_on_table, cards_needed
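# A minimal usage sketch (illustrative):
if __name__ == '__main__':
    situation, deck, cards_on_table, cards_needed = start_situation(3, False)
    print(len(situation))   # 3 players, each holding 5 cards and no information yet
    print(len(deck))        # 35: 5 colours x 10 cards, minus the 15 cards dealt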
|
import argparse
import os
from tensorflow.contrib.learn.python.learn.utils import (
saved_model_export_utils)
from tensorflow.contrib.training.python.training import hparam
# ---------------------------------------------------
# Library used for loading a file from Google Storage
# ---------------------------------------------------
from tensorflow.python.lib.io import file_io
# ---------------------------------------------------
# Library used for uploading a file to Google Storage
# ---------------------------------------------------
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials
import tensorflow as tf
import matplotlib as mpl
mpl.use('agg')
import os
import matplotlib.pyplot as plt
import csv
import numpy as np
import GPy
from sklearn.preprocessing import normalize
GPy.plotting.change_plotting_library("matplotlib")
def MinMaxScaler(data):
''' Min Max Normalization
    Parameters
    ----------
    data : numpy.ndarray
        input data to be normalized, shape: [Batch size, dimension]
    Returns
    -------
    numpy.ndarray
        data rescaled column-wise to the [0, 1] range
'''
numerator = data - np.min(data, 0)
denominator = np.max(data, 0) - np.min(data, 0)
# noise term prevents the zero division
return numerator / (denominator + 1e-7)
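# Example (illustrative): each column is rescaled to [0, 1] independently, e.g.
#   MinMaxScaler(np.array([[1., 10.], [2., 20.], [3., 30.]]))
#   -> approximately [[0. , 0. ], [0.5, 0.5], [1. , 1. ]]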
def load_series(filename):
filename = filename[0] #filename : route (--train-files <route>)
try:
with file_io.FileIO(filename, mode='r') as csvfile:
print("===in load_series function, fileIO===")
csvreader = csv.reader(csvfile)
data = [row for row in csvreader if len(row) > 0]
return data
except IOError:
return None
def run_experiment(hparams):
data = load_series(hparams.train_files)
print("=====run experiment=====")
    # data is a list of strings, so cast it to a float numpy array
data = np.array(data)
data = np.delete(data, (0), axis=0)
data = data.astype(float)
#print(data)
#standardization
#xy = MinMaxScaler(data)
xy = data
x = xy[:,0:-1]
y = xy[:,-1]
#build a dataset
print("========data building started========")
data_X = []
data_Y = []
    size = int(len(y)*0.007)  # at first only a fraction of the data for checking; later restored to the full data
    for i in range(4300, size+4300):  # start from index 4300
_x = x[i]
_y = y[i]
data_X.append(_x)
data_Y.append(_y)
data_Y = np.reshape(data_Y, (-1, 1))
#print(data_X)
data_X = np.array(data_X)
data_Y = np.array(data_Y)
    # normalization, l1 norm (so the model does not react too strongly to anomalies?)
data_X = normalize(data_X, axis=0, norm='l1')
data_Y = normalize(data_Y, axis=0, norm='l1')
print(data_Y.ndim)
print(data_Y)
print("=====train/test split started=====")
#train/test split
train_size = int(len(data_Y)*0.8)
test_size = len(data_Y) - train_size
train_X, test_X = np.array(data_X[0:train_size]), np.array(data_X[train_size:len(data_X)])
train_Y, test_Y = np.array(data_Y[0:train_size]), np.array(data_Y[train_size:len(data_X)])
    # after fitting the model on the full data, just check whether each point falls outside the model's CI
#hyperparameter
input_dim = 52
variance = 1
lengthscale = 0.2
#kernel
kernel = GPy.kern.RBF(input_dim, variance = variance, lengthscale = lengthscale)
#modeling
print("========modeling started========")
model = GPy.models.GPRegression(data_X, data_Y, kernel)
model.optimize(messages = True)
print(model)
#predict
print("========predicting started========")
Y_pred, Var_pred = model.predict(data_X)
print("========Y_pred========")
print(Y_pred)
print("========Var_pred========")
print(np.sqrt(Var_pred)) ###
    # anomaly counting using the confidence interval (CI)
print("========counting anomaly started========")
total_anomaly = 0
anomaly_indexes = []
anomalys = []
test_size = len(data_Y)
print("test_size : {}".format(test_size))
for i in range(test_size):
if (Y_pred[i]-1.96*np.sqrt(Var_pred[i]) > data_Y[i] or Y_pred[i]+1.96*np.sqrt(Var_pred[i]) < data_Y[i]):
total_anomaly +=1
anomaly_indexes.append(i)
anomalys.append(data_Y[i])
print("total anomaly : {}".format(total_anomaly))
print("anomaly_indexes")
print(anomaly_indexes)
print("========saving graph started========")
    # plotting; save the figure straight to a file
plt.figure();
    plt.plot(data_Y, '.');  # blue: observed values
    plt.plot(Y_pred, '.');  # orange: predicted mean
plt.plot(np.add(Y_pred, 1.96*np.sqrt(Var_pred)))
plt.plot(np.add(Y_pred, -1.96*np.sqrt(Var_pred)))
plt.xlabel("Time Index")
plt.ylabel("sum of wait time")
plt.savefig('GP_RBF_only_new_6_small_range.png')
credentials = GoogleCredentials.get_application_default()
service = discovery.build('storage', 'v1', credentials=credentials)
filename = 'GP_RBF_only_new_6_small_range.png'
bucket = 'im5-os-stat-wait-gp'
body = {'name': 'RBF_only/GP_RBF_only_new_6_small_range.png'}
req = service.objects().insert(bucket=bucket, body=body, media_body=filename)
resp = req.execute()
plt.show()
if __name__ == '__main__':
# ---------------------------------------------
# command parsing from Google ML Engine Example
# ---------------------------------------------
parser = argparse.ArgumentParser()
# Input Arguments
parser.add_argument(
'--train-files',
help='GCS or local paths to training data',
nargs='+',
required=True
)
parser.add_argument(
'--num-epochs',
help="""\
Maximum number of training data epochs on which to train.
If both --max-steps and --num-epochs are specified,
the training job will run for --max-steps or --num-epochs,
whichever occurs first. If unspecified will run for --max-steps.\
""",
type=int,
)
parser.add_argument(
'--train-batch-size',
help='Batch size for training steps',
type=int,
default=40
)
parser.add_argument(
'--eval-batch-size',
help='Batch size for evaluation steps',
type=int,
default=40
)
# -------------------------------
# If evaluation file is prepared,
# change 'required' value
# -------------------------------
parser.add_argument(
'--eval-files',
help='GCS or local paths to evaluation data',
nargs='+',
required=False
)
# Training arguments
parser.add_argument(
'--embedding-size',
help='Number of embedding dimensions for categorical columns',
default=8,
type=int
)
parser.add_argument(
'--first-layer-size',
help='Number of nodes in the first layer of the DNN',
default=100,
type=int
)
parser.add_argument(
'--num-layers',
help='Number of layers in the DNN',
default=4,
type=int
)
parser.add_argument(
'--scale-factor',
help='How quickly should the size of the layers in the DNN decay',
default=0.7,
type=float
)
parser.add_argument(
'--job-dir',
help='GCS location to write checkpoints and export models',
required=True
)
# Argument to turn on all logging
parser.add_argument(
'--verbosity',
choices=[
'DEBUG',
'ERROR',
'FATAL',
'INFO',
'WARN'
],
default='INFO',
)
# Experiment arguments
parser.add_argument(
'--train-steps',
help="""\
Steps to run the training job for. If --num-epochs is not specified,
this must be. Otherwise the training job will run indefinitely.\
""",
type=int
)
parser.add_argument(
'--eval-steps',
        help='Number of steps to run evaluation for at each checkpoint',
default=100,
type=int
)
parser.add_argument(
'--export-format',
help='The input format of the exported SavedModel binary',
choices=['JSON', 'CSV', 'EXAMPLE'],
default='JSON'
)
args = parser.parse_args()
# Set python level verbosity
tf.logging.set_verbosity(args.verbosity)
# Set C++ Graph Execution level verbosity
os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(
tf.logging.__dict__[args.verbosity] / 10)
# Run the training job
hparams=hparam.HParams(**args.__dict__)
run_experiment(hparams)
|
import math
x = math.pi/4
val = math.sin(x)**2 + math.cos(x)**2
print val
v = 3 #m/s
t = 1 #sec
a = 2 #m/s**2
s = ((v * t) + ((1.0/2.0) * a * (t**2)))
print s, "meters"
a = float(input("Enter the first number"))
b = float(input("Enter the second number"))
e1 = ((a+b)**2)
e2 = (a**2)+(2*a*b)+(b**2)
# compare with a small relative tolerance rather than exact float equality
if abs(e1 - e2) <= 1e-9 * max(abs(e1), abs(e2), 1.0):
    print "The equations are equal & verified."
else:
    print "The equations are not equal & not verified."
|
from torch import gt
from backpack.core.derivatives.elementwise import ElementwiseDerivatives
class ReLUDerivatives(ElementwiseDerivatives):
def hessian_is_zero(self):
"""`ReLU''(x) = 0`."""
return True
def df(self, module, g_inp, g_out):
"""First ReLU derivative: `ReLU'(x) = 0 if x < 0 else 1`."""
return gt(module.input0, 0).float()
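# Minimal sketch of how `df` behaves; `_Dummy`, the torch import and the sample tensor are
# illustrative assumptions (in practice BackPACK attaches `input0` to the module itself):
#   import torch
#   class _Dummy: pass
#   m = _Dummy(); m.input0 = torch.tensor([-1.0, 0.0, 2.0])
#   ReLUDerivatives().df(m, None, None)   # -> tensor([0., 0., 1.])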
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
from absl import flags
import numpy as np
import cv2
from tqdm import tqdm
import skimage.io as io
import tensorflow as tf
from src.util import renderer as vis_util
from src.util import image as img_util
from src.util import openpose as op_util
import src.config
from src.RunModel import RunModel
flags.DEFINE_string('img_path', 'data/im1963.jpg', 'Image to run')
gpu = "0"
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
os.environ["CUDA_VISIBLE_DEVICES"] = gpu
def preprocess_image(img_path, json_path=None):
img = io.imread(img_path)
if img.shape[2] == 4:
img = img[:, :, :3]
if np.max(img.shape[:2]) != config.img_size:
# print('Resizing so the max image size is %d..' % config.img_size)
scale = (float(config.img_size) / np.max(img.shape[:2]))
else:
scale = 1.
center = np.round(np.array(img.shape[:2]) / 2).astype(int)
# image center in (x,y)
center = center[::-1]
crop, proc_param = img_util.scale_and_crop(img, scale, center,
config.img_size)
# Normalize image to [-1, 1]
crop = 2 * ((crop / 255.) - 0.5)
return crop, proc_param, img
def main(pw3d_eval_path, paired=True):
"""
This function isn't really doing evaluation on 3DPW - it just runs HMR on each 3DPW frame and stores the output.
There is (or will be) a separate script in the pytorch_indirect_learning repo that will do the evaluation and metric
computations.
"""
sess = tf.Session()
model = RunModel(config, sess=sess)
cropped_frames_dir = os.path.join(pw3d_eval_path, 'cropped_frames')
save_path = '/data/cvfs/as2562/hmr/evaluations/3dpw'
if not paired:
save_path += '_unpaired'
if not os.path.isdir(save_path):
os.makedirs(save_path)
eval_data = np.load(os.path.join(pw3d_eval_path, '3dpw_test.npz'))
frame_fnames = eval_data['imgname']
fname_per_frame = []
pose_per_frame = []
shape_per_frame = []
verts_per_frame = []
cam_per_frame = []
for frame in tqdm(frame_fnames):
image_path = os.path.join(cropped_frames_dir, frame)
input_img, proc_param, img = preprocess_image(image_path)
# Add batch dimension: 1 x D x D x 3
input_img = np.expand_dims(input_img, 0)
joints, verts, cams, joints3d, thetas = model.predict(input_img, get_theta=True)
poses = thetas[:, 3:3+72]
shapes = thetas[:, 3+72:]
fname_per_frame.append(frame)
pose_per_frame.append(poses)
shape_per_frame.append(shapes)
verts_per_frame.append(verts)
cam_per_frame.append(cams)
fname_per_frame = np.array(fname_per_frame)
np.save(os.path.join(save_path, 'fname_per_frame.npy'), fname_per_frame)
pose_per_frame = np.concatenate(pose_per_frame, axis=0)
np.save(os.path.join(save_path, 'pose_per_frame.npy'), pose_per_frame)
shape_per_frame = np.concatenate(shape_per_frame, axis=0)
np.save(os.path.join(save_path, 'shape_per_frame.npy'), shape_per_frame)
verts_per_frame = np.concatenate(verts_per_frame, axis=0)
np.save(os.path.join(save_path, 'verts_per_frame.npy'), verts_per_frame)
cam_per_frame = np.concatenate(cam_per_frame, axis=0)
np.save(os.path.join(save_path, 'cam_per_frame.npy'), cam_per_frame)
if __name__ == '__main__':
config = flags.FLAGS
config(sys.argv)
# Using pre-trained model, change this to use your own.
config.load_path = src.config.PRETRAINED_MODEL
if src.config.PRETRAINED_MODEL.endswith('model.ckpt-667589'):
paired = True
    elif src.config.PRETRAINED_MODEL.endswith('model.ckpt-99046'):
        paired = False
        print('Using unpaired model!')
    else:
        raise ValueError('Unrecognised pretrained model: {}'.format(src.config.PRETRAINED_MODEL))
config.batch_size = 1
renderer = vis_util.SMPLRenderer(face_path=config.smpl_face_path)
main(pw3d_eval_path=config.img_path, paired=paired)
|
class JobRegistry(set):
@property
def classes(self):
return self
JobRegistry = JobRegistry()
from .cleanup import *
from .comment import *
from .commit import *
from .event import *
from .general import *
from .githubuser import *
from .issue import *
from .label import *
from .milestone import *
from .repository import *
from .tokens import *
from .project import *
|
import itertools  # Python library for iterators
import timeit  # Python library for the timer
import sys  # Python library for command-line arguments
# REFERENCES CONSULTED
# Class itertools: https://docs.python.org/3/library/itertools.html#itertools.count
# Video tutorial: https://www.youtube.com/watch?v=fDlg3DTzHrk
# Make your own iterator: https://stackoverflow.com/questions/19151/build-a-basic-python-iterator
# Operate with strings: https://www.learnpython.org/en/Basic_String_Operations
# Read-Write files: https://www.pythonforbeginners.com/files/reading-and-writing-files-in-python
# Python iterator examples: https://www.w3schools.com/python/python_iterators.asp
# Python timer: https://stackoverflow.com/questions/5622976/how-do-you-calculate-program-run-time-in-python
# Python tuple elements: https://www.tutorialspoint.com/python/python_tuples.htm
# Python concatenate str and float: https://stackoverflow.com/questions/16948256/cannot-concatenate-str-and-float-objects
# Python command line: https://www.tutorialspoint.com/python/python_command_line_arguments.htm
__author__ = "Andres Cabrera, Alberto Ramos, Karan Sainani"
__date__ = "$11-nov-2018 11:31:17$"
# 1- Read the data file
file = open(sys.argv[1], "r")  # open the file in read mode
number = int(file.readline())  # length of the code (digits per combination) to generate
codigo = file.readline()  # code read from the file
# 2- Show the data to process
print("El codigo a obtener es: " + codigo)
# 3- Generate the candidate combinations with itertools.product()
permut = itertools.product("0123456789", repeat=number)
# flag to record whether the combination has been found
found = False
# start the timer
start = timeit.default_timer()
# 4- Iterate over the tuples and compare against the code
for i in permut:  # "i" is each generated tuple
if(number != len(codigo)):
break
    pos = 0
    #print(i)  # tuples
    tuple_values = ""
    while pos != number:  # walk through the elements of the tuple
        tuple_values = tuple_values + i[pos]
        pos = pos+1
if(tuple_values == codigo):
found = True
break
# stop the timer
stop = timeit.default_timer()
if found:
print("ENCONTRADO " + tuple_values)
else:
print("CODIGO NO ENCONTRADO")
# compute the elapsed time
tiempoMedio = float(stop - start)*1000  # elapsed time in milliseconds
msgTiempoMedio = str(tiempoMedio)  # cast to string (cannot concatenate str and float)
print("El tiempo medio ha sido " + msgTiempoMedio + " milisegundos\n")
|
from django.db import models
from core.models import UserProfile
from django.contrib.auth.models import User
from tastypie.models import ApiKey
import datetime
#class PhotoUrl(models.Model):
# url = models.CharField(max_length=128)
# uploaded = models.DateTimeField()
#
# def save(self):
# self.uploaded = datetime.datetime.now()
# models.Model.save(self)
class FriendInvite(models.Model):
INITIAL_STATE = 'pending'
INVITE_STATES = (
('pending', 'Pending'),
('accepted', 'Accepted'),
('rejected', 'Rejected'),
)
creator = models.ForeignKey(User, related_name="friend_invite_creator")
invited = models.ForeignKey(User, related_name="friend_invite_invited")
state = models.CharField(max_length=30, choices=INVITE_STATES, default=INITIAL_STATE)
def __unicode__(self):
return u'%s => %s "%s"' % (self.creator, self.invited, self.state)
class BetAccount(UserProfile):
friends = models.ManyToManyField(User, blank=True, null=True, related_name='bet_account_friends')
friend_invites = models.ManyToManyField(FriendInvite, blank=True, null=True)
monies = models.IntegerField(default=1000)
wins = models.IntegerField(default=0)
losses = models.IntegerField(default=0)
profile_image_url = models.CharField(max_length=2048,blank=True, null=True)
def __unicode__(self):
return u'%s' % self.user
def get_key(self):
try:
key = ApiKey.objects.get(user=self.user)
return key.key
except ApiKey.DoesNotExist:
return 'No key found'
def to_dict(self):
store = {}
store['id'] = self.id
store['username'] = self.user.username
store['slug'] = self.slug
store['api_token'] = self.get_key()
store['monies'] = self.monies
store['wins'] = self.wins
store['losses'] = self.losses
store['date_joined'] = self.user.date_joined.strftime('%Y-%m-%dT%H:%M:%S-000')
if self.profile_image_url and len(self.profile_image_url) > 0:
store['profile_image_url'] = self.profile_image_url
store_friends = []
for friend in self.friends.all():
store_friends.append(friend.username)
store['friends'] = store_friends
store_friend_invites = []
for invite in self.friend_invites.all():
store_friend_invites.append([invite.invited.username, invite.state])
store['friend_invites'] = store_friend_invites
return store
    def authenticate(self, slug, api_token):
        # first try the API token, then fall back to the slug
        try:
            ApiKey.objects.get(user=self.user, key=api_token)
            print u'Bet user/api_token match'
            return True
        except ApiKey.DoesNotExist:
            pass
        try:
            BetAccount.objects.get(user=self.user, slug=slug)
            print u'Bet user/slug match'
            return True
        except BetAccount.DoesNotExist:
            return False
class AdminAccount(UserProfile):
    def __unicode__(self):
        return u'%s' % self.user
|
import json
class InsightsEndpointsMixin(object):
"""For endpoints in related to insights functionality."""
def insights(self):
"""
Get insights
:param day:
:return:
"""
params = {
'locale': 'en_US',
'vc_policy': 'insights_policy',
'surface': 'account',
'access_token': 'undefined',
'fb_api_caller_class': 'RelayModern',
'variables': json.dumps(
{
'IgInsightsGridMediaImage_SIZE': 240,
'timezone': 'Asia/Jakarta',
'activityTab': 'true',
'audienceTab': 'true',
'contentTab': 'true',
'query_params': json.dumps(
{'access_token': '', 'id': self.authenticated_user_id}
),
}
),
'doc_id': '1926322010754880',
}
res = self._call_api('ads/graphql/', query=params, unsigned=True)
return res
|
'''Finding Numbers in a Haystack
In this assignment you will read through and parse a file with text and numbers. You will extract all the numbers in the file and compute the sum of the numbers.
Data Files
We provide two files for this assignment. One is a sample file where we give you the sum for your testing and the other is the actual data you need to process for the assignment.
Sample data: http://py4e-data.dr-chuck.net/regex_sum_42.txt (There are 90 values with a sum=445833)
Actual data: http://py4e-data.dr-chuck.net/regex_sum_1312428.txt (There are 78 values and the sum ends with 165)
These links open in a new window. Make sure to save the file into the same folder as you will be writing your Python program. Note: Each student will have a distinct data file for the assignment - so only use your own data file for analysis.
'''
import re
hand=open('example.txt')
num_list=[]
for line in hand:
line=line.rstrip()
num=re.findall('[0-9]+',line)
    num_list.extend(int(i) for i in num)
print(sum(num_list))
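# Note: 'example.txt' stands in for the sample file here; for the graded run, point open()
# at the data file you downloaded (e.g. a regex_sum_*.txt file as described above).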
|
import pika
import sys, os
import time
import uuid
class RpcServer(object):
def __init__(self):
self.conn = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
self.channel = self.conn.channel()
    def fib(self, n):  # main logic: Fibonacci numbers. The request-processing logic goes here.
if n == 0:
return 0
elif n == 1:
return 1
else:
return self.fib(n - 1) + self.fib(n - 2)
def on_request(self, channel, method, properties, body):
print('----------------------------------------')
print('正在消费的消息:====>%s' % body)
time.sleep(5)
print('消息的相关属性为:')
print(properties)
value = self.fib(int(body))
print('原值: ', body, '斐波那契的运行结果: ', value)
print('将计算的运行结果返回给RPC客户端....')
self.channel.basic_publish(exchange='',
routing_key=properties.reply_to,
body=str(value),
properties=pika.BasicProperties(
# delivery_mode=2,
correlation_id=properties.correlation_id,
))
self.channel.basic_ack(delivery_tag=method.delivery_tag)
print('----------------------------------------')
def call_back(self):
self.channel.basic_qos(prefetch_count=2)
self.channel.basic_consume(consumer_callback=self.on_request,
queue='rpc_queue',
no_ack=False)
def start_consume(self):
self.channel.start_consuming()
if __name__ == '__main__':
fibonaci = RpcServer()
fibonaci.call_back()
fibonaci.start_consume()
|
import time, unittest, os, sys
from selenium import webdriver
from main.page.desktop_v3.login.pe_login import *
from main.page.desktop_v3.product.pe_product import *
from main.activity.desktop_v3.activity_login import *
from main.activity.desktop_v3.activity_product import *
from utils.function.setup import *
from utils.lib.user_data import *
class TestDebug(unittest.TestCase):
_site = "live"
def setUp(self):
#self.driver = webdriver.Chrome("C:\driver\chromedriver\chromedriver.exe")
self.driver = webdriver.Firefox()
#self.driver = tsetup('phantomjs')
self.login = loginActivity()
self.product = ProductActivity()
self.user = user5
def test_1_move_to_warehouse(self):
print('==================================')
print('TEST : MOVE PRODUCT TO WAREHOUSE')
print('==================================')
self.login.do_login(self.driver, self.user, self.user['email'], self.user['password'], self._site)
self.product.setObject(self.driver)
self.product.move_product_to_warehouse(self._site, self.user['domain'])
def test_2_move_to_etalase(self):
print('==================================')
print('TEST : MOVE PRODUCT TO ETALASE')
print('==================================')
self.login.do_login(self.driver, self.user, self.user['email'], self.user['password'], self._site)
self.product.setObject(self.driver)
self.product.move_product_to_etalase(self._site, self.user['domain'])
def tearDown(self):
print("Testing akan selesai dalam beberapa saat..")
time.sleep(5)
self.driver.quit()
# main
if(__name__ == "__main__"):
unittest.main(warnings="ignore")
|
from graphics import *
import time
win=GraphWin("MIDPOINT ELLIPSE DRAWING ALGORITHM",900,900)
def main():
xc=int(input())
yc=int(input())
rx=int(input())
ry=int(input())
ellipse(xc,yc,rx,ry)
win.getMouse()
win.close()
def ellipse(xc,yc,rx,ry):
p=ry*ry-rx*rx*ry+rx*rx/4
x=0
y=ry
while(2.0*ry*ry*x <= 2.0*rx*rx*y):
if(p < 0):
x=x+1
p=p+2*ry*ry*x+ry*ry
else:
x=x+1
y=y-1
            p=p+2*(ry*ry*x)-2*rx*rx*y-ry*ry
put_pixel(xc+x,yc+y,"red")
put_pixel(xc+x,yc-y,"red")
put_pixel(xc-x,yc+y,"red")
put_pixel(xc-x,yc-y,"red")
p=ry*ry*(x+0.5)*(x+0.5)+rx*rx*(y-1)*(y-1)-rx*rx*ry*ry
while(y > 0):
if(p <= 0):
x=x+1
y=y-1
p=p+2*ry*ry*x-2*rx*rx*y+rx*rx
else:
y=y-1
p=p-2*rx*rx*y+rx*rx
put_pixel(xc+x,yc+y,"red")
put_pixel(xc+x,yc-y,"red")
put_pixel(xc-x,yc+y,"red")
put_pixel(xc-x,yc-y,"red")
def put_pixel(x,y,color="red"):
global win
p= Point(x,y)
p.setFill(color)
p.draw(win)
time.sleep(.005)
main()
|
from gym_connect_four import RandomPlayer
class Vlada(RandomPlayer):
""" Clone of RandomPlayer for runner.py illustration purpose """
pass
|
from node import Node
import threading
f = open("input", 'r')
lines = f.readlines()
n = int(lines[0])
nodes = {}
for i in range(1, n+1):
nodes[i] = Node(uid=i, network_size=n)
uid = -1
for i in range((n * n) + 1):
if len(lines[i].split()) == 4:
tokens = lines[i].split()
uid = int(tokens[0])
nodes[uid].start_delay = int(tokens[1])
nodes[uid].potential_leader_time_out = int(tokens[2])
nodes[uid].propose_time_out = int(tokens[3])
elif len(lines[i].split()) == 2:
tokens = lines[i].split()
address = nodes[int(tokens[0])].address
nodes[uid].stream.add_sender(address, float(tokens[1]))
nodes[uid].outgoing_addresses[int(tokens[0])] = address
for node in nodes.values():
threading.Thread(target=node.run).start()
|
import requests
class YunPian(object):
def __init__(self, api_key):
self.api_key = api_key
self.single_send_url = 'https://sms.yunpian.com/v2/sms/single_send.json'
def send_sms(self,code ,mobile):
parmas = {
"apikey": self.api_key,
"mobile": mobile,
"text": "【陈辉】您的验证码是#code#。有效期为1分钟,请尽快验证".format(code=code)
}
response = requests.post(self.single_send_url, data=parmas)
import json
re_dict = json.loads(response.text)
return re_dict
# if __name__ == '__main__':
#     yun_pian = YunPian('261ee184839ff91be6d425c01cd416f2')
#     yun_pian.send_sms('1234', '15659766216')
|
import socketserver
import os
import sys
import time
import threading
ip_port=("172.18.0.3",19984)
class MyServer(socketserver.BaseRequestHandler):
def handle(self):
print("conn is :",self.request) # conn
print("addr is :",self.client_address) # addr
while True:
try:
self.str = self.request.recv(8)
data = bytearray(self.str)
headIndex = data.find(b'\xff\xaa\xff\xaa')
print(headIndex)
if headIndex == 0:
allLen = int.from_bytes(data[headIndex+4:headIndex+8], byteorder='little')
print("len is ", allLen)
curSize = 0
allData = b''
while curSize < allLen:
data = self.request.recv(1024)
allData += data
curSize += len(data)
print("recv data len is ", len(allData))
                    # in the received data, the first 64 bytes are the GUID, the rest is the image data
                    arrGuid = allData[0:64]
                    # strip the trailing zero bytes from the GUID
                    tail = arrGuid.find(b'\x00')
                    arrGuid = arrGuid[0:tail]
strGuid = str(int.from_bytes(arrGuid, byteorder = 'little')) #for test
print("-------------request guid is ", strGuid)
imgData = allData[64:]
strImgFile = "2.jpg"
print("img file name is ", strImgFile)
                    # save the image data to a local file
                    with open(strImgFile, 'wb') as f:
                        f.write(imgData)
break
except Exception as e:
print(e)
break
if __name__ == "__main__":
s = socketserver.ThreadingTCPServer(ip_port, MyServer)
print("start listen")
s.serve_forever()
|
import numpy as np
import matplotlib.pyplot as plt
from stuckpy.image.text import string_to_array
def append_scale_bar(image, scale):
rows, cols = image.shape
white = np.max(image)
# Add a blank region at the bottom for the meta data
res_factor = int(rows/1000) # 1 per 1000 pixels of resolution
scale_height = res_factor * 50 #50 pixels / 1000 pixels of resolution.
    image = np.append(image, np.zeros((scale_height, cols)), axis=0)
# Create a scale bar that is roughly 1/5 the width of the image
nm_perfect = int(round(cols * scale / 5, 0))
digits = len(str(nm_perfect))
div = 10**(digits - 1)
nm = int(nm_perfect / div) * div
pixels = round(nm/scale)
bar = np.zeros((scale_height, pixels))
brows, bcols = bar.shape
bar[:,0:res_factor*4] = white
bar[:, pixels-res_factor*4:pixels] = white
bar[:int(brows/4),:] = 0
bar[brows-int(brows/4):,:] = 0
bar[int(scale_height/2)-res_factor*3+1:
int(scale_height/2)+res_factor*3,:] = white
bnumber = ' ' + str(nm) + ' nm'
arraytext = string_to_array(bnumber, color=white)
_, acols = arraytext.shape
atext = np.zeros((50,acols))
atext[10:40,:] = arraytext
bar = np.append(bar, atext, axis=1)
brows, bcols = bar.shape
left = int(cols/2) - int(bcols/2)
right = int(cols/2) + int(bcols/2)
try:
image[rows:,left:right] = bar
except ValueError:
image[rows:,left:right+1] = bar
    # Create a white border
image[rows:rows+res_factor*3, :] = white
image[rows + scale_height - res_factor*3:rows + scale_height, :] = white
image[rows:rows+scale_height - 1, 0:res_factor*3] = white
image[rows:rows+scale_height - 1, cols-res_factor*3:cols] = white
return image
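# Minimal usage sketch (values are hypothetical): for a square grayscale image `img` with a
# known nm-per-pixel scale, something like
#   labeled = append_scale_bar(img, scale=0.5)
#   plt.imshow(labeled, cmap='gray'); plt.show()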
|
from math import sin,cos
import pymunk
import pyglet
from pyglet.gl import *
class Jelly:
def __init__ (self, space, position, radius, bounciness, shape_group, color, batch, order_group):
self.space = space
self.radius = radius
#self.color = color
#self.group = group
self.bounciness = bounciness
#self.map_size = map_size
self.angle = 0
self.angle_to_add = .21 # .21
self.mass = 0.02
self.center_radius = 3
self.position = position
self.inertia = pymunk.moment_for_circle(self.mass, 0, self.radius)
self.body = pymunk.Body(self.mass, self.inertia)
self.body.position = self.position
self.shape = pymunk.Circle(self.body, self.radius)
self.shape.friction = .2
self.shape.group = shape_group
self.space.add(self.body, self.shape)
self.list = []
for i in range(30): # 30
self.part_mass = .002
self.part_radius = 8
self.position_x = self.radius*cos(self.angle) + self.body.position[0]
self.position_y = self.radius*sin(self.angle) + self.body.position[1]
self.new_position = self.position_x, self.position_y
self.part_inertia = pymunk.moment_for_circle(self.part_mass, 0, self.part_radius)
self.part_body = pymunk.Body(self.part_mass, pymunk.inf)
self.part_body.position = self.new_position
self.part_shape = pymunk.Circle(self.part_body, self.part_radius)
self.part_shape.friction = .6
self.part_shape.group = shape_group
self.angle += self.angle_to_add
self.space.add(self.part_body, self.part_shape)
self.list.append(self.part_body)
self.rest_ln = self.radius
self.stiffness = self.bounciness # 10 for really bouncy, 1 for squishy as shit
self.damp = .006
self.spring = pymunk.constraint.DampedSpring(self.body, self.part_body, (0,0), (0,0), self.rest_ln, self.stiffness, self.damp)
self.slide = pymunk.constraint.GrooveJoint(self.body, self.part_body, (0,0), (self.position_x-self.body.position[0],self.position_y-self.body.position[1]), (0,0))
self.space.add(self.spring, self.slide)
self.jelly_fill = batch.add_indexed(31, pyglet.gl.GL_TRIANGLES, None,
[0,1,2,
0,2,3,
0,3,4,
0,4,5,
0,5,6,
0,6,7,
0,7,8,
0,8,9,
0,9,10,
0,10,11,
0,11,12,
0,12,13,
0,13,14,
0,14,15,
0,15,16,
0,16,17,
0,17,18,
0,18,19,
0,19,20,
0,20,21,
0,21,22,
0,22,23,
0,23,24,
0,24,25,
0,25,26,
0,26,27,
0,27,28,
0,28,29,
0,29,30],
('v2f'),('c4B', (color)*31))
self.jelly_outline = batch.add_indexed(30, pyglet.gl.GL_LINES, order_group,
[0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,
8,9,9,10,10,11,11,12,12,13,13,14,
14,15,15,16,16,17,17,18,18,19,19,
20,20,21,21,22,22,23,23,24,24,25,
25,26,26,27,27,28,28,29,29,0],
('v2f'), ('c4B',(0,0,0,200)*30),)
def draw(self):
# Change the outline color if the body is sleeping
self.outline_color = (0,0,0)
if self.part_body.is_sleeping:
self.outline_color = (255,0,0)
else:
self.point_list = []
self.padding_angle = self.body.angle
for self.part_body in self.list:
'''# actual position of the parts of the jelly
self.point_list.append(self.part_body.position[0])
self.point_list.append(self.part_body.position[1])
'''
# Adding padding with the width of the parts of the jelly
self.padded_x = self.part_radius*cos(self.padding_angle) + self.part_body.position[0]
self.padded_y = self.part_radius*sin(self.padding_angle) + self.part_body.position[1]
self.point_list.append(self.padded_x)
self.point_list.append(self.padded_y)
self.padding_angle += self.angle_to_add
'''
if self.part_body.position[1] < 0:
self.part_body.position[1] = self.part_radius + self.part_radius
if self.part_body.position[0] < 0:
self.part_body.position[0] = self.part_radius + self.part_radius
if self.part_body.position[1] > self.map_size[1]:
self.part_body.position[1] = self.map_size[1] - self.part_radius
if self.part_body.position[0] > self.map_size[0]:
self.part_body.position[0] = self.map_size[0] - self.part_radius
'''
# Outline
self.jelly_outline.vertices = self.point_list
self.point_list.insert(0,self.body.position[1])
self.point_list.insert(0,self.body.position[0])
self.jelly_fill.vertices = self.point_list
class Jelly_Two:
def __init__ (self, space, position, radius, bounciness, shape_group, color, batch, order_group):
self.space = space
self.radius = radius
#self.color = color
#self.group = group
self.bounciness = bounciness
#self.map_size = map_size
self.angle = 0
self.angle_to_add = .21
self.mass = 0.02
self.center_radius = 3
self.position = position
self.inertia = pymunk.moment_for_circle(self.mass, 0, self.radius)
self.body = pymunk.Body(self.mass, self.inertia)
self.body.position = self.position
self.shape = pymunk.Circle(self.body, self.radius)
self.shape.friction = .2
self.shape.group = shape_group
self.space.add(self.body)
self.list = []
for i in range(30):
self.part_mass = .002
self.part_radius = 8
self.position_x = self.radius*cos(self.angle) + self.body.position[0]
self.position_y = self.radius*sin(self.angle) + self.body.position[1]
self.new_position = self.position_x, self.position_y
self.part_inertia = pymunk.moment_for_circle(self.part_mass, 0, self.part_radius)
self.part_body = pymunk.Body(self.part_mass, pymunk.inf)
self.part_body.position = self.new_position
self.part_shape = pymunk.Circle(self.part_body, self.part_radius)
self.part_shape.friction = .6
self.part_shape.group = shape_group
self.angle += self.angle_to_add
self.space.add(self.part_body, self.part_shape)
self.list.append(self.part_body)
self.rest_ln = 0
self.stiffness = self.bounciness # 10 for really bouncy, 1 for squishy as shit
self.damp = .05
self.spring = pymunk.constraint.DampedSpring(self.body, self.part_body,
(self.position_x-self.body.position[0],self.position_y-self.body.position[1]),
(0,0), self.rest_ln, self.stiffness, self.damp)
#self.slide = pymunk.constraint.GrooveJoint(self.body, self.part_body, (0,0), (self.position_x-self.body.position[0],self.position_y-self.body.position[1]), (0,0))
self.space.add(self.spring)
self.jelly_fill = batch.add_indexed(31, pyglet.gl.GL_TRIANGLES, None,
[0,1,2,
0,2,3,
0,3,4,
0,4,5,
0,5,6,
0,6,7,
0,7,8,
0,8,9,
0,9,10,
0,10,11,
0,11,12,
0,12,13,
0,13,14,
0,14,15,
0,15,16,
0,16,17,
0,17,18,
0,18,19,
0,19,20,
0,20,21,
0,21,22,
0,22,23,
0,23,24,
0,24,25,
0,25,26,
0,26,27,
0,27,28,
0,28,29,
0,29,30],
('v2f'),('c4B', (color)*31))
self.jelly_outline = batch.add_indexed(30, pyglet.gl.GL_LINES, order_group,
[0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,
8,9,9,10,10,11,11,12,12,13,13,14,
14,15,15,16,16,17,17,18,18,19,19,
20,20,21,21,22,22,23,23,24,24,25,
25,26,26,27,27,28,28,29,29,0],
('v2f'), ('c4B',(0,0,0,200)*30),)
def draw(self):
# Change the outline color if the body is sleeping
self.outline_color = (0,0,0)
if self.part_body.is_sleeping:
self.outline_color = (255,0,0)
else:
self.point_list = []
self.padding_angle = self.body.angle
for self.part_body in self.list:
'''# actual position of the parts of the jelly
self.point_list.append(self.part_body.position[0])
self.point_list.append(self.part_body.position[1])
'''
# Adding padding with the width of the parts of the jelly
self.padded_x = self.part_radius*cos(self.padding_angle) + self.part_body.position[0]
self.padded_y = self.part_radius*sin(self.padding_angle) + self.part_body.position[1]
self.point_list.append(self.padded_x)
self.point_list.append(self.padded_y)
self.padding_angle += self.angle_to_add
'''
if self.part_body.position[1] < 0:
self.part_body.position[1] = self.part_radius + self.part_radius
if self.part_body.position[0] < 0:
self.part_body.position[0] = self.part_radius + self.part_radius
if self.part_body.position[1] > self.map_size[1]:
self.part_body.position[1] = self.map_size[1] - self.part_radius
if self.part_body.position[0] > self.map_size[0]:
self.part_body.position[0] = self.map_size[0] - self.part_radius
'''
# Outline
self.jelly_outline.vertices = self.point_list
self.point_list.insert(0,self.body.position[1])
self.point_list.insert(0,self.body.position[0])
self.jelly_fill.vertices = self.point_list
class JellyTypeTwo:
def __init__ (self, space, radius, position, bounciness, group, color, map_size, batch, order_group):
self.space = space
self.radius = radius
self.color = color
self.group = group
self.bounciness = bounciness
self.map_size = map_size
self.angle = 0
self.angle_to_add = .21
self.mass = 0.02
self.center_radius = 3
self.position = position
self.inertia = pymunk.moment_for_circle(self.mass, 0, self.radius)
self.body = pymunk.Body(self.mass, self.inertia)
self.body.position = self.position
self.shape = pymunk.Circle(self.body, self.radius)
self.shape.friction = .2
self.shape.group = self.group
self.space.add(self.body, self.shape)
self.list = []
for i in range(30):
self.part_mass = .002
self.part_radius = 8
self.position_x = self.radius*cos(self.angle) + self.body.position[0]
self.position_y = self.radius*sin(self.angle) + self.body.position[1]
self.new_position = self.position_x, self.position_y
self.part_inertia = pymunk.moment_for_circle(self.part_mass, 0, self.part_radius)
self.part_body = pymunk.Body(self.part_mass, pymunk.inf)
self.part_body.position = self.new_position
self.part_shape = pymunk.Circle(self.part_body, self.part_radius)
self.part_shape.friction = .6
self.part_shape.group = self.group
self.angle += self.angle_to_add
self.space.add(self.part_body, self.part_shape)
self.list.append(self.part_body)
self.rest_ln = 0
self.stiffness = 10 # 10 for really bouncy, 1 for squishy as shit
self.damp = .2
self.spring = pymunk.constraint.DampedSpring(self.body, self.part_body,
(self.position_x-self.body.position[0],self.position_y-self.body.position[1]), (0,0),
self.rest_ln, self.stiffness, self.damp)
#self.slide = pymunk.constraint.GrooveJoint(self.body, self.part_body, (0,0), (self.position_x-self.body.position[0],self.position_y-self.body.position[1]), (0,0))
            self.space.add(self.spring)  # the GrooveJoint above is commented out, so only the spring is added
self.jelly_fill = batch.add_indexed(31, pyglet.gl.GL_TRIANGLES, order_group,
[0,1,2,
0,2,3,
0,3,4,
0,6,5,
0,5,6],
('v2f'),('c4B'))
self.jelly_outline = batch.add_indexed(30, pyglet.gl.GL_LINES, order_group,
[0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,
8,9,9,10,10,11,11,12,12,13,13,14,
14,15,15,16,16,17,17,18,18,19,19,
20,20,21,21,22,22,23,23,24,24,25,
25,26,26,27,27,28,28,29,29,0],
('v2f'), ('c3B',(0,0,0)*30),)
def draw(self):
# Change the outline color if the body is sleeping
self.point_list = []
self.padding_angle = self.body.angle
for self.part_body in self.list:
'''# actual position of the parts of the jelly
self.point_list.append(self.part_body.position[0])
self.point_list.append(self.part_body.position[1])
'''
# Adding padding with the width of the parts of the jelly
self.padded_x = self.part_radius*cos(self.padding_angle) + self.part_body.position[0]
self.padded_y = self.part_radius*sin(self.padding_angle) + self.part_body.position[1]
self.point_list.append(self.padded_x)
self.point_list.append(self.padded_y)
self.padding_angle += self.angle_to_add
if self.part_body.position[1] < 0:
self.part_body.position[1] = self.part_radius + self.part_radius
if self.part_body.position[0] < 0:
self.part_body.position[0] = self.part_radius + self.part_radius
if self.part_body.position[1] > self.map_size[1]:
self.part_body.position[1] = self.map_size[1] - self.part_radius - 10
if self.part_body.position[0] > self.map_size[0]:
self.part_body.position[0] = self.map_size[0] - self.part_radius
# Outline
self.jelly_outline.vertices = self.point_list
#self.jelly_outline.colors = (self.outline_color*self.jelly_outline.count)
|
#!/usr/bin/python
import sys, pickle
import numpy as np  # used below for np.array / np.mean
from os import listdir, path
from icepy import *
from icecube.dataio import I3File
from icecube import simclasses, recclasses
from icecube.phys_services import I3Calculator
#################################################################################################################
# Variables #
#################################################################################################################
trackFile = 'tracks100.pickle'
i3filesDirectory = '/data/icecube/domeff_analysis/reco_sim_nominal/allMMCTree'
i3geometryFile = '/cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_AVG_55697-57531_PASS2_SPE.i3.gz'
trackEndPadding = -50
#################################################################################################################
# Geometry #
#################################################################################################################
i3geoFile = I3GeometryFile(i3geometryFile)
i3geo = i3geoFile.getI3geometry()
detectorCylinder = i3geoFile.getDetectorCylinder()
i3geoFile.close()
#################################################################################################################
# Generate Tracks #
#################################################################################################################
tracks = []
totalFileCount = len(listdir(i3filesDirectory))
for fileCount, i3fileName in enumerate(listdir(i3filesDirectory)):
print 'File Progress:{:03d}%\r'.format((fileCount * 100)/totalFileCount),
sys.stdout.flush()
i3fileLocation = path.join(i3filesDirectory, i3fileName)
i3file = I3File(i3fileLocation, 'r')
for frameCount, frame in enumerate(i3file):
# physics frame
        if frame.Stop.id == 'P':
            # skip physics frames that are missing the required keys
if not frame.Has('MMCTrackList') or not frame.Has('I3MCTree') or not frame.Has('SplitInIcePulses'):
continue
MMCTrackList = frame['MMCTrackList']
particleMask = [I3ParticleIntersectsCylinder(MMCTrack.particle, detectorCylinder) for MMCTrack in MMCTrackList]
# only one track passes detector cylinder
if sum(particleMask) != 1:
continue
MMCTrack = np.array(MMCTrackList)[particleMask][0]
i3particle = MMCTrack.particle
# begining, end, and length of track
i3trackStartPosition = I3Position(MMCTrack.xi, MMCTrack.yi, MMCTrack.zi)
i3trackEndPosition = I3Position(MMCTrack.xf, MMCTrack.yf, MMCTrack.zf)
trackLength = (i3trackEndPosition - i3trackStartPosition).magnitude
# get daughters and determine high stochastic loss bins
I3MCTree = frame['I3MCTree']
i3daughters = I3MCTree.get_daughters(i3particle)
track = Track(trackLength)
for i3daughterParticle in i3daughters:
# find energy losses
if i3daughterParticle.type != i3particle.type:
daughterTrackDistance = (i3daughterParticle.pos - i3trackStartPosition).magnitude
if daughterTrackDistance > (trackLength + trackEndPadding):
continue
track.losses.append(StochasticLoss(daughterTrackDistance, i3daughterParticle.energy))
# get DOM hits
i3pulseSeriesMap = frame['SplitInIcePulses'].apply(frame)
for omgeo in i3geo.omgeo:
i3DOMPosition = omgeo.second().position
cherenkovDistance = I3Calculator.cherenkov_distance(i3particle, i3DOMPosition)
                # a NaN Cherenkov distance means the DOM cannot be hit; skip it
                if cherenkovDistance != cherenkovDistance:
                    continue
# limit cherenkov distance
if cherenkovDistance > 100 or cherenkovDistance < 20:
continue
# get emission track distance
i3cherenkovEmissionPosition = I3Calculator.cherenkov_position(i3particle, i3DOMPosition)
emissionTrackDistance = (i3cherenkovEmissionPosition - i3trackStartPosition).magnitude
# cherenkov distance is within track
if emissionTrackDistance > (trackLength + trackEndPadding):
continue
# light doesnt pass through dust layer
if CherenkovPassesThroughDustLayer(i3cherenkovEmissionPosition, i3DOMPosition):
continue
# get DOM impact angle and angle acceptance
DOMImpactAngleDeg = np.rad2deg(I3Calculator.cherenkov_approach_angle(i3particle, i3DOMPosition, omgeo.second().direction))
# filter impact angle
if DOMImpactAngleDeg > 135:
continue
# get DOM pulses
i3pulseSeries = i3pulseSeriesMap.get(omgeo.first())
# no DOM Pulses
if i3pulseSeries == None:
track.emissions.append(CherenkovEmission(emissionTrackDistance, 0))
continue
# sum DOM charges
chargeSum = 0
for i3pulse in i3pulseSeries:
timeResidual = I3Calculator.time_residual(i3particle, i3DOMPosition, i3pulse.time)
if -100 < timeResidual < 100:
chargeSum += i3pulse.charge
# fix charge based on acceptance
DOMacceptance = DOMAngleAcceptance(DOMImpactAngleDeg)
chargeSum /= DOMacceptance
track.emissions.append(CherenkovEmission(emissionTrackDistance, Intensity(chargeSum, cherenkovDistance)))
tracks.append(track)
i3file.close()
print 'File Progress:100%'
#################################################################################################################
# Save Tracks #
#################################################################################################################
print 'Saving Tracks in {}'.format(trackFile)
with open(trackFile, 'wb') as pickleFile:
pickle.dump(tracks, pickleFile)
|
#This problem was asked by Snapchat.
#Given an array of time intervals (start, end)
#for classroom lectures (possibly overlapping),
#find the minimum number of rooms required.
#For example, given [(30, 75), (0, 50), (60, 150)], you should return 2.
def min_intervals(intervals):
    # sweep over sorted start and end times; the peak number of overlapping
    # lectures is the minimum number of rooms required
    starts = sorted(i[0] for i in intervals)
    ends = sorted(i[1] for i in intervals)
    rooms = max_rooms = e = 0
    for s in starts:
        while e < len(ends) and ends[e] <= s:
            rooms -= 1
            e += 1
        rooms += 1
        max_rooms = max(max_rooms, rooms)
    return max_rooms
intervals = [(30, 75), (0, 50), (60, 150)]
print min_intervals(intervals)
|
"""
Plot Receiver Operating Characteristic (ROC) curve.
The ROC curve, a characteristic of a binary classifier, is obtained by plotting the
*true positive rate* (TPR, also known as *sensitivity* or *recall*)
.. math::
\\text{TPR} = \\frac{\\text{TP}}{\\text{TP} + \\text{FN}}
versus the *false positive rate* (FPR, also known as *fall-out* or *probability of
false alarm*)
.. math::
\\text{FPR} = \\frac{\\text{FP}}{\\text{FP} + \\text{TN}}
where :math:`\\text{TP}`, :math:`\\text{TN}`, :math:`\\text{FP}`, and :math:`\\text{FN}`
are the number of true positives, true negatives, false positives, and false negatives,
respectively.
Here, the ROC curve is plotted from the true binary labels (:math:`[0,1]` or
:math:`[-1,1]`) and the target scores (as probability estimates or confidence values).
.. note::
For K-fold cross validation, multiple ROC curves can be plotted together.
"""
from scripts.plot._tools import get_colormap
import argparse as ap
import itertools
import numpy as np
from sklearn.metrics import roc_curve, auc
from matplotlib import pyplot as plt
from typing import Tuple, List, Optional, Dict, Any, Union
def _roc_auc(fname: str, positive_label: Union[int, float]) -> Tuple[np.array, np.array, float]:
"""
Generate ROC curve and compute AUC
Args:
fname (str): Name of the data file
Returns:
        Returns the false positive values (as ``np.array``), true positive values
(as ``np.array``) and the AUC (as ``float``).
.. note::
The data file :param:`fname` is a two-column file containing the class
``y_true`` of the examples and their respective score ``y_score``.
"""
# Load data from file
y_true, y_score = np.loadtxt(fname, unpack=True)
# Generate ROC curve
fpr, tpr, _ = roc_curve(y_true, y_score, pos_label=positive_label)
# Compute ROC AUC
rocauc = auc(fpr, tpr)
return fpr, tpr, rocauc
def plot(
fin: List[str],
output: Optional[str] = None,
groups: Optional[List[int]] = None,
labels: Optional[List[str]] = None,
title: Optional[str] = None,
positive_label: Union[int, float] = 1,
) -> None:
"""
Plot ROC curves.
Args:
fin (List[str]): List of input files
output (str, optional): Output file for the plot
Raises:
ValueError: An error occurs when the number of labels is different from the
number of groups.
.. note::
If ``output`` is not specified, the plot is shown interactively.
.. note::
If ``groups==0`` each plot is considered as a different fold of the same group.
"""
# Figure
plt.figure()
ax = plt.subplot(
1,
1,
1,
aspect="equal",
xlim=[-0.05, 1.05],
ylim=[-0.05, 1.05],
title="Receiver Operating Characteristic" if title is None else title,
xlabel="False Positive Rate",
ylabel="True Positive Rate",
)
# Get color map
cmap = get_colormap(groups)
# Plot ROC for random classifier
ax.plot([0, 1], [0, 1], "--", label="Random", color="grey", lw=0.5)
if labels is not None:
if len(labels) != len(fin):
raise ValueError(
"The number of labels should be the same as the number of inputs."
)
for idx, f in enumerate(fin):
fpr, tpr, auc_score = _roc_auc(f, positive_label)
        try:
            label = f"{labels[idx]} (AUC = {auc_score:.2f})"
        except (TypeError, IndexError):
            label = f"AUC = {auc_score:.2f}"
# Plot ROC
ax.plot(fpr, tpr, label=label, color=cmap[idx])
# Set legend
ax.legend(loc="lower right")
# Plot or save
if output is not None:
plt.savefig(output)
else:
plt.show()
def args_to_dict(args: ap.Namespace) -> Dict[str, Any]:
"""
Convert command line arguments to dictionary.
Args:
args (ap.Namespace): Command line arguments
Returns:
        A dictionary with kwargs and values
"""
return {
"fin": args.input,
"output": args.output,
"groups": args.groups,
"labels": args.labels,
"title": args.title,
"positive_label": args.positive_label,
}
def parse(args: Optional[str] = None) -> ap.Namespace:
"""
Parse command-line arguments.
Args:
args (str, optional): String to parse
Returns:
An `ap.Namespace` containing the parsed options
.. note::
        If ``args is None`` the string to parse is read from ``sys.argv``
"""
# Parser
parser = ap.ArgumentParser(description="Plot ROC curve(s).")
# Add arguments
parser.add_argument("input", nargs="+", type=str)
parser.add_argument("-o", "--output", default=None, type=str)
parser.add_argument("-g", "--groups", nargs="*", default=None, type=int)
parser.add_argument("-l", "--labels", nargs="*", default=None, type=str)
parser.add_argument("-t", "--title", default=None, type=str)
parser.add_argument("-pl", "--positive_label", default=1)
# Parse arguments
return parser.parse_args(args)
if __name__ == "__main__":
args = parse()
args_dict = args_to_dict(args)
plot(**args_dict)
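# Example invocation (file names and the script name are hypothetical); each input file is
# expected to hold two whitespace-separated columns "y_true y_score", as described in _roc_auc:
#   python roc.py fold0.dat fold1.dat -l "fold 0" "fold 1" -t "My classifier" -o roc.png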
|
from application import app
from flask_sqlalchemy import SQLAlchemy
from flask import request
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:@localhost/book'
db=SQLAlchemy(app)
class Login(db.Model):
sno = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(20), unique=False, nullable=False)
password=db.Column(db.String(20), unique=False, nullable=False)
email = db.Column(db.String(30), unique=True, nullable=False)
@app.route("/login", methods=['GET', 'POST'])
def login():
    if request.method == 'POST':
        name = request.form.get('name')
        password = request.form.get('password')
        email = request.form.get('email')
        entry = Login(name=name, password=password, email=email)
        db.session.add(entry)
        db.session.commit()
    # a Flask view must return a response; this placeholder string is an assumption
    return "login saved"
|
from PyQt5.QtWidgets import QWidget, QLabel, QTextEdit, QGridLayout
from PyQt5.QtGui import QPixmap
import configparser
import requests
import time
import random
import hmac, hashlib, base64, json, binascii
class AnalysisWindow(QWidget):
def __init__(self, filename, x, y):
super().__init__()
self.file_path = self.get_file_path(filename)
self.initUI(x, y)
def initUI(self, x, y):
        '''
        initUI
        :param x: window start X position
        :param y: window start Y position
        :return:
        '''
pixmap = QPixmap(self.file_path)
review = QLabel()
review.setPixmap(pixmap)
reviewEdit = QTextEdit('Loading...')
grid = QGridLayout()
grid.setSpacing(10)
#int row, int column,
grid.addWidget(review, 0, 0, 5, 1)
# int fromRow, int fromColumn, int rowSpan, int columnSpan
grid.addWidget(reviewEdit, 0, 1, 5, 1)
self.reviewEdit = reviewEdit
self.setLayout(grid)
self.setGeometry(x, y, pixmap.width() * 2, pixmap.height() * 2)
self.setWindowTitle('Result')
self.analysis()
def authorization(self):
config = configparser.ConfigParser()
config.read('ocr/config/global.ini')
appid = config.get('recognition', 'appid')
bucket = config.get('recognition', 'bucket')
secretID = config.get('recognition', 'secretID')
secretKey = config.get('recognition', 'secretKey')
randint = random.randint(0,100)
current_time = int(time.time())
expire_time = current_time + 3600 * 24 * 30
auth_str = "a=%s&b=%s&k=%s&e=%s&t=%s&r=%s&u=%s&f="%(appid, bucket, secretID, expire_time, current_time, randint, "0" )
bin = hmac.new(secretKey.encode("utf-8"), auth_str.encode("utf-8"), hashlib.sha1)
s = bin.hexdigest()
s = binascii.unhexlify(s)
s = s + auth_str.encode('ascii')
return base64.b64encode(s).rstrip()
def analysis(self):
        # read the configuration file
config = configparser.ConfigParser()
config.read('ocr/config/global.ini')
url = config.get('recognition', 'url')
host = config.get('recognition', 'host')
appid = config.get('recognition', 'appid')
auth = self.authorization()
image = base64.b64encode(open(self.file_path, 'rb').read()).rstrip().decode('utf-8')
headers = {'host': host, 'content-type' : 'application/json', 'authorization' : auth}
payload = {"appid" : appid, "image" : image }
try:
r = requests.post(url, data=json.dumps(payload), headers=headers)
ret = r.json()
self.reviewEdit.clear()
for item in ret['data']['items']:
self.reviewEdit.append(item['itemstring'])
except requests.exceptions.RequestException as e:
self.reviewEdit.setText('请求OCR服务异常')
def get_file_path(self, filename):
return "ocr_image/%s"%filename
def act(self):
self.show()
|
import ROOT
import numpy as np
import matplotlib.pyplot as plt
#from plotter import Plotter
def scatter(file_1, file_2, name, run):
print "Creating scatter plot."
print "First file: {0}".format(file_1)
print "Second file: {0}".format(file_2)
# XKCD Colors
pinkish_red = "#f10c45"
azure = "#069af3"
ROOT.gBenchmark.Start('run time')
f1 = ROOT.TFile.Open(file_1)
f2 = ROOT.TFile.Open(file_2)
# point 5 data
data_p5 = {}
tree = f2.t1
energy = list(event.energy for event in tree)
cu = list(event.cu for event in tree)
rbx = list(event.rbx for event in tree)
rm = list(event.rm for event in tree)
sipm_ch = list(event.sipm_ch for event in tree)
data_p5["energy"] = energy
data_p5["cu"] = cu
data_p5["rbx"] = rbx
data_p5["rm"] = rm
data_p5["sipm_ch"] = sipm_ch
print "Number of channels (P5): {0}".format(len(energy))
# 904 data
data_904 = {}
tree = f1.t1
charge = list(event.max_charge for event in tree)
cu = list(event.cu for event in tree)
rm = list(event.rm for event in tree)
sipm_ch = list(event.sipm_ch for event in tree)
data_904["charge"] = charge
data_904["cu"] = cu
data_904["rm"] = rm
data_904["sipm_ch"] = sipm_ch
print "Number of channels (904): {0}".format(len(charge))
scatter = {}
scatter["HEP_x"] = []
scatter["HEP_y"] = []
scatter["HEM_x"] = []
scatter["HEM_y"] = []
rbx_channels_p5 = []
rbx_averages_p5 = []
rbx_channels_904 = []
rbx_averages_904 = []
# get RBX averages for P5 and 904
for i, energy in enumerate(data_p5["energy"]):
rbx = data_p5["rbx"][i]
# get nonzero P5 data
if energy > 0.0:
rbx_channels_p5.append(energy)
# get 904 data
for j, charge in enumerate(data_904["charge"]):
if data_p5["cu"][i] != data_904["cu"][j]: continue
if data_p5["rm"][i] != data_904["rm"][j]: continue
if data_p5["sipm_ch"][i] != data_904["sipm_ch"][j]: continue
rbx_channels_904.append(charge)
if len(rbx_channels_p5) == 184: # all channels for RBX
ave = np.mean(rbx_channels_p5)
rbx_averages_p5.append(ave)
if rbx > 0.0:
scatter["HEP_y"].append(ave)
else:
scatter["HEM_y"].append(ave)
#print "RBX {0}: {1} channels (P5)".format(rbx, len(rbx_channels_p5))
# 904 RBX0 RM types: 1, 2, 3, 2; masked 6 channels (186 total)
# P5 maksed RM types 2 and 3 only (4 masked channels)
# P5 masked Layer -1 channels (one per RM, 4 masked channels)
if len(rbx_channels_904) != 184 - 2:
print "There are {0} channels for 904 instead of 192 - 6 - 4 = 182 expected.".format(len(rbx_channels_904), rbx)
else: # all channels for RBX
ave = np.mean(rbx_channels_904)
rbx_averages_904.append(ave)
if rbx > 0.0:
scatter["HEP_x"].append(ave)
else:
scatter["HEM_x"].append(ave)
print "RBX {0}: {1} channels (904)".format(rbx, len(rbx_channels_904))
# clear channel lists
rbx_channels_p5 = []
rbx_channels_904 = []
print "Number of HEP RBXs (P5): {0}".format(len(scatter["HEP_y"]))
print "Number of HEM RBXs (P5): {0}".format(len(scatter["HEM_y"]))
print "Number of HEP RBXs (904): {0}".format(len(scatter["HEP_x"]))
print "Number of HEM RBXs (904): {0}".format(len(scatter["HEM_x"]))
#info = {}
#info["name"] = "LaserToCU_Point5_vs_904_Scatter"
#info["ynames"] = ["HEP"]
#info["xdata"] = scatter["HEP_x"]
#info["ydata"] = [scatter["HEP_y"]]
#info["title"] = "Laser to CU: Point 5 vs 904"
#info["xtitle"] = "Max Charge (fC) from 904"
#info["ytitle"] = "Energy (fC) from Point 5"
#info["statloc"] = 1
#info["setrange"] = 0
#info["plotfit"] = [1]
#p = Plotter("Nov17-18_Final_CU_Data", "Nov17-18_Final_Plots", True)
#p.plotScatter(info, True)
# create scatter plot
fig, ax = plt.subplots()
ax.plot(scatter["HEP_x"], scatter["HEP_y"], 'o', c=pinkish_red, alpha=0.5, label="HEP RBXs")
ax.plot(scatter["HEM_x"], scatter["HEM_y"], 'o', c=azure, alpha=0.5, label="HEM RBXs")
title = "Laser to CU Run {0}\nAve. Energy (P5) vs. Max Charge (904) per RBX".format(run)
xtitle = "Max Charge (fC) from 904"
ytitle = "Energy (fC) from P5"
xmax = 2.0 * 10**6
ymax = 2.5 * 10**5
xstat = 1.0 * 10**5
ystat = 1.75 * 10**5
legend = ax.legend(loc='upper left')
ax.grid(True)
axes = plt.gca()
axes.set_xlim([0, xmax])
axes.set_ylim([0, ymax])
plt.title(title)
plt.xlabel(xtitle)
plt.ylabel(ytitle)
plt.savefig(name + ".png")
plt.savefig(name + ".pdf")
info = []
entry = {}
entry["x"] = scatter["HEP_x"]
entry["y"] = scatter["HEP_y"]
entry["color"] = pinkish_red
entry["label"] = "HEP"
info.append(entry)
entry = {}
entry["x"] = scatter["HEM_x"]
entry["y"] = scatter["HEM_y"]
entry["color"] = azure
entry["label"] = "HEM"
info.append(entry)
f_box = ""
for key in info:
x = key["x"]
y = key["y"]
z = np.polyfit(x, y, 1)
f = np.poly1d(z)
f_string = str(f)
f_string = f_string.split("\n")[-1]
f_string = "{0} : f(x) = {1}".format(key["label"], f_string)
f_box += f_string + "\n"
print f_string
# calculate new x's and y's using fit function
x_new = np.linspace(min(x), max(x), 100)
y_new = f(x_new)
ax.plot(x_new, y_new, '--', c=key["color"], alpha=0.5, label=key["label"])
if f_box:
if f_box[-1] == "\n":
f_box = f_box[:-1]
ax.text(xstat, ystat, f_box)
plt.savefig(name + "_fit.png")
plt.savefig(name + "_fit.pdf")
plt.show()
plt.clf()
plt.close()
ROOT.gBenchmark.Show('run time')
if __name__ == "__main__":
file_904 = "Nov17-18_Final_CU_Data/sipm.root"
files_point5 = []
names = []
files_point5.append("Point5_Data/processed_309738_Feb14_laser-HBHE-CU-Gsel0.root")
#files_point5.append("Point5_Data/processed_310553_Feb27_laser-HBHE-CU-Gsel0.root")
#files_point5.append("Point5_Data/processed_310554_Feb27_laser-HBHE-CU-Gsel0.root")
files_point5.append("Point5_Data/processed_310600_Feb27_laser-HBHE-CU-Gsel0.root")
files_point5.append("Point5_Data/processed_310602_Feb27_laser-HBHE-CU-Gsel0.root")
files_point5.append("Point5_Data/processed_310603_Feb27_laser-HBHE-CU-Gsel0.root")
names.append("Point5_Plots/309738_Feb14_laser-HBHE-CU-Gsel0")
#names.append("Point5_Plots/310553_Feb27_laser-HBHE-CU-Gsel0")
#names.append("Point5_Plots/310554_Feb27_laser-HBHE-CU-Gsel0")
names.append("Point5_Plots/310600_Feb27_laser-HBHE-CU-Gsel0")
names.append("Point5_Plots/310602_Feb27_laser-HBHE-CU-Gsel0")
names.append("Point5_Plots/310603_Feb27_laser-HBHE-CU-Gsel0")
    for i in xrange(len(names)):
        # derive the run number from the processed file name, e.g. "processed_309738_..." -> "309738"
        run = files_point5[i].split("/")[-1].split("_")[1]
        scatter(file_904, files_point5[i], names[i], run)
|
import datetime
import time
from datetime import timedelta
class GetTime:
@staticmethod
def get_current_time():
current_now = datetime.datetime.now()
current_time = current_now.strftime("%Y-%m-%d %H:%M:%S")
current_time_num = int(time.time() * 1000)
return {"current_time": current_time, "current_time_num": current_time_num}
@staticmethod
def get_today():
current_time = datetime.datetime.now()
today_start = current_time.strftime("%Y-%m-%d 00:00:00")
today_end = current_time.strftime("%Y-%m-%d 23:59:59")
today_date = current_time.strftime("%Y-%m-%d")
return {"today_start": today_start, "today_end": today_end, "today_date": today_date}
@staticmethod
def get_this_week():
current_time = datetime.datetime.now()
this_week_start = current_time - timedelta(days=current_time.weekday())
this_week_end = current_time + timedelta(days=(6-current_time.weekday()))
this_week_start_date = this_week_start.strftime("%Y-%m-%d")
this_week_start_time = this_week_start.strftime("%Y-%m-%d 00:00:00")
this_week_end_date = this_week_end.strftime("%Y-%m-%d")
this_week_end_time = this_week_end.strftime("%Y-%m-%d 23:59:59")
return {
"this_week_start_date": this_week_start_date,
"this_week_end_date": this_week_end_date,
"this_week_start_time": this_week_start_time,
"this_week_end_time": this_week_end_time,
}
@staticmethod
def get_this_month():
current_time = datetime.datetime.now()
this_month_start = datetime.datetime(current_time.year, current_time.month, 1)
this_month = current_time.month
print(this_month)
if this_month == 12:
this_month_start_date = f"{current_time.year}-12-01"
this_month_start_time = f"{current_time.year}-12-01 00:00:00"
this_month_end_date = f"{current_time.year}-12-31"
this_month_end_time = f"{current_time.year}-12-31 23:59:59"
else:
this_month_end = datetime.datetime(current_time.year, current_time.month + 1, 1) - timedelta(days=1)
this_month_start_date = this_month_start.strftime("%Y-%m-%d")
this_month_start_time = this_month_start.strftime("%Y-%m-%d 00:00:00")
this_month_end_date = this_month_end.strftime("%Y-%m-%d")
this_month_end_time = this_month_end.strftime("%Y-%m-%d 23:59:59")
return {
"this_month_start_date": this_month_start_date,
"this_month_end_date": this_month_end_date,
"this_month_start_time": this_month_start_time,
"this_month_end_time": this_month_end_time,
}
@staticmethod
def get_today_before(num):
current_time = datetime.datetime.now()
n_tian_qian = current_time - timedelta(days=num)
before_time = n_tian_qian.strftime("%Y-%m-%d 00:00:00")
before_date = n_tian_qian.strftime("%Y-%m-%d")
return {
f"before_date_{num}": before_date,
f"before_time_{num}": before_time
}
@staticmethod
def get_today_after(num):
current_time = datetime.datetime.now()
n_tian_hou = current_time + timedelta(days=num)
after_time = n_tian_hou.strftime("%Y-%m-%d 23:59:59")
after_date = n_tian_hou.strftime("%Y-%m-%d")
return {
f"after_date_{num}": after_date,
f"after_time_{num}": after_time
}
@staticmethod
def get_all():
return {**GetTime.get_today(),
**GetTime.get_current_time(),
**GetTime.get_this_month(),
**GetTime.get_this_week(),
**GetTime.get_today_after(7),
**GetTime.get_today_before(1),
**GetTime.get_today_before(7),
**GetTime.get_today_before(30)
}
    # get the current time in milliseconds
@staticmethod
def get_current_time_millis():
current_time = time.time()
return int(round(current_time * 1000))
# now = datetime.datetime.now()
# # Today
# today = now.strftime("%Y-%m-%d %H:%M:%S")
# print("Today")
# print(today)
# # Yesterday
# yesterday = now - timedelta(days=1)
# print("Yesterday")
# print(yesterday)
# # Tomorrow
# tomorrow = now + timedelta(days=1)
# print("Tomorrow")
# print(tomorrow)
# Current quarter
# now_quarter = now.month/3 if now.month % 3 == 0 else now.month / 3 + 1
# print("Current quarter")
# print(now_quarter)
#
# # First and last day of last week
# last_week_start = now - timedelta(days=now.weekday() + 7)
# last_week_end = now - timedelta(days=now.weekday() + 1)
# print("First and last day of last week")
# print(last_week_start, last_week_end)
#
#
# # First and last day of last month
# last_month_end = this_month_start - timedelta(days=1)
# last_month_start = datetime.datetime(last_month_end.year, last_month_end.month, 1)
# print("First and last day of last month")
# print(last_month_start, last_month_end)
#
# # First and last day of this quarter
# month = (now.month - 1) - (now.month - 1) % 3 + 1
# this_quarter_start = datetime.datetime(now.year, month, 1)
# this_quarter_end = datetime.datetime(now.year, month + 3, 1) - timedelta(days=1)
# print("First and last day of this quarter")
# print(this_quarter_start, this_quarter_end)
#
# # First and last day of last quarter
# last_quarter_end = this_quarter_start - timedelta(days=1)
# last_quarter_start = datetime.datetime(last_quarter_end.year, last_quarter_end.month - 2, 1)
# print("First and last day of last quarter")
# print(last_quarter_start, last_quarter_end)
#
# # First and last day of this year
# this_year_start = datetime.datetime(now.year, 1, 1)
# this_year_end = datetime.datetime(now.year + 1, 1, 1) - timedelta(days=1)
# print("First and last day of this year")
# print(this_year_start, this_year_end)
#
# # First and last day of last year
# last_year_end = this_year_start - timedelta(days=1)
# last_year_start = datetime.datetime(last_year_end.year, 1, 1)
# print("First and last day of last year")
# print(last_year_start, last_year_end)
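# The notes above sketch "last month" (and quarter/year) boundaries. A minimal
# standalone helper in the same style as GetTime, shown here only as an
# illustrative sketch (it is not part of the original class), could look like:
def get_last_month_example():
    import datetime
    from datetime import timedelta
    current_time = datetime.datetime.now()
    this_month_start = datetime.datetime(current_time.year, current_time.month, 1)
    last_month_end = this_month_start - timedelta(days=1)
    last_month_start = datetime.datetime(last_month_end.year, last_month_end.month, 1)
    return {
        "last_month_start_date": last_month_start.strftime("%Y-%m-%d"),
        "last_month_end_date": last_month_end.strftime("%Y-%m-%d"),
    }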
if __name__ == "__main__":
# print(GetTime.get_today_start_end())
# print(GetTime.get_this_month())
# print(GetTime.get_today_before(5))
# print(GetTime.get_today_after(5))
print(GetTime.get_all())
|
'''
forms.py
-------------------
The form itself is defined in _CensusImputeForm; create instances through the CensusImputeForm factory
'''
from flask_wtf import FlaskForm
from wtforms import SelectField, DecimalField, BooleanField
class CensusImputeForm():
def __init__(self, data_dict, numeric_fields, recordname2description):
self.form_class = _CensusImputeForm.make_form(
data_dict, numeric_fields, recordname2description)
self.numeric_fields = numeric_fields
self.data_dict = data_dict
def get_instance(self, request_form=None):
instance = self.form_class(request_form)
instance.field_list = list()
for key in self.numeric_fields:
instance.field_list.append(
(getattr(instance, key), getattr(instance, 'mask_' + key)))
for key in self.data_dict:
instance.field_list.append(
(getattr(instance, key), getattr(instance, 'mask_' + key)))
return instance
class _CensusImputeForm(FlaskForm):
# Creates a form with all the right fields
@classmethod
def make_form(cls, data_dict, numeric_fields, recordname2description):
for key in numeric_fields:
setattr(cls, key, DecimalField(
id=key, label=recordname2description[key].split('(')[0]))
setattr(cls, 'mask_' + key, BooleanField(label='mask_' + key))
for key in data_dict:
setattr(cls, key, SelectField(id=key, label=recordname2description[key],
choices=[(-1, 'None selected')] + list(data_dict[key].items())))
setattr(cls, 'mask_' + key, BooleanField(label='mask_' + key))
return cls
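# Illustrative usage sketch (not part of the original module): the field names,
# choices, and descriptions below are made-up examples. Building the factory only
# attaches unbound WTForms fields; get_instance() additionally needs a Flask
# request/app context.
def _example_census_form_factory():
    data_dict = {'sex': {1: 'Male', 2: 'Female'}}
    numeric_fields = ['age']
    recordname2description = {'age': 'Age (years)', 'sex': 'Sex'}
    return CensusImputeForm(data_dict, numeric_fields, recordname2description)
# form = _example_census_form_factory().get_instance(request.form)  # inside a Flask view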
|
from lib.imageExtractor import ImageExtractor
from bs4 import BeautifulSoup
import urllib.parse, urllib.request
import requests
import requests.exceptions
import re
import os
import shutil
class WebsiteExtractor(ImageExtractor):
"""
This class allows to download all the images from a given website.
"""
def __init__(self):
"""
Initiate the Image extractor by creating the folder for the downloaded images in data/imageExtractor and
initialize other important parameters.
"""
# List of all visited urls
self.url_list = []
# Names of all visited images
self.img_visited = []
        # Number of images saved so far (also used to name the files)
self.num = 0
# Directory to save the images
self.folder = "data/imageExtractor/download"
# if the directory does not exist create it
if not os.path.exists(self.folder):
os.makedirs(self.folder)
def extract(self, website, parameters):
"""
Extract the given images from the given website. If parameters do not correspond to the implementation raise a
ValueError. All the images must be saved locally.
:param website: Website URL (http://www.google.com/)
:param parameters: Parameters for the implementation
:return: List of local path for the extracted images
"""
# Check parameters
if len(parameters) == 1:
try:
depth = int(parameters[0])
try:
# An user-agent is necessary because otherwise websites return ERROR 403 FORBIDDEN
request = urllib.request.Request(website, headers={'User-Agent': 'Mozilla/5.0'})
request.get_method = lambda: 'HEAD'
urllib.request.urlopen(request)
except (urllib.request.HTTPError, urllib.request.URLError, ValueError):
raise AttributeError
self.__download_images(website, depth)
names = [name for name in os.listdir(self.folder)]
return names
except ValueError:
raise ValueError
else:
raise ValueError
def clear(self):
"""
Delete all the extracted and saved images.
:return: Nothing
"""
shutil.rmtree(os.path.join(self.folder))
# ############################ HELPER ############################
def __download_images(self, website, depth):
"""
Download images from the given website. Recursive function until depth is 0.
:param website: Website URL
:param depth: Depth of the recursive function (how many link must be visited)
:return: Nothing
"""
        # if the url has already been visited, stop the process
if website in self.url_list:
return
# add current site to the list of visited urls
self.url_list.append(website)
# extract all images from current url
self.__single_page(website)
# if depth is greater than 0 continue with the images extraction
if depth > 0:
# create html parser
soup = BeautifulSoup(requests.get(website, allow_redirects=True).text, 'html.parser')
# extract all the url from a tags
links = soup.findAll('a')
# Loop over all links
for link in links:
# read href and clean the content
try:
url = link['href']
url = re.sub(r'[\t\n\r]', '', url)
res = urllib.parse.urlparse(url)
res_site = urllib.parse.urlparse(website)
# continue the process only if the url is in the same website
                if res_site.netloc != '' and res_site.netloc in res.geturl():
                    # check if website www.xxx.yy is not empty
                    if res.netloc != '':
                        # extract images from this url (recursion)
                        self.__download_images(res.geturl(), depth - 1)
                # Check if the url is relative and, if so, join it with the base url
                if res.netloc == '' and res.path != '':
                    # extract images from this url (recursion)
                    self.__download_images(urllib.parse.urljoin(website, url), depth - 1)
except KeyError:
pass
def __single_page(self, site):
"""
Downloads images from a single page.
:param site: URL to process
:return: Nothing
"""
# create html parse
soup = BeautifulSoup(requests.get(site, allow_redirects=True).text, 'html.parser')
# find all images
images = soup.findAll('img')
# List of all images urls
urls = []
# loops over all images and extract the url
for img in images:
if img.has_attr('src'):
urls.append(img['src'])
# loops over all urls
for url in urls:
response = None
            # some image urls contain control characters; strip them out
url = re.sub(r'[\t\n\r]', '', url)
# if the image is already processed skip
if url in self.img_visited:
continue
# add the image url to visited images
self.img_visited.append(url)
# parse the url
res = urllib.parse.urlparse(url)
# if website is without http or https
            if res.scheme == '':
try:
# Add the http and check if it works
response = requests.get('http://' + str(res.geturl().lstrip("/")), allow_redirects=True)
# if url does not exist
if response.status_code != 200:
response = None
raise requests.exceptions.InvalidURL
# print('http://' + res.geturl().lstrip("/"))
except requests.exceptions.RequestException:
# check if the url contains the netloc -> www.cwi.nl/%7Eguido/Python.html -> netlog = ''
                if res.netloc == '':
try:
# concat the base url with the img url (without the initial /)
response = requests.get(site + res.geturl().lstrip("/"), allow_redirects=True)
if response.status_code != 200:
response = None
raise requests.exceptions.InvalidURL
# print(site + res.geturl().lstrip("/"))
except requests.exceptions.RequestException:
try:
# Concat the base url (only the www.xxx.yy) with the img url (without the initial /)
res_site = urllib.parse.urlparse(site)
response = requests.get(
'http://' + res_site.netloc.lstrip("/") + res.geturl().lstrip("/"),
allow_redirects=True)
if response.status_code != 200:
response = None
raise requests.exceptions.InvalidURL
# print('http://' + res_site.netloc.lstrip("/") + res.geturl())
except requests.exceptions.RequestException:
response = None
# the image is discarded
# if website has http or https
else:
try:
response = requests.get(url, allow_redirects=True)
if response.status_code != 200:
response = None
raise requests.exceptions.InvalidURL
# print(url)
except requests.exceptions.RequestException:
response = None
# the image is discarded
# if there is a valid response continue with the process
if response is not None:
# get img extension from the header
extension = self.__get_extension(response.headers.get('content-type'))
# if the extension is known continue
                if extension != '':
# save the image
f = open(
self.folder + "/" + urllib.parse.quote(urllib.parse.urlparse(site).netloc, '') + "_" + str(
self.num) + extension,
'wb')
f.write(response.content)
f.close()
# increment the number of saved images
self.num += 1
@staticmethod
def __get_extension(content_type):
"""
Given the content type return the image extension.
:param content_type: Content type (from the header)
:return: Image extension
"""
if 'svg' in content_type.lower():
return ".svg"
if 'jpeg' in content_type.lower():
return ".jpeg"
if 'gif' in content_type.lower():
return ".gif"
if 'png' in content_type.lower():
return ".png"
else:
return ''
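# Illustrative usage sketch (the URL and depth below are examples only): running
# this performs real HTTP requests and writes files under data/imageExtractor.
if __name__ == "__main__":
    extractor = WebsiteExtractor()
    try:
        saved = extractor.extract("https://example.com/", ["1"])  # depth is passed as a one-item list
        print(saved)
    finally:
        extractor.clear()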
|
#! /usr/bin/env python3
"""
collection_interface.py - Collect stats in database and display it on a webpage
Author:
- Pablo Caruana (pablo dot caruana at gmail dot com)
Date: 12/3/2016
"""
from database_manager import DatabaseManager
from flask import Flask, jsonify, request, render_template
app = Flask(__name__)
db_manager = DatabaseManager()
@app.route('/', methods=['GET'])
def get_overview():
results = db_manager.get_all_relations_for_all_chunks()
return render_template('overview.html', data=results)
if __name__ == '__main__':
app.run()
|
import datetime
import simplejson
from django.db.models import Q
from django.http import HttpResponse
from fts3.models import Job, File
def uniqueSources(httpRequest):
query = Job.objects.values('source_se').distinct('source_se')
if 'term' in httpRequest.GET and str(httpRequest.GET['term']) != '':
query = query.filter(source_se__icontains = httpRequest.GET['term'])
ses = []
for se in query:
ses.append(se['source_se'])
return HttpResponse(simplejson.dumps(ses), mimetype='application/json')
def uniqueDestinations(httpRequest):
query = Job.objects.values('dest_se').distinct('dest_se')
if 'term' in httpRequest.GET and str(httpRequest.GET['term']) != '':
query = query.filter(dest_se__icontains = httpRequest.GET['term'])
ses = []
for se in query:
ses.append(se['dest_se'])
return HttpResponse(simplejson.dumps(ses), mimetype='application/json')
def uniqueVos(httpRequest):
query = Job.objects.values('vo_name').distinct('vo_name')
if 'term' in httpRequest.GET and str(httpRequest.GET['term']) != '':
query = query.filter(vo_name__icontains = httpRequest.GET['term'])
vos = []
for vo in query:
vos.append(vo['vo_name'])
return HttpResponse(simplejson.dumps(vos), mimetype='application/json')
|
# -*-coding=utf-8-*-
__author__ = 'Rocky'
'''
http://30daydo.com
Contact: weigesysu@qq.com
'''
import requests
from lxml import etree
session = requests.Session()
from scrapy.selector import Selector
get_crsl = 'https://passport.zujuan.com/login'
first_header = {'Host': 'passport.zujuan.com', 'Connection': 'keep-alive', 'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0(WindowsNT6.1;WOW64)AppleWebKit/537.36(KHTML,likeGecko)Chrome/67.0.3396.99Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip,deflate,br', 'Accept-Language': 'zh,en;q=0.9,en-US;q=0.8'}
s = session.get(get_crsl, headers=first_header)
# print(s.text)
tree = Selector(text=s.text)
csrf = tree.xpath('//input[@name="_csrf"]/@value').extract_first()
login_url = 'https://passport.zujuan.com/login?jump_url=https%3A%2F%2Fm.zujuan.com'
login_header = {'Host': 'passport.zujuan.com', 'Connection': 'keep-alive', 'Content-Length': '165', 'Accept': '*/*',
'Origin': 'https://passport.zujuan.com', 'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0(WindowsNT6.1;WOW64)AppleWebKit/537.36(KHTML,likeGecko)Chrome/67.0.3396.99Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Referer': 'https://passport.zujuan.com/login', 'Accept-Encoding': 'gzip,deflate,br',
'Accept-Language': 'zh,en;q=0.9,en-US;q=0.8',
# 'Cookie': 'device=310bdaba05b30bb632f66fde9bf3e2b91ebc4d607c250c2e1a1d9e0dfb900f01a%3A2%3A%7Bi%3A0%3Bs%3A6%3A%22device%22%3Bi%3A1%3BN%3B%7D;jump_url=7efb7d600a0688ce502e8ae92f2a80fd7c19f7672a19ecaaf51eda1f6ebdd3efa%3A2%3A%7Bi%3A0%3Bs%3A8%3A%22jump_url%22%3Bi%3A1%3Bs%3A20%3A%22https%3A%2F%2Fm.zujuan.com%22%3B%7D;_csrf=4437c0ec2d6f8226561cacdf9055c595dab37d0dd05af26ce79021f362b19133a%3A2%3A%7Bi%3A0%3Bs%3A5%3A%22_csrf%22%3Bi%3A1%3Bs%3A32%3A%22c0ttNa_bWlgdOPbBHJRCh8bxqe4mGZ0g%22%3B%7D;Hm_lvt_6de0a5b2c05e49d1c850edca0c13051f=1535617512;_ga=GA1.2.1479394878.1535617779;_gid=GA1.2.870226030.1535617779;_gat_gtag_UA_112991577_1=1;_sync_login_identity=36fd383060e034288f52c98c2a63b70d53b051f65eab9da587ec0e0ebbb79c30a%3A2%3A%7Bi%3A0%3Bs%3A20%3A%22_sync_login_identity%22%3Bi%3A1%3Bs%3A0%3A%22%22%3B%7D;PHPSESSID=9jg86eo35akac78v1i3fu1lsn5;Hm_lpvt_6de0a5b2c05e49d1c850edca0c13051f=1535618821'
}
data = {
'_csrf': csrf,
'LoginForm[username]': '13653978879',
'LoginForm[password]': '123456'
}
log_ret = session.post(url=login_url,
headers=login_header,
data=data)
print(log_ret.text)
|
import tensorflow as tf
import numpy as np
class Classifier:
def __init__(self, chromosome, x_train, x_test, y_train, y_test,
batch_size, epochs, seed, lamarckian):
self.train_g = tf.Graph()
self.sess = tf.Session(graph = self.train_g)
# self.test_g = tf.Graph()
self.chromosome = chromosome
self.batch_size = batch_size
self.steps = np.floor(x_train.shape[0]/batch_size).astype(int)
self.epochs = epochs
self.lr = 0.1
self.lamarckian = lamarckian
with self.train_g.as_default():
self.chromosome.setup()
train = tf.data.Dataset.from_tensor_slices((x_train, y_train));
train = train.shuffle(seed).repeat().batch(self.batch_size)
train_features, train_labels = train.make_one_shot_iterator().get_next()
program_out = self.chromosome.get_tensors(train_features, self.batch_size)
program_out = tf.add(program_out, tf.reduce_min(program_out))
out_sum = tf.reduce_sum(program_out)
self.logits = tf.divide(program_out, tf.cond(tf.greater(out_sum, 0),
lambda: out_sum,
lambda: tf.constant(1.0)))
self.loss_op = tf.losses.softmax_cross_entropy(onehot_labels=train_labels,
logits=self.logits)
optimizer = tf.train.AdagradOptimizer(learning_rate=self.lr)
self.train_op = optimizer.minimize(self.loss_op,
global_step=tf.train.get_global_step())
self.acc_labels = tf.argmax(train_labels, 1)
self.predicted_classes = tf.argmax(self.logits, 1)
self.acc_op, self.acc_update = tf.metrics.accuracy(
labels=self.acc_labels, predictions=self.predicted_classes)
init = tf.global_variables_initializer()
self.sess.run(init)
# with self.test_g.as_default():
# self.chromosome.setup()
# test = tf.data.Dataset.from_tensor_slices((x_test, y_test));
# test = test.shuffle(seed).repeat().batch(self.batch_size)
# self.test_itr = test.make_one_shot_iterator().get_next()
def train(self):
history = []
start_acc = self.evaluate()
history.append([0.0, start_acc])
with self.train_g.as_default():
init = tf.local_variables_initializer()
self.sess.run(init)
for epoch in range(self.epochs):
mloss = 0.0
acc = 0.0
count = 0.0
for step in range(self.steps):
loss, _, acc, _ = self.sess.run((self.loss_op, self.train_op,
self.acc_op, self.acc_update))
mloss += loss; count += 1
history.append([mloss/count, acc])
if self.lamarckian:
p = self.get_params()
self.chromosome.set_params(p)
return history
def print_params(self):
for k in self.chromosome.param_id.keys():
print("Node ", k, ", id ", self.chromosome.param_id[k], ", val ",
self.chromosome.nodes[k].param, ", tf ",
self.sess.run(self.chromosome.params[self.chromosome.param_id[k]]))
def get_params(self):
params = []
for p in range(len(self.chromosome.params)):
params += [self.sess.run(self.chromosome.params[p])]
return params
def evaluate(self):
total = 0.0
count = 0.0
with self.train_g.as_default():
for i in range(self.steps):
labels, pred = self.sess.run((self.acc_labels, self.predicted_classes))
total += np.sum(labels == pred)
count += len(labels)
return total/count
|
import pandas as pd
# import dataset
data = pd.read_csv('seeds_dataset.csv')
# separate the data and the labels
X = data.iloc[:, :-1].values
y = data.iloc[:, len(data.columns)-1].values
# split into training and test sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25)
# feature scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
################################
#### model, pick one of the following ###
# KNN
from sklearn.neighbors import KNeighborsClassifier
model = KNeighborsClassifier(n_neighbors=7)
model.fit(X_train, y_train)
### end of model #####
######################
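# Alternative model (illustrative): any scikit-learn classifier with fit/predict
# could be used above instead of KNN, for example a linear SVM:
# from sklearn.svm import SVC
# model = SVC(kernel='linear')
# model.fit(X_train, y_train)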
# prediction results
hasil_prediksi = model.predict(X_test)
# confusion matrix
from sklearn.metrics import confusion_matrix, classification_report
cm = confusion_matrix(y_test, hasil_prediksi)
print(cm)
print(classification_report(y_test, hasil_prediksi))
|
from jinja2 import Template
import random
import base64
from flask import Flask, render_template_string, request, render_template, current_app, url_for
app = Flask(__name__)
@app.route("/", methods=["GET"])
def index():
strings = ['Uniandes', 'UNIANDES', 'Mario Laserna', 'Carlos Pacheco Devia', 'Sala Turing', 'Sala Wuaira', 'DISC', 'Ingenieria de Sistemas', 'INGENIERIA DE SISTEMAS', 'CARLOS PACHECO DEVIA', 'SALA TURING', 'SALA WUAIRA', 'DISC SEASON']
texto = random.choice(strings).encode('utf8')
encoded = base64.b64encode(texto).decode('utf8')
return render_template('template1.html', texto=encoded)
@app.route('/respuesta', methods=["GET", "POST"])
def respuesta():
encoded = request.form['base64e']
decoded = request.form['base64d'].encode('utf8')
encodeFront = base64.b64encode(decoded).decode('utf8')
if(encoded == encodeFront):
response = current_app.response_class(
response="DISCCTF{3s0_fu3_m4s_r4p1d0}",
status=302,
mimetype="text/plain"
)
response.headers["Location"] = '/resp'
return response
else:
strings = ['Uniandes', 'UNIANDES', 'Mario Laserna', 'Carlos Pacheco Devia', 'Sala Turing', 'Sala Wuaira', 'DISC', 'Ingenieria de Sistemas', 'INGENIERIA DE SISTEMAS', 'CARLOS PACHECO DEVIA', 'SALA TURING', 'SALA WUAIRA', 'DISC SEASON']
texto = random.choice(strings).encode('utf8')
encoded = base64.b64encode(texto).decode('utf8')
return render_template('template2.html', texto=encoded)
@app.route('/resp')
def resp():
strings = ['Uniandes', 'UNIANDES', 'Mario Laserna', 'Carlos Pacheco Devia', 'Sala Turing', 'Sala Wuaira', 'DISC', 'Ingenieria de Sistemas', 'INGENIERIA DE SISTEMAS', 'CARLOS PACHECO DEVIA', 'SALA TURING', 'SALA WUAIRA', 'DISC SEASON']
texto = random.choice(strings).encode('utf8')
encoded = base64.b64encode(texto).decode('utf8')
return render_template('template3.html', texto=encoded)
if __name__ == '__main__':
app.run(debug=True)
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import threading
import pytest
from pants.pantsd.service.pants_service import PantsService
class RunnableTestService(PantsService):
def run(self):
pass
@pytest.fixture
def service() -> RunnableTestService:
return RunnableTestService()
def test_init(service: RunnableTestService) -> None:
assert bool(service.name) is True
def test_run_abstract() -> None:
with pytest.raises(TypeError):
PantsService() # type: ignore[abstract]
def test_terminate(service: RunnableTestService) -> None:
service.terminate()
assert service._state.is_terminating
def test_maybe_pause(service: RunnableTestService) -> None:
# Confirm that maybe_pause with/without a timeout does not deadlock when we are not
# marked Pausing/Paused.
service._state.maybe_pause(timeout=None)
service._state.maybe_pause(timeout=0.5)
def test_pause_and_resume(service: RunnableTestService) -> None:
service.mark_pausing()
# Confirm that we don't transition to Paused without a service thread to maybe_pause.
assert service._state.await_paused(timeout=0.5) is False
# Spawn a thread to call maybe_pause.
t = threading.Thread(target=service._state.maybe_pause)
t.daemon = True
t.start()
# Confirm that we observe the pause from the main thread, and that the child thread pauses
# there without exiting.
assert service._state.await_paused(timeout=5) is True
t.join(timeout=0.5)
assert t.is_alive() is True
# Resume the service, and confirm that the child thread exits.
service.resume()
t.join(timeout=5)
assert t.is_alive() is False
|
from info.info import TestData
from pages.BasePage import BasePage
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class MainPage(BasePage):
"""Locators"""
START_FOR_FREE_BUTTON = (By.XPATH, '/html/body/div[1]/div/div[2]/div[3]/div/a')
LOGIN_BUTTON = (By.XPATH, '/html/body/div[1]/div/div[1]/div/div[3]/div/ul[2]/li[2]/a')
SIGNUP_BUTTON = (By.XPATH, '/html/body/div[1]/div/div[1]/div/div[3]/div/ul[2]/li[3]/a')
SEARCH_FIELD = (By.ID, "search-field")
ABOUT_LINK = (By.LINK_TEXT, "About")
JOBS_LINK = (By.LINK_TEXT, "Jobs")
BLOG_LINK = (By.LINK_TEXT, "Blog")
DEVELOPERS_LINK = (By.LINK_TEXT, "Developers")
GUIDELINES_LINK = (By.LINK_TEXT, "Guidelines")
HELP_LINK = (By.XPATH, '//*[@id="yui_3_16_0_1_1620345937339_936"]/footer/div[1]/div[1]/ul/li[8]/a')
HELP_FORUM_LINK = (By.XPATH, '//*[@id="yui_3_16_0_1_1620345937339_936"]/footer/div[1]/div[1]/ul/li[10]/a')
PRIVACY_LINK = (By.LINK_TEXT, "Privacy")
TERMS_LINK = (By.LINK_TEXT, "Terms")
COOKIES_LINK = (By.LINK_TEXT, "Cookies")
def __init__(self, driver):
super().__init__(driver)
self.driver.get(TestData.MAIN_URL)
def click_star_for_free_button(self):
self.click(self.START_FOR_FREE_BUTTON)
def click_login_button(self):
self.click(self.LOGIN_BUTTON)
def click_signup_button(self):
self.click(self.SIGNUP_BUTTON)
def fill_search_field(self, text):
self.send(self.SEARCH_FIELD, text)
def search(self, text):
self.send(self.SEARCH_FIELD, text)
self.send(self.SEARCH_FIELD, Keys.RETURN)
|
import requests
import urllib.request
import json
# Chien Json
CHIEN_URL = 'https://rti-giken.jp/fhc/api/train_tetsudo/delay.json'
# Get Chien JSON
def get_chien(keyword):
req = urllib.request.Request(CHIEN_URL)
with urllib.request.urlopen(req) as res:
#res = urllib2.urlopen(CHIEN_URL)
datas= json.loads(res.read())
results = ""
for data in datas:
chien_train = data["name"] + "(" + data["company"] + ") "
if chien_train.find(keyword) > -1:
results += chien_train
return results
def lambda_handler(event, context):
if event['request']['type'] == "IntentRequest":
intent = event['request']['intent']
slot = intent['slots']['MessageType']['value']
url = "https://notify-api.line.me/api/notify"
# LINE Token
token = "c3EkzehGnZvpEgAtw1iTMjsU4NZVi7echeVgvtWXamQ"
headers = {"Authorization" : "Bearer "+ token}
chien_result = get_chien(slot)
if chien_result:
message = chien_result + "は遅延しています!"
else:
message = slot + "は遅延していません!"
payload = {"message" : message}
r = requests.post(url ,headers = headers ,params=payload)
talk_message = str(message) + "ラインにも通知しました!"
        # If there is a delay message, notify LINE and end the session
endSession = True
else:
keyword = "東日本"
chien_test = get_chien(keyword)
talk_message = "とりあえず、"
talk_message+= chien_test+"は遅延中!" if chien_test else keyword + "は遅延してません!"
talk_message+= "小田急線の遅れ?地下鉄の遅延は?などと聞いてみてください!"
        # If there is no delay message, ask what to check and keep the session open
endSession = False
response = {
'version': '1.0',
'response': {
'outputSpeech': {
'type': 'PlainText',
'text': talk_message
},
"shouldEndSession": endSession
}
}
return response
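# For local testing, a minimal event of the shape this handler expects might look
# like the following (illustrative only; running it would POST to LINE using the
# token above):
# sample_event = {"request": {"type": "IntentRequest",
#                             "intent": {"slots": {"MessageType": {"value": "東日本"}}}}}
# print(lambda_handler(sample_event, None))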
|
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib.colors import SymLogNorm
from matplotlib.colors import LogNorm
from mpl_toolkits.mplot3d import Axes3D
import scipy.stats as stats
import numpy as np
import pandas as pd
import h5py
import os
import subprocess
import time
from collections import Counter
pi = np.pi
mod = 1000000000000
G3X_data = ('/home/ppxrh2/Documents/test_pollux/TheThreeHundred/playground/rh'
'aggar/G3X_data/')
redshifts = np.array(pd.read_csv(G3X_data+'G3X_300_redshifts.txt', sep='\s+'
), dtype='float')
c_ids = np.array(pd.read_csv(G3X_data+('ds_infor_G3X_progen/DS_G3X_snap_128_'
'center-cluster_progenitors.txt'), sep='\s+', usecols=['rID[0]']),
dtype='int')[:,0] - (128*mod+1)
plt.rc('font', family='serif', size=18)
plt.rc('text', usetex=True)
plt.rc('legend', fontsize=18, frameon=False, loc='upper right')
plt.rc('axes', labelsize=20)
plt.rc('lines', markersize=5.)
def ld_arr(fname, sep='\s+', dtype='float'):
return np.array(pd.read_csv(fname, sep=sep), dtype=dtype)
def find_stdev(array):
""" Find median, and 1 sigma error bars on a dataset """
array = np.sort(array)
length = float(len(array))
if length < 4.:
return np.median(array), 0., 0.
    stdevs = 1.  # number of sigmas for the error bars (use 2. for 2-sigma)
    excl = 0.317310508  # two-tailed fraction excluded at 1 sigma (0.045500264 for 2 sigma)
low_b_f = ((excl/2.) * length) - 0.5
high_b_f = ((1. - (excl/2.)) * length) - 0.5
low_b, high_b = int(low_b_f), int(high_b_f)
median = np.median(array)
val_dn = (array[low_b] * (1. + (float(low_b) - low_b_f))) + (
array[low_b+1] * (low_b_f - float(low_b)))
err_dn = (median - val_dn) / stdevs
val_up = (array[high_b] * (1. + (float(high_b) - high_b_f))) + (
array[high_b+1] * (high_b_f - float(high_b)))
err_up = (val_up - median) / stdevs
return median, err_up, err_dn
def bootstrap(array, n):
l = len(array)
meds = np.zeros(0)
for i in range(n):
np.array(np.random.rand(l)*l, dtype='int')
arr_new = array[np.array(np.random.rand(l)*l, dtype='int')]
meds = np.append(meds, np.median(arr_new))
return find_stdev(meds)
def data_reduc(c, bins, mscut=0, lcut=0):
bs_data = pd.read_csv('/run/media/ppxrh2/166AA4B87A2DD3B7/MergerTreeAHF/'
'MergerTreeAHF_General_Tree_Comp/NewMDCLUSTER_%04d/snap_128/'
'CLUSTER_%04d_backsplash.txt' % (c, c), sep='\s+')
bs_data = np.array(bs_data, dtype='float')
select = np.ones(len(bs_data))>0.
if lcut > 0:
ls_i = pd.read_csv('/run/media/ppxrh2/166AA4B87A2DD3B7/NewMDCLUSTER_'
'data/luminosities/NewMDCLUSTER_%04d/GadgetX-NewMDCLUSTER_'
'%04d.snap_128.z0.000.AHF_luminosities' % (c, c), sep='\s+',
usecols=['#', 'haloid'])
ls_i = np.array(ls_i, dtype='int')
ls_i[:, 0] -= (1+128*mod)
lums = np.zeros(len(select))
lum_sel = ls_i[:, 0]<len(select)
lums[ls_i[lum_sel, 0]] = ls_i[lum_sel, 1]
select = select * (lums >= lcut)
if mscut > 0.:
ms_i = h5py.File('/run/media/ppxrh2/166AA4B87A2DD3B7/NewMDCLUSTER_'
'data/reduced_cluster_info/mstars/CLUSTER_%04d_mstars' % c)
ms_i = np.array(ms_i[u'128'], dtype='int')[:len(select)] >= mscut
select = select * ms_i
bs_data = bs_data[select]
r_select = (bs_data[:, 0] >= bins[0]) * (bs_data[:, 0] < bins[-1])
bs_data = bs_data[r_select]
return bs_data
|
"""The simplest way to create bit-flags.
Basic usage
-----------
>>> import intflags
>>> x, y, z = intflags.get(3)
>>> flags = x | y
>>> y in flags
True
>>> z in flags
False
>>> int(y)
2
In a class
----------
>>> class MyFlags:
... A, B, C, D = intflags.get(4)
...
>>> flags = MyFlags.A | MyFlags.D
>>> new = flags - MyFlags.D
>>> MyFlags.D in new
False
>>> new == MyFlags.A
True
"""
# ISC License
#
# Copyright (c) 2020, Robert "SeparateRecords" Cooper
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
__all__ = ["IntFlag", "get"]
__author__ = "SeparateRecords <me@rob.ac>"
__copyright__ = "(c) Robert Cooper, 2020"
__version__ = "2.0.0"
from itertools import count
class IntFlag(int):
"""Create an int flag, optionally using a namespace."""
def __new__(cls, i, ns=None):
instance = super().__new__(cls, i)
instance.ns = ns
return instance
def check(self, other):
try:
return self.ns == other.ns
except AttributeError:
return False
def make(self, i):
return type(self)(i, self.ns)
def __eq__(self, other):
try:
return int(self) == int(other) and self.ns == other.ns
except AttributeError:
return NotImplemented
def __contains__(self, other):
if not self.check(other):
return False
return bool(self & other)
def __or__(self, other):
if not self.check(other):
msg = "Flags must share a namespace to create a union."
raise ValueError(msg)
return self.make(int.__or__(self, other))
def __add__(self, other):
return self.__or__(other)
def __sub__(self, other):
if not self.check(other):
raise ValueError("Flags must share a namespace to be subtracted.")
value = self & ~other
return self.make(value)
def __str__(self):
return str(int(self))
def __repr__(self):
return "<Flag [{0}, ns={0.ns}]>".format(self)
# The global namespace index to ensure no two sets of flags have the same ns.
# This will be incremented by ``get()`` with every call.
_NS_IDX = count()
def get(n, use_ns=True):
"""Create ``n`` flags in the same namespace, optionally with a leading sentinel.
If ``n == 1``, a single flag is returned (not as an iterable).
"""
ns = next(_NS_IDX) if use_ns else None
if n == 1:
return IntFlag(1, ns)
return [IntFlag(2 ** i, ns) for i in range(0, n)]
|
# pylint: disable=missing-docstring
from ._version import __version__
from .application import Application, WebApplication
from .page import WebPage
from .element import WebElement
|
from graphics import *
import time
win=GraphWin("A STRAIGHT LINE USING BRESENHAM'S LINE DRAWING ALGORITHM",900,900)
def main():
line(100,100,200,300)
win.getMouse()
win.close()
def line(x1, y1, x2, y2):
    # Bresenham's integer line algorithm, generalised to all octants so that
    # steep lines (such as the demo call with slope 2) reach their endpoint.
    dx = abs(x2 - x1)
    dy = abs(y2 - y1)
    sx = 1 if x2 >= x1 else -1
    sy = 1 if y2 >= y1 else -1
    x, y = x1, y1
    err = dx - dy
    while True:
        put_pixel(x, y, "red")
        if x == x2 and y == y2:
            break
        e2 = 2 * err
        if e2 > -dy:
            err -= dy
            x += sx
        if e2 < dx:
            err += dx
            y += sy
def put_pixel(x,y,color="red"):
global win
p= Point(x,y)
p.setFill(color)
p.draw(win)
time.sleep(.005)
main()
|
from django.db import models
from django.conf import settings
from datetime import datetime
from django.contrib.postgres.fields import ArrayField
class Log(models.Model):
subject = models.CharField(max_length=100)
created_at = models.DateTimeField(auto_now_add=True)
last_update = models.DateTimeField(auto_now=True)
detail = models.TextField()
project = models.ForeignKey('Task', on_delete=models.CASCADE)
created_by = models.ForeignKey('user.CustomUser', on_delete=models.CASCADE)
def __str__(self):
return self.subject
class Task(models.Model):
STATUS_CHOICES = [
('OP', 'ON PROGRESS'),
('RE', 'REVIEWING'),
('FU', 'FOLLOW UP'),
('CP', 'COMPLETED')
]
subject = models.CharField(max_length=100)
description = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
last_update = models.DateTimeField(auto_now=True)
status = models.CharField(max_length=2, choices=STATUS_CHOICES, default='OP')
task_members= models.CharField(max_length=999, blank=True)
created_by = models.ForeignKey('user.CustomUser', on_delete=models.CASCADE)
def __str__(self):
return self.subject
|
import re
# Email validation checker
email_pattern = re.compile(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)")
s = 'andy.mtv12@gmail.com'
email = email_pattern.search(s).group() if email_pattern.search(s) else 'incorrect email'
print(email)
# at least 8 characters, containing both lowercase and uppercase letters; digits and the symbols $%#@ are allowed
password_pattern = re.compile(r"(^(?=.*[a-z])(?=.*[A-Z])[a-zA-Z0-9$%#@]{8,}$)")
p = 'aasdas31'
password = password_pattern.search(p).group() if password_pattern.search(p) else 'incorrect password'
print(password)
|
import random
from time import time
def quicksort(alist,start,end):
if start < end:
pivot = partition(alist,start,end)
quicksort(alist,start,pivot-1)
quicksort(alist,pivot+1,end)
def partition(alist,first,last):
pivot = alist[first]
leftmark = first + 1
rightmark = last
done = False
while not done:
while leftmark <= rightmark and alist[leftmark] <= pivot:
leftmark += 1
while alist[rightmark] >= pivot and rightmark >= leftmark:
rightmark -= 1
if rightmark < leftmark:
done = True
else:
alist[leftmark],alist[rightmark] = alist[rightmark],alist[leftmark]
alist[first],alist[rightmark] = alist[rightmark],alist[first]
return rightmark
start = time()
quicksort([random.randint(0, 10000) for _ in range(1000000)], 0, 1000000 - 1)
end = time() - start
print(end)
|
from queries import UPDATE_USER_EXPERIENCE
from queries import DELETE_USER
from app import experience
from queries import READ_USER_EXPERIENCE
from queries import INSERT_INTO_DATABASE_EXPERIENCE
from flask import Flask,render_template,request,redirect
from decouple import config
from flask_mysqldb import MySQL
from dotenv import load_dotenv
from queries import *
load_dotenv()
import MySQLdb
import os
def create_user_experience(years, expertise, uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
print("inside create message")
cursor.execute( INSERT_INTO_DATABASE_EXPERIENCE, (years, expertise, uniqueid))
db.commit()
def read_user_experience(uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
cursor.execute( READ_USER_EXPERIENCE , (uniqueid, ))
experience = cursor.fetchone()
return experience
def delete_user_experience(uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
cursor.execute( DELETE_USER, (uniqueid, ))
db.commit()
def update_user_experience(years, expertise, uniqueid):
db = MySQLdb.connect(os.getenv('MYSQL_HOST'),os.getenv('MYSQL_USER'),os.getenv('MYSQL_PASSWORD'),os.getenv('MYSQL_DB') )
cursor = db.cursor()
print("update given message")
cursor.execute( UPDATE_USER_EXPERIENCE,(years, expertise, uniqueid,))
db.commit()
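# The four helpers above each open a fresh connection from the same environment
# variables; a small shared helper like this (illustrative, not part of the
# original module) would remove the repetition:
def _get_db():
    return MySQLdb.connect(os.getenv('MYSQL_HOST'), os.getenv('MYSQL_USER'),
                           os.getenv('MYSQL_PASSWORD'), os.getenv('MYSQL_DB'))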
|
def is_rotation(s1, s2):
return s1 in s2 + s2 and len(s1) == len(s2)
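# s1 is a rotation of s2 exactly when s1 appears in s2 concatenated with itself
# and the lengths match; a couple of quick illustrative checks:
if __name__ == "__main__":
    assert is_rotation("erbottlewat", "waterbottle")
    assert not is_rotation("hello", "world")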
|
# Generated by Django 2.2.13 on 2020-07-16 17:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0022_auto_20200712_1630'),
]
operations = [
migrations.AddField(
model_name='product',
name='amount',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='product',
name='variant',
field=models.CharField(choices=[('None', 'None'), ('Size', 'Size'), ('Color', 'Color'), ('Size-Color', 'Size-Color')], default='None', max_length=10),
),
]
|
#!/usr/bin/env python3.8
import sys,os,getopt
from lxml import etree
def extractInterrupts(mplabXDir:str,chipName:str):
    root: etree._ElementTree = etree.parse(os.path.join(mplabXDir, chipName + ".atdf"))
family=str(root.xpath("devices/device/@family")[0])
interrupts = root.xpath("devices/device/interrupts/interrupt")
results={}
for interrupt in interrupts:
irqIndex=None
caption=""
irqCaption=""
irqName=""
moduleInstance=""
for attrib in interrupt.attrib:
if attrib == "name":
name=interrupt.attrib["name"]
elif attrib == "irq-name":
irqName = interrupt.attrib["irq-name"]
elif attrib == "index":
index=int(interrupt.attrib["index"])
elif attrib == "irq-index":
irqIndex = int(interrupt.attrib["irq-index"])
elif attrib == "caption":
caption=interrupt.attrib["caption"]
elif attrib == "irq-caption":
irqCaption=interrupt.attrib["irq-caption"]
elif attrib.find('alternate-caption') >= 0:
unknown = attrib
elif attrib == "module-instance":
moduleInstance=interrupt.attrib["module-instance"]
else:
                print('Unknown Interrupt Attribute: ' + attrib)
sys.exit(1)
        if irqIndex is None:
irqIndex=index
if not index in results.keys():
results[index]={irqIndex:{"name": name, "caption": caption, "moduleInstance": moduleInstance,"irq-caption":irqCaption,"irq-name":irqName}}
else:
results[index].update({irqIndex:{"name": name, "caption": caption, "moduleInstance": moduleInstance,"irq-caption":irqCaption,"irq-name":irqName}})
results = dict(sorted(results.items()))
if chipName.startswith('ATSAMD51') or chipName.startswith('ATSAMD21'):
results.pop(-15)
if chipName.startswith('ATSAMD21'):
results[-5][-5]["name"]="SVC"
if chipName.startswith('ATSAMD51'):
for key,interrupt in results.items():
if "moduleInstance" in interrupt[key] and interrupt[key]["moduleInstance"] != "":
if interrupt[key]["moduleInstance"] != interrupt[key]["name"]:
interrupt[key]["name"] = interrupt[key]["moduleInstance"]
if not (key-1 in results) or (not results[key-1][key-1]["name"].startswith(interrupt[key]["moduleInstance"])):
interrupt[key]["name"]=interrupt[key]["name"]+"_0"
else:
interrupt[key]["name"]=interrupt[key]["name"]+"_"+str(int(results[key-1][key-1]["name"].replace(interrupt[key]["name"]+"_",""))+1)
return(results)
def main(argv):
mplabXDir = "/Applications/Microchip"
mplabXDir="atdf/"
chip=""
try:
opts, args = getopt.getopt(argv,"hc:",["chip="])
except getopt.GetoptError:
print('atdfToPas.py -c <Chip>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('atdfToPas.py -c <Chip>')
sys.exit()
elif opt in ("-c", "--chip"):
chip = arg
if chip == "":
print('atdfToPas.py -c <Chip>')
sys.exit()
interrupts=extractInterrupts(mplabXDir,chip)
minInterrupt=list(interrupts.keys())[0]
maxInterrupt=list(interrupts.keys())[-1]
    for interrupt in range(minInterrupt, maxInterrupt + 1):
if interrupt in interrupts.keys():
values=interrupts[interrupt][list(interrupts[interrupt].keys())[0]]
if (values["moduleInstance"] != "") and (values["moduleInstance"] != values['name']):
name=values['moduleInstance']+"_"+values['name']+"_interrupt"
else:
name=values['name']+"_interrupt"
print(f"{name:30} //IRQ:{interrupt:03} {values['caption']}")
else:
print(f"{'':30} //IRQ:{interrupt:03}")
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/python
import nltk
from nltk.corpus import stopwords
import re
import json
pythonDictionary = {'name':'Bob', 'age':44, 'isEmployed':True}
dictionaryToJson = json.dumps(pythonDictionary)
class InputReadAndProcess(object):
def __init__(self):
#print("hello")
self.inp = ""
self.content = []
self.headers_list = []
self.from_to_data = []
self.subjects_set = set()
self.subjects_list = {}
self.subjects_array = []
self.nodes_array = []
self.nodes_set = set()
self.nodes_dict = {} #name --> subject array
self.target_source_dict = {}
self.json_links = []
def get_relationship_links_json(self):
#print("get_relationship_links_json")
#print()
#print()
for h in self.headers_list:
source = self.get_source(h)
#print("source")
#print(source)
all_targets = self.get_all_targets(h)
#print("all_targets")
#print(all_targets)
target_source_pairs = self.get_target_source_pairs(all_targets, source)
#print("target_source_pairs")
#print(target_source_pairs)
for pair in target_source_pairs:
if pair in self.target_source_dict:
self.target_source_dict[pair] += 1
else:
self.target_source_dict[pair] = 1
#print("target_source_dict")
#print(self.target_source_dict)
self.json_links = self.getLinksJson()
return self.json_links
#self.target_source_dict
def getLinksJson(self):
#self.target_source_dict
#print("Link JSON start")
nodes_json = []
for node in self.target_source_dict:
#print(node)
new_dict = {}
src_trgt = node.split(";");
if len(src_trgt) != 2:
print("nodu")
else:
target = src_trgt[0].split("[")
tgt = target[0].split("(")
new_dict["target"] = tgt[0]
#new_dict["source"] = src_trgt[1]
                source = src_trgt[1].split("[")
src = source[0].split("(")
new_dict["source"] = src[0]
new_dict["shared_email"] = self.target_source_dict[node]
nodes_json.append(new_dict)
#print(nodes_json)
return nodes_json
def get_target_source_pairs(self, all_targets, source):
all_pairs = []
for target in all_targets:
pair = target + ";" + source
all_pairs.append(pair);
return all_pairs
def get_source(self, header):
node = ""
for h in header:
h = h.lower()
if h.startswith('from: '):
temp = h.strip()
first_part = temp.split('[')
nodes_part = first_part[0].strip().split(':')
#nodes_part = temp.split(':')
node = nodes_part[1].strip()
return node
def get_all_targets(self, header):
for h in header:
h = h.strip().lower()
if h.startswith('to: ') or h.startswith('cc: ') or h.startswith('bcc: '):
#print("inside to CC, BCC")
h = h.strip()
temp = h.split(',')
#print(temp)
if ":" in temp[0]:
tmp = temp[0].split(':')
temp[0] = tmp[1]
i = 0
for node in temp:
node = node.strip()
if node != "":
temp[i] = node
i += 1
return temp
def get_relationship_nodes_json(self):
#self.headers_list
for h in self.headers_list:
current_sub = self.get_subject(h)
#print("Curr Sub")
#print(current_sub)
if current_sub not in self.subjects_set:
self.subjects_set.add(current_sub)
self.subjects_array.append(current_sub)
self.subjects_list[current_sub] = len(self.subjects_array) - 1
#print("subjects_list")
#print(self.subjects_list)
current_nodes = self.get_nodes(h)
#print("current_nodes")
#print(current_nodes)
for node in current_nodes:
subj_arr_tmp = []
if node in self.nodes_set:
#get value(sub array) nodes_dict[node]
subj_arr_tmp = self.nodes_dict[node]
#print("Wanted lop After extracting sub array")
#print(subj_arr_tmp)
                    sub_id = self.subjects_list[current_sub]
                    if sub_id not in subj_arr_tmp:
                        subj_arr_tmp.append(sub_id)
                    self.nodes_dict[node] = subj_arr_tmp
else:
self.nodes_set.add(node)
self.nodes_array.append(node)
#adds node and subject
sub_id = self.subjects_list[current_sub]
subj_arr_tmp.append(sub_id)
self.nodes_dict[node] = subj_arr_tmp
#print("self.nodes_dict")
#print(self.nodes_dict)
return self.getJsonForDict()#self.nodes_dict
def getJsonForDict(self):
#self.nodes_dict
#print("JSON start")
nodes_json = []
for node in self.nodes_dict:
#print(node)
new_dict = {}
'''subject = self.nodes_dict[node].split("[")
sbjt = subject[0].split("(")
#new_dict["subject"] = self.nodes_dict[node]
new_dict["subject"] = sbjt[0]
'''
new_dict["subject"] = self.nodes_dict[node]
'''
name = node.split("[")
nme = name[0].split("(")
#new_dict["name"] = node
new_dict["name"] = nme[0]
'''
new_dict["name"] = self.nodes_dict[node]
nodes_json.append(new_dict)
#print(nodes_json)
return nodes_json
def get_nodes(self, header):
nodes = []
node_set = set()
for h in header:
h = h.lower()
#print("Curr Header")
#print(h)
if h.startswith('from: '):
#print("inside From Loop")
temp = h.strip()
first_part = temp.split('[')
nodes_part = first_part[0].strip().split(':')
#nodes_part = temp.split(':')
node = nodes_part[1].strip()
#print("node = ")
#print(node)
if node not in node_set:
nodes.append(node)
node_set.add(node)
if h.startswith('to: ') or h.startswith('cc: ') or h.startswith('bcc: '):
#print("inside to CC, BCC")
h = h.strip()
temp = h.split(',')
#if h.startswith('CC: ') or h.startswith('BCC: '):
#print("CC/BCC")
#print(temp)
if ":" in temp[0]:
tmp = temp[0].split(':')
temp[0] = tmp[1]
for node in temp:
node = node.strip()
if node not in node_set and node != "":
nodes.append(node)
node_set.add(node)
#print("Nodes look like")
#print(nodes)
#print()
return nodes
def get_subject(self, header):
current_sub = []
for h in header:
if h.startswith('Subject: '):
                current_sub = h.split(':')  # assuming no other colons; alternatively, start after the "Subject: " prefix
if len(current_sub) > 2:
return current_sub[2].strip()
return current_sub[1].strip()
def extract_header_and_content(self):
#import unicode from unicode
import fileinput
content_read = False
header_read = False
header_data = []
for line in fileinput.input():
#if line.contains(
self.inp += line #.encode('ISO-8859-1').decode('ascii','ignore')
if line.startswith('From: '):
header_read = True
header_data = []
content_read = False
#if line.startswith "Subject: " or "From: " or line.startswith "Sent: " or line.startswith "To: " or line.startswith "CC: " or line.startswith "BCC: ":
if header_read == True:
header_data.append(line)
self.from_to_data.append(line)
if content_read == True:
self.content.append(line)
if line.startswith('Subject: '):
header_read = False
self.headers_list.append(header_data)#append/add--> need to check
content_read = True
#print("Content")
#for c in self.content:
#print(c)
#print("Headers")
#for h in self.headers_list:
#print(h)
def get_final_json(self, nodes_json, links_json):
final_json = {}
final_json["nodes"] = nodes_json
final_json["links"] = links_json
return final_json
class Execution(object):
    def __init__(self):
print("hi")
def run(self):
obj = InputReadAndProcess()
obj.extract_header_and_content()
nodes_json = obj.get_relationship_nodes_json()
links_json = obj.get_relationship_links_json()
final_json = obj.get_final_json(nodes_json, links_json)
print("Relationship JSON")
print(final_json)
def main():
ob = Execution()
ob.run()
main()
|
from django.contrib.auth.models import User
from rest_framework.test import APITestCase
from rest_framework.status import HTTP_401_UNAUTHORIZED, HTTP_200_OK
from pycont.apps.users.serializers import UserSerializer
class AuthTest(APITestCase):
def setUp(self):
User.objects.create_user('sieira', password='Pa$$word1234')
def test_should_return_user_profile(self):
response = self.client.post(
'/auth/', data={'username': 'sieira', 'password': 'Pa$$word1234'}
)
self.assertEqual(response.status_code, HTTP_200_OK)
self.assertEqual(
response.json()['user'],
UserSerializer(User.objects.get(username='sieira')).data
)
def test_jwt_token_in_httponly_cookie(self):
response = self.client.post(
'/auth/', data={'username': 'sieira', 'password': 'Pa$$word1234'}
)
self.assertEqual(response.status_code, HTTP_200_OK)
self.assertEqual(
set(self.client.cookies.keys()),
{'Authorization', 'Authorization_refresh', 'csrftoken'}
)
def test_logout_deletes_cookie(self):
response = self.client.post(
'/auth/', data={'username': 'sieira', 'password': 'Pa$$word1234'}
)
self.assertEqual(response.status_code, HTTP_200_OK)
self.assertEqual(
set(self.client.cookies.keys()),
{'Authorization', 'Authorization_refresh', 'csrftoken'}
)
response = self.client.post('/auth/delete/')
self.assertEqual(response.status_code, HTTP_200_OK)
self.assertEqual(self.client.cookies['Authorization'].value, '')
self.assertEqual(self.client.cookies['Authorization_refresh'].value, '')
response = self.client.get('/profile/')
self.assertEqual(response.status_code, HTTP_401_UNAUTHORIZED)
def test_profile_requires_auth(self):
response = self.client.get('/profile/')
self.assertEqual(response.status_code, HTTP_401_UNAUTHORIZED)
self.client.login(username='sieira', password='Pa$$word1234')
response = self.client.get('/profile/')
self.assertEqual(response.status_code, HTTP_200_OK)
|
from django.apps import AppConfig
class ExpoCmsConfig(AppConfig):
name = 'expo_cms'
|
from pathlib import Path
import xml.etree.ElementTree as ET
import re
from nltk import Tree
from collections import deque
from typing import List, Any
from babybertsrl.srl_utils import make_srl_string
from babybertsrl import configs
NAME = 'human-based-2018'
XML_PATH = Path(f'data/srl_{NAME}/xml')
VERBOSE = False
EXCLUDE_CHILD = True
OUTSIDE_LABEL = 'O'
def has_props(e):
try:
next(e.iterfind('{http://www.talkbank.org/ns/talkbank}props'))
except StopIteration:
return False
else:
return True
def is_child(e):
"""is utterance spoken by child?"""
if e.attrib['who'] == 'CHI':
return True
else:
return False
def get_start_index(a: List[Any],
b: List[Any],
) -> int:
"""return index into "a" which is first location of section of "a" which matches "b". """
num_b = len(b)
init_length = num_b - 1
d = deque(a[:init_length], maxlen=num_b)
for n, ai in enumerate(a[init_length:]):
d.append(ai)
if list(d) == b:
return n
else:
raise ValueError('a does not contain b')
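# e.g. get_start_index([7, 8, 9, 8, 9], [8, 9]) returns 1, the first index at which
# the subsequence [8, 9] starts (an illustrative example).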
num_no_predicate = 0
num_no_arguments = 0
num_bad_head_loc = 0
num_bad_arg_loc = 0
num_prepositions = 0
num_total_good = 0
lines = []
for file_path in sorted(XML_PATH.rglob('*.xml')):
parse_tree = ET.parse(str(file_path))
root = parse_tree.getroot()
num_good_props_in_file = 0
for utterance in root:
if not has_props(utterance):
print('WARNING: Did not find propositions. Skipping')
continue
        if is_child(utterance) and EXCLUDE_CHILD:
            print('WARNING: Skipping child utterance')
            continue
# get parse tree
parse_string = utterance.find('{http://www.talkbank.org/ns/talkbank}parse').text
parse_tree = Tree.fromstring(parse_string)
# words - get them from parse tree because parsing xml is difficult
words = parse_tree.leaves()
if VERBOSE:
print()
print('=============================================')
print(f'{file_path.name} {utterance.attrib["uID"]}')
print(' '.join(words))
print('=============================================')
print()
# collect label sequence for each <proposition> in the utterance
sense2labels = {}
for proposition in utterance.iter('{http://www.talkbank.org/ns/talkbank}proposition'):
if proposition.attrib['lemma'].endswith('-p'): # TODO what to do here?
print('WARNING: Skipping prepositional proposition')
num_prepositions += 1
continue
if VERBOSE:
print(proposition.attrib)
# initialize label-sequence
sense = proposition.attrib['sense']
label_text_list = list(proposition.itertext())
labels = [OUTSIDE_LABEL for _ in range(len(words))]
sense2labels[sense] = labels
is_bad = False
# loop over arguments in the proposition - reconstructing label-sequence along the way
for label_text in label_text_list:
# parse label_text
res = re.findall(r'(\d+):(\d)-(.*)', label_text)[0]
head_loc = int(res[0]) # location in sentence of head (not first word) of argument
num_up = int(res[1]) # levels up in hierarchy at which all sister-trees are part of argument span
tag = str(res[2])
if VERBOSE:
print(f'{head_loc:>2} {num_up:>2} {tag:>12}')
try:
words[head_loc]
except IndexError:
print('WARNING: Bad head location')
num_bad_head_loc += 1
is_bad = True
break
if 'rel' in tag:
labels[head_loc] = 'B-V'
else:
tp = parse_tree.leaf_treeposition(head_loc)
argument_tree = parse_tree[tp[: - num_up - 1]] # go up in tree from head of current argument
argument_length = len(argument_tree.leaves())
argument_labels = [f'B-{tag}'] + [f'I-{tag}'] * (argument_length - 1)
start_loc = get_start_index(words, argument_tree.leaves())
if not labels[start_loc: start_loc + argument_length] == [OUTSIDE_LABEL] * argument_length:
print('WARNING: Bad argument location. Skipping')
num_bad_arg_loc += 1
is_bad = True
# print(labels)
# labels[start_loc: start_loc + argument_length] = argument_labels
# print(labels)
#
# if input():
# continue
break
labels[start_loc: start_loc + argument_length] = argument_labels
if is_bad:
continue
# pre-check console
if VERBOSE:
for w, l in zip(words, labels):
print(f'{w:<12} {l:<12}')
# checks
if labels.count('B-V') != 1:
print('WARNING: Did not find predicate')
num_no_predicate += 1
continue
if sum([1 if l.startswith('B-ARG') else 0 for l in labels]) == 0:
print('WARNING: Did not find arguments')
num_no_arguments += 1
continue
assert len(labels) == len(words)
# console
if VERBOSE:
print(make_srl_string(words, labels))
# make line
verb_index = labels.index('B-V')
x_string = " ".join(words)
y_string = " ".join(labels)
line = f'{verb_index} {x_string} ||| {y_string}'
# collect
num_good_props_in_file += 1
lines.append(line)
print('Collected {} good propositions in {}'.format(num_good_props_in_file, file_path.name))
num_total_good += num_good_props_in_file
print(f'num good ={num_total_good:,}')
print(f'num no arguments ={num_no_arguments:,}')
print(f'num no predicate ={num_no_predicate:,}')
print(f'num bad head location ={num_bad_head_loc:,}')
print(f'num bad arg location ={num_bad_arg_loc:,}')
print(f'num prepositions ={num_prepositions:,}')
print(f'Writing {len(lines)} lines to file...')
srl_path = configs.Dirs.data / 'training' / f'{NAME}_srl.txt'
with srl_path.open('w') as f:
for line in lines:
f.write(line + '\n')
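# Each written line has the form "<verb_index> <words> ||| <BIO labels>", e.g.
# (an illustrative, made-up utterance):
#   2 the dog chased the cat ||| B-ARG0 I-ARG0 B-V B-ARG1 I-ARG1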
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# File: osc4py3/oscchannel.py
# <pep8 compliant>
"""Base classe for OSC data transmission.
TransportChannel provides basic options common to communication.
TransportChannel subclasses are used for communication with peer OSC
systems. They wrap transmission of OSC packets via different ways.
A channel can be just a reader or just a writer (for packet based
communications like UDP or multicast), or both (for connected stream
based communications like TCP or serial USB).
"""
# TODO: add slip2packet when writing if necessary.
# To have a clean separation between channel readers and OSC management,
# we use an intermediate queue. It allows different policies for readers
# (ex. polling, own thread, global thread...) while keeping the same
# processing for message dispatching.
# Note: the queue manages concurrent access to its storage.
# Note: I initially set up a Semaphore to let threads block while waiting for
# a packet to become available in the received_rawpackets queue (there is no
# wait method on deque). But it required Python 3.2 for the Semaphore.wait
# timeout support, made the code unnecessarily less readable, and wasted some
# more time in system calls (TransportChannel.received_packet() + next_rawpacket
# cost twice).
# So I discarded it.
# If necessary, next_rawpacket() callers should implement a semi-active wait
# with frequent polling and some blocking with timeout.
import struct
import collections
import threading
import time
import queue
from .oscnettools import slip2packet
from . import oscscheduling
from . import oscpacketoptions
from . import oscbuildparse
# Problem with cycle in imports - finally, the function using this module
# does import one time when oscdispatching is None.
#from . import oscdispatching
oscdispatching = None
# These two limits and their checks are mainly for DoS prevention. They may be
# too high (tuning them would require a concrete use case).
# Note: if you modify the value, modify TransportChannel documentation.
# Maximum packet size allowed if using simple header size field.
MAX_PACKET_SIZE_WITH_HEADER = 1024 * 1024
# Maximum concurrent clients allowed to send packets to same reader
# (with partial packet data staying in buffers).
MAX_SOURCES_ALLOWED = 500
# To group data and options in a named tuple when posting packets.
ReceivedPacket = collections.namedtuple("ReceivedPacket", "rawosc packopt")
# Parameters to use in post_rawpacket(*LAST_RAWPACKET) to signal a special
# post (generally to have a thread wake up and check its termination flag).
LAST_RAWPACKET = ReceivedPacket(None, None)
# To group a packet to send and its options.
PendingPacket = collections.namedtuple("PendingPacket", "rawosc packopt")
# Monitor constants for automatically setup scheduling management.
SCHED_SELECTTHREAD = "selectthread"
SCHED_POLLTHREAD = "pollthread"
SCHED_OWNTHREAD = "ownthread"
# Queue of received raw OSC packets waiting to be processed.
# Each item is a tuple: (readername, sourceidentifier, oscrawdata)
# Note: oscrawdata is a memoryview on the packet data.
# Q? should we provide a maximum deque length for that ?
received_rawpackets = queue.Queue()
# Reference to all created channels by their name.
# Use the Python global lock to protect (rare) updates.
all_channels = {}
# Reference to channel by their peer identifiers.
all_channels_peers = {}
def get_channel(name):
"""Return a channel from a name.
:param name: the channel name to find channel.
:type name: str
"""
return all_channels.get(name, None)
#======================= COMMUNICATIONS ABSTRACTION =========================
class TransportChannel(object):
"""A communication way with an OSC peer for read and/or write.
Channel names can be built by subclasses from construction parameters.
By example, an USB connection can use the USB device name in its name.
Automatically called wrapper functions allow insertion of flow control
in communications.
Support for SLIP protocol and packet-length header in streams can be
automatically setup just via options.
    Set an auto_activate flag to True in construction options to have the
channel automatically enabled and running after construction.
:ivar chaname: key to identify the OSC stream.
:type chaname str
:ivar mode: access read/write to the transport, 'r' or 'w' or 'rw'
:type mode: str
:ivar is_reader: flag to use the channel for reading.
:type is_reader: bool
:ivar is_writer: flag to use the channel for writing.
:type is_writer: bool
:ivar logger: Python logger to trace activity.
Default to None
:type logger: logging.Logger
:ivar actions_handlers: map of action verb and code or message to call.
Default to empty map.
:type actions_handlers: { str : callable or str }
:ivar is_activated: flag set when the channel has been correctly activated.
:type is_activated: bool
:ivar is_scheduled: flag set when the channel has been inserted in
scheduling (ie. read/write are monitored).
:type is_scheduled: bool
    :ivar monitor: monitor used for this channel - the channel registers itself
        with this monitor when becoming scheduled.
This is the tool used to detect state modification on the channel
and activate reading or writing of data.
:type monitor: Monitoring
:ivar monitor_terminate: flag to indicate that our monitor must be
terminated with the channel deletion.
        Set automatically when using one of the SCHED_* monitor options,
        otherwise taken from the monitor_terminate key in options; default to False.
:type monitor_terminate: bool
:ivar monitored_writing: flag to indicate that channel is currently
monitored for writing (which is not mandatory).
:type monitored_writing: bool
    :ivar read_buffers: currently accumulated, not yet processed, read data per
        identified source, used when data don't come as complete
        packets but as chunks that need to be aggregated before identifying
        packet boundaries.
        Default to None, dict created when calling received_data() method.
    :type read_buffers: { identifier: bytearray }
    :ivar read_forceident: indexable information to use for peer
        identification on read data (other identification ways are not used).
Default to None
:ivar read_dnsident: use address to DNS mapping automatically built
from oscnettools module.
Default to True
:ivar read_datagram: flag to consider received data as entire datagrams
        (no data remains in the buffer, it's all processed when received).
Default to False.
:type read_datagram: bool
    :ivar read_maxsources: maximum number of different sources allowed to send
        data to this reader simultaneously, used to limit a possible DoS on
        the system via incomplete packets.
Set it to 0 to disable the limit.
Default to MAX_SOURCES_ALLOWED (500).
:type read_maxsources: int
:ivar read_withslip: flag to enable SLIP protocol on received data.
Default to False.
:type read_withslip: bool
:ivar read_withheader: flag to enable detection of packet size in the
beginning of data.
Default to False.
:type read_withheader: bool
:ivar read_headerunpack: either string data for automatic use of
struct.unpack() or a function to call.
For struct.unpack(), data is a tuple with: the format to decode header,
the fixed header size, and the index of packet length value within the
header tuple returned by unpack().
For function, it will receive the currently accumulated data and
must return a tuple with: the packet size extracted from the header,
and the total header size.
        If there is not enough data in the header to extract the packet size, the
function must return (0, 0).
Default to ("!I", 4, 0) to detect a packet length encoded in 4 bytes
unsigned int with network bytes order.
        If the function needs to skip some data in the current buffer
        (ex. the remains of an old failed communication), it can return a
        header size corresponding to the byte count to ignore, and a
        packet size of 0; this will consume the data with an -ignored-
        empty packet.
:type read_headerunpack: (str,int,int) or fct(data) -> packsize,headsize
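        A sketch of a custom function (hypothetical name), equivalent to the
        default ("!I", 4, 0) tuple::

            def unpack_uint32_header(data):
                if len(data) < 4:
                    # Not enough accumulated data to read the header yet.
                    return 0, 0
                return struct.unpack("!I", bytes(data[:4]))[0], 4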
:ivar read_headermaxdatasize: maximum count of bytes allowed in a packet
size field when using headers.
Set it to 0 to disable the limit.
Default to MAX_PACKET_SIZE_WITH_HEADER (1 MiB).
:type read_headermaxdatasize: int
    :ivar read_sequencing: indicates how the reader effectively
        proceeds to read.
:type read_sequencing: int flag
    :ivar write_pending: queue of pending OSC raw packets to write, with their
        options.
    :type write_pending: deque of PendingPacket
:ivar write_running: the OSC raw packet being written with its options
:type write_running: PendingPacket
:ivar write_lock: lock for concurrent access management to
write_pending, write_running & Co.
:type write_lock: Lock
    :ivar write_workqueue: queue of write jobs to execute. This allows the
        initial writing to peers to be managed in their own threads (nice for
        blocking write() calls) or in an event loop if working without threads.
        The queue will be filled when we detect that a write can occur
        (i.e. the same channel will have at most one write operation in
        the workqueue, even if there are multiple pending operations in
        the write_pending queue).
:type write_workqueue: WorkQueue or None
    :ivar write_wqterminate: flag to indicate that the work queue must be
        terminated when terminating the channel.
Default to False.
:type write_wqterminate: bool
:ivar write_withslip: flag to enable SLIP protocol on sent data.
Default to False.
:type write_withslip: bool
:ivar write_slip_flagatstart: flag to insert the SLIP END code (192) at
beginning of sent data (when using SLIP).
Default to True.
    :type write_slip_flagatstart: bool
"""
def __init__(self, name, mode, options):
"""Setup an TransportChannel.
:param name: identifier for the channel
:type name: str
        :param mode: access mode string, containing 'r' for a reader, 'w' for
            a writer and/or 'e' for an event channel (e.g. 'rw').
        :type mode: str
        :param options: map of key/value options for the channel control; using
            a map allows some options to be stored in preference files, and
            options to be added independently of intermediate classes.
See description of options with members of TransportChannel
(option keys use same names as attributes).
:type options: dict
"""
if name.startswith('_'):
raise ValueError("OSC channel name {!r} beginning with _ "\
"reserved.".format(name))
if name in all_channels:
raise ValueError("OSC channel/peer name {!r} already "\
"used.".format(name))
self.chaname = name
self.mode = mode
self.logger = options.get("logger", None)
self.actions_handlers = options.get("actions_handlers", {})
self.monitor = options.get("monitor", None)
if self.monitor == SCHED_SELECTTHREAD:
self.monitor = oscscheduling.get_global_socket_monitor()
self.monitor_terminate = False
elif self.monitor == SCHED_POLLTHREAD:
self.monitor = oscscheduling.get_global_polling_monitor()
self.monitor_terminate = False
elif self.monitor == SCHED_OWNTHREAD:
self.monitor = oscscheduling.create_polling_monitor()
self.monitor_terminate = False
# And... we automatically create the thread here.
period = options.get("monitor_period", 0.1)
oscscheduling.create_monitoring_thread(
self.monitor, period, "mon-" + self.chaname)
else:
self.monitor_terminate = options.get("monitor_terminate", False)
self.monitored_writing = False # needed even if not a writer, for
# monitoring flags computation.
self.is_reader = False
self.is_writer = False
self.is_event = False
self.is_activated = False
self.is_scheduled = False
if "r" in mode:
self.setup_reader_options(options)
if "w" in mode:
self.setup_writer_options(options)
if "e" in mode:
self.setup_event_options(options)
# Channel is constructed, can reference it.
all_channels[self.chaname] = self
if options.get('auto_start', True):
self.activate()
self.begin_scheduling()
def terminate(self):
"""Called when channel is no longer used.
        This is the end-of-life counterpart of __init__(); it finishes
        correctly closing / disassembling / destroying the channel and removes
        it from internal use.
The channel is normally never used after this call.
"""
if self.is_writer and self.write_wqterminate and \
self.write_workqueue is not None:
self.write_workqueue.terminate() # Do join() if threads.
if self.is_scheduled:
self.end_scheduling()
if self.monitor_terminate and self.monitor is not None:
self.monitor.terminate()
if self.is_activated:
self.deactivate()
del all_channels[self.chaname]
def setup_reader_options(self, options):
if self.logger is not None:
self.logger.info("OSC channel %r setup as reader.", self.chaname)
self.is_reader = True
self.read_buffers = None
self.read_forceident = options.get('read_forceident', None)
self.read_dnsident = options.get('read_dnsident', True)
self.read_datagram = options.get('read_datagram', False)
self.read_maxsources = options.get('read_maxsources',
MAX_SOURCES_ALLOWED)
self.read_buffersize = options.get('read_buffersize', 4096)
self.read_withslip = options.get("read_withslip", False)
self.read_withheader = options.get("read_withheader", False)
self.read_headerunpack = options.get('read_headerunpack', ("!I", 4, 0))
self.read_headermaxdatasize = options.get("read_headermaxdatasize",
MAX_PACKET_SIZE_WITH_HEADER)
def setup_writer_options(self, options):
if self.logger is not None:
self.logger.info("OSC channel %r setup as writer.", self.chaname)
self.is_writer = True
self.write_pending = collections.deque()
self.write_running = None
self.write_lock = threading.RLock()
# Will store tuples of: raw binary data, event to signal or None.
#Q? management of a priority queue?
self.write_withslip = options.get("write_withslip", False)
self.write_slip_flagatstart = options.get("write_slip_flagatstart",
True)
self.write_withheader = options.get("write_withheader", False)
self.write_workqueue = options.get("write_workqueue", None)
self.write_wqterminate = options.get("write_wqterminate", False)
def setup_event_options(self, options):
if self.logger is not None:
self.logger.info("OSC channel %r setup as event.", self.chaname)
self.is_event = True
def handle_action(self, verb, parameters):
"""Special management in transport time operations.
At some times transport channel methods call handle_action() to
enable management of special operations in the system.
        This is controlled by the actions_handlers dictionary, which
        maps action verbs to OSC messages or direct callables.
        OSC messages have the advantage of integrating simply into the
        general dispatching system, but the disadvantage of generally being
        processed asynchronously.
When using OSC messages, the action parameters must be basic
Python types usable as OSC values.
        Direct callables are processed immediately; they receive three
        parameters:
- channel name
- verb
- action parameters tuple
        The OSC pattern is built from:
        - handler string (may be empty, otherwise starting with /)
- channel oscidentifier:
- channel kind prefix (channel class specific)
- channel name
- verb
        It receives three parameters, like a direct callable function:
        - channel name
        - verb
        - action parameters tuple
        OSCMessage example for a connection request on a TCP channel, where
the handler is set to string "/osc4py3", and the parameters is
the IPV4 remote address and port tuple:
* addrpattern: /osc4py3/tcp/camera/conreq
* arguments: camera conreq (("123.152.12.4", 0),)
        OSCMessage example after having installed the requested connection
        on this TCP channel, with the same handler string "/osc4py3", when
        this is the second connection occurring on the channel - there is no
parameter but an ad-hoc channel has been created for the read and
write operations:
* addrpattern: /osc4py3/tcp/camera/2/connected
* arguments: camera__2 connected ()
OSCMessage example for a socket opened on an UDP channel, where the
handler is set to empty string, and the parameter is the local bound
port for the socket:
* addrpattern: /udp/soundservice/bound
* arguments: soundservice bound (17232, )
        :param verb: indication of the occurring action
:type verb: str
:param parameters: simple informations about the action
:type parameters: tuple
        :return: callback return value or None, always None for message
            handlers.
        :rtype: callback dependent
"""
global oscdispatching
handler = self.actions_handlers.get(verb, None)
if handler is None:
return
if isinstance(handler, str):
pattern = handler + self.oscidentifier() + "/" + verb
msg = oscbuildparse.OSCMessage(pattern, None, parameters)
if oscdispatching is None: # Import here to avoid import cycle
                from . import oscdispatching
oscdispatching.send_packet(msg, "_local")
return None
elif callable(handler):
return handler(self.chaname, verb, parameters)
else:
if self.logger is not None:
self.logger.debug("OSC channel %r handle_action %s with "\
"unusable handler %r.", self.chaname, verb,
handler)
raise ValueError("OSC channel {!r} invalid action handler for "\
"verb {}".format(self.chaname, verb))
    # Default channel kind prefix, overridden in subclasses (ex. "udp", "tcp",
# "usb", "serial"...).
chankindprefix = "chan"
def oscidentifier(self):
"""Return an identifier usable as a message pattern.
        The identifier must begin with /, must not be terminated by /, and
        must follow OSC pattern name rules.
"""
return "/" + self.chankindprefix + "/" + self.chaname
def activate(self):
"""Open the channel and install it inside channels scheduling.
"""
if self.is_activated:
if self.logger is not None:
self.logger.debug("OSC channel %r already activated.",
self.chaname)
return
if self.logger is not None:
self.logger.debug("OSC opening channel %r.", self.chaname)
self.handle_action("activating", ())
self.open()
self.is_activated = True
self.handle_action("activated", ())
if self.logger is not None:
self.logger.info("OSC channel %r activated.", self.chaname)
def deactivate(self):
"""Remove the channel from channels scheduling and close it.
"""
if not self.is_activated:
if self.logger is not None:
self.logger.debug("OSC channel %r already deactivated.",
self.chaname)
return
if self.is_scheduled: # Security
if self.logger is not None:
self.logger.warning("OSC preventively unscheduling "\
"channel %r before deactivating it.",
self.chaname)
self.end_scheduling()
if self.logger is not None:
self.logger.debug("OSC closing channel %r.", self.chaname)
self.handle_action("deactivating", ())
try:
self.close()
        except Exception:
if self.logger is not None:
self.logger.exception("OSC channel %r failure during close",
self.chaname)
            # By default we consider that the failure doesn't prevent the
            # close.
self.is_activated = False
self.handle_action("deactivated", ())
if self.logger is not None:
self.logger.info("OSC channel %r deactivated.", self.chaname)
def begin_scheduling(self):
"""Start channel communication monitoring.
"""
if self.logger is not None:
self.logger.debug("OSC scheduling channel %r.", self.chaname)
self.handle_action("scheduling", ())
if not self.is_activated:
raise RuntimeError("OSC channel must be activated before "\
"being scheduled")
if self.monitor is not None:
fno, rwe = self.calc_monitoring_fileno_flags()
if rwe:
self.monitor.add_monitoring(fno, self, rwe)
self.is_scheduled = True
self.handle_action("scheduled", ())
if self.logger is not None:
self.logger.info("OSC channel %r scheduled.", self.chaname)
def end_scheduling(self):
"""Terminate channel communication monitoring.
"""
if self.logger is not None:
self.logger.debug("OSC unscheduling channel %r.", self.chaname)
self.handle_action("unscheduling", ())
if self.monitor is not None:
fno, rwe = self.calc_monitoring_fileno_flags()
if rwe:
self.monitor.remove_monitoring(fno, self, rwe)
self.is_scheduled = False
self.handle_action("unscheduled", ())
if self.logger is not None:
self.logger.info("OSC channel %r unscheduled.", self.chaname)
def calc_monitoring_fileno_flags(self):
"""Calculate fileno and rwe flags for monitoring.
This function must only be called when there is a monitor.
"""
# Note: write is not monitored by default but activated when
# some data are written, using enable_write_monitoring() method.
rwe = oscscheduling.MONITORING_EVENTS_MAP[
(int(self.is_reader),
int(self.monitored_writing),
int(self.is_event))]
if self.monitor.need_fileno():
fno = self.fileno()
else:
fno = id(self)
return fno, rwe
def open(self):
"""Called when must start the channel.
When this method is called, resources needed to communicate with
the peer via the channel mmust be setup.
"""
pass
def close(self):
"""Called when must stop the channel.
When this method is called, resources needed to communicate with
the peer via the channel mmust be released.
"""
pass
def fileno(self):
"""Return integer used for select()/poll to check/wait for data.
"""
# Subclasses dealing with sockets must return the read fileno !!!
return None
def poll_monitor(self, deadlinetime, rwe):
"""Called by poll function to check transmissions status.
        When this method is called, it should just try to read the status
        of the channel and return it. If this implies some transmission,
        it must be taken into account when process_monevents() is called.
        :param deadlinetime: for possibly long read operations, absolute time
            not to go past.
Can be None to indicate no deadline.
Can be 0 to indicate to return as soon as possible.
:type deadlinetime: float
        :param rwe: operations to check, 'r' for read, 'w' for write, 'e' for
            event processing - there may be multiple operations.
        :type rwe: str
        """
        raise NotImplementedError("poll_monitor must be overridden")
def process_monevents(self, deadlinetime, oper):
"""Read next available raw data from the reader, or None.
When this method is called, there is normally some data to read
(or already read and cached somewhere), or write become possible
or some event occured.
The time spent here should be short - just the system call to
retrieve or send data.
In some case, some methods must be called to notify the end of a
transmission (ex. received_data() or received_packet() when
reading, packet_written() when writing).
:param deadlinetime: for possibly long read operations, absolute time
to not past over.
Can be None to indicate no deadline.
Can be 0 to indicate to return as soon as possible.
:type deadlinetime: float
:param oper: operations to do, 'r' for read, 'w' for write, 'e' for
event processing - there may be multiple operations.
:type oper: str
"""
raise NotImplementedError("process_monevents must be overriden")
#TODO: Use read_forceident and read_dnsident
def received_data(self, identifier, data):
"""Called by subclass when *part* of an OSC packet has been received.
Called by stream based readers to manage the stream itself.
        The method manages SLIP- and header-based identification of packets in
        streams.
        If the method can identify that a full packet has been received,
        then it calls the ad-hoc method to process it.
.. note: Datagram readers
Datagram based readers should directly call received_packet()
method, unless they need support of some received_data()
processing (multiple read concatenation, packet delimitation...).
        :param identifier: identifier of the OSC peer the new data comes from
:type identifier: indexable value
:param data: new data received.
:type data: bytes
"""
if self.logger is not None:
self.logger.debug("OSC channel %r receive data from %s: %s",
self.chaname, repr(identifier), repr(data))
if self.read_datagram:
thebuffer = data
else:
# Ensure we have buffer management ready.
if self.read_buffers is None:
self.read_buffers = {}
# Locally use the buffer.
thebuffer = self.read_buffers.get(identifier, bytearray())
if self.read_maxsources and \
len(self.read_buffers) > self.read_maxsources:
# DOS security check.
raise RuntimeError("OSC reader {}, maximum sources reach "\
"({})".format(self.chaname, self.read_maxsources))
thebuffer.extend(data)
# Will loop over buffer, extracting all available packets.
while True:
rawoscdata = None
if self.read_withslip:
rawoscdata, remain = slip2packet(thebuffer)
if rawoscdata is None: # No END found in slip data.
break
# Split: received packet data / remaining data for next packet.
rawoscdata = memoryview(rawoscdata)
thebuffer = remain
elif self.read_withheader:
# Extract packet size from header if possible.
if callable(self.read_headerunpack):
packetsize, headersize = self.read_headerunpack(thebuffer)
else:
sformat, headersize, index = self.read_headerunpack
if len(thebuffer) < headersize:
headersize = 0
else:
packetsize = struct.unpack(sformat,
thebuffer[:headersize])[index]
if self.read_headermaxdatasize and \
packetsize > self.read_headermaxdatasize:
# This is a security, in case header is
# incorrectly aligned, or someone send too
# much data.
raise ValueError("OSC reader {} from client {}, "\
"packet size indicated in header ({}) is "\
"greater than max allowed "\
"({})".format(self.chaname, identifier,
packetsize, self.read_headermaxdatasize))
if not headersize or headersize + packetsize > \
len(thebuffer): # Not enough data.
break
# Split: received packet data / remaining data for next packet.
rawoscdata = memoryview(thebuffer)
rawoscdata = rawoscdata[headersize:headersize + packetsize]
thebuffer = thebuffer[headersize + packetsize:]
elif self.read_datagram:
break
else:
raise RuntimeError("OSC reader {} has no way to detect "\
"packets".format(self.chaname))
if rawoscdata:
self.received_packet(identifier, rawoscdata)
if self.read_datagram:
            # We must process the whole datagram.
if thebuffer:
self.received_packet(identifier, memoryview(thebuffer))
else:
if thebuffer:
                # Store remaining data inside the buffers dictionary.
self.read_buffers[identifier] = thebuffer
elif identifier in self.read_buffers:
del self.read_buffers[identifier]
# or (?):
#self.read_buffers[identifier] = bytearray()
def received_packet(self, sourceidentifier, rawoscdata):
"""Called by subclasses when a complete OSC packet has been received.
        This method deals with a possible wrapper manipulating packet data
        before it is transmitted to higher OSC layers for decoding and
        dispatching.
It can be called directly by message based readers.
:param sourceidentifier: identification of the data source
:type sourceidentifier: hashable value
:param rawoscdata: OSC raw packet received from somewhere
:type rawoscdata: memoryview
"""
if self.logger is not None:
self.logger.info("OSC channel %r receive packet from %s: %s",
self.chaname, repr(sourceidentifier), repr(bytes(rawoscdata)))
# Build the packet options, just update packet identification.
packopt = oscpacketoptions.PacketOptions()
packopt.readername = self.chaname
packopt.srcident = sourceidentifier
packopt.readtime = time.time()
post_rawpacket(ReceivedPacket(rawoscdata, packopt))
def transmit_data(self, rawoscdata, packopt):
"""Install the osc data into the transmission queue for writing.
        Called by the distributing module when an OSC packet is sent.
:param rawoscdata: OSC raw packet prepared by peer manager.
:type rawoscdata: bytearray
:param packopt: options for packet sending
:type packopt: PacketOptions
"""
# May add a flag to check for writing initialization ready.
        # Systematic use of the write_pending queue ensures that packets
# are written in the right order, even in multithread context.
if self.logger is not None:
self.logger.debug("OSC channel %r transmit_data via "\
"write_pending queue", self.chaname)
tosend = PendingPacket(rawoscdata, packopt)
with self.write_lock:
if packopt.nodelay:
# Install packet before other packets to send.
                self.write_pending.appendleft(tosend)
else:
self.write_pending.append(tosend)
self.schedule_write_packet()
def packet_written(self):
"""Called by subclasses when current written packet is finished.
* Terminate properly current write, managing internal status.
* Schedule a new write if any available.
"""
if self.logger is not None:
self.logger.debug("OSC channel %r notification of written packet",
self.chaname)
with self.write_lock:
if self.write_running is None:
if self.logger is not None:
self.logger.warning("OSC channel %r packet_written "\
"called with no packet running", self.chaname)
else:
# Finished with this packet.
self.write_running.packopt.signal_event()
self.write_running = None
self.schedule_write_packet()
def schedule_write_packet(self):
"""Install a new packet for writting if possible.
To install a new packet ther must not be a currently running packet.
"""
# Note: this method may be called recursively in case of blocking
# write operations where start_write_packet() exit only when
# transmission is finished.
        # Such a case doesn't need monitoring, but should immediately re-schedule
# a new write at the end of start_write_packet() by calling
# packet_written() (which call schedule_write_packet()).
# This is the reason we use newwrite to start write operations out
# of the write_lock protected zone.
newwrite = False
# Search next packet to write.
with self.write_lock:
# write_running must have been cleared when a packet has been
            # written, else we consider someone started writing somewhere else.
if self.write_running is None and self.write_pending:
if self.logger is not None:
self.logger.debug("OSC channel %r another write packet "\
"available", self.chaname)
# Install another packet to write.
self.write_running = self.write_pending.popleft()
newwrite = True
if self.write_running is None:
self.enable_write_monitoring(False)
if newwrite:
if self.logger is not None:
self.logger.debug("OSC channel %r schedule of packet to "\
"write", self.chaname)
# If a special workqueue is attached to the channel write, then
# writing operations take place as jobs of that workqueue - this
            # allows the monitoring thread for multiple channels (ex. socket
            # monitoring threads) to only be used as a trigger detecting
            # when a channel becomes available.
# Note that the same write workqueue can be shared between
# different channels.
if self.write_workqueue is None:
self.write_operation()
else:
self.write_workqueue.send_callable(self.write_operation, ())
def write_operation(self):
"""Start to write the data, and enable monitoring.
This code is extracted from schedule_write_packet() to be callable
in the context of a workqueue (either on multithreading or in
an event loop).
"""
self.start_write_packet()
self.enable_write_monitoring(True)
def enable_write_monitoring(self, enable):
"""Called to enable or disable monitoring of the channel for writing.
        The default method calls the monitor methods if a monitor is present.
        Enabling or disabling monitoring more than once is managed, and
        monitor methods are only called one time.
        Subclasses can override this method if they have their own write
        monitoring management or don't need it.
        They must set the self.monitored_writing attribute to True/False if a
        monitoring deactivation must be done at the end of scheduling.
        They can call packet_written() if their writes are synchronous,
this will directly start another write operation if there is
one pending.
:param enable: flag to enable/disable write monitoring.
:type enable: bool
"""
if self.monitor is not None:
with self.write_lock:
# Manage possible recursive call with complete send in the
# subclass start_write_packet() method which directly call
# packet_written() with no more packet to write and no need
# for monitoring.
# So, if there is no running packet, disable monitoring.
if self.write_running is None:
enable = False
if enable == self.monitored_writing:
pass # Already done.
else:
if self.logger is not None:
self.logger.debug("OSC channel %r monitoring set "\
"to %r", self.chaname, enable)
# Note: we call this method for the fileno, but we only
# change the "w" monitoring status.
fno, rwe = self.calc_monitoring_fileno_flags()
if enable:
self.monitor.add_monitoring(fno, self, "w")
else:
self.monitor.remove_monitoring(fno, self, "w")
self.monitored_writing = enable
def start_write_packet(self):
"""Called to start writing of a packet.
        When this method is called there is a pending packet to write in the
        self.write_running attribute (PendingPacket tuple).
        The overriding method must activate the real writing of the data to the OSC peer.
After this method call, the enable_write_monitoring() method is
automatically called with True.
"""
raise NotImplementedError("start_write_packet must be overriden")
# ========================= CHANNEL PUBLIC FUNCTIONS =======================
def terminate_all_channels():
"""Terminate and wait for end of all channels.
"""
# Close transport channels.
for chan in list(all_channels.values()):
chan.terminate()
# ==================== TOP-LEVEL ACCESS TO RECEIVED DATA ====================
def next_rawpacket(timeout):
"""Return next packet extracted from the received queue.
:param timeout: maximum time to wait for a new packet, 0 for immediate
return, None for infinite wait.
:type timeout: float or None
    :return: received packet information
:rtype: ReceivedPacket named tuple or None if no data available
"""
if timeout == 0: # Would not block.
if received_rawpackets.empty():
return None
try:
packet = received_rawpackets.get(True, timeout)
received_rawpackets.task_done()
return packet
except queue.Empty:
return None
def post_rawpacket(receivedpacket):
"""Send a new packet in the reception queue.
    .. note: at this level packet options are used to associate reception
        information with the raw data (source, read time...).
:param receivedpacket: received data from peer and packet options
:type receivedpacket: ReceivedPacket
"""
received_rawpackets.put(receivedpacket)
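# Illustrative sketch only (not part of the original module): a semi-active
# wait around next_rawpacket(), as suggested in the header comment.  The
# polling period and the stop_event (a threading.Event) are assumptions.
def _example_poll_rawpackets(stop_event, period=0.05):
    """Poll the reception queue until stop_event is set, yielding packets."""
    while not stop_event.is_set():
        packet = next_rawpacket(period)  # block at most `period` seconds
        if packet is None or packet == LAST_RAWPACKET:
            # Nothing available (or only a wake-up marker): re-check the flag.
            continue
        yield packet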
|
from game import *
from model import *
#Create places to lay out the foundation of the world
places = {
"tgh" : Place(name="The Great Hall", description="It's a hall and it's great"),
"tsh" : Place(name="The Small Hall", description="It's a hall and it's small"),
"td" : Place(name="The Dungeon", description="It's dungeon and you see two ways out of here")
}
#Establish relationship between different places to enable the player to move between them
places.get("tgh").borders = [places.get("tsh")]
places.get("tsh").borders = [places.get("tgh"), places.get("td")]
places.get("td").borders = [places.get("tsh")]
#Create quests which the player can perform
quest1 = Quest(
name="The First Quest",
prompt="Oh no, blablablablablabla? What is your next course of action???",
choices=[
Quest.Choice(
description="Run!!",
consequence=Quest.Choice.Consequence(
health=-5,
summary="Not much happened. You got a bit tired and lost 5 HP."
)
)
]
)
#Add quests to places
places.get("tgh").quests.append(quest1)
#Create the player
player = Player(place=places.get("tgh"))
#Initialize game with the player and the places created above
game = Game(player=player, places = places)
done = False
#Run game loop
while not done:
game.query_player()
done = game.check_if_done()
|
#Code to find the lowest common ancestor between two nodes
class Node:
def __init__(self,key):
self.key=key
self.left=None
self.right=None
def findPath(root, path, k):
    # Record the path of keys from root to the node with key k; return True if found.
    if root is None:
        return False
    path.append(root.key)
    if root.key == k:
        return True
    if findPath(root.left, path, k) or findPath(root.right, path, k):
        return True
    # k is not in this subtree: remove the current node and backtrack.
    path.pop()
    return False
def findLCA(root,n1,n2):
path1=[]
path2=[]
if(not findPath(root,path1,n1) or not findPath(root,path2,n2)):
return -1
i=0
while(i<len(path1) and i<len(path2)):
if(path1[i]!=path2[i]):
break
i=i+1
return path1[i-1]
# Driver program to test above function
# Let's create a sample binary tree
root = Node(1)
root.left = Node(2)
root.right = Node(3)
root.left.left = Node(4)
root.left.right = Node(5)
root.right.left = Node(6)
root.right.right = Node(7)
print "LCA(4, 5) = %d" % (findLCA(root, 4, 5, ))
print "LCA(4, 6) = %d" % (findLCA(root, 4, 6))
print "LCA(3, 4) = %d" % (findLCA(root, 3, 4))
print "LCA(2, 4) = %d" % (findLCA(root, 2, 4))
|
from Deck_creation import draw, listed, deck, game
l = listed()
game_deck = deck(1)
total_players = game(3)
player_1_hand = total_players[0]; player_2_hand = total_players[1]
# Turn base
stop1 = True
stop2 = True
result1 = 0
result2 = 0
active_player = 1
def winner():
print("Player 1 had a total sum of:", int(result1),"\nPlayer 2 had a total sum of:", result2)
if result1 > result2:
print("The winner is player 1 with a total of:", result1)
elif result2 > result1:
print("The winner is player 2 with a total of:", result2)
else:
print("The game is a tie")
while(stop1 == True or stop2 == True):
print("Player", active_player," has to choose the move.\n")
print("The total value for player ", active_player, "at the moment is: ", globals()["result"+str(active_player)], "\n" )
input_player = input("Please enter the move:\n")
if input_player == "0":
# with input == 0 meaning that the player draws a card
        # and then passes the turn to the next player
if active_player == 1:
draw_1, game_deck, player_1_hand = draw(game_deck, player_1_hand)
result1 += int(l[draw_1,0])
print("The total value for player 1 is ",result1, "\n")
if result1 > 21:
print("Player 1 has a result higher than 21 and lost")
break
if stop2 == True:
active_player = 2
else:
active_player = 1
else:
draw_1, game_deck, player_2_hand = draw(game_deck, player_2_hand)
result2 += int(l[draw_1,0])
print("The total value for player 2 is ",result2, "\n")
if result2 > 21:
print("Player 2 has a result higher than 21 and lost")
break
if stop1 == True:
active_player = 1
else:
active_player = 2
elif input_player == "1":
        # with input == 1 the player passes his turn for the rest of the game
if active_player == 1:
stop1 = False
if stop2 == True:
active_player = 2
else:
winner()
elif active_player == 2:
stop2 = False
if stop1 == True:
active_player = 1
else:
winner()
else:
print("To draw a card type 0, to stay and wait for the end of the game type 1.")
|
import asyncio
from math import ceil
from shared.utils import get_time
class Housekeeping():
# ------------------------------------------------------------------
async def cleanup_loop(self, loop_info):
self.log.info([
['y', ' - starting '],
['b', 'cleanup_loop'],
['y', ' by: '],
['c', self.sess_id],
['y', ' for server: '],
['c', self.serv_id],
])
# self.cleanup_sleep_sec = 5
# self.cleanup_sleep_sec = 3
while self.get_loop_state(loop_info):
await asyncio.sleep(self.cleanup_sleep_sec)
# wait for all session configurations from this server to complete
async def is_locked():
sess_ids = self.redis.s_get('ws;server_sess_ids;' + self.serv_id)
sess_locks = self.locker.semaphores.get_actives(
name=self.sess_config_lock,
default_val=[],
)
locked = any(s in sess_ids for s in sess_locks)
return locked
max_lock_sec = self.get_expite_sec(
name='sess_config_expire',
is_lock_check=True,
)
await self.locker.semaphores.async_block(
is_locked=is_locked,
max_lock_sec=max_lock_sec,
)
            # add a lock impacting session configurations; the name
            # and key are global, so that we don't have zombie entries
            # after a server restart!
self.locker.semaphores.add(
name=self.cleanup_loop_lock,
key=self.serv_id,
expire_sec=self.get_expite_sec(name='cleanup_loop_expire'),
)
# run the cleanup for this server
await self.cleanup_server(serv_id=self.serv_id)
# run the cleanup for possible zombie sessions
all_sess_ids = self.redis.s_get('ws;all_sess_ids')
for sess_id in all_sess_ids:
heartbeat_name = self.get_heartbeat_name(scope='sess', postfix=sess_id)
if not self.redis.exists(heartbeat_name):
await self.cleanup_session(sess_id=sess_id)
# run the cleanup for possible zombie widgets
widget_info = self.redis.h_get_all('ws;widget_info', default_val={})
for widget_id, info in widget_info.items():
sess_id = info['sess_id']
heartbeat_name = self.get_heartbeat_name(scope='sess', postfix=sess_id)
if not self.redis.exists(heartbeat_name):
# explicitly take care of the widget
await self.cleanup_sess_widget(widget_ids=widget_id)
# for good measure, make sure the session is also gone
await self.cleanup_session(sess_id=sess_id)
# run the cleanup for possible zombie servers
all_server_ids = self.redis.s_get('ws;all_server_ids')
for serv_id in all_server_ids:
heartbeat_name = self.get_heartbeat_name(scope='serv', postfix=serv_id)
if not self.redis.exists(heartbeat_name):
await self.cleanup_server(serv_id=serv_id)
# run the cleanup for possible zombie loops
await self.cleanup_loops()
# run the cleanup for users who have no heartbeats
all_user_ids = self.redis.s_get('ws;all_user_ids')
for user_id in all_user_ids:
heartbeat_name = self.get_heartbeat_name(scope='user', postfix=user_id)
if not self.redis.exists(heartbeat_name):
await self.cleanup_users(user_ids=user_id)
# sanity check: make sure that the local manager has been cleaned
sess_ids = self.redis.s_get('ws;server_sess_ids;' + self.serv_id)
# instead of locking the server, we accept a possible KeyError
# in case another process changes the managers dict
            managers, sess_ids_check = {}, []
            try:
                async with self.locker.locks.acquire('serv'):
                    managers = await self.get_server_attr('managers')
                    sess_ids_check = [s for s in managers.keys() if s not in sess_ids]
            except KeyError:
                pass
            if len(sess_ids_check) > 0:
self.log.warn([
['r', ' - mismatch between sess_ids ?', sess_ids,
managers.keys()],
])
for sess_id in sess_ids_check:
await self.cleanup_session(sess_id=sess_id)
# sanity check: after the cleanup for this particular session,
# check if the heartbeat is still there for the server / user
# (if any session at all is alive)
# async with self.locker.locks.acquire('user'):
# if not self.redis.exists(self.get_heartbeat_name(scope='user')):
# user_sess_ids = self.redis.s_get('ws;user_sess_ids;' + self.user_id)
# if len(user_sess_ids) > 0:
# raise Exception(
# 'no heartbeat, but sessions remaining ?!?!', self.user_id,
# user_sess_ids
# )
async with self.locker.locks.acquire('serv'):
if not self.redis.exists(self.get_heartbeat_name(scope='serv')):
server_sess_ids = self.redis.s_get(
'ws;server_sess_ids;' + self.serv_id
)
if len(server_sess_ids) > 0:
raise Exception(
'no heartbeat, but sessions remaining ?!?!', self.serv_id,
server_sess_ids
)
# cleanup widgets by their own heartbeat
await self.cleanup_widgets()
# ------------------------------------------------------------------
# ------------------------------------------------------------------
check_all_ws_keys = False
# check_all_ws_keys = True
if check_all_ws_keys:
# async with self.locker.locks.acquire('serv'):
cursor, scans = 0, []
while True:
# await asyncio.sleep(0.001)
cursor, scan = self.redis.scan(cursor=cursor, count=500, match='ws;*')
if len(scan) > 0:
scans += scan
if cursor == 0:
break
print(' - scans:\n', scans, '\n')
# ------------------------------------------------------------------
# ------------------------------------------------------------------
# remove the lock impacting session configurations
self.locker.semaphores.remove(
name=self.cleanup_loop_lock,
key=self.serv_id,
)
self.log.info([
['r', ' - ending '],
['b', 'cleanup_loop'],
['r', ' by: '],
['c', self.sess_id],
['r', ' for server: '],
['c', self.serv_id],
])
return
# ------------------------------------------------------------------
async def cleanup_session(self, sess_id):
"""clean up a session
as this is not necessarily self.sess_id, we do not assume that we
have the identical self.user_id or self.serv_id (as another user
        potentially using a different server, might have initiated this function)
"""
if sess_id is None:
return
async with self.locker.locks.acquire('sess'):
# add a lock impacting the cleanup loop
self.locker.semaphores.add(
name=self.sess_config_lock,
key=sess_id,
expire_sec=self.get_expite_sec(name='sess_config_expire'),
)
self.log.info([['c', ' - cleanup-session '], ['p', sess_id], ['c', ' ...']])
# remove the heartbeat for the session
self.redis.delete(self.get_heartbeat_name(scope='sess', postfix=sess_id))
            # remove the session from the global list
self.redis.s_rem(name='ws;all_sess_ids', data=sess_id)
            # remove the session from the server list
all_server_ids = self.redis.s_get('ws;all_server_ids')
for serv_id in all_server_ids:
self.redis.s_rem(name='ws;server_sess_ids;' + serv_id, data=sess_id)
            # remove the session from the user list (go over all users until
# the right one is found)
all_user_ids = self.redis.s_get('ws;all_user_ids')
for user_id in all_user_ids:
self.redis.s_rem(name='ws;user_sess_ids;' + user_id, data=sess_id)
await self.set_loop_state(
state=False,
group=self.get_loop_group_name(scope='sess', postfix=sess_id),
)
# remove the session from the manager list if it
# exists (if this is the current server)
async with self.locker.locks.acquire('serv'):
await self.remove_server_attr(name='managers', key=self.sess_id)
# clean up all widgets for this session
sess_widget_ids = self.redis.l_get('ws;sess_widget_ids;' + sess_id)
for widget_id in sess_widget_ids:
await self.cleanup_sess_widget(widget_ids=widget_id)
# remove the lock impacting the cleanup loop
self.locker.semaphores.remove(
name=self.sess_config_lock,
key=sess_id,
)
return
# ------------------------------------------------------------------
async def cleanup_loops(self):
"""clean up zombie loops
"""
all_loop_groups = self.redis.h_get_all(name='ws;all_loop_groups', default_val={})
heartbeats = []
for heartbeat in list(all_loop_groups.values()):
if heartbeat not in heartbeats:
heartbeats += [heartbeat]
has_context = dict([(h, self.redis.exists(h)) for h in heartbeats])
for group, heartbeat in all_loop_groups.items():
if not has_context[heartbeat]:
await self.set_loop_state(state=False, group=group)
self.redis.h_del(name='ws;all_loop_groups', key=group)
return
# ------------------------------------------------------------------
async def cleanup_widgets(self):
"""cleanup widgets by their own heartbeat
"""
reco_info = self.redis.h_get_all(
'ws;recovery_info',
default_val={},
)
widget_ids = []
for widget_id in reco_info.keys():
heartbeat_name = self.get_heartbeat_name(scope='widget_id', postfix=widget_id)
if not self.redis.exists(heartbeat_name):
widget_ids += [widget_id]
if len(widget_ids) > 0:
self.redis.h_del(
'ws;recovery_info',
keys=widget_ids,
)
await self.cleanup_sess_widget(widget_ids=widget_ids)
return
# ------------------------------------------------------------------
async def cleanup_sess_widget(self, widget_ids, grp_ids=None):
"""clean up a list of input widget ids
"""
if not isinstance(widget_ids, (list, set)):
widget_ids = [widget_ids]
if len(widget_ids) == 0:
return
if grp_ids is not None:
if not isinstance(grp_ids, (list, set)):
grp_ids = [grp_ids]
self.log.info([
['c', ' - cleanup-widget_ids '],
['p', widget_ids],
['y', '', grp_ids if grp_ids is not None else ''],
])
all_user_ids = self.redis.s_get('ws;all_user_ids')
for widget_id in widget_ids:
widget_info = self.redis.h_get(
name='ws;widget_info', key=widget_id, default_val={}
)
if 'sess_id' in widget_info:
self.redis.delete('ws;sess_widget_ids;' + widget_info['sess_id'])
self.redis.h_del(name='ws;widget_info', key=widget_id)
for user_id in all_user_ids:
self.redis.l_rem(name='ws;user_widget_ids;' + user_id, data=widget_id)
async with self.locker.locks.acquire('serv'):
await self.remove_server_attr(name='widget_inits', key=widget_id)
sess_widget_loops = self.redis.l_get('ws;sess_widget_loops;' + widget_id)
for widget_loop in sess_widget_loops:
await self.set_loop_state(
state=False,
loop_info=widget_loop,
)
self.redis.delete('ws;sess_widget_loops;' + widget_id)
# synchronisation groups
pipe = self.redis.get_pipe()
for widget_id in widget_ids:
user_sync_groups = self.redis.h_get_all(
name='ws;user_sync_groups;' + self.user_id, default_val=dict()
)
check_grp_ids = list(user_sync_groups.keys())
if grp_ids is not None:
check_grp_ids += grp_ids
for grp_id_now in set(check_grp_ids):
pipe.h_del(
name='ws;user_sync_group_widgets;' + self.user_id + ';' + grp_id_now,
key=widget_id,
)
pipe.execute()
# # if a PanelSync widget is instantiated, update it
# await self.publish_sync_groups_update()
return
# ------------------------------------------------------------------
async def cleanup_server(self, serv_id):
"""clean up servers and the corresponding sessions
"""
# cleanup expired sessions (for this particular server)
sess_ids = self.redis.s_get('ws;server_sess_ids;' + serv_id)
for sess_id in sess_ids:
heartbeat_name = self.get_heartbeat_name(scope='sess', postfix=sess_id)
if not self.redis.exists(heartbeat_name):
await self.cleanup_session(sess_id=sess_id)
# after the cleanup for dead sessions, check if
# the heartbeat is still there for the server (if any session at all is alive)
heartbeat_name = self.get_heartbeat_name(scope='serv', postfix=serv_id)
if not self.redis.exists(heartbeat_name):
self.log.info([['c', ' - cleanup-server '], ['p', serv_id], ['c', ' ...']])
await self.set_loop_state(
state=False,
group=self.get_loop_group_name(scope='serv', postfix=serv_id),
)
self.redis.s_rem(name='ws;all_server_ids', data=serv_id)
return
# ------------------------------------------------------------------
async def cleanup_users(self, user_ids=None):
"""clean up all user lists in case this use has heartbeat
"""
if user_ids is None:
user_ids = self.redis.s_get('ws;all_user_ids', default_val=[])
elif isinstance(user_ids, str):
user_ids = [user_ids]
for user_id in user_ids:
user_sess_ids = self.redis.s_get(
'ws;user_sess_ids;' + user_id,
default_val=None,
)
if user_sess_ids is not None:
continue
self.log.info([
['c', ' - cleanup-user '],
['p', user_id],
['c', ' ...'],
])
self.redis.s_rem(name='ws;all_user_ids', data=user_id)
user_sync_groups = self.redis.h_get_all(
name='ws;user_sync_groups;' + user_id, default_val=dict()
)
for grp_id in user_sync_groups.keys():
self.redis.delete(
name='ws;user_sync_group_widgets;' + user_id + ';' + grp_id,
)
self.redis.delete(name='ws;user_sync_groups;' + user_id)
# cleanup widgets
widget_types = self.redis.h_get(
name='ws;server_user_widgets' + self.serv_id, key=user_id, default_val=[]
)
for widget_type in widget_types:
name = (
'ws;server_user_widget_loops;' + self.serv_id + ';' + user_id + ';'
+ widget_type
)
self.redis.delete(name=name)
self.redis.h_del(name='ws;server_user_widgets' + self.serv_id, key=user_id)
self.redis.h_del(name='ws;active_widget', key=user_id)
self.redis.delete(name='ws;user_widget_ids;' + user_id)
return
|
first_list = [int(i) for i in input("Enter numbers separated by spaces and press Enter:").split()]
second_list = [int(j) for j in input("Enter numbers separated by spaces and press Enter:").split()]
same = ""
first_uniq = ""
second_uniq = ""
# Walk each list separately so the two lists may have different lengths.
for item in first_list:
    if item in second_list:
        same += str(item) + " "
    else:
        first_uniq += str(item) + " "
for item in second_list:
    if item not in first_list:
        second_uniq += str(item) + " "
print(same, first_uniq, second_uniq)
|
def result(ns):
for i in range(9):
for j in range(i+1, 9):
if sum(ns) - ns[i] - ns[j] == 100:
return i, j
ns = list()
for _ in range(9):
ns.append(int(input()))
ns = sorted(ns)
i, j = result(ns)
for idx, n in enumerate(ns):
if idx != i and idx != j:
print(n)
|
#knAudio.py
"""
Programmer: Keith G. Nemitz
E-mail: keithn@mousechief.com
Version 0.0.1 Development
"""
import pyglet
import knTimers
daSong = None
daSinger = None;
faderTimer = None;
#-------------------------------------------
def LoadSFX(name):
return pyglet.resource.media(name,streaming=False);
def PlaySFX(sfx):
    if isinstance(sfx, str):
sfx = pyglet.resource.media(sfx,streaming=False);
sfx.play();
pass
#-------------------------------------------
def PrepMusic(name):
global daSong
daSong = pyglet.resource.media(name, streaming=True)
pass
def PlayMusic(name): #, loopCount=1
global daSinger, daSong
if (daSinger and daSinger.playing): daSinger.pause();
if (daSong == None):
daSong = pyglet.resource.media(name, streaming=True)
daSinger = daSong.play();
#daSinger.eos_action = daSinger.EOS_PAUSE;
daSong = None;
pass
def IsMusicPlaying():
return (daSinger and daSinger.playing);
def StopMusic():
global daSinger
if (daSinger and daSinger.playing):
daSinger.pause();
daSinger = None;
pass
def GetMusicVol():
if (daSinger):
return daSinger.volume;
return 0.0;
def MusicFader(dt):
global faderTimer, daSinger
if (faderTimer == None): return;
if (daSinger.playing == False or faderTimer.HasEnded()):
pyglet.clock.unschedule(MusicFader);
faderTimer = None;
daSinger = None;
return;
value = faderTimer.ReadValue();
daSinger.volume = value;
pass
def FadeMusicOut(milsecs):
global faderTimer, daSinger
if (daSinger == None): return;
if (daSinger.playing == False):
daSinger = None;
return;
if (faderTimer or daSinger == None): return;
faderTimer = knTimers.RangeTimer(milsecs,1.0,0.0);
pyglet.clock.schedule_interval(MusicFader, 0.01)
pass
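# Illustrative usage sketch only (not part of the original module): the file
# name "theme.ogg" and the fade duration are assumptions; a pyglet event loop
# must be running for the fade scheduling to take effect.  The function is
# only defined here, never called on import.
def _example_fade_out_demo():
    PlayMusic("theme.ogg")      # start streaming playback of a prepared resource
    if IsMusicPlaying():
        FadeMusicOut(2000)      # fade to silence over roughly 2 seconds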
|
# LEVEL 5
# http://www.pythonchallenge.com/pc/def/peak.html
import pickle
test = {'a': 2, 'b': 4, 'c': 6,
'alongstring0': 'averylongstringindeed0',
'alongstring1': 'averylongstringindeed1',
'alongstring2': 'averylongstringindeed2',
'alongstring3': 'averylongstringindeed3',
'alongstring4': 'averylongstringindeed4',
'alongstring5': 'averylongstringindeed5',
'alongstring6': 'averylongstringindeed6',
'alongstring7': 'averylongstringindeed7',
'alongstring8': 'averylongstringindeed8',
}
test2 = {}
try:
with open('data/level_5_test.p', 'wb') as f:
data = pickle.dump(test, f, 0) # found protocol 0 produces output similar to banner.p
print(data)
with open('data/level_5_test.p', 'rb') as f:
data = pickle.load(f)
print(data)
except Exception as exc:
raise exc
pass
histogram = {}
try:
with open('data/banner.p', 'rb') as f:
data = pickle.load(f)
except Exception as exc:
raise exc
pass
# Each row of the pickled data is a list of (character, run_length) pairs.
for row in data:
    total_times = 0
    for char, times in row:
        total_times += times
        print(char * times, end='')
    print()
|
import pyttsx
k = pyttsx.init()
def say(text):
k.say(text)
k.runAndWait()
say('Hello')
|
import numpy as np
import scipy.signal as scs
def todo_specification_separate_channels(u,v):
nrowu,ncolu,nchu = u.shape
w = np.zeros(u.shape)
for i in range(3):
uch = u[:,:,i]
vch = v[:,:,i]
u_sort,index_u=np.sort(uch,axis=None),np.argsort(uch,axis=None)
v_sort,index_v=np.sort(vch,axis=None),np.argsort(vch,axis=None)
uspecifv= np.zeros(nrowu*ncolu)
uspecifv[index_u] = v_sort
uspecifv = uspecifv.reshape(nrowu,ncolu)
w[:,:,i] = uspecifv.reshape(nrowu,ncolu)
return w
def transport1D(X,Y):
    sx = np.argsort(X)  # argsort returns the indices that would sort the values in increasing order
sy = np.argsort(Y)
return((sx,sy))
def todo_transport3D(X,Y,N,eps):  # X and Y are n x 3 matrices
Z=np.copy(X) # output
for k in range(N):
u=np.random.randn(3,3)
        q=np.linalg.qr(u)[0]  # orthonormal basis with uniform distribution on the sphere
for i in range(3):
# projection on the basis
Yt=np.dot(Y,q[:,i])
Zt=np.dot(Z,q[:,i])
#Permutations
[sZ,sY]=transport1D(Zt,Yt)
Z[sZ,:] += eps * (Yt[sY]-Zt[sZ])[:,None] * q[:,i][None,:] # 3D transport
# equivalent to
#for j in range(X.shape[0]):
# Z[sZ[j],:]=Z[sZ[j],:]+e*(Yt[sY[j]]-Zt[sZ[j]])*(q[:,i]) #transport 3D
return Z,sZ,sY
def average_filter(u,r): # implementation with integral images
# uniform filter with a square (2*r+1)x(2*r+1) window
# u is a 2d image
# r is the radius for the filter
(nrow, ncol) = u.shape
big_uint = np.zeros((nrow+2*r+1,ncol+2*r+1))
big_uint[r+1:nrow+r+1,r+1:ncol+r+1] = u
big_uint = np.cumsum(np.cumsum(big_uint,0),1) # integral image
out = big_uint[2*r+1:nrow+2*r+1,2*r+1:ncol+2*r+1] + big_uint[0:nrow,0:ncol] - big_uint[0:nrow,2*r+1:ncol+2*r+1] - big_uint[2*r+1:nrow+2*r+1,0:ncol]
out = out/(2*r+1)**2
return out
def todo_guided_filter(u,guide,r,eps):
    C = average_filter(np.ones(u.shape), r)  # normalization to avoid problems at image edges
mean_u = average_filter(u, r)/C
mean_guide = average_filter(guide, r)/C
corr_guide = average_filter(guide*guide, r)/C
corr_uguide = average_filter(u*guide, r)/C
var_guide = corr_guide - mean_guide * mean_guide
cov_uguide = corr_uguide - mean_u * mean_guide
alph = cov_uguide / (var_guide + eps)
beta = mean_u - alph * mean_guide
mean_alph = average_filter(alph, r)/C
mean_beta = average_filter(beta, r)/C
q = mean_alph * guide + mean_beta
return q
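# A minimal usage sketch (an addition for illustration, not part of the
# original exercise code): run the guided filter on a random grayscale image,
# using the image as its own guide, which is the classic edge-preserving
# smoothing setup.  The image size, radius and eps values are arbitrary.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    img = rng.random((64, 64))
    smoothed = todo_guided_filter(img, guide=img, r=4, eps=1e-2)
    print(smoothed.shape)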
|
# @Time :2019/7/21 13:48
# @Author :jinbiao
from Python_0719_job import match_count
from Python_0719_job.operation_excel import OperationExcel
import unittest
from ddt import ddt, data
oe = OperationExcel(excel_name="test_data.xlsx", sheet_name="divide")
test_data = oe.get_data()
@ddt
class Testoperation(unittest.TestCase):  # create a test class inheriting TestCase from the unittest package
@classmethod
def setUpClass(cls) -> None:
cls.write_file = open("result.txt", mode="a", encoding="utf-8")
cls.write_file.write("{:-^50}\n".format("测试用例开始执行"))
@classmethod
def tearDownClass(cls) -> None:
cls.write_file.write("{:-^50}\n".format("测试用例执行结果"))
cls.write_file.close()
@data(*test_data)
def test_divide(self, data):
# for data in test_data:
one_operation = match_count.Count(data["l_data"], data["r_data"]) # 创建一个运算对象
actual = one_operation.divide() # 调用divide方法,返回计算结果
expectation = data["expect"] # 定义期望值
description = data["description"]
try:
            self.assertEqual(expectation, actual, msg=f"{description}{one_operation.number1}/{one_operation.number2} does not equal {expectation}")  # check that the expected value equals the actual result
            self.write_file.write("Case result: [PASS]\n")
            oe.write_data(data["case_id"]+1, actual=actual, result="PASS")
        except Exception as e:
            self.write_file.write("Case result: [FAIL] {}\n".format(e))
oe.write_data(data["case_id"] + 1, actual=actual, result="FAIL")
raise e
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 28 18:25:33 2018
@author: PPAGACZ
"""
import abc
from packets import *
class IPipe(abc.ABC):
@abc.abstractmethod
def runFilter(self):
pass
@abc.abstractmethod
    def checkConditions(self, data):
pass
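# A minimal concrete pipe, added here as an illustration only (it is not part
# of the original module).  The filter logic and the stored `data` attribute
# are assumptions; a real implementation would work on the classes imported
# from `packets`.
class PassThroughPipe(IPipe):
    def __init__(self, data=None):
        self.data = data or []

    def checkConditions(self, data):
        # Accept anything that is not empty.
        return bool(data)

    def runFilter(self):
        # Keep only the items that satisfy the conditions.
        return [item for item in self.data if self.checkConditions(item)]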
|
# coding=utf-8
import ConfigParser
import os
import sys
sys.path.append(os.getenv('PY_DEV_HOME'))
from webTest_pro.common.logger import logger
from webTest_pro.common.os_sqlfile_read import getSqlPath
reload(sys)
sys.setdefaultencoding("utf-8")
home_path = os.environ.get('PY_DEV_HOME')
def getCfgPath():
tmpEnvFile = home_path + '\webTest_pro\common\.tmp'
if os.path.exists(tmpEnvFile):
with open(tmpEnvFile, 'r+') as f:
pathTmp = f.readlines()
cfg_path = pathTmp[0]
# print cfg_path
logger.info('cfg file path %s' % tmpEnvFile)
else:
cfg_path = home_path + '\webTest_pro\cfg\init_default.conf'
        logger.info('cfg file path %s' % cfg_path)
return cfg_path
def getCfgs(cfg_path):
    # school, classroom and device info from the config file
classroom_para = []
classroom_tmp = []
    # URL prefix used to access the test platform
    base_url = ''
    # management platform database configuration
    db_conf = {}
    loginInfo = {}
    execEnv = {}
    # streaming media address configuration
streaming_media = {}
cf = ConfigParser.ConfigParser()
# cf.read("G:\\04py\\aotuTest_pro\\webTest_pro\\cfg\\init_v1.conf")
cf.read(cfg_path)
sections = cf.sections()
# print sections
# loginInfo = {'username':'hnsadmin', 'platformname': u"河南教育局"}
    # read the user login info from the config file
for s in sections:
if s.lower().find('classroom_para') != -1:
classroom_tmp.append(s)
if s.lower().find('basedata') != -1:
base_url = 'http://' + cf.get(s, 'addr')
child_interact_ip = cf.get(s, 'interact_1')
loginInfo.setdefault('username', cf.get(s, 'username'))
loginInfo.setdefault('platformname', cf.get(s, 'platformname'))
if s.lower().find('db_conf') != -1:
# host = cf.get(s, 'host')
db_conf.setdefault('host', cf.get(s, 'host'))
# hostadd = cf.get(s, 'hostadd')
db_conf.setdefault('hostadd', cf.get(s, 'hostadd'))
# user = cf.get(s, 'user')
db_conf.setdefault('user', cf.get(s, 'user'))
# passwd = cf.get(s, 'passwd')
db_conf.setdefault('passwd', cf.get(s, 'passwd'))
# db = cf.get(s, 'db')
db_conf.setdefault('db', cf.get(s, 'db'))
db_conf.setdefault('port', cf.get(s, 'port'))
if s.lower().find('env_para') != -1:
execEnv.setdefault('execType', cf.get(s, 'execType'))
execEnv.setdefault('remoteUrl', cf.get(s, 'remoteUrl'))
if s.lower().find('streaming_media') != -1:
streaming_media.setdefault('serverIps', cf.get(s, 'serverIps'))
# from conf get classroom_para
for s in classroom_tmp:
opts = cf.options(s)
arr = {}
for o in opts:
name = cf.get(s, o)
arr.setdefault(o, unicode(name).encode("utf-8"))
classroom_para.append(arr)
return classroom_para, base_url, db_conf, loginInfo, execEnv, streaming_media, child_interact_ip
def get_active_code(cfg_path):
cf = ConfigParser.ConfigParser()
cf.read(cfg_path)
active_code = cf.get('basedata', 'active_code')
return active_code
class cfg(object):
def __init__(self):
# self.logFile = getLogFile()
self.cfg_path = getCfgPath()
self.tmpData = getCfgs(self.cfg_path)
self.classroom_para = self.tmpData[0]
self.base_url = self.tmpData[1]
self.db_conf = self.tmpData[2]
self.loginInfo = self.tmpData[3]
self.execEnv = self.tmpData[4]
self.streaming_media = self.tmpData[5]
self.child_interact_ip = self.tmpData[6]
        # read the sql file path
# data_path = getSqlPath(self.base_url, self.db_conf)
data_path = getSqlPath(self.base_url)
self.sqlFilePath = data_path[0]
self.sqlStatements = data_path[1]
self.active_code = get_active_code(self.cfg_path)
# self.sqlFilePathVer = data_path[1]
logger.info("#############################Init basedata start#############################")
logger.info(">>>>base_url: {}".format(self.base_url))
logger.info(">>>>cfg_path: {}".format(self.cfg_path))
logger.info(">>>>child_interact_ip: {}".format(self.child_interact_ip))
logger.info(">>db_conf")
for k, v in self.db_conf.items():
logger.info(">>>>{0}: {1}".format(k, v))
logger.info(">>loginInfo")
for k,v in self.loginInfo.items():
logger.info(">>>>{0}: {1}".format(k, v))
logger.info(">>>>media IP: {}".format(self.streaming_media))
logger.info(">>>>sql Version: {}".format(self.sqlFilePath))
logger.info(">>>>sqlStatements: {}".format(self.sqlStatements))
logger.info("#############################Init basedata end#############################")
if __name__ == "__main__":
init = cfg()
|
from django.shortcuts import render, redirect
from .models import Food
from .form import FoodForm
def food(request):
foods = Food.objects.all()
return render(request, 'food.html', {"foods": foods})
def create_food(request):
form = FoodForm(request.POST or None)
if form.is_valid():
form.save()
return redirect('food')
return render(request, 'form_food.html', {'form': form})
def update_food(request, id):
foods = Food.objects.get(id=id)
form = FoodForm(request.POST or None, instance=foods)
if form.is_valid():
form.save()
return redirect('food')
return render(request, 'form_food.html', {'form': form})
def delete_food(request, id):
food = Food.objects.get(id=id)
food.delete()
return redirect('food')
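# Illustrative only (an assumption, not part of the original file): the route
# name 'food' is implied by the redirect() calls above; the other paths and
# names are guesses.  A matching URLconf would normally live in the app's
# urls.py and look roughly like this:
from django.urls import path

urlpatterns = [
    path('', food, name='food'),
    path('create/', create_food, name='create_food'),
    path('update/<int:id>/', update_food, name='update_food'),
    path('delete/<int:id>/', delete_food, name='delete_food'),
]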
|
import unittest
from TemplateConverter import TemplateConverter
class TestConverter(unittest.TestCase):
def test_multiplePlaceHolders(self):
lines = ["$from_res—$to_res records out of $nrows.\n"]
converter = TemplateConverter(lines, "test_notPlaceholder")
convertedLines = converter.getFileLines()
self.assertEqual("{{from_res}}—{{to_res}} records out of {{nrows}}.\n", convertedLines[0])
def test_multiplePlaceHolders1(self):
lines = ["$quote($jsoncode)"]
converter = TemplateConverter(lines, "test_placeholderFuncion")
convertedLines = converter.getFileLines()
self.assertEqual("{{quote(jsoncode)}}", convertedLines[0])
def test_multiplePlaceHolders2(self):
lines = ["<td><a href='$base/task?id=$quote_plus($parent)'>$quote($parent)</a></td>"]
converter = TemplateConverter(lines, "test_notPlaceholder")
convertedLines = converter.getFileLines()
self.assertEqual("<td><a href='{{base}}/task?id={{quote_plus(parent)}}'>{{quote(parent)}}</a></td>",
convertedLines[0])
def test_multiplePlaceHolders3(self):
lines = ["$quote($jsoncode[$li[a(1)]])"]
converter = TemplateConverter(lines, "test_placeholderFuncion")
convertedLines = converter.getFileLines()
self.assertEqual("{{quote(jsoncode[li[a(1)]])}}", convertedLines[0])
def test_multiplePlaceHolders4(self):
lines = ["$quote($jsoncode[1]) aa $jsoncode[1]. $jsoncode_a($quote($la))"]
converter = TemplateConverter(lines, "test_placeholderFuncion")
convertedLines = converter.getFileLines()
self.assertEqual("{{quote(jsoncode[1])}} aa {{jsoncode[1]}}. {{jsoncode_a(quote(la))}}", convertedLines[0])
def test_multiplePlaceHolders5(self):
lines = ["($from_res) aa"]
converter = TemplateConverter(lines, "test_notPlaceholder")
convertedLines = converter.getFileLines()
self.assertEqual("({{from_res}}) aa", convertedLines[0])
def test_simplePlaceholders(self):
lines = ["$from"]
converter = TemplateConverter(lines, "test_notPlaceholder")
convertedLines = converter.getFileLines()
self.assertEqual("{{from}}", convertedLines[0])
def test_placeholderFuncion(self):
lines = ["$qoute($stuff.morestuff(anmore()))\n"]
converter = TemplateConverter(lines, "test_placeholderFuncion")
convertedLines = converter.getFileLines()
self.assertEqual("{{qoute(stuff.morestuff(anmore()))}}\n", convertedLines[0])
def test_placeholderFuncion2(self):
lines = ["<li>$quote($key): $quote($val)</li>"]
converter = TemplateConverter(lines, "test_placeholderFuncion")
convertedLines = converter.getFileLines()
self.assertEqual("<li>{{quote(key)}}: {{quote(val)}}</li>", convertedLines[0])
    def test_placeholderFuncion3(self):
lines = ["< a href = \"javascript:Transition(-$width)\" style = \"background-color:#fff\" >"]
converter = TemplateConverter(lines, "test_placeholderFuncion")
convertedLines = converter.getFileLines()
self.assertEqual("< a href = \"javascript:Transition(-{{width}})\" style = \"background-color:#fff\" >",
convertedLines[0])
def test_set(self):
lines = ["#set timestamp = $time.strftime(\"%a, %d %b %Y %H:%M:%S GMT\", $time.gmtime())"]
converter = TemplateConverter(lines, "test_set")
convertedLines = converter.getFileLines()
self.assertEqual("{%- set timestamp = time.strftime(\"%a, %d %b %Y %H:%M:%S GMT\", time.gmtime()) %}",
convertedLines[0])
def test_placeholderAsHref(self):
lines = ["<a href=\"$newUrl\">next</a> |"]
converter = TemplateConverter(lines, "test_placeholderAsHref")
convertedLines = converter.getFileLines()
self.assertEqual("<a href=\"{{newUrl}}\">next</a> |", convertedLines[0])
def test_placeholdersKeywords(self):
lines = ["$quote($json.dumps($result, indent=4, default=str))"]
converter = TemplateConverter(lines, "test_placeholderAsHref")
convertedLines = converter.getFileLines()
self.assertEqual("{{quote(json.dumps(result, indent=4, default=str))}}", convertedLines[0])
def test_placeholdersMultiple(self):
lines = ["<b>$das</b> $quote($row.get(\"description\", \"N/A\"))"]
converter = TemplateConverter(lines, "test_placeholdersMultiple")
convertedLines = converter.getFileLines()
self.assertEqual("<b>{{das}}</b> {{quote(row.get(\"description\", \"N/A\"))}}", convertedLines[0])
def test_comments(self):
lines = ["##<div><h3>Result $quote($result.get($task_id))</h3></div>\n"]
converter = TemplateConverter(lines, "test_comments")
convertedLines = converter.getFileLines()
self.assertEqual("{#-<div><h3>Result {{quote(result.get(task_id))}}</h3></div> #}\n", convertedLines[0])
def test_longComment(self):
lines = ["#####if $lfn!=$lfnList[-1]\n"]
converter = TemplateConverter(lines, "test_longComment")
convertedLines = converter.getFileLines()
self.assertEqual("{#-if {{lfn}}!={{lfnList[-1]}} #}\n", convertedLines[0])
def test_specChars(self):
lines = ["\\$('$highlight').addClassName('box_attention').show()", "\\#slide_cards span {"]
converter = TemplateConverter(lines, "test_specChars")
convertedLines = converter.getFileLines()
self.assertEqual("$('$highlight').addClassName('box_attention').show()", convertedLines[0])
self.assertEqual("#slide_cards span {", convertedLines[1])
def test_for(self):
lines = ["#for row in $daskeys"]
converter = TemplateConverter(lines, "test_for")
convertedLines = converter.getFileLines()
self.assertEqual("{% for row in daskeys -%}", convertedLines[0])
def test_ifBlock(self):
lines = [
"#if $dbs==$dbs_global",
"#set msg=\"<b>default DBS instance</b>\"",
"#elif $dbs.startswith('prod')",
"#set msg=\"<em>production DBS instance</em>\"",
"#elif $dbs.startswith('int')",
"#set msg=\"<em>integration DBS instance</em>\"",
"#elif $dbs.startswith('dev')",
"#set msg=\"<em>development DBS instance</em>\"",
"#else",
"#set msg=\"\"",
"#end if",
]
linesResult = [
"{% if dbs==dbs_global -%}",
"{%- set msg=\"<b>default DBS instance</b>\" %}",
"{% elif dbs.startswith('prod') %}",
"{%- set msg=\"<em>production DBS instance</em>\" %}",
"{% elif dbs.startswith('int') %}",
"{%- set msg=\"<em>integration DBS instance</em>\" %}",
"{% elif dbs.startswith('dev') %}",
"{%- set msg=\"<em>development DBS instance</em>\" %}",
"{% else -%}",
"{%- set msg=\"\" %}",
"{%- endif -%}",
]
converter = TemplateConverter(lines, "test_ifBlock")
convertedLines = converter.getFileLines()
self.assertEqual(linesResult, convertedLines)
def test_silent(self):
lines = ["#silent $init_dbses.remove($inst)"]
converter = TemplateConverter(lines, "test_silent")
convertedLines = converter.getFileLines()
self.assertEqual("{{- \"\" if init_dbses.remove(inst)}}", convertedLines[0])
def test_str(self):
lines = ["#set newUrl = $url + \"&idx=\" + $str($last) + \"&limit=\" + $str($limit)"]
converter = TemplateConverter(lines, "test_str")
convertedLines = converter.getFileLines()
self.assertEqual("{%- set newUrl = url + \"&idx=\" + last| string + \"&limit=\" + limit| string %}",
convertedLines[0])
def test_lenRange(self):
lines = ["#for idx in $range(0, len($cards))"]
converter = TemplateConverter(lines, "test_lenRange")
convertedLines = converter.getFileLines()
self.assertEqual("{% for idx in range(0, cards| count) -%}", convertedLines[0])
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/python
A="10"
def func(A):
    A = 2  # rebinds only the local name A; the global A is unchanged
func(2)
print("A is now", A)  # still prints the global value "10"
def func1(A):
    A = 5
    return A
A = func1(5)
print("A is now", A)  # prints 5, the value returned by func1
|
import requests
from datetime import datetime, timedelta
import plotly.graph_objects as go
import plotly.express as px
import numpy as np
import os, shutil
import json
API_KEY = os.environ.get('API_KEY')
if API_KEY:
print(f'API_KEY of length {len(API_KEY)} retrieved.')
else:
print('Retrieving API KEY locally.')
try:
with open('.env') as file:
API_KEY = file.read().split('=')[1].strip()
print(f'API_KEY of length {len(API_KEY)} retrieved.')
    except (FileNotFoundError, IndexError):
print("Error. Could not find local API KEY.")
def get_mentions(keyword, delta=15):
'''Access news API to get a topic's mentions and headlines
in the past <delta> days.'''
today = datetime.today().date()
today_str = f"{today.year}-{today.month}-{today.day}"
filecache = f"{keyword}_{today_str}.json"
cache_folder = '.newscache'
if not os.path.exists(cache_folder):
os.mkdir(cache_folder)
#load from cache if available
if filecache in os.listdir(cache_folder):
print('Loading from cache')
with open(os.path.join(cache_folder, filecache)) as file:
result = json.load(file)
return result
#did not load from cache: try to retrieve fresh data
month = timedelta(days=delta)
day = timedelta(days=1)
result = {
'mentions': [],
'headlines': []
}
sel = today - month
failed_requests = 0
while sel < today:
start = sel
end = sel + day
x_date = f"{end.day}/{end.month}"
endpoint = 'https://newsapi.org/v2/everything'
params = {'apiKey': API_KEY,
'qInTitle': keyword,
'from': start.isoformat(),
'to': end.isoformat()}
resp = requests.get(endpoint, params)
if resp.ok:
print('Request succeeded')
count = resp.json().get('totalResults')
result['mentions'].append((x_date, count))
heads = [(art['title'], art['url']) for art in resp.json().get('articles')]
result['headlines'] += heads
else:
print(f'Request failed. Status {resp.status_code}')
print(resp.text)
failed_requests += 1
result['mentions'].append((x_date, None))
sel += day
if failed_requests <= delta // 3:
print('Clearing cache...')
#clearing old files (on the same keyword) from cache
for file in os.listdir(cache_folder):
path = os.path.join(cache_folder, file)
if file.startswith(keyword):
os.remove(path)
        #caching fresh results
print('Caching results...')
with open(os.path.join(cache_folder, filecache), 'w') as file:
json.dump(result, file)
else:
#if we could not get fresh results, we load from cache
print('Too many null results. Not caching.')
print('Loading from cache...')
last_file = [file for file in os.listdir(cache_folder)
if file.startswith(keyword)
][0]
with open(os.path.join(cache_folder, last_file)) as file:
result = json.load(file)
return result
return result
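# A hedged usage sketch: the keyword 'python' is arbitrary, and a valid
# NewsAPI key must be available (env var or .env) for fresh data to load;
# otherwise the function falls back to any cached results.
if __name__ == '__main__':
    data = get_mentions('python', delta=7)
    print(f"{len(data['headlines'])} headlines collected")
    for x_date, count in data['mentions']:
        print(x_date, count)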
|
#LISTS
'''
numeros = [1, 2, 3, 4, 5, 6]
print(numeros)
print(numeros[0])
print(numeros[3])
print(len(numeros))
text = ["A", "B", "C"]
print(text)
print(text[2])
print(text[len(text) - 1]) # RETURNS THE LAST POSITION
variada = [1, 2, 3, 4.2, False, "Hey"]
print(variada)
'''
#FOR LOOP
'''
#NORMAL
for variable in range(5):
print(variable)
'''
'''
#START-END
for variable in range(6, 10):
print(variable)
'''
'''
#START-END-STEP
for variable in range(5, 21, 2):
print(variable)
'''
'''
#EXAMPLE 1
palabra = "One Piece"
for letra in palabra:
if(letra == "a" or letra == "e" or letra == "i" or letra == "o" or letra == "u"):
print(str(letra.upper()) + "-es una vocal")
elif (letra == " "):
pass
else:
print(str(letra.upper())+"-es una consonante")
'''
#ITERATE OVER A LIST
'''
numeros = [60, 7, 90, 85]
for numero in numeros:
print(numero)
numero +=10
print(numero)
print(numeros)
for indice in range(len(numeros)):
numeros[indice]+=10
print(numeros)
'''
#WHILE
'''
contador = 0
while (contador <= 5):
print(contador)
contador += 1
'''
'''
#EXAMPLE
letra_encontrada = False
letra = "a"
frase = "Estoy buscando la letra a"
indice = 0
while(not(letra_encontrada)):
if (frase[indice] == letra):
letra_encontrada = True
print(f"Ya hemos encontrado la letra '{letra}', se encuentra en el indice {indice}") #format string
else:
indice +=1
'''
'''
#BREAK > EXITS THE LOOP
frase = "Estoy buscando la letra a"
letra = "e"
for caracter in frase:
if (caracter == letra):
print(F"Letra '{letra}' encontrada en la posicion {frase.index(letra)}") #index() FIRST SUBSTRING
        break #EVERYTHING BELOW 'BREAK' WILL NOT BE EXECUTED
else:
print("Letra no encontrada")
print(caracter)
'''
'''
#CONTINUE -> DOES NOT EXIT THE LOOP
frase = "Hola, como estas"
letra = "a"
count = 0
for caracter in frase:
if (caracter == letra):
count +=1
print(f"La letra '{letra}' la hemos encontrado {count} veces")
continue
print("Hey")
'''
'''
#PASS -> PASSES, IGNORES IT
lista = [10, 20, 30, 40, 0]
for num in lista:
if(num == 10):
pass
print(f"El valor de la variable es {num}")
def hijos_zeus(arg1, arg2):
pass
def dioses_olimpo(arg1, arg2):
pass
'''
'''
#ELSE
frase = "Todos los caracteres de una frase"
count = 0
for caracter in frase:
count +=1
#if (caracter == "l"):
#break
else:
print(f"La frase tiene {count} caracteres")
'''
'''
EXERCISE: THE USER MUST GUESS A NUMBER BETWEEN 0 AND 10.
THE PROGRAM MUST ASK THE USER TO ENTER A NUMBER
AND SAY WHETHER THEY GUESSED IT, OR WHETHER THE NUMBER IS LOWER OR HIGHER
THAN THE ONE ENTERED.
'''
numero_adivinar = 7
def pedirYcomprobar(num):
numUser = int(input("Adivina el número: "))
if(numUser == numero_adivinar):
print("Eres un Crack!")
return True
elif (numUser > numero_adivinar):
print("Te has pasado!!!")
return False
elif (numUser < numero_adivinar):
print("El numero es mayor")
return False
'''
while(True):
if (pedirYcomprobar(numero_adivinar)):
break
'''
while(not(pedirYcomprobar(numero_adivinar))):
pass
else:
print("Fin del juego")
|
### Laura Buchanan
### lcb402
import unittest
from grade_funcs import *
import os.path
class test_grade_funcs(unittest.TestCase):
    def test_load_restaurant_data(self):
        self.assertTrue(os.path.isfile('../clean_data.csv'))
        self.assertFalse(os.path.isfile('./clean_data.csv'))
    def test_year(self):
        # year is expected to be provided by the wildcard import from grade_funcs
        self.assertTrue(len(year) == 4)
        self.assertFalse(len(year) != 4)
if __name__=='__main__':
unittest.main()
|
# coint_bollinger_strategy.py
from __future__ import print_function
from collections import deque
from math import floor
import numpy as np
from qstrader.price_parser import PriceParser
from qstrader.event import (SignalEvent, EventType)
from qstrader.strategy.base import AbstractStrategy
class CointegrationBollingerBandsStrategy(AbstractStrategy):
"""
Requires:
tickers - The list of ticker symbols
events_queue - A handle to the system events queue
lookback - Lookback period for moving avg and moving std
weights - The weight vector describing
a "unit" of the portfolio
entry_z - The z-score trade entry threshold
exit_z - The z-score trade exit threshold
base_quantity - Number of "units" of the portfolio
to be traded
"""
def __init__(
self, tickers, events_queue,
lookback, weights, entry_z, exit_z,
base_quantity
):
self.tickers = tickers
self.events_queue = events_queue
self.lookback = lookback
self.weights = weights
self.entry_z = entry_z
self.exit_z = exit_z
self.qty = base_quantity
self.time = None
self.latest_prices = np.full(len(self.tickers), -1.0)
self.port_mkt_val = deque(maxlen=self.lookback)
self.invested = None
self.bars_elapsed = 0
def _set_correct_time_and_price(self, event):
"""
Sets the correct price and event time for prices
that arrive out of order in the events queue.
"""
# Set the first instance of time
if self.time is None:
self.time = event.time
# Set the correct latest prices depending upon
# order of arrival of market bar event
price = event.adj_close_price/float(
PriceParser.PRICE_MULTIPLIER
)
if event.time == self.time:
for i in range(0, len(self.tickers)):
if event.ticker == self.tickers[i]:
self.latest_prices[i] = price
else:
self.time = event.time
self.bars_elapsed += 1
self.latest_prices = np.full(len(self.tickers), -1.0)
for i in range(0, len(self.tickers)):
if event.ticker == self.tickers[i]:
self.latest_prices[i] = price
def go_long_units(self):
"""
Go long the appropriate number of "units" of the
portfolio to open a new position or to close out
a short position.
"""
for i, ticker in enumerate(self.tickers):
if self.weights[i] < 0.0:
self.events_queue.put(SignalEvent(
ticker, "SLD",
int(floor(-1.0*self.qty*self.weights[i])))
)
else:
self.events_queue.put(SignalEvent(
ticker, "BOT",
int(floor(self.qty*self.weights[i])))
)
def go_short_units(self):
"""
Go short the appropriate number of "units" of the
portfolio to open a new position or to close out
a long position.
"""
for i, ticker in enumerate(self.tickers):
if self.weights[i] < 0.0:
self.events_queue.put(SignalEvent(
ticker, "BOT",
int(floor(-1.0*self.qty*self.weights[i])))
)
else:
self.events_queue.put(SignalEvent(
ticker, "SLD",
int(floor(self.qty*self.weights[i])))
)
def zscore_trade(self, zscore, event):
"""
Determine whether to trade if the entry or exit zscore
threshold has been exceeded.
"""
# If we're not in the market...
if self.invested is None:
if zscore < -self.entry_z:
# Long Entry
print("LONG: %s" % event.time)
self.go_long_units()
self.invested = "long"
elif zscore > self.entry_z:
# Short Entry
print("SHORT: %s" % event.time)
self.go_short_units()
self.invested = "short"
# If we are in the market...
if self.invested is not None:
if self.invested == "long" and zscore >= -self.exit_z:
print("CLOSING LONG: %s" % event.time)
self.go_short_units()
self.invested = None
elif self.invested == "short" and zscore <= self.exit_z:
print("CLOSING SHORT: %s" % event.time)
self.go_long_units()
self.invested = None
def calculate_signals(self, event):
"""
Calculate the signals for the strategy.
"""
if event.type == EventType.BAR:
self._set_correct_time_and_price(event)
# Only trade if we have all prices
if all(self.latest_prices > -1.0):
# Calculate portfolio market value via dot product
# of ETF prices with portfolio weights
self.port_mkt_val.append(
np.dot(self.latest_prices, self.weights)
)
# If there is enough data to form a full lookback
# window, then calculate zscore and carry out
# respective trades if thresholds are exceeded
if self.bars_elapsed > self.lookback:
zscore = (
self.port_mkt_val[-1] - np.mean(self.port_mkt_val)
) / np.std(self.port_mkt_val)
self.zscore_trade(zscore, event)
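# A minimal instantiation sketch. The tickers, weights, thresholds and base
# quantity are illustrative assumptions, not values prescribed by the strategy
# itself; a real QSTrader backtest would feed BAR events into
# calculate_signals() via the trading session.
if __name__ == "__main__":
    import queue
    events_queue = queue.Queue()
    strategy = CointegrationBollingerBandsStrategy(
        tickers=["EWA", "EWC"],          # assumed pair of cointegrated ETFs
        events_queue=events_queue,
        lookback=20,
        weights=np.array([1.0, -0.5]),   # assumed hedge-ratio weights
        entry_z=2.0,
        exit_z=1.0,
        base_quantity=10000,
    )
    # Nothing is executed here; the object is only constructed as an example.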
|
from django.shortcuts import render
from django.views.generic.edit import CreateView
from django.contrib.auth.views import LoginView
from django.contrib.auth import login, authenticate
from .utility import SIGNIN_TEMPLATE, SIGNUP_TEMPLATE, SIGNUP_SUCCESS_URL, SIGNIN_TITLE, SIGNUP_TITLE
from login.forms import UserSignInForm, UserSignUpForm
# Create your views here.
class AuthentificationView(LoginView):
template_name = SIGNIN_TEMPLATE
form_class = UserSignInForm
def get_context_data(self, *args, **kwargs):
context = super(AuthentificationView, self).get_context_data(*args, **kwargs)
context['title'] = SIGNIN_TITLE
return context
class SignUpView(CreateView):
template_name = SIGNUP_TEMPLATE
form_class = UserSignUpForm
success_url = SIGNUP_SUCCESS_URL
def form_valid(self, form):
valid = super(SignUpView, self).form_valid(form)
username, password = form.cleaned_data.get('username'), form.cleaned_data.get('password1')
new_user = authenticate(username=username, password=password)
login(self.request, new_user)
return valid
def get_context_data(self, *args, **kwargs):
context = super(SignUpView, self).get_context_data(*args, **kwargs)
context['title'] = SIGNUP_TITLE
return context
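# A hedged urls.py sketch for these views; the path strings and URL names are
# assumptions for illustration, only the view classes come from this module.
# from django.urls import path
# from .views import AuthentificationView, SignUpView
#
# urlpatterns = [
#     path('signin/', AuthentificationView.as_view(), name='signin'),
#     path('signup/', SignUpView.as_view(), name='signup'),
# ]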
|
def hello(a):
"""
string a parameter printed
"""
return f"Hello {a}"
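# Hypothetical usage, purely for illustration:
# print(hello("world"))  # -> Hello world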
|