| blob_id (string, 40–40) | directory_id (string, 40–40) | path (string, 2–616) | content_id (string, 40–40) | detected_licenses (list, 0–69) | license_type (2 classes) | repo_name (string, 5–118) | snapshot_id (string, 40–40) | revision_id (string, 40–40) | branch_name (string, 4–63) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 2.91k–686M, nullable) | star_events_count (int64, 0–209k) | fork_events_count (int64, 0–110k) | gha_license_id (23 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (213 classes) | src_encoding (30 classes) | language (1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 2–10.3M) | extension (246 classes) | content (string, 2–10.3M) | authors (list, 1–1) | author_id (string, 0–212) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6dac4d3ae2c2516063e5d6676ce3071b96f2fbdf
|
f18f08aa4d46e0c46b16a25d844fbf6c4b2d8675
|
/regym/tests/rl_algorithms/dqn_test.py
|
f6dc770777ca0301b9add9bcd5cf1c8e505b4bd2
|
[
"MIT"
] |
permissive
|
Mark-F10/Regym
|
39d9c474c043fd328a353f4d0759cbbde59832fa
|
afb02ecea67b433e2ec6369089e937078d37770c
|
refs/heads/master
| 2022-12-17T23:10:08.229072
| 2020-07-08T18:47:59
| 2020-07-08T18:47:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,002
|
py
|
from regym.rl_algorithms.agents import build_DQN_Agent
from regym.rl_algorithms import rockAgent
from test_fixtures import RPSTask, dqn_config_dict
def test_dqn_can_take_actions(RPSTask, dqn_config_dict):
env = RPSTask.env
agent = build_DQN_Agent(RPSTask, dqn_config_dict, 'DQN')
number_of_actions = 30
for i in range(number_of_actions):
# assuming that the first observation corresponds to the observation space of this agent
random_observation = env.observation_space.sample()[0]
a = agent.take_action(random_observation, legal_actions=[0, 1, 2])
observation, rewards, done, info = env.step([a, a])
assert RPSTask.env.action_space.contains([a, a])
def test_vanilla_DQN_learns_to_beat_rock_in_RPS(RPSTask, dqn_config_dict):
'''
Test used to make sure that the agent is 'learning' by learning a best response
against an agent that only plays rock in rock-paper-scissors,
i.e. starting from random play it learns to play only (or mostly) paper.
'''
from play_against_fixed_opponent import learn_against_fix_opponent
from torch.utils.tensorboard import SummaryWriter
import regym
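# route the DQN loss logs to TensorBoard by setting the module-level summary_writer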
regym.rl_algorithms.DQN.dqn_loss.summary_writer = SummaryWriter('test_tensorboard')
agent = build_DQN_Agent(RPSTask, dqn_config_dict, 'DQN')
assert agent.training
learn_against_fix_opponent(agent, fixed_opponent=rockAgent,
agent_position=0, # Doesn't matter in RPS
task=RPSTask,
total_episodes=250, training_percentage=0.9,
reward_tolerance=2.,
maximum_average_reward=10.0,
evaluation_method='cumulative')
def test_double_DQN_learns_to_beat_rock_in_RPS(RPSTask, dqn_config_dict):
'''
Test used to make sure that the agent is 'learning' by learning a best response
against an agent that only plays rock in rock-paper-scissors,
i.e. starting from random play it learns to play only (or mostly) paper.
'''
from play_against_fixed_opponent import learn_against_fix_opponent
from torch.utils.tensorboard import SummaryWriter
import regym
regym.rl_algorithms.DQN.dqn_loss.summary_writer = SummaryWriter('test_tensorboard')
dqn_config_dict['double'] = True
agent = build_DQN_Agent(RPSTask, dqn_config_dict, 'Double_DQN')
assert agent.training and agent.algorithm.use_double
learn_against_fix_opponent(agent, fixed_opponent=rockAgent,
agent_position=0, # Doesn't matter in RPS
task=RPSTask,
total_episodes=250, training_percentage=0.9,
reward_tolerance=2,
maximum_average_reward=10.0,
evaluation_method='cumulative')
def test_dueling_DQN_learns_to_beat_rock_in_RPS(RPSTask, dqn_config_dict):
'''
Test used to make sure that the agent is 'learning' by learning a best response
against an agent that only plays rock in rock-paper-scissors,
i.e. starting from random play it learns to play only (or mostly) paper.
'''
from play_against_fixed_opponent import learn_against_fix_opponent
from torch.utils.tensorboard import SummaryWriter
import regym
regym.rl_algorithms.DQN.dqn_loss.summary_writer = SummaryWriter('test_tensorboard')
dqn_config_dict['dueling'] = True
agent = build_DQN_Agent(RPSTask, dqn_config_dict, 'Dueling_DQN')
assert agent.training and agent.algorithm.use_dueling
learn_against_fix_opponent(agent, fixed_opponent=rockAgent,
agent_position=0, # Doesn't matter in RPS
task=RPSTask,
total_episodes=250, training_percentage=0.9,
reward_tolerance=2.,
maximum_average_reward=10.0,
evaluation_method='cumulative')
|
[
"danielhp95@gmail.com"
] |
danielhp95@gmail.com
|
545e4da781cdcb91444d0c9a3028bcd81af51526
|
0f456ba3169b38c66613695c5f05ad41ecde8de1
|
/recipes/migrations/0009_auto_20210319_0124.py
|
7501dd8274bf189442a97b901ee4f375b87521ea
|
[] |
no_license
|
RustamIR/foodgram-project
|
1daa8030b2d084720eb7ead69dcae5a81cf69437
|
4f2be1ad4a3782625557f9a7163c93d756383605
|
refs/heads/master
| 2023-03-28T19:12:50.829317
| 2021-02-20T16:14:05
| 2021-04-02T07:42:39
| 332,435,040
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
# Generated by Django 2.2.6 on 2021-03-18 22:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('recipes', '0008_auto_20210319_0123'),
]
operations = [
migrations.AlterField(
model_name='ingredient',
name='dimension',
field=models.CharField(max_length=50, verbose_name='Eдиница измерения'),
),
]
|
[
"roostamishteev@yandex.ru"
] |
roostamishteev@yandex.ru
|
cda4e97c1e1e43336e78a648b69ffe1fbc0e384b
|
6517cab0ea11f6c1328d1954c5056556e38cf54f
|
/do_deramp.py
|
f19b6f5a346c422eb883b612fbb90013f8b0a9a1
|
[] |
no_license
|
fbernauer/blueseis_sandbox
|
1cd1fc7feb006d92348304b107e86ad7340dbf37
|
8605797f0dc82c72d08adceb9952b934ffc7fb28
|
refs/heads/master
| 2020-03-11T03:04:28.063819
| 2019-06-05T10:23:33
| 2019-06-05T10:23:33
| 129,736,270
| 0
| 1
| null | 2019-06-05T10:23:35
| 2018-04-16T12:02:24
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 540
|
py
|
#! /usr/bin/env python
import os
doy = ['246', '247', '248', '249', '250', '251', '252', '253', '254']
sta = 'BS3'
for d in doy:
print('processing day ' + d)
os.system("./process_blueseis.py -F /bay_mobil/stromboli2018/BlueSeis/archive/2018/XX/"+sta+"/HJ*.D/XX."+sta+"..HJ*.D.2018."+d+" -R /bay_mobil/stromboli2018/BlueSeis/archive/2018/XX/"+sta+"/YR*.D/XX."+sta+"..YR*.D.2018."+d+" -M /home/fbernauer/Experiments/Huddle_BS123/rotation_matrix_eudemo.txt -O /bay_mobil/Stromboli2018/processed/ -l 3600000 -m 10 -a mean -o 1 -p 0")
|
[
"noreply@github.com"
] |
fbernauer.noreply@github.com
|
1df682dec69f5e03efd7d4005cfab8a84fa2d5f9
|
566512c4365dc3ee591a4fd2c0f11ab71793bb02
|
/print_xml.py
|
e62d985f612391e111baf3ce3935817e4d0e1207
|
[] |
no_license
|
HOLDfoot/Xml-Python
|
d6acd08541881f3159e2a87e79b78bf01be65366
|
eb265bf851e48df50cd48c234890b10fc444a3b2
|
refs/heads/master
| 2021-01-25T14:26:31.197938
| 2018-03-03T14:07:18
| 2018-03-03T14:07:18
| 123,696,028
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,644
|
py
|
# encoding: utf-8
import lxml
from lxml import etree, objectify
from max_models import MaxModel
max_list = []
global all_max
all_max = 0
def print_element(file_name, name_contain, mini_bounds=35, print_detail=1):
tree = lxml.etree.parse(file_name)
res = tree.xpath('/resources')[0] # grab the <resources> element
had_print = False
max_text_len = 0
for ss in res.getchildren(): # iterate over the child elements of <resources>
element_name = ss.get("name")
if str(element_name).__contains__(name_contain):
element_text = ss.text
element_text_len = len(element_text)
if element_text_len >= mini_bounds:
if not had_print:
print file_name[5:len(file_name)]
had_print = True
if print_detail != 0:
print element_name + " length: " + str(element_text_len)
#print element_text_len
# max_text_len = max_text_len if max_text_len > element_text_len else element_text_len
# if max_text_len != 0:
# # print "max _signup_title_ = " + str(max_text_len)
# file_name = file_name[5:len(file_name)]
# max_model = "(" + file_name + "," + str(max_text_len) + ")"
# max_list.append(max_model)
# global all_max
# all_max = all_max if all_max > max_text_len else max_text_len
if __name__ == "__main__":
print "__main__"
prefix_tuple = ("flamingo", "friendsfinder", "getfriends", "kkfriends")
file_name = "res/values-fr/strings.xml"
name_contain = "_signup_title_"
print_element(file_name, name_contain)
|
[
"zhumingren@secretlisa.com"
] |
zhumingren@secretlisa.com
|
bbf6495ef15d263f9b0209e1ec6ddd64c08619e0
|
9962a8beef6a486d40b22ef6067b9160ccb2a26c
|
/lambdas/addObservation.py
|
2bfb81cad18d9d3fa237d4b5ed5e39d61e587ed9
|
[] |
no_license
|
neodyymi/observation-collector
|
c148cc9cfbce243e88bfab8b15eeca14b9f55581
|
a6329aa94c319f632bbfc63d14a07a6f884dfe6f
|
refs/heads/master
| 2021-04-26T23:18:18.977113
| 2018-03-06T08:22:33
| 2018-03-06T08:22:33
| 123,968,434
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,888
|
py
|
import boto3
import json
import uuid
from datetime import datetime
def create_response(body=None, error=None):
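# Build an API Gateway proxy response; the CORS header allows requests from any origin.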
if error is None:
return {
'statusCode': 200,
'headers': {
'Access-Control-Allow-Origin' : '*',
},
'body': json.dumps(body)
}
else:
return {
'statusCode': 400,
'headers': {
'Access-Control-Allow-Origin' : '*',
},
'error': json.dumps(error)
}
def lambda_handler(event, context):
dynamodb = boto3.resource('dynamodb', region_name='eu-west-1')
table = dynamodb.Table('reaktor-observations')
print(event)
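# the proxy event carries the request body as a JSON string shaped like {"Item": {...}}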
item = json.loads(event['body'])['Item']
if 'temperature' not in item or 'temperatureScale' not in item or 'locationId' not in item:
return create_response(error={'error':'Parameter missing', 'item': item})
if not item['temperature'].replace('.','',1).lstrip('-+').isnumeric():
return create_response(error={'error':'Temperature has to be numeric.'})
if item['temperatureScale'] != 'celcius' and item['temperatureScale'] != 'fahrenheit' :
return create_response(error={'error':'Temperature scale can only be fahrenheit or celcius'})
if not item['locationId'].isalnum():
return create_response(error={'error':'Invalid location id.'})
newItem = {
'id': uuid.uuid1().hex,
'locationId': item['locationId'],
'timestamp': datetime.utcnow().isoformat()+'Z',
'temperature': item['temperature'],
'temperatureScale': item['temperatureScale']
}
table.put_item(
Item = newItem
)
return create_response(body={'message' : 'Observation added.', 'Item' : newItem})
|
[
"ville-veikko.saari@cs.helsinki.fi"
] |
ville-veikko.saari@cs.helsinki.fi
|
58ccdc4f1c4a8883d310b628e0103984c4225d0f
|
f69d5688f25f881463e76b89b326f46c794843cf
|
/sitemon.py
|
c8f65a3cc3b7674c05821c9947ececa77b5d26f6
|
[] |
no_license
|
funkwhatyouheard/Sitemonitor
|
780a1c5baaafe0e46ae157297f6b4d2d46195366
|
0c89907d3ef3d2b4ae3b2bb35a8e68487084db01
|
refs/heads/master
| 2022-07-15T21:48:44.844296
| 2020-05-16T17:16:13
| 2020-05-16T17:16:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,743
|
py
|
import requests
import time
import getpass
import re
import lxml
from bs4 import BeautifulSoup
from datetime import datetime
from pushbullet import Pushbullet
# requirements: run the line below in a terminal (getpass is part of the standard library)
# pip install requests pushbullet.py bs4 lxml
# you'll also need to install pushbullet on your mobile device and
# get a valid api key
urls = {
'ticket_url':'https://www.derbycon.com/purchase-tickets',
'site_url':'https://www.derbycon.com',
'blog_url':'https://www.derbycon.com/blog',
'event_url':"https://www.eventbrite.com/e/derbycon-2019-90-finish-line-tickets-61137416659"
}
def checkForUpdates(urls, log_path, interval, pb):
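# Poll each URL on a fixed interval, diff responses against the previous pass,
# and push a Pushbullet note on any change or error.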
old_state = dict()
with open(log_path,"a+") as log_file:
try:
# get the initial state for each url
for url in urls:
log_message = f'{getDate()}\tFirst pass, getting {urls[url]}\n'
log_file.write(log_message)
with requests.get(urls[url]) as response:
if response.status_code != 200:
pb.push_note("status fail",f'Got status code {response.status_code} trying to get {urls[url]}')
break
old_state[url] = response.text
flag = False  # make sure flag is defined even before the event_url branch runs
while True:
for url in urls:
with requests.get(urls[url]) as response:
if response.status_code != 200:
pb.push_note("status fail",f'Got status code {response.status_code} trying to get {urls[url]}')
if url == 'event_url':
html = BeautifulSoup(response.text, features="lxml")
flag = False
# shtml = str(html.find_all(class_='js-event-password')[0])
# for tag in html.find_all(re.compile("^a")):
# if 'data-automation' in tag.attrs and tag.attrs['data-automation'] == 'remind-me-btn':
# flag = True
for tag in html.find_all(re.compile("^button$")):
if 'data-automation' in tag.attrs and tag.attrs['data-automation'] == 'ticket-modal-btn' and tag.text != "Details":
pb.push_note("OMG IT'S HERE", urls[url])
flag = True
break
# if not flag:
# pb.push_note("Reminder removed", urls[url])
# break
continue
else:
shtml = response.text
if old_state[url] != shtml:
log_message = f'{getDate()}\tDetected change in {urls[url]}\n'
log_file.write(log_message)
old_state[url] = shtml
pb.push_note("log",log_message)
else:
log_message = f'{getDate()}\tNo change detected for {urls[url]}\n'
log_file.write(log_message)
# set timer to sleep time and loop again
if flag:
break
time.sleep(interval)
except Exception as e:
log_file.write(f'{str(e)}\n')
pb.push_note("Error encountered",str(e))
def getDate():
return datetime.now().strftime("%m_%d_%y--%H:%M:%S")
if __name__ == "__main__":
pb = Pushbullet(getpass.getpass("Enter your API Key:"))
checkForUpdates(urls=urls, log_path=r'./sitemon.log', interval=1, pb=pb)
|
[
"erhodes@GAIG.COM"
] |
erhodes@GAIG.COM
|
41b1440af3178b6c8d0faf9af5ae3ae7979c8dde
|
2173770363bff0611a2ce609c857efba6f1b8234
|
/attendenceapp/apps.py
|
52753796c27bb764ef059e5e37244d6951d13541
|
[] |
no_license
|
karan295/attendence
|
b3041e81777328adc0458a05e04d4ab202430e2f
|
b59fb5089e156b01f073d69f727cb4e865494afd
|
refs/heads/master
| 2021-04-01T02:45:21.184255
| 2020-03-18T06:19:46
| 2020-03-18T06:19:46
| 248,151,008
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 101
|
py
|
from django.apps import AppConfig
class AttendenceappConfig(AppConfig):
name = 'attendenceapp'
|
[
"deykaran07@gmail.com"
] |
deykaran07@gmail.com
|
ed8ce7575ef1287db6eceab6410ac37199487929
|
c3570ea22a039b85a5d95b6e7773396f992516ae
|
/14 Koşullar.py
|
89472e6daa42ceda9e20b27f5728d506321d6d4a
|
[] |
no_license
|
omerdurmus61/Python
|
122089182c23a28244c3ae56e4f404bd47839b2c
|
0d8e9118965b294981401706bead59c838936994
|
refs/heads/master
| 2023-06-04T13:14:37.418113
| 2021-06-13T17:07:05
| 2021-06-13T17:07:05
| 295,943,557
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 210
|
py
|
#if & else & elif
x = int(input("Enter the number x: "))
y = int(input("Enter the number y: "))
if x > y:
print("x is greater")
elif x == y:
print("x and y are equal")
else:
print("y is greater")
|
[
"noreply@github.com"
] |
omerdurmus61.noreply@github.com
|
48a94778a48102bf2d170dbb2c0257405b4cf550
|
9461183ab838e2c9cd3a9a256ad0de394040b28d
|
/echo-client.py
|
c4ac96b2c2be5d051772979d33270a63614a5bf8
|
[] |
no_license
|
xuanfang1993/socket_of_python
|
2828b9b296a58ae978c8e9ca06e431a88739157f
|
b69bb8a541ddf44b4b40555d7ddd7be703b2bf86
|
refs/heads/master
| 2022-12-02T10:17:14.606269
| 2020-08-17T14:47:47
| 2020-08-17T14:47:47
| 288,207,547
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 220
|
py
|
import socket
HOST = '127.0.0.1'
PORT = 65432
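# AF_INET + SOCK_STREAM: an IPv4 TCP client socket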
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((HOST,PORT))
s.sendall(b'Hello World')
data = s.recv(1024)
print('Received!',repr(data))
|
[
"xuanfangpsyche@outlook.com"
] |
xuanfangpsyche@outlook.com
|
3a9e02fc8a6c5b3f54d8ee4037d79fcf25c19a85
|
d4db40737792b98f52aa0cf61e350a1fb57670d4
|
/20191002_DAY-24/domAPI.py
|
04a9bc016f26ae1460e050d278043820704267f2
|
[] |
no_license
|
NamrataMeripo/pyproject01_nit
|
2702a655b4e2d2b88f5ff2fa48503aaf35164123
|
aa11bb7c9ae0b571d0b25984fde2d36554604990
|
refs/heads/master
| 2020-07-10T07:18:03.459954
| 2019-12-17T01:40:28
| 2019-12-17T01:40:28
| 204,037,571
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 510
|
py
|
import xml
from xml.dom import minidom
doc = minidom.parse("/Users/keshavkummari/pyproject01_nit/20191002_DAY-24/staff.xml")
name = doc.getElementsByTagName("name")[0]
print(name.firstChild.data)
first = doc.getElementsByTagName("staff")
for staff in first:
sid = staff.getAttribute("id")
nickname = staff.getElementsByTagName("nickname")[0]
salary = staff.getElementsByTagName("salary")[0]
print("id:%s, nickname:%s, salary:%s" %(sid,nickname.firstChild.data,salary.firstChild.data))
|
[
"54295343+NamrataMeripo@users.noreply.github.com"
] |
54295343+NamrataMeripo@users.noreply.github.com
|
0d071e93586656a675e85a1d154f12b8e9874c53
|
74e9f6230f4dba7ad2ec26c7c4910e12c4abb0e2
|
/products/forms.py
|
c1d8058975c1cd3b652ee49a47d892f0ce243147
|
[] |
no_license
|
BassamMsmar/store-django
|
6456cb0d4e2fc15b865f42bc10202fd32b1051e5
|
ee82d6710872d2004dec6461ab5e493d1ab7e799
|
refs/heads/master
| 2023-06-11T11:23:04.956795
| 2021-06-05T18:49:49
| 2021-06-05T18:49:49
| 321,753,368
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 273
|
py
|
from django import forms
from .models import Product
class AddProductForm(forms.ModelForm):
class Meta:
model = Product
fields = (
'brand',
'title',
'price',
'description',
'image'
)
|
[
"bassammsmar@gmail.com"
] |
bassammsmar@gmail.com
|
c15503b47abeb7780592bcf35a87957372ab541b
|
02b6a68a13b091143b9eeea11d2105245d44da3e
|
/configs/pipelines/models/generic_cfrnn.py
|
e7449f389dc174dd1a76fd0e520851ae1a27cc0b
|
[
"BSD-3-Clause"
] |
permissive
|
xkortex/VIAME
|
7087eea21bcd562184a0a8a22d7cd2f346bc692e
|
1dce09362e62e519e9e24528b2491da853719f64
|
refs/heads/master
| 2020-05-15T19:45:52.240607
| 2019-12-11T18:02:07
| 2019-12-11T18:02:07
| 182,464,505
| 0
| 0
|
NOASSERTION
| 2019-12-11T18:02:09
| 2019-04-20T23:39:53
|
C++
|
UTF-8
|
Python
| false
| false
| 6,196
|
py
|
# model settings
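# Cascade R-CNN with three bbox stages on a ResNeXt-101 (32x4d) backbone and an FPN neck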
model = dict(
type='CascadeRCNN',
num_stages=3,
pretrained='open-mmlab://resnext101_32x4d',
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
use_sigmoid_cls=True),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=2,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=True),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=2,
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1],
reg_class_agnostic=True),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=2,
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067],
reg_class_agnostic=True)
])
# model training and testing settings
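# the three cascade stages are trained with increasing IoU thresholds (0.5 -> 0.6 -> 0.7)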
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
smoothl1_beta=1 / 9.0,
debug=False),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)
],
stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.0001, nms=dict(type='nms', iou_thr=0.5), max_per_img=100),
keep_all_stages=False)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0.5,
with_mask=False,
with_crowd=False,
with_label=True),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_mask=False,
with_crowd=True,
with_label=True),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_mask=False,
with_label=False,
test_mode=True))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[24, 36])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 50
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/cascade_rcnn_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
|
[
"matt.dawkins@kitware.com"
] |
matt.dawkins@kitware.com
|
ac978c07b911048b02dc7b0b58e1ef51073e5877
|
640f75f7bd5cc89aa0da3e0cf0b5f317f11941bc
|
/scripts/add_pools.py
|
1e5c7964163d01e573b02eb383451a3adba40399
|
[] |
no_license
|
kfgamaga/curve-pool-registry
|
4c8d0ca796f830ef71a0bb5836275abb9fcc53f5
|
ed52ad5ddfdeb87d42c2eb78e2fc62af8230a868
|
refs/heads/master
| 2023-03-22T22:03:40.026623
| 2021-03-13T22:51:19
| 2021-03-13T22:51:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,196
|
py
|
from brownie import Contract, Registry, accounts
from brownie.exceptions import VirtualMachineError
from brownie.network.gas.strategies import GasNowScalingStrategy
from scripts.get_pool_data import get_pool_data
from scripts.utils import pack_values
# modify this prior to mainnet use
DEPLOYER = accounts.at("0x7EeAC6CDdbd1D0B8aF061742D41877D7F707289a", force=True)
REGISTRY = "0x7D86446dDb609eD0F5f8684AcF30380a356b2B4c"
GAUGE_CONTROLLER = "0x2F50D538606Fa9EDD2B11E2446BEb18C9D5846bB"
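# 4-byte function selectors the registry uses to query each wrapped token's exchange rate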
RATE_METHOD_IDS = {
"ATokenMock": "0x00000000",
"cERC20": "0x182df0f5", # exchangeRateStored
"IdleToken": "0x7ff9b596", # tokenPrice
"renERC20": "0xbd6d894d", # exchangeRateCurrent
"yERC20": "0x77c7b8fc", # getPricePerFullShare
}
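# open with the GasNow "standard" price and scale toward "fast" while transactions are pending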
gas_strategy = GasNowScalingStrategy("standard", "fast")
def add_pool(data, registry, deployer):
swap = Contract(data["swap_address"])
token = data["lp_token_address"]
n_coins = len(data["coins"])
decimals = pack_values([i.get("decimals", i.get("wrapped_decimals")) for i in data["coins"]])
if "base_pool" in data:
# adding a metapool
registry.add_metapool(
swap, n_coins, token, decimals, {"from": deployer, "gas_price": gas_strategy}
)
return
is_v1 = data["lp_contract"] == "CurveTokenV1"
has_initial_A = hasattr(swap, "initial_A")
rate_method_id = "0x00"
if "wrapped_contract" in data:
rate_method_id = RATE_METHOD_IDS[data["wrapped_contract"]]
if hasattr(swap, "exchange_underlying"):
wrapped_decimals = pack_values(
[i.get("wrapped_decimals", i["decimals"]) for i in data["coins"]]
)
registry.add_pool(
swap,
n_coins,
token,
rate_method_id,
wrapped_decimals,
decimals,
has_initial_A,
is_v1,
{"from": deployer, "gas_price": gas_strategy},
)
else:
use_lending_rates = pack_values(["wrapped_decimals" in i for i in data["coins"]])
registry.add_pool_without_underlying(
swap,
n_coins,
token,
rate_method_id,
decimals,
use_lending_rates,
has_initial_A,
is_v1,
{"from": deployer, "gas_price": gas_strategy},
)
def add_gauges(data, registry, deployer):
pool = data["swap_address"]
gauges = data["gauge_addresses"]
gauges = gauges + ["0x0000000000000000000000000000000000000000"] * (10 - len(gauges))  # avoid mutating data["gauge_addresses"] in place
if registry.get_gauges(pool)[0] != gauges:
registry.set_liquidity_gauges(pool, gauges, {"from": deployer, "gas_price": gas_strategy})
def main(registry=REGISTRY, deployer=DEPLOYER):
"""
* Fetch pool data from Github
* Add new pools to the existing registry deployment
* Add / update pool gauges within the registry
"""
balance = deployer.balance()
registry = Registry.at(registry)
pool_data = get_pool_data()
print("Adding pools to registry...")
for name, data in pool_data.items():
pool = data["swap_address"]
if registry.get_n_coins(pool)[0] == 0:
print(f"\nAdding {name}...")
add_pool(data, registry, deployer)
else:
print(f"\n{name} has already been added to registry")
gauges = data["gauge_addresses"]
gauges = gauges + ["0x0000000000000000000000000000000000000000"] * (10 - len(gauges))
if registry.get_gauges(pool)[0] == gauges:
print(f"{name} gauges are up-to-date")
continue
print(f"Updating gauges for {name}...")
for gauge in data["gauge_addresses"]:
try:
Contract(GAUGE_CONTROLLER).gauge_types(gauge)
except (ValueError, VirtualMachineError):
print(f"Gauge {gauge} is not known to GaugeController, cannot add to registry")
gauges = False
break
if gauges:
registry.set_liquidity_gauges(
pool, gauges, {"from": deployer, "gas_price": gas_strategy}
)
print(f"Total gas used: {(balance - deployer.balance()) / 1e18:.4f} eth")
|
[
"ben@hauser.id"
] |
ben@hauser.id
|
ebe6decc791db0f6e4da77d62ed16b05281d8514
|
4cdca3b1afdd775fe0ad86071b6ebc2e00c5395d
|
/vilya/frontend/assets.py
|
84d98136f91ca38bc8e01d051d4560736c281512
|
[] |
no_license
|
alexband/vilya
|
fce3da77352e4d4d45895e2c6efffd5e4fa86e61
|
f490d4a02a6a81cb1a435ec48ee68c27fe586107
|
refs/heads/master
| 2021-01-18T05:26:42.406188
| 2014-06-23T15:07:13
| 2014-06-23T15:07:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,226
|
py
|
# -*- coding: utf-8 -*-
from flask_assets import Environment, Bundle
#: application css bundle
css_overholt = Bundle("less/overholt.less",
filters="less",
output="css/overholt.css",
debug=False)
#: consolidated css bundle
css_all = Bundle("css/bootstrap.min.css",
css_overholt,
filters="cssmin",
output="css/overholt.min.css")
#: vendor js bundle
js_vendor = Bundle("js/vendor/jquery-1.10.1.min.js",
"js/vendor/bootstrap-3.1.1.min.js",
"js/vendor/underscore-1.4.4.min.js",
"js/vendor/backbone-1.0.0.min.js",
filters="jsmin",
output="js/vendor.min.js")
#: application js bundle
js_main = Bundle("coffee/*.coffee",
filters="coffeescript",
output="js/main.js")
def init_app(app):
webassets = Environment(app)
webassets.register('css_all', css_all)
webassets.register('js_vendor', js_vendor)
webassets.register('js_main', js_main)
webassets.manifest = 'cache' if not app.debug else False
webassets.cache = not app.debug
webassets.debug = app.debug
|
[
"xutao881001@gmail.com"
] |
xutao881001@gmail.com
|
c3f8b6a2a35b6f4245e56fa424e96c576e127f2a
|
012bbe8712a41a1aa991d573cc211e1ac1059356
|
/ch04tree/eular_tour.py
|
6c08cde02dc9dfaf0389691f16b375ab1dd3ecc9
|
[] |
no_license
|
dqhcjlu06/python-algorithms
|
8c0fe62da2083f98fbf58eb35ff8b5b344661f12
|
44f7e80316562614688f322472116807d7bc2409
|
refs/heads/master
| 2020-07-31T20:37:27.145895
| 2019-10-07T08:27:28
| 2019-10-07T08:27:28
| 210,746,358
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,025
|
py
|
# Euler tour
class EulerTour:
""" Abstract base class for performing an Euler tour of a tree """
def __init__(self, tree):
self._tree = tree
def tree(self):
return self._tree
def execute(self):
if len(self._tree) > 0:
return self._tour(self._tree.root(), 0, [])
def _tour(self, p, d, path):
"""Perform tour of subtree rooted at Position p.
p Position of current node being visited
d depth of p in the tree
path list of indices of children on path from root to p
"""
self._hook_previsit(p, d, path)
results = []
path.append(0)
for c in self._tree.children(p):
results.append(self._tour(c, d+1, path))
path[-1] += 1
path.pop()
return self._hook_postvisit(p, d, path, results)
def _hook_previsit(self, p, d, path):
"""Visit Position p, before the tour of its children.
p Position of current position being visited
d depth of p in the tree
path list of indices of children on path from root to p
"""
pass
def _hook_postvisit(self, p, d, path, results):
"""Visit Position p, after the tour of its children.
p Position of current position being visited
d depth of p in the tree
path list of indices of children on path from root to p
results is a list of values returned by _hook_postvisit(c)
for each child c of p
"""
return results
class PreorderPrintIndexedTour(EulerTour):
def _hook_previsit(self, p, d, path):
print(2*d*' ' + str(p.element()))
class BinaryEulerTour(EulerTour):
"""二叉树欧拉遍历
This version includes an additional _hook_invisit that is called after the tour
of the left subtree (if any), yet before the tour of the right subtree (if any)
"""
def _tour(self, p, d, path):
results = [None, None]
self._hook_previsit(p, d, path)
if self._tree.left(p) is not None:
path.append(0)
results[0] = self._tour(self._tree.left(p), d+1, path)
path.pop()
self._hook_invisit(p, d, path)
if self._tree.right(p) is not None:
path.append(1)
results[1] = self._tour(self._tree.right(p), d+1, path)
path.pop()
answer = self._hook_postvisit(p, d, path, results)
return answer
def _hook_invisit(self, p, d, path):
"""Visit Position p, between tour of its left and right subtrees."""
pass
# Subclass that computes a graphical layout of a binary tree,
# assigning each position p an x and a y coordinate:
# x(p): number of positions visited before p in an inorder traversal of T
# y(p): depth of p in T
class BinaryLayout(BinaryEulerTour):
def __init__(self, tree):
super().__init__(tree)
self._count = 0
def _hook_invisit(self, p, d, path):
p.element().setX(self._count)
p.element().setY(d)
self._count += 1
|
[
"172057129@qq.com"
] |
172057129@qq.com
|
f9a5e29679cd15fff7964f31500dfa05e070f15c
|
dcb761102647aed31c9b330bc1e1028db0e92726
|
/.env3/bin/dipy_nlmeans
|
b5422837ad2ed650efc0e1ebb0e91c7caae8e778
|
[] |
no_license
|
neurodata/aloftus
|
8d2dea9884282811b8b13c432dce43824b091dae
|
0060691a5d8b90cc42523c02472ecce427b89921
|
refs/heads/master
| 2020-04-10T01:44:24.174629
| 2019-09-30T18:19:19
| 2019-09-30T18:19:19
| 160,724,056
| 0
| 0
| null | 2019-07-19T17:52:59
| 2018-12-06T19:49:32
|
Python
|
UTF-8
|
Python
| false
| false
| 283
|
#!/Users/alex/Dropbox/NeuroData/ndmg-top/exploration/aloftus/.env3/bin/python3.7
from __future__ import division, print_function
from dipy.workflows.flow_runner import run_flow
from dipy.workflows.denoise import NLMeansFlow
if __name__ == "__main__":
run_flow(NLMeansFlow())
|
[
"alexloftus2004@gmail.com"
] |
alexloftus2004@gmail.com
|
|
37cda867edc4d229e172d4eb71676bd3601fc5f9
|
2b119702e0649b9ace7d7df01839656ce927137b
|
/exp-42.py
|
265d59205302e71806b4ef32e2621819d7575425
|
[] |
no_license
|
GowthamSingamsetti/Python-Practise
|
b936ada645f89cda32dfe0d356271143abc75f1e
|
ab18011d91e4bf90b04c58948a738c0d11719f45
|
refs/heads/main
| 2023-08-11T05:28:27.834372
| 2021-09-17T13:23:00
| 2021-09-17T13:23:00
| 407,545,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 699
|
py
|
import openpyxl as xl
from openpyxl.chart import BarChart, Reference
wb = xl.load_workbook("transactions.xlsx")
sheet = wb['Sheet1']
cell = sheet['a2']
cell = sheet.cell(1, 2)
print(cell.value)
print(sheet.max_row)
for row in range(2, sheet.max_row + 1):
cell = sheet.cell(row, 3)
print(cell.value)
corrected_price = 0.9 * cell.value
corrected_price_cell = sheet.cell(row, 4)
corrected_price_cell.value = corrected_price
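# chart the corrected prices in column 4 (header row included in the reference)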
values = Reference(sheet,
min_row = 1,
max_row = sheet.max_row,
min_col = 4,
max_col = 4)
chart = BarChart()
chart.add_data(values)
sheet.add_chart(chart, 'e2')  # anchor cell 'e2' is an assumption; the bare attribute access did nothing
wb.save('transactions2.xlsx')  # hypothetical output name, so the source workbook stays untouched
|
[
"noreply@github.com"
] |
GowthamSingamsetti.noreply@github.com
|
4885eac56f096a4bffe4a3e1bdae240e443b1617
|
a25789434114b9b1b116f9ceb3bc55e006fdc1aa
|
/analyse_ob2_stars_and_around.py
|
4198cef03564877aeac2f03331b5f024b44f1d62
|
[
"MIT"
] |
permissive
|
kcotar/VES-263-and-Cyg-OB2-association
|
feb8f001bf5ffaa8ff4fee7934ccaafc8fe4fe56
|
06dcd30d9e6831f312b9d166ddc53733c63d034d
|
refs/heads/master
| 2020-06-01T08:57:16.161572
| 2019-08-02T08:47:07
| 2019-08-02T08:47:07
| 190,723,094
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,944
|
py
|
import matplotlib
#matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
from astropy.table import Table, join, unique, hstack, vstack
from os import path
from scipy.stats import multivariate_normal, norm
import astropy.coordinates as coord
import astropy.units as un
import imp
import joblib
from sklearn.manifold import TSNE
from sklearn.cluster import DBSCAN, KMeans
from gaia_data_queries import get_data_subset
plt.rcParams['font.size'] = 15
# plt.rcParams['font.family'] = 'cursive'
# plt.rcParams['font.cursive'] = 'Textile'
d1 = Table.read('Berlanas_2018.csv', format='ascii.csv')
d1 = d1[d1['Sample'] == 'Cygnus OB2']
d2 = Table.read('Comeron_2012.csv', format='ascii.csv')
d2 = d2[d2['Field'] == 'Cygnus OB2']
d3 = Table.read('Kiminki_2007.csv', format='ascii.csv')
# d4 = Table.read('Kharchenko_2004.csv', format='ascii.csv')
# d4 = d4[d4['Seq'] == 490]
# d4.write('Kharchenko_2004_Cyg_OB2.csv', format='ascii.csv')
d4 = Table.read('Kharchenko_2004_Cyg_OB2.csv', format='ascii.csv')
d5 = Table.read('cyg_ob2_simbad.csv', format='ascii.csv')
d6 = Table.read('Wright_2015.csv', format='ascii.csv')
ves_263_source = 2067735760399789568
cyg_ob2 = vstack((
d1['source_id', 'ra', 'dec'],
d2['source_id', 'ra', 'dec'],
# d3['source_id', 'ra', 'dec'],
# d4['source_id', 'ra', 'dec'],
# d5['source_id', 'ra', 'dec'],
d6['source_id', 'ra', 'dec']
))
#print cyg_ob2['source_id', 'ra', 'dec', 'parallax', 'pmra', 'pmdec']
cyg_ob2 = unique(cyg_ob2, keys='source_id')
print 'Members:', len(cyg_ob2), len(np.unique(cyg_ob2['source_id']))
print 'Number VES 263:', np.sum(cyg_ob2['source_id'] == ves_263_source)
print 'Number VES 263:', np.sum(d1['source_id'] == ves_263_source), np.sum(d2['source_id'] == ves_263_source), np.sum(d3['source_id'] == ves_263_source), np.sum(d4['source_id'] == ves_263_source), np.sum(d5['source_id'] == ves_263_source), np.sum(d6['source_id'] == ves_263_source)
print 'Coords median:', np.nanmedian(cyg_ob2['ra']), np.nanmedian(cyg_ob2['dec'])
gaia_file = 'cyg_ob2_gaia.csv'
if path.isfile(gaia_file):
gaia_data_all = Table.read(gaia_file, format='ascii.csv')
else:
print 'Gaia DR2 query sent'
gaia_data_all = get_data_subset(np.nanmedian(cyg_ob2['ra']),
np.nanmedian(cyg_ob2['dec']),
3., None, dist_span=None, rv_only=False)
gaia_data_all.write(gaia_file, format='ascii.csv')
for col in ['pmra', 'pmdec', 'parallax', 'rv']:
gaia_data_all[col][gaia_data_all[col].filled(-1e6) == -1e6] = np.nan  # was hard-coded to 'rv', which defeated the loop
print 'Gaia DR2 in cone:', len(gaia_data_all)
cyg_ob2 = gaia_data_all[np.in1d(gaia_data_all['source_id'], cyg_ob2['source_id'])]
gaia_ra_dec = coord.ICRS(ra=gaia_data_all['ra']*un.deg, dec=gaia_data_all['dec']*un.deg)
ob2_ra_dec = coord.ICRS(ra=np.nanmedian(cyg_ob2['ra'])*un.deg, dec=np.nanmedian(cyg_ob2['dec'])*un.deg)
idx_use_coord = gaia_ra_dec.separation(ob2_ra_dec) <= 1.*un.deg
plt.figure(figsize=(7., 3.5))
plt.hist(cyg_ob2['parallax'], range=(0.4, 0.85),
bins=np.arange(0.36, 0.92, 0.02),
color='black', histtype='step', lw=1.5)
plt.axvline(0.5962, c='red')
plt.axvline(0.58, c='black', ls='--', alpha=0.8)
plt.axvline(0.68, c='black', ls='--', alpha=0.5)
plt.axvline(0.48, c='black', ls='--', alpha=0.5)
plt.xlim(0.39, 0.86)
plt.ylabel('Number of stars in a bin')
plt.xlabel('Parallax (mas)')
plt.gca().tick_params(axis="y", direction="in")
plt.gca().tick_params(axis="x", direction="in")
# plt.grid(ls='--', c='black', alpha=0.2)
plt.tight_layout()
plt.savefig('cygob2_parallax.pdf', dpi=250)
plt.close()
# # get all the data and perform clusterings
# ob2_data = gaia_data_all[np.in1d(gaia_data_all['source_id'], cyg_ob2['source_id'])]
# ves263_data = gaia_data_all[np.in1d(gaia_data_all['source_id'], ves_263_source)]
# ob2_data_cluster = ob2_data['pmra', 'pmdec', 'parallax'].to_pandas().values
# # standardizatrion
# ob2_data_cluster = (ob2_data_cluster - np.median(ob2_data_cluster, axis=0)) / np.std(ob2_data_cluster, axis=0)
# # dbscan
# # labels = DBSCAN(0.5, min_samples=5).fit_predict(ob2_data_cluster)
# labels = KMeans(8, n_init=10, n_jobs=50).fit_predict(ob2_data_cluster)
# plt.scatter(ob2_data['ra'], ob2_data['dec'], c=labels, lw=0, s=15, cmap='Set1')
# plt.colorbar()
# plt.scatter(ves263_data['ra'], ves263_data['dec'], c='black', lw=0, s=20, marker='X')
# plt.show()
# plt.close()
#
# tsne_2d = TSNE(n_components=2, perplexity=10.0, method='barnes_hut', angle=0.5).fit_transform(ob2_data_cluster)
# plt.scatter(tsne_2d[:, 0], tsne_2d[:, 1], c=labels, lw=0, s=10, cmap='Set1')
# plt.show()
# plt.close()
#
# for col in ['ra', 'dec', 'pmra', 'pmdec', 'parallax']:
# plt.scatter(tsne_2d[:, 0], tsne_2d[:, 1], c=ob2_data[col], lw=0, s=10)
# plt.title(col)
# plt.show()
# plt.close()
#
# raise SystemExit
d_parallax = 0.1
# mean_parallax = 0.3
for mean_parallax in [0.58]: # [0.15, 0.25, 0.35, 0.45, 0.55, 0.65, 0.75, 0.85, 0.95, 1.05, 1.15]:
idx_use = np.logical_and(idx_use_coord, np.abs(gaia_data_all['parallax'] - mean_parallax) <= d_parallax)
gaia_data = gaia_data_all[idx_use]
gaia_ra_dec = coord.ICRS(ra=gaia_data['ra']*un.deg, dec=gaia_data['dec']*un.deg, distance=1e3/gaia_data['parallax']*un.pc)
gaia_gal = gaia_ra_dec.transform_to(coord.Galactocentric())
idx_ob2 = np.in1d(gaia_data['source_id'], cyg_ob2['source_id'])
idx_ves263 = gaia_data['source_id'] == ves_263_source
idx_ob2 = np.logical_xor(idx_ob2, idx_ves263)
ves_263 = gaia_data[gaia_data['source_id'] == ves_263_source]
print 'VES:', ves_263['ra'], ves_263['dec'], ves_263['parallax'], ves_263['parallax_error']
# plt.scatter(gaia_gal.x, gaia_gal.y, lw=0, s=6, alpha=0.2, c='black')
# plt.scatter(gaia_gal.x[idx_ob2], gaia_gal.y[idx_ob2], lw=0, s=12, alpha=1., c='red')
# plt.scatter(gaia_gal.x[idx_ves263], gaia_gal.y[idx_ves263], lw=0, s=50, alpha=1., c='blue')
# plt.show()
# plt.close()
idx_inparams = np.logical_and(gaia_data['pmra'] >= -0.5, gaia_data['pmra'] <= 0.5)
idx_inparams = np.logical_and(idx_inparams, gaia_data['pmdec'] >= -4)
idx_inparams = np.logical_and(idx_inparams, gaia_data['pmdec'] <= -2.5)
idx_inparams = np.logical_and(idx_inparams, gaia_data['parallax'] >= 0.65)
idx_inparams = np.logical_and(idx_inparams, gaia_data['parallax'] <= 0.85)
print 'In selection:', np.sum(idx_inparams)
print 'All stars:', len(gaia_data)
print 'OB2 stars:', np.sum(idx_ob2)
# plt.scatter(gaia_data['ra'], gaia_data['dec'], lw=0, s=6, alpha=0.2, c='black')
# plt.scatter(gaia_data['ra'][idx_inparams], gaia_data['dec'][idx_inparams], lw=0, s=6, alpha=1., c='green')
# plt.scatter(gaia_data['ra'][idx_ob2], gaia_data['dec'][idx_ob2], lw=0, s=12, alpha=1., c='red')
# plt.scatter(ves_263['ra'], ves_263['dec'], lw=0, s=50, alpha=1., c='blue')
# plt.quiver(gaia_data['ra'][idx_ob2], gaia_data['dec'][idx_ob2],
# gaia_data['pmra'][idx_ob2] - np.nanmedian(gaia_data['pmra']),
# gaia_data['pmdec'][idx_ob2] - np.nanmedian(gaia_data['pmdec']),
# color='green', width=0.003)
# plt.xlabel('RA')
# plt.ylabel('DEC')
# plt.show()
# plt.close()
#
# plt.scatter(gaia_data['phot_bp_mean_mag'][idx_inparams]-gaia_data['phot_rp_mean_mag'][idx_inparams],
# gaia_data['phot_g_mean_mag'][idx_inparams] - 2.5*np.log10(((1e3/gaia_data['parallax'][idx_inparams])/10.)**2)
# )
# plt.gca().invert_yaxis()
# plt.show()
# plt.close()
# pmra/pmdec density estimation plot
pmra_vals = np.linspace(-12, 9, 350)
pmdec_vals = np.linspace(-14, 4, 350)
xx, yy = np.meshgrid(pmra_vals, pmdec_vals)
pos = np.empty(xx.shape + (2,))
pos[:, :, 0] = xx
pos[:, :, 1] = yy
file_density = 'density_{:.2f}_{:.2f}.pkl'.format(mean_parallax, d_parallax)
if path.isfile(file_density):
total_density = joblib.load(file_density)
else:
total_density = np.full_like(xx, fill_value=0.)
for i_s, star in enumerate(gaia_data):
if i_s % 500 == 0:
print i_s
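# note: multivariate_normal expects variances on the diagonal; pmra_error/pmdec_error are 1-sigma values, so strictly they should be squared here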
cov = np.array([[star['pmra_error'], 0], [0, star['pmdec_error']]])
mean = np.array([star['pmra'], star['pmdec']])
total_density += multivariate_normal.pdf(pos, mean=mean, cov=cov)
joblib.dump(total_density, file_density)
plt.imshow(total_density, interpolation='none', origin='lower', cmap='viridis',
extent=[-12, 9, -14, 4],
vmin=0, # np.percentile(total_density, 0),
vmax=1300, # np.percentile(total_density, 100)
)
plt.colorbar()
plt.contour(xx, yy, total_density, [0, 50, 200, 350, 500, 650, 800, 950, 1100, 1250, 1400, 1550, 1700],
colors='black', linewidths=0.5)
# plt.scatter(gaia_data['pmra'], gaia_data['pmdec'], lw=0, s=1, alpha=0.2, c='black')
# plt.scatter(gaia_data['pmra'][idx_ob2], gaia_data['pmdec'][idx_ob2], lw=0, s=1, alpha=1., c='C0')
plt.scatter(ves_263['pmra'], ves_263['pmdec'], lw=0, s=2, alpha=1., c='red')
plt.title('Number of stars: {:.0f}'.format(len(gaia_data)))
plt.xlim(np.min(pmra_vals), np.max(pmra_vals))
plt.ylim(np.min(pmdec_vals), np.max(pmdec_vals))
plt.gca().tick_params(axis="y", direction="in")
plt.gca().tick_params(axis="x", direction="in")
# plt.show()
plt.tight_layout()
plt.savefig('pmra_pmdec_dist_{:.2f}_{:.2f}.pdf'.format(mean_parallax, d_parallax), dpi=350)
plt.close()
plt.figure(figsize=(7., 6.))
plt.scatter(gaia_data['pmra'], gaia_data['pmdec'], lw=0, s=3., alpha=0.5, c='0.6',
label='', zorder=1)
# plt.title('Number of stars: {:.0f}'.format(len(gaia_data)))
# plt.contour(xx, yy, total_density,
# [0, 50, 200, 350, 500, 650, 800, 950, 1100, 1250, 1400, 1550, 1700],
# colors='black', linewidths=0.45, alpha=0.7, zorder=2)
plt.contour(xx, yy, total_density,
[0, 50, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1600, 1700],
colors='black', linewidths=0.4, alpha=0.5, zorder=2)
plt.scatter(gaia_data['pmra'][idx_ob2], gaia_data['pmdec'][idx_ob2], lw=0, s=13, alpha=1., c='green',
label='Cyg OB2', zorder=3)
plt.scatter(ves_263['pmra'], ves_263['pmdec'], lw=0, s=70, alpha=1., c='red',
label='VES 263', marker='X', zorder=3)
plt.xlim(-6, 2)
plt.ylim(-8, -1)
plt.xticks([-5, -3, -1, 1], ['-5', '-3', '-1', '1'])
plt.yticks([-2, -4, -6, -8], ['-2', '-4', '-6', '-8'])
plt.gca().tick_params(axis="y", direction="in")
plt.gca().tick_params(axis="x", direction="in")
plt.xlabel(u'pmRA (mas yr$^{-1}$)')
plt.ylabel(u'pmDec (mas yr$^{-1}$)')
plt.legend(loc=4, framealpha=0.7)
plt.tight_layout()
plt.savefig('pmra_pmdec_points_{:.2f}_{:.2f}.pdf'.format(mean_parallax, d_parallax), dpi=350)
plt.close()
if mean_parallax == 0.58:
fig, ax = plt.subplots(2, 1, figsize=(7., 6.))
ax[0].errorbar(gaia_data['ra'][idx_ob2], gaia_data['pmra'][idx_ob2], yerr=gaia_data['pmra_error'][idx_ob2],
elinewidth=0.8, marker='.', ms=7, mew=0, alpha=0.8, c='green', ls='None', label='Cyg OB2',
capsize=0, capthick=3)
ax[0].errorbar(ves_263['ra'], ves_263['pmra'], yerr=ves_263['pmra_error'],
elinewidth=0.8, marker='X', ms=8, mew=0, alpha=0.8, c='red', ls='None', label='VES 263',
capsize=0, capthick=5)
lin_fit = np.polyfit(gaia_data['ra'][idx_ob2], gaia_data['pmra'][idx_ob2],
deg=1, w=1. / gaia_data['pmra_error'][idx_ob2])
idx_fit_use = np.logical_and(idx_ob2,
(np.abs(np.polyval(lin_fit, gaia_data['ra']) - gaia_data['pmra'])) <= 1.)
lin_fit = np.polyfit(gaia_data['ra'][idx_fit_use], gaia_data['pmra'][idx_fit_use],
deg=1, w=1. / gaia_data['pmra_error'][idx_fit_use])
x_lin = np.linspace(306., 310., 25)
ax[0].plot(x_lin, np.polyval(lin_fit, x_lin), lw=1, ls='--', alpha=0.75, c='black')
ax[0].set(xlabel=u'RA (deg)', ylabel=u'pmRA (mas yr$^{-1}$)', xlim=(307.1, 308.8), ylim=(-3.8, -1.6),
xticks=[307.5, 308, 308.5], xticklabels=['307.5', '308', '308.5'],
yticks=[-2, -3], yticklabels=['-2', '-3'])
ax[0].grid(ls='--', c='black', alpha=0.2)
ax[0].tick_params(axis="y", direction="in")
ax[0].tick_params(axis="x", direction="in")
ax[0].legend(loc=2)
# plt.scatter(gaia_data['ra'][idx_ob2], gaia_data['pmdec'][idx_ob2], lw=0, s=4, alpha=1., c='C0')
# plt.scatter(ves_263['ra'], ves_263['pmdec'], lw=0, s=4, alpha=1., c='C3')
# plt.xlabel('RA')
# plt.ylabel('pmDEC')
# plt.ylim(-7, -2)
# plt.tight_layout()
# plt.savefig('aa_pmdec_ra_points_{:.2f}_{:.2f}.png'.format(mean_parallax, d_parallax), dpi=350)
# plt.close()
# plt.scatter(gaia_data['dec'][idx_ob2], gaia_data['pmdec'][idx_ob2], lw=0, s=4, alpha=1., c='C0')
# plt.scatter(ves_263['dec'], ves_263['pmdec'], lw=0, s=4, alpha=1., c='C3')
ax[1].errorbar(gaia_data['dec'][idx_ob2], gaia_data['pmdec'][idx_ob2], yerr=gaia_data['pmdec_error'][idx_ob2],
elinewidth=0.8, marker='.', ms=7, mew=0, alpha=0.75, c='green', ls='None', label='Cyg OB2',
capsize=0, capthick=5)
ax[1].errorbar(ves_263['dec'], ves_263['pmdec'], yerr=ves_263['pmdec_error'],
elinewidth=0.8, marker='X', ms=8, mew=0, alpha=0.75, c='red', ls='None', label='VES 263',
capsize=0, capthick=5)
lin_fit = np.polyfit(gaia_data['dec'][idx_ob2], gaia_data['pmdec'][idx_ob2],
deg=1, w=1./gaia_data['pmdec_error'][idx_ob2])
idx_fit_use = np.logical_and(idx_ob2,
np.abs((np.polyval(lin_fit, gaia_data['dec']) - gaia_data['pmdec'])) <= 1.)
lin_fit = np.polyfit(gaia_data['dec'][idx_fit_use], gaia_data['pmdec'][idx_fit_use],
deg=1, w=1. / gaia_data['pmdec_error'][idx_fit_use])
x_lin = np.linspace(40., 42., 25)
ax[1].plot(x_lin, np.polyval(lin_fit, x_lin), lw=1, ls='--', alpha=0.75, c='black')
ax[1].set(xlabel=u'Dec (deg)', ylabel=u'pmDec (mas yr$^{-1}$)', ylim=(-5.7, -2.9), xlim=(40.55, 41.78),
xticks=[40.7, 41., 41.3, 41.6], xticklabels=['40.7', '41', '41.3', '41.6'],
yticks=[-3, -4, -5], yticklabels=['-3', '-4', '-5'])
ax[1].grid(ls='--', c='black', alpha=0.2)
ax[1].tick_params(axis="y", direction="in")
ax[1].tick_params(axis="x", direction="in")
plt.tight_layout()
plt.savefig('linfit_pmra_pmdec_points_{:.2f}_{:.2f}_ver2.pdf'.format(mean_parallax, d_parallax), dpi=300)
plt.close()
# plt.scatter(gaia_data['dec'][idx_ob2], gaia_data['pmra'][idx_ob2], lw=0, s=4, alpha=1., c='C0')
# plt.scatter(ves_263['dec'], ves_263['pmra'], lw=0, s=4, alpha=1., c='C3')
# plt.xlabel('DEC')
# plt.ylabel('pmRA')
# plt.ylim(-6, 0)
# plt.tight_layout()
# plt.savefig('aa_pmra_dec_points_{:.2f}_{:.2f}.png'.format(mean_parallax, d_parallax), dpi=350)
# plt.close()
# raise SystemExit
#
# plt.hist(gaia_data['parallax'], range=(0.45, 0.75), bins=30)
# plt.hist(gaia_data['parallax'][idx_ob2], range=(0.45, 0.75), bins=30)
# plt.axvline(ves_263['parallax'], c='blue')
# plt.show()
# plt.close()
#
# plt.hist(gaia_data['rv'], range=(-60, 40), bins=40)
# plt.hist(gaia_data['rv'][idx_ob2], range=(-60, 40), bins=40)
# plt.show()
# plt.close()
|
[
"noreply@github.com"
] |
kcotar.noreply@github.com
|
0d1062fa6ffe6c1ab9e23a5ba3b463eeaec6c5bf
|
d59c1cf0f594f001a0dc26ee3666e356a18dee97
|
/implementation_codebase/consumer_3.py
|
2e74925ff0b78b6a86aac56f3eae1ff4e4a2754a
|
[] |
no_license
|
QinZhong1014/NDN_MQTT
|
304eb5b70396571122b0da04e13384f8ff74699f
|
bf0b0f5274edef9413cd0a6f53d64258cfe65b7a
|
refs/heads/main
| 2023-08-13T10:32:13.494899
| 2021-10-15T18:11:21
| 2021-10-15T18:11:21
| 417,324,891
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,365
|
py
|
# -----------------------------------------------------------------------------
# Copyright (C) 2019-2020 The python-ndn authors
#
# This file is part of python-ndn.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
import logging
import ndn.utils
from ndn.app import NDNApp
from ndn.types import InterestNack, InterestTimeout, InterestCanceled, ValidationFailure
from ndn.encoding import Name, Component, InterestParam
# the global variable below is the channel name.
CHANNEL_ID = "sensor1"
# The global variable below is the path name; put simply, it shows where a particular sensor is located.
PATH_NAME = "/MQTT/myhome/room1/"
logging.basicConfig(format='[{asctime}]{levelname}:{message}',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
style='{')
app = NDNApp()
async def main():
global PATH_NAME, CHANNEL_ID
try:
timestamp = ndn.utils.timestamp()
name = Name.from_str(PATH_NAME+CHANNEL_ID) #+ [Component.from_timestamp(timestamp)]
print(f'Sending Interest {Name.to_str(name)}, {InterestParam(must_be_fresh=True, lifetime=10000)}')
data_name, meta_info, content = await app.express_interest(
name, must_be_fresh=True, can_be_prefix=False, lifetime=10000)
print(f'Received Data Name: {Name.to_str(data_name)}')
print(meta_info)
print(bytes(content) if content else None)
except InterestNack as e:
print(f'Nacked with reason={e.reason}')
except InterestTimeout:
print(f'Timeout')
except InterestCanceled:
print(f'Canceled')
except ValidationFailure:
print(f'Data failed to validate')
finally:
app.shutdown()
if __name__ == '__main__':
app.run_forever(after_start=main())
|
[
"noreply@github.com"
] |
QinZhong1014.noreply@github.com
|
b30d9e1cb7a39cd686c7f778df51abf58f07f81c
|
1b3ba6720e74199b54c33a146e36145640781be3
|
/Software/QuickUsbPy3/QuickUsbUnittest.py
|
61dfc5532a49723e4a6af8b87ba1179c4ca0d656
|
[
"MIT"
] |
permissive
|
BitwiseSystems/QuickUSB
|
094e27663f24ecdbca3f34349bcd03fd39526a9b
|
0b26af3340a662580122aa71e33f9636992129ca
|
refs/heads/master
| 2022-01-14T12:29:06.052344
| 2022-01-04T14:34:49
| 2022-01-04T14:34:49
| 246,888,640
| 4
| 9
| null | 2022-01-04T14:34:50
| 2020-03-12T17:04:34
|
Python
|
UTF-8
|
Python
| false
| false
| 26,269
|
py
|
"""
============================================================================
Title : QuickUSB.py
Description : QuickUSB Python API Interface
Notes :
History :
Copyright (c) 2012 Bitwise Systems. All rights reserved.
This software contains confidential information and trade secrets of
Bitwise Systems and is protected by United States and international
copyright laws. Use, disclosure, or reproduction is prohibited without
the prior express written permission of Bitwise Systems, except as agreed
in the QuickUSB Plug-In Module license agreement.
Use, duplication or disclosure by the U.S. Government is subject to
restrictions as provided in DFARS 227.7202-1(a) and 227.7202-3(a)
(1998), and FAR 12.212, as applicable. Bitwise Systems, 6489 Calle Real,
Suite E, Goleta, CA 93117.
Bitwise Systems
6489 Calle Real, Suite E
Santa Barbara, CA 93117
Voice: (805) 683-6469
Fax : (805) 683-4833
Web : www.bitwisesys.com
email: support@bitwisesys.com
============================================================================
"""
import time, sys
from QuickUsb import *
# The unit tests in this file are not intended to exercise the QuickUSB API, but
# test that the C++ Class wrapper properly wraps every QuickUSB API call.
def progress(hDevice, percent, tag):
#print percent, "%\r",
pass
def StreamCR(pBulkStream):
if not pBulkStream:
print("Stream Error")
return
# Since we are using ctypes, we must dereference the pointer to the BulkStream structure here
bs = pBulkStream[0]
if (bs.Error or (bs.BytesRequested != bs.BytesTransferred)):
print("Stream request failed with error: ", bs.Error, "("+str(bs.BytesTransferred), "of", str(bs.BytesRequested)+" bytes)")
#else:
# print "Stream request OK ", "("+str(bs.BytesTransferred), "of", str(bs.BytesRequested)+" bytes)"
if len(sys.argv) == 1:
test = ("epcs")#("base", "data", "streaming", "fpga", "epcs", "rs232", "i2c", "storage", "command", "spi", "firmware", "stat")
else:
test = sys.argv[1:]
if "base" in test:
# Test FindModules
(ok, nameList) = QuickUsb.FindModules()
if (not ok):
print("***FindModules() failed with error: ", str(Error(QuickUsb.GetLastError())))
sys.exit(1)
else:
print("FindModules OK")
qusb = QuickUsb(nameList[0])
# Test Open
(ok, ) = qusb.Open()
if (not ok):
print("***Open() failed with error: ", str(Error(QuickUsb.GetLastError())))
else:
print("Open OK")
# Test GetLastError
if (QuickUsb.GetLastError()):
print("***GetLastError() failed with error: ", str(Error(QuickUsb.GetLastError())))
else:
print("GetLastError OK")
qusb = QuickUsb(nameList[0])
# Test GetLastDriverError
if (QuickUsb.GetLastDriverError()):
print("***GetLastDriverError() failed with error: ", str(Error(QuickUsb.GetLastDriverError())))
else:
print("GetLastDriverError OK")
# Test Close
(ok, ) = qusb.Close()
if (not ok):
print("***Close() failed with error: ", str(Error(qusb.LastError())))
else:
print("Close OK")
# Test OpenEx
(ok, ) = qusb.OpenEx(nameList[0], 0)
if (not ok):
print("***OpenEx() failed with error: ", str(Error(qusb.LastError())))
else:
print("OpenEx OK")
# Test GetStringDescriptor
(ok, string) = qusb.GetStringDescriptor(0)
if (not ok):
print("***GetStringDescriptor() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetStringDescriptor OK")
# Test SetTimeout
(ok, ) = qusb.SetTimeout(1000)
if (not ok):
print("***SetTimeout() failed with error: ", str(Error(qusb.LastError())))
else:
print("SetTimeout OK")
# Test GetDllVersion
(ok, major, minor, rev) = QuickUsb.GetDllVersion()
if (not ok):
print("***GetDllVersion() failed with error: ", QuickUsb.GetLastError())
else:
print("GetDllVersion OK")
# Test GetDriverVersion
(ok, major, minor, rev) = QuickUsb.GetDriverVersion()
if (not ok):
print("***GetDriverVersion() failed with error: ", QuickUsb.GetLastError())
else:
print("GetDriverVersion OK")
# Test GetFirmwareVersion
(ok, major, minor, rev) = qusb.GetFirmwareVersion()
if (not ok):
print("***GetFirmwareVersion() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetFirmwareVersion OK")
# Test ReadPortDir
(ok, byte) = qusb.ReadPortDir(Port.A)
if (not ok):
print("***ReadPortDir() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadPortDir OK")
byte |= 0x80
# Test WritePortDir
(ok, ) = qusb.WritePortDir(Port.A, byte)
if (not ok):
print("***WritePortDir() failed with error: ", str(Error(qusb.LastError())))
else:
print("WritePortDir OK")
# Test ReadPort
c_data = CreateByteBuffer(1)
(ok, value) = qusb.ReadPort(Port.A, c_data, 1)
if (not ok):
print("***ReadPort() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadPort OK")
# Test WritePort
(ok, ) = qusb.WritePort(Port.A, byte, 1)
if (not ok):
print("***WritePort() failed with error: ", str(Error(qusb.LastError())))
else:
print("WritePort OK")
# Test ReadSetting
(ok, value) = qusb.ReadSetting(Setting.PortA)
if (not ok):
print("***ReadSetting() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadSetting OK")
value |= 0x80
# Test WriteSetting
(ok, ) = qusb.WriteSetting(Setting.PortA, value)
if (not ok):
print("***WriteSetting() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteSetting OK")
# Test ReadDefault
(ok, value) = qusb.ReadDefault(Setting.PortA)
if (not ok):
print("***ReadDefault() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadDefault OK")
# Test WriteDefault
(ok, ) = qusb.WriteDefault(Setting.PortA, value)
if (not ok):
print("***WriteDefault() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteDefault OK")
# Test StartFpgaConfiguration
(ok, ) = qusb.StartFpgaConfiguration()
if (not ok):
print("***StartFpgaConfiguration() failed with error: ", str(Error(qusb.LastError())))
else:
print("StartFpgaConfiguration OK")
else:
# Test FindModules
(ok, nameList) = QuickUsb.FindModules()
if (not ok):
print("***FindModules() failed with error: ", str(Error(QuickUsb.GetLastError())))
sys.exit(1)
else:
print("FindModules OK")
qusb = QuickUsb(nameList[0])
# Test Open
(ok, ) = qusb.Open()
if (not ok):
print("***Open() failed with error: ", str(Error(QuickUsb.GetLastError())))
else:
print("Open OK")
if "fpga" in test:
# Test WriteFpgaData
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteFpgaData(c_data, 64)
if (not ok):
print("***WriteFpgaData() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteFpgaData OK")
# Test IsFpgaConfigured
(ok, conf) = qusb.IsFpgaConfigured()
if (not ok):
print("***IsFpgaConfigured() failed with error: ", str(Error(qusb.LastError())))
else:
print("IsFpgaConfigured OK")
        # Test ConfigureFpga
        (ok, ) = qusb.ConfigureFpga("fpga.rbf")
        if (not ok):
            print("***ConfigureFpga() failed with error: ", str(Error(qusb.LastError())))
        else:
            print("ConfigureFpga OK")
if "command" in test:
# Test ReadCommand
c_data = CreateByteBuffer(64)
(ok, data) = qusb.ReadCommand(0, c_data, 64)
if (not ok):
print("***ReadCommand() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadCommand OK")
# Test WriteCommand
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteCommand(0, c_data, 64)
if (not ok):
print("***WriteCommand() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteCommand OK")
if "rs232" in test:
# Test SetRS232BaudRate
(ok, ) = qusb.SetRS232BaudRate(192000)
if (not ok):
print("***SetRS232BaudRate() failed with error: ", str(Error(qusb.LastError())))
else:
print("SetRS232BaudRate OK")
# Test GetNumRS232
(ok, value) = qusb.GetNumRS232(0)
if (not ok):
print("***GetNumRS232() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetNumRS232 OK")
# Test ReadRS232
if (value > 0):
(ok, data) = qusb.ReadRS232(1, value)
if (not ok):
print("***ReadRS232() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadRS232 OK")
# Test FlushRS232
(ok, ) = qusb.FlushRS232(0)
if (not ok):
print("***FlushRS232() failed with error: ", str(Error(qusb.LastError())))
else:
print("FlushRS232 OK")
# Test WriteRS232
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteRS232(0, c_data, 64)
if (not ok):
print("***WriteRS232() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteRS232 OK")
if "spi" in test:
# Test ReadSpi
c_data = CreateByteBuffer(64)
(ok, data) = qusb.ReadSpi(0, c_data, 64)
if (not ok):
print("***ReadSpi() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadSpi OK")
# Test WriteSpi
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteSpi(0, c_data, 64)
if (not ok):
print("***WriteSpi() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteSpi OK")
# Test WriteReadSpi
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteReadSpi(0, c_data, 64)
if (not ok):
print("***WriteReadSpi() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteReadSpi OK")
if "i2c" in test:
# Test ReadI2C
c_data = CreateByteBuffer(64)
(ok, data) = qusb.ReadI2C(0x60, c_data, 64)
if (not ok):
print("***ReadI2C() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadI2C OK")
# Test CachedWriteI2C
c_data = CreateByteBuffer(64)
(ok, ) = qusb.CachedWriteI2C(0x60, c_data, 64)
if (not ok):
print("***CachedWriteI2C() failed with error: ", str(Error(qusb.LastError())))
else:
print("CachedWriteI2C OK")
# Test WriteI2C
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteI2C(0x60, c_data, 64)
if (not ok):
print("***WriteI2C() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteI2C OK")
if "storage" in test:
# Test WriteStorage
c_data = CreateByteBuffer(64)
(ok, ) = qusb.WriteStorage(0, c_data, 64)
if (not ok):
print("***WriteStorage() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteStorage OK")
# Test ReadStorage
c_data = CreateByteBuffer(64)
(ok, ) = qusb.ReadStorage(0, c_data, 64)
if (not ok):
print("***ReadStorage() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadStorage OK")
if "epcs" in test:
# Test IdentifyEpcs
(ok, id, size) = qusb.IdentifyEpcs(0, EpcsFlags.IgnoreNCE)
if (not ok):
print("***IdentifyEpcs() failed with error: ", str(Error(qusb.LastError())))
else:
print("IdentifyEpcs OK")
if (id != EpcsId.EpcsUnknown):
# Test ConfigureEpcs
(ok, ) = qusb.ConfigureEpcs(0, "epcs.dat", 0, progress, None)
if (not ok):
print("***ConfigureEpcs() failed with error: ", str(Error(qusb.LastError())))
else:
print("ConfigureEpcs OK")
# Test VerifyEpcs
(ok, ) = qusb.VerifyEpcs(0, "epcs.dat", 0, progress, None)
if (not ok):
print("***VerifyEpcs() failed with error: ", str(Error(qusb.LastError())))
else:
print("VerifyEpcs OK")
# Test EraseEpcs
(ok, ) = qusb.EraseEpcs(0, 0, progress, None)
if (not ok):
print("***EraseEpcs() failed with error: ", str(Error(qusb.LastError())))
else:
print("EraseEpcs OK")
else:
print("***Cannot test ConfigureEpcs(): No EPCS device found")
print("***Cannot test VerifyEpcs(): No EPCS device found")
print("***Cannot test EraseEpcs(): No EPCS device found")
if "data" in test:
# Test ReadData
length = 1024
c_data = CreateByteBuffer(length)
(ok, bytesRead) = qusb.ReadData(c_data, length)
if (not ok):
print("***ReadData() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadData OK")
# Test ReadDataEx
length = 1024
c_data = CreateByteBuffer(length)
(ok, bytesRead) = qusb.ReadDataEx(c_data, length, DataFlags.OutOfOrder)
if (not ok):
print("***ReadDataEx() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadDataEx OK")
# Test WriteData
length = 1024
c_data = CreateByteBuffer(length)
(ok, ) = qusb.WriteData(c_data, length)
if (not ok):
print("***WriteData() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteData OK")
# Test WriteDataEx
length = 1024
c_data = CreateByteBuffer(length)
(ok, ) = qusb.WriteDataEx(c_data, length, DataFlags.OutOfOrder)
if (not ok):
print("***WriteDataEx() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteDataEx OK")
# Test GetNumAsyncThreads
(ok, numThreads, concurrency) = qusb.GetNumAsyncThreads()
if (not ok):
print("***GetNumAsyncThreads() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetNumAsyncThreads OK")
# Test SetNumAsyncThreads
(ok,) = qusb.SetNumAsyncThreads(0, 0)
if (not ok):
print("***SetNumAsyncThreads() failed with error: ", str(Error(qusb.LastError())))
else:
print("SetNumAsyncThreads OK")
# Test ReadDataAsync - Deprecated
size = 1024
c_data = CreateByteBuffer(size)
(ok, transId) = qusb.ReadDataAsync(c_data, size)
if (not ok):
print("***ReadDataAsync() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadDataAsync OK")
# Test AsyncWait - Deprecated
(ok, size) = qusb.AsyncWait(transId, False)
if (not ok or size != 1024):
print("***AsyncWait() failed with error: ", str(Error(qusb.LastError())))
else:
print("AsyncWait OK")
# Test WriteDataAsync - Deprecated
c_data = CreateByteBuffer(1024)
(ok, transId) = qusb.WriteDataAsync(c_data, 1024)
if (not ok):
print("***WriteDataAsync() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteDataAsync OK")
# Test AsyncWait - Deprecated
(ok, size) = qusb.AsyncWait(transId, False)
if (not ok or size != 1024):
print("***AsyncWait() failed with error: ", str(Error(qusb.LastError())))
else:
print("AsyncWait OK")
# Test AllocateDataBuffer
(ok, c_data) = qusb.AllocateDataBuffer(1024)
if (not ok):
print("***AllocateDataBuffer() failed with error: ", str(Error(qusb.LastError())))
else:
print("AllocateDataBuffer OK")
# Test AllocateDataBuffer
(ok, c_data1) = qusb.AllocateDataBuffer(1024)
if (not ok):
print("***AllocateDataBuffer() failed with error: ", str(Error(qusb.LastError())))
else:
print("AllocateDataBuffer OK")
# Test ReadBulkDataAsync
(ok, bs) = qusb.ReadBulkDataAsync(c_data, 1024, 0, 0)
if (not ok):
print("***ReadBulkDataAsync() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadBulkDataAsync OK")
# Test BulkAbort
(ok,) = qusb.BulkAbort(None)
if (not ok):
print("***BulkAbort() failed with error: ", str(Error(qusb.LastError())))
else:
print("BulkAbort OK")
# Test BulkWait
(ok, ) = qusb.BulkWait(bs, False)
if (not ok):
print("***BulkWait() failed with error: ", str(Error(qusb.LastError())))
else:
print("BulkWait OK")
# Test WriteBulkDataAsync
(ok, bs) = qusb.WriteBulkDataAsync(c_data, 1024, StreamCR, 0)
if (not ok):
print("***WriteBulkDataAsync() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteBulkDataAsync OK")
# Test WriteBulkDataAsync
(ok, bs1) = qusb.WriteBulkDataAsync(c_data1, 1024, StreamCR, 0)
if (not ok):
print("***WriteBulkDataAsync() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteBulkDataAsync OK")
# Test BulkWait
(ok, ) = qusb.BulkWait(bs, False)
if (not ok):
print("***BulkWait() failed with error: ", str(Error(qusb.LastError())))
else:
print("BulkWait OK")
if (bs.Error or (bs.BytesRequested != bs.BytesTransferred)):
print("WriteBulkDataAsync request failed with error: ", bs.Error, bs.BytesRequested, bs.BytesTransferred)
# Test BulkWait
(ok, ) = qusb.BulkWait(bs1, False)
if (not ok):
print("***BulkWait() failed with error: ", str(Error(qusb.LastError())), qusb.LastDriverError())
print(bs1.Buffer, bs1.BytesRequested, bs1.BytesTransferred, bs1.CompletionRoutine, bs1.CompletionType, bs1.Error)
else:
print("BulkWait OK")
if (bs1.Error or (bs1.BytesRequested != bs1.BytesTransferred)):
print("WriteBulkDataAsync request failed with error: ", bs1.Error, bs.BytesRequested, bs.BytesTransferred)
# Test FreeDataBuffer
(ok,) = qusb.FreeDataBuffer(c_data)
if (not ok):
print("***FreeDataBuffer() failed with error: ", str(Error(qusb.LastError())))
else:
print("FreeDataBuffer OK")
# Test FreeDataBuffer
(ok,) = qusb.FreeDataBuffer(c_data1)
if (not ok):
print("***FreeDataBuffer() failed with error: ", str(Error(qusb.LastError())))
else:
print("FreeDataBuffer OK")
if "firmware" in test:
# Test WriteFirmware
if sys.platform.lower().startswith("linux") or sys.platform.lower().startswith("darwin"):
path = "/Projects/Bitwise/QuickUSB/Library/Firmware/QuickUSB Firmware v2.15.1/quickusb-simple v2.15.1.qusb"
else:
path = "C:\\Projects\\Bitwise\\QuickUSB\\Library\\Firmware\\QuickUSB Firmware v2.15.1\\quickusb-simple v2.15.1.qusb"
(ok, ) = qusb.WriteFirmware(path, 0, progress, None)
if (not ok):
print("***WriteFirmware() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteFirmware OK")
# Test VerifyFirmware
(ok, ) = qusb.VerifyFirmware(path, progress, None)
if (not ok):
print("***VerifyFirmware() failed with error: ", str(Error(qusb.LastError())))
else:
print("VerifyFirmware OK")
if "streaming" in test:
# Test ReadBulkDataStartStream
(ok, streamID) = qusb.ReadBulkDataStartStream(None, 2, 16 * 1024, StreamCR, None, 1, 1)
if (not ok):
print("***ReadBulkDataStartStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("ReadBulkDataStartStream OK")
# Stream data for a little while
time.sleep(0.5)
# Pause the stream
(ok, ) = qusb.PauseStream(streamID, False)
if (not ok):
print("***PauseStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("PauseStream OK")
# Wait a little
time.sleep(0.5)
# Resume the stream
(ok, ) = qusb.ResumeStream(streamID)
if (not ok):
print("***ResumeStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("ResumeStream OK")
# Stream data for a little while
time.sleep(0.5)
# Test BulkAbort
(ok,) = qusb.BulkAbort(None)
if (not ok):
print("***BulkAbort() failed with error: ", str(Error(qusb.LastError())))
else:
print("BulkAbort OK")
# Test StopStream
(ok, ) = qusb.StopStream(streamID, False)
if (not ok):
print("***StopStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("StopStream OK")
time.sleep(0.1)
# Test WriteBulkDataStartStream
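        # Allocate an array of four buffer pointers for the multi-buffer write stream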
bufArray = (PQBYTE * 4)()
allocOk = True
for k in range(4):
# Test AllocateDataBuffer
(ok, c_data) = qusb.AllocateDataBuffer(1024)
if (not ok):
allocOk = False
else:
bufArray[k] = c_data
if (not allocOk):
print("***AllocateDataBuffer() failed with error: ", str(Error(qusb.LastError())))
else:
print("AllocateDataBuffer-Array OK")
(ok, streamID) = qusb.WriteBulkDataStartStream(bufArray, 4, 1024, StreamCR, None, 0, 0)
if (not ok):
print("***WriteBulkDataStartStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteBulkDataStartStream OK")
# Stream data for a little while
stime = time.time()
processOk = True
statusOk = True
while (time.time() - stime) < 2.0:
#This is a single-threaded stream test, so we must perform processing
(ok,) = qusb.ProcessStream(streamID, 50)
if ((not ok) and (qusb.LastError() != Error.Timeout)):
processOk = False
break
(ok, status, error) = qusb.GetStreamStatus(streamID)
if ((not ok) or (error != Error.NoError)):
statusOk = False
break
# Test StopStream
(ok, ) = qusb.StopStream(streamID, False)
if (not ok):
print("***StopStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("StopStream OK")
# Report if ProcessStream failed
if (not processOk):
print("***ProcessStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("ProcessStream OK")
# Report if GetStreamStatus failed
if (not statusOk):
print("GetStreamStatus() failed with error: ", str(Error(qusb.LasError())))
else:
print("GetStreamStatus OK")
# Test FreeDataBuffer
freeOk = True
for k in range(4):
(ok,) = qusb.FreeDataBuffer(bufArray[k])
if (not ok):
freeOk = False
if (not freeOk):
print("***FreeDataBuffer() failed with error: ", str(Error(qusb.LastError())))
else:
print("FreeDataBuffer-Array OK")
# Test ReadBulkDataStartStreamToFile
(ok, streamID) = qusb.ReadBulkDataStartStreamToFile("test.bin", 4, 1024, 100, StreamFlag.CreateAlways)
if (not ok):
print("ReadBulkDataStartStreamToFile() failed with error: ", str(Error(qusb.LasError())))
else:
print("ReadBulkDataStartStreamToFile OK")
# Wait for the stream to finish
statusOk = True
while True:
(ok, status, error) = qusb.GetStreamStatus(streamID)
if (error != Error.NoError):
statusOk = False
break
# Check if the stream has stopped running
if (status != StreamStatus.Running):
                break
# Test StopStream
(ok, ) = qusb.StopStream(streamID, False)
if (not ok):
print("***StopStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("StopStream OK")
# Report if GetStreamStatus failed
if (not statusOk):
print("GetStreamStatus() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetStreamStatus OK")
# Test WriteBulkDataStartStreamFromFile
(ok, streamID) = qusb.WriteBulkDataStartStreamFromFile("test.bin", 4, 1024, 100, StreamFlag.CreateAlways)
if (not ok):
print("WriteBulkDataStartStreamFromFile() failed with error: ", str(Error(qusb.LastError())))
else:
print("WriteBulkDataStartStreamFromFile OK")
# Wait for the stream to finish
statusOk = True
while True:
(ok, status, error) = qusb.GetStreamStatus(streamID)
if (error != Error.NoError):
statusOk = False
break
# Check if the stream has stopped running
if (status != StreamStatus.Running):
                break
# Test StopStream
(ok, ) = qusb.StopStream(streamID, False)
if (not ok):
print("***StopStream() failed with error: ", str(Error(qusb.LastError())))
else:
print("StopStream OK")
# Report if GetStreamStatus failed
if ((not statusOk) and (error != Error.FileEOF)):
print("GetStreamStatus() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetStreamStatus OK")
if "stat" in test:
# Test QuickUsbResetStatistic
(ok, ) = qusb.ResetStatistic(Stat.All)
if (not ok):
print("***ResetStatistic() failed with error: ", str(Error(qusb.LastError())))
else:
print("ResetStatistic OK")
# Test QuickUsbGetStatistic
(ok, value) = qusb.GetStatistic(Stat.TotalThroughput, StatUnit.MBPerSec, 0)
if (not ok):
print("***GetStatistic() failed with error: ", str(Error(qusb.LastError())))
else:
print("GetStatistic OK")
# Test Close
(ok, ) = qusb.Close()
if (not ok):
print("***Close() failed with error: ", str(Error(qusb.LastError())))
else:
print("Close OK")
|
[
"bhenry@bitwisesys.com"
] |
bhenry@bitwisesys.com
|
cc64e943d7525291596095b2f73f6142a0b0f5ae
|
ea43c87200430f6ba44a631897e67b2a31e0fd9f
|
/scraper/scraper/settings.py
|
021155affd18589a9a796d611b7b5e6a5454d535
|
[] |
no_license
|
rcfox/PowerMetalGraph
|
f92954d87fd734c4459faa31f7bf112c36850408
|
530009581cbc7b1d5b510abcd3822a564c7cdc6a
|
refs/heads/master
| 2020-07-26T22:17:31.986536
| 2014-11-16T05:36:28
| 2014-11-16T05:36:28
| 26,528,486
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 656
|
py
|
# -*- coding: utf-8 -*-
# Scrapy settings for scraper project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'scraper'
SPIDER_MODULES = ['scraper.spiders']
NEWSPIDER_MODULE = 'scraper.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
USER_AGENT = 'Ryan Fox (http://rcfox.ca)'
HTTPCACHE_ENABLED = True
HTTPCACHE_IGNORE_MISSING = True
DOWNLOAD_DELAY = 2
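# 300 is the middleware order; for downloader middlewares, lower values sit closer to the engine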
DOWNLOADER_MIDDLEWARES = {
'scrapy.contrib.downloadermiddleware.httpcache.HttpCacheMiddleware': 300
}
|
[
"foxryan@gmail.com"
] |
foxryan@gmail.com
|
d379fed41e0a25143b5d39b82fae3c78d7e19137
|
20e8d7c95cae9539601444b146d7613d7c235a8c
|
/02_word_count/02_regex.py
|
c281076f6f95336a1c33e4e54b040fcc0848c2ad
|
[] |
no_license
|
Kowczi/big-data-hadoop-map-reduce-course
|
03058a441d5af094a452d8644489167cbbe4a7bd
|
4341fec1326b53a10d7fa5ef1d82ea67dda8f5d9
|
refs/heads/master
| 2022-04-24T01:14:45.394150
| 2020-04-20T19:37:58
| 2020-04-20T19:37:58
| 255,967,234
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
import re
# Building a regular expression. The regex below matches runs of
# one or more so-called word characters.
WORD_RE = re.compile(r'[\w]+')
words = WORD_RE.findall('Big data, hadoop and map reduce. (hello world)')
print(words)
|
[
"kowczi@gmail.com"
] |
kowczi@gmail.com
|
cbc69c07a863d77c65ff5f3b1a5e16dca6fd6d4e
|
626bc832c8cb4d2804cdaa2e598abdfdbbcf0458
|
/t2-916-Popa-Andrei-Calin/UI.py
|
ef449914d8c34e6bd53187a813a67485f753bc45
|
[] |
no_license
|
andrei47w/Fundamentals-of-Programming
|
cc5f3d998e451c5f0535d0e75eeed6bfb9fe7171
|
b0839c1f9620da73cc117ba1539899509544ad1a
|
refs/heads/main
| 2023-08-18T14:11:01.842273
| 2021-10-22T16:56:14
| 2021-10-22T16:56:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 494
|
py
|
from entities import read_player_file
from repo import Repo
class UI:
def __init__(self):
self.repo = Repo()
def list_players(self):
for player in self.repo.list():
print(str(player))
def start(self):
self.repo.sort()
self.list_players()
print('\nRemaining players after the qualifying round: ')
self.repo.play_qualifying_round()
self.repo.sort()
self.list_players()
# self.repo.play_tourney()
|
[
"kalinpopa@outlook.com"
] |
kalinpopa@outlook.com
|
2d450af746eb856afc99a0fcde561e1d291d4721
|
af3b6f7cef150c471e777cc4eca761f77fff441d
|
/app.py
|
950cc0668626482314cbf4b8ca1caafb33369ac1
|
[] |
no_license
|
MUSKANJASSAL/opencvCamera
|
83037202dfe7c05347c1607ff9f29678eaee2322
|
9464d6333fa900c07106b53b9f3e59effd6c329d
|
refs/heads/master
| 2022-12-06T17:52:51.468125
| 2020-08-20T04:53:54
| 2020-08-20T04:53:54
| 288,909,010
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 902
|
py
|
from flask import Flask, Response
import cv2
app = Flask(__name__)
video = cv2.VideoCapture(0)
@app.route('/')
def index():
return "Default Message"
def gen(video):
    while True:
        # read the next frame from the video capture
        success, image = video.read()
        if not success:
            # stop streaming if no frame could be read
            break
        # encode the frame as a JPEG image
        ret, jpeg = cv2.imencode('.jpg', image)
        # convert the encoded frame into raw bytes
        frame = jpeg.tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')
@app.route('/video_feed')
def video_feed():
global video
return Response(gen(video),
mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
# app.run(host='0.0.0.0', port=2204, threaded=True)
app.run(debug=True)
|
[
"noreply@github.com"
] |
MUSKANJASSAL.noreply@github.com
|
9b53aa7ada60e066d5410a3d2ed0b2915149a81d
|
52d4debb2636893bc323d6dd76b44c813b5676fa
|
/hardware/HardwareInterface.py
|
4bde51f7a962b5470362f2b3cd6e912bccc34216
|
[] |
no_license
|
jamesfowkes/home-energy-monitor
|
f823d9918bd551fd033889b466bdfad35750f38e
|
6314eac302960eba2f7a7414790ec6fe966012ef
|
refs/heads/master
| 2021-01-17T18:03:09.214321
| 2017-07-06T07:33:49
| 2017-07-06T07:33:49
| 95,536,426
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,626
|
py
|
import serial
import datetime
import time
import sys
class HardwareInterface:
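    """Serial link to the energy-monitor hardware; commands and replies are ASCII lines."""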
def __init__(self, port, baudrate=115200):
self.port = serial.Serial(port, baudrate, timeout=2)
self.port.readline()
@staticmethod
def get_set_timestamp_command_bytes(timestamp):
return "!T{}\n".format(timestamp).encode("ascii")
def set_timestamp(self, timestamp_dt=None, debug=False):
timestamp_dt = timestamp_dt or datetime.datetime.now()
timestamp = "{:%H:%M:%S.%f}".format(timestamp_dt)
timestamp = timestamp[:-3]
command = self.get_set_timestamp_command_bytes(timestamp)
if debug:
print("Sending {}".format(command.decode("ascii")))
self.port.write(command)
resp = self.port.readline()
if debug:
print("Got {}".format(resp))
return resp.decode("ascii"), timestamp_dt
def get_timestamp(self, debug=False):
command = "?T\n".encode("ascii")
if debug:
print("Sending {}".format(command.decode("ascii")))
self.port.write(command)
resp = self.port.readline().decode("ascii").strip()
if debug:
print("Got {}".format(resp))
return datetime.datetime.combine(
datetime.datetime.today(),
datetime.datetime.strptime(resp, "%H:%M:%S.%f").time()
)
def get_pulse_time(self, debug=False):
command = "?I\n".encode("ascii")
if debug:
print("Sending {}".format(command.decode("ascii")))
self.port.write(command)
resp = self.port.readline().decode("ascii").strip()
if debug:
print("Got {}".format(resp))
if resp != "--":
return datetime.datetime.combine(
datetime.datetime.today(),
datetime.datetime.strptime(resp, "%H:%M:%S.%f").time()
)
else:
return None
def fake_pulse(self, debug=False):
command = "!F\n".encode("ascii")
if debug:
print("Sending {}".format(command.decode("ascii")))
self.port.write(command)
resp = self.port.readline().decode("ascii").strip()
if debug:
print("Got {}".format(resp))
return resp
def get_pulse_count(self, debug=False):
command = "?C\n".encode("ascii")
if debug:
print("Sending {}".format(command.decode("ascii")))
self.port.write(command)
resp = self.port.readline().decode("ascii").strip()
if debug:
print("Got {}".format(resp))
return resp
|
[
"jamesfowkes@gmail.com"
] |
jamesfowkes@gmail.com
|
ea87132b34cce53dff9893dd206eae888f13f125
|
7c8e49455f0cd9a1e82900be41b55e56dcb7ff85
|
/app/translate.py
|
282b86af4e639b817ca1cb680fedc3105c239e24
|
[] |
no_license
|
webdevalex72/microblog-miguel
|
5de669bb398706999086d323fc10128c63fc6772
|
132ec6ead71e5a42128085c197ce57e455110735
|
refs/heads/main
| 2023-02-27T06:29:56.114900
| 2021-02-01T07:44:22
| 2021-02-01T07:44:22
| 318,638,046
| 0
| 0
| null | 2021-01-20T12:47:24
| 2020-12-04T21:22:09
|
Python
|
UTF-8
|
Python
| false
| false
| 854
|
py
|
import json
import requests
from flask_babel import _
from flask import current_app
def translate(text, source_language, dest_language):
if 'MS_TRANSLATOR_KEY' not in current_app.config or \
not current_app.config['MS_TRANSLATOR_KEY']:
return _('Error: the translation service is not configured.')
auth = {
'Ocp-Apim-Subscription-Key': current_app.config['MS_TRANSLATOR_KEY'],
'Ocp-Apim-Subscription-Region': 'centralus'}
r = requests.post(
'https://api.cognitive.microsofttranslator.com'
'/translate?api-version=3.0&from={}&to={}'.format(
source_language, dest_language), headers=auth, json=[
{'Text': text}])
if r.status_code != 200:
return _('Error: the translation service failed.')
return r.json()[0]['translations'][0]['text']
|
[
"web.dev.alex72@gmail.com"
] |
web.dev.alex72@gmail.com
|
6d60ca3e949e3a6069a4d35c1470040747faa7fc
|
c01e80d3d68d97b66e7e6639d94906c77ee8d7f8
|
/bot/keep_alert_bot.py
|
ec9b649499793909ecee03c584eb5e9d05aadd7b
|
[
"Apache-2.0"
] |
permissive
|
latenthero/keep-alert-bot
|
351c58e84c1cd209d3492d8156f24d5533fd186a
|
47a65f2cfedc6d02fb60c877893e501d6aca900e
|
refs/heads/master
| 2023-05-26T04:11:09.869617
| 2020-07-28T16:12:59
| 2020-07-28T16:12:59
| 283,247,372
| 0
| 0
|
Apache-2.0
| 2023-05-23T00:11:46
| 2020-07-28T15:07:04
|
Python
|
UTF-8
|
Python
| false
| false
| 5,976
|
py
|
import logging
from web3 import Web3
from aiogram import Bot, Dispatcher, executor, types
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters.state import State, StatesGroup
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from unbonded_eth_checker import UnbondedEthChecker
import mongo_helper
import config
bot = Bot(token=config.bot_token)
memory_storage = MemoryStorage()
dp = Dispatcher(bot, storage=memory_storage)
class AddAddress(StatesGroup):
waiting_for_new_address = State()
class RemoveAddress(StatesGroup):
waiting_for_remove = State()
class SetThreshold(StatesGroup):
waiting_for_new_threshold = State()
kb_add = u'\U00002795 Add address'
kb_remove = u'\U0000274C Remove address'
kb_threshold = u'\U0000270F Set threshold'
kb_info = u'\U00002139 Info'
# Emoji codes
emoji_ok = u'\U00002705'
emoji_error = u'\U0001F6AB'
emoji_list = u'\U0001F4CB'
emoji_flag = u'\U0001F6A9'
main_keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
main_keyboard.add(types.KeyboardButton(text=kb_add))
main_keyboard.add(types.KeyboardButton(text=kb_remove))
main_keyboard.add(types.KeyboardButton(text=kb_threshold))
main_keyboard.add(types.KeyboardButton(text=kb_info))
remove_keyboard = types.ReplyKeyboardRemove()
def validate_address(address):
try:
Web3.toChecksumAddress(address)
return True
except Exception:
return False
def validate_threshold(value):
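    # Returns the parsed float, or False if unparseable; note that 0.0 is
    # falsy, so a zero threshold is also treated as invalid downstream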
try:
return float(value)
except ValueError:
return False
@dp.message_handler(commands=['start'])
async def cmd_start(message: types.Message):
# poll_keyboard.add(types.KeyboardButton(text='Cancel'))
await message.answer('Choose action', reply_markup=main_keyboard)
@dp.message_handler(lambda message: message.text == kb_add, state='*')
async def add_address_step_1(message: types.Message):
await message.answer('Send me address:', reply_markup=remove_keyboard)
await AddAddress.waiting_for_new_address.set()
@dp.message_handler(state=AddAddress.waiting_for_new_address, content_types=types.ContentTypes.TEXT)
async def add_address_step_2(message: types.Message, state: FSMContext):
address = message.text
if not validate_address(address):
await message.answer('%s Address `%s` is invalid' % (emoji_error, address), reply_markup=main_keyboard, parse_mode=types.ParseMode.MARKDOWN)
else:
mongo_helper.add_address_to_db(address, message.chat.id)
await message.answer('%s Address `%s` added' % (emoji_ok, address), reply_markup=main_keyboard, parse_mode=types.ParseMode.MARKDOWN)
await state.finish()
@dp.message_handler(lambda message: message.text == kb_remove, state='*')
async def remove_address_step_1(message: types.Message):
addresses = mongo_helper.get_addresses_from_db(message.chat.id)
if not addresses:
await message.answer('You have no addresses. Try to add a new one', reply_markup=main_keyboard)
else:
keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
for address in addresses:
keyboard.add(address)
keyboard.add('Cancel')
await message.answer('Click address to remove:', reply_markup=keyboard)
await RemoveAddress.waiting_for_remove.set()
@dp.message_handler(state=RemoveAddress.waiting_for_remove, content_types=types.ContentTypes.TEXT)
async def remove_address_step_2(message: types.Message, state: FSMContext):  # note that this handler takes a second argument
addresses = mongo_helper.get_addresses_from_db(message.chat.id)
if message.text in addresses:
address = message.text
mongo_helper.remove_address_from_db(address, message.chat.id)
await message.answer('%s Address `%s` removed' % (emoji_ok, address), reply_markup=main_keyboard, parse_mode=types.ParseMode.MARKDOWN)
else:
await message.answer('Canceled', reply_markup=main_keyboard)
await state.finish()
@dp.message_handler(lambda message: message.text == kb_threshold, state='*')
async def set_threshold_step_1(message: types.Message):
await message.answer('Send me threshold in ETH:', reply_markup=remove_keyboard)
await SetThreshold.waiting_for_new_threshold.set()
@dp.message_handler(state=SetThreshold.waiting_for_new_threshold, content_types=types.ContentTypes.TEXT)
async def set_threshold_step_2(message: types.Message, state: FSMContext):  # note that this handler takes a second argument
threshold = validate_threshold(message.text.replace(',', '.'))
if not threshold:
await message.answer('%s Threshold `%s` is invalid' % (emoji_error, message.text), reply_markup=main_keyboard, parse_mode=types.ParseMode.MARKDOWN)
else:
mongo_helper.add_threshold_to_db(threshold, message.chat.id)
await message.answer('%s Threshold `%s ETH` set' % (emoji_ok, threshold), reply_markup=main_keyboard, parse_mode=types.ParseMode.MARKDOWN)
await state.finish()
@dp.message_handler(lambda message: message.text == kb_info)
async def get_info(message: types.Message):
addresses = mongo_helper.get_addresses_from_db(message.chat.id)
    addresses = '\n'.join(addresses)
info = '%s *Your addresses:*\n\n%s\n\n%s *Alert threshold:* %s ETH' % \
(emoji_list, addresses, emoji_flag, mongo_helper.get_threshold_from_db(message.chat.id))
await message.answer(info, reply_markup=main_keyboard, parse_mode=types.ParseMode.MARKDOWN)
if __name__ == '__main__':
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('web3').setLevel(logging.WARNING)
logging.basicConfig(format='[%(asctime)s] %(filename)s:%(lineno)d %(levelname)s - %(message)s', level=logging.INFO,
filename=config.log_name, datefmt='%d.%m.%Y %H:%M:%S')
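    # Launch the background balance checker thread before entering the bot's polling loop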
checker = UnbondedEthChecker('UnbondedEthChecker')
checker.start()
executor.start_polling(dp, skip_updates=True)
|
[
"latenthero@protonmail.com"
] |
latenthero@protonmail.com
|
3d87dc557f14a1fd27f9bfb4e0cfb4a1d1985fa5
|
7d7d537755f8980d0e8fd4c6d3c21a95f52c7f87
|
/mtweet/admin.py
|
e9c4aed47d67e9fc7e61eb35d3c6a9b05df18622
|
[] |
no_license
|
mayankkushal/mini-twitter
|
d4145550b9d97a4acaebd54f3853d431c4de38b0
|
4abe550303c01f1bc249e57426f6f82c898a9dbe
|
refs/heads/master
| 2021-01-11T20:01:03.088985
| 2017-01-25T10:46:01
| 2017-01-25T10:46:01
| 79,448,603
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 205
|
py
|
from django.contrib import admin
from mtweet.models import UserProfile, Post, Comment
# Register your models here.
admin.site.register(UserProfile)
admin.site.register(Post)
admin.site.register(Comment)
|
[
"noreply@github.com"
] |
mayankkushal.noreply@github.com
|
77c6dffb5337cfbc33ff1d31e091536f8984aa46
|
35469c680bae483a42dfe9a5e71f98300d0b4c86
|
/streamlit-venv/first_app/first_app.py
|
a07cd83238e99c4830945458a1525b14ae709999
|
[] |
no_license
|
difo23/streamlit-apps
|
cc0994fdbcada131e7b26e9ec260b633e75ccfb0
|
e249e0bdb152609b263920ad2b97b144ff50801c
|
refs/heads/main
| 2023-06-10T17:48:39.807490
| 2021-06-24T03:39:07
| 2021-06-24T03:39:07
| 379,794,096
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
import streamlit as st
# To make things easier later, we're also importing numpy and pandas for
# working with sample data.
import numpy as np
import pandas as pd
# Draw a title and some text to the app:
'''
# This is the document title
:+1:
:100:
This is some _markdown_.
'''
df = pd.DataFrame({'col1': [1,2,3]})
df # <-- Draw the dataframe
":+1"
'Hola a todo el mundo **En `string` normales funciona el markdown**'
"100"
x = 10
'x', x # <-- Draw the string 'x' and then the value of x
'La coma de la linea anterior es parte de la sintaxis!'
|
[
"lizandro.ramirez@ucateci.edu.do"
] |
lizandro.ramirez@ucateci.edu.do
|
8e7b656d729088b17ad2b5bc569419b228868e8e
|
296bc22dbcb0c63d197777e9dccb07571d86b577
|
/devel/tevent/patches/patch-buildtools_wafsamba_samba__install.py
|
e47e541abf0b88597b21d59c57780e4006764bd1
|
[] |
no_license
|
virtua-network/pkgsrc
|
e0adef65116d6531ce5bf00e9f3a72dac913d93b
|
115af209c442eec0f31b6f554727e696ba49515b
|
refs/heads/trunk
| 2020-04-22T16:08:27.508917
| 2019-01-10T09:38:18
| 2019-01-10T09:38:18
| 112,642,527
| 0
| 0
| null | 2017-12-12T14:28:39
| 2017-11-30T17:52:23
|
Makefile
|
UTF-8
|
Python
| false
| false
| 842
|
py
|
$NetBSD: patch-buildtools_wafsamba_samba__install.py,v 1.1 2019/01/03 19:30:47 adam Exp $
Apply correct install_name on Darwin.
--- buildtools/wafsamba/samba_install.py.orig 2018-07-12 08:23:36.000000000 +0000
+++ buildtools/wafsamba/samba_install.py
@@ -150,6 +150,9 @@ def install_library(self):
bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name))
if dev_link:
bld.symlink_as(os.path.join(install_path, dev_link), os.path.basename(install_name))
+ if getattr(self, 'samba_type', None) != 'PYTHON' and '-dynamiclib' in self.env['LINKFLAGS']:
+ self.env.append_value('LINKFLAGS', '-install_name')
+ self.env.append_value('LINKFLAGS', os.path.join(install_path, install_name))
finally:
bld.all_envs['default'] = default_env
|
[
"adam@pkgsrc.org"
] |
adam@pkgsrc.org
|
17b61f581f4214359f2214304fdd0f0683ef8675
|
d4c4e798cd0c94f28a9282bc8028157855f8fa96
|
/Tree/shortestPathBinaryMatrix.py
|
e7eb028009840ed7222b4730c047ff33797b77a2
|
[] |
no_license
|
tijugeorge/DS-and-Algos
|
5ab53fff764c8dd62b385739b696406700c5516a
|
2bab1231f9c7050972e3855281884fbe1951efa1
|
refs/heads/master
| 2023-08-31T07:30:38.374031
| 2023-08-29T00:09:50
| 2023-08-29T00:09:50
| 75,580,515
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 962
|
py
|
from collections import deque
from typing import List
class Solution:
def shortestPathBinaryMatrix(self, grid: List[List[int]]) -> int:
if grid[0][0] == 1:
return -1
def valid(row, col):
return 0 <= row < n and 0 <= col < n and grid[row][col] == 0
n = len(grid)
seen = {(0, 0)}
queue = deque([(0, 0, 1)]) # row, col, steps
directions = [(0, 1), (1, 0), (1, 1), (-1, -1), (-1, 1), (1, -1), (0, -1), (-1, 0)]
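        # BFS over the 8-connected grid: the first time the bottom-right
        # cell is dequeued, its step count is the shortest path length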
while queue:
row, col, steps = queue.popleft()
if (row, col) == (n - 1, n - 1):
return steps
for dx, dy in directions:
next_row, next_col = row + dy, col + dx
if valid(next_row, next_col) and (next_row, next_col) not in seen:
seen.add((next_row, next_col))
queue.append((next_row, next_col, steps + 1))
return -1
|
[
"noreply@github.com"
] |
tijugeorge.noreply@github.com
|
60d179c2f89aad439a7440bfa1803ec8ad8e05bc
|
3290ccdcb254e347d739238e366705a653aa55ba
|
/app/app/urls.py
|
b5048c3eab9f304100e682b94e13a51c64e9bcb3
|
[
"MIT"
] |
permissive
|
iapurba/recipe-app-api
|
9125fe2e58cf510762bba788b99bc794d31b5d47
|
1ab5bd2388f8651eef5fe9bea38a156668cdce17
|
refs/heads/main
| 2023-02-12T12:59:37.485543
| 2021-01-16T16:48:15
| 2021-01-16T16:48:15
| 326,918,086
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,002
|
py
|
"""app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('api/user/', include('user.urls')),
path('api/recipe/', include('recipe.urls')),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"apurbapanja1@gmail.com"
] |
apurbapanja1@gmail.com
|
8e3bd2675f39d619008fe533e9bf20c68913a478
|
fcec21a964d279aecf4b7818472e3a59ca6ff7d9
|
/apps/blog/templatetags/blog_tags.py
|
31ae1a2fd25df7ae38dc35fa08de351d505782dd
|
[] |
no_license
|
VISVO-TV/blog-with-api-for-mob-app
|
5a9763776809343f7fe662e3417de899c9cb3c3f
|
d2f1ed43f8f01113b2f6cf65bef644e392c5b6c1
|
refs/heads/master
| 2023-03-21T02:03:16.710226
| 2021-03-22T14:31:25
| 2021-03-22T14:31:25
| 350,374,303
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 717
|
py
|
from django import template
from ..models import Post
from django.db.models import Count
from django.utils.safestring import mark_safe
import markdown
register = template.Library()
@register.simple_tag
def total_posts():
return Post.objects.count()
@register.inclusion_tag('blog/post/latest_posts.html')
def show_latest_posts(count=5):
latest_posts = Post.objects.order_by('-publish')[:count]
return {'latest_posts': latest_posts}
@register.simple_tag
def get_most_commented_posts(count=5):
return Post.objects.annotate(total_comment=Count('comments')).order_by('-total_comment')[:count]
@register.filter(name='markdown')
def markdown_format(text):
return mark_safe(markdown.markdown(text))
|
[
"visvo.lider@gmail.com"
] |
visvo.lider@gmail.com
|
72a917446dbc4fd22fca1dc24a576d9f2b62b8c0
|
b14a690e635722d3a6f6578fdfa0e17a5c8132dc
|
/NLTK/3_words_list.py
|
048e4778329d66529ce572c79927a1485e475d5d
|
[] |
no_license
|
Yo-Systems/scripts
|
881023ec0684f85c98e99b67fb2d1c135ef293cf
|
f6d9b034c31114625de90e634ef4899f163c32b3
|
refs/heads/main
| 2023-01-13T02:47:57.457073
| 2020-11-20T06:21:49
| 2020-11-20T06:21:49
| 308,233,548
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
# The original snippet assumes list_of_words is defined elsewhere;
# a small sample is provided here so the example runs.
list_of_words = ['big', 'data', 'hadoop', 'and', 'map', 'reduce']
# Using list comprehension to build sliding windows of three words
List = [list_of_words[i:i + 3]
        for i in range(len(list_of_words) - 2)]
# printing list
print(List)
|
[
"noreply@github.com"
] |
Yo-Systems.noreply@github.com
|
d599a099915df2f3a7b0e84e7bc8922249768aa1
|
9e5f89954fae8ac705d3e721d82b7b72d9fbcbaa
|
/v1/identificador.py
|
ce063a56231d27f0f73a6088b1e9cc337d37a8b5
|
[] |
no_license
|
Diego-18/python-algorithmic-exercises
|
dda950d0fcabd25244ce3ecfc1296d06a7701f5f
|
583907b82c3549a4bff7718d23aa3e0d7be2e4a3
|
refs/heads/main
| 2023-08-12T02:28:49.425894
| 2021-09-26T05:14:07
| 2021-09-26T05:14:07
| 410,439,661
| 0
| 0
| null | null | null | null |
ISO-8859-1
|
Python
| false
| false
| 395
|
py
|
car = raw_input('Enter a character: ')
if 'a' <= car.lower() <= 'z' or car == '_':
    print 'This character is valid in a Python identifier.'
else:
    if not (car < '0' or '9' < car):
        print 'A digit is valid in a Python identifier,',
        print 'as long as it is not the first character.'
    else:
        print 'This character is not valid in a Python identifier.'
|
[
"ingdiegochavez18@gmail.com"
] |
ingdiegochavez18@gmail.com
|
cd192e239ca89ca3b87be8caf3c52cf21a53900b
|
74d31a65c825469d2d0a1d6e7604455ca2fbf0e2
|
/src/galaxy/__init__.py
|
9b3302a5fe954d0b6df2155cb82985ed594d2677
|
[
"MIT"
] |
permissive
|
AHCoder/galaxy-integration-ps2
|
d80116619abc33140d8af5c260ca61ba3d7c263c
|
2d57a094e4447a2016074a42dd4d7b425985a48a
|
refs/heads/master
| 2021-09-27T05:14:12.927921
| 2021-09-14T18:15:20
| 2021-09-14T18:15:20
| 198,490,246
| 49
| 9
|
MIT
| 2019-11-06T16:11:03
| 2019-07-23T18:50:57
|
Python
|
UTF-8
|
Python
| false
| false
| 155
|
py
|
import logging
logging.getLogger(__name__).setLevel(logging.INFO)
__path__: str = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
|
[
"AHCoder@users.noreply.github.com"
] |
AHCoder@users.noreply.github.com
|
95e6f59e7a2a6067468883e2555c45f701c3af2c
|
71723ab431d071af2a20a6df748c4cfaa94a4ff6
|
/src/pytest_cpp/error.py
|
da77df59894d6364602604ddcfb0143f421d5c6b
|
[
"MIT"
] |
permissive
|
pytest-dev/pytest-cpp
|
7aa20409b4e926785aa39e07f728a7122c67192e
|
00ffd823a2e7c6d2eff06274c6bda68defdfa573
|
refs/heads/master
| 2023-08-29T15:37:18.733648
| 2023-08-25T13:06:29
| 2023-08-25T13:06:29
| 22,797,415
| 111
| 32
|
MIT
| 2023-09-12T20:01:19
| 2014-08-09T23:32:11
|
C++
|
UTF-8
|
Python
| false
| false
| 3,559
|
py
|
from __future__ import annotations
import os
import string
from abc import ABC
from abc import abstractmethod
from typing import Sequence
from typing import Tuple
from _pytest._code.code import ReprFileLocation
from _pytest._io import TerminalWriter
class CppFailureError(Exception):
"""
Should be raised by test Facades when a test fails.
"""
def __init__(self, failures: Sequence[CppTestFailure]) -> None:
self.failures = list(failures)
Markup = Tuple[str, ...]
class CppTestFailure(ABC):
"""
Represents a failure in a C++ test. Each framework
must implement the abstract functions to build the final exception
message that will be displayed in the terminal.
"""
@abstractmethod
def get_lines(self) -> list[tuple[str, Markup]]:
"""
Returns list of (line, markup) that will be displayed to the user,
where markup can be a sequence of color codes from
TerminalWriter._esctable:
'black', 'red', 'green', 'yellow',
'blue', 'purple', 'cyan', 'white',
'bold', 'light', 'blink', 'invert'
"""
@abstractmethod
def get_file_reference(self) -> tuple[str, int]:
"""
Return tuple of filename, linenum of the failure.
"""
class CppFailureRepr(object):
"""
"repr" object for pytest that knows how to print a CppFailure instance
into both terminal and files.
"""
failure_sep = "---"
def __init__(self, failures: Sequence[CppTestFailure]) -> None:
self.failures = list(failures)
def __str__(self) -> str:
reprs = []
for failure in self.failures:
pure_lines = "\n".join(x[0] for x in failure.get_lines())
repr_loc = self._get_repr_file_location(failure)
reprs.append("%s\n%s" % (pure_lines, repr_loc))
return self.failure_sep.join(reprs)
def _get_repr_file_location(self, failure: CppTestFailure) -> ReprFileLocation:
filename, linenum = failure.get_file_reference()
return ReprFileLocation(filename, linenum, "C++ failure")
def toterminal(self, tw: TerminalWriter) -> None:
for index, failure in enumerate(self.failures):
filename, linenum = failure.get_file_reference()
code_lines = get_code_context_around_line(filename, linenum)
for line in code_lines:
tw.line(line, white=True, bold=True) # pragma: no cover
indent = get_left_whitespace(code_lines[-1]) if code_lines else ""
for line, markup in failure.get_lines():
markup_params = {m: True for m in markup}
tw.line(indent + line, **markup_params)
location = self._get_repr_file_location(failure)
location.toterminal(tw)
if index != len(self.failures) - 1:
tw.line(self.failure_sep, cyan=True)
def get_code_context_around_line(filename: str, linenum: int) -> list[str]:
"""
return code context lines, with the last line being the line at
linenum.
"""
if os.path.isfile(filename):
index = linenum - 1
with open(filename) as f:
index_above = index - 2
index_above = index_above if index_above >= 0 else 0
return [x.rstrip() for x in f.readlines()[index_above : index + 1]]
return []
def get_left_whitespace(line: str) -> str:
result = ""
for c in line:
if c in string.whitespace:
result += c
else:
break
return result
|
[
"nicoddemus@gmail.com"
] |
nicoddemus@gmail.com
|
7374a49abd5baf3e01c56a441e5bce07a07eb8ab
|
f6465cac733fec018d4e9d32591254d476aced7c
|
/countdown.py
|
3db55bd69d49e2ed98805bb9582bb95c47fceefe
|
[
"CC-BY-NC-SA-3.0",
"Apache-2.0",
"CC-BY-NC-4.0"
] |
permissive
|
ferranpons/ccc_pumptrack_attack
|
22c62592e2f0826288c0662edc5493104820411a
|
dbe8662c617f353821422642cf95255cef167fc6
|
refs/heads/master
| 2021-08-17T19:23:54.927727
| 2020-04-08T14:29:04
| 2020-04-08T14:29:04
| 155,850,792
| 0
| 0
|
Apache-2.0
| 2020-04-08T14:29:05
| 2018-11-02T10:29:00
|
Python
|
UTF-8
|
Python
| false
| false
| 1,586
|
py
|
import pygame
import colors
from gameState import GameState
from textUtils import text_format, MENU_FONT
class CountDown:
start_ticks = 0
state = GameState.STARTING
countdown = 5
time_in_millis = 0
def __init__(self):
self.start_ticks = pygame.time.get_ticks()
self.start_text = text_format("RACE STARTS", MENU_FONT, 25, colors.white)
self.start_text_rect = self.start_text.get_rect()
self.countdown_text = text_format(str(self.countdown), MENU_FONT, 25, colors.white)
self.countdown_text_rect = self.countdown_text.get_rect()
def set_state(self, game_state):
self.state = game_state
    def update(self):
        if self.state == GameState.STARTING:
            # pygame ticks are milliseconds; divide to get elapsed seconds
            self.time_in_millis = (pygame.time.get_ticks() - self.start_ticks) / 1000
        else:
            self.time_in_millis = 0
        if self.time_in_millis >= 1:
            self.countdown -= 1
            # Restart the one-second window; merely zeroing time_in_millis
            # would let the counter drop on every subsequent frame
            self.start_ticks = pygame.time.get_ticks()
def draw(self, screen, screen_rect, background, background_in_alpha):
self.countdown_text = text_format(str(self.countdown), MENU_FONT, 25, colors.white)
self.countdown_text_rect = self.countdown_text.get_rect()
screen.blit(background, screen_rect)
screen.blit(background_in_alpha, screen_rect)
screen.blit(self.start_text, (screen_rect.size[0] / 2 - self.start_text_rect.size[0] / 2, screen_rect.size[1] / 2))
screen.blit(self.countdown_text,
(screen_rect.size[0] / 2 - self.countdown_text_rect.size[0] / 2, screen_rect.size[1] / 2 + 50))
|
[
"ferranpons@gmail.com"
] |
ferranpons@gmail.com
|
a34440377e54af5af4478fddbf7d0a346c197b34
|
18b51fdd2549c49bda5cc39b342669125819de58
|
/gallery/views.py
|
57bda6daf8c650e5295db07814ef4ae767406601
|
[] |
no_license
|
gschultz64/artHub
|
51beeb5066358349e008c5501bab33214f87c501
|
2abebb8a766a1f94da510c404a40ea4e80048acc
|
refs/heads/master
| 2022-12-25T23:28:37.486249
| 2018-08-14T18:08:59
| 2018-08-14T18:08:59
| 144,074,022
| 0
| 0
| null | 2022-12-08T02:22:15
| 2018-08-08T22:27:15
|
Python
|
UTF-8
|
Python
| false
| false
| 3,743
|
py
|
from django.shortcuts import render, redirect
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django import forms
from django.views import View
from django.conf import settings
from django.contrib import messages
from django.core.files.storage import FileSystemStorage
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from .models import Media, User
from .forms import LoginForm, SignUpForm, UploadForm
import requests
# Create your views here.
def index(request):
media = Media.objects.all()
return render(request, 'index.html', {'media': media})
def show(request, media_id):
media = Media.objects.get(id=media_id)
return render(request, 'show.html', {'media_id': media_id, 'media': media})
def login_view(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
u = form.cleaned_data['username']
p = form.cleaned_data['password']
user = authenticate(username=u, password=p)
if user is not None:
if user.is_active:
login(request, user)
messages.success(request, 'You successfully logged in!')
return HttpResponseRedirect('/')
else:
return HttpResponseRedirect('/login')
else:
messages.error(request, 'Your username and/or password is incorrect.')
else:
form = LoginForm()
return render(request, 'login.html', {'form': form})
def signup(request):
if request.method == 'POST':
form = SignUpForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=username, password=raw_password)
login(request, user)
messages.success(request, 'You have signed up for Art Hub and are logged in!')
return redirect('index')
else:
form = SignUpForm()
return render(request, 'signup.html', {'form': form})
@login_required
def profile(request, username):
user = User.objects.get(username=username)
media = Media.objects.filter(user_id=user)
return render(request, 'profile.html', {'username': username, 'media': media, })
@login_required
def upload(request, username):
user = User.objects.get(username=username)
media = Media.objects.filter(user_id_id=user.id)
if request.method == 'POST':
form = UploadForm(request.POST, request.FILES)
if form.is_valid():
print('user:', user, ' user_id: ', user.id)
instance = Media(file=request.FILES['file'], user_id_id=user.id,
name=form.cleaned_data.get('name'), description=form.cleaned_data.get('description'))
instance.save()
messages.success(request, 'Your upload was successful!')
return redirect('upload', username)
else:
form = UploadForm()
return render(request, 'upload.html', {'form': form, 'username': username, 'media': media})
def like_img(request):
media_id = request.GET.get('media_id', None)
likes = 0
if media_id:
media = Media.objects.get(id=int(media_id))
if media is not None:
likes = media.likes + 1
media.likes = likes
media.save()
return HttpResponse(likes)
def chat(request):
return render(request, 'chat.html')
def logout_view(request):
logout(request)
return HttpResponseRedirect('/')
|
[
"genschultz@gmail.com"
] |
genschultz@gmail.com
|
8eae618dd80fb9c1f441ad756c97b37bf196dcd9
|
59166105545cdd87626d15bf42e60a9ee1ef2413
|
/test/test_athletics_api.py
|
21b851ea7f9d1ec00d0e2a0d763180ba521fa921
|
[] |
no_license
|
mosoriob/dbpedia_api_client
|
8c594fc115ce75235315e890d55fbf6bd555fa85
|
8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc
|
refs/heads/master
| 2022-11-20T01:42:33.481024
| 2020-05-12T23:22:54
| 2020-05-12T23:22:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 957
|
py
|
# coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import dbpedia
from dbpedia.api.athletics_api import AthleticsApi # noqa: E501
from dbpedia.rest import ApiException
class TestAthleticsApi(unittest.TestCase):
"""AthleticsApi unit test stubs"""
def setUp(self):
self.api = dbpedia.api.athletics_api.AthleticsApi() # noqa: E501
def tearDown(self):
pass
def test_athleticss_get(self):
"""Test case for athleticss_get
List all instances of Athletics # noqa: E501
"""
pass
def test_athleticss_id_get(self):
"""Test case for athleticss_id_get
Get a single Athletics by its id # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
[
"maxiosorio@gmail.com"
] |
maxiosorio@gmail.com
|
6489b6395ab64e7a340b118348a3b0f4f27afa6b
|
bdf1dde23024e854f31da05ca6d5fe9f57930cad
|
/machineLearning/code/nbReg.py
|
80f3c009a489a8e0707f038a655044cc8e511867
|
[] |
no_license
|
setupred/iit-asgn
|
4d8400084025854734c4744a493f8fe2dd4f850e
|
121c07d3f30ab5b8e5eb79bc78d472621a1f5615
|
refs/heads/master
| 2020-04-02T09:44:01.338280
| 2013-08-05T11:34:53
| 2013-08-05T11:34:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,302
|
py
|
'''
Decision Tree Regression
'''
from time import gmtime, strftime
from collections import defaultdict
from sklearn import tree
import csv
import sys
def prob(pos,neg):
if( (pos+neg) == 0):
return -1
else : return (pos*1.0)/(pos+neg)
#'''
train = csv.reader(open(sys.argv[1],'r'))
test = csv.reader(open(sys.argv[2],'r'))
'''
train = csv.reader(open('dummy_data/tr.csv','r'))
test = csv.reader(open('dummy_data/ts.csv','r'))
'''
ques_ind = 4
track_ind = 6
subtrack_ind = 7
game_ind = 13
X = []
y = []
for line in train:
if(line[0]=='correct' or line[1] == '3' or line[1] == '4' ): continue
res = int(line[0])
user = int(line[2])
st = int(line[subtrack_ind])
t = int(line[track_ind])
q = int(line[ques_ind])
g= (line[subtrack_ind])
X.append([user,st,q])
y.append(res)
print "data ready"
clf = tree.DecisionTreeRegressor()
clf = clf.fit(X,y)
print "training done"
ques_ind = 2
track_ind = 4
subtrack_ind = 5
game_ind = 11
time = strftime("%d:%b:%Y-%H:%M:%S", gmtime())
final = open('result-'+str(time),'w')
for line in test:
if(line[0]=='user_id' ): continue
user = int(line[0])
st = int(line[subtrack_ind])
t = int(line[track_ind])
q = int(line[ques_ind])
g= (line[subtrack_ind])
    final.write(str(user) + "," + str(clf.predict([[user, st, q]])[0]) + "\n")  # predict expects a 2D array of samples
final.close()
print "end"
|
[
"pratap130492@gmail.com"
] |
pratap130492@gmail.com
|
e7881d34621184fd7b44e1969765877ae3568079
|
8d3c37f8d66c3d49a4318d3694e1fe01fa324867
|
/build/exe.win32-3.6/venv/Scripts/pip-script.py
|
4986ff9209f1d8822a36162f98cb037d1d44d905
|
[] |
no_license
|
yessikameliza/proyecto_final_softwareIII
|
80f55ca0b03054c06fec7ef025cb191a29e7c018
|
ffce31b026790cadd83c628e0872a2c38cd5d7ef
|
refs/heads/master
| 2020-03-28T14:39:54.816050
| 2019-03-30T00:19:10
| 2019-03-30T00:19:10
| 148,509,543
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
#!"C:\Users\alexa\Music\Software 3\proyecto_final_softwareIII\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
)
|
[
"ealealr@uqvirtual.edu.co"
] |
ealealr@uqvirtual.edu.co
|
987738b06e1aa35e4bc300f0033658d71515f911
|
b840b86b490cfc6686bc0080780799c944682032
|
/test_api.py
|
822da321d98dedfd58e794adf1924934b3ee0a42
|
[] |
no_license
|
Swich1987/PythonImhioTestTask
|
5fb458e7edff12bef069bd46976d8c68003fa9a4
|
8e86c64e00b16782ce50a690345101958a6acb25
|
refs/heads/master
| 2020-04-30T18:51:43.540185
| 2019-03-22T12:50:59
| 2019-03-22T12:50:59
| 176,955,160
| 0
| 0
| null | 2019-03-21T14:58:09
| 2019-03-21T13:55:15
|
Python
|
UTF-8
|
Python
| false
| false
| 8,389
|
py
|
"""
Testing our API.
"""
import sys
import random
import string
import csv
import unittest
import requests
URL = 'http://localhost:8078/get_config'
SUCCESS_CODE = 200
BAD_REQUEST_CODE = 400
RAINY_DAY_REQUESTS_NUMBER = 3
CSV_DEVELOPE_FILENAME = 'sunny_days_develop_mr_robot_configs.csv'
CSV_VPN_FILENAME = 'sunny_days_test_vpn_configs.csv'
REQUEST_DEVELOP_EXAMPLE = {'Type': 'Develop.mr_robot', 'Data': 'YHySKtEhYm'}
RESPONSE_DEVELOP_EXAMPLE = {
'Data': 'YHySKtEhYm',
'Host': 'WKyh',
'Port': 46836,
'Database': 'vkNMTN',
'User': 'zmPBh',
'Password': 'teBuZLhZ',
'Schema': 'YUEZgVvd'
}
REQUEST_TEST_VPN_EXAMPLE = {
'Type': 'Test.vpn',
'Data': 'cHAIJmCYyn'
}
RESPONSE_TEST_VPN_EXAMPLE = {
"Data": "cHAIJmCYyn",
"Host": "fhhp",
"Port": 12210,
"Virtualhost": "HkJaQ",
"User": "fPMUz",
"Password": "RYEeVKFn"
}
TEST_REQUESTS_AND_RESPONSES_LIST = [
(
REQUEST_DEVELOP_EXAMPLE,
RESPONSE_DEVELOP_EXAMPLE
),
(
REQUEST_TEST_VPN_EXAMPLE,
RESPONSE_TEST_VPN_EXAMPLE
)
]
RESPONSE_BAD_INPUT = {'error': 'Bad input'}
RESPONSE_NO_MODEL = {'error': 'config model not present'}
RESPONSE_NOT_FOUND = {'error': 'record not found'}
TEST_UNEXPECTED_BEHAVIOURS_LIST = [
(
REQUEST_DEVELOP_EXAMPLE,
SUCCESS_CODE,
RESPONSE_DEVELOP_EXAMPLE
),
(
None,
BAD_REQUEST_CODE,
RESPONSE_BAD_INPUT
),
(
'',
BAD_REQUEST_CODE,
RESPONSE_BAD_INPUT
),
(
{'Type1': '', 'Data': ''},
BAD_REQUEST_CODE,
RESPONSE_NO_MODEL
),
(
{'Type': '', 'Data1': ''},
BAD_REQUEST_CODE,
RESPONSE_NO_MODEL
),
(
{'Type': '', 'Data': ''},
BAD_REQUEST_CODE,
RESPONSE_NO_MODEL
),
(
{'Type': '', 'Data': 'YHySKtEhYm'},
BAD_REQUEST_CODE,
RESPONSE_NO_MODEL
),
(
{'Type': 'Develop.mr_robot', 'Data': ''},
BAD_REQUEST_CODE,
RESPONSE_NOT_FOUND
),
(
{'Type': 'Develop.mr_robota', 'Data': 'YHySKtEhYm'},
BAD_REQUEST_CODE,
RESPONSE_NO_MODEL
),
(
{'Type': 'Develop.mr_robot', 'Data': 'YHySKtEhYma'},
BAD_REQUEST_CODE,
RESPONSE_NOT_FOUND
),
(
{'Type': 123, 'Data': 'YHySKtEhYm'},
BAD_REQUEST_CODE,
RESPONSE_BAD_INPUT
),
(
{'Type': 'Develop.mr_robot', 'Data': 123},
BAD_REQUEST_CODE,
RESPONSE_BAD_INPUT
)
]
def make_request(json_request, url=URL):
"""Send json request, return response object."""
return requests.post(url=url, json=json_request)
class BaseCompareTesting(unittest.TestCase):
"""Parent test case for check compliance in request and response."""
def _test_comparing_response_data(self, test_req_resp_data):
"""Comparing with test responses."""
for test_req, test_resp in test_req_resp_data:
with self.subTest(test_request=test_req, test_response=test_resp):
print('request: ', test_req)
resp = make_request(json_request=test_req)
print('response: ', resp.json())
self.assertEqual(resp.json(), test_resp)
class TestTechnicalTask(BaseCompareTesting):
"""Main test case for check compliance with the technical task."""
def test_response_status_code(self):
"""Checking response status."""
resp = make_request(json_request=REQUEST_DEVELOP_EXAMPLE)
self.assertEqual(resp.status_code, SUCCESS_CODE)
def test_comparing_response_data(self):
"""Comparing with technical test responses."""
print('\nCompare response with test response for technical requests')
self._test_comparing_response_data(
test_req_resp_data=TEST_REQUESTS_AND_RESPONSES_LIST)
class TestSunnyDays(BaseCompareTesting):
"""Test case for sunny day scenarios."""
@staticmethod
def _load_csv(csv_filename):
csv_list = []
with open(csv_filename, newline='') as csvfile:
csvreader = csv.DictReader(csvfile, delimiter='|')
for row in csvreader:
# skip whitespaces using strip
stripped_row = {elem.strip():row[elem].strip() for elem in row}
csv_list.append(stripped_row)
return csv_list
@classmethod
def _generate_test_list(cls, csv_filename):
"""
Generate test list from csv raw data in that format:
[ (request_0, response_0), (request_1, response_1), ... ]
"""
csv_data_list = cls._load_csv(csv_filename)
if csv_filename == CSV_VPN_FILENAME:
type_string = 'Test.vpn'
else:
type_string = 'Develop.mr_robot'
req_resp_data_list = []
for row in csv_data_list:
test_req = {
'Type': type_string,
'Data': row['data']
}
        # upper-case the first letter of each key to match the response format
        test_resp = {elem[0].upper() + elem[1:]: row[elem] for elem in row}
        # the CSV stores the port as text; responses carry it as an int
test_resp['Port'] = int(test_resp['Port'])
req_resp_data_list.append((test_req, test_resp))
return req_resp_data_list
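    # Hedged illustration of the mapping above, reusing the technical-task
    # example values: a develop-config CSV row with data 'YHySKtEhYm'
    # becomes the pair
    #   ({'Type': 'Develop.mr_robot', 'Data': 'YHySKtEhYm'},
    #    {'Data': 'YHySKtEhYm', 'Host': 'WKyh', 'Port': 46836, ...})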
def setUp(self):
self.test_develop_mr_robot_configs_data = TestSunnyDays._generate_test_list(
CSV_DEVELOPE_FILENAME)
self.test_vpn_configs = TestSunnyDays._generate_test_list(CSV_VPN_FILENAME)
def test_response_data(self):
"""Comparing responses with sunny day's scenarios requests."""
print('\nCompare response with test response for sunny days requests')
for test_data in (self.test_develop_mr_robot_configs_data, self.test_vpn_configs):
with self.subTest(test_data=test_data):
print('\nCompare response with test response',
'for next sunny days requests:')
self._test_comparing_response_data(test_req_resp_data=test_data)
class TestRainyDays(BaseCompareTesting):
"""Test case for rainy day scenarios."""
def setUp(self):
random_test_requests = []
for _ in range(RAINY_DAY_REQUESTS_NUMBER):
for type_string in 'Develop.mr_robot', 'Test.vpn':
# k=10 - len of generated string
random_str = ''.join(random.choices(string.ascii_uppercase +
string.digits, k=10))
random_dict = {
'Type': type_string,
'Data': random_str
}
random_test_requests.append(random_dict)
self.random_test_requests = random_test_requests
def test_response_data(self):
"""Comparing responses with rainy day's scenarios requests."""
print('\nCompare response with test response for rainy days requests')
answer = {'error': 'record not found'}
for test_req in self.random_test_requests:
with self.subTest(test_request=test_req):
print('request: ', test_req)
resp = make_request(json_request=test_req)
print('response: ', resp.json())
self.assertEqual(resp.json(), answer)
class TestUnexpectedBehaviour(BaseCompareTesting):
"""Test case for unexpected behaviour."""
def setUp(self):
self.unexpected_test_requests = TEST_UNEXPECTED_BEHAVIOURS_LIST
self.expected_test_response = []
def test_unexpected_requests(self):
"""Comparing responses with unexpected requests."""
print('\nComparing responses with unexpected requests.')
output_string = '{0!s:>57} => {1!s:^11}|{2!s:<}'
print(output_string.format('request', 'status code', 'response'))
for request, status_code, exp_response in self.unexpected_test_requests:
with self.subTest(test_request=request, status_code=status_code,
exp_response=exp_response):
response = make_request(request)
self.assertEqual(response.status_code, status_code)
self.assertEqual(response.json(), exp_response)
print(output_string.format(request, response.status_code,
response.json()))
if __name__ == '__main__':
unittest.main()
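# The suite can be run directly or through unittest discovery; subTest keeps
# the remaining requests running after an individual comparison fails.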
|
[
"swich1987@mail.ru"
] |
swich1987@mail.ru
|
fca8f2fc674b7f037b327f7b907d9900aca9ba9c
|
f2721656f488c512fcf8e182efaafb88a1d70ceb
|
/matrixmul.py
|
a085bb20c14dda36bd3dad9e4fd30ab1c51a27f0
|
[] |
no_license
|
yangzhenwnt/louplus
|
5237701ff11e6091cced5e8e665d68fb7ffadcdb
|
9700a4aaaa6c780eb686c676565eb2d2aff48b8c
|
refs/heads/master
| 2021-03-17T21:49:54.480339
| 2020-03-25T03:32:06
| 2020-03-25T03:32:06
| 247,020,677
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 521
|
py
|
#!/usr/bin/env python3
n = int(input("Enter the value of n: "))
print("Enter value for the Matrix A")
a = []
for i in range(n):
a.append([int(x) for x in input().split()])
print("Enter values for the Matrix B")
b = []
for i in range(n):
b.append([int(x) for x in input().split()])
c = []
for i in range(n):
    # true matrix product: c[i][j] = sum over k of a[i][k] * b[k][j]
    c.append([sum(a[i][k] * b[k][j] for k in range(n)) for j in range(n)])
print("After matrix multiplication")
print("-" * 7 * n)
for x in c:
for y in x:
print(str(y).rjust(5),end=' ')
print()
print("-" * 7 * n)
|
[
"yangzhenwnt@163.com"
] |
yangzhenwnt@163.com
|
88c46763d7ccef9ca626cf9355ed3c62fa20d20d
|
eeffdf9d58189b60506e1414ed64fd74367eb6d0
|
/tracking.py
|
be56b7da2ffdbcdb374ea8655542a8bf3b60ccd2
|
[] |
no_license
|
dcmendoza2/Digital_Image_Processing
|
e653f123789f9bd899e7b3778b920b27850ca2b8
|
693983345f350f0304b8229e8717e071c26efbf3
|
refs/heads/main
| 2023-04-30T06:57:19.194680
| 2021-05-12T03:57:50
| 2021-05-12T03:57:50
| 366,585,663
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 836
|
py
|
import cv2
cap = cv2.VideoCapture(0)
tracker = cv2.TrackerMIL_create()
success, img = cap.read()
bbox = cv2.selectROI("Tracking",img,False)
tracker.init(img, bbox)
def drawBox(img, bbox):
    x, y, w, h = int(bbox[0]), int(bbox[1]), int(bbox[2]), int(bbox[3])
    cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 255), 3, 1)
while True:
    timer = cv2.getTickCount()
    success, img = cap.read()
    success, bbox = tracker.update(img)  # overwrites the frame-read flag with the tracker status
    if success:
        drawBox(img, bbox)
    else:
        cv2.putText(img, "Lost Object", (75, 75), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 50, 255), 2)
    fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer)
    cv2.putText(img, "FPS: " + str(int(fps)), (75, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
    cv2.imshow("Tracking", img)
    if cv2.waitKey(1) & 0xff == ord('q'):
        break
# release the camera and close the preview window once tracking stops
cap.release()
cv2.destroyAllWindows()
|
[
"ddcmendoza@gmail.com"
] |
ddcmendoza@gmail.com
|
35da978a59322a15b0812511052fcc1a3ccd2388
|
fd971f08f5603eb3651da442678e82a1c21ae098
|
/qgs/diagnostics/streamfunctions.py
|
2f3c58dce0f5824bb19f119939a0a71668381ade
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
ChanJeunlam/qgs
|
78111cd43caf67ca2474aa27f07d20990bbd3321
|
b4f9c6bb443d3846600e8bc08560dcdb398818b9
|
refs/heads/master
| 2023-09-02T10:40:55.299590
| 2021-11-23T09:41:59
| 2021-11-23T09:41:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,965
|
py
|
"""
Diagnostic streamfunction classes
=================================
Classes defining streamfunction fields diagnostics.
Description of the classes
--------------------------
* :class:`AtmosphericStreamfunctionDiagnostic`: General base class for atmospheric streamfunction fields diagnostic.
* :class:`LowerLayerAtmosphericStreamfunctionDiagnostic`: Diagnostic giving the lower layer atmospheric streamfunction fields :math:`\\psi^3_{\\rm a}`.
* :class:`UpperLayerAtmosphericStreamfunctionDiagnostic`: Diagnostic giving the upper layer atmospheric streamfunction fields :math:`\\psi^1_{\\rm a}`.
* :class:`MiddleAtmosphericStreamfunctionDiagnostic`: Diagnostic giving the middle atmospheric streamfunction fields :math:`\\psi_{\\rm a}`.
* :class:`OceanicLayerStreamfunctionDiagnostic`: Diagnostic giving the oceanic layer streamfunction fields :math:`\\psi_{\\rm o}`.
"""
import warnings
import numpy as np
from scipy.integrate import dblquad
import matplotlib.pyplot as plt
from qgs.diagnostics.base import FieldDiagnostic
class AtmosphericStreamfunctionDiagnostic(FieldDiagnostic):
"""General base class for atmospheric streamfunction fields diagnostic.
Provide a spatial gridded representation of the fields.
This is an `abstract base class`_, it must be subclassed to create new diagnostics!
.. _abstract base class: https://docs.python.org/3/glossary.html#term-abstract-base-class
Parameters
----------
model_params: QgParams
An instance of the model parameters.
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
Attributes
----------
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
"""
def __init__(self, model_params, delta_x=None, delta_y=None, dimensional=True):
FieldDiagnostic.__init__(self, model_params, dimensional)
self._configure(delta_x=delta_x, delta_y=delta_y)
self._default_plot_kwargs['cmap'] = plt.get_cmap('jet')
def _compute_grid(self, delta_x=None, delta_y=None):
if delta_x is None:
ams = self._model_params.ablocks
if ams is None:
warnings.warn("AtmosphericStreamfunctionDiagnostic: Unable to configure the grid automatically. Atmospheric wavenumbers information not " +
"present in the model's parameters ! Please call the compute_grid method with the delta_x and delta_y parameters.")
return 1
xwn = [ams[i][0] for i in range(len(ams))]
mxwn = max(xwn)
n_point_x = 4 * mxwn + 2
else:
n_point_x = int(np.ceil((2 * np.pi / self._model_params.scale_params.n) / delta_x) + 1)
if delta_y is None:
ams = self._model_params.ablocks
if ams is None:
warnings.warn("AtmosphericStreamfunctionDiagnostic: Unable to configure the grid automatically. Atmospheric wavenumbers information not " +
"present in the model's parameters ! Please call the compute_grid method with the delta_x and delta_y parameters.")
return 1
ywn = [ams[i][1] for i in range(len(ams))]
mywn = max(ywn)
n_point_y = 4 * mywn + 2
else:
n_point_y = int(np.ceil(np.pi / delta_y) + 1)
x = np.linspace(0., 2 * np.pi / self._model_params.scale_params.n, n_point_x)
y = np.linspace(0., np.pi, n_point_y)
self._X, self._Y = np.meshgrid(x, y)
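    # Example: with a maximum wavenumber of 2 in a direction, the automatic
    # grid above uses 4 * 2 + 2 = 10 points in that direction.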
def _configure(self, delta_x=None, delta_y=None):
self._compute_grid(delta_x, delta_y)
basis = self._model_params.atmospheric_basis
self._grid_basis = list()
for func in basis.num_functions():
self._grid_basis.append(func(self._X, self._Y))
self._grid_basis = np.array(self._grid_basis)
class LowerLayerAtmosphericStreamfunctionDiagnostic(AtmosphericStreamfunctionDiagnostic):
"""Diagnostic giving the lower layer atmospheric streamfunction fields :math:`\\psi^3_{\\rm a}`.
Computed as :math:`\\psi^3_{\\rm a} = \\psi_{\\rm a} - \\theta_{\\rm a}` where :math:`\\psi_{\\rm a}` and :math:`\\theta_{\\rm a}` are respectively the barotropic and baroclinic streamfunctions.
See also the :ref:`files/model/atmosphere:Atmospheric component` and :ref:`files/model/oro_model:Mid-layer equations and the thermal wind relation` sections.
Parameters
----------
model_params: QgParams
An instance of the model parameters.
delta_x: float, optional
Spatial step in the zonal direction `x` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
delta_y: float, optional
Spatial step in the meridional direction `y` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
dimensional: bool, optional
Indicate if the output diagnostic must be dimensionalized or not.
Default to `True`.
Attributes
----------
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
"""
def __init__(self, model_params, delta_x=None, delta_y=None, dimensional=True):
AtmosphericStreamfunctionDiagnostic.__init__(self, model_params, delta_x, delta_y, dimensional)
self._plot_title = r'Atmospheric $\psi_{\rm a}^3$ streamfunction'
self._plot_units = r" (in " + self._model_params.get_variable_units(0) + r")"
def _get_diagnostic(self, dimensional):
natm = self._model_params.nmod[0]
psi = np.swapaxes(self._data[:natm, ...].T @ np.swapaxes(self._grid_basis, 0, 1), 0, 1)
theta = np.swapaxes(self._data[natm:2*natm, ...].T @ np.swapaxes(self._grid_basis, 0, 1), 0, 1)
psi3 = psi - theta
if dimensional:
self._diagnostic_data = psi3 * self._model_params.streamfunction_scaling
self._diagnostic_data_dimensional = True
else:
self._diagnostic_data = psi3
self._diagnostic_data_dimensional = False
return self._diagnostic_data
class UpperLayerAtmosphericStreamfunctionDiagnostic(AtmosphericStreamfunctionDiagnostic):
"""Diagnostic giving the upper layer atmospheric streamfunction fields :math:`\\psi^1_{\\rm a}`.
Computed as :math:`\\psi^1_{\\rm a} = \\psi_{\\rm a} + \\theta_{\\rm a}` where :math:`\\psi_{\\rm a}` and :math:`\\theta_{\\rm a}` are
respectively the barotropic and baroclinic streamfunctions.
See also the :ref:`files/model/atmosphere:Atmospheric component` and :ref:`files/model/oro_model:Mid-layer equations and the thermal wind relation` sections.
Parameters
----------
model_params: QgParams
An instance of the model parameters.
delta_x: float, optional
Spatial step in the zonal direction `x` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
delta_y: float, optional
Spatial step in the meridional direction `y` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
dimensional: bool, optional
Indicate if the output diagnostic must be dimensionalized or not.
Default to `True`.
Attributes
----------
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
"""
def __init__(self, model_params, delta_x=None, delta_y=None, dimensional=True):
AtmosphericStreamfunctionDiagnostic.__init__(self, model_params, delta_x, delta_y, dimensional)
self._plot_title = r'Atmospheric $\psi_{\rm a}^1$ streamfunction'
self._plot_units = r" (in " + self._model_params.get_variable_units(0) + r")"
def _get_diagnostic(self, dimensional):
natm = self._model_params.nmod[0]
psi = np.swapaxes(self._data[:natm, ...].T @ np.swapaxes(self._grid_basis, 0, 1), 0, 1)
theta = np.swapaxes(self._data[natm:2*natm, ...].T @ np.swapaxes(self._grid_basis, 0, 1), 0, 1)
        psi1 = psi + theta
        if dimensional:
            self._diagnostic_data = psi1 * self._model_params.streamfunction_scaling
            self._diagnostic_data_dimensional = True
        else:
            self._diagnostic_data = psi1
            self._diagnostic_data_dimensional = False
return self._diagnostic_data
class MiddleAtmosphericStreamfunctionDiagnostic(AtmosphericStreamfunctionDiagnostic):
"""Diagnostic giving the middle atmospheric streamfunction fields :math:`\\psi_{\\rm a}` at 500hPa, i.e. the barotropic streamfunction of the system.
See also :ref:`files/model/oro_model:Mid-layer equations and the thermal wind relation` sections.
Parameters
----------
model_params: QgParams
An instance of the model parameters.
delta_x: float, optional
Spatial step in the zonal direction `x` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
delta_y: float, optional
Spatial step in the meridional direction `y` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
dimensional: bool, optional
Indicate if the output diagnostic must be dimensionalized or not.
Default to `True`.
geopotential: bool, optional
Dimensionalize the field in geopotential height (in meter).
Default to `False`.
Attributes
----------
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
"""
def __init__(self, model_params, delta_x=None, delta_y=None, dimensional=True, geopotential=False):
AtmosphericStreamfunctionDiagnostic.__init__(self, model_params, delta_x, delta_y, dimensional)
if geopotential:
self._plot_title = r'Atmospheric 500hPa geopotential height'
self._plot_units = r" (in m)"
else:
self._plot_title = r'Atmospheric 500hPa $\psi_{\rm a}$ streamfunction'
self._plot_units = r" (in " + self._model_params.get_variable_units(0) + r")"
self._default_plot_kwargs['cmap'] = plt.get_cmap('gist_rainbow_r')
self.geopotential = geopotential
if geopotential:
self._color_bar_format = False
def _get_diagnostic(self, dimensional):
natm = self._model_params.nmod[0]
psi = np.swapaxes(self._data[:natm, ...].T @ np.swapaxes(self._grid_basis, 0, 1), 0, 1)
factor = 1.
if dimensional:
factor *= self._model_params.streamfunction_scaling
self._diagnostic_data_dimensional = True
if self.geopotential:
factor *= self._model_params.geopotential_scaling
else:
self._diagnostic_data_dimensional = False
self._diagnostic_data = psi * factor
return self._diagnostic_data
class OceanicStreamfunctionDiagnostic(FieldDiagnostic):
"""General base class for oceanic streamfunction fields diagnostic.
Provide a spatial gridded representation of the fields.
Parameters
----------
model_params: QgParams
An instance of the model parameters.
delta_x: float, optional
Spatial step in the zonal direction `x` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
delta_y: float, optional
Spatial step in the meridional direction `y` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
Attributes
----------
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
"""
def __init__(self, model_params, delta_x=None, delta_y=None, dimensional=True):
FieldDiagnostic.__init__(self, model_params, dimensional)
self._configure(delta_x=delta_x, delta_y=delta_y)
self._default_plot_kwargs['cmap'] = plt.get_cmap('jet')
def _compute_grid(self, delta_x=None, delta_y=None):
if delta_x is None:
oms = self._model_params.oblocks
if oms is None:
warnings.warn("OceanicStreamfunctionDiagnostic: Unable to configure the grid automatically. Oceanic wavenumbers information not " +
"present in the model's parameters ! Please call the compute_grid method with the delta_x and delta_y parameters.")
return 1
xwn = [oms[i][0] for i in range(len(oms))]
mxwn = max(xwn)
n_point_x = 4 * mxwn + 2
else:
n_point_x = int(np.ceil((2 * np.pi / self._model_params.scale_params.n) / delta_x) + 1)
if delta_y is None:
oms = self._model_params.oblocks
if oms is None:
warnings.warn("OceanicStreamfunctionDiagnostic: Unable to configure the grid automatically. Oceanic wavenumbers information not " +
"present in the model's parameters ! Please call the compute_grid method with the delta_x and delta_y parameters.")
return 1
ywn = [oms[i][1] for i in range(len(oms))]
mywn = max(ywn)
n_point_y = 4 * mywn + 2
else:
n_point_y = int(np.ceil(np.pi / delta_y) + 1)
x = np.linspace(0., 2 * np.pi / self._model_params.scale_params.n, n_point_x)
y = np.linspace(0., np.pi, n_point_y)
self._X, self._Y = np.meshgrid(x, y)
    def _configure(self, delta_x=None, delta_y=None):
if not self._ocean:
warnings.warn("OceanicStreamfunctionDiagnostic: No ocean configuration found in the provided parameters. This model version does not have an ocean. " +
"Please check your configuration.")
return 1
self._compute_grid(delta_x, delta_y)
basis = self._model_params.oceanic_basis
self._grid_basis = list()
for func in basis.num_functions():
self._grid_basis.append(func(self._X, self._Y))
self._grid_basis = np.array(self._grid_basis)
class OceanicLayerStreamfunctionDiagnostic(OceanicStreamfunctionDiagnostic):
"""Diagnostic giving the oceanic layer streamfunction fields :math:`\\psi_{\\rm o}`.
Parameters
----------
model_params: QgParams
An instance of the model parameters.
delta_x: float, optional
Spatial step in the zonal direction `x` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
delta_y: float, optional
Spatial step in the meridional direction `y` for the gridded representation of the field.
If not provided, take an optimal guess based on the provided model's parameters.
dimensional: bool, optional
Indicate if the output diagnostic must be dimensionalized or not.
Default to `True`.
conserved: bool, optional
Whether to plot the conserved oceanic fields or not. Default to `True`.
Attributes
----------
dimensional: bool
Indicate if the output diagnostic must be dimensionalized or not.
"""
def __init__(self, model_params, delta_x=None, delta_y=None, dimensional=True, conserved=True):
OceanicStreamfunctionDiagnostic.__init__(self, model_params, delta_x, delta_y, dimensional)
natm = self._model_params.nmod[0]
self._plot_title = r'Oceanic streamfunction'
self._plot_units = r" (in " + self._model_params.get_variable_units(2 * natm) + r")"
self._conserved = conserved
self._fields_average = list()
basis = self._model_params.oceanic_basis
for func in basis.num_functions():
average = dblquad(func, 0, np.pi, 0, 2*np.pi/model_params.scale_params.n)
self._fields_average.append(average[0])
self._fields_average = np.array(self._fields_average)
def _get_diagnostic(self, dimensional):
natm = self._model_params.nmod[0]
noc = self._model_params.nmod[1]
if self._conserved:
cgrid = np.swapaxes(np.swapaxes(self._grid_basis, 0, -1) - self._fields_average, 0, -1)
else:
cgrid = self._grid_basis
psi = np.swapaxes(self._data[2 * natm:2 * natm + noc, ...].T @ np.swapaxes(cgrid, 0, 1), 0, 1)
if dimensional:
self._diagnostic_data = psi * self._model_params.streamfunction_scaling
self._diagnostic_data_dimensional = True
else:
self._diagnostic_data = psi
self._diagnostic_data_dimensional = False
return self._diagnostic_data
if __name__ == '__main__':
    from qgs.params.params import QgParams
from qgs.integrators.integrator import RungeKuttaIntegrator
from qgs.functions.tendencies import create_tendencies
pars = QgParams()
pars.set_atmospheric_channel_fourier_modes(2, 2)
f, Df = create_tendencies(pars)
integrator = RungeKuttaIntegrator()
integrator.set_func(f)
ic = np.random.rand(pars.ndim) * 0.1
integrator.integrate(0., 200000., 0.1, ic=ic, write_steps=5)
time, traj = integrator.get_trajectories()
integrator.terminate()
psi3 = LowerLayerAtmosphericStreamfunctionDiagnostic(pars)
psi3(time, traj)
psi1 = UpperLayerAtmosphericStreamfunctionDiagnostic(pars)
psi1(time, traj)
psi = MiddleAtmosphericStreamfunctionDiagnostic(pars)
psi(time, traj)
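    # The calls above feed the trajectory into each diagnostic; the gridded
    # fields are then built by _get_diagnostic (dimensionalized by default).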
|
[
"jodemaey@meteo.be"
] |
jodemaey@meteo.be
|
9869ba9f08a4b1d43b66ba15aadb4d3043f6f04f
|
1f3d6f2f00506ac47ffb645954c39e7f75c47da7
|
/prog4.py
|
1e521b1af8a2d713c1281a4dedede3e38f1cb3fd
|
[] |
no_license
|
ravali1101/DSA-assignment
|
1cf1ccdbaecd4d8060264734e1bcafa205d3df41
|
243d13cec6f5d10cac13aeba7a689270ea06c4de
|
refs/heads/master
| 2020-04-06T04:53:36.749991
| 2016-12-09T07:36:30
| 2016-12-09T07:36:30
| 73,819,960
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 644
|
py
|
class Stack:
def __init__(self):
self._data = []
def push(self,e):
self._data.append(e)
def pop(self):
if(self.is_empty()):
print("stack is empty")
else:
return self._data.pop()
def is_empty(self):
return len(self._data) == 0
def reverse(self):
s = Stack()
t = Stack()
length = len(self._data)
for e in range(length):
s.push(self.pop())
for e in range(length):
t.push(s.pop())
for e in range(length):
self._data.append(t.pop())
print(self._data)
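    # Note: each stack-to-stack transfer reverses element order, so the three
    # transfers above (self -> s -> t -> self) net a single reversal.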
if __name__ == '__main__':
s1 = Stack()
s1.push(10)
s1.push(20)
s1.reverse()
|
[
"rsanju0111@gmail.com"
] |
rsanju0111@gmail.com
|
ac8ebc3787d2342f9a4bfdb64177a5c9f1862098
|
8e57bc9558c84a59e0dd1470d45f254efa2b1322
|
/lab8.py
|
fd432dfc316e016a3b049d044ab21cdb2194826b
|
[] |
no_license
|
NathanMacDiarmid/ECOR-1051-Module-1
|
d14e9eaa2137d9fc8bdf12d85be53364cb2d8cf3
|
2325d1c6838fb58c62488d64f5e6a2a0a2a5be9d
|
refs/heads/master
| 2022-04-09T07:57:04.008666
| 2020-03-13T00:47:47
| 2020-03-13T00:47:47
| 246,958,112
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,199
|
py
|
# IMPORTS
def test_fn(fn, expect, var1) -> str:
print("Testing", fn.__name__, ":")
actual = fn(var1)
print("Expected result:", expect, "Actual result:", actual)
if actual == expect:
return "\n Test passed \n"
else:
return "\n Test failed \n"
def test_fn2(fn, expect, var1, var2) -> str:
print("Testing", fn.__name__, ":")
actual = fn(var1, var2)
print("Expected result:", expect, "Actual result:", actual)
if actual == expect:
return "\n Test passed \n"
else:
return "\n Test failed \n"
# FUNCTION DEFINITIONS
# Exercise 1 Definition
def first_last6(lst1: list) -> bool:
"""
Returns True if the first and/or last element in the list is 6
"""
    return lst1[0] == 6 or lst1[-1] == 6
# Exercise 2 Definition
def same_first_last(lst2):
"""
Returns True if the list isn't empty and if the first and last elements are equal.
Otherwise it returns False.
"""
    return len(lst2) > 0 and lst2[0] == lst2[-1]
# Exercise 3 Definition
def make_pi():
"""
    Returns a list of length three that contains the first three digits of pi.
"""
return [3, 1, 4]
# Exercise 4 Definition
def common_end(lst3, lst4):
"""
Returns True if both fist elements are the same.
Returns True if the first and last elements of both lists are the same.
Returns False otherwise.
"""
if len(lst3) == 0:
return False
elif len(lst4) == 0:
return False
elif lst3[0] == lst4[0]:
return True
elif lst3[-1] == lst4[-1]:
return True
else:
return False
# Exercise 5 Definition
def sum3(lst5):
"""
Returns the sum of all three elements in provided list.
"""
return lst5[0] + lst5[1] + lst5[2]
# Exercise 6 Definition
def rotate_left3(lst6):
"""
Returns the list given, rotated to the left by one.
>>> [1, 2, 3]
[2, 3, 1]
"""
return [lst6[1], lst6[2], lst6[0]]
# Exercise 7 Definition
def reverse3(lst7):
"""
Returns the given list, reversed.
>>> [1, 2, 3]
[3, 2, 1]
"""
return [lst7[2], lst7[1], lst7[0]]
# Exercise 8 Definition
def max_end3(lst8):
    """
    Returns a list of three copies of whichever is bigger, the first or the last element.
    >>> [4, 5, 3]
    [4, 4, 4]
    """
    if lst8[0] >= lst8[-1]:
        return [lst8[0], lst8[0], lst8[0]]
    return [lst8[-1], lst8[-1], lst8[-1]]
# Exercise 9 Definition
def sum2(lst9):
"""
Returns the sum of the first two list elements.
"""
if len(lst9) == 0:
return 0
elif len(lst9) == 1:
return lst9[0]
else:
return lst9[0] + lst9[1]
# Exercise 10 Definition
def middle_way(lst10, lst11):
"""
    Returns the middle elements of two lists of three integers.
"""
return [lst10[1], lst11[1]]
# Exercise 11 Definition
def make_ends(lst12):
"""
Returns first and last element of a list in a new list
"""
return [lst12[0], lst12[-1]]
# Exercise 12 Definition
def has23(lst13):
"""
Returns True if the list contains 2 and/or 3.
Returns False if not.
"""
    return 2 in lst13 or 3 in lst13
# TESTING
# Exercise 1 Testing
print("EXERCISE 1\n")
print(test_fn(first_last6, True, [1, 2, 3, 4, 5, 6]))
print(test_fn(first_last6, True, [6, 2, 3, 4, 5, 1]))
print(test_fn(first_last6, True, [6, 2, 3, 4, 5, 6]))
print(test_fn(first_last6, False, [1, 2, 3, 6, 5, 1]))
print(test_fn(first_last6, False, [1, 2, 3, 4, 5, 1]))
# Exercise 2 Testing
print("EXERCISE 2\n")
print(test_fn(same_first_last, False, []))
print(test_fn(same_first_last, True, [1, 2, 3, 4, 5, 1]))
print(test_fn(same_first_last, True, [1]))
# Exercise 3 Testing
print("EXERCISE 3\n")
print(make_pi())
# Exercise 4 Testing
print("EXERCISE 4\n")
print(test_fn2(common_end, True, [1, 2, 3, 4, 5, 6], [1, 2, 3, 4, 5]))
print(test_fn2(common_end, True, [2, 3, 4, 5, 6], [1, 2, 3, 4, 5, 6]))
print(test_fn2(common_end, True, [1, 2, 3, 4, 5, 6], [1, 2, 3, 4, 6]))
print(test_fn2(common_end, False, [0, 2, 3, 4, 5, 6], [1, 2, 3, 4, 5]))
print(test_fn2(common_end, False, [1, 2, 3, 4, 5, 6], []))
print(test_fn2(common_end, False, [], []))
# Exercise 5 Testing
print("EXERCISE 5\n")
print(test_fn(sum3, 6, [1, 2, 3]))
print(test_fn(sum3, 0, [-1, -2, 3]))
print(test_fn(sum3, 0, [0, 0, 0]))
# Exercise 6 Testing
print("EXERCISE 6\n")
print(test_fn(rotate_left3, [2, 3, 1], [1, 2, 3]))
print(test_fn(rotate_left3, [5, 7, -1], [-1, 5, 7]))
# Exercise 7 Testing
print("EXERCISE 7\n")
print(test_fn(reverse3, [3, 2, 1], [1, 2, 3]))
print(test_fn(reverse3, [-3, -2, -1], [-1, -2, -3]))
print(test_fn(reverse3, [22, 5, 7], [7, 5, 22]))
# Exercise 8 Testing
print("EXERCISE 8\n")
print(test_fn(max_end3, [3, 3, 3], [1, 2, 3]))
print(test_fn(max_end3, [3, 3, 3], [2, 9, 3]))
print(test_fn(max_end3, [5, 5, 5], [5, 2, 3]))
print(test_fn(max_end3, [1, 1, 1], [1, 2, -3]))
print(test_fn(max_end3, [1, 1, 1], [1, 2, 1]))
# Exercise 9 Testing
print("EXERCISE 9\n")
print(test_fn(sum2, 0, []))
print(test_fn(sum2, 0, [1]))
print(test_fn(sum2, 3, [1, 2]))
print(test_fn(sum2, -1, [1, -2]))
print(test_fn(sum2, 15, [10, 5, 6, 9]))
# Exercise 10 Testing
print("EXERCISE 10\n")
print(test_fn2(middle_way, [2, 2], [1, 2, 3], [1, 2, 3]))
print(test_fn2(middle_way, [5, 2], [-1, 5, 3], [1, 2, 3]))
print(test_fn2(middle_way, [22, -2], [1, 22, 3], [1, -2, 3]))
# Exercise 11 Testing
print("EXERCISE 11\n")
print(test_fn(make_ends, [5, 7], [5, 2, 3, 4, 5, 7]))
print(test_fn(make_ends, [7, 7], [7, 2, 3, 4, 5, 7]))
print(test_fn(make_ends, [-22, 543], [-22, 2, 3, 4, 5, 543]))
# Exercise 12 Testing
print("EXERCISE 12 \n")
print(test_fn(has23, True, [5, 6, 4, 7, 3, 1]))
print(test_fn(has23, True, [5, 6, 2, 7, 3, 1]))
print(test_fn(has23, False, [5, 6, 4, 7, 0, 1]))
|
[
"nathanmacdiarmid@gmail.com"
] |
nathanmacdiarmid@gmail.com
|
fefc940bf718002a0db6861dd6ef2b7e7fd84f6f
|
4ad1afc018f0fbee73e491a418451ebce131f418
|
/src/jupyter_starters/py_starters/cookiecutter.py
|
14bfe22c146ed3397af8358b7c66e1495c8dd0c6
|
[
"BSD-3-Clause"
] |
permissive
|
bollwyvl/jupyterlab-starters
|
2abfa0e92af9db71d7c5a03038aff211fc8ec9d2
|
1a829a56468d60a91c293020cdfd4685bc0db6f8
|
refs/heads/master
| 2023-02-21T09:44:41.901370
| 2019-12-18T22:12:37
| 2019-12-18T22:12:37
| 228,930,227
| 0
| 0
|
BSD-3-Clause
| 2019-12-18T22:14:24
| 2019-12-18T22:14:23
| null |
UTF-8
|
Python
| false
| false
| 6,419
|
py
|
""" a starter that runs cookiecutter
"""
# pylint: disable=cyclic-import
import re
import shutil
from copy import deepcopy
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING, Any, Dict, Text
from notebook.utils import url_path_join as ujoin
from .._json import JsonSchemaException, json_validator
if TYPE_CHECKING:
from ..manager import StarterManager # noqa
DEFAULT_TEMPLATE = "https://github.com/audreyr/cookiecutter-pypackage.git"
def cookiecutter_starters():
""" try to find some cookiecutters
"""
try:
cookiecutter = __import__("cookiecutter")
except (ImportError, ValueError) as err:
print(f"couldn't import cookiecutter: {err}")
return {}
return {
"cookiecutter": {
"label": "Cookiecutter",
"description": f"Cookiecutter {cookiecutter.__version__}",
"type": "python",
"callable": "jupyter_starters.py_starters.cookiecutter.start",
"schema": {
"required": ["template"],
"properties": {
"template": {
"title": "Template",
"description": "Directory or URL of template",
"type": "string",
"default": DEFAULT_TEMPLATE,
},
"checkout": {
"title": "Checkout",
"description": "The branch, tag or commit ID to use",
"type": "string",
},
},
},
}
}
async def start(name, starter, path, body, manager) -> Dict[Text, Any]:
""" run cookiecutter
"""
# pylint: disable=cyclic-import,broad-except,too-many-locals,unused-variable
template = body["template"]
checkout = body.get("checkout") or None
manager.log.debug(f"🍪 body: {body}")
cookiecutter = __import__("cookiecutter.main")
config_dict = cookiecutter.main.get_user_config()
repo_dir, cleanup = cookiecutter.main.determine_repo_dir(
template=template,
abbreviations=config_dict["abbreviations"],
clone_to_dir=config_dict["cookiecutters_dir"],
checkout=checkout,
no_input=True,
password=None,
)
manager.log.debug(f"🍪 repo_dir: {repo_dir}")
context_file = Path(repo_dir) / "cookiecutter.json"
base_context = cookiecutter.main.generate_context(
context_file=str(context_file),
default_context=config_dict["default_context"],
extra_context={},
)
manager.log.debug(f"🍪 base_context: {base_context}")
schema = cookiecutter_to_schema(base_context["cookiecutter"])
manager.log.debug(f"🍪 schema: {schema}")
new_starter = deepcopy(starter)
new_starter["schema"]["required"] += ["cookiecutter"]
new_starter["schema"]["properties"]["cookiecutter"] = schema
validator = json_validator(new_starter["schema"])
valid = False
try:
validator(body)
valid = True
except JsonSchemaException as err:
manager.log.debug(f"🍪 validator: {err}")
if not valid:
return {
"body": body,
"name": name,
"path": path,
"starter": new_starter,
"status": "continuing",
}
with TemporaryDirectory() as tmpd:
final_context = {"cookiecutter": body["cookiecutter"]}
final_context["cookiecutter"]["_template"] = template
try:
result = cookiecutter.main.generate_files(
repo_dir=repo_dir,
context=final_context,
overwrite_if_exists=True,
output_dir=tmpd,
)
manager.log.debug(f"result {result}")
roots = sorted(Path(tmpd).glob("*"))
for root in roots:
await manager.start_copy(
"cookiecutter-copy",
{
"label": "Copy Cookiecutter",
"description": "just copies whatever cookiecutter did",
"src": str(root),
},
path,
{},
)
if cleanup:
shutil.rmtree(repo_dir)
return {
"body": body,
"name": name,
"path": ujoin(path, roots[0].name),
"starter": new_starter,
"status": "done",
}
except Exception as err:
manager.log.exception(f"🍪 error")
if cleanup:
shutil.rmtree(repo_dir)
return {
"body": body,
"name": name,
"path": path,
"starter": new_starter,
"status": "continuing",
"errors": [str(err)],
}
def cookiecutter_to_schema(cookiecutter):
""" convert a cookiecutter context to a JSON schema
"""
bools = {"y": True, "n": False}
schema = {
"title": "Cookiecutter",
"description": "Values to use in template variables",
"type": "object",
"properties": {},
}
schema["properties"] = properties = {}
for field, value in cookiecutter.items():
title = field.replace("_", " ").replace("-", " ").title()
if isinstance(value, str):
if value in bools:
properties[field] = {
"type": "boolean",
"default": bools[value],
"title": title,
}
continue
value_no_tmpl = re.sub(r"{[%{].*?[%}]}", "", value)
properties[field] = {
"type": "string",
"description": f"default: {value}",
"default": value_no_tmpl,
"title": title,
"minLength": 1,
}
continue
if isinstance(value, dict):
enum = list(value.keys())
properties[field] = {"enum": enum, "default": enum[0], "title": title}
continue
if isinstance(value, list):
properties[field] = {"enum": value, "default": value[0], "title": title}
continue
schema["required"] = sorted(list(schema["properties"].keys()))
return schema
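# Hedged example of the conversion above, with a hypothetical context:
#   {"project_name": "demo", "use_docker": "n", "license": ["MIT", "BSD"]}
# becomes a schema whose properties are a non-empty string (default "demo"),
# a boolean (default False) and an enum (default "MIT"), with all three
# field names listed in "required".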
|
[
"noreply@github.com"
] |
bollwyvl.noreply@github.com
|
fe4c56b131d97aba2bdaa75204a2a9bd583f136d
|
c29eba01ce299ebb27b886a83e19e59add7e2f6b
|
/tests/cases/so/test_so2.py
|
ff88ef28ce3ceae12a209eb291ea8e662cd77714
|
[
"BSD-3-Clause"
] |
permissive
|
smarie/python-pytest-cases
|
e87516e73d5067d5c307c7fdb37cc5f1f97c417e
|
ab3b7190d728b18512141b9f5f3a1c3dfc7cedf2
|
refs/heads/main
| 2023-07-08T11:41:57.278697
| 2023-02-23T13:11:25
| 2023-02-23T13:11:25
| 138,296,136
| 286
| 40
|
BSD-3-Clause
| 2023-07-03T14:57:02
| 2018-06-22T11:42:19
|
Python
|
UTF-8
|
Python
| false
| false
| 980
|
py
|
# from https://stackoverflow.com/a/51199035/7262247
from pytest_cases import parametrize_with_cases
try: # python 3.2+
from functools import lru_cache
except ImportError:
from functools32 import lru_cache
read_files = set()
@lru_cache(maxsize=3)
def load_file(file_name):
""" This function loads the file and returns contents"""
print("loading file " + file_name)
global read_files
assert file_name not in read_files
read_files.add(file_name)
return "<dummy content for " + file_name + ">"
def case_1():
return load_file('file1')
def case_2():
return load_file('file2')
def case_3():
return load_file('file3')
@parametrize_with_cases("pars", cases=[case_1, case_2])
def test_a(pars):
print('test_a', pars)
@parametrize_with_cases("pars", cases=[case_2, case_3])
def test_b(pars):
print('test_b', pars)
@parametrize_with_cases("pars", cases=[case_1, case_2, case_3])
def test_c(pars):
print('test_c', pars)
|
[
"sylvain.marie@se.com"
] |
sylvain.marie@se.com
|
4b48d40dcaa6739996252f8cac135cd32f73eaae
|
8ff5d67f9781f3ff23bc3e5ed0c9d4f11c6f14ff
|
/Archive/old_src1/window_timing_test.py
|
719172a436f0c272e9784373bd2ff229ed36e920
|
[] |
no_license
|
daniellisachuk/Page-Load-Time
|
514c38d97cb94f49dd5712af3893edeba65320b6
|
aecce15c2f7cd05e652194633ac07935271e03f2
|
refs/heads/master
| 2023-08-24T05:37:26.295146
| 2021-10-25T09:28:26
| 2021-10-25T09:28:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,917
|
py
|
from selenium import webdriver
df_dict = {
'url': [],
'ff_navigationStart': [],
'ff_redirectStart': [],
'ff_redirectEnd': [],
'ff_fetchStart': [],
'ff_domainLookupStart': [],
'ff_domainLookupEnd': [],
'ff_connectStart': [],
'ff_secureConnectionStart': [],
'ff_connectEnd': [],
'ff_requestStart': [],
'ff_responseStart': [],
'ff_unloadEventStart': [],
'ff_unloadEventEnd': [],
'ff_responseEnd': [],
'ff_domLoading': [],
'ff_domInteractive': [],
'ff_domContentLoadedEventStart': [],
'ff_domContentLoadedEventEnd': [],
'ff_domComplete': [],
'ff_loadEventStart': [],
'ff_loadEventEnd': [],
}
for _ in range(2):
ff_browser = webdriver.Firefox(executable_path="./geckodriver")
ff_browser.get("https://www.google.com")
timing_obj= ff_browser.execute_script("return window.performance.timing")
'''
'domComplete': 1591724265131,
'responseStart': 1591724253317,
'fetchStart': 1591724245778,
'navigationStart': 1591724245777,
'domLoading': 1591724253326,
'redirectEnd': 0,
'redirectStart': 0,
'loadEventStart': 1591724265131,
'loadEventEnd': 1591724265140,
'connectEnd': 1591724253164,
'secureConnectionStart': 1591724251044,
'domainLookupStart': 1591724245822,
'domInteractive': 1591724253494,
'connectStart': 1591724250962,
'unloadEventStart': 0,
'unloadEventEnd': 0,
'domContentLoadedEventEnd': 1591724253522,
'domContentLoadedEventStart': 1591724253506,
'domainLookupEnd': 1591724250962,
'responseEnd': 1591724253317,
'requestStart': 1591724253164
'''
ff_browser.quit()
for i, j in dict(timing_obj).items():
df_dict['ff_{}'.format(i)].append(j)
for i, j in df_dict.items():
print(i, j)
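# df_dict is shaped as equal-length columns; assuming pandas is available,
# pandas.DataFrame(df_dict) would turn it into one row per page load.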
|
[
"daniellisachuk@gmail.com"
] |
daniellisachuk@gmail.com
|
3d09b0796d9c583f864ccb2784ab8ff634c1d629
|
59c55725576bbf0e2f6617507ba2f1db639abb3f
|
/analytic_resource_plan_stock_picking_out/__openerp__.py
|
1003910d5e1a27b95f02578252bad4ec2a383147
|
[] |
no_license
|
bmya/eficent-odoo-addons
|
e3426ebaf1f59e52726253fc1dd36a09d9363059
|
5d8ddfa384ab4417f42bda103b71d926848035f6
|
refs/heads/7.0
| 2021-01-21T16:48:55.312452
| 2015-11-04T14:11:19
| 2015-11-04T14:11:19
| 45,649,141
| 1
| 3
| null | 2015-11-06T00:35:17
| 2015-11-06T00:35:17
| null |
UTF-8
|
Python
| false
| false
| 1,806
|
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Eficent (<http://www.eficent.com/>)
# <contact@eficent.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Analytic Resource Planning - Delivery Orders",
"version": "1.0",
"author": "Eficent",
"website": "www.eficent.com",
"category": "Generic Modules/Projects & Services",
"depends": ["analytic_resource_plan_procurement", "stock",
"analytic_location", "analytic_resource_plan_stock"],
"description": """
Create Delivery Orders from Planned Resource Lines
==================================================
Module features:
- Create delivery order from planned resource lines
""",
"init_xml": [],
"update_xml": [
"wizard/analytic_resource_plan_line_make_stock_picking_out.xml",
"view/analytic_resource_plan_line_view.xml",
],
'demo_xml': [],
    'test': [],
'installable': True,
'active': False,
'certificate': '',
}
|
[
"jordi.ballester@eficent.com"
] |
jordi.ballester@eficent.com
|
c6be2e433f63e2c4e908dd3e1890f6ef95dcd4c5
|
f98ac38debdb21ddfd5723cd15fff28cdcbe07c1
|
/ecs_fargate/datadog_checks/ecs_fargate/__about__.py
|
d0a682750f9a67049f4d112086986abc73694a4d
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
rmsy/integrations-core
|
fff6924db331e0bccda71dde123eef37574eecc7
|
230236537acb9fc9ebeca427a01959b7bed22ac5
|
refs/heads/master
| 2020-07-09T19:38:53.747552
| 2019-08-23T20:17:52
| 2019-08-23T20:17:52
| 204,064,648
| 0
| 0
| null | 2019-08-23T20:19:24
| 2019-08-23T20:19:24
| null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
__version__ = '2.2.2'
|
[
"noreply@github.com"
] |
rmsy.noreply@github.com
|
5b05538a5055bb7d74a3fb673f64ec31bf8c02a2
|
82ecfdec9184781e16a77ed9c836d88356025a10
|
/ucf_sub_catkin_ros/src/sub_utils/src/color.py
|
6333f635f3f04d07a89fc04b1aa45497bb31d67f
|
[
"MIT"
] |
permissive
|
RoboticsClubatUCF/RoboSub
|
9778bba4cbb648566991e2d3ef089c67e8cbfc71
|
47304c620f963a8762db57a7ed248d1df90190fb
|
refs/heads/master
| 2020-12-24T06:27:48.831408
| 2018-08-03T19:37:09
| 2018-08-03T19:37:09
| 37,393,696
| 0
| 8
|
MIT
| 2018-06-24T23:34:14
| 2015-06-14T00:50:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,950
|
py
|
from matplotlib import pyplot as plt
import numpy as np
import argparse
import cv2
from imutils import paths
import imutils
import os
from sklearn.externals import joblib
#Argument Parsing
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--positive", required=True, help="path to positive images directory")
ap.add_argument("-n", "--negative", required=True, help="path to negative images directory")
ap.add_argument("--nf", required = True, help="path to negative feature directory")
ap.add_argument("-r", required = True, help="path to red features directory")
ap.add_argument("-g", required = True, help="path to green features directory")
ap.add_argument("-y", required = True, help="path to yellow features directory")
args = vars(ap.parse_args())
'''This Loop goes through every image in the specified directory and does the following:
1. Read in the image
2. Resize it to a constant size
3. Split color channels
4. Calculate color histogram for each channel and concatenate
5. Flatten feature vector
6. Determine which color buoy this is
7. Dump to appropriate file
'''
for imagePath in paths.list_images(args["positive"]):
image = cv2.imread(imagePath)
image = imutils.resize(image, width=64)
image = imutils.resize(image, height=64)
chans = cv2.split(image)
colors = ("b", "g", "r")
'''plt.figure()
plt.title("'Flattened' Color Histogram")
plt.xlabel("Bins")
plt.ylabel("# of Pixels")'''
features = []
for (chan, color) in zip(chans, colors):
hist = cv2.calcHist([chan], [0], None, [256], [0, 256])
features.extend(hist)
features = np.asarray(features)
features = features.flatten()
if("R" in imagePath):
fd_name = os.path.split(imagePath)[1].split(".")[0] + ".feat"
fd_path = os.path.join(args["r"], fd_name)
joblib.dump(features, fd_path)
if("Y" in imagePath):
fd_name = os.path.split(imagePath)[1].split(".")[0] + ".feat"
fd_path = os.path.join(args["y"], fd_name)
joblib.dump(features, fd_path)
if("G" in imagePath):
fd_name = os.path.split(imagePath)[1].split(".")[0] + ".feat"
fd_path = os.path.join(args["g"], fd_name)
joblib.dump(features, fd_path)
for imagePath in paths.list_images(args["negative"]):
image = cv2.imread(imagePath)
image = imutils.resize(image, width=64)
image = imutils.resize(image, height=64)
chans = cv2.split(image)
colors = ("b", "g", "r")
'''plt.figure()
plt.title("'Flattened' Color Histogram")
plt.xlabel("Bins")
plt.ylabel("# of Pixels")'''
features = []
for (chan, color) in zip(chans, colors):
hist = cv2.calcHist([chan], [0], None, [256], [0, 256])
features.extend(hist)
features = np.asarray(features)
features = features.flatten()
fd_name = os.path.split(imagePath)[1].split(".")[0] + ".feat"
fd_path = os.path.join(args["nf"], fd_name)
joblib.dump(features, fd_path)
'''plt.plot(hist, color = color)
plt.xlim([0, 256])
plt.show()
print "flattened feature vector size: %d" % (np.array(features).flatten().shape)'''
|
[
"rfatt13@gmail.com"
] |
rfatt13@gmail.com
|
ec43e813c712bd6dad5b19928e8169a1ac7bfb41
|
ef6213036210b487ef01ce61e361fd234b05df03
|
/friday/website/friday/apps/migration/__init__.py
|
e18667d477080c9a8efff67080eac4fa3fcd8f67
|
[] |
no_license
|
BGCX262/zzheng-hg-to-git
|
f1bfc07ea940547da3ba616fc1f799d0720eca48
|
a3fc0f6d907b8e11dcefcf4ec988b337ad66a09d
|
refs/heads/master
| 2020-06-30T05:30:59.672935
| 2010-06-04T09:10:33
| 2010-06-04T09:10:33
| 41,257,958
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 150
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 ZHENG Zhong <http://www.zhengzhong.net/>
#
# Created on 2010-05-03.
# $Id$
#
|
[
"heavyzheng"
] |
heavyzheng
|
e3efe74f2ab969e81989d355bfcd37f1df324e13
|
58f17f998d21121a4e9c7d1666d63873880fe7a0
|
/article/migrations/0002_auto_20150524_0200.py
|
9114f36613f061823d2eb618829a61ad2ea50ec2
|
[] |
no_license
|
itssiva/django_test
|
cc03b914bc2dc9cec31ac7fe6158b6b010b31f66
|
060769d3787af7212f7521e8feb7f16b450ef507
|
refs/heads/master
| 2021-01-10T10:25:06.996888
| 2015-05-24T02:16:55
| 2015-05-24T02:16:55
| 36,039,224
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,185
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import article.models
class Migration(migrations.Migration):
dependencies = [
('article', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('body', models.TextField()),
('pub_date', models.DateTimeField(verbose_name=b'date published')),
],
),
migrations.AddField(
model_name='article',
name='thumbnail',
field=models.FileField(default=b'', upload_to=article.models.get_upload_file_name),
),
migrations.AlterField(
model_name='article',
name='likes',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='comment',
name='article',
field=models.ForeignKey(to='article.Article'),
),
]
|
[
"sivaponugoti@gmail.com"
] |
sivaponugoti@gmail.com
|
695ee5a6ea016daf5d8460780f7d0f764b2a4cc3
|
4462fb13def2b1c310ff74d9eb1091ac89375191
|
/python/test/y2021/test_dec02.py
|
8f0d14198f6704a31b887a8ec37094043560bb5e
|
[] |
no_license
|
prositen/advent-of-code
|
d9037e096c04a036833aedf7afff308ad2c9f846
|
40f554a163fbf8d0c26d53aa6f624d31d1a2549b
|
refs/heads/master
| 2023-01-08T22:20:46.779900
| 2022-12-26T14:11:07
| 2022-12-26T14:11:07
| 47,338,913
| 16
| 0
| null | 2022-12-25T19:24:19
| 2015-12-03T14:58:54
|
Python
|
UTF-8
|
Python
| false
| false
| 436
|
py
|
import unittest
from python.src.y2021.dec02 import Dec02
class TestDec02(unittest.TestCase):
data = [
"forward 5",
"down 5 ",
"forward 8",
"up 3 ",
"down 8 ",
"forward 2"
]
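    # The trailing spaces in some entries presumably exercise the parser's
    # whitespace handling (a hedged reading of the test data).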
def test_part_1(self):
self.assertEqual(150, Dec02(instructions=self.data).part_1())
def test_part_2(self):
self.assertEqual(900, Dec02(instructions=self.data).part_2())
|
[
"noreply@github.com"
] |
prositen.noreply@github.com
|
d9fec02a880258463f3f2cd5ca0c3dc36cc5b577
|
bb3167f4a848aee6bb4c16b17aa51f2785a3456f
|
/app/src/repositories/CardsRepository.py
|
b4454f555218140328873e477a1724a0965b7413
|
[] |
no_license
|
Thomasa91/flashcardsApp
|
8d80473aaa82cad3077bf89a1525cd5c20ba2038
|
5f68adcbf8775f7c5f0af2b502738aaff661aeee
|
refs/heads/main
| 2023-07-20T04:26:36.150727
| 2021-09-04T09:39:53
| 2021-09-04T09:39:53
| 328,564,217
| 0
| 0
| null | 2021-07-13T19:13:13
| 2021-01-11T06:02:48
|
Python
|
UTF-8
|
Python
| false
| false
| 2,638
|
py
|
from typing import List, Optional
from sqlite3 import Error
from app.src import dbConn
from app.src.models.Card import Card
from app.src.utilities.logger import logger
conn = dbConn.get()
# TODO: implement exception handling
def create(deck_id: int, word: str, translation: str) -> Optional[Card]:
query = f"INSERT INTO card (deck_id, word, translation) VALUES ({deck_id}, '{word}', '{translation}');"
c = conn.cursor()
c.execute(query)
conn.commit()
card_id = c.lastrowid
if card_id:
logger.debug(
f"Card with id {card_id} has been saved into database successfully")
return Card(card_id, deck_id, word, translation)
logger.error("Saving card into database failed")
return None
def get_all() -> List[Card]:
query = "SELECT * FROM card"
c = conn.cursor()
c.execute(query)
cards = []
for card_data in c.fetchall():
cards.append(Card.create_from_list(card_data))
logger.debug(f"Retrieved {len(cards)} card records from database")
return cards
def get_by_deck_id(deck_id: int) -> List[Card]:
query = f"SELECT * FROM card WHERE deck_id = {deck_id};"
c = conn.cursor()
c.execute(query)
cards = []
for card_data in c.fetchall():
cards.append(Card.create_from_list(card_data))
logger.debug(
f"Retrieved {len(cards)} cards with deck_id:{deck_id} from database")
return cards
def get_by_id(card_id: int) -> Optional[Card]:
query = f"SELECT * FROM card WHERE card_id = {card_id};"
c = conn.cursor()
c.execute(query)
card_data = c.fetchone()
if card_data:
logger.debug(f"Card with id:{card_id} found in database")
return Card.create_from_list(card_data)
logger.debug(f"Card with id:{card_id} not found in database")
return None
def delete(card_id: int) -> bool:
query = f"DELETE FROM card WHERE card_id = {card_id};"
c = conn.cursor()
c.execute(query)
conn.commit()
if c.rowcount:
logger.debug(f"Card with id {card_id} has been removed")
return True
logger.debug(f"Deleting a card with id {card_id} has not finished successfully")
return False
def update(card_id: int, word: str, translation: str) -> bool:
    query = f"UPDATE card SET word = '{word}', translation = '{translation}' WHERE card_id = {card_id};"
    c = conn.cursor()
    c.execute(query)
    conn.commit()
    if c.rowcount:
        logger.debug(f"Card with id {card_id} has been updated")
        return True
    logger.debug(f"Updating a card with id {card_id} has not finished successfully")
    return False
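# Hedged sketch, not part of the repository API: sqlite3 supports '?'
# placeholders, which avoid the SQL injection risk of the f-string queries
# above. For example, create() could execute:
#
#     c.execute(
#         "INSERT INTO card (deck_id, word, translation) VALUES (?, ?, ?);",
#         (deck_id, word, translation),
#     )
#
# The same placeholder style applies to the SELECT, UPDATE and DELETE queries.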
|
[
"tomaszgrochdk@gmail.com"
] |
tomaszgrochdk@gmail.com
|
e5f38b9d0a0aaee53640ab7615d8e4ddda231a9a
|
959185d2f84f04ba4a31d97d11be78fdf5955d96
|
/aqsa-master/aqsa_apps/import_from_file/views_db_records_csv_backup.py
|
c2aa527216fb795fec4fac5ea5e17727ca8cded6
|
[
"MIT"
] |
permissive
|
WhytryB/Cross
|
2fcc1a87bf59f7e870e59ce3c55fbe37cb31f45a
|
00402d5198065909359af80982e7e2dbbcdb67bc
|
refs/heads/master
| 2020-04-13T13:37:13.823379
| 2018-12-27T02:43:36
| 2018-12-27T02:43:36
| 163,236,535
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,706
|
py
|
# Author of Aqsa: Yulay Musin
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from . import models as m
from django.shortcuts import redirect
from . import viewxins_record_csv_backup as vxrcb
from . import forms as f
from aqsa_apps.wallet_tag_etc import models as wte_m
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
import os
from django.conf import settings
success_msg = _('Data from your file was imported.')
@require_POST
@login_required
def db_records_csv_wallets(request, pk):
import_from_file = get_object_or_404(m.ImportFromFile, owner=request.user, pk=pk, variety=2)
if not import_from_file.checked:
return redirect(import_from_file.get_check_url())
if import_from_file.checked and import_from_file.no_error and not import_from_file.success:
vxrcb.db_recorder_of_csv_of_wallet_tag_etc(import_from_file.file.path, request.user, f.Wallet, wte_m.Wallet)
import_from_file.mark_as_finished()
messages.success(request, success_msg)
return redirect(reverse_lazy('import_from_file:list'))
@require_POST
@login_required
def db_records_csv_categories(request, pk):
import_from_file = get_object_or_404(m.ImportFromFile, owner=request.user, pk=pk, variety=3)
if not import_from_file.checked:
return redirect(import_from_file.get_check_url())
if import_from_file.checked and import_from_file.no_error and not import_from_file.success:
vxrcb.db_recorder_of_csv_of_wallet_tag_etc(import_from_file.file.path, request.user, f.Category, wte_m.Category)
import_from_file.mark_as_finished()
messages.success(request, success_msg)
return redirect(reverse_lazy('import_from_file:list'))
@require_POST
@login_required
def db_records_csv_tags(request, pk):
import_from_file = get_object_or_404(m.ImportFromFile, owner=request.user, pk=pk, variety=4)
if not import_from_file.checked:
return redirect(import_from_file.get_check_url())
if import_from_file.checked and import_from_file.no_error and not import_from_file.success:
vxrcb.db_recorder_of_csv_of_wallet_tag_etc(import_from_file.file.path, request.user, f.Tag, wte_m.Tag)
import_from_file.mark_as_finished()
messages.success(request, success_msg)
return redirect(reverse_lazy('import_from_file:list'))
@require_POST
@login_required
def db_records_csv_contacts(request, pk):
import_from_file = get_object_or_404(m.ImportFromFile, owner=request.user, pk=pk, variety=5)
if not import_from_file.checked:
return redirect(import_from_file.get_check_url())
if import_from_file.checked and import_from_file.no_error and not import_from_file.success:
vxrcb.db_recorder_of_csv_of_wallet_tag_etc(import_from_file.file.path, request.user, f.Contact, wte_m.Contact)
import_from_file.mark_as_finished()
messages.success(request, success_msg)
return redirect(reverse_lazy('import_from_file:list'))
@require_POST
@login_required
def db_records_csv_transactions(request, pk):
import_from_file = get_object_or_404(m.ImportFromFile, owner=request.user, pk=pk, variety=6)
if not import_from_file.checked:
return redirect(import_from_file.get_check_url())
if import_from_file.checked and import_from_file.no_error and not import_from_file.success:
wallet_objects = wte_m.Wallet.objects.filter(owner=request.user)
dict_with_wallets = dict((x.name, x) for x in wallet_objects)
category_objects = wte_m.Category.objects.filter(owner=request.user)
dict_with_categories = dict((x.name, x) for x in category_objects)
tag_objects = wte_m.Tag.objects.filter(owner=request.user)
dict_with_tags = dict((x.name, x) for x in tag_objects)
contact_objects = wte_m.Contact.objects.filter(owner=request.user)
dict_with_contacts = dict((x.name, x) for x in contact_objects)
vxrcb.db_recorder_of_transaction(
import_from_file.file.path, import_from_file, request.user,
dict_with_wallets, dict_with_categories, dict_with_tags, dict_with_contacts
)
import_from_file.mark_as_finished()
messages.success(request, success_msg)
return redirect(reverse_lazy('import_from_file:list'))
@require_POST
@login_required
def db_records_aqsa_backup(request, pk):
import_from_file = get_object_or_404(m.ImportFromFile, owner=request.user, pk=pk, variety=7)
if not import_from_file.checked:
return redirect(import_from_file.get_check_url())
if import_from_file.checked and import_from_file.no_error and not import_from_file.success:
unzipped_csv_path = os.path.join(settings.MEDIA_ROOT, os.path.join('import_from_file', pk))
names_of_wallets_in_csv = vxrcb.db_recorder_of_csv_of_wallet_tag_etc(
os.path.join(unzipped_csv_path, 'wallets.csv'), request.user, f.Wallet, wte_m.Wallet)
names_of_categories_in_csv = vxrcb.db_recorder_of_csv_of_wallet_tag_etc(
os.path.join(unzipped_csv_path, 'categories.csv'), request.user, f.Category, wte_m.Category)
names_of_tags_in_csv = vxrcb.db_recorder_of_csv_of_wallet_tag_etc(
os.path.join(unzipped_csv_path, 'tags.csv'), request.user, f.Tag, wte_m.Tag)
names_of_contacts_in_csv = vxrcb.db_recorder_of_csv_of_wallet_tag_etc(
os.path.join(unzipped_csv_path, 'contacts.csv'), request.user, f.Contact, wte_m.Contact)
wallet_objects = wte_m.Wallet.objects.filter(owner=request.user, name__in=names_of_wallets_in_csv)
dict_with_wallets = dict((x.name, x) for x in wallet_objects)
category_objects = wte_m.Category.objects.filter(owner=request.user, name__in=names_of_categories_in_csv)
dict_with_categories = dict((x.name, x) for x in category_objects)
tag_objects = wte_m.Tag.objects.filter(owner=request.user, name__in=names_of_tags_in_csv)
dict_with_tags = dict((x.name, x) for x in tag_objects)
contact_objects = wte_m.Contact.objects.filter(owner=request.user, name__in=names_of_contacts_in_csv)
dict_with_contacts = dict((x.name, x) for x in contact_objects)
vxrcb.db_recorder_of_transaction(
os.path.join(unzipped_csv_path, 'transactions.csv'), import_from_file, request.user,
dict_with_wallets, dict_with_categories, dict_with_tags, dict_with_contacts
)
import_from_file.mark_as_finished()
messages.success(request, success_msg)
return redirect(reverse_lazy('import_from_file:list'))
|
[
"offlittlehooligans@gmail.com"
] |
offlittlehooligans@gmail.com
|
333058665bf2b0a98e358b786ba8fea7c65b07f9
|
4063e90751b8bb35a1139ad9a7b329c2fe9b09da
|
/writing_csv.py
|
c31f00e537dfeed40a0f135b71af6056d6577715
|
[] |
no_license
|
hyde1004/parse_itooza
|
0b22e5799401e4d94b380f19caf6849965e17394
|
7ede949e8a34d28ef32464e10e6c2da16803d83c
|
refs/heads/master
| 2016-09-14T02:34:00.229364
| 2016-05-18T02:45:46
| 2016-05-18T02:45:46
| 58,028,221
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 168
|
py
|
import csv
with open('eggs.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter='\t')
spamwriter.writerows([['apple','banana', 'ccc']])
|
[
"junggu.lee@PTDMF10-NA105C3.LGE.NET"
] |
junggu.lee@PTDMF10-NA105C3.LGE.NET
|
e19ae4499f896b3193692d5ff4a4a2ac652dad7a
|
d57d7eb716f7e2b027c59fee2e3df57d043c1210
|
/004-phonebook-web-application/phonebook-app.py
|
4cb72b92aa9b110ca8f67bf7f92f6c79d89289aa
|
[] |
no_license
|
bruce-scott/clarusway-aws-workshop
|
ff8e004657ee1a77b3dac458a5ca34c6427453fb
|
eb10f4adb20330fd9176397500e048ab0260c2a3
|
refs/heads/master
| 2022-12-22T17:49:51.413744
| 2020-09-25T23:28:43
| 2020-09-25T23:28:43
| 274,543,635
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,949
|
py
|
# Import Flask modules
from flask import Flask, request, render_template
from flaskext.mysql import MySQL
# Create an object named app
app = Flask(__name__)
db_endpoint = open("/home/ec2-user/dbserver.endpoint", 'r', encoding='UTF-8')
# Configure mysql database
app.config['MYSQL_DATABASE_HOST'] = db_endpoint.readline().strip()
app.config['MYSQL_DATABASE_USER'] = 'admin'
app.config['MYSQL_DATABASE_PASSWORD'] = 'Bruce_1'
app.config['MYSQL_DATABASE_DB'] = 'phonebook'
app.config['MYSQL_DATABASE_PORT'] = 3306
db_endpoint.close()
mysql = MySQL()
mysql.init_app(app)
connection = mysql.connect()
connection.autocommit(True)
cursor = connection.cursor()
# Write a function named `init_phonebook_db` which initializes the phonebook db
# Create the phonebook table within the MySQL db and populate it with sample data
# Execute the code below only once.
def init_phonebook_db():
drop_table = 'DROP TABLE IF EXISTS phonebook.phonebook;'
phonebook_table = """
CREATE TABLE phonebook(
id INT NOT NULL AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
number VARCHAR(100) NOT NULL,
PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
"""
data = """
INSERT INTO phonebook.phonebook (name, number)
VALUES
("Callahan", "1234567890"),
("Bruce Scott", "67854"),
("Vincenzo Altobelli", "876543554");
"""
cursor.execute(drop_table)
cursor.execute(phonebook_table)
cursor.execute(data)
# Write a function named `find_persons` which finds persons' records matching the keyword in the phonebook table of the db,
# and returns the result as a list of dictionaries
# `[{'id': 1, 'name':'XXXX', 'number': 'XXXXXX'}]`.
def find_persons(keyword):
query = f"""
SELECT * FROM phonebook WHERE name like '%{keyword.strip().lower()}%';
"""
cursor.execute(query)
result = cursor.fetchall()
persons =[{'id':row[0], 'name':row[1].strip().title(), 'number':row[2]} for row in result]
if len(persons) == 0:
persons = [{'name':'No Result', 'number':'No Result'}]
return persons
# Write a function named `insert_person` which inserts a person into the phonebook table in the db,
# and returns text info about the result of the operation
def insert_person(name, number):
query = f"""
SELECT * FROM phonebook WHERE name like '{name.strip().lower()}';
"""
cursor.execute(query)
row = cursor.fetchone()
if row is not None:
        return f'Person with name {row[1].title()} already exists.'
insert = f"""
INSERT INTO phonebook (name, number)
VALUES ('{name.strip().lower()}', '{number}');
"""
    cursor.execute(insert)
    return f'Person {name.strip().title()} added to Phonebook successfully'
# Write a function named `update_person` which updates the person's record in the phonebook table,
# and returns text info about the result of the operation
def update_person(name, number):
query = f"""
SELECT * FROM phonebook WHERE name like '{name.strip().lower()}';
"""
cursor.execute(query)
row = cursor.fetchone()
if row is None:
return f'Person with name {name.strip().title()} does not exist.'
    # Note: the stored name (row[1]) is kept as-is; only the number changes.
    update = f"""
UPDATE phonebook
SET name='{row[1]}', number = '{number}'
WHERE id= {row[0]};
"""
cursor.execute(update)
return f'Phone record of {name.strip().title()} is updated successfully'
# Write a function named `delete_person` which deletes the person's record from the phonebook table in the db,
# and returns text info about the result of the operation
def delete_person(name):
query = f"""
SELECT * FROM phonebook WHERE name like '{name.strip().lower()}';
"""
cursor.execute(query)
row = cursor.fetchone()
if row is None:
return f'Person with name {name.strip().title()} does not exist, no need to delete.'
delete = f"""
DELETE FROM phonebook
WHERE id= {row[0]};
"""
cursor.execute(delete)
return f'Phone record of {name.strip().title()} is deleted from the phonebook successfully'
# Write a function named `find_records` which finds phone records by keyword using `GET` and `POST` methods,
# using template files named `index.html` given under `templates` folder
# and assign to the static route of ('/')
@app.route('/', methods=['GET', 'POST'])
def find_records():
if request.method == 'POST':
keyword = request.form['username']
persons = find_persons(keyword)
return render_template('index.html', persons=persons, keyword=keyword, show_result=True, developer_name='Bruce')
else:
return render_template('index.html', show_result=False, developer_name='Bruce')
# Write a function named `add_record` which inserts a new record into the database using `GET` and `POST` methods,
# using template files named `add-update.html` given under `templates` folder
# and assign to the static route of ('add')
@app.route('/add', methods=['GET', 'POST'])
def add_record():
if request.method == 'POST':
name = request.form['username']
if name is None or name.strip() == "":
return render_template('add-update.html', not_valid=True, message='Invalid input: Name can not be empty', show_result=False, action_name='save', developer_name='Bruce')
elif name.isdecimal():
return render_template('add-update.html', not_valid=True, message='Invalid input: Name of person should be text', show_result=False, action_name='save', developer_name='Bruce')
phone_number = request.form['phonenumber']
if phone_number is None or phone_number.strip() == "":
return render_template('add-update.html', not_valid=True, message='Invalid input: Phone number can not be empty', show_result=False, action_name='save', developer_name='Bruce')
elif not phone_number.isdecimal():
return render_template('add-update.html', not_valid=True, message='Invalid input: Phone number should be in numeric format', show_result=False, action_name='save', developer_name='Bruce')
result = insert_person(name, phone_number)
return render_template('add-update.html', show_result=True, result=result, not_valid=False, action_name='save', developer_name='Bruce')
else:
return render_template('add-update.html', show_result=False, not_valid=False, action_name='save', developer_name='Bruce')
# Write a function named `update_record` which updates the record in the db using `GET` and `POST` methods,
# using template files named `add-update.html` given under `templates` folder
# and assign to the static route of ('update')
@app.route('/update', methods=['GET', 'POST'])
def update_record():
if request.method == 'POST':
name = request.form['username']
if name is None or name.strip() == "":
return render_template('add-update.html', not_valid=True, message='Invalid input: Name can not be empty', show_result=False, action_name='update', developer_name='Callahan')
phone_number = request.form['phonenumber']
if phone_number is None or phone_number.strip() == "":
return render_template('add-update.html', not_valid=True, message='Invalid input: Phone number can not be empty', show_result=False, action_name='update', developer_name='Callahan')
elif not phone_number.isdecimal():
return render_template('add-update.html', not_valid=True, message='Invalid input: Phone number should be in numeric format', show_result=False, action_name='update', developer_name='Callahan')
result = update_person(name, phone_number)
return render_template('add-update.html', show_result=True, result=result, not_valid=False, action_name='update', developer_name='Bruce')
else:
return render_template('add-update.html', show_result=False, not_valid=False, action_name='update', developer_name='Bruce')
# Write a function named `delete_record` which deletes the record from the db using `GET` and `POST` methods,
# using template files named `delete.html` given under `templates` folder
# and assign to the static route of ('delete')
@app.route('/delete', methods=['GET', 'POST'])
def delete_record():
if request.method == 'POST':
name = request.form['username']
if name is None or name.strip() == "":
return render_template('delete.html', not_valid=True, message='Invalid input: Name can not be empty', show_result=False, developer_name='Bruce')
result = delete_person(name)
return render_template('delete.html', show_result=True, result=result, not_valid=False, developer_name='Bruce')
else:
return render_template('delete.html', show_result=False, not_valid=False, developer_name='Bruce')
# Add a statement to run the Flask application which can be reached from any host on port 80.
if __name__== '__main__':
# init_phonebook_db()
#app.run(debug=True)
app.run(host='0.0.0.0', port=80)
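Note that the queries above interpolate user input with f-strings, which leaves them open to SQL injection. A minimal sketch of the same name lookup using the parameterized form that DB-API cursors support (e.g. PyMySQL, which flaskext.mysql typically wraps), assuming the `cursor` object from above:

def find_person_by_name_safe(name):
    # Hypothetical helper: the driver escapes the value instead of Python formatting it.
    query = "SELECT * FROM phonebook WHERE name LIKE %s;"
    cursor.execute(query, ('%' + name.strip().lower() + '%',))
    return cursor.fetchall()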
|
[
"brucescott236@gmail.com"
] |
brucescott236@gmail.com
|
ad1b3bb809c1dcf82b2d13c5f9741207cdd4eaf3
|
d9df51eb5b8aa6b4b80daa412111ca4ab60a0778
|
/runrandom/batchsub.py
|
cab8f848dbb4f4d0f5a2cae020dad866229de918
|
[] |
no_license
|
liarrn/QC_Tools
|
c4b13dacb3a38e463f25a2fb22c3116a2d5fb621
|
5d03f61442ce73848c3ecc221b465f8961992d47
|
refs/heads/master
| 2021-01-19T21:52:08.062110
| 2017-04-19T08:04:31
| 2017-04-19T08:04:31
| 88,717,458
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,099
|
py
|
# NOTE: legacy Python 2 code; map() is assumed to return a list throughout.
import numpy as np
from itertools import islice
import re
from subprocess import call
import math
import copy
def coor2xyzforce(elems, coor, force, des=''):
xyz = ''
natoms = coor.shape[0]
xyz += '%d\n'%(natoms)
xyz += des.rstrip()
xyz += '\n'
for i in range(natoms):
xyz += '%-5s%15.9f%15.9f%15.9f%15.9f%15.9f%15.9f\n'%(elems[i], coor[i][0], coor[i][1], coor[i][2], force[i][0], force[i][1], force[i][2])
return xyz
def coor2xyz(elems, coor, des=''):
xyz = ''
natoms = coor.shape[0]
xyz += '%d\n'%(natoms)
xyz += des.rstrip()
xyz += '\n'
for i in range(natoms):
xyz += '%-5s%15.9f%15.9f%15.9f\n'%(elems[i], coor[i][0], coor[i][1], coor[i][2])
return xyz
def car2coor(carfile):
carfile = [i.split() for i in carfile[4:-2]]
elem = np.array([i[0] for i in carfile])
coor = np.array([map(float, i[1: 4]) for i in carfile])
return elem, coor
def xyz2coor(xyzfile):
# input xyz string list, each element in the list is a single line in xyz file
# output elem array and coor array with shape natoms*3
natoms = int(xyzfile[0])
xyzfile = xyzfile[2: ]
xyzfile.sort()
xyzfile = [i.split() for i in xyzfile]
elems = [i[0] for i in xyzfile]
coors = [map(float, i[1: ]) for i in xyzfile]
elems = np.array(elems)
coors = np.array(coors)
return elems, coors
def coor2car(elems, coors):
header = '!BIOSYM archive 3\nPBC=OFF\nMaterials Studio Generated CAR File\n'
header += '!DATE Apr 26 20:39:18 2016\n'
end = 'end\nend\n'
blanks = ' '
natoms = coors.shape[0]
cartesian = ''
for i in range(natoms):
cartesian += "%-5s%15.9f%15.9f%15.9f%s%-2s%8.3f\n"%(elems[i], coors[i, 0], coors[i, 1], coors[i, 2], blanks, elems[i], 0.000)
return header+cartesian+end
def xyz2car(xyzfile):
# input xyz string list, each element in the list is a single line in xyz file
# output car file for dmol calculations
elems, coors = xyz2coor(xyzfile)
header = '!BIOSYM archive 3\nPBC=OFF\nMaterials Studio Generated CAR File\n'
header += '!DATE Apr 26 20:39:18 2016\n'
end = 'end\nend\n'
blanks = ' '
natoms = coors.shape[0]
cartesian = ''
for i in range(natoms):
cartesian += "%-5s%15.9f%15.9f%15.9f%s%-2s%8.3f\n"%(elems[i], coors[i, 0], coors[i, 1], coors[i, 2], blanks, elems[i], 0.000)
return header+cartesian+end
def readxyzfile(fp, natoms):
# read xyzfile
# input file pointer, number of atoms
# output: array of xyzfiles
xyzfiles = []
while True:
next_n_lines = list(islice(fp, natoms+2))
if not next_n_lines:
break
xyzfiles.append(next_n_lines)
xyzfiles = np.array(xyzfiles)
return xyzfiles
def writexyzforce(filename, write_type, elems, coor, force, des=''):
with open(filename, write_type) as xyz_out_fp:
xyz = coor2xyzforce(elems, coor, force, des)
xyz_out_fp.write(xyz)
def writexyz(filename, write_type, elems, coor, des=''):
with open(filename, write_type) as xyz_out_fp:
xyz = coor2xyz(elems, coor, des)
xyz_out_fp.write(xyz)
def writeline(filename, write_type, des):
with open(filename, write_type) as fp:
fp.write(des)
def extract_energy_dmol_sp(project_name):
'''
extract final energy from dmol3's static point calculation
return energy in eV
if not success, return -1
'''
ha2ev = 27.211396
fp = open(project_name+'.outmol', 'r')
outmol = fp.read()
fp.close()
    # bail out early if dmol3 did not report a successful run
    if not re.findall("successfully", outmol):
        return -1
outmol = outmol.splitlines()
Ef_lines = []
for line in outmol:
if 'Ef' in line:
Ef_lines.append(line)
energy = float(Ef_lines[-1].split()[1][:-2])
energy *= ha2ev
return energy
def extract_energy_dmol_opt(project_name):
'''
extract final energy from dmol3's geometry optimization calculation
return energy in eV and elems, coor, force
if not success, return -1, -1, -1, -1
'''
bohr2ang = 0.529177208
ha_bohr2ev_ang = 51.42208619083232
ha2ev = 27.211396
with open(project_name+'.outmol', 'r') as fp:
outmol = fp.readlines()
length = len(outmol)
for opt_line in list(reversed(range(length))):
if 'opt==' in outmol[opt_line]:
break
if opt_line == 0:
return -1, -1, -1, -1
energy = float(outmol[opt_line].split()[2])
energy *= ha2ev
for final_line in list(reversed(range(length))):
if 'DERIVATIVES' in outmol[final_line]:
break
final_line_end = final_line
while outmol[final_line_end] != '\n':
final_line_end += 1
final = copy.copy(outmol[final_line+2: final_line_end-1])
final = [line.split()[1: ] for line in final]
elems = np.array([line[0] for line in final])
coor = np.array([map(float, line[1: 4]) for line in final])
coor *= bohr2ang
force = np.array([map(float, line[4: ]) for line in final])
force *= ha_bohr2ev_ang
return energy, elems, coor, force
def extract_energy(calculator, jobtype, project_name):
'''
input calculator, jobtype, filename
calculator = enum('dmol3')
jobtype = enum('sp', 'opt')
if calc_params['calc_type'] is 'sp', then return the converged scf energy or -1 if scf failed
if calc_params['calc_type'] is 'opt', then return energy in eV and elems, coor, force, or -1, -1, -1, -1 if failed
'''
if calculator == 'dmol3' and jobtype == 'sp':
return extract_energy_dmol_sp(project_name)
if calculator == 'dmol3' and jobtype == 'opt':
return extract_energy_dmol_opt(project_name)
def call_dmol3(calc_params, elems, coor):
'''
input calc_params, elems, coor
if calc_params['calc_type'] is 'sp', then return the converged scf energy or -1 if scf failed
if calc_params['calc_type'] is 'opt', then return energy in eV and elems, coor, force, or -1, -1, -1, -1 if failed
'''
project_name = calc_params['project_name']
calculator = 'dmol3'
calc_call = calc_params['calc_call']
calc_type = calc_params['calc_type']
with open(project_name+'.car', 'w') as carfp:
carfp.write(coor2car(elems, coor))
call(calc_call)
return extract_energy(calculator, calc_type, project_name)
def toofar(atom, coor, criteria, natoms):
    # return True if atom is too far away from the cluster
    # return False if the new atom is not too far away
is_toofar = True
for i in range(natoms):
dist = np.sqrt(np.sum((coor[i, :] - atom)**2))
if dist <= criteria:
is_toofar = False
break
return is_toofar
def tooclose(atom, coor, criteria, natoms):
    # return True if atom is too close to the cluster
    # return False if the new atom is not too close
is_tooclose = False
for i in range(natoms):
dist = np.sqrt(np.sum((coor[i, :] - atom)**2))
if dist <= criteria:
is_tooclose = True
break
return is_tooclose
def ball():
# return coor in a uniform ball
coor = np.random.rand(3) * 2 - 1
dist = np.sqrt(np.sum(coor**2))
while dist > 1.0:
coor = np.random.rand(3) * 2 - 1
dist = np.sqrt(np.sum(coor**2))
return coor
def randcluster(natoms, rdmin, rdmax):
# return coor of rand cluster
length = (natoms*5.0)**(1/3.0)
coor = np.zeros([natoms, 3])
coor[0, : ] = ball()*length
is_fail = False
for i in range(1, natoms):
if is_fail == True:
break
is_satisfied = False
iteration = 0
newcoor = ball() * length
while is_satisfied == False:
iteration += 1
if iteration > 10000:
is_fail = True
break
is_satisfied = True
newcoor = ball() * length
if tooclose(newcoor, coor, rdmin, i) or toofar(newcoor, coor, rdmax, i):
is_satisfied = False
coor[i, :] = newcoor
if is_fail == True:
return -1
else:
return coor
if __name__ == '__main__':
# generate cluster from 10 atom to 100 atom and opt them
rdmin = 2.6
rdmax = 3.2
calc_params = {}
calc_params['calculator'] = 'dmol3'
calc_params['calc_type'] = 'opt'
for natoms in range(10, 101):
movecall = 'cp opt.input au%d.input'%natoms
call(movecall.split())
calc_params['project_name'] = 'au%d'%natoms
calc_params['calc_call'] = './RunDMol3.sh -np 24 ' + calc_params['project_name']
calc_params['calc_call'] = calc_params['calc_call'].split()
calc_params['natoms'] = natoms
coor = randcluster(natoms, rdmin, rdmax)
elems = np.array(['Au']*natoms)
energy, elems, coor, force = call_dmol3(calc_params, elems, coor)
writexyzforce('au%d.xyz'%natoms, 'w', elems, coor, force, des='%.9f eV'%energy)
|
[
"liyadong1102@gmail.com"
] |
liyadong1102@gmail.com
|
4f3adc03df3ac996ba76ec5833dd99a6337c3229
|
343fafb2d0e30a7c9c433602bfb1e9db217098db
|
/blog/main/routes.py
|
3e62deadbf9535da787675afb577ac2fc7348284
|
[] |
no_license
|
elmahico7/Python-Flask-App
|
1032ae69b6dc46db55abaf06d197c8c7dc784844
|
aac767868c1b993d5d9d5fd19b586ebd9904cf01
|
refs/heads/master
| 2022-11-13T00:05:51.291116
| 2020-06-23T13:51:09
| 2020-06-23T13:51:09
| 271,318,468
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
from flask import render_template, request, Blueprint
from blog.models import Post
main = Blueprint('main', __name__)
@main.route("/")
@main.route("/home")
def home():
page = request.args.get('page', 1, type=int)
    posts = Post.query.order_by(Post.date_posted.desc()).paginate(page=page, per_page=4)
return render_template('home.html', posts=posts)
@main.route("/about")
def about():
return render_template('about.html', title='About')
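The `request.args.get('page', 1, type=int)` idiom above silently falls back to the default when the query parameter is missing or fails the int conversion. A small self-contained sketch demonstrating that behavior:

from flask import Flask, request

app = Flask(__name__)

with app.test_request_context('/?page=3'):
    assert request.args.get('page', 1, type=int) == 3
with app.test_request_context('/?page=abc'):
    # conversion failure falls back to the default instead of raising
    assert request.args.get('page', 1, type=int) == 1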
|
[
"53228477+elmahico7@users.noreply.github.com"
] |
53228477+elmahico7@users.noreply.github.com
|
5dcc69cd5f217ed5a31c8579e42364e7264c87f7
|
94c8dd4126da6e9fe9acb2d1769e1c24abe195d3
|
/qiskit/transpiler/propertyset.py
|
3932d6497e58a40960957c373dac081afbcb852e
|
[
"Apache-2.0"
] |
permissive
|
levbishop/qiskit-terra
|
a75c2f96586768c12b51a117f9ccb7398b52843d
|
98130dd6158d1f1474e44dd5aeacbc619174ad63
|
refs/heads/master
| 2023-07-19T19:00:53.483204
| 2021-04-20T16:30:16
| 2021-04-20T16:30:16
| 181,052,828
| 1
| 0
|
Apache-2.0
| 2019-06-05T15:32:13
| 2019-04-12T17:20:54
|
Python
|
UTF-8
|
Python
| false
| false
| 724
|
py
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" A property set is maintained by the PassManager to keep information
about the current state of the circuit """
class PropertySet(dict):
""" A default dictionary-like object """
def __missing__(self, key):
return None
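A one-line consequence of the `__missing__` hook above: absent keys read as `None` instead of raising `KeyError`. A minimal self-contained sketch:

class PropertySet(dict):
    def __missing__(self, key):
        return None

ps = PropertySet()
assert ps['not_set'] is None   # no KeyError thanks to __missing__
ps['depth'] = 3
assert ps['depth'] == 3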
|
[
"noreply@github.com"
] |
levbishop.noreply@github.com
|
f4d9862f204cb7a5b07c91b4a8e8902059517d87
|
1b80f08fb2f25c625ced0cf3ba37b342f3ea9898
|
/ltr-gan/ltr-gan-pointwise/eval/ndcg.py
|
1df608ca57e3291e2a6b5a3f6b6d5eeac3cea93a
|
[] |
no_license
|
dpaddon/IRGAN
|
42a45dd574ce70b3b68006a3b9d5d9f24231ad91
|
bce7b543ddc2ffdc9fe323614111394be8383570
|
refs/heads/master
| 2021-09-03T08:44:30.206677
| 2018-01-07T17:53:14
| 2018-01-07T17:53:14
| 111,242,532
| 4
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,234
|
py
|
import numpy as np
def ndcg_at_k(sess, model, query_pos_test, query_pos_train, query_url_feature, k=5):
ndcg = 0.0
cnt = 0
for query in query_pos_test.keys():
pos_set = set(query_pos_test[query])
pred_list = list(set(query_url_feature[query].keys()) - set(query_pos_train.get(query, [])))
if len(pred_list) < k:
continue
pred_list_feature = [query_url_feature[query][url] for url in pred_list]
pred_list_feature = np.asarray(pred_list_feature)
pred_list_score = sess.run(model.pred_score, feed_dict={model.pred_data: pred_list_feature})
pred_url_score = zip(pred_list, pred_list_score)
pred_url_score = sorted(pred_url_score, key=lambda x: x[1], reverse=True)
dcg = 0.0
for i in range(0, k):
(url, score) = pred_url_score[i]
if url in pos_set:
dcg += (1 / np.log2(i + 2))
n = len(pos_set) if len(pos_set) < k else k
idcg = np.sum(np.ones(n) / np.log2(np.arange(2, n + 2)))
ndcg += (dcg / idcg)
cnt += 1
    # assumes cnt > 0, i.e. at least one query had >= k candidate urls
    return ndcg / float(cnt)
def ndcg_at_k_user(sess, model, query_pos_test, query_pos_train, query_url_feature, k=5):
ndcg_list = []
query_test_list = sorted(query_pos_test.keys())
for query in query_test_list:
pos_set = set(query_pos_test[query])
pred_list = list(set(query_url_feature[query].keys()) - set(query_pos_train.get(query, [])))
if len(pred_list) < k:
continue
pred_list_feature = [query_url_feature[query][url] for url in pred_list]
pred_list_feature = np.asarray(pred_list_feature)
pred_list_score = sess.run(model.pred_score, feed_dict={model.pred_data: pred_list_feature})
pred_url_score = zip(pred_list, pred_list_score)
pred_url_score = sorted(pred_url_score, key=lambda x: x[1], reverse=True)
dcg = 0.0
for i in range(0, k):
(url, score) = pred_url_score[i]
if url in pos_set:
dcg += (1 / np.log2(i + 2))
n = len(pos_set) if len(pos_set) < k else k
idcg = np.sum(np.ones(n) / np.log2(np.arange(2, n + 2)))
ndcg_list.append(dcg / idcg)
return ndcg_list
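A small self-contained check of the DCG/IDCG arithmetic used above, with no model or session required: take k=3, relevant items at ranks 1 and 3 (0-based 0 and 2), and two relevant items total:

import numpy as np

hit_ranks = [0, 2]                       # 0-based ranks of the relevant items
dcg = sum(1 / np.log2(i + 2) for i in hit_ranks)          # 1/log2(2) + 1/log2(4) = 1.5
n = 2                                    # min(len(pos_set), k)
idcg = np.sum(np.ones(n) / np.log2(np.arange(2, n + 2)))  # ~1.631
print(dcg / idcg)                        # ~0.92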
|
[
"31183898+dpaddon@users.noreply.github.com"
] |
31183898+dpaddon@users.noreply.github.com
|
9e5db47ace2274632f2b2a5b285166229a46a94e
|
a601f4acfabd112ab05f6ce60a20f8549cf5e023
|
/code/data.py
|
04e3b6b7be6014d9c75c561796294caa857aabc9
|
[
"MIT"
] |
permissive
|
shri-kanth/GAIN
|
b57b0e8d9971968323c6cbebff7be275c71cf6ea
|
5ae151c23116fa553ac77833cf740e759b771f5e
|
refs/heads/main
| 2023-01-27T13:28:50.077356
| 2020-12-08T01:13:16
| 2020-12-08T01:13:16
| 306,441,540
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 30,741
|
py
|
import json
import math
import os
import pickle
import random
from collections import defaultdict
import dgl
import numpy as np
import torch
from torch.utils.data import IterableDataset, DataLoader
from transformers import *
from models.GAIN import Bert
from utils import get_cuda
IGNORE_INDEX = -100
class DGLREDataset(IterableDataset):
def __init__(self, src_file, save_file, word2id, ner2id, rel2id,
dataset_type='train', instance_in_train=None, opt=None):
super(DGLREDataset, self).__init__()
# record training set mention triples
self.instance_in_train = set([]) if instance_in_train is None else instance_in_train
self.data = None
self.document_max_length = 512
self.INTRA_EDGE = 0
self.INTER_EDGE = 1
self.LOOP_EDGE = 2
self.count = 0
print('Reading data from {}.'.format(src_file))
if os.path.exists(save_file):
with open(file=save_file, mode='rb') as fr:
info = pickle.load(fr)
self.data = info['data']
self.instance_in_train = info['intrain_set']
print('load preprocessed data from {}.'.format(save_file))
else:
with open(file=src_file, mode='r', encoding='utf-8') as fr:
ori_data = json.load(fr)
print('loading..')
self.data = []
for i, doc in enumerate(ori_data):
title, entity_list, labels, sentences = \
doc['title'], doc['vertexSet'], doc.get('labels', []), doc['sents']
Ls = [0]
L = 0
for x in sentences:
L += len(x)
Ls.append(L)
for j in range(len(entity_list)):
for k in range(len(entity_list[j])):
sent_id = int(entity_list[j][k]['sent_id'])
entity_list[j][k]['sent_id'] = sent_id
dl = Ls[sent_id]
pos0, pos1 = entity_list[j][k]['pos']
entity_list[j][k]['global_pos'] = (pos0 + dl, pos1 + dl)
# generate positive examples
train_triple = []
new_labels = []
for label in labels:
head, tail, relation, evidence = label['h'], label['t'], label['r'], label['evidence']
assert (relation in rel2id), 'no such relation {} in rel2id'.format(relation)
label['r'] = rel2id[relation]
train_triple.append((head, tail))
label['in_train'] = False
# record training set mention triples and mark it for dev and test set
for n1 in entity_list[head]:
for n2 in entity_list[tail]:
mention_triple = (n1['name'], n2['name'], relation)
if dataset_type == 'train':
self.instance_in_train.add(mention_triple)
else:
if mention_triple in self.instance_in_train:
label['in_train'] = True
break
new_labels.append(label)
# generate negative examples
na_triple = []
for j in range(len(entity_list)):
for k in range(len(entity_list)):
if j != k and (j, k) not in train_triple:
na_triple.append((j, k))
# generate document ids
words = []
for sentence in sentences:
for word in sentence:
words.append(word)
if len(words) > self.document_max_length:
words = words[:self.document_max_length]
word_id = np.zeros((self.document_max_length,), dtype=np.int32)
pos_id = np.zeros((self.document_max_length,), dtype=np.int32)
ner_id = np.zeros((self.document_max_length,), dtype=np.int32)
mention_id = np.zeros((self.document_max_length,), dtype=np.int32)
for iii, w in enumerate(words):
word = word2id.get(w.lower(), word2id['UNK'])
word_id[iii] = word
entity2mention = defaultdict(list)
mention_idx = 1
already_exist = set() # dealing with NER overlapping problem
for idx, vertex in enumerate(entity_list, 1):
for v in vertex:
sent_id, (pos0, pos1), ner_type = v['sent_id'], v['global_pos'], v['type']
if (pos0, pos1) in already_exist:
continue
pos_id[pos0:pos1] = idx
ner_id[pos0:pos1] = ner2id[ner_type]
mention_id[pos0:pos1] = mention_idx
entity2mention[idx].append(mention_idx)
mention_idx += 1
already_exist.add((pos0, pos1))
# construct graph
graph = self.create_graph(Ls, mention_id, pos_id, entity2mention)
# construct entity graph & path
entity_graph, path = self.create_entity_graph(Ls, pos_id, entity2mention)
assert pos_id.max() == len(entity_list)
assert mention_id.max() == graph.number_of_nodes() - 1
overlap = doc.get('overlap_entity_pair', [])
new_overlap = [tuple(item) for item in overlap]
self.data.append({
'title': title,
'entities': entity_list,
'labels': new_labels,
'na_triple': na_triple,
'word_id': word_id,
'pos_id': pos_id,
'ner_id': ner_id,
'mention_id': mention_id,
'entity2mention': entity2mention,
'graph': graph,
'entity_graph': entity_graph,
'path': path,
'overlap': new_overlap
})
# save data
with open(file=save_file, mode='wb') as fw:
pickle.dump({'data': self.data, 'intrain_set': self.instance_in_train}, fw)
print('finish reading {} and save preprocessed data to {}.'.format(src_file, save_file))
if opt.k_fold != "none":
k_fold = opt.k_fold.split(',')
k, total = float(k_fold[0]), float(k_fold[1])
            a = int((k - 1) / total * len(self.data))
            b = int(k / total * len(self.data))
            # hold out fold k of `total`; slice indices must be ints, not floats
            self.data = self.data[:a] + self.data[b:]
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
return self.data[idx]
def __iter__(self):
return iter(self.data)
def create_graph(self, Ls, mention_id, entity_id, entity2mention):
d = defaultdict(list)
# add intra-entity edges
for _, mentions in entity2mention.items():
for i in range(len(mentions)):
for j in range(i + 1, len(mentions)):
d[('node', 'intra', 'node')].append((mentions[i], mentions[j]))
d[('node', 'intra', 'node')].append((mentions[j], mentions[i]))
if d[('node', 'intra', 'node')] == []:
d[('node', 'intra', 'node')].append((entity2mention[1][0], 0))
for i in range(1, len(Ls)):
tmp = dict()
for j in range(Ls[i - 1], Ls[i]):
if mention_id[j] != 0:
tmp[mention_id[j]] = entity_id[j]
mention_entity_info = [(k, v) for k, v in tmp.items()]
            # add self-loop & to-global-node edges
for m in range(len(mention_entity_info)):
# self-loop
# d[('node', 'loop', 'node')].append((mention_entity_info[m][0], mention_entity_info[m][0]))
# to global node
d[('node', 'global', 'node')].append((mention_entity_info[m][0], 0))
d[('node', 'global', 'node')].append((0, mention_entity_info[m][0]))
# add inter edges
for m in range(len(mention_entity_info)):
for n in range(m + 1, len(mention_entity_info)):
if mention_entity_info[m][1] != mention_entity_info[n][1]:
# inter edge
d[('node', 'inter', 'node')].append((mention_entity_info[m][0], mention_entity_info[n][0]))
d[('node', 'inter', 'node')].append((mention_entity_info[n][0], mention_entity_info[m][0]))
# add self-loop for global node
# d[('node', 'loop', 'node')].append((0, 0))
if d[('node', 'inter', 'node')] == []:
d[('node', 'inter', 'node')].append((entity2mention[1][0], 0))
graph = dgl.heterograph(d)
return graph
def create_entity_graph(self, Ls, entity_id, entity2mention):
graph = dgl.DGLGraph()
graph.add_nodes(entity_id.max())
d = defaultdict(set)
for i in range(1, len(Ls)):
tmp = set()
for j in range(Ls[i - 1], Ls[i]):
if entity_id[j] != 0:
tmp.add(entity_id[j])
tmp = list(tmp)
for ii in range(len(tmp)):
for jj in range(ii + 1, len(tmp)):
d[tmp[ii] - 1].add(tmp[jj] - 1)
d[tmp[jj] - 1].add(tmp[ii] - 1)
a = []
b = []
for k, v in d.items():
for vv in v:
a.append(k)
b.append(vv)
graph.add_edges(a, b)
path = dict()
for i in range(0, graph.number_of_nodes()):
for j in range(i + 1, graph.number_of_nodes()):
a = set(graph.successors(i).numpy())
b = set(graph.successors(j).numpy())
c = [val + 1 for val in list(a & b)]
path[(i + 1, j + 1)] = c
return graph, path
class BERTDGLREDataset(IterableDataset):
def __init__(self, src_file, save_file, word2id, ner2id, rel2id,
dataset_type='train', instance_in_train=None, opt=None):
super(BERTDGLREDataset, self).__init__()
# record training set mention triples
self.instance_in_train = set([]) if instance_in_train is None else instance_in_train
self.data = None
self.document_max_length = 512
        self.INTRA_EDGE = 0
self.INTER_EDGE = 1
self.LOOP_EDGE = 2
self.count = 0
print('Reading data from {}.'.format(src_file))
if os.path.exists(save_file):
with open(file=save_file, mode='rb') as fr:
info = pickle.load(fr)
self.data = info['data']
self.instance_in_train = info['intrain_set']
print('load preprocessed data from {}.'.format(save_file))
else:
bert = Bert(BertModel, 'bert-base-uncased', opt.bert_path)
with open(file=src_file, mode='r', encoding='utf-8') as fr:
ori_data = json.load(fr)
print('loading..')
self.data = []
for i, doc in enumerate(ori_data):
title, entity_list, labels, sentences = \
doc['title'], doc['vertexSet'], doc.get('labels', []), doc['sents']
Ls = [0]
L = 0
for x in sentences:
L += len(x)
Ls.append(L)
for j in range(len(entity_list)):
for k in range(len(entity_list[j])):
sent_id = int(entity_list[j][k]['sent_id'])
entity_list[j][k]['sent_id'] = sent_id
dl = Ls[sent_id]
pos0, pos1 = entity_list[j][k]['pos']
entity_list[j][k]['global_pos'] = (pos0 + dl, pos1 + dl)
# generate positive examples
train_triple = []
new_labels = []
for label in labels:
head, tail, relation, evidence = label['h'], label['t'], label['r'], label['evidence']
assert (relation in rel2id), 'no such relation {} in rel2id'.format(relation)
label['r'] = rel2id[relation]
train_triple.append((head, tail))
label['in_train'] = False
# record training set mention triples and mark it for dev and test set
for n1 in entity_list[head]:
for n2 in entity_list[tail]:
mention_triple = (n1['name'], n2['name'], relation)
if dataset_type == 'train':
self.instance_in_train.add(mention_triple)
else:
if mention_triple in self.instance_in_train:
label['in_train'] = True
break
new_labels.append(label)
# generate negative examples
na_triple = []
for j in range(len(entity_list)):
for k in range(len(entity_list)):
if j != k and (j, k) not in train_triple:
na_triple.append((j, k))
# generate document ids
words = []
for sentence in sentences:
for word in sentence:
words.append(word)
bert_token, bert_starts, bert_subwords = bert.subword_tokenize_to_ids(words)
word_id = np.zeros((self.document_max_length,), dtype=np.int32)
pos_id = np.zeros((self.document_max_length,), dtype=np.int32)
ner_id = np.zeros((self.document_max_length,), dtype=np.int32)
mention_id = np.zeros((self.document_max_length,), dtype=np.int32)
word_id[:] = bert_token[0]
entity2mention = defaultdict(list)
mention_idx = 1
already_exist = set()
for idx, vertex in enumerate(entity_list, 1):
for v in vertex:
sent_id, (pos0, pos1), ner_type = v['sent_id'], v['global_pos'], v['type']
pos0 = bert_starts[pos0]
pos1 = bert_starts[pos1] if pos1 < len(bert_starts) else 1024
if (pos0, pos1) in already_exist:
continue
if pos0 >= len(pos_id):
continue
pos_id[pos0:pos1] = idx
ner_id[pos0:pos1] = ner2id[ner_type]
mention_id[pos0:pos1] = mention_idx
entity2mention[idx].append(mention_idx)
mention_idx += 1
already_exist.add((pos0, pos1))
                # if the last entity lost all its mentions to truncation, claim the
                # first unused token position for it so the graph stays well-formed
                replace_i = 0
idx = len(entity_list)
if entity2mention[idx] == []:
entity2mention[idx].append(mention_idx)
while mention_id[replace_i] != 0:
replace_i += 1
mention_id[replace_i] = mention_idx
pos_id[replace_i] = idx
ner_id[replace_i] = ner2id[vertex[0]['type']]
mention_idx += 1
new_Ls = [0]
for ii in range(1, len(Ls)):
new_Ls.append(bert_starts[Ls[ii]] if Ls[ii] < len(bert_starts) else len(bert_subwords))
Ls = new_Ls
# construct graph
graph = self.create_graph(Ls, mention_id, pos_id, entity2mention)
# construct entity graph & path
entity_graph, path = self.create_entity_graph(Ls, pos_id, entity2mention)
assert pos_id.max() == len(entity_list)
assert mention_id.max() == graph.number_of_nodes() - 1
overlap = doc.get('overlap_entity_pair', [])
new_overlap = [tuple(item) for item in overlap]
self.data.append({
'title': title,
'entities': entity_list,
'labels': new_labels,
'na_triple': na_triple,
'word_id': word_id,
'pos_id': pos_id,
'ner_id': ner_id,
'mention_id': mention_id,
'entity2mention': entity2mention,
'graph': graph,
'entity_graph': entity_graph,
'path': path,
'overlap': new_overlap
})
# save data
with open(file=save_file, mode='wb') as fw:
pickle.dump({'data': self.data, 'intrain_set': self.instance_in_train}, fw)
print('finish reading {} and save preprocessed data to {}.'.format(src_file, save_file))
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
return self.data[idx]
def __iter__(self):
return iter(self.data)
def create_graph(self, Ls, mention_id, entity_id, entity2mention):
d = defaultdict(list)
# add intra edges
for _, mentions in entity2mention.items():
for i in range(len(mentions)):
for j in range(i + 1, len(mentions)):
d[('node', 'intra', 'node')].append((mentions[i], mentions[j]))
d[('node', 'intra', 'node')].append((mentions[j], mentions[i]))
if d[('node', 'intra', 'node')] == []:
d[('node', 'intra', 'node')].append((entity2mention[1][0], 0))
for i in range(1, len(Ls)):
tmp = dict()
for j in range(Ls[i - 1], Ls[i]):
if mention_id[j] != 0:
tmp[mention_id[j]] = entity_id[j]
mention_entity_info = [(k, v) for k, v in tmp.items()]
            # add self-loop & to-global-node edges
for m in range(len(mention_entity_info)):
# self-loop
# d[('node', 'loop', 'node')].append((mention_entity_info[m][0], mention_entity_info[m][0]))
# to global node
d[('node', 'global', 'node')].append((mention_entity_info[m][0], 0))
d[('node', 'global', 'node')].append((0, mention_entity_info[m][0]))
# add inter edges
for m in range(len(mention_entity_info)):
for n in range(m + 1, len(mention_entity_info)):
if mention_entity_info[m][1] != mention_entity_info[n][1]:
# inter edge
d[('node', 'inter', 'node')].append((mention_entity_info[m][0], mention_entity_info[n][0]))
d[('node', 'inter', 'node')].append((mention_entity_info[n][0], mention_entity_info[m][0]))
# add self-loop for global node
# d[('node', 'loop', 'node')].append((0, 0))
if d[('node', 'inter', 'node')] == []:
d[('node', 'inter', 'node')].append((entity2mention[1][0], 0))
graph = dgl.heterograph(d)
return graph
def create_entity_graph(self, Ls, entity_id, entity2mention):
graph = dgl.DGLGraph()
graph.add_nodes(entity_id.max())
d = defaultdict(set)
for i in range(1, len(Ls)):
tmp = set()
for j in range(Ls[i - 1], Ls[i]):
if entity_id[j] != 0:
tmp.add(entity_id[j])
tmp = list(tmp)
for ii in range(len(tmp)):
for jj in range(ii + 1, len(tmp)):
d[tmp[ii] - 1].add(tmp[jj] - 1)
d[tmp[jj] - 1].add(tmp[ii] - 1)
a = []
b = []
for k, v in d.items():
for vv in v:
a.append(k)
b.append(vv)
graph.add_edges(a, b)
path = dict()
for i in range(0, graph.number_of_nodes()):
for j in range(i + 1, graph.number_of_nodes()):
a = set(graph.successors(i).numpy())
b = set(graph.successors(j).numpy())
c = [val + 1 for val in list(a & b)]
path[(i + 1, j + 1)] = c
return graph, path
class DGLREDataloader(DataLoader):
def __init__(self, dataset, batch_size, shuffle=False, h_t_limit_per_batch=300, h_t_limit=1722, relation_num=97,
max_length=512, negativa_alpha=0.0, dataset_type='train'):
super(DGLREDataloader, self).__init__(dataset, batch_size=batch_size)
self.shuffle = shuffle
self.length = len(self.dataset)
self.max_length = max_length
self.negativa_alpha = negativa_alpha
self.dataset_type = dataset_type
self.h_t_limit_per_batch = h_t_limit_per_batch
self.h_t_limit = h_t_limit
self.relation_num = relation_num
self.dis2idx = np.zeros((512), dtype='int64')
self.dis2idx[1] = 1
self.dis2idx[2:] = 2
self.dis2idx[4:] = 3
self.dis2idx[8:] = 4
self.dis2idx[16:] = 5
self.dis2idx[32:] = 6
self.dis2idx[64:] = 7
self.dis2idx[128:] = 8
self.dis2idx[256:] = 9
self.dis_size = 20
self.order = list(range(self.length))
def __iter__(self):
# shuffle
if self.shuffle:
random.shuffle(self.order)
self.data = [self.dataset[idx] for idx in self.order]
else:
self.data = self.dataset
batch_num = math.ceil(self.length / self.batch_size)
self.batches = [self.data[idx * self.batch_size: min(self.length, (idx + 1) * self.batch_size)]
for idx in range(0, batch_num)]
self.batches_order = [self.order[idx * self.batch_size: min(self.length, (idx + 1) * self.batch_size)]
for idx in range(0, batch_num)]
# begin
context_word_ids = torch.LongTensor(self.batch_size, self.max_length).cpu()
context_pos_ids = torch.LongTensor(self.batch_size, self.max_length).cpu()
context_ner_ids = torch.LongTensor(self.batch_size, self.max_length).cpu()
context_mention_ids = torch.LongTensor(self.batch_size, self.max_length).cpu()
context_word_mask = torch.LongTensor(self.batch_size, self.max_length).cpu()
context_word_length = torch.LongTensor(self.batch_size).cpu()
ht_pairs = torch.LongTensor(self.batch_size, self.h_t_limit, 2).cpu()
relation_multi_label = torch.Tensor(self.batch_size, self.h_t_limit, self.relation_num).cpu()
relation_mask = torch.Tensor(self.batch_size, self.h_t_limit).cpu()
relation_label = torch.LongTensor(self.batch_size, self.h_t_limit).cpu()
ht_pair_distance = torch.LongTensor(self.batch_size, self.h_t_limit).cpu()
for idx, minibatch in enumerate(self.batches):
cur_bsz = len(minibatch)
for mapping in [context_word_ids, context_pos_ids, context_ner_ids, context_mention_ids,
context_word_mask, context_word_length,
ht_pairs, ht_pair_distance, relation_multi_label, relation_mask, relation_label]:
if mapping is not None:
mapping.zero_()
relation_label.fill_(IGNORE_INDEX)
max_h_t_cnt = 0
label_list = []
L_vertex = []
titles = []
indexes = []
graph_list = []
entity_graph_list = []
entity2mention_table = []
path_table = []
overlaps = []
for i, example in enumerate(minibatch):
title, entities, labels, na_triple, word_id, pos_id, ner_id, mention_id, entity2mention, graph, entity_graph, path = \
example['title'], example['entities'], example['labels'], example['na_triple'], \
example['word_id'], example['pos_id'], example['ner_id'], example['mention_id'], example[
'entity2mention'], example['graph'], example['entity_graph'], example['path']
graph_list.append(graph)
entity_graph_list.append(entity_graph)
path_table.append(path)
overlaps.append(example['overlap'])
entity2mention_t = get_cuda(torch.zeros((pos_id.max() + 1, mention_id.max() + 1)))
for e, ms in entity2mention.items():
for m in ms:
entity2mention_t[e, m] = 1
entity2mention_table.append(entity2mention_t)
L = len(entities)
word_num = word_id.shape[0]
context_word_ids[i, :word_num].copy_(torch.from_numpy(word_id))
context_pos_ids[i, :word_num].copy_(torch.from_numpy(pos_id))
context_ner_ids[i, :word_num].copy_(torch.from_numpy(ner_id))
context_mention_ids[i, :word_num].copy_(torch.from_numpy(mention_id))
idx2label = defaultdict(list)
label_set = {}
for label in labels:
head, tail, relation, intrain, evidence = \
label['h'], label['t'], label['r'], label['in_train'], label['evidence']
idx2label[(head, tail)].append(relation)
label_set[(head, tail, relation)] = intrain
label_list.append(label_set)
if self.dataset_type == 'train':
train_tripe = list(idx2label.keys())
for j, (h_idx, t_idx) in enumerate(train_tripe):
hlist, tlist = entities[h_idx], entities[t_idx]
ht_pairs[i, j, :] = torch.Tensor([h_idx + 1, t_idx + 1])
label = idx2label[(h_idx, t_idx)]
delta_dis = hlist[0]['global_pos'][0] - tlist[0]['global_pos'][0]
if delta_dis < 0:
ht_pair_distance[i, j] = -int(self.dis2idx[-delta_dis]) + self.dis_size // 2
else:
ht_pair_distance[i, j] = int(self.dis2idx[delta_dis]) + self.dis_size // 2
for r in label:
relation_multi_label[i, j, r] = 1
relation_mask[i, j] = 1
rt = np.random.randint(len(label))
relation_label[i, j] = label[rt]
lower_bound = len(na_triple)
if self.negativa_alpha > 0.0:
random.shuffle(na_triple)
lower_bound = int(max(20, len(train_tripe) * self.negativa_alpha))
for j, (h_idx, t_idx) in enumerate(na_triple[:lower_bound], len(train_tripe)):
hlist, tlist = entities[h_idx], entities[t_idx]
ht_pairs[i, j, :] = torch.Tensor([h_idx + 1, t_idx + 1])
delta_dis = hlist[0]['global_pos'][0] - tlist[0]['global_pos'][0]
if delta_dis < 0:
ht_pair_distance[i, j] = -int(self.dis2idx[-delta_dis]) + self.dis_size // 2
else:
ht_pair_distance[i, j] = int(self.dis2idx[delta_dis]) + self.dis_size // 2
relation_multi_label[i, j, 0] = 1
relation_label[i, j] = 0
relation_mask[i, j] = 1
max_h_t_cnt = max(max_h_t_cnt, len(train_tripe) + lower_bound)
else:
j = 0
for h_idx in range(L):
for t_idx in range(L):
if h_idx != t_idx:
hlist, tlist = entities[h_idx], entities[t_idx]
ht_pairs[i, j, :] = torch.Tensor([h_idx + 1, t_idx + 1])
relation_mask[i, j] = 1
delta_dis = hlist[0]['global_pos'][0] - tlist[0]['global_pos'][0]
if delta_dis < 0:
ht_pair_distance[i, j] = -int(self.dis2idx[-delta_dis]) + self.dis_size // 2
else:
ht_pair_distance[i, j] = int(self.dis2idx[delta_dis]) + self.dis_size // 2
j += 1
max_h_t_cnt = max(max_h_t_cnt, j)
L_vertex.append(L)
titles.append(title)
indexes.append(self.batches_order[idx][i])
context_word_mask = context_word_ids > 0
context_word_length = context_word_mask.sum(1)
batch_max_length = context_word_length.max()
yield {'context_idxs': get_cuda(context_word_ids[:cur_bsz, :batch_max_length].contiguous()),
'context_pos': get_cuda(context_pos_ids[:cur_bsz, :batch_max_length].contiguous()),
'context_ner': get_cuda(context_ner_ids[:cur_bsz, :batch_max_length].contiguous()),
'context_mention': get_cuda(context_mention_ids[:cur_bsz, :batch_max_length].contiguous()),
'context_word_mask': get_cuda(context_word_mask[:cur_bsz, :batch_max_length].contiguous()),
'context_word_length': get_cuda(context_word_length[:cur_bsz].contiguous()),
'h_t_pairs': get_cuda(ht_pairs[:cur_bsz, :max_h_t_cnt, :2]),
'relation_label': get_cuda(relation_label[:cur_bsz, :max_h_t_cnt]).contiguous(),
'relation_multi_label': get_cuda(relation_multi_label[:cur_bsz, :max_h_t_cnt]),
'relation_mask': get_cuda(relation_mask[:cur_bsz, :max_h_t_cnt]),
'ht_pair_distance': get_cuda(ht_pair_distance[:cur_bsz, :max_h_t_cnt]),
'labels': label_list,
'L_vertex': L_vertex,
'titles': titles,
'indexes': indexes,
'graphs': graph_list,
'entity2mention_table': entity2mention_table,
'entity_graphs': entity_graph_list,
'path_table': path_table,
'overlaps': overlaps
}
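The `dis2idx` table initialized in the constructor implements logarithmic distance bucketing: a head-tail token distance d maps to floor(log2(d)) + 1, so the largest bucket (9) covers 256-511. A minimal sketch reproducing just that lookup:

import numpy as np

dis2idx = np.zeros(512, dtype='int64')
dis2idx[1] = 1
for p in range(1, 9):            # thresholds 2, 4, 8, ..., 256
    dis2idx[2 ** p:] = p + 1

for d in (1, 3, 10, 300):
    # equivalent closed form for 1 <= d < 512
    assert dis2idx[d] == int(np.floor(np.log2(d))) + 1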
|
[
"631710518@qq.com"
] |
631710518@qq.com
|
324c11463f9e8f7ce76e5cec1d386bb0346edc81
|
81f013aabcf585fd16ea76a0ac7d037dd2006745
|
/djangoproyect/polls/views.py
|
bc4967626d77525fc6e4016d4c2722a5f3b0130f
|
[
"BSD-2-Clause"
] |
permissive
|
Khanos/SoftCode
|
e417092f140f978a140195e117516f292f7d3742
|
b7ea21ac6acaf54c49379c14281f3992ddf9316d
|
refs/heads/master
| 2021-01-16T19:02:58.507240
| 2015-02-11T20:55:48
| 2015-02-11T20:55:48
| 30,253,078
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 196
|
py
|
# from django.shortcuts import render
#
# # Create your views here.
from django.http import HttpResponse
def index(request):
return HttpResponse("Hello, world. You're at the polls index.")
|
[
"KhanosTech@gmail.com"
] |
KhanosTech@gmail.com
|
8fd4870d1e4f43fde69fefd695806b933d476116
|
b652934b616631053844e76adfb036d2bc74f9d3
|
/env/lib/python3.7/genericpath.py
|
4559668c144e91d0f549ad238af863de599a3695
|
[] |
no_license
|
majmoud1/saHeroku
|
fc49ee554c4c7d580e85cd7ceab827c33c1bc178
|
cb76a6116c60515d7946bb0ca6d7d7fc87ed33b9
|
refs/heads/master
| 2021-06-23T20:30:18.161533
| 2019-07-26T11:48:56
| 2019-07-26T11:48:56
| 199,005,727
| 0
| 0
| null | 2021-03-20T01:24:33
| 2019-07-26T11:33:14
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 49
|
py
|
/home/diop/anaconda3/lib/python3.7/genericpath.py
|
[
"thebest16295@gmail.com"
] |
thebest16295@gmail.com
|
e7a365fc1d2c464c860a69ded83309e68287fdb9
|
dd51b54eae1d60e2c97b7f0e0b5b35b6c7c5e573
|
/matriz.py
|
c20ae7fd1e12a0a6e0817674b9f9b4775d309ff1
|
[] |
no_license
|
leob009/projetospython
|
eb82a83f9b7d4fd4a7238f02d386fba58c95a23c
|
06f14e6dda4dffda179ff5d977d7a5601c063dd9
|
refs/heads/master
| 2023-08-28T06:47:06.698414
| 2021-09-29T19:02:24
| 2021-09-29T23:42:00
| 409,776,145
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 600
|
py
|
from random import randint
def cria_matriz(num_l, num_col):
mat = []
l = []
while len(mat) != num_l:
l.append(randint(0, 99))
        if len(l) == num_col:  # was num_l: row length should match the column count
mat.append(l)
l = []
return mat
num_lin = 5
num_col = 5
mat = cria_matriz(num_lin, num_col)
print('* '*11)
for lin in range(0,num_lin):
for col in range(0,num_col):
print(f'{mat[lin][col]:02}',end=' ')
print('*')
print('* '*11)
# while True:
# print(mat[lin][col],end=' ')
# col +=1
# if col == 10:
# print('\n')
# col = 0
# lin +=1
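For reference, an equivalent sketch of `cria_matriz` written with nested comprehensions (a hypothetical alternative to the while-loop version, same random fill):

from random import randint

def cria_matriz(num_l, num_col):
    return [[randint(0, 99) for _ in range(num_col)] for _ in range(num_l)]

print(cria_matriz(5, 5))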
|
[
"leobc009@gmail.com"
] |
leobc009@gmail.com
|
6d13e94941952220209672f122ad70bf317b32c7
|
a75c3fc060cd3e1d8e48e5f32a6e7e6dd58fb1ed
|
/challenges/re/vault1/chal.py
|
b5986c6eaae1bb7792d83be997b5871c0b177708
|
[] |
no_license
|
cyber-castors/castorsCTF20
|
7bb7f7ae3161c4e60297ab2cfd0e356ead4d8f13
|
e88a0b24562cf9ec1ddd51e4ede378469f42271c
|
refs/heads/master
| 2022-09-18T13:45:11.051813
| 2020-06-03T23:18:19
| 2020-06-03T23:18:19
| 268,179,913
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 566
|
py
|
import base64
def xor(s1,s2):
return ''.join(chr(ord(a) ^ ord(b)) for a,b in zip(s1,s2))
def checkpass():
_input = input("Enter the password: ")
key = "promortyusvatofacid"
encoded = str.encode(xor(key, _input))
result = base64.b64encode(encoded, altchars=None)
if result == b'ExMcGQAABzohNQ0TRQwtPidYAQ==':
return True
else:
return False
def main():
global access
access = checkpass()
if access:
print("Yeah...okay. You got it!")
else:
print("Lol...try again...")
access = False
main()
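Since XOR is its own inverse, the expected password can be recovered offline by base64-decoding the target and XOR-ing it back against the known key; a short solver sketch:

import base64

target = base64.b64decode(b'ExMcGQAABzohNQ0TRQwtPidYAQ==')
key = "promortyusvatofacid"
password = ''.join(chr(c ^ ord(k)) for c, k in zip(target, key))
print(password)  # feeding this string to checkpass() returns True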
|
[
"admin@cybercastors.com"
] |
admin@cybercastors.com
|
1c6aab212a160a1e5b8c7357ffc472e1a02d3394
|
907147fa621b090c8b4f7a1dbd4debfdb2167a3f
|
/Triángulos .py
|
e84ce7386e9a2d1b6be1770d3ef6909cbefe19e3
|
[] |
no_license
|
A01745969/Mision_04
|
ad89be759aafaf429cd58a92ba488a0edb8812e7
|
ee37bf48586a9c7858d9b6d5202e7f43442d7efa
|
refs/heads/master
| 2022-04-20T20:03:04.539464
| 2020-04-17T05:47:11
| 2020-04-17T05:47:11
| 255,160,055
| 0
| 0
| null | 2020-04-12T20:00:13
| 2020-04-12T20:00:13
| null |
UTF-8
|
Python
| false
| false
| 1,014
|
py
|
# Paulina Mendoza Iglesias
# The program indicates the type of triangle according to its side lengths
def main():
ladoA = int(input("Teclea el valor 1: "))
ladoB = int(input("Teclea el valor 2: "))
ladoC = int(input("Teclea el valor 3: "))
if ladoA == ladoB and ladoB == ladoC:
print ("El triángulo es equilátero")
elif ladoA == ladoB and ladoB != ladoC:
print ("El triángulo es isósceles")
elif ladoA != ladoB and ladoB == ladoC:
print ("El triángulo es isósceles")
elif ladoA == ladoC and ladoB != ladoC:
print("El trángulo es isósceles")
else:
print ("Otro")
if (ladoA + ladoB) < ladoC:
print ("No existe el triángulo")
elif (ladoB +ladoC) < ladoA:
print ("No existe el triángulo")
elif (ladoA + ladoC) < ladoB:
print ("No existe el triángulo")
# Call to main()
main()
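A compact alternative sketch (hypothetical, not part of the original exercise) that sorts the sides first so each check needs only one comparison; the output strings match the original program:

def triangle_type(a, b, c):
    x, y, z = sorted((a, b, c))
    if x + y < z:
        return "No existe el triángulo"
    if x == y == z:
        return "El triángulo es equilátero"
    if x == y or y == z:
        return "El triángulo es isósceles"
    return "Otro"

print(triangle_type(3, 4, 5))  # Otro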
|
[
"noreply@github.com"
] |
A01745969.noreply@github.com
|
faec3958eb37fe965876c4475d19f4c1fe393695
|
360c35fbb399ae6433e2eb38651e399d83c88bac
|
/src/fetchFunction/video_service/fields/auth.py
|
337a2c5e0e836bca13fc17876f9c8b12e285b017
|
[] |
no_license
|
tbnoy/ownvl
|
738634a9015bcef5e13bf0ce3f416ca8669be992
|
bc7136e6f8bc072757d19e41447ef5f634b12aa1
|
refs/heads/master
| 2021-09-03T21:47:04.444508
| 2018-01-12T08:08:51
| 2018-01-12T08:08:51
| 116,219,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,014
|
py
|
class Auth:
def __init__(self, profileApi):
self.profileApi = profileApi
def fetch(self, baseData, raysData, params, language):
returnArr = {}
linkId = baseData['media$content'][0]['plfile$releases'][0]['plrelease$pid']
data = {
'videoId': params['videoGuid'],
'profileId': params['profileGuid'],
'linkId': linkId,
'platform': params['platform'],
'language': language
}
res = self.profileApi.getVideoAuth(data)
if 'errorCode' in res:
returnArr['code'] = res['errorCode']
returnArr['status'] = "error"
returnArr['errorMessage'] = res.get('userMessage', "")
else:
returnArr['signature'] = res.get('videoAuthSignature', "")
returnArr['requireWaiver'] = res.get('requirewaiver', False)
returnArr['waiverSlug'] = res.get('waiverSlug', "tbird-waiver")
return returnArr
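A minimal sketch exercising `Auth.fetch` with a hypothetical stub in place of the real profile API (the real `profileApi` presumably performs an HTTP call):

class _StubProfileApi:
    # Hypothetical stub returning a successful auth response.
    def getVideoAuth(self, data):
        return {'videoAuthSignature': 'sig123'}

auth = Auth(_StubProfileApi())
baseData = {'media$content': [{'plfile$releases': [{'plrelease$pid': 'abc'}]}]}
params = {'videoGuid': 'v1', 'profileGuid': 'p1', 'platform': 'web'}
print(auth.fetch(baseData, raysData=None, params=params, language='en'))
# -> {'signature': 'sig123', 'requireWaiver': False, 'waiverSlug': 'tbird-waiver'}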
|
[
"tzookb@gmail.com"
] |
tzookb@gmail.com
|
071b9018d7b5d3a3d7c9a5b3589f44d0281d703d
|
2cb2bc953975540de8dfe3aee256fb3daa852bfb
|
/thisweek_masuipeo/tyama_codeiq1004.py
|
4f5716abb362e9613f9926ca620533ad8a0c0db1
|
[] |
no_license
|
cielavenir/codeiq_solutions
|
db0c2001f9a837716aee1effbd92071e4033d7e0
|
750a22c937db0a5d94bfa5b6ee5ae7f1a2c06d57
|
refs/heads/master
| 2023-04-27T14:20:09.251817
| 2023-04-17T03:22:57
| 2023-04-17T03:22:57
| 19,687,315
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,531
|
py
|
#Used this to solve checkio "Lantern Festival"
D=( #Right,Up,Left,Down
(0,1),
(-1,0),
(0,-1),
(1,0),
)
class Maze:
def __init__(self,x,y,gx,gy,d,v):
self.x=x
self.y=y
self.gx=gx
self.gy=gy
self.d=d
self.v=v
def ok(self):
return self.gx==self.x and self.gy==self.y
def same(self,other):
return self.x==other.x and self.y==other.y
def move(self):
d_orig=(self.d+3)%4
for i in range(4):
self.d=(d_orig+i)%4
if self.v[self.y+D[self.d][0]][self.x+D[self.d][1]]!='#': break
self.y=self.y+D[self.d][0]
self.x=self.x+D[self.d][1]
#printf("%d %d %d\n"%(x,y,d))
def solvable(_x,_y,_gx,_gy,v):
if v[_y][_x]=='#' or v[_gy][_gx]=='#': return False
q=[]
back={}
q.append((_x,_y))
back[(_x,_y)]=(_x,_y)
while q:
x,y=q[0]
q.pop(0)
for d in range(4):
if v[y+D[d][0]][x+D[d][1]]!='#':
_p=(x+D[d][1],y+D[d][0])
if _p==(_gx,_gy): return True
if _p not in back:
back[_p]=(x,y)
q.append(_p)
return False
if __name__=='__main__':
N=4
power=N*N
v=[]
v.append(['#']*(N+2))
for i in range(N):
v.append(['#']+[' ']*N+['#'])
v.append(['#']*(N+2))
#v=[
# list("######"),
# list("# #"),
# list("## #"),
# list("# # #"),
# list("# #"),
# list("######"),
#]
r=0
for i in range(1<<power):
for j in range(power):
            # floor division (j/N in the original relied on Python 2 integer division)
            v[j//N+1][j%N+1]='#' if (i&(1<<j)) else ' '
if not solvable(1,1,N,N,v): continue
m1=Maze(1,1,N,N,3,v)
m2=Maze(N,N,1,1,1,v)
while not m1.ok() and not m2.ok():
m1.move()
m2.move()
if m1.same(m2):
r+=1
break
print(r)
|
[
"cielartisan@gmail.com"
] |
cielartisan@gmail.com
|
ac907214a6b17a3e62c84672c03479b87ba1abec
|
41d4ae4913ac94f5e21ce45f8c07f631faffd0ba
|
/phonenumbers/data/region_AF.py
|
d72f4ebcb3ebd230b5c50f7bdb29871fa662aaf4
|
[
"MIT"
] |
permissive
|
ayushgoel/FixGoogleContacts
|
faabbc475b9e992caca8aed23f253cee7ae120bf
|
e49e58db6718bef8f95b6f767241605441c7fe41
|
refs/heads/master
| 2021-01-20T06:25:26.550106
| 2013-09-07T07:59:41
| 2013-09-07T07:59:41
| 12,215,068
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,966
|
py
|
"""Auto-generated file, do not edit by hand. AF metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_AF = PhoneMetadata(id='AF', country_code=93, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[2-7]\\d{8}', possible_number_pattern='\\d{7,9}'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:[25][0-8]|[34][0-4]|6[0-5])[2-9]\\d{6}', possible_number_pattern='\\d{7,9}', example_number='234567890'),
mobile=PhoneNumberDesc(national_number_pattern='7[057-9]\\d{7}', possible_number_pattern='\\d{9}', example_number='701234567'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='1(?:02|19)', possible_number_pattern='\\d{3}', example_number='119'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_code=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='([2-7]\\d)(\\d{3})(\\d{4})', format='\\1 \\2 \\3', national_prefix_formatting_rule='0\\1')])
|
[
"ayush.g@directi.com"
] |
ayush.g@directi.com
|
cce5fa446680822ce8935dae90fab36f22c29f29
|
4c59278e23b4e2620288b3216973e1f7637be2cd
|
/0x11-python-network_1/4-hbtn_status.py
|
ad05aab9c086665c99f05b6aa69b4fa7a977bd8e
|
[] |
no_license
|
Hassan8521/alx-higher_level_programming-1
|
f469c77da954f4661eab86b531a379489563046a
|
ca10e1c36c8fb52a839e2e1ea88b83fd0d025b80
|
refs/heads/main
| 2023-08-06T22:40:58.167847
| 2021-09-23T06:43:41
| 2021-09-23T06:43:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 287
|
py
|
#!/usr/bin/python3
"""
fetches https://intranet.hbtn.io/status
"""
import requests
if __name__ == "__main__":
r = requests.get("https://intranet.hbtn.io/status")
print('Body response:')
print("\t- type: {}".format(type(r.text)))
print("\t- content: {}".format(r.text))
|
[
"fraoltesfaye.ft@gmail.com"
] |
fraoltesfaye.ft@gmail.com
|
1684c3866ad82c508fcf59e23e783bc17e47562f
|
522fcd903a5308c015cfd85c40b63a9f30236170
|
/mainapp/migrations/0003_auto_20210812_1548.py
|
c9f72c6ba35f43b322b5b7b8c75431c81899538e
|
[] |
no_license
|
ChyngyzTorogeldiev/super_flawer
|
2b8cf1d8108e4f5c8b7b5780e8149e6f951224ba
|
c42ba643e3f35a3eddba9fb7e8f857adfced0112
|
refs/heads/main
| 2023-07-12T12:20:55.741873
| 2021-08-18T14:48:31
| 2021-08-18T14:48:31
| 394,655,141
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,180
|
py
|
# Generated by Django 3.2.6 on 2021-08-12 09:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0002_flowers_flowersinpots'),
]
operations = [
migrations.CreateModel(
name='Flower',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Наименование')),
('slug', models.SlugField(unique=True)),
                ('image', models.ImageField(upload_to='', verbose_name='Изображение')),
('description', models.TextField(null=True, verbose_name='Описание')),
('price', models.DecimalField(decimal_places=2, max_digits=9, verbose_name='Цена')),
('country', models.CharField(max_length=255, verbose_name='Страна')),
('grade', models.CharField(max_length=255, verbose_name='Сорт')),
('scent', models.CharField(max_length=255, verbose_name='Аромат')),
('history', models.CharField(max_length=255, verbose_name='История')),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.category', verbose_name='Категория')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='FlowerInPot',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Наименование')),
('slug', models.SlugField(unique=True)),
                ('image', models.ImageField(upload_to='', verbose_name='Изображение')),
('description', models.TextField(null=True, verbose_name='Описание')),
('price', models.DecimalField(decimal_places=2, max_digits=9, verbose_name='Цена')),
('country', models.CharField(max_length=255, verbose_name='Страна')),
('grade', models.CharField(max_length=255, verbose_name='Сорт')),
('scent', models.CharField(max_length=255, verbose_name='Аромат')),
('history', models.CharField(max_length=255, verbose_name='История')),
('flower_pot', models.CharField(max_length=255, verbose_name='Цветочный горшок')),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.category', verbose_name='Категория')),
],
options={
'abstract': False,
},
),
migrations.RemoveField(
model_name='flowersinpots',
name='category',
),
migrations.DeleteModel(
name='Flowers',
),
migrations.DeleteModel(
name='FlowersInPots',
),
]
|
[
"www.araket@gmail.com"
] |
www.araket@gmail.com
|
930ca0fd3a6df943f526f2121d75079cec161856
|
e955d60c69bd7c7b09bd759324e2e5beab0d5736
|
/appbuild/build.py
|
2beabf4013aabbeabba3db1c03cf3823f6d51ec3
|
[
"BSD-2-Clause"
] |
permissive
|
a-mcintosh/blackbox
|
0c201f45e223f4b00401dc9d58373c21651b307e
|
23e09f747cb0af0028bd4d471df5dc3a5d7dcb32
|
refs/heads/master
| 2020-07-15T11:14:38.664963
| 2016-11-12T23:29:48
| 2016-11-12T23:29:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,103
|
py
|
#!/usr/bin/python
#
# Python 2.7 script for building the BlackBox Component Builder for Windows under Linux Debian 7.
# Looks at all branches and puts the output into the branch's output folder 'unstable/<branch>'
# unless building a stable (final) release, which is always put into folder 'stable'.
# A stable release is one that does not have a development phase in appVersion and that is built for branch 'master'.
#
# Ivan Denisov, Josef Templ
#
# use: "build.py -h" to get a short help text
#
# Creates 3 files in case of success:
# 1. a build log file named blackbox-<AppVersion>.<buildnr>-buildlog.html
# 2. a Windows installer file named blackbox-<AppVersion>.<buildnr>-setup.exe
# 3. a zipped package named blackbox-<AppVersion>-<buildnr>.zip
# In case of building a final release, buildnr is not included.
# In case building was started for a branch, updates the branch's last-build-commit hash.
# In case of successfully finishing the build, increments the global build number.
#
# By always rebuilding bbscript.exe it avoids problems with changes in the symbol or object file formats
# and acts as a rigorous test for some parts of BlackBox, in particular for the compiler itself.
# This script uses the general purpose 'bbscript' scripting engine for BlackBox, which
# can be found in the subsystem named 'Script'.
#
# Error handling:
# Stops building when shellExec writes to stderr, unless stopOnError is False.
# Stops building when there is an error reported by bbscript.
# Stops building when there is a Python exception.
# The next build will take place upon the next commit.
#
# TODO git checkout reports a message on stderr but it works, so it is ignored
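# Example invocation (hypothetical; assumes the directories configured below exist):
#   python build.py --test --branch master --verbose
# builds the 'master' branch into the local 'testbuild' folder with console output.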
from subprocess import Popen, PIPE, call
import sys, datetime, fileinput, os.path, argparse, urllib2, time
import xml.etree.ElementTree as ET
buildDate = datetime.datetime.now().isoformat()[:19]
buildDir = "/var/www/zenario/makeapp"
bbName = "bb"
bbDir = buildDir + "/" + bbName
appbuildDir = bbDir + "/appbuild"
localRepository = "/var/www/git/blackbox.git"
unstableDir = "/var/www/zenario/unstable"
stableDir = "/var/www/zenario/stable"
wine = "/usr/local/bin/wine"
xvfb = "xvfb-run --server-args='-screen 1, 1024x768x24' "
bbscript = xvfb + wine + " bbscript.exe"
bbchanges = xvfb + wine + " " + buildDir + "/bbchanges.exe /USE " + bbDir + " /LOAD ScriptChanges"
iscc = "/usr/local/bin/iscc"
windres="/usr/bin/i586-mingw32msvc-windres"
testName = "testbuild"
branch = None
commitHash = None
logFile = None
outputNamePrefix = None # until appVersion and build number are known
buildNumberIncremented = False
parser = argparse.ArgumentParser(description='Build BlackBox')
parser.add_argument('--verbose', action="store_true", default=False, help='turn verbose output on')
parser.add_argument('--test', action="store_true", default=False, help='put all results into local directory "' + testName + '"')
parser.add_argument('--branch', help='select BRANCH for building')
args = parser.parse_args()
def repositoryLocked():
return os.path.exists(localRepository + ".lock")
def hashFilePath():
return buildDir + "/lastBuildHash/" + branch
def getLastHash():
if os.path.exists(hashFilePath()):
hashFile = open(hashFilePath(), "r")
commit = hashFile.readline().strip()
hashFile.close()
return commit
else:
return ""
def getCommitHash():
gitLog = shellExec(localRepository, "git log " + branch + " -1")
global commitHash
commitHash = gitLog.split("\n")[0].split(" ")[1]
return commitHash
def needsRebuild():
return getLastHash() != getCommitHash()
def selectBranch():
global branch
if args.branch != None:
branch = args.branch
getCommitHash()
return branch
else:
branches = shellExec(localRepository, "git branch -a")
for line in branches.split("\n"):
branch = line[2:].strip()
if branch != "" and needsRebuild():
return branch
return None
def openLog():
global logFile
logFile = open(unstableDir + "/logFile.html", "w")
def logVerbose(text):
if args.verbose:
print text # for testing, goes to console
def log(text, startMarkup="", endMarkup=""):
if text != "":
if logFile != None:
for line in text.split("\n"):
logFile.write(startMarkup + line + endMarkup + "<br/>\n")
logFile.flush()
elif args.verbose:
for line in text.split("\n"):
logVerbose(line)
def logErr(text): # use color red
log(text, '<font color="#FF0000">', '</font>')
def logStep(text): # use bold font
log(text, '<b>', '</b>')
def logShell(text): # use color green
log(text, '<font color="#009600">', '</font>')
def shellExec(wd, cmd, stopOnError=True):
cmd = "cd " + wd + " && " + cmd
logShell(cmd)
(stdout, stderr) = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True).communicate()
log(stdout)
if stderr == "":
return stdout
elif not stopOnError:
logErr(stderr)
logErr("--- error ignored ---")
return stdout
else:
logErr(stderr)
logErr("--- build aborted ---")
print "--- build aborted ---\n"
incrementBuildNumber() # if not args.test
cleanup() # if not args.test
renameLog() # if not args.test
sys.exit()
def getAppVerName(appVersion):
x = appVersion
if appVersion.find("-a") >= 0:
x = appVersion.replace("-a", " Alpha ")
elif appVersion.find("-b") >= 0:
x = appVersion.replace("-b", " Beta ")
elif appVersion.find("-rc") >= 0:
x = appVersion.replace("-rc", " Release Candidate ")
return "BlackBox Component Builder " + x
def getVersionInfoVersion(appVersion, buildNum):
version = appVersion.split("-")[0]
v = version.split(".")
v0 = v[0] if len(v) > 0 else "0"
v1 = v[1] if len(v) > 1 else "0"
v2 = v[2] if len(v) > 2 else "0"
return v0 + "." + v1 + "." + v2 + "." + str(buildNum)
def isStable(appVersion):
return appVersion.find("-") < 0 and branch == "master"
def prepareCompileAndLink():
logStep("Preparing BlackBox.rc")
vrsn = versionInfoVersion.replace(".", ",")
shellExec(bbDir + "/Win/Rsrc", "mv BlackBox.rc BlackBox.rc_template")
shellExec(bbDir + "/Win/Rsrc", "sed s/{#VERSION}/" + vrsn + "/ < BlackBox.rc_template > BlackBox.rc")
shellExec(bbDir + "/Win/Rsrc", "rm BlackBox.rc_template")
logStep("Creating the BlackBox.res resource file")
shellExec(bbDir + "/Win/Rsrc", windres + " -i BlackBox.rc -o BlackBox.res")
logStep("Preparing bbscript.exe")
shellExec(buildDir, "cp bbscript.exe " + bbDir + "/")
def deleteBbFile(name):
if os.path.exists(bbDir + "/" + name):
shellExec(bbDir, "rm " + name)
def runbbscript(fileName):
deleteBbFile("StdLog.txt");
# fileName is relative to bbscript.exe startup directory, which is bbDir
    # if a /USE param is used it must be an absolute path, otherwise some texts cannot be opened, e.g. Converters.
cmd = "cd " + bbDir + " && " + bbscript + ' /PAR "' + fileName + '"'
logShell(cmd)
bbres = call(cmd + " >wine_out.txt 2>&1", shell=True) # wine produces irrelevant error messages
if bbres != 0:
shellExec(bbDir, "cat StdLog.txt", False)
cleanup()
logErr("--- build aborted ---")
renameLog() # if not args.test
sys.exit()
def compileAndLink():
logStep("Compiling and linking BlackBox")
runbbscript("Dev/Docu/Build-Tool.odc")
shellExec(bbDir, "mv BlackBox2.exe BlackBox.exe && mv Code System/ && mv Sym System/")
def buildBbscript():
logStep("Incrementally building BlackBox scripting engine bbscript.exe")
runbbscript("appbuild/newbbscript.txt")
shellExec(bbDir, "mv newbbscript.exe bbscript.exe && chmod a+x bbscript.exe")
shellExec(bbDir, "rm -R Code Sym */Code */Sym BlackBox.exe")
def appendSystemProperties():
logStep("Setting system properties in appendProps.txt")
shellExec(appbuildDir, 'sed s/{#AppVersion}/"' + appVersion + '"/ < appendProps.txt > appendProps_.txt')
shellExec(appbuildDir, 'sed s/{#AppVerName}/"' + appVerName + '"/ < appendProps_.txt > appendProps.txt')
shellExec(appbuildDir, "sed s/{#FileVersion}/" + versionInfoVersion + "/ < appendProps.txt > appendProps_.txt")
shellExec(appbuildDir, "sed s/{#BuildNum}/" + str(buildNum) + "/ < appendProps_.txt > appendProps.txt")
shellExec(appbuildDir, "sed s/{#BuildDate}/" + buildDate[:10] + "/ < appendProps.txt > appendProps_.txt")
shellExec(appbuildDir, "sed s/{#CommitHash}/" + commitHash + "/ < appendProps_.txt > appendProps.txt")
logStep("Appending version properties to System/Rsrc/Strings.odc")
runbbscript("appbuild/appendProps.txt")
def updateBbscript():
if not args.test and branch == "master":
logStep("Updating bbscript.exe")
shellExec(bbDir, "mv bbscript.exe " + buildDir + "/")
else:
logStep("Removing bbscript.exe")
shellExec(bbDir, "rm bbscript.exe ")
def get_fixed_version_id(versions_file, target):
tree = ET.parse(versions_file)
root = tree.getroot()
for version in root.findall('version'):
if version.findtext('name') == target:
return version.findtext('id')
return "-1" # unknown
def addChanges():
if branch == "master" or args.test:
logStep("downloading xml files from Redmine")
versions_file = bbDir + "/blackbox_versions.xml"
url = "http://redmine.blackboxframework.org/projects/blackbox/versions.xml"
with open(versions_file, 'wb') as out_file:
out_file.write(urllib2.urlopen(url).read())
minusPos = appVersion.find("-")
target = appVersion if minusPos < 0 else appVersion[0:minusPos]
fixed_version_id = get_fixed_version_id(versions_file, target)
# status_id=5 means 'Closed', limit above 100 is not supported by Redmine
url = "http://redmine.blackboxframework.org/projects/blackbox/issues.xml?status_id=5&fixed_version_id=" + fixed_version_id + "&offset=0&limit=100"
issues_file1 = bbDir + "/blackbox_issues100.xml"
with open(issues_file1, 'wb') as out_file:
out_file.write(urllib2.urlopen(url).read())
url = "http://redmine.blackboxframework.org/projects/blackbox/issues.xml?status_id=5&fixed_version_id=" + fixed_version_id + "&offset=100&limit=100"
issues_file2 = bbDir + "/blackbox_issues200.xml"
with open(issues_file2, 'wb') as out_file:
out_file.write(urllib2.urlopen(url).read())
logStep("converting to BlackBox_" + appVersion + "_Changes.odc/.html")
bbres = call(bbchanges + " >" + bbDir + "/wine_out.txt 2>&1", shell=True)
logStep("removing xml files")
shellExec(".", "rm " + versions_file + " " + issues_file1 + " " + issues_file2)
logStep("moving file BlackBox_" + appVersion + "_Changes.html to outputDir")
shellExec(".", "mv " + bbDir + "/BlackBox_" + appVersion + "_Changes.html " + outputPathPrefix + "-changes.html")
def buildSetupFile():
logStep("Building " + outputNamePrefix + "-setup.exe file using InnoSetup")
deleteBbFile("StdLog.txt");
deleteBbFile("wine_out.txt");
deleteBbFile("README.txt");
shellExec(bbDir, "rm -R Script appbuild")
shellExec(bbDir, iscc + " - < Win/Rsrc/BlackBox.iss" \
+ ' "/dAppVersion=' + appVersion
+ '" "/dAppVerName=' + appVerName
+ '" "/dVersionInfoVersion=' + versionInfoVersion
+ '"', False) # a meaningless error is displayed
shellExec(bbDir, "mv Output/setup.exe " + outputPathPrefix + "-setup.exe")
shellExec(bbDir, "rm -R Output")
def buildZipFile():
deleteBbFile("LICENSE.txt")
logStep("Zipping package to file " + outputNamePrefix + ".zip")
shellExec(bbDir, "zip -r " + outputPathPrefix + ".zip *")
def updateCommitHash():
if not args.test:
logStep("Updating commit hash for branch '" + branch + "'")
hashFile = open(hashFilePath(), "w")
hashFile.write(commitHash)
hashFile.close()
def incrementBuildNumber():
global buildNumberIncremented
if not buildNumberIncremented:
logStep("Updating build number to " + str(buildNum + 1))
numberFile.seek(0)
numberFile.write(str(buildNum+1))
numberFile.truncate()
numberFile.close()
buildNumberIncremented = True
def cleanup():
if not args.test:
logStep("Cleaning up")
shellExec(buildDir, "rm -R -f " + bbDir)
def renameLog():
global logFile
logFile.close()
logFile = None
if not args.test and outputNamePrefix != None:
logStep("Renaming 'logFile.html' to '" + outputNamePrefix + "-buildlog.html'")
shellExec(unstableDir, "mv logFile.html " + outputPathPrefix + "-buildlog.html")
if args.test:
buildNumberIncremented = True # avoid side effect when testing
unstableDir = buildDir + "/" + testName
stableDir = unstableDir
if (os.path.exists(bbDir)):
shellExec(buildDir, "rm -R -f " + bbDir)
if (os.path.exists(unstableDir)):
shellExec(buildDir, "rm -R -f " + testName)
shellExec(buildDir, "mkdir " + testName)
if os.path.exists(bbDir): # previous build is still running or was terminated after an error
logVerbose("no build because directory '" + bbDir + "' exists")
sys.exit()
if repositoryLocked():
logVerbose("no build because repository is locked; probably due to sync process")
sys.exit()
if selectBranch() == None:
logVerbose("no build because no new commit in any branch")
sys.exit()
updateCommitHash() # if not args.test
# this file contains the build number to be used for this build; incremented after a successful build
numberFile = open(buildDir + "/" + "number", "r+")
buildNum = int(numberFile.readline().strip())
openLog()
log("<h2>Build " + str(buildNum) + " from '" + branch + "' at " + buildDate + "</h2>")
log("<h3>git commit hash: " + commitHash + "</h3>")
logStep("Cloning repository into temporary folder '" + bbName + "'")
# option -q suppresses the progress reporting on stderr
shellExec(buildDir, "git clone -q --branch " + branch + " " + localRepository + " " + bbDir)
if not os.path.exists(appbuildDir + "/AppVersion.txt"):
cleanup() # if not args.test
logStep('No build because file "appbuild/AppVersion.txt" not in branch')
sys.exit()
print "<br/>Build " + str(buildNum) + " from '" + branch + "' at " + buildDate + "<br/>" # goes to buildlog.html
appVersion = open(appbuildDir + "/AppVersion.txt", "r").readline().strip()
appVerName = getAppVerName(appVersion)
versionInfoVersion = getVersionInfoVersion(appVersion, buildNum)
stableRelease = isStable(appVersion)
outputNamePrefix = "blackbox-" + appVersion + ("" if stableRelease else ("." + str(buildNum).zfill(3)))
outputDir = stableDir if stableRelease else unstableDir + "/" + branch
outputPathPrefix = outputDir + "/" + outputNamePrefix
if stableRelease and os.path.exists(outputPathPrefix + ".zip"):
#for rebuilding a stable release remove the output files manually from the stable dir
cleanup() # if not args.test
logStep('Cannot rebuild stable release ' + appVersion + '.')
print "Cannot rebuild stable release " + appVersion + ".<br/>" # goes to buildlog.html
sys.exit()
if not os.path.exists(outputDir):
shellExec(buildDir, "mkdir " + outputDir)
prepareCompileAndLink()
compileAndLink() #1
buildBbscript()
compileAndLink() #2
buildBbscript()
compileAndLink() #3
appendSystemProperties()
updateBbscript()
addChanges()
buildSetupFile()
buildZipFile()
# if not args.test
incrementBuildNumber()
cleanup()
renameLog()
|
[
"josef.templ@gmail.com"
] |
josef.templ@gmail.com
|
9f12948c7c8d94d0fe4ef8f846a9192a6f472f7a
|
347dcbcfa4d05b4d4310d725a4201c81972659b7
|
/HMM/Gb_stimulus.py
|
2a72738c07af36dbabe4ea152191826e5568d96a
|
[] |
no_license
|
nghdavid/retsim
|
d3a73f6e60897a41e9ea136db8db8f1533d50700
|
76ed39c37bc034d6e455952cb400f23ab3a82c95
|
refs/heads/master
| 2022-05-31T06:44:45.703776
| 2022-03-29T07:21:38
| 2022-03-29T07:21:38
| 118,088,017
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,573
|
py
|
#Calculate the mutual information between stimulus and Gauss_bipolar
import matplotlib as mpl
mpl.use('nbagg')
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from scipy.io import loadmat
import numpy as np
import operator
import os
import sys
sim_time = int(sys.argv[1])
period = float(sys.argv[2])/1000.0
nbin = 30
rank = 100
MI_Limit = 100
shift = 5
forward = int(np.ceil(shift/period))
backward = int(np.ceil(shift/period))
taus = np.linspace(-2,2,401) # lag grid in seconds; note the spacing relative to the stimulus period
pwd = os.getcwd()
read_stimulus = np.load(pwd+"/input_sequence/stimulus.npy")
read_stimulus = read_stimulus[-int(sim_time/period):]
stt = read_stimulus[forward:len(read_stimulus)-backward]
tst = np.arange(0,sim_time-2*shift,period)
# discretize values into nbin equal-population bins (returns a bin index per sample)
def binning(nbin,stt):
tsi = np.argsort(stt)
tss = np.array(stt,dtype=int)
p = 0
for i in range(0,nbin):
ii = len(stt)*(i+1)//nbin
tss[tsi[p:ii]] = i
p = ii
return tss
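# Worked example (hypothetical input): binning(2, np.array([10., 3., 7., 1.]))
# returns array([1, 0, 1, 0]): the two smallest values land in bin 0 and the two
# largest in bin 1, i.e. each sample is replaced by its equal-population bin index.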
def MI(pxy):
px = pxy.sum(axis=1)
py = pxy.sum(axis=0)
pxy /= pxy.sum()
px /= px.sum()
py /= py.sum()
mi = 0
for i in range(len(px)):
for j in range(len(py)):
if pxy[i][j]>0: mi += pxy[i][j]*np.log2(pxy[i][j]/(px[i]*py[j]))
return mi
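# Sanity check (hypothetical joint distributions): MI(np.eye(2)/2) returns 1.0 bit
# for perfectly correlated binary variables, while MI(np.ones((2,2))/4) returns 0.0
# for independent uniform ones.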
def mi_kevin(nbin,nz,bsp,period,tss,tst):
xedge = range(nbin+1)
yedge = range(nz+1)
pxy, xedges, yedges = np.histogram2d(tss,bsp,bins=(xedge,yedge))
print (pxy)
print (xedges)
print (yedges)
return MI(pxy)
id = 1
peak = []
Mutual_Information = {}
tss = binning(nbin,stt)
step_size = 0.001
Forward = int(np.ceil(shift/step_size))
Backward = int(np.ceil(shift/step_size))
for i in range(10,21,1):
for j in range(10,21,1):
number=str(i)+str(j)
filename = pwd +"/Gb_MI/"+ number + ".jpg"
read_Gb = np.loadtxt(pwd+"/results/Gauss_bipolar"+number)
mik = []
mik_shuffle = []
for t in taus:
time_shift = int(np.round(t/step_size))
Gb = read_Gb[-int(sim_time/0.001):]
Gb = Gb[Forward-time_shift:len(Gb)-Backward-time_shift]
shift_Gb = Gb.reshape(-1, 50).mean(axis=1)
shift_tss = binning(nbin,shift_Gb)
xedge = range(nbin+1)
yedge = range(nbin+1)
pxy, xedges, yedges = np.histogram2d(tss,shift_tss,bins=(xedge,yedge))
mik.append(MI(pxy))
stt_shuffle = np.random.permutation(shift_Gb)
shuffle_pxy, xedges, yedges = np.histogram2d(tss,stt_shuffle,bins=(xedge,yedge))
mik_shuffle.append(MI(shuffle_pxy))
Max_MI = max(mik)/period
Max_Time = taus[mik.index(max(mik))]
peak.append(Max_Time)
plt.figure(id)
id = id + 1
plt.plot(taus,np.array(mik)/period,'b')
plt.plot(taus,np.array(mik_shuffle)/period,'r-',label="shuffle")
plt.ylim([0,MI_Limit])
plt.xlabel("delta t(s)")
plt.ylabel("mutual information(bits/sec)")
plt.axvline(Max_Time,ymax=Max_MI/MI_Limit,color='k')
plt.legend()
plt.savefig(filename)
peak_time = np.multiply(np.array(peak),1000)
np.save('peak_Gb.npy',peak_time)
print (peak_time)
num_peak = {}
x = []
y = []
p = int(period*1000)
for i in range(-300,200,10):
num_peak[round(i)] = 0
x.append(round(i))
y.append(round(i))
for j in peak_time:
if round(j) < 200 and round(j) >-300:
num = num_peak[round(j)]
num_peak[round(j)] = num + 1
for key in num_peak:
y[x.index(key)] = num_peak[key]
plt.figure(0)
plt.bar(x,y,20,align='center')
plt.ylim([0,30])
plt.xlim([-300,200])
plt.xticks(range(-250,200,p))
plt.title("Peak Distribution")
plt.xlabel("peak (ms)")
plt.ylabel("frequency")
plt.savefig("peak_Gb.jpg")
|
[
"nghdavid123@gmail.com"
] |
nghdavid123@gmail.com
|
e127d00741cb6dbd6bce2dda267f67ec41545d00
|
6c60d05d92a1b9d3a763688f8e15bf5cbc88d6fb
|
/heraldbot/discord.py
|
9b2121d95436020cba1c01db40f7a57cc3a7fafb
|
[
"CC0-1.0"
] |
permissive
|
Elemecca/heraldbot
|
2ad77fb055ff7ed7efe4afc73dbf2fe0f2662343
|
681ba6e1aee99eba24ea5ec0ff2c58c14297d89c
|
refs/heads/master
| 2021-04-09T16:20:22.100203
| 2019-05-03T20:42:45
| 2019-05-03T20:42:45
| 125,658,862
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,386
|
py
|
# HeraldBot - polling notification bot for Discord
# Written in 2018 by Sam Hanes <sam@maltera.com>
#
# To the extent possible under law, the author(s) have dedicated all
# copyright and related and neighboring rights to this software to the
# public domain worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication
# along with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
import aiohttp
import json
import logging
LOG = logging.getLogger('heraldbot')
class DiscordSender(object):
def __init__(self, config=None, http_con=None):
self.url = None
self.body = {
'wait': True,
}
self.url = config['discord.webhook']
if 'discord.username' in config:
self.body['username'] = config['discord.username']
if 'discord.avatar_url' in config:
self.body['avatar_url'] = config['discord.avatar_url']
self.http = aiohttp.ClientSession(
connector=http_con,
conn_timeout=15,
read_timeout=60,
raise_for_status=True,
)
async def send(self, content=None, embed=None):
body = self.body.copy()
if content:
body['content'] = content
if embed:
body['embeds'] = [embed]
LOG.debug("sending webhook:\n%s", json.dumps(body, indent=2))
await self.http.post(self.url, json=body)
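# Minimal usage sketch (hypothetical webhook URL; must be awaited inside a running
# asyncio event loop):
#   sender = DiscordSender(config={'discord.webhook': 'https://discord.com/api/webhooks/...'})
#   await sender.send(content='hello from HeraldBot')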
|
[
"sam@maltera.com"
] |
sam@maltera.com
|
d0e08883e725074865937fe4750be1941a25ee52
|
f1ed50d1e9644fa2f17bcb48e1ad273133deb5c5
|
/Descent_dice.py
|
2d550f5d132f86a703a0c36891241cf16e6c23fa
|
[] |
no_license
|
ErickBrindock/Descent2eDice
|
412c978a819221e2767355fca7e7fbc847da88fc
|
0b9a7608180fe76988a757c1f47b0637aa15dae4
|
refs/heads/master
| 2020-05-24T06:03:06.362988
| 2019-05-17T02:24:35
| 2019-05-17T02:24:35
| 187,132,302
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,586
|
py
|
import numpy as np
import matplotlib.pyplot as plt
import random
import math
class Dice(object):
color = ""
face = None
class Face(object):
damage = 0
burst = 0
range = 0
defend = 0
def __init__(self, dmg, burst, range, defend):
self.damage = dmg
self.burst = burst
self.range = range
self.defend = defend
def addFace(self, face):
self.damage += face.damage
self.defend += face.defend
self.burst += face.burst
self.range += face.range
def normFace(self):
net = self.damage - self.defend
if net >= 0:
self.damage = net
self.defend = 0
else:
self.defend = -net
self.damage = 0
def __str__(self):
out = "["
if self.damage == 0 and self.burst == 0 and self.range == 0 and self.defend == 0:
out += "XX MISS XX"
else:
out += "dmg: " + str(self.damage)
if self.burst > 0:
out += " * " * self.burst
out += " rng: " + str(self.range)
out += " def:" + str(self.defend)
out += "]"
return out
def __init__(self, color):
self.face = []
if color == "red":
self.face.append(self.Face(1,0,0,0))
self.face.append(self.Face(2,0,0,0))
self.face.append(self.Face(2,0,0,0))
self.face.append(self.Face(2,0,0,0))
self.face.append(self.Face(3,0,0,0))
self.face.append(self.Face(3,1,0,0))
elif color == "blue":
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(2,1,2,0))
self.face.append(self.Face(2,0,3,0))
self.face.append(self.Face(2,0,4,0))
self.face.append(self.Face(1,0,5,0))
self.face.append(self.Face(1,1,6,0))
elif color == "yellow":
self.face.append(self.Face(0,1,1,0))
self.face.append(self.Face(1,0,1,0))
self.face.append(self.Face(1,0,2,0))
self.face.append(self.Face(1,1,0,0))
self.face.append(self.Face(2,0,0,0))
self.face.append(self.Face(2,1,0,0))
elif color == "gray":
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,1))
self.face.append(self.Face(0,0,0,1))
self.face.append(self.Face(0,0,0,2))
elif color == "brown":
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,1))
self.face.append(self.Face(0,0,0,1))
self.face.append(self.Face(0,0,0,2))
elif color == "black":
self.face.append(self.Face(0,0,0,0))
self.face.append(self.Face(0,0,0,2))
self.face.append(self.Face(0,0,0,2))
self.face.append(self.Face(0,0,0,2))
self.face.append(self.Face(0,0,0,3))
self.face.append(self.Face(0,0,0,4))
self.color = color
def printface(self, index):
out = "[" + self.color[0:3] + " "
if self.color == "gray" or self.color == "brown" or self.color == "black":
out += "def:" + str(self.face[index].defend)
elif self.face[index].damage == 0 and self.face[index].burst == 0 and self.face[index].range == 0 and self.face[index].defend == 0:
out += "--missed--"
else:
out += "dmg: " + str(self.face[index].damage)
if self.face[index].burst > 0:
out += " *burst* "
out += " rng: " + str(self.face[index].range)
out += "]"
print(out)
def print_dice(self):
print(self.color)
for i in range(6):
self.printface(i)
def roll(self, disp=False):
r = random.randint(0,5)
if disp == True:
self.printface(r)
return self.face[r]
@staticmethod
def roll_dice(args, disp = False):
total = Dice.Face(0,0,0,0)
for die in args:
temp = die.roll(disp)
total.addFace(temp)
total.normFace()
print("tot" + str(total))
def generate_pmf(dice):
totalperm = 6**len(dice)
dmax = 0
bmax = 0
rmax = 0
defmax = 0
d_data = np.zeros(10)
b_data = np.zeros(10)
r_data = np.zeros(10)
def_data = np.zeros(10)
for i in range(totalperm):
local = Dice.Face(0,0,0,0)
for j in range(len(dice)):
            local.addFace(dice[j].face[ (i // (6**j)) % 6 ]) # digit j of i in base 6 selects die j's face
local.normFace()
d_data[local.damage] += 1
b_data[local.burst] += 1
r_data[local.range] += 1
def_data[local.defend] += 1
dmax = max(dmax, local.damage)
bmax = max(bmax, local.burst)
rmax = max(rmax, local.range)
defmax = max(defmax, local.defend)
dmax += 1
bmax += 1
rmax+= 1
defmax += 1
return d_data[0:dmax] / totalperm, b_data[0:bmax] / totalperm, r_data[0:rmax] / totalperm, def_data[0:defmax] / totalperm
def generate_stats(probs):
mu = 0
mu2 = 0
for i in range(len(probs)):
mu += i * probs[i]
mu2 += i * i *probs[i]
var = mu2 - mu**2
return mu, var, math.sqrt(var)
def create_hist(dice):
d_data, b_data, r_data, def_data = generate_pmf(dice)
plt.subplot(2, 2, 1)
plt.bar(np.arange(len(d_data)),d_data)
plt.title("Damage")
plt.ylabel("%")
plt.xlabel("Damage")
plt.subplot(2, 2, 2)
plt.bar(np.arange(len(b_data)),b_data)
plt.title("Burst")
plt.xlabel("Burst Points")
plt.subplot(2, 2, 3)
plt.bar(np.arange(len(r_data)),r_data)
plt.title("Range")
plt.ylabel("%")
plt.xlabel("Spaces")
plt.subplot(2, 2, 4)
plt.bar(np.arange(len(def_data)),def_data)
plt.title("Defense")
plt.xlabel("Defense")
plt.tight_layout()
plt.show()
if __name__ == "__main__":
die_types = ["red", "blue", "yellow", "brown", "gray", "black"]
dice = []
for die in die_types:
dice.append(Dice(die))
for die in dice:
die.print_dice()
test_dice = [dice[0], dice[1], dice[0], dice[4], dice[4]]
for die in test_dice:
die.print_dice()
d, b, r, defen = generate_pmf(test_dice)
print(" : Average | Variance | Std. Dev.")
print("Damage : %6f | %6f | %6f" % generate_stats(d))
print("Burst : %6f | %6f | %6f" % generate_stats(b))
print("Range : %6f | %6f | %6f" % generate_stats(r))
print("Defense: %6f | %6f | %6f" % generate_stats(defen))
create_hist(test_dice)
    #for i in range(2):
    #    Dice.roll_dice([dice[1], dice[4], dice[2]])  # roll_dice expects a list of dice
|
[
"noreply@github.com"
] |
ErickBrindock.noreply@github.com
|
657501ffbfcf89b356b0d8a9d8eea04b060dd09a
|
2793f4d1e70e50bd5dba329be5c7c15a1423f189
|
/pyscripts/pythoncookbook3/6.2_json/jsondemo.py
|
93e71b051067f567897e4e3a72789b90efeeaf60
|
[] |
no_license
|
finfou/viclearning
|
92b13c1c9c102f857dcf5110ca85ac288e7b229d
|
165473514684bb320f7e8352edc5edb583f9bda3
|
refs/heads/master
| 2021-01-17T08:52:28.990141
| 2019-10-26T12:10:51
| 2019-10-26T12:10:51
| 38,920,029
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 804
|
py
|
#!/usr/bin/env python
import json
data = {
'name' : 'ACME',
'shares': 100,
'price' : 512.43
}
json_str = json.dumps(data)
data = json.loads(json_str)
class JSONObject:
def __init__(self, d):
self.__dict__ = d
data1 = json.loads(json_str, object_hook=JSONObject)
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
p = Point(2,3)
def serialize_instance(obj):
d = {'__classname__' : type(obj).__name__}
d.update(vars(obj))
return d
classes = {
'Point': Point
}
def deserialize_obj(d):
clsname = d.pop('__classname__', None)
if clsname:
cls = classes[clsname]
obj = cls.__new__(cls)
for key, value in d.items():
setattr(obj, key, value)
return obj
else:
return d
s = json.dumps(p, default=serialize_instance)
a = json.loads(s, object_hook=deserialize_obj)
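# Round-trip check: 'a' is a fresh Point rebuilt from 's', so a.x == 2 and a.y == 3
# even though Point itself is not JSON-serializable by default.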
|
[
"finfou@gmail.com"
] |
finfou@gmail.com
|
c08056d18d4732a3aa0f499f8c32cc258ca6d053
|
f5ac2542ea4cba357b6224c053e91ae864e0610f
|
/iris_estimator/utils.py
|
1bc3f6e7ceb277a08c38eba6b2d41af4628b29b6
|
[] |
no_license
|
tonylibing/tfestimator
|
492f0eb4909f2106d05284e4c4b324fc176374b2
|
1b083820b3c88f0ebb8171ed40d28073d6ff43b9
|
refs/heads/master
| 2023-03-21T07:19:20.012206
| 2020-10-16T08:02:04
| 2020-10-16T08:02:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,158
|
py
|
# -*-coding: utf-8-*-
# Author : Littlely
# FileName : utils.py
# Description:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
import logging
from tensorflow.contrib.saved_model.python.saved_model import reader
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import signature_constants
DEFAULT_TAGS = 'serve'
_DEFAULT_INPUT_ALTERNATIVE_FORMAT = 'default_input_alternative:{}'
@six.add_metaclass(abc.ABCMeta)
class Predictor(object):
"""Abstract base class for all predictors."""
@property
def graph(self):
return self._graph
@property
def session(self):
return self._session
@property
def feed_tensors(self):
return self._feed_tensors
@property
def fetch_tensors(self):
return self._fetch_tensors
def __repr__(self):
return '{} with feed tensors {} and fetch_tensors {}'.format(
type(self).__name__, self._feed_tensors, self._fetch_tensors)
def __call__(self, input_dict):
"""Returns predictions based on `input_dict`.
Args:
input_dict: a `dict` mapping strings to numpy arrays. These keys
must match `self._feed_tensors.keys()`.
Returns:
A `dict` mapping strings to numpy arrays. The keys match
`self.fetch_tensors.keys()`.
Raises:
ValueError: `input_dict` does not match `feed_tensors`.
"""
# TODO(jamieas): make validation optional?
input_keys = set(input_dict.keys())
expected_keys = set(self.feed_tensors.keys())
unexpected_keys = input_keys - expected_keys
if unexpected_keys:
raise ValueError(
'Got unexpected keys in input_dict: {}\nexpected: {}'.format(
unexpected_keys, expected_keys))
feed_dict = {}
for key in self.feed_tensors.keys():
value = input_dict.get(key)
if value is not None:
feed_dict[self.feed_tensors[key]] = value
return self._session.run(fetches=self.fetch_tensors, feed_dict=feed_dict)
def get_meta_graph_def(saved_model_dir, tags):
"""Gets `MetaGraphDef` from a directory containing a `SavedModel`.
Returns the `MetaGraphDef` for the given tag-set and SavedModel directory.
Args:
saved_model_dir: Directory containing the SavedModel.
tags: Comma separated list of tags used to identify the correct
`MetaGraphDef`.
Raises:
ValueError: An error when the given tags cannot be found.
Returns:
A `MetaGraphDef` corresponding to the given tags.
"""
saved_model = reader.read_saved_model(saved_model_dir)
set_of_tags = set([tag.strip() for tag in tags.split(',')])
for meta_graph_def in saved_model.meta_graphs:
if set(meta_graph_def.meta_info_def.tags) == set_of_tags:
return meta_graph_def
raise ValueError('Could not find MetaGraphDef with tags {}'.format(tags))
def _get_signature_def(signature_def_key, export_dir, tags):
"""Construct a `SignatureDef` proto."""
signature_def_key = (
signature_def_key or
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)
metagraph_def = get_meta_graph_def(export_dir, tags)
try:
signature_def = metagraph_def.signature_def[signature_def_key]
except KeyError as e:
formatted_key = _DEFAULT_INPUT_ALTERNATIVE_FORMAT.format(
signature_def_key)
try:
signature_def = metagraph_def.signature_def[formatted_key]
except KeyError:
raise ValueError(
'Got signature_def_key "{}". Available signatures are {}. '
'Original error:\n{}'.format(
signature_def_key, list(metagraph_def.signature_def), e))
logging.warning('Could not find signature def "%s". '
'Using "%s" instead', signature_def_key, formatted_key)
return signature_def
def _check_signature_arguments(signature_def_key,
signature_def,
input_names,
output_names):
"""Validates signature arguments for `SavedModelPredictor`."""
signature_def_key_specified = signature_def_key is not None
signature_def_specified = signature_def is not None
input_names_specified = input_names is not None
output_names_specified = output_names is not None
if input_names_specified != output_names_specified:
raise ValueError(
'input_names and output_names must both be specified or both be '
'unspecified.'
)
if (signature_def_key_specified + signature_def_specified +
input_names_specified > 1):
raise ValueError(
        'You must specify at most one of signature_def_key OR signature_def OR '
        '(input_names AND output_names).'
)
class SavedModelPredictor(Predictor):
"""A `Predictor` constructed from a `SavedModel`."""
def __init__(self,
export_dir,
signature_def_key=None,
signature_def=None,
input_names=None,
output_names=None,
tags=None,
graph=None,
config=None):
"""Initialize a `CoreEstimatorPredictor`.
Args:
export_dir: a path to a directory containing a `SavedModel`.
signature_def_key: Optional string specifying the signature to use. If
`None`, then `DEFAULT_SERVING_SIGNATURE_DEF_KEY` is used. Only one of
`signature_def_key` and `signature_def` should be specified.
signature_def: A `SignatureDef` proto specifying the inputs and outputs
for prediction. Only one of `signature_def_key` and `signature_def`
should be specified.
input_names: A dictionary mapping strings to `Tensor`s in the `SavedModel`
that represent the input. The keys can be any string of the user's
choosing.
output_names: A dictionary mapping strings to `Tensor`s in the
`SavedModel` that represent the output. The keys can be any string of
the user's choosing.
tags: Optional. Comma separated list of tags that will be used to retrieve
the correct `SignatureDef`. Defaults to `DEFAULT_TAGS`.
graph: Optional. The Tensorflow `graph` in which prediction should be
done.
config: `ConfigProto` proto used to configure the session.
Raises:
ValueError: If more than one of signature_def_key OR signature_def OR
(input_names AND output_names) is specified.
"""
_check_signature_arguments(
signature_def_key, signature_def, input_names, output_names)
tags = tags or DEFAULT_TAGS
self._graph = graph or ops.Graph()
with self._graph.as_default():
self._session = session.Session(config=config)
loader.load(self._session, tags.split(','), export_dir)
if input_names is None:
if signature_def is None:
signature_def = _get_signature_def(signature_def_key, export_dir, tags)
input_names = {k: v.name for k, v in signature_def.inputs.items()}
output_names = {k: v.name for k, v in signature_def.outputs.items()}
self._feed_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in input_names.items()}
self._fetch_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in output_names.items()}
def from_saved_model(export_dir,
signature_def_key=None,
signature_def=None,
input_names=None,
output_names=None,
tags=None,
graph=None,
config=None):
"""Constructs a `Predictor` from a `SavedModel` on disk.
Args:
export_dir: a path to a directory containing a `SavedModel`.
signature_def_key: Optional string specifying the signature to use. If
`None`, then `DEFAULT_SERVING_SIGNATURE_DEF_KEY` is used. Only one of
`signature_def_key` and `signature_def`
signature_def: A `SignatureDef` proto specifying the inputs and outputs
for prediction. Only one of `signature_def_key` and `signature_def`
should be specified.
input_names: A dictionary mapping strings to `Tensor`s in the `SavedModel`
that represent the input. The keys can be any string of the user's
choosing.
output_names: A dictionary mapping strings to `Tensor`s in the
`SavedModel` that represent the output. The keys can be any string of
the user's choosing.
tags: Optional. Tags that will be used to retrieve the correct
`SignatureDef`. Defaults to `DEFAULT_TAGS`.
graph: Optional. The Tensorflow `graph` in which prediction should be
done.
config: `ConfigProto` proto used to configure the session.
Returns:
An initialized `Predictor`.
Raises:
ValueError: More than one of `signature_def_key` and `signature_def` is
specified.
"""
return SavedModelPredictor(
export_dir,
signature_def_key=signature_def_key,
signature_def=signature_def,
input_names=input_names,
output_names=output_names,
tags=tags,
graph=graph,
config=config)
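# Minimal usage sketch (hypothetical export path and feed key; the real keys come
# from the SavedModel's serving signature):
#   predictor = from_saved_model('/tmp/iris_savedmodel')
#   outputs = predictor({'inputs': features_batch})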
|
[
"2473134393@qq.com"
] |
2473134393@qq.com
|
78baf2a896b614f96b9d6e4e8e6b222c12551547
|
bfc477f64528bda4f13d2541f38e4e8b879a8d4f
|
/one_month/07day/work/09-判断奇偶数.py
|
1d9444e37476510a5b2177e4574cb65777bfee7e
|
[] |
no_license
|
945153590/first_codes
|
fa3cb6c71c2c4591789df0194b44fc96dee9f443
|
ad431740fbd0bc0107e1ae7396d0bf39f81a384d
|
refs/heads/master
| 2020-03-22T15:30:16.201038
| 2018-09-06T00:58:33
| 2018-09-06T01:03:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 108
|
py
|
a = int(input('请输入一个数字:'))
if a%2 == 0:
print('我是偶数')
else:
print('我是奇数')
|
[
"945153590@qq.com"
] |
945153590@qq.com
|
382fe25bc8201c96508d4f5af3cf37e14838ac3a
|
881db1d02e39223e6320bc19da950f997b4b1545
|
/apps/hotdeal/models.py
|
91a30029c6edb3bfe01e726ebaa3f3ff6d761586
|
[] |
no_license
|
ds-tothemoon/wantdeal-crawling
|
b575b8469a0d841a9ce0a07b1a7a882b57bc602c
|
8d257aa344dc8fdb6aae974a55e4fbce968d3971
|
refs/heads/master
| 2023-02-09T18:33:10.986333
| 2020-12-31T04:48:42
| 2020-12-31T04:48:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 833
|
py
|
from django.db import models
class HotDeal(models.Model):
site_post_id = models.IntegerField(unique=True, blank=True, null=True)
site = models.CharField('사이트',max_length=255, blank=True, null=True)
title = models.CharField('제목',max_length=255, blank=True, null=True)
category = models.CharField('카테고리',max_length=255, blank=True, null=True)
location = models.CharField('지역',max_length=255, blank=True, null=True)
is_closed = models.CharField('종료여부',max_length=255, blank=True, null=True)
url = models.CharField('URL',max_length=255, blank=True, null=True)
price = models.DecimalField('가격', max_digits=15, decimal_places=2, blank=True, null=True)
photo = models.ImageField('이미지', blank=True, null=True)
def __str__(self):
return self.title
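# Crawlers can upsert on the unique site_post_id, e.g. (hypothetical values):
#   HotDeal.objects.update_or_create(site_post_id=123, defaults={'title': '...', 'site': '...'})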
|
[
"mmdsds@u.sogang.ac.kr"
] |
mmdsds@u.sogang.ac.kr
|
7427cb112c14b466eb4d2092abdf6f8f6059d08f
|
78073b175933419fb50b1a7e97fda0c92f67696c
|
/Chapter-03/code/keyui/lib/send_email.py
|
ac6eb0b27804e5c0a07d048f52c7ad1b0d7d487b
|
[] |
no_license
|
yueweiw/python-test-dev
|
baeefffefd0564db25339ea7258b33b1673f1a5b
|
05a70f350d58bd5efd3f4fa963035faa0004cc09
|
refs/heads/master
| 2023-07-02T09:06:04.797797
| 2021-08-07T00:47:53
| 2021-08-07T00:47:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,960
|
py
|
import os
import smtplib
from email.header import Header
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
from email.mime.text import MIMEText
from .log_config import get_logger
from .read_config import readConfig
_mylogger = get_logger('send_mail')
mailto_list = readConfig('config.ini','email','mailto_list').replace(' ','').split(',') # recipient list
mail_host = readConfig('config.ini','email','mail_host') # mail server host
mail_from = readConfig('config.ini','email','mail_from') # sender address
mail_pass = readConfig('config.ini','email','mail_pass') # sender password
def send_mail( sub, content, reportFile ): # sub: subject; content: mail body; reportFile: attachment path
msg = MIMEMultipart()
msg.attach(MIMEText(content, 'plain', 'utf-8'))
part = MIMEApplication(open(reportFile, 'rb').read())
part.add_header('Content-Disposition', 'attachment',filename=Header(os.path.basename(reportFile),"utf-8").encode())
#part["Content-Disposition"] = 'attachment; filename="%s"'%reportFile
    msg.attach(part) # attach the report file
msg['subject'] = Header(sub, 'utf-8').encode()
msg['From'] = mail_from
    msg['To'] = ','.join(mailto_list) # supports multiple recipients
smtp = smtplib.SMTP()
try:
smtp.connect(mail_host)
smtp.login(mail_from, mail_pass)
smtp.sendmail(mail_from, mailto_list, msg.as_string())
smtp.close()
_mylogger.info('带附件测试报告发送成功!')
return True
    except Exception as e:
_mylogger.error('邮件发送失败:%s' %e)
return False
if __name__ == '__main__':
content= "测试"
#reportFile = 'log_config.py'
reportFile='d:/居住证信息采集表.xls'
email_result = send_mail("自动化测试结果",content, reportFile)
if email_result:
print ("发送成功")
else:
_mylogger.error(email_result)
print ("发送失败")
|
[
"260137162@qq.com"
] |
260137162@qq.com
|
0a5097054cb54b982ff61d0b09a2de99fa6086e9
|
a86b732e65530ca74487e006994df75db8321b19
|
/.venv/bin/easy_install
|
d93767c6e1e13a1496ae2d1702a5c54f411a8721
|
[] |
no_license
|
imudiand/coffee
|
fe5babc3479aea3e039895a8aa3d9650fdea88ea
|
c0a4902e167ee5f8b72d8ab8819d904662ab33ac
|
refs/heads/master
| 2021-01-23T05:24:14.828719
| 2017-03-27T07:01:38
| 2017-03-27T07:01:38
| 86,301,559
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 265
|
#!/home/himudianda/Code/pourkarma/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"harshit.imudianda@gmail.com"
] |
harshit.imudianda@gmail.com
|
|
3be39b5fe4dd419e200b224263af3c62fdc0ab08
|
5fa129728be401ac437ea29ce851d603dd884d40
|
/amazon_from_space/experiments/simple_vgg/submit.py
|
980209affb34e26234e1cc507f38ea9d47970f10
|
[] |
no_license
|
csmfindling/kaggle
|
343b6be155423b4b5d6b539e45b91d86f5f3dce5
|
5260488128e4d1fc3265b6794232588598c9f090
|
refs/heads/master
| 2021-05-09T21:50:09.943758
| 2018-02-06T17:36:07
| 2018-02-06T17:36:07
| 118,736,590
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,983
|
py
|
import sys
from vgg_16 import get_model, build_model
from theano import tensor, function, config
import lasagne
from fuel.streams import ServerDataStream
import numpy
import pickle
from config import basepath
submit_stream = ServerDataStream(('features', 'image_name'), produces_examples=False)
# tensor
X = tensor.ftensor4('images')
# build simple vgg model
net, layers_names = build_model(X)
f_pretrained = open(basepath + 'vgg16.pkl', 'rb') # pickle payload; open in binary mode
model_pretrained = pickle.load(f_pretrained)
w_pretrained = model_pretrained['param values']
net['mean value'].set_value(model_pretrained['mean value'].astype(config.floatX))
# load weights
from lasagne.layers import set_all_param_values
with numpy.load('weights/simple_vgg_valid.npz') as f:
param_values = [f['arr_%d' % i] for i in range(len(f.files))]
set_all_param_values(net[layers_names[len(layers_names)-1]], param_values[0])
# create predict function
prediction_test = lasagne.layers.get_output(net[layers_names[len(layers_names)-1]], deterministic=True)
eval_fn = function([X], [prediction_test])
# Classes
classes = {0: 'haze', 1: 'primary', 2: 'agriculture', 3: 'clear',
4: 'water', 5: 'habitation', 6: 'road', 7: 'cultivation', 8: 'slash_burn', 9: 'cloudy',
10: 'partly_cloudy', 11: 'conventional_mine', 12: 'bare_ground', 13: 'artisinal_mine', 14: 'blooming', 15: 'selective_logging', 16: 'blow_down'}
# submit file
import csv
csvfile = open('submits/submit_simplevgg.csv', 'wb')
spamwriter = csv.writer(csvfile, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow(['image_name,tags'])
# prediction
import time
print('writing submit file')
for imgs, imgs_name in submit_stream.get_epoch_iterator():
pred_targets = eval_fn(imgs)
for pred_idx in range(len(pred_targets[0])):
spamwriter.writerow([imgs_name[pred_idx].split('.')[0] + ','] + [classes[k] for k in numpy.arange(len(classes))[(pred_targets[0][pred_idx] > .5)]])
time.sleep(.5)
csvfile.close()
|
[
"charles.findling@gmail.com"
] |
charles.findling@gmail.com
|
e6e189cb558812c531ad1716c7146ef83fc9e95e
|
85d90c0bd78b8bb37f8dfd2e8a93b5f43a34e100
|
/scripts/analysis/bag/plot_ttm_performance.py
|
d59533fa56ad459f43306ca5bef1d85ee8bde2ab
|
[] |
no_license
|
tsizemo2/flymad
|
61a85b1bb9eda9692e1ba0ca2545d7cf4da7f562
|
2c484df1244549da52f49ffedc894d435c809d34
|
refs/heads/master
| 2022-04-27T18:29:46.953644
| 2015-04-09T09:54:08
| 2015-04-09T09:54:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,305
|
py
|
import sys
import json
import math
import os.path
import collections
import cPickle
import glob
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import roslib; roslib.load_manifest('flymad')
import flymad.madplot as madplot
Err = collections.namedtuple('Err', 'err dt_thisfly dt_ttm')
Target = collections.namedtuple('Target', 'obj_id from_idx to_idx v ttm_err wf_err')
ttm_unit = 'mm'
ttm_conv = lambda x: np.array(x)*0.00416 #px -> mm
wide_unit = 'mm'
w_arena = madplot.Arena('mm')
wide_conv = w_arena.scale #lambda x: x*0.21077 #px -> mm
def prepare_data(arena, path):
pool_df = madplot.load_bagfile_single_dataframe(path, arena, ffill=False, filter_short_pct=10.0)
targets = pool_df['target_type'].dropna().unique()
if len(targets) != 1:
raise Exception("Only head or body may be targeted")
if targets[0] == 1:
target_name = "head"
elif targets[0] == 2:
target_name = "body"
else:
raise Exception("Only head or body may be targeted")
ldf_all = pool_df[~pool_df['lobj_id'].isnull()]
hdf_all = pool_df[~pool_df['h_framenumber'].isnull()]
#the first fly targeted
prev = pool_df['lobj_id'].dropna().head(1)
prev_id = prev.values[0]
prev_ix = prev.index[0]
target_ranges = {
(0,100):[],
}
for ix, row in pool_df.iterrows():
lobj_id = row['lobj_id']
if np.isnan(lobj_id):
continue
elif lobj_id != prev_id:
df = pool_df[prev_ix:ix]
#mean speed of fly over its whole turn
v = df['v'].mean()
#details of the TrackedObj message
ldf = df[~df['lobj_id'].isnull()]
#we want to know when we go to TTM mode
gbs = ldf.groupby('mode')
try:
#this is the time we first switch to TTM
to_ttm = gbs.groups[2.0][0]
print "------------",v
ttm_errs = []
ttm_dts_thisfly = []
ttm_dts = []
wf_errs = []
wf_dts_thisfly = []
wf_dts = []
#get the head detection data once we start ttm
hdf = pool_df[to_ttm:ix]
#extract only the head messages
for i,(hix,hrow) in enumerate(hdf[~hdf['h_framenumber'].isnull()].iterrows()):
#skip when we miss the head
if hrow['head_x'] == 1e6:
continue
if target_name == "head":
ttm_err = math.sqrt( (hrow['head_x']-hrow['target_x'])**2 +\
(hrow['head_y']-hrow['target_y'])**2)
elif target_name == "body":
ttm_err = math.sqrt( (hrow['body_x']-hrow['target_x'])**2 +\
(hrow['body_y']-hrow['target_y'])**2)
#if the first value is < 10 it is likely actually associated
#with the last fly TTM, so ignore it....
if (i == 0) and (ttm_err < 100):
continue
#the time since we switched to this fly
thisfly_dt = (hix-prev_ix).total_seconds()
#the time since we switched to TTM
ttm_dt = (hix-to_ttm).total_seconds()
ttm_errs.append(ttm_err)
ttm_dts_thisfly.append(thisfly_dt)
ttm_dts.append(ttm_dt)
wdf = ldf[:hix].tail(1)
wf_err = math.sqrt( (wdf['fly_x']-wdf['laser_x'])**2 +\
(wdf['fly_y']-wdf['laser_y'])**2)
#the time since we switched to this fly
thisfly_dt = (wdf.index[0]-prev_ix).total_seconds()
#the time since we switched to TTM
ttm_dt = (wdf.index[0]-to_ttm).total_seconds()
wf_errs.append(wf_err)
wf_dts_thisfly.append(thisfly_dt)
wf_dts.append(ttm_dt)
print thisfly_dt, ttm_dt, ttm_err
ttm_err = Err(ttm_errs, ttm_dts_thisfly, ttm_dts)
wf_err = Err(wf_errs, wf_dts_thisfly, wf_dts)
trg = Target(lobj_id, prev_ix, ix, v, ttm_err, wf_err)
except KeyError:
#never switched to TTM
print "never switched to ttm"
trg = None
pass
#except Exception:
# print "UNKNOWN ERROR"
# trg = None
#classify the target into which speed range
if trg is not None:
for k in target_ranges:
vmin,vmax = k
if vmin < trg.v <= vmax:
target_ranges[k].append(trg)
prev_id = lobj_id
prev_ix = ix
data = {'target_name':target_name,
'pooldf':pool_df,
'ldf':ldf_all,
'hdf':hdf_all,
'target_ranges':target_ranges}
pkl_fname = "%s_%s.pkl" % (path, target_name)
cPickle.dump(data, open(pkl_fname,'wb'), -1)
return data
def load_data(arena, path):
pkl_fname = glob.glob("%s_*.pkl" % path)[0]
return cPickle.load(open(pkl_fname,'rb'))
def plot_data(arena, path, data):
target_name = data['target_name']
pool_df = data['pooldf']
ldf = data['ldf']
hdf = data['hdf']
target_ranges = data['target_ranges']
fig = plt.figure("Image Processing Time (%s)" % target_name)
ax = fig.add_subplot(1,1,1)
ax.hist(hdf['h_processing_time'].values, bins=100)
ax.set_xlabel('processing time (s)')
ax.set_title("Distribution of time taken for %s detection" % target_name)
fig.savefig('imgproc_%s.png' % target_name)
fig = plt.figure("TTM Tracking", figsize=(8,8))
ax = fig.add_subplot(1,1,1)
ax.add_patch(arena.get_patch(color='k', alpha=0.1))
madplot.plot_laser_trajectory(ax, ldf, arena)
ax.set_title("The effect of TTM tracking on laser position.\n"\
"Values required to hit the fly %s" % target_name)
ax.legend()
fig.savefig('ttmeffect_%s.png' % target_name)
all_v = []
all_e = []
#YAY finally plot
for k in target_ranges:
vmin,vmax = k
fvels = "%s-%s px" % (vmin,vmax)
figf = plt.figure("tFly %s/s" % fvels)
axf = figf.add_subplot(1,1,1)
axf_w = axf.twinx()
figt = plt.figure("tTTM %s/s" % fvels)
axt = figt.add_subplot(1,1,1)
axt_w = axt.twinx()
for trg in target_ranges[k]:
            # widefield
err = trg.wf_err
axf_w.plot(err.dt_thisfly,
wide_conv(err.err),
'r', alpha=0.3)
axt_w.plot(err.dt_ttm,
wide_conv(err.err),
'r', alpha=0.3)
#ttm
err = trg.ttm_err
axf.plot(err.dt_thisfly,
ttm_conv(err.err),
'k', alpha=0.2)
axt.plot(err.dt_ttm,
ttm_conv(err.err),
'k', alpha=0.2)
all_v.append(trg.v)
all_e.append(np.mean(ttm_conv(err.err)))
for ax in (axf,axt):
ax.set_xlim([0, 0.25])
ax.set_xlabel('time (s)')
ax.set_ylabel('TTM error (%s)' % ttm_unit)
ax.set_ylim([0, 1.5])
for ax in (axf_w,axt_w):
ax.set_ylabel('deviation from WF COM (%s)' % wide_unit)
ax.set_ylim([0, 4])
for axttm,axwf in [(axt,axt_w),(axf,axf_w)]:
            axttm.set_zorder(axwf.get_zorder()+1) # put the TTM axis in front of the widefield axis
axttm.patch.set_visible(False)
        axf.set_title("Spatial accuracy of %s targeting (fly speed %s/s)\n"\
                      "(time since targeting this fly)" % (target_name,fvels))
        axt.set_title("Spatial accuracy of %s targeting (fly speed %s/s)\n"\
                      "(time since switching to TTM targeting)" % (target_name,fvels))
figf.savefig(('tFly%s%s.png' % (fvels,target_name)).replace(' ',''))
figt.savefig(('tTTM%s%s.png' % (fvels,target_name)).replace(' ',''))
fig = plt.figure("Speed %s" % target_name)
ax = fig.add_subplot(1,1,1)
ax.scatter(wide_conv(all_v),all_e)
ax.set_ylim([0, 1.5])
ax.set_xlim([0, 2.5])
ax.set_title("Spacial accuracy of %s targeting" % target_name)
ax.set_xlabel('fly speed (%s/s)' % wide_unit)
ax.set_ylabel('error (%s)' % ttm_unit)
fig.savefig('flyv_errpx_%s.png' % target_name)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('path', nargs=1, help='path to bag file')
parser.add_argument('--only-plot', action='store_true', default=False)
parser.add_argument('--show', action='store_true', default=False)
args = parser.parse_args()
path = args.path[0]
#This arena is only for plotting
arena = madplot.Arena(False)
if args.only_plot:
data = load_data(arena, path)
else:
data = prepare_data(arena, path)
plot_data(arena, path, data)
if args.show:
plt.show()
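# Example invocation (hypothetical bag path):
#   python plot_ttm_performance.py /data/session.bag                # prepare data, then plot
#   python plot_ttm_performance.py /data/session.bag --only-plot --show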
|
[
"john.stowers@gmail.com"
] |
john.stowers@gmail.com
|
0bd87328647ac320d4cd30c64aa2ba244f533733
|
2e0a75c2b277c6e36838fe5410ab54c897a9d952
|
/train_xception.py
|
da7652769cfcf49d03ea095f40ab18e24e261702
|
[] |
no_license
|
msakarvadia/TFrecords-
|
4a4f30cabfe4794ce2c85d621b399ab488da7426
|
f5c3a413a079df366adf63d55ba484fa8ab831f3
|
refs/heads/master
| 2022-12-19T05:56:14.177044
| 2020-09-28T03:26:14
| 2020-09-28T03:26:14
| 299,161,718
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,197
|
py
|
import tensorflow as tf
from functools import partial
import matplotlib.pyplot as plt
try:
tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
print("Device:", tpu.master())
tf.config.experimental_connect_to_cluster(tpu)
tf.tpu.experimental.initialize_tpu_system(tpu)
strategy = tf.distribute.experimental.TPUStrategy(tpu)
except ValueError:  # no TPU available; fall back to the default (CPU/GPU) strategy
    strategy = tf.distribute.get_strategy()
print("Number of replicas:", strategy.num_replicas_in_sync)
AUTOTUNE = tf.data.experimental.AUTOTUNE
GCS_PATH = "./tfrecords/"
BATCH_SIZE = 64
IMAGE_SIZE = [1024, 1024]
FILENAMES = tf.io.gfile.glob(GCS_PATH + "*.tfrecords")
print("number of total file names ",len(FILENAMES))
train_ind = int(0.7 * len(FILENAMES))
test_ind = int(0.9 * len(FILENAMES))
TRAINING_FILENAMES, VALID_FILENAMES, TEST_FILENAMES = FILENAMES[:train_ind],FILENAMES[train_ind:test_ind], FILENAMES[test_ind:]
print("Train TFRecord Files:", len(TRAINING_FILENAMES))
print("Validation TFRecord Files:", len(VALID_FILENAMES))
print("Test TFRecord Files:", len(TEST_FILENAMES))
def decode_image(image):
image = tf.image.decode_jpeg(image, channels=3)
image = tf.cast(image, tf.float32)
image = tf.reshape(image, [*IMAGE_SIZE, 3])
return image
def read_tfrecord(example, labeled):
tfrecord_format = (
{
"image": tf.io.FixedLenFeature([], tf.string),
"target": tf.io.FixedLenFeature([], tf.int64),
}
if labeled
else {"image": tf.io.FixedLenFeature([], tf.string),}
)
example = tf.io.parse_single_example(example, tfrecord_format)
image = decode_image(example["image"])
if labeled:
label = tf.cast(example["target"], tf.int32)
return image, label
return image
def load_dataset(filenames, labeled=True):
ignore_order = tf.data.Options()
ignore_order.experimental_deterministic = False # disable order, increase speed
dataset = tf.data.TFRecordDataset(
filenames
) # automatically interleaves reads from multiple files
dataset = dataset.with_options(
ignore_order
) # uses data as soon as it streams in, rather than in its original order
dataset = dataset.map(
partial(read_tfrecord, labeled=labeled), num_parallel_calls=AUTOTUNE
)
# returns a dataset of (image, label) pairs if labeled=True or just images if labeled=False
return dataset
def get_dataset(filenames, labeled=True):
dataset = load_dataset(filenames, labeled=labeled)
dataset = dataset.shuffle(2048)
dataset = dataset.prefetch(buffer_size=AUTOTUNE)
dataset = dataset.batch(BATCH_SIZE)
return dataset
train_dataset = get_dataset(TRAINING_FILENAMES)
valid_dataset = get_dataset(VALID_FILENAMES)
test_dataset = get_dataset(TEST_FILENAMES, labeled=False)
image_batch, label_batch = next(iter(train_dataset))
initial_learning_rate = 0.01
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate, decay_steps=20, decay_rate=0.96, staircase=True
)
checkpoint_cb = tf.keras.callbacks.ModelCheckpoint(
"melanoma_model.h5", save_best_only=True
)
early_stopping_cb = tf.keras.callbacks.EarlyStopping(
patience=10, restore_best_weights=True
)
def make_model():
base_model = tf.keras.applications.Xception(
input_shape=(*IMAGE_SIZE, 3), include_top=False, weights="imagenet"
)
base_model.trainable = False
inputs = tf.keras.layers.Input([*IMAGE_SIZE, 3])
x = tf.keras.applications.xception.preprocess_input(inputs)
x = base_model(x)
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = tf.keras.layers.Dense(8, activation="relu")(x)
x = tf.keras.layers.Dropout(0.7)(x)
outputs = tf.keras.layers.Dense(1, activation="sigmoid")(x)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.compile(
optimizer=tf.keras.optimizers.Adam(learning_rate=lr_schedule),
loss="binary_crossentropy",
metrics=tf.keras.metrics.AUC(name="auc"),
)
return model
with strategy.scope():
model = make_model()
history = model.fit(
train_dataset,
epochs=2,
validation_data=valid_dataset,
callbacks=[checkpoint_cb, early_stopping_cb],
)
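# Hypothetical follow-up: score the unlabeled test split with the trained model.
#   preds = model.predict(test_dataset)  # sigmoid probability per image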
|
[
"mansi.sakarvadia@gmail.com"
] |
mansi.sakarvadia@gmail.com
|
bd41f792944b8b6c00855931c0ca64dcf053e71c
|
4ddd4a5f7361ded2c6582c6f132b996e8ead3998
|
/controle/migrations/0003_auto_20180605_2344.py
|
874399f1adb9c265dac280d65b5eb9e0c01817d4
|
[] |
no_license
|
welttonsantos/django-biblioteca
|
e9eff96d72787ec283189d64d15e265c5a2b1fe7
|
873bee1c880c055376054a2cc5a4d0e90581ad16
|
refs/heads/master
| 2020-03-26T22:43:20.938094
| 2018-08-21T00:08:21
| 2018-08-21T00:08:21
| 145,482,259
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,692
|
py
|
# Generated by Django 2.0.4 on 2018-06-05 23:44
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('controle', '0002_auto_20180605_2228'),
]
operations = [
migrations.RenameField(
model_name='cliente',
old_name='CPF_cliente',
new_name='cpf',
),
migrations.RenameField(
model_name='cliente',
old_name='Tel_cliente',
new_name='nome',
),
migrations.RenameField(
model_name='cliente',
old_name='nome_Cliente',
new_name='telefone',
),
migrations.RenameField(
model_name='funcionario',
old_name='Cargo_Func',
new_name='Cargo',
),
migrations.RenameField(
model_name='funcionario',
old_name='CPF_cliente',
new_name='Telefone',
),
migrations.RenameField(
model_name='funcionario',
old_name='Tel_cliente',
new_name='cpf',
),
migrations.RenameField(
model_name='funcionario',
old_name='nome_Funcionario',
new_name='nome',
),
migrations.RenameField(
model_name='livros',
old_name='descricao_livro',
new_name='descricao',
),
migrations.RenameField(
model_name='livros',
old_name='nome_livro',
new_name='nome',
),
migrations.RenameField(
model_name='livros',
old_name='quantidade_paginas',
new_name='quantidade',
),
]
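# Note (illustrative, not part of the generated migration): the cliente
# renames above pair Tel_cliente -> nome and nome_Cliente -> telefone, which
# looks crossed relative to the field names; Django's makemigrations can pair
# ambiguous renames this way, so the mapping is worth verifying against the
# intended model before applying.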
|
[
"welttonsantos2009@gmail.com"
] |
welttonsantos2009@gmail.com
|
ce71225f3eda6053f680a0c9e7c4f22bac2e8596
|
4a532f64adb8d1ebc8a529a5d56c47f61a3fd680
|
/cgi-bin/example
|
47984e11d78db501772eb810bfe4826d13e13b6c
|
[] |
no_license
|
HuangXiaohan/Server_projet
|
05c34373d7827ec8c03b9a15e8ef9a6e663ff9d9
|
b3b9a3510072ae71faa78bf066b76a921819598b
|
refs/heads/master
| 2020-09-11T07:12:07.591698
| 2016-09-01T21:39:48
| 2016-09-01T21:39:48
| 67,076,275
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 444
|
#!/usr/bin/env python
import os
header = 'Content-Type: text/html\n'
form = os.environ["QUERY_STRING"]
# naive parsing: assumes exactly two key=value pairs in a fixed order
username = form.split("&")[0].split("=")[1]
whattodo = form.split("&")[1].split("=")[1]
print header  # emit the CGI header (plus the required blank line) before the body
print "<html>"
print "<head>"
print "<title>Document produced by a CGI</title>"
print "</head>"
print "<body>"
print "<h2>CGI Script Output </h2>"
print "<p> Username = %s" % username
print "<p> WhatTodo = %s" % whattodo
print "</body>"
print "</html>"
|
[
"stephanie1028han@gmail.com"
] |
stephanie1028han@gmail.com
|
|
fff729bb9d54ea5cfc094327904b3a288a0d004e
|
da2e582a2ce3b25d6102d891050550f90cd1c255
|
/web_service/CentralVisualizerService.py
|
ebe23327c551894e6e8dbefff84cbd073ecf67e1
|
[
"MIT"
] |
permissive
|
harshalpatil199529/Network-Visualizer
|
9e4432bda196a04ca3aa1f8ccd1c3439c558a8f4
|
fc9d14c3c829eb3b645e05bf7c1a81d6d81188a3
|
refs/heads/master
| 2020-03-27T04:08:02.488901
| 2018-04-09T15:14:52
| 2018-04-09T15:14:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,894
|
py
|
#!/usr/bin/env python
import time, json, sys, logging
from datetime import datetime
from flask import Flask, make_response, render_template, request
from pymongo import MongoClient
class CentralVisualizerService(object):
def __init__(self,config_dict):
_mc = MongoClient()
_ipopdb = _mc[config_dict["mongo"]["dbname"]]
self._mongo_data = _ipopdb[config_dict["mongo"]["collection_name"]]
self._logger = logging.getLogger("network_visualizer.central_visualizer")
def _get_current_state(self,current_doc,requested_interval):
response_msg = {
"current_state": current_doc,
"intervalNo":str(requested_interval)
}
return response_msg
def _find_diff_between_intervals(self,new_doc,old_doc,requested_interval):
added = { key : new_doc[key] for key in set(new_doc) - set(old_doc) }
removed = { key : old_doc[key] for key in set(old_doc) - set(new_doc) }
existing = list(set(new_doc) - set(added))
modified ={}
for key in existing:
if frozenset(new_doc[key].items()) != frozenset(old_doc[key].items()) :
modified[key] = new_doc[key]
response_msg = {
"removed": removed,
"added": added,
"modified": modified,
"intervalNo": str(requested_interval),
"updateOffset": 15 # To be modified
}
return response_msg
#route('/IPOP')
def homepage(self):
resp = render_template('ipop_mainpage.html')
return resp
#route('/IPOP/intervals', methods=['GET'])
def get_intervals(self):
start_time = datetime.strptime(request.args.get('start') , "%Y-%m-%dT%H:%M:%S")
end_time = datetime.strptime(request.args.get('end') , "%Y-%m-%dT%H:%M:%S")
interval_numbers = []
for docs in self._mongo_data.find({"_id": {"$gt": start_time , "$lt": end_time}}, {"_id":1}):
interval_numbers.append(str(docs["_id"]))
response_msg = {"IPOP" : {
"IntervalNumbers" : interval_numbers
}}
resp = make_response(json.dumps(response_msg))
resp.headers['Content-Type'] = "application/json"
return resp
#route('/IPOP/overlays', methods=['GET'])
def get_overlays(self):
current_state = request.args.get('current_state')
requested_interval = datetime.strptime(request.args.get('interval') , "%Y-%m-%dT%H:%M:%S")
if current_state == "True":
self._logger.debug('Request received for all Overlays at {}'.format(requested_interval))
current_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Overlays":1})
if current_doc == None : current_doc = {"_id":"", "Overlays":{}}
response_msg = self._get_current_state(current_doc["Overlays"],str(current_doc["_id"]))
else:
self._logger.debug('Request received for updates in Overlays at {}'.format(requested_interval))
new_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Overlays":1})
if new_doc == None : new_doc = {"_id":"", "Overlays":{}}
old_doc = self._mongo_data.find_one({"$query":{"_id": {"$lt": new_doc["_id"]}},"$orderby":{"_id":-1}},{"_id":1, "Overlays":1})
if old_doc == None : old_doc = {"_id":"", "Overlays":{}}
response_msg = self._find_diff_between_intervals(new_doc["Overlays"],old_doc["Overlays"],str(new_doc["_id"]))
self._logger.debug('The server response for Overlays request: {}'.format(response_msg))
resp = make_response(json.dumps(response_msg))
resp.headers['Content-Type'] = "application/json"
return resp
#route('/IPOP/overlays/<overlayid>/nodes', methods=['GET'])
def get_nodes_in_an_overlay(self,overlayid):
current_state = request.args.get('current_state')
requested_interval = datetime.strptime(request.args.get('interval') , "%Y-%m-%dT%H:%M:%S")
if current_state == "True":
self._logger.debug('Request received for all nodes in overlay {} at {}'.format(overlayid,requested_interval))
current_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Nodes."+overlayid:1})
if current_doc == None: current_doc = {"_id":"", "Nodes":{ overlayid:{} }}
elif current_doc["Nodes"] == {}: current_doc["Nodes"][overlayid] = {}
response = self._get_current_state(current_doc["Nodes"][overlayid],str(current_doc["_id"]))
response_msg = {
overlayid: {
"current_state": response["current_state"]
},
"intervalNo": response["intervalNo"]
}
else:
self._logger.debug('Request received for updates in nodes in overlay {} at {}'.format(overlayid,requested_interval))
new_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Nodes."+overlayid:1})
if new_doc == None: new_doc = {"_id":"", "Nodes":{ overlayid:{} }}
elif new_doc["Nodes"] == {}: new_doc["Nodes"][overlayid]={}
old_doc = self._mongo_data.find_one({"$query":{"_id": {"$lt": new_doc["_id"]}},"$orderby":{"_id":-1}},{"_id":1, "Nodes."+overlayid:1})
if old_doc == None: old_doc = {"_id":"", "Nodes":{ overlayid:{} }}
elif old_doc["Nodes"] == {}: old_doc["Nodes"][overlayid]={}
response = self._find_diff_between_intervals(new_doc["Nodes"][overlayid],old_doc["Nodes"][overlayid],str(new_doc["_id"]))
response_msg = {
overlayid: {
"removed": response["removed"],
"added": response["added"],
"modified": response["modified"]
},
"intervalNo": response["intervalNo"],
"updateOffset": response["updateOffset"]
}
self._logger.debug('The server response for nodes in overlay {} request: {}'.format(overlayid,response_msg))
resp = make_response(json.dumps(response_msg))
resp.headers['Content-Type'] = "application/json"
return resp
#route('/IPOP/overlays/<overlayid>/nodes/<nodeid>', methods=['GET'])
def get_single_node(self,overlayid,nodeid):
requested_interval = datetime.strptime(request.args.get('interval') , "%Y-%m-%dT%H:%M:%S")
self._logger.debug('Request received for the node {} in overlay {} at {}'.format(nodeid,overlayid,requested_interval))
new_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Nodes."+overlayid+"."+nodeid:1})
try:
nodeProp = new_doc["Nodes"][overlayid][nodeid]
except:
nodeProp = {}
response_msg = {
overlayid : {
nodeid : nodeProp
},
"intervalNo": str(new_doc["_id"])
}
self._logger.debug('The server response for the node {} in overlay {} request: {}'.format(nodeid,overlayid,response_msg))
resp = make_response(json.dumps(response_msg))
resp.headers['Content-Type'] = "application/json"
return resp
#route('/IPOP/overlays/<overlayid>/links', methods=['GET'])
def get_links_in_an_overlay(self,overlayid):
current_state = request.args.get('current_state')
requested_interval = datetime.strptime(request.args.get('interval') , "%Y-%m-%dT%H:%M:%S")
if current_state == "True":
self._logger.debug('Request received for all links in overlay {} at {}'.format(overlayid,requested_interval))
current_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Links."+overlayid:1})
if current_doc == None: current_doc = {"_id":"", "Links":{ overlayid:{} }}
elif current_doc["Links"] == {}: current_doc["Links"][overlayid] = {}
response = self._get_current_state(current_doc["Links"][overlayid],str(current_doc["_id"]))
response_msg = {
overlayid: {
"current_state": response["current_state"]
},
"intervalNo":response["intervalNo"]
}
else:
self._logger.debug('Request received for updates in links in overlay {} at {}'.format(overlayid,requested_interval))
new_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Links."+overlayid:1})
if new_doc == None: new_doc = {"_id":"", "Links":{ overlayid:{} }}
elif new_doc["Links"] == {}: new_doc["Links"][overlayid] = {}
old_doc = self._mongo_data.find_one({"$query":{"_id": {"$lt": new_doc["_id"]}},"$orderby":{"_id":-1}},{"_id":1, "Links."+overlayid:1})
if old_doc == None: old_doc = {"_id":"", "Links":{ overlayid:{} }}
elif old_doc["Links"] == {}: old_doc["Links"][overlayid] = {}
allNodes = (set(new_doc["Links"][overlayid]) | (set(old_doc["Links"][overlayid])-set(new_doc["Links"][overlayid])))
response_msg = {
overlayid: {
"removed": {}, #Might be converted to List in future
"added": {},
"modified": {}
},
"intervalNo": str(new_doc["_id"]),
"updateOffset": 10 #To be modified
}
for nodeid in allNodes:
if nodeid not in old_doc["Links"][overlayid]:
old_doc["Links"][overlayid][nodeid] = {}
if nodeid not in new_doc["Links"][overlayid]:
new_doc["Links"][overlayid][nodeid] = {}
tempResponse = self._find_diff_between_intervals(new_doc["Links"][overlayid][nodeid],old_doc["Links"][overlayid][nodeid],str(new_doc["_id"]))
response_msg[overlayid]["removed"][nodeid] = tempResponse["removed"]
response_msg[overlayid]["added"][nodeid] = tempResponse["added"]
response_msg[overlayid]["modified"][nodeid] = tempResponse["modified"]
self._logger.debug('The server response for links in overlay {} request: {}'.format(overlayid,response_msg))
resp = make_response(json.dumps(response_msg))
resp.headers['Content-Type'] = "application/json"
return resp
#route('/IPOP/overlays/<overlayid>/nodes/<nodeid>/links', methods=['GET'])
def get_links_for_a_node(self,overlayid,nodeid):
current_state = request.args.get('current_state')
requested_interval = datetime.strptime(request.args.get('interval') , "%Y-%m-%dT%H:%M:%S")
if current_state == "True":
            self._logger.debug('Request received for all links from node {} in overlay {} at {}'.format(nodeid,overlayid,requested_interval))
current_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Links."+overlayid+"."+nodeid:1})
if current_doc == None: current_doc = {"_id":"", "Links":{ overlayid:{ nodeid:{} }}}
elif current_doc["Links"] == {}: current_doc["Links"][overlayid] = {nodeid:{}}
elif current_doc["Links"][overlayid] == {}: current_doc["Links"][overlayid][nodeid] = {}
response = self._get_current_state(current_doc["Links"][overlayid][nodeid],str(current_doc["_id"]))
response_msg = {
overlayid: {
nodeid: {
"current_state": response["current_state"]
}
},
"intervalNo": response["intervalNo"]
}
else:
            self._logger.debug('Request received for updates in links from node {} in overlay {} at {}'.format(nodeid,overlayid,requested_interval))
new_doc = self._mongo_data.find_one({"$query":{"_id": {"$lte": requested_interval}},"$orderby":{"_id":-1}},{"_id":1, "Links."+overlayid+"."+nodeid:1})
            if new_doc == None: new_doc = {"_id":"", "Links":{ overlayid:{ nodeid:{} }}}
elif new_doc["Links"] == {}: new_doc["Links"][overlayid] = {nodeid:{}}
elif new_doc["Links"][overlayid] == {}: new_doc["Links"][overlayid][nodeid] = {}
old_doc = self._mongo_data.find_one({"$query":{"_id": {"$lt": new_doc["_id"]}},"$orderby":{"_id":-1}},{"_id":1, "Links."+overlayid+"."+nodeid:1})
if old_doc == None: old_doc = {"_id":"", "Links":{ overlayid:{ nodeid:{} }}}
elif old_doc["Links"] == {}: old_doc["Links"][overlayid] = {nodeid:{}}
elif old_doc["Links"][overlayid] == {}: old_doc["Links"][overlayid][nodeid] = {}
response = self._find_diff_between_intervals(new_doc["Links"][overlayid][nodeid],old_doc["Links"][overlayid][nodeid],str(new_doc["_id"]))
response_msg = {
overlayid: {
nodeid: {
"removed": response["removed"],
"added": response["added"],
"modified": response["modified"]
}
},
"intervalNo": response["intervalNo"],
"updateOffset": response["updateOffset"]
}
self._logger.debug('The server response for links from node {} in overlay {} request: {}'.format(nodeid,overlayid,response_msg))
resp = make_response(json.dumps(response_msg))
resp.headers['Content-Type'] = "application/json"
return resp
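# --- Illustrative sketch (editor's addition, not part of the original file) ---
# The commented #route(...) annotations suggest URL rules are bound elsewhere;
# one plausible wiring (the names app and config_dict are assumptions):
# app = Flask(__name__)
# service = CentralVisualizerService(config_dict)
# app.add_url_rule('/IPOP', 'homepage', service.homepage)
# app.add_url_rule('/IPOP/intervals', 'get_intervals', service.get_intervals)
# app.add_url_rule('/IPOP/overlays', 'get_overlays', service.get_overlays)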
|
[
"kcratie@users.noreply.github.com"
] |
kcratie@users.noreply.github.com
|
3603674231af86631210a837cbefa841d37ed8b0
|
95050b0846038887fb632b95300d439ccbd3f96e
|
/Martinez_1D_Euler/Runge_Kutta_Third_Order_np3.py
|
91438a89608a09602f4d303430b811bf7d620384
|
[] |
no_license
|
lgmartinez/AeroPython-studentprojects
|
3d53407042a0f0cd69903ae1bb5f0826dc6642bb
|
6469189565605264e53653db501097b846056539
|
refs/heads/master
| 2021-01-18T08:25:44.244554
| 2015-05-08T15:37:13
| 2015-05-08T15:37:13
| 35,285,807
| 0
| 0
| null | 2015-05-08T15:07:34
| 2015-05-08T15:07:34
| null |
UTF-8
|
Python
| false
| false
| 1,615
|
py
|
import numpy
#---------------------------------------------------
# ----------------Stage 1 Runge Kutta---------------
# note: the parameter np is the number of solution points per cell, not numpy
def get_Qrk_stage1(Q_rk, ncell, rk3, np, dt, residual):
Qrk1 = numpy.zeros((rk3,np, ncell), dtype=float)
for i in range(ncell):
for iv in range(rk3):
for k in range(np):
Qrk1[iv,k,i] = Q_rk[iv,k,i]+(dt*residual[iv,k,i])
return Qrk1
#------------------------------------------------------
#------------------------------------------------------
# ----------------Stage 2 Runge Kutta------------------
def get_Qrk_stage2(Q_rk, Qrk1, ncell, rk3, np, dt, residual):
Qrk2 = numpy.zeros((rk3,np, ncell), dtype=float)
for i in range(ncell):
for iv in range(rk3):
for k in range(np):
Qrk2[iv,k,i] = (0.75*Q_rk[iv,k,i])+0.25*(Qrk1[iv,k,i]+(dt*residual[iv,k,i]))
return Qrk2
#------------------------------------------------------
#------------------------------------------------------
# -----------------Stage 3 Runge Kutta-----------------
def get_Qrk_stage3(Q_rk, Qrk2, ncell, rk3, np, dt, residual):
Qrk3 = numpy.zeros((rk3,np, ncell), dtype=float)
for i in range(ncell):
for iv in range(rk3):
for k in range(np):
Qrk3[iv,k,i] = ((1.0/3.0)*(Q_rk[iv,k,i])) + ((2.0/3.0)*(Qrk2[iv,k,i]+(dt*residual[iv,k,i])))
return Qrk3
#------------------------------------------------------
#ctime = ctime + 0.5*dt
#------------------------------------------------------
'''residnorm = 0.0
for i in range(ncell):
for k in range(np):
residnorm = residnorm + numpy.abs(residual[2,k,i])
'''
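# --- Illustrative sketch (editor's addition, not in the original file) ---
# The three stages above compose into one step of third-order SSP Runge-Kutta;
# the residual-evaluation function name is an assumption:
# residual = compute_residual(Q_rk)
# Qrk1 = get_Qrk_stage1(Q_rk, ncell, rk3, np, dt, residual)
# residual = compute_residual(Qrk1)
# Qrk2 = get_Qrk_stage2(Q_rk, Qrk1, ncell, rk3, np, dt, residual)
# residual = compute_residual(Qrk2)
# Q_rk = get_Qrk_stage3(Q_rk, Qrk2, ncell, rk3, np, dt, residual)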
|
[
"lgmartinez1@gwu.edu"
] |
lgmartinez1@gwu.edu
|
3d11271b7ad12e1c3aa9d06aef923626c99c4af8
|
9b62f23a0542077c16d9d283eaeefb69a84fd0e5
|
/easy_print/__init__.py
|
49d1eeabf415f2ce8a0a03b7f608ecfca6c6a5af
|
[
"MIT"
] |
permissive
|
dsaiztc/easy_print
|
fdce9b9dd1832457cb606968c2f870eebb254f6e
|
a484028814a892f859092c3a0c43966dbb4aca44
|
refs/heads/master
| 2021-01-20T22:11:51.921302
| 2016-08-01T11:10:25
| 2016-08-01T11:10:25
| 64,545,262
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,331
|
py
|
# -*- coding: utf-8 -*-
from sys import stdout
# http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
colors = {
'header': '\033[95m',
'okblue': '\033[94m',
'okgreen': '\033[92m',
'warning': '\033[93m',
'fail': '\033[91m',
'endc': '\033[0m',
'bold': '\033[1m',
'underline': '\033[4m'
}
colors['red'] = colors['fail']
colors['green'] = colors['okgreen']
colors['blue'] = colors['okblue']
colors['purple'] = colors['header']
colors['yellow'] = colors['warning']
def eprint(text, stay=False, color=None):
stdout.write('\r' + ' ' * 64)
stdout.write('\r{text}\r'.format(
text=colors[color] + str(text) + colors['endc'] if color else str(text)
))
stdout.flush()
if stay:
stdout.write('\n')
stdout.flush()
def eprogress(progress=0, total=100, text=None, width=64, color=None):
n_completed = int(progress * width / float(total))
completed = '█' * n_completed
to_complete = '-' * (width - n_completed)
text = '{text}{completed}{to_complete} {percentage}'.format(
text=text + ' ' if text else '',
completed=completed,
to_complete=to_complete,
percentage='{num} %'.format(num=round(progress * 100 / float(total), 2))
)
eprint(text=text, color=color, stay=progress==total)
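# --- Illustrative usage (editor's addition, not part of the original module) ---
# import time
# from easy_print import eprint, eprogress
# for i in range(101):
#     eprogress(progress=i, total=100, text='Working', color='green')
#     time.sleep(0.01)
# eprint('Done!', stay=True, color='green')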
|
[
"dsaiztc@gmail.com"
] |
dsaiztc@gmail.com
|
c67c1b23adff4a4fd98d3e0e6847d448831f2288
|
00c9a648f7c41ed321dee21d00f202068f2855a7
|
/conlang_data.py
|
0d4f969fd9b71d24b59636443fc681b744a69969
|
[] |
no_license
|
OlegKorn/conlang_generator
|
9403b738effb7eb885647959e3f69ce10cf3e775
|
7da4bc46e032a071e21af9ef146f78161350840e
|
refs/heads/master
| 2020-04-27T14:42:22.746566
| 2019-03-15T13:43:58
| 2019-03-15T13:43:58
| 174,418,187
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 600
|
py
|
#prepositions data
PREPOS = [
'at', 'in', 'on', 'upon', 'of',
'with', 'against','among','by'
]
#preffixes data
PREF_TYPES = ['vc', 'cv']
PVOWS = ['a','o']
PCONS = ['r','v','rr','m','ð']
#roots data
ROOT_TYPES = [
'cvvc', 'vcvvc', 'cv', 'cvv', 'vvcv',
'cvcv', 'vcvc', 'ccv', 'vvc', 'vc'
]
RVOWS = ['a','e','y','u','i','o', 'æ', 'ā', 'ī']
RCONS = [
's','k','d','rr','n',
'g','t','f','rr','b','n',
'm','k','rr','l','p','ð'
]
#suffixes data
SUFF_TYPES = ['vc']
SVOWS = ['a','æ','ā']
SCONS = ['ss','p','ð']
VERB_CONJ_MARKERS = ['-am', '-em', '-ed', '-t', '-and', '-i']
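# --- Illustrative sketch (editor's addition, not in the original file) ---
# The *_TYPES templates encode syllable shapes ('c' = consonant, 'v' = vowel);
# a root could be drawn from them roughly like this:
# import random
# def make_root():
#     shape = random.choice(ROOT_TYPES)
#     return ''.join(random.choice(RCONS if ch == 'c' else RVOWS) for ch in shape)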
|
[
"noreply@github.com"
] |
OlegKorn.noreply@github.com
|
c96d1652b6eb8ad31b695faf3530c1d85aa14f96
|
a3c7888591d2851352a1a27774675153aa5363d3
|
/cms/settings.py
|
7b3f5ef39a1a13ffe998f074c12d0615b3547bbb
|
[] |
no_license
|
GearL/pika
|
7eb804b0119ad902a0a8378fc7b0ce110aca11c3
|
f7a292320e12c8bc832f337803535e6d2c1b8d66
|
refs/heads/master
| 2020-05-23T19:00:04.124131
| 2017-03-20T09:30:44
| 2017-03-20T09:30:44
| 84,781,516
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
#-*- coding: utf-8 -*-
# Settings for cms
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://goldenlau:goldenlau@127.0.0.1:3306/cms'
RBAC_USE_WHITE = True
WTF_CSRF_SECRET_KEY = 'pikacms@author:goldenlau'
SECRET_KEY = 'pikacms@author:goldenlau'
UPLOAD_FOLDER = 'static/Uploads'
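# --- Illustrative sketch (editor's addition, not in the original file) ---
# Hardcoding credentials and secret keys in settings is risky; a common
# alternative is to read them from the environment, e.g.:
# import os
# SECRET_KEY = os.environ.get('PIKA_SECRET_KEY', SECRET_KEY)
# SQLALCHEMY_DATABASE_URI = os.environ.get('PIKA_DATABASE_URI', SQLALCHEMY_DATABASE_URI)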
|
[
"goldenlau@liuxiangjideMacBook-Pro.local"
] |
goldenlau@liuxiangjideMacBook-Pro.local
|
74d5923e8dd06b5f7423dba24d5c6582479adf2d
|
e9379a0184dd10d8fe67ecc019840bf3e97dc95d
|
/fullsite/admin.py
|
f2f9e22728f702367ea86bfe28466d95df2a2339
|
[] |
no_license
|
jitendraSingh03/Stock_marketing_django_
|
26db6d751711d494e3377e05d0839b9634813d5a
|
af906146874c986b40d0d459cc37232b313e004b
|
refs/heads/main
| 2023-06-08T23:44:09.871711
| 2021-07-04T13:23:13
| 2021-07-04T13:23:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 118
|
py
|
from django.contrib import admin
from .models import Country
# Register your models here.
admin.site.register(Country)
|
[
"jitendrasikarwar987@gmail.com"
] |
jitendrasikarwar987@gmail.com
|
ace7a8064fe479519e4ac5cbbe8d0d805d77a34b
|
c1b30e61bdf6123e64dcc2037c8e1ae86919abb8
|
/myapi/myapp/apps.py
|
aa32124734076b0ce1a8dd151afcf14d0d60d228
|
[] |
no_license
|
davidmesquita/project_infog2
|
c4fffff0d23696559aba91fedbf74ec1c12cd50b
|
dd628764396ab07cb353052703917b9e7369124b
|
refs/heads/master
| 2023-02-16T05:44:11.659301
| 2021-01-08T00:50:55
| 2021-01-08T00:50:55
| 327,752,877
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 127
|
py
|
from __future__ import unicode_literals
from django.apps import AppConfig
class RestapiConfig(AppConfig):
name = 'myapp'
|
[
"devid.mesquita13@gmail.com"
] |
devid.mesquita13@gmail.com
|
6f4d1a786bd0e3f8878a41e7f66b9b6cc3fc03dc
|
66590a3ce7e23ac1315db7f2e2559de315892a6f
|
/test.py
|
b0e6c213d013e03628a3f9a397190aaf19b2775b
|
[] |
no_license
|
nunonamoram/webrtc_extra
|
d67687f6f2bebb75250c714746f8d19b0c35e68c
|
c433d62fce72714ad6708a8580e200112505c273
|
refs/heads/master
| 2023-02-08T00:06:44.293816
| 2020-12-31T02:09:26
| 2020-12-31T02:09:26
| 320,398,928
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 188
|
py
|
import speechEmotionRecognition as sp
sp.init()
'''
def predict(frase):
nova_frase = frase + "ola"
return nova_frase
def init():
frase = "ola"
x= predict(frase)
return x
'''
|
[
"namoranuno@gmail.com"
] |
namoranuno@gmail.com
|
2db8a50a90731e26bb11519cc1e9672738fdc3a8
|
4145493f1054fb256cd1fc47659564505d855fa7
|
/pychron/core/regression/tests/regression.py
|
9e6059c1c7e82f7a0680a9710902c1150ea389b2
|
[
"Apache-2.0"
] |
permissive
|
USGSDenverPychron/pychron
|
6500a4b57fb55ad056ef565ef0d732a0cec550a5
|
0fd7383631789dbe81f3218c00eb3a0aeec31f43
|
refs/heads/main
| 2023-04-15T04:16:06.150392
| 2022-02-12T14:19:11
| 2022-02-12T14:19:11
| 444,606,749
| 0
| 0
|
Apache-2.0
| 2022-01-05T00:28:39
| 2022-01-05T00:28:38
| null |
UTF-8
|
Python
| false
| false
| 14,634
|
py
|
# ===============================================================================
# Copyright 2012 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
from unittest import TestCase
from numpy import linspace, polyval
# ============= local library imports ==========================
from pychron.core.regression.least_squares_regressor import ExponentialRegressor
from pychron.core.regression.mean_regressor import (
MeanRegressor,
) # , WeightedMeanRegressor
from pychron.core.regression.new_york_regressor import (
ReedYorkRegressor,
NewYorkRegressor,
)
from pychron.core.regression.ols_regressor import OLSRegressor
# from pychron.core.regression.york_regressor import YorkRegressor
from pychron.core.regression.tests.standard_data import (
mean_data,
filter_data,
ols_data,
pearson,
pre_truncated_data,
expo_data,
expo_data_linear,
)
class RegressionTestCase(object):
@classmethod
def setUpClass(cls):
cls.reg = cls.reg_klass()
def testN(self):
self.assertEqual(self.reg.n, self.solution["n"])
class TruncateRegressionTest(TestCase):
def setUp(self):
self.reg = MeanRegressor()
def test_pre_truncate(self):
        xs, ys, sol = pre_truncated_data()
        self.solution = sol
        self.reg.trait_set(xs=xs, ys=ys)
self.reg.set_truncate("x<5")
self.assertEqual(self.reg.mean, self.solution["pre_mean"])
def test_post_truncate(self):
        xs, ys, sol = pre_truncated_data()
        self.solution = sol
        self.reg.trait_set(xs=xs, ys=ys)
self.reg.set_truncate("x>=5")
self.assertEqual(self.reg.mean, self.solution["post_mean"])
class MeanRegressionTest(RegressionTestCase, TestCase):
reg_klass = MeanRegressor
def setUp(self):
n = 1e5
xs, ys, sol = mean_data(n=n)
self.reg.trait_set(xs=xs, ys=ys)
self.solution = sol
def testMean(self):
self.assertAlmostEqual(self.reg.mean, self.solution["mean"], 2)
def testStd(self):
self.assertAlmostEqual(self.reg.std, self.solution["std"], 2)
class OLSRegressionTest(RegressionTestCase, TestCase):
reg_klass = OLSRegressor
def setUp(self):
xs, ys, sol = ols_data()
self.reg.trait_set(xs=xs, ys=ys, fit="linear")
self.solution = sol
self.reg.calculate()
def testSlope(self):
cs = self.reg.coefficients
self.assertAlmostEqual(cs[-1], self.solution["slope"], 4)
def testYIntercept(self):
cs = self.reg.coefficients
self.assertAlmostEqual(cs[0], self.solution["y_intercept"], 4)
def testPredictErrorSEM(self):
e = self.reg.predict_error(self.solution["pred_x"], error_calc="SEM")
self.assertAlmostEqual(e, self.solution["pred_error"], 3)
class OLSRegressionTest2(RegressionTestCase, TestCase):
reg_klass = OLSRegressor
def setUp(self):
n = 100
coeffs = [2.12, 1.13, 5.14]
xs = linspace(0, 100, n)
ys = polyval(coeffs, xs)
self.reg.trait_set(xs=xs, ys=ys, fit="parabolic")
sol = {"coefficients": coeffs, "n": n}
self.solution = sol
self.reg.calculate()
def testcoefficients(self):
self.assertListEqual(
list([round(x, 6) for x in self.reg.coefficients[::-1]]),
self.solution["coefficients"],
)
class FilterOLSRegressionTest(RegressionTestCase, TestCase):
reg_klass = OLSRegressor
def setUp(self):
xs, ys, sol = filter_data()
self.reg.trait_set(
xs=xs,
ys=ys,
fit="linear",
filter_outliers_dict={
"filter_outliers": True,
"iterations": 1,
"std_devs": 2,
},
)
self.solution = sol
self.reg.calculate()
def testSlope(self):
cs = self.reg.coefficients
self.assertAlmostEqual(cs[-1], self.solution["slope"], 4)
def testYIntercept(self):
cs = self.reg.coefficients
self.assertAlmostEqual(cs[0], self.solution["y_intercept"], 4)
def testPredictErrorSEM(self):
e = self.reg.predict_error(self.solution["pred_x"], error_calc="SEM")
# e=self.reg.coefficient_errors[0]
self.assertAlmostEqual(e, self.solution["pred_error"], 3)
class PearsonRegressionTest(RegressionTestCase):
kind = ""
def setUp(self):
xs, ys, wxs, wys = pearson()
exs = wxs ** -0.5
eys = wys ** -0.5
self.reg.trait_set(xs=xs, ys=ys, xserr=exs, yserr=eys, error_calc_type="SE")
self.reg.calculate()
self.solution = {"n": len(xs)}
def test_slope(self):
exp = pearson(self.kind)
self.assertAlmostEqual(self.reg.slope, exp["slope"], 4)
def test_slope_err(self):
exp = pearson(self.kind)
self.assertAlmostEqual(
self.reg.get_slope_variance() ** 0.5, exp["slope_err"], 4
)
def test_y_intercept(self):
expected = pearson(self.kind)
self.assertAlmostEqual(self.reg.intercept, expected["intercept"], 4)
def test_y_intercept_error(self):
expected = pearson(self.kind)
self.assertAlmostEqual(
self.reg.get_intercept_error(), expected["intercept_err"], 4
)
def test_mswd(self):
expected = pearson(self.kind)
self.assertAlmostEqual(self.reg.mswd, expected["mswd"], 3)
class ReedRegressionTest(PearsonRegressionTest, TestCase):
reg_klass = ReedYorkRegressor
kind = "reed"
class NewYorkRegressionTest(PearsonRegressionTest, TestCase):
reg_klass = NewYorkRegressor
kind = "new_york"
# def test_llnl(self):
# self.assertEqual(self.reg.get_slope_variance(), self.reg.test_llnl())
# def test_llnl_vs_pychron_mahon(self):
# self.assertEqual(self.reg.get_slope_variance_llnl(), self.reg.get_slope_variance_pychron())
class ExpoRegressionTest(TestCase):
def setUp(self):
xs, ys, sol = expo_data()
self.reg = ExponentialRegressor(xs=xs, ys=ys)
self.solution = sol
def test_a(self):
self.reg.calculate()
self.assertAlmostEqual(
self.reg.coefficients[0], self.solution["coefficients"][0]
)
def test_b(self):
self.reg.calculate()
self.assertAlmostEqual(
self.reg.coefficients[1], self.solution["coefficients"][1]
)
def test_c(self):
self.reg.calculate()
self.assertAlmostEqual(
self.reg.coefficients[2], self.solution["coefficients"][2]
)
class ExpoRegressionTest2(TestCase):
def setUp(self):
xs, ys, sol = expo_data_linear()
self.reg = ExponentialRegressor(xs=xs, ys=ys)
self.solution = sol
def test_c(self):
self.reg.calculate()
self.assertAlmostEqual(
self.reg.coefficients[2], self.solution["coefficients"][2], places=5
)
# ============= EOF =============================================
# class WeightedMeanRegressionTest(RegressionTestCase, TestCase):
# @staticmethod
# def regressor_factory():
# return WeightedMeanRegressor()
#
# def setUp(self):
# xs, ys, yes, sol = weighted_mean_data()
# self.reg.trait_set(xs=xs, ys=ys,
# yserr=yes,
# )
# self.solution = sol
# self.reg.calculate()
#
# def testMean(self):
# v = self.reg.mean
# self.assertEqual(v, self.solution['mean'])
# class WeightedMeanRegressionTest(TestCase):
# def setUp(self):
# n = 1000
# ys = np.ones(n) * 5
# # es = np.random.rand(n)
# es = np.ones(n)
# ys = np.hstack((ys, [5.1]))
# es = np.hstack((es, [1000]))
# # print 'exception', es
# self.reg = WeightedMeanRegressor(ys=ys, errors=es)
# def testMean(self):
# m = self.reg.mean
# self.assertEqual(m, 5)
# class RegressionTest(TestCase):
# def setUp(self):
# self.x = np.array([1, 2, 3, 4, 4, 5, 5, 6, 6, 7])
# self.y = np.array([7, 8, 9, 8, 9, 11, 10, 13, 14, 13])
#
# def testMeans(self):
# xm = self.x.mean()
# ym = self.y.mean()
# self.assertEqual(xm, 4.3)
# self.assertEqual(ym, 10.2)
#
#
# class CITest(TestCase):
# def setUp(self):
# self.x = np.array([0, 12, 29.5, 43, 53, 62.5, 75.5, 85, 93])
# self.y = np.array([8.98, 8.14, 6.67, 6.08, 5.9, 5.83, 4.68, 4.2, 3.72])
#
# def testUpper(self):
# reg = PolynomialRegressor(xs=self.x, ys=self.y, degree=1)
# l, u = reg.calculate_ci([0, 10, 100])
# for ui, ti in zip(u, [9.16, 8.56, 3.83]):
# self.assertAlmostEqual(ui, ti, delta=0.01)
#
# def testLower(self):
# reg = PolynomialRegressor(xs=self.x, ys=self.y, degree=1)
# l, u = reg.calculate_ci([0])
#
# self.assertAlmostEqual(l[0], 8.25, delta=0.01)
#
# def testSYX(self):
# reg = PolynomialRegressor(xs=self.x, ys=self.y, degree=1)
# self.assertAlmostEqual(reg.get_syx(), 0.297, delta=0.01)
#
# def testSSX(self):
# reg = PolynomialRegressor(xs=self.x, ys=self.y, degree=1)
# self.assertAlmostEqual(reg.get_ssx(), 8301.389, delta=0.01)
# class WLSRegressionTest(TestCase):
# def setUp(self):
# self.xs = np.linspace(0, 10, 10)
# self.ys = np.random.normal(self.xs, 1)
#
# '''
# draper and smith p.8
# '''
# self.xs = [35.3, 29.7, 30.8, 58.8, 61.4, 71.3, 74.4, 76.7, 70.7, 57.5,
# 46.4, 28.9, 28.1, 39.1, 46.8, 48.5, 59.3, 70, 70, 74.5, 72.1,
# 58.1, 44.6, 33.4, 28.6
# ]
# self.ys = [10.98, 11.13, 12.51, 8.4, 9.27, 8.73, 6.36, 8.50,
# 7.82, 9.14, 8.24, 12.19, 11.88, 9.57, 10.94, 9.58,
# 10.09, 8.11, 6.83, 8.88, 7.68, 8.47, 8.86, 10.36, 11.08
# ]
# self.es = np.random.normal(1, 0.5, len(self.xs))
#
# self.slope = -0.0798
# self.intercept = 13.623
# self.Xk = 28.6
# self.ypred_k = 0.3091
# xs = self.xs
# ys = self.ys
# es = self.es
# self.wls = WeightedPolynomialRegressor(xs=xs, ys=ys,
# yserr=es, fit='linear')
#
# def testVarCovar(self):
# wls = self.wls
# cv = wls.var_covar
# print cv
# print wls._result.normalized_cov_params
#
# # print wls._result.cov_params()
# class OLSRegressionTest(TestCase):
# def setUp(self):
# self.xs = np.linspace(0, 10, 10)
# # self.ys = np.random.normal(self.xs, 1)
# # print self.ys
# self.ys = [-1.8593967, 3.15506254, 1.82144207, 4.58729807, 4.95813564,
# 5.71229382, 7.04611731, 8.14459843, 10.27429285, 10.10989719]
#
# '''
# draper and smith p.8
# '''
# self.xs = [35.3, 29.7, 30.8, 58.8, 61.4, 71.3, 74.4, 76.7, 70.7, 57.5,
# 46.4, 28.9, 28.1, 39.1, 46.8, 48.5, 59.3, 70, 70, 74.5, 72.1,
# 58.1, 44.6, 33.4, 28.6
# ]
# self.ys = [10.98, 11.13, 12.51, 8.4, 9.27, 8.73, 6.36, 8.50,
# 7.82, 9.14, 8.24, 12.19, 11.88, 9.57, 10.94, 9.58,
# 10.09, 8.11, 6.83, 8.88, 7.68, 8.47, 8.86, 10.36, 11.08
# ]
#
# self.slope = -0.0798
# self.intercept = 13.623
# self.Xk = 28.6
# self.ypred_k = 0.3091
# xs = self.xs
# ys = self.ys
# ols = PolynomialRegressor(xs=xs, ys=ys, fit='linear')
#
# self.ols = ols
#
# def testSlope(self):
# ols = self.ols
# b, s = ols.coefficients
# self.assertAlmostEqual(s, self.slope, 4)
#
# def testIntercept(self):
# ols = self.ols
# b, s = ols.coefficients
# self.assertAlmostEqual(b, self.intercept, 4)
# self.assertAlmostEqual(ols.predict(0), self.intercept, 4)
#
# def testPredictYerr(self):
# ols = self.ols
# ypred = ols.predict_error(self.Xk, error_calc='SEM')
# self.assertAlmostEqual(ypred, self.ypred_k, 3)
#
# def testPredictYerr_matrix(self):
# ols = self.ols
# ypred = ols.predict_error_matrix([self.Xk])[0]
# self.assertAlmostEqual(ypred, self.ypred_k, 3)
#
# def testPredictYerr_al(self):
# ols = self.ols
# ypred = ols.predict_error_al(self.Xk)[0]
# self.assertAlmostEqual(ypred, self.ypred_k, 3)
#
# def testPredictYerrSD(self):
# ols = self.ols
# ypred = ols.predict_error(self.Xk, error_calc='SEM')
# ypredm = ols.predict_error_matrix([self.Xk], error_calc='SEM')[0]
# self.assertAlmostEqual(ypred, ypredm, 7)
#
# def testPredictYerrSD_al(self):
# ols = self.ols
# ypred = ols.predict_error(self.Xk, error_calc='sd')
# ypredal = ols.predict_error_al(self.Xk, error_calc='sd')[0]
# self.assertAlmostEqual(ypred, ypredal, 7)
# def testCovar(self):
# ols = self.ols
# cv = ols.calculate_var_covar()
# self.assertEqual(cv, cvm)
# def testCovar(self):
# ols = self.ols
# covar = ols.calculate_var_covar()
# print covar
# print
# assert np.array_equal(covar,)
# print covar
# print ols._result.cov_params()
# print ols._result.normalized_cov_params
# def testPredictYerr2(self):
# xs = self.xs
# ys = self.ys
#
# ols = PolynomialRegressor(xs=xs, ys=ys, fit='parabolic')
# y = ols.predict_error(5)[0]
# # yal = ols.predict_error_al(5)[0]
# # print y, yal
# self.assertEqual(y, self.Yprederr_5_parabolic)
# # self.assertEqual(yal, self.Yprederr_5_parabolic)
|
[
"jirhiker@gmail.com"
] |
jirhiker@gmail.com
|