max_stars_repo_path
stringlengths 3
269
| max_stars_repo_name
stringlengths 4
119
| max_stars_count
int64 0
191k
| id
stringlengths 1
7
| content
stringlengths 6
1.05M
| score
float64 0.23
5.13
| int_score
int64 0
5
|
|---|---|---|---|---|---|---|
trax/shapes.py
|
koz4k2/trax
| 0
|
12780851
|
# coding=utf-8
# Copyright 2019 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Core class and functions for handling data abstractly as shapes/dtypes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
class ShapeDtype(object):
  """A NumPy ndarray-like object abstracted as shape and dtype."""

  # Only these two attributes exist; keeps instances lightweight.
  __slots__ = ['shape', 'dtype']

  def __init__(self, shape, dtype=onp.float32):
    """Creates a `ShapeDtype` describing an array of `shape` and `dtype`."""
    self.shape = shape
    self.dtype = dtype

  def __repr__(self):
    # %-formatting keeps Python 2 compatibility (file imports __future__).
    return 'ShapeDtype{shape:%s, dtype:%s}' % (self.shape, self.dtype)
def shape_dtype_for(obj):
  """Returns a `ShapeDtype` instance with the shape and dtype of `obj`."""
  return ShapeDtype(shape=obj.shape, dtype=obj.dtype)
| 2.78125
| 3
|
ygo_pro_coder/koishi_coder.py
|
shiyinayuriko/toolbox_python
| 1
|
12780852
|
<reponame>shiyinayuriko/toolbox_python
import sys
import base64


def parse_deck(path):
    """Parse a YGOPro deck list file.

    Returns ``(counter, cardlist)`` where ``counter[0]`` counts cards before
    the ``!side`` marker, ``counter[1]`` counts side-deck cards, and
    ``cardlist`` holds every card id in file order.

    Bug fix: the original incremented an index on every ``!side`` line, so a
    file containing the marker twice raised IndexError.
    """
    counter = [0, 0]
    cardlist = []
    section = 0
    with open(path, "r", encoding="utf8") as f:
        for line in f:
            if line.strip().isdigit():
                card = int(line)
                cardlist.append(card)
                counter[section] += 1
            elif line.startswith("!side"):
                # Everything after "!side" counts toward the side deck.
                section = 1
    return counter, cardlist


def encode(counter, cardlist):
    """Encode the two counters then all card ids as little-endian uint32
    and return the base64-encoded bytes."""
    raw = bytearray()
    for value in counter:
        raw += value.to_bytes(4, byteorder="little")
    for card in cardlist:
        raw += card.to_bytes(4, byteorder="little")
    return base64.b64encode(raw)


def main():
    # Take the source file from argv, or prompt interactively.
    if len(sys.argv) == 1:
        source_file = input("input source file\n")
    else:
        source_file = sys.argv[1]
    counter, cardlist = parse_deck(source_file)
    print(cardlist)
    print(counter)
    print(encode(counter, cardlist))


if __name__ == "__main__":
    main()
| 3.046875
| 3
|
src/microspeclib/__init__.py
|
microspectrometer/microspec
| 0
|
12780853
|
import os

__copyright__ = """Copyright 2020 Chromation, Inc"""
__license__ = """All Rights Reserved by Chromation, Inc"""
__doc__ = """
see API documentation: 'python -m pydoc microspeclib.simple'
"""

# Sphinx skips __init__.py files, so use pydoc, the sphinx output in
# doc/build, or README.md for generalized documentation.
#
# CHROMASPEC_ROOTDIR is defined here, in the base package __init__, so the
# repository root can be located programmatically (cfg/ and friends) without
# relative-path references scattered through the packages.  The runtime has
# no standard for finding the root package directory and we deliberately
# avoid depending on the test system at runtime.
#
# Example: if this package lives at /foo/bar/microspec/src/microspeclib then
# CHROMASPEC_ROOTDIR resolves to /foo/bar/microspec.
_package_dir = os.path.dirname(__file__)  # .../src/microspeclib
CHROMASPEC_ROOTDIR = os.path.realpath(
    os.path.join(_package_dir, os.pardir, os.pardir)  # up past src/ to the root
)
| 1.929688
| 2
|
appserver/neo4japp/services/annotations/lmdb_service.py
|
SBRG/lifelike
| 8
|
12780854
|
<reponame>SBRG/lifelike<gh_stars>1-10
from .lmdb_connection import LMDBConnection
class LMDBService(LMDBConnection):
    """Service-layer facade over :class:`LMDBConnection`.

    Adds no behaviour of its own yet; it is the extension point for
    annotation-related LMDB helpers.
    """

    def __init__(self, dirpath: str, **kwargs) -> None:
        # Forward everything to the connection base class unchanged.
        super().__init__(dirpath, **kwargs)
| 1.734375
| 2
|
User/transform.py
|
rvock/Sublime-Text-Preferences
| 1
|
12780855
|
<filename>User/transform.py
import string
import sublime
import sublime_plugin
import zlib
class GzdecodeCommand(Transformer):
    """Text command that zlib-decompresses the current selection.

    Bug fix: the original assignment ended with a trailing comma, which made
    ``transformer`` a 1-tuple containing the lambda instead of a callable,
    so invoking the command could never work.
    """

    # staticmethod keeps the callable unbound so the base class can call it
    # as ``self.transformer(text)`` without an implicit ``self`` argument.
    # NOTE(review): assumes Transformer invokes this attribute through the
    # instance — confirm against the base class definition (not visible here).
    transformer = staticmethod(lambda s: zlib.decompress(s))
| 1.703125
| 2
|
Bloomberg_codecon/General_challenger_problems/basic_encryption.py
|
SelvorWhim/competitive
| 0
|
12780856
|
<reponame>SelvorWhim/competitive<filename>Bloomberg_codecon/General_challenger_problems/basic_encryption.py<gh_stars>0
### INSTRUCTIONS ###
'''
With all the talk about cryptography and encryption, your friend has come up with the following basic encryption method:
An initial array of data (called A) is an array of integers. Each integer can have a value between 0 and 255, inclusive.
The encrypted array of data (called E) is the same size as A. Each entry of the encrypted array E is the sum of all entries in A that are not in the same position as the entry being calculated. For example, with the original array A consisting of:
1 2 3 4 5
The encrypted array E will be:
14 13 12 11 10
The first entry of E is the sum of the second through fifth entries in A (2+3+4+5=14).
You feel this method of encryption is faulty. To demonstrate this, write a program to take an array encrypted by your friend's method and decrypt it!
> Input Specifications
Your program must read from STDIN
A single integer N (1 <= N <= 5000) indicating the number of entries in the array.
N lines each containing a single integer between 0 and 100000000, inclusive, formatted with no unnecessary leading zeros.
> Output Specifications
If the decrypted array is valid (i.e. all entries are integers between 0 and 255), print each element of the decrypted array with no unnecessary leading zeros in order, one element per line.
If there is no possible decrypted array, output the string NO SOLUTION on a single line.
If there are multiple valid decrypted arrays, output the string MULTIPLE SOLUTIONS on a single line.
'''
### MY SOLUTION (accepted) ###
#Problem : Basic Encryption
#Language : Python 3
#Compiled Using : py_compile
#Version : Python 3.4.3
#Input for your program will be provided from STDIN
#Print out all output from your program to STDOUT
import sys


def decrypt(encrypted):
    """Decrypt the friend's scheme: E_i = S - A_i where S = sum(A).

    Since sum(E) = (N-1)*S, the original sum S is sum(E)/(N-1).

    Returns a ``(status, original)`` pair where status is one of
    'OK', 'NO SOLUTION', 'MULTIPLE SOLUTIONS' and ``original`` is the
    decrypted list (only for 'OK').

    Fixes over the original script: the divisibility of sum(E) by N-1 is
    tested explicitly instead of relying on truncating int division plus a
    re-summing check, the dead ``d % 1 != 0`` test on ints is gone, and
    'NO SOLUTION' can no longer be printed twice.
    """
    n = len(encrypted)
    if n == 1:
        # Any single byte maps to E = [0]; original accepted solution treats
        # every N == 1 input as ambiguous.
        return "MULTIPLE SOLUTIONS", None
    total = sum(encrypted)
    if total % (n - 1) != 0:
        # S would not be an integer: no valid byte array exists.
        return "NO SOLUTION", None
    s = total // (n - 1)
    original = [s - e for e in encrypted]
    if any(d < 0 or d > 255 for d in original):
        return "NO SOLUTION", None
    return "OK", original


def main():
    """Read N and the N encrypted entries from STDIN, print the result."""
    data = sys.stdin.read().splitlines()
    n = int(data[0])
    arr = [int(line) for line in data[1:n + 1]]
    status, original = decrypt(arr)
    if status == "OK":
        for d in original:
            print(d)
    else:
        print(status)


if __name__ == "__main__":
    main()
| 4.34375
| 4
|
flask/app/views.py
|
nokiam9/forester
| 1
|
12780857
|
<reponame>nokiam9/forester
# # -*- coding: utf-8 -*-
from flask import request, render_template, abort
from mongoengine.errors import NotUniqueError
from models import BidNotice
import json, datetime
# Maps the notice-type id used in URLs to its (Chinese) display title.
NOTICE_TYPE_CONFIG = {
    '0': '全部招标公告',
    '1': '单一来源采购公告',
    '2': '采购公告',
    '7': '中标结果公示',
    '3': '资格预审公告',
    '8': '供应商信息收集',
    '99': '供应商公告',
}
# Number of notices shown per pagination page.
PAGE_SIZE = 10
# pylint: disable=no-member
# All routes are registered with add_url_rule() instead of decorators, so the
# URL wiring stays encapsulated in views.py.  (Translated from Chinese.)
def index():
    """Render the site landing page."""
    return render_template('index.html')
def content_view(nid):
    """Return the raw HTML content of notice ``nid``; 404 when absent.

    Bug fix: the original chained ``.first().notice_content`` and crashed
    with AttributeError (HTTP 500) when no document matched ``nid``;
    a missing notice now aborts with 404 like an empty one.
    """
    notice = BidNotice.objects(nid=nid).first()
    if notice is None or not notice.notice_content:
        abort(status=404)
    else:
        return notice.notice_content
def hello():
    """Health-check endpoint returning a static greeting string."""
    return "Hello World from Flask in a uWSGI Nginx Docker container with \
    Python 3.6 (from the example template)"
def notice_page_view(type_id):
    """ View of /notice/pagination/[012378]/?page_id=1 """
    try:
        title = NOTICE_TYPE_CONFIG[type_id]
    except KeyError:
        abort(status=406)  # Unacceptable url para
    page_id = request.args.get('page_id', default=1, type=int)
    # The BidNotice meta defines an index on these fields so the order_by
    # does not overflow memory.  (Translated from the original Chinese note.)
    # type_id '0' means "all categories".  The original also tested
    # ``type_id is None`` here, but a None id already aborts with 406 in the
    # KeyError branch above, so that check was unreachable and is removed;
    # the duplicated query chain is folded into one.
    if type_id == '0':
        query = BidNotice.objects()
    else:
        query = BidNotice.objects(type_id=type_id)
    todos_page = query. \
        order_by("-published_date", "-timestamp"). \
        paginate(page=page_id, per_page=PAGE_SIZE)
    return render_template('pagination.html',
                           todos_page=todos_page,
                           type_id=type_id,
                           title=title)
def api_post_notice():
    """Try to insert one BidNotice built from the request's JSON body.

    Returns ('dup rec', 200) for duplicate nids, ('error', 200) on value
    errors, ('ok', 200) on success.

    Bug fix: the original ended with ``finally: return 'ok', 200``, which
    overrode the returns inside every ``except`` clause — the endpoint
    always answered 'ok' even for duplicates and errors.
    """
    json_data = json.loads(request.get_data().decode("utf-8"))
    try:  # try to insert new record
        BidNotice(
            title=json_data['title'],
            nid=json_data['nid'],
            notice_type=json_data['notice_type'],
            type_id=json_data['type_id'],
            spider=json_data['spider'],
            source_ch=json_data['source_ch'],
            notice_url=json_data['notice_url'],
            notice_content=json_data['notice_content'],
            # Parse the upstream date string (YYYY-MM-DD).
            published_date=datetime.datetime.strptime(json_data['published_date'], '%Y-%m-%d'),
            # Stamp with the API gateway's current time (UTC+8).
            timestamp=datetime.datetime.utcnow() + datetime.timedelta(hours=8),
        ).save()
    except NotUniqueError:  # DuplicateKeyError
        print('Dup rec! nid=' + json_data['nid'])
        return 'dup rec', 200
    except ValueError as e:
        print('Unknown error:', e)
        return 'error', 200
    return 'ok', 200
| 1.992188
| 2
|
convert_ejm.py
|
fediskhakov/ejm2evernote
| 1
|
12780858
|
<filename>convert_ejm.py
#! /usr/bin/env python
# By <NAME>
# fedor.iskh.me
# The packages:
# geopy is needed for geo-locating the employers
# bleach is needed for cleaning up the content of ads for Evernote standard (ENML)
# https://dev.evernote.com/doc/articles/enml.php#prohibited
# https://pypi.python.org/pypi/bleach
# http://geopy.readthedocs.org/en/1.10.0/
import sys
import xml.etree.ElementTree as ET
import geopy
import datetime
import calendar
import bleach
from xml.sax.saxutils import escape
# SETUP:
# NOTE(review): the '<PATH>' placeholders below are anonymised — fill in real
# paths before running.
# The XML file from EJM/AIMS
ejm_xmlfile='<PATH>'
# The output file that will be imported into Evernote
evernote_xmlfile='<PATH>'
# Python 2 print statement — this script targets Python 2.
print '''
Python script that converts XML positions data downloaded from EJM/AIMS
to ENEX format XML that can be imported into Evernote.
'''
#patch for CDATA support from http://www.kaarsemaker.net/blog/2013/10/10/cdata-support-in-elementtree/
def CDATA(text=None):
    """Build a pseudo-element whose tag marks it as a CDATA section.

    The serializer monkeypatch elsewhere in this file recognises the
    '![CDATA[' tag and writes the text verbatim instead of escaping it.
    """
    node = ET.Element('![CDATA[')
    node.text = text
    return node
# Python 2.7 and 3
if hasattr(ET, '_serialize_xml'):
    # Wrap ElementTree's private serializer so '![CDATA[' pseudo-elements are
    # emitted verbatim instead of XML-escaped.
    ET._original_serialize_xml = ET._serialize_xml
    def _serialize_xml(write, elem, *args):
        if elem.tag == '![CDATA[':
            # write("%s%s" % (elem.tag, elem.text))
            write("<![CDATA[%s]]>" % elem.text.encode('utf-8'))
            return
        return ET._original_serialize_xml(write, elem, *args)
    ET._serialize_xml = ET._serialize['xml'] = _serialize_xml
# Python 2.5-2.6, and non-stdlib ElementTree
elif hasattr(ET.ElementTree, '_write'):
    ET.ElementTree._orig_write = ET.ElementTree._write
    def _write(self, file, node, encoding, namespaces):
        if node.tag == '![CDATA[':
            file.write("\n<![CDATA[%s]]>\n" % node.text.encode(encoding))
        else:
            self._orig_write(file, node, encoding, namespaces)
    ET.ElementTree._write = _write
else:
    # NOTE(review): the bug-report URL points at python-hpilo because this
    # recipe was copied from that project — not this repository.
    raise RuntimeError("Don't know how to monkeypatch CDATA support. Please report a bug at https://github.com/seveas/python-hpilo")
# Main conversion pass (Python 2 — uses print statements throughout).
from geopy.geocoders import Nominatim
# from geopy.geocoders import GoogleV3
geolocator = Nominatim()
# geolocator = GoogleV3()
# input XML tree
intree = ET.parse(ejm_xmlfile)
# output: start building the Evernote export tree
root2 = ET.Element("en-export")
# number of positions in the file (for progress reporting only)
npos=len(list(intree.iter('position')))
i=1
for position in intree.iter('position'):
    print '\nPosition ',i,' of ',npos,':'
    ejmid=position.find('Position_id').text
    print ' EJM id=',ejmid
    title=position.find('Position_type').text
    print ' title=',title
    institution=position.find('Institution').text
    print ' institution=',institution
    print ' address=',
    sys.stdout.flush()
    # analyse location: try the institution first, fall back to the country
    try:
        country=position.find('Country').text
        geo = geolocator.geocode(institution, exactly_one=True)
        if geo is None:
            geo = geolocator.geocode(country, exactly_one=True)
    except Exception:
        # Any geocoder failure (network, rate limit, parse) is treated as
        # "location unknown" rather than aborting the run.
        geo = None
    if geo is not None:
        print geo.address,
        print((geo.latitude, geo.longitude))
    else:
        print 'unknown after 2 tries'
    i=i+1
    # Fields (semicolon-separated list in the source XML)
    fields=position.find('Fields').text
    if fields is not None:
        fields=fields.split(";")
    # start creating a note for Evernote
    note = ET.SubElement(root2, "note")
    ET.SubElement(note, "title").text = title+' at '+institution
    # if 'full-time' in section.lower():
    # ET.SubElement(note, "tag").text = 'Full-Time'
    if 'non-academic' in title.lower():
        ET.SubElement(note, "tag").text = 'Non-Academic'
    # if 'international' not in section.lower():
    # ET.SubElement(note, "tag").text = 'USA'
    # the actual Note content: ENML document assembled as one string
    entry='<?xml version="1.0" encoding="UTF-8" standalone="no"?>' + \
        '<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">' + \
        '<en-note style="word-wrap: break-word; -webkit-nbsp-mode: space; -webkit-line-break: after-white-space;">'
    entry=entry+'<div style="margin-bottom:1em;"><a style="color:black" href="https://econjobmarket.org/Apply/PosApp.php?posid='+ejmid+'">EJM id '+ejmid+' (view online)</a></div>'
    if position.find('Ad_title') is not None and position.find('Ad_title').text is not None:
        entry=entry+'<h2>'+escape(position.find('Ad_title').text)+'</h2>'
    entry=entry+'<div style="font-size:large;color:#00b300">'+escape(title)+'</div>'
    entry=entry+'<div style="font-size:large;font-weight:bold;color:#c80000">'+escape(institution)+'</div>'
    if position.find('Department') is not None and position.find('Department').text is not None:
        entry=entry+'<div style="font-size:norlam;font-weight:bold;color:#c80000">'+escape(position.find('Department').text)+'</div>'
    if geo is not None:
        # Link the location to a Google Maps view centred on the geocode hit.
        entry=entry+'<div><a style="font-size:large;font-weight:bold;color:#0000cc" href="https://www.google.com.au/maps/@'+str(geo.latitude)+','+str(geo.longitude)+',10z">'
        if geo.address is not None:
            entry=entry+escape(geo.address)
        else:
            if len(country)>0:
                entry=entry+escape(country)
            else:
                entry=entry+'location'
        entry=entry+'</a></div>'
    if position.find('Ad_opens') is not None and position.find('Ad_opens').text is not None:
        datevar=datetime.datetime.strptime(position.find('Ad_opens').text,"%Y-%m-%d %H:%M:%S")
        entry=entry+'<div>Position opens: '+datevar.strftime("%B %d")+'</div>'
    if position.find('Ad_closes') is not None and position.find('Ad_closes').text is not None:
        datevar=datetime.datetime.strptime(position.find('Ad_closes').text,"%Y-%m-%d %H:%M:%S")
        entry=entry+'<div style="font-size:large;font-weight:bold;color:#b30059">DEADLINE: '+datevar.strftime("%B %d")+'</div>'
    if fields is not None:
        entry=entry+'<div style="margin-top:1.5em;margin-bottom:0em;font-size:small">Fields:</div>'
        entry=entry+'<ul>'
        for k in fields:
            entry=entry+'<li style="color:black">'+escape(k)+'</li>'
        entry=entry+'</ul>'
    # clean the ad text down to the tag/attribute/style set ENML permits
    allowed_tags=['a','abbr','acronym','address','area','b','bdo','big','blockquote','br','caption','center','cite','code','col','colgroup','dd','del','dfn','div','dl','dt','em','font','h1','h2','h3','h4','h5','h6','hr','i','img','ins','kbd','li','map','ol','p','pre','q','s','samp','small','span','strike','strong','sub','sup','table','tbody','td','tfoot','th','thead','title','tr','tt','u','ul','var','xmp']
    allowed_attrib=['style','href']
    allowed_styles=['font-size','font-weight','margin-bottom','margin-top','color','white-space','word-wrap']
    ad_clean=bleach.clean(position.find('Ad_text').text,allowed_tags,allowed_attrib,allowed_styles, strip=True,strip_comments=True)
    entry=entry+'<pre style="white-space:pre-wrap;word-wrap:break-word;">'+escape(ad_clean)+'</pre>'
    entry=entry + '</en-note>'
    # The ENML payload must be wrapped in CDATA (see the monkeypatch above).
    contenttag=ET.SubElement(note, "content")
    ET.SubElement(contenttag, "![CDATA[").text=entry
    # xmlstr = ElementTree.tostring(ET, encoding='utf8', method='xml')
    note_attr=ET.SubElement(note, "note-attributes")
    note_attr.text=''
    ET.SubElement(note_attr, "author").text = 'EJM'
    if geo is not None:
        ET.SubElement(note_attr, "latitude").text = str(geo.latitude)
        ET.SubElement(note_attr, "longitude").text = str(geo.longitude)
        ET.SubElement(note_attr, "altitude").text = '0'
    # reminder and reminder order from the ad closing date, clamped to the
    # current or next calendar year
    if position.find('Ad_closes') is not None and position.find('Ad_closes').text is not None:
        datevar=datetime.datetime.strptime(position.find('Ad_closes').text,"%Y-%m-%d %H:%M:%S")
        year_corr=max(min(datevar.year,datetime.date.today().year+1),datetime.date.today().year)
        try:
            datevar=datetime.date(year_corr,datevar.month,datevar.day)
        except ValueError:
            # February 29 in a wrong year..
            datevar=datetime.date(year_corr,datevar.month,datevar.day-1)
        ET.SubElement(note_attr, "reminder-order").text = str(calendar.timegm(datevar.timetuple()))
        ET.SubElement(note_attr, "reminder-time").text = datevar.strftime("%Y%m%dT%H%M%SZ")
    # clean the objects before the next iteration
    note_attr=None
    note=None
with open(evernote_xmlfile, 'w') as f:
    f.write('<?xml version="1.0" encoding="UTF-8" ?>\n<!DOCTYPE en-export SYSTEM "http://xml.evernote.com/pub/evernote-export3.dtd">\n')
    ET.ElementTree(root2).write(f,'utf-8')
| 2.125
| 2
|
mqtt_handler.py
|
sskorol/respeaker-led
| 0
|
12780859
|
<filename>mqtt_handler.py
import uuid
import paho.mqtt.client as mqtt
import mraa
import time
from os import environ, geteuid
from pixel_ring import pixel_ring
from utils import read_json, current_time
class MqttHandler:
    """Bridges MQTT 'respeaker/led/#' messages to the Respeaker pixel ring."""

    def __init__(self):
        # It's required to change GPIO state on Respeaker Core V2 before the
        # actual LED processing.
        self.gpio = mraa.Gpio(12)
        if geteuid() != 0:
            # Non-root: pause briefly before driving the pin.
            time.sleep(1)
        self.gpio.dir(mraa.DIR_OUT)
        self.gpio.write(0)
        # Change LED color preset. Google pattern is enabled by default.
        # Echo stands for Amazon Echo Speaker.
        pixel_ring.change_pattern('echo')
        # Load MQTT settings.
        config = read_json("config")
        self.mqtt_address = config["mqttAddress"]
        self.mqtt_username = config["mqttUser"]
        # NOTE(review): "<PASSWORD>" looks like an anonymisation placeholder;
        # the real key is presumably "mqttPassword" — confirm against the
        # actual config file before changing it.
        self.mqtt_password = config["<PASSWORD>"]
        self.__init_mqtt_client()

    async def start(self):
        """Connect to the broker and start paho's background network loop.

        Bug fix: paho's ``loop_start()`` is synchronous and returns None, so
        the original ``task = await self.mqtt_client.loop_start()`` raised
        ``TypeError: object NoneType can't be used in 'await' expression``.
        """
        self.mqtt_client.connect(self.mqtt_address)
        self.mqtt_client.loop_start()

    def send(self, topic, message):
        """Publish ``message`` on ``topic``."""
        self.mqtt_client.publish(topic, message)

    def stop(self):
        """Stop the MQTT loop and restore GPIO state on exit."""
        self.gpio.write(1)
        self.mqtt_client.loop_stop(True)
        print(current_time(), 'Disconnected from MQTT server.')

    def __init_mqtt_client(self):
        # Random client id so concurrent instances don't disconnect each other.
        self.mqtt_client = mqtt.Client("RespeakerLED-" + uuid.uuid4().hex)
        self.mqtt_client.username_pw_set(self.mqtt_username, self.mqtt_password)
        self.mqtt_client.on_connect = self.__on_mqtt_connect
        self.mqtt_client.on_message = self.__on_mqtt_message

    def __on_mqtt_connect(self, client, obj, flags, rc):
        print(current_time(), 'Connected to MQTT server.')
        self.mqtt_client.subscribe('respeaker/led/#')

    def __on_mqtt_message(self, client, obj, msg):
        """Map LED topics to pixel-ring animations."""
        topic = msg.topic
        if topic.endswith('/wake'):
            # Payload is the direction-of-arrival angle in degrees; shift by
            # the 60° mount offset so the lit LED points at the speaker.
            angle = msg.payload.decode('utf-8')
            adjusted_angle = (int(angle) + 360 - 60) % 360
            pixel_ring.wakeup(adjusted_angle)
        elif topic.endswith('/listen'):
            pixel_ring.listen()
        elif topic.endswith('/speak'):
            pixel_ring.speak()
        elif topic.endswith('/think'):
            pixel_ring.think()
        elif topic.endswith('/sleep'):
            pixel_ring.off()
| 2.59375
| 3
|
pdbattach/exchange/exchange.py
|
jschwinger233/pdbattach
| 15
|
12780860
|
<reponame>jschwinger233/pdbattach<filename>pdbattach/exchange/exchange.py
from ..utils import singleton
class Subscriber:
    """Receives messages published through the Exchange.

    Each incoming message is dispatched to ``handle_msg_<TypeName>`` when
    such a handler exists; messages with no matching handler are ignored.
    """

    def recv(self, msg):
        handler_name = "handle_msg_" + msg.__class__.__name__
        handler = getattr(self, handler_name, None)
        if handler is not None:
            handler(msg)
@singleton
class Exchange:
    """Process-wide message fan-out.

    Every message handed to :meth:`send` is forwarded to each attached
    :class:`Subscriber`.
    """

    def __init__(self):
        self._subs = set()

    def attach(self, sub: Subscriber):
        """Register ``sub`` to receive future messages."""
        self._subs.add(sub)

    def detach(self, sub: Subscriber):
        """Stop delivering to ``sub`` (KeyError if it was never attached)."""
        self._subs.remove(sub)

    def send(self, msg):
        """Deliver ``msg`` to every subscriber, in unspecified order."""
        for subscriber in self._subs:
            subscriber.recv(msg)
| 2.375
| 2
|
wave_freq.py
|
JonahY/AE_GUI
| 2
|
12780861
|
<reponame>JonahY/AE_GUI<gh_stars>1-10
"""
@version: 2.0
@author: Jonah
@file: wave_freq.py
@Created time: 2020/12/15 00:00
@Last Modified: 2021/12/18 19:07
"""
from plot_format import plot_norm
from ssqueezepy import ssq_cwt
from scipy.fftpack import fft
import array
import numpy as np
import matplotlib.pyplot as plt
import os
from tqdm import tqdm
from pac import Preprocessing
from plotwindow import PlotWindow
class Waveform:
    """Plot and export acoustic-emission (AE) transient waveforms.

    ``data_tra`` holds per-hit transient records whose tuple layout differs
    between the two supported acquisition devices ('vallen' and 'pac').
    """

    def __init__(self, color_1, color_2, data_tra, input, output, status, device, thr_dB=25, magnification_dB=60):
        self.data_tra = data_tra
        self.input = input
        self.output = output
        self.color_1 = color_1
        self.color_2 = color_2
        self.status = status
        self.device = device
        # Threshold converted from dB to μV: 10 ** (thr_dB / 20).
        self.thr_μV = pow(10, thr_dB / 20)
        self.process = Preprocessing(None, thr_dB, magnification_dB, input, None)

    def cal_wave(self, i, valid=True):
        """Decode one transient record ``i`` into (time [μs], signal [μV]).

        With ``valid`` True, the waveform is trimmed to the span where
        |signal| exceeds the trigger threshold.
        """
        if self.device == 'vallen':
            # Time, Chan, Thr, SampleRate, Samples, TR_mV, Data, TRAI
            sig = np.multiply(array.array('h', bytes(i[-2])), i[-3] * 1000)
            time = np.linspace(0, pow(i[-5], -1) * (i[-4] - 1) * pow(10, 6), i[-4])
            thr = i[2]
            if valid:
                valid_wave_idx = np.where(abs(sig) >= thr)[0]
                start = time[valid_wave_idx[0]]
                end = time[valid_wave_idx[-1]]
                duration = end - start
                sig = sig[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
                time = np.linspace(0, duration, sig.shape[0])
        elif self.device == 'pac':
            sig = i[-2]
            time = np.linspace(0, i[2] * (i[-3] - 1) * pow(10, 6), i[-3])
            if valid:
                valid_wave_idx = np.where(abs(sig) >= self.thr_μV)[0]
                start = time[valid_wave_idx[0]]
                end = time[valid_wave_idx[-1]]
                duration = end - start
                sig = sig[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
                time = np.linspace(0, duration, sig.shape[0])
        return time, sig

    def find_wave(self, Dur, Eny, cls_KKM, chan, dur_lim, eny_lim):
        """Print hits of cluster ``cls_KKM`` inside a log10 Dur/Eny window."""
        for i in np.where((np.log10(Dur)[cls_KKM] > dur_lim[0]) & (np.log10(Dur)[cls_KKM] < dur_lim[1]) &
                          (np.log10(Eny)[cls_KKM] > eny_lim[0]) & (np.log10(Eny)[cls_KKM] < eny_lim[1]))[0]:
            # Idx, Dur, Eny, TRAI
            print(i, np.log10(Dur)[cls_KKM][i], np.log10(Eny)[cls_KKM][i], '{:.0f}'.format(chan[cls_KKM][i][-1]))

    def plot_2cls_wave(self, TRAI_select_1, TRAI_select_2, same, value, valid=False):
        """Plot two waveforms side by side for visual comparison."""
        fig = plt.figure(figsize=(9.2, 3), num='Waveforms with same %s--%d μV' % (same, value))
        fig.text(0.48, 0.24, self.status, fontdict={'family': 'Arial', 'fontweight': 'bold', 'fontsize': 12},
                 horizontalalignment="right")
        fig.text(0.975, 0.24, self.status, fontdict={'family': 'Arial', 'fontweight': 'bold', 'fontsize': 12},
                 horizontalalignment="right")
        i = self.data_tra[TRAI_select_1 - 1]
        if i[-1] != TRAI_select_1:
            print('Error: TRAI %d in data_tra is inconsistent with %d by input!' % (i[-1], TRAI_select_1))
            return
        valid_time, valid_data = self.cal_wave(i, valid=valid)
        ax = fig.add_subplot(1, 2, 1)
        ax.plot(valid_time, valid_data, lw=0.5, color=self.color_1)
        # Horizontal lines mark the ± trigger threshold.
        ax.axhline(abs(i[2]), 0, valid_data.shape[0], linewidth=1, color="black")
        ax.axhline(-abs(i[2]), 0, valid_data.shape[0], linewidth=1, color="black")
        plot_norm(ax, xlabel='Time (μs)', ylabel='Amplitude (μV)', legend=False, grid=True)
        ax2 = fig.add_subplot(1, 2, 2)
        i = self.data_tra[TRAI_select_2 - 1]
        if i[-1] != TRAI_select_2:
            print('Error: TRAI %d in data_tra is inconsistent with %d by input!' % (i[-1], TRAI_select_2))
            return
        valid_time, valid_data = self.cal_wave(i, valid=valid)
        ax2.plot(valid_time, valid_data, lw=0.5, color=self.color_2)
        ax2.axhline(abs(i[2]), 0, valid_data.shape[0], linewidth=1, color="black")
        ax2.axhline(-abs(i[2]), 0, valid_data.shape[0], linewidth=1, color="black")
        plot_norm(ax2, xlabel='Time (μs)', ylabel='Amplitude (μV)', legend=False, grid=True)

    def plot_wave_TRAI(self, k, data_pri, show_features=False, valid=False, cwt=False):
        """Plot (and export to .txt) the waveform with transient index ``k``.

        Optionally overlays a synchrosqueezed CWT spectrogram (``cwt``) and
        prints the hit's features from ``data_pri`` (``show_features``).
        Returns a PlotWindow, or an error string on lookup failure.
        """
        # Waveform with specific TRAI
        try:
            if self.device == 'VALLEN':
                i = self.data_tra[k - 1]
            else:
                # pac stores TRAI offsets relative to the first record.
                i = self.data_tra[k - self.data_tra[0][-1]]
        except IndexError:
            return str('Error: TRAI %d can not be found in data!' % k)
        if i[-1] != k:
            return str('Error: TRAI %d in data_tra is inconsistent with %d by input!' % (i[-1], k))
        time, sig = self.cal_wave(i, valid=valid)
        # Strip trailing zero samples from the end of the record.
        for tmp_tail, s in enumerate(sig[::-1]):
            if s != 0:
                tail = -tmp_tail if tmp_tail > 0 else None
                break
        time, sig = time[:tail], sig[:tail]
        if cwt:
            plotWindow = PlotWindow('Waveform--TRAI: %s' % i[-1], 9.2, 3)
            fig = plotWindow.static_canvas.figure
            fig.subplots_adjust(left=0.076, bottom=0.205, right=0.984, top=0.927, hspace=0.2, wspace=0.26)
            fig.text(0.47, 0.25, self.status, fontdict={'family': 'Arial', 'fontweight': 'bold', 'fontsize': 12},
                     horizontalalignment="right")
            ax = fig.add_subplot(1, 2, 2)
            Twxo, Wxo, ssq_freqs, *_ = ssq_cwt(sig, wavelet='morlet', scales='log-piecewise', fs=i[3], t=time)
            ax.contourf(time, ssq_freqs * 1000, pow(abs(Twxo), 0.5), cmap='cubehelix_r')
            plot_norm(ax, 'Time (μs)', 'Frequency (kHz)', y_lim=[min(ssq_freqs * 1000), 1000], legend=False)
            ax = fig.add_subplot(1, 2, 1)
            ax.plot(time, sig, lw=1)
        else:
            plotWindow = PlotWindow('Waveform--TRAI: %s' % i[-1], 6, 3.6)
            fig = plotWindow.static_canvas.figure
            fig.subplots_adjust(left=0.115, bottom=0.17, right=0.975, top=0.95)
            fig.text(0.96, 0.2, self.status, fontdict={'family': 'Arial', 'fontweight': 'bold', 'fontsize': 12},
                     horizontalalignment="right")
            ax = fig.add_subplot()
            ax.plot(time, sig, lw=1)
        if self.device == 'vallen':
            if show_features:
                try:
                    string = data_pri[np.where(data_pri[:, -1] == i[-1])][0]
                except IndexError:
                    return str('Error: TRAI %d can not be found in data!' % k)
                print("=" * 23 + " Waveform information " + "=" * 23)
                # Print each feature with its per-field rounding precision.
                for info, value, r in zip(
                        ['SetID', 'Time', 'Chan', 'Thr', 'Amp', 'RiseT', 'Dur', 'Eny', 'RMS', 'Counts', 'TRAI'],
                        [j for j in string], [0, 8, 0, 8, 8, 2, 2, 8, 8, 0, 0]):
                    if r == 0:
                        print('%s: %d' % (info, int(value)))
                    else:
                        print('%s: %s' % (info, round(value, r)))
            ax.axhline(abs(i[2]), 0, sig.shape[0], linewidth=1, color="black")
            ax.axhline(-abs(i[2]), 0, sig.shape[0], linewidth=1, color="black")
        elif self.device == 'pac':
            if show_features:
                # time, channel_num, sample_interval, points_num, dataset, hit_num
                # ID, Time(s), Chan, Thr(μV), Thr(dB), Amp(μV), Amp(dB), RiseT(s), Dur(s), Eny(aJ), RMS(μV), Frequency(Hz), Counts
                string = data_pri[np.where(data_pri[:, 0] == i[-1])][0]
                print("=" * 23 + " Waveform information " + "=" * 23)
                for info, value, r in zip(
                        ['Hit number', 'Time', 'Chan', 'Thr', 'Amp', 'RiseT', 'Dur', 'Eny', 'RMS', 'Counts'],
                        [j for j in string[np.array([0, 1, 2, 3, 5, 7, 8, 9, 10, 12])]], [0, 7, 0, 8, 8, 7, 7, 8, 8, 0]):
                    if r == 0:
                        print('%s: %d' % (info, int(value)))
                    else:
                        print('%s: %s' % (info, round(value, r)))
            ax.axhline(abs(self.thr_μV), 0, sig.shape[0], linewidth=1, color="black")
            ax.axhline(-abs(self.thr_μV), 0, sig.shape[0], linewidth=1, color="black")
        plot_norm(ax, 'Time (μs)', 'Amplitude (μV)', legend=False, grid=True)
        # Export the plotted samples; NOTE(review): the loop variable ``k``
        # shadows the TRAI parameter ``k`` from here on.
        with open('/'.join([self.output, self.status]) + '-%d' % i[-1] + '.txt', 'w') as f:
            f.write('Time, Signal\n')
            for k in range(sig.shape[0]):
                f.write("{}, {}\n".format(time[k], sig[k]))
        return plotWindow

    def plot_wave_realtime(self, k, file_list, file_idx, chan, valid=False):
        """Plot hit ``k`` read directly from raw files via Preprocessing."""
        try:
            sig, time = self.process.read_wave_realtime(file_list, file_idx, chan, k, valid)
        except TypeError:
            # read_wave_realtime returned nothing usable — silently skip.
            return
        fig = plt.figure(figsize=(6, 4.1), num='Waveform--Hit number:%d (%s)' % (k, valid))
        ax = fig.add_subplot(1, 1, 1)
        ax.plot(time, sig, lw=1)
        plt.axhline(abs(self.thr_μV), 0, sig.shape[0], linewidth=1, color="black")
        plt.axhline(-abs(self.thr_μV), 0, sig.shape[0], linewidth=1, color="black")
        plot_norm(ax, 'Time (μs)', 'Amplitude (μV)', title='Hit number:%d' % k, legend=False, grid=True)

    def save_wave(self, TRAI, pop):
        """Save each waveform in ``TRAI`` to a '<status>_pop<pop>-<n>.txt' file."""
        # Save waveform
        for idx, j in enumerate(tqdm(TRAI)):
            i = self.data_tra[j - 1]
            valid_time, valid_data = self.cal_wave(i)
            with open('/'.join([self.output, self.status]) + '_pop%s-%d' % (pop, idx + 1) + '.txt', 'w') as f:
                f.write('Time, Signal\n')
                for k in range(valid_data.shape[0]):
                    f.write("{}, {}\n".format(valid_time[k], valid_data[k]))
class Frequency:
    """FFT-based frequency analysis and plotting of AE transient records."""

    def __init__(self, color_1, color_2, data_tra, path, path_pri, status, device, thr_dB=25, size=500):
        self.data_tra = data_tra
        self.waveform = Waveform(color_1, color_2, data_tra, path, path_pri, status, device, thr_dB)
        self.size = size
        # Fixed frequency grid 0..1 MHz used for averaged spectra.
        self.grid = np.linspace(0, pow(10, 6), self.size)
        self.status = status
        self.device = device
        # Threshold converted from dB to μV.
        self.thr = pow(10, thr_dB / 20)

    def cal_frequency(self, k, valid=True):
        """Return the one-sided amplitude spectrum of record ``k``.

        Returns ``(half_frq, normalization_half)`` — frequencies [Hz] and
        FFT magnitudes normalised by the sample count.
        """
        if self.device == 'vallen':
            i = self.data_tra[k]
            sig = np.multiply(array.array('h', bytes(i[-2])), i[-3] * 1000)
            thr, Fs = i[2], i[3]
            # Ts = 1 / Fs
            if valid:
                valid_wave_idx = np.where(abs(sig) >= thr)[0]
                sig = sig[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
        elif self.device == 'pac':
            i = self.data_tra[k]
            Fs = 1 / i[2]
            sig = i[-2]
            if valid:
                valid_wave_idx = np.where(abs(sig) >= self.thr)[0]
                sig = sig[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
        N = sig.shape[0]
        fft_y = fft(sig)
        abs_y = np.abs(fft_y)
        normalization = abs_y / N
        # Keep only the first half of the spectrum (real input symmetry).
        normalization_half = normalization[range(int(N / 2))]
        frq = (np.arange(N) / N) * Fs
        half_frq = frq[range(int(N / 2))]
        return half_frq, normalization_half

    def cal_ave_freq(self, TRAI):
        """Accumulate the spectra of all hits in ``TRAI`` onto self.grid."""
        Res = np.array([0 for _ in range(self.size)]).astype('float64')
        for j in TRAI:
            half_frq, normalization_half = self.cal_frequency(j - 1, valid=False)
            # Number of FFT bins that fall below 1 MHz.
            valid_idx = int((pow(10, 6) / max(half_frq)) * half_frq.shape[0])
            tmp = [0 for _ in range(self.size)]
            i = 1
            # Bin each FFT sample into the first grid cell containing it;
            # relies on half_frq being monotonically increasing.
            for j, k in zip(half_frq[:valid_idx], normalization_half[:valid_idx]):
                while True:
                    if self.grid[i - 1] <= j < self.grid[i]:
                        tmp[i - 1] += k
                        break
                    i += 1
            Res += np.array(tmp)
        return Res

    def plot_wave_frequency(self, TRAI_select, pop):
        """Plot up to five waveform/spectrum pairs for the selected TRAIs."""
        fig = plt.figure(figsize=(6.5, 10), num='Waveform & Frequency--pop%s' % pop)
        for idx, j in enumerate(TRAI_select):
            i = self.data_tra[j - 1]
            valid_time, valid_data = self.waveform.cal_wave(i, valid=False)
            half_frq, normalization_half = self.cal_frequency(j - 1, valid=False)
            ax = fig.add_subplot(5, 2, 1 + idx * 2)
            ax.plot(valid_time, valid_data)
            ax.axhline(abs(i[2]), 0, valid_data.shape[0], linewidth=1, color="black")
            ax.axhline(-abs(i[2]), 0, valid_data.shape[0], linewidth=1, color="black")
            plot_norm(ax, 'Time (μs)', 'Amplitude (μV)', legend=False, grid=True)
            ax = fig.add_subplot(5, 2, 2 + idx * 2)
            ax.plot(half_frq, normalization_half)
            plot_norm(ax, 'Freq (Hz)', '|Y(freq)|', x_lim=[0, pow(10, 6)], legend=False)

    def plot_ave_freq(self, Res, N, title):
        """Plot the accumulated spectrum ``Res`` divided by hit count ``N``."""
        fig = plt.figure(figsize=(6, 4.1), num='Average Frequency--%s' % title)
        ax = fig.add_subplot()
        ax.plot(self.grid, Res / N)
        plot_norm(ax, xlabel='Freq (Hz)', ylabel='|Y(freq)|', title='Average Frequency', legend=False)

    def plot_freq_TRAI(self, k, valid=False):
        """Plot the spectrum of the hit with TRAI ``k``."""
        # Frequency with specific TRAI
        half_frq, normalization_half = self.cal_frequency(k-1, valid=valid)
        fig = plt.figure(figsize=(6, 4.1), num='Frequency--TRAI:%d (%s)' % (k, valid))
        ax = plt.subplot()
        ax.plot(half_frq, normalization_half)
        plot_norm(ax, 'Freq (Hz)', '|Y(freq)|', x_lim=[0, pow(10, 6)], title='TRAI:%d' % k, legend=False)

    def plot_2cls_freq(self, TRAI_1, TRAI_2, same):
        """Plot the spectra of two TRAI lists side by side (up to 5 rows)."""
        fig = plt.figure(figsize=(6.5, 10), num='Frequency with same %s' % same)
        for idx, k in enumerate(TRAI_1):
            half_frq, normalization_half = self.cal_frequency(k - 1)
            ax = fig.add_subplot(5, 2, 1 + idx * 2)
            ax.plot(half_frq, normalization_half)
            plot_norm(ax, 'Freq (Hz)', '|Y(freq)|', x_lim=[0, pow(10, 6)], legend=False)
            half_frq, normalization_half = self.cal_frequency(TRAI_2[idx] - 1)
            ax2 = fig.add_subplot(5, 2, 2 + idx * 2)
            ax2.plot(half_frq, normalization_half)
            plot_norm(ax2, 'Freq (Hz)', '|Y(freq)|', x_lim=[0, pow(10, 6)], legend=False)
| 2.265625
| 2
|
xv_leak_tools/test_device/linux_device.py
|
UAEKondaya1/expressvpn_leak_testing
| 219
|
12780862
|
<filename>xv_leak_tools/test_device/linux_device.py
import platform
import signal
from xv_leak_tools.exception import XVEx
from xv_leak_tools.helpers import unused
from xv_leak_tools.log import L
from xv_leak_tools.test_device.desktop_device import DesktopDevice
from xv_leak_tools.test_device.connector_helper import ConnectorHelper
from xv_leak_tools.process import XVProcessException
# TODO: consider a UnixDevice as ancestor of MacOSDevice, LinuxDevice
class LinuxDevice(DesktopDevice):
    """Desktop test device backed by a Linux host.

    Commands run on the device through a ConnectorHelper wrapping the
    connector supplied at construction time.
    """

    def __init__(self, config, connector):
        super().__init__(config, connector)
        self._connector_helper = ConnectorHelper(self)

    @staticmethod
    def local_ips():
        # Not implemented yet for Linux.
        raise XVEx("TODO: Local IPs for Linux")

    @staticmethod
    def open_app(binary_path, root=False):
        # Placeholder: currently only logs when there is nothing to open.
        unused(root)
        if binary_path is None:
            L.debug('Application has no binary path; not opening')
        # TODO: open the application here

    @staticmethod
    def close_app(binary_path, root=False):
        # Placeholder: currently only logs when there is nothing to close.
        unused(root)
        if binary_path is None:
            L.debug('Application has no binary path; not closing')
        # TODO: close the application here

    def os_name(self):
        """Canonical OS identifier used by the framework."""
        return 'linux'

    def os_version(self):
        """Return the distribution description string.

        NOTE(review): platform.linux_distribution() was deprecated in 3.5
        and removed in Python 3.8 — this raises AttributeError on newer
        interpreters; confirm the supported Python version.
        """
        return " ".join(platform.linux_distribution())

    def report_info(self):
        """Collect system info (uname/lsb_release/lscpu) for test reports.

        Individual command failures are logged and skipped so a partial
        report is still produced.
        """
        info = super().report_info()
        commands = [
            ['uname', '-a'],
            ['lsb_release', '-a'],
            ['lscpu'],
        ]
        for command in commands:
            try:
                info += self._connector_helper.check_command(command)[0]
            except XVProcessException as ex:
                L.warning("Couldn't get system info using command {}:\n{}".format(command, ex))
        return info

    def kill_process(self, pid):
        """SIGKILL ``pid`` on the device (runs as root via a scriptlet)."""
        L.debug("Killing process {}".format(pid))
        return self._connector_helper.execute_scriptlet(
            'remote_os_kill.py', [pid, int(signal.SIGKILL)], root=True)

    def pgrep(self, process_name):
        """Return pids matching ``process_name`` via a remote pgrep scriptlet."""
        L.debug("pgrep-ing for {}".format(process_name))
        return self._connector_helper.execute_scriptlet('pgrep.py', [process_name], root=True)

    def command_line_for_pid(self, pid):
        """Return the command line of ``pid`` as seen on the device."""
        return self._connector_helper.execute_scriptlet('command_line_for_pid.py', [pid], root=True)
| 2
| 2
|
app.py
|
alexwagg/SimplePaymentChannel
| 7
|
12780863
|
from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, send_from_directory
import mysql.connector
from web3 import Web3, HTTPProvider
import rlp
import json
import my_connections
app = Flask(__name__)
## set this to your eth node, this is localhost default config
w3 = Web3(HTTPProvider('http://127.0.0.1:8545'))
## payment channel address
channel_address = '0x19bee2ce208ae4f1a333cffc80976349d22b35f5'
## payment channel abi
channel_abi = json.load(open('./static/abi/PaymentChannelABI.json'))
## initializing the contract with this address
channel_instance = w3.eth.contract(address=channel_address, abi=channel_abi)
## MANDATORY PAYMENT in Wei, obviously this shouldn't be hardcoded, but for now...
PAYMENT_SIZE = 1000000000000000
@app.route('/', methods=['GET'])
def home():
    """Serve the static landing page."""
    return render_template('home.html')
@app.route('/opened-channel', methods=['POST'])
def opened_channel():
    """Register a channel the client claims to have opened on-chain.

    The channel id is taken from the form as a hex string; validity is
    checked against our own eth node before the channel is trusted.
    """
    try:
        channel_id = w3.toInt(hexstr = request.form['channel_id'])
    # NOTE(review): bare except also hides a KeyError from a missing form field
    except:
        return json.dumps({'success': False, 'msg': 'Channel ID must be hex-encoded'})
    ## now, use our own eth node to verify that the user actually created a channel with the proper deposit
    ## also, if the user created a channel, and it isn't in out database, this function will add it.
    success, msg, deposit, paid = determine_valid_channel(channel_id)
    return json.dumps({'success': success, 'msg': msg, 'deposit': deposit, 'paid': paid})
@app.route('/pay-channel', methods=['POST'])
def pay_channel():
    """Accept an off-chain micropayment for an open channel.

    Validates the channel, EC-recovers the signer of the submitted blob,
    enforces the fixed PAYMENT_SIZE increment, then stores the new running
    total together with the latest signed blob. Returns a JSON status dict.
    """
    amt_to_pay = int(request.form['amt_to_pay'])
    channel_id = int(request.form['channel_id'])
    ## signed blob arrives as a 0x-prefixed hex string
    signed_blob = request.form['signed_blob']
    ## check if this channel is valid, and insert into the database if we do not have it;
    ## this can occur if a user created a channel directly on-chain without our front-end
    success, msg, deposit, paid = determine_valid_channel(channel_id, amt_to_pay)
    if not success:
        return json.dumps({'success': success, 'msg': msg})
    ## retrieve r, s, v from the blob: '0x' | r (64 hex chars) | s (64 hex chars) | v
    r = w3.toBytes(hexstr=signed_blob[2:66])
    s = w3.toBytes(hexstr=signed_blob[66:130])
    v = w3.toInt(hexstr=signed_blob[130:])
    try:
        recovered_address = channel_instance.call().testECRecover(channel_id, amt_to_pay, r, s, v)
    except Exception:  # narrowed from a bare except; web3 raises on malformed input
        return json.dumps({'success': False, 'msg': 'Cannot ECRecover these values', 'deposit': deposit, 'paid': paid})
    # NOTE(review): password attribute reconstructed as my_connections.mysql_password
    # to match the other connect() calls in this module -- the original text was
    # a redacted placeholder.
    conn = mysql.connector.connect(user=my_connections.mysql_user, password=my_connections.mysql_password, host=my_connections.mysql_host, database=my_connections.mysql_dbname)
    cursor = conn.cursor()
    try:
        query = 'SELECT payer_address, paid, deposit FROM OpenChannels WHERE channel_id = %s'
        cursor.execute(query, (channel_id, ))
        rows = cursor.fetchall()
        actual_address, paid, deposit = rows[0]
        ## force the payment size to be correct... ECRecover already covers this
        ## (a wrong amount recovers a different address), but this branch gives a
        ## clearer error message
        if paid + PAYMENT_SIZE != amt_to_pay:
            return json.dumps({'success': False, 'msg': 'Incorrect payment size.', 'deposit': deposit, 'paid': paid})
        if recovered_address != actual_address:
            return json.dumps({'success': False, 'msg': 'Not owner of channel', 'deposit': deposit, 'paid': paid})
        query = 'UPDATE OpenChannels SET paid = %s, signed_blob = %s WHERE channel_id = %s'
        cursor.execute(query, (amt_to_pay, signed_blob, channel_id))
        conn.commit()
        return json.dumps({'success': True, 'msg': 'Channel paid successfully!', 'deposit': deposit, 'paid': amt_to_pay})
    finally:
        ## single cleanup path -- the original duplicated close() calls in every
        ## branch and closed the connection before the cursor
        cursor.close()
        conn.close()
@app.route('/close-channel', methods=['POST'])
def close_channel_request():
    """HTTP entry point for closing a channel; delegates to close_channel()."""
    try:
        channel_id = int(request.form['channel_id'])
    except:
        return json.dumps({'success': False, 'msg': 'Bad channel id.', 'deposit': 0, 'paid': 0})
    ## this should probably send to a database where all of the 'requests' sit until the server iterates over
    ## them and batch closes them for efficiency reasons, however I'm just gonna immedately call close_channel(channel_id)
    success, msg, deposit, paid = close_channel(channel_id)
    return json.dumps({'success': success, 'msg': msg, 'deposit': deposit, 'paid': paid})
def close_channel(channel_id):
    """Settle an open channel on-chain and archive it in ClosedChannels.

    Looks up the channel's latest signed payment blob, submits a
    closeChannel transaction from the owner account, moves the row from
    OpenChannels to ClosedChannels, and reports the outcome.

    :param channel_id: numeric id of the payment channel to close
    :return: tuple (success, message, deposit_wei, paid_wei)
    """
    # NOTE(review): password attribute reconstructed as my_connections.mysql_password
    # to match the other connect() calls in this module (original was redacted).
    conn = mysql.connector.connect(user=my_connections.mysql_user, password=my_connections.mysql_password, host=my_connections.mysql_host, database=my_connections.mysql_dbname)
    cursor = conn.cursor()
    try:
        ## get the data for this specific channel
        query = 'SELECT payer_address, open_timestamp, deposit, paid, signed_blob FROM OpenChannels WHERE channel_id = %s'
        cursor.execute(query, (channel_id, ))
        rows = cursor.fetchall()
        ## fetchall() yields [] when no row matches
        if not rows:
            return (False, 'Channel does not exist in database', 0, 0)
        payer_address, open_timestamp, deposit, paid, signed_blob = rows[0]
        if signed_blob == '' or paid == 0:
            return (False, 'Channel has not been used', deposit, 0)
        ## retrieve r, s, v from the blob: '0x' | r (64 hex) | s (64 hex) | v
        r = w3.toBytes(hexstr=signed_blob[2:66])
        s = w3.toBytes(hexstr=signed_blob[66:130])
        v = w3.toInt(hexstr=signed_blob[130:])
        ## build and then send a transaction from the owner address to the contract
        tx_hash = str(channel_instance.transact({'from': my_connections.owner_pubkey}).closeChannel(channel_id, paid, r, s, v))
        ## WARNING: we are NOT currently checking whether the transaction succeeds.
        ## A follow-up job should verify the receipt before archiving; for now the
        ## channel is optimistically recorded as closed.
        query = 'INSERT INTO ClosedChannels (channel_id, payer_address, open_timestamp, deposit, paid, close_tx_hash, signed_blob) VALUES (%s, %s, %s, %s, %s, %s, %s)'
        cursor.execute(query, (channel_id, payer_address, open_timestamp, deposit, paid, tx_hash, signed_blob))
        conn.commit()
        ## now delete this entry in the OpenChannels database
        query = 'DELETE FROM OpenChannels WHERE channel_id = %s'
        cursor.execute(query, (channel_id, ))
        conn.commit()
        return (True, 'Channel closed at transaction: ' + tx_hash + '. Thanks!', deposit, paid)
    finally:
        ## the original leaked the cursor/connection on the two early returns
        cursor.close()
        conn.close()
def determine_valid_channel(channel_id, amt_to_pay=0):
    """Check that a channel is open, unexpired, and has spendable balance.

    If the channel is valid on-chain but unknown to us, it is inserted into
    the OpenChannels table.
    NOTE(review): ownership of the channel's private key is not verified here.

    :param channel_id: numeric channel id
    :param amt_to_pay: proposed new cumulative payment (0 when just validating)
    :return: tuple (valid, reason, deposit_wei, paid_wei)
    """
    ## the channel must still be open on-chain
    is_closed = channel_instance.call().getClosedStatus(channel_id)
    if is_closed:
        return (False, 'Closed channel', 0, 0)
    ## get current block timestamp, channel open timestamp, and channel lifetime
    latest_timestamp = w3.eth.getBlock('latest').timestamp
    open_timestamp = channel_instance.call().getOpenTime(channel_id)
    expire_timedelta = channel_instance.call().CHANNELLIFETIME()
    ## NOTE(review): original comment says "expired, or will expire in 6 hours",
    ## but this condition only trips 6 hours (21600 s) AFTER expiry; confirm the
    ## intended sign of the 21600 offset before changing it.
    if open_timestamp + expire_timedelta < latest_timestamp - 21600:
        return (False, 'Old channel', 0, 0)
    # NOTE(review): password attribute reconstructed as my_connections.mysql_password
    # to match the other connect() calls in this module (original was redacted).
    conn = mysql.connector.connect(user=my_connections.mysql_user, password=my_connections.mysql_password, host=my_connections.mysql_host, database=my_connections.mysql_dbname)
    cursor = conn.cursor()
    try:
        ## look up recorded payments to see if the user still has balance left
        query = 'SELECT paid FROM OpenChannels WHERE channel_id = %s'
        cursor.execute(query, (channel_id, ))
        rows = cursor.fetchall()
        ## the deposit always comes from the chain, never from the DB
        deposit_amt = channel_instance.call().getDeposit(channel_id)
        if not rows:
            ## channel not in db, so we have no payment data yet
            payer_address = channel_instance.call().getPayer(channel_id)
            ## a zero payer address means the channel was never opened
            if payer_address == '0':
                return (False, 'Channel unopened', 0, 0)
            query = 'INSERT INTO OpenChannels (channel_id, payer_address, open_timestamp, deposit, paid) VALUES (%s, %s, %s, %s, %s)'
            cursor.execute(query, (channel_id, payer_address, open_timestamp, deposit_amt, 0))
            conn.commit()
            return (True, 'Channel added to database', deposit_amt, 0)
        ## channel already known: check there is still "space" left to transact
        paid_amt = rows[0][0]
        if paid_amt > deposit_amt or amt_to_pay > deposit_amt:
            return (False, 'Channel fully paid', deposit_amt, paid_amt)
        return (True, 'Channel in db', deposit_amt, paid_amt)
    finally:
        ## single cleanup path replaces the per-branch close() duplication
        cursor.close()
        conn.close()
## for debugging purposes
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
| 2.21875
| 2
|
conv_sop1.py
|
tomtkg/EC-Comp2021
| 0
|
12780864
|
<reponame>tomtkg/EC-Comp2021<filename>conv_sop1.py
import sys
import random
from deap import base
from deap import creator
from deap import tools
from eval import evaluator
### GAの設定
# - N_IND:個体数
# - N_GEN:世代数
# - S_TOUR: トーナメントサイズ
# - P_CROSS_1:交叉確率(交叉を行うかどうか決定する確率)
# - P_CROSS_2:交叉確率(一様交叉を行うときに,その遺伝子座が交叉する確率)
# - P_MUTATION:各遺伝子座が突然変異する確率
N_IND = 20
N_GEN = 100
S_TOUR = 3
P_CROSS_1 = 0.5
P_CROSS_2 = 0.5
P_MUTATION = 0.025
def create_valid_pop():
    """Build the initial population: N_IND bit-strings of length evaluator.D.

    Each gene is drawn independently and set to 1 with probability 0.5,
    consuming exactly one random.random() call per gene (so the RNG stream
    matches the original element-by-element construction).
    """
    population = []
    for _ in range(N_IND):
        genome = [1 if random.random() < 0.5 else 0 for _ in range(evaluator.D)]
        population.append(genome)
    return population
def main():
    ### Main routine
    # The GA is implemented with DEAP.
    # Details: https://deap.readthedocs.io/en/master/index.html
    # Genes: 0 or 1, randomly generated (initialization could be improved)
    # Crossover: uniform crossover
    # Mutation: bit flip
    # Selection: tournament selection
    creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
    creator.create("Individual", list, fitness=creator.FitnessMin)
    toolbox = base.Toolbox()
    # args[1]/args[2] configure the evaluator; args[3] seeds the RNG
    args = sys.argv
    evaluator.init(args[1], args[2])
    random.seed(args[3])
    valid_pop = create_valid_pop()
    def initPopulation(pcls, ind_init, file):
        # Wrap each pre-built genome in an Individual and collect them in pcls.
        return pcls(ind_init(c) for c in file)
    toolbox.register("population_byhand", initPopulation, list, creator.Individual, valid_pop)
    toolbox.register("mate", tools.cxUniform)
    toolbox.register("mutate", tools.mutFlipBit, indpb=P_MUTATION)
    toolbox.register("select", tools.selTournament, tournsize=S_TOUR)
    # Create the initial population
    pop = toolbox.population_byhand()
    # Evaluate all individuals; one CSV line per individual goes to stdout
    f1_list, _, m_list = evaluator.evaluation(pop)
    for ind, f1, m in zip(pop, f1_list, m_list):
        ind.fitness.values = f1,
        print(0,f1,m,sum(ind),*ind,sep=',')
    # count tracks the evaluation budget; the loop stops once it exceeds 1000
    count = 2*N_IND
    print(count, *min([ind.fitness.values for ind in pop]), sep=",", file=sys.stderr)
    # Run the evolutionary loop
    for g in range(1, N_GEN):
        if count > 1000:
            break
        # Select and clone the next generation
        offspring = toolbox.select(pop, len(pop))
        offspring = list(map(toolbox.clone, offspring))
        # Crossover (pairs of neighbours; fitness invalidated on change)
        for child1, child2 in zip(offspring[::2], offspring[1::2]):
            if random.random() < P_CROSS_1:
                toolbox.mate(child1, child2, P_CROSS_2)
                del child1.fitness.values
                del child2.fitness.values
        # Mutation
        for mutant in offspring:
            if random.random() < P_MUTATION:
                toolbox.mutate(mutant)
                del mutant.fitness.values
        # Re-evaluate only individuals whose fitness was invalidated above
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        f1_list, _, m_list = evaluator.evaluation(invalid_ind)
        for ind, f1, m in zip(invalid_ind, f1_list, m_list):
            ind.fitness.values = f1,
            print(g,f1,m,sum(ind),*ind,sep=',')
        # Replace the population with the offspring (generational GA)
        pop[:] = offspring
        count += 2*len(invalid_ind)
        print(count, *min([ind.fitness.values for ind in pop]), sep=",", file=sys.stderr)
if __name__ == "__main__":
main()
| 2.234375
| 2
|
gen_mx_traits.py
|
Dunkelschorsch/arrayadapt
| 1
|
12780865
|
#!env python
import re
def c_to_mx_typename(c_type, special_map):
    """Map a C type name to the matching mx class-id stem, upper-cased.

    Fixed-width types lose their ``_t`` suffix (``uint32_t`` -> ``UINT32``);
    entries in *special_map* override the derived name (e.g. ``float`` ->
    ``single`` -> ``SINGLE``).

    :param c_type: C type name such as ``'double'`` or ``'int8_t'``
    :param special_map: dict mapping C type -> MATLAB class name override
    :return: upper-case mx type stem used in ``mx<STEM>_CLASS``
    """
    # Raw string for the regex; `is None` instead of `== None` (original idiom bug).
    m = re.search(r"([a-zA-Z0-9]+)_t", c_type)
    mx_type = c_type if m is None else m.group(1)
    # Explicit overrides win over the regex-derived name.
    if c_type in special_map:
        mx_type = special_map[c_type]
    return mx_type.upper()
# C types to generate mx_traits specializations for; float/bool have explicit
# MATLAB-name overrides below (single/logical).
c_type = ('void', 'bool', 'double', 'float', 'uint64_t', 'int64_t', 'uint32_t', 'int32_t', 'uint16_t', 'int16_t', 'uint8_t', 'int8_t')
special_map = {'float': 'single', 'bool': 'logical' }
# Unspecialized primary template emitted once at the top of the header.
empty_trait = "template <class T>\nstruct mx_traits { };\n\n"
header_guard = """#ifndef HAVE_MX_TRAITS_HPP
#define HAVE_MX_TRAITS_HPP
#include <mex.h>
"""
# One specialization per (constness, type): class id constant plus a name() helper.
trait_template = """// %s
template<> struct mx_traits<%s> {
    static const mxClassID classId = mx%s_CLASS;
    static inline const char* name() {
        return "%s";
    }
};
"""
# Write the generated header; the with-block guarantees the handle is closed
# even if formatting raises (the original left the file open on error).
with open('include/mx_traits.hpp', 'wt') as mx_traits_header:
    mx_traits_header.write(header_guard)
    mx_traits_header.write(empty_trait)
    for type_curr in c_type:
        # Emit both the plain and the const-qualified specialization.
        for constness in ("", "const ",):
            full_type = constness + type_curr
            mx_traits_header.write(trait_template % (full_type, full_type, c_to_mx_typename(type_curr, special_map), full_type))
    mx_traits_header.write("#endif // HAVE_MX_TRAITS_HPP\n")
| 2.234375
| 2
|
engineer/contrib/__init__.py
|
tylerbutler/engineer
| 6
|
12780866
|
<reponame>tylerbutler/engineer<filename>engineer/contrib/__init__.py
# coding=utf-8
__author__ = '<NAME> <<EMAIL>>'
| 1.023438
| 1
|
localstack/services/cloudwatch/cloudwatch_listener.py
|
doytsujin/localstack
| 0
|
12780867
|
from moto.cloudwatch.models import cloudwatch_backends
from localstack.services.generic_proxy import ProxyListener
from localstack.utils.aws import aws_stack
# path for backdoor API to receive raw metrics
PATH_GET_RAW_METRICS = "/cloudwatch/metrics/raw"
class ProxyListenerCloudWatch(ProxyListener):
    """Proxy listener that adds a backdoor endpoint serving raw metric data."""

    def forward_request(self, method, path, data, headers):
        # TODO: solve with custom url routing rules for ASF providers
        if not path.startswith(PATH_GET_RAW_METRICS):
            # Not the raw-metrics backdoor: let the proxy forward the request.
            return True
        region_metrics = cloudwatch_backends[aws_stack.get_region()].metric_data
        serialized = []
        for metric in region_metrics:
            dimensions = [{"n": dim.name, "v": dim.value} for dim in metric.dimensions]
            serialized.append(
                {
                    "ns": metric.namespace,
                    "n": metric.name,
                    "v": metric.value,
                    "t": metric.timestamp,
                    "d": dimensions,
                }
            )
        return {"metrics": serialized}
# instantiate listener
UPDATE_CLOUD_WATCH = ProxyListenerCloudWatch()
| 2
| 2
|
10 Random Forest/02 Random Forest.py
|
Free-Machine-Learning/Machine-Learning-Classifiers
| 10
|
12780868
|
<reponame>Free-Machine-Learning/Machine-Learning-Classifiers
# Import the libraries
import math
import numpy as np
import pandas as pd
from datetime import datetime
import seaborn as sns
import matplotlib.pyplot as plt
# NOTE(review): '%matplotlib inline' is an IPython magic -- this file only runs
# inside a notebook/IPython session, not as a plain Python script.
%matplotlib inline
plt.style.use('seaborn-whitegrid')
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
# Import the data
df = pd.read_csv('data/00 df.csv')
# split the data into train & test (the split is pre-computed in the 'flag' column)
train = df[df['flag']=='train']
test = df[df['flag']=='test']
# Categorical features to one-hot encode (drop_first avoids a redundant dummy column).
cat_feats = ['age_bin','capital_gl_bin','education_bin','hours_per_week_bin','msr_bin','occupation_bin','race_sex_bin']
y_train = train['y']
x_train = train[['age_bin','capital_gl_bin','education_bin','hours_per_week_bin','msr_bin','occupation_bin','race_sex_bin']]
x_train = pd.get_dummies(x_train,columns=cat_feats,drop_first=True)
y_test = test['y']
x_test = test[['age_bin','capital_gl_bin','education_bin','hours_per_week_bin','msr_bin','occupation_bin','race_sex_bin']]
x_test = pd.get_dummies(x_test,columns=cat_feats,drop_first=True)
# Random Forest
# Sweep 1: number of trees vs. test-set accuracy.
results = []
n_estimaor_options = [20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100]
for trees in n_estimaor_options:
    model = RandomForestClassifier(trees, oob_score=True, n_jobs=-1, random_state=101)
    model.fit(x_train, y_train)
    y_pred = model.predict(x_test)
    accuracy = np.mean(y_test==y_pred)
    results.append(accuracy)
plt.figure(figsize=(8,4))
pd.Series(results, n_estimaor_options).plot(color="darkred",marker="o")
# Sweep 2: max_features vs. accuracy, with 70 trees fixed from sweep 1.
results = []
max_features_options = ['auto',None,'sqrt',0.95,0.75,0.5,0.25,0.10]
for trees in max_features_options:
    model = RandomForestClassifier(n_estimators=70, oob_score=True, n_jobs=-1, random_state=101, max_features = trees)
    model.fit(x_train, y_train)
    y_pred = model.predict(x_test)
    accuracy = np.mean(y_test==y_pred)
    results.append(accuracy)
plt.figure(figsize=(8,4))
pd.Series(results, max_features_options).plot(kind="bar",color="darkred",ylim=(0.7,0.9))
# Sweep 3: min_samples_leaf vs. accuracy, with the previous two choices fixed.
results = []
min_samples_leaf_options = [5,10,15,20,25,30,35,40,45,50]
for trees in min_samples_leaf_options:
    model = RandomForestClassifier(n_estimators=70, oob_score=True, n_jobs=-1, random_state=101, max_features = None, min_samples_leaf = trees)
    model.fit(x_train, y_train)
    y_pred = model.predict(x_test)
    accuracy = np.mean(y_test==y_pred)
    results.append(accuracy)
plt.figure(figsize=(8,4))
pd.Series(results, min_samples_leaf_options).plot(color="darkred",marker="o")
# Final model with the tuned hyper-parameters, then evaluate on the test set.
rfm = RandomForestClassifier(n_estimators=70, oob_score=True, n_jobs=-1, random_state=101, max_features = None, min_samples_leaf = 30)
rfm.fit(x_train, y_train)
y_pred=rfm.predict(x_test)
# Pair up truth and prediction; threshold at 0.5 to get hard class labels.
test_calc = pd.concat([pd.DataFrame(y_test).reset_index(drop=True),pd.DataFrame(y_pred).reset_index(drop=True)],axis=1)
test_calc.rename(columns={0: 'predicted'}, inplace=True)
test_calc['predicted'] = test_calc['predicted'].apply(lambda x: 1 if x > 0.5 else 0)
# Confusion matrix and the usual derived metrics.
df_table = confusion_matrix(test_calc['y'],test_calc['predicted'])
print (df_table)
print('accuracy:', (df_table[0,0] + df_table[1,1]) / (df_table[0,0] + df_table[0,1] + df_table[1,0] + df_table[1,1]))
print ('precision:', df_table[1,1] / (df_table[1,1] + df_table[0,1]))
print('recall:', df_table[1,1] / (df_table[1,1] + df_table[1,0]))
p = df_table[1,1] / (df_table[1,1] + df_table[0,1])
r = df_table[1,1] / (df_table[1,1] + df_table[1,0])
print('f1 score: ', (2*p*r)/(p+r))
| 2.875
| 3
|
qucumber/observables/entanglement.py
|
silky/QuCumber
| 1
|
12780869
|
<reponame>silky/QuCumber
# Copyright 2018 PIQuIL - All Rights Reserved
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import torch
from .observable import ObservableBase
from qucumber.utils import cplx
def swap(s1, s2, A):
    """Exchange the columns listed in *A* between sample batches *s1* and *s2*.

    Mutates both tensors in place and returns them for convenience.
    """
    for col in A:
        # Clone both sides before assigning so neither write clobbers the other.
        s1[:, col], s2[:, col] = s2[:, col].clone(), s1[:, col].clone()
    return s1, s2
class RenyiEntropy(ObservableBase):
    r"""Estimator of the second Renyi entanglement entropy.

    Computes the 2nd Renyi entropy of the region A based on the SWAP operator.
    Ref: PhysRevLett.104.157201
    """
    def __init__(self):
        # Bookkeeping fields consumed by the ObservableBase machinery.
        self.name = "SWAP"
        self.symbol = "S"
    def apply(self, nn_state, samples, A):
        r"""Compute the entanglement entropy via the swap operator, an estimator
        for the 2nd Renyi entropy.

        The swap operator requires access to two identical copies of a wavefunction.
        In practice, this translates to the requirement of having two independent
        sets of samples from the wavefunction replicas. For this purpose, the batch
        of samples stored in the param samples is split into two subsets. Although
        this procedure is designed to break the autocorrelation between the samples,
        it must be used with caution. For a fully unbiased estimate of the
        entanglement entropy, the batch of samples needs to be built from two
        independent initializations of the wavefunction, each having a different
        random number generator.

        :param nn_state: The WaveFunction that drew the samples.
        :type nn_state: qucumber.nn_states.WaveFunction
        :param samples: A batch of samples to calculate the observable on.
                        Must be using the :math:`\sigma_i = 0, 1` convention.
        :type samples: torch.Tensor
        """
        samples = samples.to(device=nn_state.device).clone()
        # split the batch of samples into two equal batches
        # if their total number is odd, the last sample is ignored
        _ns = samples.shape[0] // 2
        samples1 = samples[:_ns, :]
        samples2 = samples[_ns : _ns * 2, :]
        # ket side: joint amplitude of the two independent replicas
        psi_ket1 = nn_state.psi(samples1)
        psi_ket2 = nn_state.psi(samples2)
        psi_ket = cplx.elementwise_mult(psi_ket1, psi_ket2)
        psi_ket_star = cplx.conjugate(psi_ket)
        # bra side: same samples with region A exchanged between the replicas
        samples1_, samples2_ = swap(samples1, samples2, A)
        psi_bra1 = nn_state.psi(samples1_)
        psi_bra2 = nn_state.psi(samples2_)
        psi_bra = cplx.elementwise_mult(psi_bra1, psi_bra2)
        psi_bra_star = cplx.conjugate(psi_bra)
        # S_2 = -log <SWAP>, estimated as the mean amplitude ratio
        EE = -torch.log(cplx.elementwise_division(psi_bra_star, psi_ket_star).mean(1))
        return EE
| 2.21875
| 2
|
galibrate/__init__.py
|
blakeaw/GAlibrate
| 6
|
12780870
|
<gh_stars>1-10
"""Initialize the galibrate package.
"""
from .gao import GAO
| 0.964844
| 1
|
examples/convert_text_to_paths.py
|
CatherineH/svgpathtools
| 2
|
12780871
|
from xml.dom.minidom import parseString
from svgpathtools import svgdoc2paths, wsvg
# Example SVG exercising rects, lines, circles and a text element; used to
# demonstrate converting every SVG element (including text) into path objects.
example_text = '<svg>' \
    ' <rect x="100" y="100" height="200" width="200" style="fill:#0ff;" />' \
    ' <line x1="200" y1="200" x2="200" y2="300" />' \
    ' <line x1="200" y1="200" x2="300" y2="200" />' \
    ' <line x1="200" y1="200" x2="100" y2="200" />' \
    ' <line x1="200" y1="200" x2="200" y2="100" />' \
    ' <circle cx="200" cy="200" r="30" style="fill:#00f;" />' \
    ' <circle cx="200" cy="300" r="30" style="fill:#0f0;" />' \
    ' <circle cx="300" cy="200" r="30" style="fill:#f00;" />' \
    ' <circle cx="100" cy="200" r="30" style="fill:#ff0;" />' \
    ' <circle cx="200" cy="100" r="30" style="fill:#f0f;" />' \
    ' <text x="50" y="50" font-size="24">' \
    ' Testing SVG </text></svg>'
# Parse the XML, extract all elements as paths, and write them back out as SVG.
doc = parseString(example_text)
paths, attributes = svgdoc2paths(doc)
wsvg(paths)
| 2.859375
| 3
|
src/biokbase/narrative/services/invocation_tools.py
|
teharrison/narrative
| 0
|
12780872
|
"""
invocation functions for all
"""
__author__ = '<NAME>'
__date__ = '6/18/14'
__version__ = '0.5'
## Imports
import re
import json
import time
import os
import base64
import urllib
import urllib2
import cStringIO
import requests
import datetime
from string import Template
from collections import defaultdict
# Local
from biokbase.narrative.common.service import init_service, method, finalize_service
from biokbase.narrative.common import kbtypes
from biokbase.InvocationService.Client import InvocationService
from biokbase.shock import Client as shockService
## Globals
VERSION = (0, 0, 1)
NAME = "KBase Commands"
class URLS:
    """Namespace of service endpoints used by this narrative service."""
    shock = "http://shock.metagenomics.anl.gov"
    workspace = "https://kbase.us/services/ws"
    invocation = "https://kbase.us/services/invocation"
# Initialize
init_service(name=NAME, desc="Functions for executing KBase commands and manipulating the results", version=VERSION)
def _list_cmds():
    """Return the invocation service's list of valid command categories."""
    token = os.environ['KB_AUTH_TOKEN']
    invo = InvocationService(url=URLS.invocation, token=token)
    return invo.valid_commands()
def _run_invo(cmd):
    """Run *cmd* through the invocation service; return (stdout, stderr) as strings."""
    token = os.environ['KB_AUTH_TOKEN']
    invo = InvocationService(url=URLS.invocation, token=token)
    stdout, stderr = invo.run_pipeline("", cmd, [], 0, '/')
    # run_pipeline returns lists of lines; flatten them into single strings.
    return "".join(stdout), "".join(stderr)
def _list_files(d):
    """List files in directory *d* of the invocation file space."""
    token = os.environ['KB_AUTH_TOKEN']
    invo = InvocationService(url=URLS.invocation, token=token)
    _, files = invo.list_files("", '/', d)
    return files
def _mv_file(old, new):
    """Rename *old* to *new* inside the invocation file space."""
    token = os.environ['KB_AUTH_TOKEN']
    invo = InvocationService(url=URLS.invocation, token=token)
    invo.rename_file("", '/', old, new)
    return
def _rm_file(f):
    """Delete file *f* from the invocation file space."""
    token = os.environ['KB_AUTH_TOKEN']
    invo = InvocationService(url=URLS.invocation, token=token)
    invo.remove_files("", '/', f)
    return
def _get_invo(name, binary=False):
    """Fetch file *name* from the invocation space by routing it through Shock.

    Returns (content, error_flag); on failure content holds the stderr text.
    """
    # upload from invo server to Shock, which gives us a downloadable node id
    stdout, stderr = _run_invo("mg-upload2shock %s %s"%(URLS.shock, name))
    if stderr:
        return stderr, True
    node = json.loads(stdout)
    # get file content from shock
    return _get_shock_data(node['id'], binary=binary), False
def _get_shock_data(nodeid, binary=False):
    """Download the content of Shock node *nodeid* as a string (or bytes)."""
    token = os.environ['KB_AUTH_TOKEN']
    shock = shockService(URLS.shock, token)
    return shock.download_to_string(nodeid, binary=binary)
@method(name="Execute KBase Command")
def _execute_command(meth, command):
    """Execute given KBase command.

    :param command: command to run
    :type command: kbtypes.Unicode
    :ui_name command: Command
    :return: Results
    :rtype: kbtypes.Unicode
    :output_widget: DisplayTextWidget
    """
    meth.stages = 2
    if not command:
        raise Exception("Command is empty.")
    # BUG FIX: str.replace returns a new string (strings are immutable); the
    # original discarded the result, so '$workspace' was never substituted.
    command = command.replace('$workspace', os.environ['KB_WORKSPACE_ID'])
    meth.advance("Running Command")
    stdout, stderr = _run_invo(command)
    if (stdout == '') and (stderr == ''):
        stdout = 'Your command executed successfully'
    meth.advance("Displaying Output")
    return json.dumps({'text': stdout, 'error': stderr})
@method(name="View KBase Commands")
def _view_cmds(meth):
    """View available KBase commands.

    :return: Command List
    :rtype: kbtypes.Unicode
    :output_widget: CategoryViewWidget
    """
    meth.stages = 2
    meth.advance("Retrieving Commands")
    cmd_list = _list_cmds()
    meth.advance("Displaying Output")
    # Sort categories alphabetically and flatten each to its command names.
    cmd_sort = sorted(cmd_list, key=lambda k: k['title'])
    cmd_data = []
    for cat in cmd_sort:
        data = {'title': cat['title'], 'items': []}
        for c in cat['items']:
            data['items'].append(c['cmd'])
        cmd_data.append(data)
    return json.dumps({'data': cmd_data})
@method(name="View Files")
def _view_files(meth, sortby):
    """View your files in temp invocation file space.

    NOTE(review): the "View PNG File" function below is also named
    ``_view_files`` and shadows this one at module level; the @method
    registration has already run by then, but one of them should be renamed.

    :param sortby: sort files by name or date, default is name
    :type sortby: kbtypes.Unicode
    :ui_name sortby: Sort By
    :default sortby: name
    :return: File List
    :rtype: kbtypes.Unicode
    :output_widget: GeneTableWidget
    """
    meth.stages = 2
    meth.advance("Retrieving File List")
    file_list = _list_files("")
    meth.advance("Displaying Output")
    # get datetime objects so sorting by date works chronologically
    for f in file_list:
        f['mod_date'] = datetime.datetime.strptime(f['mod_date'], "%b %d %Y %H:%M:%S")
    # sort: newest-first for dates, alphabetical otherwise
    if sortby == 'date':
        file_sort = sorted(file_list, key=lambda k: k['mod_date'], reverse=True)
    else:
        file_sort = sorted(file_list, key=lambda k: k['name'])
    # output as a header row plus one row per file
    file_table = [['name', 'size', 'timestamp']]
    for f in file_sort:
        file_table.append([ f['name'], f['size'], f['mod_date'].ctime() ])
    return json.dumps({'table': file_table})
@method(name="View PNG File")
def _view_files(meth, afile):
    """View a .png image file from temp invocation file space.

    NOTE(review): this function reuses the name ``_view_files`` and shadows
    the "View Files" function above at module level; registration via
    @method still works, but the duplicate name should be fixed.

    :param afile: file to display
    :type afile: kbtypes.Unicode
    :ui_name afile: File
    :return: File List
    :rtype: kbtypes.Unicode
    :output_widget: ImageViewWidget
    """
    meth.stages = 2
    if not afile:
        raise Exception("Missing file name.")
    if not afile.endswith('.png'):
        raise Exception("Invalid file type.")
    meth.advance("Retrieving Content")
    content, err = _get_invo(afile, binary=True)
    meth.advance("Displaying Image")
    if err:
        raise Exception(content)
    # The widget expects base64-encoded image bytes.
    b64png = base64.b64encode(content)
    return json.dumps({'type': 'png', 'width': '600', 'data': b64png})
@method(name="Download File")
def _download_file(meth, afile):
    """Download a file from temp invocation file space.

    :param afile: file to download
    :type afile: kbtypes.Unicode
    :ui_name afile: File
    :return: Status
    :rtype: kbtypes.Unicode
    :output_widget: DownloadFileWidget
    """
    meth.stages = 3
    if not afile:
        raise Exception("Missing file name.")
    meth.advance("Validating Filename")
    # Confirm the file exists before fetching, so the user gets a clear error.
    file_list = _list_files("")
    has_file = False
    for f in file_list:
        if f['name'] == afile:
            has_file = True
            break
    if not has_file:
        raise Exception("The file '"+afile+"' does not exist")
    meth.advance("Retrieving Content")
    content, err = _get_invo(afile, binary=False)
    if err:
        raise Exception(content)
    meth.advance("Creating Download")
    return json.dumps({'data': content, 'name': afile})
@method(name="Upload File")
def _upload_file(meth):
    """Upload a file to temp invocation file space.

    The actual upload is performed client-side by the widget; this method
    only hands it the service URL and the auth token.

    :return: Status
    :rtype: kbtypes.Unicode
    :output_widget: UploadFileWidget
    """
    meth.stages = 1
    meth.advance("Creating Upload")
    return json.dumps({'url': URLS.invocation, 'auth': {'token': os.environ['KB_AUTH_TOKEN']}})
@method(name="Rename File")
def _rename_file(meth, old, new):
    """Rename a file in temp invocation file space.

    :param old: old filename
    :type old: kbtypes.Unicode
    :ui_name old: Old
    :param new: new filename
    :type new: kbtypes.Unicode
    :ui_name new: New
    :return: Status
    :rtype: kbtypes.Unicode
    :output_widget: DisplayTextWidget
    """
    meth.stages = 1
    if not (old and new):
        raise Exception("Missing file names.")
    meth.advance("Renaming File")
    _mv_file(old, new)
    return json.dumps({'text': '%s changed to %s'%(old,new)})
@method(name="Delete File")
def _delete_file(meth, afile):
    """Delete a file from temp invocation file space.

    :param afile: file to delete
    :type afile: kbtypes.Unicode
    :ui_name afile: File
    :return: Status
    :rtype: kbtypes.Unicode
    :output_widget: DisplayTextWidget
    """
    meth.stages = 1
    if not afile:
        raise Exception("Missing file name.")
    meth.advance("Deleting File")
    _rm_file(afile)
    return json.dumps({'text': 'removed '+afile})
# Finalization
finalize_service()
| 2.3125
| 2
|
face_ae_window.py
|
hujunchina/FaceAE
| 0
|
12780873
|
<filename>face_ae_window.py
import sys
from numpy import asarray
from face_ae_const import CONST_VAL
from cv2 import cvtColor, COLOR_RGB2BGR
from PIL import (ImageQt)
from PyQt5.QtWidgets import (QApplication, QWidget, QLabel, QFormLayout, QGridLayout, QLineEdit, QPushButton,QFileDialog)
from PyQt5.QtGui import (QPainter, QPen, QPixmap)
from PyQt5.QtCore import (Qt, QRect, pyqtSignal)
GLOBAL_VAL = {
'isDraw': False
}
# 自定义显示图片的窗口,并实现鼠标拖动截图
class ImgPanel(QLabel):
    """Image display panel that lets the user drag a rectangle to crop the image."""
    # 1. Define a signal fired after the mouse is released over a selection
    mouseReleased = pyqtSignal(list)
    def __init__(self):
        super().__init__()
        # Selection rectangle corners: press point (x0, y0), drag point (x1, y1).
        self.x0 = 0
        self.y0 = 0
        self.x1 = 0
        self.y1 = 0
        self.isPaint = False
        self.checkDraw()
    def mousePressEvent(self, event):
        # Begin a new selection at the press location.
        self.checkDraw()
        self.isPaint = True
        self.x0 = event.x()
        self.y0 = event.y()
    # Entry point for refreshing the image and processing the selection
    def mouseReleaseEvent(self, event):
        self.checkDraw()
        self.clearRect()
        # Crop the currently displayed pixmap to the drawn rectangle.
        self.img_cut = self.pixmap().copy(self.rect)
        self.setPixmap(self.img_cut)
        self.setAlignment(Qt.AlignCenter)
        # screen = QApplication.primaryScreen()
        # if screen is not None:
        #     self.img_screen = screen.grabWindow(0, self.x0, self.y0, abs(self.x1-self.x0), abs(self.y1-self.y0))
        #     self.setPixmap(self.img_screen)
        #     self.setScaledContents(False)
        img_pil = ImageQt.fromqpixmap(self.pixmap()) # qpixmap to image
        img_cv = cvtColor(asarray(img_pil), COLOR_RGB2BGR) # image to cv2
        # 2. Emit the mean pixel value of the crop to any connected listeners
        self.mouseReleased.emit([img_cv.mean()])
    def mouseMoveEvent(self, event):
        # Track the drag point and repaint the rubber-band rectangle.
        self.checkDraw()
        if self.isPaint:
            self.x1 = event.x()
            self.y1 = event.y()
            self.update()
    def paintEvent(self, event):
        super().paintEvent(event)
        self.checkDraw()
        # Recompute the selection rectangle and draw it as a blue outline.
        self.rect = QRect(self.x0, self.y0, abs(self.x1-self.x0), abs(self.y1-self.y0))
        painter = QPainter(self)
        painter.setPen(QPen(Qt.blue, 2, Qt.SolidLine))
        painter.drawRect(self.rect)
    def clearRect(self):
        # Reset the selection state and disable drawing until re-enabled.
        self.x0 = 0
        self.y0 = 0
        self.x1 = 0
        self.y1 = 0
        self.isPaint = False
        GLOBAL_VAL['isDraw'] = False
        self.update()
    def checkDraw(self):
        # Honour the global draw flag: drawing is only allowed after an image
        # has been loaded (the flag is set by the main window).
        if GLOBAL_VAL['isDraw'] is False:
            self.isPaint = False
        pass
class MainWindow(QWidget):
    """Main application window: an image-path bar with open/reset buttons,
    the drag-to-crop image panel on the left, and an image-statistics form
    on the right. Window geometry and labels come from CONST_VAL."""
    def __init__(self):
        super().__init__()
        self.setWindowTitle(CONST_VAL['WIN_TITLE'])
        self.resize(CONST_VAL['WIN_W'], CONST_VAL['WIN_H'])
        self.move(CONST_VAL['INIT_POSITION_Y'], CONST_VAL['INIT_POSITION_X'])
        self.init_layout()
        # Path of the currently opened image; None until a file is chosen.
        self.img_url_val = None
    def init_layout(self):
        """Build the widget tree: path row on top, image panel left,
        statistics form right."""
        self.grid = QGridLayout()
        self.img_url = QLineEdit()
        self.img_open_btn = QPushButton(CONST_VAL['OPEN_IMG'])
        self.img_open_btn.clicked.connect(self.open_file_slot)
        self.img_lbl = ImgPanel()
        self.img_lbl.mouseReleased.connect(self.img_info_slot)
        self.img_lbl.setFixedHeight(CONST_VAL['IMG_MAX_H'])
        self.img_lbl.setFixedWidth(CONST_VAL['IMG_MAX_W'])
        self.img_reset_btn = QPushButton(CONST_VAL['RESET_IMG'], self.img_lbl)
        self.img_reset_btn.clicked.connect(self.img_reset_slot)
        self.right_lbl = QLabel()
        self.img_info_hbox = QFormLayout(self.right_lbl)
        # self.img_info_hbox.setSpacing(20)
        # Grid placement: row 0 is the path bar plus the open button,
        # rows 1-3 hold the image panel (left) and the info form (right).
        self.grid.addWidget(self.img_url, 0, 0, 1, 3)
        self.grid.addWidget(self.img_open_btn, 0, 3, 1, 1)
        self.grid.addWidget(self.img_lbl, 1, 0, 3, 3)
        self.grid.addWidget(self.right_lbl, 1, 3, 3, 1)
        # Image-statistics rows on the right-hand side.
        self.img_mean_lbl = QLineEdit("00000")
        self.img_info_hbox.addRow(CONST_VAL['MEAN_TXT'], self.img_mean_lbl)
        self.img_info_hbox.addRow(CONST_VAL['CLEAR_TXT'], QLineEdit("00000"))
        self.img_info_hbox.addRow(CONST_VAL['BALANCE_TXT'], QLineEdit("00000"))
        self.setLayout(self.grid)
    def img_info_slot(self, data):
        """Receive [mean] emitted by ImgPanel and show it in the form."""
        mean_msg = "{:.5f}".format(data[0])
        self.img_mean_lbl.setText(mean_msg)
    def _show_scaled_image(self, img_path):
        """Load img_path, scale it to the panel width and display it.

        Shared helper for the open/reset slots (the original duplicated
        this logic in both)."""
        img_pixmap = QPixmap(img_path)
        if img_pixmap.isNull():
            # Unreadable image: nothing to display (also avoids dividing
            # by a zero width below).
            return
        ratio = CONST_VAL['IMG_MAX_W'] / img_pixmap.width()
        # QPixmap.scaled() requires int arguments; the original passed the
        # float products directly, which raises a TypeError under PyQt5.
        img_resize = img_pixmap.scaled(int(img_pixmap.width() * ratio),
                                       int(img_pixmap.height() * ratio))
        self.img_lbl.setPixmap(img_resize)
        self.img_lbl.setAlignment(Qt.AlignLeft)
        # self.img_lbl.setScaledContents(True)
    # Slot for the "open image" button.
    def open_file_slot(self):
        GLOBAL_VAL['isDraw'] = True
        img_name = QFileDialog.getOpenFileName(self, CONST_VAL['OPEN_IMG'], CONST_VAL['OPEN_IMG_START_FOLD'])
        self.img_url.setText(img_name[0])
        self.img_url_val = img_name[0]
        print(img_name[0])
        if img_name[0]:
            self._show_scaled_image(img_name[0])
    # Slot for the "reset image" button: reload the original file, or fall
    # back to the open dialog when nothing has been opened yet.
    def img_reset_slot(self):
        GLOBAL_VAL['isDraw'] = True
        if self.img_url_val:
            self._show_scaled_image(self.img_url_val)
        else:
            self.open_file_slot()
| 2.34375
| 2
|
pmutt/statmech/vib.py
|
wittregr/pMuTT
| 28
|
12780874
|
# -*- coding: utf-8 -*-
import numpy as np
from scipy.integrate import quad
from pmutt import _ModelBase
from pmutt import constants as c
from pmutt.io.json import remove_class
class HarmonicVib(_ModelBase):
    """Vibrational modes using the harmonic approximation. Equations used
    sourced from:

    - <NAME>. An Introduction to Applied Statistical Thermodynamics;
      <NAME> & Sons, 2010.

    Attributes
    ----------
    vib_wavenumbers : list of float
        Vibrational wavenumbers (:math:`\\tilde{\\nu}`) in 1/cm
    imaginary_substitute : float, optional
        If this value is set, imaginary frequencies are substituted with
        this value for calculations. Otherwise, imaginary frequencies are
        ignored. Default is None
    """

    def __init__(self, vib_wavenumbers=(), imaginary_substitute=None):
        # The default is an immutable tuple instead of the original mutable
        # list literal ([]); np.array(()) behaves identically and avoids
        # the shared mutable-default-argument pitfall.
        self.imaginary_substitute = imaginary_substitute
        self.vib_wavenumbers = np.array(vib_wavenumbers)

    @property
    def vib_wavenumbers(self):
        return self._vib_wavenumbers

    @vib_wavenumbers.setter
    def vib_wavenumbers(self, val):
        # Cache the cleaned wavenumbers (imaginary modes dropped or
        # substituted) and the corresponding vibrational temperatures.
        self._vib_wavenumbers = val
        self._valid_vib_wavenumbers = _get_valid_vib_wavenumbers(
            wavenumbers=val, substitute=self.imaginary_substitute)
        self._valid_vib_temperatures = c.wavenumber_to_temp(
            self._valid_vib_wavenumbers)

    def get_q(self, T, include_ZPE=True):
        """Calculates the partition function

        :math:`q^{vib}=\\prod_i \\frac{\\exp({-\\frac{\\Theta_{V,i}}{2T}})}
        {1-\\exp({-\\frac{\\Theta_{V,i}}{T}})}` if include_ZPE = True

        :math:`q^{vib}=\\prod_i \\frac{1}
        {1-\\exp({-\\frac{\\Theta_{V,i}}{T}})}` if include_ZPE = False

        Parameters
        ----------
        T : float
            Temperature in K
        include_ZPE : bool, optional
            If True, includes the zero-point energy term
        Returns
        -------
        q_vib : float
            Vibrational partition function
        """
        vib_dimless = self._valid_vib_temperatures / T
        if include_ZPE:
            qs = np.exp(-vib_dimless / 2.) / (1. - np.exp(-vib_dimless))
        else:
            qs = 1. / (1. - np.exp(-vib_dimless))
        return np.prod(qs)

    def get_CvoR(self, T):
        """Calculates the dimensionless heat capacity at constant volume

        :math:`\\frac{C_V^{vib}}{R}=\\sum_i \\bigg(\\frac{\\Theta_{V,i}}{2T}
        \\bigg)^2 \\frac{1}{\\big(\\sinh{\\frac{\\Theta_{V,i}}{2T}}\\big)^2}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CvoR_vib : float
            Vibrational dimensionless heat capacity at constant volume
        """
        vib_dimless = self._valid_vib_temperatures / T
        # Per-mode contributions summed directly (the original wrapped this
        # expression in a redundant extra np.array([...]) layer).
        CvoRs = (0.5 * vib_dimless)**2 * (1. / np.sinh(vib_dimless / 2.))**2
        return np.sum(CvoRs)

    def get_CpoR(self, T):
        """Calculates the dimensionless heat capacity at constant pressure

        :math:`\\frac{C_P^{vib}}{R}=\\frac{C_V^{vib}}{R}` for vibrations.

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CpoR_vib : float
            Vibrational dimensionless heat capacity at constant pressure
        """
        return self.get_CvoR(T=T)

    def get_ZPE(self):
        """Calculates the zero point energy

        :math:`ZPE=\\frac{1}{2}k_b\\sum_i \\Theta_{V,i}`

        Returns
        -------
        zpe : float
            Zero point energy in eV
        """
        return 0.5 * c.kb('eV/K') * np.sum(self._valid_vib_temperatures)

    def get_UoRT(self, T):
        """Calculates the dimensionless internal energy

        :math:`\\frac{U^{vib}}{RT}=\\sum_i \\bigg(\\frac{\\Theta_{V,i}}{2T}+
        \\frac{\\Theta_{V,i}}{T}\\frac{\\exp\\big(-\\frac{\\Theta_{V,i}}{T}
        \\big)}{1-\\exp\\big(-\\frac{\\Theta_{V_i}}{T}\\big)}\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        UoRT_vib : float
            Vibrational dimensionless internal energy
        """
        vib_dimless = self._valid_vib_temperatures / T
        UoRT = vib_dimless / 2. \
            + vib_dimless * np.exp(-vib_dimless) / (1. - np.exp(-vib_dimless))
        return np.sum(UoRT)

    def get_HoRT(self, T):
        """Calculates the dimensionless enthalpy

        :math:`\\frac{H^{vib}}{RT}=\\frac{U^{vib}}{RT}` for vibrations.

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        HoRT_vib : float
            Vibrational dimensionless enthalpy
        """
        return self.get_UoRT(T=T)

    def get_SoR(self, T):
        """Calculates the dimensionless entropy

        :math:`\\frac{S^{vib}}{R}=\\sum_i \\frac{\\Theta_{V,i}}{T}\\frac{\\exp
        \\big(-\\frac{\\Theta_{V,i}}{T}\\big)}{1-\\exp\\big(-\\frac{
        \\Theta_{V,i}}{T}\\big)}-\\ln \\bigg(1-\\exp\\big(-\\frac{
        \\Theta_{V,i}}{T}\\big)\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        SoR_vib : float
            Vibrational dimensionless entropy
        """
        vib_dimless = self._valid_vib_temperatures / T
        SoR = vib_dimless * np.exp(-vib_dimless) / (1. - np.exp(-vib_dimless)) \
            - np.log(1. - np.exp(-vib_dimless))
        return np.sum(SoR)

    def get_FoRT(self, T):
        """Calculates the dimensionless Helmholtz energy

        :math:`\\frac{A^{vib}}{RT}=\\frac{U^{vib}}{RT}-\\frac{S^{vib}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        FoRT_vib : float
            Vibrational dimensionless Helmholtz energy
        """
        return self.get_UoRT(T=T) - self.get_SoR(T=T)

    def get_GoRT(self, T):
        """Calculates the dimensionless Gibbs energy

        :math:`\\frac{G^{vib}}{RT}=\\frac{H^{vib}}{RT}-\\frac{S^{vib}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        GoRT_vib : float
            Vibrational dimensionless Gibbs energy
        """
        return self.get_HoRT(T=T) - self.get_SoR(T=T)

    def to_dict(self):
        """Represents object as dictionary with JSON-accepted datatypes

        Returns
        -------
        obj_dict : dict
        """
        return {
            'class': str(self.__class__),
            'vib_wavenumbers': list(self.vib_wavenumbers),
            'imaginary_substitute': self.imaginary_substitute
        }

    @classmethod
    def from_dict(cls, json_obj):
        """Recreate an object from the JSON representation.

        Parameters
        ----------
        json_obj : dict
            JSON representation
        Returns
        -------
        HarmonicVib : HarmonicVib object
        """
        json_obj = remove_class(json_obj)
        return cls(**json_obj)

    def print_calc_wavenumbers(self):
        """Prints the wavenumbers that will be used in a thermodynamic
        calculation. If ``self.imaginary_substitute`` is a float, then
        imaginary frequencies are replaced with that value. Otherwise,
        imaginary frequencies are ignored."""
        print(self._valid_vib_wavenumbers)
class QRRHOVib(_ModelBase):
    """Vibrational modes using the Quasi Rigid Rotor Harmonic Oscillator
    approximation. Equations source from:

    * <NAME>.; <NAME>.; <NAME>.; <NAME>.; <NAME>.
      Phys. Chem. C 2015, 119 (4), 1840-1850.
    * <NAME>. - A Eur. J. 2012, 18 (32), 9955-9964.

    Attributes
    ----------
    vib_wavenumbers : list of float
        Vibrational wavenumbers (:math:`\\tilde{\\nu}`) in 1/cm
    Bav : float, optional
        Average molecular moment of inertia as a limiting value of small
        wavenumbers. Default is 1.e-44 kg m2
    v0 : float, optional
        Wavenumber to scale vibrations. Default is 100 cm :sup:`-1`
    alpha : int, optional
        Power to raise ratio of wavenumbers. Default is 4
    imaginary_substitute : float, optional
        If this value is set, imaginary frequencies are substituted with
        this value for calculations. Otherwise, imaginary frequencies are
        ignored. Default is None
    """

    def __init__(self,
                 vib_wavenumbers,
                 Bav=1.e-44,
                 v0=100.,
                 alpha=4,
                 imaginary_substitute=None):
        self.Bav = Bav
        self.v0 = v0
        self.alpha = alpha
        self.imaginary_substitute = imaginary_substitute
        # Setting this property also populates the cached derived arrays.
        self.vib_wavenumbers = vib_wavenumbers

    @property
    def vib_wavenumbers(self):
        return self._vib_wavenumbers

    @vib_wavenumbers.setter
    def vib_wavenumbers(self, val):
        # Cache cleaned wavenumbers, vibrational temperatures and the
        # qRRHO scaling factors/inertias derived from them.
        self._vib_wavenumbers = val
        self._valid_vib_wavenumbers = _get_valid_vib_wavenumbers(
            wavenumbers=val, substitute=self.imaginary_substitute)
        self._valid_vib_temperatures = c.wavenumber_to_temp(
            self._valid_vib_wavenumbers)
        self._valid_scaled_wavenumbers = self._get_scaled_wavenumber()
        self._valid_scaled_inertia = self._get_scaled_inertia()

    def _get_scaled_wavenumber(self):
        """Calculates the scaled wavenumber determining mixture of RRHO to
        add.

        :math:`\\omega = \\frac {1}{1 + (\\frac{\\nu_0}{\\nu})^\\alpha}`

        Returns
        -------
        scaled_wavenumber : float
            Scaled wavenumber
        """
        return 1. / (1. + (self.v0 / self._valid_vib_wavenumbers)**self.alpha)

    def _get_scaled_inertia(self):
        """Calculates the scaled moment of inertia.

        :math:`\\mu'=\\frac {\\mu B_{av}} {\\mu + B_{av}}`

        Returns
        -------
        mu1 : float
            Scaled moment of inertia in kg*m2
        """
        mu = c.wavenumber_to_inertia(self._valid_vib_wavenumbers)
        return mu * self.Bav / (mu + self.Bav)

    def get_q(self, T=None):
        """Calculates the partition function.

        Not implemented for the qRRHO model. The optional ``T`` parameter
        keeps the signature consistent with the other vibrational models.

        Returns
        -------
        q_vib : float
            Vibrational partition function
        """
        raise NotImplementedError()

    def get_CvoR(self, T):
        """Calculates the dimensionless heat capacity at constant volume

        :math:`\\frac {C_{v}^{qRRHO}}{R} = \\sum_{i}\\omega_i\\frac{C_{v,i}
        ^{RRHO}}{R} + \\frac{1}{2}(1-\\omega_i)`

        :math:`\\frac{C_{v}^{RRHO}}{R} = \\sum_{i}\\exp \\bigg(-\\frac{
        \\Theta_i}{T}\\bigg) \\bigg(\\frac{\\Theta_i}{T}\\frac{1}{1-\\exp(-
        \\frac{\\Theta_i}{T})}\\bigg)^2`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CvoR_vib : float
            Vibrational dimensionless heat capacity at constant volume
        """
        CvoR = []
        vib_dimless = self._valid_vib_temperatures / T
        for vib_dimless_i, w_i in zip(vib_dimless,
                                      self._valid_scaled_wavenumbers):
            CvoR_RRHO = np.exp(-vib_dimless_i) \
                * (vib_dimless_i/(1. - np.exp(-vib_dimless_i)))**2
            # Mix the harmonic contribution with the free-rotor limit (1/2)
            # weighted by the scaling factor w_i.
            CvoR.append(w_i * CvoR_RRHO + 0.5 * (1. - w_i))
        return np.sum(CvoR)

    def get_CpoR(self, T):
        """Calculates the dimensionless heat capacity at constant pressure

        :math:`\\frac{C_{P}^{qRRHO}} {R} = \\frac{C_{V}^{qRRHO}} {R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CpoR_vib : float
            Vibrational dimensionless heat capacity at constant pressure
        """
        return self.get_CvoR(T=T)

    def get_ZPE(self):
        """Calculates the zero point energy

        :math:`ZPE=\\frac{1}{2}k_b\\sum_i \\omega_i\\Theta_{V,i}`

        Returns
        -------
        zpe : float
            Zero point energy in eV
        """
        return 0.5 * c.kb('eV/K') * np.dot(self._valid_vib_temperatures,
                                           self._valid_scaled_wavenumbers)

    def _get_UoRT_RRHO(self, T, vib_temperature):
        """Calculates the dimensionless RRHO contribution to internal energy

        Parameters
        ----------
        T : float
            Temperature in K
        vib_temperature : float
            Vibrational temperature in K
        Returns
        -------
        UoRT_RRHO : float
            Dimensionless internal energy of Rigid Rotor Harmonic Oscillator
        """
        vib_dimless = vib_temperature / T
        return vib_dimless * (0.5 + np.exp(-vib_dimless) /
                              (1. - np.exp(-vib_dimless)))

    def get_UoRT(self, T):
        """Calculates the dimensionless internal energy

        :math:`\\frac {U^{qRRHO}}{RT} = \\sum_{i}\\omega_i\\frac{U^{RRHO}}{RT}
        + \\frac{1}{2}(1-\\omega_i)`

        :math:`\\frac {U^{RRHO}_{i}}{RT} = \\frac{\\Theta_i}{T} \\bigg(
        \\frac{1}{2} + \\frac{\\exp(-\\frac{\\Theta_i}{T})}{1-\\exp(-\\frac{
        \\Theta_i}{T})}\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        UoRT_vib : float
            Vibrational dimensionless internal energy
        """
        UoRT_QRRHO = []
        for theta_i, w_i in zip(self._valid_vib_temperatures,
                                self._valid_scaled_wavenumbers):
            UoRT_RRHO = self._get_UoRT_RRHO(T=T, vib_temperature=theta_i)
            UoRT_QRRHO.append(w_i * UoRT_RRHO + (1. - w_i) * 0.5)
        return np.sum(UoRT_QRRHO)

    def get_HoRT(self, T):
        """Calculates the dimensionless enthalpy

        :math:`\\frac{H^{qRRHO}} {RT} = \\frac{U^{qRRHO}} {RT}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        HoRT_vib : float
            Vibrational dimensionless enthalpy
        """
        return self.get_UoRT(T=T)

    def _get_SoR_H(self, T, vib_temperature):
        """Calculates the dimensionless harmonic oscillator contribution to
        entropy

        Parameters
        ----------
        T : float
            Temperature in K
        vib_temperature : float
            Vibrational temperature in K
        Returns
        -------
        SoR_RHHO : float
            Dimensionless entropy of Rigid Rotor Harmonic Oscillator
        """
        return vib_temperature/T/(np.exp(vib_temperature/T)-1) \
            - np.log(1-np.exp(-vib_temperature/T))

    def _get_SoR_RRHO(self, T, vib_inertia):
        """Calculates the dimensionless RRHO contribution to entropy

        Parameters
        ----------
        T : float
            Temperature in K
        vib_inertia : float
            Vibrational inertia in kg m2
        Returns
        -------
        SoR_RHHO : float
            Dimensionless entropy of Rigid Rotor Harmonic Oscillator
        """
        return 0.5 + np.log(
            (8. * np.pi**3 * vib_inertia * c.kb('J/K') * T / c.h('J s')**2)**
            0.5)

    def get_SoR(self, T):
        """Calculates the dimensionless entropy

        :math:`\\frac{S^{qRRHO}}{R}=\\sum_i\\omega_i\\frac{S_i^{H}}{R}+(1-
        \\omega_i)\\frac{S_i^{RRHO}}{R}`

        :math:`\\frac {S^{RRHO}_i}{R} = \\frac{1}{2} + \\log \\bigg(\\bigg[
        \\frac{8\\pi^3\\mu'_ik_BT}{h^2}\\bigg]^{\\frac{1}{2}}\\bigg)`

        :math:`\\frac {S^{H}_i}{R}=\\bigg(\\frac{\\Theta_i}{T}\\bigg)\\frac{1}
        {\\exp(\\frac{\\Theta_i}{T})-1}-\\log\\bigg(1-\\exp(\\frac{-\\Theta_i}
        {T})\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        SoR_vib : float
            Vibrational dimensionless entropy
        """
        SoR_QRRHO = []
        for theta_i, mu_i, w_i in zip(self._valid_vib_temperatures,
                                      self._valid_scaled_inertia,
                                      self._valid_scaled_wavenumbers):
            SoR_H = self._get_SoR_H(T=T, vib_temperature=theta_i)
            SoR_RRHO = self._get_SoR_RRHO(T=T, vib_inertia=mu_i)
            SoR_QRRHO.append(w_i * SoR_H + (1. - w_i) * SoR_RRHO)
        return np.sum(SoR_QRRHO)

    def get_FoRT(self, T):
        """Calculates the dimensionless Helmholtz energy

        :math:`\\frac{A^{qRRHO}}{RT} = \\frac{U^{qRRHO}}{RT}-
        \\frac{S^{qRRHO}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        FoRT_vib : float
            Vibrational dimensionless Helmholtz energy
        """
        return self.get_UoRT(T=T) - self.get_SoR(T=T)

    def get_GoRT(self, T):
        """Calculates the dimensionless Gibbs energy

        :math:`\\frac{G^{qRRHO}}{RT} = \\frac{H^{qRRHO}}{RT}-
        \\frac{S^{qRRHO}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        GoRT_vib : float
            Vibrational dimensionless Gibbs energy
        """
        return self.get_HoRT(T=T) - self.get_SoR(T=T)

    def to_dict(self):
        """Represents object as dictionary with JSON-accepted datatypes

        Returns
        -------
        obj_dict : dict
        """
        return {
            'class': str(self.__class__),
            'vib_wavenumbers': list(self.vib_wavenumbers),
            'Bav': self.Bav,
            'v0': self.v0,
            'alpha': self.alpha,
            'imaginary_substitute': self.imaginary_substitute
        }

    @classmethod
    def from_dict(cls, json_obj):
        """Recreate an object from the JSON representation.

        Parameters
        ----------
        json_obj : dict
            JSON representation
        Returns
        -------
        QRRHOVib : QRRHOVib object
        """
        json_obj = remove_class(json_obj)
        return cls(**json_obj)

    def print_calc_wavenumbers(self):
        """Prints the wavenumbers that will be used in a thermodynamic
        calculation. If ``self.imaginary_substitute`` is a float, then
        imaginary frequencies are replaced with that value. Otherwise,
        imaginary frequencies are ignored."""
        print(
            _get_valid_vib_wavenumbers(wavenumbers=self.vib_wavenumbers,
                                       substitute=self.imaginary_substitute))
class EinsteinVib(_ModelBase):
    """Einstein model of a crystal. Equations used sourced from

    * <NAME>. An Introduction to Applied Statistical Thermodynamics;
      <NAME> & Sons, 2010.

    Attributes
    ----------
    einstein_temperature : float
        Einstein temperature (:math:`\\Theta_E`) in K
    interaction_energy : float, optional
        Interaction energy (:math:`u`) per atom in eV. Default is 0 eV
    """

    def __init__(self, einstein_temperature, interaction_energy=0.):
        self.einstein_temperature = einstein_temperature
        self.interaction_energy = interaction_energy

    def get_q(self, T):
        """Calculates the partition function

        :math:`q^{vib}=\\exp\\bigg({\\frac{-u}{k_BT}}\\bigg)\\bigg(\\frac{
        \\exp(-\\frac{\\Theta_E}{2T})}{1-\\exp(\\frac{-\\Theta_E}{T})}\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        q_vib : float
            Vibrational partition function
        """
        u = self.interaction_energy
        theta_E = self.einstein_temperature
        return np.exp(-u/c.kb('eV/K')/T) \
            * (np.exp(-theta_E/2./T)/(1. - np.exp(-theta_E/T)))

    def get_CvoR(self, T):
        """Calculates the dimensionless heat capacity at constant volume

        :math:`\\frac{C_V^{vib}}{R}=3\\bigg(\\frac{\\Theta_E}{T}\\bigg)^2
        \\frac{\\exp(-\\frac{\\Theta_E}{T})}{\\big(1-\\exp(\\frac{-
        \\Theta_E}{T})\\big)^2}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CvoR_vib : float
            Vibrational dimensionless heat capacity at constant volume
        """
        theta_E = self.einstein_temperature
        return 3. * (theta_E / T)**2 * np.exp(
            -theta_E / T) / (1 - np.exp(-theta_E / T))**2

    def get_CpoR(self, T):
        """Calculates the dimensionless heat capacity at constant pressure

        :math:`\\frac{C_P^{vib}}{R}=\\frac{C_V^{vib}}{R}` for vibrations.

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CpoR_vib : float
            Vibrational dimensionless heat capacity at constant pressure
        """
        return self.get_CvoR(T=T)

    def get_ZPE(self):
        """Calculates the zero point energy

        :math:`u^0_E=u+\\frac{3}{2}\\Theta_E k_B`

        Returns
        -------
        zpe : float
            Zero point energy in eV
        """
        return self.interaction_energy \
            + 1.5*self.einstein_temperature*c.kb('eV/K')

    def get_UoRT(self, T):
        """Calculates the dimensionless internal energy

        :math:`\\frac{U^{vib}}{RT}=\\frac{u^0_E}{k_BT}+3\\frac{\\Theta_E}{T}
        \\bigg(\\frac{\\exp(-\\frac{\\Theta_E}{T})}{1-\\exp(-\\frac{\\Theta_E}
        {T})}\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        UoRT_vib : float
            Vibrational dimensionless internal energy
        """
        theta_E = self.einstein_temperature
        return self.get_ZPE()/c.kb('eV/K')/T \
            + 3.*theta_E/T*np.exp(-theta_E/T)/(1. - np.exp(-theta_E/T))

    def get_HoRT(self, T):
        """Calculates the dimensionless enthalpy

        :math:`\\frac{H^{vib}}{RT}=\\frac{U^{vib}}{RT}` for vibrations.

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        HoRT_vib : float
            Vibrational dimensionless enthalpy
        """
        return self.get_UoRT(T=T)

    def get_SoR(self, T):
        """Calculates the dimensionless entropy

        :math:`\\frac{S^{vib}}{R}=3\\bigg(\\frac{\\Theta_E}{T}\\frac{\\exp\\big(
        \\frac{-\\Theta_E}{T}\\big)}{1-\\exp\\big(-\\frac{\\Theta_E}{T}\\big)}
        \\bigg)-\\ln\\bigg(1-\\exp\\big(\\frac{-\\Theta_E}{T}\\big)\\bigg)`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        SoR_vib : float
            Vibrational dimensionless entropy
        """
        theta_E = self.einstein_temperature
        exp_term = np.exp(-theta_E / T)
        return 3. * (theta_E / T * exp_term /
                     (1. - exp_term) - np.log(1. - exp_term))

    def get_FoRT(self, T):
        """Calculates the dimensionless Helmholtz energy

        :math:`\\frac{A^{vib}}{RT}=\\frac{U^{vib}}{RT}-\\frac{S^{vib}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        FoRT_vib : float
            Vibrational dimensionless Helmholtz energy
        """
        return self.get_UoRT(T=T) - self.get_SoR(T=T)

    def get_GoRT(self, T):
        """Calculates the dimensionless Gibbs energy

        :math:`\\frac{G^{vib}}{RT}=\\frac{H^{vib}}{RT}-\\frac{S^{vib}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        GoRT_vib : float
            Vibrational dimensionless Gibbs energy
        """
        return self.get_HoRT(T=T) - self.get_SoR(T=T)

    def to_dict(self):
        """Represents object as dictionary with JSON-accepted datatypes

        Returns
        -------
        obj_dict : dict
        """
        return {
            'class': str(self.__class__),
            'einstein_temperature': self.einstein_temperature,
            'interaction_energy': self.interaction_energy
        }

    @classmethod
    def from_dict(cls, json_obj):
        """Recreate an object from the JSON representation.

        Added for consistency with the other vibrational models in this
        module (HarmonicVib/QRRHOVib), which all pair ``to_dict`` with
        ``from_dict``.

        Parameters
        ----------
        json_obj : dict
            JSON representation
        Returns
        -------
        EinsteinVib : EinsteinVib object
        """
        json_obj = remove_class(json_obj)
        return cls(**json_obj)
class DebyeVib(_ModelBase):
    """Debye model of a crystal. Equations sourced from:

    * <NAME>. An Introduction to Applied Statistical Thermodynamics;
      <NAME> & Sons, 2010.

    Attributes
    ----------
    debye_temperature : float
        Debye temperature (:math:`\\Theta_D`) in K
    interaction_energy : float, optional
        Interaction energy (:math:`u`) per atom in eV. Default is 0 eV
    """

    def __init__(self, debye_temperature, interaction_energy=0.):
        # The docstring documents interaction_energy as optional with a
        # default of 0 eV; the original signature was missing the default.
        self.debye_temperature = debye_temperature
        self.interaction_energy = interaction_energy

    def get_q(self, T):
        """Calculate the partition function

        :math:`q^{vib} = \\exp\\bigg(-\\frac{u}{3k_B T} - \\frac{3}{8}
        \\frac{\\Theta_D}{T} - G\\big(\\frac{\\Theta_D}{T}\\big)\\bigg)`

        :math:`G\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
        \\Theta_D}\\bigg)^3\\int_0^{\\frac{\\Theta_D}{T}}x^2 \\ln
        \\bigg(1-e^{-x}\\bigg)dx`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        q : float
            Partition function
        """
        G = self._get_intermediate_fn(T=T, fn=self._G_integrand)
        return np.exp(-self.interaction_energy/3./c.kb('eV/K')/T
                      - 3./8.*self.debye_temperature/T - G)

    def get_CvoR(self, T):
        """Calculates dimensionless heat capacity (constant V)

        :math:`\\frac {C_V^{vib}}{R} = 3K\\bigg(\\frac{\\Theta_D}{T}\\bigg)`

        :math:`K\\bigg(\\frac{\\Theta_D}{T}\\bigg)=3\\bigg(\\frac{T}{\\Theta_D}
        \\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}}\\frac{x^4 e^x}{(e^x-1)^2}dx`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CvoR : float
            Dimensionless heat capacity (constant V)
        """
        K = self._get_intermediate_fn(T=T, fn=self._K_integrand)
        return 3. * K

    def get_CpoR(self, T):
        """Calculates dimensionless heat capacity (constant P)

        :math:`\\frac {C_P^{vib}}{R} = \\frac {C_V^{vib}}{R}` for
        vibrations.

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        CpoR : float
            Dimensionless heat capacity (constant P)
        """
        return self.get_CvoR(T=T)

    def get_UoRT(self, T):
        """Calculates dimensionless internal energy

        :math:`\\frac{U^{vib}}{RT} = \\frac{u_D^o}{RT} + 3F\\bigg(\\frac{
        \\Theta_D}{T}\\bigg)`

        :math:`F\\bigg(\\frac{\\Theta_D}{T}\\bigg) = 3\\bigg(\\frac{T}{
        \\Theta_D}\\bigg)^3 \\int_0^{\\frac{\\Theta_D}{T}} \\frac{x^3 e^x}
        {e^x-1} dx`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        UoRT : float
            Dimensionless internal energy
        """
        return self.get_ZPE()/c.kb('eV/K')/T \
            + 3.*self._get_intermediate_fn(T=T, fn=self._F_integrand)

    def get_HoRT(self, T):
        """Calculates dimensionless enthalpy

        :math:`\\frac{H^{vib}}{RT} = \\frac{U^{vib}}{RT}` for vibrations.

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        HoRT : float
            Dimensionless enthalpy
        """
        return self.get_UoRT(T=T)

    def get_SoR(self, T):
        """Calculates dimensionless entropy

        :math:`\\frac{S^{vib}}{R} = 3\\bigg[F\\bigg(\\frac{\\Theta_D}{T}\\bigg)
        - G\\bigg(\\frac{\\Theta_D}{T}\\bigg)\\bigg]`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        SoR : float
            Dimensionless entropy
        """
        F = self._get_intermediate_fn(T=T, fn=self._F_integrand)
        G = self._get_intermediate_fn(T=T, fn=self._G_integrand)
        return 3. * (F - G)

    def get_FoRT(self, T):
        """Calculates dimensionless Helmholtz energy

        :math:`\\frac{F^{vib}}{RT}=\\frac{U^{vib}}{RT}-\\frac{S^{vib}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        FoRT : float
            Dimensionless Helmholtz energy
        """
        return self.get_UoRT(T=T) - self.get_SoR(T=T)

    def get_GoRT(self, T):
        """Calculates dimensionless Gibbs energy

        :math:`\\frac{G^{vib}}{RT}=\\frac{H^{vib}}{RT}-\\frac{S^{vib}}{R}`

        Parameters
        ----------
        T : float
            Temperature in K
        Returns
        -------
        GoRT : float
            Dimensionless Gibbs energy
        """
        return self.get_HoRT(T=T) - self.get_SoR(T=T)

    def get_ZPE(self):
        """Calculate zero point energy

        :math:`u^o_D = u^o +\\frac{9}{8}R\\Theta_D`

        Returns
        -------
        zpe : float
            Zero point energy in eV
        """
        return self.interaction_energy \
            + 9./8.*c.R('eV/K')*self.debye_temperature

    def _G_integrand(self, x):
        """Integrand when evaluating intermediate function G.

        :math:`f(x) = x^2 \\ln \\bigg(1-e^{-x}\\bigg)`

        Parameters
        ----------
        x : float
            Variable of integration (:math:`\\Theta_D/T` at the upper limit)
        Returns
        -------
        f(x) : float
            Integrand evaluated at x
        """
        return np.log(1. - np.exp(-x)) * (x**2)

    def _K_integrand(self, x):
        """Integrand when evaluating intermediate function K.

        :math:`f(x) = \\frac {x^4 e^x}{(e^x -1)^2}`

        Parameters
        ----------
        x : float
            Variable of integration (:math:`\\Theta_D/T` at the upper limit)
        Returns
        -------
        f(x) : float
            Integrand evaluated at x
        """
        return (x**4) * np.exp(x) / (np.exp(x) - 1.)**2

    def _F_integrand(self, x):
        """Integrand when evaluating intermediate function F.

        :math:`f(x) = \\frac {x^3 e^x}{e^x -1}`

        Parameters
        ----------
        x : float
            Variable of integration (:math:`\\Theta_D/T` at the upper limit)
        Returns
        -------
        f(x) : float
            Integrand evaluated at x
        """
        return (x**3) * np.exp(x) / (np.exp(x) - 1.)

    def _get_intermediate_fn(self, T, fn):
        """Calculates the intermediate function (i.e. F, G, or K)

        :math:`F(x) = 3\\bigg(\\frac{T}{\\Theta_D}\\bigg)^3\\int_0^{\\frac
        {\\Theta_D}{T}} f(x) dx`

        Parameters
        ----------
        T : float
            Temperature in K
        fn : function
            Integrand function, f(x)
        Returns
        -------
        F : float
            Intermediate function evaluated at T
        """
        vib_dimless = self.debye_temperature / T
        integral = quad(func=fn, a=0., b=vib_dimless)[0]
        return 3. * integral / vib_dimless**3

    def to_dict(self):
        """Represents object as dictionary with JSON-accepted datatypes.

        Added for consistency with the other vibrational models in this
        module, which all support JSON round-tripping.

        Returns
        -------
        obj_dict : dict
        """
        return {
            'class': str(self.__class__),
            'debye_temperature': self.debye_temperature,
            'interaction_energy': self.interaction_energy
        }

    @classmethod
    def from_dict(cls, json_obj):
        """Recreate an object from the JSON representation.

        Parameters
        ----------
        json_obj : dict
            JSON representation
        Returns
        -------
        DebyeVib : DebyeVib object
        """
        json_obj = remove_class(json_obj)
        return cls(**json_obj)
def _get_valid_vib_wavenumbers(wavenumbers, substitute=None):
"""Returns wavenumbers to use for vibration calculations. Imaginary
frequencies are expected to be negative.
Parameters
----------
wavenumbers : list of float
Wavenumbers in 1/cm
substitute : float, optional
Value to use to replace imaginary frequencies. If not specified,
imaginary frequencies are ignored. Default is None
Returns
-------
wavenumbers_out : (N,) np.ndarray
Valid wavenumbers
"""
wavenumbers_out = []
for wavenumber in wavenumbers:
if wavenumber > 0.:
# Real wavenumbers always added
wavenumbers_out.append(wavenumber)
elif substitute is not None:
# Substitute added if imaginary frequency encountered
wavenumbers_out.append(substitute)
return np.array(wavenumbers_out)
def _get_vib_dimless(wavenumbers, T, substitute=None):
    """Calculates dimensionless temperatures for the wavenumbers and
    temperature specified

    Parameters
    ----------
    wavenumbers : (N,) np.ndarray
        Wavenumbers in 1/cm
    T : float
        Temperature in K
    substitute : float, optional
        Value to use to replace imaginary frequencies. If not specified,
        imaginary frequencies are ignored. Default is None
    Returns
    -------
    vib_dimless : (N,) np.ndarray
        Vibrational temperatures normalized by T
    """
    # Clean imaginary modes first, then convert to vibrational temperatures
    # and normalize by the requested temperature.
    cleaned_wavenumbers = _get_valid_vib_wavenumbers(wavenumbers=wavenumbers,
                                                     substitute=substitute)
    return c.wavenumber_to_temp(cleaned_wavenumbers) / T
| 2.625
| 3
|
main.py
|
libojia-aug/compound-calculator
| 0
|
12780875
|
import formula
# Arguments: annual interest rate (%), loan term (months), initial capital
# (yuan), total investment period (months), bad-debt rate
print(formula.annualIncome(22,12,10000,12,0))
| 2.0625
| 2
|
users/urls.py
|
BalighMehrez/share-school-books
| 0
|
12780876
|
<reponame>BalighMehrez/share-school-books
from django.urls import path,include
from . import views
from bookshop.views import index
from django.contrib.auth import views as auth_views
# URL routes for the users app: account registration/login, the shared
# dashboard (served by the bookshop index view) and the built-in logout.
urlpatterns = [
    path('account/register',views.register,name='register'),
    path('account/login',views.login,name='login'),
    path('dashboard',index,name='dashboard'),
    # Class-based logout view; renders the bookshop landing page afterwards.
    path('logout/',auth_views.LogoutView.as_view(template_name='bookshop/index.html'),name='logout'),
]
| 1.84375
| 2
|
python-skylark/skylark/__init__.py
|
xdata-skylark/libskylark
| 86
|
12780877
|
# TODO: It is unclear whether the following import should really be here, but
# without it Python just exits (complains of not initializing MPI).
import El
# Public submodules re-exported by the skylark package.
__all__ = ["io", "sketch", "ml", "nla", "base", "elemhelper", "lib", "errors"]
| 1.195313
| 1
|
v0.1/membrane_solvers/FCD-2D/tests/OsmoticProperties.py
|
drizdar/Propmod
| 3
|
12780878
|
<gh_stars>1-10
# Sanity-check script for the Pitzer-model osmotic-property helpers in
# formulas.py: computes each intermediate quantity for a 26 wt% NaCl
# solution at 25 C / 1.01325 bar, prints it, and finally asserts that the
# f.OsmoticProperties() convenience wrapper reproduces the same results.
# NOTE(review): `math` appears unused here.
import math
import formulas as f
# Operating conditions: temperature (K), pressure (bar), NaCl mass fraction.
T = 273.15 + 25
P = 1.01325
pc_wt = 0.26
print(f'Percent weight {pc_wt} kg NaCl / kg Total')
m_NaCl = f.mNaCl(pc_wt)
print(f'Mass of NaCl {m_NaCl} g')
Molal_NaCl = f.Molality(m_NaCl)
# Molal_NaCl = 6
# m_NaCl = Molal_NaCl*58.44277
print(f'Molality {Molal_NaCl} mol/kg')
# Pitzer model parameters.
D = f.RelativeDiffusivity(T, P)
print(f'Relative Diffusivity D = {D}')
A_phi = f.APhi(D, T)
print(f'A phi = {A_phi}')
I = f.IonicStrength(Molal_NaCl)
print(f'Iconic Strength I = {I}')
Beta_0_NaCl = f.Beta0NaCl(T, P)
print(f'Beta 0 MX = {Beta_0_NaCl}')
Beta_1_NaCl = f.Beta1NaCl(T, P)
print(f'Beta 1 MX = {Beta_1_NaCl}')
C_phi_NaCl = f.CphiNaCl(T, P)
print(f'C phi = {C_phi_NaCl}')
print(f'C = {C_phi_NaCl/2*1e3}')
gamma_MX = f.GammaPhi(A_phi,Beta_0_NaCl,Beta_1_NaCl, C_phi_NaCl, I,Molal_NaCl)
print(f'Ionic activity coefficient gamma NaCl {gamma_MX}')
phi = f.Phi(A_phi, Beta_0_NaCl, Beta_1_NaCl, C_phi_NaCl, I, Molal_NaCl)
print(f'Osmotic coefficient phi {phi}')
# Apparent molar volume / density of the solution.
A_v = f.Av(T)
print(f'A v = {A_v}')
B_V_NaCl = f.BVNaCl(T,P)
print(f'B V NaCl = {B_V_NaCl}')
C_V_NaCl = f.CVNaCl(T)
print(f'C V NaCl = {C_V_NaCl}')
V_0_NaCl = f.V0NaCl(T)
print(f'V 0 NaCl = {V_0_NaCl}')
rho_w = f.DensityWater(T) # kg/L
print(f'Density of water = {rho_w} kg/L')
V_phi_NaCl = f.VPhiNaCL(A_v, B_V_NaCl, C_V_NaCl, I, Molal_NaCl, T, V_0_NaCl)
print(f'V phi NaCl = {V_phi_NaCl}')
rho = f.ApparentDensity(Molal_NaCl, rho_w, V_phi_NaCl)
print(f'Apparent density rho = {rho} kg/L')
V = (m_NaCl + 1000)/rho
print(f'Apparent volume = {V} cm^3')
# Osmotic pressure via the Pitzer water activity vs. the van 't Hoff model.
a_w = f.WaterActivity(Molal_NaCl, phi)
print(f'Water activity coefficient a w = {a_w}')
MVW = f.MolarVolumeWater(rho_w)
print(f'Molar Volume of water {MVW} cm^3/mol')
M_NaCl = f.MolalityToMolarity(m_NaCl, rho)
print(f'Molarity of NaCl {M_NaCl} mol/L')
PI_w = f.OsmoticPressurePitzer(a_w, MVW, T)
print(f'Osmotic Pressure (Pitzer) {PI_w} bar')
PI_w2 = f.OsP(M_NaCl, 2, T)
print(f'Osmotic Pressure (van\'t Hoff) {PI_w2} bar')
print(f'Difference between van\'t Hoff and Pitzer {PI_w/PI_w2}')
# The bundled convenience wrapper must agree exactly with the step-by-step
# calculation above.
PI, rho1, C = f.OsmoticProperties(P, T, pc_wt)
assert PI_w == PI
assert rho == rho1
assert C == M_NaCl
| 2.21875
| 2
|
002. Add Two Numbers.py
|
yuzhangClaremont/gitbasics
| 0
|
12780879
|
# -*- coding: utf-8 -*-
# @Author: LC
# @Date: 2016-01-23 10:53:34
# @Last modified by: LC
# @Last Modified time: 2016-04-10 16:23:45
# @Email: <EMAIL>
#########################################
# Notes:
# - The linked-list class (ListNode) is predefined by the problem harness.
# - Remember to propagate the carry when adding the two lists.
###########################################
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    """Add two non-negative integers stored as reversed-digit linked lists."""

    def addTwoNumbers(self, l1, l2):
        """
        Digits are stored least-significant first, so we can add left to
        right while propagating a carry.

        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        dummy = ListNode(0)   # sentinel; result starts at dummy.next
        tail = dummy
        carry = 0
        while l1 or l2:
            digit_sum = carry
            if l1:
                digit_sum += l1.val
                l1 = l1.next
            if l2:
                digit_sum += l2.val
                l2 = l2.next
            carry, digit = divmod(digit_sum, 10)
            tail.next = ListNode(digit)
            tail = tail.next
        if carry:
            # A final carry adds one extra high-order digit.
            tail.next = ListNode(1)
        return dummy.next
| 3.875
| 4
|
dao/stock_def.py
|
zheng-zy/ot_root
| 0
|
12780880
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" etf class
"""
# import math
import gevent
from pb import base_pb2
# import quotation_def_pb2
__author__ = 'qinjing'
# RF_MUST = 0
# RF_ALLOW = 1
# RF_FORBIDDEN = 2
# class StockInfo:
# # 股票代码, 数量
# __slots__ = ['code', 'md']
#
# def __init__(self, code):
# self.code = code
# # self.price = qty
# self.md = None
# self.market = base_pb2.MKT_SH
#
# def __str__(self):
# return ('price %s:%s qty %s flag %d payment %d' % (self.bprc[0],
# self.sprc[0], self.qty, self.rflag, self.payment))
#
# def price(self, pl, bs_flag):
# if base_pb2.PRICE_2_PERCENT == pl:
# if base_pb2.OPR_BUY == bs_flag:
# price = self.prices[base_pb2.PRICE_CURRENT] * 1.02
# if price > self.prices[base_pb2.LIMIT_UP]:
# price = self.prices[base_pb2.LIMIT_UP]
# elif base_pb2.OPR_SELL == bs_flag:
# price = self.prices[base_pb2.PRICE_CURRENT] * 0.98
# if price < self.prices[base_pb2.LIMIT_DOWN]:
# price = self.prices[base_pb2.LIMIT_DOWN]
# else:
# print('error bs flag 0x%x' % (bs_flag))
# price = self.prices[base_pb2.PRICE_CURRENT]
# else:
# price = self.prices[pl]
#
# return price
# -------------------------------------------------------------------
class StockInfo(object):
    """Per-stock trading state: latest quotation snapshot plus order counters.

    Python 2 module (uses print statements).
    """

    __slots__ = ('stkcode', 'mkid', 'md', 'buy', 'sell', 'purchase',
                 'redeem', 'qty', 'opr')

    def __init__(self, code, mkid, market_data=None):
        # mkid: market id (e.g. base_pb2.MKT_SH / MKT_SZ / MKT_CF)
        self.mkid = mkid
        # md: latest market-data snapshot; stays None until a quote arrives
        self.md = market_data
        self.stkcode = code
        # Order counters (filled in elsewhere; 'opr' slot is never set here).
        self.buy = 0
        self.sell = 0
        self.purchase = 0
        self.redeem = 0
        # qty[etfcode] = etf_qty
        self.qty = {}

    def price(self, pl, bs_flag):
        """Derive an order price from price level ``pl`` and buy/sell flag.

        ``pl`` encodings (as exercised below):
          * > 100: per-mille-style offset, r = pl / 100000
          * 0 < pl < 1 or pl < 0: fractional offset from the match price
          * base_pb2 levels: LIMIT_UP/DOWN, PRICE_MATCH, ask/bid ladder
        Returns 1 if no quotation ever arrives (placeholder price).
        """
        # Wait (up to ~5s total) for a quotation before pricing.
        i = 0
        while self.md is None and i < 5:
            gevent.sleep(1)
            # print('%s no quotation %r' % (self.stkcode, self))
            i += 1
        else:
            # NOTE: this while/else always runs (there is no break above);
            # it only guards the "still no quote" case.
            if self.md is None:
                print('stock def %s price 1' % (self.stkcode))
                return 1
        if pl > 100:
            # Large pl encodes an offset in 1/100000 units around match.
            r = float(pl) / 100000
            if base_pb2.OPR_BUY == bs_flag:
                price = self.md.match * (1 + r)
                if price > self.md.high_limited:
                    price = self.md.high_limited
            elif base_pb2.OPR_SELL == bs_flag:
                price = self.md.match * (1 - r)
                if price < self.md.low_limited:
                    price = self.md.low_limited
            else:
                print('error bs flag 0x%x' % (bs_flag))
                price = self.md.ask_price
            # Trading suspended??? fall back to the previous close.
            if 0 == price:
                print 'price == 0 %s %d %d' % (self.stkcode, self.md.pre_close, self.md.match)
                price = self.md.pre_close
            return price
        if 1 > pl > 0 or pl < 0:
            # Fractional pl is used directly as an offset ratio.
            if base_pb2.OPR_BUY == bs_flag:
                price = self.md.match * (1 + pl)
                if price > self.md.high_limited:
                    price = self.md.high_limited
            elif base_pb2.OPR_SELL == bs_flag:
                price = self.md.match * (1 - pl)
                if price < self.md.low_limited:
                    price = self.md.low_limited
            else:
                print'error bs flag 0x%x' % (bs_flag)
                price = self.md.ask_price
            return price
        elif base_pb2.LIMIT_DOWN == pl:
            price = self.md.low_limited
        elif base_pb2.LIMIT_UP == pl:
            price = self.md.high_limited
        elif base_pb2.PRICE_MATCH == pl:
            price = self.md.match
        elif pl > base_pb2.PRICE_MATCH:
            # Ask-price ladder: S_1..S_n map onto ask_price[0..].
            price = self.md.ask_price[pl - base_pb2.S_1]
        elif pl >= base_pb2.B_10:
            # Bid-price ladder: B_1..B_10 map onto bid_price[0..].
            price = self.md.bid_price[base_pb2.B_1 - pl]
        else:
            price = self.md.match
            print'error price level %d' % (pl)
        return price
# -----------------------------------------------------------------------------
class EtfInfo(object):
    # 510050 (trading code) / 510051 (creation-redemption code)
    # Primary market, secondary market, cash component, estimated cash,
    # NAV per minimum creation/redemption unit, minimum creation/redemption
    # unit, cash-substitution ratio cap.
    __slots__ = ('etfcode', 'stcks', 'buy', 'sell', 'purchase', 'redeem',
                 'etf_base_info', 'count')

    def __init__(self, code, stklist=None):
        self.etfcode = code
        # Constituent stock codes grouped by market id.
        self.stcks = {}
        self.stcks[base_pb2.MKT_SZ] = []
        self.stcks[base_pb2.MKT_SH] = []
        self.stcks[base_pb2.MKT_CF] = []
        # self.stcks[base_pb2.MKT_SZ] = {}
        # self.stcks[base_pb2.MKT_SH] = {}
        # self.stcks[base_pb2.MKT_CF] = {}
        self.buy = 0
        self.sell = 0
        self.purchase = 0
        self.redeem = 0
        self.count = 0
        self.etf_base_info = None
        if stklist is not None:
            for stk_code in stklist:
                # NOTE(review): this branch is broken -- ``self.StockInEtf``
                # does not exist (the StockInEtf class below is commented out)
                # and ``self.etf_stks`` is not in __slots__ and is never
                # assigned, so passing a non-None ``stklist`` raises
                # AttributeError. Confirm the intended structure before fixing.
                stock = self.StockInEtf(stk_code, 100, 1, 10, 2000)
                self.etf_stks.append(stock)
# # 买价计算净值
# def iopv_b(self, bi):
# pstk = (sts[st].payment for st in self.etf_stks
# if base_pb2.RF_MUST == sts[st].rflag)
# mcash = math.fsum(pstk)
# print mcash
# allow = (sts[st].bprc[bi] * sts[st].qty for st in self.etf_stks
# if RF_ALLOW == sts[st].rflag)
# acash = math.fsum(allow)
# for l in allow:
# print 'allow', l
# print 'acash', acash
# forbid = (sts[st].bprc[bi] * sts[st].qty for st in self.etf_stks
# if RF_FORBIDDEN == sts[st].rflag)
# fcash = math.fsum(forbid)
# print 'fcash', fcash
# sumcash = ((mcash + fcash + acash + self.estimated) / self.min_unit)
# return sumcash
# # 卖价计算净值
# def iopv_s(self, si):
# pstk = (sts[st].payment for st in self.etf_stks
# if base_pb2.RF_MUST == sts[st].rflag)
# mcash = math.fsum(pstk)
# print mcash
# allow = (sts[st].sprc[si] * sts[st].qty for st in self.etf_stks
# if RF_ALLOW == sts[st].rflag)
# acash = math.fsum(allow)
# print 'acash', acash
# forbid = (sts[st].sprc[si] * sts[st].qty for st in self.etf_stks
# if RF_FORBIDDEN == sts[st].rflag)
# fcash = math.fsum(forbid)
# print 'fcash', fcash
# sumcash = ((mcash + fcash + acash + self.estimated) / self.min_unit)
# return sumcash
# class StockInEtf:
# # 股票代码, 数量, 现金替代标志, 溢价比率, 替代金额
# __slots__ = ('stkcode', 'quo', 'buy', 'sell', 'purchase', 'redeem', )
# def __init__(self, code):
# self.stkcode = code
# self.quo = None
# self.buy = 0
# self.sell = 0
# self.purchase = 0
# self.redeem = 0
# # self.market = base_pb2.MKT_UNKNOW
# # if quotation_def_pb2.RF_MUST == replace_flag:
# # self.payment = payment
# # else:
# # self.payment = 0
# def __str__(self):
# return ('buy %d sell %d purchase %s redeem %d quo %r' %
# (self.buy, self.sell, self.purchase, self.redeem,
# self.quo))
# eee = etf(111, 222, 333, 0.0003)
# for i in range(10):
# code = i * 10000 + i
# st = stockinfo(code, 1.2, 1000, i % 3, 0.1, i * 1000 + i)
# eee.etf_stks.append(code)
# sts[code] = st
# #eee.sts.append(st)
# print st
#
# # for stcode in eee.sts:
# # st = sts[stcode]
# # print 'price flag', st.price, st.rflag
#
# print eee.iopv_b(0)
# print eee.iopv_s(0)
| 2.765625
| 3
|
pydec/io/__init__.py
|
michaels10/pydec
| 0
|
12780881
|
"PyDEC mesh and array IO"
from info import __doc__
from meshio import *
from arrayio import *
__all__ = filter(lambda s:not s.startswith('_'),dir())
| 1.179688
| 1
|
codeStore/support_fun_resistance.py
|
pcmagic/stokes_flow
| 1
|
12780882
|
<gh_stars>1-10
from tqdm.notebook import tqdm as tqdm_notebook
import os
import glob
import pickle
import numpy as np
# load the resistance matrix form dir, standard version
def load_ABC_list(job_dir):
    """Load resistance-matrix pickles from *job_dir* (standard layout).

    Every ``*.pickle`` file in the directory must begin with the tuple
    ``(problem_kwargs, A, B1, B2, C)``; any trailing entries are ignored.

    Returns five ``np.ndarray``s: the problem kwargs plus the stacked
    A, B1, B2 and C matrices, in whatever order ``glob`` lists the files.
    """
    pickle_names = glob.glob(os.path.join(job_dir, '*.pickle'))
    kwargs_all, A_all, B1_all, B2_all, C_all = [], [], [], [], []
    for name in pickle_names:
        with open(name, 'rb') as handle:
            # Slice defensively: older pickles may carry extra entries.
            problem_kwargs, A, B1, B2, C = pickle.load(handle)[:5]
        kwargs_all.append(problem_kwargs)
        A_all.append(A)
        B1_all.append(B1)
        B2_all.append(B2)
        C_all.append(C)
    return (np.array(kwargs_all), np.array(A_all), np.array(B1_all),
            np.array(B2_all), np.array(C_all))
#
#
# # load (u_i^{Ej}, \omega_i^{Ej}) and (u_i^a, \omega_i^a), standard version.
# # see the method of base flow for detail
# def load_MBF(pickle_name):)
#
# # load (u_i^{Ej}, \omega_i^{Ej}) and (u_i^a, \omega_i^a) from dir, standard version.
# # see the method of base flow for detail
# def load_MBF_list(job_dir):
# t_dir = os.path.join(job_dir, '*.pickle')
# pickle_names = glob.glob(t_dir)
# A_list = []
# B1_list = []
# B2_list = []
# C_list = []
#
# for pickle_name in pickle_names:
# with open(pickle_name, 'rb') as myinput:
# problem_kwargs, A, B1, B2, C, = pickle.load(myinput)
# problem_kwarg_list.append(problem_kwargs)
# A_list.append(A)
# B1_list.append(B1)
# B2_list.append(B2)
# C_list.append(C)
| 2.1875
| 2
|
list/CSPdarknet53.py
|
PHL22/Backbone
| 0
|
12780883
|
<reponame>PHL22/Backbone
# -*- coding: UTF-8 -*-
"""
An unofficial implementation of CSP-DarkNet with pytorch
@<NAME> 2020_09_30
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
# from torchsummary import summary
from .CSPdarknet53conv_bn import Mish, BN_Conv_Mish
from .build import BACKBONE_REGISTRY
from .backbone import Backbone
from detectron2.modeling import ShapeSpec
class ResidualBlock(nn.Module):
    """Basic CSP-Darknet residual block.

    1x1 bottleneck conv -> 3x3 conv -> BatchNorm, added to the input,
    then passed through Mish.
    """

    def __init__(self, chnls, inner_chnnls=None):
        super(ResidualBlock, self).__init__()
        if inner_chnnls is None:
            inner_chnnls = chnls
        # 1x1 bottleneck followed by a 3x3 conv back to the input width
        # ("same" padding throughout).
        self.conv1 = BN_Conv_Mish(chnls, inner_chnnls, 1, 1, 0)
        self.conv2 = nn.Conv2d(inner_chnnls, chnls, 3, 1, 1, bias=False)
        self.bn = nn.BatchNorm2d(chnls)

    def forward(self, x):
        branch = self.bn(self.conv2(self.conv1(x)))
        return Mish()(branch + x)
class CSPFirst(nn.Module):
    """First CSP stage: stride-2 downsample, then a two-branch split/merge
    with a single residual block on the transform branch."""

    def __init__(self, in_chnnls, out_chnls):
        super(CSPFirst, self).__init__()
        self.dsample = BN_Conv_Mish(in_chnnls, out_chnls, 3, 2, 1)  # "same" padding
        self.trans_0 = BN_Conv_Mish(out_chnls, out_chnls, 1, 1, 0)  # shortcut branch
        self.trans_1 = BN_Conv_Mish(out_chnls, out_chnls, 1, 1, 0)  # residual branch
        self.block = ResidualBlock(out_chnls, out_chnls // 2)
        self.trans_cat = BN_Conv_Mish(2 * out_chnls, out_chnls, 1, 1, 0)  # fuse branches

    def forward(self, x):
        x = self.dsample(x)
        shortcut = self.trans_0(x)
        residual = self.block(self.trans_1(x))
        return self.trans_cat(torch.cat((shortcut, residual), 1))
class CSPStem(nn.Module):
    """Generic CSP stage: stride-2 downsample, half-width split into a
    shortcut branch and a stack of residual blocks, then channel-concat."""

    def __init__(self, in_chnls, out_chnls, num_block):
        super(CSPStem, self).__init__()
        self.dsample = BN_Conv_Mish(in_chnls, out_chnls, 3, 2, 1)
        self.trans_0 = BN_Conv_Mish(out_chnls, out_chnls // 2, 1, 1, 0)  # shortcut branch
        self.trans_1 = BN_Conv_Mish(out_chnls, out_chnls // 2, 1, 1, 0)  # residual branch
        self.blocks = nn.Sequential(*[ResidualBlock(out_chnls // 2) for _ in range(num_block)])
        self.trans_cat = BN_Conv_Mish(out_chnls, out_chnls, 1, 1, 0)

    def forward(self, x):
        x = self.dsample(x)
        shortcut = self.trans_0(x)
        residual = self.blocks(self.trans_1(x))
        return self.trans_cat(torch.cat((shortcut, residual), 1))
class CSP_DarkNet(Backbone):
    """CSP-DarkNet backbone emitting detectron2-style feature maps res2..res5."""

    def __init__(self, num_blocks: object, num_classes=1000) -> object:
        super(CSP_DarkNet, self).__init__()
        chnls = [64, 128, 256, 512, 1024]
        self.conv0 = BN_Conv_Mish(3, 32, 3, 1, 1)  # stem conv, "same" padding
        self.neck = CSPFirst(32, chnls[0])
        self.body = nn.Sequential(
            *[CSPStem(chnls[i], chnls[i + 1], num_blocks[i]) for i in range(4)])
        # Classification head: constructed for parity with the original
        # network but not used by forward() below.
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(chnls[4], num_classes)

    def forward(self, x):
        # Map each of the four body stages to its detectron2 feature name.
        stage_names = {'0': 'res2', '1': 'res3', '2': 'res4', '3': 'res5'}
        outputs = {}
        out = self.neck(self.conv0(x))
        for idx in range(len(self.body)):
            key = str(idx)
            out = self.body._modules[key](out)
            name = stage_names.get(key)
            if name is not None:
                outputs[name] = out
        return outputs

    def output_shape(self):
        """Channel count and stride of each emitted feature map."""
        return {'res2': ShapeSpec(channels=128, stride=4),
                'res3': ShapeSpec(channels=256, stride=8),
                'res4': ShapeSpec(channels=512, stride=16),
                'res5': ShapeSpec(channels=1024, stride=32)}
def csp_darknet_53(num_classes=1000):
    # Stage depths (2, 8, 8, 4) give the 53-layer variant used by YOLOv4.
    return CSP_DarkNet([2, 8, 8, 4], num_classes)
@BACKBONE_REGISTRY.register()
def build_CSPdarknet53_backbone(cfg, input_shape):
    # detectron2 backbone factory; cfg and input_shape are accepted for the
    # registry signature but not used.
    return csp_darknet_53()
if __name__ == '__main__':
    # Smoke test: print a layer-by-layer summary for a 224x224 RGB input.
    net=csp_darknet_53()
    from torchsummary import summary
    summary(net, (3, 224, 224))
    pass
| 2.265625
| 2
|
starter_code/api_keys.py
|
goblebla/Python-APIs
| 0
|
12780884
|
# OpenWeatherMap API Key
weather_api_key = "40449008a54beb2007d8de8d8b5d63a4"
# Google API Key
g_key = "<KEY>"
| 1.210938
| 1
|
src/main/python/pybuilder_integration/tasks.py
|
rspitler/pybuilder-integration
| 0
|
12780885
|
<filename>src/main/python/pybuilder_integration/tasks.py
import os
import shutil
import pytest
from pybuilder.core import Project, Logger, init, RequirementsFile
from pybuilder.errors import BuildFailedException
from pybuilder.install_utils import install_dependencies
from pybuilder.reactor import Reactor
from pybuilder_integration import exec_utility, tool_utility
from pybuilder_integration.artifact_manager import get_artifact_manager
from pybuilder_integration.cloudwatchlogs_utility import CloudwatchLogs
from pybuilder_integration.directory_utility import prepare_dist_directory, get_working_distribution_directory, \
package_artifacts, prepare_reports_directory, get_local_zip_artifact_path, prepare_logs_directory
from pybuilder_integration.properties import *
from pybuilder_integration.tool_utility import install_cypress
def integration_artifact_push(project: Project, logger: Logger, reactor: Reactor):
    """Upload any locally packaged tavern/cypress test bundles to the artifact store."""
    logger.info("Starting upload of integration artifacts")
    manager = get_artifact_manager(project)
    for tool in ["tavern", "cypress"]:
        artifact_file = get_local_zip_artifact_path(tool=tool, project=project, include_ending=True)
        if not os.path.exists(artifact_file):
            continue  # nothing was packaged for this tool
        logger.info(f"Starting upload of integration artifact: {os.path.basename(artifact_file)} to: {manager.friendly_name}")
        manager.upload(file=artifact_file, project=project, logger=logger, reactor=reactor)
def verify_environment(project: Project, logger: Logger, reactor: Reactor):
    """Run the locally built integration tests, then the latest published
    ones, and finally (unless disabled) promote the local artifacts."""
    local_dir = project.get_property(WORKING_TEST_DIR, get_working_distribution_directory(project))
    logger.info(f"Preparing to run tests found in: {local_dir}")
    _run_tests_in_directory(local_dir, logger, project, reactor)
    manager = get_artifact_manager(project=project)
    latest_dir = manager.download_artifacts(project=project, logger=logger, reactor=reactor)
    _run_tests_in_directory(latest_dir, logger, project, reactor, latest=True)
    if project.get_property(PROMOTE_ARTIFACT, True):
        integration_artifact_push(project=project, logger=logger, reactor=reactor)
def _run_tests_in_directory(dist_directory, logger, project, reactor, latest=False):
    # Run every cypress and tavern suite found under ``dist_directory``.
    # With ``latest=True`` the directory holds one sub-directory per role
    # (as downloaded from the artifact store) and each is run separately.
    cypress_test_path = f"{dist_directory}/cypress"
    if os.path.exists(cypress_test_path):
        logger.info(f"Found cypress tests - starting run latest: {latest}")
        if latest:
            for dir in os.listdir(cypress_test_path):
                if os.path.isdir(f"{cypress_test_path}/{dir}"):
                    logger.info(f"Running {dir}")
                    _run_cypress_tests_in_directory(work_dir=f"{cypress_test_path}/{dir}",
                                                    logger=logger,
                                                    project=project,
                                                    reactor=reactor)
        else:
            _run_cypress_tests_in_directory(work_dir=cypress_test_path,
                                            logger=logger,
                                            project=project,
                                            reactor=reactor)
    tavern_test_path = f"{dist_directory}/tavern"
    if os.path.exists(tavern_test_path):
        logger.info(f"Found tavern tests - starting run latest: {latest}")
        if latest:
            for dir in os.listdir(tavern_test_path):
                if os.path.isdir(f"{tavern_test_path}/{dir}"):
                    logger.info(f"Running {dir}")
                    # Tavern runs receive the role (sub-directory name) so the
                    # matching Cloudwatch logs can be printed afterwards.
                    _run_tavern_tests_in_dir(test_dir=f"{tavern_test_path}/{dir}",
                                             logger=logger,
                                             project=project,
                                             reactor=reactor,
                                             role=os.path.basename(dir))
        else:
            _run_tavern_tests_in_dir(test_dir=f"{tavern_test_path}",
                                     logger=logger,
                                     project=project,
                                     reactor=reactor)
def verify_cypress(project: Project, logger: Logger, reactor: Reactor):
    """Run the project's local cypress suite; package it as an artifact if it ran."""
    work_dir = project.expand_path(f"${CYPRESS_TEST_DIR}")
    ran = _run_cypress_tests_in_directory(work_dir=work_dir, logger=logger, project=project, reactor=reactor)
    if ran:
        package_artifacts(project, work_dir, "cypress", project.get_property(ROLE))
def _run_cypress_tests_in_directory(work_dir, logger, project, reactor: Reactor):
    # Returns True when tests were found (and run); False when work_dir is
    # absent. A failing cypress run raises via exec_utility.exec_command.
    target_url = project.get_mandatory_property(INTEGRATION_TARGET_URL)
    environment = project.get_mandatory_property(ENVIRONMENT)
    if not os.path.exists(work_dir):
        logger.info("Skipping cypress run: no tests")
        return False
    logger.info(f"Found {len(os.listdir(work_dir))} files in cypress test directory")
    # Validate NPM install and Install cypress
    package_json = os.path.join(work_dir, "package.json")
    if os.path.exists(package_json):
        logger.info("Found package.json installing dependencies")
        tool_utility.install_npm_dependencies(work_dir, project=project, logger=logger, reactor=reactor)
    else:
        install_cypress(logger=logger, project=project, reactor=reactor, work_dir=work_dir)
    executable = os.path.join(work_dir, "node_modules/cypress/bin/cypress")
    results_file, run_name = get_test_report_file(project=project,test_dir=work_dir,tool="cypress")
    # Run the actual tests against the baseURL provided by ${integration_target}
    args = ["run", "--config", f"baseUrl={target_url}", "--reporter-options",
            f"mochaFile={results_file}" ]
    # Optional per-environment cypress config, e.g. "prod-config.json".
    config_file_path = f'{environment}-config.json'
    if os.path.exists(os.path.join(work_dir, config_file_path)):
        args.append("--config-file")
        args.append(config_file_path)
    logger.info(f"Running cypress on host: {target_url}")
    exec_utility.exec_command(command_name=executable, args=args,
                              failure_message="Failed to execute cypress tests", log_file_name='cypress_run.log',
                              project=project, reactor=reactor, logger=logger, working_dir=work_dir, report=False)
    # workaround but cypress output are relative to location of cypress.json so we need to collapse
    if os.path.exists(f"{work_dir}/target"):
        shutil.copytree(f"{work_dir}/target","./target",dirs_exist_ok=True)
    return True
def verify_tavern(project: Project, logger: Logger, reactor: Reactor):
    """Run the project's local tavern suite; package it as an artifact if it ran."""
    # Expand the configured test directory to an absolute path.
    test_dir = project.expand_path(f"${TAVERN_TEST_DIR}")
    ran = _run_tavern_tests_in_dir(test_dir, logger, project, reactor)
    if ran:
        package_artifacts(project, test_dir, "tavern", project.get_property(ROLE))
def _run_tavern_tests_in_dir(test_dir: str, logger: Logger, project: Project, reactor: Reactor, role=None):
    # Returns True when tests were found; raises BuildFailedException when
    # pytest reports a non-zero exit status.
    logger.info("Running tavern tests: {}".format(test_dir))
    if not os.path.exists(test_dir):
        logger.info("Skipping tavern run: no tests")
        return False
    logger.info(f"Found {len(os.listdir(test_dir))} files in tavern test directory")
    # todo is this unique enough for each run?
    output_file, run_name = get_test_report_file(project, test_dir)
    from sys import path as syspath
    # Make test-local helper modules importable by pytest.
    # NOTE(review): this entry is never removed, so repeated calls keep
    # prepending to sys.path -- confirm this is acceptable.
    syspath.insert(0, test_dir)
    # install any requirements that my exist
    requirements_file = os.path.join(test_dir, "requirements.txt")
    if os.path.exists(requirements_file):
        dependency = RequirementsFile(requirements_file)
        install_dependencies(logger, project, dependency, reactor.pybuilder_venv, f"{prepare_logs_directory(project)}/install_tavern_pip_dependencies.log")
    extra_args = [project.expand(prop) for prop in project.get_property(TAVERN_ADDITIONAL_ARGS, [])]
    args = ["--junit-xml", f"{output_file}", test_dir] + extra_args
    if project.get_property("verbose"):
        args.append("-s")
        args.append("-v")
    # Tavern test files read these environment variables at collection time.
    os.environ['TARGET'] = project.get_property(INTEGRATION_TARGET_URL)
    os.environ[ENVIRONMENT] = project.get_property(ENVIRONMENT)
    logger.info(f"Running against: {project.get_property(INTEGRATION_TARGET_URL)} ")
    # Run pytest from inside the test directory, restoring the cwd afterwards.
    cache_wd = os.getcwd()
    try:
        os.chdir(test_dir)
        ret = pytest.main(args)
    finally:
        os.chdir(cache_wd)
    if role:
        # Surface the service logs for this role before deciding pass/fail.
        CloudwatchLogs(project.get_property(ENVIRONMENT), project.get_property(APPLICATION), role, logger).print_latest()
    if ret != 0:
        raise BuildFailedException(f"Tavern tests failed see complete output here - {output_file}")
    return True
def get_test_report_file(project, test_dir, tool="tavern"):
    """Return ``(report_path, run_name)`` for a test run.

    ``run_name`` is the basename of the test directory's parent; the report
    lands at ``<reports>/<tool>-<run_name>.out.xml``.
    """
    run_name = os.path.basename(os.path.realpath(os.path.join(test_dir, os.pardir)))
    report_path = os.path.join(prepare_reports_directory(project), f"{tool}-{run_name}.out.xml")
    return report_path, run_name
| 2.171875
| 2
|
Chapter24/apple_factory.py
|
DeeMATT/AdvancedPythonProgramming
| 278
|
12780886
|
MINI14 = '1.4GHz Mac mini'


class AppleFactory:
    """Factory that builds known computer models by name."""

    class MacMini14:
        """Spec sheet for the entry-level Mac mini."""

        def __init__(self):
            self.memory = 4  # in gigabytes
            self.hdd = 500  # in gigabytes
            self.gpu = 'Intel HD Graphics 5000'

        def __str__(self):
            return '\n'.join((f'Model: {MINI14}',
                              f'Memory: {self.memory}GB',
                              f'Hard Disk: {self.hdd}GB',
                              f'Graphics Card: {self.gpu}'))

    def build_computer(self, model):
        """Return a built computer for *model*; print a notice and return
        None for unknown models."""
        if model != MINI14:
            print(f"I don't know how to build {model}")
            return None
        return self.MacMini14()
if __name__ == '__main__':
    # Demo: build the known model and print its spec sheet.
    factory = AppleFactory()
    computer = factory.build_computer(MINI14)
    print(computer)
| 3.421875
| 3
|
examples/get_links.py
|
cesardeaptude/crawlerpy
| 0
|
12780887
|
<gh_stars>0
# Example: fetch article links from a Tehran Times archive page with crawlerpy.
from crawlerpy import Get_news
from crawlerpy.resources.resources import Resources

pprint = Resources()
news = Get_news()
# Single archive page; the commented variants below pass several pages at once.
links = news.get_links("https://www.tehrantimes.com/page/archive.xhtml?wide=0&ms=0&pi=2&tp=697")
#links = news.get_links("https://www.tehrantimes.com/archive?tp=697",
#                "https://www.tehrantimes.com/page/archive.xhtml?wide=0&ms=0&pi=2",
#                "https://www.tehrantimes.com/page/archive.xhtml?wide=0&ms=0&pi=3&tp=696")
#print(links)
pprint.pretty(links)
| 2.484375
| 2
|
tke_project/myapp/migrations/0003_gallery_title.py
|
moradia100/TKE-Website
| 0
|
12780888
|
# Generated by Django 2.2.6 on 2019-10-15 23:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a required ``title`` TextField to the Gallery model.

    ``preserve_default=False`` means ``default=0`` is used only to back-fill
    existing rows during this migration; the field keeps no default afterwards.
    """

    dependencies = [
        ('myapp', '0002_gallery'),
    ]

    operations = [
        migrations.AddField(
            model_name='gallery',
            name='title',
            field=models.TextField(default=0),
            preserve_default=False,
        ),
    ]
| 1.5625
| 2
|
nes/processors/cpu/instructions/jump/__init__.py
|
Hexadorsimal/pynes
| 1
|
12780889
|
<reponame>Hexadorsimal/pynes<gh_stars>1-10
from .jmp import Jmp
| 1.085938
| 1
|
chalice/sample_lambda_periodic.py
|
terratenney/aws-tools
| 8
|
12780890
|
<gh_stars>1-10
from chalice import Chalice, Rate
app = Chalice(app_name="helloworld")
# Automatically runs every 5 minutes
@app.schedule(Rate(5, unit=Rate.MINUTES))
def periodic_task(event):
return {"hello": "world"}
| 2.671875
| 3
|
snekcord/objects/teamobject.py
|
asleep-cult/snakecord
| 6
|
12780891
|
<gh_stars>1-10
from ..enums import TeamMembershipState
from ..json import JsonField, JsonObject
from ..snowflake import Snowflake
class Team(JsonObject):
    """A developer team attached to an application."""

    id = JsonField('id', Snowflake)
    owner_id = JsonField('owner_user_id', Snowflake)

    def __init__(self, *, application):
        self.application = application
        # Maps user id -> TeamMember; rebuilt on every update() that
        # carries a 'members' payload.
        self.members = {}

    @property
    def owner(self):
        """The owning TeamMember, or None if not loaded yet."""
        return self.members.get(self.owner_id)

    def update(self, data):
        super().update(data)
        if 'members' in data:
            self.members.clear()
            for raw_member in data['members']:
                parsed = TeamMember.unmarshal(raw_member, team=self)
                self.members[parsed.user.id] = parsed
        return self
class TeamMember(JsonObject):
    """A single member of a Team."""

    membership_state = JsonField('membership_state', TeamMembershipState.try_enum)
    team_id = JsonField('team_id', Snowflake)

    def __init__(self, *, team):
        self.team = team
        # Filled in by update() once a 'user' payload arrives.
        self.user = None

    def update(self, data):
        super().update(data)
        if 'user' in data:
            # Upsert through the client's user cache so identical users
            # share one object.
            self.user = self.team.application.client.users.upsert(data['user'])
        return self
| 2.390625
| 2
|
router.py
|
jersobh/zfs-resty
| 11
|
12780892
|
from controllers import mainController
import aiohttp_cors
def routes(app):
    """Register every ZFS REST endpoint on *app*, each wrapped with
    permissive CORS."""
    cors = aiohttp_cors.setup(app, defaults={
        "*": aiohttp_cors.ResourceOptions(
            allow_methods=("*"),
            allow_credentials=True,
            expose_headers=("*",),
            allow_headers=("*"),
            max_age=3600,
        )
    })
    # (method, path, handler) in original registration order.
    route_table = (
        ('GET', '/', mainController.index),
        ('POST', '/auth', mainController.auth),
        ('POST', '/create-pool', mainController.create_pool),
        ('POST', '/delete-pool', mainController.delete_pool),
        ('GET', '/devices', mainController.get_storage_info),
        ('GET', '/status', mainController.check_status),
        ('GET', '/io-status', mainController.get_io_status),
        ('POST', '/add-disk', mainController.add_disk),
        ('POST', '/add-spare-disk', mainController.add_spare_disk),
        ('POST', '/replace-disk', mainController.replace_disk),
        ('POST', '/mountpoint', mainController.set_mountpoint),
    )
    for method, path, handler in route_table:
        adder = app.router.add_get if method == 'GET' else app.router.add_post
        cors.add(adder(path, handler))
| 2.09375
| 2
|
env_dev/lib/python3.6/site-packages/sparsegrad/base/expr.py
|
icweaver/cs207_public
| 0
|
12780893
|
<filename>env_dev/lib/python3.6/site-packages/sparsegrad/base/expr.py<gh_stars>0
# -*- coding: utf-8; -*-
#
# sparsegrad - automatic calculation of sparse gradient
# Copyright (C) 2016-2018 <NAME> (<EMAIL>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3,
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sparsegrad import func
from sparsegrad import impl
import sparsegrad.impl.sparsevec as impl_sparsevec
import numpy as np
def _genu():
    """Generate source for the single-argument ufunc methods of expr_base."""
    template = "def %s(self): return self.apply(func.%s,self)"
    return "\n".join(template % (name, name)
                     for name, f in func.known_ufuncs.items()
                     if f.nin == 1)
def _geng():
    """Generate source binding each non-ufunc function to a wrapped_func."""
    return "\n".join("%s=wrapped_func(func.%s)" % (name, name)
                     for name in func.known_funcs.keys())
class bool_expr(object):
    """Abstract base class for boolean expressions.

    Subclasses may provide ``branch(iftrue, iffalse)`` to take over
    conditional evaluation in :func:`branch`.
    """
    pass
class expr_base(object):
    """
    Base class for numpy-compatible operator overloading

    It provides default overloads of arithmetic operators and methods for mathematical functions.
    The default overloads call abstract apply method to calculate the result of operation.
    """

    # High priority so numpy defers to these overloads in mixed expressions;
    # disabling __array_wrap__ stops numpy from re-wrapping results.
    __array_priority__ = 100
    __array_wrap__ = None

    def apply(self, func, *args):
        """
        Evaluate and return func(*args)

        Subclasses do not need to call this for all functions.
        """
        raise NotImplementedError()

    # Binary/unary operators all delegate to apply() with the matching
    # sparsegrad function object, keeping operand order intact.
    def __add__(self, other): return self.apply(func.add, self, other)
    def __radd__(self, other): return self.apply(func.add, other, self)
    def __sub__(self, other): return self.apply(func.subtract, self, other)
    def __rsub__(self, other): return self.apply(func.subtract, other, self)
    def __mul__(self, other): return self.apply(func.multiply, self, other)
    def __rmul__(self, other): return self.apply(func.multiply, other, self)
    def __div__(self, other): return self.apply(func.divide, self, other)
    def __rdiv__(self, other): return self.apply(func.divide, other, self)
    def __truediv__(self, other): return self.apply(func.divide, self, other)
    def __rtruediv__(self, other): return self.apply(func.divide, other, self)
    def __pow__(self, other): return self.apply(func.power, self, other)
    def __rpow__(self, other): return self.apply(func.power, other, self)
    def __pos__(self): return self
    def __neg__(self): return self.apply(func.negative, self)

    def __getitem__(self, idx):
        return self.apply(func.getitem, self, idx)

    def __abs__(self):
        return self.apply(func.abs, self)

    # ufuncs
    # Generated one-argument ufunc methods (sin, exp, ...) built from
    # func.known_ufuncs by _genu().
    exec(_genu())
class wrapped_func():
    """Make a sparsegrad function callable on plain values and expr_base objects."""

    def __init__(self, func):
        self.func = func

    def __call__(self, *args):
        # Dispatch to the highest-priority argument that overrides apply();
        # fall back to self (plain evaluation) when none does.
        dispatcher = _find_arr(args, 'apply', default=self)
        return dispatcher.apply(self.func, *args)

    def apply(self, f, *args):
        # Fallback path: evaluate the function on plain values.
        return f.evaluate(*args)
# non ufuncs
# Bind module-level wrappers (one per entry in func.known_funcs) so those
# functions dispatch through wrapped_func.
exec(_geng())
def _find_arr(arrays, attr, default=None, default_priority=0.):
highest = default
current = default_priority
for a in arrays:
if hasattr(a, attr):
priority = getattr(a, '__array_priority__', 0.)
if highest is None or priority > current:
highest, current = a, priority
return highest
def dot(a, b):
    """Equivalent of scipy.sparse ``dot`` that is aware of expr_base operands."""
    dispatcher = _find_arr((a, b), 'dot_', default=impl)
    return dispatcher.dot_(a, b)
def where(cond, a, b):
    """Equivalent of ``numpy.where`` that is aware of expr_base operands."""
    dispatcher = _find_arr((cond, a, b), 'where', default=np)
    return dispatcher.where(cond, a, b)
def hstack(arrays):
    """Equivalent of ``numpy.hstack`` that is aware of expr_base operands."""
    dispatcher = _find_arr(arrays, 'hstack', default=np)
    return dispatcher.hstack(arrays)
def sum(a):
    """Equivalent of ``numpy.sum`` that is aware of expr_base operands.

    (Intentionally shadows the builtin ``sum`` within this module's API.)
    """
    return a.sum() if isinstance(a, expr_base) else np.sum(a)
def stack(*arrays):
    "Alias for hstack, taking arrays as separate arguments"
    return hstack(arrays)
def sparsesum(terms, **kwargs):
    """Sparse summing function aware of expr_base.

    Dispatches on the ``.v`` payloads of *terms*; falls back to the plain
    sparse-vector implementation.
    """
    payloads = [term.v for term in terms]
    dispatcher = _find_arr(payloads, 'sparsesum', default=impl_sparsevec)
    return dispatcher.sparsesum(terms, **kwargs)
def as_condition_value(a):
    """Return *a* as a concrete boolean ndarray.

    Bug fix: the original used ``dtype=np.bool``.  The ``np.bool`` alias of
    the builtin ``bool`` was deprecated in NumPy 1.20 and removed in 1.24,
    so it raises AttributeError on current NumPy; ``dtype=bool`` is the
    documented equivalent and behaves identically on older versions.
    """
    return np.asarray(a, dtype=bool)
def broadcast_to(arr, shape):
    """Equivalent of ``numpy.broadcast_to`` that is aware of expr_base operands."""
    dispatcher = _find_arr([arr], 'broadcast_to', default=np)
    return dispatcher.broadcast_to(arr, shape)
def branch(cond, iftrue, iffalse):
    """
    Branch execution

    Note that, in some cases (propagation of sparsity pattern), both branches
    can be executed more than once.

    Parameters:
    -----------
    cond : bool vector
        Condition
    iftrue : callable(idx)
        Function called to evaluate elements with indices idx, where cond is True
    iffalse : callable(idx)
        Function called to evaluate elements with indices idx, where cond is False
    """
    # Bug fix: the original tested ``cond.hasattr('branch')`` -- objects have
    # no ``hasattr`` method, so any bool_expr reaching this line raised
    # AttributeError.  The builtin ``hasattr(cond, 'branch')`` is what was
    # intended: let condition expressions that know how to branch do so.
    if isinstance(cond, bool_expr) and hasattr(cond, 'branch'):
        return cond.branch(iftrue, iffalse)

    def _branch(cond, iftrue, iffalse):
        # Scalar condition: evaluate exactly one side, with no index vector.
        if not cond.shape:
            if cond:
                return iftrue(None)
            else:
                return iffalse(None)
        # Vector condition: split the index range, evaluate each side only
        # on the indices it owns, then merge the two sparse vectors.
        n = len(cond)
        r = np.arange(len(cond))
        ixtrue = r[cond]
        ixfalse = r[np.logical_not(cond)]
        vtrue = impl_sparsevec.sparsevec(
            n, ixtrue, broadcast_to(
                iftrue(ixtrue), ixtrue.shape))
        vfalse = impl_sparsevec.sparsevec(
            n, ixfalse, broadcast_to(
                iffalse(ixfalse), ixfalse.shape))
        return sparsesum([vtrue, vfalse])
    value = _branch(as_condition_value(cond), iftrue, iffalse)
    # Give gradient-carrying results a chance to post-process the merge.
    if hasattr(value, 'branch_join'):
        return value.branch_join(cond, iftrue, iffalse)
    else:
        return value
| 2.265625
| 2
|
_common.py
|
mxr/advent-of-code-2020
| 2
|
12780894
|
<reponame>mxr/advent-of-code-2020
from __future__ import annotations
import sys
from argparse import ArgumentParser
from typing import Callable
def main(part1: Callable[[str], int], part2: Callable[[str], int]) -> int:
    """Parse ``--part``/``--filename`` and run the selected day's solver(s).

    ``--part 0`` (the default) runs both parts.  The input filename defaults
    to the running script's name with ``.py`` swapped for ``.txt``.
    Always returns 0.
    """
    parser = ArgumentParser()
    parser.add_argument("-p", "--part", type=int, default=0)
    parser.add_argument(
        "-f", "--filename", type=str, default=sys.argv[0].replace(".py", ".txt")
    )
    opts = parser.parse_args()

    # part in (0, 1) / (0, 2) reproduces the original "(part or N) == N" logic.
    if opts.part in (0, 1):
        print("part1: ", end="")
        print(part1(opts.filename))
    if opts.part in (0, 2):
        print("part2: ", end="")
        print(part2(opts.filename))

    return 0
| 3
|
exp-05/MainWindow.py
|
SevdanurGENC/QT-Designer-Examples
| 0
|
12780895
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MainWindow.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(495, 269)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.lbl_sayi1 = QtWidgets.QLabel(self.centralwidget)
self.lbl_sayi1.setGeometry(QtCore.QRect(40, 60, 47, 14))
self.lbl_sayi1.setObjectName("lbl_sayi1")
self.lbl_sayi2 = QtWidgets.QLabel(self.centralwidget)
self.lbl_sayi2.setGeometry(QtCore.QRect(40, 90, 47, 14))
self.lbl_sayi2.setObjectName("lbl_sayi2")
self.txt_sayi1 = QtWidgets.QLineEdit(self.centralwidget)
self.txt_sayi1.setGeometry(QtCore.QRect(110, 60, 181, 20))
self.txt_sayi1.setObjectName("txt_sayi1")
self.txt_sayi2 = QtWidgets.QLineEdit(self.centralwidget)
self.txt_sayi2.setGeometry(QtCore.QRect(110, 90, 181, 20))
self.txt_sayi2.setObjectName("txt_sayi2")
self.btn_topla = QtWidgets.QPushButton(self.centralwidget)
self.btn_topla.setGeometry(QtCore.QRect(110, 130, 75, 23))
self.btn_topla.setObjectName("btn_topla")
self.btn_cikarma = QtWidgets.QPushButton(self.centralwidget)
self.btn_cikarma.setGeometry(QtCore.QRect(190, 130, 75, 23))
self.btn_cikarma.setObjectName("btn_cikarma")
self.btn_carpma = QtWidgets.QPushButton(self.centralwidget)
self.btn_carpma.setGeometry(QtCore.QRect(270, 130, 75, 23))
self.btn_carpma.setObjectName("btn_carpma")
self.btn_bolme = QtWidgets.QPushButton(self.centralwidget)
self.btn_bolme.setGeometry(QtCore.QRect(350, 130, 75, 23))
self.btn_bolme.setObjectName("btn_bolme")
self.lbl_sonuc = QtWidgets.QLabel(self.centralwidget)
self.lbl_sonuc.setGeometry(QtCore.QRect(110, 180, 301, 31))
self.lbl_sonuc.setObjectName("lbl_sonuc")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 495, 22))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Set (and re-set on locale change) every user-visible string of the UI.

    Qt Designer convention: all literals go through
    QCoreApplication.translate so the .ts/.qm localization machinery
    can substitute translated text at runtime.
    """
    tr = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(tr("MainWindow", "MainWindow"))
    # Widget captions, applied in the order the widgets were created.
    captions = (
        (self.lbl_sayi1, "Sayi1 : "),
        (self.lbl_sayi2, "Sayi 2 : "),
        (self.btn_topla, "Toplam"),
        (self.btn_cikarma, "Cikarma"),
        (self.btn_carpma, "Carpma"),
        (self.btn_bolme, "Bolme"),
        (self.lbl_sonuc, "Sonuc : "),
    )
    for widget, text in captions:
        widget.setText(tr("MainWindow", text))
| 2.109375
| 2
|
meutils/decorators/catch.py
|
Jie-Yuan/MeUtils
| 3
|
12780896
|
<reponame>Jie-Yuan/MeUtils
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : MeUtils.
# @File : try
# @Time : 2021/4/2 11:03 上午
# @Author : yuanjie
# @WeChat : 313303303
# @Software : PyCharm
# @Description :
from meutils.pipe import *
from meutils.log_utils import logger4wecom
def wecom_hook(title='Task Done', text=None, hook_url=None):
    """Decorator factory that pushes a WeCom (企业微信) notification after the
    wrapped function finishes.

    The arguments are fixed at decoration time and cannot vary per call.

    :param title: message title shown in the notification.
    :param text: message body; if None the wrapped function's return value is
        used instead (an explicit ``text`` overrides the return value).
    :param hook_url: webhook URL, or a group name resolved by logger4wecom.
    :return: the decorator.
    """

    @wrapt.decorator
    def wrapper(wrapped, instance, args, kwargs):
        # Time the call so the notification can report the duration.
        s = time.time()
        r = wrapped(*args, **kwargs)
        e = time.time()
        mins = (e - s) // 60  # whole minutes elapsed (floor division)
        logger.info(f"{title} done in {mins} m")
        logger4wecom(
            title=title,
            text=f"**{wrapped.__name__}:** {r if text is None else text}\n耗时 {mins} m",
            hook_url=hook_url
        )
        return r  # pass the wrapped function's result through unchanged

    return wrapper
def wecom_catch(hook_url=None, more_info=True):
    """Decorator factory: if the wrapped function raises, report the error to
    WeCom instead of propagating it.

    NOTE(review): the exception is swallowed — the wrapper implicitly returns
    None on failure; callers relying on the return value should be aware.

    :param hook_url: webhook URL, or a group name resolved by logger4wecom.
    :param more_info: if True send the full traceback, otherwise only the
        exception object itself.
    """

    @wrapt.decorator
    def wrapper(wrapped, instance, args, kwargs):
        try:
            return wrapped(*args, **kwargs)
        except Exception as e:
            # Either the full traceback or just the exception, per more_info.
            info = traceback.format_exc() if more_info else e
            # Markdown code fence so the error renders verbatim in WeCom.
            text = f"""
            ```
            {info.strip()}
            ```
            """.strip()
            logger4wecom(wrapped.__name__, text, hook_url)

    return wrapper
if __name__ == '__main__':
    # Manual smoke test: stack the hook and catch decorators on a trivial
    # function. (Earlier experiments kept below, commented out.)

    # @feishu_catch()
    # def f():
    #     1 / 0
    #
    #
    # f()

    # @wecom_catch(more_info=False)
    # def f():
    #     1 / 0
    #
    #
    # f()

    @wecom_hook('catch_hook测试', text="TEXT")
    @wecom_catch()
    def f():
        # 1 / 0
        print(time.time())
        return 'RES'

    f()
| 2.40625
| 2
|
sandbox/src/COSFocusOutputs.py
|
sniemi/SamPy
| 5
|
12780897
|
#! /usr/bin/env python
'''
DESCRIPTION:
This short script pulls out COS data from a fits file
and outputs the data to two ascii files. The first file
contains two columns; pixels and counts, while the other
file has columns; wavelength and counts.
Dispersion solutions are given in the beginning of the script
and should be changed if necessary. Dispersion solutions are
given for G130M and G160M, for both segments separately.
USAGE:
For example:
python COSFocusOutputs.py "*1dx*.fits"
HISTORY:
Created on Aug 21, 2009
@author: <NAME>
'''
import pyfits as PF
import sys
import glob as G
import matplotlib
matplotlib.rc('text', usetex = True)
import pylab as P
try:
import scipy.stsci.convolve as C
except:
import convolve as C
######################################################
#CHANGE THESE IF REQUIRED!
######################################################
#Dispersion coeffs
#G160M/1600/Seg A:
G160MAa0 = 1586.40
G160MAa1 = 0.0122397
#G160M/1600/Seg B:
G160MBa0 = 1398.06
G160MBa1 = 0.0122369
#G130M/1309/Seg A:
G130MAa0 = 1297.67
G130MAa1 = 0.00996572
#G130M/1309/Seg B:
G130MBa0 = 1144.38
G130MBa1 = 0.00996337
######################################################
# Main loop: for every matching FITS file, dump (pixel, counts) and
# (wavelength, counts) ASCII tables using the dispersion coefficients above.
filelist = G.glob(sys.argv[1])
smoothing = (50,)  # boxcar width for the (currently disabled) smoothed plots

for fname in filelist:  # renamed from ``file`` to avoid shadowing the builtin
    fulldata = PF.getdata(fname)
    hdr0 = PF.open(fname)[0].header
    # First spectrum row: column 0 holds pixel positions, column 1 counts.
    ydata = fulldata[0][1]
    xdata = fulldata[0][0]

    # Output 1: pixel vs. counts.
    fh = open(fname[:-5] + '_pix.dat', 'w')
    fh.write(';pixels counts\n')
    for x, y in zip(xdata, ydata):
        fh.write(str(x) + ' ' + str(y) + '\n')
    fh.close()

    # Pick the dispersion solution for this grating/segment combination.
    # BUGFIX: ``wave`` is now reset per file; previously an unrecognised
    # SEGMENT/OPT_ELEM combination raised a NameError on the first file or
    # silently reused the previous file's wavelength solution.
    wave = None
    if hdr0['SEGMENT'] == 'FUVA':
        if hdr0['OPT_ELEM'].strip() == 'G130M':
            wave = G130MAa0 + G130MAa1 * xdata
        if hdr0['OPT_ELEM'].strip() == 'G160M':
            wave = G160MAa0 + G160MAa1 * xdata
    if hdr0['SEGMENT'] == 'FUVB':
        if hdr0['OPT_ELEM'].strip() == 'G130M':
            wave = G130MBa0 + G130MBa1 * xdata
        if hdr0['OPT_ELEM'].strip() == 'G160M':
            wave = G160MBa0 + G160MBa1 * xdata
    if wave is None:
        print('No dispersion solution for %s (%s/%s), skipping wavelength output' %
              (fname, hdr0['SEGMENT'], hdr0['OPT_ELEM']))
        continue

    # Output 2: wavelength vs. counts.
    fh = open(fname[:-5] + '_wave.dat', 'w')
    fh.write(';wavelength counts\n')
    for wav, y in zip(wave, ydata):
        fh.write(str(wav) + ' ' + str(y) + '\n')
    fh.close()

print('Script ends...')
| 3.015625
| 3
|
test/Test_LinkSearchBase.py
|
shift4869/PictureGathering
| 0
|
12780898
|
<gh_stars>0
# coding: utf-8
import sys
import unittest
from contextlib import ExitStack
from logging import WARNING, getLogger
from mock import MagicMock, PropertyMock, mock_open, patch
from PictureGathering import LinkSearchBase
logger = getLogger("root")
logger.setLevel(WARNING)
class TestLinkSearchBase(unittest.TestCase):
    """Tests for the external-link search dispatcher (LinkSearchBase)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_LinkSearchBase(self):
        """A freshly constructed dispatcher has no registered processors."""
        ls_cont = LinkSearchBase.LinkSearchBase()
        self.assertEqual([], ls_cont.processer_list)

    def test_Register(self):
        """Register accepts valid handlers and rejects unrelated classes."""
        ls_cont = LinkSearchBase.LinkSearchBase()

        # Normal case: register the concrete handlers.
        lsc = LinkSearchBase.LSConcrete_0()
        res0 = ls_cont.Register(lsc)
        lsc = LinkSearchBase.LSConcrete_1()
        res1 = ls_cont.Register(lsc)
        lsc = LinkSearchBase.LSConcrete_2()
        res2 = ls_cont.Register(lsc)
        self.assertEqual(0, res0)
        self.assertEqual(0, res1)
        self.assertEqual(0, res2)

        # Error case: a class that does not derive from LinkSearchBase
        # must be rejected.
        class LSFake():
            def __init__(self):
                pass

        lsc = LSFake()
        res = ls_cont.Register(lsc)
        self.assertEqual(-1, res)

        # Borderline case: not derived from LinkSearchBase but provides the
        # full interface (duck typing), so registration is accepted.
        class LSImitation():
            def __init__(self):
                pass

            def IsTargetUrl(self, url: str) -> bool:
                return False

            def Process(self, url: str) -> int:
                return 0

        lsc = LSImitation()
        res = ls_cont.Register(lsc)
        self.assertEqual(0, res)

    def _registered_dispatcher(self):
        """Helper: a dispatcher with all three concrete handlers registered."""
        ls_cont = LinkSearchBase.LinkSearchBase()
        for lsc in (LinkSearchBase.LSConcrete_0(),
                    LinkSearchBase.LSConcrete_1(),
                    LinkSearchBase.LSConcrete_2()):
            ls_cont.Register(lsc)
        return ls_cont

    def test_CoRProcessDo(self):
        """Chain-of-responsibility dispatch returns the handler's result."""
        ls_cont = self._registered_dispatcher()
        url = "https://www.anyurl/sample/index_{}.html"
        for i in range(0, 4):
            res = ls_cont.CoRProcessDo(url.format(i))
            if res == 0:
                self.assertIn(i, [0, 1])   # handled successfully
            elif res == -1:
                self.assertEqual(2, i)     # a handler matched but failed
            elif res == 1:
                self.assertEqual(3, i)     # no handler matched

    def test_CoRProcessCheck(self):
        """CoRProcessCheck reports whether any handler claims the URL."""
        ls_cont = self._registered_dispatcher()
        url = "https://www.anyurl/sample/index_{}.html"
        for i in range(0, 4):
            res = ls_cont.CoRProcessCheck(url.format(i))
            if res:
                self.assertIn(i, [0, 1, 2])
            else:
                self.assertEqual(3, i)

    def test_IsTargetUrl(self):
        """IsTargetUrl: the base class never claims a URL."""
        ls_cont = LinkSearchBase.LinkSearchBase()
        url = "https://www.google.co.jp/"
        self.assertEqual(False, ls_cont.IsTargetUrl(url))  # base: always False

    def test_Process(self):
        """Process: the base class handler always fails.

        BUGFIX: this method was previously named ``Process`` (so unittest
        never discovered or ran it) and mistakenly asserted against
        IsTargetUrl instead of Process.
        """
        ls_cont = LinkSearchBase.LinkSearchBase()
        url = "https://www.google.co.jp/"
        self.assertEqual(-1, ls_cont.Process(url))  # base class: always fails
if __name__ == "__main__":
if sys.argv:
del sys.argv[1:]
unittest.main()
| 2.578125
| 3
|
Aula1a17/Aula_1_noCreative.py
|
paiva-rodrigo/PythonScripts
| 0
|
12780899
|
<reponame>paiva-rodrigo/PythonScripts
# Simple console exercise: read personal data and a birth date, echo them back.
# NOTE(review): input() returns str; idade/peso are never converted to numbers.
nome=input("qual é o seu nome?")
idade= input("qual e a sua idade?")
peso=input("qual e o seu peso?")
print(nome,idade,peso)
print("seja bem vindo",nome)
# Birth date, read field by field and echoed in dd/mm/yyyy order.
dia=input("Dia de nascimento?")
mes=input("mes de nascimento?")
ano=input("ano de nascimento?")
print("voce nasceu em",dia,"/",mes,"/",ano)
| 3.828125
| 4
|
tests/__init__.py
|
yangyangkiki/pytorch-lightning-bolts
| 2
|
12780900
|
import os

from pytorch_lightning import seed_everything

# Path constants resolved relative to this test package.
TEST_ROOT = os.path.realpath(os.path.dirname(__file__))   # tests/ directory
PACKAGE_ROOT = os.path.dirname(TEST_ROOT)                 # repository root
DATASETS_PATH = os.path.join(PACKAGE_ROOT, 'datasets')    # shared dataset dir

# generate a list of random seeds for each test
# NOTE(review): ROOT_SEED is defined but never passed to seed_everything()
# below — confirm whether reset_seed() was meant to use it.
ROOT_SEED = 1234


def reset_seed():
    """Re-seed all RNGs via pytorch-lightning's seed_everything()."""
    seed_everything()
| 2.171875
| 2
|
22_Honors_class/52_Implement_A_Producer_Consumer_Queue/Producer.py
|
vtkrishn/EPI
| 0
|
12780901
|
<gh_stars>0
from threading import Thread,Condition
import random
import time
#Producer thread
condition = Condition()
MAX_NUM = 10
class Producer(Thread):
def set(self,q, l):
self.queue = q
self.lock = l
#produce
def run(self):
nums = range(5) #creates [0,1,2,3,4]
while True:
#get the lock
#self.lock.acquire()
condition.acquire()
if len(self.queue) == MAX_NUM:
print 'queue size is full'
condition.wait()
print 'waiting for consumers to take from queue'
num = random.choice(nums) #pick random from choices
#modify the queue with the produced number
self.queue.append(num)
print 'Produced :: ', num
condition.notify()
#release the lock
#self.lock.release()
condition.release()
time.sleep(random.random())
| 3.5
| 4
|
psec/secrets/restore.py
|
davedittrich/python_secrets
| 10
|
12780902
|
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import tarfile
import textwrap
from cliff.command import Command
# TODO(dittrich): https://github.com/Mckinsey666/bullet/issues/2
# Workaround until bullet has Windows missing 'termios' fix.
try:
from bullet import Bullet
except ModuleNotFoundError:
pass
from sys import stdin
class SecretsRestore(Command):
    """Restore secrets and descriptions from a backup file."""

    logger = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        # Optional positional: name of the backup archive. If omitted, the
        # user is prompted interactively (when a TTY and bullet are present).
        parser = super().get_parser(prog_name)
        parser.formatter_class = argparse.RawDescriptionHelpFormatter
        parser.add_argument('backup', nargs='?', default=None)
        parser.epilog = textwrap.dedent("""
            TODO(dittrich): Finish documenting command.
            """)
        return parser

    def take_action(self, parsed_args):
        """Extract a chosen backup tarball into the current environment."""
        self.logger.debug('[*] restore secrets')
        secrets = self.app.secrets
        secrets.requires_environment()
        backups_dir = os.path.join(
            secrets.environment_path(),
            "backups")
        # Candidate archives: every .tgz in the environment's backups dir.
        backups = [fn for fn in
                   os.listdir(backups_dir)
                   if fn.endswith('.tgz')]
        if parsed_args.backup is not None:
            choice = parsed_args.backup
        elif not (stdin.isatty() and 'Bullet' in globals()):
            # Can't involve user in getting a choice.
            raise RuntimeError('[-] no backup specified for restore')
        else:
            # Give user a chance to choose.
            choices = ['<CANCEL>'] + sorted(backups)
            cli = Bullet(prompt="\nSelect a backup from which to restore:",
                         choices=choices,
                         indent=0,
                         align=2,
                         margin=1,
                         shift=0,
                         bullet="→",
                         pad_right=5)
            choice = cli.launch()
            if choice == "<CANCEL>":
                self.logger.info('cancelled restoring from backup')
                return
        backup_path = os.path.join(backups_dir, choice)
        with tarfile.open(backup_path, "r:gz") as tf:
            # Only select intended files. See warning re: Tarfile.extractall()
            # in https://docs.python.org/3/library/tarfile.html
            # NOTE(review): the '../' guard sits on the prefix loop but tests
            # ``fn``; it works, yet would read clearer as a separate condition
            # on the outer comprehension — confirm before refactoring.
            allowed_prefixes = ['secrets.json', 'secrets.d/']
            names = [fn for fn in tf.getnames()
                     if any(fn.startswith(prefix)
                            for prefix in allowed_prefixes
                            if '../' not in fn)
                     ]
            env_path = secrets.environment_path()
            for name in names:
                tf.extract(name, path=env_path)
        self.logger.info('[+] restored backup %s to %s', backup_path, env_path)
# vim: set fileencoding=utf-8 ts=4 sw=4 tw=0 et :
| 2.234375
| 2
|
plotcollection.py
|
camminady/camminapy
| 0
|
12780903
|
<reponame>camminady/camminapy<gh_stars>0
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import RegularGridInterpolator, interp1d
import kitcolors as kit
from scipy.interpolate import interp1d
from scipy.interpolate import RegularGridInterpolator
from .getvalues import getcircles, getcuts
import os
from scipy.special import sph_harm as Y
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
import kitcolors as kit
from camminapy import *
from matplotlib.colors import LinearSegmentedColormap
n_bin = 101
def plotls(fig, ax, data, title):
plt.style.use("kitish")
ny, nx = data.shape
x = np.linspace(-1.5, 1.5, nx + 1)
y = np.linspace(-1.5, 1.5, ny + 1)
p = ax.pcolormesh(x, y, data, cmap="plasma", vmin=0, vmax=0.4, rasterized=True)
ax.set_xticks([-1.5, 1.5])
ax.set_xticklabels([r"$-1.5$", r"$1.5$"])
ax.set_xlabel(r"$x$ [cm]", labelpad=-5)
ax.set_yticks([-1.5, 1.5])
ax.set_yticklabels([r"$-1.5$", r"$1.5$"])
ax.set_ylabel(r"$y$ [cm]", labelpad=-15)
# cbaxes = fig.add_axes([1.05, 0.0, 0.05, 0.5])
p.cmap.set_over("gray")
clb = fig.colorbar(
p,
orientation="vertical",
ax=ax,
ticks=[0, 0.2, 0.4],
extend="max",
extendfrac=0.05,
shrink=0.55,
pad=-0.04,
aspect=10,
)
clb.ax.set_title(r"$\phi(x,y)$", horizontalalignment="center")
ax.set_xlim([-1.5, 1.5])
ax.set_ylim([-1.5, 1.5])
ax.set_aspect("equal", "box")
ax.margins(x=0.0, y=0)
ax.set_title(title)
plt.setp(ax.spines.values(), linewidth=0)
def plotlslarge(fig, ax, data, title):
plt.style.use("kitish")
ny, nx = data.shape
x = np.linspace(-1.5, 1.5, nx + 1)
y = np.linspace(-1.5, 1.5, ny + 1)
p = ax.pcolormesh(x, y, data, cmap="plasma", vmin=0, vmax=0.4, rasterized=True)
ax.set_xticks([]) # [-1.5,1.5])
# ax.set_xticklabels([r"$-1.5$", r"$1.5$"])
# ax.set_xlabel(r"$x$ [cm]",labelpad = -5)
ax.set_yticks([]) # [-1.5,1.5])
# ax.set_yticklabels([r"$-1.5$", r"$1.5$"])
# ax.set_ylabel(r"$y$ [cm]",labelpad = -15)
# cbaxes = fig.add_axes([1.05, 0.0, 0.05, 0.5])
p.cmap.set_over("gray")
# clb = fig.colorbar(p,orientation = "vertical",ax=ax,ticks = [ 0,0.2,0.4],
# extend='max',extendfrac=0.05,shrink = 0.55,pad = -0.04,aspect = 10)
# clb.ax.set_title(r"$\phi(x,y)$",horizontalalignment="center")
ax.set_xlim([-1.5, 1.5])
ax.set_ylim([-1.5, 1.5])
ax.set_aspect("equal", "box")
ax.margins(x=0.0, y=0)
ax.set_title(title, fontsize=22, pad=+5)
plt.setp(ax.spines.values(), linewidth=0)
def plotlslargedelta(fig, ax, data, title):
plt.style.use("kitish")
ny, nx = data.shape
x = np.linspace(-1.5, 1.5, nx + 1)
y = np.linspace(-1.5, 1.5, ny + 1)
cmap = mycmapdiv(kit.cyan, "white", kit.purple, nbins=201)
p = ax.pcolormesh(x, y, data, cmap=cmap, vmin=-0.2, vmax=0.2, rasterized=True)
ax.set_xticks([]) # [-1.5,1.5])
# ax.set_xticklabels([r"$-1.5$", r"$1.5$"])
# ax.set_xlabel(r"$x$ [cm]",labelpad = -5)
ax.set_yticks([]) # [-1.5,1.5])
# ax.set_yticklabels([r"$-1.5$", r"$1.5$"])
# ax.set_ylabel(r"$y$ [cm]",labelpad = -15)
# cbaxes = fig.add_axes([1.05, 0.0, 0.05, 0.5])
# p.cmap.set_over("gray")
clb = fig.colorbar(
p,
orientation="horizontal",
ax=ax,
ticks=[-0.2, 0, 0.2],
extendfrac=0.05,
shrink=0.55,
pad=-0.04,
aspect=10,
)
# clb.ax.set_title(r"$\phi(x,y)$",horizontalalignment="center")
ax.set_xlim([-1.5, 1.5])
ax.set_ylim([-1.5, 1.5])
ax.set_aspect("equal", "box")
ax.margins(x=0.0, y=0)
ax.set_title(title, fontsize=22, pad=+5)
plt.setp(ax.spines.values(), linewidth=0)
def plotcuts(fig, ax, data, n, ls):
hori, verti, dia = getcuts(data, n)
x = np.linspace(-1.4, 1.4, n)
plt.style.use("kitish")
ax.plot(x, hori, lw=1.5, linestyle="-", label="horizontal", color=kit.blue)
ax.plot(x, verti, linestyle="-", lw=1.5, label="vertical", color=kit.purple)
ax.plot(
np.sqrt(2) * x, dia, lw=1.5, linestyle="-", label="diagonal", color=kit.orange
)
ax.plot(ls[:, 0], ls[:, 1], lw=1.5, label="reference", color=kit.black)
ax.legend(loc="lower center", ncol=2, fontsize="x-small")
ax.set_xlim([-1.5, 1.5])
ax.set_ylim([0, 0.6])
ax.set_xlabel(r"$x$, $y$, $r$ [cm]")
ax.set_ylabel("$\phi$")
def plotcutslarge(fig, ax, data, n, ls, title):
hori, verti, dia = getcuts(data, n)
x = np.linspace(-1.4, 1.4, n)
plt.style.use("kitish")
ax.plot(x, hori, lw=1.5, linestyle="-", label="horizontal", color=kit.blue)
ax.plot(x, verti, linestyle="-", lw=1.5, label="vertical", color=kit.purple)
ax.plot(
np.sqrt(2) * x, dia, lw=1.5, linestyle="-", label="diagonal", color=kit.orange
)
ax.plot(ls[:, 0], ls[:, 1], lw=1.5, label="reference", color=kit.black)
ax.legend(loc="lower left",ncol = 1,fontsize="large")
ax.set_xlim([0, 1.2])
ax.set_ylim([0, 0.5])
ax.set_xlabel(r"Distance from the origin [cm]", fontsize=15)
# ax.set_xlabel(r"$x$, $y$, $r$ [cm]",fontsize=15)
ax.set_ylabel("$\phi$", fontsize=15)
ax.set_title(title, fontsize=22, pad=+5)
def plotcircles(fig, ax, data, n, ls,title):
radii = [1.0,0.9, 0.6, 0.2]
sols = getcircles(data, n, radii)
interpolant = interp1d(ls[:, 0], ls[:, 1])
x = np.linspace(0, 2 * np.pi, n + 1)[:-1]
plt.style.use("kitish")
colors = [kit.blue, kit.orange, kit.purple, kit.green]
for sol, rad, color in zip(sols, radii, colors):
ax.plot(x, sol, lw=1.5, label="$r={}$".format(rad), color=color)
refri = interpolant(rad)
ax.axhline(refri, zorder=+100, lw=0.75, color=kit.black)
ax.text(
2 * np.pi + 0.05,
refri,
"$r={}$".format(rad),
fontsize=8,
verticalalignment="center",
)
ax.legend(loc="lower center", ncol=2, fontsize="large",framealpha=0.5)
ax.set_xlim([0, 2 * np.pi])
ax.set_ylim([0, 0.5])
ax.set_xlabel(r"Cardinal direction",fontsize = 15)
ax.set_ylabel("$\phi$",fontsize = 15)
ax.set_xticks([0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi])
ax.set_xticklabels(["N", "E", "S", "W", "N"])
ax.set_title(title, fontsize=22, pad=+5)
def plotcblarge(fig, ax, data, title):
plt.style.use("kitish")
ny, nx = data.shape
x = np.linspace(-1.5, 1.5, nx + 1)
y = np.linspace(-1.5, 1.5, ny + 1)
data = np.log10(data + 1e-8)
p = ax.pcolormesh(x, y, data, cmap="plasma", vmin=-7, vmax=0, rasterized=True)
xc = (x[1:] + x[:-1]) / 2
yc = (y[1:] + y[:-1]) / 2
lines = [-5, -4, -3]
CS = ax.contour(
xc,
yc,
data,
lines,
colors=["white", "silver", "black"],
linestyles="-",
linewidths=1.0,
)
manual_locations = [(-1.3, +0.5), (-1.25, +1.2), (-0.1, 0.1)]
ax.clabel(CS, inline=1, fontsize=8, fmt="%d", manual=manual_locations)
ax.set_xticks([]) # [-1.5,1.5])
# ax.set_xticklabels([r"$-1.5$", r"$1.5$"])
# ax.set_xlabel(r"$x$ [cm]",labelpad = -5)
ax.set_yticks([]) # [-1.5,1.5])
# ax.set_yticklabels([r"$-1.5$", r"$1.5$"])
# ax.set_ylabel(r"$y$ [cm]",labelpad = -15)
# cbaxes = fig.add_axes([1.05, 0.0, 0.05, 0.5])
# p.cmap.set_over("gray")
# clb = fig.colorbar(p,orientation = "vertical",ax=ax,ticks = [ 0,0.2,0.4],
# extend='max',extendfrac=0.05,shrink = 0.55,pad = -0.04,aspect = 10)
# clb.ax.set_title(r"$\phi(x,y)$",horizontalalignment="center")
ax.set_xlim([-1.5, 1.5])
ax.set_ylim([-1.5, 1.5])
ax.set_aspect("equal", "box")
ax.margins(x=0.0, y=0)
ax.set_title(title, fontsize=18, pad=+5)
plt.setp(ax.spines.values(), linewidth=0)
clb = fig.colorbar(
p,
orientation="horizontal",
ax=ax,
ticks=[-7, -6, -5, -4, -3, -2, -1, 0],
shrink=0.65,
pad=-0.022,
aspect=30,
)
clb.ax.set_title(
r"$\log_{10} \, \phi(x,y)$",
horizontalalignment="center",
verticalalignment="center",
)
# fig.colorbar(p, orientation="horizontal", pad=0.01,shrink = 0.6,)
def plotall(data, prefix, title, testcaseid,
            ls_path="/home/qd4314/Software/camminapy/camminapy/exactLineSource.txt"):
    """Render the standard figure set for a test case and save cropped PDFs.

    :param data: 2-D scalar flux field phi(x, y).
    :param prefix: filename prefix for the generated PDF files.
    :param title: figure title.
    :param testcaseid: 1 -> line-source case (heatmap, cuts, circle plots);
        anything else -> checkerboard case (log heatmap only).
    :param ls_path: CSV file holding the exact line-source reference
        solution. Parameterized (backward-compatibly) so the previously
        hard-coded absolute user path can be overridden.
    """
    ls = np.loadtxt(ls_path, delimiter=",")
    # Extend the reference curve so it spans the whole plotting domain.
    ls[0, 0] = -2.5
    ls[-1, 0] = 2.5
    # NOTE(security): os.system interpolates ``prefix`` into a shell command;
    # only call with trusted filename prefixes.
    if testcaseid == 1:  # Linesource
        plt.style.use("kitish")
        fig, ax = plt.subplots(1, 1, figsize=(3.0, 4.0))
        plotlslarge(fig, ax, data, title)
        fn = prefix + "imagesclarge.pdf"
        plt.savefig(fn, rasterized=True)
        os.system("pdfcrop --margins '1 1 1 40' {} {}".format(fn, fn))

        plt.style.use("kitish")
        fig, ax = plt.subplots(1, 1, figsize=(3.0, 4.0))
        plotcutslarge(fig, ax, data, 1000, ls, title)
        fn = prefix + "cuts.pdf"
        plt.savefig(fn, rasterized=True)
        os.system("pdfcrop --margins '1 1 1 20' {} {}".format(fn, fn))

        plt.style.use("kitish")
        fig, ax = plt.subplots(1, 1, figsize=(3.0, 4.0))
        plotcircles(fig, ax, data, 1000, ls, title)
        fn = prefix + "circles.pdf"
        plt.savefig(fn, rasterized=True)
        os.system("pdfcrop --margins '1 1 1 20' {} {}".format(fn, fn))
    else:  # Checkerboard
        plt.style.use("kitish")
        fig, ax = plt.subplots(1, 1, figsize=(3.0, 3.0))
        plotcblarge(fig, ax, data, title)
        fn = prefix + "imagesclarge.pdf"
        plt.savefig(fn, rasterized=True)
        os.system("pdfcrop --margins '1 1 1 20' {} {}".format(fn, fn))
| 2.3125
| 2
|
run-addon.py
|
chinedufn/landon
| 117
|
12780904
|
<reponame>chinedufn/landon
# A script to temporarily install and run the addon. Useful for running
# blender-mesh-to-json via blender CLI where you might be in a
# continuous integration environment that doesn't have the addon
# installed
#
# blender file.blend --python $(mesh2json)
# -> becomes ->
# blender file.blend --python /path/to/run-addon
import bpy
import os

# Get the absolute path to the addon.
# NOTE(review): ``dir`` shadows the builtin of the same name; harmless in
# this short script but worth renaming.
dir = os.path.dirname(__file__)
addonFilePath = dir + '/blender-mesh-to-json.py'

# Install and enable the addon temporarily (since we aren't saving our user
# preferences). We just want access to the addon during this blender session.
bpy.ops.preferences.addon_install(filepath=addonFilePath)
bpy.ops.preferences.addon_enable(module='blender-mesh-to-json')

# Run our addon (the operator it registers).
bpy.ops.import_export.mesh2json()
| 1.773438
| 2
|
VirusVisuals.py
|
keklarup/VirusSpread
| 0
|
12780905
|
# -*- coding: utf-8 -*-
"""
ABM virus visuals
Created on Thu Apr 9 10:16:47 2020
@author: Kyle
"""
import matplotlib.pyplot as plt
from matplotlib import colors
import numpy as np
import os
import tempfile
from datetime import datetime
import imageio
class visuals():
def agentPlot(self, storageArrayList, cmap=None, save=False, saveFolder=None,
display=False, i=0, fig=None, axs=None):
"""Generate a plot of the environment grid.
Expects ABM to have already been run and status of every grid point
(which will encode status of every agent) to be saved in an array.
Each time step is also in array, and which time step to visualize
is set by i.
cmap needs to be defined to provide color coding for agent status."""
if cmap ==None:
cmap = colors.ListedColormap(['white','lightblue','lightgreen',
[elm/250 for elm in [72, 169, 171]], 'orange','red', 'black'])
storedArray=storageArrayList[i]
if axs == None:
fig, (ax1) = plt.subplots(1, figsize=[8,8])
else:
ax1=axs[0]
#plt.figure(figsize=[8,8])
ax1.pcolormesh(storedArray, cmap=cmap, vmin=-1,vmax=5)
# #plt.colorbar()
ax1.axis('off')
plt.tight_layout()
if save==True:
plt.savefig(os.path.join(os.getcwd(), saveFolder, 'step_%s.png'%(i)))
if display == True:
plt.show()
#plt.close()
#return fig
def agentStatusPlot(self, agent_status, steps, cmap=None, hospitalThreshold = None,
save=False, saveFolder=None,
display=False,
fig = None,
axs=None):
"""Generates various high level visuals of the progression of the
disease through the population. Expects """
agent_status = agent_status[['type']]; #hotfix for updated code elsewhere
if cmap ==None:
cmap = colors.ListedColormap(['white','lightblue','lightgreen',
[elm/250 for elm in [72, 169, 171]], 'orange','red', 'black'])
i = agent_status.index[-1][0]+1
#i=steps
healthy=np.count_nonzero(agent_status.unstack().to_numpy() == 0,axis=1)[:i]
recovered=np.count_nonzero(agent_status.unstack().to_numpy() == 1,axis=1)[:i]
vaccinated=np.count_nonzero(agent_status.unstack().to_numpy() == 2,axis=1)[:i]
walkingSick=np.count_nonzero(agent_status.unstack().to_numpy() == 3,axis=1)[:i]
hospital=np.count_nonzero(agent_status.unstack().to_numpy() == 4,axis=1)[:i]
dead=np.count_nonzero(agent_status.unstack().to_numpy() == 5,axis=1)[:i]
if axs == None:
fig, (ax1, ax2, ax3) = plt.subplots(3, sharex=True, figsize=[12,8])
else:
ax1=axs[0]; ax2=axs[1]; ax3=axs[2]
ax1.bar(range(len(healthy)), dead, width=1.0, color='black', label='dead')
ax1.bar(range(len(healthy)), hospital, width=1.0,
bottom=dead,
color='red', label='hospitalized')
ax1.bar(range(len(healthy)), walkingSick, width=1.0,
bottom=dead+hospital,
color='orange', label='walking sick')
ax1.bar(range(len(healthy)), vaccinated, width=1.0,
bottom=dead+hospital+walkingSick,
color=[elm/250 for elm in [72, 169, 171]], label='vaccinated')
ax1.bar(range(len(healthy)), healthy, width=1.0,
bottom=dead+hospital+walkingSick+vaccinated,
color='lightblue', label='healthy')
ax1.bar(range(len(healthy)), recovered, width=1.0,
bottom=dead+hospital+walkingSick+vaccinated+healthy,
color='green', label='recovered')
ax1.set_ylabel('Population', size=12);
ax1.set_title('Effect of Virus on Population Over Time',size=20)
ax2.plot(walkingSick, color='orange', label='walking sick')
ax2.plot(hospital, color='red', label='hospitalized')
if hospitalThreshold:
print(hospitalThreshold)
ax2.axhline(y=hospitalThreshold,
linestyle='--',color='gray', label='capacity')
ax2.set_ylabel('Number of sick');
ax2.set_title('Number of Sick Over Time', size=20)
ax3.plot(dead, color='black', label='dead');
ax3.set_xlabel('Time Steps',size=18)
ax3.set_ylabel('Number of deaad');
ax3.set_title('Number of Dead Over Time', size=20)
ax1.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax2.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax3.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax1.axvline(x=steps, color='black',alpha=.25,linewidth=7)
ax2.axvline(x=steps, color='black',alpha=.25,linewidth=7)
ax3.axvline(x=steps, color='black',alpha=.25,linewidth=7)
#plt.xlim([0,steps])
plt.xlim([0,i])
#plt.tight_layout();
if save==True:
plt.savefig(os.path.join(os.getcwd(), saveFolder, 'step_%s.png'%(steps)))
if display==True:
plt.show()
#plt.close()
#return fig
def combinedVisuals(self, SAL, agent_status, cmap=None, i=0,
hospitalThreshold = None,
modelName='Model visualization',
save=False, saveFolder=None, display=False):
"""Combines a few different visuals into a single large image."""
fig = plt.figure(figsize=[16,8])
gs = fig.add_gridspec(3, 5)
ax4 = fig.add_subplot(gs[0:3, 0:3])
ax3 = fig.add_subplot(gs[2, 3:])
ax1 = fig.add_subplot(gs[0, 3:], sharex=ax3)
ax2 = fig.add_subplot(gs[1, 3:], sharex=ax3)
self.agentPlot(SAL, i=i, fig=fig, axs=[ax4])
self.agentStatusPlot(agent_status, i, fig=fig, axs=(ax1, ax2, ax3), cmap=cmap, hospitalThreshold=hospitalThreshold)
plt.suptitle('%s\nTime Step %s'%(modelName, i), size=24)
fig.tight_layout(rect=[0, 0.03, 1, 0.9])
if save==True:
plt.savefig(os.path.join(os.getcwd(), saveFolder, 'step_%s.png'%(i)))
if display == True:
plt.show()
#plt.close()
#return fig
def generateGIF(self, SAL, agent_status, NumSteps, visualFunction='all', cmap=None, stepSkip=1,
saveFolder=os.getcwd(),modelName='ABM Simulation',
GIFname='ABM_sim', datestamp=True, fps = 10,
hospitalThreshold = None):
if not cmap:
cmap = colors.ListedColormap(['white','lightblue','lightgreen',
[elm/250 for elm in [72, 169, 171]], 'orange','red', 'black'])
print("Starting to generate frames for GIF...")
with tempfile.TemporaryDirectory(dir=os.getcwd()) as f:
for i in range(0, NumSteps):
if i%stepSkip == 0: #saving only every stepSkip frame for the GIF
if visualFunction == 'all' and i != 0:
self.combinedVisuals(SAL, agent_status, i = i,
cmap=None,
hospitalThreshold = None,#hospitalThreshold,
modelName=modelName.strip()+' ',
save=True, saveFolder=f, display=False)
elif visualFunction == 'animation':
self.agentPlot(SAL, cmap=cmap, save=True, saveFolder=f, display=False, i = i)
elif visualFunction == 'graphs':
self.agentStatusPlot(agent_status, i, cmap=cmap,
hospitalThreshold=hospitalThreshold,
save=True, saveFolder=f, display=False)
plt.close()
print("frames generated. Making GIF...")
images = []
fileNums = [int(elm.split('_')[1].split('.png')[0]) for elm in os.listdir(f) if '.png' in elm]
fileNums = sorted(fileNums)
for num in fileNums:
file_name = 'step_%s.png'%(num)
file_path = os.path.join(f, file_name)
images.append(imageio.imread(file_path))
imageio.mimsave(os.path.join(saveFolder,'%s.gif'%(GIFname)),images,fps=fps)
print("GIF complete!")
| 2.421875
| 2
|
init_data/permission_data/group.py
|
ModifiedClass/flaskapipermission
| 0
|
12780906
|
<gh_stars>0
# -*- coding:utf-8 -*-
# 权限管理数据
# database:mysql base:fua user:root pwd:<PASSWORD>
from app.block.permission.model import Group
# Build the initial permission groups.
def initGroup():
    """Create the two built-in permission groups.

    Returns a dict with an 'admin' group (all permissions) and a
    'guest' group (basic permissions).
    """
    admin = Group()
    admin.name = '管理员'
    admin.remark = '所有权限'

    guest = Group()
    guest.name = '注册用户'
    guest.remark = '基本权限'

    return {'admin': admin, 'guest': guest}
| 1.945313
| 2
|
appengine_config.py
|
sreejithb/cows_and_bulls
| 0
|
12780907
|
# App Engine (first-generation Python runtime) startup hook: make the
# vendored third-party packages under lib/ importable before handlers run.
from google.appengine.ext import vendor
vendor.add('lib')
vendor.add('lib/nltk')  # NLTK source tree vendored next to its egg-info
vendor.add('lib/nltk-3.2.1.egg-info')
| 1.414063
| 1
|
Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/tests/camera_test.py
|
JE-Chen/je_old_repo
| 1
|
12780908
|
<filename>Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/tests/camera_test.py
import unittest
import math
import pygame
from pygame.compat import long_


class CameraModuleTest(unittest.TestCase):
    # Placeholder suite: pygame.camera has no automated tests implemented yet.
    pass
| 1.414063
| 1
|
src/wifi_sensor/wifi_sensor.py
|
azz2k/wifi_sensor
| 3
|
12780909
|
<filename>src/wifi_sensor/wifi_sensor.py<gh_stars>1-10
#!/usr/bin/env python
import rospy
import time
import numpy as np
from msg import *
import thread
import subprocess
import struct
class WifiSensor():
    """ROS node that sniffs wifi frames with tcpdump and publishes per-MAC RSSI.

    A background thread parses tcpdump output into ``self.data``
    (source MAC -> list of RSSI samples); the constructor's main loop
    drains that dict at a fixed rate and publishes an RssiMulti message.
    Python 2 code (uses the ``thread`` module).
    """

    def __init__(self):
        # setup rospy and get parameters
        rospy.init_node("wifisensor")
        self.adapter = rospy.get_param("~adapter", "wlan0")
        self.channel = rospy.get_param("~channel", 9)
        self.rate = rospy.get_param("~rate", 20)
        # put the wifi adapter into monitor mode on the requested channel
        subprocess.call(["ifconfig", self.adapter, "down"])
        subprocess.call(["iwconfig", self.adapter, "mode", "monitor"])
        subprocess.call(["ifconfig", self.adapter, "up"])
        subprocess.call(["iwconfig", self.adapter, "channel", str(self.channel)])
        # find your own mac addr from the ifconfig "HWaddr" field
        output = subprocess.check_output(["ifconfig", "-a", self.adapter])
        self.my_addr = "-".join([x.lower() for x in output.split(" ")[output.split(" ").index("HWaddr")+1].split("-")[:6]])
        # shared between the sniffer thread and this loop; guard with mutex
        self.data = {}
        self.dataMutex = thread.allocate_lock()
        thread.start_new_thread(self.mesRaw, ())
        # setup main loop
        self.pub = rospy.Publisher("rssi", RssiMulti, queue_size=10)
        r = rospy.Rate(self.rate)
        # main loop
        while not rospy.is_shutdown():
            # atomically take the accumulated samples and reset the buffer
            # (takes ownership of the dict instead of copying it)
            with self.dataMutex:
                data = self.data
                self.data = {}
            msg = RssiMulti()
            msg.header.stamp = rospy.Time.now()
            for addr in data.keys():
                submsg = Rssi()
                submsg.header.stamp = rospy.Time.now()
                submsg.my_mac_addr = self.my_addr
                submsg.their_mac_addr = addr
                submsg.rssi = data[addr]
                msg.data.append(submsg)
            self.pub.publish(msg)
            r.sleep()

    def mesRaw(self):
        """Sniffer thread: parse tcpdump -e lines into ``self.data``."""
        p = subprocess.Popen(("tcpdump", "-l", "-e", "-i", self.adapter), stdout=subprocess.PIPE)
        while not rospy.is_shutdown():
            try:
                for line in iter(p.stdout.readline, ""):
                    chunks = line.split(" ")
                    addr = None
                    rssi = None
                    # source MAC: the chunk shaped like "SA:xx:xx:xx:xx:xx:xx"
                    candidates = [[chunk, len(chunk.split(":"))] for chunk in chunks if ":" in chunk]
                    candidates = [candidate[0] for candidate in candidates if candidate[1] == 7 and candidate[0][0:3] == "SA:"]
                    if len(candidates) == 1:
                        addr = candidates[0][3:]
                    # rssi: the value just before the "signal" token, e.g. "-42dB"
                    if "signal" in chunks:
                        rssi = int(chunks[chunks.index("signal")-1][:-2])
                    # store only plausible (negative) readings
                    if addr is not None and rssi is not None and rssi < 0:
                        with self.dataMutex:
                            if addr in self.data.keys():
                                self.data[addr].append(rssi)
                            else:
                                self.data[addr] = [rssi]
            except Exception:
                # BUGFIX: was a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt; best-effort parsing should
                # only ignore ordinary runtime/parse errors.
                pass
| 2.59375
| 3
|
0028-implement-strstr/solution.py
|
radelman/leetcode
| 0
|
12780910
|
<filename>0028-implement-strstr/solution.py
class Solution:
    def strStr(self, haystack: str, needle: str) -> int:
        """Return the index of the first occurrence of *needle* in *haystack*,
        or -1 if *needle* is not part of *haystack*.

        An empty needle matches at index 0 (same as the original code path).

        The original hand-rolled rolling-character-sum scan (prefix sums of
        ordinals plus a slice comparison on sum collisions) is replaced with
        ``str.find``, which implements exactly this contract in C — simpler
        and faster, with identical results for all string inputs.
        """
        return haystack.find(needle)
def main() -> None:
    """Run Solution.strStr over a couple of sample inputs, printing each result."""
    cases = [
        ("hello", "ll"),
        ("aaaaa", "bba"),
    ]
    solver = Solution()
    for haystack, needle in cases:
        print(solver.strStr(haystack, needle))


if __name__ == '__main__':
    main()
| 3.3125
| 3
|
fastapi/test/test_app.py
|
oslokommune/lambda-boilerplate
| 0
|
12780911
|
from aws_xray_sdk.core import xray_recorder
import app
xray_recorder.begin_segment("Test")
def test_read_root():
    """Smoke test: the root endpoint handler returns the static greeting."""
    response = app.read_root()
    assert response == {"hello": "world"}
| 2.359375
| 2
|
src/tools/python/payment_log_reporter.py
|
Justintime50/easypost-tools
| 1
|
12780912
|
<gh_stars>1-10
# Payment Log reporter
# Requests payment_log reports and optionally downloads ZIP files
# or the combined CSV of all data.
#
# Usage:
# python3 paymentlog_reporter.py
#
# 0.0 Initial version 05 Mar 2020 <EMAIL>
#
# Note: this script makes raw endpoint queries instead of using the easypost
# API Python modules to limit the amount of dependencies that are required
#############################################################################
# Copyright (C) 2020 by Simpler Postage, Inc. (dba EasyPost) <<EMAIL>>
#
# Permission to use, copy, modify, and/or distribute this software for
# any purpose with or without fee is hereby granted.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
# AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
# PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#############################################################################
import calendar
import csv
import json
import zipfile
from base64 import b64encode
from datetime import datetime
from functools import partial
from http.client import HTTPSConnection
from io import BytesIO as BIO
from io import StringIO as SIO
from pathlib import Path
from time import sleep
from urllib.parse import urlparse
from urllib.request import urlopen
# Attribution: [roehnan](https://github.com/roehnan)
# user-settable params
API_KEY = "" # "EZTK949...."
# user account was created "2019-08-22T14:18:02Z"
START_YR = 2019
START_MO = 8
DOWNLOAD_DIR = '~/Documents'
SAVE_REPORT_ZIP = False
GENERATE_COMBINED_CSV = True
# end user-settable params
# constants
B64USERPASSWORD = b64encode(bytes(API_KEY + ":", encoding='utf-8')).decode("utf-8")
USER_AGENT = 'python3 raw_api test-paymentlogreports'
URLBASE = "/v2/"
# end constants
# capture our current date so we know when to stop
NOW = datetime.now()
def getURL(url, json_dict_data=None):
    """GET ``/v2/<url>`` from the EasyPost API and return the parsed JSON.

    inspired by https://stackoverflow.com/a/7000784

    :param url: endpoint path relative to URLBASE, e.g. ``'reports/payment_log'``.
    :param json_dict_data: optional request body; a dict is serialized to JSON.
    :returns: decoded JSON response as a dict, or ``{}`` on any failure
        (best-effort behaviour preserved from the original).
    """
    # create our connection
    conn = HTTPSConnection("api.easypost.com")
    # build our authentication header
    headers = {
        'Authorization': 'Basic %s' % B64USERPASSWORD,
        'User-Agent': USER_AGENT,
    }
    if json_dict_data:
        headers.update(
            {
                'Content-type': 'application/json',
                'Accept': 'text/plain',
            }
        )
    # if data passed in as dict, convert to str
    if isinstance(json_dict_data, dict):
        json_dict_data = json.dumps(json_dict_data)
    try:
        conn.request('GET', f'{URLBASE}{url}', json_dict_data, headers=headers)
        res_str = conn.getresponse().read()
        data = json.loads(res_str)
    except Exception:
        # callers treat an empty dict as "request failed"
        data = {}
    finally:
        conn.close()  # fix: the original leaked the HTTPS connection
    return data
def _post_putURL_json(CMD, url, json_dict_data):
    """Send a JSON body to ``/v2/<url>`` with HTTP method *CMD* ('POST'/'PUT').

    inspired by https://stackoverflow.com/a/7000784

    :param CMD: HTTP method name; bound via functools.partial as postURL/putURL.
    :param url: endpoint path relative to URLBASE.
    :param json_dict_data: request body; a dict is serialized to JSON.
    :returns: decoded JSON response dict, or ``{}`` on any failure.
    """
    # create our connection
    conn = HTTPSConnection("api.easypost.com")
    # build our authentication header
    headers = {
        'Authorization': 'Basic %s' % B64USERPASSWORD,
        'User-Agent': USER_AGENT,
        'Content-type': 'application/json',
        'Accept': 'text/plain',
    }
    # if data passed in as dict, convert to str
    if isinstance(json_dict_data, dict):
        json_dict_data = json.dumps(json_dict_data)
    try:
        conn.request(CMD, f'{URLBASE}{url}', json_dict_data, headers=headers)
        res_str = conn.getresponse().read()
        data = json.loads(res_str)
    except Exception:
        # callers treat an empty dict as "request failed"
        data = {}
    finally:
        conn.close()  # fix: the original leaked the HTTPS connection
    return data
# create our 'POST' command obj, so that we don't have to pass in the CMD
# this works because the post_putURL method only switches the "method";
# all other code is the same
# Partial application freezes the HTTP verb; resulting call signature is
# (url, json_dict_data).
# postURL = partial(_post_putURL, 'POST')
postURL = partial(_post_putURL_json, 'POST')

# our "PUT" command obj
# putURL = partial(_post_putURL, 'PUT')
putURL = partial(_post_putURL_json, 'PUT')
def saveReport(url, start_date, end_date):
    """Download a report ZIP from *url* into DOWNLOAD_DIR.

    The output is named ``<start>_<end>_<original stem>.zip`` with the
    dashes stripped from both dates.

    NOTE(review): uses plain urlopen — assumes *url* is a trusted,
    pre-signed EasyPost download link.
    """
    parsed_url = urlparse(url)
    url_path = parsed_url.path
    fn_stem = Path(url_path).stem
    out_path = Path(
        DOWNLOAD_DIR,
        '_'.join((start_date.replace('-', ''), end_date.replace('-', ''), fn_stem)) + '.zip',
    )
    with urlopen(url) as F, out_path.expanduser().open('wb') as out_file:
        print(f"Saving '{url_path}' to '{out_path!s}'...")
        _ = out_file.write(F.read())
def getReportRows(url):
    """Download a report ZIP from *url* and return the rows of its first CSV.

    The archive is held entirely in memory; the first member is assumed to
    be a UTF-8 CSV. Returns a list of rows (lists of strings), header
    included.
    """
    with urlopen(url) as F:
        zip_bytes = F.read()
    bio = BIO(zip_bytes)
    _ = bio.seek(0)
    with zipfile.ZipFile(bio) as z:
        # reports are single-file archives, so only the first member is read
        data = z.read(z.namelist()[0])
    sio = SIO(data.decode('utf-8'))
    _ = sio.seek(0)
    rows = [r for r in csv.reader(sio)]
    return rows
def getReportURLS(download=False, buildmasterreport=False):
    """Page through all payment_log reports and return their metadata.

    :param download: accepted for interface compatibility — currently unused.
    :param buildmasterreport: accepted for interface compatibility — unused.
    :returns: list of ``[id, created_at, start_date, end_date, url]`` rows
        for every report on or after the configured start date, sorted by
        (start_date, created_at).
    """
    has_more = True
    params = {'start_date': f'{START_YR}-{START_MO:02}-01', 'page_size': 3}
    data = []
    while has_more:
        res = getURL('reports/payment_log', params)
        reports = res.get('reports', [])
        if not reports:
            # fix: a failed request ({}) or an empty page previously raised
            # KeyError / IndexError below; stop paging instead.
            break
        for r in reports:
            # filter results and ensure that we're only looking at
            # data that is on or after our requested start date
            # because the API can return data that is before our
            # requested date
            if r['start_date'] >= params['start_date']:
                data.append([r[k] for k in ('id', 'created_at', 'start_date', 'end_date', 'url')])
        # update parameters to get to the "next page" of data
        params['before_id'] = reports[-1]['id']
        has_more = res.get('has_more', False)
    # sort our reports by start_date, created_at
    data.sort(key=lambda x: (x[2], x[1]))
    return data
if __name__ == '__main__':
    # Request one payment_log report per month from the configured start
    # month up to the current month, then list / save / merge the results.
    yr = START_YR
    mo = START_MO
    while True:
        if mo > NOW.month and yr == NOW.year:
            break
        try:
            # request payment log report covering the whole month
            # NOTE(review): calendar.monthlen is undocumented;
            # calendar.monthrange(yr, mo)[1] is the public equivalent.
            rpt = postURL(
                'reports/payment_log',
                {
                    'start_date': f'{yr}-{mo:02}-01',
                    # fix: month was not zero-padded here, unlike start_date
                    'end_date': f'{yr}-{mo:02}-{calendar.monthlen(yr, mo)}',
                },
            )
            # if the request was successful, a dictionary will be returned with a URL
            # entry set to None (the report is being generated)
            if 'url' in rpt:
                # wait until the URL is populated
                while rpt['url'] is None:
                    # fix: the API key was being passed as the URL argument
                    # (getURL(API_KEY, ...)), so polling always failed
                    rpt = getURL(f'reports/{rpt["id"]}')
                    sleep(0.1)
        except Exception:
            pass
        # move to our next month
        mo += 1
        # if we've gone past December, go to January of the next year
        if mo > 12:
            yr += 1
            mo = 1

    URLS = getReportURLS(True)
    for row in URLS:
        print(" | ".join(row))
    print()

    # save individual report files
    if SAVE_REPORT_ZIP:
        for row in URLS:
            rid, created_at, start_date, end_date, url = row
            saveReport(url, start_date, end_date)

    if GENERATE_COMBINED_CSV:
        row_key = set()
        hdr = None
        # create our output file
        output_path = Path(
            DOWNLOAD_DIR,
            '_'.join(
                (
                    f'{START_YR}{START_MO:02}01',
                    f'{NOW.year}{NOW.month:02}{calendar.monthlen(NOW.year, NOW.month):02}',
                    'payment_log.csv',
                )
            ),
        )
        print(f"Saving combined CSV to '{output_path!s}'...")
        with output_path.expanduser().open('w', encoding='utf-8') as F:
            # create a CSV writer for our output file
            wtr = csv.writer(F)
            # iterate over the report URLs
            for row in URLS:
                rid, created_at, start_date, end_date, url = row
                # download the ZIP file and unpack it for the embedded csv
                # return the rows in the embedded CSV
                data = getReportRows(url)
                # separate the header from the remaining rows
                h, rpt_rows = data[0], data[1:]
                # store our header if we haven't outputted it yet
                if not hdr:
                    _ = wtr.writerow(h)
                    hdr = h
                # iterate the report rows
                for r in rpt_rows:
                    # convert the row to a tuple so it can be hashed (needed to evaluate against the set)
                    # we want to ensure that our resulting CSV doesn't contain duplicate data
                    key = tuple(r)
                    # if we haven't seen the row, store it
                    if key not in row_key:
                        row_key.add(key)
                        _ = wtr.writerow(r)
                    else:
                        print(f"Skipping '{r!s}'...")
| 1.570313
| 2
|
Chapter 03/ch3_1_29.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
| 0
|
12780913
|
<reponame>bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
# Greeting demo: print the lower-cased greeting unless it is already all
# lowercase.
greet = "Never Criticise"
if greet.islower():
    print("Already in lowercase")
else:
    # casefold() is an aggressive lower(); output: never criticise
    print(greet.casefold())
| 3.1875
| 3
|
master/Challenges-master/Challenges-master/level-1/swapCase/swap_case.py
|
AlexRogalskiy/DevArtifacts
| 4
|
12780914
|
import sys
def main(input_file):
    """Print the case-swapped version of every line in *input_file*."""
    with open(input_file, 'r') as data:
        for line in data:
            # fix: `print x` was Python-2-only syntax; the call form works
            # identically on Python 2 and 3 for a single argument.
            print(swap_case(line.strip()))
def swap_case(string):
    """Return *string* with the case of each cased character inverted."""
    # Lower-case characters are raised; everything else is lowered, which
    # leaves digits and punctuation untouched.
    return ''.join(
        ch.upper() if ch.islower() else ch.lower()
        for ch in string
    )
if __name__ == "__main__":
    try:
        main(sys.argv[1])
    except Exception as e:
        # fix: `print '...'` was Python-2-only syntax
        print('First argument must be a text file!\nError: {0}'.format(e))
| 3.8125
| 4
|
src/datasets/google_speech.py
|
shgoren/viewmaker
| 29
|
12780915
|
import os
import torch
import random
import librosa
import torchaudio
import numpy as np
from glob import glob
import nlpaug.flow as naf
import nlpaug.augmenter.audio as naa
import nlpaug.augmenter.spectrogram as nas
from torchvision.transforms import Normalize
from torch.utils.data import Dataset
from nlpaug.augmenter.audio import AudioAugmenter
from src.datasets.librispeech import WavformAugmentation, SpectrumAugmentation
from src.datasets.root_paths import DATA_ROOTS
GOOGLESPEECH_MEAN = [-46.847]
GOOGLESPEECH_STDEV = [19.151]
GOOGLESPEECH_LABELS = ['eight', 'right', 'happy', 'three', 'yes', 'up', 'no', 'stop', 'on', 'four', 'nine',
'zero', 'down', 'go', 'six', 'two', 'left', 'five', 'off', 'seven', 'one',
'cat', 'bird', 'marvin', 'wow', 'tree', 'dog', 'sheila', 'bed', 'house']
class GoogleSpeechCommands(Dataset):
    """Google Speech Commands keyword dataset yielding log mel-spectrograms.

    Each item is ``(index, spectrum, label)`` where ``spectrum`` is a
    normalized 1-channel log mel-spectrogram tensor and ``label`` indexes
    into GOOGLESPEECH_LABELS.
    """

    def __init__(
        self,
        root=DATA_ROOTS['google_speech'],
        train=True,
        spectral_transforms=False,
        wavform_transforms=False,
        max_length=150526,
        input_size=224,
        normalize_mean=GOOGLESPEECH_MEAN,
        normalize_stdev=GOOGLESPEECH_STDEV,
    ):
        super().__init__()
        # Augmentation is applied either on the raw waveform or on the
        # spectrogram, never both at once.
        assert not (spectral_transforms and wavform_transforms)
        if train:
            # Training split = official training + validation lists.
            train_paths = open(os.path.join(root, 'training_list.txt'), 'r').readlines()
            val_paths = open(os.path.join(root, 'validation_list.txt'), 'r').readlines()
            wav_paths = train_paths + val_paths
        else:
            test_paths = open(os.path.join(root, 'testing_list.txt'), 'r').readlines()
            wav_paths = test_paths
        wav_paths = [path.strip() for path in wav_paths]
        self.root = root
        self.num_labels = len(GOOGLESPEECH_LABELS)
        self.wav_paths = wav_paths
        self.spectral_transforms = spectral_transforms
        self.wavform_transforms = wavform_transforms
        self.max_length = max_length
        self.train = train
        self.input_size = input_size
        self.FILTER_SIZE = input_size
        self.normalize_mean = normalize_mean
        self.normalize_stdev = normalize_stdev

    def __getitem__(self, index):
        wav_name = self.wav_paths[index]
        # Paths look like '<label>/<file>.wav'; the directory is the class.
        label_name = wav_name.split('/')[0].lower()
        label = GOOGLESPEECH_LABELS.index(label_name)
        wav_path = os.path.join(self.root, wav_name)
        wavform, sample_rate = torchaudio.load(wav_path)
        wavform = wavform[0].numpy()

        if self.wavform_transforms:
            transforms = WavformAugmentation(sample_rate)
            wavform = transforms(wavform)

        # pad to 150k frames
        if len(wavform) > self.max_length:
            # randomly pick which side to chop off (fix if validation)
            # NOTE(review): when train is False this always keeps the leading
            # max_length samples — confirm that is the intended eval behaviour.
            flip = (bool(random.getrandbits(1)) if self.train else True)
            padded = (wavform[:self.max_length] if flip else
                      wavform[-self.max_length:])
        else:
            padded = np.zeros(self.max_length)
            padded[:len(wavform)] = wavform  # pad w/ silence

        # hop length per input_size so the time axis roughly matches input_size
        hop_length_dict = {224: 672, 112: 1344, 64: 2360, 32: 4800}
        spectrum = librosa.feature.melspectrogram(
            padded,
            sample_rate,
            hop_length=hop_length_dict[self.input_size],
            n_mels=self.input_size,
        )
        if self.spectral_transforms:  # apply time and frequency masks
            transforms = SpectrumAugmentation()
            spectrum = transforms(spectrum)

        # log mel-spectrogram
        spectrum = librosa.power_to_db(spectrum**2)
        spectrum = torch.from_numpy(spectrum).float()
        spectrum = spectrum.unsqueeze(0)  # add the channel dimension

        if self.spectral_transforms:  # apply noise on spectral
            noise_stdev = 0.25 * self.normalize_stdev[0]
            noise = torch.randn_like(spectrum) * noise_stdev
            spectrum = spectrum + noise

        normalize = Normalize(self.normalize_mean, self.normalize_stdev)
        spectrum = normalize(spectrum)
        return index, spectrum, int(label)

    def __len__(self):
        return len(self.wav_paths)
| 2.171875
| 2
|
ATTOM/initial_connection.py
|
taylor-curran/discover-realestate-data
| 0
|
12780916
|
import os
from dotenv import load_dotenv
import requests
import json
from xml.etree import ElementTree
# Load API Keys
load_dotenv()
ATTOM_API_KEY = os.getenv('ATTOM_API_KEY')

# ATTOM property-detail endpoint (note: plain HTTP, not HTTPS)
url = "http://api.gateway.attomdata.com/propertyapi/v1.0.0/property/detail?"

headers = {
    'accept': "application/json",
    'apikey': ATTOM_API_KEY
}

params = {
    'address1': '4529 Winona Court' ,
    'address2': 'Denver, CO'
}

# One-shot smoke test: fetch details for a hard-coded address and dump the JSON.
response = requests.request("GET", url, headers=headers, params=params)

print(response.json())
| 2.671875
| 3
|
setup_helpers.py
|
wildernesstechie/blackhole
| 0
|
12780917
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Kura
# Copyright (C) 2009-2015 <NAME>
#
# This file is part of setup_helpers.py
#
# setup_helpers.py is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, version 3 of the License.
#
# setup_helpers.py is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with setup_helpers.py. If not, see <http://www.gnu.org/licenses/>.
"""setup.py helper functions."""
import io
import os
import sys
from setuptools.command.test import test as TestCommand
__all__ = ("get_version", "include_file", "require_python", "PyTest")
class PyTest(TestCommand):
    """setuptools ``test`` command that delegates to pytest with pylama linting."""

    def finalize_options(self):
        """Build options."""
        TestCommand.finalize_options(self)
        # Lint+test both the package and its test suite, quietly.
        self.test_args = ["--pylama", "-q", "./blackhole", "./tests"]
        self.test_suite = True

    def run_tests(self):
        """Run ze tests."""
        # Imported lazily so setup.py itself works without pytest installed.
        import pytest

        sys.exit(pytest.main(self.test_args))
def require_python(minimum):
    """Abort unless the running interpreter satisfies *minimum*.

    *minimum* is a version encoded like ``sys.hexversion`` (e.g.
    ``0x30600f0`` for 3.6.0 final). When the interpreter is older, a
    human-readable requirement is printed and the process exits with 1.
    """
    if sys.hexversion >= minimum:
        return
    # Decode the hex version into its byte fields:
    # major, minor, micro, release-level.
    digits = hex(minimum)[2:]
    if len(digits) % 2 != 0:
        digits = "0" + digits
    major, minor, micro, release = bytes.fromhex(digits)
    if release == 0xF0:
        # 0xf0 marks a final release; omit the release level from the message.
        print(
            "Python {0}.{1}.{2} or higher is required".format(
                major, minor, micro
            )
        )
    else:
        print(
            "Python {0}.{1}.{2} ({3}) or higher is required".format(
                major, minor, micro, hex(release)[2:]
            )
        )
    sys.exit(1)
def include_file(filename):
    """Return the UTF-8 contents of *filename*, resolved next to this module."""
    fpath = os.path.join(os.path.dirname(__file__), filename)
    with io.open(fpath, encoding="utf-8") as handle:
        return handle.read()


def get_version(filepath):
    """Extract the ``__version__`` string from the module at *filepath*.

    Returns the unquoted version value, or ``None`` when no
    ``__version__`` assignment is found.
    """
    for line in include_file(filepath).split("\n"):
        if line.startswith("__version__"):
            _, vers = line.split("=")
            # Drop surrounding whitespace and any quote characters.
            return vers.strip().translate({ord('"'): None, ord("'"): None})
    return None
| 2.046875
| 2
|
test/test_game_cache.py
|
vanderh0ff/pyvtt
| 8
|
12780918
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
https://github.com/cgloeckner/pyvtt/
Copyright (c) 2020-2021 <NAME>
License: MIT (see LICENSE for details)
"""
from pony.orm import db_session
import cache, orm
from test.utils import EngineBaseTest, SocketDummy
class GameCacheTest(EngineBaseTest):
    """Unit tests for the per-game player cache: id allocation, insert,
    lookup, data/selection views and removal."""

    def setUp(self):
        super().setUp()
        with db_session:
            gm = self.engine.main_db.GM(name='user123', url='foo', sid='123456')
            gm.postSetup()
        # create GM database
        self.db = orm.createGmDatabase(engine=self.engine, filename=':memory:')
        with db_session:
            game = self.db.Game(url='bar', gm_url='foo')
            game.postSetup()
            self.cache = self.engine.cache.get(gm).get(game)

    def tearDown(self):
        del self.db
        del self.cache
        super().tearDown()

    def test_getNextId(self):
        # ids are handed out sequentially starting at zero
        self.assertEqual(self.cache.getNextId(), 0)
        self.assertEqual(self.cache.getNextId(), 1)
        self.assertEqual(self.cache.getNextId(), 2)
        self.assertEqual(self.cache.getNextId(), 3)

    def rebuildIndices(self):
        # @NOTE: this is called on insert and remove. hence it's tested
        # during those operations
        pass

    def test_insert(self):
        # create some players
        p = self.cache.insert('arthur', 'red', False)
        self.assertIsNotNone(p)
        self.cache.insert('bob', 'blue', True) # GM
        self.cache.insert('carlos', 'yellow', False)
        # test indices being rebuilt
        ids = set()
        for name in self.cache.players:
            ids.add(self.cache.players[name].index)
        self.assertEqual(len(ids), 3)
        self.assertEqual(ids, {0, 1, 2})
        # force carlos to be online
        self.cache.get('carlos').socket = SocketDummy()
        # cannot add player twice (if online)
        with self.assertRaises(KeyError) as e:
            self.cache.insert('carlos', 'black', True)
        # NOTE(review): str(e) is the context manager, not the exception —
        # str(e.exception) was probably intended here.
        self.assertEqual(str(e), 'carlos')
        # can re-login player if offline
        self.cache.insert('bob', 'cyan', False)

    def test_get(self):
        # create some players
        self.cache.insert('arthur', 'red', False)
        self.cache.insert('bob', 'blue', True) # GM
        self.cache.insert('carlos', 'yellow', False)
        # query players
        cache1 = self.cache.get('arthur')
        self.assertIsNotNone(cache1)
        cache2 = self.cache.get('bob')
        self.assertIsNotNone(cache2)
        cache3 = self.cache.get('carlos')
        self.assertIsNotNone(cache3)
        # removed player cannot be queried
        self.cache.remove('bob')
        cache2 = self.cache.get('bob')
        self.assertIsNone(cache2)
        # cannot query unknown player
        unknown_cache = self.cache.get('gabriel')
        self.assertIsNone(unknown_cache)

    def test_getData(self):
        # create some players
        self.cache.insert('arthur', 'red', False)
        self.cache.insert('gabriel', 'red', False)
        self.cache.insert('carlos', 'yellow', False)
        self.cache.insert('bob', 'blue', True)
        # query data (in index-order)
        data = self.cache.getData()
        self.assertEqual(len(data), 4)
        self.assertEqual(data[0]['name'], 'arthur')
        self.assertEqual(data[1]['name'], 'gabriel')
        self.assertEqual(data[2]['name'], 'carlos')
        self.assertEqual(data[3]['name'], 'bob')
        # remove player
        self.cache.remove('carlos')
        # re- query data (in index-order)
        data = self.cache.getData()
        self.assertEqual(len(data), 3)
        self.assertEqual(data[0]['name'], 'arthur')
        self.assertEqual(data[1]['name'], 'gabriel')
        self.assertEqual(data[2]['name'], 'bob')

    def test_getSelections(self):
        # create some players
        self.cache.insert('arthur', 'red', False)
        self.cache.insert('gabriel', 'red', False)
        self.cache.insert('carlos', 'yellow', False)
        self.cache.insert('bob', 'blue', True)
        # set selections
        self.cache.get('arthur').selected = [236, 154]
        self.cache.get('carlos').selected = [12]
        self.cache.get('bob').selected = [124, 236, 12]
        # expect selections per player name
        selections = self.cache.getSelections()
        for name in selections:
            self.assertEqual(selections[name], self.cache.get(name).selected)

    def test_remove(self):
        # create some players
        self.cache.insert('arthur', 'red', False)
        self.cache.insert('gabriel', 'red', False)
        self.cache.insert('carlos', 'yellow', False)
        self.cache.insert('bob', 'blue', True)
        # remove but expect indices being rebuilt
        self.cache.remove('carlos')
        ids = set()
        for name in self.cache.players:
            ids.add(self.cache.players[name].index)
        self.assertEqual(len(ids), 3)
        self.assertEqual(ids, {0, 1, 2})
        # cannot remove player twice
        with self.assertRaises(KeyError):
            self.cache.remove('carlos')
        # cannot remove unknown player
        with self.assertRaises(KeyError):
            self.cache.remove('dimitri')

    # @NOTE: other operations are tested during integration test
| 2.265625
| 2
|
test_maximum_rate.py
|
saydulk/blurt
| 5
|
12780919
|
<filename>test_maximum_rate.py
# Throughput benchmark for the `wifi` codec (Python 2 script).
# NOTE(review): `np` (numpy) and `wifi` are used but never imported here —
# the script presumably runs in an environment where both are pre-loaded.
import time

rate = 0
length = 1500
input_octets = np.random.random_integers(0,255,length)
output = wifi.encode(input_octets, rate)
N = output.size
trials = 10

# Encode `trials` frames back-to-back and derive the sustained sample rate.
t0 = time.time(); [(None, wifi.encode(input_octets, rate))[0] for i in xrange(trials)]; t1 = time.time()
samples_encoded = trials * N
time_elapsed_encode = t1 - t0
max_sample_rate_encode = samples_encoded / time_elapsed_encode

# Same measurement for the decode path.
t0 = time.time(); [(None, wifi.decode(output))[0] for i in xrange(trials)]; t1 = time.time()
samples_decoded = trials * N
time_elapsed_decode = t1 - t0
max_sample_rate_decode = samples_decoded / time_elapsed_decode

print max_sample_rate_encode, max_sample_rate_decode

# Detailed per-call profile of a single decode.
import cProfile as profile
profile.run('wifi.decode(output)')
| 2.859375
| 3
|
main.py
|
promethee/pimoroni.pirate-audio.dual-mic
| 0
|
12780920
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import math
import time
import numpy
import digitalio
import board
from PIL import Image, ImageDraw, ImageFont
from fonts.ttf import RobotoMedium
import RPi.GPIO as GPIO
from ST7789 import ST7789
# Pirate Audio button/display demo: cycles the credit text through colours
# and highlights whichever button (A/B/X/Y) was last pressed.
SPI_SPEED_MHZ = 80

display = ST7789(
    rotation=90,  # Needed to display the right way up on Pirate Audio
    port=0,  # SPI port
    cs=1,  # SPI port Chip-select channel
    dc=9,  # BCM pin used for data/command
    backlight=13,
    spi_speed_hz=SPI_SPEED_MHZ * 1000 * 1000
)

GPIO.setmode(GPIO.BCM)
GPIO.setup(13, GPIO.OUT)  # backlight pin

FLIP = os.environ.get('FLIP', False)

# The panel is rotated 90°, so width/height are swapped.
WIDTH = display.height
HEIGHT = display.width

BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
# Palette cycled one step per main-loop iteration.
COLORS = [
    (255, 0, 0),
    (255, 128, 0),
    (255, 255, 0),
    (128, 255, 0),
    (0, 255, 0),
    (0, 255, 128),
    (0, 255, 255),
    (0, 128, 255),
    (0, 0, 255),
    (255, 0, 255),
    (255, 0, 128),
]
index = 0  # current position in COLORS

font_smiley = ImageFont.truetype('./CODE2000.TTF', 28)
font = ImageFont.truetype(RobotoMedium, 40)

img = Image.new("RGB", (WIDTH, HEIGHT), 0)
draw = ImageDraw.Draw(img)

# Pirate Audio buttons: BCM pins mapped to their silkscreen labels.
BUTTONS = [5, 6, 16, 24]
LABELS = ['A', 'B', 'X', 'Y']
GPIO.setmode(GPIO.BCM)
GPIO.setup(BUTTONS, GPIO.IN, pull_up_down=GPIO.PUD_UP)

button = ""  # label of the currently-highlighted button, "" for none


def show_credits(button):
    """Draw the four button labels and the credit lines; the pressed button
    (or, when none is pressed, the credit text) gets the current colour."""
    global index
    ROTATION = 270 if FLIP else 90  # NOTE(review): computed but never used
    draw.text((0, 0), "A", font=font, fill=COLORS[index] if button == "A" else WHITE)
    draw.text((WIDTH - 32, 0), "X", font=font, fill=COLORS[index] if button == "X" else WHITE)
    draw.text((0, HEIGHT - 48), "B", font=font, fill=COLORS[index] if button == "B" else WHITE)
    draw.text((WIDTH - 32, HEIGHT - 48), "Y", font=font, fill=COLORS[index] if button == "Y" else WHITE)
    draw.text((int(WIDTH*0.2), int(HEIGHT*0.09)), "¯\_(ツ)_/¯", font=font_smiley, fill=COLORS[index] if button == "" else WHITE)
    draw.text((int(WIDTH*0.09), int(HEIGHT*0.35)), "promethee", font=font, fill=COLORS[index] if button == "" else WHITE)
    draw.text((int(WIDTH*0.2), int(HEIGHT*0.6)), "@github", font=font, fill=COLORS[index] if button == "" else WHITE)
    display.display(img)


def button_press(pin):
    """GPIO edge callback: toggle the highlighted label for *pin*."""
    global button
    button = LABELS[BUTTONS.index(pin)] if button == "" else ""


for pin in BUTTONS:
    # BOTH edges so press and release each toggle the highlight.
    GPIO.add_event_detect(pin, GPIO.BOTH, button_press, bouncetime=100)

while True:
    # Advance the colour index, wrapping back to the first colour.
    index = index + 1 if index < len(COLORS) - 1 else 0
    show_credits(button)
| 2.765625
| 3
|
config/v1.py
|
shucheng-ai/WDA-web-server
| 0
|
12780921
|
#!/usr/bin/env python3
# coding:utf-8
import os
import sys
"""
config 1.0
"""
# Server settings.
DEBUG = True
HOST = '0.0.0.0'
PORT = 8000
NAME = 'layout'
DEPLOY = 0  # 0: standalone deployment; 1: attached to a cloud server
HOMEPAGE = "/projects"
ERRPAGE = "/404"
TEST_ID = 0

# path — everything is resolved relative to this file's location.
_PATH = os.path.abspath(os.path.dirname(__file__))
APP_PATH = os.path.abspath(os.path.dirname(_PATH))
ROOT_PATH = os.path.abspath(os.path.dirname(APP_PATH))
WEB_PATH = os.path.abspath(os.path.join(ROOT_PATH, "web"))
DIST_PATH = os.path.abspath(os.path.join(WEB_PATH, "dist"))
DIST_STATIC_PATH = os.path.abspath(os.path.join(WEB_PATH, "dist"))
DIST_INDEX_PATH = os.path.abspath(os.path.join(WEB_PATH, "dist", "index.html"))
WEB_3D_PATH = os.path.abspath(os.path.join(ROOT_PATH, "3d"))
DIST_3D_PATH = os.path.abspath(os.path.join(WEB_3D_PATH, "dist"))
DIST_3D_INDEX = os.path.abspath(os.path.join(DIST_3D_PATH, "index.html"))

# sqlite
DB_FILE_PATH = os.path.abspath(os.path.join(ROOT_PATH, f"{NAME}.db"))
DB_FILE = f'sqlite:///{DB_FILE_PATH}'

# PROJECT PATH
BASE_PROJECT_PATH = os.path.abspath(os.path.join(ROOT_PATH, "project"))
PROJECT_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "project"))
DWG_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "dwg"))

# WDA CAD PATH
WDA_CAD_PROJECT_PATH = os.path.abspath(os.path.join(ROOT_PATH, "cad-project", "storage"))
PROJECT_LOG_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "log"))
GLOBAL_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "global"))
STORAGE_PATH = os.path.abspath(os.path.join(GLOBAL_PATH, "storage"))
TMP_PATH = os.path.abspath(os.path.join(BASE_PROJECT_PATH, "tmp"))
TMP_INPUT_PATH = os.path.abspath(os.path.join(TMP_PATH, "input"))
DEMO_PATH = os.path.abspath(os.path.join(APP_PATH, "demo"))
DEMO_JSON_PATH = os.path.abspath(os.path.join(DEMO_PATH, "json"))

# Sibling library checkouts are made importable by prepending them to sys.path.
# tool v2
LIB_TOOL_PATH = os.path.abspath(os.path.join(ROOT_PATH, "tools"))
sys.path.insert(0, LIB_TOOL_PATH)
# core v2
LIB_CORE_PATH = os.path.abspath(os.path.join(ROOT_PATH, "core"))
sys.path.insert(0, LIB_CORE_PATH)
# cad v2
LIB_CAD_PATH = os.path.abspath(os.path.join(ROOT_PATH, "cad"))
sys.path.insert(0, LIB_CAD_PATH)

# auth wda-auth-decorators
AUTH_DECORATORS_PATH = os.path.abspath(os.path.join(ROOT_PATH, "wda-auth-decorators"))
# auth database
AUTH_DB_HOST = "172.17.0.1"
AUTH_DB_PORT = 15432
AUTH_DB_USERNAME = "admin"
AUTH_DB_PASSWORD = "<PASSWORD>"

# model
MODEL_PATH = os.path.abspath(os.path.join(ROOT_PATH, "wda-cloud"))
# model database 172.17.0.1
DB_HOST = "172.17.0.1"
DB_PORT = 15433
DB_USERNAME = "admin"
DB_PASSWORD = "<PASSWORD>"

# logger
LOG_NAME = f"{NAME}"
LOG_LEVER = "INFO"  # "WARNING"
LOG_PATH = os.path.abspath(os.path.join(APP_PATH, f"{NAME}.log"))

# dwg2dxf
DWG2DXF_SERVER = "http://172.17.0.1:8001/dwg2dxf/"
DXF2DWG_SERVER = "http://172.17.0.1:8001/dxf2dwg/"

# Optional overrides: a local_config module and a cloud config may shadow
# any of the values above.
try:
    from local_config import *
except ImportError:
    # fix: was a bare `except` that hid *all* errors inside local_config;
    # only its absence should be ignored.
    pass

try:
    from config.cloud import *
except ImportError:
    pass

if DEPLOY == 1:
    sys.path.append(AUTH_DECORATORS_PATH)
    sys.path.append(MODEL_PATH)

print("deploy", DEPLOY)
print("homepage", HOMEPAGE)
print(sys.path)
| 2.140625
| 2
|
selecttotex/totex.py
|
M3nin0/selectToTex
| 4
|
12780922
|
import pandas as pd
from selecttotex.database import Database
class Totex:
"""Classe para transformar resultados de selects em Latex
"""
def __init__(self):
self.db = Database().get_connection()
def to_tex(self, command_list: list, output_file: str) -> None:
"""Função para transformar select em tabela latex
:param: command_list: Lista com os selects que deverão ser utilizados
:param: output_file: Caminho/Nome do arquivo a ser salvo com as tabelas
"""
# Criando arquivo para armazenar resultados
file = open(output_file, 'w')
file.write('Tabelas geradas pelo SelectToTex\n\n\n')
# Criando o loop para percorrer os comandos da lista
for command in command_list:
self.db.execute(command)
# Recupera o resultado e já transforma ele em String
r = str(pd.DataFrame(self.db.fetchall()).to_latex())
file.write(r)
file.write('\n\n')
file.close()
| 3.0625
| 3
|
captioner/train.py
|
svaisakh/captioner
| 1
|
12780923
|
<gh_stars>1-10
import magnet as mag
from torch.nn import functional as F
from captioner.nlp import process_caption
def optimize(model, optimizer, history, dataloader, nlp, save_path, epochs=1, iterations=-1, save_every=5,
             write_every=1):
    """
    Trains the model for the specified number of epochs/iterations.
    This method also handles checkpointing of the model and optimizer.
    :param model: The RNN generative model to train.
    :param optimizer: The optimizer to use for training.
    :param history: A Training history dictionary with the following keys: ['iterations', 'loss', 'val_loss'].
    It will be updated during training with the statistics.
    :param dataloader: A dictionary containing the training and validation DataLoaders with keys 'train' and 'val' respectively.
    :param nlp: The spaCy model to use for training.
    :param save_path: The model and optimizer state will be periodically saved to this path.
    :param epochs: The number of epochs to train.
    :param iterations: Number of iterations to train. If this is positive, then epochs is overriden with this. Useful for debugging (eg. train for 1 iteration).
    :param save_every: The frequency (number of minutes) with which the model is saved during training.
    :param write_every: The frequency (number of minutes) with which the training history is appended.
    """
    import torch
    from captioner.utils import get_tqdm, loopy
    from time import time

    tqdm = get_tqdm()
    start_time_write = time()
    start_time_save = time()
    mean = lambda x: sum(x) / len(x)

    model.train()
    # A negative iteration count means "train for `epochs` full passes".
    if iterations < 0: iterations = int(epochs * len(dataloader['train']))
    prog_bar = tqdm(range(iterations))
    # Infinite cycling iterators over both splits.
    gen = {mode: loopy(dataloader[mode]) for mode in ('train', 'val')}
    running_history = {'loss': []}
    device = 'cuda:0' if mag.device == 'cuda' else mag.device  # NOTE(review): unused here

    for batch in prog_bar:
        # One optimization step on the next training batch.
        feature, caption = next(gen['train'])
        loss = _get_loss(model, feature, caption[0], nlp)
        loss.backward()
        optimizer.step()
        optimizer.zero_grad()
        running_history['loss'].append(loss.item())
        history['iterations'] += 1

        # Periodically (or on the last batch) record train/val statistics.
        if (time() - start_time_write > write_every * 60) or (batch == iterations - 1):
            start_time_write = time()
            mean_loss = mean(running_history['loss'])
            history['loss'].append(mean_loss)
            running_history['loss'] = []
            feature, caption = next(gen['val'])
            with mag.eval(model): loss = _get_loss(model, feature, caption[0], nlp).item()
            history['val_loss'].append(loss)
            prog_bar.set_description(f'{mean_loss:.2f} val={loss:.2f}')

        # Periodically (or on the last batch) checkpoint model + optimizer.
        if (time() - start_time_save > save_every * 60) or (batch == iterations - 1):
            start_time_save = time()
            torch.save(model.state_dict(), save_path / 'model.pt')
            torch.save(optimizer.state_dict(), save_path / 'optimizer.pt')
def _get_loss(model, feature, caption, nlp):
    """Cross-entropy loss of the model's next-token predictions for one caption."""
    # process_caption tokenizes the text into input ids and shifted targets.
    cap, target = process_caption(caption, nlp)
    y = model(feature.to(mag.device), cap.to(mag.device))
    return F.cross_entropy(y.squeeze(0), target.to(mag.device))
def __main(epochs, iterations, shuffle, optimizer, learning_rate, vocab_size, caption_idx, hidden_size, num_layers, rnn_type, save_every=5):
    """Entry point: load data and vocab, build/restore the model, then train.

    ``save_every`` (minutes between checkpoints) is new and defaults to 5,
    matching :func:`optimize`'s own default; previously the name was used
    below without ever being defined (NameError).
    """
    import torch  # fix: torch.load below raised NameError — torch was only imported inside optimize()

    from captioner.data import get_training_dataloaders
    from captioner.nlp import get_nlp
    from captioner.utils import DIR_DATA, DIR_CHECKPOINTS, get_optimizer

    if not (DIR_DATA / 'train' / 'features.pt').exists():
        print("Features don't seem to be extracted or cannot be found. Run extract.py once again, maybe?")
        return

    device = 'cuda:0' if mag.device == 'cuda' else mag.device

    print('Loading SpaCy into memory with', vocab_size, 'words.')
    nlp = get_nlp('en_core_web_lg', vocab_size, DIR_CHECKPOINTS / 'vocab')
    embedding_dim = nlp.vocab.vectors.shape[1]

    print('Getting data.')
    caption_idx = None if caption_idx == 'None' else int(caption_idx)
    dataloader = get_training_dataloaders(DIR_DATA, caption_idx, shuffle)
    x = next(iter(dataloader['val']))
    feature_dim = x[0].shape[1]

    print('Creating the model with:\nfeature_dim =', feature_dim, '\nembedding_dim =', embedding_dim, '\nhidden_size = ',
          hidden_size, '\nnum_layers = ', num_layers, '\nrnn_type = ', rnn_type)
    # NOTE(review): `Model` is not imported anywhere in this module — confirm
    # which module defines it (e.g. captioner.model) and import it there.
    model = Model(feature_dim, embedding_dim, hidden_size,
                  num_layers, rnn_type, vocab_size)
    if (DIR_CHECKPOINTS / 'model.pt').exists(): model.load_state_dict(torch.load(DIR_CHECKPOINTS / 'model.pt', map_location=device))

    print(f'Using the {optimizer} optimizer.')  # fix: missing f-prefix printed the literal braces
    if isinstance(optimizer, str): optimizer = get_optimizer(optimizer)
    optimizer = optimizer(model.parameters(), learning_rate)
    if (DIR_CHECKPOINTS / 'optimizer.pt').exists():
        optimizer.load_state_dict(torch.load(DIR_CHECKPOINTS / 'optimizer.pt', map_location=device))

    history = {'iterations': 0, 'loss': [], 'val_loss': []}

    print(f"Training for {iterations/len(dataloader['train']):.2f} epochs ({iterations} iterations)")
    print('Will save the model to disk every', save_every, 'minutes.')
    print('\n\t\t\tHere we go!')
    optimize(model, optimizer, history, dataloader, nlp, DIR_CHECKPOINTS, epochs, iterations, save_every)
    print('Done.')
if __name__ == '__main__':
    # Launch the trainer with hyperparameter defaults taken from hparams.
    import hparams
    from captioner.utils import launch
    launch(__main, default_module=hparams)
| 2.796875
| 3
|
user/managers.py
|
TheKiddos/StaRat
| 1
|
12780924
|
<reponame>TheKiddos/StaRat<filename>user/managers.py
from django.contrib.auth.base_user import BaseUserManager
class UserManager(BaseUserManager):
    """Manager that treats the email address as the unique user identifier."""

    def create_user(self, email, password=None, **kwargs):
        """Creates and saves a new user"""
        if not email:
            raise ValueError("User must have an email address")
        normalized = self.normalize_email(email)
        new_user = self.model(email=normalized, **kwargs)
        new_user.set_password(password)
        new_user.save(using=self._db)
        return new_user

    def create_superuser(self, email, password):
        """Creates and saves a superuser"""
        superuser = self.create_user(email, password)
        # Grant full admin rights on top of the regular account.
        superuser.is_staff = True
        superuser.is_superuser = True
        superuser.save(using=self._db)
        return superuser
| 2.671875
| 3
|
archive/Jamshidian/Jamshidian_cls.py
|
nkapchenko/HW
| 2
|
12780925
|
import numpy as np
from numpy import exp, sqrt
from functools import partial
from scipy import optimize
from scipy.stats import norm
import scipy.integrate as integrate
from fox_toolbox.utils import rates
"""This module price swaption under Hull White model using Jamshidian method.
Usage example:
from hw import Jamshidian as jamsh
jamsh_price, debug = jamsh.hw_swo(swo, ref_mr, sigma_hw_jamsh, dsc_curve, estim_curve)
swo : rates.Swaption
ref_mr : float
sigma_hw_jamsh : rates.Curve
dsc_curve : rates.RateCurve
estim_curve : rates.RateCurve
"""
class Jamshidian():
    """Hull-White swaption pricer via the Jamshidian decomposition.

    Parameters
    ----------
    mr : float
        Mean-reversion speed ``a`` of the Hull-White model.
    sigma : float or rates.Curve
        Constant or piecewise-constant (term-structure) HW volatility.
    dsc_curve : rates.RateCurve
        Discount curve.
    estim_curve : rates.RateCurve
        Estimation (forwarding) curve used for float-leg adjustments.

    Note: several helpers previously referred to sibling staticmethods by bare
    name (``_v``, ``_V``, ``_A``, ``_B``, ``swap_value``), which raises
    NameError at call time inside a class; they are now properly qualified.
    """

    def __init__(self, mr, sigma, dsc_curve, estim_curve):
        assert isinstance(sigma, (float, rates.Curve)), f'sigma: float or rates.Curve, not {type(sigma)}'
        self.mr = mr
        self.sigma = sigma
        self.dsc_curve = dsc_curve
        self.estim_curve = estim_curve

    @staticmethod
    def sign_changes(array):
        """return number of times the sign is changed in array"""
        return np.where(np.diff(np.sign(array)))[0]

    @staticmethod
    def _B(t, T, a):
        """HW bond-volatility factor B(t, T) = (1 - e^{-a(T-t)}) / a."""
        return (1 - exp(-a * (T - t))) / a

    @staticmethod
    def _v(t, T, u, a):
        """Time integral of B(s, u)^2 over [t, T] — building block of _V."""
        p1 = (T - t)
        p2 = - (2 / a) * exp(-a * u) * (exp(a * T) - exp(a * t))
        p3 = exp(-2 * a * u) * (exp(2 * a * T) - exp(2 * a * t)) / (2 * a)
        return (p1 + p2 + p3) / (a ** 2)

    @staticmethod
    def _V(t, T, u, a, sigma):
        """Integrated variance for constant or piecewise-constant sigma."""
        if isinstance(sigma, float):
            # FIX: was bare `_v(...)` (NameError inside a staticmethod).
            return sigma ** 2 * Jamshidian._v(t, T, u, a)
        elif isinstance(sigma, rates.Curve):
            total_var = 0.
            expiry = T
            previous_expiries = [t_exp for t_exp in sigma.buckets if t_exp <= expiry]
            previous_sigmas = list(sigma.values[:len(previous_expiries)])
            if previous_expiries[-1] < expiry:
                previous_sigmas.append(sigma.values[len(previous_expiries)])
                previous_expiries.append(expiry)
            # NOTE(review): buckets are accumulated as full integrals from t to
            # each expiry (not per-interval) — confirm against the model paper.
            for i in range(len(previous_expiries) - 1):
                total_var += (previous_sigmas[i + 1] ** 2) * Jamshidian._v(t, previous_expiries[i + 1], u, a)
            return total_var

    @staticmethod
    def _A(t, T, a, sigma, dsc_curve):
        """HW reconstruction coefficient A(t, T) of the zero-coupon bond."""
        assert isinstance(sigma, (float, rates.Curve)), f'sigma: float or rates.Curve, not {type(sigma)}'
        fwd_dsc = dsc_curve.get_fwd_dsc(t, T)
        # FIX: qualify _V (bare name was unresolvable from inside the class).
        return fwd_dsc * exp(0.5 * (Jamshidian._V(0, t, t, a, sigma) - Jamshidian._V(0, t, T, a, sigma)))

    def get_coef(self, swo):
        """ Coefficients for Put swaption from calibration basket. Jamishidian """
        flt_adjs = swo.get_flt_adjustments(self.dsc_curve, self.estim_curve)
        # FIX: _A calls below were bare names; route them through self.
        c0 = -self._A(swo.expiry, swo.start_date, self.mr, self.sigma, self.dsc_curve)
        c = list(map(lambda dcf, pdate, fadj: dcf * (swo.strike - fadj) * self._A(swo.expiry, pdate, self.mr, self.sigma, self.dsc_curve),
                     swo.day_count_fractions, swo.payment_dates, flt_adjs))
        # Final payment also carries the notional redemption.
        c[-1] += self._A(swo.expiry, swo.maturity, self.mr, self.sigma, self.dsc_curve)
        c.insert(0, c0)
        return np.array(c)

    def get_var_x(self, expiry):
        """Variance of the HW state variable x at `expiry`."""
        # FIX: original referenced undefined `sigma`/`a`; use the instance attrs.
        if isinstance(self.sigma, float):
            return 1 / (2 * self.mr) * (1 - exp(-2 * self.mr * expiry)) * self.sigma ** 2
        elif isinstance(self.sigma, rates.Curve):
            total_var = 0.
            previous_expiries = [t_exp for t_exp in self.sigma.buckets if t_exp <= expiry]
            previous_sigmas = list(self.sigma.values[:len(previous_expiries)])
            if previous_expiries[-1] < expiry:
                previous_sigmas.append(self.sigma.values[len(previous_expiries)])
                previous_expiries.append(expiry)
            for i in range(len(previous_expiries) - 1):
                total_var += 1 / (2 * self.mr) * (previous_sigmas[i + 1] ** 2) * (exp(-2 * self.mr * (expiry - previous_expiries[i + 1])) - exp(-2 * self.mr * (expiry - previous_expiries[i])))
            return total_var

    def get_b_i(self, swo):
        """ array of B_i for by each payment date """
        # FIX: _B calls were bare names; route them through self.
        b0 = self._B(swo.expiry, swo.start_date, self.mr)
        b = list(map(lambda pdate: self._B(swo.expiry, pdate, self.mr), swo.payment_dates))
        b.insert(0, b0)
        return np.array(b)

    @staticmethod
    def swap_value(coef, b_i, varx, x):
        """ Swap function for finding x_star """
        exp_b_var = exp(- b_i * sqrt(varx) * x)
        return coef.dot(exp_b_var)

    @staticmethod
    def get_x_star(coef, b_i, varx):
        """Root of the decomposed swap value — the Jamshidian critical point."""
        x0 = .0  # kept for the alternative Newton solver below
        # FIX: `swap_value` was a bare name (NameError); qualify it.
        func = partial(Jamshidian.swap_value, coef, b_i, varx)
        # optimum = optimize.newton(func, x0=x0)
        optimum = optimize.bisect(func, -6, 6)
        return optimum
###TODO: continue adopting
def hw_swo_analytic(coef, b_i, varx, x_star, IsCall):
    """Closed-form swaption value once the critical point x_star is known."""
    if IsCall:
        # A call is priced as a put on the negated coefficients.
        coef = np.negative(coef)
        sign = -1
    else:
        sign = 1
    vol = sqrt(varx)
    weights = exp(0.5 * b_i ** 2 * varx) * norm.cdf(sign * (x_star + b_i * vol))
    return coef.dot(weights)
def hw_swo_numeric(coef, b_i, varx, IsCall):
    """Swaption value by integrating the payoff against the standard normal density."""
    if IsCall:
        coef = np.negative(coef)
    integrand = lambda x: swo_payoff(coef, b_i, varx, x) * norm.pdf(x)
    swaption_numeric = integrate.quad(integrand, -10, 10)[0]
    # Control variate is currently a placeholder: both terms are zero.
    degen_swo_analytic, degen_swo_numeric = 0, 0
    control_variable = degen_swo_analytic - degen_swo_numeric
    return swaption_numeric + control_variable
def swo_payoff(coef, b_i, varx, x):
    """Positive part of the swap value (Call/Put is hidden in coef)."""
    value = swap_value(coef, b_i, varx, x)
    if value > 0:
        return value
    return 0
def hw_swo(swo, a, sigma, dsc_curve, estim_curve):
    """Main Hull-White swaption entry point: analytic when the coefficient
    vector changes sign exactly once, numeric integration otherwise."""
    IsCall = swo.pay_rec != 'Receiver'
    coef = get_coef(swo, a, sigma, dsc_curve, estim_curve)
    b_i = get_b_i(swo, a)
    varx = get_var_x(swo.expiry, a, sigma)
    debug_dict = {}
    if len(sign_changes(coef)) == 1:
        x_star = get_x_star(coef, b_i, varx)
        return hw_swo_analytic(coef, b_i, varx, x_star, IsCall), debug_dict
    return hw_swo_numeric(coef, b_i, varx, IsCall), debug_dict
| 2.75
| 3
|
convenient/decorators.py
|
ixc/glamkit-convenient
| 0
|
12780926
|
<gh_stars>0
from django.db.models.signals import post_save
def post_save_handler(model):
    """Decorator factory: register the decorated function as a ``post_save``
    signal receiver for *model* and return the function unchanged."""
    def register(func):
        post_save.connect(func, sender=model)
        return func
    return register
| 1.890625
| 2
|
python/utils.py
|
wenh06/dgne
| 0
|
12780927
|
<reponame>wenh06/dgne
"""
"""
import re
import time
from functools import wraps
from typing import Any, MutableMapping, Optional, List, Callable, NoReturn, Tuple
import numpy as np
__all__ = [
"DEFAULTS",
"set_seed",
"ReprMixin",
"Timer",
]
class CFG(dict):
    """
    A dict subclass whose `update` merges hierarchically instead of replacing
    whole sub-dicts, to fit nested configuration structures. Values are stored
    both as attributes and as dict items (kept in sync by __setattr__).

    Examples
    --------
    >>> c = CFG(hehe={"a":1,"b":2})
    >>> c.update(hehe={"a":-1})
    >>> c
    {'hehe': {'a': -1, 'b': 2}}
    """
    __name__ = "CFG"

    def __init__(self, *args, **kwargs) -> NoReturn:
        """Build from at most one mapping positional arg plus keyword pairs."""
        if len(args) > 1:
            raise TypeError(f"expected at most 1 arguments, got {len(args)}")
        elif len(args) == 1:
            d = args[0]
            assert isinstance(d, MutableMapping)
        else:
            d = {}
        if kwargs:
            d.update(**kwargs)
        for k, v in d.items():
            try:
                setattr(self, k, v)
            except Exception:
                # Keys that are not valid attribute names fall back to
                # plain dict storage only.
                dict.__setitem__(self, k, v)
        # Class attributes
        # Re-assign class-level attributes through __setattr__ so they are
        # mirrored into the dict as well (skipping dunders and overridden
        # dict methods).
        exclude_fields = ["update", "pop"]
        for k in self.__class__.__dict__:
            if (
                not (k.startswith("__") and k.endswith("__"))
                and k not in exclude_fields
            ):
                setattr(self, k, getattr(self, k))

    def __setattr__(self, name: str, value: Any) -> NoReturn:
        # Recursively coerce plain dicts (including those inside lists/tuples)
        # into CFG, then store the value both as attribute and as dict item.
        if isinstance(value, (list, tuple)):
            value = [self.__class__(x) if isinstance(x, dict) else x for x in value]
        elif isinstance(value, dict) and not isinstance(value, self.__class__):
            value = self.__class__(value)
        super().__setattr__(name, value)
        super().__setitem__(name, value)

    __setitem__ = __setattr__  # item assignment goes through the same sync path

    def update(
        self, new_cfg: Optional[MutableMapping] = None, **kwargs: Any
    ) -> NoReturn:
        """
        the new hierarchical update method

        Parameters
        ----------
        new_cfg : MutableMapping, optional
            the new configuration, by default None
        kwargs : Any, optional
            key value pairs, by default None
        """
        _new_cfg = new_cfg or CFG()
        if len(kwargs) > 0:  # avoid RecursionError
            _new_cfg.update(kwargs)
        for k in _new_cfg:
            # if _new_cfg[k].__class__.__name__ in ["dict", "EasyDict", "CFG"] and k in self:
            # Nested mappings merge recursively; everything else overwrites.
            if isinstance(_new_cfg[k], MutableMapping) and k in self:
                self[k].update(_new_cfg[k])
            else:
                try:
                    setattr(self, k, _new_cfg[k])
                except Exception:
                    dict.__setitem__(self, k, _new_cfg[k])

    def pop(self, key: str, default: Optional[Any] = None) -> Any:
        """
        the updated pop method: removes the attribute mirror before the
        dict entry.

        Parameters
        ----------
        key : str
            the key to pop
        default : Any, optional
            the default value, by default None
        """
        if key in self:
            delattr(self, key)
        return super().pop(key, default)
# Package-wide defaults: a shared CFG holding the seed and the NumPy random
# Generator derived from it. Reseed both via `set_seed`.
DEFAULTS = CFG()
DEFAULTS.SEED = 1
DEFAULTS.RNG = np.random.default_rng(seed=DEFAULTS.SEED)
def set_seed(seed: int) -> NoReturn:
    """Reseed the shared DEFAULTS configuration.

    Stores *seed* in ``DEFAULTS.SEED`` and replaces ``DEFAULTS.RNG`` with a
    fresh NumPy Generator seeded from it.
    """
    global DEFAULTS
    DEFAULTS.RNG = np.random.default_rng(seed=seed)
    DEFAULTS.SEED = seed
def default_class_repr(c: object, align: str = "center", depth: int = 1) -> str:
    """Render *c* using its ``extra_repr_keys`` contract, or fall back to repr().

    Parameters
    ----------
    c: object,
        the object to be represented
    align: str, default "center",
        the alignment of the class arguments
    depth: int, default 1,
        current nesting level, used to compute indentation

    Returns
    -------
    str,
        the representation of the class
    """
    indent = 4 * depth * " "
    closing_indent = 4 * (depth - 1) * " "
    if not hasattr(c, "extra_repr_keys"):
        return repr(c)
    elif len(c.extra_repr_keys()) > 0:
        max_len = max([len(k) for k in c.extra_repr_keys()])
        extra_str = (
            "(\n"
            + ",\n".join(
                [
                    # FIX: replaced eval(f"c.{k}") with getattr(c, k) — same
                    # attribute access without executing key names as code.
                    f"""{indent}{k.ljust(max_len, " ") if align.lower() in ["center", "c"] else k} = {default_class_repr(getattr(c, k), align, depth + 1)}"""
                    for k in c.__dir__()
                    if k in c.extra_repr_keys()
                ]
            )
            + f"{closing_indent}\n)"
        )
    else:
        extra_str = ""
    return f"{c.__class__.__name__}{extra_str}"
class ReprMixin(object):
    """Mixin that backs __repr__/__str__ with `default_class_repr`."""

    def extra_repr_keys(self) -> List[str]:
        """Attribute names to show in the repr; override in subclasses."""
        return []

    def __repr__(self) -> str:
        return default_class_repr(self)

    __str__ = __repr__
def add_docstring(doc: str, mode: str = "replace") -> Callable:
    """
    decorator to add docstring to a function

    Parameters
    ----------
    doc: str,
        the docstring to be added
    mode: str, default "replace",
        the mode of the docstring,
        can be "replace", "append" or "prepend",
        case insensitive
    """
    def decorator(func: Callable) -> Callable:
        """Wrap *func* unchanged; only its __doc__ is rewritten below."""
        @wraps(func)
        def wrapper(*args, **kwargs) -> Callable:
            """ """
            return func(*args, **kwargs)
        # NOTE(review): pattern appears intended to match runs of blank lines
        # (used only to normalise newline counts between the two docstrings) —
        # the `^` inside the group looks suspicious; confirm against intent.
        pattern = "(\\s^\n){1,}"
        if mode.lower() == "replace":
            wrapper.__doc__ = doc
        elif mode.lower() == "append":
            # Append *doc* after the original docstring, padding with at most
            # one blank line between the two.
            tmp = re.sub(pattern, "", wrapper.__doc__)
            new_lines = 1 - (len(tmp) - len(tmp.rstrip("\n")))
            tmp = re.sub(pattern, "", doc)
            new_lines -= len(tmp) - len(tmp.lstrip("\n"))
            new_lines = max(0, new_lines) * "\n"
            wrapper.__doc__ += new_lines + doc
        elif mode.lower() == "prepend":
            # Mirror image of "append": *doc* goes before the original.
            tmp = re.sub(pattern, "", doc)
            new_lines = 1 - (len(tmp) - len(tmp.rstrip("\n")))
            tmp = re.sub(pattern, "", wrapper.__doc__)
            new_lines -= len(tmp) - len(tmp.lstrip("\n"))
            new_lines = max(0, new_lines) * "\n"
            wrapper.__doc__ = doc + new_lines + wrapper.__doc__
        else:
            raise ValueError(f"mode {mode} is not supported")
        return wrapper
    return decorator
class Timer(ReprMixin):
    """
    Context manager to time the execution of a block of code.

    Usage
    -----
    >>> with Timer("task name", verbose=2) as timer:
    >>>     do_something()
    >>>     timer.add_timer("subtask 1", level=2)
    >>>     do_subtask_1()
    >>>     timer.stop_timer("subtask 1")
    >>>     timer.add_timer("subtask 2", level=2)
    >>>     do_subtask_2()
    >>>     timer.stop_timer("subtask 2")
    >>>     do_something_else()
    """
    __name__ = "Timer"

    def __init__(self, name: Optional[str] = None, verbose: int = 0) -> NoReturn:
        """
        Parameters
        ----------
        name: str, optional
            the name of the timer, defaults to "default timer"
        verbose: int, default 0
            the verbosity level of the timer; a sub-timer is reported only
            when `verbose` >= its registered level
        """
        self.name = name or "default timer"
        self.verbose = verbose
        self.timers = {self.name: 0.0}   # name -> start time (perf_counter)
        self.ends = {self.name: 0.0}     # name -> end time; 0.0 means "still running"
        self.levels = {self.name: 1}     # name -> verbosity level

    def __enter__(self) -> "Timer":
        # (Re)start the root timer; drop any sub-timers from a previous run.
        self.timers = {self.name: time.perf_counter()}
        self.ends = {self.name: 0.0}
        self.levels = {self.name: 1}
        return self

    def __exit__(self, *args) -> NoReturn:
        # Stop anything still running, then convert start times to durations.
        for k in self.timers:
            self.stop_timer(k)
            self.timers[k] = self.ends[k] - self.timers[k]

    def add_timer(self, name: str, level: int = 1) -> NoReturn:
        """
        add a new timer for some subtask

        Parameters
        ----------
        name: str,
            the name of the timer to be added
        level: int, default 1
            the verbosity level of the timer,
        """
        self.timers[name] = time.perf_counter()
        self.ends[name] = 0
        self.levels[name] = level

    def stop_timer(self, name: str) -> NoReturn:
        """
        stop a timer (idempotent: a second call is a no-op)

        Parameters
        ----------
        name: str,
            the name of the timer to be stopped
        """
        if self.ends[name] == 0:
            self.ends[name] = time.perf_counter()
            if self.verbose >= self.levels[name]:
                time_cost, unit = self._simplify_time_expr(
                    self.ends[name] - self.timers[name]
                )
                print(f"{name} took {time_cost:.4f} {unit}")

    def _simplify_time_expr(self, time_cost: float) -> Tuple[float, str]:
        """
        simplify the time expression (seconds vs milliseconds)

        Parameters
        ----------
        time_cost: float,
            the time cost, with units in seconds

        Returns
        -------
        time_cost: float,
            the time cost,
        unit: str,
            the unit of the time cost
        """
        if time_cost <= 0.1:
            return 1000 * time_cost, "ms"
        return time_cost, "s"

    def extra_repr_keys(self) -> List[str]:
        return ["name", "verbose"]
| 2.359375
| 2
|
news/app.py
|
PythonForChange/Egg
| 0
|
12780928
|
<reponame>PythonForChange/Egg<filename>news/app.py
#Imports
from news.news import New
from news.config import files,year
from egg.resources.console import get
from egg.resources.constants import *
def journalistConsole(condition: bool = True):
    """Interactive console loop for composing and saving news articles.

    Prompts for title/date/tags/content, builds a `New` and persists it via
    `New.add()`. Content entry ends with the `$save` command; the whole
    console exits on `$end`, returning the string "done".
    Relies on module-level `white`, `get`, `New`, `year`, `files`.
    """
    print(white + "Journalist Console is now running")
    while condition:
        print(white + "Title:")
        title = get("new")
        print(white + "Day:")
        day = int(get("new"))
        print(white + "Month:")
        month = int(get("new"))
        new = New(title, day, month, year, files)
        print(white + "Tags:")
        tagsbycommas = get("new")
        new.tags = tagsbycommas.split(", ")
        print(white + "Content:")
        content = ""
        while True:
            i = get("new")
            if i == "$save":
                new.text = content
                new.add()
                break
            # FIX: `i[0] == "$"` raised IndexError on an empty input line;
            # startswith() is safe for "" and behaves identically otherwise.
            elif i.startswith("$"):
                print(white + "Error: NQS could not found the command \"" + i + " \"")
            else:
                content += i + "\n"
        print(white + "Write $end to close the console")
        print(white + "Press enter key to write other new")
        command = get("new")
        if command == "$end":
            print(white + "Journalist Console stopped running")
            return "done"
| 2.75
| 3
|
website/website/apps/pronouns/tools/copy_paradigm.py
|
SimonGreenhill/Language5
| 1
|
12780929
|
<gh_stars>1-10
from website.apps.pronouns.models import Paradigm
def copy_paradigm(pdm, language):
    """Copies the paradigm `pdm` to a new paradigm for `language`.

    Deep-copies the paradigm, its rules, pronouns, lexical entries and
    relationships, remapping all cross-references to the newly created rows.
    Returns the new Paradigm instance.
    """
    # 1. create new paradigm
    # Temporarily disable auto-prefill so the copy starts empty.
    # NOTE(review): this monkeypatch is not exception-safe (no try/finally);
    # an error between unhook and reattach leaves Paradigm patched.
    old = Paradigm._prefill_pronouns
    Paradigm._prefill_pronouns = lambda x: x  # Unhook prefill_pronouns!
    newpdm = Paradigm.objects.create(
        language=language,
        source=pdm.source,
        editor=pdm.editor,
        comment=pdm.comment,
        analect=pdm.analect,
        label=pdm.label
    )
    Paradigm._prefill_pronouns = old  # Reattach prefill_pronouns (YUCK)
    # 2. RULES: loop over rules in pdm and copy to newpdm
    for obj in pdm.rule_set.all():
        obj.pk = None  # Django idiom: clearing pk makes save() INSERT a new row
        obj.paradigm = newpdm
        obj.save()
    # 3. PRONOUNS: loop over pronouns in pdm and COPY to newpdm
    mapping_pronoun = {}  # dictionary of old pronoun pks -> new pronouns
    mapping_entry = {}    # dictionary of old entry pks -> new entries
    for pron in pdm.pronoun_set.all():
        # save these for later
        old_pk = pron.pk
        # ... and this, because as soon as we change the pk on pron, then
        # it'll forget its lexical items.
        lexicon_set = pron.entries.all()
        pron.pk = None  # will now create new entry
        pron.paradigm = newpdm  # update paradigm
        pron.save()  # save, creating a new row
        assert pron.pk != old_pk is not None, \
            "Should have created a new paradigm PK"
        mapping_pronoun[old_pk] = pron
        assert pron.entries.count() == 0, \
            "Oops. Lexical items should not have been copied yet"
        # now copy the lexical items.
        # have to use the old pronoun as the new one's forgotten everything.
        for lex_obj in lexicon_set:
            old_lex_pk = lex_obj.pk
            lex_obj.pk = None  # will now create new entry
            if lex_obj.language != language:
                lex_obj.language = language
            lex_obj.save()
            mapping_entry[old_lex_pk] = lex_obj
            # and add to new pronoun
            pron.entries.add(lex_obj)
        if pron.entries.count() != len(lexicon_set):
            m = "Lexicon count does not match %d, got %d"
            raise ValueError(
                m % (len(lexicon_set), pron.entries.count()))
    if pdm.pronoun_set.count() != newpdm.pronoun_set.count():
        raise ValueError(
            "Something went wrong - "
            "should have the same number of pronouns in both paradigms"
        )
    # 4. RELATIONSHIPS: loop over relationships in pdm and copy to newpdm
    for obj in pdm.relationship_set.all():
        obj.pk = None  # will now create new entry
        obj.paradigm = newpdm
        # update pronouns to point at the copies created in step 3
        obj.pronoun1 = mapping_pronoun[obj.pronoun1.pk]
        obj.pronoun2 = mapping_pronoun[obj.pronoun2.pk]
        obj.save()
    return newpdm
| 2.546875
| 3
|
DTSGUI/IFLAnim.py
|
pchan126/Blender_DTS_30
| 0
|
12780930
|
<filename>DTSGUI/IFLAnim.py
'''
IFLAnim.py
Copyright (c) 2008 <NAME>(<EMAIL>)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
import bpy
from .UserAnimBase import *
# ***************************************************************************************************
## @brief Class that creates and owns the GUI controls on the IFL sub-panel of the Sequences panel.
#
# This class contains event handler and resize callbacks for it's associated GUI controls, along
# with implementations of refreshSequenceOptions, clearSequenceOptions, and addNewAnim specific to its
# controls.
#
class IFLControlsClass(UserCreatedSeqControlsClassBase):
    #######################################
    # init and cleanup methods
    #######################################

    ## @brief Initialize the controls and values that are specific to this panel
    # @note Calls parent init method
    # @param tabContainer The GUI tab container control into which everything should be placed.
    def __init__(self, tabContainer):
        self.animationTypeString = "IFL"
        self.shortAnimationTypeString = "IFL"
        UserCreatedSeqControlsClassBase.__init__(self, tabContainer)
        ## Need to set this in all classes derived from SeqControlsClassBase
        # @note valid values are: "All", "Action", "IFL", "Vis" and eventually "TexUV" and "Morph"
        self.seqFilter = "IFL"
        # Label that shows which IFL image is currently selected in the frames list.
        self.guiFrameSelectedBoxLabel = Common_Gui.BoxSelectionLabel("guiFrameSelectedBoxLabel",
                                                                    "Selected image:\n None Selected", None,
                                                                    self.guiFrameSelectedBoxLabelResize)
        # Pulldown of materials flagged as IFL materials in the prefs.
        self.guiMat = Common_Gui.ComboBox("guiMat", "IFL Material",
                                          "Select a Material from this list to use in the IFL Animation",
                                          self.getNextEvent(), self.handleGuiMatEvent, self.guiMatResize)
        self.guiNumImages = Common_Gui.NumberPicker("guiNumImages", "Images", "Number of Images in the IFL animation",
                                                    self.getNextEvent(), self.handleGuiNumImagesEvent,
                                                    self.guiNumImagesResize)
        self.guiFramesListTxt = Common_Gui.SimpleText("guiFramesListTxt", "Images:", None, self.guiFramesListTxtResize)
        self.guiFramesList = Common_Gui.ListContainer("guiFramesList", "", self.handleGuiFrameListEvent,
                                                      self.guiFramesListResize)
        self.guiFramesListContainer = Common_Gui.BasicContainer("guiFramesListContainer", "", None,
                                                                self.guiFramesListContainerResize)
        self.guiFramesListSelectedTxt = Common_Gui.SimpleText("guiFramesListSelectedTxt", "Hold image for:", None,
                                                              self.guiFramesListSelectedTxtResize)
        self.guiNumFrames = Common_Gui.NumberPicker("guiNumFrames", "Frames", "Hold Selected image for n frames",
                                                    self.getNextEvent(), self.handleGuiNumFramesEvent,
                                                    self.guiNumFramesResize)
        self.guiApplyToAll = Common_Gui.BasicButton("guiApplyToAll", "Apply to all",
                                                    "Apply current frame display value to all IFL images",
                                                    self.getNextEvent(), self.handleGuiApplyToAllEvent,
                                                    self.guiApplyToAllResize)
        self.guiWriteIFLFile = Common_Gui.ToggleButton("guiWriteIFLFile", "Write .ifl file",
                                                       "Write .ifl file for this sequence to disk upon export.",
                                                       self.getNextEvent(), self.handleGuiWriteIFLFileEvent,
                                                       self.guiWriteIFLFileResize)
        # set initial states
        self.guiFramesList.enabled = True
        self.guiNumImages.min = 1
        self.guiNumFrames.min = 1
        self.guiNumImages.value = 1
        self.guiNumFrames.value = 1
        self.guiNumFrames.max = 65535  # <- reasonable? I wonder if anyone wants to do day/night cycles with IFL?
        self.guiWriteIFLFile.state = False
        # add controls to containers
        self.guiFramesListContainer.addControl(self.guiFrameSelectedBoxLabel)
        self.guiSeqOptsContainer.addControl(self.guiMat)
        self.guiSeqOptsContainer.addControl(self.guiNumImages)
        self.guiSeqOptsContainer.addControl(self.guiFramesListTxt)
        self.guiSeqOptsContainer.addControl(self.guiFramesList)
        self.guiSeqOptsContainer.addControl(self.guiFramesListContainer)
        self.guiSeqOptsContainer.addControl(self.guiWriteIFLFile)
        self.guiFramesListContainer.addControl(self.guiFramesListSelectedTxt)
        self.guiFramesListContainer.addControl(self.guiNumFrames)
        self.guiFramesListContainer.addControl(self.guiApplyToAll)

    ## @brief Cleans up Blender GUI objects before the interpreter exits;
    # we must destroy any GUI objects that are referenced in a non-global scope
    # explicitly before interpreter shutdown to avoid the dreaded
    # "error totblock" message when exiting Blender.
    # @note The builtin __del__ method is not guaranteed to be called for objects
    # that still exist when the interpreter exits.
    # @note Calls base class cleanup method explicitly.
    def cleanup(self):
        UserCreatedSeqControlsClassBase.cleanup(self)
        del self.guiFrameSelectedBoxLabel
        del self.guiMat
        del self.guiNumImages
        del self.guiFramesListTxt
        del self.guiFramesList
        del self.guiFramesListSelectedTxt
        del self.guiNumFrames
        del self.guiApplyToAll
        del self.guiWriteIFLFile
    #######################################
    # Event handler methods
    #######################################

    ## @brief Handle events generated by the "Images" number picker (guiNumImages).
    # @param control The invoking GUI control (guiNumImages)
    def handleGuiNumImagesEvent(self, control):
        Prefs = DtsGlobals.Prefs
        # No IFL material selected: an image count is meaningless, reset to 1.
        if self.guiMat.itemIndex < 0:
            control.value = 1
            return
        seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
        newCount = control.value
        holdCount = self.guiNumFrames.value
        # Resize the stored frame list, then mirror the change in the UI.
        Prefs.changeSeqIFLImageCount(seqName, newCount, holdCount)
        self.refreshImageFramesList(seqName)

    ## @brief Handle events generated by the "Select IFL Material" menu (guiMat).
    # @param control The invoking GUI control (guiMat)
    def handleGuiMatEvent(self, control):
        Prefs = DtsGlobals.Prefs
        guiSeqList = self.guiSeqList
        guiMat = self.guiMat
        itemIndex = guiMat.itemIndex
        # set the pref for the selected sequence
        # (only when both a sequence and a valid menu item are selected)
        if guiSeqList.itemIndex > -1 and itemIndex >= 0 and itemIndex < len(guiMat.items):
            seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
            matName = control.getSelectedItemString()
            # ifl material name changed
            if Prefs['Sequences'][seqName]['IFL']['Material'] != matName:
                # rename images
                Prefs.changeSeqIFLMaterial(seqName, matName)
            # make frames if we don't have any yet.
            # NOTE(review): source indentation was lost; per the comment above
            # this is assumed to run for any valid selection — confirm.
            Prefs.changeSeqIFLImageCount(seqName, self.guiNumImages.value, self.guiNumFrames.value)
            self.refreshImageFramesList(seqName)

    ## @brief Handle events generated by the "Frames" number picker (guiNumFrames).
    # @param control The invoking GUI control (guiNumFrames)
    def handleGuiNumFramesEvent(self, control):
        guiSeqList = self.guiSeqList
        guiFramesList = self.guiFramesList
        if guiFramesList.itemIndex > -1:
            seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
            itemIndex = guiFramesList.itemIndex
            # Each IFLFrames entry is [imageName, holdFrames]; update the hold count.
            seqPrefs['IFL']['IFLFrames'][itemIndex][1] = control.value
            guiFramesList.controls[guiFramesList.itemIndex].controls[1].label = "fr:" + str(control.value)
            if self.guiFramesList.callback: self.guiFramesList.callback(self.guiFramesList)  # Bit of a hack, but works

    ## @brief Handle events generated by the "Apply to all" button (guiApplyToAll).
    # @param control The invoking GUI control (guiApplyToAll)
    def handleGuiApplyToAllEvent(self, control):
        guiSeqList = self.guiSeqList
        guiFramesList = self.guiFramesList
        seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
        itemIndex = guiFramesList.itemIndex
        # Copy the current hold-frames value onto every image in the sequence.
        for i in range(0, len(seqPrefs['IFL']['IFLFrames'])):
            seqPrefs['IFL']['IFLFrames'][i][1] = self.guiNumFrames.value
            guiFramesList.controls[i].controls[1].label = "fr:" + str(self.guiNumFrames.value)
        if self.guiFramesList.callback: self.guiFramesList.callback(self.guiFramesList)  # Bit of a hack, but works

    ## @brief Handle events generated by the "Write .ifl file" button (guiWriteIFLFile).
    # @param control The invoking GUI control (guiWriteIFLFile)
    def handleGuiWriteIFLFileEvent(self, control):
        if self.guiSeqList.itemIndex > -1:
            seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
            seqPrefs['IFL']['WriteIFLFile'] = control.state

    ## @brief Handle events generated by the "IFL Image Frames" list (guiFramesList).
    # @param control The invoking GUI control (guiFramesList)
    def handleGuiFrameListEvent(self, control):
        guiFramesList = self.guiFramesList
        guiNumFrames = self.guiNumFrames
        if control.itemIndex > -1:
            seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
            guiNumFrames.value = seqPrefs['IFL']['IFLFrames'][control.itemIndex][1]
            curImageName = seqPrefs['IFL']['IFLFrames'][control.itemIndex][0]
            if curImageName != "" and curImageName != None:
                self.guiFrameSelectedBoxLabel.text = ("Selected image:\n \'%s\'" % curImageName)
        else:
            # Nothing selected: reset the hold count and the label.
            guiNumFrames.value = 1
            self.guiFrameSelectedBoxLabel.text = "Selected image:\n None Selected"
## @brief Handle events generated by the "Remove Visibility from selected" button
# @param control The invoking GUI control (guiSeqDelFromExisting)
def handleGuiSeqDelFromExistingEvent(self, control):
Prefs = DtsGlobals.Prefs
seqName, seqPrefs = self.getSelectedSeqNameAndPrefs()
if seqName != None:
Prefs.delIFLAnim(seqName)
self.refreshAll()
else:
message = "No sequence was selected.%t|Cancel"
# x = Blender.Draw.PupMenu(message)
del x
    #######################################
    # Refresh and Clear methods
    #######################################

    ## @brief Refreshes sequence specific option controls on the right side of the sequences panel.
    # @note This method should be called whenever the sequence list is refreshed, or when sequence
    # list selection changes.
    # @note Overrides parent class "virtual" method.
    # @param seqName The name of the currently selected sequence.
    # @param seqPrefs The preferences key of the currently selected sequence.
    def refreshSequenceOptions(self, seqName, seqPrefs):
        self.guiSeqOptsContainer.enabled = True
        self.guiSeqOptsContainer.visible = True
        self.refreshIFLMatPulldown()
        self.guiMat.selectStringItem(seqPrefs['IFL']['Material'])
        self.guiNumImages.value = seqPrefs['IFL']['NumImages']
        try:
            # NOTE(review): IFLFrames entries are [name, holdFrames] pairs
            # elsewhere; `[1]` here yields a pair, so this almost always falls
            # through to the except — likely meant `[0][1]`. Confirm intent.
            self.guiNumFrames.value = seqPrefs['IFL']['IFLFrames'][1]
        except:
            self.guiNumFrames.value = 1
        self.refreshImageFramesList(seqName)
        self.guiSeqSelectedBoxLabel.text = ("Selected Sequence:\n %s" % seqName)
        self.guiWriteIFLFile.state = seqPrefs['IFL']['WriteIFLFile']

    ## @brief Clears sequence specific option controls on the right side of the sequences panel.
    # @note Overrides parent class "virtual" method.
    # @note This method should be called when no sequence list item is currently selected.
    def clearSequenceOptions(self):
        self.guiSeqOptsContainer.enabled = False
        self.guiMat.selectStringItem("")
        self.guiNumImages.value = 1
        self.guiNumFrames.value = 1
        self.clearImageFramesList()
        self.guiNumFrames.value = 1
        self.guiSeqOptsContainer.enabled = False
        self.guiSeqSelectedBoxLabel.text = "Selected Sequence:\n None Selected"
        self.guiWriteIFLFile.state = False

    ## @brief Clears the list of IFL image frames
    # NOTE(review): despite the name/docstring this clears guiSeqList, not
    # guiFramesList — confirm which list was intended.
    def clearIFLList(self):
        # Empty each item's child controls first, then the list itself.
        for i in range(0, len(self.guiSeqList.controls)):
            del self.guiSeqList.controls[i].controls[:]
        del self.guiSeqList.controls[:]
        self.curSeqListEvent = 40  # reset event counter for list items — TODO confirm meaning of 40
        self.guiSeqList.itemIndex = -1
        self.guiSeqList.scrollPosition = 0
        if self.guiSeqList.callback: self.guiSeqList.callback(self.guiSeqList)  # Bit of a hack, but works
## @brief Refreshes the items in the IFL material menu.
def refreshIFLMatPulldown(self):
self.clearIFLMatPulldown()
# loop through all materials in the preferences and check for IFL materials
Prefs = DtsGlobals.Prefs
try:
x = list(Prefs['Materials'].keys())
except:
Prefs['Materials'] = {}
keys = list(Prefs['Materials'].keys())
keys.sort(lambda x, y: cmp(x.lower(), y.lower()))
for matName in list(Prefs['Materials'].keys()):
mat = Prefs['Materials'][matName]
try:
x = mat['IFLMaterial']
except KeyError:
mat['IFLMaterial'] = False
if mat['IFLMaterial'] == True:
self.guiMat.items.append(matName)
    ## @brief Clears the items in the IFL material menu.
    def clearIFLMatPulldown(self):
        self.guiMat.itemIndex = -1
        self.guiMat.items = []

    ## @brief Refreshes the items in the IFL Image Frames list based on current pref settings
    def refreshImageFramesList(self, seqName):
        Prefs = DtsGlobals.Prefs
        self.clearImageFramesList()
        guiFramesList = self.guiFramesList
        # Each entry is a [imageName, holdFrames] pair.
        IFLMat = Prefs['Sequences'][seqName]['IFL']['IFLFrames']
        for fr in IFLMat:
            guiFramesList.addControl(self.createFramesListItem(fr[0], fr[1]))

    ## @brief Clears the items in the IFL Image Frames list
    def clearImageFramesList(self):
        # Empty each item's child controls first, then the list itself.
        for i in range(0, len(self.guiFramesList.controls)):
            del self.guiFramesList.controls[i].controls[:]
        del self.guiFramesList.controls[:]
        self.guiFramesList.itemIndex = -1
        self.guiFramesList.scrollPosition = 0
        if self.guiFramesList.callback: self.guiFramesList.callback(self.guiFramesList)  # Bit of a hack, but works

    #########################
    # Misc / utility methods
    #########################
    ## @brief Adds a new IFL sequence in the GUI and the prefs
    # @note Overrides parent class "virtual" method.
    def addNewAnim(self, seqName):
        Prefs = DtsGlobals.Prefs
        # add ifl pref key w/ default values
        Prefs.addIFLAnim(seqName)
        # re-populate the sequence list
        self.populateSequenceList()
        # Select the new sequence.
        self.selectSequence(seqName)

    ## @brief Creates a list item for the IFL Image Frames List
    # @param matName The name of the current IFL material
    # @param holdFrames The number of frames for which the image is to be displayed.
    def createFramesListItem(self, matName, holdFrames=1):
        guiContainer = Common_Gui.BasicContainer("", None, None)
        guiContainer.fade_mode = 0  # flat color
        guiName = Common_Gui.SimpleText("", matName, None, None)
        guiName.x, guiName.y = 5, 5
        guiHoldFrames = Common_Gui.SimpleText("", "fr:" + str(holdFrames), None, None)
        guiHoldFrames.x, guiHoldFrames.y = 140, 5
        # Add everything
        guiContainer.addControl(guiName)
        guiContainer.addControl(guiHoldFrames)
        return guiContainer

    ## @brief Removes the last item from the frames list box
    def removeLastItemFromFrameList(self):
        i = len(self.guiFramesList.controls) - 1
        try:
            del self.guiFramesList.controls[i].controls[:]
            del self.guiFramesList.controls[i]
        except IndexError:
            # List was already empty; nothing to remove.
            pass
        self.guiFramesList.itemIndex = -1
        self.guiFramesList.scrollPosition = 0
        if self.guiFramesList.callback: self.guiFramesList.callback(self.guiFramesList)  # Bit of a hack, but works
#########################
# Resize callback methods
#
# Each callback receives the invoking control plus the new width/height of the
# parent container and repositions/resizes the control using hard-coded pixel
# offsets. Assignment order is x, y, height, width.
#########################
## @brief Resize callback for guiMat
# @param control The invoking GUI control object
def guiMatResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 10, newheight - 63, 20, 120
## @brief Resize callback for guiNumImages
# @param control The invoking GUI control object
def guiNumImagesResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 132, newheight - 63, 20, 90
## @brief Resize callback for guiWriteIFLFile
# @param control The invoking GUI control object
def guiWriteIFLFileResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 232, newheight - 63, 20, 80
## @brief Resize callback for guiSeqIFLFrame
# @param control The invoking GUI control object
def guiSeqIFLFrameResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 64, 211, 20, 120
## @brief Resize callback for guiSeqIFLImageBox
# @param control The invoking GUI control object
def guiSeqIFLImageBoxResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 4, 5, 220, 241
## @brief Resize callback for guiSeqImageName
# @param control The invoking GUI control object
def guiSeqImageNameResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 15, 183, 20, 219
## @brief Resize callback for guiFramesListTxt
# @param control The invoking GUI control object
def guiFramesListTxtResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 10, 161, 20, 120
## @brief Resize callback for guiFramesList
# @param control The invoking GUI control object
def guiFramesListResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 10, 10, 146, 185
## @brief Resize callback for guiFramesListContainer
# @param control The invoking GUI control object
def guiFramesListContainerResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 196, 10, 146, newwidth - 206
## @brief Resize callback for guiFrameSelectedBoxLabel
# @param control The invoking GUI control object
def guiFrameSelectedBoxLabelResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 5, newheight - 35, 33, 107
## @brief Resize callback for guiFramesListSelectedTxt
# @param control The invoking GUI control object
def guiFramesListSelectedTxtResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 5, newheight - 60, 20, 120
## @brief Resize callback for guiNumFrames
# @param control The invoking GUI control object
def guiNumFramesResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 5, newheight - 88, 20, newwidth - 10
## @brief Resize callback for guiApplyToAll
# @param control The invoking GUI control object
def guiApplyToAllResize(self, control, newwidth, newheight):
    control.x, control.y, control.height, control.width = 5, newheight - 114, 20, newwidth - 10
| 1.570313
| 2
|
supp_experiments/Toy_GMM/run_NPL_toygmm.py
|
edfong/npl
| 6
|
12780931
|
<filename>supp_experiments/Toy_GMM/run_NPL_toygmm.py
"""
Running RR-NPL for Toy GMM (set R_restarts = 0 for FI-NPL)
"""
import numpy as np
import npl.sk_gaussian_mixture as skgm
import pandas as pd
import time
import copy
from npl import bootstrap_gmm as bgmm
from npl.maximise_gmm import init_toy
from npl.maximise_gmm import sampleprior_toy
from npl.maximise_gmm import sampleprior_toyMDP
def load_data(seed):
    """Load the simulated inseparable-GMM dataset for *seed* and unpack its parameters.

    Returns (y, N_data, K_clusters, D_data): the observations, sample size,
    number of mixture components and data dimension stored in the .npy file.
    """
    path = './sim_data/gmm_data_insep_seed{}.npy'.format(seed)
    gmm_data = np.load(path, allow_pickle=True).item()
    return gmm_data['y'], gmm_data['N'], gmm_data['K'], gmm_data['D']
def main_IS(B_postsamples,R_restarts): #B_postsamples is number of bootstrap samples, R_restarts is number of repeats in RR-NPL (set to 0 for FI-NPL)
    """Run RR-NPL on the separable toy GMM dataset and pickle the bootstrap posterior samples.

    Repeats the whole bootstrap `rep` times for timing/variability; writes one
    pickle per repetition under ./parameters/.
    """
    np.random.seed(100)
    gmm_data = np.load('./sim_data_plot/gmm_data_sep.npy',allow_pickle = True).item()
    #Extract parameters from data
    N_data = gmm_data['N']
    K_clusters = gmm_data['K']
    D_data = gmm_data['D']
    y = gmm_data['y']
    #prior settings
    alph_conc=0 #alph_concentration (0 => posterior bootstrap without prior pseudo-samples)
    T_trunc = 500 #DP truncation
    tol = 1e-7
    max_iter = 6000
    rep = 10  # number of independent repetitions of the full bootstrap
    for r in range(rep):
        start = time.time()
        pi_bb,mu_bb,sigma_bb= bgmm.bootstrap_gmm(B_postsamples,alph_conc,T_trunc,y,N_data,D_data,K_clusters,R_restarts,tol,max_iter,init_toy,None)
        end = time.time()
        print(end-start)  # wall-clock seconds for this repetition
        #save file
        dict_bb = {'pi': pi_bb.tolist(),'sigma': sigma_bb.tolist(), 'mu': mu_bb.tolist(),'time': end-start}
        par_bb = pd.Series(data = dict_bb)
        par_bb.to_pickle('./parameters/par_bb_sep_random_repeat_parallel_rep{}_B{}_plot{}'.format(R_restarts,B_postsamples,r))
def main_DP(B_postsamples,R_restarts): #B_postsamples is number of bootstrap samples, R_restarts is number of repeats in RR-NPL (set to 0 for FI-NPL)
    """Run NPL with a DP prior (concentration 10) over 30 seeded inseparable datasets and pickle each result."""
    for n in range(30):
        seed = 100+n
        np.random.seed(seed)
        y,N_data,K_clusters,D_data = load_data(seed)
        #prior settings
        alph_conc=10 #alph_concentration
        T_trunc = 500 #DP truncation
        tol = 1e-7
        max_iter = 6000
        start = time.time()
        pi_bb,mu_bb,sigma_bb= bgmm.bootstrap_gmm(B_postsamples,alph_conc,T_trunc,y,N_data,D_data,K_clusters,R_restarts,tol,max_iter,init_toy,sampleprior_toy)
        end = time.time()
        print(end-start)  # wall-clock seconds for this seed
        #save file
        dict_bb = {'pi': pi_bb.tolist(),'sigma': sigma_bb.tolist(), 'mu': mu_bb.tolist(),'time': end-start}
        par_bb = pd.Series(data = dict_bb)
        # R_restarts == 0 corresponds to FI-NPL (single MLE restart), saved under a different name.
        if R_restarts ==0:
            par_bb.to_pickle('./parameters/par_bb_insep_parallel_mle_rep_B{}_seed{}'.format(B_postsamples,seed)) #uncomment for FI-NPL
        else:
            par_bb.to_pickle('./parameters/par_bb_insep_random_repeat_parallel_alpha{}_rep{}_B{}_seed{}'.format(alph_conc,R_restarts,B_postsamples,seed))
def main_MDP(B_postsamples,R_restarts): #B_postsamples is number of bootstrap samples, R_restarts is number of repeats in RR-NPL (set to 0 for FI-NPL)
    """Run NPL with an MDP prior centred on NUTS posterior samples (loaded from disk) for 30 seeds."""
    for n in range(30):
        seed = 100+n
        alph_conc = 1000  # large concentration: MDP stays close to the NUTS posterior
        np.random.seed(seed)
        y,N_data,K_clusters,D_data = load_data(seed)
        T_trunc = 500 #DP truncation
        tol = 1e-7
        max_iter = 6000
        # Pre-computed NUTS posterior samples used as the MDP centering measure.
        par_nuts = pd.read_pickle('./parameters/par_nuts_insep_seed{}'.format(seed))
        start = time.time()
        pi_bb,mu_bb,sigma_bb= bgmm.bootstrap_gmm(B_postsamples,alph_conc,T_trunc,y,N_data,D_data,K_clusters,R_restarts,tol,\
            max_iter,init_toy,sampleprior_toyMDP,postsamples = par_nuts)
        end = time.time()
        print(end-start)  # wall-clock seconds for this seed
        #save file
        dict_bb = {'pi': pi_bb.tolist(),'sigma': sigma_bb.tolist(), 'mu': mu_bb.tolist(),'time': end-start}
        par_bb = pd.Series(data = dict_bb)
        par_bb.to_pickle('./parameters/par_bb_insep_random_repeat_parallel_alpha{}_rep{}_B{}_seed{}_MDP'.format(alph_conc, R_restarts,B_postsamples,seed))
if __name__ == '__main__':
    # 2000 bootstrap samples, 10 random restarts (RR-NPL); use 0 restarts for FI-NPL.
    main_IS(2000,10)
    main_DP(2000,10)
    main_MDP(2000,10)
| 2.28125
| 2
|
src/mining/preprocessing.py
|
Youssef-Mak/covid19-datamart
| 2
|
12780932
|
<gh_stars>1-10
import database_connect
import psycopg2
import pandas as pd
import math
from imblearn.under_sampling import RandomUnderSampler
import numpy as np
import os
def main():
    """Query the joined COVID-19 data-mart tables and run the preprocessing pipeline."""
    # Bug fix: pre-initialise so the finally clause cannot hit an unbound name
    # if database_connect.connect() itself raises.
    database_connection = None
    try:
        database_connection = database_connect.connect()
        cursor = database_connection.cursor()
        # One row per tracked case, joined across all dimension tables.
        query_string = '''
        SELECT spec.title, mob.metro_area, mob.subregion, f.resolved, f.un_resolved, f.fatal, phu.phu_name, p.age_group, p.gender, d.day, d.month, d.season
        FROM "Covid19DataMart".covid19_tracking_fact AS f
        INNER JOIN "Covid19DataMart".date_dimension AS d
        ON f.onset_date_dim_key = d.date_dim_key
        INNER JOIN "Covid19DataMart".patient_dimension AS p
        ON f.patient_dim_key = p.patient_dim_key
        INNER JOIN "Covid19DataMart".phu_dimension AS phu
        ON f.phu_dim_key = phu.phu_dim_key
        INNER JOIN "Covid19DataMart".mobility_dimension AS mob
        ON f.mobility_dim_key = mob.mobility_dim_key
        INNER JOIN "Covid19DataMart".special_measures_dimension as spec
        ON f.special_measures_dim_key = spec.special_measures_dim_key'''
        # Query the data
        print("Querying data...")
        raw_data = query_data(query_string, cursor)
        # Preprocess the data
        print("Preprocessing data...")
        preprocess_data(raw_data)
        cursor.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # Bug fix: close exactly once (the original closed and printed twice on success).
        if database_connection is not None:
            database_connection.close()
            print('Database connection closed.')
def preprocess_data(data, output_path=None):
    """
    Preprocesses the raw data by converting to numerical forms and outputting into a csv file ready to use.

    data        -- raw rows as returned by the SQL query in main()
    output_path -- optional CSV destination; defaults to the original
                   hard-coded absolute path for backward compatibility.

    List of Features:
    1. special measures title
    2. metro area
    3. subregion
    4. resolved, unresolved, fatal
    5. phu location name
    6. age
    7. gender
    8. day, month, season

    Preprocessing Pipeline Stages:
    Stage 1: Handling missing data
    Stage 2: Handling categorical attributes
    Stage 3: Data normalization
    Stage 4: Under or oversampling
    """
    if output_path is None:
        # NOTE(review): machine-specific path kept as the default for backward
        # compatibility; prefer passing output_path explicitly.
        output_path = "C:/Users/grayd/OneDrive/Documents/University of Ottawa/Fourth Year/Winter 2021/CSI4142/Group Project/Data Mart/covid19-datamart/data/preprocessed_data/mined_data.csv"
    # Separate the raw data into categories with columns in a dataframe
    unprocessed_data = separate_raw_data(data)
    # Handle missing data (in place)
    handle_missing_data(unprocessed_data)
    # Convert categorical attributes to numeric / one-hot columns
    processed_data = convert_categorical_to_numeric(unprocessed_data)
    # Normalize numeric columns (in place)
    normalize_data(processed_data)
    # Undersample the dominant class
    undersampled_df = undersample_data(processed_data)
    # Shuffle so downstream consumers do not receive class-sorted rows.
    undersampled_df = undersampled_df.sample(frac=1).reset_index(drop=True)
    # Save the processed data to a csv file for the next stage of the project.
    undersampled_df.to_csv(output_path, index=False)
def separate_raw_data(raw_data):
    """Wrap the raw query rows in a DataFrame with named feature columns."""
    feature_columns = [
        "Special Measure", "metro_area", "subregion", "resolved",
        "unresolved", "fatal", "phu_location", "age", "gender",
        "day", "month", "season",
    ]
    return pd.DataFrame(raw_data, columns=feature_columns)
def undersample_data(data):
    """Balance the classes by undersampling the dominant 'resolved' outcome.

    Keeps at most 5000 random resolved rows plus every fatal and unresolved
    row, and returns them as a new DataFrame with a fresh 0..n index.
    """
    resolved_indices = data[data["resolved"] == 1].index
    # Bug fix: never request more samples than exist — np.random.choice with
    # replace=False raises ValueError otherwise.
    sample_size = min(5000, len(resolved_indices))
    random_indices = np.random.choice(resolved_indices, sample_size, replace=False)
    resolved_sample = data.loc[random_indices]
    fatal_sample = data.loc[data[data["fatal"] == 1].index]
    unresolved_sample = data.loc[data[data["unresolved"] == 1].index]
    # pd.concat replaces the DataFrame.append chain (removed in pandas 2.0);
    # also fixes the original's undersample_df/undersampled_df naming mix-up.
    return pd.concat(
        [resolved_sample, fatal_sample, unresolved_sample], ignore_index=True
    )
def normalize_data(data):
    """Standardise the numeric columns ("day", "month", "age") in place.

    Each column is transformed to zero mean and unit (sample) standard deviation.
    """
    for column in ("day", "month", "age"):
        series = data[column]
        data[column] = (series - series.mean()) / series.std()
def convert_categorical_to_numeric(df):
    """One-hot encode the categorical features and turn the outcome flags and age into numbers.

    Returns a new DataFrame (encode_and_bind reassigns *df* on each pass).
    """
    # One hot encode resolved, unresolved, fatal. This is already pretty much done because there are
    # 3 columns of true/false already. Just need to replace these values with 1's and 0's.
    df["resolved"].replace([False, True], [0, 1], inplace=True)
    df["unresolved"].replace([False, True], [0, 1], inplace=True)
    df["fatal"].replace([False, True], [0, 1], inplace=True)
    # One hot encode special measure title, metro area, subregion, gender, and season
    features_to_encode = ['Special Measure', 'metro_area', 'subregion', 'gender', 'season', 'phu_location']
    for feature in features_to_encode:
        df = encode_and_bind(df, feature)
    # Convert age to numeric: fold the open-ended/unknown buckets into decade
    # labels, then drop the trailing 's' (e.g. "30s" -> 30).
    df["age"].replace(["<20", "UNKNOWN"], ["10s", "20s"], inplace=True)
    df["age"] = df["age"].str[0:-1]
    df["age"] = df["age"].astype(int)
    return df
def encode_and_bind(original_dataframe, feature_to_encode):
    """Return a copy of the frame with *feature_to_encode* replaced by its one-hot dummy columns."""
    dummies = pd.get_dummies(original_dataframe[[feature_to_encode]])
    combined = pd.concat([original_dataframe, dummies], axis=1)
    return combined.drop(columns=[feature_to_encode])
def handle_missing_data(data):
    """Impute missing values in place, column by column.

    Categorical columns fall back to their most common value (or a sentinel),
    the boolean outcome flags fall back to False, and age falls back to its mode.
    """
    # Replace 'None' special measure values with 'Other'.
    data["Special Measure"] = data["Special Measure"].fillna("Other")
    # Impute missing metro area / subregion with the most common values.
    data["metro_area"] = data["metro_area"].fillna("Greater Toronto Area")
    data["subregion"] = data["subregion"].fillna("Toronto Divison")  # sic: spelling kept from source data
    # Bug fix: the original called fillna(inplace=True) on a column-subset
    # *copy* (data[["resolved", ...]]), which never modified the frame.
    # Assign each column back explicitly instead.
    for flag in ("resolved", "unresolved", "fatal"):
        data[flag] = data[flag].fillna(False)
    # Impute missing phu location names.
    data["phu_location"] = data["phu_location"].fillna("Toronto Public Health")
    # Replace missing age values with the mode of the ages seen.
    data["age"] = data["age"].fillna(data["age"].mode()[0])
def query_data(query, cursor):
    """Execute *query* on the given DB-API cursor and return all fetched rows."""
    cursor.execute(query)
    return cursor.fetchall()
if __name__ == '__main__':
main()
| 2.75
| 3
|
uwtools/__init__.py
|
AlexEidt/uwtools
| 7
|
12780933
|
<reponame>AlexEidt/uwtools<gh_stars>1-10
from .parse_courses import parse_catalogs as course_catalogs
from .parse_courses import get_departments as departments
from .parse_schedules import gather as time_schedules
from .parse_schedules import get_academic_year as academic_year
from .parse_buildings import get_buildings as buildings
from .parse_buildings import geocode
| 1.265625
| 1
|
pyeureka/client.py
|
lajonss/pyeureka
| 4
|
12780934
|
import time
import requests
import pyeureka.validator as validator
import pyeureka.const as c
def get_timestamp():
    """Return the current Unix time, truncated to whole seconds."""
    now = time.time()
    return int(now)
class EurekaClientError(Exception):
    """Raised when a Eureka REST call returns an unexpected status code."""
    pass
class EurekaInstanceDoesNotExistException(Exception):
    """Raised when an operation (e.g. heartbeat) targets an instance unknown to the server (HTTP 404)."""
    pass
class EurekaClient:
    """Thin client for the Eureka REST API: registration, heartbeats and queries."""

    def __init__(self, eureka_url, instance_definition=None, verbose=False):
        """
        eureka_url is the address to send requests to.
        instance_definition is description of service
        NOT conforming (as of 16.05.17) to schema available in
        https://github.com/Netflix/eureka/wiki/Eureka-REST-operations
        Basic operations:
        service side:
        client = EurekaClient('localhost:8765', {'ipAddr': '127.0.0.1', 'port': 80, 'app': 'myapp'})
        client.register()
        client.heartbeat()
        client side:
        client = EurekaClient('localhost:8765')
        try:
            client.query(app='myapp')
        except EurekaClientError:
            print('operation failed')
        """
        self.eureka_url = eureka_url
        if instance_definition is not None:
            self.instance_definition = validator.validate_instance_definition(
                instance_definition)
            self.app_id = self.instance_definition['instance']['app']
            self.instance_id = self.instance_definition[
                'instance']['instanceId']
        self.verbose = verbose
        if verbose:
            print("EurekaClient running with verbosity enabled")
            # Bug fix: only print the definition when one was supplied — the
            # original unconditionally read self.instance_definition and
            # raised AttributeError for verbose query-only clients.
            if instance_definition is not None:
                print("instance_definition: {}".format(self.instance_definition))

    def register(self):
        """Register this instance with Eureka (expects HTTP 204)."""
        request_uri = self.eureka_url + '/eureka/apps/' + self.app_id
        self._request('POST', request_uri, 'registration',
                      204, payload=self.instance_definition)

    def deregister(self):
        """Remove this instance's registration from Eureka."""
        self._request('DELETE', comment='deregistration')

    def heartbeat(self):
        """Send a renewal heartbeat for this instance.

        Raises EurekaInstanceDoesNotExistException if the server answers 404.
        """
        request_uri = self._instance_uri() + '?status=UP&lastDirtyTimestamp=' + \
            str(get_timestamp())
        self._request('PUT', uri=request_uri, comment='heartbeat',
                      errors={404: EurekaInstanceDoesNotExistException})

    def query(self, app=None, instance=None):
        """Query registered apps/instances and return the decoded JSON body."""
        request_uri = self.eureka_url + '/eureka/apps/'
        if app is not None:
            request_uri += app
            if instance is not None:
                request_uri += '/' + instance
        elif instance is not None:
            request_uri = self.eureka_url + '/eureka/instances/' + instance
        request = self._request('GET', request_uri, 'query')
        return request.json()

    def query_vip(self, vip):
        """Query instances registered under a VIP address.

        NOTE(review): returns the raw response object, unlike query() which
        returns parsed JSON — confirm whether this asymmetry is intended.
        """
        request_uri = self.eureka_url + '/eureka/vips/' + vip
        request = self._request('GET', request_uri, 'query vip')
        return request

    def query_svip(self, svip):
        """Query instances registered under a secure VIP address (raw response; see query_vip)."""
        request_uri = self.eureka_url + '/eureka/svips/' + svip
        request = self._request('GET', request_uri, 'query svip')
        return request

    def take_instance_out_of_service(self):
        """Mark this instance OUT_OF_SERVICE on the server."""
        request_uri = self._instance_uri() + '/status?value=OUT_OF_SERVICE'
        self._request('PUT', request_uri, 'out of service')

    def put_instance_back_into_service(self):
        """Mark this instance UP again."""
        request_uri = self._instance_uri() + '/status?value=UP'
        self._request('PUT', request_uri, 'up')

    def update_metadata(self, key, value):
        """Set one metadata key/value pair on this instance."""
        request_uri = self._instance_uri() + \
            '/metadata?{}={}'.format(key, value)
        self._request('PUT', request_uri, 'update_metadata')

    def _instance_uri(self):
        # Base URI for operations targeting this registered instance.
        return self.eureka_url + '/eureka/apps/' + self.app_id + '/' + self.instance_id

    def _fail_code(self, code, request, comment, errors=None):
        """Raise an error (operation-specific if mapped in *errors*) unless the response status equals *code*."""
        if self.verbose:
            self._show_request(request, comment)
        if request.status_code != code:
            error = EurekaClientError
            if errors is not None and request.status_code in errors:
                error = errors[request.status_code]
            raise error({'request': request, 'comment': comment,
                         'status_code': request.status_code})

    def _show_request(self, request, comment):
        # Debug dump of a completed HTTP exchange (verbose mode only).
        print("{}:".format(comment))
        print("Request code: {}".format(request.status_code))
        print("Request headers: {}".format(request.headers))
        print("Request response: {}".format(request.text))

    def _request(self, method, uri=None, comment='operation', accepted_code=200, errors=None, payload=None):
        """Issue *method* against *uri* (default: this instance's URI) and validate the response status."""
        if uri is None:
            uri = self._instance_uri()
        request = c.EUREKA_REQUESTS[method](
            uri, headers=c.EUREKA_HEADERS[method], json=payload)
        self._fail_code(accepted_code, request, comment, errors=errors)
        return request
| 2.71875
| 3
|
plot_images.py
|
ayush-mundra/Hair_Style_Recommendation
| 2
|
12780935
|
## FUNCTIONS TO OVERLAYS ALL PICS!!
# NOTE(review): converted notebook cell — get_ipython() only exists when this
# runs inside IPython/Jupyter.
get_ipython().magic('matplotlib inline')
import cv2
from matplotlib import pyplot as plt
import numpy as np
import time as t
import glob, os
import operator
from PIL import Image
import pathlib
from pathlib import Path
# One folder of face pictures per subject / face shape to overlay.
image_dir = ["data/pics_for_overlaps/Sarah",
             "data/pics_for_overlaps/Allison",
             "data/pics_for_overlaps/Amanda_S",
             "data/pics_for_overlaps/Gisele",
             "data/pics_for_overlaps/Keira",
             "data/pics_for_overlaps/Squares"
             ]
plt.figure(figsize=(20,10))
from PIL import Image, ImageDraw,ImageFont
font = ImageFont.truetype("fonts/Arial.ttf", 20)
# 2x3 subplot grid: one cell per folder.
n_row = 2
n_col = 3
g = 0
text = ["Sarah-round","Allison-oval","Amanda-heart",'Gisele-long','Keira-square','All Squares']
for ddir in image_dir:
    a = .6   # starting alpha; reduced per image so successive overlays blend
    i = 0
    g += 1
    for f in os.listdir(ddir):
        if f.endswith('.jpg'):
            file, ext = os.path.splitext(f)
            im = Image.open(ddir+'/'+f)
            image = cv2.imread(ddir+'/'+f)  # NOTE(review): loaded but never used — confirm
            a = a-.01
            i += 1
            draw = ImageDraw.Draw(im)
            # Label the subplot with the subject/face-shape name and a running count.
            draw.text((10,10) ,text[g-1], fill=None, font=font, anchor=None)
            draw.text((10,30) ,str(i)+" Pics", fill=None, font=font, anchor=None)
            plt.subplot(n_row, n_col, g )
            plt.imshow(im, alpha = a)
| 2.765625
| 3
|
pylib/pointprocesses/__init__.py
|
ManifoldFR/hawkes-process-rust
| 28
|
12780936
|
"""
Algorithms for simulating point processes, stochastic processes used in statistical models.
Implemented in the Rust programming language.
"""
from . import temporal
from . import spatial
| 1.65625
| 2
|
deliver/dtimp/run_analysis.py
|
mariecpereira/Extracao-de-Caracteristicas-Corpo-Caloso
| 0
|
12780937
|
<filename>deliver/dtimp/run_analysis.py
# coding: utf-8
# In[ ]:
def run_analysis(rootdir):
    """Load the DTI volumes under *rootdir* and compute FA/MD-derived maps.

    Returns (wFA, FA, MD, fissure, eigvals, eigvects).
    NOTE(review): loadNiftiDTI, getFractionalAnisotropy and getFissureSlice
    are assumed to be defined/imported by the surrounding notebook — confirm.
    """
    # Bug fix: the original did `import glob as glob` and then *called the
    # module* (glob(...)), which is a TypeError. Import the function instead.
    from glob import glob
    import numpy as np
    print(rootdir)
    minc_filename = glob('{}/*.mnc'.format(rootdir))[0]
    print(minc_filename)
    tagfilename = glob('{}/*.tag'.format(rootdir))[0]
    brain_mask_filename = '{}/nodif_brain_mask.nii.gz'.format(rootdir)
    tag = np.loadtxt(tagfilename, skiprows=4, comments=';')
    eigvals, eigvects, T3 = loadNiftiDTI(rootdir, reorient=True)
    FA, MD = getFractionalAnisotropy(eigvals)
    # Clean up the FA map: NaNs -> 0, clip to the valid [0, 1] range.
    FA[np.isnan(FA)] = 0
    FA[FA > 1] = 1
    fissure, FA_mean = getFissureSlice(eigvals, FA)
    wFA = FA * abs(eigvects[0, 0])  # FA weighted by a principal eigenvector component
    return wFA, FA, MD, fissure, eigvals, eigvects
# In[ ]:
| 2.390625
| 2
|
fourth-year/EGC/EGC-1230-julgomrod/decide/census/models.py
|
JulianGR/university
| 0
|
12780938
|
from django.db import models
# One census row grants a voter permission to participate in a voting.
class Census(models.Model):
    voting_id = models.PositiveIntegerField()  # id of the voting
    voter_id = models.PositiveIntegerField()   # id of the registered voter
    class Meta:
        # A voter may appear at most once per voting.
        unique_together = (('voting_id', 'voter_id'),)
| 2.234375
| 2
|
addons/event_crm/models/crm_lead.py
|
SHIVJITH/Odoo_Machine_Test
| 0
|
12780939
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api
# Extends crm.lead with traceability back to the event / registrations
# that generated the lead via an event.lead.rule.
class Lead(models.Model):
    _inherit = 'crm.lead'
    event_lead_rule_id = fields.Many2one('event.lead.rule', string="Registration Rule", help="Rule that created this lead")
    event_id = fields.Many2one('event.event', string="Source Event", help="Event triggering the rule that created this lead")
    registration_ids = fields.Many2many(
        'event.registration', string="Source Registrations",
        groups='event.group_event_user',
        help="Registrations triggering the rule that created this lead")
    registration_count = fields.Integer(
        string="# Registrations", compute='_compute_registration_count',
        groups='event.group_event_user',
        help="Counter for the registrations linked to this lead")
    @api.depends('registration_ids')
    def _compute_registration_count(self):
        # Recomputed whenever the linked registrations change.
        for record in self:
            record.registration_count = len(record.registration_ids)
| 1.984375
| 2
|
day21.part2.py
|
gigs94/aoc2021
| 0
|
12780940
|
from re import U
import numpy as np
from collections import Counter, defaultdict
from pprint import pprint
def moves(pos, endv, pathz, rolls=0):
    """Append to *pathz* the landing square for every combination of three
    1-3 die rolls starting from *pos* (27 outcomes; *endv* is unused)."""
    if rolls == 3:
        pathz.append(pos)
        return
    for step in (1, 2, 3):
        nextPos = (pos + step - 1) % 10 + 1  # board squares wrap within 1..10
        moves(nextPos, endv, pathz, rolls + 1)
# Pre-compute, for each starting square, the multiset of landing squares
# reachable after one full turn (three quantum die rolls, 27 outcomes).
possibilities={}
for x in range (1,11):
    pathz=[]
    moves(x,0,pathz)
    #print(x, Counter(pathz), len(pathz))
    possibilities[x]=Counter(pathz)
#pu=dict({(4,0,8,0):1})
# Universe counts keyed by game state: (p1 pos, p1 score, p2 pos, p2 score).
pu=dict({(7,0,6,0):1})
p1wins=0
p2wins=0
onesmove=0
aa=0  # NOTE(review): unused
while len(pu.keys()) != 0:
    onesmove=not onesmove  # players alternate turns; player 1 moves first
    pun=defaultdict(int)
    for p1,s1,p2,s2 in pu.keys():
        universes=pu[(p1,s1,p2,s2)]
        if onesmove:
            # Branch every universe across player 1's 27 roll outcomes.
            for npos in possibilities[p1]:
                nscore=s1+npos
                if nscore>=21:
                    # Winning universes leave the state map permanently.
                    p1wins+=universes*possibilities[p1][npos]
                else:
                    pun[(npos,nscore,p2,s2)]+=universes*possibilities[p1][npos]
        else:
            # Same expansion for player 2.
            for npos in possibilities[p2]:
                nscore=s2+npos
                if nscore>=21:
                    p2wins+=universes*possibilities[p2][npos]
                else:
                    pun[(p1,s1,npos,nscore)]+=universes*possibilities[p2][npos]
    pu=pun.copy()
print(f'player1 wins: {p1wins}')
print(f'player2 wins: {p2wins}')
| 2.8125
| 3
|
sha256cracker.py
|
deceptivecz/portfolio
| 0
|
12780941
|
<gh_stars>0
import hashlib
# Read the target digest (lowercased to match hexdigest output) and open the wordlist.
hash_input = input("Enter hash here : ").lower()
wordlist = "fakeyou.txt"
try:
    words = open(wordlist)
except OSError:
    # Narrowed from a bare `except:` — only file-access errors are expected here.
    print("Wordlist not found.")
    quit()
def crack(hash_input, wordlist=None):
    """Return True (and print the match) if any wordlist entry hashes to *hash_input*.

    hash_input -- lowercase hex SHA-256 digest to search for
    wordlist   -- iterable of candidate words; defaults to the module-level
                  open file `words` for backward compatibility.
    """
    candidates = words if wordlist is None else wordlist
    for word in candidates:
        # Bug fix: strip the trailing newline before hashing — otherwise a
        # file-based wordlist entry can never match the target digest.
        candidate = word.strip()
        hs = hashlib.sha256(candidate.encode('utf-8')).hexdigest()
        if hs == hash_input:
            print("Password found!")
            print(candidate)
            return True
    return False
if (crack(hash_input) == False):
print("Not found")
| 3.703125
| 4
|
simulation/loader/location.py
|
GerardLutterop/corona
| 0
|
12780942
|
import datetime
import re
import time
from logging import getLogger
from random import randint, gauss, random
import pandas as pd
from .external import DataframeLoader
log = getLogger(__name__)
class PrimarySchoolClasses(DataframeLoader):
    """Derive one pseudo-location per (school, pupil age) combination from primary-school data."""
    DISABLE_CACHE = False  # caching toggle, presumably honoured by DataframeLoader — TODO confirm
    def __init__(self, pupils, present=None):
        # `present` is stored but not used in this class (possibly for subclasses) — TODO confirm.
        self._present = present
        super().__init__(pupils)
    def data(self) -> pd.DataFrame:
        """Return locations for all the classes in the supplied primary schools. Simple approximation: only one class
        per pupil-age, even if 80 pupils in one class..."""
        def rows():
            i = 0
            seen = set()
            # for row in self._source.itertuples(name='Segment'): Does not work! No column headings!
            for index, row in self._source.iterrows():
                for cell in row.items():
                    # Columns named 'leeftijd_<age>' ("age" in Dutch) carry per-age pupil data.
                    r = re.match('leeftijd_(\d+)', cell[0])
                    if not r:
                        continue
                    age = int(r.group(1))
                    # Emit each (school brin number, age) combination only once.
                    if (row.brin_nummer, age) in seen:
                        continue
                    seen.add((row.brin_nummer, age))
                    i += 1
                    yield {'location_id': i,
                           'postcode_target': row.postcode_target}
        return pd.DataFrame((row for row in rows()), columns=('location_id', 'postcode_target'))
| 2.953125
| 3
|
DB_Treeview.py
|
jakobis95/ILIAS---Test-Generator
| 0
|
12780943
|
<gh_stars>0
from tkinter import *
import tkinter as tk
from tkinter import ttk
from Fragen_GUI import formelfrage, singlechoice, multiplechoice, zuordnungsfrage, formelfrage_permutation
from ScrolledText_Functionality import Textformatierung
class UI():
def __init__(self,table_dict , db_interface, frame, screenwidth, ID, table_index_list, table_index_dict, Title, bg_color, button_color, label_color, Button_Font, Label_Font, *args, **kwargs):
# self.active = False
self.table_dict = table_dict
self.bg_color = bg_color
self.button_color = button_color
self.label_color = label_color
self.Button_Font = Button_Font
self.Label_Font = Label_Font
rel_Top_Abstand = .15
self.active = False # Aktivitätsflag für Such Eingabefeld
self.rel_Top_Abstand = rel_Top_Abstand
self.table_index_list = table_index_list #hier sind die header/index und die StringVar instanzen für jeden table
self.table_index_dict = table_index_dict
self.ScrText = Textformatierung()
print("das ist in Treeview", table_index_list[0][1][1])
self.ID = ID
self.db_I = db_interface
self.db_I.subscribe(self.update)
self.Frame = frame
self.trv_spec_Frame = Frame(frame)
self.trv_spec_Frame.place(relx=0, rely=.1)
#self.Width = screenwidth
self.Width = int(frame.winfo_screenwidth() / 1.25)
print("screenwidth of TRV Frame", self.Width)
self.create_style()
self.q = StringVar()
self.create_trv()
#self.Searchbox('blue')
self.db_I.get_complete_DB(self.ID)
self.trv.bind('<Double-Button-1>', self.Select_from_DB)
self.Searchbox("blue", Title, rel_Top_Abstand)
self.ent.bind('<Return>', self.search)
self.ent.bind('<FocusIn>', self.delete_placeholder)
self.ent.bind('<FocusOut>', self.delete_placeholder)
#self.ent.bind('<Return>', self.search)
print('init finished')
def create_trv(self):
# Create Treview Frame
self.DB_frame = tk.Frame(self.Frame)
self.DB_frame.place(relx=0, rely=self.rel_Top_Abstand)
# create Scrollbar
self.vsb = ttk.Scrollbar(self.DB_frame)
self.vsb.pack(side=RIGHT, fill=Y)
# create Treeview
self.trv = ttk.Treeview(self.DB_frame, columns=(1, 2, 3, 4, 5, 6, 7), show="headings", height=9,
style="mystyle.Treeview")
self.trv.configure(yscrollcommand=self.vsb.set)
self.trv.tag_configure('odd', background='#ff5733')
self.trv.pack(fill=BOTH)
# Create Treeview Headings
self.trv.heading(1, text="Titel")
self.trv.heading(2, text="Taxonomie")
self.trv.heading(3, text="Typ")
self.trv.heading(4, text="Fragentext")
self.trv.heading(5, text="Datum")
self.trv.heading(6, text="Author")
#self.trv.heading(8, text="Zuletzt verändert")
# Format Columns
self.width_gesamt = (self.Width / 7.7)*6
self.width_column = self.width_gesamt/24
self.trv.column(1, width=int(self.width_column*3), anchor=CENTER,
minwidth=int(self.Width / 30))
self.trv.column(2, width=int(self.width_column*3), anchor=CENTER,
minwidth=int(self.Width / 30))
self.trv.column(3, width=int(self.width_column*2), anchor=W,
minwidth=int(self.Width / 30))
self.trv.column(4, width=int(self.width_column*10), anchor=W,
minwidth=int(self.Width / 30))
self.trv.column(5, width=int(self.width_column*2), anchor=CENTER,
minwidth=int(self.Width / 30))
self.trv.column(6, width=int(self.width_column*4), anchor=W,
minwidth=int(self.Width / 30))
print('trv created')
def create_style(self):
# Create Stryle for treeview
style = ttk.Style()
style.configure("mystyle.Treeview", highlightthickness=0, bd=0,
font=('Verdana', 8)) # Modify the font of the body
style.configure("mystyle.Treeview.Heading", font=('Verdana', 10, 'bold')) # Modify the font of the headings
style.layout("mystyle.Treeview", [('mystyle.Treeview.treearea', {'sticky': 'nswe'})]) # Remove the borders
print('style created')
def Searchbox(self, color, Title, rel_Top_Abstand):
bd_Frame = tk.Frame(self.Frame, bg=self.label_color)
bd_Frame.place(relx=0, rely=0, relwidth=1, relheight=rel_Top_Abstand)
SearchBox = tk.Frame(bd_Frame, bg=self.label_color)
SearchBox.place(relx=0, rely=0.2, relwidth=1, relheight=.8)
Title_Label = Label(bd_Frame, text=Title, anchor='w', bd=5, bg=self.label_color, fg=self.bg_color)
Title_Label['font'] = self.Label_Font
Title_Label.place(relx=0, rely=0, relwidth=.25, relheight=1)
self.ent = Entry(SearchBox, textvariable=self.q, fg="grey")
self.q.set("Suche")
self.ent.place(relx=0.7, rely=0, relwidth=.1, relheight=1)
cbtn = Button(SearchBox, text="zurücksetzen", command=self.clear, bg=self.button_color, fg=self.bg_color)
cbtn['font'] = self.Button_Font
cbtn.place(relx=0.80, rely=0, relwidth=.1, relheight=1)
del_btn = Button(SearchBox, text="löschen", command=self.delete_selection, bg=self.button_color, fg=self.bg_color)
del_btn['font'] = self.Button_Font
del_btn.place(relx=0.90, rely=0, relwidth=.1, relheight=1)
def clear(self):
self.db_I.get_complete_DB(0)
def search(self, a):
q = self.q.get() #get search text from entry
self.db_I.search_DB(q, 0)
def delete_selection(self):
i = 0
item_list = []
for selection in self.trv.selection():
item_list.append(self.trv.item(selection))
print(self.trv.item(selection))
print(i)
i = i + 1
self.db_I.delete_DB_content(item_list, self.ID)
def neue_fromelfrage(self, choice_window):
choice_window.destroy()
work_window = Toplevel()
Work_on_question = formelfrage(self.table_dict, work_window, self.db_I, self.ScrText, self.table_index_list, self.table_index_dict, self.bg_color, self.label_color, self.button_color)
#self.db_I.empty_fragenauswahl()
def neue_singlechoicefrage(self, choice_window):
choice_window.destroy()
work_window = Toplevel()
work_on_question = singlechoice(self.table_dict, work_window, self.db_I, self.ScrText, self.table_index_list,
self.table_index_dict, self.bg_color, self.label_color, self.button_color)
def neue_multiplechoicefrage(self, choice_window):
choice_window.destroy()
work_window = Toplevel()
work_on_question = multiplechoice(self.table_dict, work_window, self.db_I, self.ScrText, self.table_index_list,
self.table_index_dict, self.bg_color, self.label_color, self.button_color)
def neue_zuordnungsfrage(self, choice_window):
choice_window.destroy()
work_window = Toplevel()
work_on_question = zuordnungsfrage(self.table_dict, work_window, self.db_I, self.ScrText, self.table_index_list,
self.table_index_dict, self.bg_color, self.label_color, self.button_color)
def neue_fromelfrage_permutation(self, choice_window):
choice_window.destroy()
work_window = Toplevel()
Work_on_question = formelfrage_permutation(self.table_dict, work_window, self.db_I, self.ScrText, self.table_index_list, self.table_index_dict, self.bg_color, self.label_color, self.button_color)
#self.db_I.empty_fragenauswahl()
def choose_qt_typ(self):
work_window = Toplevel(bg=self.bg_color)
work_window.geometry("%dx%d+%d+%d" % (self.Width/4, self.Width/10, self.Width/2, self.Width/4))
Menu_lbl = Label(work_window, text="Wählen Sie einen Fragentyp um Fortzufahren", bg=self.label_color, fg=self.bg_color)
Menu_lbl['font'] = self.Label_Font
Menu_lbl.pack(side="top", fill=X)
formelfrage = Button(work_window, text="Formelfrage", bg=self.button_color, fg=self.bg_color, command=lambda: self.neue_fromelfrage(work_window))
formelfrage['font'] = self.Button_Font
formelfrage.pack(side="top", fill=X)
singlechoice = Button(work_window, text="Single Choice Frage", bg=self.button_color, fg=self.bg_color, command=lambda: self.neue_singlechoicefrage(work_window))
singlechoice['font'] = self.Button_Font
singlechoice.pack(side="top", fill=X)
multiplechoice = Button(work_window, text="Multiple Choice Frage", bg=self.button_color, fg=self.bg_color, command=lambda: self.neue_multiplechoicefrage(work_window))
multiplechoice['font'] = self.Button_Font
multiplechoice.pack(side="top", fill=X)
zuordnungsfrage = Button(work_window, text="Zuodnungsfrage", bg=self.button_color, fg=self.bg_color, command=lambda: self.neue_zuordnungsfrage(work_window))
zuordnungsfrage['font'] = self.Button_Font
zuordnungsfrage.pack(side="top", fill=X)
formelfrage_permutation = Button(work_window, text="Formelfrage Permutation", bg=self.button_color, fg=self.bg_color, command=lambda: self.neue_fromelfrage_permutation(work_window))
formelfrage_permutation['font'] = self.Button_Font
formelfrage_permutation.pack(side="top", fill=X)
def update(self, db_data):
index = 0
self.trv.delete(*self.trv.get_children())
for table in db_data[self.ID]:
if index < 4:
for data in table:
self.trv.insert('', 'end', values=data)
#print("update", data)
index = index + 1#
def add_data_to_testdb(self):
i = 0
item_list = []
for selection in self.trv.selection():
item_list.append(self.trv.item(selection))
print(self.trv.item(selection))
print(i)
i = i + 1
self.db_I.add_question_to_temp(item_list)
# Selects the correlating data from the Treeview selection in the original DB.
def Select_from_DB(self, a):
    """Open the matching editor window for the question currently focused
    in the Treeview.

    The question type (column index 2) selects the editor class; all editor
    classes share the same constructor signature, so the previous five
    near-identical elif branches are collapsed into one dispatch table.
    """
    gesucht = self.trv.item(self.trv.focus())
    print("Titel gesucht:", gesucht)
    # Map DB question type -> (editor class, optional debug message).
    editors = {
        "formelfrage": (formelfrage, None),
        "singlechoice": (singlechoice,
                         "Hier wir in zukunft eine single Choice Frage geöffnet"),
        "multiplechoice": (multiplechoice,
                           "Hier wir in zukunft eine multiple Choice Frage geöffnet"),
        "zuordnungsfrage": (zuordnungsfrage,
                            "Hier wir in zukunft eine zuodnungsfrage Frage geöffnet"),
        "formelfrage_permutation": (formelfrage_permutation, None),
    }
    question_type = gesucht['values'][2]
    if question_type not in editors:
        print("der Fragentyp konnte nicht zugeornet werden ")
        return
    editor_cls, debug_msg = editors[question_type]
    work_window = Toplevel()
    work_window.title(gesucht['values'][3])
    if debug_msg:
        print(debug_msg)
    editor_cls(self.table_dict, work_window, self.db_I, self.ScrText,
               self.table_index_list, self.table_index_dict,
               self.bg_color, self.label_color, self.button_color)
    # Index 0 is the title column; if the display column order changes,
    # this index must change with it (see original author's note).
    self.db_I.get_question(gesucht['values'][0], 1)
def delete_placeholder(self, e):
    """Toggle the search entry's placeholder text on focus events.

    Fix: the original condition used the bitwise operator in
    ``len(...) < 1 & self.active == True``, which only behaved as intended
    by coincidence of operator precedence and comparison chaining.
    """
    if len(self.q.get()) < 1 and self.active:
        # Field is empty while active: restore the grey placeholder.
        self.q.set("Suche")
        self.ent.configure(fg="grey")
        self.active = False
    elif not self.active:
        # Placeholder still shown: clear it so the user can type.
        self.q.set("")
        self.ent.configure(fg="black")
        self.active = True
# def Testeinstellungen_UI(self):
# testeinstellungen = Testeinstellungen(DBI)
| 2.453125
| 2
|
tools/infer_mot_client.py
|
Mr-JNP/PaddleDetection
| 0
|
12780944
|
#
# Hello World client in Python
# Connects REQ socket to tcp://localhost:5555
#
import zmq
def main():
    """Send each test video path to the Paddle tracking server over ZeroMQ
    and print the server's reply for every request."""
    context = zmq.Context()
    print("Connecting to Paddle Server…")
    request_socket = context.socket(zmq.REQ)
    request_socket.connect("tcp://localhost:5555")
    video_paths = [
        "dataset/ict/CAM1-Case18-Low.mp4",
        "dataset/ict/CAM2-Case18-Low.mp4",
    ]
    try:
        for index, path in enumerate(video_paths):
            print(f"Sending Video: {index} ...")
            request_socket.send_string(path)
            # A REQ socket must receive the reply before the next send.
            reply = request_socket.recv()
            print(f"Received reply {index}, Status: {reply}")
    except KeyboardInterrupt:
        print("W: interrupt received, stopping...")
    finally:
        # Clean up the socket and the ZeroMQ context.
        request_socket.close()
        context.term()
# Run the client only when executed as a script.
if __name__ == "__main__":
    main()
| 2.953125
| 3
|
tests/conftest.py
|
sidhulabs/sidhulabs-py
| 0
|
12780945
|
import pytest
from _pytest.logging import LogCaptureFixture
from loguru import logger
@pytest.fixture
def caplog(caplog: LogCaptureFixture):
    """Override pytest's ``caplog`` fixture so it also captures loguru output.

    Loguru does not log through the stdlib ``logging`` machinery, so records
    emitted via ``loguru.logger`` are invisible to the stock ``caplog``.
    Registering ``caplog.handler`` as a loguru sink bridges the two; the
    sink is removed again after the test so handlers don't accumulate.
    """
    handler_id = logger.add(caplog.handler, format="{message}")
    yield caplog
    logger.remove(handler_id)
| 1.765625
| 2
|
letras_sao_iguais.py
|
rodrigolins92/exercicios-diversos
| 0
|
12780946
|
<reponame>rodrigolins92/exercicios-diversos
def SaoIguais(a, b, c):
    """Print whether the three values are all equal and return the result.

    Fix: the original did ``return print(...)``, which always returns
    ``None``. Returning the boolean is backward compatible (callers only
    relied on the printed side effect) and makes the function usable in
    expressions and tests.
    """
    iguais = a == b == c  # chained comparison: True only if all three match
    print("São Iguais" if iguais else "São diferentes")
    return iguais
# Read three letters from the user and report whether they are all equal.
x1 = input("Primeira letra: ")
x2 = input("Segunda letra: ")
x3 = input("Terceira letra: ")
resposta = SaoIguais(x1, x2, x3)
| 3.796875
| 4
|
core/callbacks.py
|
susemm/PyVin
| 0
|
12780947
|
__author__ = '<EMAIL>'
class Callbacks():
    """Minimal event -> handler registry with a default no-op handler.

    Fixes: replaced the deprecated, Python-2-only ``dict.has_key`` with the
    ``in`` operator, and the ``print`` statement with the ``print()`` call
    form — both work on Python 2 and 3.
    """

    def __init__(self):
        # event -> callable; only events registered via init() are bindable.
        self.handler = {}

    def init(self, events):
        """Register every event with the default no-op callback."""
        for event in events:
            self.handler[event] = self.callback

    def callback(self, *pros, **attrs):
        """Default handler: ignore the event."""
        pass

    def bind(self, event, handler):
        """Attach *handler* to *event*; silently ignored for undeclared events."""
        if event in self.handler:
            self.handler[event] = handler

    def dispatch(self, *args, **kwargs):
        """Invoke the handler bound to ``args[0]`` and return its result.

        Prints an error for unknown events; does nothing when called with
        no positional arguments.
        """
        if len(args) > 0:
            event = args[0]
            if event in self.handler:
                return self.handler[event](*args, **kwargs)
            else:
                print('error: no event')
#######################################################################################################################
class Test(Callbacks):
    """Small demo subclass exercising bind/dispatch on two events.

    Fix: ``print`` statements converted to the ``print()`` call form so the
    code runs under both Python 2 and Python 3.
    """
    EVT1 = 1
    EVT2 = 2

    def __init__(self):
        Callbacks.__init__(self)
        self.init([Test.EVT1, Test.EVT2])

    def cb1(self, event, str='', str2=''):
        # Demo handler for EVT1; echoes its payload arguments.
        print('11111')
        print(str)
        print(str2)

    def cb2(self, event, str):
        # Demo handler for EVT2 (never bound in ttt()).
        print('22222')
        print(str)

    def ttt(self):
        """Bind EVT1 and dispatch both events once."""
        self.bind(Test.EVT1, self.cb1)
        # self.bind(Test.EVT2, self.cb2)
        self.dispatch(Test.EVT1, 'test 111')
        # EVT2 was never bound, so this falls through to the no-op default.
        self.dispatch(Test.EVT2, 'test 222')


if __name__ == '__main__':
    test = Test()
    test.ttt()
| 3.25
| 3
|
http_requestor/core/models.py
|
mscam/http_requestor
| 0
|
12780948
|
import sys
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from celery import states
from celery.result import AsyncResult, allow_join_result
from .fields import JSONField
def validate_schedule_at(value):
    """Django field validator: reject datetimes that lie in the past."""
    if timezone.now() > value:
        raise ValidationError("Request schedule cannot be in the past!")
    return value
class HttpRequest(models.Model):
    """A scheduled outbound HTTP request, executed later by a Celery task."""

    # Supported HTTP verbs (stored lowercase in the DB).
    GET = 'get'
    HEAD = 'head'
    POST = 'post'
    PUT = 'put'
    DELETE = 'delete'
    METHOD_CHOICES = (
        (GET, _('Get')),
        (HEAD, _('Head')),
        (POST, _('Post')),
        (PUT, _('Put')),
        (DELETE, _('Delete')),
    )

    url = models.URLField()
    method = models.CharField(max_length=8, choices=METHOD_CHOICES)
    headers = JSONField(blank=True)  # request headers as JSON
    params = JSONField(blank=True)   # query-string parameters as JSON
    data = JSONField(blank=True)     # request body payload as JSON
    # Must not be in the past — see validate_schedule_at above.
    schedule_at = models.DateTimeField(validators=[validate_schedule_at])
    # Celery task id of the dispatched request; empty until a task is queued.
    task_id = models.CharField(max_length=36, blank=True, editable=False)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    @property
    def task_status(self):
        """Return the Celery state of this request's task.

        PENDING when no task has been queued yet; SUCCESS as soon as the
        related HttpResponse exists; otherwise the live Celery state.
        """
        if not self.task_id:
            return states.PENDING
        # NOTE(review): accessing self.httpresponse raises
        # RelatedObjectDoesNotExist when no response row exists yet —
        # presumably callers only reach this after dispatch; TODO confirm.
        if self.httpresponse:
            return states.SUCCESS
        # Inside a Celery worker, resolving an AsyncResult must be wrapped
        # in allow_join_result() to bypass the synchronous-join deadlock guard.
        in_celery = sys.argv and sys.argv[0].endswith('celery') and 'worker' in sys.argv
        if in_celery:
            with allow_join_result():
                result = AsyncResult(self.task_id)
        else:
            result = AsyncResult(self.task_id)
        return result.state

    def __str__(self):
        return f'{self.url} ({self.method}) at {self.schedule_at}'
class HttpResponse(models.Model):
    """The stored result of executing an HttpRequest (one-to-one)."""

    request = models.OneToOneField(HttpRequest, on_delete=models.CASCADE)
    status_code = models.PositiveIntegerField()
    headers = JSONField()  # response headers as JSON
    text = models.TextField(blank=True)  # response body, may be empty

    def __str__(self):
        return f'Response from url {self.request} ({self.request.method}): {self.status_code}'
| 2.078125
| 2
|
tests/data/aws/securityhub.py
|
ramonpetgrave64/cartography
| 2,322
|
12780949
|
# Canned AWS Security Hub ``GetHub`` API response used as a test fixture.
GET_HUB = {
    'HubArn': 'arn:aws:securityhub:us-east-1:000000000000:hub/default',
    'SubscribedAt': '2020-12-03T11:05:17.571Z',
    'AutoEnableControls': True,
}
| 1.328125
| 1
|
PJ/20_Python.py
|
vedgar/ip
| 5
|
12780950
|
<reponame>vedgar/ip
"""Primjer kako python (interpreter) obrađuje Python (programski jezik)."""
import tokenize, io, keyword, ast, dis, warnings, textwrap
def tokeni(string):
    """Print one ``TYPE'value'`` line per token of *string*.

    Keywords are printed as their own upper-cased spelling; all other
    tokens use the tokenize module's exact type name.
    """
    stream = tokenize.tokenize(io.BytesIO(string.encode('utf8')).readline)
    for tok in list(stream)[1:-1]:  # drop the ENCODING and ENDMARKER tokens
        if keyword.iskeyword(tok.string):
            tip = tok.string.upper()
        else:
            tip = tokenize.tok_name[tok.exact_type]
        print('\t' + tip + repr(tok.string))
def stablo_parsiranja(string):
    """Print the concrete parse tree of *string*.

    Requires Python <= 3.9: the ``parser`` module was removed in 3.10
    (PEP 617 switched CPython to a PEG parser). On newer versions an
    explanatory message (in Croatian) is printed instead.
    """
    warnings.simplefilter('ignore')  # ``parser`` emits a DeprecationWarning on 3.9
    try: import parser
    except ImportError: return print(textwrap.dedent('''\
        U verziji 3.10 Python je prešao na novi parser, koji više nije
        zasnovan na beskontekstnoj gramatici (i nije dostupan kroz
        Pythonovu standardnu biblioteku). To je u skladu s onim što smo
        rekli na nastavi, da sve više parsera producira direktno AST umjesto
        stabala parsiranja. Više o razlozima možete pročitati u PEP617.
        Ako ipak želite vidjeti stablo parsiranja za gornji komad koda,
        pokrenite ovaj program pod Pythonom 3.9 ili nižim.'''))
    def ispis(t, razina, nastavak=False):
        # Recursively pretty-print the nested-list tree from parser.suite().
        # ``nastavak`` suppresses the indent when continuing on the same line.
        if not nastavak: print(end=' '*2*razina)
        if len(t) == 2:
            if isinstance(t[~0], str):
                # Leaf: a (token-type, token-string) pair.
                tip, sadržaj = t
                if keyword.iskeyword(sadržaj): tip = sadržaj.upper()
                else: tip = tokenize.tok_name[tip]
                print(tip + repr(sadržaj))
            else:
                # Single-child nonterminal: print "symbol>" and continue inline.
                # Symbol numbers start at 256, hence the offset.
                print(t[0] - 256, end='>')
                ispis(t[1], razina, True)
        else:
            # Nonterminal with several children: one indent level deeper each.
            print(t[0] - 256)
            for podstablo in t[1:]:
                ispis(podstablo, razina + 1)
    ispis(parser.suite(string).tolist(), 0)
def apstablo(string):
    """Print the abstract syntax tree of *string* (pretty-printed on 3.9+)."""
    tree = ast.parse(string)
    try:
        print(ast.dump(tree, indent=4))
    except TypeError:
        # ``indent`` keyword only exists from Python 3.9 on.
        print(ast.dump(tree))
        print('Za ljepši ispis pokrenite ovo pod Pythonom 3.9 ili kasnijim.')
def bytecode(string): dis.dis(string)       # disassemble to CPython bytecode
def izvršavanje(string): exec(string)       # "execution": actually run the code
def source(string): print(string)           # echo the source text verbatim
if __name__ == '__main__':
    # Example snippet run through every processing stage below.
    primjer = textwrap.dedent('''\
        for x in 2, 3:
            print(x)
        ''')  # feel free to experiment!
    # Run each stage in pipeline order: raw source, token stream,
    # parse tree, AST, bytecode, and finally actual execution.
    for funkcija in source, tokeni, stablo_parsiranja, \
            apstablo, bytecode, izvršavanje:
        print(funkcija.__name__.join('[]').center(75, '-'))
        print()
        funkcija(primjer)
        print()
# Module:
# body = [...]:
# For:
# target = Name(id='x', ctx=Store())
# iter = Tuple(elts=[Num(n=2), Num(n=3)], ctx=Load())
# body = [...]
# Expr:
# value = Call:
# func = Name(id='print', ctx=Load())
# args = [Name(id='x', ctx=Load())]
# keywords = []
# orelse = []
| 3.203125
| 3
|