seq_id
stringlengths 7
11
| text
stringlengths 156
1.7M
| repo_name
stringlengths 7
125
| sub_path
stringlengths 4
132
| file_name
stringlengths 4
77
| file_ext
stringclasses 6
values | file_size_in_byte
int64 156
1.7M
| program_lang
stringclasses 1
value | lang
stringclasses 38
values | doc_type
stringclasses 1
value | stars
int64 0
24.2k
⌀ | dataset
stringclasses 1
value | pt
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
27132320608
|
"""
Main launching point of the Top Patch Server
"""
import base64
import uuid
import os
import logging
import logging.config
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.options
from redis import StrictRedis
from rq import Connection, Queue
from server.handlers import RootHandler, RvlLoginHandler, RvlLogoutHandler
from server.handlers import WebSocketHandler, AdminHandler
from receiver.api.core.newagent import NewAgentV1
from receiver.api.core.checkin import CheckInV1
from receiver.api.core.startup import StartUpV1
from receiver.api.rv.results import *
from receiver.api.core.results import *
from receiver.api.rv.updateapplications import UpdateApplicationsV1
from receiver.api.ra.results import RemoteDesktopResults
from receiver.api.monitoring.monitoringdata import UpdateMonitoringStatsV1
from db.client import *
from scheduler.jobManager import start_scheduler
from tornado.options import define, options
#import newrelic.agent
#newrelic.agent.initialize('/opt/TopPatch/conf/newrelic.ini')
# Command-line options registered with tornado.options; parsed in the
# __main__ block below and read via `options.port` / `options.debug`.
define("port", default=9001, help="run on port", type=int)
define("debug", default=True, help="enable debugging features", type=bool)
class Application(tornado.web.Application):
    """Tornado application wiring the agent-facing (rvl) URLs to handlers.

    The ([a-f0-9]{8}-...) fragment in most routes captures a version-4
    UUID agent id, which tornado passes to the handler as a positional arg.
    """
    def __init__(self, debug):
        handlers = [
            #Operations for the Monitoring Plugin
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/monitoring/monitordata/?", UpdateMonitoringStatsV1),
            #RA plugin
            (r"/rvl/ra/rd/results/?", RemoteDesktopResults),
            #Login and Logout Operations
            (r"/rvl/?", RootHandler),
            (r"/rvl/login/?", RvlLoginHandler),
            (r"/rvl/logout/?", RvlLogoutHandler),
            #Operations for the New Core Plugin
            (r"/rvl/v1/core/newagent/?", NewAgentV1),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/core/startup/?", StartUpV1),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/core/checkin/?", CheckInV1),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/rv/updatesapplications/?", UpdateApplicationsV1),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/core/results/reboot/?", RebootResultsV1),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/core/results/shutdown/?", ShutdownResultsV1),
            #New Operations for the New RV Plugin
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/rv/results/install/apps/os?",
                InstallOsAppsResults),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/rv/results/install/apps/custom?",
                InstallCustomAppsResults),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/rv/results/install/apps/supported?",
                InstallSupportedAppsResults),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/rv/results/install/apps/agent?",
                InstallAgentAppsResults),
            (r"/rvl/v1/([a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12})/rv/results/uninstall?",
                UnInstallAppsResults),
        ]
        template_path = "/opt/TopPatch/tp/templates"
        settings = {
            "cookie_secret": "patching-0.7",
            "login_url": "/rvl/login",
        }
        # Bug fix: honor the caller-supplied debug flag; previously the
        # `debug` parameter was accepted but debug=True was hard-coded.
        tornado.web.Application.__init__(self, handlers,
                                         template_path=template_path,
                                         debug=debug, **settings)

    def log_request(self, handler):
        """Log one request at a level chosen from the response status."""
        # NOTE(review): re-reading the logging config on every request is
        # expensive and re-adds handlers; consider loading it once at
        # startup — left in place here to preserve current behavior.
        logging.config.fileConfig('/opt/TopPatch/conf/logging.config')
        log = logging.getLogger('rvweb')
        log_method = log.debug
        if handler.get_status() <= 299:
            log_method = log.info
        elif handler.get_status() <= 399 and \
                handler.get_status() >= 300:
            # logging.warn is deprecated; warning() is the documented name.
            log_method = log.warning
        elif handler.get_status() <= 499 and \
                handler.get_status() >= 400:
            log_method = log.error
        elif handler.get_status() <= 599 and \
                handler.get_status() >= 500:
            log_method = log.error
        request_time = 1000.0 * handler.request.request_time()
        # Proxy-aware addressing: prefer X-Real-Ip / X-Forwarded-For when set.
        real_ip = handler.request.headers.get('X-Real-Ip', None)
        forwarded_ip = handler.request.headers.get('X-Forwarded-For', None)
        user_agent = handler.request.headers.get('User-Agent')
        log_message = '%d %s %s, %.2fms' % (handler.get_status(), handler._request_summary(), user_agent, request_time)
        if real_ip:
            log_message = (
                '%d %s %s %s %s, %.2fms' %
                (
                    handler.get_status(), handler._request_summary(),
                    real_ip, forwarded_ip, user_agent, request_time
                )
            )
        log_method(log_message)
if __name__ == '__main__':
    # Parse --port/--debug from the command line, then serve over TLS
    # using the certificate pair shipped under the TopPatch data dir.
    tornado.options.parse_command_line()
    ssl_dir = "/opt/TopPatch/tp/data/ssl/"
    application = Application(options.debug)
    https_server = tornado.httpserver.HTTPServer(
        application,
        ssl_options={
            "certfile": os.path.join(ssl_dir, "server.crt"),
            "keyfile": os.path.join(ssl_dir, "server.key"),
        },
    )
    https_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
|
SteelHouseLabs/vFense
|
tp/src/vFense_listener.py
|
vFense_listener.py
|
py
| 5,617
|
python
|
en
|
code
| 5
|
github-code
|
6
|
4707609049
|
#!/usr/bin/env python
import tensorflow as tf
import numpy as np
# from tensorflow.examples.tutorials.mnist import input_data
def init_weights(shape):
    """Create a trainable weight tensor of *shape*, drawn from N(0, 0.01)."""
    initial = tf.random_normal(shape, stddev=0.01)
    return tf.Variable(initial)
# Bug fix: input_data is used below but its import was commented out at
# the top of the file, raising NameError; restore it here.
from tensorflow.examples.tutorials.mnist import input_data

mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
trX = trX.reshape(-1, 28, 28, 1)  # 28x28x1 input img
teX = teX.reshape(-1, 28, 28, 1)  # 28x28x1 input img

X = tf.placeholder("float", [None, 28, 28, 1])
Y = tf.placeholder("float", [None, 10])

w = init_weights([3, 3, 1, 32])        # 3x3x1 conv, 32 outputs
w2 = init_weights([3, 3, 32, 64])      # 3x3x32 conv, 64 outputs
w3 = init_weights([3, 3, 64, 128])     # 3x3x64 conv, 128 outputs
w4 = init_weights([128 * 4 * 4, 625])  # FC 128 * 4 * 4 inputs, 625 outputs
w_o = init_weights([625, 10])          # FC 625 inputs, 10 outputs (labels)

# Dropout keep-probabilities, fed at run time.
p_keep_conv = tf.placeholder("float")
p_keep_hidden = tf.placeholder("float")

# NOTE(review): `model` is defined further down this file, so this
# top-level call raises NameError when the script runs; the model()
# definition should be moved above this point.
py_x = model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))
train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)
predict_op = tf.argmax(py_x, 1)
def model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden):
    """Build the CNN: three conv/pool/dropout stages then an FC layer.

    Bug fix: the original signature was (X, w_o) although the call site
    passes all five weight tensors and both dropout probabilities — the
    call raised TypeError. The parameter list now matches the call.

    Returns the pre-softmax logits tensor (batch, 10).
    """
    l1a = tf.nn.relu(tf.nn.conv2d(X, w,                       # l1a shape=(?, 28, 28, 32)
                        strides=[1, 1, 1, 1], padding='SAME'))
    l1 = tf.nn.max_pool(l1a, ksize=[1, 2, 2, 1],              # l1 shape=(?, 14, 14, 32)
                        strides=[1, 2, 2, 1], padding='SAME')
    l1 = tf.nn.dropout(l1, p_keep_conv)
    l2a = tf.nn.relu(tf.nn.conv2d(l1, w2,                     # l2a shape=(?, 14, 14, 64)
                        strides=[1, 1, 1, 1], padding='SAME'))
    l2 = tf.nn.max_pool(l2a, ksize=[1, 2, 2, 1],              # l2 shape=(?, 7, 7, 64)
                        strides=[1, 2, 2, 1], padding='SAME')
    l2 = tf.nn.dropout(l2, p_keep_conv)
    l3a = tf.nn.relu(tf.nn.conv2d(l2, w3,                     # l3a shape=(?, 7, 7, 128)
                        strides=[1, 1, 1, 1], padding='SAME'))
    l3 = tf.nn.max_pool(l3a, ksize=[1, 2, 2, 1],              # l3 shape=(?, 4, 4, 128)
                        strides=[1, 2, 2, 1], padding='SAME')
    # Flatten to (?, 2048) so it can feed the fully connected layer.
    l3 = tf.reshape(l3, [-1, w4.get_shape().as_list()[0]])
    l3 = tf.nn.dropout(l3, p_keep_conv)
    l4 = tf.nn.relu(tf.matmul(l3, w4))
    l4 = tf.nn.dropout(l4, p_keep_hidden)
    pyx = tf.matmul(l4, w_o)
    return pyx
|
WYGNG/USTC_SSE_AI
|
实验/AI2019_SA19225404_吴语港_Lab4_TF1.x/AI2019_SA19225404_吴语港_Lab4_TF1.x/CNN.py
|
CNN.py
|
py
| 2,508
|
python
|
en
|
code
| 34
|
github-code
|
6
|
12424048897
|
__author__ = "Vanessa Sochat, Alec Scott"
__copyright__ = "Copyright 2021-2022, Vanessa Sochat and Alec Scott"
__license__ = "Apache-2.0"
from paks.utils.names import namer
from paks.logger import logger
import paks.utils
import paks.defaults
import paks.templates
import paks.commands
import paks.settings
import subprocess
import select
import string
import pty
import termios
import tty
import os
import sys
import re
class ContainerName:
    """
    Structured view of a raw container identifier.

    parse() splits the raw string (registry/repository/tool:version@digest)
    into named attributes using the project's docker regex.
    """

    def __init__(self, raw):
        self.raw = raw
        self.registry = None
        self.repository = None
        self.tool = None
        self.version = None
        self.digest = None
        self.parse(raw)
        self._name = None

    @property
    def extended_name(self):
        # Generate the random suffix once, lazily, then reuse it.
        if not self._name:
            self._name = namer.generate()
        return self.slug + "-" + self._name

    @property
    def name(self):
        if not self._name:
            self._name = namer.generate()
        return self._name

    @property
    def slug(self):
        # Dash-join the defined identifier parts, stripping dots from each.
        slug = ""
        for part in (self.registry, self.repository, self.tool, self.version):
            if not part:
                continue
            cleaned = part.replace(".", "")
            slug = cleaned if not slug else slug + "-" + cleaned
        return slug

    def parse(self, raw):
        """
        Split *raw* into known pieces via the shared docker regex.
        """
        matched = re.search(paks.templates.docker_regex, raw)
        if not matched:
            logger.exit("%s does not match a known identifier pattern." % raw)
        for field, piece in matched.groupdict().items():
            setattr(self, field, piece.strip("/") if piece else None)
class ContainerTechnology:
    """
    A base class for a container technology.

    Subclasses are expected to provide self.commands (command plugins),
    self.uri (a ContainerName) and self.image before the interactive
    methods are used — TODO confirm against the concrete backends.
    """
    def __init__(self, settings=None):
        # Fall back to the packaged default settings file when none given.
        if not settings:
            settings = paks.settings.Settings(paks.defaults.settings_file)
        self.settings = settings
    def get_history(self, line, openpty):
        """
        Given an input with some number of up/down and newline, derive command.
        """
        # "[A" is the tail of an up-arrow escape sequence, "[B" down-arrow;
        # their difference says how far back in history the user went.
        up = line.count("[A")
        down = line.count("[B")
        change = up - down
        # pushed down below history
        if change <= 0:
            return ""
        # Ask the history command plugin for the shell history inside the
        # container (self.hist is bound in interactive_command).
        history = self.hist.run(
            container_name=self.uri.extended_name,
            out=openpty,
            history_file=self.settings.history_file,
            user=self.settings.user,
        )
        history = [x for x in history.split("\n") if x]
        if not history:
            return ""
        if change > len(history):
            return ""
        # here we are looking back up into history (negative index)
        newline = history[-1 * change]
        # Add back any characters typed
        newline += re.split("(\[A|\[B)", line, 1)[-1]
        return newline
    def encode(self, msg):
        # Helper: utf-8 bytes for os.write on the pty fd.
        return bytes((msg).encode("utf-8"))
    def interactive_command(self, cmd):
        """
        Ensure we always restore original TTY otherwise terminal gets messed up
        """
        # Controller to get history
        self.hist = self.commands.history
        # save original tty setting then set it to raw mode
        old_tty = termios.tcgetattr(sys.stdin)
        old_pty = termios.tcgetattr(sys.stdout)
        try:
            self._interactive_command(cmd)
        finally:
            # Always restore both terminals, even if the session raised.
            termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
            termios.tcsetattr(sys.stdout, termios.TCSADRAIN, old_pty)
    def run_executor(self, string_input, openpty):
        """
        Given a string input, run executor
        """
        # Strip arrow-key escapes; only "#"-prefixed lines are paks commands.
        string_input = string_input.replace("[A", "").replace("[B", "")
        if not string_input.startswith("#"):
            return
        executor = self.commands.get_executor(string_input, out=openpty)
        if executor is not None:
            # Provide pre-command message to the terminal
            if executor.pre_message:
                print("\n\r" + executor.pre_message)
            # If we have an executor for the command, run it!
            # All commands require the original / current name
            result = executor.run(
                name=self.image,
                container_name=self.uri.extended_name,
                original=string_input,
            )
            if result.message:
                print("\r" + result.message)
    def clean(self, string_input):
        # Keep alphanumerics, punctuation, newlines and spaces; then drop
        # any remaining ESC characters left over from escape sequences.
        string_input = re.sub(
            r"[^a-zA-Z0-9%s\n\r\w ]" % string.punctuation, "", string_input
        )
        return string_input.replace("\x1b", "")
    def welcome(self, openpty):
        """
        Welcome the user and clear terminal
        """
        # Don't add commands executed to history
        os.write(openpty, self.encode(" export PROMPT_COMMAND='history -a'\r"))
        os.write(openpty, self.encode(" clear\r"))
        os.write(openpty, self.encode(" ### Welcome to PAKS! ###\r"))
    def _interactive_command(self, cmd):
        """
        Run an interactive command.

        Bridges the user's raw-mode stdin and the subprocess's pty with
        select(), intercepting paks commands, history recall and "exit".
        """
        tty.setraw(sys.stdin.fileno())
        # open pseudo-terminal to interact with subprocess
        openpty, opentty = pty.openpty()
        # use os.setsid() make it run in a new process group, or bash job control will not be enabled
        p = subprocess.Popen(
            cmd,
            preexec_fn=os.setsid,
            stdin=opentty,
            stdout=opentty,
            stderr=opentty,
            universal_newlines=True,
        )
        # Welcome to Paks!
        self.welcome(openpty)
        string_input = ""
        while p.poll() is None:
            # Multiplex: wake on either user keystrokes or subprocess output.
            r, w, e = select.select([sys.stdin, openpty], [], [])
            if sys.stdin in r:
                terminal_input = os.read(sys.stdin.fileno(), 10240)
                new_char = terminal_input.decode("utf-8")
                # if we have a backspace (ord 127)
                if len(new_char) == 1 and ord(new_char) == 127:
                    # Backspace to empty line
                    if len(string_input) > 0:
                        string_input = string_input[:-1]
                    if not string_input:
                        os.write(openpty, terminal_input)
                        continue
                else:
                    string_input = string_input + new_char
                # Get rid of left/right
                string_input = string_input.replace("[D", "").replace("[C", "")
                has_newline = "\n" in string_input or "\r" in string_input
                # Replace weird characters and escape sequences
                string_input = self.clean(string_input)
                # Universal exit command
                if "exit" in string_input and has_newline:
                    print("\n\rContainer exited.\n\r")
                    return self.uri.extended_name
                # Pressing up or down, but not enter
                if ("[A" in string_input or "[B" in string_input) and not has_newline:
                    string_input = self.get_history(string_input, openpty)
                    os.write(openpty, terminal_input)
                    continue
                # Pressing up or down with enter
                if ("[A" in string_input or "[B" in string_input) and has_newline:
                    string_input = self.get_history(string_input, openpty)
                    os.write(openpty, terminal_input)
                    if not string_input:
                        continue
                # If we have a newline (and possibly a command)
                if has_newline:
                    self.run_executor(string_input, openpty)
                    # Add derived line to the history
                    os.write(openpty, terminal_input)
                    string_input = ""
                else:
                    os.write(openpty, terminal_input)
            elif openpty in r:
                # Forward subprocess output straight to the user's stdout.
                o = os.read(openpty, 10240)
                if o:
                    os.write(sys.stdout.fileno(), o)
    def __str__(self):
        return str(self.__class__.__name__)
|
syspack/paks
|
paks/backends/base.py
|
base.py
|
py
| 8,161
|
python
|
en
|
code
| 2
|
github-code
|
6
|
34778096922
|
import os
import yaml
def book_ids_from_frontmatter(frontmatter):
    '''Return a list of book id hashes from frontmatter of list file.

    Expects frontmatter with a top-level `sections` list, each section
    carrying `listings` entries with `type` and `id` keys.
    '''
    # safe_load avoids arbitrary object construction: a bare yaml.load
    # without a Loader is unsafe on untrusted input and is deprecated
    # since PyYAML 5.1.
    sections = yaml.safe_load(frontmatter)['sections']
    books = []
    for section in sections:
        for source in section['listings']:
            if source['type'] == 'book':
                books.append(source['id'])
    return books
def get_asins_from_files(book_data_paths):
    '''Given list of file paths, return list of ASIN strings in YAML \
frontmatter in specified files.'''
    asins = []
    for path in book_data_paths:
        # with-statement guarantees the file is closed even if parsing
        # raises (the original leaked the handle on error).
        with open(path) as book_file:
            book_yaml = grab_yaml_frontmatter(book_file)
        # safe_load: frontmatter never needs arbitrary object loading.
        asins.append(str(yaml.safe_load(book_yaml)['amzn']))
    return asins
def get_book_data_paths(list_file_path, books):
    '''Given root book data directory, return list of paths to files that \
match book id hashes in given list of hashes.'''
    # The _books directory lives five levels above the list file.
    book_data_dir = os.path.abspath(list_file_path) + '/../../../../../_books'
    book_data_dir = os.path.abspath(book_data_dir)
    book_data_paths = []
    # os.walk yields (dirpath, dirnames, filenames) — the original names
    # (path, visit, arg) were misleading holdovers from os.path.walk.
    for dirpath, _dirnames, filenames in os.walk(book_data_dir):
        for filename in filenames:
            if os.path.splitext(filename)[1] == '.bib':
                for book in books:
                    if book in filename:
                        book_data_paths.append(os.path.join(dirpath, filename))
    return book_data_paths
def grab_yaml_frontmatter(f):
    '''Given a file, return YAML frontmatter as string, if present'''
    result = ''
    # Frontmatter must start on the very first line with a '---' fence.
    if f.readline() != '---\n':
        return result
    # Consume the rest of the file (matches the original, which read it
    # all via readlines), then collect lines up to the closing fence.
    for line in f.readlines():
        if line == '---\n':
            break
        result += line
    return result
|
Backlist/backlist-workflows
|
backlist.py
|
backlist.py
|
py
| 1,832
|
python
|
en
|
code
| 0
|
github-code
|
6
|
10159693048
|
# enter a no and print the sum of the 1st and last digits of that no
def sum_first_last(number):
    """Return the sum of the first (most significant) and last digits.

    Bug fix: for input 0 the original's loop never ran and `rem` was
    unbound, raising NameError; here the leading digit defaults to the
    last digit (both 0 for input 0).
    NOTE(review): negative input never terminates (floor division toward
    -inf), same as the original — callers should pass non-negatives.
    """
    last = number % 10
    lead = last
    while number != 0:
        lead = number % 10
        number = number // 10
    return last + lead


if __name__ == '__main__':
    number = int(input("Enter a no:"))
    print("the sum of the 1st and last digits is:", sum_first_last(number))
|
suchishree/django_assignment1
|
python/looping/while loop/demo3.py
|
demo3.py
|
py
| 253
|
python
|
en
|
code
| 0
|
github-code
|
6
|
6018015716
|
import os
import re
from pathlib import Path
summary = ""
def get_sql(name, docs, cat, markdown, docs_url):
    """Render an INSERT statement for the `ae-expression` table.

    NOTE(review): values are interpolated verbatim — neither quoted nor
    escaped. Fine for trusted local markdown, but never feed this
    untrusted input; use parameterized queries against a real database.
    """
    template = (
        "INSERT INTO `ae-expression` "
        "( `name`, `docs`, `cat`, `markdown`, `docs_url`) "
        "VALUES ( {}, {}, {}, {}, {});"
    )
    return template.format(name, docs, cat, markdown, docs_url)
def get_content(file_path, docs, cat):
    """Split a markdown file into '## heading' sections and print them.

    For each section, prints the heading (with '## ' and parentheses
    stripped) and its body. NOTE(review): get_sql's return value is
    discarded, so currently only the printing side effect remains.
    """
    with open(file_path, "r", encoding='utf-8') as file:
        markdown_text = file.read()
    # A section is a '## ...' heading plus the following non-heading lines.
    reg = r'^##.+\n[^#]+'
    result = re.findall(reg, markdown_text, re.M)
    for res in result:
        name = res.split("\n\n")[0]
        markdown = "\n".join(res.split("\n\n")[1:])
        # Category comes from the file name, overriding the caller's `cat`
        # (removed the original's no-op `docs = docs` assignment).
        cat = Path(file_path).name.replace(".md", "")
        print(name.replace("## ", "").replace("(", "").replace(")", ""))
        get_sql(name, docs, cat, markdown, "")
        print(markdown)
def print_folder_tree(folder_path, cat=""):
    """Walk *folder_path* (at most two levels) and extract docs from each
    markdown file via get_content.

    Bug fixes: `index` is initialized lazily (the original incremented a
    global that was never defined, raising NameError), and get_content is
    now called with all three required arguments (the original
    one-argument call raised TypeError).
    """
    global summary
    global index
    # Lazy init keeps this safe even when the module-level counter is absent.
    index = globals().get("index", 0) + 1
    for entry in os.scandir(folder_path):
        if index > 2:  # stop after descending two directory levels
            return
        summary += "\n"
        if entry.is_dir():
            print_folder_tree(entry.path, entry.name)
        elif entry.name != "summary.md":
            get_content(entry.path, "", cat)
# with open('data.txt', "r+", encoding='utf-8') as file:
# file_content = file.read()
# file.seek(0)
# file.truncate()
# file.write("# Title\n" + file_content)
# Root of the After Effects expression markdown tree to extract from.
root_dir = r"H:\Scripting\Vue Projects\docs2_yuelili_com\AE\expression"
# Example invocations (left disabled):
# print_folder_tree(root_dir)
# get_content(
#     r"H:\Scripting\Vue Projects\docs2_yuelili_com\AE\expression\General\Global.md")
|
Yuelioi/Program-Learning
|
Python/Projects/提取文件API的sql.py
|
提取文件API的sql.py
|
py
| 1,733
|
python
|
en
|
code
| 0
|
github-code
|
6
|
26408420089
|
import json
from socket import *
import base64
def client_json(ip, port, obj):
    """Send *obj* to the JSON server at (ip, port) and handle the reply.

    obj carries one of 'exec_cmd', 'upload_file' or 'download_file';
    uploads are base64-encoded before transmission, downloads are decoded
    and written to disk.
    """
    # Create a TCP socket and connect.
    sockobj = socket(AF_INET, SOCK_STREAM)
    sockobj.connect((ip, port))
    try:
        if 'upload_file' in obj.keys():
            # Read the local file and base64-encode its bytes so the
            # payload survives JSON serialization.
            with open('{0}'.format(obj['upload_file']), 'rb') as f:
                read_data = f.read()
            bytes_b64code = base64.b64encode(read_data)
            send_obj = {'upload_file': obj['upload_file'], 'file_bit': bytes_b64code.decode()}
        else:
            # 'exec_cmd' and 'download_file' requests are forwarded as-is.
            # Bug fix: the original left send_obj unbound (UnboundLocalError)
            # when obj carried none of the recognized keys.
            send_obj = obj
        # Serialize to JSON bytes and send in 1024-byte fragments.
        send_message = json.dumps(send_obj).encode()
        send_message_fragment = send_message[:1024]
        send_message = send_message[1024:]
        while send_message_fragment:
            sockobj.send(send_message_fragment)
            send_message_fragment = send_message[:1024]
            send_message = send_message[1024:]
        # Accumulate the reply until the server closes its side (recv
        # returning b'' terminates the loop).
        recieved_message = b''
        recieved_message_fragment = sockobj.recv(1024)
        while recieved_message_fragment:
            recieved_message = recieved_message + recieved_message_fragment
            recieved_message_fragment = sockobj.recv(1024)
        return_data = json.loads(recieved_message.decode())
        if 'download_file' not in return_data.keys():
            print('收到确认数据:', return_data)
        else:
            print('收到确认数据:', return_data)
            # NOTE(review): the download is always written to
            # 'download_file.py' regardless of the requested name.
            with open('download_file.py', 'w+') as f:
                b4code_back = bytes(return_data['file_bit'], 'GBK')
                file_info = base64.b64decode(b4code_back)
                f.write(file_info.decode())
            print('下载文件{0}保存成功!'.format((obj.get('download_file'))))
    finally:
        # Bug fix: close the socket even if an error occurs mid-transfer
        # (the original leaked the socket on any exception).
        sockobj.close()
if __name__ == '__main__':
    # Works with both the Linux and Windows interpreters.
    port = 6666
    # Exercise each request type against the listener in turn:
    # run a command, upload a file, then download it back.
    requests = (
        {'exec_cmd': 'pwd'},
        {'upload_file': 'snmpv2_get_file.py'},
        {'download_file': 'snmpv2_get_file.py'},
    )
    for request in requests:
        client_json('192.168.0.188', port, request)
|
Prin-Meng/NetDevOps
|
network_protocal/task_day13/socket_client.py
|
socket_client.py
|
py
| 2,627
|
python
|
en
|
code
| 0
|
github-code
|
6
|
5212403801
|
# -*- coding: utf-8 -*-
import scrapy
from junyang_spider.items import YouzySchoolBadgeItem
class SchoolBadgeSpider(scrapy.Spider):
    """Crawl youzy.cn college search pages and yield school badge items."""

    name = "school_badge"
    allowed_domains = ["youzy.cn"]
    start_urls = [
        "https://www.youzy.cn/college/search?page=1",
    ]
    # NOTE(review): pipeline path says 'gaokao.pipelines' while the item
    # import uses the junyang_spider package — confirm the project name.
    custom_settings = {
        'ITEM_PIPELINES': {'gaokao.pipelines.SchoolBadgePipeline': 200}
    }

    def parse(self, response):
        # Each search-result card is an <li class="clearfix">.
        for school in response.css("li.clearfix"):
            image_url = school.css('a img::attr("src")').extract_first()
            # Only keep absolute http(s) badge URLs.
            if image_url.find("http") != -1:
                badge = YouzySchoolBadgeItem()
                badge['school_name'] = school.css('a.name::text').extract_first()
                badge['image_url'] = image_url
                yield badge
        # Queue the remaining result pages (2..143).
        for page in range(2, 144):
            yield scrapy.Request('https://www.youzy.cn/college/search?page=%d' % page, callback=self.parse)
|
endForYou/spider
|
junyang_spider/spiders/school_badge_spider.py
|
school_badge_spider.py
|
py
| 914
|
python
|
en
|
code
| 0
|
github-code
|
6
|
44070613144
|
import numpy as np
import math
import matplotlib.pyplot as plt
class LotkaVolterra:
    """Right-hand side of the Lotka--Volterra prey-predator system.

    Attributes:
        k_a   reproduction rate of the antelopes
        k_ca  death rate of antelopes when they meet cheetahs
        k_c   death rate of cheetahs
        k_ac  reproduction rate of the cheetahs when they meet antelopes
    """
    def __init__(self, k_a, k_ca, k_c, k_ac):
        self.k_a = k_a
        self.k_ca = k_ca
        self.k_c = k_c
        self.k_ac = k_ac

    def __call__(self, x, t):
        # x = [antelopes, cheetahs]; returns dx/dt (t-independent).
        antelopes = x[0]
        cheetahs = x[1]
        rhs = np.zeros(len(x))
        rhs[0] = self.k_a * antelopes - self.k_ca * antelopes * cheetahs
        rhs[1] = -self.k_c * cheetahs + self.k_ac * antelopes * cheetahs
        return rhs
class Logistic:
    """Right-hand side of logistic population growth.

    Attributes:
        nu  growth rate of the population
        C   carrying capacity (limit size) of the population
    """
    def __init__(self, nu, C):
        self.nu = nu
        self.C = C

    def __call__(self, x, t):
        # dx/dt = nu * x * (1 - x/C); t is unused (autonomous system).
        saturation = 1 - x / self.C
        return self.nu * saturation * x
class ExplicitEuler:
    """Explicit (forward) Euler step for x' = f(x, t)."""
    def __init__(self, f):
        self.f = f

    def iterate(self, x0, t, dt):
        # x_{n+1} = x_n + dt * f(x_n, t_n)
        slope = self.f(x0, t)
        return x0 + dt * slope
class RK2:
    """Midpoint (second-order Runge-Kutta) step for x' = f(x, t)."""
    def __init__(self, f):
        self.f = f

    def iterate(self, x0, t, dt):
        # Evaluate f at the midpoint predicted by a half Euler step.
        midpoint = x0 + dt / 2 * self.f(x0, t)
        return x0 + dt * self.f(midpoint, t + dt / 2)
class Integrator:
    """Integrate an ODE from tMin to tMax with N points, starting at x0.

    `method` must expose iterate(x0, t, dt), e.g. ExplicitEuler or RK2.
    """
    def __init__(self, method, x0, tMin, tMax, N):
        self.x0 = x0
        self.tMin = tMin
        self.tMax = tMax
        self.dt = (tMax - tMin) / (N - 1)
        self.f = method

    def getIntegrationTime(self):
        """Time grid including (up to float rounding) the end point."""
        return np.arange(self.tMin, self.tMax + self.dt, self.dt)

    def integrate(self):
        """Run the scheme; returns an array of shape (steps + 1, len(x0))."""
        trajectory = np.array([self.x0])
        for t in np.arange(self.tMin, self.tMax, self.dt):
            nxt = self.f.iterate(trajectory[-1, :], t, self.dt)
            trajectory = np.append(trajectory, [nxt], axis=0)
        return trajectory
# Plot y versus x on the current figure with TeX-styled labels/ticks.
def plotData(x, y, color, legend):
    rc_settings = (
        ('text', {'usetex': True}),
        ('font', {'family': 'serif'}),
        ('xtick', {'labelsize': 20}),
        ('ytick', {'labelsize': 20}),
    )
    for group, params in rc_settings:
        plt.rc(group, **params)
    plt.ylabel('$a(t),c(t)$', fontsize=20)
    plt.xlabel('$t$', fontsize=20)
    plt.plot(x, y, color, linewidth=2.0, label=legend)
    plt.legend(loc=2, prop={'size': 20})
# Parametric plot of y versus x, with caller-supplied axis names.
def parametricPlotData(x, y, color, xAxis, yAxis, legend):
    for group, params in (
        ('text', {'usetex': True}),
        ('font', {'family': 'serif'}),
        ('xtick', {'labelsize': 20}),
        ('ytick', {'labelsize': 20}),
    ):
        plt.rc(group, **params)
    plt.xlabel('$' + xAxis + '$', fontsize=20)
    plt.ylabel('$' + yAxis + '$', fontsize=20)
    plt.plot(x, y, color, linewidth=2.0, label=legend)
    plt.legend(loc=2, prop={'size': 20})
# Plot the population of the antelope and the cheetah
x0 = np.array([2, 4])
tmin = 0
tmax = 100
rk2 = Integrator(RK2(LotkaVolterra(1,1,0.5,0.5)),x0,tmin,tmax,2000)
eul = Integrator(ExplicitEuler(LotkaVolterra(1,1,0.5,0.5)),x0,tmin,tmax,2000)
# Perf fix: integrate once per scheme instead of re-running the solver
# for every curve; integrate() is deterministic, so plots are identical.
t_rk = rk2.getIntegrationTime()
sol_rk = rk2.integrate()
t_e = eul.getIntegrationTime()
sol_e = eul.integrate()
plotData(t_rk, sol_rk[:,0], 'r-', "antelope (RK)")
plotData(t_rk, sol_rk[:,1], 'b-', "cheetah (RK)")
plotData(t_e, sol_e[:,0], 'g-', "antelope (E)")
plotData(t_e, sol_e[:,1], 'm-', "cheetah (E)")
plt.show()
# Phase-plane portraits of the same two solutions.
parametricPlotData(sol_rk[:,0], sol_rk[:,1], 'r-', 'a(t)', 'c(t)', "6 ini (RK)")
parametricPlotData(sol_e[:,0], sol_e[:,1], 'b-', 'a(t)', 'c(t)', "6 ini (E)")
plt.show()
# Computes the error between a coarse solution x and a reference xRef
# whose time grid is `ratio` times finer (column-wise L2, size-normalized).
def computeError(x, xRef, ratio):
    iMax = np.size(xRef, axis=0)
    totError = 0
    for col in np.arange(0, np.size(xRef, axis=1)):
        diff = x[:, col] - xRef[0:iMax:ratio, col]
        totError += math.sqrt(np.sum(np.square(diff))) / np.size(x[:, col])
    return totError
# Convergence study: resolutions to test and the fine reference grid.
n_rk = np.array([1000, 2000, 4000, 8000])
n_e = np.array([1000, 2000, 4000, 8000])
n_ref = 16000
tmin = 0
tmax = 13
# Reference solutions on the finest grid, one per scheme.
rk2 = Integrator(RK2(LotkaVolterra(1,1,0.5,0.5)),x0,tmin,tmax,n_ref)
solRefRK = rk2.integrate()
eul = Integrator(ExplicitEuler(LotkaVolterra(1,1,0.5,0.5)),x0,tmin,tmax,n_ref)
solRefE = eul.integrate()
errRK = []
for i in n_rk:
    rk = Integrator(RK2(LotkaVolterra(1,1,0.5,0.5)),x0,tmin,tmax,i)
    r_rk = n_ref//i
    # Perf fix: solve once and reuse; the original called integrate()
    # (and computeError) twice per resolution, doubling the runtime.
    sol = rk.integrate()
    err = computeError(sol, solRefRK, r_rk)
    errRK.append(err)
    print(err)
plt.loglog(n_rk,errRK,'ro',linewidth=2.0,label="RK2 error")
plt.loglog(n_rk,np.power(n_rk/10,-2),'k-',linewidth=2.0,label="-2 slope")
plt.legend(loc=3)
plt.show()
errE = []
for i in n_e:
    e = Integrator(ExplicitEuler(LotkaVolterra(1,1,0.5,0.5)),x0,tmin,tmax,i)
    ratio = n_ref//i
    # Bug fix: compare Euler runs against the Euler reference solRefE —
    # it was computed above but the original mistakenly reused solRefRK.
    # Also solve once per resolution instead of twice.
    sol = e.integrate()
    err = computeError(sol, solRefE, ratio)
    errE.append(err)
    print(err)
plt.loglog(n_rk,errE,'ro',linewidth=2.0,label="Euler error")
# Bug fix: the guide line is labeled "-1 slope" (Euler is first order),
# so plot n^-1 rather than the original n^-2.
plt.loglog(n_rk,np.power(n_e/100,-1),'k-',linewidth=2.0,label="-1 slope")
plt.legend(loc=3)
plt.show()
|
sidsriv/Simulation-and-modelling-of-natural-processes
|
lotkaVolterra.py
|
lotkaVolterra.py
|
py
| 5,583
|
python
|
en
|
code
| 21
|
github-code
|
6
|
19272008799
|
import MDAnalysis
import sys
import itertools
import tool
from argparse import ArgumentParser
"""
a = sys.argv
a.pop(0)
kai1 = [i for i in a if ".trr" in i]
kai2 = [i for i in a if ".pdb" in i]
kai3 = [i for i in a if "prob" in i]
kai4 = [i for i in a if ".trr" not in i and ".pdb" not in i
and ".txt" not in i and ".dat" not in i]
a = []
a.append([0, 0])
a.append(kai1)
a.append(kai2)
a.append(kai3)
a.append(kai4)
"""
# a = [i.split() for i in a]
# a[1]:trr_path_list
# a[2]:pdb_path_list
# a[3]:prob_path_list
# a[4]:data processing
"""
1:only CA coordinates
2:chain A and chain B | only CA
3:select residue coordinates like["3","C","N","O"]
4:chain A and chain B | selesct ATOM
"""
a=[]
def get_option():
    """Parse command-line options: trajectory/pdb/prob paths and the mode.

    (The --caluculation spelling is kept as-is: main() reads that attr.)
    """
    parser = ArgumentParser()
    for flags, helptext in (
        (("-trr", "--trajectory"), "path of trr"),
        (("-pdb", "--protein"), "path of pdb"),
        (("-prob", "--probtxt"), "path of prob"),
        (("-cal", "--caluculation"), "way of data processing"),
    ):
        parser.add_argument(*flags, type=str, help=helptext)
    return parser.parse_args()
def PDB_cal1(num1):
    """Return atom serial numbers of all CA atoms in pdb file a[1][num1]."""
    serials = []
    for record in open(a[1][num1], "r"):
        fields = record.split()
        if fields[2] == "CA":
            serials.append(int(fields[1]))
    return serials
def PDB_cal2(num1):
    """Return [chain-A CA serials, chain-B CA serials] from a[1][num1]."""
    chain_a = []
    chain_b = []
    for record in open(a[1][num1], "r"):
        fields = record.split()
        if fields[2] == "CA":
            # Chain id is column 4; A and B are mutually exclusive.
            if fields[4] == "A":
                chain_a.append(int(fields[1]))
            elif fields[4] == "B":
                chain_b.append(int(fields[1]))
    return [chain_a, chain_b]
def PDB_cal3(num1):
    """Return serials of atoms whose name appears in the a[3] selection."""
    selection = a[3]
    serials = []
    for record in open(a[1][num1], "r"):
        fields = record.split()
        if fields[2] in selection:
            serials.append(int(fields[1]))
    return serials
def PDB_cal4(num1):
    """Return [chain-A serials, chain-B serials] for atoms named in a[3]."""
    selection = a[3]
    chain_a = []
    chain_b = []
    for record in open(a[1][num1], "r"):
        fields = record.split()
        if fields[2] in selection:
            if fields[4] == "A":
                chain_a.append(int(fields[1]))
            elif fields[4] == "B":
                chain_b.append(int(fields[1]))
    return [chain_a, chain_b]
def PROB_cal(num1):
    """Return 0-based indices of nonzero lines in prob file a[2][num1]."""
    return [idx for idx, line in enumerate(open(a[2][num1], "r"))
            if float(line) != 0]
def main():
    """Parse CLI args into the global `a`, then print per-frame output.

    Bug fix: the inner result loops reused the outer loop variable `i`
    (the current trr path), so after the first file `a[0].index(i)`
    looked up a yielded string instead of the path; the inner loops now
    use their own names.
    """
    global a
    args = get_option()
    a.append(str(args.trajectory).split(","))
    a.append(str(args.protein).split(","))
    a.append(str(args.probtxt).split(","))
    a.append(str(args.caluculation).split(","))
    if len(a[0]) == len(a[1]) and len(a[1]) == len(a[2]):
        print("go")
        for i in a[0]:
            # NOTE(review): index() misbehaves when the same trr path is
            # listed twice; enumerate(a[0]) would be safer.
            num1 = a[0].index(i)
            if len(a[3]) > 1:
                if int(a[3][0]) == 4:
                    num_pdb = PDB_cal4(num1)
                elif int(a[3][0]) == 3:
                    num_pdb = PDB_cal3(num1)
            else:
                if int(a[3][0]) == 1:
                    num_pdb = PDB_cal1(num1)
                elif int(a[3][0]) == 2:
                    num_pdb = PDB_cal2(num1)
            # NOTE(review): num_pdb stays unbound when a[3] matches none
            # of the modes above (e.g. "-cal 3" without atom names).
            num_prob = PROB_cal(num1)
            u = MDAnalysis.Universe(i)
            frm = u.trajectory
            del u
            if int(a[3][0]) == 2 or int(a[3][0]) == 4:
                for row in safe_mem_distance(frm, num_prob, num_pdb):
                    cleaned = row.replace(",", "").replace("[", "").replace("]", "")
                    print(cleaned)
            else:
                for row in safe_mem_coordinates(frm, num_prob, num_pdb):
                    cleaned = row.replace(",", "").replace("[", "").replace("]", "")
                    print(cleaned)
    else:
        print("not match kind of trr or pdb, prob")
def safe_mem_distance(frm, num_prob, num_pdb):
    """Yield, for each frame whose index is in num_prob, the stringified
    list of chain-A x chain-B contact values for that trajectory frame.

    Frames not listed in num_prob are skipped (nothing is yielded).
    """
    num2 = 0  # current frame index
    for frm_num in frm:
        try:
            kai = []
            if num2 in num_prob:
                # Collect every pairwise contact value for this frame.
                for i in safe_safe_distance(num_pdb, frm_num):
                    kai.append(i)
                yield str(kai)
            # del x, y, z, FRM, kai
        except StopIteration:
            # NOTE(review): iterating with for-loops should not surface
            # StopIteration here; this guard looks like a leftover from a
            # next()-based version (cf. safe_mem_coordinates below).
            del kai
            break
        num2 += 1
def safe_safe_distance(num_pdb, FRM):
    """Yield tool.contact(...) for every chain-A x chain-B atom pair."""
    for j1 in num_pdb[0]:
        x1, y1, z1 = float(FRM[j1][0]), float(FRM[j1][1]), float(FRM[j1][2])
        for j2 in num_pdb[1]:
            yield tool.contact(x1, y1, z1,
                               float(FRM[j2][0]),
                               float(FRM[j2][1]),
                               float(FRM[j2][2]))
def safe_mem_coordinates(frm, num_prob, num_pdb):
    """Yield, for each frame whose index is in num_prob, the stringified
    flat list of x/y/z coordinates of the selected atoms in that frame.

    Drives the trajectory through a manual iterator so exhaustion
    surfaces as StopIteration and cleanly ends the loop.
    """
    frm_itr = iter(frm)
    num2 = 0  # current frame index
    while True:
        try:
            kai = []
            FRM = next(frm_itr)  # raises StopIteration at end of trajectory
            if num2 in num_prob:
                for j1 in num_pdb:
                    for j2 in range(3):  # x, y, z components
                        kai.append(float(FRM[j1][j2]))
                yield str(kai)
            # del x, y, z, FRM, kai
        except StopIteration:
            del kai
            break
        num2 += 1
# Script entry point.
if __name__ == '__main__':
    main()
|
satoshi-python/Desktop
|
pca1_kai.py
|
pca1_kai.py
|
py
| 5,325
|
python
|
en
|
code
| 0
|
github-code
|
6
|
17509147003
|
'''
start: 7:58
end: 8:11
13 mins
constraint:
|nodes| > 0
'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def diameterOfBinaryTree(self, root: "TreeNode") -> int:
        """Return the diameter (edge count of the longest path) of the tree.

        Bug fix: the TreeNode annotations are now string literals so this
        module imports standalone — TreeNode exists only in the comment
        above, and bare annotations are evaluated at definition time,
        which raised NameError.
        """
        maxdiam = 0
        def helper(curr: "TreeNode") -> int:
            # Returns the depth of `curr`; as a side effect updates
            # maxdiam with the longest path passing through `curr`
            # (left depth + right depth).
            nonlocal maxdiam
            if not curr:
                return 0
            leftdepth = helper(curr.left)
            rightdepth = helper(curr.right)
            maxdiam = max(maxdiam, leftdepth + rightdepth)
            return 1 + max(leftdepth, rightdepth)
        helper(root)
        return maxdiam
|
soji-omiwade/cs
|
dsa/before_rubrik/diameter_of_binary_tree_2021_06_28.py
|
diameter_of_binary_tree_2021_06_28.py
|
py
| 719
|
python
|
en
|
code
| 0
|
github-code
|
6
|
70893752508
|
import sys
import pandas as pd
from sklearn.feature_selection import SelectKBest, mutual_info_regression
from sklearn.preprocessing import LabelEncoder
import matplotlib.pyplot as plt
# Load the dataset
#filename = sys.argv[1]
data = pd.read_csv('uploads/BigBasket.csv')
# Encode categorical variables using label encoding
# NOTE(review): every column is label-encoded, including numeric ones
# like sale_price/rating — their ordinal meaning is replaced by encoded
# category codes; confirm this is intended.
le = LabelEncoder()
for column in data.columns:
    data[column] = le.fit_transform(data[column])
X = data[['index','product','category','sub_category','brand','sale_price','market_price','type','rating','description']]
y = data['brand']
# Use SelectKBest with mutual information to find the top 5 features
# NOTE(review): the target 'brand' is also a column of X, so it will
# trivially rank as a top feature (target leakage) — verify intent.
selector = SelectKBest(score_func=mutual_info_regression, k=5)
selector.fit(X, y)
# Print the top 5 features and their mutual information scores
top_features = selector.scores_
top_features_index = selector.get_support(indices=True)
feature_names = []
scores = []
for i, feature in enumerate(X.columns[top_features_index]):
    feature_names.append(feature)
    scores.append(top_features[top_features_index[i]])
# Sort the features based on their scores in descending order
sorted_features = sorted(zip(feature_names, scores), key=lambda x: x[1], reverse=True)
print("Top 5 features:")
for i in range(5):
    print(f'{i+1}. {sorted_features[i][0]} ({sorted_features[i][1]:.4f})')
# Define k-anonymity rules for the top 5 features
# None means "leave this column untouched".
k_anonymity_rules = {
    'index': None,
    'product': 2,
    'category': 3,
    'sub_category': 3,
    'brand': 2
}
# Apply k-anonymity to the top 5 features based on the defined rules
# (generalization by flooring encoded values to multiples of k).
# NOTE(review): X is a column-selection of `data`; assigning into it may
# emit pandas SettingWithCopyWarning — consider X = data[...].copy().
for feature in sorted_features[:5]:
    feature_name = feature[0]
    k = k_anonymity_rules.get(feature_name, None)
    if k is not None:
        X[feature_name] = X[feature_name] // k * k
# Save the anonymized data to a new CSV file
output_filename = 'BigBasket_anonymized.csv'
X.to_csv(output_filename, index=False)
# Plot the feature selection scores
plt.bar(feature_names, scores)
# # Add labels and title
plt.xlabel('Feature')
plt.ylabel('Score')
plt.title('Feature Selection Scores')
# Show the plot
plt.show()
# Print the filenames of the output files
print(f'Anonymized dataset saved to {output_filename}')
# print(f'Feature selection scores plot saved to {plot_filename}')
|
FireQueen-3010/MainProject
|
script.py
|
script.py
|
py
| 2,244
|
python
|
en
|
code
| 0
|
github-code
|
6
|
17110435205
|
import tkinter
from tkinter import *
from PIL import ImageTk, Image
# configure window
root = Tk()
windowColor = "#F2F2F2"
root.geometry("827x1500")
root.configure(bg = windowColor)
root.title("Train Build")
# create a container for canvas so window is scrollable
# window is a frame inside canvas that is a container for rest of application
container = Frame(root, bg = windowColor)
canvas = Canvas(container, bg = windowColor, bd=0, highlightthickness=0)
scrollbar = Scrollbar(container, orient="vertical", command=canvas.yview)
window = Frame(canvas, bg = windowColor)
# make canvas and window scrollable: whenever the inner frame resizes,
# grow the canvas scrollregion to cover all of its children
window.bind(
    "<Configure>",
    lambda e: canvas.configure(
        scrollregion=canvas.bbox("all")
    )
)
canvas.create_window((0, 0), window=window, anchor="nw")
canvas.configure(yscrollcommand=scrollbar.set)
# pack containers into root
container.pack(side="left", fill="both", expand=True)
canvas.pack(side="left", fill="both", expand=True)
scrollbar.pack(side="right", fill="y")
# header image; keep a module-level reference (`test`) so the PhotoImage is
# not garbage-collected, which would blank the label
image1 = Image.open("Asset 4.png")
test = ImageTk.PhotoImage(image1)
# NOTE(review): label is created with default master `root`, not the scrollable
# `window` frame — like most widgets below, it will not scroll. TODO confirm intent.
label1 = tkinter.Label(image=test, width = 753, height = 355, bg=windowColor)
label1.place(x=150, y=25)
# span information frame (white card holding the span inputs)
framea = LabelFrame(
    root,
    bg="#FFFFFF",
    bd=0,
    highlightthickness=0
)
framea.place(
    x=60,
    y=48,
    width=334,
    height=390
)
# section heading. NOTE(review): the name `infoSpan` is reassigned to a second
# heading further down, so this Label reference is lost afterwards (widget stays).
infoSpan = Label(root, text="Span Information", bg="#FFFFFF", font='Helvetica 22 bold')
infoSpan.place(x = 72, y = 64)
# start span name and label
# (entries are created with default master `root` and overlaid on the frame)
startSpanEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
startSpanEntry.configure(highlightbackground="black", highlightcolor="black")
startSpanEntry.place(
    x=292,
    y=104,
    width=87,
    height=22
)
startSpanLabel = Label(root, text="Start Span", bg="#FFFFFF")
startSpanLabel.place(x = 100, y = 108)
# end span name and label
endSpanEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
endSpanEntry.configure(highlightbackground="black", highlightcolor="black")
endSpanEntry.place(
    x=292,
    y=139,
    width=87,
    height=22
)
endSpanLabel = Label(root, text="End Span", bg="#FFFFFF")
endSpanLabel.place(x = 100, y = 142)
# span increment name and label
incrementSpanEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
incrementSpanEntry.configure(highlightbackground="black", highlightcolor="black")
incrementSpanEntry.place(
    x=292,
    y=174,
    width=87,
    height=22
)
incrementSpanLabel = Label(root, text="Span Increment", bg="#FFFFFF")
incrementSpanLabel.place(x = 100, y = 178)
# live-load type dropdown (placeholder option texts)
llTypeOptionList = ["Custom", "Option", "Option", "Option"]
lltext = StringVar()
lltext.set(llTypeOptionList[0])
lloption = OptionMenu(root, lltext, *llTypeOptionList)
lloption.configure(highlightbackground="#FFFFFF")
lloption.place(x = 279, y = 215)
llTypeLabel = Label(root, text="LL Type", bg="#FFFFFF")
llTypeLabel.place(x = 100, y = 213)
# sub-heading (rebinds the `infoSpan` name from the heading above)
infoSpan = Label(root, text="Moment/Stress Calculation Interval Locations", bg="#FFFFFF", font='Helvetica 13 bold')
infoSpan.place(x = 77, y = 248)
# start location name and label
startLocationEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
startLocationEntry.configure(highlightbackground="black", highlightcolor="black")
startLocationEntry.place(
    x=292,
    y=279,
    width=87,
    height=22
)
startLocationLabel = Label(root, text="Start Location", bg="#FFFFFF")
startLocationLabel.place(x = 100, y = 283)
# end location name and label
endLocationEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
endLocationEntry.configure(highlightbackground="black", highlightcolor="black")
endLocationEntry.place(
    x=292,
    y=314,
    width=87,
    height=22
)
endLocationLabel = Label(root, text="End Location", bg="#FFFFFF")
endLocationLabel.place(x = 100, y = 318)
# interval name and label
intervalEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
intervalEntry.configure(highlightbackground="black", highlightcolor="black")
intervalEntry.place(
    x=292,
    y=349,
    width=87,
    height=22
)
intervalLabel = Label(root, text="Interval", bg="#FFFFFF")
intervalLabel.place(x = 100, y = 353)
# step size name and label
stepSizeEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
stepSizeEntry.configure(highlightbackground="black", highlightcolor="black")
stepSizeEntry.place(
    x=292,
    y=384,
    width=87,
    height=22
)
stepSizeLabel = Label(root, text="Step Size", bg="#FFFFFF")
stepSizeLabel.place(x = 100, y = 388)
# loads frame (white card holding dead-load and Cooper-load inputs)
framec = LabelFrame(
    root,
    bg="#FFFFFF",
    bd=0,
    highlightthickness=0
)
framec.place(
    x=60,
    y=473,
    width=334,
    height=286
)
# "Dead Loads" heading. NOTE(review): `loadsDead` is rebound to the
# "Cooper Loads" heading below, so this reference is lost afterwards.
loadsDead = Label(root, text="Dead Loads", bg="#FFFFFF", font='Helvetica 22 bold')
loadsDead.place(x = 75, y = 489)
# girder type name and label (placeholder option texts)
girderTypeOptionList = ["Custom", "Option", "Option", "Option"]
girdertext = StringVar()
girdertext.set(girderTypeOptionList[0])
girderoption = OptionMenu(root, girdertext, *girderTypeOptionList)
girderoption.configure(highlightbackground="#FFFFFF")
girderoption.place(x = 292, y = 529)
typeGirderLabel = Label(root, text="Girder Type", bg="#FFFFFF")
typeGirderLabel.place(x = 100, y = 533)
# deck type name and label
deckTypeOptionList = ["Custom", "Option", "Option", "Option"]
decktext = StringVar()
decktext.set(deckTypeOptionList[0])
deckoption = OptionMenu(root, decktext, *deckTypeOptionList)
deckoption.configure(highlightbackground="#FFFFFF")
deckoption.place(x = 292, y = 564)
typeDeckLabel = Label(root, text="Deck Type", bg="#FFFFFF")
typeDeckLabel.place(x = 100, y = 568)
# "Cooper Loads" sub-heading (rebinds `loadsDead`)
loadsDead = Label(root, text="Cooper Loads", bg="#FFFFFF", font='Helvetica 22 bold')
loadsDead.place(x = 75, y = 597)
# cooper type name and label
cooperTypeOptionList = ["Custom", "Option", "Option", "Option"]
coopertext = StringVar()
coopertext.set(cooperTypeOptionList[0])
cooperoption = OptionMenu(root, coopertext, *cooperTypeOptionList)
cooperoption.configure(highlightbackground="#FFFFFF")
cooperoption.place(x = 292, y = 635)
typeCooperLabel = Label(root, text="Cooper Type", bg="#FFFFFF")
typeCooperLabel.place(x = 100, y = 639)
# cooper year name and label
cooperYearTypeOptionList = ["Custom", "Option", "Option", "Option"]
cooperYeartext = StringVar()
cooperYeartext.set(cooperYearTypeOptionList[0])
cooperYearoption = OptionMenu(root, cooperYeartext, *cooperYearTypeOptionList)
cooperYearoption.configure(highlightbackground="#FFFFFF")
cooperYearoption.place(x = 292, y = 670)
yearCooperLabel = Label(root, text="Cooper Year", bg="#FFFFFF")
yearCooperLabel.place(x = 100, y = 674)
# girder connection name and label
# BUG FIX(review): this dropdown previously reused girderTypeOptionList /
# girdertext / girderoption from the "Girder Type" dropdown above, clobbering
# those globals so the Girder Type selection could no longer be read. The
# widgets below are identical; only the variable names are now distinct.
connectionGirderOptionList = ["Custom", "Option", "Option", "Option"]
connectionGirdertext = StringVar()
connectionGirdertext.set(connectionGirderOptionList[0])
connectionGirderoption = OptionMenu(root, connectionGirdertext, *connectionGirderOptionList)
connectionGirderoption.configure(highlightbackground="#FFFFFF")
connectionGirderoption.place(x = 292, y = 705)
connectionGirderLabel = Label(root, text="Girder Connection", bg="#FFFFFF")
connectionGirderLabel.place(x = 100, y = 709)
# impact value heading (placed on the window background, not a white card)
valueImpact = Label(root, text="Impact Value", bg=windowColor, font='Helvetica 22 bold')
valueImpact.place(x = 448, y = 64)
# impact option and bridge. NOTE(review): `impacttext`/`impactoption` are
# reassigned by the "Mean Impact Load" dropdown further down, so this
# dropdown's selection variable becomes unreadable afterwards.
impactTypeOptionList = ["Unknown", "Option", "Option", "Option"]
impacttext = StringVar()
impacttext.set(impactTypeOptionList[0])
impactoption = OptionMenu(root, impacttext, *impactTypeOptionList)
impactoption.configure(highlightbackground="#FFFFFF")
impactoption.place(x = 633, y = 103)
impactTypeLabel = Label(root, text="Design Impact and Year", bg=windowColor)
impactTypeLabel.place(x = 474, y = 104)
# section modulus frame (white card)
frameb = LabelFrame(
    root,
    bg="#FFFFFF",
    bd=0,
    highlightthickness=0
)
frameb.place(
    x=433,
    y=282,
    width=334,
    height=477
)
# column labels. NOTE(review): `modulusSection` is rebound to the "Fatigue"
# heading below, losing this reference.
modulusSection = Label(root, text="Section Modulus", bg="#FFFFFF", font='Helvetica 22 bold')
modulusSection.place(x = 448, y = 300)
# section area load name and label — two independent checkbuttons
# (Gross/Net are not mutually exclusive here; both can be ticked)
areaCheck1Value = IntVar()
areaCheck1 = Checkbutton(
    root,
    text = "Gross",
    variable = areaCheck1Value,
    onvalue = 1,
    offvalue = 0,
    bg = "#ffffff"
)
areaCheck1.place(
    x = 665,
    y = 341
)
areaCheck2Value = IntVar()
areaCheck2 = Checkbutton(
    root,
    text = "Net",
    variable = areaCheck2Value,
    onvalue = 1,
    offvalue = 0,
    bg = "#ffffff"
)
areaCheck2.place(
    x = 665,
    y = 368
)
areaSectionLabel = Label(root, text="Section Area", bg="#FFFFFF")
areaSectionLabel.place(x = 473, y = 340)
# number of girders name and label
girderSectionEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
girderSectionEntry.configure(highlightbackground="black", highlightcolor="black")
girderSectionEntry.place(
    x=665,
    y=406,
    width=87,
    height=22
)
girderSectionLabel = Label(root, text="Number of Girders", bg="#FFFFFF")
girderSectionLabel.place(x = 473, y = 410)
# S value name and label
sValueEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
sValueEntry.configure(highlightbackground="black", highlightcolor="black")
sValueEntry.place(
    x=665,
    y=445,
    width=87,
    height=22
)
sValueLabel = Label(root, text="S Value", bg="#FFFFFF")
sValueLabel.place(x = 473, y = 449)
# Hammer Blow name and label — Yes/No as two independent checkbuttons
# (radio buttons would enforce exclusivity; TODO confirm intent)
blowCheck1Value = IntVar()
blowCheck1 = Checkbutton(
    root,
    text = "Yes",
    variable = blowCheck1Value,
    onvalue = 1,
    offvalue = 0,
    bg = "#ffffff"
)
blowCheck1.place(
    x = 665,
    y = 489
)
blowCheck2Value = IntVar()
blowCheck2 = Checkbutton(
    root,
    text = "No",
    variable = blowCheck2Value,
    onvalue = 1,
    offvalue = 0,
    bg = "#ffffff"
)
blowCheck2.place(
    x = 665,
    y = 516
)
blowHammerLabel = Label(root, text="Hammer Blow", bg="#FFFFFF")
blowHammerLabel.place(x = 473, y = 488)
# "Fatigue" sub-heading (rebinds `modulusSection`)
modulusSection = Label(root, text="Fatigue", bg="#FFFFFF", font='Helvetica 22 bold')
modulusSection.place(x = 448, y = 547)
# mean impact load name and label
# BUG FIX(review): this dropdown previously reused impactTypeOptionList /
# impacttext / impactoption from the "Design Impact and Year" dropdown above,
# clobbering those globals so that dropdown's selection could no longer be
# read. The widgets are identical; only the variable names are now distinct.
meanImpactOptionList = ["Custom", "Option", "Option", "Option"]
meanImpacttext = StringVar()
meanImpacttext.set(meanImpactOptionList[0])
meanImpactoption = OptionMenu(root, meanImpacttext, *meanImpactOptionList)
meanImpactoption.configure(highlightbackground="#FFFFFF")
meanImpactoption.place(x = 665, y = 587)
meanImpactLoadLabel = Label(root, text="Mean Impact Load", bg="#FFFFFF")
meanImpactLoadLabel.place(x = 473, y = 591)
# fatigue category name and label (placeholder option texts)
fatigueTypeOptionList = ["Custom", "Option", "Option", "Option"]
fatiguetext = StringVar()
fatiguetext.set(fatigueTypeOptionList[0])
fatigueoption = OptionMenu(root, fatiguetext, *fatigueTypeOptionList)
fatigueoption.configure(highlightbackground="#FFFFFF")
fatigueoption.place(x = 665, y = 622)
categoryFatigueLabel = Label(root, text="Fatigue Category", bg="#FFFFFF")
categoryFatigueLabel.place(x = 473, y = 626)
# ignore stress name and label
ignoreStressEntry = Entry(
    bd=0,
    bg="#E6E6E6",
    fg="#000716",
    highlightthickness=0
)
ignoreStressEntry.configure(highlightbackground="black", highlightcolor="black")
ignoreStressEntry.place(
    x=665,
    y=657,
    width=87,
    height=22
)
ignoreStressLabel = Label(root, text="Ignore Stress", bg="#FFFFFF")
ignoreStressLabel.place(x = 473, y = 661)
#####################################################
# train frame — empty white card reserved for the train-selection widgets
framet = LabelFrame(
    root,
    bg="#FFFFFF",
    bd=0,
    highlightthickness=0
)
framet.place(
    x=60,
    y=794,
    width=707,
    height=211
)
chooseTrain = Label(root, text="Choose Train", bg="#FFFFFF", font='Helvetica 22 bold')
chooseTrain.place(x = 92, y = 810)
#####################################################
# save button  (TODO: not implemented yet)
# close button (TODO: not implemented yet)
########################## tk mainloop ##########################
root.resizable(True, True)
# blocks until the window is closed
root.mainloop()
|
masonknight22/CE596-RailroadAnalysisMockup
|
analysis p1.py
|
analysis p1.py
|
py
| 12,035
|
python
|
en
|
code
| 0
|
github-code
|
6
|
17970705274
|
#setuptools.setup is looking at one argv parameter; to "build" and "install":
#python3 setup.py install
#libtorrent from pypi has bindings and library now, before was:
# python-libtorrent-bin is at extra require now, but, if was at install requires:
# ok, package python-libtorrent-bin is old. install with pip install --no-deps but add somehow appdirs
# and python-libtorrent on ubuntu
# if it's not old python-libtorrent at pip:
# that+(libtorrent-rasterbar2.0 on ubuntu) can be a solution
pkname='torra'
import pathlib
HERE = pathlib.Path(__file__).parent
#here README is ok, else will be README.md not found for pypi
# BUG FIX: read with an explicit encoding — Path.read_text() defaults to the
# platform locale encoding, so a non-ASCII README/version file would break the
# build on some systems (e.g. Windows cp1252).
README = (HERE / "info.md").read_text(encoding="utf-8")
# BUG FIX: strip surrounding whitespace so a trailing newline in the "v2"
# file does not end up inside the version string.
ver = (HERE / "v2").read_text(encoding="utf-8").strip()
from setuptools import setup
setup(name=pkname,
    version=ver,
    packages=[pkname],
    #opt
    python_requires='>=3',
    install_requires=["appdirs>=1.4.3"
        ,"libtorrent"
        #python-libtorrent-bin it's not updated at pypi (old 3.9)
        #,'python-libtorrent-bin>=1.2.9' #;platform_system=="Linux" and platform_machine=="x86_64"'
        #,"python-apt"#is from 2012 0.7.8, missing DistUtilsExtra, sudo apt install python-apt is 2.2., verify with pip3 install python-apt
    ],
    #extras_require={
    #    'bin': ['python-libtorrent-bin>=1.2.9']
    #    #,'apt': ['python-apt']
    #},
    description='Torrent client',
    long_description=README,
    long_description_content_type="text/markdown",
    url='https://github.com/colin-i/tora',
    author='cb',
    author_email='costin.botescu@gmail.com',
    license='MIT',
    entry_points = {
        'console_scripts': [pkname+'='+pkname+'.main:main']
    }
)
|
colin-i/tora
|
setup.py
|
setup.py
|
py
| 1,548
|
python
|
en
|
code
| 2
|
github-code
|
6
|
74743639546
|
import logging
logger = logging.getLogger('camelot.view.controls.formview')
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4.QtCore import Qt
import sip
from camelot.view.art import Icon
from camelot.view.model_thread import post
from camelot.view.model_thread import model_function
from camelot.view.controls.view import AbstractView
from camelot.view.controls.statusbar import StatusBar
from camelot.view import register
from camelot.view.action import ActionFactory
class ContextMenuAction(QtGui.QAction):
    """A QAction for a widget's context menu that falls back to a default
    'applications-system' icon when no icon is supplied."""

    default_icon = Icon('tango/16x16/categories/applications-system.png')

    def __init__(self, parent, title, icon = None):
        """
        :param parent: the widget on which the context menu will be placed
        :param title: text displayed in the context menu
        :param icon: camelot.view.art.Icon object
        """
        super(ContextMenuAction, self).__init__(title, parent)
        self.icon = icon
        # use the supplied icon when present, otherwise the class default
        chosen = self.icon if self.icon else self.default_icon
        self.setIcon(chosen.getQIcon())
class FormWidget(QtGui.QWidget):
    """A form widget comes inside a form view or inside an embedded manytoone editor.

    Wires a QDataWidgetMapper to a collection proxy model and renders one row
    of that model as a form of editors, laid out by the admin's form display.
    """
    # emitted whenever the displayed row or its data changes
    changed_signal = QtCore.pyqtSignal()
    def __init__(self, parent, admin):
        """:param parent: parent widget
        :param admin: object admin providing field attributes and the form layout"""
        QtGui.QWidget.__init__(self, parent)
        self._admin = admin
        # the mapper is looked up later by object name instead of being stored
        widget_mapper = QtGui.QDataWidgetMapper(self)
        widget_mapper.setObjectName('widget_mapper')
        widget_layout = QtGui.QHBoxLayout()
        widget_layout.setSpacing(0)
        widget_layout.setMargin(0)
        # state filled in asynchronously by set_model/_set_columns_and_form
        self._index = 0
        self._model = None
        self._form = None
        self._columns = None
        self._delegate = None
        self.setLayout(widget_layout)
    def get_model(self):
        """Return the model this form is currently mapped to (or None)."""
        return self._model
    def set_model(self, model):
        """Attach *model*, connect its change signals, and asynchronously
        fetch the columns and form layout from the model thread."""
        self._model = model
        self._model.dataChanged.connect( self._data_changed )
        self._model.layoutChanged.connect( self._layout_changed )
        self._model.item_delegate_changed_signal.connect( self._item_delegate_changed )
        self._model.setObjectName( 'model' )
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.setModel( model )
            register.register( model, widget_mapper )
        def get_columns_and_form():
            # runs in the model thread
            return (self._model.getColumns(), self._admin.get_form_display())
        post(get_columns_and_form, self._set_columns_and_form)
    def clear_mapping(self):
        """Detach all editors from the mapper (e.g. before reverting a row)."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.clearMapping()
    def _data_changed(self, index_from, index_to):
        #@TODO: only revert if this form is in the changed range
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.revert()
            # guard against the C++ object having been deleted already
            if not sip.isdeleted(self):
                self.changed_signal.emit()
    def _layout_changed(self):
        """Refresh the editors when the model layout changes."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.revert()
        self.changed_signal.emit()
    @QtCore.pyqtSlot()
    def _item_delegate_changed(self):
        """Pick up the model's new item delegate and rebuild the editors."""
        from camelot.view.controls.delegates.delegatemanager import \
            DelegateManager
        self._delegate = self._model.getItemDelegate()
        self._delegate.setObjectName('delegate')
        assert self._delegate != None
        assert isinstance(self._delegate, DelegateManager)
        self._create_widgets()
    def set_index(self, index):
        """Show row *index* of the model in the form."""
        self._index = index
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.setCurrentIndex(self._index)
    def get_index(self):
        """Return the row currently shown, or None if the mapper is gone."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            return widget_mapper.currentIndex()
    def submit(self):
        """Write the editors' current values back to the model."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.submit()
    def to_first(self):
        """Navigate to the first row and notify listeners."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.toFirst()
        self.changed_signal.emit()
    def to_last(self):
        """Navigate to the last row and notify listeners."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.toLast()
        self.changed_signal.emit()
    def to_next(self):
        """Navigate to the next row and notify listeners."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.toNext()
        self.changed_signal.emit()
    def to_previous(self):
        """Navigate to the previous row and notify listeners."""
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if widget_mapper:
            widget_mapper.toPrevious()
        self.changed_signal.emit()
    def export_ooxml(self):
        """Render the current row to an OOXML document and open it in Word.

        The heavy work runs in the model thread via ``post``."""
        from camelot.view.export.word import open_stream_in_word
        def create_ooxml_export(row):
            # print self._columns
            def ooxml_export():
                # TODO insert delegates
                fields = self._admin.get_all_fields_and_attributes()
                delegates = {}
                for field_name, attributes in fields.items():
                    delegates[field_name] = attributes['delegate'](**attributes)
                obj = self._model._get_object(row)
                document = self._form.render_ooxml(obj, delegates)
                open_stream_in_word( document )
            return ooxml_export
        post(create_ooxml_export(self.get_index()))
    @QtCore.pyqtSlot(tuple)
    def _set_columns_and_form(self, columns_and_form ):
        """Slot receiving (columns, form) fetched from the model thread."""
        self._columns, self._form = columns_and_form
        self._create_widgets()
    def _create_widgets(self):
        """Create value and label widgets"""
        from camelot.view.controls.field_label import FieldLabel
        from camelot.view.controls.editors.wideeditor import WideEditor
        #
        # Dirty trick to make form views work during unit tests, since unit
        # tests have no event loop running, so the delegate will never be set,
        # so we get it and are sure it will be there if we are running without
        # threads
        #
        if not self._delegate:
            self._delegate = self._model.getItemDelegate()
        #
        # end of dirty trick
        #
        # only if all information is available, we can start building the form
        if not (self._form and self._columns and self._delegate):
            return
        widgets = {}
        widget_mapper = self.findChild(QtGui.QDataWidgetMapper, 'widget_mapper' )
        if not widget_mapper:
            return
        widget_mapper.setItemDelegate(self._delegate)
        option = QtGui.QStyleOptionViewItem()
        # set version to 5 to indicate the widget will appear on a
        # a form view and not on a table view
        option.version = 5
        #
        # this loop can take a while to complete, so processEvents is called
        # regulary
        #
        for i, (field_name, field_attributes ) in enumerate( self._columns):
#            if i%10==0:
#                QtCore.QCoreApplication.processEvents(
#                    QtCore.QEventLoop.ExcludeSocketNotifiers,
#                    100
#                )
            model_index = self._model.index(self._index, i)
            hide_title = False
            if 'hide_title' in field_attributes:
                hide_title = field_attributes['hide_title']
            widget_label = None
            widget_editor = self._delegate.createEditor(
                self,
                option,
                model_index
            )
            widget_editor.setObjectName('%s_editor'%field_name)
            if not hide_title:
                widget_label = FieldLabel(
                    field_name,
                    field_attributes['name'],
                    field_attributes,
                    self._admin
                )
                widget_label.setObjectName('%s_label'%field_name)
                if not isinstance(widget_editor, WideEditor):
                    widget_label.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
            # required fields font is bold
            if ('nullable' in field_attributes) and \
               (not field_attributes['nullable']):
                font = QtGui.QApplication.font()
                font.setBold(True)
                widget_label.setFont(font)
            assert widget_editor != None
            assert isinstance(widget_editor, QtGui.QWidget)
            widget_mapper.addMapping(widget_editor, i)
            widgets[field_name] = (widget_label, widget_editor)
        widget_mapper.setCurrentIndex(self._index)
        self.layout().insertWidget(0, self._form.render(widgets, self))
        #self._widget_layout.setContentsMargins(7, 7, 7, 7)
class FormView(AbstractView):
    """A FormView is the combination of a FormWidget, possible actions and menu
    items

    .. form_widget: The class to be used as a the form widget inside the form
    view"""
    form_widget = FormWidget
    def __init__(self, title, admin, model, index):
        """:param title: window-title prefix
        :param admin: object admin for the displayed entity
        :param model: collection proxy model
        :param index: row of the model to display initially"""
        AbstractView.__init__(self)
        layout = QtGui.QVBoxLayout()
        form_and_actions_layout = QtGui.QHBoxLayout()
        form_and_actions_layout.setObjectName('form_and_actions_layout')
        layout.addLayout(form_and_actions_layout)
        self.model = model
        self.admin = admin
        self.title_prefix = title
        form = FormWidget(self, admin)
        form.setObjectName( 'form' )
        form.changed_signal.connect( self.update_title )
        form.set_model(model)
        form.set_index(index)
        form_and_actions_layout.addWidget(form)
        statusbar = StatusBar(self)
        statusbar.setObjectName('statusbar')
        statusbar.setSizeGripEnabled(False)
        layout.addWidget(statusbar)
        layout.setAlignment(statusbar, Qt.AlignBottom)
        self.setLayout(layout)
        self.change_title(title)
        if hasattr(admin, 'form_size') and admin.form_size:
            self.setMinimumSize(admin.form_size[0], admin.form_size[1])
        self.validator = admin.create_validator(model)
        # set to False once validation has been handled, to let close proceed
        self.validate_before_close = True
        def get_actions():
            # runs in the model thread
            return admin.get_form_actions(None)
        post(get_actions, self.setActions)
        self.update_title()
        #
        # Define actions
        #
        self.setContextMenuPolicy(Qt.ActionsContextMenu)
        self.addAction( ActionFactory.view_first(self, self.viewFirst) )
        self.addAction( ActionFactory.view_last(self, self.viewLast) )
        self.addAction( ActionFactory.view_next(self, self.viewNext) )
        self.addAction( ActionFactory.view_previous(self, self.viewPrevious) )
        self.addAction( ActionFactory.refresh(self, self.refresh_session) )
        self.addAction( ActionFactory.export_ooxml(self, form.export_ooxml) )
    @QtCore.pyqtSlot()
    def refresh_session(self):
        """Refresh all objects in the global ORM session."""
        from elixir import session
        from camelot.core.orm import refresh_session
        refresh_session( session )
    @QtCore.pyqtSlot()
    def refresh(self):
        """Refresh the data in the current view"""
        self.model.refresh()
    def update_title(self):
        """Asynchronously recompute and set the window title from the entity."""
        def get_title():
            # runs in the model thread
            obj = self.getEntity()
            return u'%s %s' % (
                self.title_prefix,
                self.admin.get_verbose_identifier(obj)
            )
        post(get_title, self.change_title)
    def getEntity(self):
        """Return the object currently displayed in the form (or None)."""
        form = self.findChild(QtGui.QWidget, 'form' )
        if form:
            return self.model._get_object(form.get_index())
    @QtCore.pyqtSlot(list)
    def setActions(self, actions):
        """Slot: build the side panel of form actions (if any)."""
        form = self.findChild(QtGui.QWidget, 'form' )
        layout = self.findChild(QtGui.QLayout, 'form_and_actions_layout' )
        if actions and form and layout:
            side_panel_layout = QtGui.QVBoxLayout()
            from camelot.view.controls.actionsbox import ActionsBox
            logger.debug('setting Actions for formview')
            actions_widget = ActionsBox(self, self.getEntity)
            actions_widget.setObjectName('actions')
            action_widgets = actions_widget.setActions(actions)
            # let each action widget react to form changes
            for action_widget in action_widgets:
                form.changed_signal.connect( action_widget.changed )
                action_widget.changed()
            side_panel_layout.insertWidget(1, actions_widget)
            side_panel_layout.addStretch()
            layout.addLayout(side_panel_layout)
    def viewFirst(self):
        """select model's first row"""
        form = self.findChild(QtGui.QWidget, 'form' )
        if form:
            form.submit()
            form.to_first()
    def viewLast(self):
        """select model's last row"""
        # submit should not happen a second time, since then we don't want
        # the widgets data to be written to the model
        form = self.findChild(QtGui.QWidget, 'form' )
        if form:
            form.submit()
            form.to_last()
    def viewNext(self):
        """select model's next row"""
        # submit should not happen a second time, since then we don't want
        # the widgets data to be written to the model
        form = self.findChild(QtGui.QWidget, 'form' )
        if form:
            form.submit()
            form.to_next()
    def viewPrevious(self):
        """select model's previous row"""
        # submit should not happen a second time, since then we don't want
        # the widgets data to be written to the model
        form = self.findChild(QtGui.QWidget, 'form' )
        if form:
            form.submit()
            form.to_previous()
    @QtCore.pyqtSlot(bool)
    def showMessage(self, valid):
        """Slot receiving the validation result; closes the view or asks the
        user what to do with the invalid row."""
        form = self.findChild(QtGui.QWidget, 'form' )
        if not valid and form:
            reply = self.validator.validityDialog(
                form.get_index(), self
            ).exec_()
            if reply == QtGui.QMessageBox.Discard:
                # clear mapping to prevent data being written again to the model,
                # then we reverted the row
                form.clear_mapping()
                self.model.revertRow(form.get_index())
                self.validate_before_close = False
                self.close()
        else:
            self.validate_before_close = False
            self.close()
    def validateClose(self):
        """Return True when the view may close immediately; otherwise start an
        asynchronous validation (which will close the view via showMessage)."""
        logger.debug('validate before close : %s' % self.validate_before_close)
        form = self.findChild(QtGui.QWidget, 'form' )
        if self.validate_before_close and form:
            # submit should not happen a second time, since then we don't
            # want the widgets data to be written to the model
            form.submit()
            def validate():
                # runs in the model thread
                return self.validator.isValid(form.get_index())
            post(validate, self.showMessage)
            return False
        return True
    def closeEvent(self, event):
        #print 'close event'
        logger.debug('formview closed')
        if self.validateClose():
            event.accept()
        else:
            event.ignore()
    @model_function
    def toHtml(self):
        """generates html of the form"""
        from jinja2 import Environment
        def to_html(d = u''):
            """Jinja 1 filter to convert field values to their default html
            representation
            """
            def wrapped_in_table(env, context, value):
                if isinstance(value, list):
                    return u'<table><tr><td>' + \
                           u'</td></tr><tr><td>'.join(
                                [unicode(e) for e in value]
                           ) + u'</td></tr></table>'
                return unicode(value)
            return wrapped_in_table
        entity = self.getEntity()
        fields = self.admin.get_fields()
        table = [dict( field_attributes = field_attributes,
                       value = getattr(entity, name ))
                 for name, field_attributes in fields]
        context = {
          'title': self.admin.get_verbose_name(),
          'table': table,
        }
        from camelot.view.templates import loader
        env = Environment(loader = loader)
        env.filters['to_html'] = to_html
        tp = env.get_template('form_view.html')
        return tp.render(context)
|
kurtraschke/camelot
|
camelot/view/controls/formview.py
|
formview.py
|
py
| 16,671
|
python
|
en
|
code
| 4
|
github-code
|
6
|
2128326759
|
from cumulusci.tasks.apex.anon import AnonymousApexTask
from cumulusci.core.exceptions import TaskOptionsError
import time
class SetBDIMappingMode(AnonymousApexTask):
    """Change the mapping mode for NPSP BGE/BDI.

    Runs the anonymous Apex matching the requested mode, then polls the
    Data Import custom settings until the mode change is visible.
    """

    task_docs = """
    Use the 'mode' argument to specify either 'Help Text' or 'Data Import Field Mapping'
    """

    help_text_apex = """
        BDI_MigrationMappingUtility.updateCustomSettings(
            BDI_MigrationMappingUtility.HELP_TEXT,
            String.valueOf(Metadata.DeployStatus.Succeeded));
        """

    data_import_field_mapping_apex = """
        BDI_MigrationMappingUtility migrationMappingUtility =
            new BDI_MigrationMappingUtility(
                new BDI_MigrationMappingHelper());
        migrationMappingUtility.migrateHelpTextToCustomMetadata();
        Id deploymentId = CMT_MetadataAPI.deployMetadata(
            migrationMappingUtility.queuedMetadataTypesForDeploy,
            new BDI_MigrationMappingUtility.DeploymentCallback());
        """

    task_options = {
        "mode": {
            "description": "'Help Text' or 'Data Import Field Mapping'",
            "required": True,
        },
    }

    def get_org_namespace_prefix(self):
        """Return 'npsp__' for managed/namespaced orgs, '' otherwise."""
        namespaced = bool(self.options.get("managed")) or bool(
            self.options.get("namespaced")
        )
        return "npsp__" if namespaced else ""

    def _validate_options(self):
        """Resolve the 'mode' option to the matching Apex snippet."""
        apex_by_mode = {
            "Help Text": self.help_text_apex,
            "Data Import Field Mapping": self.data_import_field_mapping_apex,
        }
        selected_mode = self.options.get("mode")
        if selected_mode not in apex_by_mode:
            raise TaskOptionsError(
                "You must specify mode as either 'Help Text' or 'Data Import Field Mapping'"
            )
        self.options["apex"] = apex_by_mode[selected_mode]
        super()._validate_options()

    def _run_task(self):
        """Execute the Apex, then poll (up to 600 tries, 3s apart) until the
        org reports the requested mode."""
        super()._run_task()
        self.logger.info("Deploying BDI mode {mode}".format(mode=self.options.get("mode")))
        attempts_remaining = 600
        while attempts_remaining:
            if self._get_di_mode() == self.options.get("mode"):
                return
            self.logger.info("Waiting for BDI metadata to deploy.")
            time.sleep(3)
            attempts_remaining -= 1
        raise AssertionError("Data Import mode never updated!")

    def _get_di_mode(self):
        """Query the org's current Field Mapping Method (None when no record)."""
        token = self.get_org_namespace_prefix()
        soql = "SELECT {token}Field_Mapping_Method__c FROM {token}Data_Import_Settings__c"
        res = self.sf.query_all(soql.format(token=token))
        records = res["records"]
        if records:
            return records[0]["{token}Field_Mapping_Method__c".format(token=token)]
|
SalesforceFoundation/NPSP
|
tasks/set_BDI_mapping_mode.py
|
set_BDI_mapping_mode.py
|
py
| 2,760
|
python
|
en
|
code
| 609
|
github-code
|
6
|
36262108045
|
from collections import deque
def solution(stats):
    """Greedily partition *stats* into strictly increasing runs.

    Each value is appended to the first existing pile whose last element is
    smaller than it; otherwise a new pile is started. Returns the number of
    piles created.
    """
    piles = []
    for value in stats:
        # first pile that can accept this value, or None
        target = next((pile for pile in piles if pile[-1] < value), None)
        if target is None:
            piles.append([value])
        else:
            target.append(value)
    return len(piles)
if __name__ == "__main__":
    # quick manual check: expected minimum number of increasing runs is 3
    stats = [6, 2, 3, 4, 1, 5]
    print(solution(stats))
|
hon99oo/PythonAlgorithmStudy
|
코테/스테이지파이브/solution2/solution.py
|
solution.py
|
py
| 636
|
python
|
en
|
code
| 0
|
github-code
|
6
|
33051867473
|
import numpy as np
import cv2
import pickle
import glob
import matplotlib.pyplot as plt
import os
import Lane_find_functions as Lff
import function_parameters as FP
import time
# video_name = 'test_video_4lanes_1.13.mp4'
# image_folder = './Test_images/dashcam_driving/'
# video_name = 'challenge_video_4lanes_1.8.mp4'
# image_folder = './Test_images/challnege_video/'
# video_name = 'harder_challenge_video_4lanes_1.8_fullscreen.mp4'
# image_folder = './Test_images/harder_challenge_video/'
#
# video_name = 'project_video_4lanes_1.11_confidence.mp4'
# image_folder = './Test_images/project_video/'
# current frame number, shared with main() via `global`
count=0
# pickle file used to publish the current frame number to a cooperating process
filename = 'frame_count'
# pickle file read back each frame — presumably written by another process to
# steer FP.binary_combinations; TODO confirm the producer
filename2 = 'calculated_binary_combinations'
def main():
    """Read dashcam frames frame0.jpg, frame1.jpg, ... from FP.dashcam_image_path,
    run the lane-detection pipeline on each, and write the results to an XVID
    video (FP.video_name). Stops at the first frame that cannot be read.

    Per frame it also publishes the frame number to the `filename` pickle and
    reads `filename2` back into FP.binary_combinations, and appends a timing
    line to fps_test_log.txt.
    """
    global count
    video_name = FP.video_name
    image_folder = FP.dashcam_image_path
    frame = cv2.imread(image_folder + "frame1.jpg")
    height, width, layers = frame.shape
    # output size is fixed by the overlay layout, not by the source frame
    if FP.fullscreen is False:
        height, width = 960, 1280
    else:
        height, width = 720, 1280
    print(frame.shape)
    video = cv2.VideoWriter(video_name, cv2.VideoWriter_fourcc(*'XVID'), 30, (width, height))
    count = 0
    try:
        while True:
            start = time.time()
            # publish the current frame number for the cooperating process
            with open(filename, 'wb') as outfile:
                pickle.dump(count, outfile)
            image = cv2.imread(image_folder + "frame%d.jpg" % count)
            if image is None:
                # BUG FIX: the original set success=0 here but fell through and
                # still processed the None image, crashing in the pipeline.
                print("error: image not read from file \n\n")
                break
            # pick up externally calculated binary combinations for this frame
            with open(filename2, 'rb') as infile:
                new_count = pickle.load(infile)
            print('xdxdxdxdxdxd '+str(new_count))
            FP.binary_combinations = new_count
            processed_image = Lff.process_image_4lanes(image, FP.fullscreen)
            cv2.putText(processed_image, 'frame ' + str(count), (40,80), cv2.FONT_HERSHEY_DUPLEX, 1, (255,0,0), 1, cv2.LINE_AA)
            video.write(processed_image)
            count += 1
            end = time.time()
            print('______________________________________')
            print('| wrote a new frame: ', count,' |',str(end - start)+'sec')
            print('______________________________________')
            # BUG FIX: the original reopened this log every iteration and only
            # closed the last handle; a context manager closes each one.
            with open("fps_test_log.txt", "a") as f:
                write_line = str(FP.video_tip)+' '+'frame:'+str(count)+' '+str(end - start)+' sec'+'\n'
                f.write(write_line)
    finally:
        # always release the writer so the video file is finalized
        cv2.destroyAllWindows()
        video.release()
    return
###################################################################################################
# script entry point
if __name__ == "__main__":
    main()
|
Domagoj-Spoljar/-Python-Algoritam-Prepozunavanje-vozne-trake
|
frames_to_video_dynamic.py
|
frames_to_video_dynamic.py
|
py
| 3,245
|
python
|
en
|
code
| 0
|
github-code
|
6
|
18817086763
|
from gameObject import GameObject
import pygame, time
FALL_VELOCITY = (0, 3.9)       # default downward drift (y grows downward)
NULL_VELOCITY = (0, 0)         # used once a dead bird has settled off-screen
FLAPPING_VELOCITY = (0, -4.5)  # upward impulse while a flap is active
FLAPPING_MAX_TIME = 0.15       # seconds a single flap keeps its upward velocity
BIRD_RADIUS = 15
WINDOW_WIDTH = 480             # NOTE(review): compared against bounds.y below, so it acts as a height — confirm
COLOR_RED = (255, 0, 0)        # tint applied to dead birds
class Bird(GameObject):
    """A flappy-bird agent driven by a neural-net "brain" (NEAT-style training).

    The bird falls by default, flaps for at most FLAPPING_MAX_TIME seconds,
    and accrues fitness/score while it stays inside the pipe gap.
    """

    def __init__(self, x, y, color, brain):
        GameObject.__init__(self, x, y, BIRD_RADIUS, BIRD_RADIUS, FALL_VELOCITY)
        self.color = color
        self.isFlapping = False  # True while the upward flap impulse is active
        self.flappingTime = 0    # wall-clock time (time.time()) the flap started
        self.brain = brain       # exposes feed_forward() and increment_fitness()
        self.isAlive = True
        self.score = 0
        self.drawable = True     # set False once a dead bird falls off-screen

    def draw(self, surface):
        # NOTE(review): pygame.draw.circle's 4th/5th args are radius and line
        # width; both are BIRD_RADIUS here, which renders a filled circle —
        # confirm that is the intent.
        if self.drawable:
            pygame.draw.circle(surface, self.color, (self.bounds.x, self.bounds.y), self.bounds.height, self.bounds.width)

    def update(self, inputs):
        """Advance the bird one frame.

        *inputs* is mutated in place: inputs[1]/inputs[2] appear to be the
        vertical distances to the pipe-gap edges and are widened by the bird
        radius — TODO confirm against the caller.
        """
        inputs[1] -= BIRD_RADIUS
        inputs[2] += BIRD_RADIUS
        if not self.isAlive:
            self.color = COLOR_RED
            # A dead bird keeps falling until it nears the bottom edge.
            if self.bounds.y < WINDOW_WIDTH - 10:
                self.speed = FALL_VELOCITY
            else:
                self.drawable = False
                self.speed = NULL_VELOCITY
        else:
            # End the flap impulse once it has lasted FLAPPING_MAX_TIME seconds.
            if self.isFlapping and time.time() - self.flappingTime >= FLAPPING_MAX_TIME:
                self.speed = FALL_VELOCITY
                self.isFlapping = False
            else:
                # Ask the brain whether to flap this frame.
                prediction = self.brain.feed_forward(inputs)
                if len(prediction) == 1 or (len(prediction) > 1 and prediction[0] < prediction[1]):
                    self.flap()
            # Small reward for staying inside the gap; big bonus and a score
            # point when crossing the pipe while inside the gap.
            if inputs[1] < 0 < inputs[2]:
                self.brain.increment_fitness(1)
            if 0 >= inputs[0] > -2 and inputs[1] < 0 < inputs[2]:
                self.brain.increment_fitness(100)
                self.score += 1
        self.move(*self.speed)

    def flap(self):
        # Start an upward impulse unless one is already in progress.
        if not self.isFlapping:
            self.flappingTime = time.time()
            self.speed = FLAPPING_VELOCITY
            self.isFlapping = True
|
JSMarrocco/JSMarrocco_PersonalRepository
|
NEAT_fluppy_bird/bird.py
|
bird.py
|
py
| 1,951
|
python
|
en
|
code
| 0
|
github-code
|
6
|
12608050279
|
# Train model
import sys
import time
import numpy as np
import torch.optim as optim
import pickle
import os
import torch.utils.data
import model as m
import argparse
if __name__ == '__main__':
    # Evaluate a previously trained model on the test split.
    # (Removed a duplicate `import argparse`; the module is imported at file top.)
    parser = argparse.ArgumentParser()
    parser.add_argument("--doc_len", type=int, default=300)
    parser.add_argument("--lr", type=float, default=0.001)
    parser.add_argument("--batchSize", type=int, default=32)
    parser.add_argument("--num_workers", type=int, default=4)
    parser.add_argument("--flg_cuda", action='store_true')
    parser.add_argument("--optType", default='Adam')  # Optimizer: 'Adam' or 'SGD'
    parser.add_argument("--logInterval", type=int, default=1)  # Print test accuracy every n epochs
    parser.add_argument("--flgSave", action='store_true')
    parser.add_argument("--savePath", default='./')
    parser.add_argument("--randSeed", type=int, default=42)
    parser.add_argument("--inputPath", default="../aclImdb/df07f20K_stopEng_W_1gram/")
    parser.add_argument("--modelPath")
    args = parser.parse_args()

    torch.manual_seed(args.randSeed)  # For reproducible results

    if not os.path.isdir(args.savePath):
        os.mkdir(args.savePath)

    print('General parameters: ', args)

    print("Loading Data")
    testset = m.MovieDataset(args.inputPath, 'test.json', transform=m.padToTensor(args.doc_len))

    print('To Loader')
    # pin_memory only helps when batches are copied to CUDA.
    test_loader = torch.utils.data.DataLoader(testset, batch_size=args.batchSize,
                                              shuffle=False, pin_memory=args.flg_cuda)

    print("Loading model")
    if args.flg_cuda:
        model = torch.load(args.modelPath + '_model.pt')
        model = model.cuda()
    else:
        # map_location keeps CUDA-trained weights loadable on a CPU-only box.
        model = torch.load(args.modelPath + '_model.pt', map_location=lambda storage, loc: storage)
    print(model)

    if args.optType == 'Adam':
        opt = optim.Adam(model.params, lr=args.lr)
    elif args.optType == 'SGD':
        opt = optim.SGD(model.params, lr=args.lr)
    else:
        # Previously `opt` was left unbound here and crashed later with NameError.
        raise ValueError("Unsupported --optType: {!r} (expected 'Adam' or 'SGD')".format(args.optType))

    print("Beginning Training")
    train_paras = {'log_interval': [args.logInterval, 1000], 'flg_cuda': args.flg_cuda,
                   'flgSave': args.flgSave, 'savePath': args.savePath}
    # Bind to a new name: the original `m = m.trainModel(...)` clobbered the
    # `model` module alias.
    trainer = m.trainModel(train_paras, None, test_loader, model, opt)
    trainer._test(0)
|
jingsliu/NLP_HW
|
HW1/code/eval.py
|
eval.py
|
py
| 2,761
|
python
|
en
|
code
| 0
|
github-code
|
6
|
21764027492
|
# Approach 1: Bit Manipulation
# Time: O(n), n = no. of bits of the number
# Space: O(1)
class Solution:
    def minFlips(self, a: int, b: int, c: int) -> int:
        """Minimum number of single-bit flips in a and/or b so (a OR b) == c.

        Walks the three numbers bit by bit from the least-significant end.
        """
        flips = 0
        while a | b | c:
            bit_a, bit_b, bit_c = a & 1, b & 1, c & 1
            if bit_c:
                # Need at least one of the two bits set: one flip if both are 0.
                if not (bit_a or bit_b):
                    flips += 1
            else:
                # Need both bits clear: every set bit costs one flip.
                flips += bit_a + bit_b
            a >>= 1
            b >>= 1
            c >>= 1
        return flips
|
jimit105/leetcode-submissions
|
problems/minimum_flips_to_make_a_or_b_equal_to_c/solution.py
|
solution.py
|
py
| 434
|
python
|
en
|
code
| 0
|
github-code
|
6
|
69809361149
|
from turtle import Turtle
class ScoreBoard(Turtle):
    """Turtle-based scoreboard for a two-player Pong game."""

    def __init__(self):
        super().__init__()
        self.color("white")
        self.penup()
        self.hideturtle()
        self.left_score = 0
        self.right_score = 0
        self.update_scoreboard()

    def _draw_score(self, x, value):
        """Write one score centered at (x, 200) in the shared scoreboard font."""
        self.goto(x, 200)
        self.write(value, align="center", font=("Courier", 80, "normal"))

    def update_scoreboard(self):
        # Clear first, otherwise the new digits would overprint the old ones.
        self.clear()
        self._draw_score(-100, self.left_score)
        self._draw_score(100, self.right_score)

    def update_left_score(self):
        self.left_score += 1
        self.update_scoreboard()

    def update_right_score(self):
        self.right_score += 1
        self.update_scoreboard()

    def gameover(self):
        self.goto(0, 0)
        self.write("GAME OVER", align="center", font=("Courier", 80, "normal"))
|
algebra2boy/pythonTheBest
|
Intermediate/Pong/scoreboard.py
|
scoreboard.py
|
py
| 948
|
python
|
en
|
code
| 1
|
github-code
|
6
|
6661767688
|
import random
import cv2
import numpy as np
import sys
sys.path.insert(1, 'build/lib')
from va_rs import augment
# Demo: build a solid 8x8x8 cube inside a 32^3 volume, rotate it with
# va_rs.augment twice (linear vs nearest interpolation) and show the slices.
original_cube = np.zeros((32, 32, 32), dtype=np.float32)
original_cube[12:20, 12:20, 12:20] = 1.0
original_cube = original_cube[None, ...]  # add a leading channel axis

linear_cube = original_cube.copy()
nearest_cube = original_cube.copy()

rotations = (20, 4, 1)  # NOTE(review): presumably per-axis angles — confirm units
translations = tuple(np.random.rand(3) * 6 - 3)  # <-3, +3>
scaling = tuple(np.random.rand(3) * 0.2 + 0.9)  # <0.9; 1.1>
raw_cube_multipliers = (random.random() * 0.2 + 0.9,)  # (<0.9; 1.1>, 1.0) - don't multiply frangi data
# NOTE(review): translations/scaling/raw_cube_multipliers are computed but
# never passed to augment() below — confirm whether that is intentional.

linear_cube = augment(linear_cube, rotations, interpolation='linear')
nearest_cube = augment(nearest_cube, rotations, interpolation='nearest')

# Show each z-slice of the three volumes; any key advances to the next slice.
for i in range(32):
    cv2.imshow('original', original_cube[0, i, ...])
    cv2.imshow('linear', linear_cube[0, i, ...])
    cv2.imshow('nearest', nearest_cube[0, i, ...])
    cv2.waitKey()
|
PUTvision/volume-augmentations
|
examples/augment.py
|
augment.py
|
py
| 913
|
python
|
en
|
code
| 0
|
github-code
|
6
|
36689627100
|
import sys
import subprocess
from PyQt5 import uic, QtCore
from PyQt5.QtWidgets import QApplication, QMainWindow
form_class = uic.loadUiType("./testBtn.ui")[0]
class WindowClass(QMainWindow, form_class):
    """Main window with three buttons, each toggling one helper script
    (voice recognition, motion recognition, eye tracking) as a child process.

    Note: a single ``self.proc`` slot is shared by all three buttons, so only
    one helper can run at a time.
    """

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        self.testBtn.clicked.connect(self.toggle_exe)
        self.testBtn2.clicked.connect(self.toggle2_exe)
        self.testBtn3.clicked.connect(self.toggle3_exe)
        self.proc = None  # currently running child process, if any

    # -- generic helpers shared by all three features ------------------

    def _start_script(self, script_path, button, running_msg, stop_label):
        """Launch *script_path* with python and flip *button* to its stop label."""
        if self.proc is None:
            self.statusBar().showMessage(running_msg)
            self.proc = subprocess.Popen(["python", script_path])
            button.setText(stop_label)

    def _stop_script(self, button, start_label, stopped_msg):
        """Kill the child process tree (Windows taskkill) and reset the UI."""
        if self.proc is not None:
            subprocess.run(['taskkill', '/f', '/t', '/pid', str(self.proc.pid)])
            self.proc = None
            button.setText(start_label)
            self.statusBar().showMessage(stopped_msg)

    # -- voice recognition ---------------------------------------------

    def toggle_exe(self):
        if self.proc is None:
            self.start_exe()
        else:
            self.stop_exe()

    def start_exe(self):
        # Raw strings fix the invalid escape sequences ("\m", "\e") that the
        # original non-raw literals produced (SyntaxWarning, future error);
        # the runtime path values are unchanged.
        self._start_script(r"C:\main_ui\voicecommand_final.py", self.testBtn,
                           "음성인식 파일 실행중...", "음성인식 종료")

    def stop_exe(self):
        self._stop_script(self.testBtn, "음성인식 실행", "음성인식 종료")

    # -- motion recognition ----------------------------------------------

    def toggle2_exe(self):
        if self.proc is None:
            self.start2_exe()
        else:
            self.stop2_exe()

    def start2_exe(self):
        self._start_script(r"C:\main_ui\motion_final.py", self.testBtn2,
                           "모션인식 파일 실행중...", "모션인식 종료")

    def stop2_exe(self):
        self._stop_script(self.testBtn2, "모션인식 실행", "모션인식 종료")

    # -- eye tracking ------------------------------------------------------

    def toggle3_exe(self):
        if self.proc is None:
            self.start3_exe()
        else:
            self.stop3_exe()

    def start3_exe(self):
        self._start_script(r"C:\main_ui\eyetrac_final.py", self.testBtn3,
                           "아이트레킹 파일 실행중...", "아이트레킹 종료")

    def stop3_exe(self):
        self._stop_script(self.testBtn3, "아이트레킹 실행", "아이트레킹 종료")
if __name__ == "__main__":
app = QApplication(sys.argv)
myWindow = WindowClass()
myWindow.show()
sys.exit(app.exec_())
|
quswjdgns399/air_command
|
main_ui.py
|
main_ui.py
|
py
| 2,953
|
python
|
ko
|
code
| 0
|
github-code
|
6
|
14033064402
|
import matplotlib.pyplot as plt
import xgboost as xgb
import os
from constants import *
from time import gmtime, strftime
from src.models.model_learner import ModelLearner
from src.models.csv_handler import save_feature_importance_res
class XgboostTrainObj(ModelLearner):
    """Train, persist and explain an XGBoost classifier for one organism."""

    def __init__(self, org_name):
        self.m_name = 'Xgboost'
        self.model_name = 'Xgboost'
        ModelLearner.__init__(self, org_name)

    def train_model(self, models_folder_name, model_name, datasets):
        """Fit the classifier on the prepared datasets and save it to disk."""
        super().prep_model_training(datasets)
        print("---Start training {0} on {1}---\n".format(self.model_name, self.org_name))
        # Bug fix: the original called XGBClassifier(kwargs=XGBS_PARAMS), which
        # passes the whole dict as one unknown parameter named "kwargs" — the
        # tuned hyper-parameters were silently ignored. Unpack them instead.
        self.model = xgb.XGBClassifier(**XGBS_PARAMS).fit(
            self.x, self.y,
            eval_metric=["error", "logloss"],
            eval_set=[(self.xval, self.yval)])
        print("---Learning Curves---\n")
        # self.plot_learning_curves()
        model_name = os.path.join(MODELS_OBJECTS_PATH, models_folder_name, f"{model_name}.dat")
        self.model.save_model(model_name)
        print("---{0} model saved---\n".format(self.model_name))

    def plot_learning_curves(self):
        """Plot train/test log-loss and classification error over epochs."""
        results = self.model.evals_result()
        epochs = len(results['validation_0']['error'])
        x_axis = range(0, epochs)

        fig, ax = plt.subplots(figsize=(12, 12))
        ax.plot(x_axis, results['validation_0']['logloss'], label='Train')
        ax.plot(x_axis, results['validation_1']['logloss'], label='Test')
        ax.legend()
        plt.ylabel('Log Loss')
        plt.title('XGBoost Log Loss')
        plt.savefig(os.path.join(MODELS_OUTPUT_PATH, 'XGBoost {0} Log Loss.png'.format(self.org_name)))

        fig, ax = plt.subplots(figsize=(12, 12))
        ax.plot(x_axis, results['validation_0']['error'], label='Train')
        ax.plot(x_axis, results['validation_1']['error'], label='Test')
        ax.legend()
        plt.ylabel('Classification Error')
        plt.title('XGBoost Classification Error')
        plt.savefig(os.path.join(MODELS_OUTPUT_PATH, 'XGBoost {0} Classification Error.png'.format(self.org_name)))
        plt.clf()

    def model_explain(self):
        print("---Explain model---\n")
        # self.feature_importance()
        super().model_explain()

    def feature_importance(self):
        """Persist and plot the top-5 features by model importance."""
        print("feature_importances\n")
        importance = self.model.feature_importances_
        f_important = sorted(list(zip(self.feature_names, importance)), key=lambda x: x[1], reverse=True)
        save_feature_importance_res('{0}_{1}'.format(self.model_name, self.org_name), f_important, 'reg')
        plt.bar([x[0] for x in f_important[:5]], [x[1] for x in f_important[:5]])
        plt.xticks(rotation=20)
        title = '{0} {1} f_important'.format(self.model_name, self.org_name)
        plt.title(title)
        plt.savefig(os.path.join(MODELS_FEATURE_IMPORTANCE, '{0}.png'.format(title)))
        plt.clf()
|
EyalHadad/miRNA_transfer
|
src/models/training/xgboos_trainer.py
|
xgboos_trainer.py
|
py
| 2,852
|
python
|
en
|
code
| 0
|
github-code
|
6
|
19412698559
|
import json
def create_row_w_validated_params(cls, validated_params, rqst_errors):
    """Create and save a new row named validated_params['name'].

    Returns the saved row, or None when the name is already taken or any
    request errors have been recorded.
    """
    requested_name = validated_params['name']
    name_taken = cls.check_for_rows_with_rqst_name(requested_name, rqst_errors)

    if name_taken or rqst_errors:
        return None

    row = cls()
    row.name = requested_name
    row.save()
    return row
def update_row_w_validated_params(cls, validated_params, rqst_errors):
    """Rename the row with id validated_params['id'] to validated_params['name'].

    Returns the updated row; returns None when the name is taken, errors were
    already recorded, or no row exists with that id (an error is appended).
    """
    row_id = validated_params['id']
    name_taken = cls.check_for_rows_with_rqst_name(
        validated_params['name'],
        rqst_errors,
        row_id
    )

    if name_taken or rqst_errors:
        return None

    row = None
    try:
        row = cls.objects.get(id=row_id)
        row.name = validated_params['name']
        row.save()
    except cls.DoesNotExist:
        rqst_errors.append("Row does not exist for database id: {}".format(row_id))
    return row
def delete_row_w_validated_params(cls, validated_params, rqst_errors):
    """Delete the row whose id is validated_params['id'].

    Appends a message to *rqst_errors* (rather than raising) when absent.
    """
    row_id = validated_params['id']
    try:
        cls.objects.get(id=row_id).delete()
    except cls.DoesNotExist:
        rqst_errors.append("Row does not exist for database id: {}".format(row_id))
def check_for_rows_with_rqst_name(cls, rqst_name, rqst_errors, current_id=None):
    """Report whether another row already uses *rqst_name* (case-insensitive).

    Appends a diagnostic message to *rqst_errors* for genuine conflicts.
    A single match that is the row *current_id* itself is not a conflict.
    """
    matches = cls.objects.filter(name__iexact=rqst_name)
    if not matches:
        return False

    match_ids = [row.id for row in matches]

    if len(matches) > 1:
        rqst_errors.append(
            "Multiple rows with name: {} already exist in db. (Hint - Delete one and modify the remaining) id's: {}".format(
                rqst_name, json.dumps(match_ids)))
        return True

    if not current_id or current_id not in match_ids:
        rqst_errors.append(
            "Row with name: {} already exists in db. (Hint - Modify that entry) id: {}".format(
                rqst_name, match_ids[0]))
        return True

    # The only match is the row being updated — not a conflict.
    return False
|
bbcawodu/careadvisors-backend
|
picmodels/models/care_advisors/healthcare_service_expertise_models/services/create_update_delete.py
|
create_update_delete.py
|
py
| 2,368
|
python
|
en
|
code
| 0
|
github-code
|
6
|
42432440743
|
# MenuTitle: SVG Pen
from fontTools.pens.basePen import BasePen
# (C) 2016 by Jens Kutilek
# https://raw.githubusercontent.com/jenskutilek/TypoLabs2016/master/penCollection/svgPen.py
# See also:
# http://www.w3.org/TR/SVG/paths.html#PathDataBNF
# https://developer.mozilla.org/en-US/docs/Web/SVG/Tutorial/Paths
# SVG path parsing code from:
# http://codereview.stackexchange.com/questions/28502/svg-path-parsing
def parse_svg_path(path_data):
    """Tokenize an SVG path data string.

    Yields each number and each single-letter drawto command as a string,
    handling separators, signs, exponents and implicit splits at a second
    decimal point (e.g. "1.5.5" -> "1.5", ".5").
    """
    digit_exp = "0123456789eE"
    comma_wsp = ", \t\n\r\f\v"
    drawto_command = "MmZzLlHhVvCcSsQqTtAa"
    sign = "+-"
    exponent = "eE"
    # Renamed from `float`: the original shadowed the builtin, breaking any
    # later use of float() inside this function.
    in_float = False  # True once the current number already contains a '.'
    entity = ""
    for char in path_data:
        if char in digit_exp:
            entity += char
        elif char in comma_wsp and entity:
            # Separator ends the current number.
            yield entity
            in_float = False
            entity = ""
        elif char in drawto_command:
            # A command both terminates the pending number and is a token.
            if entity:
                yield entity
                in_float = False
                entity = ""
            yield char
        elif char == ".":
            if in_float:
                # Second '.' starts a new number ("1.5.5" case).
                yield entity
                entity = "."
            else:
                entity += "."
                in_float = True
        elif char in sign:
            if entity and entity[-1] not in exponent:
                # Sign starts a new number unless it follows an exponent 'e'.
                yield entity
                in_float = False
                entity = char
            else:
                entity += char
    if entity:
        yield entity
def drawSVGPath(pen, path=""):
"""
Draw an SVG path that is supplied as a string. This is limited to SVG paths
that contain only elements that can be matched to the usual path elements
found in a glyph.
"""
path_data = list(parse_svg_path(path))
# print(path_data)
i = 0
prev_x: int | float = 0
prev_y: int | float = 0
while i < len(path_data):
# print(i, path_data[i])
v = path_data[i]
if v in "Cc":
# Cubic curve segment
x1, y1, x2, y2, x3, y3 = path_data[i + 1 : i + 7]
# print(" ", x1, y1, x2, y2, x3, y3)
x1 = float(x1)
y1 = float(y1)
x2 = float(x2)
y2 = float(y2)
x3 = float(x3)
y3 = float(y3)
if v == "c":
x1 += prev_x
y1 += prev_y
x2 += prev_x
y2 += prev_y
x3 += prev_x
y3 += prev_y
pen.curveTo(
(x1, y1),
(x2, y2),
(x3, y3),
)
prev_x = x3
prev_y = y3
i += 7
elif v in "Hh":
# Horizontal line segment
x = path_data[i + 1]
# print(" ", x)
x = float(x)
if v == "h":
x += prev_x
pen.lineTo((x, prev_y))
prev_x = x
i += 2
elif v in "LlMm":
# Move or Line segment
x, y = path_data[i + 1 : i + 3]
# print(" ", x, y)
x = float(x)
y = float(y)
if v in "lm":
x += prev_x
y += prev_y
if v in "Ll":
pen.lineTo((x, y))
else:
pen.moveTo((x, y))
prev_x = x
prev_y = y
i += 3
elif v in "Qq":
# Quadratic curve segment
x1, y1, x2, y2 = path_data[i + 1 : i + 5]
# print(" ", x1, y1, x2, y2)
x1 = float(x1)
y1 = float(y1)
x2 = float(x2)
y2 = float(y2)
if v == "q":
x1 += prev_x
y1 += prev_y
x2 += prev_x
y2 += prev_y
pen.qCurveTo(
(x1, y1),
(x2, y2),
)
prev_x = x2
prev_y = y2
i += 5
elif v in "Vv":
# Vertical line segment
y = path_data[i + 1]
# print(y)
y = float(y)
if v == "v":
y += prev_y
pen.lineTo((prev_x, y))
prev_y = y
i += 2
elif v in "Zz":
pen.closePath()
i += 1
else:
print(
"SVG path element '%s' is not supported for glyph paths."
% path_data[i]
)
break
class SVGpen(BasePen):
    """A pen that renders a glyph outline as an SVG path string in ``self.d``."""

    def __init__(
        self,
        glyphSet,
        round_coordinates=False,
        force_relative_coordinates=False,
        optimize_output=False,
    ):
        """
        A pen that converts a glyph outline to an SVG path. After drawing,
        SVGPen.d contains the path as string. This corresponds to the SVG path
        element attribute "d".

        :param glyphSet: The font object
        :type glyphSet: :py:class:`fontParts.RFont`

        :param round_coordinates: Round all coordinates to integer. Default is
            False.
        :type round_coordinates: bool

        :param force_relative_coordinates: Store all coordinates as relative.
            Default is False, i.e. choose whichever notation (absolute or
            relative) produces shorter output for each individual segment.
        :type force_relative_coordinates: bool

        :param optimize_output: Make the output path string as short as
            possible. Default is False. Setting this to False also overrides
            the relative_coordinates option.
            (Doc fix: the original text claimed the default was True.)
        :type optimize_output: bool
        """
        self._rnd = round_coordinates
        self._rel = force_relative_coordinates
        self._opt = optimize_output
        BasePen.__init__(self, glyphSet)
        self.reset()

    def reset(self):
        """Clear all drawing state so the pen can be reused for another glyph."""
        self.prev_x: int | float = 0  # previous point
        self.prev_y: int | float = 0
        self._set_first_point((0, 0))
        self._set_previous_point((0, 0))
        self._set_previous_cubic_control(None)
        self._set_previous_quadratic_control(None)
        self._prev_cmd = None
        self.relative = False
        self.d = ""

    def _append_shorter(self, absolute, relative):
        # Check if relative output is smaller.
        # Precedence note: this reads as
        # (not self._rel and len(absolute) <= len(relative)) or not self._opt.
        if not self._rel and len(absolute) <= len(relative) or not self._opt:
            cmd_str = absolute
            self.relative = False
        else:
            cmd_str = relative
            self.relative = True
        if cmd_str[0] == self._prev_cmd:
            # Repeated command letters may be omitted in SVG path syntax.
            rest = cmd_str[1:]
            if rest.startswith("-"):
                self.d += rest
            else:
                self.d += " " + rest
        else:
            self.d += cmd_str

    def _get_shorter_sign(self, value):
        # A leading '-' already separates numbers, so the space can be dropped.
        if value < 0 and self._opt:
            return "%g" % value
        else:
            return " %g" % value

    def _round_pt(self, pt):
        # Round the point based on the current rounding settings
        if self._rnd:
            x, y = pt
            return (int(round(x)), int(round(y)))
        return pt

    def _set_first_point(self, pt):
        self.first_x, self.first_y = pt

    def _set_previous_point(self, pt):
        self.prev_x, self.prev_y = pt

    def _set_previous_cubic_control(self, pt):
        if pt is None:
            self.prev_cx = None
            self.prev_cy = None
        else:
            self._set_previous_quadratic_control(None)
            self.prev_cx, self.prev_cy = pt

    def _set_previous_quadratic_control(self, pt):
        if pt is None:
            self.prev_qx = None
            self.prev_qy = None
        else:
            self._set_previous_cubic_control(None)
            self.prev_qx, self.prev_qy = pt

    def _reset_previous_controls(self):
        self._set_previous_cubic_control(None)
        self._set_previous_quadratic_control(None)

    def _moveTo(self, pt):
        x, y = self._round_pt(pt)
        cmd = "Mm"
        a = "M%g" % x
        a += self._get_shorter_sign(y)
        r = "m%g" % (x - self.prev_x)
        r += self._get_shorter_sign(y - self.prev_y)
        self._append_shorter(a, r)
        self._set_first_point((x, y))
        self._set_previous_point((x, y))
        self._reset_previous_controls()
        self._prev_cmd = cmd[self.relative]

    def _lineTo(self, pt):
        x, y = self._round_pt(pt)
        # Prefer the shorter H/V forms for purely horizontal/vertical lines.
        if y == self.prev_y:
            cmd = "Hh"
            a = "H%g" % x
            r = "h%g" % (x - self.prev_x)
        elif x == self.prev_x:
            cmd = "Vv"
            a = "V%g" % y
            r = "v%g" % (y - self.prev_y)
        else:
            cmd = "Ll"
            a = "L%g" % x
            a += self._get_shorter_sign(y)
            r = "l%g" % (x - self.prev_x)
            r += self._get_shorter_sign(y - self.prev_y)
        self._append_shorter(a, r)
        self._set_previous_point((x, y))
        self._reset_previous_controls()
        self._prev_cmd = cmd[self.relative]

    def _curveToOne(self, p1, p2, pt):
        x1, y1 = self._round_pt(p1)
        x2, y2 = self._round_pt(p2)
        x3, y3 = self._round_pt(pt)
        if self.prev_cx is None:
            # No previous control point: the S-command reference is the
            # current point. Bug fix: was (self.prev_x, self.prev_x).
            self._set_previous_cubic_control((self.prev_x, self.prev_y))
        if (
            self.prev_y - y1 + self.prev_y == self.prev_cy
            and self.prev_x - x1 + self.prev_x == self.prev_cx
        ):
            # Control point p1 is mirrored, use S command and omit p1
            cmd = "Ss"
            a = "S%g" % x2
            for coord in [y2, x3, y3]:
                a += self._get_shorter_sign(coord)
            r = "s%g" % (x2 - self.prev_x)
            for coord in [
                y2 - self.prev_y,
                x3 - self.prev_x,
                y3 - self.prev_y,
            ]:
                r += self._get_shorter_sign(coord)
        else:
            cmd = "Cc"
            a = "C%g" % x1
            for coord in [y1, x2, y2, x3, y3]:
                a += self._get_shorter_sign(coord)
            r = "c%g" % (x1 - self.prev_x)
            for coord in [
                y1 - self.prev_y,
                x2 - self.prev_x,
                y2 - self.prev_y,
                x3 - self.prev_x,
                y3 - self.prev_y,
            ]:
                r += self._get_shorter_sign(coord)
        self._append_shorter(a, r)
        self._set_previous_point((x3, y3))
        self._set_previous_cubic_control((x2, y2))
        self._prev_cmd = cmd[self.relative]

    def _qCurveToOne(self, p1, p2):
        x1, y1 = self._round_pt(p1)
        x2, y2 = self._round_pt(p2)
        if self.prev_qx is None:
            # Bug fix: was (self.prev_x, self.prev_x) — y seeded with x.
            self._set_previous_quadratic_control((self.prev_x, self.prev_y))
        if (
            self.prev_y - y1 + self.prev_y == self.prev_qy
            and self.prev_x - x1 + self.prev_x == self.prev_qx
        ):
            # Control point p1 is mirrored, use T command and omit p1
            cmd = "Tt"
            a = "T%g" % x2
            a += self._get_shorter_sign(y2)
            r = "t%g" % (x2 - self.prev_x)
            r += self._get_shorter_sign(y2 - self.prev_y)
        else:
            cmd = "Qq"
            a = "Q%g" % x1
            for coord in [y1, x2, y2]:
                a += self._get_shorter_sign(coord)
            r = "q%g" % (x1 - self.prev_x)
            for coord in [
                y1 - self.prev_y,
                x2 - self.prev_x,
                y2 - self.prev_y,
            ]:
                r += self._get_shorter_sign(coord)
        self._append_shorter(a, r)
        self._set_previous_point((x2, y2))
        self._set_previous_quadratic_control((x1, y1))
        self._prev_cmd = cmd[self.relative]

    def _closePath(self):
        cmd = "z" if self._rel else "Z"
        self.d += cmd
        # Closing returns the pen to the subpath's starting point.
        self._set_previous_point((self.first_x, self.first_y))
        self._reset_previous_controls()
        self._prev_cmd = cmd
|
jenskutilek/TypoLabs2016
|
penCollection/svgPen.py
|
svgPen.py
|
py
| 11,820
|
python
|
en
|
code
| 15
|
github-code
|
6
|
72334957309
|
#!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'diagonalDifference' function below.
#
# The function is expected to return an INTEGER.
# The function accepts 2D_INTEGER_ARRAY arr as parameter.
#
def diagonalDifference(arr):
    """Return the absolute difference between the sums of the primary and
    secondary diagonals of the square matrix *arr*.

    Bug fix: the original iterated over a global ``n`` from the calling
    script; the size is now derived from the matrix itself.
    """
    size = len(arr)
    prim_diag = 0
    second_diag = 0
    for i in range(size):
        prim_diag += arr[i][i]
        second_diag += arr[i][size - 1 - i]
    return abs(prim_diag - second_diag)
#
#
if __name__ == '__main__':
    # HackerRank-style driver: first line is n, then n whitespace-separated rows.
    n = int(input().strip())

    arr = []

    for _ in range(n):
        arr.append(list(map(int, input().rstrip().split())))

    result = diagonalDifference(arr)
    print(result)

# sum1 = 0
# sum2 = 0
#
# for i in range(len(arr)):
#     print("arr[" + str(i) + "]["+ str(i) + "]")
#     sum1 = sum1 + arr[i][i]
#     sum2 = sum2 + arr[i][n-1]
#     n -= 1
#
# print(sum1)
# print(sum2)
# print(arr)
|
ChitraVKumar/My-Algorithms-for-Leetcode
|
diaginal difference.py
|
diaginal difference.py
|
py
| 938
|
python
|
en
|
code
| 0
|
github-code
|
6
|
34632164463
|
import cv2
import numpy as py
# Corner detection via morphology (the classic cross/diamond vs X/square trick):
# dilating with a cross only "grows" straight edges while corners stay put;
# eroding the dilated image with a diamond only "shrinks" at corners. The same
# is done with an X/square pair, and the absolute difference of the two
# closed images leaves the corners.
image = cv2.imread('img\\building.jpg', 0)
origin = cv2.imread('img\\building')  # NOTE(review): path has no extension and the result is unused — likely returns None

# 5x5 structuring elements: cross / diamond / square / X
cross = cv2.getStructuringElement(cv2.MORPH_CROSS, (5, 5))
# Build the diamond from a filled 5x5 rectangle by zeroing the corners.
diamond = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))
diamond[0, 0] = 0
diamond[0, 1] = 0
diamond[1, 0] = 0
diamond[4, 4] = 0
diamond[4, 3] = 0
diamond[3, 4] = 0
diamond[4, 0] = 0
diamond[4, 1] = 0
diamond[3, 0] = 0
diamond[0, 3] = 0
diamond[0, 4] = 0
diamond[1, 4] = 0
square = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))
x_elem = cv2.getStructuringElement(cv2.MORPH_CROSS, (5, 5))

# Dilate with the cross, then erode the *dilated* image with the diamond.
# Bug fix: the original eroded `image` both times, silently discarding the
# dilation results and degenerating the whole pipeline.
result1 = cv2.dilate(image, cross)
result1 = cv2.erode(result1, diamond)
# Dilate with the X element, then erode that result with the square.
result2 = cv2.dilate(image, x_elem)
result2 = cv2.erode(result2, square)

# The difference of the two "closings" leaves only the corners.
result = cv2.absdiff(result2, result1)
# Threshold to a binary corner map.
retval, result = cv2.threshold(result, 40, 255, cv2.THRESH_BINARY)

# Mark each detected corner with a radius-5 circle on the source image.
# NOTE(review): this linear scan maps j to (row, col) via shape[0]; it only
# covers all pixels when the image is square — confirm for other inputs.
for j in range(result.size):
    y = int(j / result.shape[0])
    x = j % result.shape[0]
    if result[x, int(y)] == 255:
        cv2.circle(image, (int(y), x), 5, (255, 0, 0))

cv2.imshow("result", image)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
liuyuhua-ha/opencvStudy
|
opencvStudy/checkFaceTest.py
|
checkFaceTest.py
|
py
| 1,754
|
python
|
zh
|
code
| 0
|
github-code
|
6
|
34453314320
|
import sys, os
import requests
from bs4 import BeautifulSoup # scraper library
import pandas as pd # tables
from collections import OrderedDict
# Config
base_page_url = 'https://www.teamrankings.com/ncaa-basketball/stat/'
# Bug fix: `pd.datetime` was deprecated and removed in pandas 1.0;
# pd.date_range accepts an ISO date string directly.
date_range = pd.date_range('2018-01-01', periods=59).tolist()

# dictionary: output_name: url suffix appended to base_page_url
stat_types = {
    'pts_per_game': 'points-per-game',
    'pos_per_game': 'possessions-per-game',
    'field_goals_attempted': 'field-goals-attempted-per-game',
    'field_goals_made': 'field-goals-made-per-game',
    '3pt_attempted': 'three-pointers-attempted-per-game',
    '3pt_made': 'three-pointers-made-per-game',
    'ft_per_100_pos': 'ftm-per-100-possessions',
    'off_rebounds': 'offensive-rebounds-per-game',
    'ast_per_game': 'assists-per-game',
    'to_per_game': 'turnovers-per-game',
    'fouls_per_game': 'personal-fouls-per-game',
    'opp_pts_per_game': 'opponent-points-per-game',
    'opp_pts_from_3pt': 'opponent-points-from-3-pointers',
    'opp_pts_from_2pt': 'opponent-points-from-2-pointers',
    'def_rebounds': 'defensive-rebounds-per-game',
    'blocks_per_game': 'blocks-per-game',
    'steals_per_game': 'steals-per-game',
    'opp_to_per_game': 'opponent-turnovers-per-game',
    'opp_ast_per_game': 'opponent-assists-per-game',
}
def scrape_stats(page_url, output_name):
    """Scrape one teamrankings.com stat table for every date in date_range.

    Returns a DataFrame with one row per team: a 'Team Name' column plus one
    column per date (stat values kept as text).
    """
    stats_df = None
    stats = {}  # team name -> {date string -> stat value}
    for date_i, date in enumerate(date_range):
        date = str(date.date())
        url = page_url + '?date=' + date
        page = requests.get(url)  # load page
        soup = BeautifulSoup(page.text, 'html5lib')  # parse
        table = soup.find('table', class_='datatable').find('tbody')
        rows = table.find_all('tr')
        # Go through rows
        # NOTE(review): hard-codes 351 rows (the D1 team count circa 2018) —
        # confirm the table always has at least that many rows.
        for i in range(351):
            row = rows[i].find_all('td')
            team_name = row[1].get_text()
            stat_val = row[2].get_text()
            # Add to stats
            if team_name not in stats:
                stats[team_name] = {}
            stats[team_name][date] = stat_val
        print(f"{output_name}: Fetching date: {date} [{date_i+1}/{len(date_range)}]", end='\r')
    print()
    # Convert to pandas dataframe
    stats_df_data = [ [ team_name, *v.values() ] for team_name,v in stats.items() ]
    stats_df_columns = ['Team Name'] + list(stats[list(stats.keys())[0]].keys())
    stats_df = pd.DataFrame(data = stats_df_data, columns = stats_df_columns)
    return stats_df
# def main():
# scrape_stats('https://www.teamrankings.com/ncaa-basketball/stat/points-per-game', 'pts_per_game')
def main():
    """Scrape every configured stat table, skipping ones already cached as CSV."""
    for output_name, stat_url in stat_types.items():
        csv_name = output_name + '.csv'
        # Check if file exists so we don't have to reparse the data.
        if os.path.isfile(csv_name):
            print(f"{output_name}: File exists. Skipping...")
            continue
        page_url = base_page_url + stat_url
        print(f"{output_name}: Parsing from `{page_url}`...")
        scrape_stats(page_url, output_name).to_csv(csv_name)
        print(f"{output_name}: Done.")
        print()


if __name__ == '__main__':
    main()
|
bwu987/March-Madness-Crusher
|
scraper/scraper.py
|
scraper.py
|
py
| 3,218
|
python
|
en
|
code
| 0
|
github-code
|
6
|
8067752722
|
from django.shortcuts import render_to_response
from curriculum.models import TipoProyecto
from django.conf import settings
# Create your views here.
def home(request):
    """Landing page; marks the 'Inicio' menu entry as selected."""
    context = {'menuInicio': 'selected', 'settings': settings}
    return render_to_response('default/index.html', context)
def estudios(request):
    """Education page; marks the 'Estudios' menu entry as selected."""
    context = {'menuEstudios': 'selected', 'settings': settings}
    return render_to_response('default/estudios.html', context)
def proyectos(request):
    """Projects page: lists all project types from the database."""
    context = {
        'tipoProyectos': TipoProyecto.objects.all(),
        'menuProyectos': 'selected',
        'settings': settings,
    }
    return render_to_response('default/proyectos.html', context)
def contacto(request):
    """Contact page; marks the 'Contacto' menu entry as selected."""
    context = {'menuContacto': 'selected', 'settings': settings}
    return render_to_response('default/contacto.html', context)
|
sebasgoldberg/jsg
|
default/views.py
|
views.py
|
py
| 1,106
|
python
|
es
|
code
| 0
|
github-code
|
6
|
28753239359
|
def make_withdraw(balance):
    """Return a withdraw(amount) closure over a running *balance*.

    Each successful call subtracts *amount* and returns the new balance;
    an overdraw attempt returns an error message instead.
    """
    def withdraw(amount):
        # Bug fix: `nonlocal` is required — without it the assignment below
        # makes `balance` local to withdraw and every call raises
        # UnboundLocalError at the `amount > balance` check.
        nonlocal balance
        if amount > balance:
            return "You cannot withdraw more than you have in your current balance."
        balance -= amount
        return balance
    return withdraw
# Demo: withdraw 50 from an initial balance of 1000.
# NOTE(review): withdraw() assigns to the closed-over `balance` without a
# `nonlocal` declaration, so as written this call raises UnboundLocalError —
# confirm whether demonstrating that error is the intent.
init_bal = 1000.00
wd = make_withdraw(init_bal)
wd_amount = 50.00
print("New balance is: ${0:6.2f}".format(wd(wd_amount)))
|
matheuscfernandes/cs107_matheus_fernandes
|
homework/HW2/solutions/P3b.py
|
P3b.py
|
py
| 382
|
python
|
en
|
code
| 0
|
github-code
|
6
|
28102982969
|
from django.urls import path
from .views import *
from django.contrib.auth.views import LogoutView
# URL routes for the AppPages app. The commented entries are retired routes
# kept for reference (courses, auth and profile views).
urlpatterns = [
    path('aboutMe/', aboutMe, name="aboutMe"),
    path('routePages/',routePages,name="routePages"),
    path("routePages/<id>", routePagesId, name="routePagesId"),
    path('crearHistoria/',crearHistoria,name="crearHistoria"),
    path("eliminarHistoria/", eliminarHistoria, name="eliminarHistoria"),
    path("editarHistoria/", editarHistoria, name="editarHistoria"),
    # path("listar_curso/", Listar_cursos),
    # path("cursos/",cursos, name="cursos"),
    # path("profesores/",profesores, name="profesores"),
    # path("estudiantes/",estudiantes, name="estudiantes"),
    # path("entregables/",entregables, name="entregables"),
    # path("cursoFormulario/",cursoFormulario, name="cursoFormulario"),
    # path("busquedaComision/", busquedaComison, name="busquedaComision"),
    # path("buscar/", buscar, name="buscar"),
    # path("eliminarProfesor/<id>", eliminarProfesor, name="eliminarProfesor"),
    # path("profesorEditar/<id>", profesorEditar, name="profesorEditar"),
    # path("estudiante/list/", EstudianteList.as_view(), name="EstudianteList"),
    # path("estudiante/nuevo/", EstudianteCreacion.as_view(), name="EstudianteCrear"),
    # path("estudiante/<pk>", EstudianteDetalle.as_view(), name="estudiante_detalle"),
    # path("estudiante/borrar/<pk>", EstudianteDelete.as_view(), name="estudiante_borrar"),
    # path("estudiante/editar/<pk>", EstudianteUpdate.as_view(), name="estudiante_editar"),
    # path('login/',login_request, name='login'),
    # path('register/', register, name='register'),
    # path('logout/',LogoutView.as_view(), name='logout'),
    # #path('logout/',LogoutView.as_view(template_name='logout.html'), name='logout'),
    # path('editarPerfil/', editarPerfil, name='editarPerfil'),
    # path('agregarAvatar/', agregarAvatar, name='agregarAvatar'),
]
|
ldcomba/ProyectoFinalComba_HistoriaMascotas
|
AppPages/urls.py
|
urls.py
|
py
| 1,924
|
python
|
es
|
code
| 0
|
github-code
|
6
|
4131930332
|
import random
from game import Game, new_game, start_game
from setup import setup_game
def play_again():
    """Prompt the player; True when they type 'yes' or 'y' (case-insensitive)."""
    reply: str = input("Would you like to play again?\n[Enter 'yes' or 'y' to play again.]\n").lower()
    return reply in ("yes", "y")
def get_wins(games: [Game]):
    """Return how many of *games* report is_won() as truthy."""
    return sum(1 for game in games if game.is_won())
def run_uno():
    """Main loop: play Uno rounds until the player declines, then print the tally.

    NOTE(review): `games` is appended to while being iterated with `for`;
    Python list iteration picks up the appended game, which is what keeps
    the loop running — confirm this is intentional before refactoring.
    """
    print("Welcome to Python Uno! \n\n")
    games: [Game] = [new_game()]
    for game in games:
        setup_game(game)
        start_game(game)
        if play_again():
            games.append(new_game())
        else:
            print("You won {wins} out of {total} games".format(wins=get_wins(games), total=len(games)))


if __name__ == '__main__':
    run_uno()
|
GitsAndGlamour/Python-Uno
|
main.py
|
main.py
|
py
| 774
|
python
|
en
|
code
| 0
|
github-code
|
6
|
39350868195
|
# Task 1: aggregate purchase counts per buyer and item from stdin,
# then print them sorted by buyer and by item.
sales = {}
for _ in range(int(input())):
    name, item, count = input().split()
    # setdefault creates the nested dict / counter on first sight of a key.
    sales[name][item] = sales.setdefault(name, {}).setdefault(item, 0) + int(count)
for key in sorted(sales):
    print(f'{key}:')
    for i in sorted(sales[key].items()):
        print(*i)
# Task 2: record on which (0-based) input line each country was mentioned;
# reads until the sentinel word "СТОП" ("STOP").
countries = dict()
country = input()
str_number = 0
while country != "СТОП":
    if country not in countries:
        countries[country] = [str_number]
    else:
        countries[country].append(str_number)
    str_number += 1
    country = input()
for country in countries:
    print(f"{country}: {countries[country]}")
# Task 3: count strict local maxima in a list of integers.
a = [int(i) for i in input().split()]
counter = 0
for i in range(1, len(a) - 1):
    if a[i - 1] < a[i] > a[i + 1]:
        counter += 1
print(counter)
# Task 4: delete the element at 0-based position k by shifting left, then pop.
a = [int(s) for s in input().split()]
k = int(input())
for i in range(k + 1, len(a)):
    a[i - 1] = a[i]
a.pop()
print(' '.join([str(i) for i in a]))
# Task 5: n x n matrix with 1 on the anti-diagonal and 2 above/right of it.
n = int(input())
a = [[0] * n for i in range(n)]
for i in range(n):
    a[i][n - i - 1] = 1
for i in range(n):
    for j in range(n - i, n):
        a[i][j] = 2
for row in a:
    for elem in row:
        print(elem, end=' ')
    print()
# Задача6
def read_last(lines, file):
    """Print the final *lines* lines of *file* (UTF-8), stripped of whitespace.

    A non-positive count is rejected with a message instead of reading the file.
    """
    if lines <= 0:
        print('Количество строк может быть только целым положительным')
        return
    with open(file, encoding='utf-8') as text:
        tail = text.readlines()[-lines:]
    for row in tail:
        print(row.strip())
#Задача7
def fib(n):
    """Yield the first *n* Fibonacci numbers: 1, 1, 2, 3, 5, ...

    Bug fix: the original unconditionally yielded the first two terms, so
    fib(0) and fib(1) produced too many values. Now exactly *n* terms are
    generated (none for n <= 0).
    """
    a, b = 1, 1
    for _ in range(n):
        yield a
        a, b = b, a + b
# Задача 8
import re
def isCyrillic(text):
    """Return True when *text* contains at least one Cyrillic letter."""
    return re.search('[а-яА-Я]', text) is not None
# Scrabble-style letter values for English letters (value -> letters worth it).
points_en = {1:'AEIOULNSTR',
2:'DG',
3:'BCMP',
4:'FHVWY',
5:'K',
8:'JZ',
10:'QZ'}
# Scrabble-style letter values for Russian letters.
points_ru = {1:'АВЕИНОРСТ',
2:'ДКЛМПУ',
3:'БГЁЬЯ',
4:'ЙЫ',
5:'ЖЗХЦЧ',
8:'ШЭЮ',
10:'ФЩЪ'}
# Task 8: score the input word with whichever table matches its alphabet.
text = input().upper()
if isCyrillic(text):
    print(sum([k for i in text for k, v in points_ru.items() if i in v]))
else:
    print(sum([k for i in text for k, v in points_en.items() if i in v]))
#Задача9
def shortener(st):
    """Remove every parenthesised group from *st*, innermost-last-group first.

    Note: removal uses ``str.replace``, so all occurrences of the located
    substring disappear at once (original behaviour, preserved).

    Bug fix: on unbalanced input such as ``'('`` the original looped forever
    because ``find(')')`` returned -1 and nothing was ever removed; we now
    stop as soon as no matching pair exists.
    """
    while '(' in st and ')' in st:
        left = st.rfind('(')
        right = st.find(')', left)
        if right == -1:
            # The last '(' has no closing partner: no more pairs to strip.
            break
        st = st.replace(st[left:right + 1], '')
    return st
# Задача 10
def set_gen(lst):
    """Collapse *lst* into a set, replacing each value whose count (at the
    moment it is inspected) exceeds one with its string repeated that many
    times.

    Mutates *lst* in place; because counts are re-taken after earlier
    replacements, later duplicates may survive unchanged (e.g. [1, 1]
    becomes {'11', 1}).
    """
    for pos, value in enumerate(lst):
        occurrences = lst.count(value)
        if occurrences > 1:
            lst[pos] = str(value) * occurrences
    return set(lst)
# Задача 12
# Task 12: count the paths of a piece that moves one row down and one column
# diagonally (left or right) from the given square (1-based "row col" input)
# to the bottom row of an 8x8 board. board[i][j] holds the number of ways to
# reach square (i, j).
y, x = map(int, input().split())
x, y = x - 1, y - 1
board = [[0]*8 for i in range(8)]
board[x][y] = 1
for i in range(x, 7):
    # Edge columns have a single upper diagonal neighbour.
    board[i+1][0] += board[i][1]
    for j in range(1, 7):
        # Interior squares are reachable from both upper diagonal neighbours.
        board[i+1][j] += board[i][j-1] + board[i][j+1]
    board[i+1][7] += board[i][6]
print(sum(board[7]))
# Задача 13
def lcm(a, b):
    """Return the least common multiple of non-negative integers *a* and *b*.

    Bug fix: lcm(0, 0) used to raise ZeroDivisionError; it now returns 0.
    """
    if a == 0 and b == 0:
        return 0
    product = a * b
    # Euclid's algorithm: when the loop ends one value is 0 and the other is
    # gcd(a, b), so their sum is the gcd and product // gcd is the lcm.
    while a != 0 and b != 0:
        if a > b:
            a %= b
        else:
            b %= a
    return product // (a + b)
# Task 13 driver: read integer pairs until a non-integer is entered, printing
# the LCM of each pair ('НОК' is the Russian abbreviation for LCM).
while 1:
    try:
        x = int(input('a = '))
        y = int(input('b = '))
        print('НОК:', lcm(x, y))
    except ValueError:
        # Non-numeric input ends the session.
        break
# Задача 14
def GCD_Loop(a, b):
    """Return the greatest common divisor of two non-negative integers.

    Keeps the original trial-division approach but fixes the zero case: when
    min(a, b) == 0 the loop body never ran, leaving ``gcd`` unbound and
    raising NameError. gcd(n, 0) == gcd(0, n) == n.
    """
    if a == 0 or b == 0:
        return a + b
    gcd = 1
    # Every common divisor up to min(a, b) is tested; the last one found
    # is the greatest.
    for i in range(1, min(a, b) + 1):
        if a % i == 0 and b % i == 0:
            gcd = i
    return gcd
# Task 14 driver: prompt for two integers and report their GCD.
x = int(input(" Enter the first number: ") )
y =int(input(" Enter the second number: "))
num = GCD_Loop(x, y)
print("GCD of two number is: ")
print(num)
|
kotenak/etoshto
|
dopzadachi.py
|
dopzadachi.py
|
py
| 3,780
|
python
|
en
|
code
| 0
|
github-code
|
6
|
41210621290
|
"""
Tipo de sugerencias
"""
FILE_REDUCE_SIZE = 100
FILE_CHANGE_FORMAT = 101
FILE_ADD_MIPMAPS = 102
FILE_REDUCE_RESOLUTION = 103
FILE_REMOVE_BUMPMAP = 104
FILE_REMOVE_ENVMAP = 105
"""
Tipo de errores
"""
PARAMETER_NOT_ENDED = 200
PARAMETER_MULTIPLE = 201
PARAMETER_COMMENTED = 203
PARAMETER_UNKNOWN = 204
PARAMETER_NOT_RECOMMENDED = 205
"""
Tipo de alertas
"""
TOO_HEAVY = 300
TOO_MANY_FILES = 301
TOO_MANY_UNUSED_FILES = 302
TOO_MANY_PARAMETERS = 303
TOO_MANY_UNUSED_PARAMETERS = 304
|
vicentefelipechile/vtfoptimizer
|
vtfmessages/messages.py
|
messages.py
|
py
| 500
|
python
|
en
|
code
| 0
|
github-code
|
6
|
33351213250
|
#imports
from fltk import *
from globals import *
from GameWidget import *
#--------------------------HEADER--------------------------
#This is the main script launcher of the program, controlling
#the timeline and structure of the game.
#The FRAMEWORK class handles scene management, switching from
#main menu screen to the actual level.
#The level class is responsible for structuring its contents
#and calculating/evaluating the game loop.
class GUIbutton(Fl_Button):
    """Special button class with custom looks for GUI and navigation purposes.
    Constructor: GUIbutton(x, y, w, h, label)"""
    def __init__(self, x, y, w, h, label=None) -> None:
        """Create the button, load its pressed/unpressed sprites and build
        the floating label box drawn on top of it."""
        # Call fltk button instance
        Fl_Button.__init__(self, x, y, w, h)
        # Load the two button sprites from the shared assets directory.
        self.upimg = Fl_PNG_Image(os.path.join(ASSETS, "upbutspr.png"))
        self.downimg = Fl_PNG_Image(os.path.join(ASSETS, "downbutspr.png"))
        # Set image to unpressed by default.
        self.image(self.upimg.copy(w, h))
        # Remove FLTK's default box so only the sprite is visible.
        self.box(FL_NO_BOX)
        # The label lives in its own box, offset 5px up, so it can be
        # nudged down while the button is pressed.
        self.lbox = Fl_Box(x, y-5, w, h)
        # Label text, font size and font face.
        self.lbox.label(label)
        self.lbox.labelsize(30)
        self.lbox.labelfont(FL_COURIER_BOLD)
        # pflag tracks whether the pressed appearance is currently applied,
        # avoiding redundant image swaps/redraws on repeated drag events.
        self.pflag = False
        # Clear the dotted keyboard-focus border.
        self.clear_visible_focus()
    def handle(self, e) -> int:
        """Override of Fl_Button.handle that swaps sprite and label offset
        on press/drag and restores them on release.

        Returns whatever the base handler returned for those events; other
        events fall through (implicitly returning None).
        """
        # Let FLTK's button handling decide whether the event is ours.
        a = super().handle(e)
        # Button pressed (or dragged while pressed).
        if a and (e == FL_PUSH or e == FL_DRAG):
            # Apply the pressed look once, not on every drag event.
            if not self.pflag:
                self.pflag = True
                self.image(self.downimg.copy(self.w(), self.h()))
                self.lbox.position(self.x(), self.lbox.y()+5)
                # Redraw the parent so sprite and label update together.
                self.parent().redraw()
            return a
        # Button released.
        if a and e == FL_RELEASE:
            # Restore the unpressed look once.
            if self.pflag:
                self.pflag = False
                self.lbox.position(self.x(), self.lbox.y()-5)
                self.image(self.upimg.copy(self.w(), self.h()))
                # Redraw the parent so sprite and label update together.
                self.parent().redraw()
            return a
class Level(Fl_Group):
    """Level class. Constructor self(r, c, s, bg, endfunc):
    r: rows, c: columns, s: level string, bg: background.
    endfunc: listener function to which level object can notify of
    level ending.
    Inherits from an FLTK group.
    LIMITATIONS:
    r*c = len(s)"""
    def __init__(self, r, c, s, bg, endfunc) -> None:
        """Build every game object from the r*c text map *s* and start the
        frame loop. Each map cell is 32x32 px; the outer one-cell border of
        the map is off-screen (hence the -32 offsets below)."""
        # Inherit from FLTK's group class; visible area excludes the border.
        Fl_Group.__init__(self, 0, 0, 32*(c-2), 32*(r-2))
        # Non-player game objects, used by the collision loop.
        self.objects = []
        # Player instance, filled in while scanning the map.
        # NOTE(review): a map without an "@" cell leaves this None and the
        # needed_keys assignment below will raise — confirm maps always
        # contain a player.
        self.chara = None
        # Character map to the resulting game object class.
        self.idtomod = {
            "X": Solid_Block,
            "^": Sawblade,
            "*": exitportal,
            "=": jumppad,
            "k": chest_key
        }
        # Listener invoked when the level is completed.
        self.endfunc = endfunc
        # Begin drawing: widgets created from here on join this group.
        self.begin()
        # Background canvas sized to the playable area.
        self.bg = Fl_Box(0, 0, 32*(c-2), 32*(r-2))
        # Set background image, scaled to fit.
        self.bg.image(Fl_JPEG_Image(os.path.join(ASSETS, bg)).copy(self.bg.w(), self.bg.h()))
        # Number of keys the player must collect to use the exit.
        keys = 0
        # Key spawn coordinates, kept so keys can be respawned after death.
        self.key_coords = []
        # Go through the provided text map, one character per cell.
        for row in range(r):
            for col in range(c):
                # Get character (note: "id" shadows the builtin here).
                id = s[(row*c)+col]
                # Special case: the player is not part of self.objects.
                if id == "@":
                    self.chara = player((col*32)-32, (row*32)-32, 16, 32, self)
                # Special case: count keys and remember where they spawn.
                if id == "k":
                    keys += 1
                    self.key_coords.append((col*32 - 32, row*32 - 32))
                # Ignore characters with no mapped object class.
                if id not in self.idtomod:
                    continue
                # Create the object at its pixel position.
                newobj = self.idtomod[id]((col*32)-32, (row*32)-32, 32, 32)
                # Register it for collision computation.
                self.objects.append(newobj)
        # Tell the player how many keys the exit requires.
        self.chara.needed_keys = keys
        self.end()
        # Begin the per-frame event loop.
        self.event_loop()
    def draw(self) -> None:
        """Special drawing method that preserves layering."""
        # In order of back to front: background, any game objects, player.
        super().draw()
        self.bg.redraw()
        for obj in self.objects:
            obj.redraw()
        self.chara.redraw()
    def collision(self, player, obj) -> bool:
        """Receives an object and player and activates object collision method
        on player. Returns collision result. Params: (player, obj)"""
        return obj.collis(player)
    def event_loop(self) -> None:
        """One gameplay frame: move the player, resolve collisions against
        the nearest objects, then schedule the next frame (~60 fps)."""
        # Ask player to apply velocity / request new coordinates first.
        self.chara.move()
        # Sort objects by distance from the player so only the nearest
        # few need collision checks.
        self.objects.sort(key=lambda a: self.chara.cdist(a.Center()))
        # Counter limiting collision checks to the 12 closest objects.
        counter = 0
        # Run through objects (player not included).
        for ind, obj in enumerate(self.objects):
            # Collide player and object.
            a = self.collision(self.chara, obj)
            # Sawblade hit: player dies; respawn all keys at their original
            # coordinates (old key widgets are deleted first, iterating
            # backwards so pops don't shift unseen indices).
            if a and isinstance(obj, Sawblade):
                for i in range(len(self.objects)-1, -1, -1):
                    if isinstance(self.objects[i], chest_key):
                        Fl.delete_widget(self.objects[i])
                        self.objects.pop(i)
                for key_x, key_y in self.key_coords:
                    self.objects.append(chest_key(key_x, key_y, 32, 32))
                break
            # Exit reached: finish the level only with enough keys.
            if isinstance(obj, exitportal) and a:
                if self.chara.keys >= self.chara.needed_keys:
                    # End level and exit without scheduling the next frame.
                    self.endfunc()
                    return None
            # Key collected: remove its widget and list entry.
            elif isinstance(obj, chest_key) and a:
                Fl.delete_widget(obj)
                self.objects.pop(ind)
            # Object optimization counter increase.
            counter += 1
            # Optimization: stop after the 12 nearest objects.
            if counter >= 12:
                break
        # Update and redraw the player.
        self.chara.refresh()
        # Schedule next frame, attempting a bit over 60 fps.
        Fl.repeat_timeout(0.015, self.event_loop)
class Framework(Fl_Double_Window):
    """Constructor (None) - This is the general game class, which handles
    graphics, running the game, and the event loop.

    NOTE: constructing an instance blocks — __init__ ends in Fl.run().
    """
    def __init__(self, title = "Simple Platformer") -> None:
        """Create the window, load the level definitions and show the
        start screen, then enter FLTK's event loop."""
        Fl_Double_Window.__init__(self, 512, 512, title)
        # Index of the next level to play, and the active Level widget.
        self.state = 0
        self.level = None
        # Load levels from the text file; levels are separated by blank lines.
        # NOTE(review): the file handle is never closed — consider a 'with'.
        self.levels = open("levels.txt", "r").read().split("\n\n")
        # Background canvas covering the window.
        self.bg = Fl_Box(0, 0, self.w(), self.h())
        # "PLAY" button, hidden until the start screen shows it.
        self.startbut = GUIbutton(190, 270, 128, 76, "PLAY")
        self.startbut.hide()
        # Box displaying the title image.
        self.titlebox = Fl_Box(0, 0, 512, 256)
        self.titlebox.hide()
        # Pressing PLAY advances into the first level.
        self.startbut.callback(self.timeline)
        # Start screen.
        self.startscreen()
        # Show the window and hand control to FLTK (blocks).
        self.show()
        Fl.run()
    def timeline(self, w=None) -> None:
        """Advance to the next level (or the win screen after the last one).

        Also used as the Level's end-of-level callback and as the PLAY
        button callback (hence the unused widget parameter *w*).
        """
        # Avoid deleting a level which doesn't exist (first call).
        if self.level:
            # Remove the running game loop timeout, then the widget itself.
            Fl.remove_timeout(self.level.event_loop)
            Fl.delete_widget(self.level)
        # Disable and hide the start button.
        self.startbut.hide()
        self.startbut.deactivate()
        # Hide the title image.
        self.titlebox.hide()
        # Begin drawing: the new Level joins this window.
        # NOTE(review): there is no matching self.end() in this method —
        # confirm group nesting is closed elsewhere.
        self.begin()
        # All levels done: restore window size and show the win screen.
        if self.state >= len(self.levels):
            self.resize(self.x(), self.y(), 512, 512)
            self.winscreen()
            return
        # Get the level's text map and its dimensions.
        nlevel = self.levels[self.state].strip().split("\n")
        r = len(nlevel)
        c = len(nlevel[0])
        # Create the level; it reports back here via self.timeline.
        self.level = Level(r, c, "".join(nlevel) , "background1.jpg", self.timeline)
        # Resize the window to fit the playable area.
        self.resize(self.x(), self.y(), (c-2)*32, (r-2)*32)
        self.state += 1
    def startscreen(self) -> None:
        """Manager for the starting screen: reset progress and show the
        title image plus the PLAY button."""
        # Reset level progression.
        self.state = 0
        # Set background.
        self.bg.image(Fl_JPEG_Image(os.path.join(ASSETS, "background1.jpg")).copy(self.bg.w(), self.bg.h()))
        # Show start button.
        self.startbut.show()
        # Show title.
        self.titlebox.show()
        self.titlebox.image(Fl_PNG_Image(os.path.join(ASSETS, "title.png")))
        self.startbut.redraw()
    def winscreen(self) -> None:
        """The victory screen: swap the background for the win image."""
        self.bg.image(Fl_PNG_Image(os.path.join(ASSETS, "winscreen.png")).copy(self.bg.w(), self.bg.h()))
# Start program: constructing Framework opens the window and blocks in Fl.run().
m = Framework()
|
SubwayMan/FLTK-Platformer
|
src/fltkplatformer.py
|
fltkplatformer.py
|
py
| 10,121
|
python
|
en
|
code
| 5
|
github-code
|
6
|
10383049173
|
from typing import Dict
import executorch.backends.qualcomm.python.PyQnnWrapperAdaptor as PyQnnWrapper
import torch
from .node_visitor import NodeVisitor, register_node_visitor
from .qnn_constants import OpDequantize, QNN_OP_PACKAGE_NAME_QTI_AISW
class DequantizeOpBase(NodeVisitor):
    """Shared QNN op builder for every dequantize overload: wraps the node's
    single input and single output tensor and emits a QNN Dequantize op."""

    def __init__(self, *args) -> None:
        super().__init__(*args)

    def define_node(
        self,
        node: torch.fx.Node,
        nodes_to_wrappers: Dict[torch.fx.Node, PyQnnWrapper.TensorWrapper],
    ) -> PyQnnWrapper.PyQnnOpWrapper:
        # Wrap the dequantize node's only input tensor.
        source = node.args[0]
        source_wrapper = self.define_tensor(
            source,
            self.get_tensor(source, node),
            PyQnnWrapper.Qnn_TensorType_t.QNN_TENSOR_TYPE_NATIVE,
            nodes_to_wrappers,
        )

        # Wrap the node's own output tensor.
        out_wrapper = self.define_tensor(
            node,
            self.get_tensor(node, node),
            PyQnnWrapper.Qnn_TensorType_t.QNN_TENSOR_TYPE_NATIVE,
            nodes_to_wrappers,
        )

        # Assemble the QNN Dequantize op: one input, one output.
        op_wrapper = PyQnnWrapper.PyQnnOpWrapper(
            node.target.__name__,
            QNN_OP_PACKAGE_NAME_QTI_AISW,
            OpDequantize.op_name,
        )
        op_wrapper.AddInputTensors([source_wrapper])
        op_wrapper.AddOutputTensors([out_wrapper])
        return op_wrapper
@register_node_visitor
class PerTensorDequantizeDefault(DequantizeOpBase):
    # FX target handled: per-tensor dequantize, default overload.
    target = "quantized_decomposed.dequantize_per_tensor.default"
@register_node_visitor
class PerTensorDequantizeTensor(DequantizeOpBase):
    # FX target handled: per-tensor dequantize, tensor-argument overload.
    target = "quantized_decomposed.dequantize_per_tensor.tensor"
@register_node_visitor
class PerChannelDequantizeDefault(DequantizeOpBase):
    # FX target handled: per-channel dequantize, default overload.
    target = "quantized_decomposed.dequantize_per_channel.default"
@register_node_visitor
class PerChannelDequantizeTensor(DequantizeOpBase):
    # FX target handled: per-channel dequantize, tensor-argument overload.
    target = "quantized_decomposed.dequantize_per_channel.tensor"
|
pytorch/executorch
|
backends/qualcomm/builders/op_dequantize.py
|
op_dequantize.py
|
py
| 2,130
|
python
|
en
|
code
| 479
|
github-code
|
6
|
644356051
|
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from .serializers import UserRegistrationSerializer
class UserRegistrationView(APIView):
    """
    API endpoint for user registration.
    """

    def post(self, request):
        """
        Handle a registration POST: validate, create the user, and return
        201 with a summary payload, or 400 with the field errors.
        """
        serializer = UserRegistrationSerializer(data=request.data)
        if not serializer.is_valid():
            # Validation failed: echo the field errors back to the client.
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        new_user = serializer.save()  # Persist the new account
        payload = {
            "message": "User registered successfully",
            "user": {
                "id": new_user.id,
                "username": new_user.username,
                "email": new_user.email,
            }
        }
        return Response(payload, status=status.HTTP_201_CREATED)
|
soovuh/military-collections-api
|
custom_auth/views.py
|
views.py
|
py
| 951
|
python
|
en
|
code
| 0
|
github-code
|
6
|
37368152653
|
from typing import List, Dict, Tuple, Any
import numpy as np
import pandas as pd
import spacy
import en_core_web_sm
from spacy.matcher import Matcher
from concept_processing.asp.asp_generator import ASPGenerator
from concept_processing.asp.asp_solver import clingo_solve
from concept_processing.asp.clingo_out_parsers import ClingoAnsParser
from concept_processing.enums import ProblemType
from concept_processing.nlp.spacy_wrapper import SpacyWrapper
from concept_processing.nlp.nlp_parser import NLPParser
from concept_processing.nlp.nlp_utils import add_punctuation, truecase, merge_not
from concept_processing.pam import count_datapoints_in_each_feature
# ILASP solution paths: per-problem-type working directories for the
# ILASP/clingo pipeline.
# NOTE(review): these are absolute, machine-specific paths (the commented-out
# pair targets another machine) — they should become configurable.
base_dir = {
    ProblemType.ATOMISATION: '/Users/Cherry0904/Desktop/roko-for-charlize/ilasp/atomisation',
    ProblemType.GENERALISATION: '/Users/Cherry0904/Desktop/roko-for-charlize/ilasp/generalisation',
    # ProblemType.ATOMISATION: '/vol/bitbucket/yy3219/roko-for-charlize/ilasp/atomisation',
    # ProblemType.GENERALISATION: '/vol/bitbucket/yy3219/roko-for-charlize/ilasp/generalisation',
}
# Filename templates, instantiated with a base directory from base_dir.
background_knowledge_file_temp = '{}/background.ilasp'
solution_file_temp = '{}/solutions/best_sol.lp'
clingo_out_file_temp = '{}/clingo_out.tmp'
class ConceptsState:
    """Row-aligned bundle of datapoint ids, labels and the binary concept
    presence-absence matrix (PAM)."""

    def __init__(self, ids: List[str], label_indices: np.ndarray, label_categories: List[str], concept_pam: np.ndarray,
                 concept_strings: List[str]):
        # Rows of the PAM must align with ids/labels, and its columns with
        # the concept strings.
        rows_consistent = len(ids) == len(label_indices) == concept_pam.shape[0]
        cols_consistent = concept_pam.shape[1] == len(concept_strings)
        assert rows_consistent and cols_consistent
        self.ids = ids
        self.label_indices = label_indices
        self.label_categories = label_categories
        self.concept_pam = concept_pam
        self.concept_strings = concept_strings

    def get_labels(self) -> List[str]:
        """Resolve each stored label index to its category name."""
        return [self.label_categories[idx] for idx in self.label_indices]

    def to_dict(self) -> Dict[str, List[Any]]:
        """Flatten the state into a column dict (e.g. for DataFrame building)."""
        return dict(id=self.ids, label=self.get_labels(), concepts=list(self.concept_pam),
                    explanations=self.concept_strings)
# Replaces the old [row_id, [concept_ids]] representation.
class ConceptBag:
    """Accumulates, per row id, the concept ids seen for it plus one label."""

    def __init__(self):
        self.store = {}   # row_id -> list of concept ids
        self.ids = []     # row ids in first-seen order
        self.labels = []  # one label per entry of self.ids

    def append(self, row_id: str, concept_ids: List[str], label: str):
        # A video can legitimately have two explanations: merge concept ids
        # for an already-known row (its label is kept from the first sighting).
        if row_id in self.store:
            self.store[row_id] = list(set(self.store[row_id]).union(concept_ids))
            return
        self.store[row_id] = concept_ids
        self.ids.append(row_id)
        # One malformed label in the source data gets normalised here.
        if label == ' it could be called a strike because the pitch landed in the strike zone before being hit':
            label = 'strike'
        self.labels.append(label)

    def to_rawbagofconcepts(self) -> List[Tuple[str, List[int]]]:
        """Return (row_id, concept_ids) pairs in first-seen order."""
        return [(row_id, self.store[row_id]) for row_id in self.ids]

    def to_pam(self) -> np.ndarray:
        """
        Creates binary presence-absence matrix (PAM)
        """
        n_rows = len(self.ids)
        # One column per concept id, up to the largest id referenced.
        n_cols = 1 + max((np.max(self.store[row_id], initial=0) for row_id in self.ids), default=0)
        data = np.zeros((n_rows, n_cols))
        for row, row_id in enumerate(self.ids):
            data[row, self.store[row_id]] = 1
        # Drop concept columns that no datapoint uses.
        unused = count_datapoints_in_each_feature(data) == 0
        return data[:, ~unused]
# Applies the generalisation/atomisation procedure to extract the concepts.
class ConceptExtractor:
    """Turns free-text explanations into numbered concepts by running each
    sentence through the external ILASP/clingo atomisation and then
    generalisation pipelines."""
    def __init__(self, nlp: NLPParser):
        # NLP parser used both for sentence splitting and truecasing.
        self.nlp = nlp
        # concept string -> contiguous integer id, assigned on first sight.
        self.concept_dict = {}
        self.next_concept_id = 0
        self.concept_bag = ConceptBag()
    def parse(self, row_id: str, premise_sents: str, label: str):
        """Atomise + generalise the explanation text for *row_id* and record
        the resulting concept ids in the bag."""
        # No need to include error rows.
        if label != 'none':
            premise_sents = self.nlp(premise_sents)
            premise_sents = [str(sent) for sent in premise_sents.sentences()]
            # Post-process atomic sentences to remove super short ones
            atomic_sents = self.split(premise_sents, ProblemType.ATOMISATION)
            # atomic_sents = self.remove_short_concepts(atomic_sents)
            # atomic_sents = self.post_process_short_concepts(atomic_sents)
            generalised_sents = self.split(atomic_sents, ProblemType.GENERALISATION)
            # generalised_sents = self.post_process_short_concepts(generalised_sents)
            # concept_ids = [self._get_id(sent) for sent in atomic_sents]
            concept_ids = [self._get_id(sent) for sent in generalised_sents]
            self.concept_bag.append(row_id, concept_ids, label)
    def _get_id(self, sent: str):
        # Assign ids in first-seen order so they stay contiguous from zero.
        if sent not in self.concept_dict:
            self.concept_dict[sent] = self.next_concept_id
            self.next_concept_id += 1
        return self.concept_dict[sent]
    def get(self) -> Tuple[ConceptBag, List[str]]:
        """Return the accumulated bag plus the id-ordered concept strings."""
        return self.concept_bag, concept_dict_to_list(self.concept_dict)
    @staticmethod
    def _write(clingo_out_file: str, program: List[str]):
        # Serialise an ASP program, one statement per line, for clingo.
        with open(clingo_out_file, 'w') as f:
            for elem in program:
                f.write(elem + '\n')
    def split(self, sents: List[str], problem_type: ProblemType) -> List[str]:
        """Run each sentence through the ASP solver for *problem_type* and
        concatenate the resulting (truecased, punctuated) sub-sentences."""
        sols = []
        for sent in sents:
            b_dir = base_dir[problem_type]
            asp_generator = ASPGenerator(self.nlp, problem_type)
            asp_generator.parse(str(sent))
            # Exactly 1 element since we do not have concepts texts.
            program = asp_generator.get_programs()[0]
            clingo_out_file = clingo_out_file_temp.format(b_dir)
            solution_file = solution_file_temp.format(b_dir)
            background_file = background_knowledge_file_temp.format(b_dir)
            self._write(clingo_out_file, program)
            atoms = clingo_solve(clingo_out_file, background_file, solution_file)
            asp_parser = ClingoAnsParser(problem_type)
            # NOTE(review): this rebinding shadows the *sents* parameter; the
            # ongoing for-loop still iterates the original list, but the
            # shadowing is fragile — consider a different local name.
            sents = asp_parser.get_sentences(atoms)
            atomic_sents = [add_punctuation(merge_not(truecase(sent, self.nlp))) for sent in sents]
            sols += atomic_sents
        return sols
    # Remove concepts with only one to three tokens.
    def remove_short_concepts(self, sents: List[str]) -> List[str]:
        num_of_words = [len(sent.split()) for sent in sents]
        index_of_short_concepts = [i for i, j in enumerate(num_of_words) if j == 1 or j == 2 or j==3]
        index_of_all_concepts = [i for i, j in enumerate(num_of_words)]
        index_of_long_concepts = [x for x in index_of_all_concepts if x not in index_of_short_concepts]
        sents = [sents[i] for i in index_of_long_concepts]
        return sents
    # Remove short concepts that satisfy some rules defined on POS tags.
    def post_process_short_concepts(self, sents: List[str]) -> List[str]:
        # nlp = SpacyWrapper()
        nlp = en_core_web_sm.load()
        excmatcher = self.add_exc_matcher(nlp)
        num_of_words = [len(sent.split()) for sent in sents]
        index_of_short_concepts = [i for i, j in enumerate(num_of_words) if j == 2 or j==3]
        index_of_all_concepts = [i for i, j in enumerate(num_of_words)]
        index_of_long_concepts = [x for x in index_of_all_concepts if x not in index_of_short_concepts]
        # index_of_long_concepts = [i for i, j in enumerate(num_of_words) if j != 2 and j !=3]
        # index_of_3_token_concepts = [i for i, j in enumerate(num_of_words) if j == 3]
        index_to_keep = []
        for i in index_of_short_concepts:
            doc = nlp(sents[i])
            match = excmatcher(doc)
            if match == []:  # If there is no match
                index_to_keep.append(i)
        # Note: kept short concepts come before the long ones in the output.
        sents = [sents[i] for i in index_to_keep + index_of_long_concepts]
        return sents
    def add_exc_matcher(self, nlp):
        """Build the spaCy Matcher that flags meaningless short concepts."""
        # Exclusion pattern: noun/pronoun/determiner followed by a verb,
        # such as "it looks" or "he's looking".
        excpattern1 = [{"POS": {"IN": ["NOUN", "PRON", "DET"]}}, {"POS": "VERB"}]
        # excpattern2 = [{"POS": "DET"}, {"POS": "NOUN"}, {"POS": "VERB"}] # such as "the woman looks"
        # Dirty way of using SpacyWrapper, kept because this code is not maintained
        # excmatcher = Matcher(nlp._nlp.vocab)
        excmatcher = Matcher(nlp.vocab)
        excmatcher.add("meaningless_short_concept", [excpattern1])
        # excmatcher.add("short_concept_3_tokens", [excpattern2])
        # nlp._nlp.add_pipe("benepar", config={"model": "benepar_en3"})
        return excmatcher
def concept_dict_to_list(concept_dict: Dict[str, int]) -> List[str]:
    """Invert a concept -> index mapping into an index-ordered list.

    parameters
    ----------
    concept_dict - dictionary mapping from concept (e.g. strs) to index (int)
        where indices are contiguous and starting from zero.

    returns
    -------
    concepts - a list where concepts[i] is the key k with concept_dict[k] == i
    """
    ordered = [None] * len(concept_dict)
    for concept, index in concept_dict.items():
        ordered[index] = concept
    return ordered
|
CharlizeY/AI-thesis
|
concept_processing/extraction.py
|
extraction.py
|
py
| 9,204
|
python
|
en
|
code
| 0
|
github-code
|
6
|
35724557326
|
from django.shortcuts import render
from social.apps.django_app.middleware import SocialAuthExceptionMiddleware
from social import exceptions as social_exceptions
class SocialAuthExceptionMiddleware(SocialAuthExceptionMiddleware):
    # NOTE(review): this class deliberately shadows the imported base-class
    # name; the base resolves at class-creation time so subclassing works,
    # but the shadowing is confusing — consider a distinct name.
    def process_exception(self, request, exception):
        """Render a friendly error page for python-social-auth exceptions.

        Exceptions whose class name is not defined in social.exceptions fall
        through (implicit None), leaving them to Django's default handling.
        """
        if hasattr(social_exceptions, exception.__class__.__name__):
            return render(request, 'error.html', {
                'errorMessage': 'There was an authentication error.',
                'errorDetails': str(exception)
            })
        # else:
        #     raise exception
|
jasonwaters/fitcompetition
|
fitcompetition/middleware.py
|
middleware.py
|
py
| 592
|
python
|
en
|
code
| 7
|
github-code
|
6
|
26841047411
|
# Practice: joining multiple strings with +.
# Task 1: add the variable desc = '我是一个大笨蛋'
# Task 2: print 10 times: 我是ON SIR教室学员007,我是一个大笨蛋
hero = '我'
organization = 'ON SIR教室'
identity = '学员'
action = '是'
ID = '007'
# Concatenates to: 我是ON SIR教室学员007
print(hero+action+organization+identity+ID)
|
sendtowongsir/shl-python-learning
|
02_printString.py
|
02_printString.py
|
py
| 288
|
python
|
zh
|
code
| 0
|
github-code
|
6
|
17961314435
|
from pymavlink import mavutil
from contextlib import nullcontext
CONNECTION_STRING = "udpin:0.0.0.0:14550"
DRONE_IDS = [3, 4]
def wait_heartbeats_multi(connection):
    """Block until a HEARTBEAT has been received from every drone in DRONE_IDS."""
    pending = set(DRONE_IDS)
    while pending:
        msg = connection.recv_match(type="HEARTBEAT")
        # recv_match may return None when no message is queued; just retry.
        if msg:
            pending.discard(msg.get_srcSystem())
def connect():
    """Open the MAVLink UDP connection and block until every drone in
    DRONE_IDS has reported a heartbeat. Returns the live connection."""
    connection = mavutil.mavlink_connection(CONNECTION_STRING)
    # First wait for any heartbeat (establishes link/target fields), then
    # for one from each configured drone.
    connection.wait_heartbeat()
    wait_heartbeats_multi(connection)
    return connection
def recv_ack(connection):
    """Consume COMMAND_ACK messages until one arrives from the connection's
    current target system, then print it."""
    while True:
        ack = connection.recv_match(type="COMMAND_ACK", blocking=True)
        # ACKs from other systems on the shared link are silently dropped.
        if ack.get_srcSystem() == connection.target_system:
            print("Received ACK:", ack)
            return
def for_all_drones(f):
    """Decorator: run *f* once per drone in DRONE_IDS, retargeting the
    connection before each call.

    Improvement: functools.wraps now preserves the wrapped function's name
    and docstring, which the original decorator discarded.
    """
    from functools import wraps

    @wraps(f)
    def wrapped(connection, *args, **kwargs):
        for drone in DRONE_IDS:
            # Point every subsequent MAVLink send at this drone.
            connection.target_system = drone
            f(connection, *args, **kwargs)
    return wrapped
def send_command(connection, cmd, confirm, p1=0, p2=0, p3=0, p4=0, p5=0, p6=0, p7=0, lock=nullcontext(), ack=True):
    """Send a COMMAND_LONG to the connection's current target system.

    Args:
        connection: pymavlink connection; its target_system/component are used.
        cmd: numeric MAV_CMD id, or the name of a mavutil.mavlink constant.
        confirm: COMMAND_LONG confirmation counter.
        p1..p7: command parameters (meaning depends on *cmd*).
        lock: context manager serialising the send (defaults to a no-op;
            the single shared nullcontext() default is safe because
            nullcontext is reusable).
        ack: when True, block until the matching COMMAND_ACK is received.

    Raises:
        AttributeError: if *cmd* is a string naming no mavlink constant.
    """
    if type(cmd) == str:
        # Resolve a symbolic command name such as "MAV_CMD_NAV_TAKEOFF".
        try:
            cmd = getattr(mavutil.mavlink, cmd)
        except AttributeError:
            raise AttributeError(f"Unknown command `{cmd}`")
    with lock:
        connection.mav.command_long_send(
            connection.target_system,
            connection.target_component,
            cmd,
            confirm, p1, p2, p3, p4, p5, p6, p7
        )
    if ack:
        recv_ack(connection)
|
jath03/mavlink-testing
|
utils.py
|
utils.py
|
py
| 1,543
|
python
|
en
|
code
| 0
|
github-code
|
6
|
20288234307
|
# -*- coding: utf-8 -*-
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Runtime dependencies, pinned to the versions the project was tested with.
REQUIRES = [
    'docopt',
    'argparse==1.2.1',
    'requests==2.8.1',
    'trello==0.9.1',
    'wsgiref==0.1.2',
]
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to pytest."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported lazily so setup.py itself works without pytest installed.
        import pytest
        sys.exit(pytest.main(self.test_args))
def find_version(fname):
    '''Attempts to find the version number in the file names fname.
    Raises RuntimeError if not found.
    '''
    pattern = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
    with open(fname, 'r') as fp:
        for line in fp:
            hit = pattern.match(line)
            if hit:
                version = hit.group(1)
                break
        else:
            # No line matched at all.
            version = ''
    if not version:
        raise RuntimeError('Cannot find version information')
    return version
# Parse the canonical version string out of the module source.
__version__ = find_version("trello2text.py")
def read(fname):
    """Return the full text content of *fname*."""
    with open(fname) as fp:
        return fp.read()
setup(
    name='trello2text',
    # Bug fix: use the version parsed from the module so the two never drift
    # apart (the original hard-coded "0.1.1" and ignored find_version's result).
    version=__version__,
    description='Parses trello board and outputs text',
    long_description=read("README.md"),
    author='Alejandro Cirino',
    author_email='alejandro.cirino@devecoop.com',
    url='https://github.com/cirinoalejando/trello2text',
    install_requires=REQUIRES,
    license=read("LICENSE"),
    zip_safe=False,
    keywords='trello2text',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
    # NOTE(review): ".utils" looks like a typo for "utils" — confirm the
    # actual module name before releasing.
    py_modules=["trello2text", ".utils"],
    entry_points={
        'console_scripts': [
            "trello2text = trello2text:main"
        ]
    },
    tests_require=['pytest'],
    cmdclass={'test': PyTest}
)
|
cirinoalejandro/trello-to-text
|
setup.py
|
setup.py
|
py
| 2,304
|
python
|
en
|
code
| 4
|
github-code
|
6
|
31935770331
|
import pandas as pd
import numpy as np
import time
import sys
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
# Print full arrays instead of truncated previews when debugging.
np.set_printoptions(threshold=sys.maxsize)

# Pivot the long-format CSV into a 2-D grid of 'vari' values:
# rows = p3, columns = p1 (the .T makes p3 the first axis).
df = pd.read_csv('contourQ3data.csv')
Z = df.pivot_table(index='p1', columns='p3', values='vari').T.values
X_unique = np.sort(df.p1.unique())
Y_unique = np.sort(df.p3.unique())
X, Y = np.meshgrid(X_unique, Y_unique)

fig, ax = plt.subplots()
CS = ax.contourf(X, Y, Z, cmap='RdGy')
ax.set_title('Contour plot of the behavour of avg I')
ax.set_xlabel('p1')
ax.set_ylabel('p3')
ax.set_aspect('equal')
fig.colorbar(CS, format="%.2f")
# Bug fix: save BEFORE show — plt.show() blocks and, once its window is
# closed, the active figure is torn down, so the original savefig call
# (placed after show) wrote an empty image.
plt.savefig('contour-plot-of-vari-0.05.png', dpi=300)
plt.show()
|
isabelnic/Modelling-and-Visualisation
|
checkpoint 2/plot_contour_vari.py
|
plot_contour_vari.py
|
py
| 694
|
python
|
en
|
code
| 0
|
github-code
|
6
|
42618233661
|
import unittest
from unittest.mock import patch
from api.whois import whois_search
class WhoisTestCase(unittest.TestCase):
    """Unit test for api.whois.whois_search with the lookup library stubbed."""

    @patch('api.whois.pythonwhois')
    def test_whois_search(self, mock_pythonwhois):
        # Stub the third-party whois lookup so no network traffic happens.
        mock_pythonwhois.get_whois.return_value = {'registrant': 'John Doe'}
        outcome = whois_search('example.com')
        self.assertEqual(outcome, {'result': {'registrant': 'John Doe'}})


if __name__ == '__main__':
    unittest.main()
|
irfanirshad/flask-api
|
tests/test_whois.py
|
test_whois.py
|
py
| 576
|
python
|
en
|
code
| 1
|
github-code
|
6
|
73632775228
|
# dependencies module
from crypt import methods
import inspect, ctypes, os, socket
from logging import shutdown
import cv2 as CV
from threading import Thread
from dotenv import load_dotenv
from flask import Flask, render_template, request, Response, make_response, jsonify
from random import randint as rand
from flask_socketio import SocketIO, emit
from time import sleep
from datetime import datetime
from newstream import Camera as cam
# modules I have created
from Python.Sensors import *
from Python.sendEmail import sendMail
from Python.ReadVoltages import Adc
from Python.Buzzer import buzzer
from Python.Ultrasonic import Ultrasonic as lt
# Freenove modules but i modified them more explanation are givin on week Five
# https://securityguardjohnmuller.netlify.app/#weekNumFive
# the original code is on
# https://github.com/Freenove/Freenove_4WD_Smart_Car_Kit_for_Raspberry_Pi
from Python.Motor import Motor
from Python.servo import Servo
from Python.led import Led
from Python.Video import VideoStreaming
# Create one shared instance of each hardware-facing module.
wheelMotor = Motor()
servoMotor = Servo()
lineTracking = LineTracking()
lightTracking = Light()
batteryInfo = Adc()
newLed = Led()
# NOTE(review): duplicate construction — lightTracking was already created
# above; this second assignment replaces it.
lightTracking = Light()
ultrasonicSensor = Ultrasonic()
newLineTracking = LineTracking()
videoCapture = CV.VideoCapture(0)
newUltraSonic = lt
# Load credentials and flags from the .env file.
load_dotenv()
serverEmail = os.getenv("ADMIN_EMAIL")
serverPassword = os.getenv("ADMIN_PASSWORD")
# Initialise module-level state shared by the request handlers and threads.
maxServoValue = 180
minServoValue = 0
userName = None
serverThreads = []
LEDsThreads = []
sensorsThreads = []
# Connecting a UDP socket (no traffic is sent) is a standard trick to
# discover this machine's outbound LAN IP address.
socketConnection = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
socketConnection.connect(("1.1.1.1", 0))
IPAddress = socketConnection.getsockname()[0]
receiverEmailAddress = "Godsaveme2001@gmail.com"
# These flags flip when the page first loads, so a status email is sent once
# at server start-up rather than on every page (re)load.
serverHasStarted = os.getenv("serverHasStarted")
pageAlreadyAccessedStarted = os.getenv("pageAlreadyAccessedStarted")
# Latest sensor readings, refreshed by the background threads below.
objectsDistance = 0
rightSensorReadings = 0
leftSensorReadings = 0
# Current time/date captured at import, used in the status email.
currentTime = datetime.now().strftime("%H:%M:%S")
currentDate = datetime.now().date()
def Color(red, green, blue, white=0):
    """Pack RGB(W) components into a single integer colour value.

    Each component is an intensity in 0-255; the result is laid out as
    0xWWRRGGBB (white in the top byte, then red, green, blue).

    Args:
        red: red intensity, 0-255.
        green: green intensity, 0-255.
        blue: blue intensity, 0-255.
        white: optional white-channel intensity, 0-255 (defaults to 0).

    Returns:
        int: the packed colour value.
    """
    packed = white << 24
    packed |= red << 16
    packed |= green << 8
    packed |= blue
    return packed
def asyncRaise(thread, executeType):
    """Asynchronously raise *executeType* (e.g. SystemExit) inside another thread.

    Uses the CPython C-API PyThreadState_SetAsyncExc — the well-known recipe
    for force-stopping a thread. CPython-only; the exception is delivered the
    next time the target thread executes bytecode.

    Args:
        thread (int): the target thread's identifier (Thread.ident).
        executeType: exception class to raise in that thread; an exception
            instance is converted to its class first.

    Raises:
        ValueError: if no thread with that id exists.
        SystemError: if the C call reports an unexpected result (the raise
            is then undone before failing).
    """
    threadIndent = ctypes.c_long(thread)
    if not inspect.isclass(executeType):
        executeType = type(executeType)
    result = ctypes.pythonapi.PyThreadState_SetAsyncExc(
        threadIndent, ctypes.py_object(executeType)
    )
    if result == 0:
        # No thread state matched the given id.
        raise ValueError("invalid thread id")
    elif result != 1:
        # More than one thread state was affected: undo and bail out.
        ctypes.pythonapi.PyThreadState_SetAsyncExc(threadIndent, None)
        raise SystemError("PyThreadState_SetAsyncExc failed")
def killThread(Threads):
    """Stop every thread in *Threads*, removing the stopped ones from the list.

    Bug fix: the original removed items from the list WHILE iterating it,
    which skips every other element; we iterate over a snapshot instead so
    all threads are visited. (The original empty-list guard was redundant —
    iterating an empty list is already a no-op.)

    Args:
        Threads (list): threads to stop; mutated in place.
    """
    for thread in list(Threads):  # snapshot: safe removal during iteration
        try:
            asyncRaise(thread.ident, SystemExit)
            Threads.remove(thread)
            print(f"{thread} has been stopped")  # for testing purposes
        except ValueError as error:
            # Thread id no longer valid (already finished): report and keep it.
            print(f"an error occur : \n{error}")
def getTemperature():
    """Read the Raspberry Pi CPU temperature via vcgencmd.

    Returns:
        str: the numeric part of vcgencmd's output (e.g. "42.0" from
        "temp=42.0'C\\n").
    """
    # vcgencmd prints "temp=<value>'C\n": drop the 5-char prefix and the
    # trailing "'C\n" (the original's format() wrapper was a no-op on str).
    raw = os.popen("/opt/vc/bin/vcgencmd measure_temp").read()
    return raw[5:-3]
def getBatteryPercentage():
    """Convert the ADC voltage reading on channel 2 into a battery percentage.

    Returns:
        int: estimated charge percentage (also printed for debugging).
    """
    reading = batteryInfo.recvADC(2)
    # Empirical volts-to-percent scaling (factor 30 / 1.4) — calibration
    # constants inherited from the original code.
    percentage = reading / 1.4 * 30
    print(percentage)
    return int(percentage)
def colorAnimation(MODE="string"):
    """Run one of the LED strip animations forever (background worker).

    Intended use::

        anim = Thread(target=colorAnimation, args=("RGB",))
        anim.start()

    Args:
        MODE (str): one of ``"RGB"``, ``"Random"``, ``"Cycle"``,
            ``"Rainbow"`` or ``"Animation"``; anything else prints a
            message and returns immediately.

    NOTE(review): "Random" drives ``rainbowCycle`` while "Cycle" drives
    ``customRandomAnimation`` — the mapping looks swapped, but it is
    kept exactly as the original behaved; confirm with the LED helper.
    """
    animations = {
        "RGB": newLed.customRGB,
        "Random": newLed.rainbowCycle,
        "Cycle": newLed.customRandomAnimation,
        "Rainbow": newLed.rainbow,
        "Animation": newLed.theaterChaseRainbow,
    }
    step = animations.get(MODE)
    if step is None:
        print("not supported mode")
        return None
    while True:
        step(newLed.strip)
def ultrasonicBackground():
    """Poll the ultrasonic sensor forever, publishing into a global.

    Meant for a worker thread::

        ultrasonicThread = Thread(target=ultrasonicBackground)
        ultrasonicThread.start()

    Updates the module-level ``objectsDistance`` twice per second.
    """
    global objectsDistance
    while True:
        objectsDistance = ultrasonicSensor.getDistance()
        sleep(0.5)  # 2 Hz polling keeps sensor and CPU load low
# Worker thread for the ultrasonic sensor; registered in both lists so it
# can be stopped globally (serverThreads) or together with the other
# sensor workers (sensorsThreads).
ultrasonicThread = Thread(target=ultrasonicBackground)
serverThreads.append(ultrasonicThread)
sensorsThreads.append(ultrasonicThread)
def lightSensorReadingsBackground():
    """Poll both light sensors forever, publishing into globals.

    Meant for a worker thread::

        lightSensorsThread = Thread(target=lightSensorReadingsBackground)
        lightSensorsThread.start()

    Updates ``leftSensorReadings`` / ``rightSensorReadings`` (ADC
    channels 0 and 2) twice per second.
    """
    global leftSensorReadings
    global rightSensorReadings
    while True:
        leftSensorReadings = batteryInfo.recvPCF8591(0)
        rightSensorReadings = batteryInfo.recvPCF8591(2)
        sleep(0.5)  # 2 Hz polling, same cadence as the other sensors
# print(leftSensorReadings, rightSensorReadings)  # for testing purposes
# Worker thread for the light sensors; registered in both thread lists
# (see serverThreads / sensorsThreads above).
lightSensorsThread = Thread(target=lightSensorReadingsBackground)
serverThreads.append(lightSensorsThread)
sensorsThreads.append(lightSensorsThread)
# Park all components, notify the owner by e-mail, then exit.
def destroy(Error, shutDown=False):
    """Shut the rover down cleanly and notify the owner by e-mail.

    Stops the wheels, centres both servos, silences the buzzer, blanks
    the LED strip, resets the state environment variables and kills all
    background threads.  A status e-mail is sent first.

    Args:
        Error (str | Exception | None): the exception that caused the
            shutdown, or ``None`` for a manual/clean stop (selects the
            e-mail subject and body).
        shutDown (bool): when True, power the Raspberry Pi off after
            the cleanup.
    """
    if Error is not None:
        messageSubject = "an error occur"
        messageBody = f"unexpected error occur cause the server to shutdown at {currentTime}\nThe error was\n{Error}"
    else:
        messageSubject = "Server went down"
        # BUG FIX: the original appended "{Error}" here, which printed a
        # literal "None" at the end of the clean-shutdown e-mail.
        messageBody = f"server shutdown manually at {currentTime} all looks good!"
    # Send the status e-mail before tearing hardware down.
    sendMail(
        subject=messageSubject,
        receiver=receiverEmailAddress,
        body=messageBody,
        email=serverEmail,
        password=serverPassword,
    )
    # Give the mail a second to go out.
    sleep(1)
    # Stop all wheels.
    wheelMotor.setMotorModel(0, 0, 0, 0)
    # Re-centre the up/down servo (channel "1") ...
    servoOneUpDownCurrentValue = 90
    servoMotor.setServoPwm("1", servoOneUpDownCurrentValue)
    # ... and the left/right servo (channel "0").
    servoZeroRightLeftCurrentValue = 90
    servoMotor.setServoPwm("0", servoZeroRightLeftCurrentValue)
    # Silence the buzzer.
    buzzer(False, False, False)
    # Reset the state flags read on the next start-up.
    os.environ["pageAlreadyAccessedStarted"] = "False"
    os.environ["serverHasStarted"] = "False"
    # Blank the LED strip.
    newLed.colorWipe(newLed.strip, Color(0, 0, 0), 10)
    # Stop every background worker ...
    killThread(serverThreads)
    # ... and give them a moment to unwind.
    sleep(1)
    # Optionally power the Pi off entirely.
    if shutDown:
        os.system("sudo shutdown now")
    # Exit the process (raises SystemExit).
    exit()
try:
    # Flask app: __name__ marks this as a single-module app; template and
    # static folders are project-local.
    app = Flask(__name__, template_folder="Templates", static_folder="Static")
    # socketio = SocketIO(app, async_mode=None)
    # First start in this environment: announce the server by e-mail and
    # beep/flash so the user knows it is up.
    if serverHasStarted == "False":
        print("Server Running... ")  # for testing purposes
        for _ in range(4):
            buzzer(status=True)
            # NOTE(review): every other colorWipe call in this file is
            # colorWipe(strip, Color(...), wait) — these numeric
            # arguments look wrong; confirm against the LED helper.
            newLed.colorWipe(255.166, 99)
            buzzer(status=False)
            sleep(0.25)
            newLed.colorWipe(0.0, 0)
        sendMail(
            subject="Server has started",
            receiver=receiverEmailAddress,
            body=f"""Hey, this is the smart car auto mail system\nThe Server has started running on http://{IPAddress}:5500/\nat {currentTime} - {currentDate} everything looks fine! """,
            password=serverPassword,
            email=serverEmail,
        )
        os.environ["serverHasStarted"] = "True"
    # Root route: serves the app page and, on the first access after
    # start-up, sends a notification e-mail.
    @app.route("/", methods=["POST", "GET"])
    def index():
        """Render the main app page; e-mail a note on the first visit."""
        global userName
        # Credential-checking sketch kept from the original author:
        # credentials = request.get_data()
        # password == credentials["password"]
        # email == credentials["email"]
        # if password == os.environ[str(password)] and email == os.environ[str(email)]
        # userName = os.environ[str(email) + "userName"]
        # NOTE(review): this reads the module-level snapshot taken at
        # start-up, so flipping the env var below only takes effect on
        # the next restart.
        if pageAlreadyAccessedStarted == "False":
            sendMail(
                subject="Server has accessed",
                receiver=receiverEmailAddress,
                body=f"Hey, this is the smart car auto mail system\nThe Server has accessed at {currentTime} by {userName} \n everything looks fine! ",
                password=serverPassword,
                email=serverEmail,
            )
            # Remember that the page has been visited.
            os.environ["pageAlreadyAccessedStarted"] = "True"
        return render_template("app.html")
# send CPU and power data to the client
# get other objects distance
@app.route("/data/<type>")
def sendData(type):
userName = " John Muller"
data = {
"username": userName,
"Temperature": getTemperature(), # CPU Temperature
"power": getBatteryPercentage(), # Battery info
"dataDistance": objectsDistance,
"roomTemperature": rand(0, 150),
}
if type == "All":
response = make_response(jsonify(data))
return response
elif type == "Light":
lightSensorData = data["rightSensorReadings": rightSensorReadings,
"leftSensorReadings": leftSensorReadings]
# start reading light distance
lightSensorsThread.start()
lightSensorsThread.join()
response = make_response(jsonify(lightSensorData))
return response
elif type == "distance":
# start reading objects distance
ultrasonicThread.start()
ultrasonicThread.join()
else:
# stopping only these two
killThread(sensorsThreads)
return render_template("app.html")
    def captureAnImage():
        """Yield one MJPEG multipart chunk per camera frame, forever.

        Yields:
            bytes: a ``--frame`` multipart section containing JPEG data.

        NOTE(review): ``frame`` is both written with ``CV.imwrite``
        (which expects an image array) and concatenated with bytes
        (which expects an encoded JPEG buffer) — one of the two usages
        looks wrong; confirm what ``cam.get_frame()`` returns.
        """
        while True:
            frame = cam.get_frame()
            # Also keep the latest frame on disk in the working directory.
            CV.imwrite("pic.jpg", frame)
            yield (
                b"--frame\r\n"
                b"Content-Type: image/jpeg\r\n\r\n" + frame + b"\r\n"
            )
@app.route("/videoStream")
def videoStream():
return Response(
captureAnImage(), mimetype="multipart/x-mixed-replace; boundary=frame"
)
# wheel directions and buzzer route
# this route is created to move the car according to the requested value
@app.route("/wheel/<direction>")
# create a function that checks the direction and call wheelMotor function
# to set the wheels and move the car according the given direction
def wheelDirections(direction):
# wheel button is clicked in the frontend (user's interface)
# Do the following
# if objectsDistance >= 20:
# if direction is set to forward
if direction == "forward":
# moving the car forward
# set all wheels value negative so the car moves forward
wheelMotor.setMotorModel(-2000, -2000, -2000, -2000)
# the below function is { return render_template("app.html")} doing nothing in this case
# because in the front end i am not sending request to get new page so the page
# do not refresh
# i just did it so it does not give an error
# also if the user hit to the directory for example host/wheel/forward the flask server will
# redirecting the user to app page but the wheel will keep running and that what wheel happen if i sent
# GET request like "host/wheel/forward"
# i can return none but the flask server will give a warning
# and if i return text like "car is moving forward" it will go to a new page showing the text and it's not
# effecting way
# more explanation are given on week four "https://securityguardjohnmuller.netlify.app/#weekNumFour"
return render_template("app.html")
# if direction is set to backward
elif direction == "backward":
# moving the car forward
# set all wheels value positive so the car moves backward
wheelMotor.setMotorModel(2000, 2000, 2000, 2000)
return render_template("app.html")
# if direction is set to left
elif direction == "left":
# turning the car to the left
# set the left wheels value positive and lower than the right wheels so the car turn to the left
wheelMotor.setMotorModel(-1500, -1500, 1500, 1500)
return render_template("app.html")
# if direction is set to right
elif direction == "right":
# turning the car to the left
# set the right wheels value negative and lower than the left wheels so the car turn to the right
wheelMotor.setMotorModel(1500, 1500, -1500, -1500)
return render_template("app.html")
# if direction is set to buzzer
elif direction == "buzzer":
# turn buzzer on
buzzer(True)
return render_template("app.html")
# if direction is set to stop or anything else
else:
# make sure that wheels are set to zero
wheelMotor.setMotorModel(0, 0, 0, 0)
# and buzzer is turned on
buzzer(False)
# if the user hard coded a the url for example host/wheel/example
# but in my case i am sending stop when button reveals or not clicked
# more explanation are given on week four "https://securityguardjohnmuller.netlify.app/#weekNumFour"
return "invalid request"
# else:
# wheelMotor.setMotorModel(0, 0, 0, 0)
# buzzer(False)
# return render_template("app.html")
# servo directions route
@app.route("/servo/<directionURL>", methods=["POST"])
def servoDirections(directionURL):
# get value from the clint side and store it in a variable
currentDirectionValue = request.get_json()
# print(currentDirectionValue) # for testing purposes
if (
# check if the current value is between the minimum and maximum value
# if value is not in between min and max the function will return invalid request
# the below code is to double check so if someone has make changes on the clint side it won't effect in her
currentDirectionValue >= minServoValue
and currentDirectionValue <= maxServoValue
):
# the upper (left right)servo motor has address is 0
# the bottom (up and down ) servo motor has address 1
# if direction url is "example" will move to it's given direction
if directionURL == "up":
servoMotor.setServoPwm("1", currentDirectionValue)
return render_template("app.html")
elif directionURL == "right":
servoMotor.setServoPwm("0", currentDirectionValue)
return render_template("app.html")
elif directionURL == "down":
servoMotor.setServoPwm("1", currentDirectionValue)
return render_template("app.html")
elif directionURL == "left":
servoMotor.setServoPwm("0", currentDirectionValue)
return render_template("app.html")
else:
servoMotor.setServoPwm("1", 90)
servoMotor.setServoPwm("0", 90)
return render_template("app.html")
else:
return "invalid request please try again "
# send Email Route
@app.route("/Email/<Type>", methods=["POST", "GET"])
def sendEmail(Type):
# when user press on send email if method equal to "POST"
# the data will be sent to the given gmail with it's body
# but the subject is initialed in here
if Type == "POST":
data = request.get_json()
sendMail(
subject="Smart car mail system",
receiver=data["receiver"],
body=data["body"],
password=serverPassword,
email=serverEmail,
)
# if method == "GET" then the system summary message will be sent
elif Type == "GET":
sendMail(
subject="Smart car summary",
receiver=receiverEmailAddress,
body=f"""Hey, this is the smart car auto mail system\n
everything looks fine!{currentTime}\n
one accessed user: {userName}\n
everything looks fine!""",
password=serverPassword,
email=serverEmail,
attachment=True,
)
    # LED strip route: whole-strip animation modes or individual pixels.
    @app.route("/LEDs/<stripType>/<LEDStatus>", methods=["POST", "GET"])
    def LEDs(stripType, LEDStatus):
        """Control the LED strip.

        *stripType* selects an animation ("RGB", "chaserAnimation",
        "rainbow", "cycle", "randomColors") run on a background thread,
        or "single" to set/clear one pixel from the JSON payload;
        *LEDStatus* is "on"/"off".
        """
        if stripType != "single":
            # Entering an animation mode: stop any running animation
            # thread and blank the strip first.
            killThread(LEDsThreads)
            sleep(0.5)
            newLed.colorWipe(newLed.strip, Color(0, 0, 0), 10)
            if stripType == "RGB" and LEDStatus == "on":
                RGBModeThread = Thread(target=colorAnimation, args=("RGB",))
                serverThreads.append(RGBModeThread)
                LEDsThreads.append(RGBModeThread)
                print(
                    "RGB animation mode thread has started!.."
                )  # for testing purposes
                RGBModeThread.start()
            elif stripType == "chaserAnimation" and LEDStatus == "on":
                theaterChaseRainbow = Thread(target=colorAnimation, args=("Animation",))
                serverThreads.append(theaterChaseRainbow)
                LEDsThreads.append(theaterChaseRainbow)
                print(
                    "Chaser animation mode thread has started!.."
                )  # for testing purposes
                theaterChaseRainbow.start()
            elif stripType == "rainbow" and LEDStatus == "on":
                rainbow = Thread(target=colorAnimation, args=("Rainbow",))
                serverThreads.append(rainbow)
                LEDsThreads.append(rainbow)
                print(
                    "Rainbow animation mode thread has started!.."
                )  # for testing purposes
                rainbow.start()
            elif stripType == "cycle" and LEDStatus == "on":
                rainbowCycle = Thread(target=colorAnimation, args=("Cycle",))
                serverThreads.append(rainbowCycle)
                LEDsThreads.append(rainbowCycle)
                print(
                    "Cycle animation mode thread has started!.."
                )  # for testing purposes
                rainbowCycle.start()
            elif stripType == "randomColors" and LEDStatus == "on":
                randomAnimation = Thread(target=colorAnimation, args=("Random",))
                serverThreads.append(randomAnimation)
                LEDsThreads.append(randomAnimation)
                # NOTE(review): the log message says "Cycle" but this is
                # the random-colors mode — copy/paste slip in the text.
                print(
                    "Cycle animation mode thread has started!.."
                )  # for testing purposes
                randomAnimation.start()
            return render_template("app.html")
        elif stripType == "single" and LEDStatus == "on":
            # Light one pixel with the RGB value from the JSON payload.
            data = request.get_json()
            print(data)  # for testing purposes
            index = int(data["index"])
            RGB = data["RGB"]
            print(f"type of {index} is {type(index)}")  # for testing purposes
            newLed.ledIndex(index, RGB["R"], RGB["G"], RGB["B"])
            return render_template("app.html")
        elif stripType == "single" and LEDStatus == "off":
            # Turn one pixel off.
            data = request.get_json()
            index = int(data["index"])
            newLed.ledIndex(index, 0, 0, 0)
            return render_template("app.html")
        else:
            # Unknown combination: stop the animation thread(s).
            killThread(LEDsThreads)
            return "invalid request"
    # Autonomous-mode route: starts/stops the self-driving sensor workers.
    @app.route("/sensor/<sensorType>/<modeStatus>")
    def sensors(sensorType, modeStatus):
        """Start (or stop) an autonomous sensor mode on a worker thread.

        *sensorType*: "ultraSonic", "lineTracking", "lightTracking" or
        "faceTracking1"; *modeStatus* "start" stops the previous mode
        first.  An unrecognised sensorType falls through to the final
        branch, which stops the sensor workers.
        """
        if modeStatus == "start":
            # Stop whatever mode was running, and make sure the wheels
            # are not left spinning by the killed thread.
            killThread(sensorsThreads)
            wheelMotor.setMotorModel(0, 0, 0, 0)
        if sensorType == "ultraSonic":
            ultrasonicModeThread = Thread(target=newUltraSonic.run)
            sensorsThreads.append(ultrasonicModeThread)
            serverThreads.append(ultrasonicModeThread)
            print("ultrasonic mode thread has started!...")  # for testing purposes
            ultrasonicModeThread.start()
            return render_template("app.html")
        elif sensorType == "lineTracking":
            lineTrackingModeThread = Thread(target=newLineTracking.run)
            sensorsThreads.append(lineTrackingModeThread)
            serverThreads.append(lineTrackingModeThread)
            print(
                "line tracking mode thread has started!..."
            )  # for testing purposes
            lineTrackingModeThread.start()
            return render_template("app.html")
        elif sensorType == "lightTracking":
            lightTrackingModeThread = Thread(target=lightTracking.run)
            sensorsThreads.append(lightTrackingModeThread)
            serverThreads.append(lightTrackingModeThread)
            lightTrackingModeThread.start()
            return render_template("app.html")
        elif sensorType == "faceTracking1":
            # TODO: pending camera hardware — untested.
            faceTrackingModeThread = Thread(target=VideoStreaming.face_detect.run)
            sensorsThreads.append(faceTrackingModeThread)
            serverThreads.append(faceTrackingModeThread)
            faceTrackingModeThread.start()
            return render_template("app.html")
        else:
            # Stop request (or unknown type): kill the sensor workers.
            killThread(sensorsThreads)
            return render_template("app.html")
# if user press on close button and confirm that they sure
# close server
@app.route("/server/disconnect")
def shutServerDown():
destroy(Error=None, shutDown=True)
# this return is not getting returned but flask server keeps giving me an error
return "System went down "
    if __name__ == "__main__":
        # threaded=True lets Flask serve the video stream and the
        # control routes concurrently.
        app.run(debug=True, host="0.0.0.0", port=5500, threaded=True)
except Exception as errorException:
    print("an error occurred\n", errorException)  # for testing purposes
    # Audible alarm so the user notices the failure.
    buzzer(
        status=True, anErrorOccur=True
    )
    destroy(errorException)  # park all hardware and e-mail the error
except KeyboardInterrupt:
    # Ctrl+C during local runs: clean shutdown with the default e-mail.
    print("Keyboard Interrupt")  # for testing purposes
    destroy(None)
finally:
    # NOTE(review): destroy() exits via SystemExit, which still runs
    # this finally block — destroy() can therefore execute twice and
    # send the shutdown e-mail twice; confirm whether that is intended.
    print("finally")  # for testing purposes
    destroy(None)
|
John11Dark/SecurityGuard
|
Assets/SVG/Smart_Car_Server/app.py
|
app.py
|
py
| 27,452
|
python
|
en
|
code
| 0
|
github-code
|
6
|
73751378748
|
import tkinter.ttk as ttk
from tkinter import *
import time

# Basic window for the progress-bar demo.
root = Tk()
root.title("my GUI")
root.geometry("640x480")
# Earlier experiments with self-driven progress bars, kept for reference:
# progressbar = ttk.Progressbar(root, maximum= 100, mode="indeterminate")
# progressbar = ttk.Progressbar(root, maximum= 100, mode="determinate")
# progressbar.start(10)  # advance every 10 ms
# progressbar.pack()

# Progress is driven through a Tk variable; percentages may be
# fractional, hence DoubleVar rather than IntVar.
p_var2 = DoubleVar()
progressbar2 = ttk.Progressbar(root, maximum=100, length=150, variable=p_var2)
progressbar2.pack()
def btncmd2():
    """Animate the progress bar from 0 to 100 %."""
    for step in range(101):
        time.sleep(0.01)  # small delay so the motion is visible
        p_var2.set(step)
        progressbar2.update()  # force a redraw inside the loop
# Button that kicks off the fill animation above.
btn2 = Button(root, text="Start", command = btncmd2)
btn2.pack()

# Stop-button experiment for the self-driven bar, kept for reference:
# def btncmd():
#     progressbar.stop()
# btn = Button(root, text="Stop", command = btncmd)
# btn.pack()

root.mainloop()
|
Dohwee-Kim/image_merge_tool
|
gui_basic/reference_pys/9_progressbar.py
|
9_progressbar.py
|
py
| 849
|
python
|
en
|
code
| 0
|
github-code
|
6
|
71861354429
|
from collections import Counter
from datetime import datetime
# 示例数据
# 10折 1200 (0/1) 自己
# 10折 1200 VDMzZFF1T0hKdTRjaEJRMkV0N2xiZz09 (0/3) 舞***影(15***33)
def extract_discount(share_str: str) -> int:
    """Parse the discount tier (e.g. "10折 ..." -> 10) from a share record."""
    discount_field = share_str.split(" ")[0]
    return int(discount_field[:-1])  # drop the trailing "折" character
def extract_price(share_str: str) -> int:
    """Parse the price (second space-separated field) from a share record."""
    fields = share_str.split(" ")
    return int(fields[1])
def extract_suin(share_str: str) -> str:
    """Return the share uin token (third space-separated field)."""
    fields = share_str.split(" ")
    return fields[2]
def extract_remaining_times(share_str: str) -> int:
    """Parse the leading number of the "(a/b)" counter field, e.g. "(0/3)" -> 0."""
    counter_field = share_str.split(" ")[3]  # looks like "(0/3)"
    leading = counter_field[1:-1].split("/")[0]
    return int(leading)
# Clean the raw export and de-duplicate by uin, keeping (per uin) the
# record with the larger counter value.
with open(".cached/my_home.csv", encoding="utf-8") as f:
    suin_to_share_str: dict[str, str] = {}
    f.readline()  # skip the header row
    for line in f:
        line = line.strip()
        if line == "":
            continue
        if line.endswith("自己"):
            # Skip the row describing the current account itself.
            continue
        suin = extract_suin(line)
        if suin in suin_to_share_str:
            last_info = suin_to_share_str[suin]
            if extract_remaining_times(line) <= extract_remaining_times(last_info):
                # The stored record is at least as fresh; keep it.
                continue
        suin_to_share_str[suin] = line

# Sort the surviving records by price (ascending).
share_str_list = []
for s in suin_to_share_str.values():
    share_str_list.append(s)
share_str_list.sort(key=lambda s: extract_price(s))

# Count how many records exist per discount tier.
discount_to_count: Counter = Counter()
for s in reversed(share_str_list):
    discount = extract_discount(s)
    discount_to_count[discount] += 1

# Export: statistics header first, then the cleaned records.
with open(".cached/my_home_processed.csv", "w", encoding="utf-8") as f:
    f.write(f"{datetime.now()}\n")
    f.write(f"总计: {len(share_str_list)}\n")
    for discount in sorted(discount_to_count.keys()):
        count = discount_to_count[discount]
        f.write(f"{discount:2d} 折: {count}\n")
    f.write("-----\n")
    for share_str in share_str_list:
        f.write(share_str + "\n")
|
fzls/djc_helper
|
process_my_home.py
|
process_my_home.py
|
py
| 2,004
|
python
|
en
|
code
| 319
|
github-code
|
6
|
19124946617
|
from crash.stacktrace import StackFrame
from crash.suspect import Suspect
from crash.scorers import aggregators
from crash.scorers.min_distance import MinDistance
from crash.scorers.test.scorer_test_suite import ScorerTestSuite
from crash.scorers.top_frame_index import TopFrameIndex
class AggregatorsTest(ScorerTestSuite):
  """Tests for the aggregators in crash.scorers.aggregators."""

  def testMultiplier(self):
    """Multiplier multiplies scores; calling the instance delegates to
    Aggregate, and an all-None input aggregates to None."""
    aggregator = aggregators.Multiplier()
    self.assertEqual(aggregator.Aggregate([1, 0.5, 0.2]), 0.1)
    self.assertEqual(aggregator([None, None]), None)
    self.assertEqual(aggregator([1, 0.5, 0.2]), 0.1)

  def testIdentityAggregator(self):
    """IdentityAggregator returns its input list unchanged."""
    aggregator = aggregators.IdentityAggregator()
    self.assertEqual(aggregator.Aggregate([1, 0.5, 0.2]), [1, 0.5, 0.2])
    self.assertEqual(aggregator([1, 0.5, 0.2]), [1, 0.5, 0.2])

  def testChangedFilesAggregator(self):
    """ChangedFilesAggregator merges per-scorer file infos by file,
    dropping None 'info' values and joining the rest with newlines."""
    aggregator = aggregators.ChangedFilesAggregator()
    file_info_list = [
        [
            {'file': 'f1.cc', 'blame_url': 'https://repo_url',
             'info': 'scorer 1'},
            {'file': 'f2.cc', 'blame_url': 'https://repo_url',
             'info': 'scorer 1'},
        ],
        [
            {'file': 'f1.cc', 'blame_url': 'https://repo_url',
             'info': None},
            {'file': 'f2.cc', 'blame_url': 'https://repo_url',
             'info': 'scorer 2'},
        ],
    ]
    self.assertEqual(aggregator(file_info_list),
                     [{'file': 'f1.cc', 'blame_url': 'https://repo_url',
                       'info': 'scorer 1'},
                      {'file': 'f2.cc', 'blame_url': 'https://repo_url',
                       'info': 'scorer 1\nscorer 2'}])
|
mithro/chromium-infra
|
appengine/findit/crash/scorers/test/aggregators_test.py
|
aggregators_test.py
|
py
| 1,634
|
python
|
en
|
code
| 0
|
github-code
|
6
|
26825006942
|
from __future__ import annotations
import pickle # nosec
import struct
from typing import Final, Optional
from ..packet import Packet
from ..sign import Signatures
__all__ = ['UdpPack']
_prefix = struct.Struct('!BBI')
class UdpPack:
    """Packs and unpacks SWIM protocol :class:`~swimprotocol.packet.Packet`
    objects from raw UDP packets or TCP connections. The :mod:`pickle` module
    is used for serialization, so :class:`~swimprotocol.sign.Signatures` is
    used to sign the payloads.

    Args:
        signatures: Generates and verifies cluster packet signatures.
        pickle_protocol: The :mod:`pickle` protocol version number.
        prefix_xor: A byte string the same length as the packet prefix
            (6 bytes for the ``!BBI`` layout), XOR'ed with the prefix as
            a sanity check to detect malformed or incomplete UDP packets.
    """

    def __init__(self, signatures: Signatures, *,
                 pickle_protocol: int = pickle.HIGHEST_PROTOCOL,
                 prefix_xor: bytes = b'SWIM?!') -> None:
        super().__init__()
        # The XOR pad must cover the prefix exactly, byte for byte.
        if len(prefix_xor) != _prefix.size:
            raise ValueError(f'{prefix_xor!r} must be {_prefix.size} bytes')
        self.signatures: Final = signatures
        self.pickle_protocol: Final = pickle_protocol
        self.prefix_xor: Final = prefix_xor

    def _xor_prefix(self, prefix: bytes) -> bytes:
        # XOR each prefix byte with the pad; `strict=True` guards against
        # length mismatches.
        zipped = zip(prefix, self.prefix_xor, strict=True)
        return bytes([left ^ right for left, right in zipped])

    def pack(self, packet: Packet) -> bytes:
        """Uses :mod:`pickle` to serialize *packet*, generates a digital
        signature of the pickled data, and returns a byte-string that can be
        sent as a raw UDP packet.

        The resulting byte-string starts with a 6-byte :mod:`struct` prefix
        (XOR'ed with *prefix_xor*) with the `struct format
        <https://docs.python.org/3/library/struct.html#format-strings>`_
        ``!BBI``. The first byte is the length of the salt, the second byte is
        the length of the signature, and the final four bytes are the length
        of the pickled payload. After the prefix, the salt, digest, and
        pickled payload byte-strings are concatenated.

        Args:
            packet: The SWIM protocol packet to serialize.
        """
        pickled = pickle.dumps(packet, self.pickle_protocol)
        salt, digest = self.signatures.sign(pickled)
        # Compute the offsets of each section in the output buffer.
        salt_start = _prefix.size
        digest_start = salt_start + len(salt)
        data_start = digest_start + len(digest)
        prefix = _prefix.pack(len(salt), len(digest), len(pickled))
        packed = bytearray(data_start + len(pickled))
        packed[0:salt_start] = self._xor_prefix(prefix)
        packed[salt_start:digest_start] = salt
        packed[digest_start:data_start] = digest
        packed[data_start:] = pickled
        return packed

    def unpack(self, data: bytes) -> Optional[Packet]:
        """Deserializes a byte-string that was created using :meth:`.pack` into
        a SWIM protocol packet. If any assumptions about the serialized data
        are not met, including an invalid signature, ``None`` is returned to
        indicate that *data* was malformed or incomplete.

        Args:
            data: The serialized byte-string of the SWIM protocol packet.
        """
        data_view = memoryview(data)
        salt_start = _prefix.size
        prefix = self._xor_prefix(data_view[0:salt_start])
        try:
            salt_len, digest_len, data_len = _prefix.unpack(prefix)
        except struct.error:
            # Prefix did not decode: malformed or truncated datagram.
            return None
        digest_start = salt_start + salt_len
        data_start = digest_start + digest_len
        data_end = data_start + data_len
        salt = data_view[salt_start:digest_start]
        digest = data_view[digest_start:data_start]
        pickled = data_view[data_start:data_end]
        signatures = self.signatures
        # Reject truncated payloads or digests of the wrong size.
        if len(digest) != signatures.digest_size or len(pickled) != data_len:
            return None
        if signatures.verify(pickled, (salt, digest)):
            # Safe to unpickle only after the signature check passed.
            packet = pickle.loads(pickled)  # noqa: S301
            assert isinstance(packet, Packet)
            return packet
        else:
            return None
|
icgood/swim-protocol
|
swimprotocol/udp/pack.py
|
pack.py
|
py
| 4,184
|
python
|
en
|
code
| 6
|
github-code
|
6
|
36546618587
|
import sys, getopt
import collections
def main(argv):
    """Solve the seven-segment display puzzle for the file given via -i."""
    inputFile = ''
    try:
        opts, args = getopt.getopt(argv, 'hi:')
    except getopt.GetoptError:
        print('test.py -i <inputfile>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('test.py -i <inputfile>')
            sys.exit()
        elif opt == '-i':
            inputFile = arg
    displaySignals = []
    displayVals = []
    # Each input line is "<signal patterns> | <output digit patterns>".
    with open(inputFile, "r") as fileIn:
        for line in fileIn:
            signals, values = line.split(" | ")
            displaySignals.append(signals.split())
            displayVals.append(values.split())
    # Part 1: count the output patterns per segment length across all
    # displays (lengths 2/3/4/7 correspond to digits 1, 7, 4, 8).
    valLengthSort = lenSort(displayVals)
    for key in valLengthSort:
        print(key, ":", len(valLengthSort[key]))
    # Part 2: per display, deduce the pattern -> digit mapping.
    totalVals = []
    for signals, values in zip(displaySignals, displayVals):
        # Bucket this display's patterns by segment count.
        signalLengthSort = {}
        for signal in signals:
            if not len(signal) in signalLengthSort:
                signalLengthSort[len(signal)]=[]
            signalLengthSort[len(signal)].append(signal)
        # Lengths 2, 3 and 4 identify digits 1, 7 and 4 uniquely;
        # digit 8 always uses all seven segments.
        infoKeys = [2, 3, 4]
        digitMap = {8: "abcdefg"}
        for info in infoKeys:
            if info in signalLengthSort:
                if info == 2:
                    digitMap[1] = signalLengthSort[info][0]
                if info == 3:
                    digitMap[7] = signalLengthSort[info][0]
                if info == 4:
                    digitMap[4] = signalLengthSort[info][0]
        # Length-5/6 patterns are told apart by how many segments they
        # share with the known 1, 4 and 7; length + overlap count is a
        # unique fingerprint per digit (values 10-15 below).
        deductKeys = [5,6]
        for deduct in deductKeys:
            for signal in signalLengthSort[deduct]:
                overlap147 = 0
                for char in list(signal):
                    if char in digitMap[1]: overlap147 += 1
                    if char in digitMap[4]: overlap147 += 1
                    if char in digitMap[7]: overlap147 += 1
                overlap147 += deduct
                if not 0 in digitMap and overlap147 == 14:
                    digitMap[0] = signal
                if not 2 in digitMap and overlap147 == 10:
                    digitMap[2] = signal
                if not 3 in digitMap and overlap147 == 13:
                    digitMap[3] = signal
                if not 5 in digitMap and overlap147 == 11:
                    digitMap[5] = signal
                if not 6 in digitMap and overlap147 == 12:
                    digitMap[6] = signal
                if not 9 in digitMap and overlap147 == 15:
                    digitMap[9] = signal
        # Decode the output digits: segment order varies per pattern,
        # so compare as character multisets.
        digits = []
        for value in values:
            for key in digitMap:
                if collections.Counter(value) == collections.Counter(digitMap[key]):
                    digits.append(str(key))
        totalVals.append(int(("".join(digits))))
    print(totalVals)
    print(sum(totalVals))
def lenSort(outVals):
    """Group strings from a list of lists by their length.

    Args:
        outVals: iterable of iterables of strings.

    Returns:
        dict: length -> list of the strings with that length, in
        encounter order.
    """
    sortedLengths = {}
    for entry in outVals:
        for val in entry:
            sortedLengths.setdefault(len(val), []).append(val)
    return sortedLengths
def decompose(sorted):
    """Count character occurrences per length bucket.

    Args:
        sorted: dict mapping length -> list of strings (as from lenSort).

    Returns:
        dict: length -> {character: occurrence count across that bucket}.
    """
    decomposition = {}
    for key, entries in sorted.items():
        counts = {}
        for entry in entries:
            for char in entry:
                counts[char] = counts.get(char, 0) + 1
        decomposition[key] = counts
    return decomposition
if __name__ == "__main__":
    # Pass only the real arguments (argv[0] is the script name).
    main(sys.argv[1:])
|
Cranzai/AdventofCode
|
2021/day08/python/day8.py
|
day8.py
|
py
| 3,788
|
python
|
en
|
code
| 0
|
github-code
|
6
|
20105435432
|
import ConfigParser, logging, datetime, os, json
from flask import Flask, render_template, request
import mediacloud
# NOTE(review): CONFIG_FILE is defined but the literal is reused below.
CONFIG_FILE = 'settings.config'
basedir = os.path.dirname(os.path.realpath(__file__))

# load the settings file (Python 2 ConfigParser API)
config = ConfigParser.ConfigParser()
config.read(os.path.join(basedir, 'settings.config'))

# set up logging
logging.basicConfig(level=logging.DEBUG)
logging.info("Starting the MediaCloud example Flask app!")

# create a MediaCloud API client with the configured key
mc = mediacloud.api.MediaCloud( config.get('mediacloud','api_key') )

app = Flask(__name__)
@app.route("/")
def home():
    """Render the landing page with the keyword/date search form."""
    page = render_template("search-form.html")
    return page
@app.route("/search",methods=['POST'])
def search_results():
    """Run a MediaCloud sentence-count query and render the results.

    Reads keywords and a start/end date (YYYY-MM-DD) from the posted
    form, asks MediaCloud for a date-split sentence count restricted to
    media tag 9139487, and renders search-results.html with the total
    count plus a JSON mapping of date -> count.
    """
    keywords = request.form['keywords']
    start_date = request.form['start_date']
    end_date = request.form['end_date']
    # now = datetime.datetime.now()
    results = mc.sentenceCount(keywords,
                solr_filter=[mc.publish_date_query(datetime.datetime.strptime(start_date, "%Y-%m-%d"),
                    datetime.datetime.strptime(end_date, "%Y-%m-%d")),
                    'tags_id_media:9139487'],
                split=True,
                split_start_date=start_date,
                split_end_date=end_date)
    print(json.dumps(results['split'], indent=4, separators=(',', ': ')))
    # Keep only keys longer than 5 bytes — presumably filtering the
    # date entries from shorter bookkeeping keys; verify against the
    # MediaCloud split-result schema.
    clean_data = {}
    for key in results['split']:
        if len(key.encode('utf-8')) > 5:
            clean_data[key.encode('utf-8')] = results['split'][key]
    return render_template("search-results.html",
        keywords=keywords,
        sentenceCount=results['count'],
        weeklyResults=json.dumps(clean_data))
if __name__ == "__main__":
    # Debug server for local development only.
    app.debug = True
    app.run()
|
freeeal/MAS.500
|
hw3/mcserver.py
|
mcserver.py
|
py
| 2,145
|
python
|
en
|
code
| 0
|
github-code
|
6
|
41675609350
|
# Two Sum: return the indices of the two numbers that add up to target.
nums = [2, 7, 11, 15]
target = 9
# Brute force: check every index pair.
def way1(nums: list[int], target: int) -> tuple[int]:
    """O(n^2) two-sum: return the first index pair summing to target."""
    size = len(nums)
    for left in range(size):
        for right in range(left + 1, size):
            if nums[left] + nums[right] == target:
                return left, right
# Linear scan using `in` on the remaining suffix of the list.
def way2(nums: list[int], target: int) -> tuple[int]:
    """Two-sum via membership test on the slice after each element."""
    for idx, value in enumerate(nums):
        needed = target - value
        rest = nums[idx + 1 :]
        if needed in rest:
            return nums.index(value), rest.index(needed) + (idx + 1)
# Dictionary lookup of (target - first number); optimizes way2.
def way3(nums: list[int], target: int) -> tuple[int]:
    """Two-sum using a value -> index map built in a first pass."""
    # Map each value to its (last) index.
    index_of = {}
    for idx, value in enumerate(nums):
        index_of[value] = idx
    # Look up the complement of each value, rejecting self-pairing.
    for idx, value in enumerate(nums):
        complement = target - value
        if complement in index_of and idx != index_of[complement]:
            return nums.index(value), index_of[complement]
# Single-pass dictionary: similar performance to way3, tighter code.
def way4(nums: list[int], target: int) -> tuple[int]:
    """One-pass two-sum: look up target - value before inserting value."""
    seen = {}
    for idx, value in enumerate(nums):
        complement = target - value
        if complement in seen:
            return seen[complement], idx
        seen[value] = idx
# Two pointers on the sorted array.  As the original author noted, this
# needs a sorted array, so it cannot answer the original-index version
# of the problem; it also sorts `nums` in place.
def way5(nums: list[int], target: int) -> tuple[int]:
    """Two-pointer two-sum; returns positions in the sorted order."""
    nums.sort()
    low, high = 0, len(nums) - 1
    while low != high:
        pair_sum = nums[low] + nums[high]
        if pair_sum < target:
            # Too small: advance the left pointer rightwards.
            low += 1
        elif pair_sum > target:
            # Too big: move the right pointer leftwards.
            high -= 1
        else:
            return low, high
# Quick smoke test of the two-pointer variant.
print(way5(nums, target))
|
jisupark123/Python-Coding-Test
|
study/두 수의 합.py
|
두 수의 합.py
|
py
| 2,101
|
python
|
ko
|
code
| 1
|
github-code
|
6
|
29827009348
|
import string
from os.path import exists
import pandas as pd
from datetime import datetime
from random import shuffle
import numpy as np
# Load the persistent word list if present, otherwise start an empty table.
# Columns: word, meaning, date added, and per-word correct/incorrect counters.
if exists('D:\GRE\my word list\words.csv'):
    df = pd.read_csv('D:\GRE\my word list\words.csv')
    wordcount = df.shape[0]
else:
    df = pd.DataFrame(columns = ['word', 'meaning', 'date', 'times_correct', 'times_incorrect'])
    wordcount = 0
print("*****WELCOME*****")
def get_game():
    """Run one quiz session: multiple-choice meaning questions until the
    player loses 3 lives, then persist per-word stats and the high-score
    table.

    Reads/writes the CSV files under D:\GRE\my word list.  Requires at
    least 10 saved words so that four distractor meanings can be drawn.

    BUGFIXES vs the original:
    * the four distractor meanings were drawn with replacement and could
      include the answer's own row, so the correct meaning could appear
      more than once among the five options;
    * ``high_df.sort_values(...)`` returns a new frame — the original
      discarded it, so the high-score table was never actually sorted.
    """
    # Load (or initialise) the persistent high-score table.
    if exists('D:\GRE\my word list\high_score.csv'):
        high_df = pd.read_csv('D:\GRE\my word list\high_score.csv')
    else:
        high_df = pd.DataFrame(columns = ['score', 'date', 'time'])
    if exists('D:\GRE\my word list\words.csv'):
        df = pd.read_csv('D:\GRE\my word list\words.csv')
        wordcount = df.shape[0]
        if wordcount < 10:
            print('Sorry, the word list should atleast contain 10 words')
            return
    else:
        print('File doesnt exist!')
        return
    lives = 3
    score = 0
    datentime = datetime.now()
    new_date = datentime.strftime('%d-%m-%Y')
    new_time = datentime.strftime('%H-%M-%S')
    while(lives > 0):
        print('You have %d lives left!'%lives)
        # Pick the question word at random.
        word_index = np.random.randint(low = 0, high = wordcount)
        selected_word = df.iloc[word_index, 0]
        selected_word_meaning = df.iloc[word_index, 1]
        # Sample four DISTINCT wrong rows, excluding the answer's own row,
        # so the correct meaning cannot show up twice among the options.
        wrong_indices = [i for i in range(wordcount) if i != word_index]
        shuffle(wrong_indices)
        random_meanings = [df.iloc[i, 1] for i in wrong_indices[:4]]
        random_meanings.append(selected_word_meaning)
        shuffle(random_meanings)
        print('\n', selected_word)
        for i in range(5):
            print('\n%d) %s'%(i, random_meanings[i]))
        # Re-prompt until a choice in 0..4 is entered.
        while True:
            choice = int(input("\nEnter your choice!"))
            if choice in list(range(5)):
                break
            else:
                print('Wrong choice')
        if random_meanings[choice] == selected_word_meaning:
            score += 1
            print('Correct! Your score now is:', score)
            df.loc[word_index, 'times_correct'] += 1
        else:
            print('Sorry! Wrong answer')
            print('\n%s means %s'%(selected_word, selected_word_meaning))
            lives -= 1
            df.loc[word_index, 'times_incorrect'] += 1
    # Persist the updated per-word counters.
    df.to_csv('D:\GRE\my word list\words.csv', index = False, columns = ['word', 'meaning', 'date', 'times_correct', 'times_incorrect'])
    print('Sorry, you just went out of lives, your highscore for %s at %s was %d'%(new_date, new_time, score))
    high_df.loc[high_df.shape[0]+1, :] = [score, new_date, new_time]
    # sort_values returns a new frame; keep it.
    high_df = high_df.sort_values(by = 'score', ascending = False)
    print(high_df)
    high_df.to_csv('D:\GRE\my word list\high_score.csv', index = False, columns = ['score', 'date', 'time'])
    return
def get_stats():
    """Placeholder for the statistics screen; currently prints a header only."""
    print('Statistics')
def get_meaning(get_word_meaning):
    """Look up *get_word_meaning* in the saved word list (case-insensitive)
    and print its meaning, or a not-found message.

    BUGFIX: the original compared the bound methods themselves
    (``a.lower == b.lower``) without calling them; bound-method equality
    only holds for equal receivers, so the intended case-insensitive
    comparison never happened.
    """
    if exists('D:\GRE\my word list\words.csv'):
        df = pd.read_csv('D:\GRE\my word list\words.csv')
        wordcount = df.shape[0]
    else:
        print('File doesnt exist!')
        return
    found = False
    for i in range(wordcount):
        # Call .lower() on both sides for a true case-insensitive match.
        if df.iloc[i, 0].lower() == get_word_meaning.lower():
            print('\n%s means %s'%(get_word_meaning, df.iloc[i, 1]))
            found = True
            break
    if found == False:
        print('\nSorry, word was not found in your list')
    return
if __name__ == '__main__':
    # Simple text-menu loop; '*' exits after flushing the word list to disk.
    choice = 1
    while(choice != '*'):
        print("1. Add new word\n2. Play word game\n3. Get word meaning\n4. Get Statistics\n*. TO EXIT!")
        print("\nEnter your choice!")
        choice = input()
        if choice == str(1):
            # Append a new row to the in-memory frame and persist immediately.
            print("\nAdding new word!")
            new_word = input('\nPlease enter the word: ')
            word_meaning = input('\nPlease enter the meaning: ')
            date = datetime.now()
            date = date.strftime('%d-%m-%Y')
            corr = 0
            incorr = 0
            print('Number of words in list', wordcount+1)
            df.loc[wordcount, :] = [new_word, word_meaning, date, corr, incorr]
            wordcount += 1
            df.to_csv('D:\GRE\my word list\words.csv', index = False, columns = ['word', 'meaning', 'date', 'times_correct', 'times_incorrect'])
        elif choice == str(2):
            print("\nLets play word game!")
            get_game()
        elif choice == str(3):
            get_word_meaning = input('\nGetting word meaning, so please enter the word: ')
            get_meaning(get_word_meaning)
        elif choice == str(4):
            get_stats()
        elif choice == str('*'):
            # Save and quit.
            df.to_csv('D:\GRE\my word list\words.csv', index = False, columns = ['word', 'meaning', 'date', 'times_correct', 'times_incorrect'])
            break
        else :
            print('\nWrong choice, Please try again')
|
Geeks-Sid/GRE-word-game
|
play.py
|
play.py
|
py
| 5,044
|
python
|
en
|
code
| 0
|
github-code
|
6
|
33806961121
|
class test:
    """Tiny demo class holding two integer attributes."""

    q: int
    w: int

    def __init__(self, q, w) -> None:
        # Copy the constructor arguments onto the instance.
        self.w = w
        self.q = q
# Demonstrate attribute mutation: the instance field w is rebound to 3.
w = test(1,2)
w.w = 3
print(w.w)
class test_2:
    """Second tiny demo class, identical in shape to `test`."""

    q: int
    w: int

    def __init__(self, q, w) -> None:
        # Store both values on the new instance.
        self.w = w
        self.q = q
# Demonstrate plain attribute access on a fresh instance.
e = test_2(1,2)
print(e.w)
|
ndimqa/ElfBarBot
|
test.py
|
test.py
|
py
| 286
|
python
|
en
|
code
| 1
|
github-code
|
6
|
37858257254
|
#using a shallow net(2 layers)
#not tested
import numpy as np
import matplotlib.pyplot as plt
import sklearn
import sklearn.datasets
import sklearn.linear_model
from utils import load_dataset
#loading the dataset using utils
# NOTE(review): load_dataset is project-local; x appears to be laid out as
# (features, examples) and y as (labels, examples) given the uses of
# x.shape[1] below — confirm against utils.
x,y = load_dataset()
shape_x = x.shape
shape_y = y.shape
m = shape_x[1]  # number of training examples (columns)
#first trying to fit the data using sime LR
# Baseline: plain logistic regression.  sklearn expects examples as rows,
# hence the transposes.
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(x.T, y.T)
print(clf.predict(x.T))
#now trying with multi layer nn model
#helper functions
def layer_sizes(x,y):
    """Return (n_input, n_hidden, n_output) for data matrices x and y."""
    input_units = x.shape[0]
    hidden_units = 4  # fixed hidden-layer width
    output_units = y.shape[0]
    return (input_units, hidden_units, output_units)
def init_weights(nx,nh,ny):
    """Initialise a 2-layer net: small random weights, zero biases.

    Seeds NumPy's global RNG for reproducibility; the draw order
    (w1 first, then w2) matters for bit-identical results.
    """
    np.random.seed(9)
    first_weights = 0.01 * np.random.randn(nh, nx)
    first_bias = np.zeros((nh, 1))
    second_weights = 0.01 * np.random.randn(ny, nh)
    second_bias = np.zeros((ny, 1))
    return {"w1": first_weights, "b1": first_bias,
            "w2": second_weights, "b2": second_bias}
#forward propagation
def propagate(x,y,params):
    """One forward pass; returns (cache, cost) for the cross-entropy loss."""
    m = x.shape[1]  # number of examples
    w1, b1 = params["w1"], params["b1"]
    w2, b2 = params["w2"], params["b2"]
    # Hidden layer: tanh activation.
    z1 = np.dot(w1, x) + b1
    a1 = np.tanh(z1)
    # Output layer: sigmoid activation.
    z2 = np.dot(w2, a1) + b2
    a2 = 1 / (1 + np.exp(-z2))
    # Cross-entropy: -(1/m) * sum(y*log(a2) + (1-y)*log(1-a2)).
    logprobs = np.multiply(np.log(a2), y) + np.multiply(np.log(1 - a2), (1 - y))
    cost = np.squeeze(-np.sum(logprobs) / m)
    cache = {"z1": z1, "a1": a1, "z2": z2, "a2": a2}
    return cache, cost
#backprop for optimization
def optimize(params, cache, x,y, alpha = 1.2):
    """One gradient-descent step; returns (params, grads).

    Updates the weight/bias arrays in place (via ``-=``) and also returns
    them in a fresh dict alongside the computed gradients.

    BUGFIX: the original computed ``dz1`` from ``w2`` before ``w2`` was
    read out of ``params``, raising NameError at runtime; parameters are
    now unpacked first.
    """
    m = x.shape[1]
    # Unpack parameters BEFORE they are needed by the gradient formulas.
    w1 = params["w1"]
    b1 = params["b1"]
    w2 = params["w2"]
    b2 = params["b2"]
    a1 = cache["a1"]
    a2 = cache["a2"]
    # Backprop through the sigmoid output layer.
    dz2 = a2-y
    dw2 = np.dot(dz2,a1.T)/m
    db2 = np.sum(dz2)/m
    # Backprop through the tanh hidden layer (tanh' = 1 - a1**2).
    dz1 = np.dot(w2.T, dz2)*(1-np.power(a1,2))
    dw1 = np.dot(dz1,x.T)/m
    db1 = np.sum(dz1)/m
    # Gradient-descent update with learning rate alpha.
    w1 -= alpha*dw1
    b1 -= alpha*db1
    w2 -= alpha*dw2
    b2 -= alpha*db2
    params = {"w1":w1,"b1":b1,"w2":w2,"b2":b2}
    grads = {"dw1":dw1,"db1":db1,"dw2":dw2,"db2":db2}
    return params, grads
#final model
def model(x,y,iterations):
    """Train the 2-layer net for *iterations* gradient steps; return params.

    Prints the cost every 1000 iterations.

    BUGFIX: ``propagate`` returns ``(cache, cost)``; the original unpacked
    them in the wrong order, so ``cache`` held the float cost and
    ``optimize`` crashed indexing it.
    """
    np.random.seed(9)
    nx = layer_sizes(x,y)[0]
    ny = layer_sizes(x,y)[2]
    nh = 4
    params = init_weights(nx,nh,ny)
    for i in range(iterations):
        # Forward pass, then one descent step on the returned cache.
        cache, cost = propagate(x,y, params)
        params,grads = optimize(params, cache,x,y)
        if(i%1000 == 0):
            print(i,cost)
    return params
def predict(params, x):
    """Forward pass returning boolean predictions (sigmoid output > 0.5)."""
    w1, b1 = params["w1"], params["b1"]
    w2, b2 = params["w2"], params["b2"]
    hidden = np.tanh(np.dot(w1, x) + b1)
    output = 1 / (1 + np.exp(-(np.dot(w2, hidden) + b2)))
    return output > 0.5
# Train for 10000 iterations, then report training accuracy:
# (true positives + true negatives) / total examples.
params = model(x,y,10000)
predictions = predict(params, x)
print((np.dot(y,predictions.T)+np.dot(1-y, 1-predictions.T))/y.size)
|
thepavankoushik/Project-Reboot
|
shallow networks/planardata_classify.py
|
planardata_classify.py
|
py
| 2,445
|
python
|
en
|
code
| 0
|
github-code
|
6
|
15430713008
|
class Solution:
    def moveZeroes(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # Stable two-pointer compaction: copy every non-zero element
        # forward to `write`, then fill the tail with zeros.  O(n) time,
        # O(1) extra space — the original pop/append loop was O(n^2)
        # because each list.pop(i) shifts all following elements.
        write = 0
        for value in nums:
            if value != 0:
                nums[write] = value
                write += 1
        for i in range(write, len(nums)):
            nums[i] = 0
|
dipalira/LeetCode
|
Arrays/283_Move_Zeroes.py
|
283_Move_Zeroes.py
|
py
| 465
|
python
|
en
|
code
| 0
|
github-code
|
6
|
72789776189
|
import gc
import itertools as it
import os.path as osp
from typing import List
import warnings
from collections import deque, namedtuple
import numpy as np
import torch
from examples.speech_recognition.data.replabels import unpack_replabels
from fairseq import tasks
from fairseq.utils import apply_to_sample
from omegaconf import open_dict
from fairseq.dataclass.utils import convert_namespace_to_omegaconf
# try:
# from flashlight.lib.text.dictionary import create_word_dict, load_words
# from flashlight.lib.sequence.criterion import CpuViterbiPath, get_data_ptr_as_bytes
# from flashlight.lib.text.decoder import (
# CriterionType,
# LexiconDecoderOptions,
# KenLM,
# LM,
# LMState,
# SmearingMode,
# Trie,
# LexiconDecoder,
# )
# except:
# warnings.warn(
# "flashlight python bindings are required to use this functionality. Please install from https://github.com/facebookresearch/flashlight/tree/master/bindings/python"
# )
# LM = object
# LMState = object
class W2lDecoder(object):
    """Base class for wav2letter-style decoders over CTC/ASG emissions.

    Subclasses must provide ``decode(emissions)``.
    """

    def __init__(self, args, tgt_dict):
        self.tgt_dict = tgt_dict
        self.vocab_size = len(tgt_dict)
        self.nbest = args.nbest
        # CTC blank: use the dedicated symbol if present, otherwise BOS.
        if "<ctc_blank>" in tgt_dict.indices:
            self.blank = tgt_dict.index("<ctc_blank>")
        else:
            self.blank = tgt_dict.bos()
        # Silence / word-separator token, with fallbacks down to EOS.
        if "<sep>" in tgt_dict.indices:
            self.silence = tgt_dict.index("<sep>")
        elif "|" in tgt_dict.indices:
            self.silence = tgt_dict.index("|")
        else:
            self.silence = tgt_dict.eos()
        self.asg_transitions = None

    def generate(self, models, sample, **unused):
        """Generate a batch of inferences."""
        # model.forward normally channels prev_output_tokens into the
        # decoder separately, but SequenceGenerator calls model.encoder
        # directly — so drop that key before forwarding the batch.
        net_input = sample["net_input"]
        encoder_input = {
            key: value for key, value in net_input.items()
            if key != "prev_output_tokens"
        }
        emissions = self.get_emissions(models, encoder_input)
        return self.decode(emissions)

    def get_emissions(self, models, encoder_input):
        """Run encoder and normalize emissions."""
        model = models[0]
        encoder_out = model(**encoder_input)
        if hasattr(model, "get_logits"):
            # Raw logits — no normalisation needed.
            emissions = model.get_logits(encoder_out)
        else:
            emissions = model.get_normalized_probs(encoder_out, log_probs=True)
        # Swap the first two dims and hand back a contiguous float CPU
        # tensor (presumably time-major for the downstream decoders —
        # confirm against the flashlight bindings).
        return emissions.transpose(0, 1).float().cpu().contiguous()

    def get_tokens(self, idxs):
        """Normalize tokens by handling CTC blank, ASG replabels, etc."""
        # Collapse consecutive repeats, then drop blanks.
        deduped = (group[0] for group in it.groupby(idxs))
        kept = (token for token in deduped if token != self.blank)
        return torch.LongTensor(list(kept))
class W2lViterbiDecoder(W2lDecoder):
    # Viterbi (greedy-path) decoder specialisation; adds no extra state
    # of its own in this constructor.
    def __init__(self, args, tgt_dict):
        super().__init__(args, tgt_dict)
|
lovemefan/Wav2vec2-webserver
|
fairseq_lib/examples/speech_recognition/w2l_decoder.py
|
w2l_decoder.py
|
py
| 2,977
|
python
|
en
|
code
| 1
|
github-code
|
6
|
7122911694
|
from django.conf.urls import url
from one import views
from one.views import CreateStudent

# URL routes for the `one` app.
# NOTE(review): this uses the pre-Django-2.0 regex `url()` API; patterns
# are anchored at the start only (no trailing `$`).
urlpatterns = [
    url(r'^index/', views.index),
    # Named routes so templates/redirects can reverse them by name.
    url(r'^print/',views.PrintTable,name='print'),
    # Captures a numeric student id as a positional argument.
    url(r'^studentname/(\d+)/',views.stuname,name='studentname'),
    url(r'^detail/',views.detail,name='detail'),
    url(r'^CreateStudent/', views.CreateStudent,name='CreateStudent'),
]
|
lao1a0/Django-1
|
one/urls.py
|
urls.py
|
py
| 385
|
python
|
en
|
code
| 0
|
github-code
|
6
|
30793136225
|
import tkinter
# Minimal window demonstrating Label and Button widgets.
window = tkinter.Tk()
window.title("Buttons in tkinter")
window.minsize(width=500,height=300)
# Label
my_label = tkinter.Label(text="I am a Label", font=("Arial", 24, "bold"))
# places the label on to the screen and automatically centers it
my_label.pack()
# configure or updating the properties of particular component
my_label.config(text="New text")
# Button
# def button_clicked():
#     print("I got clicked")
def button_clicked():
    # Button callback: item assignment is equivalent to
    # my_label.config(text=...).
    my_label["text"] = "Button got clicked"
button = tkinter.Button(text="Click Me", command=button_clicked) # 'command' takes the name of a function not calling of the function, so '()' are not required
button.pack()
# keeps the window on the screen
window.mainloop()
|
shrijanlakhey/100-days-of-Python
|
027/buttons_in_tkinter.py
|
buttons_in_tkinter.py
|
py
| 735
|
python
|
en
|
code
| 0
|
github-code
|
6
|
27314665022
|
''' Create a program where the user can type seven numeric values
and register them in a single list that keeps even and odd values
separated. At the end, show the even and odd values in ascending
order. '''
# lista[0] collects even values, lista[1] collects odd values.
# NOTE(review): the exercise statement says seven values, but the code
# asks the user how many values to enter.
lista = [[], []]
num = int(input('Quantos valores deseja digitar? '))
print('-=-'*30)
for i in range(num):
    valor = int(input(f'Digite o {i+1}º valor: '))
    if valor % 2 == 0:
        lista[0].append(valor)
    if valor % 2 == 1:
        lista[1].append(valor)
# Sort each bucket in place before reporting.
lista[0].sort()
lista[1].sort()
print(f'Valores pares em ordem crescente: {lista[0]}')
print(f'Valores impares em ordem crescente: {lista[1]}')
|
alelimasilva/Exercicios-em-Python
|
ex036.py
|
ex036.py
|
py
| 690
|
python
|
pt
|
code
| 0
|
github-code
|
6
|
38930719861
|
#!/usr/bin/python3
"""
Python script that takes GitHub credentials
(username and password) and uses the GitHub API to display id
"""
import requests
import sys
if __name__ == '__main__':
    # Expect exactly two CLI arguments: a username and a personal token.
    if len(sys.argv) != 3:
        print("Usage: ./10-my_github.py <username> <token>")
        sys.exit(1)
    username, token = sys.argv[1], sys.argv[2]
    # Make the request with Basic Authentication using your token
    response = requests.get('https://api.github.com/user', auth=(username, token))
    if response.status_code == 200:
        try:
            user_data = response.json()
            user_id = user_data.get('id')
            if user_id is not None:
                print(user_id)
            else:
                print("None")
        # response.json() raises a ValueError subclass on malformed JSON.
        except ValueError:
            print("Invalid JSON response")
    else:
        # Authentication failure or any other non-200 status.
        print("None")
|
Bellamalwa/alx-higher_level_programming
|
0x11-python-network_1/10-my_github.py
|
10-my_github.py
|
py
| 851
|
python
|
en
|
code
| 0
|
github-code
|
6
|
41464656819
|
#! python3
# -*- coding: utf-8 -*-
import datetime
# Timestamp taken as early as possible to benchmark module import time.
start_bench_no_bench = datetime.datetime.now()
__version__ = "8.2.8-alpha"
import os
import sys
import copy
import platform
import pkgutil
# When True, Internal.mine_import benchmarks every import it performs.
FRACKING_INPUT_DEBUG = False
# todo version diff
# todo export script as json?
# todo compare jsons?
# todo save changes as commit message?
# this shit for pycharm:
# Placeholders for names that are imported dynamically further down (via
# Internal.mine_import writing into globals()), so static analysis does
# not flag them as undefined.
colorama = None; cprint = None; copypaste = None; pyautogui = None; Tk = None; Button = None; mainloop = None; paramiko = None
def get_Bench(start=False): # return class with those functions:
    """Return a fresh Bench class for simple wall-clock benchmarking.

    NOTE(review): the `start` argument is accepted but never used here.
    """
    class Bench(object): # dir ignore
        # Start time defaults to class-creation time; call start() to reset.
        time_start = datetime.datetime.now()
        time_end = None
        quiet = False # d argument for disable print to terminal bnl1
        prefix = "Bench runned in" # d what have been finished, will print if bnl1
        # d "quiet" variable of class is False
        @classmethod
        def start(cls): # set time of begin to now
            cls.time_start = datetime.datetime.now()
        @classmethod
        def get(cls): # dir ignore
            # Elapsed seconds (float) since time_start.
            # NOTE(review): delta.days is ignored; use total_seconds() for
            # runs longer than a day.
            cls.time_end = datetime.datetime.now()
            delta = cls.time_end - cls.time_start
            delta_combined = delta.seconds + delta.microseconds / 1E6
            return delta_combined
        @classmethod
        def end(cls): # return delta between start and end
            delta_combined = cls.get()
            if not cls.quiet:
                # colorama/cprint are module-level placeholders that may
                # still be None; the except branches fall back to print().
                try:
                    colorama.init()
                    cprint(cls.prefix + " " + str(round(delta_combined, 2)) + " seconds", "grey", "on_white")
                except TypeError:
                    print(cls.prefix + " " + str(round(delta_combined, 2)) + " seconds")
                except AttributeError:
                    print(cls.prefix + " " + str(round(delta_combined, 2)) + " seconds")
            return delta_combined
    return Bench
class OS:
    """Static snapshot of platform facts, computed once at import time.

    BUGFIX: ``platform.python_implementation`` was compared without being
    called, so the check was always False and ``python_implementation``
    was always "cpython"; the function is now invoked.
    """
    is_python3 = sys.version_info >= (3, 0) # d boolean
    python_implementation = None # d string with name of python implementation: "cpython" or "pypy"
    python_version_major = sys.version_info.major # d int of major python version
    python_commandline_version = ""
    if is_python3:
        python_commandline_version = "3" # d string of addable "3" to commandline apps if python is 3rd version
    family = None # d string with family of OS: "nt" or "unix"
    name = None # d string with name of OS: "windows", "linux", or "macos"
    windows_version = None # d only on Windows, integer of major version of Windows
    display = None # d didn't work yet
    cyrrilic_support = None # d boolean variable of cyrrilic output support
    # Map sys.platform onto a simple OS name.
    if sys.platform == "linux" or sys.platform == "linux2":
        name = "linux"
    elif sys.platform == "win32" or sys.platform == "cygwin":
        name = "windows"
        windows_version = sys.getwindowsversion().major
    elif sys.platform == "darwin":
        name = "macos"
    # Call the function (the original compared the function object itself).
    if platform.python_implementation() == "PyPy":
        python_implementation = "pypy"
    else:
        python_implementation = "cpython"
    if name == "windows":
        family = "nt"
    elif name in ["macos", "linux"]:
        family = "unix"
    try: # todo make this work!
        if name == "linux":
            from Xlib.display import Display
            display = True
    except ImportError:
        display = False
        print("Your system haven't display -_-")
    # Probe whether the console encoding can emit Cyrillic characters.
    try:
        cyrline = "йцукенгшщзхъфывапролджэячсмитьбюЙЦУКЕНГШЩЗХЪФЫВАПРОЛДЖЭЯЧСМИТЬБЮ"
        for cyrsybol in cyrline:
            print(cyrsybol*2, end="\r")
        print("  ", end="\r")
        cyrrilic_support = True
    except UnicodeEncodeError as err:
        cyrrilic_support = False
        # print (err)
        print ("Your system doesn't properly work with cyrrilic -_-")
class Internal:
    """Self-management helpers: dynamic import/install, self-listing, reload."""
    @staticmethod
    def mine_import(module_name, objects=None, justdownload=False, az=None): # import
        # d module, if module not found, trying to install it by pip
        # check for pip module
        # Binds the imported module (or names) into this module's globals().
        if FRACKING_INPUT_DEBUG: debug_Bench = get_Bench()
        if FRACKING_INPUT_DEBUG: debug_Bench.start()
        def just_install(module_name):
            # Install via pip's (deprecated) in-process API.
            import pip
            pip.main(['install', module_name])
        modules_list = []
        for item in pkgutil.iter_modules():
            modules_list.append(item[1])
        if "pip" not in modules_list:
            if OS.name == "linux":
                os.system("sudo apt-get install python" + OS.python_commandline_version + "-pip")
        if module_name not in modules_list:
            ###########RARE###########
            # Special-cased packages with extra platform dependencies.
            # NOTE(review): the python-xlib / python3-Xlib branches look
            # inverted (python3 installs the py2 package) — confirm intent.
            if module_name == "pyautogui":
                if OS.name == "linux":
                    if OS.is_python3:
                        os.system("apt-get install python-xlib")
                    else:
                        os.system("apt-get install python3-Xlib")
                if OS.name == "macos":
                    for package in ["python" + OS.python_commandline_version + "-xlib",
                                    "pyobjc-core", "pyobjc"]:
                        just_install(package)
                    if OS.python_implementation == "pypy":
                        Print.debug("Yep, PyPy doesn't support pyobjc")
            if module_name in ["win32api","win32con"]:
                just_install("pypiwin32")
            else:
                ###########RARE###########
                just_install(module_name)
        if not justdownload:
            # NOTE(review): in the az+objects branch, `objects[0]` is the
            # first CHARACTER of the string and importlib is not imported
            # yet in that scope — likely latent bugs.
            if az and objects:
                if len(objects.split(",")) == 1:
                    globals()[az] = importlib.import_module(objects[0], package=module_name)
                print("Internal.mine_import doesn't support both attributes use 'az' and 'objects', so only 'objects' will apply.")
                az = None
            if az:
                import importlib
                globals()[az] = importlib.import_module(module_name)
            elif objects:
                # import importlib # todo better code
                # for object in objects.split(",")
                #     globals()[object] = importlib.import_module(name, package=module_name):
                #### if " as " in object: split and apply properly, i.e. bind the second name and import from the first
                exec("from " + module_name + " import " + objects, globals())
            else:
                import importlib
                globals()[module_name] = importlib.import_module(module_name)
        if FRACKING_INPUT_DEBUG: debug_Bench.prefix = module_name + " " + str(objects)
        if FRACKING_INPUT_DEBUG: debug_Bench.end()
    @staticmethod
    def dir_c(): # d print all functionality of commands8
        # Pretty-print this module's own classes/defs/doc-comments by
        # scanning its source text for markers ("# d ", "bnl", "dir ignore").
        first_func_after_class = 1
        cnt_of_all_def = 0
        cnt_of_commented_def = 0
        for line in Str.nl(File.read(Path.extend(Path.commands8(), "commands8.py"))): # dir ignore
            if "# dir ignore" not in line: # dir ignore
                if "bnl" in line: # dir ignore
                    print(newline*Str.get_integers(line)[-1], end="") # dir ignore
                    line = line.replace("bnl"+str(Str.get_integers(line)[-1]),"")
                if "def " in line: # dir ignore
                    print(newline*first_func_after_class + line) # dir ignore
                    first_func_after_class = 1
                    cnt_of_all_def += 1
                    if " # " in line: cnt_of_commented_def += 1
                elif ("class " in line) and (line[0:4] != "    "): # dir ignore
                    first_func_after_class = 0
                    print(newline + line) # dir ignore
                elif "# d " in line: # dir ignore
                    print(line.replace("# d ", "# ", 1)) # dir ignore
        Print.debug(cnt_of_all_def, cnt_of_commented_def)
    @staticmethod
    def rel(quiet=False): # d reload commands8, if you use it not in REPL, activate quiet argument
        # d require additional line of code after reload if you import not entrie commands8
        # d you need manually add "from commands8 import *" to script/REPL
        # d if you import like "import commands8", additional line of code not needed
        import commands8, importlib
        commands8 = importlib.reload(commands8)
        del commands8
        string = "from commands8 import *" # you need to manually add this string to code :(
        if not quiet:
            print('"'+string+'" copied to clipboard')
        import copypaste
        copypaste.copy(string)
        pass
# Conditional bootstrap of optional third-party helpers, keyed off the
# platform facts gathered in class OS above.
# NOTE(review): "macos:" contains a stray colon, so this comparison is
# always True and never actually excludes macOS — confirm intent.
if OS.display:
    if OS.python_implementation != "pypy":
        if OS.name != "macos:":
            Internal.mine_import("pyautogui", justdownload=True)
Internal.mine_import("paramiko", justdownload=True)
Internal.mine_import("tkinter") # from tkinter import *
# Stdlib modules used throughout the rest of the module.
import json, \
    shutil, \
    time, \
    random, \
    subprocess, \
    datetime, \
    re, \
    ctypes, \
    getpass
if OS.name == "windows":
    Internal.mine_import("win_unicode_console")
    Internal.mine_import("win32api")
    Internal.mine_import("win32con")
Internal.mine_import("termcolor")
Internal.mine_import("colorama")
colorama.init()
colorama.deinit()
Internal.mine_import("termcolor", objects="colored, cprint") # print_green_on_cyan = lambda x: cprint(x, 'green', 'on_cyan')
if OS.name == "windows":
    Internal.mine_import("pyperclip", az="copypaste")
else:
    Internal.mine_import("copypaste")
# Module-level character constants used by the string/console helpers.
newline = '\n' # d string with newline bnl3
ruble = u"\u20bd" # d string with ₽ symbol
backslash = "\ "[:1] # d string with backslash
newline2 = "\r\n" # d string with other newline
class Print():
    """Console printing helpers."""
    @staticmethod
    def debug(*arguments, raw=False): # d just more notable print, only for
        # d debugging
        """Print each argument framed by full-width separator lines.
        With raw=True, print repr(arg) instead of str(arg)."""
        line = "-" * Console.width()
        print("<<<Debug sheet:>>>")
        for arg in arguments:
            print(line, end="")
            if raw:
                print(repr(arg))
            else:
                print(arg)
            print(line)
        print("<<<End of debug sheet>>>")
    @staticmethod
    def rewrite(*arguments, sep=" ", raw=False): # d string, that can be rewritable
        # d note, that you need to rewrite string to remove characters
        """Print with a trailing carriage return so the next rewrite()
        call overwrites the same console line.
        NOTE(review): the `raw` parameter is accepted but unused here."""
        line = " " * Console.width()
        if OS.name == "windows":
            # Presumably avoids wrapping when writing the last column on
            # Windows consoles — confirm.
            line = line[:-1]
        print(line, end="\r")
        print(*arguments, sep=sep, end="\r")
class Str:
    """String helpers: quoting, integer extraction, padding, splitting,
    diffs and console input.

    BUGFIXES (in input_int): bounds were compared against the builtin
    ``int`` type instead of the entered value; violations raised
    ValueError and killed the retry loop; a ``default`` replaced the
    prompt text instead of being appended to it.
    """
    @staticmethod
    def to_quotes(some_string): # d just place input string inside "" quotes
        return '"' + str(some_string) + '"'
    @staticmethod
    def to_quotes_2(some_string): # d place input string inside '' quotes
        return "'" + str(some_string) + "'"
    @staticmethod
    def get_integers(string): # d return list_ of integers from string, !!!floating not supported!!!
        # todo add support for floating numbers, it will be cool!
        """Return the list of integers embedded in *string*.
        A '-' or '—' seen before the digits makes the number negative."""
        integer_found = False
        integers = []
        current_integer = 0
        negative = False
        # The trailing space forces the final pending number to be flushed.
        for symbol in str(string) + " ": # in exception some processing, meh :(
            try:
                if symbol in ['-', '—']:
                    negative = True
                    continue
                int(symbol)
                current_integer = current_integer*10 + int(symbol)
                integer_found = True
            except ValueError:
                # Non-digit: flush the number collected so far, if any.
                if integer_found:
                    if negative:
                        current_integer = -current_integer
                    integers = integers + [current_integer]
                    current_integer = 0
                    integer_found = False
                    negative = False
        return integers
    @staticmethod
    def newlines_to_strings(string, quiet=False): # split long string with line
        # d breaks to separate strings in list_
        """Split *string* on the platform's newline convention; None-safe."""
        if string:
            string = str(string)
            if OS.name == "windows":
                strings = string.split(newline2)
                if len(strings) == 1:
                    strings = strings[0].split(newline)
            elif OS.name in ["macos", "linux"]:
                strings = string.split(newline)
            return strings
        else:
            if not quiet:
                print("None can't be splitted")
    @classmethod
    def nl(cls, string): # alias to newline
        return cls.newlines_to_strings(string=string)
    @staticmethod
    def split_every(string, chars): # split string every
        """Split *string* into chunks of at most *chars* characters."""
        chars = int(chars)
        output_lines = []
        char_exists = "."
        char_can_be_exists = ".?"
        # One mandatory char followed by up to chars-1 optional ones.
        regexp = char_exists + char_can_be_exists*(chars-1)
        for line in re.findall(regexp, str(string)): # todo can I just return this list_?
            output_lines += [line]
        return output_lines
    @staticmethod
    def leftpad(string, leng, ch="0", rightpad=False): # d return string with
        # d added characters to left side. If string longer — return original string
        string = str(string)
        if len(string) >= leng:
            return string
        strOfCh = str(ch) * leng
        string_output = strOfCh[len(string):leng] + string
        if rightpad:
            string_output = string + strOfCh[len(string):leng]
        return string_output
    @classmethod
    def rightpad(cls, string, leng, ch="0"): # return string with added
        # d characters to right side. If string longer — return original string
        return cls.leftpad(string, leng, ch=ch, rightpad=True)
    @staticmethod
    def substring(string, before, after=None, return_after_substring=False): # return
        # d string that between "before", and "after" strings, not including
        # d those. If "return_after_substring", return typle with substring and
        # d part of string after it.
        startfrom = string.find(before)
        if startfrom != -1:
            startfrom = string.find(before) + len(before)
        else:
            # Marker absent: fall back to the start of the string.
            startfrom = 0
        if (after) or (after == ""):
            end_at = string[startfrom:].find(after)
            if end_at != -1:
                end_at = startfrom + string[startfrom:].find(after)
                substring = string[startfrom:end_at]
                after_substring = string[end_at:]
            else:
                substring = string[startfrom:]
                after_substring = ""
        else:
            substring = string[startfrom:]
        if return_after_substring:
            # NOTE(review): after_substring is unbound when after is None
            # and return_after_substring is True — pre-existing behaviour,
            # kept as-is (see the commented-out debug block history).
            return substring, after_substring
        return substring
    @staticmethod
    def diff_simple(string_a, string_b): # d print all symbol differents.
        # d Not all mine code, must rewrite.
        # todo rewrite this shit.
        """Print a character-level diff between the two strings."""
        import difflib
        strings = [(string_a, string_b)] # for furthurer support for unlimited srtings
        for a, b in strings:
            print('{} => {}'.format(a, b))
            for i, s in enumerate(difflib.ndiff(a, b)):
                if s[0] == ' ':
                    continue
                elif s[0] == '-':
                    print(u'Delete "{}" from position {}'.format(s[-1], i))
                elif s[0] == '+':
                    print(u'Add "{}" to position {}'.format(s[-1], i))
            print()
    @staticmethod
    def input_pass(string="Password:"): # d return string from user, securely
        # d inputed by getpass library
        return getpass.getpass(string)
    @staticmethod
    def input_int(message="Input integer: ", minimum=None, maximum=None, default=None, quiet=False):
        # d return integer from user with multible parameters.
        """Prompt until the user enters a valid integer and return it.

        Empty input returns *default* (when given); out-of-range or
        non-numeric entries re-prompt instead of raising.
        """
        if default is not None:
            # Append the default hint instead of replacing the prompt.
            message = message + " (Enter = " + str(default) + ")"
        while True:
            raw = input(message)
            if raw == "":
                if default is not None:
                    output_int = default
                    break
                print("Это не число")
                continue
            try:
                integer = Str.get_integers(raw)[0]
            except IndexError:
                print("Это не число")
                continue
            # Compare the entered VALUE with the bounds (the original
            # compared the builtin `int` type — a TypeError).
            if minimum is not None and integer < minimum:
                print("Число должно быть больше", minimum)
                continue
            if maximum is not None and integer > maximum:
                print("Число должно быть меньше", maximum)
                continue
            output_int = integer
            break
        if not quiet:
            print("Итоговое число:", output_int)
        return output_int
class Console():
    """Terminal helpers: clearing, geometry, a toy color effect, and
    capturing command output."""
    @staticmethod
    def clean(): # wipe terminal output. Not tested on linux
        # todo test on linux
        if OS.name == "windows":
            os.system("cls")
        elif OS.name == "linux":
            # Scroll the old content out of view instead of clearing.
            print(newline * shutil.get_terminal_size().lines)
        elif OS.name == "macos":
            os.system(r"clear && printf '\e[3J'")
    @staticmethod
    def width(): # return width of terminal window in characters
        # Parses the platform's console-geometry command output.
        if OS.name == "windows":
            io = Console.get_output("mode con")
            width_ = Str.get_integers(io)[1]
        elif OS.name in ["linux", "macos"]:
            io = Console.get_output("stty size")
            width_ = Str.get_integers(io)[1]
        return int(width_)
    @staticmethod
    def height(): # return height of terminal window in characters
        if OS.name == "windows":
            modecon = Console.get_output("mode con")
            height = Str.get_integers(modecon)[0]
        elif OS.name in ["linux", "macos"]:
            sttysize = Console.get_output("stty size")
            height = Str.get_integers(sttysize)[0]
            # Cap pathological values reported by some terminals.
            if height > 100:
                height = 100
        return int(height)
    @classmethod
    def blink(cls, width=None, height=None, symbol="#", sleep=0.5):
        # d fastly print to terminal characters with random color. Completely shit.
        # d arguments width and height changing size of terminal, works only in
        # d Windows.
        # Runs until Ctrl-C, then restores colorama state and clears.
        if width is not None and height is not None:
            os.system("mode con cols=" + str(width) + " lines=" + str(height))
        if width is None:
            width = cls.width()
        if height is None:
            height = cls.height()
        colorama.reinit()
        while True:
            colors = ["grey", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]
            highlights = ["on_grey", "on_red", "on_green", "on_yellow", "on_blue", "on_magenta", "on_cyan", "on_white"]
            string = symbol * width
            color = random.choice(colors)
            colors.pop(colors.index(color))
            highlight = random.choice(highlights)
            try: # New version with one long line. Works perfect, as I see.
                string = string * height
                print(termcolor.colored(string, color, highlight))
                time.sleep(sleep)
            except KeyboardInterrupt as err:
                print(termcolor.colored("OK", "white", "on_grey"))
                colorama.deinit()
                cls.clean()
                break
    @staticmethod
    def get_output(command, quiet=True, split_lines=False): # d return output
        # d of executing command. Doesn't output it to terminal in realtime.
        # d can be output after finished if "quiet" argument activated.
        # NOTE(review): the `quiet` parameter is accepted but unused here.
        p = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
        if OS.name == "windows":
            # Legacy Windows console code page.
            output = p.decode("cp866")
        elif OS.family == "unix":
            output = p.decode("utf8")
        if split_lines:
            output = Str.nl(output)
        return output
class Ssh:
    """Minimal SSH helpers built on paramiko (username/password auth only)."""
    @staticmethod
    def get_output(host, username, password, command, safe=False): # return
        # d output from command, runned on SSH server. Support only
        # d username:password autorisation.
        # todo autorisation by key.
        """Run *command* on the remote host and return its stdout as str.

        Raises IOError on any stderr output unless ``safe`` is True.

        BUGFIX: the original ignored the ``command`` argument and always
        executed the hard-coded string "uptime".
        """
        if OS.python_implementation != "pypy":
            Internal.mine_import("paramiko")
        else:
            raise OSError("paramiko doesn't supported by PyPy")
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically add unknown hosts
        ssh.connect(host, username=username, password=password)
        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
        if (ssh_stderr.read() != b'') and not safe:
            raise IOError("ssh_stderr = " + str(ssh_stderr))
        ssh.close()
        return str(ssh_stdout.read(), 'utf8')
    @classmethod
    def get_avg_load_lin(cls, host, username, password, safe=False): # return
        # d list_ of average loads from SSH linux server. Shit, I know
        """Return the three load averages of a remote Linux host (list of str).

        BUGFIX: the command was misspelled "uprime"; it only worked because
        get_output used to ignore its ``command`` argument.
        """
        output = cls.get_output(host=host, username=username, password=password, command="uptime", safe=safe)
        output = Str.substring(output, before="load average: ", after=newline)
        output = output.split(", ")
        return output
    @classmethod
    def get_uptime_lin(cls, host, username, password, safe=False): # return
        # d string with uptime of SSH linux server. As I said before... :(
        """Return the uptime portion of `uptime` output from a remote Linux host."""
        output = cls.get_output(host=host, username=username, password=password, command="uptime", safe=safe)
        output = Str.substring(output, before=" up ", after=", ")
        return output
class Path:
    """Filesystem-path helpers (string-based, pre-pathlib style)."""
    @staticmethod
    def full(path):
        # Absolute, normalised form of *path*.
        return os.path.abspath(path)
    @staticmethod
    def commands8():
        # Directory containing this module's source file.
        return os.path.dirname(os.path.realpath(__file__))
    @staticmethod
    def working():
        # Current working directory.
        return os.getcwd()
    @classmethod
    def extend(cls, *paths): # paths input strings of path1 pieces, return
        # d string with path1, good for OS
        # Joins the pieces left-to-right; the NameError on the first
        # iteration (``path`` not yet bound) is used deliberately to
        # detect and special-case the very first piece.
        for path_ in paths:
            try:
                path = os.path.join(str(path), str(path_))
            except NameError: # first path1 piece is very important
                if (OS.name == "windows") and path_ == backslash: # support for smb windows paths like \\ip_or_pc\dir\
                    path = backslash * 2
                elif (OS.name == "windows") and (len(path_) <= 3):
                    # Drive letters like "C:" / "C:\" get a separator.
                    path = os.path.join(path_, os.sep)
                elif OS.name == "windows":
                    path = path_
                elif OS.family == "unix":
                    if path_ == "..":
                        path = path_
                    elif path_ == ".":
                        path = path_
                    elif path_ == "~":
                        path = cls.home()
                    else:
                        # Anchor the first unix piece at the filesystem root.
                        path = os.path.join(os.sep, path_)
                else:
                    raise FileNotFoundError("path_" + str(path_) + "is not expected")
        return path
    @staticmethod
    def home(): # return path1 of home directory of current user. Not tested in
        # d linux.
        # todo test in lunux!
        # Asks the shell for the profile/home variable and strips the newline.
        if OS.name == "windows":
            path = Console.get_output(r"echo %userprofile%")
            path = path.rstrip(newline2)
        else:
            path = Console.get_output("echo $HOME", split_lines=True)[0]
            path = path.rstrip(newline)
        return path
    @staticmethod
    def set_current(path, quiet=True): # changes current working directory.
        # d If quiet is disabled, prints directory.
        os.chdir(path)
        if not quiet:
            Print.debug("os.getcwd() # current directory is", os.getcwd())
class Locations:
    """Per-platform names of external programs/interpreters, chosen once
    at import time."""
    if OS.name == "windows": # d ...
        texteditor = "notepad" # d notepad is in every version of Windows, yea?
        py = "py"
        pyw = "pyw"
    elif OS.name == "macos": # d ...
        texteditor = "open" # d just open default program for file
        py = "python3"
        pyw = "python3"
    elif OS.name == "linux": # d ...
        texteditor = "nano" # d nano is everywhere, I suppose? ]-:
        py = "python3"
        pyw = "python3"
class Dir:
    """Directory helpers.

    BUGFIXES: number_of_files called the non-existent ``Dir.contents`` and
    batch_rename called the non-existent ``cls.contain`` — both raised
    AttributeError; both now use ``list_of_files``.  batch_rename also
    passed bare file names to File.rename, which only worked when the
    current working directory happened to be *directory*.
    """
    @staticmethod
    def create(filename): # create dir if didn't exist
        """Create the directory (and any missing parents)."""
        if not os.path.exists(filename):
            os.makedirs(filename)
    @staticmethod
    def commands8(): return Path.commands8() # alias to Path.commands8
    @staticmethod
    def working(): return Path.working() # alias to Path.working
    @staticmethod
    def list_of_files(path): # return list_ of files in folder
        """Return the list of entry names inside *path*."""
        return os.listdir(path)
    @staticmethod
    def number_of_files(path, quiet=False): # return integer of number of files
        # d in directory
        """Return how many entries *path* contains, or None if it is missing."""
        try:
            dir_contents = Dir.list_of_files(path)
            if not quiet:
                print(os.path.split(path)[1], "contain", len(dir_contents), "files")
            return len(dir_contents)
        except FileNotFoundError:
            if not quiet:
                print("Path", path, "isn't found")
            return None
    @classmethod
    def batch_rename(cls, directory, input_str, output_str, quiet=False):
        """Rename every entry of *directory* whose name contains
        *input_str*, replacing that substring with *output_str*."""
        for filename in cls.list_of_files(directory):
            if input_str in filename:
                final_name = filename.replace(input_str, output_str)
                # Join with the directory so the rename works regardless
                # of the current working directory.
                File.rename(os.path.join(directory, filename),
                            os.path.join(directory, final_name))
                if not quiet:
                    print(filename, "renamed to", final_name)
class File:
    """File-level helpers: create, delete, move, hide, backup, read."""
    @staticmethod
    def create(filename):
        """Create an empty file (and its parent directory) if missing.

        Raises FileNotFoundError when the file still does not exist afterwards.
        """
        filename = Path.full(filename)
        if os.path.split(filename)[0] != "":
            Dir.create(os.path.split(filename)[0])
        if not os.path.exists(filename):
            with open(filename, 'a'):  # open file and close right after
                os.utime(filename, None)  # touch: update the modification time
        if not os.path.exists(filename):
            raise FileNotFoundError("error while creating file " + filename +
                                    "try to repair script at " + Path.full(sys.argv[0]))
    @staticmethod
    def delete(path, quiet=False):
        """Delete the file at *path*.

        Raises IsADirectoryError for directories and FileExistsError when the
        file survives deletion.
        """
        if os.path.isdir(path):
            raise IsADirectoryError(path + " is directory, use Dir.delete to delete")
        try:
            os.remove(path)
        except FileNotFoundError:
            if not quiet:
                print("file", path, "is not exist")
        if not quiet:
            print("file", path, "is deleted")
        time.sleep(0.05)  # give the OS a moment before re-checking existence
        if os.path.exists(path):
            raise FileExistsError(path + " is not deleted")
    @staticmethod
    def move(input_file, output_file):
        """Move/rename *input_file* to *output_file*."""
        shutil.move(input_file, output_file)
    @staticmethod
    def copy(input_file, output_file):
        """Copy *input_file* (with metadata) to *output_file*."""
        shutil.copy2(input_file, output_file)
    @staticmethod
    def rename(input_file, output_file):
        """Alias for File.move."""
        File.move(input_file, output_file)
    @staticmethod
    def hide(filename, quiet=True):
        """Hide a file: prefix its name with a dot and, on Windows, also set
        FILE_ATTRIBUTE_HIDDEN.  Returns the new (dotted) path.
        """
        filename = Path.full(filename)
        if OS.name == "windows":
            win32api.SetFileAttributes(filename, win32con.FILE_ATTRIBUTE_HIDDEN)  # hide the Windows way
        dotted_file = Path.extend(os.path.split(filename)[0], "." + os.path.split(filename)[1])  # adding dot
        File.rename(filename, dotted_file)
        if not quiet:
            print ("file", filename, "is hidden now")
        return dotted_file
    @classmethod
    def backup(cls, filename, subfolder="bak", hide=True, quiet = False):
        """Copy *filename* into *subfolder* with a timestamp suffix.

        Optionally hides the copy.  Returns the backup file's path; raises
        FileNotFoundError when the copy cannot be found afterwards.
        """
        filename = Path.full(filename)  # normalize filename
        backupfilename = str(filename) + "." + Time.dotted() + ".bak"  # append dotted timestamp
        backupfilename = os.path.split(backupfilename)  # split into (folder, file)
        try:  # subfolder may not support len() at all (e.g. None)
            if len(subfolder) < 1:  # zero-length subfolder is also invalid
                raise TypeError("subfolder must have non-zero len")
        except TypeError:
            subfolder = "bak"  # fall back to the default subfolder
            print("len(subfolder) < 1, so subfolder = 'bak'")
        subfolder = Path.extend(backupfilename[0], subfolder)  # append subfolder name
        Dir.create(subfolder)  # create subfolder
        backupfilename = Path.extend(subfolder, backupfilename[1])  # full path of the backup file
        shutil.copy2(filename, backupfilename)  # finally back the file up
        if hide:
            backupfilename = cls.hide(backupfilename)  # hiding file
        if not os.path.isfile(backupfilename):  # verify the copy exists
            raise FileNotFoundError(backupfilename + " isn't created while backup")
        if not quiet:
            print("backup of file", filename, "created as", backupfilename)
        return backupfilename
    @staticmethod
    def wipe(path):
        """Truncate the file at *path* to zero length."""
        file = open(path, 'w')
        file.close()
    @staticmethod
    def read(path):
        """Return the entire text content of *path*."""
        with open(path, "r") as f:
            return f.read()
class Time:
    """Time helpers: timestamps, formatted strings, and a console countdown."""
    rnd = str(random.randint(1,100))  # random tag fixed once at import time
    @classmethod
    def fuck(cls):
        # Joke/debug helper left by the original author.
        print("fuck it all "+cls.rnd)
    @staticmethod
    def stamp():
        """Return the current UNIX timestamp (float seconds)."""
        return time.time()
    @staticmethod
    def dotted():
        """Return the current moment as 'YYYY.MM.DD_at_HH.MM.SS.micro'."""
        dateandtime = Time.get("year") + "." + Time.get("month", 2) + "." + \
                      Time.get("day", 2) + "_at_" + Time.get("hour", 2) + "." + \
                      Time.get("minute", 2) + "." + Time.get("second", 2) + "." + \
                      Time.get("microsecond", 6)
        return dateandtime
    @staticmethod
    def get(size, zfill=0):
        """Return one component of datetime.now() (e.g. 'month'), zero-padded
        to *zfill* characters.

        NOTE(review): uses eval() to access the attribute — safe only while
        *size* comes from trusted call sites; getattr would be safer.
        """
        return Str.leftpad(eval("str(datetime.datetime.now()." + size + ")"), leng=zfill, ch=0)
    @staticmethod
    def rustime(customtime=None):
        """Return a human-readable timestamp string.

        Russian phrasing by default, latin fallback when the console cannot
        render Cyrillic.  *customtime* is an optional UNIX timestamp
        (defaults to now).
        """
        if customtime:
            day = datetime.datetime.fromtimestamp(customtime).strftime('%d')
            month = datetime.datetime.fromtimestamp(customtime).strftime('%m')
            year = datetime.datetime.fromtimestamp(customtime).strftime('%Y')
            hour = datetime.datetime.fromtimestamp(customtime).strftime('%H')
            minute = datetime.datetime.fromtimestamp(customtime).strftime('%M')
            second = datetime.datetime.fromtimestamp(customtime).strftime('%S')
        else:
            gettime = datetime.datetime.now()
            day = gettime.strftime("%d")
            month = gettime.strftime('%m')
            year = gettime.strftime('%Y')
            hour = gettime.strftime('%H')
            minute = gettime.strftime('%M')
            second = gettime.strftime('%S')
        rustime = str(day) + " числа " + str(month) + " месяца " + str(year) + " года в " \
                  + str(hour) + ":" + str(minute) + ":" + str(second)
        if not OS.cyrrilic_support:
            rustime = str(day) + "." + str(month) + "." + str(year) + "y at " \
                      + str(hour) + ":" + str(minute) + ":" + str(second)
        return rustime
    @staticmethod
    def timer(seconds, check_per_sec=10):
        """Console countdown: rewrites the remaining seconds until done."""
        Countdown = get_Bench()
        Countdown.start()
        secs_second_var = int(seconds)
        while Countdown.get() < seconds:
            time.sleep(1/check_per_sec)
            secs_left_int = int(seconds - Countdown.get())
            if secs_left_int != secs_second_var:  # only repaint when the number changed
                secs_second_var = secs_left_int
                Print.rewrite("Timer for " + str(seconds) + " seconds. " + str(secs_left_int) + " left")
        Print.rewrite("")
class Json():
    """Load/save helpers for JSON files with write-back verification."""
    @classmethod
    def check(cls, filename):
        """Return True when *filename* parses as JSON, else print and return False."""
        try:
            cls.load(filename)
            return True
        except:  # any exception is False
            print("JSON is bad")
            return False
    @classmethod
    def save(cls, filename, jsonstring, quiet=False, debug=False):
        """Serialize *jsonstring* (any JSON-able object) into *filename*.

        Re-reads the file afterwards and raises IOError when the round-trip
        doesn't match.  NOTE(review): the bare except below hides the original
        error type/message.
        """
        try:
            File.create(filename)
            settingsJsonTextIO = open(filename, "w")
            json.dump(jsonstring, settingsJsonTextIO)
            settingsJsonTextIO.close()
            if not quiet:
                print("JSON succesfull saved")
            if debug:
                print("sys.argv[0] =",sys.argv[0])
                print(jsonstring)
        except:
            raise IOError("error while saving JSON, try to repair script at path1 " +
                          Path.full(sys.argv[0]))
        # verify the write by reloading the file and comparing
        json_test_string = cls.load(filename, quiet=True)
        if jsonstring != json_test_string:
            Print.debug("jsonstring_to_save", jsonstring, "json_test_string_from_file", json_test_string)
            raise IOError("error while saving JSON, try to repair script at path1 " +
                          Path.full(sys.argv[0]))  # exception
    @classmethod
    def load(cls, filename, quiet = False, debug=False):
        """Parse *filename* as JSON; creates it with an empty object first if missing."""
        try:
            if not os.path.isfile(filename):
                File.create(filename)
                cleanjson = {}
                cls.save(filename, cleanjson)
            settingsJsonTextIO = open(filename)
            jsonStringInMemory = json.load(settingsJsonTextIO)
            settingsJsonTextIO.close()
            if not quiet:
                print("JSON succesfull loaded")
            if debug:
                print(jsonStringInMemory)
            return jsonStringInMemory
        except:
            raise IOError("error while loading JSON, try to repair script at path1 " +
                          Path.full(sys.argv[0]))
class List:
    """Helpers for list manipulation."""
    @staticmethod
    def flatterize(input_list):
        """Return a flat list: non-str/int items are expanded, recursively,
        preserving order.

        BUG FIX: the original mutated the list while iterating it and only
        advanced its cursor on flattened items, which corrupted the result
        whenever a scalar preceded a nested sequence.
        """
        if not ((isinstance(input_list, list)) or (isinstance(input_list, tuple))):
            raise TypeError("object of type '" + str(type(input_list)) + "' can't be flatterized")
        output_list = []
        pending = list(input_list)
        while pending:
            item = pending.pop(0)
            if isinstance(item, (str, int)):
                output_list.append(item)
            else:
                # re-queue the expansion so deeper nesting is also flattened
                pending = list(item) + pending
        return output_list
    @staticmethod
    def split_every(list_input, count):
        """Split *list_input* into chunks of *count* items (last may be shorter)."""
        count = int(count)
        output_lists = [list_input[x:x + count] for x in range(0, len(list_input), count)]  # https://stackoverflow.com/questions/9671224/split-a-python-list-into-other-sublists-i-e-smaller-lists
        return output_lists
class Process():
    """Start and kill OS processes via the platform's shell tools."""
    @staticmethod
    def kill(process):
        """Kill a process by name, or by PID when *process* parses as int.

        Windows uses taskkill, macOS uses killall/kill; other OSes only get a
        warning popup.  NOTE(review): on an unsupported OS `command_` stays
        unbound and os.system() below raises NameError — confirm intended.
        """
        if OS.name == "windows":
            command_ = "taskkill /f /im " + str(process) + ".exe"
            try:
                int(process)  # numeric input means a PID, not an image name
                command_ = "taskkill /f /pid " + str(process)
            except:
                pass
        elif OS.name == "macos":
            command_ = "killall " + str(process)
            try:
                int(process)
                command_ = "kill " + str(process)
            except:
                pass
        else:
            Gui.warning("OS " + str(OS.name) + " not supported")
        os.system(command_)
    @staticmethod
    def start(*arguments, new_window=False, debug=False, pureshell=False):
        """Launch a program built from *arguments* (nested lists are flattened).

        With *new_window*/*pureshell* the command is assembled into one shell
        string (quoting space-containing, non-flag args) and run via
        os.system; otherwise it goes through subprocess.call on Windows or a
        joined os.system string on macOS.
        SECURITY NOTE: shell strings are built from raw arguments — do not
        pass untrusted input.
        """
        arguments = List.flatterize(arguments)
        if debug:
            Print.debug("Process.start arguments", arguments)
        if new_window or pureshell:
            for argument_ in arguments:
                if " " in argument_ and argument_[:1] != "-":
                    # quote arguments containing spaces (but not option flags)
                    if OS.name == "windows":
                        argument_ = Str.to_quotes(argument_)
                    else:
                        argument_ = Str.to_quotes_2(argument_)
                try:
                    command = command + " " + argument_
                except NameError:  # first argument: initialise the command string
                    if new_window:
                        if OS.name == "windows":
                            command = 'start "" ' + argument_
                        elif OS.name == "macos":
                            Gui.warning("macOS doesn't support creating new window now")
                            #command = "" +
                    else:
                        command = argument_
            os.system(command)
        else:
            if OS.name == "windows":
                commands = []
                for argument_ in arguments:
                    commands.append(str(argument_))
                subprocess.call(commands)
            elif OS.name == "macos":
                commands = ""
                for argument_ in arguments:
                    commands += str(argument_) + " "
                # print(commands)
                os.system(commands)
class Dict:
    """Helpers for dict objects."""
    @staticmethod
    def iterable(dict):
        """Return an iterable of (key, value) pairs of *dict*."""
        pairs = dict.items()
        return pairs
class Codegen:
    """Minimal code generator: stream text lines into a file opened by start().

    State lives on the class (cls.file), so only one generation can run at a
    time.
    """
    debug = False  # when True, echo every generated line to stdout
    @classmethod
    def start(cls, file_path):
        """Truncate *file_path* and open it for binary writing."""
        File.wipe(file_path)
        cls.file = open(file_path, "wb")
    @classmethod
    def add_line(cls, code):
        """Append *code* (UTF-8 encoded; no newline is added) to the open file."""
        cls.file.write(code.encode('utf8'))
        if cls.debug:
            print(code)
    @classmethod
    def end(cls, quiet=False):
        """Close the generated file.  (*quiet* is accepted but unused.)"""
        cls.file.close()
# Standard shebang + encoding header to place at the top of generated scripts.
shebang = "#! python3" + newline + \
          "# -*- coding: utf-8 -*-" + newline
def plog(logfile, logstring="some shit happened", customtime=None, quiet=False, backup=True):
    """Append a timestamped *logstring* line to *logfile*.

    Creates the log file if needed, optionally backs it up first, and echoes
    the message to stdout unless *quiet*.
    """
    if not quiet:
        print(logstring)
    File.create(logfile)
    if backup:
        File.backup(logfile, quiet=True)
    stamp = Time.rustime(customtime) if customtime else Time.rustime()
    with open(logfile, "a") as log_handle:
        log_handle.write(stamp + " " + str(logstring) + newline)
class Network:
    """Networking helpers built on the system ping command."""
    @staticmethod
    def getDomainOfUrl(url):
        """Extract the host part of *url* (text between '://' and the next '/')."""
        url_output = Str.substring(url, "://", "/")
        if url_output == "":
            url_output = Str.substring(url, "://")
        return url_output
    @classmethod
    def ping(Network, domain ="127.0.0.1", count=1, quiet=False, logfile=None, timeout=10000):
        """Ping *domain* *count* times; return True when a TTL reply came back.

        *timeout* is in milliseconds (converted to seconds for Linux ping).
        Optionally appends an up/down line to *logfile* via plog().
        """
        domain = Network.getDomainOfUrl(domain)
        if not quiet:
            colorama.reinit()
            Print.rewrite("Pinging", domain, count, "times...")
        up_message = domain + " is up!"
        down_message = domain + " is down."
        try:
            # ping option spelling differs per platform
            if OS.name == "windows":
                count_arg = "n"
                timeout_arg = "w"
            if OS.name in ["macos", "linux"]:
                count_arg = "c"
                timeout_arg = "W"
            if OS.name == "linux":
                timeout = int(timeout/1000)  # linux ping -W takes seconds, not ms
            command = "ping " + domain + " -" + count_arg + " " + str(count) + \
                      " -" + timeout_arg + " " + str(timeout)
            ping_output = Console.get_output(command)
        except KeyboardInterrupt:
            sys.exit()
        except:  # any failure to run ping counts as "host down"
            ping_output = ""
        # reply lines contain "TTL"/"ttl" only when the host answered
        if ("TTL" in ping_output) or ("ttl" in ping_output):
            up = True
        else:
            up = False
        if logfile:
            if up:
                plog(logfile, domain + " is up!", quiet=True)
                cprint(up_message, "white", "on_green")
            else:
                plog(logfile, down_message, quiet=True)
                cprint(down_message, "white", "on_red")
        elif not quiet:
            Print.rewrite("")
            if up:
                cprint(up_message, "white", "on_green")
            else:
                cprint(down_message, "white", "on_red")
        colorama.deinit()
        return up
class Fix:
    """Workarounds for platform-specific Python quirks."""
    @staticmethod
    def winRepair_UnicodeEncodeError(quiet=""):
        """Switch the Windows console to UTF-8 to avoid UnicodeEncodeError.

        Pass any truthy value for *quiet* to discard the chcp output.
        """
        if quiet:
            quiet = " > null"
        os.system("chcp 65001" + quiet)
        # BUG FIX: the original ran `set PYTHONIOENCODING = utf - 8`, whose
        # spaces set the wrong variable to the wrong value — and `set` only
        # affects the short-lived cmd.exe child anyway.  Setting os.environ
        # affects this process's children, which is what the fix needs.
        os.environ["PYTHONIOENCODING"] = "utf-8"
class Bash:
    """Escaping helpers for bash command lines."""
    escapable_chars = [backslash]  # characters that need a backslash prefix
    @classmethod
    def argument_escape(cls, argument):
        """Backslash-escape special characters in *argument*, then quote it."""
        for special in cls.escapable_chars:
            argument = argument.replace(special, backslash + special)
        return Str.to_quotes(argument)
class macOS:
    """macOS-specific helpers (osascript-based notifications)."""
    class osascript:
        @staticmethod
        def quotes_escape(string):
            """Escape double quotes (and already-escaped backslashes) so the
            string survives being embedded in an osascript expression."""
            quote_1 = '"'
            #quote_2 = "'"
            # re-escape any backslashes that are already present
            string = string.replace(backslash, backslash*3)  # if there are other escaped symbols except quotes
            string = string.replace(backslash*3+quote_1, backslash*2+quote_1)  # drop one backslash: it is re-added below
            #string = string.replace(backslash*3+quote_2, backslash*2+quote_2)
            # usual quotes escape
            escaped_1 = backslash + quote_1
            #escaped_2 = backslash + quote_2
            string = string.replace(quote_1,escaped_1)
            #string = string.replace(quote_2, escaped_2)
            return string
    @classmethod
    def notification(cls, message, title="python3", subtitle=None, sound=None, list_of_sounds=False):
        """Show a Notification Center message via `osascript -e`.

        Optional *title*, *subtitle* and *sound* are added to the AppleScript;
        *list_of_sounds* additionally prints the available system/user sounds.
        """
        # https://apple.stackexchange.com/questions/57412/how-can-i-trigger-a-notification-center-notification-from-an-applescript-or-shel# - just applescript
        # better realizations:
        # advanced commandline tool - https://github.com/vjeantet/alerter
        # simpler commandline tool - https://github.com/vjeantet/alerter
        # commands = "display notification \"message\" with title \"title\" subtitle \"subtitle\" sound name \"Sosumi\""
        commands = "display notification " + Str.to_quotes(cls.osascript.quotes_escape(message))
        if title or subtitle:
            commands += " with "
        if title:
            commands += "title " + Str.to_quotes(cls.osascript.quotes_escape(title)) + " "
        if subtitle:
            commands += "subtitle " + Str.to_quotes(cls.osascript.quotes_escape(subtitle)) + " "
        if sound:
            commands += " sound name " + Str.to_quotes(cls.osascript.quotes_escape(sound))
        commands = cls.osascript.quotes_escape(commands)  # escaping quotes:
        commands = Str.to_quotes(commands)  # wrap the whole applescript in quotes
        Process.start("osascript", "-e", commands)
        if list_of_sounds:
            Print.debug("global sounds", Dir.list_of_files(Path.extend("System", "Library", "Sounds")), "local sounds", Dir.list_of_files(Path.extend("~", "Library", "Sounds")))
class Gui:
    """Cross-platform popup warnings with console fallbacks."""
    def warning(message):
        """Show *message* via the best available GUI channel.

        Falls back to console input() when Tkinter/pyautogui would choke on
        sys.argv, or when running under PyPy.
        """
        try:
            try:
                sys.ps1  # these exist only in an interactive interpreter
                sys.ps2
                interactive_mode = True
            except AttributeError:
                interactive_mode = False
            Print.debug("interactive_mode", interactive_mode)
            try:
                # BUG FIX: the original compared a single character
                # (sys.argv[0][-3]) against ".py", which was always True;
                # compare the three-character suffix instead.
                not_dot_py = sys.argv[0][-3:] != ".py"
            except IndexError:  # sys.argv is empty
                not_dot_py = True
            if (not_dot_py or (sys.argv[0] != "")) and (not interactive_mode):
                # NOTE(review): this condition looks overbroad (the original
                # carried a "todo check logic" remark) — confirm the intent.
                Print.debug("sys.argv", sys.argv)
                Print.debug("Something wrong with sys.argv. Tkinter doesn't like it.")
                input()
        except IndexError:
            Print.debug("sys.argv", sys.argv)
            raise RuntimeError ("Something wrong with sys.argv. Tkinter doesn't like it.")
        if OS.name == 'macos':
            macOS.notification(message)
        if OS.name != "macos" and OS.python_implementation != "pypy":
            Internal.mine_import("pyautogui")
            pyautogui.alert(message)
        else:
            # BUG FIX: the original printed the undefined name `warning` here.
            Print.debug("PyPy doesn't support pyautogui, so warning is here:", message)
            input("Press Enter to continue")
class Tkinter():
    """Helpers for the Tkinter GUI toolkit."""
    @staticmethod
    def color(red, green, blue):
        """Return a Tk color string '#rrggbb' for 0-255 RGB components."""
        return str("#{:02x}{:02x}{:02x}".format(red, green, blue))
class Windows:
    """Windows-only helpers."""
    @staticmethod
    def lock():
        """Lock the workstation; only supported on Windows versions below 10."""
        if not (OS.windows_version and (OS.windows_version != 10)):
            raise OSError("Locking work only on Windows < 10")
        ctypes.windll.LockWorkStation()  # todo fix Windows 10
class Random:
    """Thin wrappers around the random module."""
    @staticmethod
    def integer(min, max):
        """Return a random integer in the inclusive range [min, max]."""
        return random.randrange(min, max + 1)
    @staticmethod
    def float(min, max):
        """Return a random float in the range [min, max]."""
        return random.uniform(min, max)
    @staticmethod
    def string(length):
        """Return a random alphanumeric string of *length* characters."""
        import string
        alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits
        return ''.join(random.choices(alphabet, k=length))
class Wget:
    @staticmethod
    def download(url, output, quiet=False):
        """Download *url* to *output* via command-line wget with a browser-like
        User-Agent (works around servers that block default wget).

        SECURITY NOTE: the command line is built from the raw *url*/*output*
        strings — do not pass untrusted input.
        """
        arguments = '--header="Accept: text/html" ' + \
                    '--user-agent="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_1) ' + \
                    'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3167.0 Safari/537.36"'
        if quiet:
            command = "wget '" + url + "' -O " + output + " " + arguments
            return Console.get_output(command)
        else:
            url = url.replace("&", backslash + "&")  # escape '&' so the shell doesn't background the job
            Process.start("wget", url, "-O", output, arguments, pureshell=True)
        # Another way to fix blocks by creating ~/.wgetrc file https://stackoverflow.com/a/34166756
class Int:
    """Integer helpers."""
    @staticmethod
    def from_to(start, end, to_str=False):
        """Return the integers from *start* to *end* inclusive.

        When *to_str* is true, return zero-padded strings of equal width;
        negative numbers keep their sign in front of the padding.
        """
        roots = range(start, end + 1)
        if not to_str:
            return roots
        max_len = max(len(str(start)), len(str(end)))
        output = []
        for root in roots:
            if root < 0:
                output.append("-" + Str.leftpad(-root, max_len - 1, 0))
            else:
                output.append(Str.leftpad(root, max_len, 0))
        return output
class CLI():
    """Interactive console helpers."""
    @staticmethod
    def get_y_n(question=""):
        """Keep asking *question* until the user answers y (True) or n (False)."""
        while True:
            answer = input(str(question) + " (y/n)?").strip(" ")
            if answer == "y":
                return True
            if answer == "n":
                return False
    wait_update_pos = 0  # rotating index of the current spinner frame
    @classmethod
    def wait_update(CLI, quiet=False):
        """Advance the text spinner one frame; print it, or return it if *quiet*."""
        frames = ("|", "/", "-", "\\", "|", "/", "-", "\\")
        stick = frames[CLI.wait_update_pos]
        CLI.wait_update_pos = (CLI.wait_update_pos + 1) % len(frames)
        if quiet:
            return stick
        Print.rewrite(stick)
    @staticmethod
    def progressbar(count, of):
        """Unfinished stub: currently only queries the console width."""
        Console.width()
class Repl:
    """A tiny hand-rolled read-eval-print loop."""
    @staticmethod
    def loop(safe=False):
        """Run an interactive exec() loop; Ctrl-C exits the loop.

        With *safe* any exception escaping the loop itself is swallowed.
        SECURITY NOTE: input is passed straight to exec() — trusted use only.
        """
        def main():  # dir ignore
            while True:
                try:
                    command = input(">>")
                    exec (command)
                    # additionally try to print the first token's value, REPL-style
                    exec("print(" + Str.substring(command, before = '', after=' ') + ")", globals())
                except KeyboardInterrupt:
                    break
                except SyntaxError as err:
                    print(err)
        if safe:
            try:
                main()
            except:
                pass
        else:
            main()
# Re-enable colorama output and report how long this module took to import.
colorama.reinit()
LoadTimeBenchMark = get_Bench()
LoadTimeBenchMark.time_start = start_bench_no_bench  # start timestamp captured at the top of the module
LoadTimeBenchMark.prefix = "commands8 v" + __version__ + " loaded in"
LoadTimeBenchMark.end()
#if __name__ == "__main__":
# Internal.dir_c()
# Repl.loop()
|
egigoka/test
|
acl_edit/commands8.py
|
commands8.py
|
py
| 50,047
|
python
|
en
|
code
| 2
|
github-code
|
6
|
11845591367
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os, json, subprocess
from tempfile import NamedTemporaryFile
# Directory of this test module and the path of the script under test.
cur_dir = os.path.dirname(os.path.abspath(__file__))
file_path = cur_dir + "/../eval_video.py"
def run_and_check_result(cmd):
    """Run *cmd*, parse its JSON stdout and validate the 'video' score; then
    re-run it with --output FILE and validate the file contents the same way.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    cmd_result = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf8")
    _check_score(json.loads(cmd_result.stdout))
    # check output file
    with NamedTemporaryFile('w+t') as output:
        # BUG FIX: build a new list instead of cmd.extend(...), which mutated
        # the caller's list and would leak --output into later invocations.
        out_cmd = cmd + ["--output", output.name]
        subprocess.run(out_cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf8")
        _check_score(json.loads(output.read()))
def _check_score(data):
    """Assert that *data* carries a float 'video' score within [0, 100]."""
    assert "video" in data
    assert isinstance(data["video"], float)
    assert 0 <= data["video"] <= 100
def check_video_vmaf(src_video, dst_video):
    """Evaluate *dst_video* against *src_video* with VMAF and validate the result."""
    cmd = ["python3", file_path, "--video_eval_method", "vmaf",
           "--src_video", src_video, "--dst_video", dst_video]
    run_and_check_result(cmd)
def check_yuv_video_vmaf(src_video, dst_video, video_size, pixel_format, bitdepth):
    """VMAF check for raw YUV inputs, which need explicit geometry/format flags."""
    cmd = ["python3", file_path, "--video_eval_method", "vmaf",
           "--src_video", src_video, "--dst_video", dst_video,
           "--video_size", video_size, "--pixel_format", pixel_format,
           "--bitdepth", bitdepth]
    run_and_check_result(cmd)
def check_align_video_vmaf(src_video, dst_video, align_method):
    """VMAF check with an optional frame-alignment method."""
    cmd = ["python3", file_path, "--video_eval_method", "vmaf",
           "--src_video", src_video, "--dst_video", dst_video]
    if align_method:
        cmd += ["--frame_align_method", align_method]
    run_and_check_result(cmd)
def test_y4m_yuv_compare(y4m_video, yuv_video):
    """Compare a y4m and a yuv clip in both src/dst directions."""
    check_video_vmaf(y4m_video["path"], yuv_video["path"])
    check_video_vmaf(yuv_video["path"], y4m_video["path"])
def test_yuv_yuv_compare(yuv_video):
    """Compare a yuv clip against itself (requires explicit format metadata)."""
    path = yuv_video["path"]
    check_yuv_video_vmaf(path, path,
                         video_size=yuv_video["video_size"],
                         pixel_format=yuv_video["pixel_format"],
                         bitdepth=yuv_video["bitdepth"])
def test_y4m_align_compare(y4m_video, align_method):
    """Compare a y4m clip against itself using *align_method* (fixture-driven)."""
    path = y4m_video["path"]
    check_align_video_vmaf(path, path, align_method)
|
OpenNetLab/Challenge-Environment
|
metrics/tests/test_eval_video.py
|
test_eval_video.py
|
py
| 2,516
|
python
|
en
|
code
| 8
|
github-code
|
6
|
17384594576
|
import os
import pandas as pd
import numpy as np
# set default data path methode
def get_data_path():
    """Return the path of the 'data' directory in or directly above the CWD.

    Raises FileNotFoundError when neither ./data nor ../data exists.
    """
    candidate = os.path.join(os.getcwd(), 'data')
    if os.path.isdir(candidate):
        return candidate
    candidate = os.path.join(os.getcwd(), "../data")
    if os.path.isdir(candidate):
        return candidate
    # Improvement: include a message so the failure is self-explanatory
    # (the original raised a bare FileNotFoundError).
    raise FileNotFoundError("no 'data' directory found in " + os.getcwd() + " or its parent")
# Cast the duration from timedelta to number of minutes
def cast_timedelta_to_number(series_timedelta):
    """Return a new Series of trip durations in minutes (fresh 0..n-1 index)."""
    minutes = [pd.to_timedelta(series_timedelta[idx]).total_seconds() / 60
               for idx in series_timedelta.index]
    return pd.Series(data=minutes)
# method to perform cleaning data frame from wrong data.
def drop_short_long_trips(df):
    """Drop implausible trips: <=3 minutes ending where they started, or >120 minutes."""
    df = pd.DataFrame(df)
    same_station = df["End_position_UID"] == df["Start_position_UID"]
    short_trip = df[(df["Duration"] <= float(3)) & same_station]
    long_trips = df[df["Duration"] > float(120)]
    df.drop(short_trip.index, inplace=True)
    df.drop(long_trips.index, inplace=True)
    return df
# Clean df from wrong data
def clean_df(df):
    """Drop rows with missing values (except p_number) and 'Missing' place names.

    p_number is excluded from the NA check because 0 there marks a bike place.
    """
    df = pd.DataFrame(df)
    # all columns except p_number must be non-null
    null_columns = [col for col in df.columns if str(col) != "p_number"]
    # BUG FIX: the original called dropna(inplace=True) on df.loc[...], which
    # operates on a temporary copy and silently left the NaN rows in place.
    df.dropna(axis=0, subset=null_columns, inplace=True)
    # drop rows whose place name starts with "Missing" (missing-island records);
    # the stray debug print(df) from the original has been removed
    missing_island = df[df["p_name"].str.startswith("Missing")]
    df.drop(missing_island.index, inplace=True)
    return df
# Cleaning of new data frame
def cleaning_new_df(df):
    """Drop 'recording' bookings and rows with negative start coordinates."""
    df = pd.DataFrame(df)
    # Drop trips whose place name starts with "recording"
    recordings = pd.DataFrame(df[df["Start_Place"].str.contains("^(recording)")])
    df.drop(recordings.index, inplace=True)
    # Drop trips located at negative lat/long (impossible for the service area)
    lat = df["Start_Latitude"].astype(float)
    lon = df["Start_Longitude"].astype(float)
    negative = pd.DataFrame(df[(lat < 0) | (lon < 0)])
    df.drop(negative.index, inplace=True)
    return df
def create_zip_code_data(df, geo_data):
    """Join zip codes onto bookings by their start coordinates and keep only
    bookings inside Frankfurt.

    *geo_data* is assumed to be indexed by "lat, lon" coordinate strings and
    to provide a "Zip_codes" column — TODO confirm against the caller.
    """
    df_new = pd.DataFrame(df)
    # geo data: index is the coordinate string, values carry the zip codes
    zip_code = pd.DataFrame(geo_data)
    # build the "lat, lon" join key from the start coordinates
    df_new["Coordinates"] = df_new["Start_Latitude"].str.cat(df_new["Start_Longitude"], sep=", ")
    # join the zip code column based on the coordinate key
    df_new = df_new.join(zip_code, on=["Coordinates"], lsuffix="_s", )
    # drop trips whose zip code is not a 5-digit number (wrong coordinates)
    wrong_coordinates = df_new[df_new["Zip_codes"].astype(str).str.contains("^(?![0-9]{5})")].index
    df_new = df_new.drop(wrong_coordinates)
    # collect bookings that are not inside Frankfurt's zip codes
    not_in_frankfurt = pd.DataFrame(columns=["Zipcode"])
    for i in df_new.index:
        # hard-coded list of Frankfurt zip codes
        if (int(df_new.loc[i]["Zip_codes"]) in [60306, 60308, 60311, 60313, 60314, 60316, 60318,
                                                60320, 60322, 60323, 60325, 60326, 60327, 60329,
                                                60385, 60386, 60388, 60389, 60431, 60433, 60435,
                                                60437, 60438, 60439, 60486, 60487, 60488, 60489,
                                                60528, 60529, 60547, 60549, 60594, 60596, 60598,
                                                60599, 61352, 63067, 65929, 65931, 65933, 65934, 65936]):
            continue
        else:
            not_in_frankfurt.loc[i, "Zipcode"] = df_new.loc[i]["Zip_codes"]
    not_in_frankfurt  # NOTE(review): bare expression, has no effect — leftover?
    df_new = df_new.drop(not_in_frankfurt.index)
    return df_new
# method to delete batch bookings
def drop_reallocation_trips(df):
    """Drop batch/reallocation bookings: groups of more than 4 trips sharing
    the same start AND end time (more parallel rentals than physically possible).

    NOTE(review): indexing the value_counts() result by the original column
    name ("Start_Time"/"End_time") only works on pandas versions where
    value_counts keeps that name; newer pandas names the column "count" —
    verify against the project's pinned pandas.
    """
    # start times and how often each occurs
    time_frames = pd.DataFrame(df["Start_Time"].value_counts())
    # start times occurring more than 4 times (more bikes than can be lent in parallel)
    time_frames = time_frames[time_frames["Start_Time"] > 4]
    # candidate bookings with those start times
    wrong = df[df["Start_Time"].isin(time_frames.index)].sort_values("Start_Time")
    # same procedure for end times
    time_frames_end = pd.DataFrame(df["End_time"].value_counts())
    time_frames_end = time_frames_end[time_frames_end["End_time"] > 4]
    # suspicious trips share both an over-represented start AND end time
    wrong_eND = wrong[wrong["End_time"].isin(time_frames_end.index)]
    # NOTE(review): the next three lines compute statistics and discard them —
    # they are no-ops kept from the original (the same values are recomputed
    # inside the DataFrame constructor below).
    wrong_eND.groupby("Start_Time")["Duration"].mean()
    wrong_eND.groupby("Start_Time")["Duration"].count()
    wrong_eND.groupby("Start_Time")["Duration"].min()
    # orchestrate the statistics into one frame indexed by start time
    wrong_data = pd.DataFrame(columns=["Mean", "Count", "min"], index=wrong_eND["Start_Time"],
                              data={"Mean": wrong_eND.groupby("Start_Time")["Duration"].mean(),
                                    "min": wrong_eND.groupby("Start_Time")["Duration"].min(),
                                    "Count": wrong_eND.groupby("Start_Time")["Duration"].count()})
    # start times whose group size exceeds 4 are dropped
    true = wrong_data.groupby("Start_Time").max().sort_values("min")["Count"] > 4
    start_time = true.index.astype(str)
    # drop those trips from the data frame
    df.drop(df[df["Start_Time"].isin(start_time)].index, inplace=True)
    return df
# method to drop every outlier that is as far away from the times 1.5 distanz to the 25 and the 75 quantile
def drop_outlier(df):
    """Drop Duration outliers per (month, hour, weekday) bucket using 1.5*IQR fences.

    Returns a copy of *df* with the outliers removed; prints progress and the
    before/after row counts like the original.
    NOTE(review): month "07" is absent from the original month list — confirm
    whether July is intentionally excluded.
    """
    # BUG FIX: the original referenced df_no_out before ever assigning it,
    # raising NameError on the very first bucket; start from a copy of df.
    df_no_out = df.copy()
    for month in ["01", "02", "03", "04", "05", "06", "08", "09", "10", "11", "12"]:
        print(month)
        for hour in range(0, 24):
            for days in range(0, 8):
                df_temp = df[
                    (df["month"] == month) & (df["hour"].astype(int) == hour) & (df["day"].astype(int) == days)]
                # describe() rows: count, mean, std, min, 25%, 50%, 75%, max;
                # use .iloc so positional access stays valid on newer pandas
                stats = df_temp["Duration"].describe()
                Q25 = stats.iloc[4]
                Q75 = stats.iloc[6]
                Q = Q75 - Q25
                wh_ = Q * 1.5
                upper = Q75 + wh_
                lower = Q25 - wh_
                if lower < 0:
                    lower = 0
                outlier_Ja_upper = df_temp[(df_temp["Duration"] > upper)]
                outlier_Ja_lower = df_temp[(df_temp["Duration"] < lower)]
                df_no_out = df_no_out.drop(outlier_Ja_upper.index)
                df_no_out = df_no_out.drop(outlier_Ja_lower.index)
    print(len(df_no_out))
    print(len(df))
    return df_no_out
|
gassnerm/PDS_Project
|
PDS_Project_nextbike/io/utils.py
|
utils.py
|
py
| 6,862
|
python
|
en
|
code
| null |
github-code
|
6
|
39475022455
|
# coding: utf-8
import os
class QuestionList(object):
    """Sequential reader over the bundled questions text file."""
    def __init__(self):
        """Load all questions from questions.20181214.txt next to this module."""
        question_file = os.path.dirname(os.path.realpath(__file__)) +\
            '/questions.20181214.txt'
        # BUG FIX: __verbs/__index were class attributes, so every instance
        # shared one list and each construction appended duplicate questions;
        # they are per-instance attributes now.  Also removed the redundant
        # fh.close() inside the with-block.
        self.__verbs = []
        with open(question_file, 'r') as fh:
            for line in fh.read().splitlines():
                self.__verbs.append(line)
        self.__index = 0
    def getQuestion(self):
        """Return the next question, or None when the list is exhausted."""
        if self.__index < len(self.__verbs):
            q = self.__verbs[self.__index]
            self.__index += 1
            return q
        else:
            return None
|
zhang8929/zhangyuguang
|
NLP/database/questions.py
|
questions.py
|
py
| 674
|
python
|
en
|
code
| 0
|
github-code
|
6
|
23972767732
|
#!/usr/bin/env python3
import rospy
import tf2_ros
from tf.transformations import *
from geometry_msgs.msg import Vector3Stamped, QuaternionStamped, TransformStamped, Quaternion, Vector3
from sensor_msgs.msg import Imu
from std_msgs.msg import Float64
from utils import *
import numpy as np
import threading
import time
import pickle
class BagfileConverter:
    """Subscribes to IMU/odometry topics at 200 Hz and dumps the samples to a pickle."""
    def __init__(self):
        self.dataset_path = self.create_dataset_path()
        self.init_ros()
        self.dataset_dict_list = []  # one {topic_name: msg} dict per tick
    def create_dataset_path(self):
        """Return the first unused data/dataset/dataset_<i>.pkl path (i < 100).

        NOTE(review): implicitly returns None when 100 datasets already
        exist — confirm whether that cap is intended.
        """
        dataset_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data/dataset")
        if not os.path.exists(dataset_dir):
            os.makedirs(dataset_dir)
        # Find the first index without an existing file
        for i in range(100):
            file_path = os.path.join(dataset_dir, "dataset_{}.pkl".format(i))
            if not os.path.exists(file_path):
                return file_path
    def save_dataset(self, data_dict_list, file_path):
        """Pickle *data_dict_list* to *file_path* (handle closed deterministically)."""
        with open(file_path, "wb") as f:
            pickle.dump(data_dict_list, f)
    def init_ros(self):
        """Create the ROS node, TF listener, topic subscribers and the 200 Hz rate."""
        rospy.init_node("bagfile_converter")
        self.tfBuffer = tf2_ros.Buffer()
        self.tflistener = tf2_ros.TransformListener(self.tfBuffer)
        self.wheel_speed_sub = subscriber_factory("/wheel_speed", Float64)
        self.dv_sub = subscriber_factory("/imu/dv", Vector3Stamped)
        self.imu_sub = subscriber_factory("/imu/data", Imu)
        self.quat_sub = subscriber_factory("/filter/quaternion", QuaternionStamped)
        self.subscriber_list = [self.wheel_speed_sub, self.dv_sub,
                                self.imu_sub, self.quat_sub]
        self.ros_rate = rospy.Rate(200)
        time.sleep(0.5)  # give subscribers a moment to connect
    def gather(self):
        """Record one dict of messages per tick until shutdown, then save them."""
        # Wait until all subscribers have a message to begin
        while not rospy.is_shutdown():
            if np.all([s.get_msg() is not None for s in self.subscriber_list]): break
        # Do the gathering
        print("Started gathering")
        while not rospy.is_shutdown():
            # Get messages from all subscribers
            dataset_dict = {}
            for s in self.subscriber_list:
                dataset_dict[s.topic_name] = s.get_msg(copy_msg=True)
            self.dataset_dict_list.append(dataset_dict)
            # Maintain that 200hz
            self.ros_rate.sleep()
        print("Saving dataset")
        # BUG FIX: the original saved self.dataset_dict, an attribute that was
        # never defined, which raised AttributeError here and lost the recording.
        self.save_dataset(self.dataset_dict_list, self.dataset_path)
# Script entry point: record topics until ROS shutdown, then persist the dataset.
if __name__=="__main__":
    bagfile_converter = BagfileConverter()
    bagfile_converter.gather()
|
silverjoda/buggycontrol
|
src/nodes/imu_odom_bagfile_to_dict_converter.py
|
imu_odom_bagfile_to_dict_converter.py
|
py
| 2,714
|
python
|
en
|
code
| null |
github-code
|
6
|
11120983307
|
import abc
import typing as tp
from librarius.domain.messages import (
TAbstractMessage,
TAbstractCommand,
TAbstractQuery,
TAbstractEvent,
)
if tp.TYPE_CHECKING:
from librarius.domain.messages import AbstractCommand, AbstractQuery, AbstractEvent
from librarius.domain.models import Entity
from librarius.service.uow import TAbstractUnitOfWork
TAbstractHandler = tp.TypeVar("TAbstractHandler", bound="AbstractHandler")
class AbstractHandler(tp.Generic[TAbstractHandler, TAbstractMessage], abc.ABC):
    """Base class for all message handlers; carries the unit of work."""
    def __init__(self, uow: "TAbstractUnitOfWork"):
        self.uow = uow  # unit of work the handler operates within
    @abc.abstractmethod
    def __call__(self, message: "TAbstractMessage"):
        """Process *message*; concrete subclasses must implement."""
        raise NotImplementedError
class AbstractCommandHandler(
    AbstractHandler["AbstractCommandHandler", TAbstractCommand],
    tp.Generic[TAbstractCommand],
    abc.ABC,
):
    """Handler for commands: executes side effects, returns nothing."""
    @abc.abstractmethod
    def __call__(self, cmd: "TAbstractCommand") -> None:
        """Execute *cmd*; concrete subclasses must implement."""
        raise NotImplementedError
class AbstractEventHandler(
    AbstractHandler["AbstractEventHandler", TAbstractEvent],
    tp.Generic[TAbstractEvent],
    abc.ABC,
):
    """Handler for domain events: reacts to an event, returns nothing."""
    @abc.abstractmethod
    def __call__(self, event: "TAbstractEvent") -> None:
        """React to *event*; concrete subclasses must implement."""
        raise NotImplementedError
class AbstractQueryHandler(
    AbstractHandler["AbstractQueryHandler", TAbstractQuery],
    tp.Generic[TAbstractQuery],
    abc.ABC,
):
    """Handler for queries: returns one entity or an iterable of entities."""
    @abc.abstractmethod
    def __call__(
        self, query: "TAbstractQuery"
    ) -> tp.Union[tp.Iterable["Entity"], "Entity"]:
        """Answer *query*; concrete subclasses must implement."""
        raise NotImplementedError
|
adriangabura/vega
|
librarius/service/handlers/abstract.py
|
abstract.py
|
py
| 1,538
|
python
|
en
|
code
| 1
|
github-code
|
6
|
22291138485
|
import re
import requests
from bs4 import BeautifulSoup
def scrape_page_title(soup):
    """Return the article headline text with newlines stripped out."""
    headline = soup.find('h1', class_='content__headline')
    return re.sub('\n', '', headline.get_text())
def scrape_page_topic(soup):
    """Return the article's topic label, or "-" when the page has none."""
    node = soup.find('span', class_='label__link-wrapper')
    try:
        label = node.get_text()
    except AttributeError:  # find() returned None: no label element on the page
        return "-"
    return re.sub('\n', '', label)
def scrape_page_authors(soup):
    """Return all author names joined with ' & '."""
    names = [re.sub('\n', '', node.get_text()) for node in soup.find_all('span', itemprop="name")]
    return ' & '.join(str(name) for name in names)
def scrape_page_datetime(soup):
    """Return the first 'datePublished' text, newlines removed, NBSP -> space."""
    published = soup.find_all('time', itemprop="datePublished")
    first = published[0].get_text().replace('\n', '')
    return first.replace('\xa0', ' ')
def scrape_page_description(soup):
    """Return the article standfirst (description), or "-" when missing."""
    standfirst = soup.find('div', class_="content__standfirst")
    if standfirst is None:
        return "-"
    return standfirst.get_text().replace('\n', '')
def scrape_page_text(soup):
    """Return the article body: all paragraph texts joined with spaces."""
    body = soup.find('div', class_="content__article-body")
    return ' '.join(p.get_text() for p in body.find_all('p'))
def scrape_page_related_topics(soup):
    """Return the hrefs of the article's related-topic links."""
    hrefs = []
    for anchor in soup.find_all('a', class_='submeta__link'):
        hrefs.append(anchor['href'])
    return hrefs
def scrape_page(link):
    """Fetch one article page and extract its fields.

    Returns a tuple:
    (title, link, topic, related_topics_links, authors, datetime,
     description, text)
    """
    page = requests.get(link)
    soup = BeautifulSoup(page.content, 'html.parser')
    return (
        scrape_page_title(soup),
        link,
        scrape_page_topic(soup),
        scrape_page_related_topics(soup),
        scrape_page_authors(soup),
        scrape_page_datetime(soup),
        scrape_page_description(soup),
        scrape_page_text(soup),
    )
def home_page(link):
    """Return the unique article links on a topic page, in first-seen order."""
    page = requests.get(link)
    soup = BeautifulSoup(page.content, 'html.parser')
    seen = []
    for anchor in soup.find_all('a', class_='fc-item__link'):
        href = anchor['href']
        if href not in seen:
            seen.append(href)
    return seen
|
mcmxlix/the_guardian_crawler
|
Crawler/scrape_infos.py
|
scrape_infos.py
|
py
| 2,969
|
python
|
en
|
code
| 1
|
github-code
|
6
|
72683633467
|
from matplotlib import pyplot as plt
if __name__ == '__main__':
    # Linear relation between rainfall and steady-state vegetation density
    # (Scanlon model); density is clamped at 0 below the x-intercept
    # (y_intercept/slope ~ 198 mm/year).
    slope = 0.0008588
    y_intercept = -0.1702
    # Rainfall sampled every 50 mm/year from 0 to 850.
    rainfall_values = [50 * x for x in range(0, 18)]
    y = [max(slope * x + y_intercept, 0) for x in rainfall_values]
    plt.title('Bifurcation diagram of Scanlon model')
    plt.xlabel('Rainfall (mm/year)')
    plt.ylabel('Steady state density')
    plt.plot(rainfall_values, y)
    # Dashed zero line from index 4 (200 mm/year) marks the bare-state branch.
    plt.plot(rainfall_values[4:], [0 for _ in range(4, 18)], linestyle='dashed')
    plt.show()
|
tee-lab/patchy-ecosterics
|
thesis_code/scanlon_transitions/phase_transition.py
|
phase_transition.py
|
py
| 498
|
python
|
en
|
code
| 2
|
github-code
|
6
|
13480667519
|
import requests, os, json
from flask import Flask, render_template, redirect, url_for, request
from dotenv import load_dotenv
from anvil import Anvil, User
# Load .env configuration before anything reads the environment.
load_dotenv()
app = Flask(__name__)
# Module-level state shared by every request (single-process dev server).
anvil = Anvil()
user = anvil.load_user()
worlds = anvil.load_worlds(user)
# NOTE(review): assumes load_worlds() never returns an empty list —
# worlds[0] raises IndexError otherwise; confirm against Anvil behavior.
anvil.current_world = worlds[0]
@app.route('/', methods=['POST', 'GET'])
def index():
    """Render the home page; a POST switches the active world."""
    if request.method == 'POST':
        selected = request.form["world"]
        anvil.current_world = anvil.get_world(worlds, selected)
    return render_template('index.html', name=user.name, worlds=worlds, current_world=anvil.current_world)
@app.route('/new_article', methods=['POST', 'GET'])
def new_article():
    """Render the new-article form; a POST returns to the home page.

    BUG FIX: ``url_for`` takes an *endpoint name*, not a URL path —
    ``url_for("/")`` raised ``werkzeug.routing.BuildError`` on every POST.
    The home view's endpoint is ``index``.
    """
    if request.method == 'POST':
        return redirect(url_for("index"))
    return render_template('new_article.html', name=user.name, worlds=worlds, current_world=anvil.current_world)
if __name__ == "__main__":
app.run(debug=True)
|
oaster2000/NPC-Writer
|
app.py
|
app.py
|
py
| 898
|
python
|
en
|
code
| 0
|
github-code
|
6
|
11495041377
|
import socket
import re
def is_valid_ip_address(ip_address):
    """Return True if *ip_address* is a valid dotted-quad IPv4 address.

    BUG FIX: the original regex accepted any 1-3 digit octets, so strings
    like "999.999.999.999" passed. Each octet must also be in 0-255.
    """
    pattern = re.compile(r'^(\d{1,3}\.){3}\d{1,3}$')
    if not pattern.match(ip_address):
        return False
    return all(0 <= int(octet) <= 255 for octet in ip_address.split('.'))
HOST = ''
PORT = 5000 # Arbitrary non-privileged port
server_socket = None
# Keep prompting until we successfully bind: first a valid address,
# then a port that parses as an integer, then the actual bind attempt.
while True:
    while True:
        print("Please enter in address to bind to",end="")
        cur_host = input(": ")
        if is_valid_ip_address(cur_host):
            HOST = cur_host
            break
        else:
            print("That wasn't a valid IP address format try again")
    while True:
        print("Please enter in a port",end="")
        cur_port = input(": ")
        try:
            # int() raises ValueError on non-numeric input; re-prompt.
            cur_port = int(cur_port)
            PORT = cur_port
            break
        except Exception as e:
            print("that is not a valid port... Try again")
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        server_socket.bind((HOST, PORT))
        break
    except Exception as e:
        # e.g. privileged port or address not owned by this host; retry.
        print("Failed to bind for server port or address...")
        print("error: ",e)
        print("Try again....")
# Listen for incoming connections
server_socket.listen(1)
print('Echo server listening on port', PORT)
# Serve clients one at a time, forever (no concurrency).
while True:
    # Wait for a connection
    client_socket, client_address = server_socket.accept()
    print('Client connected:', client_address)
    # Receive and echo back data until the client closes the connection
    while True:
        data = client_socket.recv(1024)
        if not data:
            break
        print("recieved data: %s" % (data))
        client_socket.sendall(data)
    # Clean up the connection
    print('Client disconnected:', client_address)
    client_socket.close()
|
Schlitzohr101/pythonEchoServer
|
python_server.py
|
python_server.py
|
py
| 1,700
|
python
|
en
|
code
| 0
|
github-code
|
6
|
75126801467
|
from model.upload import Upload
from aws.s3_wrapper import S3Wrapper
def lambda_handler(event: dict, _) -> str:
    """
    AWS Lambda entry point: copy a remote file into S3.

    Expects 'from_url' and 'to_path' keys in *event*.
    Returns 'ok' on success, 'error' on any failure (logged to stdout).
    """
    try:
        job = Upload(event.get('from_url'), event.get('to_path'))
        S3Wrapper().uploadFromUrl(job.from_url, job.to_path)
    except Exception as exc:
        print(f'error uploading data: {exc}')
        return 'error'
    return 'ok'
|
nxn128/serverless-query
|
src/smallquery/functions/upload_data/app.py
|
app.py
|
py
| 446
|
python
|
en
|
code
| 0
|
github-code
|
6
|
37219263893
|
from typing import List, Dict
import csv
def get_unique_industries(path: str) -> set[str]:
    """Return the distinct, non-empty 'industry' values from the CSV at *path*.

    BUG FIX: the original annotated the return as List[str] but has always
    returned a set; the annotation now matches the actual behavior. The
    intermediate list materialization was also dropped — DictReader streams.
    """
    with open(path, mode="r") as file:
        reader = csv.DictReader(file)
        return {row['industry'] for row in reader if row['industry'] != ''}
def filter_by_industry(jobs: List[Dict], industry: str) -> List[Dict]:
    """Return only the jobs whose 'industry' field equals *industry*."""
    return [job for job in jobs if job['industry'] == industry]
|
Gilson-SR/job-insights
|
src/insights/industries.py
|
industries.py
|
py
| 684
|
python
|
en
|
code
| 0
|
github-code
|
6
|
71184192189
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function, unicode_literals)
from multiprocessing import cpu_count
from eventlet import monkey_patch
from eventlet.greenpool import GreenPool
from contextlib import closing
monkey_patch()
import requests
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
monkey_patch()
def read_stock_url(symbol):
    """Scrape the current price for *symbol* from the Yahoo Finance quote page.

    Returns (symbol, price); price is -1 when the quote marker is not found.
    NOTE(review): relies on the legacy 'yfs_l84_<sym>' page markup — the page
    format has changed since this was written, so -1 is the likely result now.
    """
    urlname = 'http://finance.yahoo.com/q?s=' + symbol.lower() + \
        '&ql=0'
    # Stream the response and stop at the first matching line.
    with closing(requests.get(urlname, stream=True)) as url_:
        for line in url_.iter_lines():
            line = line.decode(errors='ignore')
            if 'yfs_l84_%s' % symbol.lower() in line:
                price = line.split('yfs_l84_%s\">' % symbol.lower())[1]
                # Strip the closing tag and thousands separators.
                price = float(price.split('</')[0].replace(',', ''))
                return symbol, price
    return symbol, -1
def run_stock_parser():
    """Read tickers from symbols.txt, fetch prices concurrently with eventlet
    green threads, and write the results to stock_prices.csv."""
    stock_symbols = []
    with open('symbols.txt', 'r') as symfile:
        for n, line in enumerate(symfile):
            sym = line.strip()
            if sym:
                stock_symbols.append(sym)
    ncpu = cpu_count()
    # 4 green threads per CPU: the work is network-I/O bound.
    pool = GreenPool(ncpu * 4)
    stock_prices = []
    # imap preserves input order while requests run concurrently.
    for symbol, price in pool.imap(read_stock_url, stock_symbols):
        stock_prices.append((symbol, price))
    with open('stock_prices.csv', 'w') as outfile:
        outfile.write('Stock,Price\n')
        for symbol, price in stock_prices:
            outfile.write('%s,%s\n' % (symbol, price))
if __name__ == '__main__':
    run_stock_parser()
|
ddboline/programming_tests
|
python/stock_parser_greenpool.py
|
stock_parser_greenpool.py
|
py
| 1,591
|
python
|
en
|
code
| 0
|
github-code
|
6
|
35042464781
|
from re import A
import time
from numpy import result_type
st = time.time()
#a, b, c = map(int, input().split())
#a, b = map(int, sys.stdin.readline().rstrip().split())
from collections import Counter
import math
import itertools
import sys
# BOJ 7568: rank = 1 + number of people strictly bigger in BOTH weight
# and height. O(n^2) pairwise comparison (n <= 50 in the problem).
n = int(sys.stdin.readline().rstrip())
people = []
result = [1]*n
for i in range(n):
    w, h = map(int, sys.stdin.readline().rstrip().split())
    people.append([w,h])
for num, i in enumerate(people):
    for j in people:
        # Skip entries equal by value; duplicates can't dominate each other
        # anyway, so this does not change the computed rank.
        if i == j:
            continue
        elif i[0] < j[0] and i[1] < j[1]:
            result[num] += 1
for i in result:
    print("{}".format(i),end=" ")
et = time.time()
# NOTE(review): this timing line is extra output — an online judge would
# reject the submission with it; presumably kept for local benchmarking.
print("time : {}".format(et-st))
|
lpigeon/BOJ
|
7568.py
|
7568.py
|
py
| 720
|
python
|
en
|
code
| 2
|
github-code
|
6
|
28237496421
|
#!/usr/bin/python
from copy import deepcopy
from fodft_tools import *
import argparse
import os
import sys, traceback
from ase import Atoms
# Root of the FHI-aims basis-set ("species") definitions.
spec_path = "/data/schober/code/fhiaims_develop/fhiaims_supporting_work/species_defaults/"
# Default FHI-aims control parameters; species_dir is filled in per run.
aims_params = {
        "xc" : "blyp",
        "spin" : "collinear",
        "occupation_type" : "gaussian 0.01",
        "mixer" : "pulay",
        "n_max_pulay" : "10",
        "charge_mix_param" : "0.5",
        "sc_accuracy_rho" : "1E-4",
        "sc_accuracy_eev" : "1E-2",
        "sc_accuracy_etot" : "1E-6",
        "relativistic" : "none",
        "species_dir" : "",
        #"species_dir" : os.path.join(self.spec_path, self.avail_species[self.species])
        }
# global parameters
#spec_path = "/data/schober/code/fhiaims_develop/fhiaims_supporting_work/species_defaults/"
# Map from user-facing basis-set label to its subdirectory under spec_path.
avail_species = {"light" : "light",
                 "tight" : "tight",
                 "cc.3" : "non-standard/NAO-VCC-nZ/NAO-VCC-3Z",
                 "cc.4" : "non-standard/NAO-VCC-nZ/NAO-VCC-4Z",
                 "cc.5" : "non-standard/NAO-VCC-nZ/NAO-VCC-5Z",
                 "tight.ext" : "tight.ext",
                 "cc.3.ext" : "non-standard/NAO-VCC-nZ/NAO-VCC-3Z.ext"
                 }
parser = argparse.ArgumentParser(description="Get parameters for fodft")
parser.add_argument('filename', nargs='+', help='Geometry file with the dimer or a list(blob)')#, dest='filename')
parser.add_argument('-e, --extension', help='Format of the geometry file, if not .xyz', dest='fformat', metavar='FORMAT', default='xyz')
parser.add_argument('-d, --dir', help='-d = subfoldername, will create project files there', dest='dir', default='./')
parser.add_argument('-f, --full', help='Create inputs for basic and polarized fodft', dest='full', action="store_true")
parser.add_argument('-c, --cubes', help="Automatically adds cube output command for guessed states", dest="cubes", action="store_true")
parser.add_argument('-a, --automagic', help="Tries to find fragments by a clustering algorithm. Check the result carefully! See also:--cutoff", dest="magic", action="store_true")
parser.add_argument('--cutoff', help="Optional: Defines the cutoff for the clustering algorithm. Works best with values larger than inter-molecular distances and smaller than inter-molecular distances! Default is 1.7 for C-C-bonds!", dest="cutoff", type=float)
parser.add_argument('-o, --orbitals', help="FO-Orbitals for determination of matrix elements (form: state1 state2 range1 range2)", dest="orbitals", nargs='+', type=int)
parser.add_argument('-i, --image', help="If more than one geometry in .xyz (e.g. a trajectory), which image to choose? Default: Last structure", type=int, dest="image", default=0)
# additional, optinal arguments for the fodft-class
#parser.add_argument('-
arguments = parser.parse_args()
filename = arguments.filename
fformat = arguments.fformat
#if len(filename) > 1 and arguments.dir:
    #print("Using one folder and more than one input doesn't work! Bye!")
    #sys.exit()
#example = fo_aims(Atoms('CO', positions=[(0, 0, 0), (0, 0, 1)]), arguments.image-1)
#example.avail_species = avail_species
# Interactive prompts: each entry is [prompt text, default value].
arg_dict = {
    "xc" : ["Which XC functional (Default: blyp): ", "blyp"],
    "charge_in" : ["Charges on [frag1], [frag2] (Default: +1 0]): ", "+1 0"],
    "embedding" : ["Use embedding? [y/n] (Default: no): ", ".false."],
    "species" : ["Specify basis set, available options: \n\n {0} \n\n(Default: tight). Please choose: ".format(avail_species.keys()), "tight"],
    "fo_type" : ["FO_Type, hole or elec (Default: hole): ", "hole"],
    }
# get params for fodft
arg_dict_values = deepcopy(arg_dict)
# First, get user input
# NOTE(review): raw_input makes this a Python 2 script.
for item in arg_dict:
    arg_dict_values[item][1] = raw_input("{0}".format(arg_dict[item][0]))
# Fill any option the user left blank with its documented default.
# BUG FIX: the original tested `is ""` — an *identity* comparison that only
# works because CPython happens to intern the empty string; `==` is the
# correct (and portable) equality test here.
for item in arg_dict_values:
    if arg_dict_values[item][1] == "":
        arg_dict_values[item][1] = arg_dict[item][1]
# Special post processing of inputs
# Translate the interactive y/n answer into the Fortran-style boolean
# literal FHI-aims expects; any other answer keeps the raw input.
if arg_dict_values['embedding'][1] == "y":
    arg_dict_values['embedding'][1] = ".true."
elif arg_dict_values['embedding'][1] == "n":
    arg_dict_values['embedding'][1] = ".false."
# Generate a fo_dft input set for every geometry file given on the CLI.
for file in filename:
    system = fo_aims(file, arguments.image-1, fformat)
    system.spec_path = spec_path
    system.avail_species = avail_species
    system.aims_params = aims_params
    # Resolve the chosen basis-set label to its species_defaults directory.
    system.aims_params["species_dir"] = os.path.join(system.spec_path, system.avail_species[arg_dict_values["species"][1]])
    if len(filename) > 1:
        # One subfolder per input file, named after the file's stem.
        dirname = file.rpartition(".")[0]
        # NOTE(review): `dirname == filename` compares a str against the
        # *list* of filenames, so it is always False — presumably meant to
        # compare against `file`; confirm intent before relying on "fo" prefix.
        if dirname == filename:
            dirname = "fo" + dirname
        arguments.dir = dirname
    if arguments.dir == "./":
        print("Creating files in current working directory ({0})".format(os.getcwd()))
    else:
        try:
            # Remember where we started so we can return after this file.
            cwd_start = os.getcwd()
            os.mkdir(arguments.dir)
            os.chdir(arguments.dir)
            print("Creating files in {0}!".format(arguments.dir))
        except:
            # Bare except kept: message is printed, then the error re-raised.
            print("Error when creating folder {0}:".format(arguments.dir))
            raise
    print("Creating basic and embedded input: {0}".format(arguments.full))
    # Set the values.
    system.aims_params['xc'] = arg_dict_values['xc'][1]
    # "[+1 0]" -> ["+1", "0"]: one charge per fragment.
    system.charge_in = arg_dict_values['charge_in'][1].strip("[]").split()
    system.embedding = arg_dict_values['embedding'][1]
    system.species = arg_dict_values['species'][1]
    system.fo_type = arg_dict_values['fo_type'][1]
    print("Got all information, now create the fragments!")
    if arguments.magic:
        # Clustering-based fragment detection (-a); optional --cutoff override.
        if arguments.cutoff:
            system.magic_cutoff = arguments.cutoff
        system.magic_fragmentation()
    else:
        system.create_fragments()
    if arguments.orbitals:
        # First two ints: frontier states; last two: FO ranges.
        system.frontiers = arguments.orbitals[0:2]
        system.fo_range = arguments.orbitals[2:4]
        system.update_calculators()
    if arguments.cubes is True:
        system.set_cube_files()
    if arguments.full is True:
        # -f: write both a plain and an embedding-enabled input set.
        print("Now creating input files for basic fo_dft...")
        os.mkdir("basic")
        os.mkdir("embedded")
        cwd = os.getcwd()
        os.chdir("basic")
        #print("Now creating the input files!")
        system.write_inputs()
        os.chdir(cwd)
        os.chdir("embedded")
        print("Now creating input files for embedded fo_dft...")
        system.embedding = ".true."
        system.update_calculators()
        system.set_cube_files()
        system.write_inputs()
    else:
        print("Now creating the input files!")
        system.write_inputs()
    print("Done.")
    try:
        # cwd_start is only bound when we chdir'd into a subfolder above;
        # the bare except silently skips the NameError in the "./" case.
        os.chdir(cwd_start)
    except:
        pass
|
schober-ch/fodft_tools
|
fodft.py
|
fodft.py
|
py
| 6,769
|
python
|
en
|
code
| 0
|
github-code
|
6
|
5131582706
|
from django.contrib.auth.models import AbstractUser
from django.db import models
class User(AbstractUser):
    """Custom user with a role (admin/moderator/user), bio and e-mail
    confirmation code."""

    ADMIN = 'admin'
    MODERATOR = 'moderator'
    USER = 'user'
    USER_ROLE_CHOICES = [
        (ADMIN, 'admin'),
        (MODERATOR, 'moderator'),
        (USER, 'user'),
    ]
    # One-time code sent to the user to confirm registration.
    confirmation_code = models.CharField(
        max_length=100,
        blank=True
    )
    username = models.CharField(
        max_length=150,
        unique=True,
        db_index=True
    )
    email = models.EmailField(
        verbose_name='Email',
        help_text='Введите адрес эл.почты',
        unique=True
    )
    bio = models.TextField(
        verbose_name='О пользователе',
        help_text='Расскажите о себе',
        blank=True,
        null=True
    )
    role = models.CharField(
        'Роль пользователя',
        max_length=20,
        choices=USER_ROLE_CHOICES,
        default=USER,
        blank=True,
    )

    @property
    def is_user(self):
        return self.role == self.USER

    @property
    def is_moderator(self):
        return self.role == self.MODERATOR

    @property
    def is_admin(self):
        # Staff members count as admins regardless of their stored role.
        return self.role == self.ADMIN or self.is_staff

    class Meta:
        verbose_name = 'Пользователь'
        verbose_name_plural = 'Пользователи'

    def __str__(self):
        # BUG FIX: AbstractUser has no `name` attribute, so `self.name`
        # raised AttributeError whenever the user was stringified
        # (admin lists, logs, templates). Use the username instead.
        return self.username
|
Toksi86/yamdb_final
|
api_yamdb/users/models.py
|
models.py
|
py
| 1,428
|
python
|
en
|
code
| 0
|
github-code
|
6
|
7153685231
|
# solarmap.py
import collections
import heapq
class SolarSystem:
    """
    Solar system handler
    A graph node: one solar system plus its weighted connections.
    """

    def __init__(self, key):
        self.id = key
        self.connected_to = {}

    def add_neighbor(self, neighbor, weight):
        """Attach *neighbor* with the given connection *weight*."""
        self.connected_to[neighbor] = weight

    def get_connections(self):
        """Return a view of all directly connected systems."""
        return self.connected_to.keys()

    def get_id(self):
        """Return this system's identifier."""
        return self.id

    def get_weight(self, neighbor):
        """Return the weight of the connection to *neighbor*."""
        return self.connected_to[neighbor]
class SolarMap:
    """
    Solar map handler
    Graph of solar systems connected by stargates (GATE) and wormholes
    (WORMHOLE), with unweighted BFS and risk-weighted Dijkstra routing.
    """
    GATE = 0
    WORMHOLE = 1
    def __init__(self, eve_db):
        # eve_db supplies per-system metadata (security class) for weighting.
        self.eve_db = eve_db
        self.systems_list = {}
        self.total_systems = 0
    def add_system(self, key):
        """Create, register and return a new SolarSystem node for *key*."""
        self.total_systems += 1
        new_system = SolarSystem(key)
        self.systems_list[key] = new_system
        return new_system
    def get_system(self, key):
        """Return the SolarSystem for *key*, or None if unknown."""
        if key in self.systems_list:
            return self.systems_list[key]
        else:
            return None
    def get_all_systems(self):
        """Return a view of all registered system keys."""
        return self.systems_list.keys()
    def add_connection(
        self,
        source,
        destination,
        con_type,
        con_info=None,
    ):
        """Add a bidirectional edge between *source* and *destination*.

        For WORMHOLE edges, con_info carries
        [sig_source, code_source, sig_dest, code_dest, wh_size, wh_life,
         wh_mass, time_elapsed]; each direction stores its own signature/code.
        """
        if source not in self.systems_list:
            self.add_system(source)
        if destination not in self.systems_list:
            self.add_system(destination)
        if con_type == SolarMap.GATE:
            self.systems_list[source].add_neighbor(self.systems_list[destination], [SolarMap.GATE, None])
            self.systems_list[destination].add_neighbor(self.systems_list[source], [SolarMap.GATE, None])
        elif con_type == SolarMap.WORMHOLE:
            [sig_source, code_source, sig_dest, code_dest, wh_size, wh_life, wh_mass, time_elapsed] = con_info
            self.systems_list[source].add_neighbor(
                self.systems_list[destination],
                [SolarMap.WORMHOLE, [sig_source, code_source, wh_size, wh_life, wh_mass, time_elapsed]]
            )
            self.systems_list[destination].add_neighbor(
                self.systems_list[source],
                [SolarMap.WORMHOLE, [sig_dest, code_dest, wh_size, wh_life, wh_mass, time_elapsed]]
            )
        else:
            # you shouldn't be here
            pass
    def __contains__(self, item):
        return item in self.systems_list
    def __iter__(self):
        return iter(self.systems_list.values())
    def shortest_path(
        self,
        source,
        destination,
        avoidance_list,
        size_restriction,
        ignore_eol,
        ignore_masscrit,
        age_threshold
    ):
        """Fewest-jumps route from *source* to *destination* (plain BFS).

        Wormhole edges are only traversed when they satisfy the size
        restriction and the EOL / mass-critical / age filters. Systems in
        *avoidance_list* are treated as already visited. Returns the path
        as a list of system ids, or [] when unreachable.
        """
        path = []
        size_restriction = set(size_restriction)
        if source in self.systems_list and destination in self.systems_list:
            if source == destination:
                path = [source]
            else:
                queue = collections.deque()
                # Seeding `visited` with avoided systems keeps BFS out of them.
                visited = set([self.get_system(x) for x in avoidance_list])
                parent = {}
                # starting point
                root = self.get_system(source)
                queue.append(root)
                visited.add(root)
                while len(queue) > 0:
                    current_sys = queue.popleft()
                    if current_sys.get_id() == destination:
                        # Found! Walk the parent chain back to the source.
                        path.append(destination)
                        while True:
                            parent_id = parent[current_sys].get_id()
                            path.append(parent_id)
                            if parent_id != source:
                                current_sys = parent[current_sys]
                            else:
                                path.reverse()
                                return path
                    else:
                        # Keep searching
                        for neighbor in [x for x in current_sys.get_connections() if x not in visited]:
                            # Connection check (gate or wormhole size)
                            [con_type, con_info] = current_sys.get_weight(neighbor)
                            if con_type == SolarMap.GATE:
                                proceed = True
                            elif con_type == SolarMap.WORMHOLE:
                                proceed = True
                                [_, _, wh_size, wh_life, wh_mass, time_elapsed] = con_info
                                if wh_size not in size_restriction:
                                    proceed = False
                                elif ignore_eol and wh_life == 0:
                                    proceed = False
                                elif ignore_masscrit and wh_mass == 0:
                                    proceed = False
                                elif 0 < age_threshold < time_elapsed:
                                    proceed = False
                            else:
                                proceed = False
                            if proceed:
                                parent[neighbor] = current_sys
                                visited.add(neighbor)
                                queue.append(neighbor)
        return path
    def shortest_path_weighted(
        self,
        source,
        destination,
        avoidance_list,
        size_restriction,
        security_prio,
        ignore_eol,
        ignore_masscrit,
        age_threshold
    ):
        """Least-risk route: Dijkstra-style search weighted by security class.

        *security_prio* maps a system's security class (index 3 = wormhole
        space) to a risk cost; otherwise identical filtering to shortest_path.
        Returns the path as a list of system ids, or [] when unreachable.
        """
        path = []
        size_restriction = set(size_restriction)
        if source in self.systems_list and destination in self.systems_list:
            if source == destination:
                path = [source]
            else:
                priority_queue = []
                visited = set([self.get_system(x) for x in avoidance_list])
                distance = {}
                parent = {}
                # starting point
                root = self.get_system(source)
                distance[root] = 0
                heapq.heappush(priority_queue, (distance[root], root))
                while len(priority_queue) > 0:
                    (_, current_sys) = heapq.heappop(priority_queue)
                    visited.add(current_sys)
                    if current_sys.get_id() == destination:
                        # Found! Walk the parent chain back to the source.
                        path.append(destination)
                        while True:
                            parent_id = parent[current_sys].get_id()
                            path.append(parent_id)
                            if parent_id != source:
                                current_sys = parent[current_sys]
                            else:
                                path.reverse()
                                return path
                    else:
                        # Keep searching
                        for neighbor in [x for x in current_sys.get_connections() if x not in visited]:
                            # Connection check (gate or wormhole size)
                            [con_type, con_info] = current_sys.get_weight(neighbor)
                            if con_type == SolarMap.GATE:
                                proceed = True
                                risk = security_prio[self.eve_db.system_type(neighbor.get_id())]
                            elif con_type == SolarMap.WORMHOLE:
                                proceed = True
                                risk = security_prio[3]
                                [_, _, wh_size, wh_life, wh_mass, time_elapsed] = con_info
                                if wh_size not in size_restriction:
                                    proceed = False
                                elif ignore_eol and wh_life == 0:
                                    proceed = False
                                elif ignore_masscrit and wh_mass == 0:
                                    proceed = False
                                elif 0 < age_threshold < time_elapsed:
                                    proceed = False
                            else:
                                proceed = False
                            if proceed:
                                # Standard relaxation step.
                                if neighbor not in distance:
                                    distance[neighbor] = float('inf')
                                if distance[neighbor] > distance[current_sys] + risk:
                                    distance[neighbor] = distance[current_sys] + risk
                                    heapq.heappush(priority_queue, (distance[neighbor], neighbor))
                                    parent[neighbor] = current_sys
        return path
|
farshield/shortcircuit
|
src/shortcircuit/model/solarmap.py
|
solarmap.py
|
py
| 8,726
|
python
|
en
|
code
| 56
|
github-code
|
6
|
33280054752
|
# %%
import numpy as np
import pandas as pd
import datetime as dt
#from cohorts_pipeline_woof_v4 import df_cleaning
#from cohorts_pipeline_woof_v4 import cohorts_pipeline
import mysql.connector
from mysql.connector import Error
# %%
#df_og = pd.read_csv('./Data/orders.csv', sep=';', decimal=',')
# Orders for account WOOOF shipped to Germany, excluding cancellations;
# each order is tagged New/Returning by comparing against the customer's
# first order date.
query_orders = 'SELECT alo.Customer_ID, alo.Hashed_Email, alo.Conv_Date, alo.Conv_ID, CASE WHEN alo.Conv_Date = first_orders.first_date THEN "New" ELSE "Returning" END AS "Customer_Type", alo.Revenue, alo.Revenue_excl_VAT FROM api_shopware.api_shopware_orders alo JOIN ( SELECT Hashed_Email, MIN(Conv_Date) AS "first_date" FROM api_shopware.api_shopware_orders alo WHERE Account = "WOOOF" AND Conv_Status != "cancelled" GROUP BY 1) AS first_orders ON first_orders.Hashed_Email = alo.Hashed_Email WHERE Account = "WOOOF" AND Conv_Status != "cancelled" AND Shipping_Country = "Germany"'
#%%
# SECURITY NOTE(review): database host, user and password are hardcoded in
# source — rotate these credentials and load them from environment variables.
try:
    connection = mysql.connector.connect(host='attribution-system-fsg-new.cob86lv75rzo.eu-west-1.rds.amazonaws.com',
                                         database='api_lightspeed',
                                         user='fsg',
                                         password='Attribution3.0')
    if connection.is_connected():
        db_Info = connection.get_server_info()
        print("Connected to MySQL Server version ", db_Info)
        cursor = connection.cursor()
        cursor.execute("select database();")
        record = cursor.fetchone()
        print("Your connected to database: ", record)
        df_og = pd.read_sql(query_orders,con=connection)
except Error as e:
    print("Error while connecting to MySQL", e)
finally:
    # NOTE(review): if connect() itself raised, `connection` is unbound here
    # and this finally block raises NameError; confirm and guard if needed.
    if (connection.is_connected()):
        cursor.close()
        connection.close()
        print("MySQL connection is closed")
#%%
#df_og.columns
df_og.head()
#%%
df_og.Conv_Date.max()
#%%
df = df_og.copy()
#%%
# Per-customer first order date, plus year-month buckets for cohorts.
df.set_index('Customer_ID', inplace=True)
df['First_Order'] = df.groupby(level=0)['Conv_Date'].min()
df['First_Order_YM'] = df.groupby(level=0)['Conv_Date'].min().apply(lambda x: x.strftime('%Y-%m'))
df.reset_index(inplace=True)
df['Creation_Date_YM'] = df['Conv_Date'].apply(lambda x: x.strftime('%Y-%m'))
#%%
df['Year'] = df['Conv_Date'].dt.year
# NOTE(review): Series.dt.week is deprecated in modern pandas; the
# replacement is .dt.isocalendar().week.
df['Week'] = df['Conv_Date'].dt.week
df['Year_Week'] = df['Conv_Date'].dt.strftime("%Y-%W")
#%%
df.head()
#%%
months = df['First_Order_YM'].unique()
output_dfs = {p: df[df['First_Order_YM'] == p] for p in months}
cohort_orders = pd.DataFrame()
cohort_customers = pd.DataFrame()
cohort_values = pd.DataFrame()
trans_summary = pd.DataFrame()
projections = pd.DataFrame()
rev_summary = pd.DataFrame()
# NOTE(review): 'ValueNOVAT' is not produced by query_orders (it selects
# Revenue / Revenue_excl_VAT) — these two lines would KeyError; presumably
# leftovers from the CSV-based version of this notebook. Confirm.
rev_summary['New'] = df[df['Customer_Type']=='New'].groupby('Creation_Date_YM')['ValueNOVAT'].sum()
rev_summary['Returning'] = df[df['Customer_Type']=='Returning'].groupby('Creation_Date_YM')['ValueNOVAT'].sum()
#%%
from cohorts_pipeline_woof_v4 import df_cleaning
from cohorts_pipeline_woof_v4 import cohorts_pipeline
# %%
df = df_cleaning(df_og)
# %%
df.head()
# %%
transactions, reorder_rates = cohorts_pipeline(df)
#%%
transactions = transactions.fillna(0)
transactions.head()
# %%
transactions = transactions.astype(int)
# %%
reorder_rates = reorder_rates.astype(str)
# %%
# Decimal comma + blank out 'nan' strings for the spreadsheet export.
reorder_rates = reorder_rates.apply(lambda x: x.str.replace('.',','))
reorder_rates = reorder_rates.apply(lambda x: x.str.replace('nan',''))
# %%
from gspread_pandas import Spread, Client
# %%
wooof_cohorts_data_dump = Spread('WOOOF_COHORTS_DUMP')
# %%
# Push Transactions
wooof_cohorts_data_dump.df_to_sheet(transactions, index=True, sheet='TRANSACTIONS_JULY', start='A1', replace=True)
wooof_cohorts_data_dump.df_to_sheet(reorder_rates, index=True, sheet='REORDER_RATES_JULY', start='A1', replace=True)
# %%
transactions
# %%
|
rahichan/angela_legacy
|
WOOOF/WOOOF_COHORTS_BUILDER.py
|
WOOOF_COHORTS_BUILDER.py
|
py
| 3,732
|
python
|
en
|
code
| 0
|
github-code
|
6
|
34354801205
|
import pickle
# Load the Q-table from the pickle file
# (expected shape: {state: {action: q_value}}; path is relative to the
# repository root, so run this from there).
with open("./agent_code/qagent/q_table.pickle", "rb") as file:
    q_table = pickle.load(file)
# Print the Q-table
for state, action_values in q_table.items():
    print(f"State: {state}")
    for action, q_value in action_values.items():
        print(f"  Action: {action}, Q-Value: {q_value}")
|
miri-stack/bomberman
|
agent_code/qagent/checks.py
|
checks.py
|
py
| 352
|
python
|
en
|
code
| 0
|
github-code
|
6
|
30477959760
|
# Create Tree from Level Order Traversal
import queue
class Node:
    """A binary-tree node: a value plus optional left/right children."""

    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None

    def __repr__(self):
        return f"Node({self.data})"
def buildFromLevelOrder():
    """Interactively build a binary tree in level order (-1 = no node)."""
    root_val = int(input("Enter the data for root node (-1 for no node): "))
    if root_val == -1:
        return None
    root = Node(root_val)
    pending = queue.Queue()
    pending.put(root)
    while not pending.empty():
        node = pending.get()
        child_val = int(input(f"Enter left child of {node.data} (-1 for no node): "))
        if child_val != -1:
            node.left = Node(child_val)
            pending.put(node.left)
        child_val = int(input(f"Enter right child of {node.data} (-1 for no node): "))
        if child_val != -1:
            node.right = Node(child_val)
            pending.put(node.right)
    return root
def inOrder(root):
    """Print the tree's values in-order (left, node, right), space-separated."""
    stack, node = [], root
    while stack or node is not None:
        # Dive as far left as possible, remembering the path.
        while node is not None:
            stack.append(node)
            node = node.left
        node = stack.pop()
        print(node.data, end = " ")
        node = node.right
if __name__ == "__main__":
    # Build a tree from interactive level-order input, then print it in-order.
    root = buildFromLevelOrder()
    print("Inorder Traversal")
    inOrder(root)
# Count Leaf Nodes in Binary Tree (Preorder Traversal)
def countLeaves(root):
    """Return the number of leaf nodes in the (sub)tree rooted at *root*."""
    if root is None:
        return 0
    leaves, stack = 0, [root]
    while stack:
        node = stack.pop()
        if node.left is None and node.right is None:
            leaves += 1
            continue
        if node.left is not None:
            stack.append(node.left)
        if node.right is not None:
            stack.append(node.right)
    return leaves
|
prabhat-gp/GFG
|
Binary Trees/Love Babbar/4_create_tree_level_order.py
|
4_create_tree_level_order.py
|
py
| 1,484
|
python
|
en
|
code
| 0
|
github-code
|
6
|
20477618446
|
from typing import List, Optional, Union
def _make_mrkdown_block(mrkdown: str):
"""
Wraps the mrkdown in a block kit block.
"""
return {
"type": "section",
"text": {
"type": "mrkdwn",
"text": mrkdown,
},
}
def _make_header_block(heading: str):
"""
Wraps the heading in a block kit block.
"""
return {
"type": "header",
"text": {
"type": "plain_text",
"text": heading,
},
}
def _make_block_message(
blocks: Union[None, str, dict, List[Union[str, dict]]],
header: Optional[str] = None,
visible_in_channel: bool = True,
):
"""
Generates slack block kit messages from a variety of input types.
str -> Wrap the str in a mrkdown section and in a top level response.
dict -> Wrap the dict in a top level response.
list -> Wrap the altered contents in a top level response.
str -> Wrap the str in a mrkdown section
dict -> add to top level response as is
"""
output_blocks: List[dict] = []
if blocks is None or blocks == "":
return {}
if isinstance(blocks, dict):
output_blocks = [blocks]
elif isinstance(blocks, list):
formatted_blocks = []
for block in blocks:
if isinstance(block, str):
formatted_blocks.append(_make_mrkdown_block(block))
if isinstance(block, dict):
formatted_blocks.append(block)
output_blocks = formatted_blocks
else:
output_blocks = [_make_mrkdown_block(str(blocks))]
if header:
output_blocks = [_make_header_block(header)] + output_blocks
return {
"blocks": output_blocks,
"response_type": "in_channel" if visible_in_channel else "ephemeral",
}
|
henryivesjones/slash-slack
|
slash_slack/blocks.py
|
blocks.py
|
py
| 1,823
|
python
|
en
|
code
| 2
|
github-code
|
6
|
37975869389
|
'''
Get completed data as Dataframe for charts
Calls to MongoDB for data
Test data is a separate module
'''
from datetime import datetime
# Fake session defaults: trial 123 started 30 days ago (Unix timestamp).
# BUG FIX: `datetime.now().timestamp` was missing its call parentheses,
# producing "bound method minus int" -> TypeError at import time.
session = {'defaults': {'trial': {"id": 123, 'start_date': datetime.now().timestamp() - (30 * 24 * 60 * 60)}}}
from test_data import get_test_data
from chart_frame import three_column_frame
# Column mapping for three_column_frame: grouping key, x axis, y axis.
test_struct={'group':{'name':'Trial_Id', 'label':'Trial_Id'},
             'x_col':{'name':'trial_day', 'label':'Day'},
             'y_col':{'name':'temp', 'label':'Temperature'}}
def test_chart_data():
    """Build the demo chart DataFrame from canned test data."""
    payload = get_test_data()
    return three_column_frame(payload, test_struct)
def env_data(trial_id, attribute):
    """Environmental time-series for one trial as a three-column frame.

    BUG FIX: the original assigned start_date/end_date but then used the
    undefined names start_time/end_time (NameError on first call); the
    names are now consistent. The dead code after the return statement
    (a leftover hand-built DataFrame) has been removed.

    NOTE(review): SUCCESS, MongoUtil, DB and OB_COL are not defined or
    imported in this module — presumably lost imports from an earlier
    version; confirm against the original project before use.
    """
    start_time = session["defaults"]['trial']["start_date"]
    end_time = datetime.now().timestamp()
    print("env_data", attribute, start_time, end_time)
    # Successful observations of this attribute within the trial window.
    match = {"$match": {
        "trial.id": trial_id,
        "status.status_qualifier": SUCCESS,
        "subject.attribute.name": attribute,
        "$and": [{"time.timestamp": {"$gt": start_time}},
                 {"time.timestamp": {"$lt": end_time}}]
    }}
    sort = {"$sort": {"time.timestamp": 1}}
    query = [match, sort]
    mu = MongoUtil(DB)
    recs = mu.aggregate(DB, OB_COL, query)
    # Local struct (the module-level test_struct is intentionally not reused:
    # the y column is the requested attribute).
    struct = {'group': {'name': 'Trial_Id', 'label': 'Trial_Id'},
              'x_col': {'name': 'trial_day', 'label': 'Day'},
              'y_col': {'name': attribute, 'label': 'Temperature'}}
    return three_column_frame(recs, struct)
def test():
    """Smoke test: build the demo chart frame and print it."""
    print("Test of test chart data")
    f = test_chart_data()
    print(f)
if __name__ == '__main__':
    test()
|
webbhm/MARSFarm-web_VX
|
gbet_charts/functions/hold/chart_data.py
|
chart_data.py
|
py
| 1,935
|
python
|
en
|
code
| 0
|
github-code
|
6
|
37301779528
|
n = int(input("n= "))
# Trial division: n is prime iff n >= 2 and no i in [2, n) divides it.
# BUG FIX: the original reported n == 2 as "not prime" — its loop never ran,
# so the sentinel k kept its initial value 666 (neither 0 nor 1) and the
# final k == 1 test failed. A boolean flag removes the sentinel entirely.
is_prime = n >= 2
i = 2
while i < n:
    if n % i == 0:
        is_prime = False
        break
    i += 1
if is_prime:
    print("Число просте")
else:
    print("Число не просте")
|
oly17/-
|
лб 1 1/2 завдан.py
|
2 завдан.py
|
py
| 211
|
python
|
ru
|
code
| 0
|
github-code
|
6
|
10887485994
|
def sekvencijalna_pretraga(niz, element):
    """Linear search: report where *element* first occurs in *niz*.

    Returns the element when found, otherwise None (also for an empty list);
    the outcome is printed either way.
    """
    if not niz:
        return print("Niz je prazan.")
    for index, value in enumerate(niz):
        if value == element:
            print("Element je pronadjen. Prva pojava elementa je na indeksu:", index)
            return element
    print("Trazeni element nije pronadjen.")
def binarna_pretraga(niz, element): # the list MUST be sorted ascending for this to work!
    """Binary search over an ascending sorted list.

    BUG FIX: the original used two independent `if` statements instead of
    if/elif, so whenever element < niz[sredina] the dangling `else` branch
    also fired and the function returned niz[sredina] — a false positive
    for any element smaller than the probed midpoint. The middle branch is
    now `elif`. Also returns None explicitly when the element is absent.

    NOTE(review): the success message claims "first occurrence", but binary
    search finds *an* occurrence, not necessarily the first, when
    duplicates exist.
    """
    if len(niz) == 0:
        return print("Niz je prazan.")
    pocetak = 0
    kraj = len(niz) - 1
    while pocetak <= kraj:
        # Integer midpoint (the length need not be even).
        sredina = (pocetak + kraj) // 2
        if element < niz[sredina]:
            kraj = sredina - 1
        elif element > niz[sredina]:
            pocetak = sredina + 1
        else:
            print("Element pronadjen. Prva pojava elementa na indeksu:", sredina)
            return niz[sredina]
    return None
# niz22 = [0, 11, 2, 56, 90, 4, 77, 32]
# print(sekvencijalna_pretraga(niz22, 56))
# niz22.sort()
# print(niz22)
# print(binarna_pretraga(niz22, 56))
|
marko-smiljanic/vezbanje-strukture-podataka
|
vezbanje-strukture-podataka/Domaci-PREDAVANJA/domaci4_pretrage/test_search.py
|
test_search.py
|
py
| 1,115
|
python
|
sr
|
code
| 0
|
github-code
|
6
|
36646568157
|
"""Evaluate explanation technique on the CLEVR XAI dataset.
This module computes the saliency maps for the relation network
and evaluates how well the explanation technique matches the
ground truth heatmaps.
"""
# from lrp_relations import enable_deterministic # noqa isort:skip
import dataclasses
import pickle
from typing import Optional, cast
import numpy as np
import pandas as pd
import savethat
import torch
from savethat import logger
from torch.utils.data import DataLoader
from tqdm import tqdm
from lrp_relations import data, lrp, train_clevr
from relation_network import model as rel_model
@dataclasses.dataclass(frozen=True)
class GroundTruthEvalArgs(savethat.Args):
    """Arguments for the CLEVR-XAI ground-truth evaluation run."""

    # Storage key of the trained relation-network run to evaluate.
    model_key: str
    # Root directory of the CLEVR / CLEVR-XAI dataset.
    dataset: str = "../data/clevr/CLEVR_v1.0/"
    question_type: str = "simple"  # "simple" or "complex"
    ground_truth: str = "single_object"  # "single_object" or "all_objects"
    n_samples: int = -1  # -1 for all samples
    # Chosen once at import time; falls back to CPU when CUDA is unavailable.
    device: str = "cuda" if torch.cuda.is_available() else "cpu"
    batch_size: int = 50
    # "best" loads the last checkpoint recorded in results.pickle;
    # any other value is used as a checkpoint filename.
    checkpoint: str = "best"
@dataclasses.dataclass(frozen=True)
class GroundTruthEvalResults:
    """Per-sample evaluation metrics plus model-correctness flags."""

    relevance_mass: torch.Tensor
    relevance_rank_accuracy: torch.Tensor
    correct: torch.Tensor

    def as_dataframe(self) -> pd.DataFrame:
        """Tabulate the two per-sample metrics as dataframe columns."""
        columns = {
            "relevance_mass": self.relevance_mass.cpu().numpy(),
            "relevance_rank_accuracy": self.relevance_rank_accuracy.cpu().numpy(),
        }
        return pd.DataFrame(columns)

    def accuracy(self) -> float:
        """Fraction of samples the model classified correctly."""
        return torch.mean(self.correct.float()).item()
def load_model(
    storage: savethat.Storage,
    key: str,
    checkpoint: str = "best",
    map_location: Optional[torch.device] = None,
) -> tuple[rel_model.RelationNetworks, train_clevr.TrainArgs]:
    """Load a trained relation network and its training args from storage.

    Args:
        storage: Storage to load the model from.
        key: Key (run id) of the model to load.
        checkpoint: "best" selects the last checkpoint listed in
            results.pickle; any other value is used as a checkpoint filename.
        map_location: Device mapping forwarded to ``torch.load``.

    Returns:
        Tuple of (model with weights loaded, the run's TrainArgs).
    """
    # Fetch the run locally first if it has not been downloaded yet.
    if not (storage / key).exists():
        storage.download(key)
    if checkpoint == "best":
        # NOTE(review): "best" takes the *last* checkpoint in the results
        # list -- confirm checkpoints are ordered so the last one is best.
        with open(storage / key / "results.pickle", "rb") as f:
            results = cast(train_clevr.TrainedModel, pickle.load(f))
        ckpt = results.checkpoints[-1]
        ckpt_path = storage / key / "checkpoints" / ckpt.path
        logger.debug(
            f"Loading model with accuracy {ckpt.accuracy:.4f} from {ckpt_path}"
        )
    else:
        ckpt_path = storage / key / "checkpoints" / checkpoint
        logger.debug(f"Loading model from {ckpt_path}")
    # Rebuild the architecture, then restore the trained weights.
    model = rel_model.RelationNetworks(data.get_n_words())
    model.load_state_dict(torch.load(ckpt_path, map_location=map_location))
    args = train_clevr.TrainArgs.from_json(storage / key / "args.json")
    return model, args
def relevance_mass(
    saliency: torch.Tensor,
    mask: torch.Tensor,
    reduce: tuple[int, ...] = (1, 2, 3),
) -> torch.Tensor:
    """Fraction of the total saliency that falls inside the mask.

    Args:
        saliency: Saliency map.
        mask: Binary mask weighting the in-region saliency.
        reduce: Dimensions summed over (everything but the batch dim).

    Returns:
        Per-sample ratio of in-mask saliency to total saliency.
    """
    inside = (saliency * mask).sum(dim=reduce)
    overall = saliency.sum(dim=reduce)
    return inside / overall
def l2_norm_sq(x: torch.Tensor, dim: int = 1) -> torch.Tensor:
    """Squared Euclidean norm of ``x`` along ``dim`` (the dim is kept).

    Args:
        x: Input tensor.
        dim: Dimension to reduce over.

    Returns:
        Tensor of squared L2 norms with ``dim`` kept as size 1.
    """
    return x.pow(2).sum(dim=dim, keepdim=True)
def max_norm(x: torch.Tensor, dim: int = 1) -> torch.Tensor:
    """Maximum absolute value (infinity norm) of ``x`` along ``dim``.

    Args:
        x: Tensor to compute the max norm of.
        dim: Dimension to reduce over (kept as size 1).

    Returns:
        Tensor of per-slice maximum absolute values.
    """
    # Renamed the result variable: the original shadowed the builtin `max`.
    largest, _ = x.abs().max(dim, keepdim=True)
    return largest
# -----------------------------------------------------------------------------
# Functions for computing the relevance rank accuracy
# copied from IBA code ;)
def to_index_map(hmap: np.ndarray) -> np.ndarray:
    """Return a heatmap, in which every pixel has its value-index as value.

    The highest-valued pixel maps to 0, the next to -1, and so on.
    """
    order_map = np.zeros_like(hmap, dtype=np.int64)
    for i, idx in enumerate(to_index_list(hmap)):
        order_map[idx] = -i
    return order_map


def to_index_list(
    hmap: np.ndarray, reverse: bool = False
) -> list[tuple[np.ndarray]]:
    """Return the list of indices that would sort this map.

    Sorting order is highest pixel first, lowest last (flipped if reverse).
    """
    order = np.argsort((hmap if reverse else -hmap).ravel())
    indices = np.unravel_index(order, hmap.shape)  # one index array per axis
    indices_trans = np.transpose(np.stack(indices))
    return [tuple(i) for i in np.stack(indices_trans)]  # type: ignore


def get_ration_in_mask(heatmap: np.ndarray, mask: np.ndarray) -> float:
    """Relevance rank accuracy: fraction of the top-k heatmap pixels that lie
    inside the binary mask, where k is the number of mask pixels.
    """
    if mask.ndim != 2:
        raise ValueError("Expected 2 dimensions")
    if heatmap.ndim != 2:
        raise ValueError("Expected 2 dimensions")
    if mask.shape != heatmap.shape:
        raise ValueError("Shapes must match")
    heatmap_idxs = to_index_map(heatmap).astype(np.int64)
    mask_np = mask > 0.5
    # Push every out-of-mask pixel to the lowest rank so it is never counted
    # among the top-k.
    heatmap_bbox_idxs = heatmap_idxs.copy()
    heatmap_bbox_idxs[mask_np == 0] = heatmap_idxs.min()
    n_pixel_in_mask = mask_np.sum()
    # BUG FIX: n_pixel_in_mask is already a scalar count; the previous code
    # divided by n_pixel_in_mask.sum(), a redundant no-op reduction.
    return float(
        (heatmap_bbox_idxs > (-n_pixel_in_mask)).sum() / n_pixel_in_mask
    )
class GroundTruthEval(
    savethat.Node[GroundTruthEvalArgs, GroundTruthEvalResults]
):
    """Compute LRP saliency maps on CLEVR-XAI and score them against the
    ground-truth masks (relevance mass + relevance rank accuracy)."""

    def _run(self):
        """Load the model, iterate the CLEVR-XAI loader, and collect
        per-sample metrics; prints summary statistics before returning."""
        device = torch.device(self.args.device)
        model, model_args = load_model(
            self.storage,
            self.args.model_key,
            self.args.checkpoint,
            map_location=device,
        )
        model.to(device)
        # LRP wrapper exposing saliency computation for the relation network.
        lrp_model = lrp.LRPViewOfRelationNetwork(model)
        lrp_model.to(device)
        dataset = data.CLEVR_XAI(
            self.args.dataset,
            self.args.question_type,
            self.args.ground_truth,
            model_args.reverse_question,
            use_preprocessed=True,
        )
        # -1 means "evaluate the whole dataset".
        if self.args.n_samples == -1:
            n_samples = len(dataset)
        else:
            n_samples = self.args.n_samples
        loader = DataLoader(
            dataset,
            batch_size=self.args.batch_size,
            shuffle=False,
            num_workers=1,
            pin_memory=False,
            collate_fn=dataset.collate_data,
        )
        # mse = 0
        pbar = tqdm(loader)
        rel_mass = []  # per-sample relevance mass
        rel_rank = []  # per-sample relevance rank accuracy
        total_samples = 0
        correct = []  # per-sample model-correctness flags
        for i, (image, question, q_len, answer, gt) in enumerate(pbar):
            # Stop after roughly n_samples (rounded up to whole batches).
            if i > n_samples // self.args.batch_size:
                break
            image, question, answer, gt = (
                image.to(device),
                question.to(device),
                answer.to(device),
                gt.to(device),
            )
            # Unnormalized saliency w.r.t. the ground-truth answer.
            saliency, logits = lrp_model.get_lrp_saliency_and_logits(
                image,
                question,
                q_len,
                target=answer,
                normalize=False,
            )
            correct.append((logits.argmax(1) == answer).cpu())
            rel_mass.append(
                relevance_mass(l2_norm_sq(saliency), gt).detach().cpu()
            )
            # Rank accuracy is computed per sample on 2D maps; channels are
            # reduced with the max norm first.
            ranks = [
                get_ration_in_mask(
                    max_norm(s, dim=0).cpu().detach().numpy()[0],
                    gt_mask.cpu().detach().numpy()[0],
                )
                for gt_mask, s in zip(gt, saliency)
            ]
            rel_rank.append(torch.tensor(ranks).cpu())
            total_samples += len(image)
        res = GroundTruthEvalResults(
            relevance_mass=torch.cat(rel_mass),
            relevance_rank_accuracy=torch.cat(rel_rank),
            correct=torch.cat(correct),
        )
        print("-" * 80)
        print(f"Statistics on {total_samples} samples:")
        print(res.as_dataframe().describe())
        print("-" * 80)
        print(f"Accuracy: {res.accuracy():.4f}")
        print("-" * 80)
        return res
|
berleon/A-Rigorous-Study-Of-The-Deep-Taylor-Decomposition
|
lrp_relations/gt_eval.py
|
gt_eval.py
|
py
| 8,038
|
python
|
en
|
code
| 5
|
github-code
|
6
|
24199854767
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 23 09:56:39 2019
@author: Administrator
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def rotateRight(self, head: ListNode, k: int) -> ListNode:
        """Rotate the list right by k places by closing it into a ring and
        cutting it open again at the new tail."""
        if not head:
            return
        length, tail = self.list_length(head)
        k = k % length  # rotating by the length is a no-op
        new_tail = head
        for _ in range(length - k - 1):
            new_tail = new_tail.next
        tail.next = head        # close the ring
        head = new_tail.next    # new head sits right after the new tail
        new_tail.next = None    # cut the ring open
        return head

    def list_length(self, head):
        """Return (node count, last node) of the list."""
        count = 0
        node = head
        while node and node.next:
            count += 1
            node = node.next
        return count + 1, node
|
AiZhanghan/Leetcode
|
code/61. Rotate List.py
|
61. Rotate List.py
|
py
| 836
|
python
|
en
|
code
| 0
|
github-code
|
6
|
2169470310
|
import psycopg
from matplotlib import pyplot as plt
import numpy as np
import datetime
# Placeholder credentials -- replace with real values; do not commit secrets.
DB_NAME = "########"
DB_USER = "########"
DB_PASSWORD = "########"

# Module-level connection: importing this test module requires a reachable
# database because the connection is opened at import time.
conn = psycopg.connect(
    dbname=DB_NAME,
    user=DB_USER,
    password=DB_PASSWORD)
def LoadQuery(fileName):
    """Return the text of tools/Queries/<fileName>.sql.

    BUG FIX: the original called .read() on a bare open(), leaking the file
    handle; the context manager guarantees it is closed.
    """
    with open(f"tools/Queries/{fileName}.sql", "r") as query_file:
        return query_file.read()
def test_register_date_karma_hist(capsys):
    """Plot a stacked per-year histogram (2006-2023) of user registration
    dates, split by karma sign, and save it as a PNG.

    The four copy-pasted query/fetch/histogram sections were folded into one
    helper; the per-query row counts are still printed in the original order
    (all, negative, positive, zero).
    """
    registerDate = LoadQuery("registerDate")
    registerDatePositiveKarma = LoadQuery("registerDatePositiveKarma")
    registerDateZeroKarma = LoadQuery("registerDateZeroKarma")
    registerDateNegativeKarma = LoadQuery("registerDateNegativeKarma")

    def year_histogram(cur, query):
        # Run the query, print the row count, and bin the registration
        # timestamps into 18 one-year buckets between 2006 and 2024.
        cur.execute(query)
        res = cur.fetchall()
        print(len(res))
        stamps = np.array([t[0].timestamp() for t in res])
        span = (datetime.datetime(2006, 1, 1).timestamp(),
                datetime.datetime(2024, 1, 1).timestamp())
        counts, _ = np.histogram(stamps, 18, span)
        return counts

    with capsys.disabled():
        with conn.cursor() as cur:
            fig, ax1 = plt.subplots()
            years = [x for x in range(2006, 2025)]
            plt.xticks(range(2006, 2025, 2))
            hist_all = year_histogram(cur, registerDate)
            bottom = np.zeros(len(hist_all))
            # Stack the three karma groups bottom-up, in the original order.
            for query, color in (
                (registerDateNegativeKarma, (1, 0, 0, 0.5)),
                (registerDatePositiveKarma, (0, 1, 0, 0.5)),
                (registerDateZeroKarma, (0, 0, 1, 0.5)),
            ):
                counts = year_histogram(cur, query)
                ax1.bar(years[:-1], counts, width=np.diff(years),
                        align="edge", color=color, bottom=bottom)
                bottom += counts
            # Transparent bars with black edges outline the total distribution.
            ax1.bar(years[:-1], hist_all, width=np.diff(years),
                    edgecolor="black", align="edge", color=(0, 0, 1, 0))
            fig.tight_layout()
            # Plain string: the old f-string had no placeholders.
            plt.savefig(fname="./pics/analitics/register_date_karma_distr.png",
                        bbox_inches='tight', dpi=200)
|
AS2/habrolink
|
tools/Analitics/test_register_karma_hist.py
|
test_register_karma_hist.py
|
py
| 2,940
|
python
|
en
|
code
| 0
|
github-code
|
6
|
71192689147
|
import tensorflow.examples.tutorials.mnist.input_data as input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
######################################## Model construction ####################################
# TF1-style graph API is used throughout.
import tensorflow as tf
test_images=mnist.test.images;
test_labels=mnist.test.labels;
# x is a placeholder fed at run time; None lets the batch dimension vary.
x = tf.placeholder(tf.float32, [None, 784])
# Variables are mutable tensors living in the graph; they hold the learned
# parameters and start at zero.
# W is [784, 10]: multiplying a 784-dim image vector by W yields a 10-dim
# evidence vector, one entry per digit class; b is the per-class bias.
W = tf.Variable(tf.zeros([784,10]))
b = tf.Variable(tf.zeros([10]))
# Softmax regression model: predicted class distribution for each input.
y = tf.nn.softmax(tf.matmul(x,W) + b)
# Placeholder for the true one-hot labels (the target distribution).
y_ = tf.placeholder("float", [None,10])
# Loss: cross-entropy between the true labels y_ and the prediction y.
cross_entropy = -tf.reduce_sum(y_*tf.log(y))
# Minimize the cross-entropy with plain gradient descent (learning rate 0.01).
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)
##################################### Model definition complete ###########################
# Variable initializer op.
init = tf.global_variables_initializer()
# Launch the model in a Session and run the initializer.
sess = tf.Session()
sess.run(init)
############################### Model evaluation (test set) ###################################
# tf.argmax(y,1) is the predicted label and tf.argmax(y_,1) the true label;
# tf.equal marks where the prediction matches the truth.
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
# Cast the booleans to floats and average to get the accuracy,
# e.g. [True, False, True, True] -> [1,0,1,1] -> 0.75.
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
############################## Training ##############################################
# Stochastic training: each step samples a random batch of 100 examples and
# substitutes them into the placeholders.
for i in range(5000): # range(x): number of training steps
    batch_xs, batch_ys = mnist.train.next_batch(100) # next_batch returns a (images, labels) tuple of batch_size
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys}) # feed_dict binds x and y_
    if i%50==0:
        print('step', i, 'accurcy', sess.run(accuracy, feed_dict={x: batch_xs, y_: batch_ys})) # accuracy on the current batch
print(sess.run(accuracy, feed_dict={x: test_images, y_: test_labels})) # accuracy on the full test set
|
lyx997/Python-learning
|
softmax + CNN/ceshi.py
|
ceshi.py
|
py
| 3,894
|
python
|
zh
|
code
| 0
|
github-code
|
6
|
42483897749
|
import networkx as nx
def hierarchical_layout(G: nx.Graph) -> tuple:
    """Create a dictionary of node positions with hierarchical arrangement.

    The graph is assumed to contain a root node 'CA' and two section nodes
    '01' (loan side, drawn at x < 0) and '02' (non-loan side, x > 0); nodes
    at hop distance 2/3/4 from 'CA' form the agg / sub1 / sub2 levels.

    Parameters:
    -----------
    G: nx.Graph
        NetworkX Graph object

    Returns:
        (float, float, dict)
        Canvas sizes for the loan and non-loan halves, and a dictionary with
        the node as key and the (x, y) position tuple as value.
    """
    # Record shortest distance between node pairs (to evaluate hierarchy levels)
    spl = dict(nx.all_pairs_shortest_path_length(G))
    # Bucket nodes into agg, sub1, sub2 levels by their distance from 'CA'
    agg = []
    sub1 = []
    sub2 = []
    for node in list(G.nodes()):
        if spl['CA'][node] == 2:
            agg.append(node)
        elif spl['CA'][node] == 3:
            sub1.append(node)
        elif spl['CA'][node] == 4:
            sub2.append(node)
    # Attribute each node to the loan or non-loan side (whichever section
    # node it is closer to)
    loan = []
    non_loan = []
    for node in list(G.nodes()):
        if spl['01'][node] > spl['02'][node]:
            non_loan.append(node)
        else:
            loan.append(node)
    # Resize canvas based on how many nodes are present (loan)
    largest_row_loan = max(
        [
            len([x for x in agg if x in loan]),
            len([x for x in sub1 if x in loan]),
            len([x for x in sub2 if x in loan]),
        ]
    )
    if largest_row_loan > 4:
        canvas_loan_size = 0.25 * largest_row_loan
    else:
        canvas_loan_size = 1
    # Resize canvas based on how many nodes are present (non-loan)
    largest_row_nonloan = max(
        [
            len([x for x in agg if x in non_loan]),
            len([x for x in sub1 if x in non_loan]),
            len([x for x in sub2 if x in non_loan]),
        ]
    )
    print(f'Max row NON-LOAN: {largest_row_nonloan}')
    if largest_row_nonloan > 4:
        # BUG FIX: this previously scaled by largest_row_loan (copy-paste
        # from the loan branch), sizing the non-loan canvas from the wrong
        # side's widest row.
        canvas_nonloan_size = 0.3 * largest_row_nonloan
    else:
        canvas_nonloan_size = 1
    # Define canvas size for lower levels
    # NOTE(review): these divisions raise ZeroDivisionError when a level is
    # empty on one side -- confirm the input graph always populates all levels.
    canvas_loan_size_sub1 = canvas_loan_size / len([x for x in agg if x in loan])
    canvas_loan_size_sub2 = canvas_loan_size / len([x for x in sub1 if x in loan])
    canvas_nonloan_size_sub1 = canvas_nonloan_size / len([x for x in agg if x in non_loan])
    canvas_nonloan_size_sub2 = canvas_nonloan_size / len([x for x in sub1 if x in non_loan])
    # Assign x, y coordinates to nodes
    agg_loan_iter = 0
    agg_nonloan_iter = 0
    position = {}
    # CA at the top, section nodes below it, agg-fac nodes spread along y=0
    for node in list(G.nodes()):
        if node == 'CA':
            x, y = 0, 0.8
        elif node == '01':
            x, y = -0.5, 0.4
        elif node == '02':
            x, y = 0.5, 0.4
        else:
            if node in loan:
                if node in agg:
                    x = - (0.5 + agg_loan_iter) * canvas_loan_size / len([x for x in agg if x in loan])
                    y = 0
                    agg_loan_iter += 1
            elif node in non_loan:
                if node in agg:
                    x = (0.5 + agg_nonloan_iter) * canvas_nonloan_size / len([x for x in agg if x in non_loan])
                    y = 0
                    agg_nonloan_iter += 1
        position[node] = (x, y)
    # sub-fac 1: children of each agg node centered under their parent
    for node in [x for x in agg if x in loan]:
        sub1_loan_iter = 0
        children = [y for y in G.neighbors(node) if y in sub1]
        for child in children:
            x0 = position[node][0]
            x = (x0 + 0.5 * canvas_loan_size_sub1) - (0.5 + sub1_loan_iter) * canvas_loan_size_sub1 / len(children)
            y = - 0.4
            sub1_loan_iter += 1
            position[child] = (x, y)
    for node in [x for x in agg if x in non_loan]:
        sub1_nonloan_iter = 0
        children = [y for y in G.neighbors(node) if y in sub1]
        for child in children:
            x0 = position[node][0]
            x = (x0 - 0.5 * canvas_nonloan_size_sub1) + (0.5 + sub1_nonloan_iter) * canvas_nonloan_size_sub1 / len(children)
            y = - 0.4
            sub1_nonloan_iter += 1
            position[child] = (x, y)
    # sub-fac 2: children of each sub1 node centered under their parent
    for node in [x for x in sub1 if x in loan]:
        sub2_loan_iter = 0
        children = [y for y in G.neighbors(node) if y in sub2]
        for child in children:
            x0 = position[node][0]
            x = (x0 + 0.5 * canvas_loan_size_sub2) - (0.5 + sub2_loan_iter) * canvas_loan_size_sub2 / len(children)
            y = - 0.8
            sub2_loan_iter += 1
            position[child] = (x, y)
    for node in [x for x in sub1 if x in non_loan]:
        sub2_nonloan_iter = 0
        children = [y for y in G.neighbors(node) if y in sub2]
        for child in children:
            x0 = position[node][0]
            x = (x0 - 0.5 * canvas_nonloan_size_sub2) + (0.5 + sub2_nonloan_iter) * canvas_nonloan_size_sub2 / len(children)
            y = - 0.8
            sub2_nonloan_iter += 1
            position[child] = (x, y)
    return canvas_loan_size, canvas_nonloan_size, position
|
diegopintossi/graph_network
|
custom_hierarchical_layout.py
|
custom_hierarchical_layout.py
|
py
| 6,468
|
python
|
en
|
code
| 0
|
github-code
|
6
|
22759981162
|
from fastapi import FastAPI
from fastapi.exceptions import RequestValidationError
from starlette.requests import Request
from starlette.responses import JSONResponse
from app import config as settings
from app.api.dependency import init_model
from app.api.v1.endpoint import router
from app.exceptions import APIException, APIExceptionErrorCodes, APIExceptionTypes
app = FastAPI(title=settings.PROJECT_NAME)
app.include_router(router)


@app.on_event("startup")
async def startup_event():
    # Warm the ML model once at process start so the first request is fast.
    init_model()


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
    """Map FastAPI/pydantic validation failures onto the project's error schema."""
    return JSONResponse(
        status_code=APIExceptionErrorCodes.SCHEMA_ERROR[1],
        content={
            "error": {
                "message": "schema error. please refer to data for details",
                "type": APIExceptionTypes.DATA_VALIDATION,
                "code": APIExceptionErrorCodes.SCHEMA_ERROR[0],
                "data": exc.errors(),
            }
        },
    )


@app.exception_handler(APIException)
async def api_exception_handler(request: Request, exc: APIException) -> JSONResponse:
    """Serialize project APIException instances using their own payload and status."""
    return JSONResponse(status_code=exc.status_code, content=exc.get_exception_content().dict())
|
SW13-Monstera/AI-server
|
app/main.py
|
main.py
|
py
| 1,277
|
python
|
en
|
code
| 5
|
github-code
|
6
|
17510462413
|
'''
start: 1:29pm
end: 1:40pm
nums
2 ways:
1st way:
-sort it
-and then use 2 pointers from opposite sides to find the twosum hitting target
-complexity: O(n lg n). S: O(n). timsort
2nd way:
run trhough the nums, if target - val is in lookup. return val and target - val
T: O(n). S: O(n)
constraints:
'''
class Solution:
    def twoSum(self, nums: List[int], target: int) -> List[int]:
        """One-pass hash lookup: return (current index, earlier index) of the
        two entries summing to target."""
        seen = {}
        for index, value in enumerate(nums):
            complement = target - value
            if complement in seen:
                return index, seen[complement]
            seen[value] = index
|
soji-omiwade/cs
|
dsa/before_rubrik/twosum_20210627.py
|
twosum_20210627.py
|
py
| 577
|
python
|
en
|
code
| 0
|
github-code
|
6
|
6767301700
|
import json
from flask import Flask, request, Response
app = Flask(__name__)

# Fields every POSTed table payload must carry.
required_fields = ['title', 'topics', 'ownerId', 'locationString']

# In-memory seed data standing in for a real datastore.
tables = [
    {
        "tableId": 1,
        "title": "TableC @ BostonHacks",
        "topics": ["#masseffect", "#typescript", "#rickandmorty"],
        "ownerId": 42,
        "locationString": "Metcalf Hall, upstairs",
    },
    {
        "tableId": 2,
        "title": "Spline Reticulation",
        "topics": ["#anything", "#anime"],
        "ownerId": 69,
        "locationString": "Sherman Gallery"
    }
]

# Next tableId to assign (the seed data already uses 1 and 2).
counter = 3
@app.route('/api/v1/tables', methods=['GET', 'POST'])
def table_resource():
    """GET lists all tables as JSON; POST inserts one and answers with a bare
    status code (400 on missing fields, 204 on success)."""
    if request.method == 'GET':
        return Response(json.dumps(tables), mimetype='application/json')
    elif request.method == 'POST':
        # BUG FIX: insert_into_dict returns an HTTP status code (int); the
        # old code passed it as the Response *body*, which Flask rejects.
        return Response(status=insert_into_dict(request.json))
def insert_into_dict(new_table: dict):
    """Validate and append new_table, assigning it the next tableId.

    Returns 400 when a required field is missing, otherwise 204.
    """
    if any(field not in new_table for field in required_fields):
        return 400
    global counter
    new_table['tableId'] = counter
    tables.append(new_table)
    counter += 1
    return 204
if __name__ == '__main__':
    # Dev server only: binds all interfaces on port 8069 with the debugger on.
    app.run('0.0.0.0', 8069, debug=True)
|
shawnrc/hackascraps_bu17
|
dummy_api.py
|
dummy_api.py
|
py
| 1,198
|
python
|
en
|
code
| 0
|
github-code
|
6
|
30763371491
|
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 28 13:39:19 2016
@author: Shahidur Rahman
"""
#import numpy as np;
#list declaration
#a_list = []
#b_list = []
#numpy array declaration
#left = np.array([])
#right = np.array([])
#convert the list to numpy array
#a = np.array(a_list)
#b = np.array(b_list)
#call the explorer library
import explorers
import stringRecorder
import pandas
from sqlalchemy import create_engine
import random
from mmh3 import hash128
i=0
#create sql connection
# NOTE(review): credentials are hard-coded in the URL; move to config/env.
engine = create_engine('mysql+pymysql://root:shahidur_123@localhost:3306/mwt')
#open file and read (each line looks like "<label part>|<feature part>")
f = open(r"D:\Work\MWT\Data\VW_raw\rcv1.train.raw.txt")
try:
    for line in f:
        # l: text before the first '|'; r: text after it (used as context).
        l=line.split("|")[0]
        r=line.split("|")[1]
        #a_list.append(l)
        #b_list.append(r)
        i=i+1
        print(i)
        #exploration parameter drawn uniformly per line
        epsilon = round(random.random(),3)
        #print('\n'+'mwt : epsilon'+str(epsilon))
        #unique key generation
        # NOTE(review): the hash input and seed are constant, so unique_key
        # is identical for every line -- confirm this is intended.
        unique_key =hash128('my string of doom ', seed=1234)
        #print('mwt : unique_key '+str(unique_key))
        #number of actions registered
        noOfActions = 3
        #print('mwt : noOfActions : '+str(noOfActions))
        ######################################################
        #space for the policy action called
        #to get the actionID for default policy
        policyDecision = 3
        #print('mwt : policyDecision : '+str(policyDecision))
        scores = [1,2,3,4,5,6,7,8,9,10]
        #for j in scores:
        #    print('mwt : scores : '+str(j))
        ######################################################
        #print('mwt context : '+i)
        callExplorer = explorers.explorers(epsilon,noOfActions,policyDecision,scores)
        storeValues = callExplorer.algoSelection()
        #print('storeValues : '+str(storeValues))
        record = stringRecorder.stringRecorder(r, storeValues['actionID'], storeValues['actionProbability'], unique_key, storeValues['isExplore'], epsilon, noOfActions,policyDecision,storeValues['explorerAlgo'])
        record=record.sewStrings()
        #print('record : '+str(record))
        #build a one-row DataFrame with the recorded fields as columns
        #print('connection built')
        colList="context,actionID,actionProbability,unique_key,isExplore,epsilon,noOfActions,policyDecision,explorerAlgo".split(',')
        c1=['col1']
        df = pandas.DataFrame(data=record,index=colList)
        df=df.T
        #print("printing panda df here")
        #print(df)
        #push data in sql
        df.to_sql(con=engine, name='stringrecord', if_exists='append',index=False)
#close the opened file
finally:
    f.close()
|
skshahidur/nlp_paper_implementation
|
Word-Embedding/mwt.py
|
mwt.py
|
py
| 2,843
|
python
|
en
|
code
| 0
|
github-code
|
6
|
22218957716
|
from director.consoleapp import ConsoleApp
from director import mainwindowapp
from director import affordancemanager
from director import affordanceitems
from director import affordanceurdf
from director import affordancepanel
from director import objectmodel as om
from director import visualization as vis
from director import pointpicker
from director import viewbehaviors
from director.debugVis import DebugData
from PythonQt import QtCore, QtGui
def newBox():
    """Create a demo box affordance at a fixed pose."""
    description = {
        'classname': 'BoxAffordanceItem',
        'Name': 'test box',
        'Dimensions': [0.5, 0.2, 0.1],
        'pose': ((0.5, 0.0, 1.0), (1, 0, 0, 0)),
    }
    return affordanceManager.newAffordanceFromDescription(description)
def newSphere():
    """Create a demo sphere affordance at a fixed pose."""
    description = {
        'classname': 'SphereAffordanceItem',
        'Name': 'test sphere',
        'Radius': 0.2,
        'pose': ((0.5, 0.0, 1.0), (1, 0, 0, 0)),
    }
    return affordanceManager.newAffordanceFromDescription(description)
def newCylinder():
    """Create a demo cylinder affordance at a fixed pose."""
    description = {
        'classname': 'CylinderAffordanceItem',
        'Name': 'test cylinder',
        'Radius': 0.05,
        'Length': 0.5,
        'pose': ((0.5, 0.0, 1.0), (1, 0, 0, 0)),
    }
    return affordanceManager.newAffordanceFromDescription(description)
def newCapsule():
    """Create a demo capsule affordance at a fixed pose."""
    description = {
        'classname': 'CapsuleAffordanceItem',
        'Name': 'test capsule',
        'Radius': 0.05,
        'Length': 0.5,
        'pose': ((0.5, 0.0, 1.0), (1, 0, 0, 0)),
    }
    return affordanceManager.newAffordanceFromDescription(description)
def newCapsuleRing():
    """Create a demo capsule-ring affordance at a fixed pose."""
    description = {
        'classname': 'CapsuleRingAffordanceItem',
        'Name': 'test capsule ring',
        'pose': ((0.5, 0.0, 1.0), (1, 0, 0, 0)),
    }
    return affordanceManager.newAffordanceFromDescription(description)
def newMesh():
    """Create a demo mesh affordance (an arrow) registered with the mesh manager."""
    builder = DebugData()
    builder.addArrow((0, 0, 0), (0, 0, 0.3))
    poly_data = builder.getPolyData()
    mesh_id = affordanceitems.MeshAffordanceItem.getMeshManager().add(poly_data)
    description = {
        'classname': 'MeshAffordanceItem',
        'Name': 'test mesh',
        'Filename': mesh_id,
        'pose': ((0.5, 0.0, 1.0), (1, 0, 0, 0)),
    }
    return affordanceManager.newAffordanceFromDescription(description)
def testAffordanceToUrdf():
    """Spawn one affordance of each primitive type and print the merged URDF."""
    factories = (newSphere, newBox, newCylinder, newCapsule, newMesh)
    affs = [make() for make in factories]
    print(affordanceurdf.urdfStringFromAffordances(affs))
def printAffordanceUrdf():
    """Print the URDF string for every affordance currently in the manager."""
    print(affordanceurdf.urdfStringFromAffordances(affordanceManager.getAffordances()))
def onAffordancePick(objs):
    # Object-picker callback: attach the previously chosen affordance
    # (affordanceToAdd) to the frame of the newly picked parent object.
    obj = objs[0]
    if obj == affordanceToAdd:
        return
    print(affordanceToAdd.getProperty('Name'))
    print(obj.getProperty('Name'))
    frameSync = obj.getChildFrame().getFrameSync()
    frameSync.addFrame(affordanceToAdd.getChildFrame(), ignoreIncoming=True)
def getAffordanceContextMenuActions(view, pickedObj, pickedPoint):
    # Only affordances get the extra context-menu entries.
    if pickedObj not in affordanceManager.getAffordances():
        return []
    # Remember the picked affordance so onAffordancePick can attach it later.
    global affordanceToAdd
    affordanceToAdd = pickedObj
    def onSelectAffordanceParent():
        # Starts an interactive pick; completion is handled in onAffordancePick.
        objectPicker.start()
    actions = [
        (None, None),
        ('Select parent...', onSelectAffordanceParent),
    ]
    return actions
# Register the affordance context-menu hook before the app is constructed.
viewbehaviors.registerContextMenuActions(getAffordanceContextMenuActions)

app = mainwindowapp.construct()
view = app.view
affordanceManager = affordancemanager.AffordanceObjectModelManager(view)

# When LCM collections are enabled, ask peers to re-broadcast existing state.
if affordancemanager.lcmobjectcollection.USE_LCM:
    affordanceitems.MeshAffordanceItem.getMeshManager().collection.sendEchoRequest()
    affordanceManager.collection.sendEchoRequest()

# Picker used by the "Select parent..." context-menu action.
objectPicker = pointpicker.ObjectPicker(view=view, callback=onAffordancePick, getObjectsFunction=affordanceManager.getAffordances)

# Affordance panel docked on the right, with an extra "Print URDF" button.
panel = affordancepanel.AffordancePanel(view, affordanceManager)
dock = app.app.addWidgetToDock(panel.widget, QtCore.Qt.RightDockWidgetArea)
printButton = QtGui.QPushButton('Print URDF')
printButton.connect('clicked()', printAffordanceUrdf)
panel.ui.spawnTab.layout().addWidget(printButton)

app.app.start()
|
RobotLocomotion/director
|
src/python/tests/testAffordancePanel.py
|
testAffordancePanel.py
|
py
| 3,734
|
python
|
en
|
code
| 176
|
github-code
|
6
|
20420593181
|
from confluent_kafka.admin import AdminClient, NewTopic

# Topic for the image-processing pipeline and the admin client identity.
topic = 'Kafka_Image_Processing'
client_id = "admin_hagar"
# Three bootstrap brokers for the cluster.
conf = {'bootstrap.servers': "34.70.120.136:9094,35.202.98.23:9094,34.133.105.230:9094",
        'client.id': client_id}
ac = AdminClient(conf)
# create_topics is asynchronous: it returns a dict of topic -> future.
res = ac.create_topics([NewTopic(topic, num_partitions=3, replication_factor=2)])
# Block until the broker confirms creation (raises if it fails, e.g. topic exists).
res[topic].result()
|
HagarIbrahiem/Kafka_ImgProcessing
|
admin.py
|
admin.py
|
py
| 373
|
python
|
en
|
code
| 0
|
github-code
|
6
|
72650319227
|
#
# @lc app=leetcode id=455 lang=python3
#
# [455] Assign Cookies
#
# @lc code=start
class Solution:
    def findContentChildren(self, g: List[int], s: List[int]) -> int:
        """Greedy from the top: hand the largest remaining cookie to the
        greediest remaining child; count how many children end up content."""
        g.sort()
        s.sort()
        child = 1    # 1-based offset from the end of g (greediest first)
        cookie = 1   # 1-based offset from the end of s (largest first)
        satisfied = 0
        while child <= len(g) and cookie <= len(s):
            if g[-child] <= s[-cookie]:
                satisfied += 1
                child += 1
                cookie += 1
            else:
                child += 1
        return satisfied
# @lc code=end
|
hieun314/leetcode_NguyenKimHieu
|
455.assign-cookies.py
|
455.assign-cookies.py
|
py
| 428
|
python
|
en
|
code
| 0
|
github-code
|
6
|
21725267349
|
# Python program to find the largest number formed by concatenating the
# array elements.
def biggestNumber(array, length):
    """Print and return the largest number obtainable by concatenating
    the elements of `array` (as a string).

    Each element is keyed by its digits repeated to a fixed common width,
    which orders values by concatenation preference. The `length` parameter
    is unused and kept only for backward compatibility.

    Fixes over the original: stray semicolon removed, the ambiguous local
    name `l` renamed, and the result is now returned as well as printed.
    """
    width = len(str(max(array))) + 1  # common key width covers every element
    keyed = [((str(value) * width)[:width], value) for value in array]
    keyed.sort(reverse=True)
    ans = "".join(str(value) for _, value in keyed)
    print(ans)
    return ans


a = [1, 34, 3, 98, 9, 76,
     45, 4, 12, 121]
biggestNumber(a, len(a))
|
ItsSamarth/ds-python
|
DataStructures/array/biggestNumber.py
|
biggestNumber.py
|
py
| 415
|
python
|
en
|
code
| 0
|
github-code
|
6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.