blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 5 133 | path stringlengths 2 333 | src_encoding stringclasses 30 values | length_bytes int64 18 5.47M | score float64 2.52 5.81 | int_score int64 3 5 | detected_licenses listlengths 0 67 | license_type stringclasses 2 values | text stringlengths 12 5.47M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
1c5547f781096e70d696d2c00148755e1d5d2595 | Python | mayankmusaddi/hgnn | /plot_osmfish.py | UTF-8 | 1,243 | 2.59375 | 3 | [] | no_license | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
import h5py
# Per-cell metadata lives in the loom (HDF5) file's 'col_attrs' group.
file1 = "osmfish/osmFISH_SScortex_mouse_all_cells.loom"
f = h5py.File(file1,mode = 'r')
meta = f['col_attrs']
# Numeric cluster id and human-readable cluster name for every cell.
cell_types = np.asarray(meta['ClusterID'])
cell_names = np.asarray(meta['ClusterName'])
# Pre-computed cell embeddings (one row per cell) to be visualised.
X = np.load('/home/anant/precog/hyp/Hyperbolic-GNNs/embeddings/embeddings.npy')
# X = np.zeros((913,4))
print(X.shape)
print(X[692])
# Project the embeddings down to 2-D for plotting.
embeddings2d = TSNE(n_components=2).fit_transform(X)
# Collect the 2-D coordinates in a DataFrame (only inspected via .head()).
embeddingsdf = pd.DataFrame()# holds the 2-D projection
# embeddingsdf['game'] = gameslist# leftover from the tutorial this was adapted from
embeddingsdf['x'] = embeddings2d[:,0]# x coordinate
embeddingsdf['y'] = embeddings2d[:,1]# y coordinate
embeddingsdf.head()
yy = set(cell_types)
print(yy)
# One scatter series per cluster id so each gets its own colour and a legend
# entry labelled with the cluster's name.
fig, ax = plt.subplots(figsize=(10,8))# Scatter points, set alpha low to make points translucent
for g in np.unique(cell_types):
    i = np.where(cell_types == g)
    print(i, g)
    ax.scatter(embeddings2d[i,0], embeddings2d[i,1], label = cell_names[np.where(cell_types==g)][0],alpha = 0.7)
ax.legend()
# ax.scatter(embeddingsdf.x, embeddingsdf.y, alpha=.5, c=t)
# ax.scatter(X[:,2], X[:,10], alpha=.5, c=t)
plt.title('t-SNE Scatter-Plot')
plt.show()
| true |
e8abd6c0b31f6f824c46b4f2c2128adf7d53b2a0 | Python | Yue2u/Coursera_Python | /Diving_in_Python/W5/Multithread_programming.py | UTF-8 | 4,034 | 3.234375 | 3 | [] | no_license | import time
# import os
# from multiprocessing import Process
from threading import Thread
# import threading
# from concurrent.futures import ThreadPoolExecutor, as_completed
# from queue import Queue
# pid = os.getpid()
#
# Make process to watch on oit in linux console
# while True:
# print(pid, time.time())
# time.sleep(2)
# Only in linux console
# Create new process by using fork()
#
# pid = os.fork()
# if pid == 0:
# while True:
# print('Child:', os.getpid())
# time.sleep(5)
# else:
# print('Parent:', os.getpid())
# os.wait()
# Create process with multiprocessing
# Only in linux console
#
# def foo(name):
# print("hello", name)
#
#
# p = Process(target=foo, args=("Oleg", ))
# p.start()
# p.join()
# Only in linux console
# Inheritance from Process to make new process(override method 'run')
#
# class PrintProcess(Process):
# def __init__(self, name):
# super().__init__()
# self.name = name
#
# def run(self):
# print("Hello,", self.name)
#
#
# p = PrintProcess("Oleg")
# p.start()
# p.join()
# Creating new thread in the same process (thread is a part of a process)
#
# def foo(name):
# print("hello", name)
#
#
# th = Thread(target=foo, args=("Oleg", ))
# th.start()
# th.join()
# Inheritance from Thread to make new thread(override method 'run')
#
# class PrintThread(Thread):
# def __init__(self, name):
# super().__init__()
# self.name = name
#
# def run(self):
# print("Hello,", self.name)
#
#
# p = PrintThread("Oleg")
# p.start()
# p.join()
# Using ThreadPool to make some threads
# def foo(a):
# return a * a
#
#
# # .shutdown() in exit
# with ThreadPoolExecutor(max_workers=3) as pool:
# results = [pool.submit(foo, i) for i in range(10)]
#
# for future in as_completed(results):
# print(future.result())
# Using Queue to pass data between threads
#
# def worker(q, n):
# while True:
# item = q.get()
# if item is None:
# break
# print("process data:", n, item)
#
#
# q = Queue(5)
# th1 = Thread(target=worker, args=(q, 1))
# th2 = Thread(target=worker, args=(q, 2))
# th1.start()
# th2.start()
#
# for i in range(50):
# q.put(i)
#
# q.put(None)
# q.put(None)
# th1.join()
# th2.join()
# Synchronizing threads with context manager
#
# class Point(object):
# def __init__(self):
# self._mutex = threading.RLock()
# self._x = self._y = 0
#
# def get(self):
# with self._mutex:
# return (self._x, self._y)
#
# def set(self, x, y):
# with self._mutex:
# self._x = x
# self._y = y
# Synchronizing threads by hands (may have deadlock)
#
# a = threading.RLock()
# b = threading.RLock()
#
#
# def foo():
# try:
# a.acquire()
# b.acquire()
# finally:
# a.release()
# b.release()
# # Synchronizing threads witch conditional variables
#
# class Queue:
# def __init__(self, size=5):
# self._size = size
# self._queue = []
# self._mutex = threading.RLock()
# self._empty = threading.Condition(self._mutex)
# self._full = threading.Condition(self._mutex)
#
# def put(self, val):
# with self._mutex:
# while len(self._queue) >= self._size:
# self._full.wait()
#
# self._queue.append(val)
# self._empty.notify()
#
# def get(self):
# with self._mutex:
# while len(self._queue) == 0:
# self._empty.wait()
#
# val = self._queue.pop(0)
# self._full.notify()
# return val
# Cpu bound program
#
def count(n):
    """CPU-bound busy work: decrement ``n`` to zero, producing nothing."""
    remaining = n
    while remaining > 0:
        remaining -= 1
# Serial run: the same CPU-bound job twice, back to back.
t0 = time.time()
count(100_000_000)
count(100_000_000)
print(time.time() - t0)
# Parallel run: the same two jobs on two threads.  NOTE: for CPU-bound work
# CPython's GIL serializes bytecode execution, so this is typically no faster
# than the serial version - which is the point of the experiment.
t0 = time.time()
th1 = Thread(target=count, args=(100_000_000,))
th2 = Thread(target=count, args=(100_000_000,))
th1.start()
th2.start()
th1.join()
th2.join()
print(time.time() - t0)
| true |
9249f20e4a4c110c31f46aff975624fae5dd67a9 | Python | bishwa3141/Math450 | /FloatingPoint/Zeta1.py | UTF-8 | 157 | 3.0625 | 3 | [] | no_license | #!/usr/bin/python
import sys
# Python 2 script (xrange / print statement): sum the first N terms of the
# harmonic series, where N = 10^k and k is the first command-line argument.
N = 10**int(sys.argv[1]);
# NOTE: `sum` shadows the builtin of the same name.
sum = 0
# Iterating from N down to 1 adds the smallest terms first, which reduces
# floating-point rounding error compared to summing 1, 1/2, 1/3, ...
for i in xrange(N,0,-1):
    print i
    sum += 1/float(i)
print('sum(%d) = %f' % (N , sum))
| true |
582892f53a5d552caacee3c1261a5648787cf337 | Python | victorou22/donation-analytics | /src/donation_analytics_driver.py | UTF-8 | 1,177 | 2.578125 | 3 | [] | no_license | import argparse
import os
from donation_analytics_validations import *
from donation_analytics_process import *
def main():
    """Drive the donation-analytics pipeline from the command line."""
    arg_parser = argparse.ArgumentParser(description='Parses the path for the percentile file and the contributions file.')
    for positional in ('contributions_path', 'percentile_path', 'output_path'):
        arg_parser.add_argument(positional)
    cli = arg_parser.parse_args()
    # Start from a clean output file; ignore "file does not exist".
    try:
        os.remove(cli.output_path)
    except OSError:
        pass
    percentile = read_percentile(cli.percentile_path)
    stream = read_data(cli.contributions_path)
    donors = {}
    contributions = {}
    for raw_line in stream:
        # Guard clauses: drop invalid records and first-time donors.
        record = validate_record(raw_line)
        if not record:
            continue
        record = check_repeat_donor(record, donors)
        if not record:
            continue
        update_contributions(record, contributions)
        result = generate_result(record, contributions, percentile)
        write_output(cli.output_path, result)
    stream.close()
if __name__ == '__main__':
main() | true |
efce3c5c7795b97d164c3b31ae3b37c7277b1143 | Python | payneio/babybot_junk | /Pin.py | UTF-8 | 727 | 3.296875 | 3 | [] | no_license | import math
class Pin:
    """Wrapper around a Firmata-style digital pin driven in PWM mode."""

    def __init__(self, board, pin_no):
        # "d:<n>:p" selects digital pin <n> on the board in PWM mode.
        self.pin = board.get_pin("d:"+str(pin_no)+":p")
        self.board = board

    def transition(self, a, b, secs):
        """Ramp the PWM duty cycle from ``a`` to ``b`` over ``secs`` seconds.

        The ramp uses 100 steps per unit of travel; the exact final value
        ``b`` is always written at the end.
        """
        steps = math.fabs((b - a) * 100.0)
        if steps == 0:
            # BUG FIX: a == b used to divide by zero below; there is nothing
            # to ramp, so just write the target value once.
            self.pin.write(b)
            return
        wait_time = secs/float(steps)
        increment = (b-a) / 100.0
        position = a
        epsilon = .05
        while math.fabs(b-position) > epsilon :
            position = position + increment
            # Valid PWM duty-cycle range is 0.0 to 1.0; stop ramping if we
            # leave it.  BUG FIX: the original tested `< 0.0 and > 1.0`,
            # which can never be true, so the clamp never fired.
            if position < 0.0 or position > 1.0:
                break
            self.pin.write(position)
            self.board.pass_time(wait_time)
        self.pin.write(b)

    def set(self, a):
        """Write a single PWM value and give the board time to settle."""
        self.pin.write(a)
        self.board.pass_time(.5)
| true |
9dd62d31a8ef82d59fe8dcc0f951b81466c9bc1c | Python | andreyvit/yoursway-python-commons | /utils/sequtil.py | UTF-8 | 798 | 3.015625 | 3 | [] | no_license |
def index_by_key(entities):
    """Map each entity's ``key()`` to the entity itself (last one wins)."""
    return {entity.key(): entity for entity in entities}
def group(func, iterable):
    """Bucket items by ``func(item)``; returns ``{key: [items...]}``."""
    buckets = {}
    for item in iterable:
        key = func(item)
        if key not in buckets:
            buckets[key] = []
        buckets[key].append(item)
    return buckets
def slice(count, iterable):
    """Split the iterable into consecutive chunks of ``count`` items.

    The final chunk may be shorter.  NOTE: the name shadows the builtin
    ``slice`` within this module.
    """
    chunks = []
    current = []
    for item in iterable:
        current.append(item)
        if len(current) == count:
            chunks.append(current)
            current = []
    if current:
        chunks.append(current)
    return chunks
def index(func, iterable):
    """Map ``func(item)`` to ``item`` for every item (last one wins)."""
    return {func(item): item for item in iterable}
def flatten(l, ltypes=(list, tuple)):
    """Flatten arbitrarily nested sequences of the given types.

    The result has the same type as the input (list in, list out; tuple in,
    tuple out).
    """
    ltype = type(l)
    flat = []
    # Explicit stack of iterators instead of in-place index surgery: the top
    # of the stack is the sequence currently being walked.
    stack = [iter(l)]
    while stack:
        top = stack[-1]
        for item in top:
            if isinstance(item, ltypes):
                # Descend into the nested sequence before continuing.
                stack.append(iter(item))
                break
            flat.append(item)
        else:
            # Current iterator exhausted; resume its parent.
            stack.pop()
    return ltype(flat)
| true |
720703f2fc2bccd85ef24d25414788e7d2e89ebe | Python | GSIL-Monitor/Share | /python/qt/QtDemo02.py | UTF-8 | 1,262 | 2.546875 | 3 | [] | no_license | # coding=utf-8
import sys
from PyQt5 import uic
from PyQt5.QtWidgets import QMainWindow
from PyQt5.QtWidgets import QMessageBox
class MainWindow(QMainWindow):
    """Main window: loads mainwindow.ui and wires up its widgets."""
    def __init__(self):
        QMainWindow.__init__(self)
        # The widgets live on the loaded .ui object, not on self.
        self.ui = uic.loadUi('mainwindow.ui')
        self.ui.closeEvent = self.closeEvent
        self.ui.pushButton.clicked.connect(self.btnClickEvent)
        # Wrap the call in a lambda when the slot needs extra arguments:
        # self.ui.pushButton.clicked.connect(lambda: self.btnClickEvent(1))
        self.ui.show()
    def btnClickEvent(self, event):
        # Show the current contents of the line edit in a dialog box.
        msg_box = QMessageBox()
        msg_box.setIcon(QMessageBox.Information)
        msg_box.setWindowTitle('Warning')
        msg_box.setText(self.ui.lineEdit.text())
        msg_box.exec_()
    # Close-event handler (installed on self.ui in __init__): ask the user
    # to confirm before allowing the window to close.
    def closeEvent(self, event):
        print("event")
        reply = QMessageBox.question(self, 'Message',
            "Are you sure to quit?", QMessageBox.Yes, QMessageBox.No)
        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()
# Manual test entry point: start the Qt event loop with one MainWindow.
if __name__ == "__main__":
    from PyQt5.QtWidgets import QApplication, QMainWindow
    app = QApplication(sys.argv)
    win = MainWindow()
    sys.exit(app.exec_())
| true |
355ecaec0fe31022694ae73e09b1478f5f2af715 | Python | RohanGautam/Algorithm-implementations | /sorting/quickSort.py | UTF-8 | 1,423 | 3.75 | 4 | [] | no_license | def partition(L):
    """Lomuto-style partition around the middle element.

    Swaps the middle element to the front as the pivot, then moves every
    smaller element before it.  Partitions L in place and returns
    (final_pivot_index, L).
    """
    start, end, mid = 0, len(L), len(L)//2
    L[start], L[mid] = L[mid], L[start] # move pivot to the beginning
    pivot = L[start] # make pivot the middle value (it was swapped with the first). We need its value so we can make comparisons with it later on
    last_small = start # last_small keeps track of the boundary between the portions of the list smaller and bigger than the pivot
    for i in range(last_small+1, end): # in L[1:]
        if L[i] < pivot:
            ''' If current is smaller than pivot, swap it with
            ele after the last_small, and update last_small
            to be the index of that element. To visualize easier,
            think of a case [P, a, b, _c, _d, e, f, g] where you are
            at the index of e with last_small at index of b.
            a,b,e,f,g are smaller than P and _c, _d are bigger than P'''
            L[i], L[last_small+1] = L[last_small+1], L[i]
            last_small += 1
    L[last_small], L[start] = L[start], L[last_small]
    return (last_small, L)
def quickSort(L):
    """Sort L with quicksort (uses partition(); returns the sorted list)."""
    if len(L) <= 1:
        return L
    pivot_pos, L = partition(L)
    # The pivot is already in its final place; recurse on each side of it.
    left_sorted = quickSort(L[:pivot_pos])
    right_sorted = quickSort(L[pivot_pos + 1:])
    L[:pivot_pos] = left_sorted
    L[pivot_pos + 1:] = right_sorted
    return L
# Demo: partition the sample list and show the (pivot_index, list) result.
L = [4, 3, 5, 6, 7, 19, 0, 9, 12]
# print(L)
print(partition(L))
# print(quickSort(L))
| true |
188b1ad34cb0eae1f1238fced85f888c9f29b28d | Python | CodeSteak/write-your-python-program | /python/tests/testDefinedLater.py | UTF-8 | 469 | 3 | 3 | [
"BSD-3-Clause"
] | permissive | import unittest
from writeYourProgram import *
# Presumably makes a failed check abort the run (per the function's name in
# the writeYourProgram API) - TODO confirm.
setDieOnCheckFailures(True)
class TestDefinedLater(unittest.TestCase):
    """Checks that a DefinedLater('Name') forward reference resolves once
    the Record it names has been created."""
    def test_isSome(self):
        # Create the forward reference *before* the Name record exists.
        ty = DefinedLater('Name')
        Name = Record("Name", "firstName", str, "lastName", str)
        myName = Name("Stefan", "Wehr")
        self.assertTrue(ty.isSome(myName))
        self.assertFalse(ty.isSome(42))
        # Embedding the types inside List[...] must not raise.
        List[Name] # just use it
        List[ty] # just use it
        List[DefinedLater('foo')]
| true |
2fd3d351d02cd415c36134c863ab3fd1deade8fe | Python | Media1129/elastic_search | /create_index.py | UTF-8 | 1,759 | 2.65625 | 3 | [] | no_license | import json
from elasticsearch import Elasticsearch, helpers
from tqdm import tqdm
# RECIPE1M_FILE = "layer1.json"
RECIPE1M_FILE = "../recipes_with_nutritional_info.json"
def get_connection():
    """Return an Elasticsearch client for localhost:9200, verifying it responds."""
    client = Elasticsearch([{'host': 'localhost', 'port': 9200}])
    if client.ping():
        return client
    raise ConnectionError("Cannot connect to local elasticsearch server")
if __name__ == "__main__":
    es = get_connection()
    body = {
        "settings": {
            "index": {
                "number_of_shards": 1,
                "number_of_replicas": 0
            }
        },
        "mappings": { # the mapping is the equivalent of a table schema
            "properties": {
                "id": {
                    "type": "text"
                },
                "title": {
                    "type": "text"
                },
                "ingredients": {
                    "type": "text"
                },
                "instructions": {
                    "type": "text"
                }
            }
        }
    }
    # an index is the equivalent of a database
    if not es.indices.exists(index="recipes"):
        result = es.indices.create(index='recipes', ignore=400, body=body)
        print(result)
    with open(RECIPE1M_FILE) as jsonfile:
        data = json.load(jsonfile)
    actions = []
    # Build one bulk "index" action per recipe, flattening the nested
    # ingredient/instruction objects into plain lists of strings.
    for entry in tqdm(data):
        doc = {
            "id": entry["id"],
            "title": entry["title"],
            "ingredients": [ing["text"] for ing in entry["ingredients"]],
            "instructions": [ing["text"] for ing in entry["instructions"]]
        }
        actions.append({
            "_index": "recipes",
            "_op_type": "index",
            "_source": doc
        })
    # Send everything to Elasticsearch in a single bulk request.
    helpers.bulk(es, actions)
| true |
2902857b999f16f50f99daf82e524564a681b9e5 | Python | danieldfc/trainling-python | /Atividades/Yuri/Aula/numeros_sequencia.py | UTF-8 | 86 | 3.828125 | 4 | [] | no_license | numero = int(input('Informe um número -> '))
# Print every integer from 0 through the entered number, inclusive.
for i in range(numero + 1):
    print(i)
| true |
3822c8397d4cb34d1c99bc1d8cc4c9d83dd7e4c1 | Python | jlumpe/python-emacs | /emacs/elisp/ast.py | UTF-8 | 3,556 | 3.625 | 4 | [
"MIT"
] | permissive | """Base classes for Emacs Lisp abstract syntax trees."""
from typing import Union, Tuple, Iterable
from .util import escape_emacs_string
class Expr:
    """Base for classes which represent Elisp expressions."""

    def __str__(self):
        """Render the expression as elisp code."""
        raise NotImplementedError()

    def quote(self) -> 'Expr':
        """Return this expression wrapped in a quote."""
        return Quote(self)

    @property
    def q(self):
        """Shortcut for ``self.quote()``."""
        return self.quote()

    def __repr__(self):
        return '<el {}>'.format(self)

    def _repr_quoted(self) -> str:
        """Representation used when this node appears inside a quote."""
        return str(self)
class Literal(Expr):
    """Basic self-evaluating expressions like strings, numbers, etc.

    Attributes
    ----------
    pyvalue
        The Python value of the literal.
    """

    PY_TYPES = (str, int, float)

    pyvalue: Union[PY_TYPES]

    def __init__(self, pyvalue: Union[PY_TYPES]):
        if not isinstance(pyvalue, self.PY_TYPES):
            raise TypeError('Instances of %s not allowed as Elisp literals' % type(pyvalue))
        self.pyvalue = pyvalue

    def __eq__(self, other):
        if not isinstance(other, Literal):
            return False
        # Same concrete type *and* equal value (so 1 and 1.0 differ).
        mine, theirs = self.pyvalue, other.pyvalue
        return type(theirs) is type(mine) and theirs == mine

    def __str__(self):
        value = self.pyvalue
        if isinstance(value, str):
            return escape_emacs_string(value, quotes=True)
        return str(value)
class Symbol(Expr):
    """An Elisp symbol."""

    name: str

    def __init__(self, name: str):
        # A symbol name must be a non-empty string.
        assert isinstance(name, str) and name
        self.name = name

    def __eq__(self, other):
        return isinstance(other, Symbol) and self.name == other.name

    @property
    def isconst(self) -> bool:
        """Whether the symbol evaluates to itself (keywords, nil, t)."""
        if self.name.startswith(':'):
            return True
        return self.name in ('nil', 't')

    def __call__(self, *args, **kwargs) -> 'List':
        """Produce a function call expression from this symbol.

        See :func:`emacs.elisp.ast.funccall`.
        """
        from .exprs import funccall
        return funccall(self, *args, **kwargs)

    def __str__(self):
        return self.name
class Cons(Expr):
    """A cons cell."""

    car: Expr

    cdr: Expr

    def __init__(self, car: Expr, cdr: Expr):
        self.car = car
        self.cdr = cdr

    def __eq__(self, other):
        if not isinstance(other, Cons):
            return False
        return self.car == other.car and self.cdr == other.cdr

    def __str__(self):
        return '(cons {} {})'.format(self.car, self.cdr)

    def _repr_quoted(self) -> str:
        # Inside a quote a cons renders as a dotted pair.
        return '({} . {})'.format(self.car._repr_quoted(), self.cdr._repr_quoted())
class List(Expr):
    """An Elisp list expression.

    Attributes
    ----------
    items
        Items in the list.
    """

    items: Tuple[Expr, ...]

    def __init__(self, items: Iterable[Expr]):
        self.items = tuple(items)

    def __eq__(self, other):
        return isinstance(other, List) and self.items == other.items

    def __str__(self):
        return '(%s)' % ' '.join(str(item) for item in self.items)

    def _repr_quoted(self) -> str:
        return '(%s)' % ' '.join(item._repr_quoted() for item in self.items)
class Quote(Expr):
    """A quoted Elisp expression.

    Attributes
    ----------
    expr
        The quoted Elisp expression.
    """

    def __init__(self, expr: Expr):
        self.expr = expr

    def __eq__(self, other):
        if not isinstance(other, Quote):
            return False
        return self.expr == other.expr

    def __str__(self):
        return "'%s" % self.expr._repr_quoted()
class Raw(Expr):
    """Verbatim Elisp source code spliced in unchanged at this point.

    Attributes
    ----------
    src
        Raw Elisp source code.
    """

    src: str

    def __init__(self, src: str):
        self.src = src

    def __eq__(self, other):
        if not isinstance(other, Raw):
            return False
        return self.src == other.src

    def __str__(self):
        return self.src
| true |
d866c8f40ffd324a1e80524d8a5db444a456b59a | Python | MediaPreneur/Introduction-to-python | /while-else.py | UTF-8 | 82 | 3.234375 | 3 | [
"CC0-1.0"
] | permissive | i=0
while i<5:
print i
i=i+1
else:
print "the value execeeds 5" | true |
7105902139a3e65b990cb80ba7913fb06914b673 | Python | SmithGeorge/khiimel-oyuun | /khiimel-oyuun.py | UTF-8 | 1,177 | 3.3125 | 3 | [] | no_license | normal_answers = ["你们懂吗","人生的意义是什么","你们的开发经验是什么","我要屎了","男人要的是到底什么样的女人","加我飞书","我不行了","我要建立资产", "龙腾世纪","内裤","我为什么总是有很多的想法","你们用扫地机器人吗","你们思考吗","你们学数学吗","五感","为别人着想"]
special_answers = {"lqt":"lqt的家像宫殿一样","美女":"哪里有有才的美女,在电脑上查找美女","你们有Google voice吗":"没有","养孩子":"自己的事情都没搞好 养什么孩子"}
import random
import re
def answer(question):
    """Pick a reply for ``question``.

    Blank input gets a fixed reply; otherwise there is a small random chance
    of a templated reply, then a lookup of special-cased questions, and
    finally a random canned answer from ``normal_answers``.
    """
    # BUG FIX: the original called re.match(question, r"\s+") with the
    # (pattern, string) arguments swapped, so the question text was compiled
    # as a regex - blank input was never detected and questions containing
    # regex metacharacters (e.g. "(") raised re.error.
    if re.fullmatch(r"\s*", question):
        return "我们教信息技术的老师很漂亮,我很想草她"
    if random.uniform(0, 1) <= 0.114514:
        return "是的,你很懂{}".format(question)
    if random.uniform(0, 1) <= 0.233:
        return "什么是{}".format(question)
    if question in special_answers:
        return special_answers[question]
    return random.choice(normal_answers)
# Simple REPL: read a question, print the bot's reply, forever (Ctrl-C quits).
while True:
    user_input = input("<<< ")
    print(">>> {}".format(answer(user_input)))
| true |
5089153c105a4280641fb186cfdae6d2ecfac978 | Python | chrisliu529/winminer | /bench.py | UTF-8 | 1,685 | 2.8125 | 3 | [] | no_license | #!/usr/bin/python3
import subprocess
import sys
import re
import time
from string import Template
from concurrent.futures import ProcessPoolExecutor
def get_output(cmd):
    """Run ``cmd`` through the shell and return its stdout with trailing
    whitespace stripped.

    On a non-zero exit status the captured output is printed and the whole
    program exits with that status.
    """
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
    stdout, stderr = proc.communicate()
    if proc.returncode == 0:
        return stdout.rstrip()  # drop the trailing newline
    print(stdout)
    print(stderr)
    sys.exit(proc.returncode)
def wins(s):
    """Parse the comma-separated integers inside the parentheses of ``s``."""
    match = re.match(r'.*\((.*)\).*', s)
    return [int(field) for field in match.group(1).split(',')]
def score(w):
    """Weighted total of the three win counts (weights 1, 2 and 4)."""
    first, second, third = w[0], w[1], w[2]
    return first + 2 * second + 4 * third
def ratio(w):
    """Format the three win counts as whole-number percentages of
    10000, 5000 and 2500 respectively."""
    fractions = (w[0] / 10000.0, w[1] / 5000.0, w[2] / 2500.0)
    return ['%s%%' % int(round(fraction * 100)) for fraction in fractions]
def config_file(s, g):
    """Build the per-run config filename: '<strat1-strat2-...>-<guess>.toml'."""
    stem = "-".join(s)
    return '{}-{}.toml'.format(stem, g)
def bench(args):
    """Run one winminer benchmark configuration and report its score.

    ``args`` is a ``(strategies, guess)`` pair.  A config file is rendered
    from template.toml, ./winminer is run against it, and the win counts are
    parsed and scored.  If a minimum-score threshold is given on the command
    line, the process exits with status 1 when the score falls below it.
    """
    s, g = args
    with open('template.toml') as f:
        t = Template(f.read())
    c = config_file(s, g)
    with open(c, 'w') as f:
        f.write(t.substitute(strategies=s, guess=g))
    start = time.time()
    out = get_output(f'./winminer -c {c} | grep win:')
    ct = time.time() - start
    ws = wins(out.decode('utf-8'))
    si = score(ws)
    print('strategies = %s, guess = %s' % (s, g))
    print('score=%s %s %s, cost %.2f seconds' % (si, ws, ratio(ws), ct))
    # BUG FIX: the original checked len(args)/int(args[0]), but `args` is the
    # (strategies, guess) pair itself, so the length guard never fired and
    # int() on the strategy list raised TypeError.  The optional minimum-score
    # threshold is read from the command line instead.
    threshold_args = sys.argv[1:]
    if len(threshold_args) < 1:
        return
    if si < int(threshold_args[0]):
        sys.exit(1)
def bench_combinations():
    """Benchmark every strategy-prefix / guess-policy combination in parallel."""
    strategies = ["diff", "reduce", "isle"]
    gs = ["first", "random", "corner", "min"]
    # Pair each prefix of `strategies` with every guess policy, in the same
    # order the original nested loops produced.
    combos = [(strategies[:n + 1], guess)
              for n in range(len(strategies))
              for guess in gs]
    with ProcessPoolExecutor() as executor:
        # map() is lazy, but leaving the context manager waits for all
        # submitted work to finish.
        executor.map(bench, combos)
# Run the full benchmark matrix when executed as a script.
if __name__ == '__main__':
    bench_combinations()
| true |
ee24b5e2ba197808d25e6b15c2cd1d22198b85d2 | Python | uhla/fler-downloader | /downloader/excel_item_reader.py | UTF-8 | 1,171 | 2.75 | 3 | [] | no_license | from os import path
import xlrd
from downloader.catalog_item_configuration import CustomizedCatalogItem
class ExcelItemReader:
def read_configuration(self, filename):
customized_catalog_items = {}
if path.exists(filename):
print("Loading customized configuration from file: " + filename)
wb = xlrd.open_workbook(filename)
sheet = wb.sheet_by_index(0)
for row_number in range(1, sheet.nrows):
if str(sheet.cell_value(row_number, 1)) != '':
catalog_item = CustomizedCatalogItem(int(sheet.cell_value(row_number, 1)),
type=sheet.cell_value(row_number, 3),
styles=sheet.cell_value(row_number, 4),
other_colors=sheet.cell_value(row_number, 5))
customized_catalog_items[catalog_item.id] = catalog_item
else:
print("Unable to locate customized configuration file " + filename + ". No customization will be applied.")
return customized_catalog_items
| true |
5048db1e210b65aa6d21c0c6c7a1e96166b407f4 | Python | yaopoppysong/Partial-Least-Square-Regression | /RealExample.py | UTF-8 | 945 | 3.453125 | 3 | [] | no_license | # Using Real Data Set
# load package
import pandas as pd
from sklearn.cross_decomposition import PLSRegression
from sklearn.preprocessing import scale
import matplotlib.pyplot as plt
# read in data set
wine = pd.read_excel('wine.xlsx')
wine.head()
wine_new = wine.copy()
# normalize the data set
n = len(wine.columns)
for i in range(n):
wine_new.ix[:, i] = scale(wine.ix[:, i])
wine_new = np.array(np.matrix(wine_new))
# separate the data set
wine_newX = wine_new[:, 3:]
wine_newY = wine_new[:, :3]
wine_newX
# Using partial least square function
fit = PLS(wine_newX, wine_newY, wine_newX, 3, 1e-06)
Y_pred = fit.pls_prediction(wine_newX, 3)
np.sum((wine_newY-Y_pred)**2) # PRESS
# Using Partial Least Square Package in Python
pls1 = PLSRegression(n_components = 3)
pls1.fit(wine_newX, wine_newY)
Y_pred1 = pls1.predict(wine_newX)
np.sum((wine_newY-Y_pred1)**2) # PRESS
# Check the number of components by PRESS
fit.pls_ncomponents()
| true |
77dbf1a4ebf84f27a1711b223abd551b2deea6ec | Python | ashukumar27/DeepLearning | /Image_identification.py | UTF-8 | 1,170 | 2.828125 | 3 | [] | no_license | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 22 10:31:47 2018
@author: ashutosh
Image Classification with VGG19
"""
import os
import time
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(42)
os.chdir("/Users/ashutosh/Documents/analytics/DeepLearning/VGG19")
from keras.applications.vgg19 import VGG19
from keras.applications.vgg19 import preprocess_input, decode_predictions
from keras.preprocessing import image
from keras.models import Model
import cv2
# load pre-trained model
model = VGG19(weights='imagenet', include_top=True)
# display model layers
model.summary()
# display the image
img_disp = plt.imread('./peacock.jpg')
#img_disp = cv2.cvtColor(img_disp, cv2.COLOR_BGR2RGB)
plt.imshow(img_disp)
plt.axis("off")
plt.show()
# pre-process the image
img = image.load_img('./peacock.jpg', target_size=(224, 224))
img = image.img_to_array(img)
img = np.expand_dims(img, axis=0)
img = preprocess_input(img)
# predict the output
preds = model.predict(img)
# decode the prediction
pred_class = decode_predictions(preds, top=3)[0][0]
print ("Predicted Class: %s"%pred_class[1])
print ("Confidance: %s"%pred_class[2]) | true |
bec7eca3ef985cd3c62f9bea525cef0abf02d0e2 | Python | Luc1103/CSForum | /CSForum/WelcomePage.py | UTF-8 | 5,746 | 3.171875 | 3 | [] | no_license | import tkinter as tk
import mysql.connector
import HomePage
#Connects to the database
# NOTE(review): credentials are hard-coded in source control; consider
# loading them from configuration or environment variables instead.
mydb = mysql.connector.connect(
    host = "localhost",
    user = "root",
    passwd = "sD6G7Bx@f8cve$i3",
    database = "forum"
)
#Cursor used by all query/insert helpers below
mycursor = mydb.cursor()
#Displays the welcome page UI
def displayUI(window):
    """Build the welcome screen: a sign-up form on the left half and a
    login form on the right half, then enter the Tk main loop."""
    #Splits the screens into the two sections
    signUpFrame = tk.Frame(window.frame, highlightbackground="black", highlightthickness=1)
    signUpFrame.place(relwidth=0.5, relheight=1, relx=0, rely=0)
    loginFrame = tk.Frame(window.frame, highlightbackground="black", highlightthickness=1)
    loginFrame.place(relwidth=0.5, relheight=1, relx=0.5, rely=0)
    #Input fields and labels for the signup section
    signUpLabel = tk.Label(signUpFrame, text="Sign Up")
    signUpLabel.place(relwidth=0.9, relheight=0.04, relx=0.05, rely=0.05)
    usernameLabel = tk.Label(signUpFrame, text="Username:", anchor="nw", justify="left")
    usernameLabel.place(relwidth=0.3, relheight=0.04, relx=0.05, rely=0.15)
    usernameSignUp = tk.Entry(signUpFrame)
    usernameSignUp.place(relwidth=0.9, relheight=0.05, relx=0.05, rely=0.2)
    password1Label = tk.Label(signUpFrame, text="Password:", anchor="nw", justify="left")
    password1Label.place(relwidth=0.3, relheight=0.04, relx=0.05, rely=0.35)
    password1SignUp = tk.Entry(signUpFrame)
    password1SignUp.place(relwidth=0.9, relheight=0.05, relx=0.05, rely=0.4)
    password2Label = tk.Label(signUpFrame, text="Repeat Password:", anchor="nw", justify="left")
    password2Label.place(relwidth=0.3, relheight=0.04, relx=0.05, rely=0.55)
    password2SignUp = tk.Entry(signUpFrame)
    password2SignUp.place(relwidth=0.9, relheight=0.05, relx=0.05, rely=0.6)
    #Input fields and labels for the login section
    loginLabel = tk.Label(loginFrame, text="Login")
    loginLabel.place(relwidth=0.9, relheight=0.04, relx=0.05, rely=0.05)
    usernameLabelLogin = tk.Label(loginFrame, text="Username:", anchor="nw", justify="left")
    usernameLabelLogin.place(relwidth=0.3, relheight=0.04, relx=0.05, rely=0.25)
    usernameLogin = tk.Entry(loginFrame)
    usernameLogin.place(relwidth=0.9, relheight=0.05, relx=0.05, rely=0.3)
    passwordLabel = tk.Label(loginFrame, text="Password:", anchor="nw", justify="left")
    passwordLabel.place(relwidth=0.3, relheight=0.04, relx=0.05, rely=0.45)
    passwordLogin = tk.Entry(loginFrame)
    passwordLogin.place(relwidth=0.9, relheight=0.05, relx=0.05, rely=0.5)
    #Adds the buttons to each frame
    #Lambda means that the function is run when the button is clicked not when the button is made
    signUpBtn = tk.Button(signUpFrame,
                          text="Sign up",
                          command=lambda: addUser(usernameSignUp.get(), password1SignUp.get(), password2SignUp.get(), window)
                          )
    signUpBtn.place(relwidth=0.3, relheight=0.1, relx=0.35, rely=0.75)
    loginBtn = tk.Button(loginFrame,
                         text="Login",
                         command=lambda: login(usernameLogin.get(), passwordLogin.get(), window)
                         )
    loginBtn.place(relwidth=0.3, relheight=0.1, relx=0.35, rely=0.75)
    window.root.mainloop()
#Takes the user details and adds them to the database
def addUser(username, password1, password2, window):
    """Validate the sign-up form and insert the new user, then open the
    home page.  Validation failures are reported on stdout."""
    #Trim surrounding whitespace from every field
    username = username.strip()
    password1 = password1.strip()
    password2 = password2.strip()
    #Guard clauses: bail out on the first validation problem
    if username == "" or password1 == "" or password2 == "":
        print("Complete all fields")
        return
    if checkForExistingUser(username):
        print("User already exists")
        return
    if password1 != password2:
        print("Passwords do not match")
        return
    #All checks passed: insert the user and make the change permanent
    # NOTE(review): the password is stored in plain text; consider hashing.
    mycursor.execute("INSERT INTO users VALUES (%s, %s)", (username, password1))
    mydb.commit()
    #Show the homepage for the freshly created account
    HomePage.displayUI(window, username, False)
#Returns true if there is a user with that username
def checkForExistingUser(username):
    """Return True when a row with this username already exists."""
    #Count the rows carrying the given username
    mycursor.execute("SELECT COUNT(username) FROM users WHERE username = %s", (username, ))
    row_count = mycursor.fetchall()[0][0]
    return row_count != 0
def login(username, password, window):
    """Check the login form against the users table; on success open the
    home page, otherwise report the problem on stdout."""
    #Trim surrounding whitespace from both fields
    username = username.strip()
    password = password.strip()
    #Guard clauses: bail out on the first validation problem
    if username == "" or password == "":
        print("Complete all fields")
        return
    if not checkForExistingUser(username):
        print("User does not exist")
        return
    #Fetch the stored password for this username
    mycursor.execute("SELECT password FROM users WHERE username = %s", (username, ))
    dbPassword = mycursor.fetchall()[0][0]
    if dbPassword != password:
        print("Password wrong")
        return
    print("Success")
    #Display the homepage for the user
    HomePage.displayUI(window, username, False)
| true |
204b6da42a0022de9d0375b340b2d1918dd6cd73 | Python | Lenazhou/simple-faster-rcnn-interact | /data/ut_dataset.py | UTF-8 | 5,775 | 2.921875 | 3 | [
"MIT"
] | permissive | import os
import xml.etree.ElementTree as ET
import json
import numpy as np
from .util import read_image
class UTDataset:
    """Bounding-box dataset over the UT interaction frames.

    Frame ids come from ``<data_dir>/ut_set/<split>.txt``; per-frame
    annotations from ``<data_dir>/ut_tidy_anno/add_interact_x/<id>.json``
    and images from ``<data_dir>/frame/<id>.jpg``.

    Indexing returns ``(img, bbox, label, difficult)``:

    * ``img``: the RGB image in CHW format (via :func:`read_image`).
    * ``bbox``: float32 array of shape :math:`(R, 4)` whose rows are
      :math:`(y_{min}, x_{min}, y_{max}, x_{max})` in 0-based pixel
      coordinates (:math:`(R, 5)` when the annotation carries a fifth
      value).
    * ``label``: int32 array; every kept box has the fixed class id ``11``
      ("interact") - boxes with any other action are dropped.
    * ``difficult``: uint8 array of zeros (no difficult flags here).

    Args:
        data_dir (string): Path to the root of the training data.
        split (string): Name of the id-list file under ``ut_set/``.
        use_difficult (bool): Kept for API compatibility; not used here.
        return_difficult (bool): Kept for API compatibility; the difficult
            array is always returned.
    """

    def __init__(self, data_dir, split='trainval_new',
                 use_difficult=False, return_difficult=False,
                 ):
        id_list_file = os.path.join(
            data_dir, 'ut_set/{0}.txt'.format(split))
        self.ids = [id_.strip() for id_ in open(id_list_file)]
        self.data_dir = data_dir
        self.use_difficult = use_difficult
        self.return_difficult = return_difficult
        self.label_names = UT_BBOX_LABEL_NAMES

    def __len__(self):
        return len(self.ids)

    def get_example(self, i):
        """Return the i-th example as (img, bbox, label, difficult).

        Args:
            i (int): The index of the example.
        """
        id_ = self.ids[i]
        anno = os.path.join(self.data_dir, 'ut_tidy_anno/add_interact_x', id_ + '.json')
        interact_bbox = list()
        interact_label = list()
        interact_difficult = list()
        with open(anno, 'r') as f:
            frame_info = json.load(f)
        # Walk every annotated box ("coor") in this frame.
        for coor_info in frame_info['coorlist']:
            coordinate = coor_info['coor']
            coor_num = len(coordinate)
            # Convert string coordinates to numbers and shift from 1-based
            # to 0-based indices.
            coordinate = list(map(float, coordinate))
            coordinate = list(map(lambda x: x - 1, coordinate))
            coor_ = []
            # Reorder the stored values into (y_min, x_min, y_max, x_max).
            coor_.append(coordinate[1])
            coor_.append(coordinate[0])
            coor_.append(coordinate[3])
            coor_.append(coordinate[2])
            if coor_num == 5:
                # Keep the optional fifth annotation value unchanged.
                coor_.append(coordinate[4])
            # Normalise the action name ('nad' is treated as 'na').
            action = coor_info['action']
            if action == 'nad':
                action = 'na'
            # Interaction boxes are kept; single-person boxes are ignored.
            if action == 'interact':
                interact_bbox.append(coor_)
                interact_label.append(11)  # fixed class id for 'interact'
                interact_difficult.append(0)
        # NOTE(review): np.stack raises on an empty list, so a frame with no
        # 'interact' box fails here - unchanged from the original behaviour.
        interact_bbox = np.stack(interact_bbox).astype(np.float32)
        interact_label = np.stack(interact_label).astype(np.int32)
        # BUG FIX: the deprecated alias np.bool was removed in NumPy 1.24;
        # the builtin bool is the documented replacement.
        interact_difficult = np.array(interact_difficult, dtype=bool).astype(np.uint8)
        # Load the image.
        img_file = os.path.join(self.data_dir, 'frame', id_ + '.jpg')
        img = read_image(img_file, color=True)
        return img, interact_bbox, interact_label, interact_difficult

    __getitem__ = get_example
# Class names for this dataset.  Bug fix: without the trailing comma the
# parentheses collapse and the constant becomes the *string* 'interact'
# instead of a 1-tuple, breaking len()/indexing on label_names.
UT_BBOX_LABEL_NAMES = (
    'interact',
)
| true |
e65a9d84966614051af998ea0faea69994319ca4 | Python | stasvorosh/pythonintask | /PINp/2014/DASHA_ZABOLOTNOVA/task_2_44.py.py | UTF-8 | 621 | 3.328125 | 3 | [
"Apache-2.0"
] | permissive | #Задача 2. Вариант 44.
# Task: print a favourite quotation by Erasmus of Rotterdam, with the
# author's name on a separate line.
# Author: Zabolotnova D.K.
# Date: 23.02.2016
print ("Нет ничего отважнее, чем победа над самим собой... ")
print ( "\n\t\t\t Эразм Pоттердамский")
input ("\nНажмите Enter , чтобы закрыть")
| true |
5f1981c50ae29eb94fca8ea505e32667cd4e0aee | Python | snowmanunderwater/hr | /hr.py | UTF-8 | 1,629 | 3.09375 | 3 | [] | no_license | #!/usr/bin/env python3
import argparse
import subprocess
import sys
# read terminal width
# NOTE: `stty size` requires a real terminal; this fails when stdout is
# not a TTY (e.g. piped).
columns = int(subprocess.check_output(['stty', 'size']).decode().split()[1])
# argparse: the horizontal-rule character plus optional colors.
parser = argparse.ArgumentParser(description='Horizontal rule')
parser.add_argument('-c',
                    dest='color',  # argument name
                    type=str,
                    help='Character color')
parser.add_argument('-b',
                    dest='background',  # argument name
                    type=str,
                    help='Background color')
parser.add_argument('-s',
                    dest='string',  # argument name
                    type=str,
                    help='Character',
                    default='-')
args = parser.parse_args()
# take parameters from arguments
string = args.string
color = args.color
background = args.background
# colors
def frgnd(color=''):
    """Map a color name to its ANSI foreground code ('' when unknown)."""
    names = ('black', 'red', 'green', 'yellow',
             'blue', 'magenta', 'cyan', 'white')
    # Foreground SGR codes run 30..37 in the order above.
    codes = {name: str(30 + offset) for offset, name in enumerate(names)}
    return codes.get(color, '')
def bckgrnd(color=''):
    """Map a color name to its ANSI background code ('' when unknown)."""
    names = ('black', 'red', 'green', 'yellow',
             'blue', 'magenta', 'cyan', 'white')
    # Background SGR codes run 40..47 in the order above.
    codes = {name: str(40 + offset) for offset, name in enumerate(names)}
    return codes.get(color, '')
# create string
# Repeat the fill character across the whole terminal width.
create_string = args.string * columns
# Compose the SGR parameter list: reset;foreground;background.  Empty
# fields (no -c / -b given) are tolerated by terminals.
colors = '0;' + frgnd(color) + ';' + bckgrnd(background)
print('\x1b[%sm%s\x1b[0m' % (colors, create_string[:columns]))
| true |
fd62cd8d27b971fdc88286644bf49c4be1527cdb | Python | 06hong/Marvel | /marvel_stuff/authentication/routes.py | UTF-8 | 1,893 | 2.859375 | 3 | [] | no_license | from flask import Blueprint, render_template, request, redirect, url_for, flash
from marvel_stuff.forms import UserLoginForm
from marvel_stuff.models import db, User, check_password_hash
from flask_login import login_user, logout_user, login_required
# Authentication blueprint; its templates live in auth_templates/.
auth = Blueprint('auth',__name__, template_folder='auth_templates')
@auth.route('/signup', methods=['GET','POST'])
def signup():
    """Create a new account, then send the user to the sign-in page."""
    form = UserLoginForm()
    if request.method == 'POST' and form.validate_on_submit():
        email = form.email.data
        password = form.password.data
        # Security fix: the original printed the raw password to stdout;
        # never log credentials.
        new_user = User(email, password)
        db.session.add(new_user)
        db.session.commit()
        flash(f'You have created an account for {email}', 'auth-success')
        # Bug fix: the redirect was built but its result discarded, so the
        # signup page was re-rendered even after a successful registration.
        return redirect(url_for('auth.signin'))
    return render_template('signup.html', form = form) #render html template
@auth.route('/signin', methods=['GET','POST'])
def signin():
    """Authenticate a user and start a session on valid credentials."""
    form = UserLoginForm()
    if request.method == 'POST' and form.validate_on_submit():
        email = form.email.data
        password = form.password.data
        print(email, password)
        # Look up the account and verify the stored password hash.
        account = User.query.filter(User.email == email).first()
        if not (account and check_password_hash(account.password, password)):
            flash('Incorrect email/password. Please try again. ', 'auth-fail')
            return redirect(url_for('auth.signin'))
        login_user(account)
        flash(f'Logged in as {email}', 'auth-success')
        return redirect(url_for('site.home'))
    return render_template('signin.html', form=form)
@auth.route('/logout')
@login_required
def logout():
    """End the current session and bounce back to the home page."""
    logout_user()
    flash(f'Successfully logged out', 'auth-success')
    return redirect(url_for('site.home'))
f8a25b6332add240d5ea0a90c518521dfbc214e9 | Python | Sachin-ninja/2D-Plot | /GUI-2D-Plot-master/Final_Project.py | UTF-8 | 5,464 | 3.453125 | 3 | [] | no_license | from tkinter import *
#import array
import numpy as np
#for exit option in file
import sys
#for getting the file from the computer we use the modul filename
from tkinter import filedialog
import os
import matplotlib.pyplot as plt
#function to select a file from the system upon clicking the button
def getfile():
    """Open a file-chooser dialog, remember the chosen path on the Tk
    root, and load the file's column headers into the listbox.

    NOTE(review): relies on the module-level `root` Tk window being
    created before this callback fires.
    """
    root.filename=(filedialog.askopenfilename(title="Choose your File",filetypes=(("txt","*.txt"),("All files","*.*"))))
    se.manipulate()
#to transfer file to display the columns to be selected
class se:
    """Namespace of GUI callbacks for the 2-D plot tool.

    NOTE(review): the methods deliberately take no `self` and are invoked
    unbound (se.manipulate(), command=se.plot1); they communicate through
    the module-level globals root, listb, idx1, value and value1.
    """
    def manipulate():
        """Read the first line of the chosen file and list its columns."""
        print(root.filename)
        f = open(root.filename, "r") #opening the file selected by the user
        scrollbar.pack(side="right", fill=Y) #to place it vertically
        listb.pack(); #pack is the method again
        #for getting the first line of the File selected
        for row in f:
            break
        arr=[" "] #for creation of an array to place an the list of columns in the Listbox
        arr=row.split( ) #to split the words based on spaces
        #inserting the Columns of the file in the Listbox
        for i in range((len(arr))):
            listb.insert(END,arr[i])
    #Function to select x attributes upon clicking selx button
    def selectx():
        """Arm the listbox so the next click records the x column."""
        lbx=Label(root,text="Select only 1 attribute for x-coordinate for plotting")
        lbx.place(x=1000,y=180)
        listb.bind('<<ListboxSelect>>', se.onselect)#bind function is used to send the clicked option
    #Function to select y attributes upon clicking sely button
    def selecty():
        """Arm the listbox so the next click records the y column."""
        lby=Label(root,text="Select only 1 attribute for y-coordinate for plotting")
        lby.place(x=1000,y=430)
        listb.bind('<<ListboxSelect>>',se.onselect1)
    #Onselect function is used to get the index of the column clicked
    def onselect(event):
        """Record the clicked column index/name for the x axis."""
        w = event.widget #to get the widget
        idx=int(w.curselection()[0]) #curselection is the function to know which widget is selected
        idx1.append(idx)
        value.append(w.get(idx))
        lbxd=Label(root,text=value)
        lbxd.place(x=1000,y=210)
    #onselect1 function is used to get the index of the column clicked
    def onselect1(event):
        """Record the clicked column index/name for the y axis."""
        w = event.widget
        idx=int(w.curselection()[0]) #curselection is the function to know which widget is clicked
        idx1.append(idx)
        value1.append(w.get(idx))
        lbyd=Label(root,text=value1)
        lbyd.place(x=1000,y=450)
    #plot function used for plotting the data
    def plot1():
        """Plot the two selected columns of the chosen file with pyplot.

        NOTE(review): assumes onselect/onselect1 were each triggered
        exactly once, so idx1 holds [x_column, y_column] — confirm.
        """
        x1=int(idx1[0]) #to convert the index from string to int
        y1=int(idx1[1])
        f=open(root.filename,"r") #open the file selected
        f1=f.readline() #to eliminate the first line of the file selected
        f2=f.readlines() #to read the file line by line
        t=[ ] #
        spl=[ ]
        for row in f2:
            t.append(float(row.split( )[x1]))
            spl.append(float(row.split( )[y1]))
        x=t #assigning the x selected attributes to the x-axis
        y=spl #assigning the y selected attributes to the y-axis
        fig=plt.figure() #to initialise the figure from matplotlib
        ax1=fig.add_subplot(111) #subplot is used to get the plot of the figure
        ax1.set_title("Analysis of data") #to set the title
        ax1.set_xlabel(value) #
        ax1.set_ylabel(value1)
        ax1.plot(x,y,c='r')
        #leg=ax1.legend()
        plt.xticks(np.arange(min(x),max(x)+1,5.0))
        plt.yticks(np.arange(min(y),max(y)+1,1.0))
        plt.show()
#creating a window using Tkinter
root=Tk()
root.title("2D PLOT")
# Module-level state shared with the se callbacks (`global` at module
# scope is a no-op; kept for documentation of intent).
global s
s=[]
global arr
global value
value=[]
global idx
idx=[]
global idx1
idx1=[]
global value1
value1=[]
global listb
#creating the frame for listbox
frame1=Frame(root)
frame1.pack()
scrollbar = Scrollbar(frame1, orient="vertical") #scrollbar is used for Listbox for selection
listb=Listbox(frame1,yscrollcommand=scrollbar.set) #listbox is used to display the comments
scrollbar.config(command=listb.yview) #to configure the scrollbar
#adding Menu's
menu = Menu(root)
root.config(menu=menu) #to config the menu's in the menu
filemenu = Menu(menu)
menu.add_cascade(label='File', menu=filemenu) #cascade is used to add the menu to the frame
filemenu.add_command(label='New') #adding the options in the menu File
filemenu.add_command(label='Open')
filemenu.add_separator() #to add the line which seperates the menu options
filemenu.add_command(label='Exit',command=root.destroy)
helpmenu = Menu(menu)
menu.add_cascade(label='Help', menu=helpmenu)
helpmenu.add_command(label='About')
#creation of button to load file
button=Button(root,text='Load File',width=25,command=getfile)
button.place(x=100,y=250)
#creating object for class se
p1=se
#button for selecting the x-axis
selx=Button(root,text='SelectX',width=25,command=se.selectx)
selx.place(x=1050,y=150)
#button for selecting the y-axis
sely=Button(root,text='SelectY',width=25,command=se.selecty)
sely.place(x=1050,y=400)
#button for plotting
ploto=Button(root,text="PLOT",width=25,command=se.plot1)
ploto.place(x=590,y=470)
root.geometry("1500x1500") #geometry function is used for initialising the frame size
root.mainloop()
| true |
d3ff54885df6c8e854306bbf6c7789cb9db8a00a | Python | saadmohmed/exif-gps-tracer | /getexif.py | UTF-8 | 1,716 | 2.546875 | 3 | [] | no_license | from PIL import Image
from PIL.ExifTags import TAGS
from PIL.ExifTags import GPSTAGS
def get_exif(filename):
    """Open an image, verify its integrity, and return (exif_dict, filename).

    The EXIF dict maps numeric tag ids to values, or is None when the
    image carries no EXIF block.
    """
    image = Image.open(filename)
    image.verify()
    return image._getexif(),filename
def get_gpstags(exif, filename):
    """Extract the GPS sub-dictionary from a raw EXIF dict.

    Returns a {gps_tag_name: value} dict (possibly empty), or None when
    `exif` itself is empty/None.
    """
    if not exif:
        print("No EXIF metadata found for "+filename)
        return None
    geotagging = {}
    for tag_id, tag_name in TAGS.items():
        if tag_name != 'GPSInfo':
            continue
        if tag_id not in exif:
            print("No EXIF GeoTag found on "+filename)
            break
        gps_ifd = exif[tag_id]
        # Translate numeric GPS keys into their human-readable names.
        for gps_key, gps_label in GPSTAGS.items():
            if gps_key in gps_ifd:
                geotagging[gps_label] = gps_ifd[gps_key]
    return geotagging
def get_datetags(exif,filename):
    """Return the DateTimeOriginal value from a raw EXIF dict, or None."""
    if not exif:
        print("No EXIF metadata found "+filename)
    if exif:
        # NOTE(review): `datetagging` is immediately shadowed by the inner
        # loop variable below; the initial {} is never returned.
        datetagging = {}
        for (idx, tag) in TAGS.items():
            if tag == 'DateTimeOriginal':
                if idx not in exif:
                    print("No EXIF Date found on "+filename)
                    break
                # Scans the whole EXIF dict again to find the same tag and
                # returns its value directly.
                for (k,datetagging) in exif.items():
                    if TAGS.get(k) == 'DateTimeOriginal':
                        return datetagging
def get_decimal_from_dms(dms, ref):
    """Convert (degrees, minutes, seconds) plus a hemisphere ref letter
    to signed decimal degrees, rounded to 5 places.

    South ('S') and West ('W') hemispheres yield negative values.
    """
    sign = -1 if ref in ('S', 'W') else 1
    decimal = dms[0] + dms[1] / 60.0 + dms[2] / 3600.0
    return round(sign * decimal, 5)
def get_coordinates(geotags):
    """Return (latitude, longitude) in decimal degrees from a geotag dict."""
    latitude = get_decimal_from_dms(
        geotags['GPSLatitude'], geotags['GPSLatitudeRef'])
    longitude = get_decimal_from_dms(
        geotags['GPSLongitude'], geotags['GPSLongitudeRef'])
    return (latitude, longitude)
| true |
e2feb2e7737fc69b31e7bdac4f5f85ecac9c0677 | Python | JustinLokHinWu/OpenEyeTap | /OpenEyetap_Applications/Bluetooth/notificationservice.py | UTF-8 | 4,731 | 2.796875 | 3 | [
"MIT"
] | permissive | from tkinter import *
import PIL
from PIL import ImageTk, Image
import os
import threading
import time
from queue import Queue
import json
class NotificationService(threading.Thread):
    """Thread that shows phone notifications in a borderless Tk overlay.

    Notifications arrive as UTF-8 JSON bytes via get_data(); they are
    queued and displayed one at a time for three seconds each.
    """
    def run(self):
        """Thread entry point: build the Tk UI and enter the main loop."""
        self.root = Tk()
        self.root.overrideredirect(True)  # borderless overlay window
        width = self.root.winfo_screenwidth()
        height = self.root.winfo_screenheight() // 3
        self.frame = Frame(self.root, width=width, height=height,
                           borderwidth=2, relief=RAISED)
        self.frame.pack_propagate(False)
        self.frame.pack()

        # Left pane: notification icon.
        self.left_frame = Frame(self.frame)
        self.left_frame.config(bg="blue")
        self.left_frame.pack(side=LEFT)
        self.image = ImageTk.PhotoImage(Image.open("resources/notif.png"))
        self.image_panel = Label(self.left_frame, image=self.image, borderwidth=0, highlightthickness=0)
        self.image_panel.pack()

        # Right pane: title / package / text labels.
        self.right_frame = Frame(self.frame)
        self.right_frame.config(bg="blue")
        self.right_frame.pack(side=LEFT)
        self.label_title = Label(self.right_frame, text="Title", fg="white", bg="blue")
        self.label_title.config(font=("Arial", 60))
        self.label_title.pack()
        self.label_package = Label(self.right_frame, text="Title", fg="white", bg="blue")
        self.label_package.config(font=("Arial", 20))
        self.label_package.pack()
        self.label_text = Label(self.right_frame, text="Title", fg="white", bg="blue")
        self.label_text.config(font=("Arial", 30))
        self.label_text.pack()

        # Pending notifications, consumed by display_notifications().
        self.notifications = Queue()

        # State for the fade/slide animation (fade_notification).
        self.opacity = 0.0
        self.increasing_opacity = False
        self.location = 0
        self.root.mainloop()

    def get_data(self, data):
        """Decode a JSON notification payload (bytes) and queue it."""
        print("Processing notification")
        json_string = data.decode("utf-8")
        print(json_string)
        notif_data = json.loads(json_string)
        if("package" in notif_data):
            print("Package: " + notif_data["package"])
        if("title" in notif_data):
            print("Title: " + notif_data["title"])
        if("text" in notif_data):
            print("Text: " + notif_data["text"])
        if("img" in notif_data):
            # Received images are dropped into temp/ by the transport layer.
            notif_data["img"] = "temp/" + notif_data["img"]
            print("Image: " + notif_data["img"])
        else:
            notif_data["img"] = "resources/notif.png"
            print("Image: " + notif_data["img"])
        self.prepare_notifications(notif_data)

    def prepare_notifications(self, data):
        """Queue a notification; kick off display if the queue was idle.

        Bug fix: Queue.empty is a *method* — the original tested the bound
        method object itself (always truthy), so the idle check never
        worked.
        """
        was_idle = self.notifications.empty()
        self.notifications.put(data)
        if was_idle:
            self.increasing_opacity = True
            self.display_notifications()

    def display_notifications(self):
        """Show the next queued notification; reschedule every 3 seconds."""
        # Bug fix: call empty() — `not self.notifications.empty` was always
        # False, so no notification was ever rendered.
        if not self.notifications.empty():
            notif = self.notifications.get()
            if 'title' in notif:
                self.label_title['text'] = notif['title']
            if 'package' in notif:
                self.label_package['text'] = notif['package']
            if 'text' in notif:
                self.label_text['text'] = notif['text']
            if 'img' in notif:
                self.image = ImageTk.PhotoImage(Image.open(notif['img']))
                self.image_panel.configure(image=self.image)
                # Keep a reference so Tk doesn't garbage-collect the image.
                self.image_panel.image = self.image
            self.root.after(3000, self.display_notifications)
        else:
            self.increasing_opacity = False

    def fade_notification(self):
        """Slide the window up while active, back down when idle.

        NOTE(review): not currently scheduled from run(); retained for a
        future slide-in animation.
        """
        if not self.increasing_opacity:
            if(self.location != 0):
                # Bug fix: slide back down by moving `location`; the
                # original decremented `opacity`, which geometry() ignores.
                self.location -= 1
        else:
            if(self.location != self.root.winfo_height()):
                self.location += 1
        pos = "+0+" + str(self.root.winfo_screenheight() - self.location)
        self.root.geometry(pos)
        self.root.after(2, self.fade_notification)
| true |
fccaf2359df5faf467918c4e19d36fc0e8142c5d | Python | sarudalf3/poo-python | /stores.py | UTF-8 | 857 | 3.109375 | 3 | [] | no_license | class Store:
    def __init__(self, name): #, market):
        """Create a store with the given name and an empty stock list."""
        self.name = name
        self.productsList = []  # products currently in stock
    def add_product(self, new_product):
        """Add `new_product` to the store's stock."""
        self.productsList.append(new_product)
def sell_product(self, product):
for prod in self.productsList:
if prod.ID == product.ID:
self.productsList.remove(prod)
def inflation(self, percent):
for products in self.productsList:
products.update_price(percent, True)
def set_clearance (self, category, percent_discount):
for product in self.productsList:
if product.category == category:
product.update_price(percent_discount, False)
def __str__(self):
out = f"Store: {self.name}"
for prod in self.productsList:
out += f"\n{prod.name} "
return out | true |
807ae744c8c634c73bbe7f1e78002fe503a63a5b | Python | bong1915016/Introduction-to-Programming-Using-Python | /evennumberedexercise/Exercise4_8.py | UTF-8 | 336 | 4 | 4 | [] | no_license | # Enter three numbers
# SECURITY: eval() executes arbitrary code typed by the user; for
# untrusted input, parse each value with int() instead.
number1, number2, number3 = eval(input("Enter three integers: "))
# Three pairwise swaps (a fixed 3-element bubble sort) leave the values
# in ascending order.
if number1 > number2:
    number1, number2 = number2, number1
if number2 > number3:
    number2, number3 = number3, number2
if number1 > number2:
    number1, number2 = number2, number1
print("The sorted numbers are", number1, number2, number3)
c8330fac27d5f375b02e34b08bea3a549c24658f | Python | arunmcherian94/library-rest-api | /core_apis/crud/models.py | UTF-8 | 5,126 | 2.578125 | 3 | [] | no_license | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.validators import RegexValidator
from django.db import models
from core_apis import settings
import uuid
# Create your models here.
class Member(models.Model):
    """
    Member table to store member parameters.
    """
    # Accepts 9-15 digit numbers with an optional leading '+'.
    phone_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$', message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.")
    first_name = models.CharField(max_length=40, default=None, null=True, blank=True, verbose_name="Member's first name.")
    last_name = models.CharField(max_length=40, default=None, null=True, blank=True, verbose_name="Member's last name.")
    email = models.EmailField(unique=True, verbose_name="Member's email id.")
    # NOTE(review): max_length=32 is too short for most password-hash
    # formats (e.g. werkzeug/pbkdf2 strings) — confirm against the hashing
    # scheme used at signup.
    password = models.CharField(max_length=32, default=None, null=True, blank=True, verbose_name="To store the encrypted password.")
    phone = models.CharField(max_length=32, verbose_name="Telephone number", validators=[phone_regex])
    is_active = models.BooleanField(default=True, verbose_name="Member active status")
    is_deleted = models.BooleanField(default=False, verbose_name="Member deletion status")
    joined_on = models.DateTimeField(auto_now_add=True, verbose_name="Member joining date")
    modified_on = models.DateTimeField(auto_now=True,verbose_name="Member details modified date")
    # 'A' presumably means Admin — TODO confirm the member_type code set.
    member_type = models.CharField(max_length=1, default='A', verbose_name="Member type. Admin/User")
    expires_on = models.DateField(verbose_name="Membership expiry date.")
    misc_details = models.CharField(max_length=1024, default=None, null=True, blank=True, verbose_name="Membership miscellaneous details.")
    def __str__(self):
        """ String representation of the Model."""
        # Bug fix: the "Expired on" key had misplaced quotes, producing a
        # malformed JSON-like string.
        return '{"Member Name": "%s", "Email": "%s", "Expired on": "%s"}' % (self.first_name, self.email, self.expires_on)
class Author(models.Model):
    """ Table to store author details. """
    first_name = models.CharField(max_length=40, default=None, null=True, blank=True, verbose_name="Author's first name.")
    last_name = models.CharField(max_length=40, default=None, null=True, blank=True, verbose_name="Author's last name.")
    # Email doubles as the author's unique natural key.
    email = models.EmailField(unique=True, verbose_name="Author's email id.")
    def __str__(self):
        """ String representation of the Model."""
        return '{"Author Name": "%s %s"}' % (self.first_name, self.last_name)
class BookManager(models.Manager):
    """ To fetch count of books by title. """
    def title_count(self, keyword):
        """Count books whose title contains `keyword` (case-insensitive)."""
        return self.filter(title__icontains=keyword).count()
class Book_master(models.Model):
    """ Stores book details. """
    # PROTECT: an author who still has books cannot be deleted.
    author = models.ForeignKey('Author', on_delete=models.PROTECT, verbose_name="Unique id of the book author.")
    isbn = models.CharField(max_length=13, unique=True, verbose_name="ISBN of the book.")
    title = models.CharField(max_length=100, default=None, null=True, blank=True, verbose_name="Title of the book.")
    no_of_copies = models.IntegerField(default=1, verbose_name="Total number of copies available.")
    is_deleted = models.BooleanField(default=False, verbose_name="Book deletion status")
    added_on = models.DateTimeField(auto_now_add=True, verbose_name="Book addition date")
    modified_on = models.DateTimeField(auto_now=True,verbose_name="Book details modified date")
    misc_details = models.CharField(max_length=1024, default=None, null=True, blank=True, verbose_name="Extra details of book.")
    # Custom manager exposing title_count().
    objects = BookManager()
    def __str__(self):
        """ String representation of the Model."""
        return '{"Book Name": "%s", "ISBN": "%s"}' % (self.title,self.isbn)
class Book(models.Model):
    """ Stores particular book's copy details. """
    # Deleting the parent title removes all of its physical copies.
    book_master = models.ForeignKey('Book_master', on_delete=models.CASCADE, verbose_name="Id of the parent book.")
    last_borrowed_date = models.DateTimeField(auto_now_add=True, verbose_name="Most reccent borrow date for this copy.")
    # Opaque identifier for the physical copy, generated once at creation.
    book_id = models.UUIDField(default=uuid.uuid4, editable=False)
    available = models.BooleanField(default=True, verbose_name="Book availability.")
    def __str__(self):
        """ String representation of the Model."""
        return '{"Book master id": "%s"}' % (self.book_master_id)
class BookAction(models.Model):
    """ Table that stores borrow/return data. """
    member = models.ManyToManyField(Member, verbose_name="Member id of the user.")
    # PROTECT: a copy with borrow history cannot be deleted.
    copy = models.ForeignKey('Book', on_delete=models.PROTECT, verbose_name="Id of the book copy issued.")
    borrowed_date = models.DateTimeField(default = None, null=True, blank=True, verbose_name="Borrowed date.")
    # Bug fix: verbose_name was copy-pasted as "Borrowed date.".
    due_date = models.DateTimeField(verbose_name="Due date.")
    is_returned = models.BooleanField(default=False, verbose_name="Book return status.")
    fine_collected = models.DecimalField(default=0.00, max_digits=6, decimal_places=2, verbose_name="Fine collected.")
    def __str__(self):
        """ String representation of the Model."""
        # Bug fix: '"Copy: " "%s"' were two adjacent literals producing a
        # malformed JSON-like string; use a proper quoted key.
        return '{"Book action. Member": "%s", "Copy": "%s"}' % (self.member,self.copy)
9192d51447a7f9e87fa54bfd029c56719c0fa4d3 | Python | RanadheerDanda/Selenium-Python | /SeleniumPractice/Slider.py | UTF-8 | 1,027 | 2.890625 | 3 | [] | no_license | from selenium import webdriver
import time
from selenium.webdriver.common.by import By
from selenium.webdriver import ActionChains
class SliderExample:
    """Demo: drag the jQuery-UI slider handle with Selenium ActionChains."""
    def slider_test(self):
        """Open the demo page and drag the slider 500px to the right."""
        path='F:\\selenium-java-3.141.59\\geckodriver.exe'
        baseUrl='https://jqueryui.com/slider/'
        driver = webdriver.Firefox(executable_path=path)
        driver.maximize_window()
        driver.get(baseUrl)
        driver.implicitly_wait(10)
        # The slider widget lives inside the first iframe on the page.
        driver.switch_to.frame(driver.find_elements_by_tag_name('iframe')[0])
        slider_element = driver.find_element(By.XPATH,'//div[@id="slider"]//span')
        try:
            actions = ActionChains(driver)
            print('sliding in right direction')
            actions.drag_and_drop_by_offset(slider_element,500,0).perform()
            time.sleep(5)
        # Bug fix: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only ordinary exceptions.
        except Exception:
            print('sliding is failed')
        finally:
            # Always close the browser, even on failure.
            driver.quit()
# Run the demo immediately when the module is executed.
test=SliderExample()
test.slider_test()
f8416ed101fd036510d6e4a55882f6bf34337fe1 | Python | snakedragon/udacity-dlnd | /language-translation/doTrans.py | UTF-8 | 2,567 | 2.890625 | 3 | [
"MIT"
] | permissive | import tensorflow as tf
import numpy as np
import helper
import problem_unittests as tests
# Hyperparameters used when the checkpoint being loaded was trained.
# Number of Epochs
epochs = 50
# Batch Size
batch_size = 64
# RNN Size
rnn_size = 108
# Number of Layers
num_layers = 2
# Embedding Size
encoding_embedding_size = 100
decoding_embedding_size = 100
# Learning Rate
learning_rate = 0.001
# Dropout Keep Probability
keep_probability = 0.5
display_step = 10
# Vocabulary mappings saved during preprocessing, and the checkpoint path.
_, (source_vocab_to_int, target_vocab_to_int), (source_int_to_vocab, target_int_to_vocab) = helper.load_preprocess()
load_path = helper.load_params()
def sentence_to_seq(sentence, vocab_to_int):
    """
    Convert a sentence to a sequence of ids
    :param sentence: String
    :param vocab_to_int: Dictionary to go from the words to an id
    :return: List of word ids
    """
    # Words missing from the vocabulary map to the '<UNK>' id.
    return [vocab_to_int.get(word, vocab_to_int['<UNK>'])
            for word in sentence.split()]
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_sentence_to_seq(sentence_to_seq)
translate_sentence = 'he saw a old yellow truck .'
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
translate_sentence = sentence_to_seq(translate_sentence, source_vocab_to_int)
loaded_graph = tf.Graph()
with tf.Session(graph=loaded_graph) as sess:
# Load saved model
loader = tf.train.import_meta_graph(load_path + '.meta')
loader.restore(sess, load_path)
input_data = loaded_graph.get_tensor_by_name('input:0')
logits = loaded_graph.get_tensor_by_name('predictions:0')
target_sequence_length = loaded_graph.get_tensor_by_name('target_sequence_length:0')
source_sequence_length = loaded_graph.get_tensor_by_name('source_sequence_length:0')
keep_prob = loaded_graph.get_tensor_by_name('keep_prob:0')
translate_logits = sess.run(logits, {input_data: [translate_sentence] * batch_size,
target_sequence_length: [len(translate_sentence) * 2] * batch_size,
source_sequence_length: [len(translate_sentence)] * batch_size,
keep_prob: 1.0})[0]
print('Input')
print(' Word Ids: {}'.format([i for i in translate_sentence]))
print(' English Words: {}'.format([source_int_to_vocab[i] for i in translate_sentence]))
print('\nPrediction')
print(' Word Ids: {}'.format([i for i in translate_logits]))
print(' French Words: {}'.format(" ".join([target_int_to_vocab[i] for i in translate_logits])))
| true |
70a40cde8c7c2fb6a06e19d3642aa3632d2cbae5 | Python | Not2Day2Die/PySnow | /createTable.py | UTF-8 | 548 | 2.75 | 3 | [] | no_license | import pymssql
# SECURITY NOTE(review): server address and sa credentials are hard-coded
# in source control — move them to environment variables or a config file.
conn = pymssql.connect(host='60.251.238.43',
                       user='sa',
                       password='8179311!QAZ',
                       database='db8780',
                       charset='utf8',
                       port=8433)
# Obtain a cursor to check the connection and run statements.
cursor = conn.cursor()
# Bug fix: the CREATE TABLE text was a bare expression (a no-op) while an
# empty string was executed; assign the DDL to sql and actually run it.
sql = 'CREATE TABLE Customer(First_Name char(50),Last_Name char(50),Address char(50),City char(50),Country char(25),Birth_Date datetime);'
cursor.execute(sql)
conn.commit()
# CREATE TABLE returns no result set, so fetchall() would raise; report
# completion instead.
print('Customer table created')
6f99dced5a4b2e17ecdd67695f60b0d33182c644 | Python | ehudb9/cyberBall | /venv/Experiment/control_modes/keyboard_control_mode.py | UTF-8 | 1,116 | 2.859375 | 3 | [] | no_license | from venv.Experiment.arm_movement_control import ArmMotorControl
from venv.Experiment.constants import *
from pynput import keyboard
class KeyboardControlMode:
    """Drive the arm motor from keyboard arrow keys via a pynput listener."""
    def __init__(self):
        self.arm = ArmMotorControl()
        self.arm.set_moving_speed(MOTOR_NAME, 120)
        self.arm.set_acceleration(MOTOR_NAME, 2)
    def start(self):
        """Block listening for key events until Esc is released."""
        # Bug fix: announce *before* blocking — listener.join() only returns
        # once the listener stops, so the original printed "started" at exit.
        print("Listening started...")
        with keyboard.Listener(
                on_press=self.on_press,
                on_release=self.on_release) as listener:
            listener.join()
    def on_press(self, key):
        """No action on press; motor commands fire on release.

        Bug fix: the original wrapped a bare `pass` in try/except
        AttributeError — the handler was unreachable dead code.
        """
        pass
    def on_release(self, key):
        """Map arrow keys to motor moves; Esc stops the listener."""
        if key == keyboard.Key.right:
            self.arm.turn_counter_clockwise(MOTOR_NAME)
        if key == keyboard.Key.left:
            self.arm.turn_clockwise(MOTOR_NAME)
        if key == keyboard.Key.up:
            self.arm.turn_full_circle(MOTOR_NAME)
        if key == keyboard.Key.esc:
            # Returning False stops the pynput listener.
            print("Listening stopped")
            return False
| true |
5e2045a72218448c42c0388df9e8a06f6034e419 | Python | lmquan1609/robot | /01_sample_search_and_return/05_decision_to_go.py | UTF-8 | 1,226 | 3.109375 | 3 | [] | no_license | import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
from decision_extra_functions import *
# Define a function to convert from cartesian to polar coordinates
def to_polar_coords(xpix, ypix):
    """Convert rover-frame pixel coordinates to polar form.

    Returns (distances, angles) where angles are measured with
    np.arctan2(y, x), i.e. counter-clockwise from the +x axis.
    """
    distances = np.sqrt(xpix ** 2 + ypix ** 2)
    polar_angles = np.arctan2(ypix, xpix)
    return distances, polar_angles
# Pipeline: warp to top-down view, threshold navigable terrain, convert
# navigable pixels to rover coordinates, then steer toward the mean angle.
image = mpimg.imread('angle_example.jpg')
warped = perspect_transform(image)
colorsel = color_thresh(warped, rgb_thresh=(160, 160, 160))
xpix, ypix = rover_coords(colorsel)
distances, angles = to_polar_coords(xpix, ypix)
avg_angle = angles.mean()
# Do some plotting
fig = plt.figure(figsize=(12,9))
plt.subplot(221)
plt.imshow(image)
plt.subplot(222)
plt.imshow(warped)
plt.subplot(223)
plt.imshow(colorsel, cmap='gray')
plt.subplot(224)
plt.plot(xpix, ypix, '.')
plt.ylim(-160, 160)
plt.xlim(0, 160)
arrow_length = 100
x_arrow = arrow_length * np.cos(avg_angle)
y_arrow = arrow_length * np.sin(avg_angle)
plt.arrow(0, 0, x_arrow, y_arrow, color='red', zorder=2, head_width=10, width=2)
plt.show()
# Clip the mean angle (in degrees) to the rover's steering range.
avg_angle_degrees = avg_angle * 180/np.pi
steering = np.clip(avg_angle_degrees, -15, 15)
print(f'Steering at {steering}, with {avg_angle_degrees}')
a16e1fc7a4c94579bcbe24b00e522485afc3152c | Python | Alfonsxh/Python | /LoggerTest/Test_logger.py | UTF-8 | 1,565 | 3.015625 | 3 | [] | no_license | """
@Author : Alfons
@Contact: alfons_xh@163.com
@File : Test_logger.py
@Time : 2019/5/7 16:41
"""
import os
import logging
def Init(level, filename, console):
    """Initialise the root logger.

    :param level: log level — either a logging constant (e.g. logging.DEBUG)
                  or a (case-insensitive) level-name string such as 'debug'
    :param filename: path of the log file to write
    :param console: if True, also emit log records to the console
    :return: None
    """
    logger = logging.getLogger()
    # Accept both numeric levels and name strings:
    # FATAL = 50, ERROR = 40, WARN = WARNING = 30, INFO = 20, DEBUG = 10, NOTSET = 0
    # or 'CRITICAL', 'FATAL', 'ERROR', 'WARN', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'
    logger.setLevel(level.upper() if str(level) == level else level)
    # Shared log-line format.
    BASIC_FORMAT = '%(asctime)s [%(name)s] %(filename)s[%(lineno)d] [%(levelname)s] %(message)s'
    DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
    formatter = logging.Formatter(BASIC_FORMAT, DATE_FORMAT)
    # File handler.  Bug fix: os.path.dirname() returns '' for a bare file
    # name and os.makedirs('') raises FileNotFoundError, so only create the
    # directory when there actually is one.
    log_dir = os.path.dirname(filename)
    if log_dir:
        os.makedirs(log_dir, exist_ok=True)
    file_handler = logging.FileHandler(filename)  # handler writing to the file
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    # Optional console handler.
    if console:
        console_handler = logging.StreamHandler()  # handler writing to the console
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
if __name__ == '__main__':
    # Demo: log to /logger.log and echo to the console.
    Init(logging.DEBUG, "/logger.log", True)
    # Init(logging.DEBUG, "/tmp/logger.log", False)
    logging.info('this is info')
    logging.debug('this is debug')
    # Bug fix: removed a stray `import unit` (no such module) that crashed
    # the demo with ModuleNotFoundError, plus the dangling `pass`.
84d9e2001b0c977317c5b8a4b76f7b61f12fac76 | Python | sadashiv30/pyPrograms | /rmotr/class2-Lists-Tuples-Comprehensions/factorial.py | UTF-8 | 671 | 4.40625 | 4 | [] | no_license | """
Write a function that produces all the terms needed to compute
the factorial of a number. Example:
The factorial of the number 5 is defined as: 5! = 5 x 4 x 3 x 2 x 1
The terms to compute the factorial of the number 5 are: [5, 4, 3, 2, 1].
Once you have that function, write another function that will compute the
factorial using the reduce function (related to functional programming).
Example:
terms = factorial_terms(5) # [5, 4, 3, 2, 1]
factorial = compute_factorial(terms) # 120
"""
def factorial_terms(a_number):
    """Return the factorial terms a_number, a_number - 1, ..., 1."""
    return range(a_number, 0, -1)
def compute_factorial(terms):
    """Multiply all the terms together (an empty input yields 1)."""
    product = 1
    for term in terms:
        product *= term
    return product
3a8d5b4bb7668b44ca1b05ee9b3aaa2ba9e9334d | Python | olimpiadi-informatica/oii | /2013/nazionali/fermata/gen/generatore.py | UTF-8 | 2,156 | 3.234375 | 3 | [] | no_license | #!/usr/bin/env python2
from limiti import *
usage="""Generatore per "Fermata".
Parametri:
* K (tipo di generazione)
* S (seed)
* Se K == 0:
Generazione random:
* N (primo numero in input)
* ST (numero di stati)
* D (massimo valore assoluto dei salti)
* Se K == 1:
Generazione a catene e cicli:
* N (numero di celle)
* S (numero di stati)
* Nchain (numero di catene che conducono al termine)
* Kchain (lunghezza di ogni catena)
* Ncycle (numero di cicli)
* Kcycle (lunghezza di ogni ciclo)
Constraint:
* 2 <= N <= %d
""" % MAXN
from sys import argv, exit, stderr
import os
from numpy.random import random, randint, seed as nseed
from random import choice, sample, shuffle, seed as rseed
import cycler
def run(N, ST, D):
    """Randomly generate and print a test case with N cells, ST states and
    jumps bounded by |D|.

    NOTE(review): output depends on the exact order of randint() calls, so
    the structure of this function must not be reordered.
    """
    transitions = []
    characters = [0 for _ in xrange(N)]
    # Walk the cells right-to-left, reusing compatible transition columns
    # or inventing new ones.
    for i in xrange(N-1, 0, -1):
        min_delta = -i
        max_delta = N-1-i
        good_characters = []
        for j, t in enumerate(transitions):
            if min(t) >= min_delta and max(t) <= max_delta:
                good_characters += [j]
        # NOTE(review): this local shadows random.choice imported above.
        choice = randint(0, len(good_characters)+1)
        if choice == len(good_characters):
            # Add a new element.
            transitions.append([randint(max(min_delta, -D), min(max_delta,D)) for _ in xrange(ST)])
            characters[i] = len(transitions)-1
        else:
            characters[i] = good_characters[choice]
    characters[0] = randint(0, len(transitions))
    C = len(transitions)
    # Emit header, the transition table, then one character per cell.
    print N, ST, C
    for cur_st in range(0,ST):
        for cur_c in range(0,C):
            print cur_st, cur_c, randint(0,ST), transitions[cur_c][cur_st]
    for i in xrange(0,N):
        print characters[i]
def example_case():
    """Print the fixed sample test case from the problem statement."""
    print """5 2 3
0 0 1 -2
0 1 0 -2
0 2 0 1
1 0 1 -1
1 1 0 -1
1 2 0 2
0
2
1
0
1"""
if __name__ == "__main__":
S, K = map(int, argv[1:3])
args = map(int, argv[3:])
if (K == 0 and len(args) != 3) or \
(K == 1 and len(args) != 6):
print usage
exit(1)
nseed(S)
rseed(S)
if K == -1:
example_case()
elif K == 0:
run(*args)
else:
cycler.genera(*args)
| true |
79f40a90d1d9faea046ddfce546456555dd6f8e3 | Python | quasarbright/quasarbright.github.io | /python/oop.py | UTF-8 | 2,807 | 3.78125 | 4 | [
"MIT"
] | permissive | import unittest
'''
an object is what an object has
hashmap of fields and methods
goals:
inheritance
dynamic dispatch
field and method access
tools: functions, lambdas, dictionaries
'''
def dot(obj, attribute_name, args=None):
    """Look up `attribute_name` on a dict-based object.

    Field lookups return the stored value; method lookups validate `args`
    (must be a non-string iterable of the right length) and invoke the method
    with the object as the implicit first argument.
    """
    fields = obj['fields']
    if attribute_name in fields:
        return fields[attribute_name]

    methods = obj['methods']
    if attribute_name not in methods:
        # this attribute doesn't exist in the object
        # TODO look for attribute in super type
        raise AttributeError(
            'unknown attribute for {}: {}'.format(obj, attribute_name))

    # Methods require a real argument list (strings don't count).
    try:
        iter(args)
        if isinstance(args, str):
            raise TypeError()
    except TypeError:
        raise TypeError('a list of arguments must be passed to a method')

    method = methods[attribute_name]
    # co_argcount includes the implicit object parameter, hence the -1.
    expected_len_args = method.__code__.co_argcount - 1
    if len(args) != expected_len_args:
        raise TypeError('{} takes {} positional arguments, but {} were given'.format(
            attribute_name, expected_len_args, len(args)))
    return method(obj, *args)
# point = {
# 'fields': {
# "x":3,
# "y":4
# },
# 'methods': {
# }
# }
def make_object():
    """Create the root object that every other object derives from.

    Fixes the original, which was syntactically invalid (missing comma after
    'super', no value for 'toString') and never returned the dict it built.
    """
    obj = {
        'super': None,
        'fields': {},
        'methods': {
            # Default string representation: show the object's fields.
            'toString': lambda this: str(this['fields']),
        }
    }
    return obj
def make_point(x, y):
    """Build a point object with x/y fields and a `mag` (magnitude) method."""
    def magnitude(this):
        # Euclidean norm, reading the coordinates back through `dot`.
        return (dot(this, 'x') ** 2 + dot(this, 'y') ** 2) ** 0.5

    return {
        'super': make_object,
        'fields': {'x': x, 'y': y},
        'methods': {'mag': magnitude},
    }
class TestOOP(unittest.TestCase):
    """Unit tests for the dict-based object model (`make_point` + `dot`)."""
    def setUp(self):
        # Fresh 3-4-5 point for every test.
        self.point = make_point(3, 4)
    def test_field_access(self):
        """Fields are returned directly; methods demand args; unknown names raise."""
        self.assertEqual(dot(self.point, 'x'), 3)
        self.assertEqual(dot(self.point, 'y'), 4)
        with self.assertRaises(Exception, msg='a list of arguments must be passed to a method'):
            dot(self.point, 'mag')
        with self.assertRaises(AttributeError):
            dot(self.point, 'nonfield')
    def test_direct_method_use(self):
        """mag of a (3, 4) point is 5.0."""
        self.assertEqual(dot(self.point, 'mag', []), 5.0)
    def test_method_arg_validation(self):
        """Non-iterables, strings, and wrong arg counts all raise TypeError."""
        with self.assertRaises(TypeError, msg='a list of arguments must be passed to a method'):
            dot(self.point, 'mag', 234)
        with self.assertRaises(TypeError, msg='a list of arguments must be passed to a method'):
            dot(self.point, 'mag', 'hey i am iterable')
        with self.assertRaises(TypeError):
            dot(self.point, 'mag', ['an argument where there should be none'])
unittest.main()
| true |
28940b522a11c6b7022443a3136a0090e14a85ec | Python | blackadar/xray-qa | /measure.py | UTF-8 | 7,907 | 3.453125 | 3 | [] | no_license | """
Algorithmically measures the distance between bones in a hand joint.
"""
import pathlib
import numpy as np
import numpy.polynomial.polynomial as poly
from PIL import Image
read_from = pathlib.Path('data/out/')
def find_horizontal_range(image, show_plots=True):
    """
    Algorithmically discovers the approximate horizontal range of a joint.
    :param show_plots: Display intermediate plots for the algorithm
    :param image: np.ndarray Image to analyze
    :return: (start, stop) Approximation of starting and stopping columns
             (numpy integer indices from argmax)
    """
    # Variables in the operation which can be tuned to the data
    tb_rows = 5  # Number of rows on the top and bottom of the image to consider in the row average
    polyfit_degree = 6  # Degree of the polynomial fit to the averaged rows
    tb_gradient_poll_rate = 3  # Polling rate of the gradient of the row average
    ignore_cols = 25  # Number of columns to ignore on the left and right when finding the max rate of change
    # Pre-compute some stats to make things easier
    num_cols = image.shape[1]
    cols_range = np.arange(0, num_cols)
    # Trim the top and bottom of the joint
    top = image[0:tb_rows, :]
    bottom = image[-tb_rows:, :]
    tb = np.vstack([top, bottom])
    # Compute stats on the compiled top and bottom rows...
    # Average the rows together to get a single average row of values:
    tb_avg = np.mean(tb, axis=(0, ))
    # Find a Polynomial to fit that average row:
    tb_poly = poly.Polynomial(poly.polyfit(cols_range, tb_avg, deg=polyfit_degree))(cols_range)
    # Find the derivative of the average (with a sampling rate to reduce amplitude from noise):
    tb_prime = np.abs(np.gradient(tb_avg, tb_gradient_poll_rate))
    # Find the derivative of the polynomial:
    poly_prime = np.abs(np.gradient(tb_poly))
    # Ignore the edge maxes as they're not what we're looking for.
    denoise_poly_prime = poly_prime[ignore_cols:-ignore_cols]
    # If the image follows the observed pattern, there will be two inflections to find on either side of the joint,
    # for the start and end of bone in the image. We'll split the image in half (we can assume it's centered post-QA)
    # to find the inflection points with argmax.
    dnpp_1 = denoise_poly_prime[:len(denoise_poly_prime)//2]
    dnpp_2 = denoise_poly_prime[len(denoise_poly_prime)//2:]
    # Finally, find the max of the arrays and offset them to match the real image column indices.
    bone_start = np.argmax(dnpp_1) + ignore_cols # We took some columns off the edge earlier
    bone_end = np.argmax(dnpp_2) + ignore_cols + len(denoise_poly_prime)//2 # Same as above, also offset
    if show_plots:
        # matplotlib is imported only when plots are requested.
        import matplotlib.pyplot as plt
        import matplotlib.patches as patches
        plt.plot(tb_avg, label="Image Column Average")
        plt.plot(tb_poly, label="Polyfit")
        plt.vlines(bone_start, 0, np.max(tb_poly), linestyles="--", colors='red')
        plt.vlines(bone_end, 0, np.max(tb_poly), linestyles="--", colors='red')
        plt.xlabel('Image Row (x)')
        plt.ylabel('Average Value')
        plt.title('Image Average Column')
        plt.legend()
        plt.show()
        plt.plot(tb_prime, label="Image Gradient")
        plt.plot(poly_prime, label="Polyfit Gradient")
        plt.vlines(bone_start, 0, np.max(poly_prime), linestyles="--", colors='red')
        plt.vlines(bone_end, 0, np.max(poly_prime), linestyles="--", colors='red')
        plt.xlabel('Image Row (x)')
        plt.ylabel('Absolute Value, Gradient of Column')
        plt.title('Image Rate of Change')
        plt.legend()
        plt.show()
        fig, ax = plt.subplots(1)
        ax.imshow(image, cmap='gist_gray')
        rect = patches.Rectangle((bone_start, 0), bone_end-bone_start, image.shape[1], alpha=0.2)
        ax.add_patch(rect)
        plt.show()
    return bone_start, bone_end
def measure_gaps(image, horizontal_range, show_plots=True):
    """
    Algorithmically measures the average gap distance between joint bones over a horizontal range.
    :param show_plots: Display intermediate plots for the algorithm
    :param image: np.ndarray Image to analyze
    :param horizontal_range: (start, stop) Columns to run over, can be estimated by find_horizontal_range()
    :return: (start, end) Algorithm approximation of start and end rows of the joint gap,
             or None when no run of above-threshold gradient rows is found
    """
    # Parameters for the algorithm
    threshold = 0.6
    tolerance = 5 # Number of pixels that can be below the threshold while maintaining the region
    # NOTE(review): max_length, min_length and valid_range are declared but never
    # used below - the TODOs were evidently not finished.
    max_length = 20 # TODO: Determine max #pixels a joint space could be
    min_length = 10 # TODO: Determine min #pixels a joint space could be
    valid_range = (60, 90) # TODO: Determine valid range of pixels a joint space could be in
    polyfit_degree = 5
    num_cols = horizontal_range[1] - horizontal_range[0]
    cols_range = np.arange(0, num_cols)
    # Work only on the columns inside the estimated bone region.
    trim = image[:, horizontal_range[0]:horizontal_range[1]]
    col_grads = np.array([np.abs(np.gradient(col)) for col in trim.T]).T
    # col_grads_polyfits = np.array([poly.Polynomial(poly.polyfit(cols_range, col, deg=polyfit_degree))(cols_range) for col in col_grads])
    grad_avg = np.mean(col_grads, axis=1)
    # Rows whose average gradient is at least `threshold` of the maximum.
    thresh_indices = np.argwhere(grad_avg >= np.max(grad_avg) * threshold)
    runs = [] # List of tuples (start, stop)
    prev = thresh_indices[0][0]
    start = thresh_indices[0][0]
    # Group consecutive above-threshold rows into runs, allowing gaps up to
    # `tolerance` rows.
    # NOTE(review): a run is only appended when it is *broken*; the trailing run
    # is never appended after the loop, so a single unbroken run leaves `runs`
    # empty and the function reports "No runs found!" - confirm intent.
    for idx in thresh_indices[1:]:
        diff = idx - prev
        if diff > tolerance:
            # Break the run
            runs.append((start, prev))
            start = idx[0]
            prev = idx[0]
            continue
        prev = idx[0]
    if len(runs) == 0:
        print("No runs found!")
        return None
    # Ideally, this should result in a single run. But sometimes it won't so we'll need to pick.
    # The safest bet is the one that encompasses the center of the image, since the joint gap should be very close.
    # TODO: Investigate Longest Run Plausibility
    result = None
    if len(runs) > 1:
        for start, end in runs:
            # If we never pass there's no result that encompassed the center of the image. We'll just choose the first.
            result = runs[0]
            if image.shape[0]//2 in range(start, end):
                result = (start, end)
                break
    else:
        result = runs[0]
    if show_plots:
        # matplotlib is imported only when plots are requested.
        import matplotlib.pyplot as plt
        import matplotlib.patches as patches
        plt.plot(grad_avg, label="Avg Gradient")
        plt.hlines(np.max(grad_avg) * threshold, 0, image.shape[0],
                   linestyles="--", colors='orange', label="Threshold")
        plt.vlines(result[0], np.min(grad_avg), np.max(grad_avg), linestyles="--", colors='red', label="Gap Start")
        plt.vlines(result[1], np.min(grad_avg), np.max(grad_avg), linestyles="--", colors='red', label="Gap End")
        plt.legend()
        plt.xlabel("Image Rows (x)")
        plt.ylabel("Average Gradient Amplitude")
        plt.title("Region Gradient Analysis")
        plt.show()
        fig, ax = plt.subplots(1)
        ax.imshow(image, cmap='gist_gray')
        rect = patches.Rectangle((horizontal_range[0], 0), horizontal_range[1] - horizontal_range[0],
                                 image.shape[0] - 1, alpha=0.2)
        ax.add_patch(rect)
        ax.hlines(result[0], 0, image.shape[1] - 1, linestyles="--", colors='red')
        ax.hlines(result[1], 0, image.shape[1] - 1, linestyles="--", colors='red')
        plt.show()
    return result
def main():
    """
    Run measurement across the input folder, and output to TODO
    :return: None

    Currently processes a single hard-coded image end to end: locate the
    horizontal bone range, then measure the joint gap, showing plots.
    """
    image = Image.open('data/out/9000099_v06_dip2.png')
    i = np.array(image)
    h = find_horizontal_range(i, show_plots=True)
    measure_gaps(i, h, show_plots=True)
    # TODO: Read entire directory
    # TODO: Output measurement results
if __name__ == "__main__":
main()
| true |
5539633a7a901d247c183eb87a466b2d6148dffe | Python | sadhudgp91/Smart-City | /Camera.py | UTF-8 | 566 | 2.75 | 3 | [] | no_license | # import the necessary packages
import time
import sys, os
import RPi.GPIO as GPIO
# Use BCM GPIO references
# instead of physical pin numbers
GPIO.setmode(GPIO.BCM)
# Define GPIO signals to use
# Physical pins 18
# GPIO18
pin_button = 18
GPIO.setup(pin_button, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# allow the camera to warmup
time.sleep(0.5)
i=0
while True:
input_state = GPIO.input(pin_button)
if input_state == False:
print('button pressed')
os.system("fswebcam -r 1280X720 -S 15 image_" + str(i) +".jpg")
time.sleep(0.3)
i += 1 | true |
f88625e0fb77d5fc2684bf9a6eccfc574a2c3ac6 | Python | Lich2013/leetcode | /Reorganize String.py | UTF-8 | 808 | 3.078125 | 3 | [] | no_license | class Solution:
def reorganizeString(self, S: str) -> str:
if len(S) == 0:
return ''
letterCount = [0]*26
for x in S:
letterCount[ord(x)-97] += 1
if max(letterCount)*2 > len(S)+1:
return ''
cur = letterCount.index(max(letterCount))
letterCount[cur] -= 1
s = chr(cur+97)
for _ in range(len(S)-1):
maxNum = 0
index = 0
for i, v in enumerate(letterCount):
if i == cur:
continue
if maxNum < v:
maxNum, index = v, i
s += chr(index+97)
cur = index
letterCount[cur] -= 1
return s
if __name__ == '__main__':
print(Solution().reorganizeString("bbrst")) | true |
6153f79f91c40164683321b62db00deb207d5bc2 | Python | nailanawshaba/Tyler | /homeControllerClient.py | UTF-8 | 748 | 2.890625 | 3 | [] | no_license | #!/usr/bin/python
import urllib
import urllib2
import json
class HomeControllerClient:
    """HTTP client for a home-automation server exposing WeMo switch endpoints
    on port 5000.  (Python 2: uses urllib/urllib2 and print statements.)"""
    # NOTE(review): class-level attribute, so the switch cache is shared by
    # every HomeControllerClient instance.
    switchList = dict()
    def __init__(self, serverIP):
        """serverIP: host/IP of the controller server (port 5000 is appended)."""
        self.serverAddress = "http://" + serverIP + ":5000/"
    def getSwitches(self):
        """Fetch /wemo/list, cache each switch's state by name, return the cache.

        Expects a JSON array of objects with 'name' and 'state' keys.
        """
        req = urllib2.Request(self.serverAddress + "wemo/list")
        resp = urllib2.urlopen(req)
        responseBody = resp.read()
        jsonResponse = json.loads(responseBody)
        for s in jsonResponse:
            self.switchList[str(s['name'])] = s['state']
        return self.switchList
    def toggleSwitch(self, switchName):
        """Flip the named switch via /wemo/on|off/<name> based on cached state.

        NOTE(review): the cached state is not updated after the request, so a
        second toggle without an intervening getSwitches() repeats the same
        command.  The response is also read but unused.
        """
        print "TOGGLE: " + switchName
        order = "on"
        if(self.switchList[switchName] == 1):
            order = "off"
        req = urllib2.Request(self.serverAddress + "wemo/" + order + "/" + urllib.quote(switchName))
        resp = urllib2.urlopen(req)
0a8c352cb2c225a77d8f52b4dabef02d4955b313 | Python | sarahappleby/cgm | /absorption/ml_project/train_spectra/tpot_forest_lines.py | UTF-8 | 3,199 | 2.578125 | 3 | [] | no_license | ### Routine to apply the sklearn randomm forest to the line by line absorption data
import h5py
import numpy as np
import pandas as pd
import pickle
import sys
from tpot import TPOTRegressor
from sklearn import preprocessing
from sklearn.metrics import r2_score, explained_variance_score, mean_squared_log_error, mean_squared_error
from scipy.stats import pearsonr
np.random.seed(1)
if __name__ == '__main__':
    # CLI: simulation model, wind, snapshot, and absorption line to train on.
    model = sys.argv[1]
    wind = sys.argv[2]
    snap = sys.argv[3]
    line = sys.argv[4]
    # TPOT genetic-search settings.
    generations = 100
    population_size=100
    cv = 5
    random_state = 1
    verbosity = 2
    n_jobs = 4
    lines = ["H1215", "MgII2796", "CII1334", "SiIII1206", "CIV1548", "OVI1031"]
    lines_short = ['HI', 'MgII', 'CII', 'SiIII', 'CIV', 'OVI']
    features = ['N', 'b', 'EW', 'dv', 'r_perp', 'mass', 'ssfr', 'kappa_rot']
    predictor = 'Z'
    # NOTE(review): model_dir is assigned but never used below.
    model_dir = f'/disk04/sapple/cgm/absorption/ml_project/train_spectra/models/'
    export_script = f'tpot/{model}_{wind}_{snap}_{lines_short[lines.index(line)]}_lines_tpot_scaled_{predictor}.py'
    # Step 1) read in the training data
    # 'train_mask' is a boolean column: True rows train, ~train rows test.
    df_full = pd.read_csv(f'data/{model}_{wind}_{snap}_{line}_lines.csv')
    train = df_full['train_mask']
    # Step 2) Scale the data such that means are zero and variance is 1
    feature_scaler = preprocessing.StandardScaler().fit(df_full[train][features])
    predictor_scaler = preprocessing.StandardScaler().fit(np.array(df_full[train][predictor]).reshape(-1, 1) )
    # Step 3) Set up and run the TPOT optimizer to find the best tree-based pipeline
    pipeline_optimizer = TPOTRegressor(generations=generations,
                                       population_size=population_size,
                                       cv=cv,
                                       random_state=random_state,
                                       verbosity=verbosity,
                                       n_jobs=n_jobs)
    #pipeline_optimizer.fit(df_full[train][features], df_full[train][predictor])
    pipeline_optimizer.fit(feature_scaler.transform(df_full[train][features]), predictor_scaler.transform(np.array(df_full[train][predictor]).reshape(-1, 1) ))
    # NOTE(review): score() is fed *unscaled* test data although the pipeline
    # was fit on scaled data - confirm this is intended.
    print(pipeline_optimizer.score(df_full[~train][features], df_full[~train][predictor]))
    pipeline_optimizer.export(export_script)
    # Step 4) Predict conditions
    # Predictions are made in scaled space and inverse-transformed back.
    #conditions_pred = pipeline_optimizer.predict(df_full[~train][features] )
    conditions_pred = predictor_scaler.inverse_transform(np.array( pipeline_optimizer.predict(feature_scaler.transform(df_full[~train][features]))).reshape(-1, 1) )
    conditions_pred = pd.DataFrame(conditions_pred,columns=[predictor])
    conditions_true = pd.DataFrame(df_full[~train],columns=[predictor])
    # Step 5) Evaluate performance
    pearson = round(pearsonr(df_full[~train][predictor],conditions_pred[predictor])[0],3)
    err = pd.DataFrame({'Predictors': conditions_pred.columns, 'Pearson': pearson})
    # NOTE(review): `scores` is assigned but never used.
    scores = {}
    for _scorer in [r2_score, explained_variance_score, mean_squared_error]:
        err[_scorer.__name__] = _scorer(df_full[~train][predictor],
                                        conditions_pred, multioutput='raw_values')
    print(err)
| true |
a2ab3c1a60f6e9fd84769b5fb221921b49a78977 | Python | Shaheen-Ebrahimi/COMHAND | /control.py | UTF-8 | 822 | 2.890625 | 3 | [] | no_license | def openTab():
'''Opens tab when whole hand visible'''
import webbrowser
import speech
print('Open')
website = speech.listen().strip().split(' ')
url = ''
if(('Google' in website[0] or 'google' in website[0]) and len(website)>1):
url += 'https://www.google.com/search?q='
for words in range(1,len(website)):
url += website[words]
url += ' '
elif(len(website)>1):
for words in website:
url += website
else:
url += 'https://www.' + website[0] + '.com'
print('link is:',url)
webbrowser.open_new_tab(url)
def closeTab():
    '''Closes tab when fist made'''
    import os
    print('Close')
    # NOTE(review): each os.system call runs in its own subshell, so these `cd`
    # commands do not change this process's working directory and have no
    # lasting effect; only the killall actually does anything.
    os.system('cd ~')
    os.system('killall firefox')
    os.system('cd Documents/Projects/TAMUHack')
06e7515013ef77e4f7fe383a63e3e2bbb1ac43ac | Python | jzhoucliqr/kube-auto-label | /quick-and-dirty/feature.py | UTF-8 | 332 | 2.75 | 3 | [] | no_license | #!/bin/env python
# Python 2 script: split newline-delimited JSON issues into a text-feature file
# (x.txt: title + body, newlines flattened) and a label file (y.txt: CSV labels).
import json
with open('./data.json') as f:
    content = f.readlines()
# NOTE(review): xf and yf are opened for writing but never closed/flushed
# explicitly; the interpreter exit is relied on to do it.
xf = open('./x.txt', 'w')
yf = open('./y.txt', 'w')
for c in content:
    # One JSON object per line, with 'Title', 'Body' and 'Labels' keys.
    j = json.loads(c)
    print j
    print>>xf, (j['Title'] + j['Body']).encode('utf-8').replace("\r\n", " ").replace("\n", " ")
    print>>yf, ",".join(j['Labels'])
39feada77ce902b3257baff30d3c29dae8467841 | Python | sassyfire/abe487 | /problems/grph/grph.py.save | UTF-8 | 1,186 | 2.78125 | 3 | [] | no_license | #!/usr/bin/env python3
import os
import sys
from Bio import SeqIO
from collections import defaultdict
args = sys.argv[1:]
if len(args) != 1:
print('Usage: {} FILE'.format(os.path.basename(sys.argv[0])))
sys.exit(1)
file = args[0]
if not os.path.isfile(file):
print('"{}" is not a file'.format(file))
sys.exit(1)
#def sequence_graph(file):
seq_records = SeqIO.parse(file, "fasta")
seq_suf = []
seq_pre = []
suffixes = defaultdict(list)
prefixes = defaultdict(list)
for record in seq_records:
seq_suf = str(record.seq[-3:])
seq_pre = str(record.seq[0:3])
suffixes[seq_pre].append(record.id)
prefixes[seq_suf].append(record.id)
for seq_pre, seq_suf in suffixe
print('suf {} pre {}'.format(sorted(suffixes.items()), sorted(prefixes.items())))
#print('{} {}'.format(suffixes, prefixes))
#pairs = list(zip(suffixes.items(), prefixes.items()))
#pairs = list(zip(suffixes, prefixes))
#print(' {} {} '.format(suffixes, prefixes))
#open a file and make sure it is a file read an open file as long as two sequences
#are not the same, match the suffix of one sequence to the prefix of another
#print the sequence ids in that respective order
| true |
574ad8b0844bdecf95481a086d78d7988e9bf464 | Python | kaiwensun/leetcode | /0001-0500/0085.Maximal Rectangle.2.py | UTF-8 | 1,487 | 3.015625 | 3 | [] | no_license | class Solution:
def maximalRectangle(self, matrix: List[List[str]]) -> int:
if not matrix:
return 0
n = len(matrix[0])
data = [[0, float('inf'), float('inf')] for _ in range(n)] # [[heights, left_arms, right_arms], ...]
res = 0
for row in matrix:
row = list(map(int, row))
new_data = [[0] * 3 for _ in range(n)]
# new heights
for i in range(n):
new_data[i][0] = row[i] * (data[i][0] + row[i])
# new left arms
left = 0
for i in range(n):
left = row[i] * (left + row[i])
if i == 0:
new_data[i][1] = row[i]
else:
if data[i][0] == 0:
new_data[i][1] = left
else:
new_data[i][1] = min(left, data[i][1])
# new right arms
right = 0
for i in range(n - 1, -1, -1):
right = row[i] * (right + row[i])
if i == n - 1:
new_data[i][2] = row[i]
else:
if data[i][0] == 0:
new_data[i][2] = right
else:
new_data[i][2] = min(right, data[i][2])
res = max(res, new_data[i][0] * (new_data[i][1] + new_data[i][2] - 1))
data = new_data
return res
| true |
0ea32ef3c2cd7d89c6af8e16826f1e7bc62b3ff5 | Python | BowenNCSU/lecture-notes | /project/spring2018.py | UTF-8 | 3,070 | 3.8125 | 4 | [] | no_license | """
This assignement will walk you through gathering data for various
econmic indexes and calculating the correlation between them
using the daily returns.
The Federal Reserve Bank of St. Louis (FRED) makes various economic data
available for research: https://fred.stlouisfed.org/
Data from the site can be downloaded in various in a CSV format.
Below is the url for daily prices of contentional gasoline at New Your Harbor
(denoted by the symbol DGASNYH) from Jan 1, 2017 to Dec 31, 2017.
https://fred.stlouisfed.org/graph/fredgraph.csv?cosd=2017-01-01&coed=2017-12-31&id=DGASNYH
The data is returned in CSV (comma separated format) with the following columns: Date, Price
The daily return is defined by:
(P_n - P_n-1) / P_n-1
where P_n denotes the nth price and P_n-1 denotes the (n-1)th price. Here
n is ordered by date, ascending.
The function signatures for various steps of this process have been
given below. The names and parameters of these functions should not
be changed. You are free to write additional functions or classes as
needed. You are welcome to use any modules in the Python
standard library as well as NumPy, SciPy, and Pandas external
libraries. All code must run on Python 3.6.4.
"""
def build_request_url(symbol, start_date, end_date):
    """
    Build the FRED csv download url for a series over a date range.

    :param symbol: FRED series id as a string, e.g. "DGASNYH"
    :param start_date: datetime.date, inclusive start of the range
    :param end_date: datetime.date, inclusive end of the range
    :return: the fredgraph.csv download url as a string
    """
    # Matches the documented example:
    # https://fred.stlouisfed.org/graph/fredgraph.csv?cosd=...&coed=...&id=...
    return ("https://fred.stlouisfed.org/graph/fredgraph.csv"
            "?cosd={}&coed={}&id={}".format(
                start_date.isoformat(), end_date.isoformat(), symbol))
def get_fred_data(url):
    """
    Download a FRED csv and parse it.

    :param url: fredgraph.csv url as returned by build_request_url
    :return: list of (datetime.date, float price) tuples; rows whose value is
             non-numeric (FRED marks missing observations with ".") are dropped
    """
    import csv
    import io
    import urllib.request
    from datetime import datetime

    with urllib.request.urlopen(url) as resp:
        text = resp.read().decode("utf-8")
    rows = csv.reader(io.StringIO(text))
    next(rows, None)  # skip the "DATE,VALUE" header line
    data = []
    for row in rows:
        if len(row) < 2:
            continue
        try:
            price = float(row[1])
        except ValueError:
            continue  # non-numeric value: drop the observation
        data.append((datetime.strptime(row[0], "%Y-%m-%d").date(), price))
    return data
def calculate_returns(data):
    """
    Compute daily returns from a price series.

    :param data: list of (date, price) tuples (any order; sorted by date here)
    :return: list of (date, return) tuples, one item shorter than the input,
             where return_n = (P_n - P_n-1) / P_n-1 with n ordered by date
    """
    ordered = sorted(data)
    return [(day, (price - prev_price) / prev_price)
            for (_, prev_price), (day, price) in zip(ordered, ordered[1:])]
def calculate_correlation(data):
    """
    Pearson correlation coefficient of two daily-return series.

    Only dates present in *both* series contribute to the calculation.

    :param data: two-element list [returns_a, returns_b], each a list of
                 (date, return) tuples as produced by calculate_returns
    :return: the Pearson correlation coefficient as a float
    """
    import math

    series_a, series_b = data
    by_date_a = dict(series_a)
    by_date_b = dict(series_b)
    common = sorted(set(by_date_a) & set(by_date_b))
    xs = [by_date_a[d] for d in common]
    ys = [by_date_b[d] for d in common]
    n = len(common)
    mean_x = sum(xs) / n
    mean_y = sum(ys) / n
    # Pearson r = cov(x, y) / (std(x) * std(y)); the 1/n factors cancel.
    cov = sum((x - mean_x) * (y - mean_y) for x, y in zip(xs, ys))
    var_x = sum((x - mean_x) ** 2 for x in xs)
    var_y = sum((y - mean_y) ** 2 for y in ys)
    return cov / math.sqrt(var_x * var_y)
def main():
    """
    Fetch daily DJIA and US/Euro exchange rate (DEXUSEU) prices for 2017 and
    print the Pearson correlation of their daily returns.

    Only dates where both series have a value are used when calculating the
    returns, as required before computing the correlation.
    """
    from datetime import date

    start, end = date(2017, 1, 1), date(2017, 12, 31)
    djia = get_fred_data(build_request_url("DJIA", start, end))
    dexuseu = get_fred_data(build_request_url("DEXUSEU", start, end))
    # Restrict both price series to their common dates first.
    common = {d for d, _ in djia} & {d for d, _ in dexuseu}
    djia_returns = calculate_returns([p for p in djia if p[0] in common])
    dex_returns = calculate_returns([p for p in dexuseu if p[0] in common])
    print(calculate_correlation([djia_returns, dex_returns]))
pass
if __name__ == "__main__":
"""
When this module as run as a script it will call the main function.
You should not modify this code.
"""
main()
| true |
81d6bfae9de4177de983df305415e0bd7af23b9f | Python | kut-info-ase-2019/raspberry-pi-g05 | /ultrasonic.py | UTF-8 | 3,111 | 3.640625 | 4 | [] | no_license | import RPi.GPIO as GPIO
import time
def pulse_in(pin, value=GPIO.HIGH, timeout=1.0):
    """
    Measure the length of a pulse on `pin`.

    With value=GPIO.HIGH the function waits for any previous HIGH pulse to end,
    waits for the input to go HIGH again, then times how long it stays HIGH
    (i.e. the pulse width).  If `timeout` elapses at any stage, 0 is returned.

    NOTE(review): the original (Japanese) docstring said "microseconds"; the
    value actually returned is the difference of two time.time() calls, i.e.
    seconds.

    :param pin: pin number or GPIO number (interpretation depends on GPIO.setmode)
    :param value: pulse polarity to measure (GPIO.HIGH or GPIO.LOW; default HIGH)
    :param timeout: overall timeout in seconds (default 1.0)
    :return: pulse length in seconds, or 0 on timeout
    """
    start_time = time.time()
    not_value = (not value)
    # Wait for any previous pulse to finish
    while GPIO.input(pin) == value:
        if time.time() - start_time > timeout:
            return 0
    # Wait for the pulse to start
    while GPIO.input(pin) == not_value:
        if time.time() - start_time > timeout:
            return 0
    # Record the pulse start time
    start = time.time()
    # Wait for the pulse to end
    while GPIO.input(pin) == value:
        if time.time() - start_time > timeout:
            return 0
    # Record the pulse end time
    end = time.time()
    return end - start
def init_sensors(trig, echo, mode=GPIO.BCM):
    """
    Initialise the GPIO pins for the ultrasonic sensor.

    Cleans up any previous GPIO state, selects the numbering mode, then sets
    the trigger pin as output and the echo pin as input.

    :param trig: trigger pin number or GPIO number
    :param echo: echo pin number or GPIO number
    :param mode: GPIO.BCM or GPIO.BOARD (default: GPIO.BCM)
    :return: None
    """
    GPIO.cleanup()
    GPIO.setmode(mode)
    GPIO.setup(trig, GPIO.OUT)
    GPIO.setup(echo, GPIO.IN)
def get_distance(trig, echo, temp=15):
    """
    Measure a distance with the ultrasonic sensor.

    Fires a >10 us trigger pulse, measures the echo pulse with pulse_in, and
    converts the round-trip time to a one-way distance using a
    temperature-corrected speed of sound.

    :param trig: trigger pin (GPIO.OUT); number depends on GPIO.setmode
    :param echo: echo pin (GPIO.IN); number depends on GPIO.setmode
    :param temp: air temperature in Celsius if available (default 15)
    :return: distance in cm, or 0 on timeout
    """
    # Reset the trigger output
    GPIO.output(trig, GPIO.LOW)
    time.sleep(0.3)
    # Fire the trigger (must stay HIGH for at least 10 us)
    GPIO.output(trig, GPIO.HIGH)
    time.sleep(0.000011)
    # Stop the trigger
    GPIO.output(trig, GPIO.LOW)
    # Measure the echo pulse length (seconds)
    dur = pulse_in(echo, GPIO.HIGH, 1.0)
    # ( pulse time x (331.50 + 0.61 * temp) m/s ) x (100 cm/m) / 2 round trip
    # return dur * (331.50 + 0.61 * temp) * 100 / 2
    return dur * (331.50 + 0.61 * temp) * 50
if __name__ == "__main__":
    # BCM pin assignment for the HC-SR04-style sensor.
    GPIO_TRIG = 26
    GPIO_ECHO = 19
    init_sensors(GPIO_TRIG, GPIO_ECHO)
    # Print the measured distance ("距離" = distance) every 2 seconds, forever.
    while True:
        print("距離:{0} cm".format(get_distance(GPIO_TRIG, GPIO_ECHO)))
        time.sleep(2)
e52107f26a037b6e88aaf45b72247702ef6d0cbd | Python | Kinddle-tick/ML_Bayesian_prediction | /Myfunc.py | UTF-8 | 26,080 | 2.8125 | 3 | [] | no_license | import pyqtgraph as pg
import pandas as pd
import numpy as np
from pyqtgraph.Qt import QtCore, QtGui
from collections import Counter
import time
from sklearn.cluster import Birch
from scipy import stats
from matplotlib import pyplot as plt
'''
Custom functions can be written and registered in this module.  Because of how
the GUI works, every function must satisfy these constraints:
1. The first parameter must be an np.array of points.
2. A function may expose up to three float values controllable directly from
   the GUI window; if a value is not really a float, convert it inside the
   function (e.g. with int).
3. The return value must be a tuple: the first element is the clustering
   result for all points in input order, the second element is the cluster
   centers (use an empty list [] if there are none).
Register functions for display in AlgorithmList with the format:
{"display name": {"func": function_object, "para": {"displayed parameter name": default_value, }}, }
`para` may be an empty dict; the order of its entries must match the order of
the function's parameters.
'''
prime_list=[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103,
107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223,
227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293]
def DistanceMatrix(point:[[2,11],[3,5],]):
    """Pairwise *squared* Euclidean distances between the rows of `point`.

    Returns an (n, n) matrix D with D[i, j] = ||p_i - p_j||**2, computed via
    the expansion ||a||^2 + ||b||^2 - 2*a.b.
    """
    sq_norms = np.sum(np.asarray(point) ** 2, axis=1)
    cross = np.dot(point, np.transpose(point))
    return sq_norms[:, None] + sq_norms[None, :] - 2 * cross
def CoopDistanceMatrix(point:[[2,11],[3,5],],point2):
    """Squared Euclidean distances between each row of `point` and each row of
    `point2`; returns a (len(point), len(point2)) matrix.
    """
    left = np.sum(point ** 2, axis=1)[:, np.newaxis]
    right = np.sum(point2 ** 2, axis=1)[np.newaxis, :]
    return left + right - 2 * np.dot(point, point2.T)
def Coopcheckdiv(x, y, primes=None):
    '''
    Compare two clusterings of the same points and return an "optimistic
    accuracy".

    Each cluster label of x and of y is mapped in place to a distinct prime, so
    the element-wise product x*y uniquely identifies every (x-cluster,
    y-cluster) pairing.  The most common pairings are counted as correctly
    matched points.

    :param x: 1-D ndarray of cluster labels (modified in place); negative
              labels are treated as noise and never matched
    :param y: 1-D ndarray of cluster labels, same length as x (modified in place)
    :param primes: optional ascending prime table; defaults to the module-level
                   prime_list
    :return: fraction of points belonging to the best-matching cluster pairings
    '''
    if primes is None:
        primes = prime_list
    x_labels = set(x)
    y_labels = set(y)
    x_clusters = len(x_labels)
    y_clusters = len(y_labels)
    num = 2
    # Start far enough into the prime table that the primes used for the two
    # clusterings cannot collide with each other.
    # NOTE(review): this assumes the original labels are small ints that never
    # equal one of the primes being written - confirm for the caller's data.
    while primes[num] < len(x_labels) + len(y_labels):
        num += 1
    for label in x_labels:
        if label < 0:  # noise belongs to no cluster: zero it so products vanish
            x[np.where(x == label)] = 0
            x_clusters -= 1
            continue
        x[np.where(x == label)] = primes[num]
        num += 1
    for label in y_labels:
        if label < 0:
            y[np.where(y == label)] = 0  # bug fix: the old code overwrote x here
            y_clusters -= 1
            continue
        y[np.where(y == label)] = primes[num]
        num += 1
    # Count the most common non-noise pairings; at most min(#x, #y) clusters
    # can be matched one-to-one.
    products = x * y
    pair_counts = Counter(products[products != 0]).most_common(min(x_clusters, y_clusters))
    if not pair_counts:  # every point was noise
        return 0.0
    right = sum(np.array(pair_counts)[:, 1])
    return right / len(x)
def Prim(D_M):
    """
    Prim's minimum spanning tree over a dense distance matrix.

    Starts from vertex 0 and repeatedly adds the cheapest edge connecting a
    tree vertex to a vertex outside the tree.

    Bug fix: the previous implementation located the submatrix minimum with
    np.where over the *whole* matrix, so a duplicate distance value elsewhere
    (including between two already-spanned vertices) could select a wrong edge
    and leave the loop running forever.

    :param D_M: square symmetric distance matrix (ndarray or nested lists)
    :return: list of [u, v] edges; u was inside the tree when v was added
    """
    n = len(D_M)
    in_tree = {0}
    edges = []
    while len(in_tree) < n:
        best = None  # (distance, tree vertex, outside vertex)
        for u in in_tree:
            for v in range(n):
                if v in in_tree:
                    continue
                if best is None or D_M[u][v] < best[0]:
                    best = (D_M[u][v], u, v)
        edges.append([best[1], best[2]])
        in_tree.add(best[2])
    return edges
def KMeans(point, div=2):
    """Lloyd's k-means over `point` with `div` clusters.

    Centroids are initialised from randomly chosen input points (duplicates
    possible, since np.random.choice samples with replacement).  Iterates until
    the centroid array is unchanged between passes.  Clusters that become empty
    yield NaN means and are re-seeded with random points.

    :param point: array-like of points, one row per point
    :param div: number of clusters (cast to int)
    :return: (labels array in input order, centroid array)
    """
    div=int(div)
    m=np.array([point[i] for i in np.random.choice(range(len(point)),size=div)])
    # time=0
    point=np.array(point)
    # +1 guarantees the first comparison fails so the loop runs at least once.
    old_m = m.copy()+1
    while not np.all(old_m==m):
        # time+=1
        # CoopDistanceMatrix returns squared distances; argmin is unaffected.
        DM=CoopDistanceMatrix(point,m)
        raw_div=np.argmin(DM,axis=1)
        old_m=m.copy()
        for i in range(div):
            m[i]=np.average(point[np.where(raw_div==i)[0]],axis=0)
        if np.any(np.isnan(m)):
            # Empty cluster: its mean is NaN; replace it with a random point.
            m[np.where(np.any(np.isnan(m), axis=1))] = \
                np.array([point[i] for i in np.random.choice(range(len(point)),
                                                             size=np.sum(np.any(np.isnan(m), axis=1)))])
            div=len(m)
    # print(time)
    return raw_div,m
def DB_scan(point, Eps=0.05, MinPts=5):
    """DBSCAN clustering.

    Core points are those with more than MinPts+1 neighbours within Eps
    (distances come from DistanceMatrix, which returns *squared* distances, so
    Eps is compared against squared distance).  Each connected component of
    core points and their reachable neighbours becomes one cluster; label -1
    marks noise.

    :param point: np.array of points
    :param Eps: neighbourhood radius threshold (squared-distance scale)
    :param MinPts: minimum neighbour count for a core point
    :return: (labels array in input order, [])
    """
    # t=time.time_ns()
    D = DistanceMatrix(point)
    # print((time.time_ns()-t)/1e6)
    Dlink = D<Eps
    heart = np.where(np.sum(Dlink, axis=1) > MinPts + 1)
    div = np.array([-1]*len(point))
    divnum=0
    tmp=set(list(heart[0])) # core points waiting to be clustered
    while tmp:
        i = tmp.pop()
        # print(i)
        div[i] = divnum
        xlist=[i] # all reachable core points found so far (grown while iterating)
        for i in xlist:
            tmp_link_index = np.where((Dlink)[i] * div == -1)[0].tolist() # reachable points not yet clustered
            xlist.extend(set(tmp_link_index) & tmp) # queue the unclustered reachable core points
            div[tmp_link_index] = divnum # label every reachable point
        tmp = tmp - set(xlist) # drop all core points clustered in this pass
        divnum+=1
    return div,[]
def DPCA(point, Eps=1):
    """Density-peak clustering (Rodriguez & Laio-style).

    For each point computes a local density p (neighbours within Eps of
    squared distance, via DistanceMatrix) and deta, the distance to the
    nearest point of higher density.  Points with a large p*deta product are
    taken as cluster centres (threshold picked from the biggest relative jump
    in the sorted products near the top 20%), nearby centres are merged, and
    every remaining point is assigned by following its higher-density link.

    :param point: np.array of points
    :param Eps: density radius (squared-distance scale)
    :return: (labels array in input order, centre coordinates)
    """
    # D = DistanceMatrix(point)
    lens = len(point)
    div = np.array([-1] * lens)
    D = DistanceMatrix(point)
    p = np.sum(D < Eps, axis=1)
    deta = np.empty(lens)
    link = np.empty(lens)
    for i in range(lens):
        x = D[i][np.where(p > p[i])]  # distances to every point with higher density
        if len(x) == 0:
            # Densest point overall: give it the maximal distance, no parent.
            deta[i] = np.max(D)
            link[i] = -1
        else:
            deta[i] = np.min(x)
            link[i] = np.where(D[i] == deta[i])[0][0]
    # pdeta=np.array([p,deta])
    pdeta = p * deta
    gama = np.sort(pdeta)
    game_arg = np.argsort(pdeta)
    # Heuristic guess of which points are centres -- #
    # figs = plt.figure()
    # plt.scatter(p,deta)
    # plt.bar(np.arange(lens), poss, width=1.0, color=[(i[0] / 255, i[1] / 255, i[2] / 255) for i in c])
    # plt.plot(np.arange(lens), [x] * lens, c="r")
    # plt.ylim(0, 3)
    # plt.show()
    # Centres: first >100% relative jump among the top ~20% of sorted products.
    may_heart = np.where((gama[1:] - gama[:-1]) / gama[:-1] > 1)
    heart_line_index = game_arg[may_heart[0][np.where(may_heart[0] > lens * 0.8)]][0]
    heart_line = pdeta[heart_line_index]
    heart = np.where(pdeta > heart_line)[0]
    m = point[heart]
    # print(heart)
    # print(m)
    # print(DistanceMatrix(m))
    # Merge centres that lie within Eps of each other (keep the lower index).
    check = np.array(np.where(DistanceMatrix(m) < Eps)).transpose()
    # print(check)
    fix = check[np.where((check[:, 0] - check[:, 1]) > 0)]
    # print(fix)
    link[heart] = -1
    for i in fix:
        link[heart[i[0]]] = heart[i[1]]
        heart[i[0]] = heart[i[1]]
    divnum = 0
    heart = list(set(heart))
    m = point[heart]
    # print(heart)
    # Flood every centre's label down the higher-density link tree.
    for center in heart:
        div[center] = divnum
        xlist = [center]
        for i in xlist:
            neighbor = np.where(link == i)
            xlist.extend(neighbor[0][np.where(div[neighbor] == -1)[0]])
            div[neighbor] = divnum
        divnum += 1
    return div,m
def OPTICS(point, Eps=0, Minpts=5):
    """OPTICS-style ordering + ad-hoc cluster extraction from reachability.

    Builds the OPTICS ordering P and reachability distances r, then walks the
    (smoothed) reachability plot: a sharp drop starts a new cluster, values
    above twice the mean reachability are labelled noise (-1).

    Side effects: pops up a matplotlib bar chart of the reachability plot
    (plt.show() blocks until closed).
    NOTE(review): `dtype=np.int` is the deprecated numpy alias (removed in
    NumPy 1.24) - confirm the target numpy version.

    :param point: np.array of points
    :param Eps: neighbourhood radius; 0 means unlimited (np.inf)
    :param Minpts: minimum points for a core point (cast to int)
    :return: (labels array in input order, [])
    """
    Minpts=int(Minpts)
    D = DistanceMatrix(point)
    lens = len(point)
    div = np.ones(lens, dtype=np.int) * -1
    if Eps == 0:
        Eps = np.inf
    core = np.where(np.sum(D < Eps, axis=1) > Minpts)[0]
    # Core distance of each point: distance to its (Minpts+1)-th neighbour.
    core_distance = np.sort(D, axis=0)[Minpts + 1]
    # rd_yx[i][j] = reachability of j from i = max(core_distance[j], D[i][j]).
    rd_yx = np.max(np.dstack([np.tile(core_distance, [lens, 1]), D]), axis=2)
    P = []
    I = set(np.arange(lens))
    r = np.ones(lens) * np.inf
    while I:
        i = I.pop()
        P.append(i)
        if i in core:
            tmp_rd = rd_yx[i].copy()
            tmp_rd[(list(set(P)),)] = np.inf
            seedlist = list(np.where(tmp_rd != np.inf)[0])
            while seedlist:
                # print(len(seedlist))
                # Expand the seed with the smallest reachability first.
                j = seedlist[np.argmin(r[(seedlist,)])]
                seedlist.remove(j)
                P.append(j)
                if j in core:
                    tmp_rd_2 = rd_yx[j].copy()
                    r[(seedlist,)] = np.min(np.vstack([r[(seedlist,)], tmp_rd_2[(seedlist,)]]), axis=0)
                    tmp_rd_2[(list(set(P)),)] = np.inf
                    tmp_rd_2[(seedlist,)] = np.inf
                    seedlist.extend(list(np.where(tmp_rd_2 != np.inf)[0]))
        I = I - set(P)
    # Reachability values in visiting order form the reachability plot.
    poss = r[(P,)].copy()
    x = np.average(poss[np.where(poss != np.inf)])
    # y = (poss[1:] - poss[:-1]) / x
    # Colour table: index 0 (dark) is noise, the rest are random cluster colours.
    color_set = [tuple([40, 64, 64, 255])] + [tuple(list(i) + [255]) for i in
                                              np.random.randint(64, 256, size=[30, 3])]
    color_set = np.array(color_set[0:10 + 10],
                         dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
    n = 0
    x = np.average(poss[np.where(poss != np.inf)])
    c = [tuple([40, 64, 64, 255])] * lens
    # Light smoothing of the plot before thresholding.
    poss[2:] = (poss[2:] + poss[1:-1] + poss[:-2] * 0.1) / 2.1
    for i in range(len(poss)):
        if poss[i] != np.inf and poss[i] - poss[i - 1] < -0.52 * x:
            n += 1  # sharp drop in reachability: a new cluster begins
        elif poss[i] > 2 * x:
            div[P[i]] = -1  # far-out reachability: noise
            c[i] = color_set[0]
            continue
        div[P[i]] = n
        c[i]=color_set[n+1]
    figs = plt.figure()
    plt.bar(np.arange(lens), poss, width=1.0, color=[(i[0] / 255, i[1] / 255, i[2] / 255) for i in c])
    plt.plot(np.arange(lens), [x*2] * lens, c="r")
    plt.ylim(0, 3)
    plt.show()
    return div,[]
def Birch_lff(point, cluster_num):
    """Cluster with sklearn's Birch (threshold=1.5, branching_factor=20).

    :param point: np.array of points
    :param cluster_num: target number of clusters (cast to int)
    :return: (labels array in input order, [])
    """
    # lff
    cluster_num = int(cluster_num)
    X = point
    # g_truth = DataMat[:, 0]
    # for 'five_cluser.txt':threshold=1.5,branching_factor=20
    # for 'spiral.txt': not suitable
    # for 'ThreeCircles.txt': not suitable
    # for 'Twomoons.txt': not suitable
    y_pred = Birch(n_clusters=cluster_num, threshold=1.5, branching_factor=20).fit_predict(X)
    return y_pred,[]
# 所有算法的名称与函数地址映射表
# AlgorithmList= {"K-mean": {"func":KMeans, "para":{"prediv":5,}},
# "DBscan": {"func":DB_scan, "para":{"Eps":2.0,"MinPts":2,}},
# "DPCA": {"func":DPCA, "para":{"Eps":1.5,}},
# "Birch": {"func":Birch_lff, "para":{"prediv":5,}},
# "OPTICS_beta": {"func": OPTICS, "para": {"Eps": 0,"Minpts":5, }},
# }
def moment(train, label):
    """Fit one multivariate Gaussian per class and return its pdf.

    For every distinct value in `label`, the matching rows of `train` are used
    to estimate a mean vector and covariance (np.cov, ddof=1).

    :param train: pd.DataFrame (or 1-D series) of samples
    :param label: pd.Series of class labels aligned with `train`
    :return: dict mapping class label -> scipy multivariate_normal pdf callable
    """
    dimen = 1 if train.ndim == 1 else len(train.columns)
    class_pdfs = {}
    for cls in label.unique():
        members = label[label == cls].index
        # One column per sample: shape (dimen, n_samples).
        sample = np.array(train.loc[members]).transpose().reshape([dimen, -1])
        mu = np.mean(sample, axis=1)
        sigma = np.cov(sample)
        class_pdfs[cls] = stats.multivariate_normal(mu, sigma).pdf
    return class_pdfs
def rect(train, label, width=10):
    """Per-class Parzen-window density estimate with a hypercube (uniform) kernel.

    For each class in `label`, builds an estimator over the matching rows of
    `train`: a sample contributes when every scaled coordinate offset |u| is
    below 0.5, and the count is normalised by n and the window volume.

    :param train: pd.DataFrame (or 1-D series) of samples
    :param label: pd.Series of class labels aligned with `train`
    :param width: window width, scalar or one value per dimension
    :return: dict mapping class label -> pdf callable; pdf(x) returns one
             density per query point
    """
    rtndic={}
    if train.ndim==1:
        dimen=1
    else:
        dimen=len(train.columns)
    class func():
        def __init__(self,data,h):
            self.data=data
            self.h=h
        def pdf(self,x):
            x = np.array(x).reshape([-1, dimen])
            # Normalise a scalar width to one width per dimension.
            if type(self.h) == float or type(self.h) == int:
                self.h = np.array([self.h] * dimen)
            assert len(self.h) == dimen
            V = np.prod(self.h)
            tmp_ = (np.tile(self.data, [len(x), 1, 1]) - x.reshape(len(x), -1, dimen)) / self.h
            return np.sum(np.all(np.abs(tmp_) < 0.5, axis=2), axis=1) / len(self.data) / V
    for key in label.unique():
        index = label[label == key].index
        meta = np.array(train.loc[index]).reshape([-1,dimen])
        tmps=func(meta.copy(),width)
        rtndic[key] = tmps.pdf
    return rtndic
def exponent(train, label, width=10):
    """Per-class Parzen-window density estimate with an exponential kernel.

    Each sample contributes exp(-|u|) per dimension (product over dimensions),
    normalised by n and the window volume.
    NOTE(review): the normalised Laplace kernel is 0.5*exp(-|u|); the missing
    0.5 factor means these "densities" are scaled by 2**dimen - confirm
    whether only relative class comparison is needed.

    :param train: pd.DataFrame (or 1-D series) of samples
    :param label: pd.Series of class labels aligned with `train`
    :param width: window width, scalar or one value per dimension
    :return: dict mapping class label -> pdf callable
    """
    rtndic={}
    if train.ndim==1:
        dimen=1
    else:
        dimen=len(train.columns)
    class func():
        def __init__(self,data,h):
            self.data=data
            self.h=h
        def pdf(self,x):
            x = np.array(x).reshape([-1, dimen])
            # Normalise a scalar width to one width per dimension.
            if type(self.h) == float or type(self.h) == int:
                self.h = np.array([self.h] * dimen)
            assert len(self.h) == dimen
            V = np.prod(self.h)
            tmp_ = (np.tile(self.data, [len(x), 1, 1]) - x.reshape(len(x), -1, dimen)) / self.h
            return np.sum(np.prod(np.exp(-np.abs(tmp_)), axis=2), axis=1) / len(self.data) / V
    for key in label.unique():
        index = label[label == key].index
        meta = np.array(train.loc[index]).reshape([-1,dimen])
        tmps=func(meta.copy(),width)
        rtndic[key] = tmps.pdf
    return rtndic
def triangle(train, label, width=10):
    """Parzen-window density estimation with a triangular kernel.

    Bug fix: the previous implementation zeroed out-of-window offsets and
    then multiplied the raw |u| values together, which is zero at every
    training sample and largest at the window edge -- the opposite of a
    triangular kernel.  The triangular kernel is K(u) = max(1 - |u|, 0):
    maximal at the sample and falling linearly to zero at |u| = 1.

    Returns {class: pdf-callable}; each pdf maps query points to an array
    of density estimates, normalised by n * prod(h).
    """
    if train.ndim == 1:
        dimen = 1
    else:
        dimen = len(train.columns)

    class func():
        def __init__(self, data, h):
            self.data = data
            self.h = h

        def pdf(self, x):
            x = np.array(x).reshape([-1, dimen])
            if type(self.h) == float or type(self.h) == int:
                self.h = np.array([self.h] * dimen)
            assert len(self.h) == dimen
            V = np.prod(self.h)
            offsets = (np.tile(self.data, [len(x), 1, 1]) - x.reshape(len(x), -1, dimen)) / self.h
            # Triangular kernel: 1 - |u| inside the window, 0 outside.
            kernel = np.maximum(1.0 - np.abs(offsets), 0.0)
            return np.sum(np.prod(kernel, axis=2), axis=1) / len(self.data) / V

    rtndic = {}
    for key in label.unique():
        index = label[label == key].index
        meta = np.array(train.loc[index]).reshape([-1, dimen])
        tmps = func(meta.copy(), width)
        rtndic[key] = tmps.pdf
    return rtndic
def norm(train, label, width=10):
    """Parzen-window density estimation with a Gaussian kernel.

    The standard-normal pdf is evaluated per dimension on the scaled
    offsets and multiplied across dimensions (NOTE: the product treats
    dimensions as independent), then summed over samples and normalised
    by n * prod(h).  Returns {class: pdf-callable}.
    """
    n_dims = 1 if train.ndim == 1 else len(train.columns)

    def make_pdf(data, h):
        def pdf(x):
            x = np.array(x).reshape([-1, n_dims])
            if type(h) == int or type(h) == float:
                bandwidth = np.array([h] * n_dims)
            else:
                bandwidth = h
            assert len(bandwidth) == n_dims
            volume = np.prod(bandwidth)
            offsets = (np.tile(data, [len(x), 1, 1]) - x.reshape(len(x), -1, n_dims)) / bandwidth
            kernel = np.prod(stats.norm.pdf(offsets), axis=2)
            return np.sum(kernel, axis=1) / len(data) / volume
        return pdf

    densities = {}
    for cls in label.unique():
        rows = np.array(train.loc[label[label == cls].index]).reshape([-1, n_dims])
        densities[cls] = make_pdf(rows.copy(), width)
    return densities
def K_near(train,label,Kn=3):
    """k-nearest-neighbour density estimate per class.

    For each query point the "bandwidth" per dimension is the Kn-th
    smallest absolute offset to the class's training samples, and the
    density is Kn / (n * V) with V the product of those bandwidths.

    NOTE(review): V is built from per-dimension k-th order statistics,
    not from the volume of the k-NN ball -- confirm this is the intended
    estimator.
    """
    rtndic={}
    if train.ndim==1:
        dimen=1
    else:
        dimen=len(train.columns)
    class func():
        def __init__(self,data,Kn,dimen):
            self.data=data
            self.Kn=Kn
            self.dimen = dimen
        def pdf(self,x):
            dimen = self.dimen
            x = np.array(x).reshape([-1, dimen])
            # if type(self.Kn) == int or type(self.Kn) == float:
            #     self.Kn = np.array([self.Kn] * dimen)
            # assert len(self.Kn) == dimen
            # Absolute offsets of every sample to every query point.
            tmp_ = np.abs(np.tile(self.data, [len(x), 1, 1]) - x.reshape(len(x), -1, dimen))
            # Kn-th smallest offset per dimension defines the window size.
            h = np.sort(tmp_,axis=1)[:,int(self.Kn-1),:]
            V = np.prod(h,axis=1)
            return self.Kn / len(self.data) / V
    # Unclear why a per-dimension product is used here -- the dimensions
    # are not necessarily independent; this point is questionable.
    for key in label.unique():
        index = label[label == key].index
        meta = np.array(train.loc[index]).reshape([-1,dimen])
        tmps=func(meta.copy(),Kn,dimen)
        rtndic[key] = tmps.pdf
    return rtndic
# Dispatch table mapping estimator names to their function and keyword
# parameters.  NOTE(review): the "K-near" entry uses the key "K_n" while
# K_near's parameter is named "Kn" -- calling K_near(**para) would raise
# TypeError; confirm how this table is consumed.
AlgorithmList= {"parameter": {"func":moment, "para":{}},
                "rect-win": {"func":rect, "para":{"width":14}},
                "Gaussian-win": {"func":norm, "para":{"width":3}},
                "exponent-win": {"func": exponent, "para": {"width": 3}},
                "triangle-win": {"func": triangle, "para": {"width": 5}},
                "K-near": {"func": K_near, "para": {"K_n": 6}},
                }
if __name__ == '__main__':
import pyqtgraph as pg
import pandas as pd
import numpy as np
from pyqtgraph.Qt import QtCore, QtGui
# from Myfunc import DistanceMatrix
from collections import Counter
from matplotlib import pyplot as plt
from scipy.integrate import nquad
from scipy.misc import derivative
from scipy.integrate import quad
    def moment(train,label):
        """Gaussian class-conditional density fit (demo copy inside __main__).

        Unlike the module-level moment(), this version has no 1-D branch:
        it assumes `train` is a DataFrame whose transpose is (dims, n).
        Returns {class: multivariate_normal pdf-callable}.
        """
        rtndic={}
        for key in label.unique():
            index = label[label==key].index
            meta= np.array(train.loc[index]).transpose()
            rtndic[key] = stats.multivariate_normal(np.mean(meta,axis=1),np.cov(meta)).pdf
        return rtndic
def rect(train, label, width=10):
rtndic={}
def func(x,data,h=width):
rtn = []
dimen=data.shape[1]
x=np.array(x).reshape([-1,dimen])
if type(h)==int:
h=np.array([h]*dimen)
assert len(h)==dimen
V=np.prod(h)
tmp_=(np.tile(data,[len(x),1,1])-x.reshape(len(x),-1,dimen))/h
return np.sum(np.all(np.abs(tmp_) < 0.5, axis=2), axis=1)/len(data)/V
for key in label.unique():
index = label[label == key].index
meta = np.array(train.loc[index])
rtndic[key] = lambda x:func(x,meta,width)
return rtndic
def norm(train, label, width=10):
rtndic={}
def func(x,data,h=width):
rtn = []
dimen=data.shape[1]
x=np.array(x).reshape([-1,dimen])
if type(h)==int:
h=np.array([h]*dimen)
assert len(h)==dimen
V=np.prod(h)
tmp_=(np.tile(data,[len(x),1,1])-x.reshape(len(x),-1,dimen))/h
return np.sum(np.prod(stats.norm.pdf(tmp_),axis=2),axis=1)/len(data)/V
# 并不知道为什么要prod累乘 毕竟不一定独立
for key in label.unique():
index = label[label == key].index
meta = np.array(train.loc[index])
rtndic[key] = lambda x:func(x,meta,width)
return rtndic
# def edge(train,label,func):
#
#
# pass
data = pd.read_csv("datas.csv", sep=',', header=0, index_col=0)
newer = pd.read_csv("std.csv", sep=',', header=0, index_col=0)
train = data.iloc[:,:-1]
label = data.iloc[:,-1]
new = newer
# train_d = np.array(train).transpose()
# Themean = np.mean(train_d, axis=1)
# Thecov = np.cov(train_d) # 无偏
# func = stats.multivariate_normal(Themean,Thecov)
# c = lambda z:derivative(lambda x: func.cdf([np.inf, np.inf, np.inf, x]), z, dx=1e-6)
rtn= moment(train,label)
rtn1 = rect(train,label,10)
rtn2 = norm(train,label,10)
ddd = rtn1[0](np.mean(np.array(train), axis=0))
ccc = rtn2[0](np.mean(np.array(train), axis=0))
# pg.setConfigOptions(antialias=True)
#
# w = pg.GraphicsLayoutWidget(show=True)
# w.setWindowTitle('pyqtgraph example: GraphItem')
# v = w.addViewBox()
# v.setAspectLocked()
#
# g = pg.GraphItem()
# v.addItem(g)
#
# # file_list = ["five_cluster.txt", "spiral.txt",
# # "ThreeCircles.txt", "Twomoons.txt"]
# file_list = ["datas.txt"]
# color_set = [tuple([40, 64, 64, 255])] + [tuple(list(i) + [255]) for i in np.random.randint(64, 256, size=[20, 3])]
#
# # Eps=0.03
# # MinPts=5
#
# for file in file_list:
# train = pd.read_csv(file, sep=' ', header=None)
# answer = train.iloc[:, 0]
# point=np.array(train.iloc[:, 1:3].copy())
#
#
# div = np.array([-1]*len(point))
# m=[]
# Eps=0
# Minpts = 5
#
# # test function
# # inf = np.inf
# D=DistanceMatrix(point)
# lens=len(point)
# div=np.ones(lens,dtype=np.int)*-1
# if Eps == 0:
# Eps = np.inf
#
# core=np.where(np.sum(D<Eps,axis=1)>Minpts)[0]
# core_distance = np.sort(D,axis=0)[Minpts+1]
# rd_yx = np.max(np.dstack([np.tile(core_distance, [lens, 1]), D]),axis=2)
# # fix=np.array([np.inf]*lens)
# # fix[core]=1
# # rd_yx = rd_yx_raw*fix # 去除矩阵中不是核心点的部分的数据
# # rd_yx=rd_yx_raw
# rd = np.ones(lens)*np.inf
# # P = np.zeros(lens,dtype=np.int)
# P=[]
# seedlist=[]
# I = set(np.arange(lens))
# r= np.ones(lens)*np.inf
# while I:
# i=I.pop()
# P.append(i)
# if i in core:
# tmp_rd=rd_yx[i].copy()
# tmp_rd[(list(set(P)),)]=np.inf
# seedlist=list(np.where(tmp_rd!=np.inf)[0])
# # index1=np.where(np.sort(tmp_rd)!=np.inf)
# # insert_seed_arg = np.argsort(tmp_rd)[index1]
# # insert_seed = np.sort(tmp_rd)[index1]
# # seedlist=list(insert_seed_arg)
#
# while seedlist:
# # print(len(seedlist))
# j=seedlist[np.argmin(r[(seedlist,)])]
# seedlist.remove(j)
# P.append(j)
# if j in core:
# tmp_rd_2=rd_yx[j].copy()
# r[(seedlist,)]=np.min(np.vstack([r[(seedlist,)], tmp_rd_2[(seedlist,)]]),axis=0)
#
# tmp_rd_2[(list(set(P)),)]=np.inf
# tmp_rd_2[(seedlist,)] = np.inf
# seedlist.extend(list(np.where(tmp_rd_2!=np.inf)[0]))
# # index2 = np.where(np.sort(tmp_rd_2) != np.inf)
# # insert_seed_2 = np.argsort(tmp_rd_2)[np.where(np.sort(tmp_rd_2)!=np.inf)]
# # # print(insert_seed_2)
# # seedlist.extend(insert_seed_2)
# # seedlist=list(insert_seed_2)
# # seedlist.extend(insert_seed_2)
# I=I-set(P)
# # div+=2
# # div[(P[:1000],)]=2
#
# figs = plt.figure()
# poss=r[(P,)].copy()
# x = np.average(poss[np.where(poss!=np.inf)])
# y = (poss[1:]-poss[:-1])/x
#
# color_set = [tuple([40, 64, 64, 255])] + [tuple(list(i) + [255]) for i in
# np.random.randint(64, 256, size=[30, 3])]
# color_set=np.array(color_set[0:10 + 10],
# dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
# n=0
# x = np.average(poss[np.where(poss!=np.inf)])
# c = [tuple([40, 64, 64, 255])] * lens
# poss[2:]=(poss[2:]+poss[1:-1]+poss[:-2]*0.1)/2.1
# for i in range(len(poss)):
# # div[P[i]]=n
# # if poss[i] > 2*x:
# # div[P[i]]=-1
#
# if poss[i]!=np.inf and poss[i]-poss[i-1]<-0.52*x:
# n+=1
# elif poss[i] > 2*x:
# div[P[i]]=-1
# c[i] = color_set[0]
# continue
# div[P[i]]=n
# c[i]=color_set[n+1]
#
# # plt.plot(np.arange(lens-1),y)
# # plt.ylim(-4, 1)
# plt.bar(np.arange(lens),poss,width=1.0,color=[(i[0]/255,i[1]/255,i[2]/255) for i in c])
# plt.plot(np.arange(lens),[x]*lens,c="r")
# plt.ylim(0, 3)
# #
# plt.show()
#
#
# # testfunc end
# DRAW = True
# if DRAW:
# pointsize=0.1
# divnum=len(set(div))
# pos = np.array(point)
# color_set = [tuple([40, 64, 64, 255])] + [tuple(list(i) + [255]) for i in
# np.random.randint(64, 256, size=[30, 3])]
# color_set = np.array(color_set[0:divnum + 10],
# dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte),
# ('alpha', np.ubyte)])
#
# color = np.array([color_set[i + 1] for i in div],
# dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
# symbol = np.array(["o" if i >= 0 else "t" for i in div])
#
# pos_m = np.array(m).reshape(-1, 2)
# color_m = np.array([color_set[i + 1] for i in range(len(m))],
# dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
# symbol_m = ['+'] * len(m)
# symbols = np.hstack([symbol, symbol_m])
# # symbols[np.where()]
# sizes = [pointsize] * len(div) + [pointsize * 5] * len(m)
#
# g.setData(pos=np.vstack([pos, pos_m]), size=sizes, symbol=symbols,
# symbolBrush=np.hstack([color, color_m]),
# pxMode=False)
# # pointsize=0.1
# # divnum=len(set(div))
# # pos = np.array(point)
# # # color_set=[tuple(list(i)+[255]) for i in np.random.randint(64,256,size=[divnum,3])]
# # color_set = np.array(color_set[0:divnum + 10],
# # dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
# #
# # color = np.array([color_set[i + 1] for i in div],
# # dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
# # symbol = np.array(["o" if i >= 0 else "t" for i in div])
# #
# # pos_m = np.array(m).reshape(-1, 2)
# # color_m = np.array([color_set[i + 1] for i in range(len(m))],
# # dtype=[('red', np.ubyte), ('green', np.ubyte), ('blue', np.ubyte), ('alpha', np.ubyte)])
# # symbol_m = ['+'] * len(m)
# # symbols = np.hstack([symbol, symbol_m])
# # # symbols[np.where()]
# # sizes = [pointsize] * len(div) + [pointsize * 5] * len(m)
# #
# # g.setData(pos=np.vstack([pos, pos_m]), size=sizes, symbol=symbols, symbolBrush=np.hstack([color, color_m]),
# # pxMode=False)
#
# # g.setData(pos=pos, adj=None, size=0.01, pxMode=False)
#
#
# import sys
# if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
# QtGui.QApplication.instance().exec_()
| true |
7b09b67222673e345cc238d41b82ffb99d28d2fa | Python | LoboAnimae/FinalProyect_Graficas | /pygame_functions.py | UTF-8 | 343 | 2.859375 | 3 | [] | no_license | from pygame.display import set_mode
from pygame.time import Clock
import pygame
def initPygame(height: int = 800, width: int = 600)->object:
    """Initialise pygame and create an OpenGL double-buffered window.

    Returns (screen, clock) on success, or (None, None) if setup fails.

    NOTE(review): pygame.display.set_mode expects (width, height); here
    the first tuple element is the parameter named ``height`` -- confirm
    the intended argument order.
    """
    try:
        pygame.init()
        screen = set_mode((height, width), pygame.OPENGL | pygame.DOUBLEBUF)
        clock = Clock()
    except Exception as e:
        # Broad catch: any initialisation failure is printed and signalled
        # with a (None, None) return instead of propagating.
        print(e)
        return None, None
    return screen, clock
f55f278ad24b34fd4d1adfad6f3e0ce15c47ad1b | Python | SalmaQueen/Flask-Rest-Api-Prototype | /app/models.py | UTF-8 | 3,038 | 2.75 | 3 | [
"MIT"
] | permissive | from werkzeug.security import check_password_hash, generate_password_hash
from app.extensions import db
class Book(db.Model):
    """SQLAlchemy model for a book.

    ``isbn`` is the lookup key used by every class-level helper below.
    """
    __tablename__ = 'books'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), nullable=False)
    price = db.Column(db.Float, nullable=False)
    isbn = db.Column(db.Integer)
    author = db.Column(db.String(128))

    # Attributes that Book.update() is allowed to modify.
    writeable_properties = ['price', 'name']

    def json(self):
        """Return a plain-dict representation suitable for JSON responses."""
        return {'name': self.name, 'price': self.price, 'isbn': self.isbn, 'author': self.author}

    @classmethod
    def update(cls, isbn, **kwargs):
        """Update writeable fields of the book identified by *isbn*.

        Raises ValueError if any key is not writeable.  Bug fix: all keys
        are now validated *before* anything is mutated -- the previous
        version raised mid-loop, after some attributes had already been
        changed, leaving the record half-updated in the session.
        """
        invalid = [key for key in kwargs if key not in cls.writeable_properties]
        if invalid:
            raise ValueError
        book = cls.query.filter_by(isbn=isbn).first()
        for key, value in kwargs.items():
            setattr(book, key, value)
        db.session.add(book)
        db.session.commit()

    @classmethod
    def add_book(cls, _name, _price, _isbn, _author):
        """Insert a new book row and commit."""
        new_book = cls(name=_name, price=_price, isbn=_isbn, author=_author)
        db.session.add(new_book)
        db.session.commit()

    @classmethod
    def get_all_books(cls):
        """Return every book as a list of plain dicts."""
        return [cls.json(book) for book in cls.query.all()]

    @classmethod
    def get_book(cls, _isbn):
        """Return the dict for the book with *_isbn*, or None if absent."""
        book = cls.query.filter_by(isbn=_isbn).first()
        return book.json() if book else None

    @classmethod
    def delete_book(cls, _isbn):
        """Delete the book(s) with *_isbn*; True on success, False on error."""
        try:
            cls.query.filter_by(isbn=_isbn).delete()
            db.session.commit()
        except Exception as e:
            # NOTE(review): consider db.session.rollback() here so a failed
            # delete does not leave the session dirty.
            print(e)
            return False
        return True

    @classmethod
    def update_book_price(cls, _isbn, _price):
        """Set the price of the book with *_isbn* and commit."""
        book_to_update = cls.query.filter_by(isbn=_isbn).first()
        book_to_update.price = _price
        db.session.commit()

    @classmethod
    def update_book_name(cls, _isbn, _name):
        """Set the name of the book with *_isbn* and commit."""
        book_to_update = cls.query.filter_by(isbn=_isbn).first()
        book_to_update.name = _name
        db.session.commit()

    @classmethod
    def replace_book(cls, _isbn, _name, _price):
        """Overwrite name and price of the book with *_isbn*, if it exists."""
        book_to_replace = cls.query.filter_by(isbn=_isbn).first()
        if book_to_replace is not None:
            book_to_replace.price = _price
            book_to_replace.name = _name
            db.session.commit()

    def __repr__(self):
        return '<Book name={} isbn={} price={}>'.format(self.name, self.isbn, self.price)
class User(db.Model):
    """SQLAlchemy model for an application user with a hashed password."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(128), unique=True, nullable=False)
    password_hash = db.Column(db.String(128))
    name = db.Column(db.String(128))
    @property
    def password(self):
        """The plain-text password is write-only; reading it is an error."""
        raise AttributeError('Password is not a readable attribute!')
    @password.setter
    def password(self, password):
        # Only the werkzeug hash is ever stored, never the plain text.
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password):
        """Return True iff *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    def __repr__(self):
        return '<User {}>'.format(self.username)
| true |
c7fcb233e2802e7d06555778579304786394869d | Python | manon2012/python | /work/Do/testlist.py | UTF-8 | 236 | 3.4375 | 3 | [] | no_license | a=[1,9,0,2] # out [0,1,2,9]
# Selection-sort demo: repeatedly pull the minimum out of `a` into `newa`.
# Output: [0, 1, 2, 9].  Note that `a` is left empty afterwards.
newa = []
while a:
    smallest = min(a)
    newa.append(smallest)
    a.remove(smallest)
print(newa)
0ed2867ab70461fb542167f3fce0197d3630fe51 | Python | NathanRuprecht/CS210_IntroToProgramming | /DailyLabs/Lsn35/SharedData.py | UTF-8 | 1,985 | 4.03125 | 4 | [] | no_license | # CS 210 - Introduction to Programming - Fall 2014
#
# Author: Maj. Caswell, Dr. Bower
#
# Documentation Statement: None.
#
from threading import Thread
""" This file contains a example of the difficult
of sharing data between threads.
"""
from threading import Thread
MAX = 1000000
def main():
    """ Main program to run the demo.

    Runs MAX increments and MAX decrements over a shared one-element
    list, first sequentially (result is always 0) and then in two
    threads, where the non-atomic += / -= on counter[0] loses updates
    and the result is usually nonzero.
    """
    print( "Counting to {}.".format( MAX ), flush=True )
    # In order to pass an integer value by reference, make a list with
    # a single value in it.  If this is unclear, see the diagrams here:
    # http://interactivepython.org/runestone/static/thinkcspy/Lists/ObjectsandReferences.html
    counter = [ 0 ]
    # Count sequentially, in a single thread, producing the expected result of 0.
    print( "Counting sequentially, in a single thread ... ", end="", flush=True )
    increment( counter )
    decrement( counter )
    print( "counter = {}".format( counter[ 0 ] ), flush=True )
    # Count in parallel, using two threads. What is the expected result?
    print( "Counting in two parallel threads ... ", end="", flush=True )
    t1 = Thread( target=increment, args=( counter, ) )
    t2 = Thread( target=decrement, args=( counter, ) )
    # Start both threads and then wait for both to finish.
    t1.start()
    t2.start()
    t1.join()
    t2.join()
    print( "counter = {}".format( counter[ 0 ] ), flush=True )
def increment( counter ):
    """Add one to counter[0], MAX times.

    Each step is a non-atomic read-modify-write on a shared list cell,
    which is exactly what makes the threaded demo lose updates.
    """
    remaining = MAX
    while remaining:
        counter[0] = counter[0] + 1
        remaining -= 1
def decrement( counter ):
    """Subtract one from counter[0], MAX times (non-atomic, see increment)."""
    remaining = MAX
    while remaining:
        counter[0] = counter[0] - 1
        remaining -= 1
def run_stopwatch( start_time, stop_event ):
    """ Runs a stopwatch loop showing the time elapsed at regular intervals.

    Bug fix: ``sleep`` and ``time`` were used without ever being imported
    in this module, so calling this function raised NameError.  The local
    import keeps the fix self-contained.
    """
    from time import sleep, time
    while not stop_event.is_set():
        sleep( 0.05 ) # Accurate to about 1/20th of a second.
        print( "{:.2f}".format( time() - start_time ), flush=True ) # Show current running time.
######## Main program ########
if __name__ == "__main__":
main()
| true |
54d569aec0383ac4bd76f5c5d7e933fea52b5feb | Python | AdamZhouSE/pythonHomework | /Code/CodeRecords/2795/60683/235052.py | UTF-8 | 283 | 3.15625 | 3 | [] | no_license | n = eval(input())
nums = [int(x) for x in input().split()]
# Distinct values among the first n entries, preserving first-seen order.
# dict.fromkeys gives O(n) deduplication instead of the previous
# O(n^2) "not in list" membership loop.
sole = list(dict.fromkeys(nums[:n]))
if len(sole) == 1:
    # Only one distinct value: print it.
    print(sole[0])
elif len(sole) == 2:
    # Two distinct values: print their absolute difference.
    print(abs(sole[0] - sole[1]))
else:
    # Otherwise: half the spread between the extremes.
    print((max(sole) - min(sole)) // 2)
82997202bfe503456e518dff541e2983088d3b87 | Python | kangfend/bahasa | /bahasa/stemmer/disambiguator/prefixes/rule_40.py | UTF-8 | 854 | 3.078125 | 3 | [
"MIT"
] | permissive | import re
class Rule40a(object):
    """Disambiguate Prefix Rule 40a (CC infix rules): CinV -> CinV.

    Matches a consonant followed by the infix 'in' plus a vowel and
    returns the word rebuilt from those parts (i.e. unchanged); returns
    None when the word does not have that shape.
    """

    PATTERN = re.compile(r'^([bcdfghjklmnpqrstvwxyz])(in[aiueo])(.*)$')

    def disambiguate(self, word):
        """Return the CinV form of *word*, or None if it does not match."""
        match = self.PATTERN.match(word)
        if match:
            return ''.join(match.groups())
class Rule40b(object):
    """Disambiguate Prefix Rule 40b (CC infix rules): CinV -> CV.

    Strips the 'in' infix between a leading consonant and a vowel,
    returning consonant + vowel + remainder; returns None when the word
    does not have that shape.
    """

    PATTERN = re.compile(r'^([bcdfghjklmnpqrstvwxyz])in([aiueo])(.*)$')

    def disambiguate(self, word):
        """Return *word* with its 'in' infix removed, or None if no match."""
        match = self.PATTERN.match(word)
        if match:
            return ''.join(match.groups())
c16423c837e1041a93334ec04c4c938ab2cdec30 | Python | MatellioLiyaquet/hackathonML | /hackathon-backend/plot3.py | UTF-8 | 730 | 2.765625 | 3 | [] | no_license | import pandas
import numpy as np
import base64 as base64
import matplotlib.pyplot as plt
# Sentiment pie chart for the tweets dataset, saved to disk and emitted
# as a base64 PNG payload on stdout.
Tweet = pandas.read_csv("tmp/csv/Tweets.csv")
import matplotlib.pyplot as plt
Mood_count = Tweet['sentiments'].value_counts()
# NOTE(review): the labels assume value_counts() yields the classes in
# negative/neutral/positive order -- confirm against the data.
labels = 'negative', 'neutral', 'positive'
sizes = Mood_count
explode = (0, 0, 0.1)  # pull the third slice out slightly
fig1, ax1 = plt.subplots()
ax1.pie(sizes, explode=explode, labels=labels, colors=['red', 'yellow', 'green'], autopct='%1.1f%%', shadow=True, startangle=90)
ax1.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
from io import BytesIO
figfile = BytesIO()
plt.savefig('tmp/plots/plot3.jpg', format='png')
# Bug fix: the figure was only written to disk, so `figfile` stayed empty
# and the base64 payload was always b''.  Render into the in-memory
# buffer as well before encoding it.
plt.savefig(figfile, format='png')
figfile.seek(0)
my_base64_jpgData = base64.b64encode(figfile.read())
print(my_base64_jpgData)
58ee7aca55f93be663c240e6ab1bbc2b7eaa0df3 | Python | wnstlr/ligo | /IMR_PN_plot.py | UTF-8 | 5,995 | 2.875 | 3 | [] | no_license | #!/opt/local/bin/python2.6
'''Plots the Amplitude against frequency plot for Post-Newtonian Approach and
Inspiral Merger Ringdown Template.'''
import numpy
import scipy
from matplotlib import *
import pylab
def generatePSD(f):
    """Power spectral density S_h(f) from the fitted-curve polynomial.

    `f` is frequency in Hz (scalar or ndarray); the frequency is first
    normalised by 245.4 Hz and the fitted power-law terms are summed.
    """
    x = f / 245.4  # normalised frequency
    poly = (0.0152 * x ** (-4)
            + 0.2935 * x ** (9. / 4.)
            + 2.7951 * x ** (3. / 2.)
            - 6.5080 * x ** (3. / 4.)
            + 17.7622)
    return 10 ** (-48) * poly
def generateNoise(S_h):
    """Draw complex Gaussian frequency-domain noise following PSD ``S_h``.

    Real and imaginary parts are independent zero-mean Gaussian draws
    scaled by sqrt(S_h)/2; one complex sample is returned per PSD bin.
    """
    real_part = numpy.sqrt(S_h) * numpy.random.randn(len(S_h)) / 2.
    imag_part = numpy.sqrt(S_h) * numpy.random.randn(len(S_h)) / 2.
    return real_part + 1j * imag_part
def computeNewtonianChirpAmplitudeFD(distance, mchirp, frequency):
    """Leading-order (Newtonian) frequency-domain chirp amplitude |A(f)|.

    A(f) = pi^(-2/3) * sqrt(5/24) * Mc^(5/6) * f^(-7/6) / D, with chirp
    mass Mc and distance D in consistent (geometric) units.
    """
    prefactor = numpy.pi ** (-2.0 / 3.0) * numpy.sqrt(5.0 / 24.0)
    return prefactor * mchirp ** (5.0 / 6.0) * frequency ** (-7. / 6.) / distance
def computeIMRChirpAmplitudeFD(f,f1,f2,f3,sigma,c_constant,alpha2,alpha3,\
                               epsilon1,epsilon2,m,w_m,w_r):
    '''IMRPhenomB-style piecewise frequency-domain amplitude.

    The amplitude is built in three regimes separated by the transition
    frequencies f1 < f2 < f3: inspiral (f < f1), merger (f1 <= f < f2,
    scaled by w_m) and ringdown (f2 <= f < f3, a Lorentzian of width
    sigma scaled by w_r); above f3 the amplitude is 0.  `m` is the total
    mass and `c_constant` the overall amplitude normalisation.
    '''
    nu = (numpy.pi * m * f) ** (1./3.)  # PN expansion parameter v
    f_prime = f / f1
    lorentzian = 1. / (2 * numpy.pi) * sigma / ((f - f2) ** 2 + sigma ** 2 / 4.)
    amplitude = c_constant*f1**(-7./6.)
    # NOTE(review): `&` is the bitwise operator; it works here because the
    # comparisons are scalar booleans, but `and` would be the usual choice.
    if (f < f1):
        amplitude = amplitude*f_prime**(-7./6.)*(1+alpha2*nu**2+alpha3*nu**3)
    elif ((f1 <= f) & (f < f2)):
        amplitude = amplitude*w_m*f_prime**(-2./3.)*(1+epsilon1*nu+epsilon2*nu**2)
    elif ((f2 <= f) & (f < f3)):
        amplitude = amplitude*w_r*lorentzian
    else:
        amplitude = 0
    return amplitude
if __name__ == '__main__':
    # Python 2 script (raw_input / print statements): build and plot the
    # PN and IMR amplitude curves for a fixed 10+10 solar-mass binary.
    l = int(raw_input("Number of data: "))
    #m1 = float(raw_input("Mass1 in solar mass: "))
    #m2 = float(raw_input("Mass2 in solar mass: "))
    #d = float(raw_input("Distance in megaparsecs: "))
    ### Compute physical values of the binary system
    MSOLAR_SI = 1.98892e30 # 1 solar mass in kg
    MPC_IN_SI = 3.08568025e22 # 1 mega parsec in meters
    G_NEWT = 6.67300 * 10 ** (-11) # gravitational constant in SI
    LIGHT_SPEED_SI = 2.998 * 10 ** 8 # Speed of light in SI
    MSOLAR_IN_SEC = G_NEWT * MSOLAR_SI / LIGHT_SPEED_SI ** 3
    m1 = 10. # mass1 in solar mass
    m2 = 10. # mass2 in solar mass
    print ">> mass1 in solarmass=" + str(m1)
    print ">> mass2 in solarmass=" + str(m2)
    d_mpc = 1000. # distance in megaparsecs
    print ">> Distance in megaparsecs=" + str(d_mpc)
    # Convert everything to geometric units (seconds).
    d = d_mpc * MPC_IN_SI / LIGHT_SPEED_SI # distance in seconds
    m1 = m1 * MSOLAR_IN_SEC # mass1 in seconds
    m2 = m2 * MSOLAR_IN_SEC # mass2 in seconds
    m = m1 + m2 # total mass
    eta = m1 * m2 / m ** 2 # symmetric mass ratio
    mc = m * eta ** (3./5.) # chirp mass
    print ">> Total mass in sec=" + str(m)
    print ">> Chirp mass in sec=" + str(mc)
    print ">> Symmetric mass ratio:=" + str(eta)
    print ">> Distance in sec=" + str(d)
    ### Compute phenomological phase parameters
    # NOTE(review): psi2..psi7 and `redshift` below are computed and
    # printed but never used in the amplitude calculation.
    psi2 = 3715./756-920.9*eta+6742*eta**2-1.34e4*eta**3
    psi3 = -16*numpy.pi+1.702e4*eta-1.214e5*eta**2+2.386e5*eta**3
    psi4 = 15293365./508032.-1.254e5*eta+8.735e5*eta**2-1.694e6*eta**3
    psi5 = 0.
    psi6 = -8.898e5*eta+5.981e6*eta**2-1.128e7*eta**3
    psi7 = 8.696e5*eta-5.838e6*eta**2+1.089e7*eta**3
    f1 = (1-4.455+3.521+0.6437*eta-0.05822*eta**2-7.092*eta**3)/(numpy.pi*m)
    f2 = ((1-0.63)/2.+0.1469*eta-0.0249*eta**2+2.325*eta**3)/(numpy.pi*m)
    f3 = (0.3236-0.1331*eta-0.2714*eta**2+4.922*eta**3)/(numpy.pi*m)
    sigma = ((1-0.63)/4.-0.4098*eta+1.829*eta**2-2.87*eta**3)/(numpy.pi*m)
    print ">> [psi2, psi3, psi4, psi5, psi6, psi7]="
    print psi2, psi3, psi4, psi5, psi6, psi7
    print ">> [f1, f2, f3, sigma]="
    print f1, f2, f3, sigma
    alpha2 = -323. / 224. + 451. * eta / 168.
    alpha3 = 0
    epsilon1 = -1.8897
    epsilon2 = 1.6557
    redshift = 0.21
    f_isco = (1./6.) ** (3./2.) / (numpy.pi * m)
    print ">> f_isco=" + str(f_isco)
    f = numpy.linspace(10, 2048, l)
    c_constant = (m ** (5./6.) / (d * numpy.pi ** (2./3.))) \
                 * numpy.sqrt(5. * eta / 24.)
    print ">> C=" + str(c_constant)
    ### Compute normalization constants
    # w_m / w_r keep the piecewise amplitude continuous at f1 and f2.
    vMerg = (numpy.pi * m * f1) ** (1./3.)
    vRing = (numpy.pi * m * f2) ** (1./3.)
    w_m = 1. + alpha2 * vMerg ** 2 + alpha3 * vMerg ** 3
    w_m = w_m / (1. + epsilon1 * vMerg + epsilon2 * vMerg ** 2)
    w_r = w_m*(numpy.pi*sigma/2.)*(f2/f1)**(-2./3.)*(1.+epsilon1*vRing+epsilon2\
          *vRing**2)
    PN_amp = computeNewtonianChirpAmplitudeFD(d, mc, f)
    IMR_amp = numpy.empty(len(f))
    for i in range(len(f)):
        IMR_amp[i] = computeIMRChirpAmplitudeFD(f[i],f1,f2,f3,sigma,c_constant,\
                                                alpha2,alpha3,epsilon1,\
                                                epsilon2,m,w_m,w_r)
    S_h = generatePSD(f)
    nk = generateNoise(S_h)
    sigmak = 2 * numpy.sqrt(S_h)
    #pylab.plot(f, numpy.sqrt(S_h), 'g', label="PSD of Advanced LIGO")
    pylab.loglog(f, PN_amp, 'b--', label="Post-Newtonian")
    pylab.loglog(f, IMR_amp, 'r', label="Insipiral Merger Ringdown")
    pylab.xlabel("Frequency (Hz)")
    pylab.ylabel("Amplitude $|A(f)|$")
    pylab.title("Post-Newtonian Amplitude with\nInsipiral Merger Ringdown Amplitude")
    pylab.legend(loc="upper right", prop={'size':10})
    pylab.grid(True)
    pylab.show()
    #pylab.savefig("PN_IMR_plot.pdf")
    #pylab.close()
| true |
b1d2fc0461bcce656ba690203d19d3c687b18919 | Python | pacellyjcax/ProgrammingChallenge | /URI/1536 - Libertadores.py | UTF-8 | 584 | 3.28125 | 3 | [] | no_license | def saldoDeGols(l1,l2):
return (int(l1[0])+int(l2[2])) - (int(l1[2])+int(l2[0]))
def golsNoAdversario(l1,l2):
    """Away-goals tie-breaker for a two-leg tie.

    l1/l2 are the split tokens of each score line; index 2 holds the
    visiting team's goal count in that leg.

    Bug fix: the goal counts arrive as strings and were compared
    lexicographically ('10' < '2'), which is wrong for two-digit scores;
    they are now compared as integers.
    """
    away_time2 = int(l1[2])  # team 2's goals scored away (first leg)
    away_time1 = int(l2[2])  # team 1's goals scored away (second leg)
    if away_time2 > away_time1:
        return "Time 2"
    elif away_time2 < away_time1:
        return "Time 1"
    return "Penaltis"
# Python 2 script: read n two-leg ties; the aggregate goal difference
# (saldoDeGols) decides each winner, with the away-goals rule
# (golsNoAdversario) as the tie-breaker when the aggregate is level.
res = []
n = int(raw_input())
for i in range(n):
    p1 = [x for x in raw_input().split()]
    p2 = [x for x in raw_input().split()]
    if saldoDeGols(p1,p2) == 0:
        # Aggregate tied: fall back to away goals (may yield "Penaltis").
        res.append(golsNoAdversario(p1,p2))
    elif saldoDeGols(p1,p2) > 0:
        res.append("Time 1")
    else:
        res.append("Time 2")
for e in res:
    print e
cccea2f04929a0b39cbe261fb6d2db4ef3a96e86 | Python | natebrunelle/cs1110f18 | /markdown/files/cs1111_19fa/clicking_example.py | UTF-8 | 399 | 2.90625 | 3 | [] | no_license | import pygame
import gamebox
camera = gamebox.Camera(800,600)
logo = gamebox.from_image(-100, -100, "https://www.python.org/static/img/python-logo.png")
score = 0
def tick(keys):
if pygame.K_UP in keys:
print(" the up key is currently being pressed")
if camera.mouseclick:
logo.center = camera.mouse
camera.draw(logo)
camera.display()
gamebox.timer_loop(30, tick) | true |
c0954bf2e49b0bc90fa9b1928457ecb0d4f2e2d8 | Python | philipgreat/pla-article-classify | /parse-dict.py | UTF-8 | 477 | 2.9375 | 3 | [] | no_license | # encoding=utf-8
import jieba
import jieba.analyse
import math
'''
从文件中得到向量
'''
def dict_from_file(file_path):
    """Load a term -> IDF-weight mapping from a whitespace-separated file.

    Each line is "<term> <weight>"; lines are echoed with a running
    number as they are read.

    Robustness fixes: split on any run of whitespace (the previous
    split(' ') crashed on double spaces or tabs), skip blank lines, and
    use print() call syntax so the module runs under Python 2 and 3.
    The local name no longer shadows the builtin ``dict``.
    """
    weights = {}
    number = 1
    with open(file_path, 'r') as inf:
        for line in inf:
            line = line.strip()
            if not line:
                continue  # ignore blank lines instead of crashing
            print(str(number) + ": " + line + " ")
            key, val = line.split()[:2]
            weights[key] = float(val)
            number += 1
    return weights
dict_from_file("extradict/idf.txt") | true |
98f2245d845e3ccfce96c00053c698ce216597d2 | Python | abeltomvarghese/Data-Analysis | /Learning/basics.py | UTF-8 | 972 | 3.421875 | 3 | [] | no_license | import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import style
import numpy as np
style.use('ggplot')
web_stats = {'Day': [1,2,3,4,5,6],
'Visitors': [43,53,34,45,64,34],
'Bounce_Rate': [65,72,62,64,54,66]}
df = pd.DataFrame(web_stats)
##print(df) #printing the entire dataframe
##print(df.head()) #print the first n-1 rows
##print(df.tail()) #print the last bit of dataframe
##print(df.tail(2)) #prints last 2 rows of dataframe
##print(df.set_index('Day')) #set the day as the index
##df2 = df.set_index('Day')
##print(df2)
#to select a particular column
#print(df['Visitors'])
#print(df.Visitors)
#printing out select columns
#print(df[['Bounce_Rate','Visitors']])
#printing out a column in a list
#print(df.Visitors.tolist())
# #converting columns to an array
# print(np.array(df[['Bounce_Rate','Visitors']]))
#print(np.array(df['Bounce_Rate']))
df3 = pd.DataFrame(np.array(df[['Bounce_Rate','Visitors']]))
print(df3) | true |
ed66db1627fdf79be3c0b61b5d0b3dd49ebd2ff3 | Python | oguncan/YMGK | /YMGK.py | UTF-8 | 14,241 | 2.53125 | 3 | [] | no_license | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 7 16:15:30 2020
@author: Ogün Can KAYA
"""
import joblib
import numpy as np
import pandas as pd
import math
import matplotlib.pyplot as plt
import seaborn as sns
import datetime
# from sklearn.preprocessing import Imputer
import seaborn as seabornInstance
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn import metrics
import requests
from sklearn.preprocessing import LabelEncoder
# %%
# Load the Istanbul air-quality measurements and split the 'Tarih'
# (date) timestamp into calendar/time component columns.
airQualityDF= pd.read_excel("istanbul2.xlsx")
#%%
airQualityDF["Day"] = [da.day for da in airQualityDF["Tarih"]]
airQualityDF["Month"] = [da.month for da in airQualityDF["Tarih"]]
airQualityDF["Year"] = [da.year for da in airQualityDF["Tarih"]]
airQualityDF["Hour"] = [da.hour for da in airQualityDF["Tarih"]]
airQualityDF["Minute"] = [da.minute for da in airQualityDF["Tarih"]]
airQualityDF["Second"] = [da.second for da in airQualityDF["Tarih"]]
# %%
airQualityDF.fillna(0,inplace=True)  # missing measurements are treated as 0
# %% İBB TRAFİK SPLİT %% #
# Same component split for the IBB traffic-index timestamps.
ibbTrafficDF = pd.read_excel("trafficDF.xlsx")
ibbTrafficDF["Date"] = [str(da.date()) for da in ibbTrafficDF["Trafik İndeks Tarihi"]]
ibbTrafficDF["Day"] = [da.day for da in ibbTrafficDF["Trafik İndeks Tarihi"]]
ibbTrafficDF["Month"] = [da.month for da in ibbTrafficDF["Trafik İndeks Tarihi"]]
ibbTrafficDF["Year"] = [da.year for da in ibbTrafficDF["Trafik İndeks Tarihi"]]
ibbTrafficDF["Hour"] = [da.hour for da in ibbTrafficDF["Trafik İndeks Tarihi"]]
ibbTrafficDF["Minute"] = [da.minute for da in ibbTrafficDF["Trafik İndeks Tarihi"]]
# %% Trafik İndex Mean Year-Day-Month-Hour
# %%
# %% datexDF[(datexDF.Year== 2020) & (datexDF.Day== 4) & (datexDF.Month==2) & (datexDF.Hour==11)].İndeks.mean()
def meanTrafficIndex():
    """Fill ibbTrafficDF['IndexMean'] with the hourly mean traffic index.

    Each row receives the mean of 'Trafik İndeks' over all rows sharing
    its (Year, Day, Month, Hour).  Rewritten from four nested loops over
    the columns' unique values (O(n^4) scans, plus chained-indexing
    writes that trigger pandas' SettingWithCopy hazard) into a single
    vectorised groupby/transform.  Groups whose mean is NaN keep the
    original default of 0, matching the old isnan guard.
    """
    # Not run always / i did run one time and write trafficDF.xlsx file
    means = ibbTrafficDF.groupby(["Year", "Day", "Month", "Hour"])["Trafik İndeks"].transform("mean")
    ibbTrafficDF["IndexMean"] = means.fillna(0)
# %%
def calculateAirQualityIndexSO2(so2):
    """Map an SO2 concentration onto the 0-500 air-quality index scale.

    Piecewise-linear interpolation over the SO2 breakpoint bands;
    concentrations outside every band (including the small gaps between
    bands) yield 0, exactly as before.
    """
    bands = [
        (0, 100, 0, 50),
        (101, 250, 51, 100),
        (251, 500, 101, 150),
        (501, 850, 151, 200),
        (851, 1100, 201, 300),
        (1101, 1500, 301, 500),
    ]
    for c_lo, c_hi, i_lo, i_hi in bands:
        if c_lo <= so2 <= c_hi:
            return (i_hi - i_lo) / (c_hi - c_lo) * (so2 - c_lo) + i_lo
    return 0
# %%
def calculateAirQualityIndexNo2(no2):
    """Map an NO2 concentration onto the 0-500 air-quality index scale.

    Piecewise-linear interpolation over the NO2 breakpoint bands;
    values outside every band yield 0.
    """
    bands = [
        (0, 100, 0, 50),
        (101, 200, 51, 100),
        (201, 500, 101, 150),
        (501, 1000, 151, 200),
        (1001, 2000, 201, 300),
        (2001, 3000, 301, 500),
    ]
    for c_lo, c_hi, i_lo, i_hi in bands:
        if c_lo <= no2 <= c_hi:
            return (i_hi - i_lo) / (c_hi - c_lo) * (no2 - c_lo) + i_lo
    return 0
# %%
def calculateAirQualityIndexPM10(pm10):
    """Map a PM10 concentration onto the 0-500 air-quality index scale.

    Piecewise-linear interpolation over the PM10 breakpoint bands;
    values outside every band yield 0.
    """
    bands = [
        (0, 50, 0, 50),
        (51, 100, 51, 100),
        (101, 260, 101, 150),
        (261, 400, 151, 200),
        (401, 520, 201, 300),
        (521, 620, 301, 500),
    ]
    for c_lo, c_hi, i_lo, i_hi in bands:
        if c_lo <= pm10 <= c_hi:
            return (i_hi - i_lo) / (c_hi - c_lo) * (pm10 - c_lo) + i_lo
    return 0
# %%
def calculateAirQualityIndexPM25(pm25):
    """Map a PM2.5 concentration onto the 0-500 air-quality index scale.

    Piecewise-linear interpolation over the (EPA-style) PM2.5 breakpoint
    bands; values outside every band yield 0.
    """
    bands = [
        (0, 12, 0, 50),
        (12.1, 35.4, 51, 100),
        (35.5, 55.4, 101, 150),
        (55.5, 150.4, 151, 200),
        (150.5, 250.4, 201, 300),
        (250.5, 350.4, 301, 400),
        (350.5, 505.4, 401, 500),
    ]
    for c_lo, c_hi, i_lo, i_hi in bands:
        if c_lo <= pm25 <= c_hi:
            return (i_hi - i_lo) / (c_hi - c_lo) * (pm25 - c_lo) + i_lo
    return 0
# %%
def calculateAirQualityIndexCO(CO):
    """Map a CO concentration onto the 0-500 air-quality index scale.

    Piecewise-linear interpolation over the CO breakpoint bands;
    values outside every band yield 0.
    """
    bands = [
        (0, 5500, 0, 50),
        (5501, 10000, 51, 100),
        (10001, 16000, 101, 150),
        (16001, 24000, 151, 200),
        (24001, 32000, 201, 300),
        (32001, 40000, 301, 500),
    ]
    for c_lo, c_hi, i_lo, i_hi in bands:
        if c_lo <= CO <= c_hi:
            return (i_hi - i_lo) / (c_hi - c_lo) * (CO - c_lo) + i_lo
    return 0
# %%
def calculateAirQualityIndexO3(O3):
    """Convert an O3 concentration into an air-quality sub-index (0-500).

    Linear interpolation over the original breakpoints: 0-120, 121-160,
    161-180, 181-240, 241-700, 701-1700 mapped to index bands 0-50, 51-100,
    101-150, 151-200, 201-300, 301-500.

    Fix: the original ``a <= x <= b`` chain left unit-wide gaps between bands
    (e.g. 120 < O3 < 121) and everything above 1700 unmatched, so such
    readings returned 0 (the best category).  The bands are now contiguous
    and readings beyond the scale clamp to 500.  In-band results are
    unchanged (identical interpolation expressions).
    """
    if O3 < 0:
        # Invalid/negative reading: keep the original fall-through result.
        return 0
    if O3 <= 120:
        return ((50-0)/(120-0))*(O3-0) + 0
    if O3 <= 160:
        return ((100-51)/(160-121))*(O3-121) + 51
    if O3 <= 180:
        return ((150-101)/(180-161))*(O3-161) + 101
    if O3 <= 240:
        return ((200-151)/(240-181))*(O3-181) + 151
    if O3 <= 700:
        return ((300-201)/(700-241))*(O3-241) + 201
    if O3 <= 1700:
        return ((500-301)/(1700-701))*(O3-701) + 301
    # Beyond the top breakpoint: worst category instead of silently 0.
    return 500
# %%
def calculateHKI():
    """Append an AQI sub-index column ("HKI" + original column name) to the
    module-level ``airQualityDF`` for every pollutant measurement column.

    The (pattern, scorer) order below reproduces the original processing
    order — PM10, SO2, NO2, CO, O3, PM 2.5 — which fixes the creation order
    of the new columns.  All column lists are snapshotted *before* any new
    column is added, so the filters never see the freshly created "HKI..."
    columns.
    """
    scorers = [
        ('PM10', calculateAirQualityIndexPM10),
        ('SO2', calculateAirQualityIndexSO2),
        ('NO2', calculateAirQualityIndexNo2),
        ('CO', calculateAirQualityIndexCO),
        ('O3', calculateAirQualityIndexO3),
        ('PM 2.5', calculateAirQualityIndexPM25),
    ]
    # Snapshot every matching column list up front (mirrors the original,
    # which built all six lists before mutating the frame).
    pending = [
        (list(airQualityDF.filter(like=pattern).columns), scorer)
        for pattern, scorer in scorers
    ]
    for columns, scorer in pending:
        for column in columns:
            airQualityDF["HKI" + column] = airQualityDF[column].apply(scorer)
calculateHKI()
# %%
def splitHKIValueAndValue(newList):
    """Summarise one station's HKI sub-index columns into two new columns.

    ``newList`` holds that station's "HKI<Station>-<Pollutant>" column names.
    For every row, record the highest sub-index ("AQI-<Station>") and the
    pollutant that produced it ("AQI-<Station>Type").  The value column is
    created before the type column, preserving the alternating column order
    that downstream code relies on.
    """
    rows = airQualityDF[newList].values
    highest_values = [row.max() for row in rows]
    # Column names look like "HKIKandilli-PM10"; the piece after '-' is the
    # pollutant that dominates this row.
    dominant_pollutants = [newList[row.argmax()].split('-')[1] for row in rows]
    station = newList[0].split('HKI')[1].split('-')[0]
    airQualityDF['AQI-' + station] = highest_values
    airQualityDF['AQI-' + station + 'Type'] = dominant_pollutants
#%%
def calculateGoodOrBadAir(listValues):
    """For each AQI value column named in ``listValues``, add a categorical
    column "HKIStr-<Station>" to the module-level ``airQualityDF``.

    Category codes: 0 good (0-50), 1 moderate (51-100), 2 sensitive
    (101-150), 3 unhealthy (151-200), 4 bad (201-300), 5 hazardous (>300).

    Fix: the original initialised ``hkiString`` once outside both loops, so
    any value falling in a band gap (e.g. 50 < v < 51) or above 500 silently
    inherited the *previous* row's category — and the very first such value
    got the empty string.  Each value is now categorised independently with
    contiguous thresholds; in-band results are unchanged.
    """
    def categorize(aqi):
        # Contiguous upper bounds cover the gaps the original ranges left.
        if aqi <= 50:
            return 0
        elif aqi <= 100:
            return 1
        elif aqi <= 150:
            return 2
        elif aqi <= 200:
            return 3
        elif aqi <= 300:
            return 4
        return 5
    for value in listValues:
        # Column names look like "AQI-Kandilli"; keep the station part.
        airQualityDF['HKIStr-' + value.split('-')[1]] = [
            categorize(v) for v in airQualityDF[value]
        ]
# %%
if __name__ == '__main__':
    # %%
    # Collect each station's HKI sub-index column names (created by
    # calculateHKI above) so they can be summarised per station.
    hkiKandilli = list(airQualityDF.filter(regex = 'HKIKandilli-').columns)
    hkiUskudar = list(airQualityDF.filter(regex = 'HKIÜsküdar-').columns)
    hkiSirinevler = list(airQualityDF.filter(regex = 'HKISirinevler-').columns)
    hkiMecidiyekoy = list(airQualityDF.filter(regex = 'HKIMecidiyekoy-').columns)
    hkiUmraniye =list(airQualityDF.filter(regex = 'HKIUmraniye-').columns)
    hkiBasaksehir = list(airQualityDF.filter(regex = 'HKIBasaksehir-').columns)
    hkiEsenyurt = list(airQualityDF.filter(regex = 'HKIEsenyurt-').columns)
    hkiSultanbeyli = list(airQualityDF.filter(regex = 'HKISultanbeyli-').columns)
    hkiKagithane = list(airQualityDF.filter(regex = 'HKIKagithane-').columns)
    hkiSultangazi = list(airQualityDF.filter(regex = 'HKISultangazi-').columns)
    hkiSilivri = list(airQualityDF.filter(regex = 'HKISilivri-').columns)
    hkiSile = list(airQualityDF.filter(regex = 'HKISile-').columns)
    # Per station: add "AQI-<Station>" (row-wise max sub-index) and
    # "AQI-<Station>Type" (dominant pollutant) columns.
    splitHKIValueAndValue(hkiKandilli)
    splitHKIValueAndValue(hkiUskudar)
    splitHKIValueAndValue(hkiSirinevler)
    splitHKIValueAndValue(hkiMecidiyekoy)
    splitHKIValueAndValue(hkiUmraniye)
    splitHKIValueAndValue(hkiBasaksehir)
    splitHKIValueAndValue(hkiEsenyurt)
    splitHKIValueAndValue(hkiSultanbeyli)
    splitHKIValueAndValue(hkiKagithane)
    splitHKIValueAndValue(hkiSultangazi)
    splitHKIValueAndValue(hkiSilivri)
    splitHKIValueAndValue(hkiSile)
    # %%
    # [::2] keeps every other "AQI-" column, i.e. the value columns only,
    # skipping the interleaved "...Type" columns created just above.
    aqiList = list(airQualityDF.filter(regex = 'AQI-').columns)[::2]
    calculateGoodOrBadAir(aqiList)
    # Print traffic-index dates that also appear in the air-quality data.
    # NOTE(review): O(n*m) nested scan over both frames; prints only,
    # the matches are not stored.
    ibbUniqueList = ibbTrafficDF['Date'].unique()
    for index, uniques in enumerate(ibbUniqueList):
        for date in ibbTrafficDF['Trafik İndeks Tarihi']:
            if(ibbUniqueList[index]==str(date.date())):
                print(date.date())
    #%%
    # Build the training frame from the Kandilli station's columns only.
    kandilliAllListColumnnName = list(airQualityDF.filter(regex = 'Kandilli').columns)
    kandilliAllListColumnns = (airQualityDF.filter(regex = 'Kandilli').values)
    kandilliAllList = pd.DataFrame(kandilliAllListColumnns, columns=kandilliAllListColumnnName)
    # Features: columns 8..13; target: column 16.
    # NOTE(review): positional slicing — presumably the six HKI sub-index
    # columns and the "HKIStr" category column; confirm against the actual
    # column order of the Kandilli filter.
    X = kandilliAllList.iloc[:, 8:14]
    y = kandilliAllList.iloc[:, 16]
    X_train, X_valid, y_train, y_valid = train_test_split(X, y, test_size=0.3, random_state=0)
    from keras.utils import to_categorical
    # One-hot encode the category codes for categorical_crossentropy.
    y_train = to_categorical(y_train)
    y_valid = to_categorical(y_valid)
    #%%
    nb_features = 6
    nb_classes = 4
    # NOTE(review): hard-coded sample counts — 7810/3348 must match the
    # 70%/30% split of the Kandilli rows; this breaks if the dataset size
    # changes.
    X_train = np.array(X_train).reshape(7810,6,1)
    X_valid = np.array(X_valid).reshape(3348,6,1)
    #%%
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import LSTM, Dense, Activation, Dropout, Flatten, BatchNormalization, Conv1D, MaxPooling1D
    # LSTM classifier: 6 sub-index features -> 4 air-quality classes.
    model=Sequential()
    # Alternative CNN front-end, kept for reference:
    # model.add(Conv1D(512,1,input_shape=(nb_features,1)))
    # model.add(Activation("relu"))
    # model.add(MaxPooling1D(2))
    model.add(LSTM(512, input_shape=(nb_features,1)))
    model.add(Activation("relu"))
    model.add(BatchNormalization())
    model.add((Flatten()))
    model.add(Dropout(0.15))
    model.add(Dense(2048, activation="relu"))
    model.add(Dense(1024, activation="relu"))
    model.add(Dense(4, activation="softmax"))
    model.summary()
    model.compile(loss="categorical_crossentropy", optimizer ="adam", metrics = ["accuracy"])
    score = model.fit(X_train, y_train, epochs = 50, validation_data=(X_valid, y_valid))
    # %%
    # Accuracy curves (labels/titles are in Turkish).
    # NOTE(review): "acc"/"val_acc" are the metric keys of older Keras;
    # TF2 Keras names them "accuracy"/"val_accuracy" — confirm against the
    # installed version.
    import matplotlib.pyplot as plt
    plt.plot(score.history["acc"])
    plt.plot(score.history["val_acc"])
    plt.title("Model başarımları")
    plt.ylabel("Başarım")
    plt.xlabel("Epok sayısı")
    plt.legend(["Eğitim","Doğrulama"], loc="upper left")
    plt.show()
    #%%
    # Loss curves: training in green, validation in red.
    plt.plot(score.history["loss"],color="g")
    plt.plot(score.history["val_loss"],color="r")
    plt.title("Model Kayıpları")
    plt.ylabel("Kayıp")
    plt.xlabel("Epok sayısı")
    plt.legend(["Eğitim","Doğrulama"], loc="upper left")
    # %%
    # Report the mean metrics over all epochs.
    # NOTE: each print receives a single tuple argument (double parentheses).
    print(("Ortalama eğitim kaybı: ", np.mean(score.history["loss"])))
    print(("Ortalama Eğitim Başarımı: ", np.mean(score.history["acc"])))
    print(("Ortalama Doğrulama kaybı: ", np.mean(score.history["val_loss"])))
    print(("Ortalama Doğrulama Başarımı: ", np.mean(score.history["val_acc"])))
    # %%
    #%%
    #%% Uskudar API
    # Fetch the live Üsküdar reading from the WAQI feed.
    # NOTE(review): API token embedded in the URL — consider moving it to
    # configuration.
    r = requests.get('https://api.waqi.info/feed/@8159/?token=891351b0c50bf07574dddd0c24d86cd0fc37707a')
    # NOTE: rebinding the name `json` shadows any imported json module from
    # here on.
    json = r.json()
    apiUskudarCo = json['data']['iaqi']['co']['v']
    apiUskudarNo2 = json['data']['iaqi']['no2']['v']
    apiUskudarO3 = json['data']['iaqi']['o3']['v']
    apiUskudarPm10 = json['data']['iaqi']['pm10']['v']
    apiUskudarPm25 = json['data']['iaqi']['pm25']['v']
    apiUskudarSo2 = json['data']['iaqi']['so2']['v']
    # Convert the live readings to sub-indices with the same formulas used
    # on the training data.
    addPm10 = calculateAirQualityIndexPM10(pd.Series([apiUskudarPm10])[0])
    addCo = calculateAirQualityIndexCO(pd.Series([apiUskudarCo])[0])
    addNo2 = calculateAirQualityIndexNo2(pd.Series([apiUskudarNo2])[0])
    addO3 = calculateAirQualityIndexO3(pd.Series([apiUskudarO3])[0])
    # PM2.5 is not fed to the model; its slot is the constant 0 below.
    # addPm25 = calculateAirQualityIndexPM25(pd.Series([apiUskudarPm25])[0])
    addSo2 = calculateAirQualityIndexSO2(pd.Series([apiUskudarSo2])[0])
    # %%
    # NOTE(review): feature order here must match the column order of X used
    # in training — confirm.
    predictList = [addPm10, addSo2, addNo2, addCo, addO3, 0]
    predictList = np.array(predictList)
    #%%
    # Shape (batch=1, timesteps=6, features=1) as expected by the LSTM.
    x_input = predictList.reshape((1, 6, 1))
    # NOTE(review): Sequential.predict_classes was removed in TF 2.6; newer
    # code uses np.argmax(model.predict(x), axis=-1).
    yPredict = model.predict_classes(x_input)
    print(yPredict)
    # Predicted class legend:
    ## 0 - Good
    ## 1 - Moderate
    ## 2 - Sensitive
    ## 3 - Unhealthy
    ## 4 - Bad
    ## 5 - Hazardous
    #%%
| true |
d373581a481069828b5266d051b7afebe01a073c | Python | ace964/Azubot | /Control/azubot_gpio.py | UTF-8 | 4,300 | 3 | 3 | [
"MIT"
] | permissive | from IPin import IPin
from IServo import IServo
from IPwm import IPwm
from IAzubot import IAzubot
import time
from pygame import mixer # Load sound library
import pigpio
# Initialize connection to local pigpio server
pi=pigpio.pi(port=8888)
# Create Custom Error for hardware interaction
class AccessError(Exception):
def __init__(self,value):
self.value = value
def __str__(self):
return repr(self.value)
# Class handling pwm signals for speed control of the chain drive
class PWMAccess(IPwm):
maxVal = 255
# sets the corresponding pwm pin as output
def __init__(self, IO_Nr):
global pi
self.IO_Nr = IO_Nr
pi.set_mode(self.IO_Nr, pigpio.OUTPUT)
# changes pwm dutycycle (chain drive speed)
def write(self, dutycycle_percent):
global pi
pi.set_PWM_dutycycle(self.IO_Nr, dutycycle_percent*self.maxVal/100)
# turns pwm on and therefor moves chaindrive
def start(self, dutycycle_percent):
self.write(dutycycle_percent)
# Stops the chain drive (sets pwm to 0)
def stop(self):
self.write(0)
# Class handling communication with servos (control of the head)
class ServoAccess(IServo):
def __init__(self, IO_Nr, minAngle=-30, maxAngle=30, minPulsewidth=500, maxPulsewidth=2500, midPulsewidth=1500):
global pi
self.IO_Nr = IO_Nr
pi.set_mode(self.IO_Nr, pigpio.OUTPUT)
pi.set_servo_pulsewidth(self.IO_Nr, 0) # initialize and switch off
self.minAngle = minAngle
self.maxAngle = maxAngle
self.minPulsewidth = minPulsewidth
self.maxPulsewidth = maxPulsewidth
self.midPulsewidth = midPulsewidth
# Sets the minimal angle of the servo to prevent overbending the cables
def setMin(self, minAngle):
self.minAngle = minAngle
self.checkConsistency()
# Sets the maximal angle of the servo to prevent overbending the cables
def setMax(self, maxAngle):
self.maxAngle = maxAngle
self.checkConsistency()
# check if settings are reasonable
def checkConsistency(self):
global pi
if self.minAngle > self.maxAngle:
pi.set_servo_pulsewidth(self.IO_Nr, 0)
return False
# sets the angle of servo to be taken if it is in the given min/max range
def setAngle(self, angle):
global pi
if self.minAngle <= angle <= self.maxAngle:
pi.set_servo_pulsewidth(self.IO_Nr, self.getPulsewidthFromAngle(angle))
# centers the servos
def reset(self):
global pi
pi.set_servo_pulsewidth(self.IO_Nr, 1500)
# converts angle to pulsewidth
def getPulsewidthFromAngle(self, angle):
if angle < 0:
return self.midPulsewidth - abs(self.midPulsewidth-self.minPulsewidth)*angle/self.minAngle
elif angle > 0:
return self.midPulsewidth + abs(self.midPulsewidth-self.maxPulsewidth)*angle/self.maxAngle
else:
pi.set_servo_pulsewidth(self.IO_Nr, self.midPulsewidth)
time.sleep(0.05)
return 0
# Handling gpio Acess for light etc.
class IOAccess(IPin):
OUTPUT = 1
INPUT = 2
# Initializes gpio as input or output
def __init__(self,IO_Nr,Mode):
global pi
if Mode != IOAccess.OUTPUT and Mode != IOAccess.INPUT:
raise ValueError
else:
self.IO_Nr = IO_Nr
self.Mode = pigpio.OUTPUT if Mode == IOAccess.OUTPUT else pigpio.INPUT
pi.set_mode(self.IO_Nr,self.Mode)
# Read State of pin
def read(self):
global pi
return pi.read(self.IO_Nr)
# set pin to specific state
def write(self,value):
global pi
if self.Mode != IOAccess.OUTPUT:
raise AccessError("No writing access on an input")
else:
pi.write(self.IO_Nr,value)
# Class initializing speaker offering to play sounds
class SoundPlayer:
#default directory where sounds can be placed.
soundDirectory = '/home/pi/sounds/'
# initializes connection to soundcard
def __init__(self):
mixer.init()
# play sounds from sd card
def play(self, soundFile):
mixer.music.load(self.soundDirectory+soundFile)
mixer.music.play() | true |
6755ae8afa96314d1472795f24dca0809804b830 | Python | bekhnam/Telco-Churn-app | /churn-app.py | UTF-8 | 3,462 | 2.59375 | 3 | [] | no_license | from numpy.lib.npyio import load
import streamlit as st
import pandas as pd
import numpy as np
import pickle
import base64
import seaborn as sns
import matplotlib.pyplot as plt
st.write("""
# Churn Prediction App
Customer churn is defined as the loss of customers after a certain period of time. Companies are
interested in targeting customers who are likely to churn. They can target these customers with
special deals and promotions to influence them to stay with the company.
This app predicts the probability of a customer churning using Telco Customer data. Here customer churn
means the customer does not make another purchase after a period of time.
""")
df_selected = pd.read_csv("dataset/Telco-Customer-Churn.csv")
df_selected_all = df_selected[['gender', 'Partner', 'Dependents', 'PhoneService', 'tenure',
'MonthlyCharges']].copy()
def filedownload(df):
csv = df.to_csv(index=False)
b64 = base64.b64encode(csv.encode()).decode()
href = f'<a href="data:file/csv;base64,{b64}" download="churn_data.csv">Download CSV File</a>'
return href
st.set_option('deprecation.showPyplotGlobalUse', False)
st.markdown(filedownload(df_selected_all), unsafe_allow_html=True)
uploaded_file = st.sidebar.file_uploader("Upload your input CSV file", type=['csv'])
if uploaded_file is not None:
input_df = pd.read_csv(uploaded_file)
else:
def user_input_features():
gender = st.sidebar.selectbox('gender', ('Male', 'Female'))
PaymentMethod = st.sidebar.selectbox('PaymentMethod', ('Bank transfer (automatic)', 'Credit card (automatic)', 'Mailed check', 'Electronic check'))
MonthlyCharges = st.sidebar.slider('Monthly Charges', 18.0, 118.0, 18.0)
tenure = st.sidebar.slider('tenure', 0.0, 72.0, 0.0)
data = {'gender': [gender],
'PaymentMethod': [PaymentMethod],
'MonthlyCharges': [MonthlyCharges],
'tenure': [tenure]
}
features = pd.DataFrame(data)
return features
input_df = user_input_features()
# display the output model and the default input parameters
churn_raw = pd.read_csv('dataset/Telco-Customer-Churn.csv')
churn_raw.fillna(0, inplace=True)
churn = churn_raw.drop(columns=['Churn'])
df = pd.concat([input_df, churn], axis=0)
encode = ['gender', 'PaymentMethod']
for col in encode:
dummy = pd.get_dummies(df[col], prefix=col)
df = pd.concat([df, dummy], axis=1)
del df[col]
# select user input
df = df[:1]
df.fillna(0, inplace=True)
# select the features we want to display:
features = ['MonthlyCharges', 'tenure', 'gender_Female', 'gender_Male',
'PaymentMethod_Bank transfer (automatic)',
'PaymentMethod_Credit card (automatic)',
'PaymentMethod_Electronic check', 'PaymentMethod_Mailed check']
df = df[features]
# display the user input features
st.subheader('User Input features')
print(df.columns)
if uploaded_file is not None:
st.write(df)
else:
st.write('Awaiting CSV file to be uploaded. Currently using example input parameters (shown below).')
st.write(df)
# load the model
load_clf = pickle.load(open('churn_clf.pkl', 'rb'))
# generate binary scores and prediction probabilities
prediction = load_clf.predict(df)
prediction_proba = load_clf.predict_proba(df)
churn_labels = np.array(['No', 'Yes'])
st.subheader('Prediction')
st.write(churn_labels[prediction])
st.subheader('Prediction Probability')
st.write(prediction_proba)
| true |
53702c266817a8e3fd52bb9f69b8c19f881f5107 | Python | vrillusions/jeelink-receiver | /util/garage_door_status.py | UTF-8 | 5,133 | 2.796875 | 3 | [
"Unlicense"
] | permissive | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python Template.
Environment Variables
LOGLEVEL: overrides the level specified here. Choices are debug, info,
warning, error, and critical. Default is warning.
"""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os
import sys
import logging
import sqlite3
import cPickle as pickle
import smtplib
import email.message
import errno
from optparse import OptionParser
__version__ = '0.1.0-dev'
# Logger config
# DEBUG, INFO, WARNING, ERROR, or CRITICAL
# This will set log level from the environment variable LOGLEVEL or default
# to warning. You can also just hardcode the error if this is simple.
_LOGLEVEL = getattr(logging, os.getenv('LOGLEVEL', 'WARNING').upper())
_LOGFORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
logging.basicConfig(level=_LOGLEVEL, format=_LOGFORMAT)
class PickleWrap(object):
def __init__(self, filename):
self.log = logging.getLogger()
self.filename = filename
self._load_file()
def _load_file(self):
try:
with open(self.filename, 'rb') as fh:
self.content = pickle.load(fh)
except IOError as exc:
if exc.errno == errno.ENOENT:
# Just make content blank and save to create file
self.log.info("File didn't exist, creating")
self.content = {}
self.save()
else:
raise
def save(self):
with open(self.filename, 'wb') as fh:
pickle.dump(self.content, fh, -1)
def _parse_opts(argv=None):
"""Parse the command line options.
:param list argv: List of arguments to process. If not provided then will
use optparse default
:return: options,args where options is the list of specified options that
were parsed and args is whatever arguments are left after parsing all
options.
"""
parser = OptionParser(version='%prog {}'.format(__version__))
parser.set_defaults(verbose=False)
parser.add_option('-c', '--config', dest='config', metavar='FILE',
help='Use config FILE (default: %default)', default='config.ini')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
help='Be more verbose (default is no)')
parser.add_option('-f', '--file-cache', dest='file_cache', metavar='FILE',
help='Use FILE for cache (default: %default%)',
default='./garage_door_status.cache')
parser.add_option('-n', '--count', dest='hitcount', default='6',
help='Send email after this number of times (default: %default%)')
(options, args) = parser.parse_args(argv)
return options, args
def format_doorstatus(int1):
# First off cast them int in case sent as strings
int1 = int(int1)
if int1 == 0:
result = "CLOSED"
else:
result = "OPEN"
return result
def send_notification(door_status, count):
msg = email.message.Message()
msg['Subject'] = 'Garage door is {}'.format(door_status)
msg['From'] = 'vr@vrillusions.com'
msg['To'] = '3306207260@txt.att.net'
msg.set_payload('Garage door is currently {}'.format(door_status))
smtpobj = smtplib.SMTP('localhost', 25, 'vrillusions.com')
smtpobj.sendmail(msg['From'], msg['To'], msg.as_string())
return True
def main(argv=None):
"""The main function.
:param list argv: List of arguments passed to command line. Default is None,
which then will translate to having it set to sys.argv.
:return: Optionally returns a numeric exit code. If not given then will
default to 0.
:rtype: int
"""
log = logging.getLogger()
if argv is None:
argv = sys.argv
#(options, args) = _parse_opts(argv[1:])
# If not using args then don't bother storing it
options = _parse_opts(argv)[0]
if options.verbose:
log.setLevel(logging.DEBUG)
cache = PickleWrap(options.file_cache)
conn = sqlite3.connect('jeelink-receiver.sqlite3')
c = conn.cursor()
c.execute("SELECT port4 FROM nodes WHERE node_id = 2;")
if c.rowcount == 0:
log.error('Unable to get current status')
return 1
doornum = c.fetchone()[0]
log.debug(doornum)
door = format_doorstatus(doornum)
log.debug('Door status: {}'.format(door))
if door == 'OPEN':
log.info('Door opened, incrementing counter')
if 'opencount' in cache.content:
cache.content['opencount'] = cache.content['opencount'] + 1
if cache.content['opencount'] >= int(options.hitcount):
log.info('Door has been open for {} checks, send notification'.
format(cache.content['opencount']))
send_notification(door, cache.content['opencount'])
else:
log.info('initializing new counter')
cache.content['opencount'] = 1
else:
log.info('Door closed, setting count to 0')
cache.content['opencount'] = 0
cache.save()
if __name__ == "__main__":
sys.exit(main())
| true |
e3b6b6a5e7c87f176656a47ad39279a1e1e950fb | Python | boisgera/audio.frames | /audio/frames.py | UTF-8 | 6,686 | 3.375 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env python
# coding: utf-8
"""
Audio Frames Toolkit
"""
# Python Standard Library
from __future__ import division
import doctest
# Third-Party Libraries
import numpy as np
#
# Metadata
# ------------------------------------------------------------------------------
#
__main__ = (__name__ == "__main__")
from audio.about_frames import *
#
# TODO
# ------------------------------------------------------------------------------
#
# - support split/merge of multi-channel data.
#
#
# Application Programming Interface
# ------------------------------------------------------------------------------
#
def split(data, frame_length, pad=False, overlap=0, window=None):
"""
Split an array into frames.
Arguments
---------
- `data`: a sequence of numbers,
- `frame_length`: the desired frame length,
- `zero_pad`: if `True`, zeros are added to the last frame to make it
match the prescribed frame length, otherwise it may be shorter than
the others; defaults to `False`.
- `overlap`: number of samples shared between successive frames,
defaults to `0`.
- `window`: an optional window applied to each frame after the split.
The default (rectangular window) does not modify the frames.
Result
------
- `frames`: a sequence of numpy arrays.
"""
data = np.array(data, copy=False)
length = len(data)
if overlap >= frame_length:
error = "overlap >= frame_length"
raise ValueError(error)
frame_shift = frame_length - overlap
num_frames, remain = divmod(length - overlap, frame_shift)
extra = (frame_shift - remain) % frame_shift
if extra:
if pad is False:
error = "cannot split the data into an entire number of frames."
raise ValueError(error)
else:
data = np.r_[data, np.zeros(extra, dtype=data.dtype)]
length = len(data)
num_frames += 1
if window is None:
window = np.ones
window_ = window(frame_length)
frames = np.empty((num_frames, frame_length), dtype=data.dtype)
for i in range(num_frames):
start = i * frame_shift
stop = start + frame_length
frames[i] = window_ * data[start:stop]
return frames
def merge(frames, overlap=0, window=None):
"""
Merge a sequence of frames of the same length.
Arguments
---------
- `frames`: a sequence of frames with the same length,
- `overlap`: number of overlapping samples between successive frames,
defaults to `0`.
- `window`: an optional window applied to each frame before the merge.
The default (rectangular window) does not modify the frames.
Result
------
- `data`: a numpy array.
"""
frames = np.array(frames, copy=False)
num_frames, frame_length = np.shape(frames)
if overlap >= frame_length:
error = "overlap >= frame_length"
raise ValueError(error)
frame_shift = frame_length - overlap
if window is None:
window = np.ones
window_ = window(frame_length)
data = np.zeros(frame_length + (num_frames - 1) * frame_shift,
dtype=frames.dtype)
for i in range(num_frames):
start = i * frame_shift
stop = start + frame_length
data[start:stop] += window_ * frames[i]
return data
#
# Doctests
# ------------------------------------------------------------------------------
#
__doc__ += \
"""
Preamble
--------------------------------------------------------------------------------
>>> import numpy as np
Test sequence
--------------------------------------------------------------------------------
>>> data = [1, 2, 3, 4, 5, 6]
Basic Usage
--------------------------------------------------------------------------------
>>> split(data, 1)
array([[1],
[2],
[3],
[4],
[5],
[6]])
>>> split(data, 2)
array([[1, 2],
[3, 4],
[5, 6]])
>>> split(data, 3)
array([[1, 2, 3],
[4, 5, 6]])
>>> split(data, 4) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: ...
>>> split(data, 5) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: ...
>>> split(data, 6)
array([[1, 2, 3, 4, 5, 6]])
>>> split(data, 7) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: ...
Zero Padding Enabled
--------------------------------------------------------------------------------
>>> split(data, 1, pad=True)
array([[1],
[2],
[3],
[4],
[5],
[6]])
>>> split(data, 2, pad=True)
array([[1, 2],
[3, 4],
[5, 6]])
>>> split(data, 3, pad=True)
array([[1, 2, 3],
[4, 5, 6]])
>>> split(data, 4, pad=True)
array([[1, 2, 3, 4],
[5, 6, 0, 0]])
>>> split(data, 5, pad=True)
array([[1, 2, 3, 4, 5],
[6, 0, 0, 0, 0]])
>>> split(data, 6, pad=True)
array([[1, 2, 3, 4, 5, 6]])
>>> split(data, 7, pad=True)
array([[1, 2, 3, 4, 5, 6, 0]])
Overlapping Frames
--------------------------------------------------------------------------------
>>> split(data, 2, overlap=1)
array([[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6]])
>>> split(data, 3, overlap=1) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: ...
>>> split(data, 3, pad=True, overlap=1)
array([[1, 2, 3],
[3, 4, 5],
[5, 6, 0]])
>>> split(data, 3, overlap=2)
array([[1, 2, 3],
[2, 3, 4],
[3, 4, 5],
[4, 5, 6]])
>>> split(data, 3, overlap=3) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError: ...
Windows
--------------------------------------------------------------------------------
>>> data = np.ones(24)
>>> frames = split(data, 6, window=np.hanning)
>>> all(all(frame == np.hanning(6)) for frame in frames)
True
Merging Frames
--------------------------------------------------------------------------------
>>> frames = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
>>> merge(frames)
array([1, 2, 3, 4, 5, 6, 7, 8, 9])
>>> merge(frames, overlap=1)
array([ 1, 2, 7, 5, 13, 8, 9])
>>> merge(frames, overlap=2)
array([ 1, 6, 15, 14, 9])
>>> merge(frames, window=np.bartlett)
array([0, 2, 0, 0, 5, 0, 0, 8, 0])
"""
| true |
6baaa05b9681f1538f99bb1ae9fd200a1e576c60 | Python | whiteydoublee/Python | /Test1/1_8.py | UTF-8 | 309 | 3.40625 | 3 | [] | no_license | """
날짜: 2021/08/12
이름: 김예은
내용: 파이썬 최대값 최소값 연습문제
"""
scores=[62,82,76,88,54,92]
max = scores[0]
min = scores[0]
for score in scores:
if max< score:
max = score
if min > score:
min = score
print('최대값: ',max)
print('최소값: ',min) | true |
e773cb9261811accaa7af537bd883d4c2162f963 | Python | Jekwulum/Hackerrank | /nested_lists.py | UTF-8 | 543 | 3.390625 | 3 | [] | no_license | n = int(input())
def get_2nd_lowest(args):
val= sorted(set(args))[1]
return val
def nested_list(n):
if n not in range(2, 6):
return
my_list = []
for _ in range(n):
name = input()
score = float(input())
new_list = [name, score]
my_list.append(new_list)
my_list = sorted(my_list)
vals = []
for i, j in my_list:
vals.append(j)
val = get_2nd_lowest(vals)
for i, j in my_list:
if j == val:
print(i)
nested_list(n)
| true |
a0c3373a1fd9ae209bf5a0bdaf5dc5392e4b7906 | Python | fs-akjha/Hacker_Rank_Practises | /HackerRank_Solutions_Python/program37.py | UTF-8 | 186 | 2.75 | 3 | [] | no_license | import numpy
n = int(input())
a = numpy.array([input().split() for _ in range(n)], int)
b = numpy.array([input().split() for _ in range(n)], int)
result=numpy.dot(a, b)
print(result) | true |
695fb8e01353cf4be916471a1193232e195e8763 | Python | tmg1991/Python | /Batch_rename/test.py | UTF-8 | 1,364 | 3.21875 | 3 | [
"MIT"
] | permissive | print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$')
print('Üdvözöllek az egyszerû számológépben! Ver. 1.0')
print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$')
print(
'Help: Eloszor add meg, hogy mit szeretnel(Osszeadas, Kivonas, Szorzas, Osztas, Hatvanyozas ) \n majd add meg a tagokat(Ha nincs tobb tag,akkor irj 0 -t! ')
print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$')
print('')
All = input('Osszeadas(1), Kivonas(2), Szorzas(3), Osztas(4), Hatvanyozas(5): ')
All2 = 0
print('')
num1 = int(input('Add meg az elsõ tagot: '))
num2 = int(input('Add meg a második tagot: '))
print('')
if All == "1":
All2 = int(num1) + int(num2)
print(num1, '+', num2, '=', All2)
if All == "2":
All2 = int(num1) - int(num2)
print(num1, '-', num2, '=', All2)
if All == "3":
All2 = int(num1) * int(num2)
print(num1, '*', num2, '=', All2)
if All == "4":
All2 = int(num1) / int(num2)
print(num1, '/', num2, '=', All2)
if All == "5":
All2 = int(num1) ** int(num2)
print(num1, '**', num2, '=', All2)
print('')
print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$')
nothing = input('') | true |
62dd690a9c5ad89c3b9bf0b2888a39e273fc421d | Python | rockym93/hotleaf | /hotleaf.py | UTF-8 | 4,992 | 2.953125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env python3
import os
import markdown
import sandwich
import datetime
import json
from operator import itemgetter
class Leaf(dict):
def __format__(self, formatstring):
return formatstring.format(**self)
def __missing__(self, key):
return ''
def navsetup(self, pot):
'''Adds navigational helper classes'''
self['prev'] = Navigator(self,'prev', pot)
self['next'] = Navigator(self,'next', pot)
self['index'] = Indexer(pot)
self['if'] = Conditional(self)
class Stem(str):
def __getitem__(self, index):
return self.split('/')[index] #Returns list
class InfuseList(list):
'''a list that can infuse each of its members'''
def __format__(self, formatstring):
print(self)
returnstring = ''
for i in self:
if i[0] != '!': #ignore hidden tags
returnstring += formatstring.format(i)
return returnstring #Returns string
# def __getitem__(self,index):
# return InfuseList(list.__getitem__(list(self),index))
class Indexer(list):
'''a list which gets items by search string, rather than by index'''
def __getitem__(self, search):
if type(search) is int:
return list(self)[search] #Returns leaf
elif search[0] == '#': # tag; returns Indexer
return Indexer([leaf for leaf in self if search[1:] in leaf['tags']])
elif search[0] == '/': # path; returns Indexer
return Indexer([leaf for leaf in self if search[1:] in leaf['stem']])
def __format__(self, formatstring):
returnstring = ''
for i in self:
returnstring += formatstring.format(**i)
return returnstring #Returns string
class Navigator():
def __init__(self, leaf, direction, pot):
self.leaf = leaf
self.direction = direction
self.pot = Indexer(pot)
def __getitem__(self, search):
searched = list(self.pot[search])
poslist = [i['stem'] for i in searched]
index = poslist.index(self.leaf['stem'])
if self.direction == 'prev':
try:
return searched[index+1] #Returns leaf
except IndexError:
return self.leaf #return this post if if this is the oldest post
elif self.direction == 'next':
if index != 0:
return searched[index-1] #Returns leaf
else:
return self.leaf #return this post if if this is the oldest post
class Conditional():
def __init__(self, leaf, state=False):
self.leaf = leaf
self.state = state
def __getitem__(self, search):
if search[0] == '#': # tag
if search[1:] in self.leaf['tags']:
return Conditional(self.leaf, True)
else:
return self
elif search[0] == '/': # path
if search[1:] in self.leaf['stem']:
return Conditional(self.leaf, True)
else:
return self
def __format__(self, formatstring):
if self.state:
return formatstring.format(**self.leaf)
else:
return ''
def pick(filename, pot=[]):
'''pick a leaf up from a file ready for brewing'''
with open(filename, encoding='utf-8') as f:
leaf = Leaf(sandwich.load(f.read()))
#Set some sensible defaults
leaf['stem'] = Stem(os.path.splitext(filename)[0])
leaf['tip'] = '.html'
leaf['summary'],leaf['image'] = sandwich.markstrip(leaf['text'].strip().splitlines()[0])
leaf['template'] = '.template'
if not leaf['title']:
leaf['title'] = leaf['stem'][-1]
if not leaf['timestamp']:
leaf['timestamp'] = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
leaf['text'] = markdown.markdown(leaf['text'])
leaf['tags'] = InfuseList(leaf['tags'])
#If there's an image file with the same name, use it. Otherwise, use the site icon.
if not leaf['image']:
if os.path.exists(str(leaf['stem']) + '.jpg'):
leaf['image'] = str(leaf['stem']) + '.jpg'
elif os.path.exists(str(leaf['stem']) + '.png'):
leaf['image'] = str(leaf['stem']) + '.png'
else:
leaf['image'] = "favicon.png"
#Replace defaults with page-specific metadata (if it exists)
try:
with open(leaf['stem']+'.json') as f:
leaf.update(json.load(f))
except FileNotFoundError:
pass
return leaf
def scoop(tip='.txt'):
'''populate the pot with leaves'''
pot = []
for directory in os.walk('.'):
for filename in directory[2]:
if os.path.splitext(filename)[1] == tip:
path = directory[0] + '/' + filename
path = path.split('./',1)[1]
print('picking: ' + path)
pot.append(pick(path,pot))
pot.sort(key=itemgetter('timestamp'), reverse=True)
now = datetime.datetime.now()
pot = [leaf for leaf in pot if leaf['timestamp'] < now] #exclude future-dated posts
for leaf in pot:
leaf.navsetup(pot)
for leaf in pot:
leaf['text'] = leaf['text'].format(**leaf)
return pot
def infuse(leaf):
    '''Render a single leaf through its template file and return the result.'''
    print('infusing: ' + leaf['stem'])
    with open(leaf['template'], encoding='utf-8') as template_file:
        template_text = template_file.read()
    return template_text.format(**leaf)
def pour(leaf):
    '''Write the rendered leaf to its destination file (stem + tip).'''
    destination = leaf['stem'] + leaf['tip']
    with open(destination, 'w', encoding='utf-8') as out:
        out.write(infuse(leaf))
def brew():
    '''Brew up a whole pot of tasty hot leaf juice: scoop every source
    file in the tree and pour each resulting page.'''
    for leaf in scoop('.txt'):
        pour(leaf)
if __name__ == "__main__":
    # Build the whole site when run as a script.
    brew()
| true |
d4d5874a893027c398cb9f06ee9e562b80e8653a | Python | hazrmard/Agents | /src/agents/agent/gpi/offpolicy.py | UTF-8 | 3,319 | 3.28125 | 3 | [] | no_license | """
Contains off-policy temporal difference agents:
* Q-Learning (`QAgent`)
* N-step temporal difference learning (`NStepTDAgent`)
"""
import numpy as np
from ...helpers.schedule import Schedule
from ...algorithm import q, nsteptd
from .agent import Agent, GREEDY
class QAgent(Agent):
    """
    Implements Q-Learning: Off-policy temporal difference learning which
    only considers immediate rewards.
    """

    def learn(self, episodes: int=100, policy: str=GREEDY,\
              discount: Schedule=Schedule(1.,), epsilon: Schedule=Schedule(0,),\
              **kwargs) -> np.ndarray:
        """
        Calls the learning algorithm `episodes` times.

        Args:
        * episodes: Number of episodes to learn over.
        * policy (str): The action selection policy. Used during learning/
          exploration to randomly select actions from a state. One of
          `agent.[UNIFORM | GREEDY | SOFTMAX]`. Default UNIFORM.
        * discount: The discount level for future rewards. Between 0 and 1.
        * maxsteps: Number of steps at most to take if episode continues.
        * epsilon: A `Schedule` instance describing how the exploration rate
          changes for each episode (for GREEDY policy).
        * memsize: Size of experience memory. Default 1 most recent observation.
        * batchsize: Number of past experiences to replay. Default 1.

        If a parameter is a `Schedule`, it is evaluated for each episode and
        passed as a number.

        Returns:
        * An array of rewards for each episode.
        """
        # Forward `discount` through kwargs and delegate to the shared GPI
        # learning loop, selecting the one-step Q-learning update rule `q`.
        kwargs['discount'] = discount
        return super().learn(algorithm=q, episodes=episodes, policy=policy,
                             epsilon=epsilon, **kwargs)
class NStepTDAgent(Agent):
    """
    Implements `n-step TD`: Off-policy temporal difference learning with
    delayed rewards up to a horizon of `n` steps into the future.
    """

    def learn(self, episodes: int=100, policy: str=GREEDY, steps: int=5,
              discount: Schedule=Schedule(1.,), epsilon: Schedule=Schedule(0,),\
              **kwargs) -> np.ndarray:
        """
        Calls the learning algorithm `episodes` times.

        Args:
        * episodes: Number of episodes to learn over.
        * policy (str): The action selection policy. Used during learning/
          exploration to randomly select actions from a state. One of
          `agent.[UNIFORM | GREEDY | SOFTMAX]`. Default UNIFORM.
        * steps: The number of steps to accumulate reward. Default=5.
        * epsilon: A `Schedule` instance describing how the exploration rate
          changes for each episode (for GREEDY policy).
        * discount: The discount level for future rewards. Between 0 and 1. If -1,
          then return is average of rewards instead of a discounted sum.
        * maxsteps: Number of steps at most to take if episode continues.
        * memsize: Size of experience memory. Default 1 most recent observation.
        * batchsize: Number of past experiences to replay. Default 1.

        If a parameter is a `Schedule`, it is evaluated for each episode and
        passed as a number.

        Returns:
        * An array of rewards for each episode.
        """
        # Forward `discount` through kwargs and delegate to the shared GPI
        # learning loop, selecting the n-step TD update rule `nsteptd`.
        kwargs['discount'] = discount
        return super().learn(algorithm=nsteptd, episodes=episodes, policy=policy,
                             epsilon=epsilon, steps=steps, **kwargs)
e570af4aedec5812c8cc5b68b40f3c7f837531a0 | Python | chanjulee/Algorithm | /프로그래머스_해시/해시.py | UTF-8 | 2,478 | 4.46875 | 4 | [] | no_license | #딕셔너리 만들기,추가,삭제,사용
def dictionary():
    """Demo: creating a dict, adding and deleting pairs, and key lookup."""
    # Duplicate keys would keep only one entry, and lists cannot be used
    # as keys (keys must be hashable).
    person = dict()  # empty dictionary
    person = {'name': 'Hong', 'phone': '12345678', 'birth': '1218'}
    # Add a new key/value pair.
    person['e-mail'] = ['123@gmail', '456@naver']
    # Remove an entry by key.
    del person['birth']
    # Look a value up via its key.
    print(person['name'])
#딕셔너리 관련 함수들
def dictionaryfunction():
    """Demo of the dict helper methods: keys(), values(), items(), get(), in."""
    contacts = {'name': 'pey', 'phone': '0119993323', 'birth': '1118'}
    # keys() yields a dict_keys view: it looks like a list, but has no
    # append/insert/pop/remove/sort methods.
    print(contacts.keys())
    # Convert the view into a real, mutable list.
    key_list = list(contacts.keys())
    key_list.append('추가')
    # values() and items() give the matching views.
    print(contacts.values())
    print(contacts.items())
    # clear() would empty the whole dict:
    # contacts.clear()
    # print(contacts)
    # get(key) looks a value up by key; a missing key gives None instead of
    # the KeyError that contacts['nokey'] would raise.
    print(contacts.get('name'))
    print(contacts.get('nokey'))
    # A second argument to get() is returned when the key is absent
    # (the key is NOT inserted into the dict).
    print(contacts.get('nokey', 'key값없음'))
    # 'in' tests key membership.
    print('name' in contacts)   # True
    print('nokey' in contacts)  # False
#딕셔너리 정렬해보기
def dictionarySort():
    """Demo of sorting a dict by key and by value using sorted()."""
    # dict has no .sort(); use sorted() on its views instead.
    roster = {'JS': [19, 180], 'JN': [21, 176], 'CL': [20, 178], 'RJ': [21, 170], 'JM': [21, 176]}
    # Sort by key: names only.
    ordered = sorted(roster.keys(), key=lambda name: name)
    print(ordered)
    # Sort by key, keeping the (key, value) pairs.
    ordered = sorted(roster.items(), key=lambda pair: pair[0])
    print(ordered)
    # Sort the values by age, then (separately) by height.
    ordered = sorted(roster.values(), key=lambda stats: stats[0])
    print(ordered)
    ordered = sorted(roster.values(), key=lambda stats: stats[1])
    print(ordered)
    # Sort the pairs by age first, then by name.
    ordered = sorted(roster.items(), key=lambda pair: (pair[1][0], pair[0]))
    print(ordered)
if __name__ == "__main__":
    # Earlier demos left disabled; run only the sorting demo.
    #dictionary()
    #dictionaryfunction()
    dictionarySort()
#출처 https://wikidocs.net/16#key-value | true |
3991a3f9dca1c327e745909d2e55cf836d40ca7a | Python | lightningmonkey/dino | /main.py | UTF-8 | 7,482 | 3.375 | 3 | [] | no_license | from import_all import *
from sets import Set
from player import Player
from background import Background
from scenery import GenericScenery, SceneryTests
from animals import AnimalsTests, GenericAnimal
from text import Eating, GenericText
# from map import Map, MapTests
class MainLoop(object):
    """ The main event loop for the game"""

    def __init__(self, file_name):
        """Set up pygame, the window, the player and the background.

        :param file_name: path of the XML map definition to load.

        Note: the constructor blocks -- loop() only returns on quit.
        """
        pygame.init()
        self.display_surf = pygame.display.set_mode((WINDOW_SIZE_X, WINDOW_SIZE_Y), 0, 32)
        pygame.display.set_caption("Dinosaurs Evolved")
        self.change = False # Used to see if the board needs to be redrawn
        self.down_keys = Set() # The keys that the user is currently holding down
        self.player = Player()
        self.background = Background(file_name)
        self.loop()

    def change_movement(self, event):
        """ When a key is pushed down or let up, change the down_keys list """
        if event.type == KEYDOWN:
            self.down_keys.add(event.key)
        elif event.type == KEYUP:
            # NOTE(review): remove() raises KeyError if a KEYUP arrives with no
            # matching KEYDOWN (e.g. key already held at launch) -- confirm
            # whether discard() semantics were intended.
            self.down_keys.remove(event.key)

    def bounds_check(self, x, y):
        """ Make sure the player can no leave the playable surface """
        # (x, y) is the candidate background offset; the player is always
        # drawn at the screen centre, so the check is "does the playable
        # background rect still contain the centred player rect".
        background_rect = pygame.Rect(x + OFFSET_X, y + OFFSET_Y, self.background.map.PLAYABLE_DIMENSION_X,
                                      self.background.map.PLAYABLE_DIMENSION_Y)
        player_rect = pygame.Rect(WINDOW_SIZE_X / 2, WINDOW_SIZE_Y / 2, PLAYER_DIMENSION_X, PLAYER_DIMENSION_Y)
        return background_rect.contains(player_rect)

    def object_check(self, x, y):
        """ Make sure the player can not run over any object on the map

        Also resolves contact effects: scenery with food is eaten, animals
        exchange attacks. Returns False when the move is blocked by a
        collision, True when the way is clear.
        """
        player_rect = pygame.Rect(x, y, PLAYER_DIMENSION_X, PLAYER_DIMENSION_Y)
        for current_object in self.background.all_objects:
            object_rect = pygame.Rect(current_object.x, current_object.y, current_object.surface_width,
                                      current_object.surface_height)
            if player_rect.colliderect(object_rect):
                if isinstance(current_object, GenericScenery):
                    food_qty = current_object.get_food()
                    if food_qty > 0:
                        print("NOM NOM")
                        self.player.eat_food(food_qty)
                        text = Eating(current_object)
                        logging.info("Added text: {0}".format(str(text)))
                        self.background.add_object(text)
                        self.change = True
                if isinstance(current_object, GenericAnimal):
                    # Both sides strike in the same tick.
                    player_attack = self.player.attack()
                    enemy_attack = current_object.attack()
                    player_dead = self.player.take_damage(enemy_attack)
                    enemy_dead = current_object.take_damage(player_attack)
                    print 'Player {0} {1} enemy {2} {3}'.format(self.player.get_health(), player_dead,
                                                                current_object.get_health(), enemy_dead)
                    logging.info(
                        'Attack! Player {0} alive {1} enemy {2} alive {3}'.format(self.player.get_health(), player_dead,
                                                                                  current_object.get_health(),
                                                                                  enemy_dead))
                    # NOTE(review): the removal branch runs when `not enemy_dead`,
                    # which suggests take_damage() returns an "alive" flag despite
                    # the variable name -- confirm against the animals module.
                    if not enemy_dead:
                        logging.info('Enemy killed: {0}'.format(str(current_object)))
                        self.background.all_objects.remove(current_object)
                        self.background.draw_all()
                # Any collision blocks the attempted move.
                return False
        return True

    def move_check(self, x, y):
        """ Given the x,y that the player wants to move to make sure nothing is in the way """
        # Background offset (x, y) maps to player world position (-x, -y).
        return self.bounds_check(x, y) and self.object_check(-x, -y)

    def move(self):
        """Move the background, not the player.

        As the player moves around, we also want to make sure they are centered in the screen. This means
        that it is the background that needs to move while the player stays stationary. Thus all the movements
        are 'backwards' below. The the player wants to move to the right, move the board to the left.
        """
        for k in self.down_keys:
            if k == K_DOWN or k == K_s:
                tmpy = self.background.y - STEP_SIZE
                if self.move_check(self.background.x, tmpy):
                    self.background.y = tmpy
            elif k == K_UP or k == K_w:
                tmpy = self.background.y + STEP_SIZE
                if self.move_check(self.background.x, tmpy):
                    self.background.y = tmpy
            elif k == K_RIGHT or k == K_d:
                tmpx = self.background.x - STEP_SIZE
                if self.move_check(tmpx, self.background.y):
                    self.background.x = tmpx
            elif k == K_LEFT or k == K_a:
                tmpx = self.background.x + STEP_SIZE
                if self.move_check(tmpx, self.background.y):
                    self.background.x = tmpx
        self.player.x = -self.background.x # Since we start at (0.0) this will always be true
        self.player.y = -self.background.y

    def redraw(self):
        """ Update the display """
        if self.change:
            # Something on the board changed: rebuild the background surface.
            self.background.redraw()
            self.change = False
        self.display_surf.blit(self.background.get_surface(), (self.background.x, self.background.y))
        self.display_surf.blit(self.player.get_surface(), (WINDOW_SIZE_X / 2, WINDOW_SIZE_Y / 2))
        pygame.display.update()

    def update_objects(self):
        """Advance timed objects: respawn scenery food, expire floating text."""
        for current_object in self.background.all_objects:
            if isinstance(current_object, GenericTimedSurface):
                if isinstance(current_object, GenericScenery):
                    self.change = self.change or current_object.food_respawn()
                elif isinstance(current_object, GenericText):
                    if current_object.timer_fire():
                        current_object.get_parent().set_change()
                        # NOTE(review): removes from the list currently being
                        # iterated, which can skip the following element --
                        # confirm whether that matters here.
                        self.background.all_objects.remove(current_object)
                        self.change = True

    def loop(self):
        """ The main loop that drives the game"""
        assert (0 == self.player.x) # if the player does not start at (0,0) the later positions are all screwed up
        assert (0 == self.player.y)
        logging.info('Starting main loop')
        fps_clock = pygame.time.Clock()
        while True:
            self.display_surf.fill(WHITE)
            # Drain the event queue: quit, plus key state bookkeeping.
            for event in pygame.event.get():
                if event.type == QUIT:
                    pygame.quit()
                    sys.exit()
                elif event.type == KEYDOWN or event.type == KEYUP:
                    self.change_movement(event)
            # Per-frame simulation: timers, movement, then rendering.
            self.update_objects()
            self.move()
            self.redraw()
            fps_clock.tick(FPS)
def run_tests():
    """Spin up the pygame window the tests expect, then hand control to
    the unittest runner (which discovers the imported *Tests classes)."""
    logging.info('Run tests')
    pygame.init()
    window = pygame.display.set_mode((WINDOW_SIZE_X, WINDOW_SIZE_Y), 0, 32)
    pygame.display.set_caption("Dinosaurs Evolved")
    unittest.main()
if __name__ == '__main__':
    #run_tests()
    # Constructing MainLoop starts (and blocks in) the game loop.
    main = MainLoop('map_definitions/Second.xml')
45bebb130937111547bec7e0503cc12f5b1805a9 | Python | ibigio/deep-learning-final | /project/cheating_assignment.py | UTF-8 | 9,316 | 2.65625 | 3 | [] | no_license | import pyspiel
from pylab import *
import numpy as np
import tensorflow as tf
from cheating_model import ReinforceWithBaseline
from liars_dice_gym import LiarsDiceEnv
from safe_naive_agent import SafeNaiveAgent
from random_agent import RandomAgent
def visualize_data(total_rewards):
    """
    Takes in array of rewards from each episode, visualizes reward over episodes.

    :param total_rewards: List of rewards from all episodes
    """
    # x axis: episode index; y axis: reward. plot/xlabel/... come from
    # the module's `from pylab import *`.
    x_values = arange(0, len(total_rewards), 1)
    y_values = total_rewards
    plot(x_values, y_values)
    xlabel('episodes')
    ylabel('cumulative rewards')
    title('Reward by Episode')
    grid(True)
    show()  # blocks until the plot window is closed
def discount(rewards, discount_factor=.99):
    """
    Takes in a list of rewards for each timestep in an episode,
    and returns the sum of discounted rewards for each timestep,
    computed as a single matrix product.

    :param rewards: List of rewards from an episode [r_{t1},r_{t2},...]
    :param discount_factor: Gamma discounting factor to use, defaults to .99
    :return: a (1, timesteps) float32 tensor of discounted reward sums
    """
    # Compute discounted rewards (trust me this works and hopefully it's super fast)
    timesteps = len(rewards) # make into matrix
    rewards = tf.convert_to_tensor([rewards],dtype=tf.float32)
    # create lower triangular matrix of discount_factor weights
    # T[i][j] = gamma ** max(1+i-j, 0), so result[j] = sum_{i>=j} gamma^(i-j+1) * r_i.
    # NOTE(review): the diagonal exponent is 1, not 0 -- every return carries one
    # extra factor of gamma versus the textbook definition. Confirm this is the
    # intended formulation before changing anything.
    T = tf.convert_to_tensor([[max(1+i-j,0) for j in range(timesteps)] for i in range(timesteps)],dtype=tf.float32)
    T = tf.math.pow(discount_factor, T)
    # keep only the lower triangle (i >= j): reward at time i contributes to
    # returns at timesteps j <= i only.
    T = tf.linalg.band_part(T, -1, 0)
    # apply discount factor
    return tf.matmul(rewards, T)
def generate_trajectory(env, model, adversary):
    """
    Generates one complete episode of the model (player 0) against the
    adversary (player 1).

    :param env: The Liar's Dice gym-style environment
    :param model: The model used to generate the model player's actions
    :param adversary: Opponent agent with a step(last_call, hand) method
    :return: A tuple of lists (calls, hands, ad_hands, actions, rewards),
        one entry per *model* turn (adversary turns only update rewards[-1]
        when they end the game).
    """
    calls = []
    hands = []
    ad_hands = []
    actions = []
    rewards = []
    time_step = env.reset()
    cur_agent, next_agent = model, adversary
    model_player_id = 0
    # TODO: add random starting
    last_call = None
    while not time_step.last():
        # get cur player id and hand
        cur_player_id = int(time_step.observations['current_player'])
        hand_id = time_step.observations['info_state'][cur_player_id]
        ad_hand_id = time_step.observations['info_state'][1-cur_player_id]
        # If adversary's turn, make move and update last call
        if cur_player_id != model_player_id:
            action = adversary.step(last_call, hand_id)
            time_step = env.step([action])
            if time_step.last():
                # Game ended on the adversary's move: the model's clipped
                # terminal reward replaces the last recorded reward.
                rewards[-1] = max(time_step.rewards[model_player_id],0)
            last_call = action
            cur_agent, next_agent = next_agent, cur_agent
            continue
        # get action from agent
        if last_call == None:
            last_call = 1
        last_call_tensor = tf.convert_to_tensor([last_call], dtype=tf.float32)
        hand_id_tensor = tf.convert_to_tensor([hand_id], dtype=tf.float32)
        ad_hand_id_tensor = tf.convert_to_tensor([ad_hand_id], dtype=tf.float32)
        prbs = cur_agent.call(last_call_tensor, hand_id_tensor, ad_hand_id_tensor)[0].numpy()
        # mask out illegal actions
        legal_actions = time_step.observations['legal_actions'][cur_player_id]
        legal_actions_mask = np.ones(env.num_actions, dtype=bool)
        legal_actions_mask[legal_actions] = False
        prbs[legal_actions_mask] = 0
        # renormalize probabilities
        norm = np.sum(prbs)
        # TODO: check for zero norm
        if norm == 0:
            # All legal actions had zero probability: fall back to uniform.
            old_prbs = prbs
            prbs = np.zeros(env.num_actions)
            prbs[legal_actions] += (1/len(legal_actions))
        else:
            prbs = prbs / norm
        # select action weighted by prbs
        action = np.random.choice(list(range(len(prbs))), p=prbs)
        # apply action to env
        time_step = env.step([action])
        # update calls, hands, actions, and rewards
        calls.append(last_call)
        hands.append(hand_id)
        # NOTE(review): appends hand_id rather than ad_hand_id -- looks like a
        # copy/paste slip (ad_hands would duplicate hands); confirm intent.
        ad_hands.append(hand_id)
        actions.append(action)
        rewards.append(max(time_step.rewards[cur_player_id],0))
        last_call = action
        cur_agent, next_agent = next_agent, cur_agent
    return calls, hands, ad_hands, actions, rewards
def train(env, model, adversary):
    """
    Trains the model for one episode: generates a complete trajectory
    against the adversary, computes discounted rewards, then runs one
    backpropagation step on the model's loss.

    :param env: The environment
    :param model: The model (provides loss, trainable_variables, optimizer)
    :param adversary: Opponent agent used during the rollout
    :return: The total (undiscounted, clipped) reward for the episode
    """
    # The forward pass (rollout + loss) must happen inside the tape so
    # gradients can be taken.
    with tf.GradientTape() as tape:
        calls, hands, ad_hands, actions, rewards = generate_trajectory(env, model, adversary)
        discounted = discount(rewards)
        loss = model.loss(np.array(calls), np.array(hands), np.array(ad_hands), np.array(actions), discounted)
    gradients = tape.gradient(loss, model.trainable_variables)
    model.optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    return np.sum(rewards)
def test(env, model, adversary):
    """Play one evaluation episode and return the model's total reward."""
    *_, episode_rewards = generate_trajectory(env, model, adversary)
    return np.sum(episode_rewards)
def test_random(env, model):
    """Play one full game of the model against a uniformly-random opponent
    and return the model's clipped terminal reward.

    Mirrors the action-selection logic of generate_trajectory, except the
    opponent picks uniformly from its legal actions and the model plays as
    player 1 here (it trains as player 0 -- presumably to vary who opens;
    confirm).
    """
    time_step = env.reset()
    model_player_id = 1
    last_call = None
    while not time_step.last():
        # get cur player id and hand
        cur_player_id = int(time_step.observations['current_player'])
        hand_id = time_step.observations['info_state'][cur_player_id]
        ad_hand_id = time_step.observations['info_state'][1-cur_player_id]
        # If adversary's turn, make move and update last call
        if cur_player_id != model_player_id:
            # Random baseline: uniform over legal actions.
            action = np.random.choice(time_step.observations['legal_actions'][cur_player_id])
            time_step = env.step([action])
            last_call = action
            continue
        # get action from agent
        if last_call == None:
            last_call = 1
        last_call_tensor = tf.convert_to_tensor([last_call], dtype=tf.float32)
        hand_id_tensor = tf.convert_to_tensor([hand_id], dtype=tf.float32)
        ad_hand_id_tensor = tf.convert_to_tensor([ad_hand_id], dtype=tf.float32)
        prbs = model.call(last_call_tensor, hand_id_tensor, ad_hand_id_tensor)[0].numpy()
        # mask out illegal actions
        legal_actions = time_step.observations['legal_actions'][cur_player_id]
        legal_actions_mask = np.ones(env.num_actions, dtype=bool)
        legal_actions_mask[legal_actions] = False
        prbs[legal_actions_mask] = 0
        # renormalize probabilities
        norm = np.sum(prbs)
        # TODO: check for zero norm
        if norm == 0:
            # All legal actions had zero probability: fall back to uniform.
            old_prbs = prbs
            prbs = np.zeros(env.num_actions)
            prbs[legal_actions] += (1/len(legal_actions))
        else:
            prbs = prbs / norm
        # select action weighted by prbs
        action = np.random.choice(list(range(len(prbs))), p=prbs)
        # apply action to env
        time_step = env.step([action])
        last_call = action
    # Clipped terminal reward for the model player.
    return max(time_step.rewards[model_player_id],0)
def test_n_random(env, model, n):
    """Average reward of the model over *n* games against a random opponent."""
    outcomes = [test_random(env, model) for _ in range(n)]
    return np.mean(outcomes)
def main():
    """Train the REINFORCE-with-baseline agent against the safe-naive
    adversary, periodically evaluating against a random opponent, then
    plot both reward curves."""
    env = LiarsDiceEnv()
    num_actions = env.num_actions
    # Initialize model
    model = ReinforceWithBaseline(num_actions)
    adversary = SafeNaiveAgent(env)
    # TODO:
    # 1) Train your model for 650 episodes, passing in the environment and the agent.
    all_rewards = []
    smoothed_rewards = []
    random_test_rewards = []
    smoothed_random_test_rewards = []
    epochs = 10000
    for i in range(epochs):
        all_rewards.append(train(env, model, adversary))
        # random_test_rewards.append(test_random(env, model))
        if i % 100 == 0:
            # Rolling 100-episode average against the training adversary.
            smooth = np.mean(all_rewards[-100:])
            smoothed_rewards.append(smooth)
            print(f"Reward of past 100/{i}:",smooth)
            # smooth = np.mean(random_test_rewards[-1000:])
            # smoothed_random_test_rewards.append(smooth)
            # 1000-game evaluation against the random baseline.
            random = test_n_random(env, model, 1000)
            random_test_rewards.append(random)
            print("Reward against random:", random)
    # 2) Append the total reward of the episode into a list keeping track of all of the rewards.
    # 3) After training, print the average of the last 50 rewards you've collected.
    # TODO: Visualize your rewards.
    visualize_data(smoothed_rewards)
    visualize_data(random_test_rewards)
if __name__ == '__main__':
    # Script entry point: trains the agent and plots reward curves.
    main()
| true |
def solution(weights, head2head):
    """Rank boxers for the Programmers 'boxer sorting' problem.

    head2head[i][j] is 'W'/'L'/'N' for boxer i's result against boxer j.
    Order: higher win rate, then more wins over heavier opponents, then
    heavier weight, then lower boxer number. Returns 1-based numbers.
    """
    ranking = []
    for idx, (weight, record) in enumerate(zip(weights, head2head)):
        wins = record.count('W')
        fights = wins + record.count('L')
        win_rate = wins / fights if fights else 0
        heavier_beaten = sum(
            1 for opp, opp_weight in enumerate(weights)
            if opp_weight > weight and record[opp] == 'W'
        )
        ranking.append((win_rate, heavier_beaten, weight, idx + 1))
    ranking.sort(key=lambda entry: (-entry[0], -entry[1], -entry[2], entry[3]))
    return [entry[3] for entry in ranking]
9a1e3e1d53a9ad3f74a6b1fa375396ed149ba7c7 | Python | HRG-Lab/UGR_2017-2018 | /jfreking/CV_for_NI/colorDetect.py | UTF-8 | 4,449 | 2.6875 | 3 | [] | no_license | import numpy as np
import cv2
from socket import *
import socket
import os
import sys
import pandas as pd
import netifaces as ni
# Get codewords from Excel file
# Maps a horizontal pixel position (0..1919) to a transmit codeword.
codebook = pd.read_excel('/home/jfreking/Desktop/1920_codewords_azimuth_only.xlsx',header=0)
txCodes = codebook['TX Codewords']
#print txCodes
# Get IP address (check the available links with cmd: ifconfig -- connection may be 'eth0')
ni.ifaddresses('enp3s0')
ip = ni.ifaddresses('enp3s0')[ni.AF_INET][0]['addr']
print ip
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Bind the socket to the port
server_address = (ip, 8085) #server name, port number
print >>sys.stderr, 'starting up on %s port %s' % server_address
try:
    sock.bind(server_address)
except socket.error as msg:
    print 'Bind failed.Error Code: ' + str(msg[0]) + ' Message: ' + msg[1]
    sys.exit()
print 'Socket bind complete'
# If no connection is available in 2 seconds of trying to send data, raise error
# (the timeout below is actually 0.1 s -- comment and value disagree; confirm)
sock.setblocking(1)
sock.settimeout(0.1)
# Listen for incoming connections -- accept waits for an incoming connection
sock.listen(10)
print 'Socket listening'
cap = cv2.VideoCapture(1)
#codeword = '12'
# Uncomment to tune tracker to a different color
def nothing(x):
    # No-op trackbar callback required by cv2.createTrackbar.
    pass
cv2.namedWindow('HSV Tuner')
cv2.createTrackbar('Hmin', 'HSV Tuner', 0, 180, nothing)
cv2.createTrackbar('Hmax', 'HSV Tuner', 0, 180, nothing)
cv2.createTrackbar('Smin', 'HSV Tuner', 0, 255, nothing)
cv2.createTrackbar('Smax', 'HSV Tuner', 0, 255, nothing)
cv2.createTrackbar('Vmin', 'HSV Tuner', 0, 255, nothing)
cv2.createTrackbar('Vmax', 'HSV Tuner', 0, 255, nothing)
while True:
    # Uncomment to tune tracker to a different color
    # Get slider positions
    hMin = cv2.getTrackbarPos('Hmin', 'HSV Tuner')
    hMax = cv2.getTrackbarPos('Hmax', 'HSV Tuner')
    sMin = cv2.getTrackbarPos('Smin', 'HSV Tuner')
    sMax = cv2.getTrackbarPos('Smax', 'HSV Tuner')
    vMin = cv2.getTrackbarPos('Vmin', 'HSV Tuner')
    vMax = cv2.getTrackbarPos('Vmax', 'HSV Tuner')
    # Set HSV thresholds
    # Uncomment to tune tracker to a different color
    lw_range = np.array([hMin,sMin,vMin])
    up_range = np.array([hMax,sMax,vMax])
    # Get frame from camera
    ret, frame = cap.read()
    # Logitech C920 has a resolution of 1920x1080
    frame = cv2.resize(frame, (1920,1080))
    # Convert frame to HSV
    hsv_img = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)
    # Define frame threshold with HSV thresholds
    frame_threshold = cv2.inRange(hsv_img, lw_range, up_range)
    # Find contours
    ret,thresh = cv2.threshold(frame_threshold, 127, 255, 0)
    _, contours, heirarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Find center of largest contour and produce a codeword based on position
    # Note: codeword MUST be a string for TCP/IP communication
    if contours != []:
        areas = [cv2.contourArea(c) for c in contours]
        maxIndex = np.argmax(areas)
        cnt = contours[maxIndex]
        x,y,w,h = cv2.boundingRect(cnt)
        cv2.rectangle(frame, (x,y), (x+w, y+h), (0,255,0), 2)
        cv2.rectangle(frame_threshold, (x,y), (x+w, y+h), (180,255,255), 2)
        X = x+w/2
        Y = y+h/2
        #resolution: 1920x1080
        codeword = str(txCodes[X])
        #DEGBUGGING
        """
        print("x: {}".format(X))
        print("y: {}".format(Y))
        print(" ")
        print 'codeword: ' + codeword
        print(" ")
        """
    # Wait for a connection
    # NOTE(review): `codeword` is only assigned once a contour has been seen;
    # sendall() below can raise NameError on early frames -- confirm whether
    # the commented-out `codeword = '12'` default should be restored.
    print >>sys.stderr, 'waiting for a connection'
    # Try to accept a client
    try:
        conn, client_address = sock.accept() #returns open connection btwn server and client and the client address
        # Send codeword if there is a connection, if no connection, print error and continue
        try:
            conn.sendall(codeword)
        except socket.error as msg:
            print 'No connection available. Error Code: ' + str(msg[0]) + ' Error Msg: ', msg[1]
            continue
    except timeout:
        # `timeout` comes from the module's `from socket import *`.
        print 'caught a timeout'
    cv2.imshow("Show", frame)
    cv2.imshow("HSV", frame_threshold)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
conn.close()
cap.release()
cv2.destroyAllWindows()
| true |
039685f0084173435e3dd6ef08848e5c3400d00b | Python | huaxr/Pyweby | /Pyweby/handle/template.py | UTF-8 | 9,649 | 2.59375 | 3 | [] | no_license | #coding: utf-8
import re
import types
import os
class BaseEngine(object):
    """Shared machinery for the template engines: token regexes, the
    compiled-template cache, the tokenize/dispatch parse loop, and the
    handler interface subclasses must implement."""

    WHATEVER = 0  # sentinel for the (disabled) malicious-code path in safe_exec
    _template_cache = {}  # file_path -> compiled code object, shared class-wide
    # Token patterns: {{ variable }}, {# comment #}, {% tag %}
    re_variable = re.compile(r'\{\{ .*? \}\}')
    re_comment = re.compile(r'\{# .*? #\}')
    re_tag = re.compile(r'\{% .*? %\}')
    re_extends = re.compile(r'\{% extends (?P<name>.*?) %\}')
    # {% block name %} ... {% endblock name %}; the \1 backreference forces
    # the endblock name to match the opening name.
    re_blocks = re.compile(
        r'\{% block (?P<name>\w+) %\}'
        r'(?P<code>.*?)'
        r'\{% endblock \1 %\}', re.DOTALL)
    re_block_super = re.compile(r'\{\{ block\.super \}\}')
    # Split pattern: the capturing group keeps delimiters in the token stream.
    re_tokens = re.compile(r'((?:\{\{ .*? }\})|(?:\{\# .*? \#\}|(?:\{% .*? %\})))', re.X)

    def __init__(self, raw_html):
        self.raw_html = raw_html

    def _parse(self):
        """Tokenize raw_html and dispatch each token to its handler."""
        # Resolve template inheritance before tokenizing.
        self._handle_extends()
        tokens = self.re_tokens.split(self.raw_html)
        # ['<h1>', '{% if score >= 80 %}', ' A\n ', '{% elif score >= 60 %}',
        # ' B\n ', '{% else %}', ' C\n ', '{% endif %}', '</h1>']
        handlers = (
            (self.re_variable.match, self._handle_variable), # {{ variable }}
            (self.re_tag.match, self._handle_tag), # {% tag %}
            (self.re_comment.match, self._handle_comment), # {# comment #}
        )
        default_handler = self._handle_string # normal string
        for token in tokens:
            for match, handler in handlers:
                if match(token):
                    handler(token)
                    break
            else:
                # No pattern matched: plain literal text.
                default_handler(token)

    def _handle_variable(self, token):
        """variable handler"""
        raise NotImplementedError

    def _handle_comment(self, token):
        """annotation handler"""
        raise NotImplementedError

    def _handle_string(self, token):
        """string handler"""
        raise NotImplementedError

    def _handle_tag(self, tag):
        """tag handler"""
        raise NotImplementedError

    def _handle_extends(self):
        """template-inheritance handler"""
        raise NotImplementedError

    def safe_exec(self, co, kw):
        """Execute the compiled template code with *kw* as its namespace.

        SECURITY NOTE(review): the sanitizing loop below is commented out, so
        user-controlled values are NOT screened before exec() -- confirm this
        is acceptable for the deployment context.
        """
        assert isinstance(co, types.CodeType)
        '''
        every user control value should be sterilize/disinfect here.
        '''
        # for i in kw.values():
        #     if '__import__' in i:
        #         # raise DangerTemplateError('malicious code found.')
        #         return self.WHATEVER
        exec(co, kw)
class Builder(object):
    """Accumulates generated source lines at a tracked indentation depth.

    Entries appended via add() may themselves be Builders; __str__
    stringifies them recursively.
    """

    STEPER = 1  # indentation delta applied per nesting level

    def __init__(self, indent=0):
        # Current indentation depth, counted in tab stops.
        self.indent = indent
        # Generated code, one entry per line (or nested Builder).
        self.lines = []

    def goahead(self):
        """Enter one nesting level (e.g. after emitting an 'if ...:')."""
        self.indent += self.STEPER

    def goback(self):
        """Leave one nesting level."""
        self.indent -= self.STEPER

    def add(self, code):
        """Append *code* (typically a nested Builder) without indenting it."""
        self.lines.append(code)

    def add_line(self, code):
        """Append one line of code, indented to the current depth with tabs."""
        self.lines.append('\t' * self.indent + code)

    def __str__(self):
        return '\n'.join(map(str, self.lines))

    __repr__ = __str__
class TemplateEngine(BaseEngine):
    '''
    Template Parse Engine.

    Compiles a template string into a generated Python function (named
    *magic_func*) that appends rendered fragments to a list (*magic_result*)
    and joins them; render() executes that function in the caller's
    namespace.

    Reference:
        1: Tornado source code
        2: uri: http://python.jobbole.com/85155/
    '''
    def __init__(self, raw_html, template_dir='', file_path='', global_locals=None, indent=0,
                 magic_func='__exists_func', magic_result='__exists_list'):
        self.raw_html = raw_html
        self.template_dir = template_dir
        self.file_path = file_path
        # Pending expression fragments, flushed into the builder by clear_buffer().
        self.buffered = []
        self.magic_func = magic_func
        self.magic_result = magic_result
        # for user define namespace
        self.global_locals = global_locals or {}
        self.encoding = 'utf-8'
        self.builder = Builder(indent=indent)
        # Code generation happens eagerly, at construction time.
        self.__generate_python_func()
        super(TemplateEngine, self).__init__(self.raw_html)

    def render(self, kwargs):
        """Execute the generated template function with *kwargs* as its
        namespace and return the rendered string ('' on the sentinel path)."""
        _ignore = kwargs.pop('ignore_cache', False)
        # add defined namespace first
        kwargs.update(self.global_locals)
        '''
        NOTE(review): with _ignore True this branch RECOMPILES (the original
        comment claimed the opposite); confirm which behaviour 'ignore_cache'
        is meant to have.
        '''
        if _ignore or self.file_path not in BaseEngine._template_cache:
            co = compile(str(self.builder), self.file_path, 'exec')
            BaseEngine._template_cache[self.file_path] = co
        else:
            co = BaseEngine._template_cache[self.file_path]
        __ = self.safe_exec(co, kwargs)
        # safe_exec returns a non-None sentinel only on its (disabled)
        # malicious-code path; normal exec() returns None.
        if __ is not None:
            return ''
        result = kwargs[self.magic_func]()
        return result

    def __generate_python_func(self):
        """Emit the skeleton of the generated function and fill its body
        by parsing the template."""
        builder = self.builder
        builder.add_line('def {}():'.format(self.magic_func))
        builder.goahead()
        builder.add_line('{} = []'.format(self.magic_result))
        self._parse()
        self.clear_buffer()
        builder.add_line('return "".join({})'.format(self.magic_result))
        builder.goback()

    def clear_buffer(self):
        """Flush buffered expression fragments as one extend() call."""
        line = '{0}.extend([{1}])'.format(self.magic_result, ','.join(self.buffered))
        self.builder.add_line(line)
        self.buffered = []

    def _handle_variable(self, token):
        """variable handler"""
        variable = token.strip(' {} ')
        # >>> {{ title }} -> title
        self.buffered.append('str({})'.format(variable))

    def _handle_comment(self, token):
        """annotation handler: comments produce no output."""
        pass

    def _handle_string(self, token):
        """string handler"""
        '''
        handler default values, which may contains whitespace word,
        using strip() eliminate them.
        '''
        self.buffered.append('{}'.format(repr(token.strip())))

    def _handle_tag(self, token):
        """
        tag handler
        when calling this , you should save the code generate before
        and clear the self.buffer for the next Builder's code.
        """
        self.clear_buffer()
        tag = token.strip(' {%} ')
        tag_name = tag.split()[0]
        # tag: if score > 88
        # tag_name: if
        if tag_name == 'include':
            self._handle_include(tag)
        else:
            self._handle_statement(tag, tag_name)

    def _handle_statement(self, tag, tag_name):
        """handler if/elif/else/for/break"""
        if tag_name in ('if', 'elif', 'else', 'for'):
            if tag_name in ('elif', 'else'):
                # elif/else continue the previous block at its outer level.
                self.builder.goback()
            self.builder.add_line('{}:'.format(tag))
            self.builder.goahead()
        elif tag_name in ('break',):
            self.builder.add_line(tag)
        elif tag_name in ('endif', 'endfor'):
            self.builder.goback()

    def _handle_include(self, tag):
        '''
        The include tag acts like rendering another template using the namespace
        where the include is located and then using the rendered result.
        So we can treat the include template file as a normal template file,
        replace the include location with the code generated by parsing that template,
        and append the result to `__exists_list`.
        '''
        filename = tag.split()[1].strip('"\'') # index.html
        included_template = self._parse_template_file(filename)
        # Inline the included template's generated function, then call it.
        self.builder.add(included_template.builder)
        self.builder.add_line(
            '{0}.append({1}())'.format(
                self.magic_result, included_template.magic_func
            )
        )

    def _parse_template_file(self, filename):
        """Recursively build a child engine for an included template file."""
        template_path = os.path.realpath(
            os.path.join(self.template_dir, filename)
        )
        # Unique per-path suffix keeps nested generated names from colliding
        # ('-' is invalid in identifiers, hence the replace).
        name_suffix = str(hash(template_path)).replace('-', '_')
        # in the main function generate another function which return call
        # will append into the self.builder
        magic_func = '{}_{}'.format(self.magic_func, name_suffix)
        magic_result = '{}_{}'.format(self.magic_result, name_suffix)
        # recursion the Module to generate the small part include.
        with open(template_path, encoding=self.encoding) as fp:
            template = self.__class__(
                fp.read(), indent=self.builder.indent,
                global_locals=self.global_locals,
                magic_func=magic_func, magic_result=magic_result,
                template_dir=self.template_dir
            )
        return template

    def _handle_extends(self):
        """Resolve {% extends %}: merge this template's blocks into its
        parent's text and continue parsing the merged result."""
        match_extends = self.re_extends.match(self.raw_html)
        if match_extends is None:
            return
        parent_template_name = match_extends.group('name').strip('"\' ') # return extends.html
        parent_template_path = os.path.join(
            self.template_dir, parent_template_name
        )
        # get all the block in the template
        child_blocks = self._get_all_blocks(self.raw_html)
        with open(parent_template_path, encoding=self.encoding) as fp:
            parent_text = fp.read()
        new_parent_text = self._replace_parent_blocks(parent_text, child_blocks)
        # print(new_parent_text)
        # child_header {{ block.super }}
        # parent_footer
        self.raw_html = new_parent_text

    def _replace_parent_blocks(self, parent_text, child_blocks):
        """Substitute each parent {% block %} with the child's override
        (expanding {{ block.super }} to the parent's code)."""
        def replace(match):
            name = match.group('name')
            parent_code = match.group('code')
            child_code = child_blocks.get(name, '')
            # return child_code or parent_code
            child_code = self.re_block_super.sub(parent_code, child_code)
            new_code = child_code or parent_code
            return new_code
        return self.re_blocks.sub(replace, parent_text)

    def _get_all_blocks(self, text):
        """Map block name -> block body for every {% block %} in *text*."""
        # print(self.re_blocks.findall(text))
        # [('header', ' child_header {{ block.super }} ')]
        return {name: code for name, code in self.re_blocks.findall(text)}
3ee10985a43ca3ac4a7508188edb197db41c240f | Python | techadddict/Python-programmingRG | /Reading text files/reading textfiles4.py | UTF-8 | 547 | 3.78125 | 4 | [] | no_license | #Write a program that asks the user for a file name and prints the number of characters, words and lines in
# that file (continues the problem statement begun above).
def count_file_stats(lines):
    """Return (line_count, char_count, word_count) for *lines*.

    Characters include trailing newlines; words are whitespace-separated
    tokens after stripping the punctuation characters '/*,!.' from the
    ends of each line.
    """
    letters_count = 0
    words_count = 0
    for line in lines:
        # The original line here was a SyntaxError:
        #     words=line.strip('/*,!./').strip(.rstrip().split()
        # The intended operation: strip edge punctuation, then split on whitespace.
        words = line.strip('/*,!./').split()
        letters_count += len(line)
        words_count += len(words)
    return len(lines), letters_count, words_count


def main():
    """Ask the user for a file name and print its line, character and word counts."""
    filename = input('Please enter a filename to open')
    # filename = 'lyricso.txt'  # use filename of your choice
    with open(filename, 'r') as file:  # 'with' guarantees the file is closed
        lines = file.readlines()
    num_lines, letters_count, words_count = count_file_stats(lines)
    print(num_lines)
    print(letters_count)
    print(words_count)


if __name__ == '__main__':
    main()
| true |
e007bf7ea86170e80429d453313a65075d79bbb1 | Python | ebcarty/grapl | /src/aws-provision/swarm/configure_docker_daemon.py | UTF-8 | 1,067 | 2.515625 | 3 | [
"Apache-2.0"
] | permissive | import json
import os
import shlex
import subprocess
import sys
from typing import Dict
def _merge_daemon_config(config_update: Dict) -> Dict:
    """Merge *config_update* into /etc/docker/daemon.json.

    Reads the current daemon configuration (if the file exists), overlays
    the given keys, writes the result back via ``sudo``, and returns the
    merged dict.
    """
    daemon_json = "/etc/docker/daemon.json"
    if os.path.exists(daemon_json):
        with open(daemon_json, "r") as infile:
            config = json.load(infile)
    else:
        config = {}
    # Updated keys win over whatever was already configured.
    config.update(config_update)
    # Writing under /etc requires elevation, so shell out through sudo;
    # the JSON payload is shell-quoted to survive the bash -c round trip.
    serialized = shlex.quote(json.dumps(config, separators=(",", ":")))
    command = "echo {} > {}".format(serialized, daemon_json)
    subprocess.run(["sudo", "bash", "-c", command], check=True)
    return config
def main(raw_config: str) -> None:
    """Parse *raw_config* as a JSON object, merge it into the Docker
    daemon configuration, and echo the merged config to stdout."""
    merged = _merge_daemon_config(json.loads(raw_config))
    sys.stdout.write(json.dumps(merged))
# CLI entry point: argv[1] is a JSON object of daemon.json keys to merge.
if __name__ == "__main__":
    main(sys.argv[1])
| true |
0f6021b763ee190b8c74249c936baab0e811d42f | Python | OTRF/OSSEM | /resources/scripts/xlsx_to_yaml.py | UTF-8 | 3,856 | 2.9375 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env python3
# Project: OSSEM Data Dictionaries
# Author: Jose Rodriguez (@Cyb3rPandaH)
# License: GPLv3
# Importing libraries
import yaml
# Emit repeated objects in full instead of YAML anchors/aliases.
yaml.Dumper.ignore_aliases = lambda *args : True
import glob
import os
from os import path
import openpyxl
# Creating a list with ms excel files' names in your current directory
# Convert every .xlsx data dictionary next to this script into one YAML
# file per sheet.  Each sheet is parsed as a sequence of labeled rows:
# scalar metadata rows (title, platform, ...), then sections introduced by
# the marker rows 'standard_name', 'references' and 'tags'.
excel_files = glob.glob(path.join(path.dirname(__file__),"*.xlsx"))
# Parsing every ms excel file in your current directory
for excel_file_path in excel_files:
    # Getting name of file (includes extensions such as .xlsx)
    file_name = os.path.basename(excel_file_path)
    # Getting name of file (without extensions such as .xlsx)
    # NOTE(review): file_name is computed but never used afterwards —
    # output files are named after the sheet, not the workbook.
    file_name = file_name[:file_name.find('.')]
    # Getting content of ms excel file
    wb = openpyxl.load_workbook(excel_file_path)
    sheetnames = wb.sheetnames
    # Parsing every sheet within the file
    for sheet in sheetnames:
        # Defining sheet to parse
        sheet_to_parse = wb[sheet]
        # Defining a control variable to parse sections of the ms excel file
        control = ''
        # Defining references
        event_fields_yaml = []
        references_yaml = []
        tags_yaml = []
        for line in sheet_to_parse.iter_rows(values_only=True):
            # Updating control variable to identify sections of the ms excel file
            if line[0] == 'standard_name':
                control = 'data dictionary'
                continue
            if line[0] == 'references':
                control = 'references'
                continue
            if line[0] == 'tags':
                control = 'tags'
                continue
            # Getting values to create yaml file
            # NOTE(review): if a sheet omits any of these rows, the matching
            # *_yaml variable is never bound and data_dict below raises
            # NameError (or reuses the previous sheet's value).
            if line[0] == 'title':
                title_yaml = line[1].rstrip()
            if line[0] == 'description':
                description_yaml = line[1].rstrip()
            if line[0] == 'platform':
                platform_yaml = line[1]
            if line[0] == 'log_source':
                log_source_yaml = line[1]
            if line[0] == 'event_code':
                event_code_yaml = line[1]
            if line[0] == 'event_version':
                event_version_yaml = line[1]
            if line[0] == 'attack_data_sources':
                attack_data_sources_yaml = line[1].split(',')
            if control == 'data dictionary':
                # Skip fully-empty rows (sheets are assumed 5 columns wide).
                if line == (None,None,None,None,None):
                    continue
                # NOTE(review): 'dict' shadows the builtin inside this loop.
                dict = {'standard_name' : line[0],
                        'name' : line[1],
                        'type' : line[2],
                        'description' : line[3],
                        'sample_value' : line[4]}
                event_fields_yaml.append(dict)
            if control == 'references':
                if line == (None,None,None,None,None):
                    continue
                references_dict_yaml = {'text':line[0],'link':line[1]}
                references_yaml.append(references_dict_yaml)
            if control == 'tags':
                tags_yaml.append(line[0].rstrip())
        # Dictionary of data to create yaml file
        data_dict = {'title' : title_yaml,
                     'description' : description_yaml,
                     'platform' : platform_yaml,
                     'log_source' : log_source_yaml,
                     'event_code' : event_code_yaml,
                     'event_version' : event_version_yaml,
                     'attack_data_sources' : attack_data_sources_yaml,
                     'event_fields' : event_fields_yaml,
                     'references' : references_yaml,
                     'tags' : tags_yaml}
        # Formatting sheet name
        sheet = sheet.replace(' ','_')
        # Creating yaml file
        with open(sheet + '.yaml', 'w') as file:
            yaml.dump(data_dict, file, sort_keys = False, width = float("inf")) | true |
56509f67d43ffa04911bd137ddc93252fce9595d | Python | pyaephyokyaw15/PythonFreeCourse | /chapter3/eq_comparison.py | UTF-8 | 331 | 3.59375 | 4 | [] | no_license | print("True ==1 ", True == 1)
print("False ==0 ", False == 0)
print("'False' ==0 ", 'True' == 1)
lst1 = [1,2,3]
lst2 = ["1",2,3]
tp1 = (1,2,3)
tp2 = (1,2,3)
print("lst1 == lst2 ",lst1 == lst2)
print("lst1 == tp1 ",lst1 == tp1)
print("tp1 == tp2 ",tp1 == tp2)
set1 = {1,2,3}
set2 = {1,2,3}
print("Set 1 == set2 ",set1 ==set2) | true |
cc0996556412b5cdd69185bc74797640802779bd | Python | ChandlerBang/Simple-SearchEngine | /spider.py | UTF-8 | 4,458 | 2.875 | 3 | [] | no_license | from lxml import html
import os
import requests
from bs4 import BeautifulSoup
# Root URL of the MIT Shakespeare mirror; relative links are joined onto it.
seed_url = u"http://shakespeare.mit.edu/"
# Local output directory for the scraped texts.
file_folder = 'The Complete Works of William Shakespeare/'
# this part of code is beyond the range of this course
# and the process of crawling the webpage is really boring and time-consuming
# so I do not have much to comment
# if you are interested in this, you are welcome to contact me after the project closes.
def main():
    """Crawl the Shakespeare index page: plays in the first three table
    columns go through Go_to_ScenePage, poetry (4th column) through GetPoetry."""
    x = html.parse(seed_url)
    # One heading per category (Comedy / History / Tragedy / Poetry).
    categories = x.xpath('//tr/td/h2/text()')
    for i in range(1, 5):
        if (i < 4):
            book_names= x.xpath('//table[@align="center"]/tr[2]/td[{0}]/a'.format(i))
            for book in book_names:
                href1 = book.xpath('attribute::href')[0] # Now get the link for this book Act&Scene
                Go_to_ScenePage(seed_url + href1, categories[i-1], book.text) # categories[i-1] means the corresponding Comedy/Tragedy/...
        else:
            book_names= x.xpath('//table[@align="center"]/tr[2]/td/em/a')
            for book in book_names:
                href1 = book.xpath('attribute::href')[0] # Now we have this book's Act&Scene link
                GetPoetry(seed_url + href1, categories[3].replace('\n', ''), book.text.replace('\n', ''))
# go to new link page
def Go_to_ScenePage(href1, category, book):
    """Visit a play's index page and crawl every Act/Scene it links to."""
    href2 = html.parse(href1)
    # Find Act&Scene corresponding name and links
    scenes_numbers = href2.xpath('/html/body/p[starts-with(text(),"\nAct")]/text()')
    scenes_names = href2.xpath('/html/body/p[starts-with(text(),"\nAct")]/a')
    scenes_numbers = [x for x in scenes_numbers if len(x)>1] # Remove line breaks ['\n'][' ']
    for number, name in zip(scenes_numbers, scenes_names):
        number = number.replace(":", " ")[1:] # remove ['\nAct1 Scene:'] \n
        # [:-10] presumably drops a trailing 'index.html' from href1 before
        # appending the scene's relative link — TODO confirm.
        content_href = href1[:-10] + name.xpath("attribute::href")[0] # new links
        name = name.text.replace(":", " ")
        Go_to_ContentPage(content_href, category.replace('\n', ''), book.replace('\n', ''), number+name)
# go to new link page
def GetPoetry(href, category, book):
    """Save a poetry work under file_folder/<category>/<book>/.

    'The Sonnets' is split into one .txt per sonnet page; every other
    work is fetched whole and saved as plain text via BeautifulSoup.
    """
    href1 = html.parse(href)
    path = file_folder + category + r'/' + book + r'/'
    if not os.path.exists(path):
        os.makedirs(path)
    if book == 'The Sonnets':
        # Every link whose href mentions "sonnet" is an individual sonnet page.
        names = href1.xpath('//a[contains(@href,"sonnet")]')
        for name in names:
            content_href = 'http://shakespeare.mit.edu/Poetry/'+ name.xpath("attribute::href")[0]
            # Drop characters that are illegal/awkward in file names.
            name = name.text.replace('?','').replace(':', '')
            filename = path + name + '.txt'
            content_href = html.parse(content_href)
            main_text = content_href.xpath('/html/body/blockquote/text()')
            f = open(filename, 'w')
            f.write(content_href.xpath('/html/body/h1/text()')[0] +'\n')
            for text in main_text:
                f.write(text + '\n')
            f.close()
    else:
        filename = path + book + '.txt'
        r = requests.get(href, timeout=30)
        r.raise_for_status()
        r.encoding = 'utf-8'
        soup = BeautifulSoup(r.text, "html.parser")
        f = open(filename, 'w')
        f.write(soup.get_text())
        f.close()
def Go_to_ContentPage(content_href, category, book, name):
    """Download one Act/Scene page and write it as a .txt file
    (title, abstract lines, then each subtitle with its text)."""
    # go to new link
    content_href = html.parse(content_href)
    path = file_folder + category + r'/' + book + r'/'
    if not os.path.exists(path):
        os.makedirs(path)
    filename = path + name + '.txt'
    if os.path.exists(filename): # If the file already exists, it is no longer written
        return
    # Write txt file
    title = content_href.xpath('/html/body/h3/text()')[0] # Article title
    abstracts = content_href.xpath('/html/body/blockquote/i/text()') # Article Summary, May be more than one line
    # Write txt file
    f = open( filename, 'w')
    f.write(title + '\n')
    for abstract in abstracts:
        f.write(abstract +'\n')
    subtitles = content_href.xpath('/html/body/a/b/text()') # Article subtitle
    # Progress indicator: one line per scene crawled.
    print(name)
    for i in range(len(subtitles)):
        part_text = content_href.xpath('/html/body/blockquote[{0}]/a/text()'.format(i + 2)) # The text starts with blockquote[2]
        f.write(subtitles[i] + '\n')
        for text in part_text:
            f.write(text + '\n')
    f.close()
# Crawl the whole site when run as a script.
if __name__ == '__main__':
    main()
| true |
a4387882d0765e961c70ae2456c4f6b6e6463004 | Python | nikita-sunyata/codeforces | /466A/466A.py | UTF-8 | 618 | 3.375 | 3 | [] | no_license | while True :
    # Codeforces 466A: choose between n single tickets (cost a each) and
    # bundles of m rides (cost b); print the cheapest total per input line.
    try:
        data=input()
        n,m,a,b = [int(i) for i in data.split()]
        #check if same
        if b == m * a:
            print( n * a )
        else:
            # Cost using singles only.
            normal = n*a
            # Cost using whole bundles for n//m rides...
            special_part1 = (n//m)*b
            # ...plus the cheaper of singles vs one more bundle for the rest.
            if (n%m) * a <= b:
                special_part2 = (n%m)*a
                special = special_part1 + special_part2
            else:
                special_part2 = b
                special = special_part1 + special_part2
            if normal < special:
                print(normal)
            else:
                print(special)
    # Bare except: EOF (no more input lines) ends the loop.
    except:
        break | true |
5a56740f18af53da753a63b03e22c2efc943faa8 | Python | Andrey0563/Kolocvium | /№ 58.py | UTF-8 | 624 | 4.1875 | 4 | [] | no_license | '''
№58
Дан одновимірний масив цілих чисел. Знайдіть, скільки разів в ньому
повторюється найчастіше число.
Дужак Андрій 122-Г
'''
import random
# Generate 20 random integers in [-30, 30] (same number of randint calls
# as the original loop, so the random stream is unchanged).
a = [random.randint(-30, 30) for _ in range(20)]
# How many times the most frequent value occurs.  Counting each distinct
# value once replaces the original O(n^2) pair-wise scan.
b = max(a.count(value) for value in set(a))
print(a)
print(f'Найчастіше число повторюється {b} разів')
| true |
3df952d7f2830d9939b308a9d5fb69891386a379 | Python | bumpo/bot-scripts | /fleck.py | UTF-8 | 240 | 3.21875 | 3 | [] | no_license | #!/usr/bin/env python
import sys
message = " ".join(sys.argv[1:])
message = message.replace("fuck", "fleck")
message = message.replace("FUCK", "FLECK")
new_message = "Excuse me, I think you meant: {0}".format(message)
print(new_message)
| true |
2beac2eb30c0f0997dd3b2af1eec4809bed1f4a3 | Python | shivendra036/python-programs | /ex38.py | UTF-8 | 562 | 3.515625 | 4 | [] | no_license | ten_things= "Apple Oranges Crows Telephone Light Sugar"
print "Wait there is not 10 things in that list,let's fix that."
stuff = ten_things.split(' ')
more_stuff = ["Days","Night","Songs","Frisbee","Corn","Banana","Girl","boy"]
while len(stuff) != 10:
next_one = more_stuff.pop()
print "adding:",next_one
stuff.append(next_one)
print "There's %d items now." %len(stuff)
print "there we go:",more_stuff
print "let's do some things with stuff."
print stuff[1]
print stuff[-1]
print stuff.pop()
print ' '.join(stuff)
print '#'.join(stuff[3:5])
| true |
e90a667d2f87fe69ec6235058c77d92fc38d077e | Python | mike10004/subprocess-java | /src/main/site/render_readme.py | UTF-8 | 3,316 | 2.96875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env python3
"""
Program that produces the README file for the repository.
This program reads a source file in the Jinja2 template syntax and
interpolates variables defined with `--define` as well as code snippets
demarcated by `README_SNIPPET theSnippetName` in source code files.
This ensures that content such as version strings stays up to date in the
readme file and that the code snippets provided there compile.
"""
from __future__ import print_function
import re
import glob
import jinja2
import logging
from typing import List, TextIO
from collections import defaultdict
from argparse import ArgumentParser, Namespace
# Module-level logger for this script.
_log = logging.getLogger(__name__)
# Bookend marker: a line matching this opens/closes a named snippet.
_RE_SNIPPET_BOOKEND = r'^\s*//\s*README_SNIPPET\s+(?P<id>\w+)\s*.*$'
# NOTE(review): these two state constants appear unused in this file.
_STATE_INSIDE = 'in'
_STATE_OUTSIDE = 'out'
class Snippet(object):
    """A named chunk of source code delimited by README_SNIPPET bookends."""

    def __init__(self, id_, text):
        self.id = id_
        assert id_ is not None
        self.text = text or ''

    @classmethod
    def load(cls, ifile: TextIO, chop: int = 0) -> List['Snippet']:
        """Collect snippets from *ifile*.

        A line matching the bookend pattern opens a snippet; a second
        bookend with the same id closes it.  Lines in between are
        captured, each with its first *chop* characters removed.
        """
        active_id = None
        captured = defaultdict(list)
        for line in ifile:
            bookend = re.match(_RE_SNIPPET_BOOKEND, line)
            if bookend is None:
                # Ordinary line: record it only while inside a snippet.
                if active_id is not None:
                    captured[active_id].append(line)
            elif active_id is None:
                active_id = bookend.group('id')
            elif active_id == bookend.group('id'):
                active_id = None
        # Preserve first-seen order (dict insertion order).
        return [
            Snippet(snippet_id, ''.join(line[chop:] for line in lines))
            for snippet_id, lines in captured.items()
        ]
def build_model(args: Namespace):
    """Build the template model from CLI args.

    ``--define key=value`` pairs populate the model first; then snippets
    harvested from the ``--snippet-sources`` glob pattern are added under
    their snippet ids (snippets win on key collisions).
    """
    model = {}
    # args.definitions is None when no --define flag was given (argparse
    # 'append' default), so guard against iterating None.
    for definition in args.definitions or []:
        definition = definition[0]
        # maxsplit=1 so values may themselves contain '=' characters
        # (the original maxsplit=2 raised ValueError on e.g. "k=a=b").
        key, value = definition.split('=', 1)
        model[key] = value
    if args.snippet_sources:
        snippets = []
        for pathname in glob.glob(args.snippet_sources):
            with open(pathname, 'r') as ifile:
                snippets += Snippet.load(ifile, args.snippet_chop)
        for snippet in snippets:
            model[snippet.id] = snippet.text
    return model
def main():
    """Parse CLI args, render the Jinja2 template with the model, and
    write the result to the output file.  Returns the process exit code."""
    p = ArgumentParser()
    p.add_argument("template", help="template file to render")
    p.add_argument("-o", "--output", default="/dev/stdout", help="output file")
    p.add_argument("--define", dest="definitions", nargs=1, action='append', help="define a model property")
    p.add_argument("--snippet-sources", metavar="PATTERN", help="define snippet sources with a wildcard pattern")
    p.add_argument("--snippet-chop", type=int, default=0, help="number of chars to chop from front of each snippet line")
    p.add_argument("--log-level", choices=('DEBUG', 'WARN', 'INFO', 'ERROR'), default='INFO', help="set log level")
    args = p.parse_args()
    # Map the level name (e.g. 'INFO') to its numeric logging constant.
    logging.basicConfig(level=logging.__dict__[args.log_level])
    model = build_model(args)
    with open(args.template, 'r') as template_ifile:
        template_src = template_ifile.read()
    # ${var} delimiters instead of Jinja2's default {{ var }}, so literal
    # braces in the README survive rendering.
    env = jinja2.Environment(variable_start_string='${', variable_end_string='}')
    template = env.from_string(template_src)
    rendering = template.render(model)
    with open(args.output, 'w') as ofile:
        print(rendering, file=ofile)
    return 0
# Propagate main()'s return value as the process exit status.
if __name__ == '__main__':
    exit(main())
| true |
477e3217d3c7289b2cc5250f2b9d1bc0342a159c | Python | regreg/regreg | /regreg/problems/tests/test_newton.py | UTF-8 | 1,595 | 2.75 | 3 | [
"BSD-2-Clause"
] | permissive | import numpy as np
from ...atoms.seminorms import l1norm
from ...smooth.glm import glm
from ..newton import quasi_newton
from ..simple import simple_problem
def test_lagrange():
    """Quasi-Newton and the simple solver should agree on a lagrange-form
    sparse logistic regression problem."""
    n_samples, n_features, n_signal = 1000, 50, 5
    X = np.random.standard_normal((n_samples, n_features))
    beta = np.zeros(n_features)
    # Sparse truth: only the first n_signal coefficients are nonzero.
    beta[:n_signal] = 20 * np.random.standard_normal(n_signal) / np.sqrt(n_samples)
    eta = X.dot(beta)
    pi = np.exp(eta) / (1 + np.exp(eta))
    Y = np.random.binomial(1, pi)
    assert Y.shape == pi.shape
    loss = glm.logistic(X, Y)
    penalty = l1norm(n_features, lagrange=4)
    # X.T X / 4 — presumably an upper bound on the logistic Hessian
    # (weights <= 1/4); verify against quasi_newton's contract.
    qn = quasi_newton(loss, penalty, X.T.dot(X) / 4.)
    soln_newton = qn.solve(niter=1000, tol=1.e-6, maxfun=5, maxiter=5)
    problem = simple_problem(loss, penalty)
    soln_simple = problem.solve(min_its=200, tol=1.e-14)
    relative_gap = np.linalg.norm(soln_newton - soln_simple) / np.linalg.norm(soln_simple)
    assert relative_gap < 1.e-6
def test_bound():
    """Quasi-Newton and the simple solver should agree on a bound-form
    l1-constrained logistic regression problem."""
    n_samples, n_features = 1000, 50
    X = np.random.standard_normal((n_samples, n_features))
    Y = np.random.binomial(1, 0.5, size=(n_samples,))
    loss = glm.logistic(X, Y)
    penalty = l1norm(n_features, bound=0.5)
    qn = quasi_newton(loss, penalty, X.T.dot(X) / 4.)
    soln_newton = qn.solve(niter=1000, tol=1.e-10, maxfun=5, maxiter=5)
    problem = simple_problem(loss, penalty)
    soln_simple = problem.solve(tol=1.e-14)
    # Guard against a zero-norm reference solution in the denominator.
    denominator = max(np.linalg.norm(soln_simple), 1)
    assert np.linalg.norm(soln_newton - soln_simple) / denominator < 1.e-5
    assert np.fabs(problem.objective(soln_newton) - problem.objective(soln_simple)) < 1.e-6
| true |
ea6ea84829b5b2a29f65dfe9c2bc030deb96a65f | Python | nguyepe2/class_courses | /CS160 Computer Science Orientation/lab8.py | UTF-8 | 1,304 | 3.796875 | 4 | [] | no_license | import random
def main():
    """Number-guessing game: the player gets 5 distinct wrong guesses at a
    secret number in 1-20.  Repeated guesses do not consume an attempt."""
    num=random.randint(1,20)
    attempts=[]
    # for i in range(5):
    i=0
    while i < 5:
        # input() returns a string, so the secret is compared as str(num).
        guess=input("Guess a number 1-20: ")
        if guess==str(num):
#        if guess==str(num):
            print(str(guess)+" is the right number")
            break
        elif guess in attempts:
#        elif list(attempts)==guess:
            print("You've already guessed that number")
        else:
            # Only fresh wrong guesses count against the attempt budget.
            i=i+1
            print("nope")
            attempts.append(guess)
#            i=i+1
    print("Your list of incorrect guesses: "+str(attempts))
def make_multiplication_table():
    """Ask how many rows to show and print a multiplication table.

    Row r holds the products r*0 .. r*12, printed comma-separated — the
    same "0, 1, 2, ..." formatting that str(list).strip('[]') produced
    from the original hard-coded 13x13 table.  Computing each row also
    generalizes the function: it no longer raises IndexError for inputs
    above 13, and the dead per-iteration work (rebuilding the table and
    the no-op x=x+1) is gone.
    """
    n = int(input("How many rows of a multiplication table would you like to see? (1-13): "))
    for row in range(n):
        # ", ".join(...) reproduces the original str(list).strip('[]') output.
        print(", ".join(str(row * col) for col in range(13)))
# Run both interactive exercises when the module is executed.
make_multiplication_table()
main()
| true |
d92533cb97ed5d94bcc8b94ab2bbf5fcd7a9c88c | Python | teaduwow/Tutorial | /CLASS/4.number.py | UTF-8 | 429 | 4.09375 | 4 | [] | no_license | #如何只用數字 數字的用法
# Basic arithmetic demonstrations (comments translated to English).
print(8+5)
print(8*5)
# Integer (floor) division
print(8//5)
print((8+8)*5)
number = -8
# Modulo (remainder)
print(number%5)
# String concatenation (the number must be converted with str)
print("會印出數字"+str(number))
# Absolute value
print(abs(number))
# Exponentiation
print(pow(2,5))
print(max(2,100,88,3))
print(min(1,10,-10,100))
# Rounding to the nearest integer
print(round(99/7))
from math import *
# Round down unconditionally (floor)
print(floor(5.1))
# Round up unconditionally (ceiling)
print(ceil(6.3))
print(sqrt(64)) | true |
d3cb021c52475deeba3e3dcd0aa2280f86514903 | Python | KannanVS/Password_Changer | /index.py | UTF-8 | 2,477 | 3.015625 | 3 | [] | no_license | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import getpass
import json
import time
def main():
    """Automate an Instagram password change with Selenium.

    CSS selectors for every element are loaded from details.JSON; the
    credentials are read interactively (getpass hides the passwords).
    """
    # NOTE(review): the file handle is never closed; a 'with' block would
    # be safer.  Also, find_element_by_css_selector was removed in
    # Selenium 4 — this script presumably targets Selenium 3; verify.
    f = open("details.JSON", "r")
    data = json.load(f)
    # Fetching details from user
    userName = input('Enter the username: ')
    password = getpass.getpass('Enter the password: ')
    newPassword = getpass.getpass('Enter the new password: ')
    # driver initialisation and navigating to instagram
    driver = webdriver.Firefox()
    driver.get('https://www.instagram.com')
    # Explicit waits (up to 10s) for elements that load asynchronously.
    uid = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, data["username"]))) # sending username
    uid.click()
    uid.send_keys(userName)
    uid = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, data["password"]))) # sending password
    uid.click()
    uid.send_keys(password)
    time.sleep(2)
    btn = driver.find_element_by_css_selector(data['logIn']) # login to instagram
    btn.click()
    time.sleep(5)
    prf = driver.find_element_by_css_selector(data['profile']) # navigating to profile
    prf.click()
    time.sleep(5)
    setting = driver.find_element_by_css_selector(data['setting']) # navigating to setting
    setting.click()
    time.sleep(5)
    changePassword = driver.find_element_by_css_selector(data['change_password']) # selecting change password
    changePassword.click()
    time.sleep(5)
    oldPassword = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, data["old_password"]))) # entering old password in text box
    oldPassword.click()
    oldPassword.send_keys(password)
    time.sleep(2)
    newPass = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, data["new_password"]))) # entering new password in text box
    newPass.click()
    newPass.send_keys(newPassword)
    confirmPassword = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, data["confirm_password"]))) # confirmation of new password
    confirmPassword.click()
    confirmPassword.send_keys(newPassword)
    change = driver.find_element_by_css_selector(data['change']) # changing password
    change.click()
    time.sleep(5)
    print('Password changed Successfully')
# Run the password-change automation when executed as a script.
if __name__ == '__main__':
    main()
| true |
4511000b869532d0e12492521c1ec6b718bd48dc | Python | bellettif/hawkes_processes | /RNG_test/main.py | UTF-8 | 817 | 3.046875 | 3 | [] | no_license | '''
Created on 3 nov. 2013
This script computes an histogram of the values simulated
by the mt19937 pseudo random generator.
@author: francois belletti
'''
from matplotlib import pyplot as plt
from datetime import datetime
import rng
import time
# Test of the Mersenne-Twister mt19937
# NOTE: Python 2 code ('print' statement; time.clock() was removed in
# Python 3.8).
n_sims = 1000000
start_time = time.clock()
now_time = datetime.now()
# Microsecond-resolution seed derived from the current day/second/microsecond.
n_micros = (now_time.day * 24 * 60 * 60 + now_time.second) * 1e6 \
           + now_time.microsecond
temp = rng.gen_array(n_sims, n_micros)
elapsed_time = time.clock() - start_time
print 'Elapsed time %.2f' % (time.clock() - start_time)
# Fine-grained histogram of the simulated values, saved to disk.
plt.hist(temp, bins = 10000)
plt.title('mt19937 rng (%d sims, %.2f secs)' % (n_sims, elapsed_time))
plt.xlabel('genrand_real3')
plt.savefig('Rng_analysis_mt19937.png', dpi = 300)
plt.close() | true |
0a67ee707f4a73d7a967828b2c12be288ecc93c3 | Python | mattvenn/atbristol-megadrawbz | /tests/reflecto-homing/encoder/test.py | UTF-8 | 505 | 2.859375 | 3 | [] | no_license | import serial
import struct
import time
# Open the encoder's serial port at 115200 baud with a 1-second read timeout.
port_name = '/dev/ttyACM1'
print("opening port " + port_name)
enc_port=serial.Serial()
enc_port.port=port_name
enc_port.timeout=1
enc_port.baudrate=115200
enc_port.open()
# Give the board time to reset after the port is opened.
time.sleep(2)
def send(pos, port):
    """Write *pos* to *port* as a little-endian int16, then read back a
    4-byte little-endian unsigned counter from the device and return it."""
    port.write(struct.pack('<h', pos))
    # The device replies only after the stepper finishes turning, so this
    # read blocks for the duration of the motion.
    reply = port.read(4)
    (count,) = struct.unpack('<L', reply)
    return count
# Home the encoder, then poll and display the position forever.
send(0, enc_port)
while True:
    pos = send(1, enc_port)
    # Fixed: 'print pos' was Python 2 syntax — a SyntaxError under the
    # Python 3 print() calls used elsewhere in this script.
    print(pos)
    time.sleep(0.1)
| true |
eb7254892a72122092bcad724b5e8c52b12b9363 | Python | seangao14/clairvoyance | /clairvoyance/champ_utils.py | UTF-8 | 1,566 | 2.65625 | 3 | [] | no_license | import pandas as pd
import json
# rid = riot id for the champion
# maps riot id to index
def idx_from_rid():
    """Map each Riot champion id (the 'key' column) to a dense 0-based index."""
    with open('clairvoyance/data/champions.json', encoding='utf-8') as f:
        champs = json.load(f)
    df = pd.DataFrame.from_dict(champs['data'], orient='index')
    names = df['key']
    # Invert the name -> riot-id series into riot-id -> name.
    # .items() replaces .iteritems(), which was removed in pandas 2.0.
    names = pd.Series(dict((v, k) for k, v in names.items()))
    names_dict = names.to_dict()
    # maps champion name to index
    # champ_dict = dict((champ, idx) for idx, champ in enumerate(names_dict.values()))
    # maps riot champion id to index
    nums_dict = dict((champ_key, idx) for idx, champ_key in enumerate(names_dict.keys()))
    return nums_dict
# maps riot id to champion name
def name_from_rid():
    """Map each Riot champion id (the 'key' column) to the champion's name."""
    with open('clairvoyance/data/champions.json', encoding='utf-8') as f:
        champs = json.load(f)
    df = pd.DataFrame.from_dict(champs['data'], orient='index')
    names = df['key']
    # Invert the name -> riot-id series into riot-id -> name.
    # .items() replaces .iteritems(), which was removed in pandas 2.0.
    names = pd.Series(dict((v, k) for k, v in names.items()))
    names_dict = names.to_dict()
    return names_dict
def idx_from_name():
    """Map each champion name to a dense 0-based index."""
    with open('clairvoyance/data/champions.json', encoding='utf-8') as f:
        champs = json.load(f)
    df = pd.DataFrame.from_dict(champs['data'], orient='index')
    names = df['key']
    # Invert the name -> riot-id series into riot-id -> name.
    # .items() replaces .iteritems(), which was removed in pandas 2.0.
    names = pd.Series(dict((v, k) for k, v in names.items()))
    names_dict = names.to_dict()
    champ_dict = dict((champ, idx) for idx, champ in enumerate(names_dict.values()))
    return champ_dict
# Build the lookup tables once at import time.
idx_rid_dict = idx_from_rid()
name_rid_dict = name_from_rid()
idx_name_dict = idx_from_name() | true |
fd168b64360143a7c8d7d797e04350c684010180 | Python | Thewessen/hello-world | /Exercism/python/armstrong-numbers/armstrong_numbers.py | UTF-8 | 173 | 3.578125 | 4 | [
"MIT"
] | permissive | def is_armstrong_number(number: int) -> bool:
"""Checks if a number is an Armstrong number."""
n = str(number)
return sum(int(d) ** len(n) for d in n) == number
| true |
6d02747d720c2acdf884552f1799aafb6b017642 | Python | TrendingTechnology/pyrustic | /pyrustic/widget/scrollbox.py | UTF-8 | 11,453 | 3.03125 | 3 | [
"MIT",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | import tkinter as tk
from pyrustic import widget
from pyrustic.tkmisc import get_cnf
from pyrustic.view import View
# Components
CANVAS = "canvas"
BOX = "box"
HSB = "hsb"
VSB = "vsb"
# Orient
BOTH = "both"
VERTICAL = "vertical"
HORIZONTAL = "horizontal"
class Scrollbox(widget.Frame):
    """
    Scrollbox is a scrollable surface. You just need to use its property "box" as
    your layout's parent.
    Example:
        import tkinter as tk
        from pyrustic.widget.scrollbox import Scrollbox
        root = tk.Tk()
        scrollbox = Scrollbox(root)
        scrollbox.build_pack()
        # Pack 50 Label on the box
        for i in range(50):
            label = tk.Label(scrollbox.box, text="Label {}".format(i))
            label.pack(anchor=tk.W)
        root.mainloop()
    """
    def __init__(self,
                 master=None,
                 orient=VERTICAL,
                 box_sticky="nswe",
                 resizable_box=True,
                 options=None,
                 extra_options=None):
        """
        - master: widget parent. Example: an instance of tk.Frame
        - orient: could be one of: VERTICAL, HORIZONTAL, BOTH
        - options: dictionary of widgets options
            The widgets keys are: BODY, CANVAS, BOX, HSB, VSB
            Example: Assume that you want to set the CANVAS background to red
                options = {CANVAS: {"background": "red"}}
        """
        super().__init__(master=master,
                         class_="Scrollbox",
                         cnf=options if options else {},
                         on_build=self.__on_build,
                         on_display=self.__on_display,
                         on_destroy=self.__on_destroy)
        self.__orient = orient
        self.__box_sticky = box_sticky
        self.__resizable_box = resizable_box
        self.__options = options
        self.__extra_options = extra_options
        self.__canvas_options = None
        # Widget handles are created lazily in __on_build.
        self.__canvas = None
        self.__box = None
        self.__box_id = None
        self.__vsb = None
        self.__hsb = None
        # Pointer-tracking flags that gate mouse-wheel scrolling.
        self.__hsb_under_mouse = False
        self.__is_scrollable = False
        self.__components = {}
        # build
        self.__view = self.build()
    # ==============================================
    #                 PROPERTIES
    # ==============================================
    @property
    def box(self):
        return self.__box
    @property
    def orient(self):
        return self.__orient
    @property
    def components(self):
        """
        Get the components (widgets instances) used to build this scrollbox.
        This property returns a dict. The keys are:
        BODY, CANVAS, BOX, HSB, VSB
        Warning: check the presence of key before usage. Example,
        the widget linked to the HSB key may be missing because
        only VSB is used
        """
        return self.__components
    # ==============================================
    #                 PUBLIC METHODS
    # ==============================================
    def xview_moveto(self, fraction):
        """
        Calls canvas's method 'xview_moveto'
        Set:
            - 0: to scroll to left
            - 1: to scroll to right
        """
        if self.__canvas:
            self.update_idletasks()
            self.__canvas.xview_moveto(fraction)
    def yview_moveto(self, fraction):
        """
        Calls canvas's method 'yview_moveto'
        Set:
            - 0: to scroll to top
            - 1: to scroll to bottom
        """
        if self.__canvas:
            self.update_idletasks()
            self.__canvas.yview_moveto(fraction)
    def box_config(self, **options):
        """
        As the BOX is an item compared to CANVAS, some
        the options concerning the BOX can be edited only via
        CANVAS "itemconfig" method.
        Use this method to edit these options.
        itemconfig options are: anchor, state, height, width.
        Warning: these options are not the same as the arguments
        of BOX's own constructor !
        """
        if self.__box:
            self.__canvas.itemconfig(self.__box_id, cnf=options)
    def clear(self):
        """
        Clears the Scrollbox.
        This method doesn't destruct this object but BOX's children
        """
        if self.__box:
            for x in self.__box.winfo_children():
                x.destroy()
    # ==============================================
    #                 PRIVATE METHODS
    # ==============================================
    def __on_build(self):
        # Track pointer presence so wheel events only scroll this widget.
        self.bind("<Enter>", self.__on_enter_body, "+")
        self.bind("<Leave>", self.__on_leave_body, "+")
        self.bind("<Unmap>", self.__on_unmap_body, "+")
        self.bind("<Destroy>", self.__on_destroy_body, "+")
        # <MouseWheel> is Windows/mac; <Button-4>/<Button-5> are X11.
        self.bind_all("<MouseWheel>", self.__on_mouse_wheel, "+")
        self.bind_all("<Button-4>", self.__on_mouse_wheel, "+")
        self.bind_all("<Button-5>", self.__on_mouse_wheel, "+")
        self.columnconfigure(0, weight=1, uniform=1)
        self.rowconfigure(0, weight=1, uniform=1)
        self.winfo_toplevel().bind("<Configure>",
                                   self.__on_configure_box_canvas, "+")
        # canvas
        self.__canvas = tk.Canvas(self,
                                  name=CANVAS,
                                  width=0,
                                  height=0,
                                  cnf=get_cnf(CANVAS,
                                              self.__extra_options))
        self.__components[CANVAS] = self.__canvas
        self.__canvas.grid(row=0, column=0, sticky=self.__box_sticky)
        # box
        self.__box = tk.Frame(self.__canvas,
                              name=BOX,
                              cnf=get_cnf(BOX, self.__extra_options))
        self.__components[BOX] = self.__box
        self.__box_id = self.__canvas.create_window(0, 0, window=self.__box, anchor="nw")
        self.__box.bind("<Configure>", self.__on_configure_box_canvas, "+")
        # scrollbar
        self.__set_scrollbars()
    def __on_display(self):
        pass
    def __on_destroy(self):
        self.__unbind_funcs()
    def __on_mouse_wheel(self, event):
        if not self.__orient or not self.__is_scrollable:
            return
        # scroll down (value: 1) -> event.num = 5 or event.delta < 0
        # scroll up (value: -1) -> event.num = 4 or event.delta >= 0
        scroll = 1 if event.num == 5 or event.delta < 0 else -1
        if self.__orient in ("horizontal", "x", "h"):
            self.__canvas.xview_scroll(scroll, "units")
        elif self.__orient in ("both", "vertical", "y", "v"):
            # Wheeling while hovering the horizontal bar scrolls sideways.
            if self.__hsb_under_mouse:
                self.__canvas.xview_scroll(scroll, "units")
            else:
                self.__canvas.yview_scroll(scroll, "units")
    def __set_scrollbars(self):
        # Create only the scrollbars the requested orient needs.
        if self.__orient in ("both", "horizontal", "h", "x"):
            self.__hsb = tk.Scrollbar(self, orient="horizontal",
                                      name=HSB,
                                      command=self.__canvas.xview,
                                      cnf=get_cnf(HSB, self.__extra_options))
            self.__components[HSB] = self.__hsb
            self.__hsb.grid(row=1, column=0, columnspan=2, sticky="swe")
            self.__canvas.config(xscrollcommand=self.__hsb.set)
            self.__bind_enter_leave_to_hsb()
        if self.__orient in ("both", "vertical", "v", "y"):
            self.__vsb = tk.Scrollbar(self, orient="vertical",
                                      name=VSB,
                                      command=self.__canvas.yview,
                                      cnf=get_cnf(VSB, self.__extra_options))
            self.__components[VSB] = self.__vsb
            self.__vsb.grid(row=0, column=1, sticky=self.__box_sticky)
            self.__canvas.config(yscrollcommand=self.__vsb.set)
    def __bind_enter_leave_to_hsb(self):
        def enter_hsb(event):
            self.__hsb_under_mouse = True
        def leave_hsb(event):
            self.__hsb_under_mouse = False
        self.__hsb.bind('<Enter>', enter_hsb, "+")
        self.__hsb.bind('<Leave>', leave_hsb, "+")
    def __on_configure_box_canvas(self, event):
        # Keep the box and canvas sizes in sync along the non-scrolling
        # axis, then refresh the scrollable region.
        if self.__box:
            if self.__orient in ("horizontal", "h", "x"):
                if self.__resizable_box:
                    self.__canvas.itemconfig(self.__box_id,
                                             height=self.__canvas.winfo_height())
                else:
                    self.__canvas.config(height=self.__box.winfo_height())
            elif self.__orient in ("vertical", "v", "y"):
                if self.__resizable_box:
                    self.__canvas.itemconfig(self.__box_id,
                                             width=self.__canvas.winfo_width())
                else:
                    self.__canvas.config(width=self.__box.winfo_width())
        self.__canvas.config(scrollregion=self.__canvas.bbox("all"))
    def __on_enter_body(self, event):
        self.__is_scrollable = True
    def __on_leave_body(self, event):
        self.__is_scrollable = False
    def __on_unmap_body(self, event):
        self.__is_scrollable = False
    def __on_destroy_body(self, event):
        self.__is_scrollable = False
    def __unbind_funcs(self):
        # Best-effort teardown: unbind failures during widget destruction
        # are deliberately ignored.
        try:
            for val in ("<Enter>", "<Leave>",
                        "<Unmap>", "<Destroy>",
                        "<MouseWheel>", "<Button-4>",
                        "<Button-5>", "<Configure>"):
                self.unbind(val)
        except Exception as e:
            pass
class _ScrollboxTest(View):
    """Manual demo view: a vertical and a horizontal scrollbox, each with
    an Add button that packs another label into its box."""
    def __init__(self, root):
        super().__init__()
        self._root = root
        self._body = None
    def _on_build(self):
        self._body = tk.Frame(self._root)
        # Pane 1
        pane_1 = tk.Frame(self._root)
        pane_1.pack(side=tk.LEFT, padx=10,
                    pady=10, expand=1, fill=tk.BOTH)
        # Scrollbox 1
        scrollbox_1 = Scrollbox(pane_1, orient=VERTICAL)
        scrollbox_1.pack(pady=5, expand=1, fill=tk.BOTH)
        # Button 1
        # Default-argument lambda binds box/side at definition time.
        command = (lambda self=self, box=scrollbox_1.box, side=tk.TOP:
                   self._on_click_add(box, side))
        button_1 = tk.Button(pane_1, text="Add",
                             command=command)
        button_1.pack(side=tk.BOTTOM)
        # Pane 2
        pane_2 = tk.Frame(self._root)
        pane_2.pack(side=tk.LEFT, padx=10,
                    pady=10, expand=1, fill=tk.BOTH)
        # Scrollbox 2
        scrollbox_2 = Scrollbox(pane_2, orient=HORIZONTAL)
        scrollbox_2.pack(pady=5, expand=1, fill=tk.BOTH)
        # Button 2
        command = (lambda self=self, box=scrollbox_2.box, side=tk.LEFT:
                   self._on_click_add(box, side))
        button_2 = tk.Button(pane_2, text="Add",
                             command=command)
        button_2.pack(side=tk.BOTTOM)
    def _on_display(self):
        pass
    def _on_destroy(self):
        pass
    def _on_click_add(self, frame, side=tk.TOP):
        label = tk.Label(frame, text="Hello Friend")
        label.pack(side=side)
if __name__ == "__main__":
root = tk.Tk()
scrollbox_test = _ScrollboxTest(root)
scrollbox_test.build_pack(fill=tk.BOTH, expand=1)
root.mainloop()
| true |
cb79010677f1e01ec09051be63055bbf918016b0 | Python | svetakeda/project_oop_1 | /book.py | UTF-8 | 700 | 3.5625 | 4 | [] | no_license | class Book:
    def __init__(self, name, year, author, cost):
        # Name-mangled attributes keep the fields private; access goes
        # through the properties defined later in the class.
        self.__name = name
        self.__year = year
        self.__author = author
        self.__cost = cost
    @property
    def name(self):
        # Read-only access to the book's title.
        return self.__name
    @property
    def year(self):
        # Read-only access to the publication year.
        return self.__year
    @property
    def author(self):
        # Read-only access to the author.
        return self.__author
    @property
    def cost(self):
        # Current price of the book.
        return self.__cost
    @cost.setter
    def cost(self, value):
        # Cost is the only field that may be reassigned after construction.
        self.__cost = value
    def __str__(self):
        # Human-readable form: "name year author cost".
        return f"{self.__name} {self.__year} {self.__author} {self.__cost}"
    def __repr__(self):
        # Same rendering as __str__ (not a round-trippable repr).
        return f"{self.__name} {self.__year} {self.__author} {self.__cost}"
| true |
66dd140a57094f9f2d4fd11d671ee2f34b4c5657 | Python | iacsstudent/quizgame | /quizgame.py | UTF-8 | 536 | 3.265625 | 3 | [] | no_license | import math
import random
# This is my quiz game
# I'm going to put my test functions and stuff in here
def get_addition_question(level):
    """Return (question_text, answer) for a random addition whose
    operands are drawn uniformly from 1..10*level."""
    left = random.randint(1, 10 * level)
    right = random.randint(1, 10 * level)
    return '%i+%i' % (left, right), left + right
def test_get_addition_question():
    """Smoke test: print three sample questions for each level 1-9.

    Converted from Python 2 ``print`` statements (a SyntaxError under
    Python 3); the comma-separated form maps to multi-argument print(),
    which produces the same space-separated output.
    """
    for l in range(1, 10):
        print('Level ', l, 'questions:')
        for tst in range(3):
            q, a = get_addition_question(l)
            print('Question: ', q, 'Answer:', a)
# Run the smoke test whenever the module is executed/imported.
test_get_addition_question()
| true |