blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5e771c96653ccd976e1cda3327980c4cadbd0201
|
8fb7a7b4fb09ce457ad413d19191235cf4805851
|
/notes code/segment image/watershed/watershed.py
|
cebf084c38d6b13d61bf5ce98e5883c3ad31b9e6
|
[] |
no_license
|
clambering-goat/honner-progect
|
df8ab2e22c223cf0f8cb59b93b132eea3d9030f2
|
ea996ea34ac13867dea6d4935f9760c6915b206f
|
refs/heads/master
| 2020-04-15T19:32:57.303438
| 2019-05-13T17:51:56
| 2019-05-13T17:51:56
| 164,954,694
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,163
|
py
|
# Watershed segmentation demo (OpenCV): separates touching objects in an image
# by flooding from "sure foreground" markers.  Shows intermediate results in a
# window; press any key to advance past each imshow.
import numpy as np
import cv2
#img = cv2.imread('coins.jpg')
#img=cv2.imread("depth_carmea.png")
img=cv2.imread("image_3.png")
# Invert the image — assumes objects are dark on a light background; confirm
# against the actual input image.
img = cv2.bitwise_not(img)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# Otsu chooses the threshold automatically (the explicit 0 is ignored).
ret, thresh = cv2.threshold(gray,0,255,cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)
cv2.imshow("frame",thresh)
cv2.waitKey()
# noise removal: morphological opening with a 3x3 kernel
kernel = np.ones((3,3),np.uint8)
opening = cv2.morphologyEx(thresh,cv2.MORPH_OPEN,kernel, iterations = 4)
# sure background area: dilate so everything outside is certainly background
sure_bg = cv2.dilate(opening,kernel,iterations=5)
# Finding sure foreground area via the distance transform:
# pixels farther than 70% of the max distance from any edge are surely inside.
dist_transform = cv2.distanceTransform(opening,cv2.DIST_L2,5)
ret, sure_fg = cv2.threshold(dist_transform,0.7*dist_transform.max(),255,0)
# Finding unknown region: background minus foreground = uncertain band
sure_fg = np.uint8(sure_fg)
unknown = cv2.subtract(sure_bg,sure_fg)
cv2.imshow("frame",sure_fg)
cv2.waitKey()
# Marker labelling: each connected foreground component gets a unique label
ret, markers = cv2.connectedComponents(sure_fg)
# Add one to all labels so that sure background is not 0, but 1
markers = markers+1
# Now, mark the region of unknown with zero (watershed decides those pixels)
markers[unknown==255] = 0
# Watershed floods from the markers; boundary pixels come back labelled -1.
markers = cv2.watershed(img,markers)
img[markers == -1] = [255,0,0]  # paint watershed boundaries blue (BGR order)
cv2.imshow("frame",img)
cv2.waitKey()
|
[
"camerondrain@gmail.com"
] |
camerondrain@gmail.com
|
41445ab01c3c5a306e5316ff89bad93d2ad8b5ef
|
197fddc8a034c8736c0cfd8629d37d4a5968e730
|
/diffevol.py
|
033d495824a47ca32635b8150ad758566d641e64
|
[] |
no_license
|
xomaiya/LRR_DE
|
127b2de32b6723e64002b3ad82f88c5b8cab9e20
|
428432fb8179d068a5094a412f3d06d4fd686a25
|
refs/heads/master
| 2020-07-27T04:54:10.834849
| 2020-03-03T13:14:33
| 2020-03-03T13:14:33
| 208,875,612
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,780
|
py
|
import random
import time
import numpy as np
import scipy.sparse
from ridgeRegression import RidgeRegression, LOOCV
from moleculToVector import StructDescription, AmberCoefficients
from xyz2bat import xyz2bat2constr_H_map, xyz2bat2constr_HH_map
import matplotlib.pyplot as plt
from fast_ridge_regression import FastJaxRidgeRegression
class DE:
    """Differential evolution over force-field hyperparameters.

    Each genome is a flat vector ``[l, bonds..., angles..., torsions..., qs...,
    sigmas..., epsilons...]`` where ``l`` is the ridge-regression regularizer
    and the remaining entries are force-field parameters (see genes2thetas).
    """

    def __init__(self, all_coords, struct_description: StructDescription, amber_coeffs: AmberCoefficients, y,
                 test_structs):
        self.all_coords = all_coords
        self.y = y
        self.struct_description = struct_description
        self.amber_coeffs = amber_coeffs
        self.test_structs = test_structs
        self.fjrr = FastJaxRidgeRegression()
        # QM forces reshaped per structure: 57 atoms x 3 components each.
        # TODO confirm the atom count is fixed at 57 for this system.
        self.forces = np.array(y.reshape(-1, 57, 3))

    def genes2thetas(self, x):
        """Split a flat genome vector into (l, thetas).

        The slice lengths follow the layout of AmberCoefficients; the assert
        guarantees the genome is fully consumed.  Bond lengths, sigmas and
        epsilons are forced positive via abs().
        """
        l_bonds = len(self.amber_coeffs.bonds_zero_values)
        l_angles = len(self.amber_coeffs.angles_zero_values)
        l_torsions = len(self.amber_coeffs.torsions_zero_phase)
        l_q = len(self.amber_coeffs.qs)
        l_sigma = len(self.amber_coeffs.sigma_for_vdw)
        l_epsilon = len(self.amber_coeffs.epsilons_for_vdw)

        l, x = x[0], x[1:]
        bonds, x = x[:l_bonds], x[l_bonds:]
        angles, x = x[:l_angles], x[l_angles:]
        torsions, x = x[:l_torsions], x[l_torsions:]
        q, x = x[:l_q], x[l_q:]
        sigma_for_vdw, x = x[:l_sigma], x[l_sigma:]
        epsilon_for_vdw, x = x[:l_epsilon], x[l_epsilon:]
        assert len(x) == 0  # genome length must match the parameter counts

        thetas = {'bonds': np.abs(bonds),
                  'angles': angles,
                  'torsions': torsions,
                  'q': q,
                  'sigma_for_vdw': np.abs(sigma_for_vdw),
                  'epsilon_for_vdw': np.abs(epsilon_for_vdw)}
        return l, thetas

    def f(self, x):
        """Objective function: LOOCV-style error of the ridge fit for genome x."""
        start_time = time.time()
        l, thetas = self.genes2thetas(x)
        HH = xyz2bat2constr_HH_map(self.all_coords, self.struct_description.as_dict(), thetas)
        # Slower scipy-based path, kept for reference:
        # HH = HH.reshape(-1, HH.shape[-1])
        # HH = scipy.sparse.csr_matrix(HH)
        # _, y_est = RidgeRegression(HH, self.y, l)
        # err = LOOCV(HH, self.y, y_est)
        C, y_est, err = self.fjrr.calculate(HH, self.forces, l)
        print(f'err: {err}')
        print(f'time: {time.time() - start_time}')
        return err

    def f_for_population(self, P):
        """Evaluate the objective for every genome in population P.

        Constraints are enforced in place first: bond lengths are made
        positive and charges are clipped to [-0.5, 0.5].
        """
        b = len(self.struct_description.bonds)
        a = len(self.struct_description.angles)
        t = len(self.struct_description.torsions)
        p = len(self.struct_description.pairs)
        # TODO: read thetas from file and vary them around the read values:
        # |delta bond_0| < 0.5 A, |delta angle_0| < 15 deg, |q| < 0.5 C and
        # sum(q) < 1, |sigma| < 0.8 A
        # constraint: bond lengths must be positive
        P[:, 0:b] = np.abs(P[:, 0:b])
        # constraint: charges limited to [-0.5, 0.5]
        P[:, b + a + t + p:b + a + t + p * 2] = np.clip(P[:, b + a + t + p:b + a + t + p * 2], -0.5, 0.5)
        return np.array([self.f(p) for p in P])

    def mutation(self, P, F):
        """Build the donor population V[i] = P[p] + F * (P[q] - P[r]).

        NOTE(review): canonical DE draws p, q, r distinct from each other and
        from i; here they may coincide, occasionally producing a plain copy.
        Preserved as-is to avoid changing the search dynamics.
        """
        V = np.zeros_like(P)
        N = P.shape[0]
        for i in range(N):
            p = random.randint(0, N - 1)
            q = random.randint(0, N - 1)
            r = random.randint(0, N - 1)
            V[i] = P[p] + F * (P[q] - P[r])
        return V

    def crossover(self, V, P, Cr):
        """Binomial crossover of donors V with parents P at rate Cr.

        Bug fixes versus the original implementation:
        * the crossover draw used np.random.randn() (a standard-normal
          sample) instead of a uniform sample in [0, 1), so Cr did not act
          as a probability at all;
        * j_rand was drawn with randint(0, k - 1), whose high bound is
          exclusive, so the last gene could never be the forced position.
        """
        U = np.zeros_like(V)
        N = P.shape[0]
        k = P.shape[1]
        j_rand = np.random.randint(0, k)  # forced crossover position
        for i in range(N):
            for j in range(k):
                r = np.random.rand()  # uniform in [0, 1)
                if r <= Cr or j == j_rand:
                    U[i, j] = V[i, j]
                else:
                    U[i, j] = P[i, j]
        return U

    def selection(self, P, fp, U):
        """Greedy selection: keep the trial vector wherever it scores lower."""
        fu = self.f_for_population(U)
        to_replace = fp > fu
        P[to_replace] = U[to_replace]
        fp[to_replace] = fu[to_replace]
        return P, fp

    def run(self, k, N, F=0.7, Cr=0.85):
        """Run differential evolution (loops forever; best genome in self.best_p).

        :param k: genome length (number of optimized hyperparameters [l, thetas])
        :param N: population size (number of trial-solution vectors)
        :param F: differential weight (mutation scale of the donor vector)
        :param Cr: crossover coefficient (crossover rate)
        :return: never returns; interrupt externally
        """
        P = self.amber_coeffs.get_theta() + 0.001 * np.random.randn(N, k)
        fp = self.f_for_population(P)
        while True:
            V = self.mutation(P, F)
            U = self.crossover(V, P, Cr)
            P, fp = self.selection(P, fp, U)
            self.best_p = P[np.argmin(fp)]
            # self.test_for_best_p()
            # print(np.min(fp))

    def test(self, C, thetas):
        """Report the correlation between predicted and QM energies/forces on
        the held-out test structures, then show a scatter plot."""
        energy_test_qm = np.array([struct.energy for struct in self.test_structs])
        forces_test_qm = np.array([struct.forces for struct in self.test_structs])
        test_all_coords = np.array([struct.coords for struct in self.test_structs])
        H = xyz2bat2constr_H_map(test_all_coords, self.struct_description.as_dict(), thetas)
        HH = xyz2bat2constr_HH_map(test_all_coords, self.struct_description.as_dict(), thetas)
        energy_test_mm = H.dot(C)
        forces_test_mm = HH.dot(C)
        print(f'forces error for test train:\t{((forces_test_qm - forces_test_mm) ** 2).sum(axis=(1, 2)).mean()}')
        print(f'energy correlation for test train:\t{np.corrcoef(energy_test_qm, energy_test_mm)[0][1]}')
        plt.scatter(energy_test_qm, energy_test_mm)
        plt.show()
        print()
        print()

    def test_for_best_p(self):
        """Refit on the training set with the current best genome and report."""
        l, thetas = self.genes2thetas(self.best_p)
        HH = xyz2bat2constr_HH_map(self.all_coords, self.struct_description.as_dict(), thetas)
        C, energy_est = RidgeRegression(scipy.sparse.csr_matrix(HH.reshape(-1, HH.shape[-1])), self.y, l)
        predicted = HH.dot(C)
        true = self.y.reshape(predicted.shape)
        print(f'forces error for train train:\t{((predicted - true) ** 2).sum(axis=(1, 2)).mean()}')
        self.test(C, thetas)
|
[
"xomaiya@gmail.com"
] |
xomaiya@gmail.com
|
108aa5f4a3f4b5816f1daadda9e00e2530f73728
|
148ea30a45676e77a96822711ac683237361c116
|
/course/laboratoriski-vezbi/lab05-tf-idf/zad01-najslicen-dokument.py
|
d3a6a76c43b339bc31948630af709ccf764eb4b6
|
[] |
no_license
|
marioanchevski/SNZ
|
d0d0994967482794bab2d33f88abe56d2a3a1ff2
|
d4dfabecd88f8e116cebdaf9b4e73236834a6554
|
refs/heads/master
| 2023-02-25T15:38:06.291069
| 2021-01-29T21:35:57
| 2021-01-29T21:35:57
| 315,758,240
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,440
|
py
|
from frameworks.tfidf import *
# Labelled training corpus for the TF-IDF similarity task: a list of
# (document_text, category) pairs with two categories, "science" and "sport".
train_data = [
    ("""What Are We Searching for on Mars?
Martians terrified me growing up. I remember watching the 1996 movie Mars Attacks! and fearing that the Red Planet harbored hostile alien neighbors. Though I was only 6 at the time, I was convinced life on Mars meant little green men wielding vaporizer guns. There was a time, not so long ago, when such an assumption about Mars wouldn't have seemed so far-fetched.
Like a child watching a scary movie, people freaked out after listening to "The War of the Worlds," the now-infamous 1938 radio drama that many listeners believed was a real report about an invading Martian army. Before humans left Earth, humanity's sense of what-or who-might be in our galactic neighborhood was, by today's standards, remarkably optimistic.
""",
     "science"),
    ("""Mountains of Ice are Melting, But Don't Panic (Op-Ed)
If the planet lost the entire West Antarctic ice sheet, global sea level would rise 11 feet, threatening nearly 13 million people worldwide and affecting more than $2 trillion worth of property.
Ice loss from West Antarctica has been increasing nearly three times faster in the past decade than during the previous one - and much more quickly than scientists predicted.
This unprecedented ice loss is occurring because warm ocean water is rising from below and melting the base of the glaciers, dumping huge volumes of additional water - the equivalent of a Mt. Everest every two years - into the ocean.
""",
     "science"),
    ("""Some scientists think we'll find signs of aliens within our lifetimes. Here's how.
Finding extraterrestrial life is the essence of science fiction. But it's not so far-fetched to predict that we might find evidence of life on a distant planet within a generation.
"With new telescopes coming online within the next five or ten years, we'll really have a chance to figure out whether we're alone in the universe," says Lisa Kaltenegger, an astronomer and director of Cornell's new Institute for Pale Blue Dots, which will search for habitable planets. "For the first time in human history, we might have the capability to do this."
""",
     "science"),
    ("""'Magic' Mushrooms in Royal Garden: What Is Fly Agaric?
Hallucinogenic mushrooms are perhaps the last thing you'd expect to find growing in the Queen of England's garden.
Yet a type of mushroom called Amanita muscaria — commonly known as fly agaric, or fly amanita — was found growing in the gardens of Buckingham Palace by the producers of a television show, the Associated Press reported on Friday (Dec. 12).
A. muscaria is a bright red-and-white mushroom, and the fungus is psychoactive when consumed.
""",
     "science"),
    ("""Upcoming Parks : 'Lost Corner' Finds New Life in Sandy Springs
At the corner of Brandon Mill Road, where Johnson Ferry Road turns into Dalrymple Road, tucked among 24 forested acres, sits an early 20th Century farmhouse. A vestige of Sandy Springs' past, the old home has found new life as the centerpiece of Lost Forest Preserve. While the preserve isn't slated to officially debut until some time next year, the city has opened the hiking trails to the public until construction begins on the permanent parking lot (at the moment the parking lot is a mulched area). The new park space includes community garden plots, a 4,000-foot-long hiking trail and an ADA-accessible trail through the densely wooded site. For Atlantans seeking an alternate escape to serenity (or those who dig local history), it's certainly worth a visit.
""",
     "science"),
    ("""Stargazers across the world got a treat this weekend when the Geminids meteor shower gave the best holiday displays a run for their money.
The meteor shower is called the "Geminids" because they appear as though they are shooting out of the constellation of Gemini. The meteors are thought to be small pieces of an extinct comment called 3200 Phaeton, a dust cloud revolving around the sun. Phaeton is thought to have lost all of its gas and to be slowly breaking apart into small particles.
Earth runs into a stream of debris from 3200 Phaethon every year in mid-December, causing a shower of meteors, which hit its peak over the weekend.
""",
     "science"),
    ("""Envisioning a River of Air
By the classification rules of the world of physics, we all know that the Earth's atmosphere is made of gas (rather than liquid, solid, or plasma). But in the world of flying it's often useful to think
""",
     "science"),
    ("""Following Sunday's 17-7 loss to the Seattle Seahawks, the San Francisco 49ers were officially eliminated from playoff contention, and they have referee Ed Hochuli to blame. OK, so they have a lot of folks to point the finger at for their 7-7 record, but Hochuli's incorrect call is the latest and easiest scapegoat.
"""
     , "sport"),
    ("""Kobe Bryant and his teammates have an odd relationship. That makes sense: Kobe Bryant is an odd guy, and the Los Angeles Lakers are an odd team.
They’re also, for the first time this season, the proud owners of a three-game winning streak. On top of that, you may have heard, Kobe Bryant passed Michael Jordan on Sunday evening to move into third place on the NBA’s all-time scoring list.
"""
     , "sport"),
    ("""The Patriots continued their divisional dominance and are close to clinching home-field advantage throughout the AFC playoffs. Meanwhile, both the Colts and Broncos again won their division titles with head-to-head wins.The Bills' upset of the Packers delivered a big blow to Green Bay's shot at clinching home-field advantage throughout the NFC playoffs. Detroit seized on the opportunity and now leads the NFC North.
"""
     , "sport"),
    ("""If you thought the Washington Redskins secondary was humbled by another scintillating performance from New Yorks Giants rookie wide receiver sensation Odell Beckham Jr., think again.In what is becoming a weekly occurrence, Beckham led NFL highlight reels on Sunday, collecting 12 catches for 143 yards and three touchdowns in Sunday's 24-13 victory against an NFC East rival.
"""
     , "sport")
    , ("""That was two touchdowns and 110 total yards for the three running backs. We break down the fantasy implications.The New England Patriots' rushing game has always been tough to handicap. Sunday, all three of the team's primary running backs put up numbers, and all in different ways, but it worked for the team, as the Patriots beat the Miami Dolphins, 41-13.
"""
     , "sport"),
    ("""General Santos (Philippines) (AFP) - Philippine boxing legend Manny Pacquiao vowed to chase Floyd Mayweather into ring submission after his US rival offered to fight him next year in a blockbuster world title face-off. "He (Mayweather) has reached a dead end. He has nowhere to run but to fight me," Pacquiao told AFP late Saturday, hours after the undefeated Mayweather issued the May 2 challenge on US television. The two were long-time rivals as the "best pound-for-pound" boxers of their generation, but the dream fight has never materialised to the disappointment of the boxing world.
"""
     , "sport"),
    ("""When St. John's landed Rysheed Jordan, the consensus was that he would be an excellent starter.
So far, that's half true.
Jordan came off the bench Sunday and tied a career high by scoring 24 points to lead No. 24 St. John's to a 74-53 rout of Fordham in the ECAC Holiday Festival.
''I thought Rysheed played with poise,'' Red Storm coach Steve Lavin said. ''Played with the right pace. Near perfect game.''
"""
     , "sport"),
    ("""Five-time world player of the year Marta scored three goals to lead Brazil to a 3-2 come-from-behind win over the U.S. women's soccer team in the International Tournament of Brasilia on Sunday. Carli Lloyd and Megan Rapinoe scored a goal each in the first 10 minutes to give the U.S. an early lead, but Marta netted in the 19th, 55th and 66th minutes to guarantee the hosts a spot in the final of the four-team competition.
"""
     , "sport")
]
if __name__ == '__main__':
    text = input()
    # Strip the labels: rank_documents works on the raw document texts only.
    docs = [document for document, _label in train_data]

    # Best match by cosine similarity.
    index_k = rank_documents(text, docs, cosine)[0][1]
    print(train_data[index_k][0])

    # Best match by Pearson correlation.
    index_p = rank_documents(text, docs, pearson)[0][1]
    print(train_data[index_p][0])

    # Report whether the two similarity measures agree on the top document.
    print('True' if index_p == index_k else 'False')
|
[
"marioancevski25@gmail.com"
] |
marioancevski25@gmail.com
|
2f6d15e2504f558d4d885bca4b89803442c707a5
|
159aed4755e47623d0aa7b652e178296be5c9604
|
/data/scripts/templates/object/draft_schematic/weapon/shared_rifle_blaster_dlt20a.py
|
30238be987d3bd0ff0267b1a64c7908017e392d9
|
[
"MIT"
] |
permissive
|
anhstudios/swganh
|
fb67d42776864b1371e95f769f6864d0784061a3
|
41c519f6cdef5a1c68b369e760781652ece7fec9
|
refs/heads/develop
| 2020-12-24T16:15:31.813207
| 2016-03-08T03:54:32
| 2016-03-08T03:54:32
| 1,380,891
| 33
| 44
| null | 2016-03-08T03:54:32
| 2011-02-18T02:32:45
|
Python
|
UTF-8
|
Python
| false
| false
| 456
|
py
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the draft-schematic template for the DLT20a blaster rifle.

    Autogenerated factory: returns an Intangible whose template and STF
    name fields point at the shared_rifle_blaster_dlt20a assets.  The
    `kernel` argument is part of the generated interface and is unused here.
    """
    result = Intangible()

    result.template = "object/draft_schematic/weapon/shared_rifle_blaster_dlt20a.iff"
    result.attribute_template_id = -1
    result.stfName("string_id_table","")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
[
"rwl3564@rit.edu"
] |
rwl3564@rit.edu
|
1788caa7d83ad3c675752e43756da24a7a89a7ea
|
590c9237ba14c74b35ef460587d485cc10441cd9
|
/data.py
|
ed49f09a14e03b68999cb87390c3e86c68e344ec
|
[] |
no_license
|
trongphuongpro/expensetracker
|
5198c5a3cd03a24df92c4df879121ace8979e0ab
|
b654d0dc05068fd512de6c20ad27d4a55aa3f3fe
|
refs/heads/main
| 2023-03-13T12:33:22.871881
| 2021-03-01T16:11:43
| 2021-03-01T16:11:43
| 336,000,367
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,124
|
py
|
from enum import IntEnum
# Command codes for the expense-tracker state machine, built with the
# functional IntEnum API; member names and integer values are identical to
# the original class-based definition.
CommandCode = IntEnum('CommandCode', {
    'WAIT': 0,
    'ADD': 1,
    'CHECK': 2,
    'UPDATE': 3,
    'BACK': 4,
    'QUIT': 5,
})
# Maps CommandCode -> {'command': display name, 'action': handler name}.
# NOTE(review): the 'action' values are strings, not callables — presumably
# resolved to functions elsewhere (e.g. via getattr); confirm against callers.
command_map = {
    CommandCode.ADD: {'command': 'add', 'action': 'func_add'},
    CommandCode.CHECK: {'command': 'check', 'action': 'func_check'},
    CommandCode.UPDATE: {'command': 'update', 'action': 'func_update'},
    CommandCode.WAIT: {'command': 'wait', 'action': 'func_wait'},
    CommandCode.BACK: {'command': 'back', 'action': 'func_back'},
    CommandCode.QUIT: {'command': 'quit', 'action': 'func_quit'}
}

# Reverse lookup: user-typed command word (full word or single-letter
# shortcut) -> CommandCode.
command_code_map = {
    'wait': CommandCode.WAIT,
    'add': CommandCode.ADD, 'a': CommandCode.ADD,
    'check': CommandCode.CHECK, 'c': CommandCode.CHECK,
    'update': CommandCode.UPDATE, 'u': CommandCode.UPDATE,
    'back': CommandCode.BACK,
    'quit': CommandCode.QUIT
}

# Top-level (idle) menu: single-key choices offered while waiting.
menu_wait = {
    'a': {'command': 'add', 'action': 'func_add'},
    'c': {'command': 'check', 'action': 'func_check'},
    'u': {'command': 'update', 'action': 'func_update'},
    'q': {'command': 'quit', 'action': 'func_quit'}
}

# "Add expense" menu: digit keys select a budget category (labels are in
# Vietnamese, e.g. 'Quy chi tieu can thiet' = essential spending fund);
# letter keys are undo/back/quit actions.
menu_add = {
    '1': {'command': 'Quy chi tieu can thiet', 'category': 'living', 'action': 'updateDatabase'},
    '2': {'command': 'Quy tiet kiem dai han', 'category': 'saving', 'action': 'updateDatabase'},
    '3': {'command': 'Quy giao duc', 'category': 'education', 'action': 'updateDatabase'},
    '4': {'command': 'Quy huong thu', 'category': 'playing', 'action': 'updateDatabase'},
    '5': {'command': 'Quy tu do tai chinh', 'category': 'free', 'action': 'updateDatabase'},
    '6': {'command': 'Quy tu thien', 'category': 'giving', 'action': 'updateDatabase'},
    'z': {'command': 'undo', 'action': 'deleteLastExpense'},
    'b': {'command': 'back', 'action': 'func_back'},
    'q': {'command': 'quit', 'action': 'func_quit'}
}

# "Check" menu: choose which record type to inspect.
menu_check = {
    '1': {'command': 'expense', 'action': 'checkExpenseRecord'},
    '2': {'command': 'budget', 'action': 'checkBudgetRecord'},
    'b': {'command': 'back', 'action': 'func_back'},
    'q': {'command': 'quit', 'action': 'func_quit'}
}

# "Check expense" submenu: filter by time range or show the N most recent.
menu_check_expense = {
    '1': {'command': 'by time', 'action': 'checkExpenseRecordByTime'},
    '2': {'command': 'N most recent expense', 'action': 'checkExpenseRecordRecent'},
    'b': {'command': 'back', 'action': 'func_back'},
    'q': {'command': 'quit', 'action': 'func_quit'}
}

# ASCII-table templates for a three-column report (widths 12 / 8 / 24);
# '{0:+<48}' renders a 48-character '+' rule line.
table_header_template = '''
{0:+<48}
+{fields[0]:^12}+{fields[1]:^8}+{fields[2]:^24}+
{0:+<48}
'''

table_row_template = "+{values[0]:^12}+{values[1]:^8}+{values[2]:^24}+\n"
|
[
"mr.trongphuongpro@gmail.com"
] |
mr.trongphuongpro@gmail.com
|
a1056193fbec0b87c5ed7094ab73e719d4fc0b1c
|
9abec7752387c44715e254e4cf03e708a3b97384
|
/api/waitlist/tdf/fitcheck.py
|
5602c0fcb5b19e13c8db8084ef040fb45bc8b96f
|
[
"MIT"
] |
permissive
|
Aquanable/tdf-waitlist
|
d4b1a06c99d5ab4717bcfd236847f9971cddc16c
|
4937f6d3cda97c81d7a3c2c51fd2cd28db09020b
|
refs/heads/main
| 2023-07-07T23:39:26.424220
| 2021-07-27T13:58:57
| 2021-07-27T13:58:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,753
|
py
|
from typing import List, Dict, Set, Optional, Any, Tuple
import yaml
from ..eft2dna import split_dna
from ..data.evedb import id_of, name_of, type_variations
from . import skills, fits, modules, implants
BANNED_MODULES = modules.load_banned()
def _build_category_rules(raw: List[Dict[str, str]]) -> List[Tuple[int, str]]:
    """Expand raw YAML category rules into (type_id, category) pairs.

    A rule without a "meta" key applies only to the named item.  A rule with
    a "meta" key (le/lt/ge/gt) applies to every variation of the item whose
    meta level satisfies the comparison against the named item's own level.
    An unrecognized "meta" value matches every variation (preserved from the
    original fall-through behavior).
    """
    # Comparison keyword -> predicate over (variation level, base level).
    comparators = {
        "le": lambda level, base: level <= base,
        "lt": lambda level, base: level < base,
        "ge": lambda level, base: level >= base,
        "gt": lambda level, base: level > base,
    }
    result: List[Tuple[int, str]] = []
    for entry in raw:
        item_id = id_of(entry["item"])
        if "meta" not in entry:
            result.append((item_id, entry["category"]))
            continue

        variations = type_variations(item_id)
        base_level = variations[item_id]  # hoisted: invariant per entry
        matches = comparators.get(entry["meta"], lambda level, base: True)
        for variation_id, level in variations.items():
            if matches(level, base_level):
                result.append((variation_id, entry["category"]))
    return result
# Load doctrine category names and rules shipped with the waitlist.
# NOTE(review): the path is relative to the working directory — assumes the
# service is launched from the repository root; confirm in deployment.
with open("./waitlist/tdf/categories.yaml", "r") as fileh:
    _yamldata = yaml.safe_load(fileh)
CATEGORIES: Dict[str, str] = _yamldata["categories"]
CATEGORY_RULES = _build_category_rules(_yamldata["rules"])
class FitCheckResult:  # pylint: disable=too-few-public-methods
    """Aggregated outcome of a fit check.

    Holds the approval flag, informational tags, the assigned waitlist
    category, human-readable errors, and the structured fit-diff details.
    """

    def __init__(self) -> None:
        # All fields start in their "nothing decided yet" state; the
        # FitChecker check_* methods fill them in.
        self.approved = False
        self.category: str = "starter"  # Default
        self.tags: Set[str] = set()
        self.errors: List[str] = []
        self.fit_check: Dict[str, Any] = {}
class FitChecker:  # pylint: disable=too-many-instance-attributes
    """Runs the full fit-approval pipeline for one pilot's ship DNA.

    The check order in run() matters: implants are detected before the fit is
    matched (amulet/hybrid doctrine selection), and the fit must be matched
    before category and tank-skill checks.  Results accumulate in self.result.
    """

    def __init__(
        self,
        dna: str,
        skilldata: Dict[int, int],
        implantdata: List[int],
        time_in_fleet: int,
    ):
        # dna is an EFT-DNA fit string; split into hull, fitted modules, cargo.
        self.ship, self.modules, self.cargo = split_dna(dna)
        self.skills = skilldata          # skill type id -> trained level
        self.implants = implantdata      # plugged-in implant type ids
        self.time_in_fleet = time_in_fleet  # presumably seconds — confirm units
        self.result = FitCheckResult()

        # Intermediate state produced by the check_* methods below.
        self.base_implants: Optional[str] = None   # detected set, e.g. "AMULET"
        self.fit: Optional[fits.FitSpec] = None    # best-matching doctrine fit
        self.fitcheck: Optional[fits.CheckResult] = None
        self.disable_approval = False  # set when any check vetoes auto-approval

    def _add_tag(self, tag: str) -> None:
        # Convenience shortcut for accumulating result tags.
        self.result.tags.add(tag)

    def check_skills(self) -> None:
        """Tag the pilot's skill tier; below-minimum skills veto approval."""
        if not skills.skillcheck(self.ship, self.skills, "min"):
            self._add_tag("STARTER-SKILLS")
            self.disable_approval = True
        elif skills.skillcheck(self.ship, self.skills, "gold"):
            self._add_tag("GOLD-SKILLS")
        elif skills.skillcheck(self.ship, self.skills, "elite"):
            self._add_tag("ELITE-SKILLS")

    def check_tank_skills(self) -> None:
        """Require Armor Compensation level 4 (level 2 for starter fits)."""
        if self.fit and self.fit.is_starter:
            min_comps = 2
        else:
            min_comps = 4
        comps = skills.get_armor_comps_level(self.skills)
        if comps < min_comps:
            self.result.errors.append(
                "Missing minimum Armor Compensation skills (level %d)" % min_comps
            )

    def check_implants(self) -> None:
        """Detect the implant set; tag a full 1-10 set (e.g. "AMULET1-10")."""
        self.base_implants, is_full = implants.detect_implants(self.ship, self.implants)
        if self.base_implants and is_full:
            self._add_tag("%s1-10" % self.base_implants)

    def check_fit(self) -> None:
        """Match the fit against doctrine fits and export the diff details.

        Must run after check_implants(): the detected implant set decides
        whether amulet/hybrid doctrine variants are eligible.
        """
        can_amulet = self.base_implants == "AMULET"
        can_hybrid = self.base_implants in ["AMULET", "HYBRID"]
        self.fit = fits.best_match(
            self.ship, self.modules, self.cargo, can_amulet, can_hybrid
        )
        self.result.fit_check["name"] = self.fit.fitname if self.fit else None
        if not self.fit:
            # Unrecognized fit: nothing further to diff against.
            return

        self.fitcheck = self.fit.check(self.modules, self.cargo)
        if self.fitcheck.fit_ok and self.fit.is_elite:
            self._add_tag("ELITE-FIT")

        # Export the results of the fit check
        fit_check_ids: Set[int] = set()
        if self.fitcheck.missing:
            self.result.fit_check["missing"] = self.fitcheck.missing
            fit_check_ids.update(self.fitcheck.missing.keys())
        if self.fitcheck.extra:
            self.result.fit_check["extra"] = self.fitcheck.extra
            fit_check_ids.update(self.fitcheck.extra.keys())
        if self.fitcheck.downgraded:
            self.result.fit_check["downgraded"] = self.fitcheck.downgraded
            fit_check_ids.update(self.fitcheck.downgraded.keys())
            # Downgrade values map replacement module ids; include those too.
            for downgrade in self.fitcheck.downgraded.values():
                fit_check_ids.update(downgrade.keys())
        if self.fitcheck.cargo_missing:
            self.result.fit_check["cargo_missing"] = self.fitcheck.cargo_missing
            fit_check_ids.update(self.fitcheck.cargo_missing.keys())
        # Sorted id list lets the client resolve names in one lookup.
        self.result.fit_check["_ids"] = sorted(list(fit_check_ids))

    def check_category(self) -> None:
        """Assign the waitlist category from the first matching rule.

        CATEGORY_RULES is ordered; the first rule whose type id appears on
        the hull or fitted modules wins.  Starter-skill pilots are forced
        into "starter" unless they fly logi.
        """
        items = {self.ship: 1, **self.modules}
        for item_id, then_category in CATEGORY_RULES:
            if items.get(item_id, 0):
                self.result.category = then_category
                break

        if "STARTER-SKILLS" in self.result.tags:
            if self.result.category != "logi":
                self.result.category = "starter"

    def check_banned_modules(self) -> None:
        """Record an error for every banned module present on the fit."""
        for module_id in BANNED_MODULES:
            if self.modules.get(module_id, 0):
                self.result.errors.append(
                    "Fit contains banned module: %s" % name_of(module_id)
                )

    def check_logi_implants(self) -> None:
        """Logi hulls (Nestor/Guardian) must carry an EM-806 implant."""
        if self.ship in [id_of("Nestor"), id_of("Guardian")]:
            if not id_of("% EM-806", fuzzy=True) in self.implants:
                self.disable_approval = True
                self._add_tag("NO-EM-806")

    def set_approval(self) -> None:
        """Decide automatic approval from the accumulated check state."""
        # We previously decided to reject the approval
        if self.disable_approval:
            return

        # The fit/cargo is wrong or we don't recognize it
        if (
            not self.fit
            or not self.fitcheck
            or not self.fitcheck.fit_ok
            or not self.fitcheck.cargo_ok
        ):
            return

        # If the fit isn't elite, do a time-based check
        # (thresholds are fleet-time caps in seconds: 150 h / 120 h / 75 h).
        if not (
            (
                (
                    "ELITE-SKILLS" in self.result.tags
                    or "GOLD-SKILLS" in self.result.tags
                )
                and self.fit.is_elite
            )
            or (
                (self.fit.is_elite or self.fit.is_advanced)
                and self.time_in_fleet < 150 * 3600
            )
            or (self.fit.is_basic and self.time_in_fleet < 120 * 3600)
            or (self.fit.is_starter and self.time_in_fleet < 75 * 3600)
        ):
            return

        self.result.approved = True

    def merge_tags(self) -> None:
        """Collapse fit+skill tag pairs into single ELITE / ELITE-GOLD tags."""
        tags = self.result.tags  # Alias, not a copy
        if "ELITE-FIT" in tags:
            if "ELITE-SKILLS" in tags:
                tags.remove("ELITE-FIT")
                tags.remove("ELITE-SKILLS")
                tags.add("ELITE")
            if "GOLD-SKILLS" in tags:
                tags.remove("ELITE-FIT")
                tags.remove("GOLD-SKILLS")
                tags.add("ELITE-GOLD")

    def run(self) -> FitCheckResult:
        """Execute all checks in dependency order and return the result."""
        self.check_skills()
        self.check_implants()
        self.check_fit()
        self.check_category()
        self.check_banned_modules()
        self.check_logi_implants()
        self.check_tank_skills()
        self.set_approval()
        self.merge_tags()
        return self.result
def check_fit(
    dna: str, skilldata: Dict[int, int], implantdata: List[int], time_in_fleet: int
) -> FitCheckResult:
    """Convenience wrapper: build a FitChecker and run the full check sequence."""
    checker = FitChecker(dna, skilldata, implantdata, time_in_fleet)
    return checker.run()
|
[
"info@tvdw.eu"
] |
info@tvdw.eu
|
705368fe31d27b07fa56fa7263c1b73359596d6e
|
427eb6d48233424db3d1f36943506b655ba47d4f
|
/hostap/tests/hwsim/hostapd.py
|
2ca1e3dacb1f275b8105b204588b54b1d3fffba3
|
[
"BSD-3-Clause"
] |
permissive
|
jskripchuk/hostapd-cvorg
|
9b628c8a94867c93585d3fd91a6f3babb311cd91
|
9c9fe885393e404ff4ca79cea6510587b096c3c6
|
refs/heads/master
| 2020-04-30T12:03:47.339697
| 2019-05-03T20:50:21
| 2019-05-03T20:50:21
| 176,817,320
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,093
|
py
|
# Python class for controlling hostapd
# Copyright (c) 2013-2019, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import os
import time
import logging
import binascii
import struct
import wpaspy
import remotehost
import utils
import subprocess
# Root logger; the test framework configures handlers/levels elsewhere.
logger = logging.getLogger()
# Default control-socket directories for per-interface and global hostapd
# control interfaces.
hapd_ctrl = '/var/run/hostapd'
hapd_global = '/var/run/hostapd-global'
def mac2tuple(mac):
    """Convert a colon-separated MAC address string into six byte values."""
    raw = binascii.unhexlify(mac.replace(':', ''))
    return struct.unpack('6B', raw)
class HostapdGlobal:
    """Client for hostapd's *global* control interface.

    Connects either to the local UNIX control socket (hapd_global) or, when
    apdev supplies hostname/port, to a remote UDP control interface.  Two
    wpaspy connections are kept: self.ctrl for request/response commands and
    self.mon (attached) for receiving unsolicited events.
    """

    def __init__(self, apdev=None, global_ctrl_override=None):
        try:
            hostname = apdev['hostname']
            port = apdev['port']
        except:
            # apdev is None or lacks hostname/port: fall back to local socket.
            hostname = None
            port = 8878
        self.host = remotehost.Host(hostname)
        self.hostname = hostname
        self.port = port
        if hostname is None:
            # Local hostapd: UNIX-domain global control socket.
            global_ctrl = hapd_global
            if global_ctrl_override:
                global_ctrl = global_ctrl_override
            self.ctrl = wpaspy.Ctrl(global_ctrl)
            self.mon = wpaspy.Ctrl(global_ctrl)
            self.dbg = ""
        else:
            # Remote hostapd: UDP control interface at hostname:port.
            self.ctrl = wpaspy.Ctrl(hostname, port)
            self.mon = wpaspy.Ctrl(hostname, port)
            self.dbg = hostname + "/" + str(port)
        # Attach so hostapd streams events to the monitor connection.
        self.mon.attach()

    def cmd_execute(self, cmd_array, shell=False):
        """Run a command locally (or on the remote host) and return
        (returncode, decoded combined stdout+stderr)."""
        if self.hostname is None:
            if shell:
                cmd = ' '.join(cmd_array)
            else:
                cmd = cmd_array
            proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT,
                                    stdout=subprocess.PIPE, shell=shell)
            out = proc.communicate()[0]
            ret = proc.returncode
            return ret, out.decode()
        else:
            return self.host.execute(cmd_array)

    def request(self, cmd, timeout=10):
        """Send a command on the global control interface; return the reply."""
        logger.debug(self.dbg + ": CTRL(global): " + cmd)
        return self.ctrl.request(cmd, timeout)

    def wait_event(self, events, timeout):
        """Wait up to timeout seconds for any event containing one of the
        given substrings; return the matching event string or None."""
        start = os.times()[4]  # os.times()[4] is elapsed wall-clock time
        while True:
            # Drain everything already queued on the monitor socket first.
            while self.mon.pending():
                ev = self.mon.recv()
                logger.debug(self.dbg + "(global): " + ev)
                for event in events:
                    if event in ev:
                        return ev
            now = os.times()[4]
            remaining = start + timeout - now
            if remaining <= 0:
                break
            if not self.mon.pending(timeout=remaining):
                break
        return None

    def add(self, ifname, driver=None):
        """ADD a new hostapd interface using the default ctrl path."""
        cmd = "ADD " + ifname + " " + hapd_ctrl
        if driver:
            cmd += " " + driver
        res = self.request(cmd)
        if "OK" not in res:
            raise Exception("Could not add hostapd interface " + ifname)

    def add_iface(self, ifname, confname):
        """ADD an interface configured from the given config file."""
        res = self.request("ADD " + ifname + " config=" + confname)
        if "OK" not in res:
            raise Exception("Could not add hostapd interface")

    def add_bss(self, phy, confname, ignore_error=False):
        """ADD a BSS on the given phy from a bss_config file."""
        res = self.request("ADD bss_config=" + phy + ":" + confname)
        if "OK" not in res:
            if not ignore_error:
                raise Exception("Could not add hostapd BSS")

    def remove(self, ifname):
        # Interface teardown can be slow; allow an extended 30 s timeout.
        self.request("REMOVE " + ifname, timeout=30)

    def relog(self):
        """Ask hostapd to reopen (truncate) its log files."""
        self.request("RELOG")

    def flush(self):
        """Drop all state kept by the global interface."""
        self.request("FLUSH")

    def get_ctrl_iface_port(self, ifname):
        """Return the UDP control port for a remote interface (None locally)."""
        if self.hostname is None:
            return None

        res = self.request("INTERFACES ctrl")
        lines = res.splitlines()
        found = False
        for line in lines:
            words = line.split()
            if words[0] == ifname:
                found = True
                break
        if not found:
            raise Exception("Could not find UDP port for " + ifname)
        # Expected line format: "<ifname> ctrl_iface=udp:<port>".
        res = line.find("ctrl_iface=udp:")
        if res == -1:
            raise Exception("Wrong ctrl_interface format")
        words = line.split(":")
        return int(words[1])

    def terminate(self):
        """Detach the monitor and shut hostapd's global interface down."""
        self.mon.detach()
        self.mon.close()
        self.mon = None
        self.ctrl.terminate()
        self.ctrl = None
class Hostapd:
    """Control-interface client for a single hostapd BSS/interface.

    Talks to hostapd via wpaspy.Ctrl, either over a local UNIX-domain
    control socket (hostname is None) or a UDP control port on a remote
    host.  A second, attached connection (self.mon) receives unsolicited
    events for wait_event()/dump_monitor().
    """
    def __init__(self, ifname, bssidx=0, hostname=None, port=8877):
        self.hostname = hostname
        self.host = remotehost.Host(hostname, ifname)
        self.ifname = ifname
        if hostname is None:
            # Local hostapd: UNIX-domain control sockets under hapd_ctrl.
            self.ctrl = wpaspy.Ctrl(os.path.join(hapd_ctrl, ifname))
            self.mon = wpaspy.Ctrl(os.path.join(hapd_ctrl, ifname))
            self.dbg = ifname
        else:
            # Remote hostapd: UDP control interface on the given port.
            self.ctrl = wpaspy.Ctrl(hostname, port)
            self.mon = wpaspy.Ctrl(hostname, port)
            self.dbg = hostname + "/" + ifname
        self.mon.attach()
        # BSSID is resolved lazily by own_addr(); bssidx selects which
        # bssid[N] STATUS field belongs to this instance.
        self.bssid = None
        self.bssidx = bssidx
    def cmd_execute(self, cmd_array, shell=False):
        """Run a command locally or on the remote host; return (rc, output)."""
        if self.hostname is None:
            if shell:
                cmd = ' '.join(cmd_array)
            else:
                cmd = cmd_array
            proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT,
                                    stdout=subprocess.PIPE, shell=shell)
            out = proc.communicate()[0]
            ret = proc.returncode
            return ret, out.decode()
        else:
            return self.host.execute(cmd_array)
    def close_ctrl(self):
        """Close both control connections without terminating hostapd."""
        if self.mon is not None:
            self.mon.detach()
            self.mon.close()
            self.mon = None
        self.ctrl.close()
        self.ctrl = None
    def own_addr(self):
        """Return (and cache) this BSS's BSSID from the STATUS output."""
        if self.bssid is None:
            self.bssid = self.get_status_field('bssid[%d]' % self.bssidx)
        return self.bssid
    def request(self, cmd):
        """Send a raw control-interface command and return the response."""
        logger.debug(self.dbg + ": CTRL: " + cmd)
        return self.ctrl.request(cmd)
    def ping(self):
        """Return True if hostapd answers PING with PONG."""
        return "PONG" in self.request("PING")
    def set(self, field, value):
        """Set a configuration parameter; raise on hostapd rejection."""
        if "OK" not in self.request("SET " + field + " " + value):
            raise Exception("Failed to set hostapd parameter " + field)
    def set_defaults(self):
        """Apply the baseline config used by the test setup (2.4 GHz 11n)."""
        self.set("driver", "nl80211")
        self.set("hw_mode", "g")
        self.set("channel", "1")
        self.set("ieee80211n", "1")
        # Route all log output to stdout at maximum verbosity.
        self.set("logger_stdout", "-1")
        self.set("logger_stdout_level", "0")
    def set_open(self, ssid):
        """Configure an open (no security) network."""
        self.set_defaults()
        self.set("ssid", ssid)
    def set_wpa2_psk(self, ssid, passphrase):
        """Configure WPA2-Personal with CCMP."""
        self.set_defaults()
        self.set("ssid", ssid)
        self.set("wpa_passphrase", passphrase)
        self.set("wpa", "2")
        self.set("wpa_key_mgmt", "WPA-PSK")
        self.set("rsn_pairwise", "CCMP")
    def set_wpa_psk(self, ssid, passphrase):
        """Configure WPA(1)-Personal with TKIP."""
        self.set_defaults()
        self.set("ssid", ssid)
        self.set("wpa_passphrase", passphrase)
        self.set("wpa", "1")
        self.set("wpa_key_mgmt", "WPA-PSK")
        self.set("wpa_pairwise", "TKIP")
    def set_wpa_psk_mixed(self, ssid, passphrase):
        """Configure mixed WPA+WPA2 Personal (TKIP for WPA, CCMP for RSN)."""
        self.set_defaults()
        self.set("ssid", ssid)
        self.set("wpa_passphrase", passphrase)
        self.set("wpa", "3")
        self.set("wpa_key_mgmt", "WPA-PSK")
        self.set("wpa_pairwise", "TKIP")
        self.set("rsn_pairwise", "CCMP")
    def set_wep(self, ssid, key):
        """Configure static WEP with the given key as wep_key0."""
        self.set_defaults()
        self.set("ssid", ssid)
        self.set("wep_key0", key)
    def enable(self):
        """Enable the (configured but disabled) interface."""
        if "OK" not in self.request("ENABLE"):
            raise Exception("Failed to enable hostapd interface " + self.ifname)
    def disable(self):
        """Disable the interface without removing it."""
        if "OK" not in self.request("DISABLE"):
            raise Exception("Failed to disable hostapd interface " + self.ifname)
    def dump_monitor(self):
        """Drain and log any queued monitor events."""
        while self.mon.pending():
            ev = self.mon.recv()
            logger.debug(self.dbg + ": " + ev)
    def wait_event(self, events, timeout):
        """Wait up to timeout seconds for any event containing one of
        the given substrings; return the matching event line or None.
        """
        # os.times()[4] is elapsed wall-clock time (float seconds).
        start = os.times()[4]
        while True:
            while self.mon.pending():
                ev = self.mon.recv()
                logger.debug(self.dbg + ": " + ev)
                for event in events:
                    if event in ev:
                        return ev
            now = os.times()[4]
            remaining = start + timeout - now
            if remaining <= 0:
                break
            if not self.mon.pending(timeout=remaining):
                break
        return None
    def get_status(self):
        """Return the STATUS output parsed into a name->value dict."""
        res = self.request("STATUS")
        lines = res.splitlines()
        vals = dict()
        for l in lines:
            [name, value] = l.split('=', 1)
            vals[name] = value
        return vals
    def get_status_field(self, field):
        """Return one STATUS field, or None if absent."""
        vals = self.get_status()
        if field in vals:
            return vals[field]
        return None
    def get_driver_status(self):
        """Return the STATUS-DRIVER output parsed into a dict."""
        res = self.request("STATUS-DRIVER")
        lines = res.splitlines()
        vals = dict()
        for l in lines:
            [name, value] = l.split('=', 1)
            vals[name] = value
        return vals
    def get_driver_status_field(self, field):
        """Return one STATUS-DRIVER field, or None if absent."""
        vals = self.get_driver_status()
        if field in vals:
            return vals[field]
        return None
    def get_config(self):
        """Return the GET_CONFIG output parsed into a dict."""
        res = self.request("GET_CONFIG")
        lines = res.splitlines()
        vals = dict()
        for l in lines:
            [name, value] = l.split('=', 1)
            vals[name] = value
        return vals
    def mgmt_rx(self, timeout=5):
        """Wait for a MGMT-RX event and parse the hex frame into a dict.

        Returns None on timeout; otherwise a dict with the raw 'frame',
        decoded 802.11 header fields, and the 'payload' past the header.
        """
        ev = self.wait_event(["MGMT-RX"], timeout=timeout)
        if ev is None:
            return None
        msg = {}
        frame = binascii.unhexlify(ev.split(' ')[1])
        msg['frame'] = frame
        # 24-byte 802.11 management header:
        # FC, duration, DA(6), SA(6), BSSID(6), sequence control.
        hdr = struct.unpack('<HH6B6B6BH', frame[0:24])
        msg['fc'] = hdr[0]
        msg['subtype'] = (hdr[0] >> 4) & 0xf
        hdr = hdr[1:]
        msg['duration'] = hdr[0]
        hdr = hdr[1:]
        msg['da'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[0:6]
        hdr = hdr[6:]
        msg['sa'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[0:6]
        hdr = hdr[6:]
        msg['bssid'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[0:6]
        hdr = hdr[6:]
        msg['seq_ctrl'] = hdr[0]
        msg['payload'] = frame[24:]
        return msg
    def mgmt_tx(self, msg):
        """Transmit a management frame built from a mgmt_rx-style dict."""
        # Duration and sequence control are left as 0; hostapd fills them in.
        t = (msg['fc'], 0) + mac2tuple(msg['da']) + mac2tuple(msg['sa']) + mac2tuple(msg['bssid']) + (0,)
        hdr = struct.pack('<HH6B6B6BH', *t)
        res = self.request("MGMT_TX " + binascii.hexlify(hdr + msg['payload']).decode())
        if "OK" not in res:
            raise Exception("MGMT_TX command to hostapd failed")
    def get_sta(self, addr, info=None, next=False):
        """Return station info as a dict.

        addr=None fetches the first station (STA-FIRST); next=True walks
        to the following station (STA-NEXT).  The first line of output is
        the station address itself and is stored under 'addr'.
        """
        cmd = "STA-NEXT " if next else "STA "
        if addr is None:
            res = self.request("STA-FIRST")
        elif info:
            res = self.request(cmd + addr + " " + info)
        else:
            res = self.request(cmd + addr)
        lines = res.splitlines()
        vals = dict()
        first = True
        for l in lines:
            if first and '=' not in l:
                vals['addr'] = l
                first = False
            else:
                [name, value] = l.split('=', 1)
                vals[name] = value
        return vals
    def get_mib(self, param=None):
        """Return MIB counters as a dict; lines without '=' are skipped."""
        if param:
            res = self.request("MIB " + param)
        else:
            res = self.request("MIB")
        lines = res.splitlines()
        vals = dict()
        for l in lines:
            name_val = l.split('=', 1)
            if len(name_val) > 1:
                vals[name_val[0]] = name_val[1]
        return vals
    def get_pmksa(self, addr):
        """Return the PMKSA cache entry for addr, or None if not cached."""
        res = self.request("PMKSA")
        lines = res.splitlines()
        for l in lines:
            if addr not in l:
                continue
            vals = dict()
            # Entry format: index, authenticator addr, PMKID, expiration,
            # opportunistic flag (space separated).
            [index, aa, pmkid, expiration, opportunistic] = l.split(' ')
            vals['index'] = index
            vals['pmkid'] = pmkid
            vals['expiration'] = expiration
            vals['opportunistic'] = opportunistic
            return vals
        return None
    def dpp_qr_code(self, uri):
        """Feed a DPP QR Code URI to hostapd; return the bootstrap info id."""
        res = self.request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
        return int(res)
    def dpp_bootstrap_gen(self, type="qrcode", chan=None, mac=None, info=None,
                          curve=None, key=None):
        """Generate DPP bootstrapping info; return its id.

        mac=True substitutes this AP's own BSSID (colons stripped).
        """
        cmd = "DPP_BOOTSTRAP_GEN type=" + type
        if chan:
            cmd += " chan=" + chan
        if mac:
            if mac is True:
                mac = self.own_addr()
            cmd += " mac=" + mac.replace(':', '')
        if info:
            cmd += " info=" + info
        if curve:
            cmd += " curve=" + curve
        if key:
            cmd += " key=" + key
        res = self.request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        return int(res)
def add_ap(apdev, params, wait_enabled=True, no_enable=False, timeout=30,
           global_ctrl_override=None):
    """Create, configure and (optionally) enable a hostapd AP.

    apdev is either a dict with 'ifname' (plus optional 'hostname'/'port'
    for a remote host) or, for backwards compatibility, a bare interface
    name string.  Returns the Hostapd control object for the new AP.
    """
    if isinstance(apdev, dict):
        ifname = apdev['ifname']
        try:
            hostname = apdev['hostname']
            port = apdev['port']
            logger.info("Starting AP " + hostname + "/" + port + " " + ifname)
        except:
            # NOTE(review): bare except also catches non-KeyError failures
            # (e.g. concatenating a non-str port above) and silently falls
            # back to local mode — presumably intentional; verify.
            logger.info("Starting AP " + ifname)
            hostname = None
            port = 8878
    else:
        ifname = apdev
        logger.info("Starting AP " + ifname + " (old add_ap argument type)")
        hostname = None
        port = 8878
    hapd_global = HostapdGlobal(apdev,
                                global_ctrl_override=global_ctrl_override)
    # Remove any stale instance of the interface before re-adding it.
    hapd_global.remove(ifname)
    hapd_global.add(ifname)
    # The actual control port is assigned by hostapd; it replaces the
    # default chosen above.
    port = hapd_global.get_ctrl_iface_port(ifname)
    hapd = Hostapd(ifname, hostname=hostname, port=port)
    if not hapd.ping():
        raise Exception("Could not ping hostapd")
    hapd.set_defaults()
    # These parameters must be applied (in this order) before the rest.
    fields = ["ssid", "wpa_passphrase", "nas_identifier", "wpa_key_mgmt",
              "wpa",
              "wpa_pairwise", "rsn_pairwise", "auth_server_addr",
              "acct_server_addr", "osu_server_uri"]
    for field in fields:
        if field in params:
            hapd.set(field, params[field])
    for f, v in list(params.items()):
        if f in fields:
            continue
        if isinstance(v, list):
            # A list value sets the same config key repeatedly.
            for val in v:
                hapd.set(f, val)
        else:
            hapd.set(f, v)
    if no_enable:
        return hapd
    hapd.enable()
    if wait_enabled:
        ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=timeout)
        if ev is None:
            raise Exception("AP startup timed out")
        if "AP-ENABLED" not in ev:
            raise Exception("AP startup failed")
    return hapd
def add_bss(apdev, ifname, confname, ignore_error=False):
    """Add a BSS (configured via confname) on apdev's phy.

    Returns a Hostapd control object for the new BSS.
    """
    phy = utils.get_phy(apdev)
    try:
        hostname = apdev['hostname']
        port = apdev['port']
        logger.info("Starting BSS " + hostname + "/" + port + " phy=" + phy + " ifname=" + ifname)
    except:
        # Missing hostname/port (or non-str port) -> local hostapd instance.
        logger.info("Starting BSS phy=" + phy + " ifname=" + ifname)
        hostname = None
        port = 8878
    hapd_global = HostapdGlobal(apdev)
    hapd_global.add_bss(phy, confname, ignore_error)
    # Use the control port hostapd actually assigned to this interface.
    port = hapd_global.get_ctrl_iface_port(ifname)
    hapd = Hostapd(ifname, hostname=hostname, port=port)
    if not hapd.ping():
        raise Exception("Could not ping hostapd")
    return hapd
def add_iface(apdev, confname):
    """Add a hostapd interface driven entirely by a config file.

    Returns a Hostapd control object for the new interface.
    """
    ifname = apdev['ifname']
    try:
        hostname = apdev['hostname']
        port = apdev['port']
        logger.info("Starting interface " + hostname + "/" + port + " " + ifname)
    except:
        # Missing hostname/port (or non-str port) -> local hostapd instance.
        logger.info("Starting interface " + ifname)
        hostname = None
        port = 8878
    hapd_global = HostapdGlobal(apdev)
    hapd_global.add_iface(ifname, confname)
    # Use the control port hostapd actually assigned to this interface.
    port = hapd_global.get_ctrl_iface_port(ifname)
    hapd = Hostapd(ifname, hostname=hostname, port=port)
    if not hapd.ping():
        raise Exception("Could not ping hostapd")
    return hapd
def remove_bss(apdev, ifname=None):
    """Remove a BSS (defaults to apdev's own ifname) via the global socket."""
    if ifname == None:
        ifname = apdev['ifname']
    try:
        hostname = apdev['hostname']
        port = apdev['port']
        logger.info("Removing BSS " + hostname + "/" + port + " " + ifname)
    except:
        # Local instance: log without host/port details.
        logger.info("Removing BSS " + ifname)
    hapd_global = HostapdGlobal(apdev)
    hapd_global.remove(ifname)
def terminate(apdev):
    """Terminate the hostapd process that owns apdev."""
    try:
        hostname = apdev['hostname']
        port = apdev['port']
        logger.info("Terminating hostapd " + hostname + "/" + port)
    except:
        # Local instance: log without host/port details.
        logger.info("Terminating hostapd")
    hapd_global = HostapdGlobal(apdev)
    hapd_global.terminate()
def wpa2_params(ssid=None, passphrase=None):
    """Return hostapd parameters for a WPA2-Personal (CCMP) BSS.

    ssid and wpa_passphrase are included only when given (truthy).
    """
    cfg = dict(wpa="2", wpa_key_mgmt="WPA-PSK", rsn_pairwise="CCMP")
    for key, val in (("ssid", ssid), ("wpa_passphrase", passphrase)):
        if val:
            cfg[key] = val
    return cfg
def wpa_params(ssid=None, passphrase=None):
    """Return hostapd parameters for a WPA(1)-Personal (TKIP) BSS."""
    cfg = dict(wpa="1", wpa_key_mgmt="WPA-PSK", wpa_pairwise="TKIP")
    for key, val in (("ssid", ssid), ("wpa_passphrase", passphrase)):
        if val:
            cfg[key] = val
    return cfg
def wpa_mixed_params(ssid=None, passphrase=None):
    """Return hostapd parameters for mixed WPA+WPA2 Personal.

    TKIP for the WPA side, CCMP for the RSN side (wpa=3 enables both).
    """
    cfg = dict(wpa="3", wpa_key_mgmt="WPA-PSK",
               wpa_pairwise="TKIP", rsn_pairwise="CCMP")
    for key, val in (("ssid", ssid), ("wpa_passphrase", passphrase)):
        if val:
            cfg[key] = val
    return cfg
def radius_params():
    """Return RADIUS authentication server parameters for the local test server."""
    return dict(auth_server_addr="127.0.0.1",
                auth_server_port="1812",
                auth_server_shared_secret="radius",
                nas_identifier="nas.w1.fi")
def wpa_eap_params(ssid=None):
    """Return hostapd parameters for WPA(1)-Enterprise with the test RADIUS server."""
    cfg = radius_params()
    cfg.update({"wpa": "1", "wpa_key_mgmt": "WPA-EAP",
                "wpa_pairwise": "TKIP", "ieee8021x": "1"})
    if ssid:
        cfg["ssid"] = ssid
    return cfg
def wpa2_eap_params(ssid=None):
    """Return hostapd parameters for WPA2-Enterprise with the test RADIUS server."""
    cfg = radius_params()
    cfg.update({"wpa": "2", "wpa_key_mgmt": "WPA-EAP",
                "rsn_pairwise": "CCMP", "ieee8021x": "1"})
    if ssid:
        cfg["ssid"] = ssid
    return cfg
def b_only_params(channel="1", ssid=None, country=None):
    """Return parameters for an 802.11b-only BSS on the given channel."""
    cfg = dict(hw_mode="b", channel=channel)
    for key, val in (("ssid", ssid), ("country_code", country)):
        if val:
            cfg[key] = val
    return cfg
def g_only_params(channel="1", ssid=None, country=None):
    """Return parameters for an 802.11g-only BSS on the given channel."""
    cfg = dict(hw_mode="g", channel=channel)
    for key, val in (("ssid", ssid), ("country_code", country)):
        if val:
            cfg[key] = val
    return cfg
def a_only_params(channel="36", ssid=None, country=None):
    """Return parameters for an 802.11a-only (5 GHz) BSS on the given channel."""
    cfg = dict(hw_mode="a", channel=channel)
    for key, val in (("ssid", ssid), ("country_code", country)):
        if val:
            cfg[key] = val
    return cfg
def ht20_params(channel="1", ssid=None, country=None):
    """Return parameters for an HT20 (802.11n) BSS.

    Channels above 14 select the 5 GHz band (hw_mode=a), otherwise 2.4 GHz.
    """
    cfg = {"ieee80211n": "1",
           "channel": channel,
           "hw_mode": "a" if int(channel) > 14 else "g"}
    for key, val in (("ssid", ssid), ("country_code", country)):
        if val:
            cfg[key] = val
    return cfg
def ht40_plus_params(channel="1", ssid=None, country=None):
    """Return HT40+ (secondary channel above) parameters built on ht20_params."""
    return dict(ht20_params(channel, ssid, country), ht_capab="[HT40+]")
def ht40_minus_params(channel="1", ssid=None, country=None):
    """Return HT40- (secondary channel below) parameters built on ht20_params."""
    return dict(ht20_params(channel, ssid, country), ht_capab="[HT40-]")
def cmd_execute(apdev, cmd, shell=False):
    """Run a command on the host that owns apdev; returns (rc, output)."""
    return HostapdGlobal(apdev).cmd_execute(cmd, shell=shell)
|
[
"jimmyskripchuk@gmail.com"
] |
jimmyskripchuk@gmail.com
|
bd928f3173001ac131fc3c4f47d1793665e2933f
|
3e78b520bbd91dafb60e5b1132c919e9c75e9f74
|
/Final_Project/Final_Project/videosocket.py
|
aeed01f9c7ef9e3c4c52ef5e3891d8686a54b85a
|
[] |
no_license
|
shmer26/TechCall
|
dbc1b9ab86fa5236c08e7022de3e13d1047ca4ef
|
9e95a0f031ccd21463f8aa55e3a8f49149e503db
|
refs/heads/master
| 2020-05-17T00:38:56.200740
| 2019-04-25T10:15:52
| 2019-04-25T10:15:52
| 183,401,438
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,956
|
py
|
import socket
class videosocket:
    '''Frame-oriented wrapper around a stream socket.

    Each frame is sent as an 8-character, zero-padded decimal length header
    followed by the payload, so the receiver knows exactly how many bytes
    to read.  Payload size is assumed to be below 100 MB (8 digits).

    Fixes over the original:
    - the ``self.sock.close()`` calls were placed *after* ``raise`` and were
      therefore unreachable dead code; the socket is now closed first;
    - closed-peer detection used ``chunk == ''``, which never matches the
      ``b''`` returned by Python 3 sockets; ``not chunk`` covers both;
    - data is handled as bytes so the class works on Python 3
      (vreceive now returns bytes; on Python 2, bytes and str are the same).
    '''

    def __init__(self, sock=None):
        # Wrap an existing connected socket, or create a fresh TCP socket.
        if sock is None:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        else:
            self.sock = sock

    def connect(self, host, port):
        """Connect the underlying socket to (host, port)."""
        self.sock.connect((host, port))

    def vsend(self, framestring):
        """Send one frame (bytes, or str which is UTF-8 encoded).

        Raises RuntimeError (after closing the socket) if the peer
        disappears mid-transfer.
        """
        if isinstance(framestring, str):
            framestring = framestring.encode()
        header = str(len(framestring)).zfill(8).encode()
        self._send_all(header)
        self._send_all(framestring)

    def _send_all(self, data):
        # Loop until every byte has been handed to the kernel; send() may
        # transmit only part of the buffer.
        total = 0
        while total < len(data):
            sent = self.sock.send(data[total:])
            if sent == 0:
                # Close *before* raising (the original closed after raise,
                # which was unreachable).
                self.sock.close()
                raise RuntimeError("Socket connection broken")
            total += sent

    def vreceive(self):
        """Receive one frame; return the payload as bytes.

        Raises RuntimeError (after closing the socket) if the connection
        is dropped before a full frame arrives.
        """
        header = self._recv_exact(8)
        length = int(header.decode())
        return self._recv_exact(length)

    def _recv_exact(self, length):
        # Read exactly `length` bytes, accumulating recv() chunks.
        chunks = []
        received = 0
        while received < length:
            chunk = self.sock.recv(length - received)
            if not chunk:
                # Empty read means the peer closed the connection.
                self.sock.close()
                raise RuntimeError("Socket connection broken")
            chunks.append(chunk)
            received += len(chunk)
        return b"".join(chunks)

    def vclose(self):
        """Close the underlying socket."""
        self.sock.close()
|
[
"noreply@github.com"
] |
shmer26.noreply@github.com
|
f427941f64f421f804c781fa1351c3816da55a2a
|
a3f32d4d47294771e0de86ed7280ee723f25af22
|
/Image_recognition-main/project/wsgi.py
|
7b5e538b52c1759652225a29d2d30dea490bd0c1
|
[] |
no_license
|
mdsariful/Image_recognition-main
|
b1333021fd5864ca5250d96d4f9b1ab3106e7d79
|
b1236cdf6dbb9d9a7138bbe8ace14a111c10e454
|
refs/heads/master
| 2023-08-07T19:17:32.727961
| 2021-09-01T13:08:52
| 2021-09-01T13:08:52
| 402,064,693
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 389
|
py
|
"""
WSGI config for ujasi project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')
application = get_wsgi_application()
|
[
"sarif2435@gmail.com"
] |
sarif2435@gmail.com
|
a784cccfceb1d08e26461544355e3cfdf633708a
|
8d163bafc4f00de99909fb72ee0658d9a593c9db
|
/photo/models.py
|
f983f6023063d96559520862deefbdc450a7e811
|
[] |
no_license
|
SheepEatLion/playcarnival
|
edcb00ed090c6908d4bf73853289329ceec6529a
|
a0a8781506cf6136dc3d7c6f97a0721e898e3816
|
refs/heads/master
| 2023-01-19T01:15:00.668788
| 2020-11-14T11:24:57
| 2020-11-14T11:24:57
| 312,723,187
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 737
|
py
|
from django.db import models
# Create your models here.
from django.contrib.auth.models import User
from django.urls import reverse
class Photo(models.Model):
    """User-uploaded photo with caption text and audit timestamps."""
    # Owning user; deleting the user cascades to their photos.
    author = models.ForeignKey(User, on_delete=models.CASCADE, related_name='user_photos')
    # Stored under photos/YYYY/MM/DD; placeholder image when none uploaded.
    photo = models.ImageField(upload_to='photos/%Y/%m/%d', default='photos/no_image.png')
    text = models.TextField()  # caption / description
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated = models.DateTimeField(auto_now=True)  # refreshed on every save
    class Meta:
        # Newest-updated photos first by default.
        ordering = ['-updated']
    def __str__(self):
        """Return '<username> <created timestamp>' for admin/shell display."""
        return self.author.username + " " + self.created.strftime("%Y-%m-%d %H:%M:%S")
    def get_absolute_url(self):
        """Return the canonical detail URL (photo:photo_detail) for this photo."""
        return reverse('photo:photo_detail', args=[str(self.id)])
|
[
"gowjr207@gmail.com"
] |
gowjr207@gmail.com
|
915d94df776b6e6303deac8b659aafc1959926e9
|
8b602a08da7f67971c8da967b4ff1230fc5e486e
|
/venv/Scripts/easy_install-3.7-script.py
|
b08a1893df8c08e03b06bac0a62f75131afd864f
|
[] |
no_license
|
elybelyaev/protocols
|
1f941b9e9ff3d39aa4cbe8b98222c6280b647328
|
17ed4ee2baed7e60492f90abecb11a372aa221bc
|
refs/heads/master
| 2023-03-11T09:26:38.742890
| 2020-06-20T18:39:23
| 2020-06-20T18:39:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 470
|
py
|
#!"C:\Users\Belyaev Ilya\Desktop\projects\protocols\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
# NOTE: auto-generated console-script shim created by setuptools; do not
# edit by hand — reinstalling the package regenerates it.
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] matches the
    # logical command name, then dispatch to the declared entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
    )
|
[
"50026308+CaptainBelyash@users.noreply.github.com"
] |
50026308+CaptainBelyash@users.noreply.github.com
|
a34a561589a46b29e86bac06cbbdf09613c3d05b
|
a52b76dabc7e742b3b2c6d1399f6119d79e16940
|
/gluon/gluoncv2/models/model_store.py
|
8265eb1ca51e50de96c9916372a7a501f1ff794d
|
[
"MIT"
] |
permissive
|
Mrmdzz/imgclsmob
|
ec07e8fa6ed38e0be58de87c409f79cab8fd9f32
|
0b7221655776fee32de2827db0e59e0df976441d
|
refs/heads/master
| 2023-02-17T05:43:04.306787
| 2021-01-18T08:31:17
| 2021-01-18T08:31:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 59,115
|
py
|
"""
Model store which provides pretrained models.
"""
__all__ = ['get_model_file']
import os
import zipfile
import logging
from mxnet.gluon.utils import download, check_sha1
_model_sha1 = {name: (error, checksum, repo_release_tag) for name, error, checksum, repo_release_tag in [
('alexnet', '1610', '4dd7cfb6275229c3b3889ec7ad29f8d48a499193', 'v0.0.481'),
('alexnetb', '1705', '0181007ac2ba5d5c051346af1eea4a00c36f34e9', 'v0.0.485'),
('zfnet', '1678', '3299fdce9712697f9e03e1a0cf741745553a28cf', 'v0.0.395'),
('zfnetb', '1459', '7a654810eced689323e414030165cf1392c9684f', 'v0.0.400'),
('vgg11', '1016', '3d78e0ec95d358577acf8b2e2f768a72ec319ee3', 'v0.0.381'),
('vgg13', '0950', 'd2bcaaf3704afb47b2818066255b3de65d0c03e1', 'v0.0.388'),
('vgg16', '0832', '22fe503aa438ebfd1f3128b676db21a06de3cd59', 'v0.0.401'),
('vgg19', '0767', 'e198aa1f6519b3331e5ebbe782ca35fd2aca6213', 'v0.0.420'),
('bn_vgg11', '0934', '3f79cab1e9e0bab0d57c0f80a6c9782e5cdc765d', 'v0.0.339'),
('bn_vgg13', '0887', '540243b0a3eb2ae3974c2d75cf82e655fa4f0dda', 'v0.0.353'),
('bn_vgg16', '0757', '90441925c7c19c9db6bf96824c2440774c0e54df', 'v0.0.359'),
('bn_vgg19', '0689', 'cd8f4229e5e757ba951afc6eb1a4ec7a471022f7', 'v0.0.360'),
('bn_vgg11b', '0975', '685ae89dcf3916b9c6e887d4c5bc441a34253744', 'v0.0.407'),
('bn_vgg13b', '0912', 'fc678318391d45465bda4aa17744dc0d148135e3', 'v0.0.488'),
('bn_vgg16b', '0775', '77dad99b4f72176870655ffbf59e96f3b9570a0b', 'v0.0.489'),
('bn_vgg19b', '0735', '8d9a132bfd9230304e588ec159170ee3173d64ba', 'v0.0.490'),
('bninception', '0754', '75225419bae387931f6c8ccc19dc48fb1cc8cdae', 'v0.0.405'),
('resnet10', '1253', '651853ca974959c978f39b965b928d7c616796d3', 'v0.0.483'),
('resnet12', '1203', 'a41948541130b9ebf8285412b38c668198e4ad4f', 'v0.0.485'),
('resnet14', '1086', '5d9f22a7aa6c821eb69fc043af2da040f36110eb', 'v0.0.491'),
('resnetbc14b', '1033', '4ff348bebc7696cd219be605668bb76546dd9641', 'v0.0.481'),
('resnet16', '0978', '2c28373c9ce1f50ff6a4b47a16a3d1bc2e557c75', 'v0.0.493'),
('resnet18_wd4', '1740', 'a74ea15d056a84133e02706a6c7f8ed8f50c8462', 'v0.0.262'),
('resnet18_wd2', '1284', '9a5154065311c8ffbbc57b20a443b205c7a910fa', 'v0.0.263'),
('resnet18_w3d4', '1066', '1a574a4198a5bbf01572c2b3f091eb824ff8196e', 'v0.0.266'),
('resnet18', '0867', '711ed8ab624c4c580e90321989aef6174ad2991d', 'v0.0.478'),
('resnet26', '0823', 'a2746eb21d73c3c85edacabe36d680988986f890', 'v0.0.489'),
('resnetbc26b', '0758', '2b5e8d0888936a340ea13c7e8ba30b237cd62f1c', 'v0.0.313'),
('resnet34', '0743', '5cdeeccda6f87fe13aed279213006061a8b42037', 'v0.0.291'),
('resnetbc38b', '0672', '820944641ba54f4aaa43d2a305ab52b9dcb740c7', 'v0.0.328'),
('resnet50', '0604', 'a71d1d2a8e8e4259742bbd67c386623233b57c6c', 'v0.0.329'),
('resnet50b', '0611', 'ca12f8d804000bf5202e2e3838dec7ef6b772149', 'v0.0.308'),
('resnet101', '0599', 'a6d3a5f4933794d56b61867c050ee730f6310f1b', 'v0.0.1'),
('resnet101b', '0512', 'af5c4233b28c8b7acd3b3ebe02f9f2eda2c77824', 'v0.0.357'),
('resnet152', '0535', 'bbdd7ed1f33a9b33c75635d78143e8bd00e204e0', 'v0.0.144'),
('resnet152b', '0480', '7277968cbbc5e55e681b6f13784c93c4e4ec3d6d', 'v0.0.378'),
('preresnet10', '1401', '2b96c0818dbabc422e98d8fbfc9b684c023922ed', 'v0.0.249'),
('preresnet12', '1321', 'b628efb5415784075e18b6734b1ba1e5c7280dee', 'v0.0.257'),
('preresnet14', '1218', 'd65fa6287414d9412e34ac0df6921eaa5646a2b6', 'v0.0.260'),
('preresnetbc14b', '1151', 'c712a235b75ad4956411bab265dfd924c748726e', 'v0.0.315'),
('preresnet16', '1081', '5b00b55f74adb9ee4a6ba5f946aafd48b4d8aa47', 'v0.0.261'),
('preresnet18_wd4', '1778', '3d949d1ae20b9188a423b56a1f7a89b4bcecc3d2', 'v0.0.272'),
('preresnet18_wd2', '1319', '63e55c24bc0ae93a8f8daefa4b35dc3e70147f65', 'v0.0.273'),
('preresnet18_w3d4', '1068', 'eb5698616757fd0947851f62c33fc4d7b4a5f23a', 'v0.0.274'),
('preresnet18', '0951', '71279a0b7339f1efd12bed737219a9ed76175a9d', 'v0.0.140'),
('preresnet26', '0834', 'c2ecba0948934c28d459b7f87fbc1489420fd4fb', 'v0.0.316'),
('preresnetbc26b', '0786', '265f591f320db0915c18c16f4ed0e2e53ee46567', 'v0.0.325'),
('preresnet34', '0751', 'ba9c829e72d54f8b02cf32ea202c195d36568467', 'v0.0.300'),
('preresnetbc38b', '0633', '809d2defea82276fdd7ff2faafc1f6ffe57c93b5', 'v0.0.348'),
('preresnet50', '0620', '50f13b2d3fd197c8aa721745adaf2d6615fd8c16', 'v0.0.330'),
('preresnet50b', '0632', '951de2dc558f94f489ce62fedf979ccc08361641', 'v0.0.307'),
('preresnet101', '0575', 'e2887e539f2519c36aea0fc991d6503ed384c4fc', 'v0.0.2'),
('preresnet101b', '0540', '3839a4733a8a614bb6b7b4f555759bb5b8013d42', 'v0.0.351'),
('preresnet152', '0532', '31505f719ad76f5aee59d37a695ac7a9b06230fc', 'v0.0.14'),
('preresnet152b', '0500', '6929c862634ff7a7ac2317ab5e97bd1c62dc3278', 'v0.0.386'),
('preresnet200b', '0564', '38f849a61f59924d85a9353923424889a77c93dc', 'v0.0.45'),
('preresnet269b', '0556', 'f386e3e70ab4df48fd8b444858bd6acd8afcbe6b', 'v0.0.239'),
('resnext14_16x4d', '1223', '1f8072e8d01d1427941a06bbca896211e98e2b75', 'v0.0.370'),
('resnext14_32x2d', '1247', '2ca8cc2544045c21d0a7ca740483097491ba0855', 'v0.0.371'),
('resnext14_32x4d', '1110', '9be6190e328c15a06703be3ba922d707c2f4d8e7', 'v0.0.327'),
('resnext26_32x2d', '0850', 'a1fb4451be6336d9f648ccc2c2dedacc5704904a', 'v0.0.373'),
('resnext26_32x4d', '0721', '5264d7efd606e1c95a2480050e9f03a7a2f02b09', 'v0.0.332'),
('resnext50_32x4d', '0546', 'd0ae6b111b6c973c9057b7b762930bba9bfe32fb', 'v0.0.417'),
('resnext101_32x4d', '0492', 'c31cce1c38e8017ae97519a0eb721e65722d8fc2', 'v0.0.417'),
('resnext101_64x4d', '0483', '44b79943cd9050e2703c169364334b458a8ade89', 'v0.0.417'),
('seresnet10', '1169', '675d4b5bbaaab1b87988df96819cd19e5323512c', 'v0.0.486'),
('seresnet18', '0920', '85a6b1da19645419cc3075852588cc7e7da5715f', 'v0.0.355'),
('seresnet26', '0803', '9f9004192240ae0125399d2f6acbb5359027039d', 'v0.0.363'),
('seresnetbc26b', '0682', '15ae6e19b1626107029df18bf3f5140e6fcb2b02', 'v0.0.366'),
('seresnetbc38b', '0575', 'f80f0c3c2612e1334204df1575b8a7cd18f851ff', 'v0.0.374'),
('seresnet50', '0560', 'e75ef498abfc021f356378b2c806de8927287fe7', 'v0.0.441'),
('seresnet50b', '0533', '0d8f0d23bb980621095e41de69e3a68d7aaeba45', 'v0.0.387'),
('seresnet101', '0589', '4c10238dd485a540a464bf1c39a8752d2da040b9', 'v0.0.11'),
('seresnet101b', '0462', '59fae71a5db6ad1f1b5a79f7339920aa1191ed70', 'v0.0.460'),
('seresnet152', '0577', 'de6f099dd39f374390639ca8854b2954af3c59b9', 'v0.0.11'),
('sepreresnet10', '1306', 'cbdd10532dd79915228f12624577c58a2daa078d', 'v0.0.377'),
('sepreresnet18', '0938', 'f9645ed3b56b85778dc697dc7749851d14e3564a', 'v0.0.380'),
('sepreresnetbc26b', '0636', '33c94c9dd2986d0643f7c432d5203294ecf2124e', 'v0.0.399'),
('sepreresnetbc38b', '0563', 'd8f0fbd35b840743e312b9ec944c7fc141824ace', 'v0.0.409'),
('sepreresnet50b', '0532', '5b620ff7175c674ef41647e2e79992dceefeec19', 'v0.0.461'),
('seresnext50_32x4d', '0505', '7f3fae1ff9d2774fa36067703c28fd57708889f2', 'v0.0.418'),
('seresnext101_32x4d', '0460', '6cb1ee646fdd47b745ffa09e0e8ecfec3ce695a3', 'v0.0.418'),
('seresnext101_64x4d', '0466', '15e1673077240a648027505540fc893a68401e87', 'v0.0.418'),
('senet16', '0806', 'ba26802160725af926e5a3217f8848bd3a6599fd', 'v0.0.341'),
('senet28', '0591', 'd5297a35a2e56ecc892499c1bd4373125a40b783', 'v0.0.356'),
('senet154', '0465', 'dd2445078c0770c4a52cd22aa1d4077eb26f6132', 'v0.0.13'),
('resnestabc14', '0634', '4b0cbe8c59e9d764027f51670096037e2f058970', 'v0.0.493'),
('resnesta18', '0689', '8f37b6927751b27a72c1b8c625de265da9b31570', 'v0.0.489'),
('resnestabc26', '0470', 'f88d49d7bd6cdf1f448afdf4bc5dadd9921798dd', 'v0.0.495'),
('resnesta50', '0451', '445a013a228b65a3e626fe71710ebd1c8350843a', 'v0.0.465'),
('resnesta101', '0399', 'ab6c6f89407d7b2cf89406f7bb62a48e86d7cabc', 'v0.0.465'),
('resnesta200', '0340', '3bd1f0c8d9a2862b89590c1c490a38d6d982522c', 'v0.0.465'),
('resnesta269', '0336', '8333862a0c016432f6cd5b45f7e9de7ee6e3f319', 'v0.0.465'),
('ibn_resnet50', '0559', '0f75710a144ea1483e6235f6909f8b8c3555ec01', 'v0.0.495'),
('ibn_resnet101', '0587', '946e7f1072a70b19f2bbc9776f73b818473482c3', 'v0.0.127'),
('ibnb_resnet50', '0697', '0aea51d29d4123676e447b92db800f5a574a35be', 'v0.0.127'),
('ibn_resnext101_32x4d', '0562', '05ddba79597927b5c0fa516d435c3788803438f6', 'v0.0.127'),
('ibn_densenet121', '0646', '82ee3ff44fd308107ec07d759c52a99e94af7041', 'v0.0.493'),
('ibn_densenet169', '0682', '6d7c48c5519c6b8595223514564b1061268742a2', 'v0.0.127'),
('airnet50_1x64d_r2', '0621', '347358cc4a3ac727784665e8113cd11bfa79c606', 'v0.0.120'),
('airnet50_1x64d_r16', '0646', '0b847b998253ba22409eed4b939ec2158928a33f', 'v0.0.120'),
('airnext50_32x4d_r2', '0575', 'ab104fb5225b17836d523a525903db254f5fdd99', 'v0.0.120'),
('bam_resnet50', '0696', '7e573b617562d7dab94cda3b1a47ec0085aaeba2', 'v0.0.124'),
('cbam_resnet50', '0638', '78be56658e9f9452d7c2472c994b332d97807a17', 'v0.0.125'),
('scnet50', '0511', '359d35d017e838d3c3c2bdf1cc83074610dc3c5b', 'v0.0.493'),
('scnet101', '0599', '8c30a4c7b0b5bfb36485ee5106648436ab8df9e3', 'v0.0.472'),
('scneta50', '0463', '4b2031a8bcc69523f1da8af7ad52b69795d2f965', 'v0.0.472'),
('regnetx002', '1038', '7800b310f45b4666ef6c862bc8a2573f65ddaa40', 'v0.0.475'),
('regnetx004', '0855', 'b933a72fe7304a31391a9e9fcdf7ffc47ea05353', 'v0.0.479'),
('regnetx006', '0756', 'd41aa087bd288266105142b88b03b67d74352a46', 'v0.0.482'),
('regnetx008', '0724', '79309908dee31eda64e824a9b3bd33c0afaaf5a8', 'v0.0.482'),
('regnetx016', '0613', '018dbe2d89d2bac26f41d98b09ba57b049ed4cfe', 'v0.0.486'),
('regnetx032', '0568', '6d4372fcf0b9d6e3edefa99e72dd411b6a0c676c', 'v0.0.492'),
('regnetx040', '0469', 'c22092c7ceeac713a9102c697f23bec0b7cfaec0', 'v0.0.495'),
('regnetx064', '0541', 'd9c902de21804f6f6df3e31ce8ee0b30e86d0821', 'v0.0.473'),
('regnetx080', '0545', '7eb99b19aed61972bb834b798523f0a84a2ea341', 'v0.0.473'),
('regnetx120', '0522', '22c6c13843884293f13be2d86f3087ba796258d3', 'v0.0.473'),
('regnetx160', '0505', 'f2dac945f02a1af1b9751a3e4531e66c68a4cb98', 'v0.0.473'),
('regnetx320', '0489', '80ef5db7027fdadd3e6b61e67de300d6b0e68f2f', 'v0.0.473'),
('regnety002', '0953', 'b37fcac05e59ba5e751701da8e81195bbfbf3db8', 'v0.0.476'),
('regnety004', '0747', '5626bdf45eaf0f23ddc6e1b68f6ad2db7ca119cf', 'v0.0.481'),
('regnety006', '0697', '81372679b5f9601a5bc72caa13aff378d2ac4233', 'v0.0.483'),
('regnety008', '0645', 'd92881be768a267f1cd2b540d087884bbe93f644', 'v0.0.483'),
('regnety016', '0568', 'c4541a25e92ebf31d9bddb4975738c08fe929836', 'v0.0.486'),
('regnety032', '0413', '9066698526cbc930706fe00b21ca56c31ad7e2e4', 'v0.0.473'),
('regnety040', '0467', '6039a215ee79a7855156c5166df4c6b34f8d501d', 'v0.0.494'),
('regnety064', '0512', '66d05225c4c7a77c8add4c7fd3784fb3253a41da', 'v0.0.473'),
('regnety080', '0509', '687183a2b431c9fa5cf536cbe884eaefaef57e59', 'v0.0.473'),
('regnety120', '0482', '0946781aa91bef0f89307f33defdac7d5f385861', 'v0.0.473'),
('regnety160', '0497', 'e458ce58977d36ffb48043853a65e471c4a100c2', 'v0.0.473'),
('regnety320', '0458', '900b95919cafc18d2a3404dcbe89e008f44170c2', 'v0.0.473'),
('pyramidnet101_a360', '0652', '08d5a5d1af3d514d1114ce76277223e8c1f5f426', 'v0.0.104'),
('diracnet18v2', '1117', '27601f6fa54e3b10d77981f30650d7a9d4bce91e', 'v0.0.111'),
('diracnet34v2', '0946', '1faa6f1245e152d1a3e12de4b5dc1ba554bc3bb8', 'v0.0.111'),
('crunet56', '0825', 'ad16523bfa306aefae5f931ed3bd6d01cd6d1804', 'v0.0.197'),
('densenet121', '0685', 'd3a1fae8b311343498f736e494d60d32e35debfb', 'v0.0.314'),
('densenet161', '0592', '29897d410ea5ae427278df060de578911df74667', 'v0.0.432'),
('densenet169', '0605', '9c045c864828e773f92f998199821fc0c21e0eb4', 'v0.0.406'),
('densenet201', '0590', '89aa8c295b21fdd682df0027d2232e6dabf2cace', 'v0.0.426'),
('condensenet74_c4_g4', '0864', 'cde68fa2fcc9197e336717a17753a15a6efd7596', 'v0.0.4'),
('condensenet74_c8_g8', '1049', '4cf4a08e7fb46f5821049dcae97ae442b0ceb546', 'v0.0.4'),
('peleenet', '0979', '758d3cf992a6c92731069be091be2e8ebd3209e2', 'v0.0.496'),
('wrn50_2', '0612', 'f8013e680bf802301e6830e5ca12de73382edfb1', 'v0.0.113'),
('drnc26', '0789', 'ee56ffabbcceba2e4063c80a3f84a4f4f8461bff', 'v0.0.116'),
('drnc42', '0692', 'f89c26d6a3792bef0850b7fe09ee10f715dcd3ce', 'v0.0.116'),
('drnc58', '0627', '44cbf15ccaea33ee1e91b780e70170e8e66b12d7', 'v0.0.116'),
('drnd22', '0852', '085747529f2d4a0490769e753649843c40dea410', 'v0.0.116'),
('drnd38', '0736', 'c7d53bc0f70196dda589fcf0bfac904b5d76d872', 'v0.0.116'),
('drnd54', '0627', '87d44c87953d98241f85007802a61e3cefd77792', 'v0.0.116'),
('drnd105', '0581', 'ab12d66220c1bbf4af5c33db78aaafc9f0d9bd5a', 'v0.0.116'),
('dpn68', '0658', '07251919c08640c94375670cbc5f0fbc312ed59b', 'v0.0.310'),
('dpn98', '0528', 'fa5d6fca985afde21f6374e4a4d4df788d1b4c3a', 'v0.0.17'),
('dpn131', '0522', '35ac2f82e69264e0712dcb979da4d99675e2f2aa', 'v0.0.17'),
('darknet_tiny', '1746', '16501793621fbcb137f2dfb901760c1f621fa5ec', 'v0.0.69'),
('darknet_ref', '1668', '3011b4e14b629f80da54ab57bef305d588f748ab', 'v0.0.64'),
('darknet53', '0556', 'e9486353868e0cf78bdc8fa377437de4d02733bb', 'v0.0.150'),
('irevnet301', '0897', 'cef9b5bfe9dd51c7d7946de432fd358f54239d35', 'v0.0.251'),
('bagnet9', '3544', 'ea1ae64532fc58e4efe585b3154aa4b42a677d77', 'v0.0.255'),
('bagnet17', '2152', '4b3a621287346dc836fe42de0b0888fb9a1c9075', 'v0.0.255'),
('bagnet33', '1495', '87527d8247b62bccfdd76a9d5e6e914ebfa5362a', 'v0.0.255'),
('dla34', '0705', '557c5f4f1db66481af6101c75d9ba52b486eda25', 'v0.0.486'),
('dla46c', '1286', '5b38b67fecf2d701b736eb23e1301b6dd7eb5fb9', 'v0.0.282'),
('dla46xc', '1225', 'e570f5f00a098b0de34e657f9d8caeda524d39f3', 'v0.0.293'),
('dla60', '0554', '88b141c4ca81598dbe4333bd4ddd5a1554772348', 'v0.0.494'),
('dla60x', '0553', '58924af84faf6ac0d980b047ccb505619925d97d', 'v0.0.493'),
('dla60xc', '1074', '1b4e4048847e1ba060eb76538ee09e760f40be11', 'v0.0.289'),
('dla102', '0644', 'cadbb1cc5feb58497198aac2c8028c843a6d5a9f', 'v0.0.202'),
('dla102x', '0602', '193568a7ab3c0a3b6702fcc3e326b7fef011f752', 'v0.0.202'),
('dla102x2', '0553', '30c8f409240872053c5bb8523baeb274ba9da4ee', 'v0.0.202'),
('dla169', '0587', '4f3e6a6e604cc06ebcf63fc86cc5287399f32683', 'v0.0.202'),
('fishnet150', '0638', '5cbd08ec1534f2d5e7861395cc6f224ecbe8cb76', 'v0.0.168'),
('espnetv2_wd2', '2107', 'f2e17f0a8845b72f4645a0c06f2cfc8d9ef0253e', 'v0.0.238'),
('espnetv2_w1', '1427', '538f31fb92699fddabc27784fe746871fd8a635b', 'v0.0.238'),
('espnetv2_w5d4', '1273', 'b119ad9e52bf8c2d88db8eeb62a1fd6e23a1b6a6', 'v0.0.238'),
('espnetv2_w3d2', '1194', '3804a85006d874273b80026339b36943e9d813e5', 'v0.0.238'),
('espnetv2_w2', '0994', 'c212d81a9d12361b7d49ec841599f6a0f84f7b44', 'v0.0.238'),
('hrnet_w18_small_v1', '0873', '1060c1c562770adb94115de1cad797684dbb5703', 'v0.0.492'),
('hrnet_w18_small_v2', '0802', '052ff8f941529378a3f6a9885e8e5232f9d2b992', 'v0.0.421'),
('hrnetv2_w18', '0685', '21c708c4144246058c4346d9296f6d180682af99', 'v0.0.421'),
('hrnetv2_w30', '0607', '93553fe4cccd22afaac7f73937b98e8ac89850cd', 'v0.0.421'),
('hrnetv2_w32', '0607', 'e68bcf9079982540f80607d7fb1331594f721fd6', 'v0.0.421'),
('hrnetv2_w40', '0571', '60aa3b9d3a91d20535753fcef0d668aeae466b10', 'v0.0.421'),
('hrnetv2_w44', '0592', 'ff313e2963d1464251f5dbb7032502ffd028d612', 'v0.0.421'),
('hrnetv2_w48', '0578', '8823f8445b793c8ab5d1f2dd772978c2a71b7850', 'v0.0.421'),
('hrnetv2_w64', '0552', '3d8ef6e5f59fbce8746b9e658b568be9236df21a', 'v0.0.421'),
('vovnet39', '0548', '20b60ee6bb8c59c684f8ffed7fbda3d76b1a7280', 'v0.0.493'),
('vovnet57', '0660', 'f4f3ed330c374008498ba34ca7f54759a40ac1ba', 'v0.0.431'),
('selecsls42b', '0596', 'f5a35c74880fbe94fbe3770a96968fc81186b42b', 'v0.0.493'),
('selecsls60', '0511', '960edec5159b56bdba4ce606df203c8ce14cb8ba', 'v0.0.496'),
('selecsls60b', '0537', '7f83801b1c158502d93a4523bdecda43017448d5', 'v0.0.495'),
('hardnet39ds', '0864', '72e8423ee0b496c10b48ae687a417385d9667394', 'v0.0.485'),
('hardnet68ds', '0738', '012bf3ac31f38c78d5cdef1367cc3c27447236af', 'v0.0.487'),
('hardnet68', '0710', 'c8d4c059f2688f0005a7cd1160ac0fbf093b69cd', 'v0.0.491'),
('hardnet85', '0572', '3baa0a7dd204196fd1afa631cf6de082a5cc0a36', 'v0.0.495'),
('squeezenet_v1_0', '1734', 'e6f8b0e8253cef1c5c071dfaf2df5fdfc6a64f8c', 'v0.0.128'),
('squeezenet_v1_1', '1739', 'd7a1483aaa1053c7cd0cf08529b2b87ed2781b35', 'v0.0.88'),
('squeezeresnet_v1_0', '1767', '66474b9b6a771055b28c37b70621c026a1ef6ef4', 'v0.0.178'),
('squeezeresnet_v1_1', '1784', '26064b82773e7a7175d6038976a73abfcd5ed2be', 'v0.0.70'),
('sqnxt23_w1', '1866', '73b700c40de5f7be9d2cf4ed30cc8935c670a3c3', 'v0.0.171'),
('sqnxt23v5_w1', '1743', '7a83722e7d362cef950d8534020f837caf9e6314', 'v0.0.172'),
('sqnxt23_w3d2', '1321', '4d733bcd19f1e502ebc46b52f0b69d959636902e', 'v0.0.210'),
('sqnxt23v5_w3d2', '1268', '4f98bbd3841d8d09a067100841f64ce3eccf184a', 'v0.0.212'),
('sqnxt23_w2', '1063', '95d9b55a5e857298bdb7974db6e3dbd9ecc94401', 'v0.0.240'),
('sqnxt23v5_w2', '1024', '707246f323bc95d0ea2d5608e9e85ae9fe59773a', 'v0.0.216'),
('shufflenet_g1_wd4', '3677', 'ee58f36811d023e1b2e651469c470e588c93f9d3', 'v0.0.134'),
('shufflenet_g3_wd4', '3617', 'bd08e3ed6aff4993cf5363fe8acaf0b22394bea0', 'v0.0.135'),
('shufflenet_g1_wd2', '2238', 'f77dcd18d3b759a3046bd4a2443c40e4ff455313', 'v0.0.174'),
('shufflenet_g3_wd2', '2060', 'ea6737a54bce651a0e8c0b533b982799842cb1c8', 'v0.0.167'),
('shufflenet_g1_w3d4', '1675', '2f1530aa72ee04e3599c5296b590a835d9d50e7f', 'v0.0.218'),
('shufflenet_g3_w3d4', '1609', 'e008e926f370af28e587f349384238d240a0fc02', 'v0.0.219'),
('shufflenet_g1_w1', '1350', '01934ee8f4bf7eaf4e36dd6442debb84ca2a2849', 'v0.0.223'),
('shufflenet_g2_w1', '1332', 'f5a1479fd8523032ee17a4de00fefd33ff4d31e6', 'v0.0.241'),
('shufflenet_g3_w1', '1329', 'ac58d62c5f277c0e9e5a119cc1f48cb1fcfc8306', 'v0.0.244'),
('shufflenet_g4_w1', '1310', '73c039ebf56f9561dd6eecc4cbad1ab1db168ed1', 'v0.0.245'),
('shufflenet_g8_w1', '1319', '9a50ddd9ce67ec697e3ed085d6c39e3d265f5719', 'v0.0.250'),
('shufflenetv2_wd2', '1830', '156953de22d0e749c987da4a58e0e53a5fb18291', 'v0.0.90'),
('shufflenetv2_w1', '1123', '27435039ab7794c86ceab11bd93a19a5ecab78d2', 'v0.0.133'),
('shufflenetv2_w3d2', '0913', 'f132506c9fa5f0eb27398f9936b53423d0cd5b66', 'v0.0.288'),
('shufflenetv2_w2', '0823', '2d67ac62057103fd2ed4790ea0058e0922abdd0f', 'v0.0.301'),
('shufflenetv2b_wd2', '1782', '845a9c43cf4a9873f89c6116634e74329b977e64', 'v0.0.157'),
('shufflenetv2b_w1', '1101', 'f679702f7c626161413320160c6c9c199de9b667', 'v0.0.161'),
('shufflenetv2b_w3d2', '0879', '4022da3a5922127b1acf5327bd9f1d4d55726e05', 'v0.0.203'),
('shufflenetv2b_w2', '0810', '7429df751916bf24bd7fb86bc137ae36275b9d19', 'v0.0.242'),
('menet108_8x1_g3', '2030', 'aa07f925180834389cfd3bf50cb22d2501225118', 'v0.0.89'),
('menet128_8x1_g4', '1913', '0c890a76fb23c0af50fdec076cb16d0f0ee70355', 'v0.0.103'),
('menet160_8x1_g8', '2028', '4f28279a94e631f6a51735de5ea29703cca69845', 'v0.0.154'),
('menet228_12x1_g3', '1289', '2dc2eec7c9ebb41c459450e1843503b5ac7ecb3a', 'v0.0.131'),
('menet256_12x1_g4', '1216', '7caf63d15190648e266a4e7520c3ad677716f388', 'v0.0.152'),
('menet348_12x1_g3', '0936', '62c72b0b56460f062d4da7155bd64a524f42fb88', 'v0.0.173'),
('menet352_12x1_g8', '1167', '5892fea4e44eb27814a9b092a1a06eb81cea7844', 'v0.0.198'),
('menet456_24x1_g3', '0780', '7a89b32c89f878ac63fc96ddc71cb1a5e91c84d6', 'v0.0.237'),
('mobilenet_wd4', '2218', '3185cdd29b3b964ad51fdd7820bd65f091cf281f', 'v0.0.62'),
('mobilenet_wd2', '1330', '94f13ae1375b48892d8ecbb4a253bb583fe27277', 'v0.0.156'),
('mobilenet_w3d4', '1051', '6361d4b4192b5fc68f3409100d825e8edb28876b', 'v0.0.130'),
('mobilenet_w1', '0865', 'eafd91e9369abb09726f2168aba24453b17fc22e', 'v0.0.155'),
('mobilenetb_wd4', '2165', '2070764e0b3be74922eb5fa0a4342c693821ba90', 'v0.0.481'),
('mobilenetb_wd2', '1271', '799ef980b2726a77d4b68d99f520d9d6bc7d86dc', 'v0.0.480'),
('mobilenetb_w3d4', '1020', 'b01c8bacda6f8e26b34a0313a4dc3883511760f7', 'v0.0.481'),
('mobilenetb_w1', '0788', '82664eb4c1f2ddd0ac163f50263237f7667223f3', 'v0.0.489'),
('fdmobilenet_wd4', '3053', 'd4f18e5b4ed63e5426eafbf5db7f8e2a97c28581', 'v0.0.177'),
('fdmobilenet_wd2', '1969', '242b9fa82d54f54f08b4bdbb194b7c89030e7bc4', 'v0.0.83'),
('fdmobilenet_w3d4', '1601', 'cb10c3e129706d3023d752e7402965af08f91ca7', 'v0.0.159'),
('fdmobilenet_w1', '1312', '95fa0092aac013c88243771faf66ef1134b7574d', 'v0.0.162'),
('mobilenetv2_wd4', '2412', 'd92b5b2dbb52e27354ddd673e6fd240a0cf27175', 'v0.0.137'),
('mobilenetv2_wd2', '1442', 'd7c586c716e3ea85e793f7c5aaf9cae2a907117b', 'v0.0.170'),
('mobilenetv2_w3d4', '1044', '768454f4bdaae337c180bb81248b8c5b8d31040b', 'v0.0.230'),
('mobilenetv2_w1', '0864', '6e58b1cb96852e4c6de6fc9cd11241384af21df9', 'v0.0.213'),
('mobilenetv2b_wd4', '2338', '77ba7e8d41542d311e240dab75e4d29fa0677fb9', 'v0.0.483'),
('mobilenetv2b_wd2', '1373', '3bfc8a592a0881c2cb025f52a09fb5057a7896be', 'v0.0.486'),
('mobilenetv2b_w3d4', '1064', '5d4dc4e5622043697382272183a3d0bd43dbc218', 'v0.0.483'),
('mobilenetv2b_w1', '0884', 'ab0ea3993e7c533f0aea5793331dc6302d715e9c', 'v0.0.483'),
('mobilenetv3_large_w1', '0729', 'db741a9938acc8a5fd9544aaf53b41aebb98e021', 'v0.0.491'),
('igcv3_wd4', '2830', '71abf6e0b6bff1d3a3938bfea7c752b59ac05e9d', 'v0.0.142'),
('igcv3_wd2', '1703', '145b7089e1d0e0ce88f17393a357d5bb4ae37734', 'v0.0.132'),
('igcv3_w3d4', '1096', '3c7c86fc43df2e5cf95a451ebe07fccf2d9dc076', 'v0.0.207'),
('igcv3_w1', '0900', 'e2c3da1cffd8e42da7a052b80db2f86758c8d35b', 'v0.0.243'),
('mnasnet_b1', '0723', 'a6f74cf912fa5b1ee4bb9825f3143a0c1ced03be', 'v0.0.493'),
('mnasnet_a1', '0705', '3efe98a3bd6ea0a8cb0902a4dba424c134145d5f', 'v0.0.486'),
('darts', '0756', 'c2c7c33ba60d1052f95bcae72128fc47b1214cff', 'v0.0.485'),
('proxylessnas_cpu', '0750', '256da7c8a05cd87a59e30e314b22dc1d4565946e', 'v0.0.324'),
('proxylessnas_gpu', '0724', 'd9ce80964e37fb30bddcc552f1d68361b1a94873', 'v0.0.333'),
('proxylessnas_mobile', '0780', 'b8bb5a64f333562475dcfc09eeb7e603d6e66afb', 'v0.0.326'),
('proxylessnas_mobile14', '0651', 'f08baec85343104994b821581cde3ee965a2c593', 'v0.0.331'),
('fbnet_cb', '0761', '3db688f2fa465bc93bc546b4432389fe33aec5b3', 'v0.0.486'),
('xception', '0556', 'bd2c1684a5dc41dd00b4676c194a967558ed577e', 'v0.0.115'),
('inceptionv3', '0559', '6c087967685135a321ed66b9ad2277512e9b2868', 'v0.0.92'),
('inceptionv4', '0525', 'f7aa9536392ea9ec7df5cc8771ff53c19c45fff2', 'v0.0.105'),
('inceptionresnetv2', '0494', '3328f7fa4c50c785b525e7b603926ec1fccbce14', 'v0.0.107'),
('polynet', '0453', '742803144e5a2a6148212570726350da09adf3f6', 'v0.0.96'),
('nasnet_4a1056', '0790', 'f89dd74f47e42c35c9a1182f248df1d319524db7', 'v0.0.495'),
('nasnet_6a4032', '0424', '73cca5fee009db77412c5fca7c826b3563752757', 'v0.0.101'),
('pnasnet5large', '0428', '998a548f44ac1b1ac6c4959a721f2675ab5c48b9', 'v0.0.114'),
('spnasnet', '0776', '09cc881e024d69ab3bd88bef332becb6147a0651', 'v0.0.490'),
('efficientnet_b0', '0722', '041a8346ad6a13dbb66384d9a41ed20d959d3e77', 'v0.0.364'),
('efficientnet_b1', '0626', '455dcb2a05a1295c8f6b728d4f6c72c507e1369a', 'v0.0.376'),
('efficientnet_b0b', '0670', '8892ba581b0d81dbc8fe56c49d81d3dd007b1db8', 'v0.0.403'),
('efficientnet_b1b', '0565', 'c29a1b67804b70856ed3cc329256b80e3afc04ad', 'v0.0.403'),
('efficientnet_b2b', '0516', '7532826e5c14f7ade9e8ea9ac92044d817575b06', 'v0.0.403'),
('efficientnet_b3b', '0431', '1e342ec2160e6de813f9bd4d0ab9ce3b5749780e', 'v0.0.403'),
('efficientnet_b4b', '0376', 'b60e177974539fc6dcc9fef3b90591bfc9949514', 'v0.0.403'),
('efficientnet_b5b', '0334', 'cd70ae717ddca72430efe91cf7a3c4e28bcd61ac', 'v0.0.403'),
('efficientnet_b6b', '0312', 'f581d9f046032e28e532082fa49bfd373952db4f', 'v0.0.403'),
('efficientnet_b7b', '0311', '2b8a6040588aea44b57df89e2d9239d906737508', 'v0.0.403'),
('efficientnet_b0c', '0646', '81eabd2992ba7bb80c1c1a7e20373e7c65aa1286', 'v0.0.433'),
('efficientnet_b1c', '0555', '10b5589de6ee9af3c67f0ae35a424b4adc0a9e35', 'v0.0.433'),
('efficientnet_b2c', '0489', '6f649ece72d0334e5191da78767f2ac9149e85f0', 'v0.0.433'),
('efficientnet_b3c', '0434', 'e1e2a1b7f3457bdd8f46a5858472ad4acd3d2362', 'v0.0.433'),
('efficientnet_b4c', '0359', 'cdb2012d6688b0208527d36817589095a1db1031', 'v0.0.433'),
('efficientnet_b5c', '0302', '3240f368eb5e7ed78b9ac0d68800fbea9b220e9b', 'v0.0.433'),
('efficientnet_b6c', '0285', 'e71a1ccc3e876a7d64818c83518dd0e64c630d3e', 'v0.0.433'),
('efficientnet_b7c', '0277', 'feea7daf3478645131c94319c71141df559dce19', 'v0.0.433'),
('efficientnet_b8c', '0270', '050ec6358583d0a48c74b02563474ecc1d9dacba', 'v0.0.433'),
('efficientnet_edge_small_b', '0629', '5b398abc73c4c4870d88c62452ff919bec2440c9', 'v0.0.434'),
('efficientnet_edge_medium_b', '0553', '0b3c86d49b3684d19e6589030aeceb918faa648c', 'v0.0.434'),
('efficientnet_edge_large_b', '0477', '055436da0fa440933b906c528cb34b200e28f73c', 'v0.0.434'),
('mixnet_s', '0703', '135aa0426712a9f60afe2cfff2df9607f3fc2d68', 'v0.0.493'),
('mixnet_m', '0631', '0881aba9281e3cae3e7cddd75d385c3cccdf7e25', 'v0.0.493'),
('mixnet_l', '0603', '0576f35d60442379df422268e0f3070af82c3b72', 'v0.0.414'),
('resneta10', '1159', 'a66e01d9f567ce747e7c255adac848a33c54a3a5', 'v0.0.484'),
('resnetabc14b', '0956', '6f8c36067feb2d27b5e0813bf18ef29038db0d70', 'v0.0.477'),
('resneta18', '0802', '225dd3ae0eac3ce3815732ac4ab0e25df709a75c', 'v0.0.486'),
('resneta50b', '0534', '28eff48a72d892802dde424db3fd0e1a9c12be16', 'v0.0.492'),
('resneta101b', '0487', 'fad05e5777b8a519dc24e2884ed17dffbc8c3dbb', 'v0.0.452'),
('resneta152b', '0465', '05f96c54acfa844c249b34a40e360fc8e29d4064', 'v0.0.452'),
('resnetd50b', '0549', '17d6004b5c6c1b97cfb47377ae5076810c5d88be', 'v0.0.296'),
('resnetd101b', '0461', 'fead1bcb86bba2be4ed7f0033fa972dc613e3280', 'v0.0.296'),
('resnetd152b', '0467', 'd0fe2fe09c6462de17aca4a72bbcb08b76a66e02', 'v0.0.296'),
('nin_cifar10', '0743', '9696dc1a8f67e7aa233836bcbdb99625769b1e86', 'v0.0.175'),
('nin_cifar100', '2839', 'eed0e9af2cd8e5aa77bb063204525812dbd9190f', 'v0.0.183'),
('nin_svhn', '0376', '7cb750180b0a981007194461bf57cfd90eb59c88', 'v0.0.270'),
('resnet20_cifar10', '0597', '13c5ab19145591d75873da3497be1dd1bd2afd46', 'v0.0.163'),
('resnet20_cifar100', '2964', '4e1443526ee96648bfe4d4954871a97a9c9622f4', 'v0.0.180'),
('resnet20_svhn', '0343', '7ac0d94a4563c9611092ce08f2124a3828103139', 'v0.0.265'),
('resnet56_cifar10', '0452', 'a73e63e9d0f3f7adde59b4142323c0dd05930de7', 'v0.0.163'),
('resnet56_cifar100', '2488', '590977100774a289b91088245dd2bd0cbe6567e6', 'v0.0.181'),
('resnet56_svhn', '0275', 'e676e4216a771b7d0339e87284c7ebb03af8ed25', 'v0.0.265'),
('resnet110_cifar10', '0369', 'f89f1c4d9fdd9e5cd00949a872211376979ff703', 'v0.0.163'),
('resnet110_cifar100', '2280', '6c5fa14bb4ced2dffe6ee1536306687aae57f9cb', 'v0.0.190'),
('resnet110_svhn', '0245', '0570b5942680cf88c66ae9a76c0e7ff0a41e71a6', 'v0.0.265'),
('resnet164bn_cifar10', '0368', 'e7941eeeddef9336664522eaa3af92d77128cac0', 'v0.0.179'),
('resnet164bn_cifar100', '2044', 'c7db7b5e6fbe6dc0f9501d25784f1a107c6e0315', 'v0.0.182'),
('resnet164bn_svhn', '0242', '8cdce67452d2780c7c69f4d0b979e80189d4bff8', 'v0.0.267'),
('resnet272bn_cifar10', '0333', '99dc36ca2abc91f3f82db181a14c5364cd5526be', 'v0.0.368'),
('resnet272bn_cifar100', '2007', '088af5c23634fe75206081d946fc82fdc9e999ad', 'v0.0.368'),
('resnet272bn_svhn', '0243', '39d741c8d081ebd2266a114e82363839ffdf8ebb', 'v0.0.368'),
('resnet542bn_cifar10', '0343', 'e687b254e1eace223ceef39ad17106e61b8649ba', 'v0.0.369'),
('resnet542bn_cifar100', '1932', 'df8bd5264c1db11dd545f62e9c750c7976edccc9', 'v0.0.369'),
('resnet542bn_svhn', '0234', '4f78075cbcba196fc8f5297b71730906c1bf7d8a', 'v0.0.369'),
('resnet1001_cifar10', '0328', 'bb979d53089138b5060b418cad6c8ad9a940bf81', 'v0.0.201'),
('resnet1001_cifar100', '1979', '692d9516620bc8b7a4da30a98ebcb7432243f5e9', 'v0.0.254'),
('resnet1001_svhn', '0241', '031fb0ce5e5ddbebca2fd7d856d63ddd147fe933', 'v0.0.408'),
('resnet1202_cifar10', '0353', '377510a63595e544333f6f57523222cd845744a8', 'v0.0.214'),
('resnet1202_cifar100', '2156', '1d94f9ccdd81e1785ea6ec02a861a4a05f39e5c9', 'v0.0.410'),
('preresnet20_cifar10', '0651', 'daa895737a34edda75c40f2d8566660590c84a3f', 'v0.0.164'),
('preresnet20_cifar100', '3022', '37f15365d48768f792f4551bd6ccf5259bc70530', 'v0.0.187'),
('preresnet20_svhn', '0322', '608cee12c0bc3cb59feea96386f6c12c6da91ba5', 'v0.0.269'),
('preresnet56_cifar10', '0449', 'cb37cb9d4524d4e0f5724aeed9face455f527efc', 'v0.0.164'),
('preresnet56_cifar100', '2505', '4c39e83f567f15d6ee0d69bf2dcaccd62067dfe5', 'v0.0.188'),
('preresnet56_svhn', '0280', 'b974c2c96a18ff2278f1d33df58c8537f9139ed9', 'v0.0.269'),
('preresnet110_cifar10', '0386', 'd6d4b7bd9f154eca242482a7559413d5c7b6d465', 'v0.0.164'),
('preresnet110_cifar100', '2267', '18cf4161c67c03e50cff7eb30988a559f3f97260', 'v0.0.191'),
('preresnet110_svhn', '0279', '6804450b744fa922d9ec22aa4c792d3a5da812f6', 'v0.0.269'),
('preresnet164bn_cifar10', '0364', '7ecf30cb818f80908ef4a77af4660c1080d0df81', 'v0.0.196'),
('preresnet164bn_cifar100', '2018', 'a20557c8968c04d8d07e40fdc5b0d1ec1fb3339d', 'v0.0.192'),
('preresnet164bn_svhn', '0258', '4aeee06affea89767c058fe1650b7476f05d8563', 'v0.0.269'),
('preresnet272bn_cifar10', '0325', '944ba29df55afcf2789399de552e91578edd4295', 'v0.0.389'),
('preresnet272bn_cifar100', '1963', '38e296beff0cd92697235c717a801ec422cdafe3', 'v0.0.389'),
('preresnet272bn_svhn', '0234', '7ff97873447fbfb1d823fd43439e34351f149c13', 'v0.0.389'),
('preresnet542bn_cifar10', '0314', 'ac40a67bb3b7f02179ff3c4fa0d6533ff3e2dd9f', 'v0.0.391'),
('preresnet542bn_cifar100', '1871', 'd536ad01fc40fe19605a3409efb995bf8593aa29', 'v0.0.391'),
('preresnet542bn_svhn', '0236', '3a4633f14e96cce30086ef4149b52c0b7cbccce6', 'v0.0.391'),
('preresnet1001_cifar10', '0265', '50507ff74b6047abe6d04af6471d9bacafa05e24', 'v0.0.209'),
('preresnet1001_cifar100', '1841', '185e033d77e61cec588196e3fe8bf8dcb43acfab', 'v0.0.283'),
('preresnet1202_cifar10', '0339', '942cf6f22d80b5428256825234a252b8d6ebbe9d', 'v0.0.246'),
('resnext20_1x64d_cifar10', '0433', '0661d12e534a87bd5f3305d541afce5730d45492', 'v0.0.365'),
('resnext20_1x64d_cifar100', '2197', 'e7073542469be79876e3b3aeccb767638a136b93', 'v0.0.365'),
('resnext20_1x64d_svhn', '0298', '3c7febc8eee0887ebc54d5e1a4411a46066f0624', 'v0.0.365'),
('resnext20_2x32d_cifar10', '0453', 'afb48ca4764efaff92e4cc50ec11566993180ac1', 'v0.0.365'),
('resnext20_2x32d_cifar100', '2255', '995281ee5b0d021bf6e9e984802e30a0042e0290', 'v0.0.365'),
('resnext20_2x32d_svhn', '0296', '54189677f599b4f42d2469ad75921165f84d82dc', 'v0.0.365'),
('resnext20_2x64d_cifar10', '0403', '6f0c138fe13a73c9f149053065f1f430761e75be', 'v0.0.365'),
('resnext20_2x64d_cifar100', '2060', '5f6dfa3ff5f1b0e3e1441f554a80c504c1991a03', 'v0.0.365'),
('resnext20_2x64d_svhn', '0283', '9c77f074dcd6333a859da6ddd833a662031406ea', 'v0.0.365'),
('resnext20_4x16d_cifar10', '0470', 'ae1ba8697ec0a62a58edb9c87ef0d9f290d3c857', 'v0.0.365'),
('resnext20_4x16d_cifar100', '2304', '2c9d578ab797ef887b3e8c51d897a8e720880bfd', 'v0.0.365'),
('resnext20_4x16d_svhn', '0317', '6691c8f56f7a63b5638408356a7f9fe4931d7ac9', 'v0.0.365'),
('resnext20_4x32d_cifar10', '0373', 'cf6960607fb51269cb80d6900eadefd16c6bb511', 'v0.0.365'),
('resnext20_4x32d_cifar100', '2131', '2c558efca7c66c412843603cff137e9565d7c016', 'v0.0.365'),
('resnext20_4x32d_svhn', '0298', '1da9a7bf6bed55f89b1c19a9b6ff97f7fa674c81', 'v0.0.365'),
('resnext20_8x8d_cifar10', '0466', '280e5f89e24ca765a771e733f53377ede244207f', 'v0.0.365'),
('resnext20_8x8d_cifar100', '2282', '363f03e8040da3a2f1cfe88df99481b23382e28c', 'v0.0.365'),
('resnext20_8x8d_svhn', '0318', 'c1536efbb2d7c78ee5b9323b43e3d0d56e9d2aa4', 'v0.0.365'),
('resnext20_8x16d_cifar10', '0404', '4d7f72818137674c3c17257427b0c5788b250320', 'v0.0.365'),
('resnext20_8x16d_cifar100', '2172', '3fc47c7072039fd5437ed276e92a0840c94fe82e', 'v0.0.365'),
('resnext20_8x16d_svhn', '0301', '41b28fd308e8195ee74050a2c56e2085552a55e0', 'v0.0.365'),
('resnext20_16x4d_cifar10', '0404', '426b5b2f994bc8154cb17957cf7d487b156d4fe2', 'v0.0.365'),
('resnext20_16x4d_cifar100', '2282', '508d32271ea1d34d40305a67d64cf1547ca58ae6', 'v0.0.365'),
('resnext20_16x4d_svhn', '0321', '854df3b71743fe871b5cd9b72ff6197b34c57b5e', 'v0.0.365'),
('resnext20_16x8d_cifar10', '0394', 'f81d05668bf293e5d84150ec54f94c4a50b79d9c', 'v0.0.365'),
('resnext20_16x8d_cifar100', '2173', 'a246aea51d1fd165c9f6db2aa528a2afa6a03dcd', 'v0.0.365'),
('resnext20_16x8d_svhn', '0293', '31f4b14e3113085a6b9a8e384a3131af764afe82', 'v0.0.365'),
('resnext20_32x2d_cifar10', '0461', '2d6ee8362c497b7782b91f85d5c9fd0f4ac0470e', 'v0.0.365'),
('resnext20_32x2d_cifar100', '2322', 'ce65201429a93cf9175c0e2b8601ceaf538a2438', 'v0.0.365'),
('resnext20_32x2d_svhn', '0327', '2499ff6de736f1241bc58422ae9d1d39d8ac1014', 'v0.0.365'),
('resnext20_32x4d_cifar10', '0420', 'a365893948daea8ef5b20b61bc3519fe199381af', 'v0.0.365'),
('resnext20_32x4d_cifar100', '2213', '5b2ffba877ea3dd0aa86d7c38ab5467d4ed856e6', 'v0.0.365'),
('resnext20_32x4d_svhn', '0309', 'ddbef9ac7da748e162ac886621833a5aef84cb86', 'v0.0.365'),
('resnext20_64x1d_cifar10', '0493', '6618e9ac6df3bc2614c23ba0e39ea0d0d5d21eec', 'v0.0.365'),
('resnext20_64x1d_cifar100', '2353', '9c789af45dd1455b4b7fe94a21709bb9dc8c411b', 'v0.0.365'),
('resnext20_64x1d_svhn', '0342', '2591ea440078f6c089513ace548ed79fb0f9bb76', 'v0.0.365'),
('resnext20_64x2d_cifar10', '0438', '32fe188b44b9e7f80ddec8024799be871dd63c4e', 'v0.0.365'),
('resnext20_64x2d_cifar100', '2235', '62fcc38a8e37550ace0f67927d2f5a74990de920', 'v0.0.365'),
('resnext20_64x2d_svhn', '0314', '4c01490b9566eeb2aaf27446ca8f5bac621894f0', 'v0.0.365'),
('resnext29_32x4d_cifar10', '0315', 'c8a1beda8ba616dc9af682d3ac172bfdd7a2472d', 'v0.0.169'),
('resnext29_32x4d_cifar100', '1950', '5f2eedcdd5cea6fdec1508f261f556a953ae28c2', 'v0.0.200'),
('resnext29_32x4d_svhn', '0280', 'dcb6aef96fbd76aa249e8f834093e2384b898404', 'v0.0.275'),
('resnext29_16x64d_cifar10', '0241', '76b97a4dd6185602a8ca8bdd77a70f8ddfcd4e83', 'v0.0.176'),
('resnext29_16x64d_cifar100', '1693', '1fcec90d6425e0405c61a1e90a80701ea556beca', 'v0.0.322'),
('resnext29_16x64d_svhn', '0268', 'c57307f3bf70d0f39d6cfb1dc2c82d8ef9e89603', 'v0.0.358'),
('resnext56_1x64d_cifar10', '0287', '8edd977c69cfec45604f1b0dbbe36fb2b65f122d', 'v0.0.367'),
('resnext56_1x64d_cifar100', '1825', 'b78642c10e21afc477c27076e2ab9ec6854cca13', 'v0.0.367'),
('resnext56_1x64d_svhn', '0242', '860c610caa23e742093d7c7677253ee5b917af44', 'v0.0.367'),
('resnext56_2x32d_cifar10', '0301', 'd0284dff7382d969c59e602f66b25a72b8d9e232', 'v0.0.367'),
('resnext56_2x32d_cifar100', '1786', '32205070280db6bd6431a6b7ad0537524af5a6f0', 'v0.0.367'),
('resnext56_2x32d_svhn', '0246', 'ffb8df9ba6a5053c79dbdecf7d0b6f1d248fcfdf', 'v0.0.367'),
('resnext56_4x16d_cifar10', '0311', 'add022e7dd21245720e789a0a16a7475d15b5702', 'v0.0.367'),
('resnext56_4x16d_cifar100', '1809', '366de7b5abba9b09d1ffde87e3e9f379db73d691', 'v0.0.367'),
('resnext56_4x16d_svhn', '0244', 'f7b697f924eeb64ed14d21f576a4a82007937bbe', 'v0.0.367'),
('resnext56_8x8d_cifar10', '0307', '4f0b72469f9890079471db355e9bfb405d443317', 'v0.0.367'),
('resnext56_8x8d_cifar100', '1806', '827a485e2d5f3154251d6453563a73a511028445', 'v0.0.367'),
('resnext56_8x8d_svhn', '0247', 'f0550cd045d9f4a4f8648f0c9c517421a34f4a11', 'v0.0.367'),
('resnext56_16x4d_cifar10', '0312', '93d71b610a356f8e285dde08ad4fe96d79efcadd', 'v0.0.367'),
('resnext56_16x4d_cifar100', '1824', '9cb7a1326870878315dccb50b0be51968d6ddfa7', 'v0.0.367'),
('resnext56_16x4d_svhn', '0256', '943386bd2e0fabba153955d80171016a95482609', 'v0.0.367'),
('resnext56_32x2d_cifar10', '0314', 'ea8b43351206a567f865a9a53ecae65790da8a60', 'v0.0.367'),
('resnext56_32x2d_cifar100', '1860', '3f65de935e532fe3c18324f645260983c61c0c76', 'v0.0.367'),
('resnext56_32x2d_svhn', '0253', 'ba8c809dcaa9f6f6e3b2e12dfd9df1b123d11be7', 'v0.0.367'),
('resnext56_64x1d_cifar10', '0341', '12a684ad58d1d3407d61b7de4b2cedc042225d20', 'v0.0.367'),
('resnext56_64x1d_cifar100', '1816', 'b80f4315d53963dd3d1ac856d3c2dbf660b836eb', 'v0.0.367'),
('resnext56_64x1d_svhn', '0255', '144bab62a5ed27bcb2465d9ab8b9b4ce0bf31dc5', 'v0.0.367'),
('resnext272_1x64d_cifar10', '0255', 'c1a3fddc4de9f6ebee2a588bd585ede68974da4f', 'v0.0.372'),
('resnext272_1x64d_cifar100', '1911', 'e0b3656a204c40e0b4ee6ade1e8d262c521e842d', 'v0.0.372'),
('resnext272_1x64d_svhn', '0235', '025ee7b915fa25d617e9774b0303f35488445d78', 'v0.0.372'),
('resnext272_2x32d_cifar10', '0274', '23b391ce58d9694ea35240ec3b76ce8b0ebf66b8', 'v0.0.375'),
('resnext272_2x32d_cifar100', '1834', '4802083b5fda38a4a30b8a2b1f24eb4b8fdf55ad', 'v0.0.375'),
('resnext272_2x32d_svhn', '0244', 'b65ddfe317dabceb4d4d7e910ca07c0c575ad9b8', 'v0.0.375'),
('seresnet20_cifar10', '0601', '3411e5ad1060975c45fe6d0d836755a92e3bb27c', 'v0.0.362'),
('seresnet20_cifar100', '2854', '184ad148171fd9244bf8570eee6647a996678ab4', 'v0.0.362'),
('seresnet20_svhn', '0323', 'a3a3c67731eb8bea0cd3af3b8b2f88c1cc70987e', 'v0.0.362'),
('seresnet56_cifar10', '0413', '21bac136e4cac21abb3e08b60254c73b16f0190f', 'v0.0.362'),
('seresnet56_cifar100', '2294', '989d4d9227c4fc33440c267f1e1ac324fd246ad4', 'v0.0.362'),
('seresnet56_svhn', '0264', '63a155acd6407b5e96516b86f6a7cd9e6855c372', 'v0.0.362'),
('seresnet110_cifar10', '0363', 'fa3f09a88d24282e938488c4588968e273770605', 'v0.0.362'),
('seresnet110_cifar100', '2086', '5345be4166268ce2cd44d88eb9edba9f86ccc864', 'v0.0.362'),
('seresnet110_svhn', '0235', 'd129498ad625983d92048f32b80de5d16987779a', 'v0.0.362'),
('seresnet164bn_cifar10', '0339', '11c923152587746a5539a9e4f140db847b9b61c1', 'v0.0.362'),
('seresnet164bn_cifar100', '1995', '6c9dc66b86de6be67df1e59d4aff4a592d7d98b8', 'v0.0.362'),
('seresnet164bn_svhn', '0245', 'd97ea6c83b0fd1da3a976d518977b19bd466d015', 'v0.0.362'),
('seresnet272bn_cifar10', '0339', 'da4073add21614f22b231d8663867007f3f2312d', 'v0.0.390'),
('seresnet272bn_cifar100', '1907', '754af9375f060f55f8c3393a70659df460b8d47d', 'v0.0.390'),
('seresnet272bn_svhn', '0238', '9ffe8acad2a03cd98f6746d3f1528fe5f294aea4', 'v0.0.390'),
('seresnet542bn_cifar10', '0347', 'e64d9ca4b98349973b802572af6625879bc3c4a4', 'v0.0.385'),
('seresnet542bn_cifar100', '1887', 'cd76c769c06b886d2a94268c95691dfc905bac64', 'v0.0.385'),
('seresnet542bn_svhn', '0226', '05ce3771c46aa8b7af7c2056398f83b2b9f116db', 'v0.0.385'),
('sepreresnet20_cifar10', '0618', 'e55551e6e35d04fe8e35d24a5e3d608a08e8dfa2', 'v0.0.379'),
('sepreresnet20_cifar100', '2831', 'ee5d3bd66ce643950e54d54825036b98b13b31cd', 'v0.0.379'),
('sepreresnet20_svhn', '0324', 'd5bb6768cc3134137371a832c5ebc289d982a8db', 'v0.0.379'),
('sepreresnet56_cifar10', '0451', '56c299345242bcfca52c83f77cfff80b7058b1fe', 'v0.0.379'),
('sepreresnet56_cifar100', '2305', '313a7a30a129c174ecaf3de6a94a5fd34dc8d711', 'v0.0.379'),
('sepreresnet56_svhn', '0271', 'f556af3db771e6e4e5b847bd4eddef71b879b8d0', 'v0.0.379'),
('sepreresnet110_cifar10', '0454', '67eea1cc03f76ee0054d39f004ab10f7a70978bb', 'v0.0.379'),
('sepreresnet110_cifar100', '2261', '3291a56be67afd1154d9d2d05e2e1411c12dcb4a', 'v0.0.379'),
('sepreresnet110_svhn', '0259', '5c09cacbcf786e18509947c40de695883d6b3328', 'v0.0.379'),
('sepreresnet164bn_cifar10', '0373', 'ac72ac7fa9d78e66a719717f922f738bbe7f9699', 'v0.0.379'),
('sepreresnet164bn_cifar100', '2005', 'd93993672a414e8f19cb12d2489be930e8605b8f', 'v0.0.379'),
('sepreresnet164bn_svhn', '0256', 'a45d1a65092900bb768969c52f26c61292838caa', 'v0.0.379'),
('sepreresnet272bn_cifar10', '0339', '3e47d575280a70c1726d6b2eb8d0d7c069a3e472', 'v0.0.379'),
('sepreresnet272bn_cifar100', '1913', 'd243b0580717b197bf9eb32a8959c83d21a3124f', 'v0.0.379'),
('sepreresnet272bn_svhn', '0249', '34b910cdfa34bf318f58c3312bc074059c7c669f', 'v0.0.379'),
('sepreresnet542bn_cifar10', '0308', '05f7d4a6bfb1af1825b734eac99b3b46dd8c4b91', 'v0.0.382'),
('sepreresnet542bn_cifar100', '1945', '4dd0e21d02fef2ae1fe3fb3a8cd8c72db11bb685', 'v0.0.382'),
('sepreresnet542bn_svhn', '0247', '456035daf4daecd909e957da090078deba6cb449', 'v0.0.382'),
('pyramidnet110_a48_cifar10', '0372', '35b94d0575c2081a142e71955c8ceea8c51ec5e5', 'v0.0.184'),
('pyramidnet110_a48_cifar100', '2095', '00fd42a00492b2bbb28cacfb7b1a6c63072c37a3', 'v0.0.186'),
('pyramidnet110_a48_svhn', '0247', 'd8a5c6e20b6cc01989a52f9e307caf640169ed0a', 'v0.0.281'),
('pyramidnet110_a84_cifar10', '0298', '81710d7ab90838a8a299bf5f50aed2a3fa41f0e3', 'v0.0.185'),
('pyramidnet110_a84_cifar100', '1887', '6712d5dc69452f2fde1fcc3ee32c3164dcaffc4e', 'v0.0.199'),
('pyramidnet110_a84_svhn', '0243', '473cc640c4ad0a1642500c84f2ef848498d12a37', 'v0.0.392'),
('pyramidnet110_a270_cifar10', '0251', '1e769ce50ef915a807ee99907912c87766fff60f', 'v0.0.194'),
('pyramidnet110_a270_cifar100', '1710', '2732fc6430085192189fd7ccfd287881cc5a6c0d', 'v0.0.319'),
('pyramidnet110_a270_svhn', '0238', '034be5421b598e84f395f421182f664495ca62ca', 'v0.0.393'),
('pyramidnet164_a270_bn_cifar10', '0242', 'c4a79ea3d84344b9d352074122e37f593ee98fd2', 'v0.0.264'),
('pyramidnet164_a270_bn_cifar100', '1670', '08f46c7ff99e9c3fd7b5262e34dc8a00b316646f', 'v0.0.312'),
('pyramidnet164_a270_bn_svhn', '0233', '27b67f1494ed508e0192e3e0f09ac86e32e1e734', 'v0.0.396'),
('pyramidnet200_a240_bn_cifar10', '0244', '52f4d43ec4d952f847c3a8e0503d5a4e6286679c', 'v0.0.268'),
('pyramidnet200_a240_bn_cifar100', '1609', 'e61e7e7eb6675aaf7a18461fea9bb3a53538d43b', 'v0.0.317'),
('pyramidnet200_a240_bn_svhn', '0232', '02bf262e70d9b3ce8038255d0abdec2bc5161f6d', 'v0.0.397'),
('pyramidnet236_a220_bn_cifar10', '0247', '1bd295a7fb834f639b238ffee818b3bde4126c81', 'v0.0.285'),
('pyramidnet236_a220_bn_cifar100', '1634', 'f066b3c6a4d217c42f5e8872fe23d343afe378ec', 'v0.0.312'),
('pyramidnet236_a220_bn_svhn', '0235', '1a0c0711f013035c0e05145501c93fa2519603ea', 'v0.0.398'),
('pyramidnet272_a200_bn_cifar10', '0239', 'd7b23c5460f059ac82ebc7b2cd992a203e098476', 'v0.0.284'),
('pyramidnet272_a200_bn_cifar100', '1619', '486e942734d91cd62d6bcbc283e1d7b56b734507', 'v0.0.312'),
('pyramidnet272_a200_bn_svhn', '0240', 'dcd9af34f57708f598bba723824bf3525f6e42c7', 'v0.0.404'),
('densenet40_k12_cifar10', '0561', '28dc0035549e51dcb53d1360707bd6f1558a5dcd', 'v0.0.193'),
('densenet40_k12_cifar100', '2490', '908f02ba7dbd7b8138f264193189e762a5590b1c', 'v0.0.195'),
('densenet40_k12_svhn', '0305', '645564c186a4e807293a68fb388803e36916e7b2', 'v0.0.278'),
('densenet40_k12_bc_cifar10', '0643', '7fdeda31c5accbddf47ab0f0b9a32cff723bf70d', 'v0.0.231'),
('densenet40_k12_bc_cifar100', '2841', '35cd8e6a2ae0896a8af2b689e076057fa19efa9b', 'v0.0.232'),
('densenet40_k12_bc_svhn', '0320', '6f2f98243fac9da22be26681bcd0a4d08e0f4baf', 'v0.0.279'),
('densenet40_k24_bc_cifar10', '0452', '13fa807e095b44ecaf3882e488b33a890d9d1e29', 'v0.0.220'),
('densenet40_k24_bc_cifar100', '2267', '2c4ef7c4bbe7f64784ad18b3845f4bf533f2ce57', 'v0.0.221'),
('densenet40_k24_bc_svhn', '0290', '03e136dd71bc85966fd2a4cb15692cfff3886df2', 'v0.0.280'),
('densenet40_k36_bc_cifar10', '0404', '4c154567e25619994a2f86371afbf1ad1e7475e9', 'v0.0.224'),
('densenet40_k36_bc_cifar100', '2050', 'd7275d39bcf439151c3bbeb707efa54943714b03', 'v0.0.225'),
('densenet40_k36_bc_svhn', '0260', 'b81ec8d662937851beecc62f36209fd8db464265', 'v0.0.311'),
('densenet100_k12_cifar10', '0366', '4e371ccb315d0fcd727a76255ca62ae9e92059cc', 'v0.0.205'),
('densenet100_k12_cifar100', '1964', '2ed5ec27a4d4a63876a4cacf52be53c91fbecb5f', 'v0.0.206'),
('densenet100_k12_svhn', '0260', '3e2b34b2087fe507a3672bfce1520747fca58046', 'v0.0.311'),
('densenet100_k24_cifar10', '0313', '9f795bac946d1390cf59f686b730fe512c406bd2', 'v0.0.252'),
('densenet100_k24_cifar100', '1808', '9bfa3e9c736a80906d163380cb361b940c2188bf', 'v0.0.318'),
('densenet100_k12_bc_cifar10', '0416', '6685d1f4844b092471f7d03dfc3fa64a302008e6', 'v0.0.189'),
('densenet100_k12_bc_cifar100', '2119', 'fbd8a54c1c9e4614f950b8473f8524d25caba4a7', 'v0.0.208'),
('densenet190_k40_bc_cifar10', '0252', '87b15be0620c0adff249d33540c20314188b16d7', 'v0.0.286'),
('densenet250_k24_bc_cifar10', '0267', 'dad68693d83a276d14a87dce6cebc5aceebca775', 'v0.0.290'),
('densenet250_k24_bc_cifar100', '1739', '598e91b7906f427296ab72cf40032f0846a52d91', 'v0.0.303'),
('xdensenet40_2_k24_bc_cifar10', '0531', '66c9d384d3ef4ec4095c9759bb8b7986f2f58e26', 'v0.0.226'),
('xdensenet40_2_k24_bc_cifar100', '2396', '73d5ba88a39b971457b9cea2cd72d1e05ab4d165', 'v0.0.227'),
('xdensenet40_2_k24_bc_svhn', '0287', '745f374b398bce378903af8c71cb3c67f6891d7f', 'v0.0.306'),
('xdensenet40_2_k36_bc_cifar10', '0437', 'e9bf419295f833b56fa3da27218107ed42310307', 'v0.0.233'),
('xdensenet40_2_k36_bc_cifar100', '2165', '78b6e754d90774d7b6ec3d811e6e57192148cfbf', 'v0.0.234'),
('xdensenet40_2_k36_bc_svhn', '0274', '4377e8918c1e008201aafc448f642642474eab14', 'v0.0.306'),
('wrn16_10_cifar10', '0293', 'ecf1c17c0814763095df562cb27d15a5aeb51836', 'v0.0.166'),
('wrn16_10_cifar100', '1895', 'bcb5c89ca71ffc99bc09b861b339724047724659', 'v0.0.204'),
('wrn16_10_svhn', '0278', '76f4e1361f9eca82fa4c2764b530f57280a34cfe', 'v0.0.271'),
('wrn28_10_cifar10', '0239', '16f3c8a249993f23b0f81d9ce3650faef5e455d8', 'v0.0.166'),
('wrn28_10_cifar100', '1788', '67ec43c6e913d43c8936809f04b0780035a24835', 'v0.0.320'),
('wrn28_10_svhn', '0271', 'fcd7a6b03a552b22ec25ee9a3833dc260976a757', 'v0.0.276'),
('wrn40_8_cifar10', '0237', '3b81d261706b751f5b731149b05fa92f500218e8', 'v0.0.166'),
('wrn40_8_cifar100', '1803', '114f6be2d5f8d561a5e3b4106fac30028defe300', 'v0.0.321'),
('wrn40_8_svhn', '0254', 'be7a21da6bc958c79725d7a29502c6a781cc67d9', 'v0.0.277'),
('wrn20_10_1bit_cifar10', '0326', 'c1a8ba4f1e1336a289c4b2eec75e25445b511ca6', 'v0.0.302'),
('wrn20_10_1bit_cifar100', '1904', 'adae01d6bec92d4fe388cddbb7f7eb598b1655d1', 'v0.0.302'),
('wrn20_10_1bit_svhn', '0273', 'ce9f819cf117fa66af112d9cbb0b65568623118d', 'v0.0.302'),
('wrn20_10_32bit_cifar10', '0314', '355496184493a55323c99bad9f79b0803548d373', 'v0.0.302'),
('wrn20_10_32bit_cifar100', '1812', 'd064f38aeaa14e9a2f4e9893ef6cca65615c53f9', 'v0.0.302'),
('wrn20_10_32bit_svhn', '0259', 'd9e8b46e180a34c0a765e22d24741f3849fca13a', 'v0.0.302'),
('ror3_56_cifar10', '0543', 'ee31a69a0503b41878c49d8925ac8e7ee813293b', 'v0.0.228'),
('ror3_56_cifar100', '2549', '4334559313cd9291af3d6ec0df144b21e695228b', 'v0.0.229'),
('ror3_56_svhn', '0269', '56617cf90e0902e88686af14939605c45d1170cf', 'v0.0.287'),
('ror3_110_cifar10', '0435', '0359916596cba01dfa481f105094c1047f592980', 'v0.0.235'),
('ror3_110_cifar100', '2364', 'b8c4d317241f54990180443d7fd9702d79c57ccc', 'v0.0.236'),
('ror3_110_svhn', '0257', '0677b7dfee32659a92719a5a16a7f387a5635f0b', 'v0.0.287'),
('ror3_164_cifar10', '0393', 'cc11aa06d928d0805279baccbf2b82371c31f503', 'v0.0.294'),
('ror3_164_cifar100', '2234', 'eb6a7fb8128240d84843a8e39adb00f606b6e2cf', 'v0.0.294'),
('ror3_164_svhn', '0273', 'b008c1b01386aca1803a1286607c5e1f843fc919', 'v0.0.294'),
('rir_cifar10', '0328', '5bed6f3506055b3ab5c4780a540cfebe014490ec', 'v0.0.292'),
('rir_cifar100', '1923', 'c42563834a971e18eacfc2287585aa2efa8af3eb', 'v0.0.292'),
('rir_svhn', '0268', '1c0718deaef5836efca4d5ded6140f0cd51424ab', 'v0.0.292'),
('shakeshakeresnet20_2x16d_cifar10', '0515', 'a7b8a2f77457e151da5d5ad3b9a2473594fecfc0', 'v0.0.215'),
('shakeshakeresnet20_2x16d_cifar100', '2922', 'e46e31a7d8308b57d9c0687000c40f15623998c2', 'v0.0.247'),
('shakeshakeresnet20_2x16d_svhn', '0317', '7a48fde5e1ccd5ff695892adf7094c15368ec778', 'v0.0.295'),
('shakeshakeresnet26_2x32d_cifar10', '0317', '21e60e626765001aaaf4eb26f7cb8f4a69ea3dc1', 'v0.0.217'),
('shakeshakeresnet26_2x32d_cifar100', '1880', 'bd46a7418374e3b3c844b33e12b09b6a98eb4e6e', 'v0.0.222'),
('shakeshakeresnet26_2x32d_svhn', '0262', 'f1dbb8ef162d9ec56478e2579272f85ed78ad896', 'v0.0.295'),
('diaresnet20_cifar10', '0622', '3e47641d76c1992652d8f973294f4763ecef1987', 'v0.0.340'),
('diaresnet20_cifar100', '2771', '3a58490ea95538ad5809c05739b4362088ea6961', 'v0.0.342'),
('diaresnet20_svhn', '0323', '579535ddc8b7c9becfe9bf97393ab33d9d5e7d0b', 'v0.0.342'),
('diaresnet56_cifar10', '0505', '45df69745c9692168697a7b980ade080ef7af07d', 'v0.0.340'),
('diaresnet56_cifar100', '2435', 'e45b7f281bb63c90104ff79d1519b4785a975a92', 'v0.0.342'),
('diaresnet56_svhn', '0268', '8f2c0574380bf14b0e9711d6370b2898f337cab0', 'v0.0.342'),
('diaresnet110_cifar10', '0410', '56f547ec833f419ea216f51439de50287dfef3c3', 'v0.0.340'),
('diaresnet110_cifar100', '2211', 'e99fad4ef0b2e7f09376beb314d672db7c3b6a55', 'v0.0.342'),
('diaresnet110_svhn', '0247', 'c587ac09f45fd7a29adfc1da62ad50174fd248ec', 'v0.0.342'),
('diaresnet164bn_cifar10', '0350', '533e7c6a30fce31c4f65686782cf761e7913750c', 'v0.0.340'),
('diaresnet164bn_cifar100', '1953', '43fa3821ab72e94187c12f7f950a2343649b3657', 'v0.0.342'),
('diaresnet164bn_svhn', '0244', 'eba062dce4033fd85ff78c2530b363b3768c036e', 'v0.0.342'),
('diapreresnet20_cifar10', '0642', 'ec36098cfbbb889fdd124083e785d0e21ba34792', 'v0.0.343'),
('diapreresnet20_cifar100', '2837', '32f0f1be9aa1da73f8fdb74f27ebaa49e7f9ace6', 'v0.0.343'),
('diapreresnet20_svhn', '0303', 'e33be387b0e71a4b0597558157ffbdb79c6db30c', 'v0.0.343'),
('diapreresnet56_cifar10', '0483', 'cba6950f21643a70b8e61b7197ca9cee9b2d0545', 'v0.0.343'),
('diapreresnet56_cifar100', '2505', 'c9f8bd4380d35e3806e1697c1c8d80bf7341c04e', 'v0.0.343'),
('diapreresnet56_svhn', '0280', '98a2a0bab42ff2605bd2cd4e63280b3631b042cb', 'v0.0.343'),
('diapreresnet110_cifar10', '0425', 'f4eae5abe2edebb1e224f7cf092ba02a873eb781', 'v0.0.343'),
('diapreresnet110_cifar100', '2269', '78d79bab215a5dc7221859d0b2688d040a55afb2', 'v0.0.343'),
('diapreresnet110_svhn', '0242', 'decb3765e92f5620580fe6b440ee2a82811d412e', 'v0.0.343'),
('diapreresnet164bn_cifar10', '0356', '9cf07392dc9714324e470fd50efb92ef286296ac', 'v0.0.343'),
('diapreresnet164bn_cifar100', '1999', '1625154f3cce7e131f25d8ee0b315b3fcc6fb760', 'v0.0.343'),
('diapreresnet164bn_svhn', '0256', '8476c5c9176abf21ea380dd00074b0ec30bbc530', 'v0.0.343'),
('resnet10_cub', '2765', '9dab9a498c380e6b7447827e00996d7cc61cc414', 'v0.0.335'),
('resnet12_cub', '2658', 'a46b8ec2d8dcd66a628dcfcb617acb15ef786b95', 'v0.0.336'),
('resnet14_cub', '2435', '0b9801b2e3aa3908bbc98f50d3ae3e986652742b', 'v0.0.337'),
('resnet16_cub', '2321', '031374ada9830869372a63e132c2477a04425444', 'v0.0.338'),
('resnet18_cub', '2330', 'e72712003928ed70ccf44b953e9cec4f78a75eea', 'v0.0.344'),
('resnet26_cub', '2252', '61cce1ea575f650a7e12a08b1a09335afa6cb605', 'v0.0.345'),
('seresnet10_cub', '2739', '7060c03f78bc60df09288b433eb6117c0e167210', 'v0.0.361'),
('seresnet12_cub', '2604', 'ee095118bde6e05ea102f5b945401a7221b7b7fb', 'v0.0.361'),
('seresnet14_cub', '2363', '5d2049d53c0445d7c66c849a5cd805ce39a37ddb', 'v0.0.361'),
('seresnet16_cub', '2321', '576e58eff57730a516094b8ba79452092187b693', 'v0.0.361'),
('seresnet18_cub', '2308', '3d2496d66efd6a00ca516c1a7a5a091f90043237', 'v0.0.361'),
('seresnet26_cub', '2251', '8d54edb2800b2ff071ee5beedde285eb9553bc22', 'v0.0.361'),
('mobilenet_w1_cub', '2346', 'efcad3dcf1975552f15028255a15f86a16b60987', 'v0.0.346'),
('proxylessnas_mobile_cub', '2188', '36d33231029b466638b3b1f8b2d1392e22d1afa7', 'v0.0.347'),
('ntsnet_cub', '1326', '75ae8cdcf4beb1ab60c1a983c9f143baaebbdea0', 'v0.0.334'),
('pspnet_resnetd101b_voc', '8144', 'e15319bf5428637e7fc00dcd426dd458ac937b08', 'v0.0.297'),
('pspnet_resnetd50b_ade20k', '3687', 'f0dcdf734f8f32a879dec3c4e7fe61d629244030', 'v0.0.297'),
('pspnet_resnetd101b_ade20k', '3797', 'c1280aeab8daa31c0893f7551d70130c2b68214a', 'v0.0.297'),
('pspnet_resnetd101b_cityscapes', '7172', 'd5ad2fa4c4208f439ab0b98267babe0c4d9e6e94', 'v0.0.297'),
('pspnet_resnetd101b_coco', '6741', '87582b79c48c4e995de808ff0cbc162c55b52031', 'v0.0.297'),
('deeplabv3_resnetd101b_voc', '8024', '8ee3099c5c983ef1cc0ce23b23d91db40b2986b8', 'v0.0.298'),
('deeplabv3_resnetd152b_voc', '8120', '88fb315dc3c58a84f325e63105fbfe322932073f', 'v0.0.298'),
('deeplabv3_resnetd50b_ade20k', '3713', '5d5e2f74008ab3637a05b6b1357c9c339296188c', 'v0.0.298'),
('deeplabv3_resnetd101b_ade20k', '3784', '6224836f8f31a00be1718a530a20670136bb3958', 'v0.0.298'),
('deeplabv3_resnetd101b_coco', '6773', '74dc9914078e47feb3ff64fba717d1d4040d8235', 'v0.0.298'),
('deeplabv3_resnetd152b_coco', '6899', 'edd79b4ca095f1674e7a68ee0dc8ed8bcd0b6a26', 'v0.0.298'),
('fcn8sd_resnetd101b_voc', '8040', 'f6c67c75bce4f9a3e17bf555369c0c9332ab5c1f', 'v0.0.299'),
('fcn8sd_resnetd50b_ade20k', '3339', '9856c5ee8186d1ac4b0eb5177c73e76c4cd63bb0', 'v0.0.299'),
('fcn8sd_resnetd101b_ade20k', '3588', '081774b2fb373d7b759cda2160fa0d2599b1c5f1', 'v0.0.299'),
('fcn8sd_resnetd101b_coco', '6011', '05e97cc5f5fcdf1c5ec5c617062d43adfe150d88', 'v0.0.299'),
('icnet_resnetd50b_cityscapes', '6402', '6c8f86a53526ae107e58d5f645bc4de0da9c1bb1', 'v0.0.457'),
('fastscnn_cityscapes', '6576', '9e0d75e56bde8d1643d3ff0053e55114c0a77ee9', 'v0.0.474'),
('sinet_cityscapes', '6031', '47d8ae7824bd297bbf25c2f33e3d4a86a503aefa', 'v0.0.437'),
('bisenet_resnet18_celebamaskhq', '0000', 'd72f0cf3101625bb4265e4cf5ae557b994f84d67', 'v0.0.462'),
('danet_resnetd50b_cityscapes', '6799', '9880a0eb9523ba2c1f98025f7d7115a2a4c1f376', 'v0.0.468'),
('danet_resnetd101b_cityscapes', '6810', 'ea69dcea31f5b250254a226f70df130875ff18b2', 'v0.0.468'),
('alphapose_fastseresnet101b_coco', '7415', '70082a53d3cdeb7ebe4c7d8c16a6a39830f1ed23', 'v0.0.454'),
('simplepose_resnet18_coco', '6631', '5a6198e5103a28faab4e49c687121634c7f7d196', 'v0.0.455'),
('simplepose_resnet50b_coco', '7102', '6315ffa72993eea3f573ae1cc23f84d0275f0fbe', 'v0.0.455'),
('simplepose_resnet101b_coco', '7244', '0491ab951827782492ffbd8fa57aa6dd599c7e9f', 'v0.0.455'),
('simplepose_resnet152b_coco', '7253', '4590c1c555eb54c4e9afdc83fa8a199132afd212', 'v0.0.455'),
('simplepose_resneta50b_coco', '7170', 'fa09a84ee2e085ad6b641c4fe0cc483651861789', 'v0.0.455'),
('simplepose_resneta101b_coco', '7297', '7ddd6cb20bcd626e05e8a627601bf19b704ba6a9', 'v0.0.455'),
('simplepose_resneta152b_coco', '7344', '9ec1a3dc2a23a19cb7f5ac5466cebba8069a0f93', 'v0.0.455'),
('simplepose_mobile_resnet18_coco', '6625', '8ff93eed70ac73503c8c1e346ddd1ade5d9e3edf', 'v0.0.456'),
('simplepose_mobile_resnet50b_coco', '7110', 'e0f2e587ffdf5e074a29f877890b13b99a58c6c2', 'v0.0.456'),
('simplepose_mobile_mobilenet_w1_coco', '6410', '0867e5aa76d5ec37cde08c71de8324a9c2913922', 'v0.0.456'),
('simplepose_mobile_mobilenetv2b_w1_coco', '6374', '07e9c6295a8aa2b7bb9a1d0b7c716141e7ee71dc', 'v0.0.456'),
('simplepose_mobile_mobilenetv3_small_w1_coco', '5434', 'cb837c0e32edec68dc8598d71599ea7404936f96', 'v0.0.456'),
('simplepose_mobile_mobilenetv3_large_w1_coco', '6367', '7ba036a5ade736042531a0fe500ecec368dbf157', 'v0.0.456'),
('lwopenpose2d_mobilenet_cmupan_coco', '3999', 'b4a22e7c2a05e53fe22185002c48e81f76c2d918', 'v0.0.458'),
('lwopenpose3d_mobilenet_cmupan_coco', '3999', '4658738ec27d46ee01f2cad4aa975914e9f7108c', 'v0.0.458'),
('ibppose_coco', '6486', '024d1fafb7471572129ccbb07d662f6d8ccdc758', 'v0.0.459'),
]}
# Base URL of the imgclsmob GitHub repository; pretrained weight archives are
# fetched from its release assets (see get_model_file below).
imgclsmob_repo_url = 'https://github.com/osmr/imgclsmob'
def get_model_name_suffix_data(model_name):
    """
    Look up the stored record for a pretrained model.

    Parameters:
    ----------
    model_name : str
        Name of the model.

    Returns:
    -------
    tuple of (str, str, str)
        The model's reported error, its weight-file SHA1 hash, and the
        repository release tag hosting the weights.

    Raises:
    ------
    ValueError
        If no pretrained weights are registered for `model_name`.
    """
    record = _model_sha1.get(model_name)
    if record is None:
        raise ValueError("Pretrained model for {name} is not available.".format(name=model_name))
    err, digest, release_tag = record
    return err, digest, release_tag
def get_model_file(model_name,
                   local_model_store_dir_path=os.path.join("~", ".mxnet", "models")):
    """
    Return location for the pretrained on local file system. This function will download from online model zoo when
    model cannot be found or has mismatch. The root directory will be created if it doesn't exist.

    Parameters:
    ----------
    model_name : str
        Name of the model.
    local_model_store_dir_path : str, default $MXNET_HOME/models
        Location for keeping the model parameters.

    Returns:
    -------
    file_path
        Path to the requested pretrained model file.

    Raises:
    ------
    ValueError
        If the model is unknown, or the downloaded file fails the SHA1 check.
    """
    error, sha1_hash, repo_release_tag = get_model_name_suffix_data(model_name)
    short_sha1 = sha1_hash[:8]
    # The file name encodes the model name, its reported error and an 8-char
    # hash prefix, e.g. 'resnet20_cifar10-0601-3411e5ad.params'.
    file_name = "{name}-{error}-{short_sha1}.params".format(
        name=model_name,
        error=error,
        short_sha1=short_sha1)
    local_model_store_dir_path = os.path.expanduser(local_model_store_dir_path)
    file_path = os.path.join(local_model_store_dir_path, file_name)
    if os.path.exists(file_path):
        if check_sha1(file_path, sha1_hash):
            # Cached copy is present and its SHA1 matches -- reuse it.
            return file_path
        else:
            logging.warning("Mismatch in the content of model file detected. Downloading again.")
    else:
        logging.info("Model file not found. Downloading to {}.".format(file_path))

    # exist_ok=True avoids the race between a separate existence check and the
    # directory creation when several processes fetch models concurrently.
    os.makedirs(local_model_store_dir_path, exist_ok=True)

    # The release asset is a zip archive wrapping the .params file.
    zip_file_path = file_path + ".zip"
    download(
        url="{repo_url}/releases/download/{repo_release_tag}/{file_name}.zip".format(
            repo_url=imgclsmob_repo_url,
            repo_release_tag=repo_release_tag,
            file_name=file_name),
        path=zip_file_path,
        overwrite=True)
    with zipfile.ZipFile(zip_file_path) as zf:
        zf.extractall(local_model_store_dir_path)
    os.remove(zip_file_path)

    if check_sha1(file_path, sha1_hash):
        return file_path
    else:
        raise ValueError("Downloaded file has different hash. Please try again.")
|
[
"osemery@gmail.com"
] |
osemery@gmail.com
|
e7a357df777bcf68a8086cac9f9b4ef9aae9644a
|
98cb999e8c13b0bab4d51e2f3374a95c3f414bcb
|
/accounts/urls.py
|
28ab90b01a00b1aca21d63b585705e95e4d16412
|
[] |
no_license
|
dhalianeza/asterlove
|
e91f8b192a9bbe962f94b013283b718867bc55b2
|
81f25f6cb3c9587228bee6daef4ec3ad78dc5bc1
|
refs/heads/master
| 2023-03-07T11:52:17.667323
| 2021-02-11T19:15:43
| 2021-02-11T19:15:43
| 338,124,298
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
from django.urls import path
from .views import UserRegistrationView, UserDetailView, UserUpdateView
# URL routes for the accounts app: registration plus per-user profile views.
urlpatterns = [
    # User sign-up form.
    path('registration/', UserRegistrationView.as_view(), name='register'),
    # Read-only profile page for the user with the given primary key.
    path('profile/<int:pk>/', UserDetailView.as_view(), name='profile'),
    # Profile edit form.
    # NOTE(review): no trailing slash here, unlike the other routes -- under
    # Django's default APPEND_SLASH handling a request to '.../edit/' would 404;
    # confirm the inconsistency is intentional.
    path('profile/<int:pk>/edit', UserUpdateView.as_view(), name='editprofile'),
]
|
[
"dhalianeza02@gmail.com"
] |
dhalianeza02@gmail.com
|
db1a809412f1147088960366b8088eceafe24d9c
|
9437e8ab6163ff172095305586e9901294480bdc
|
/rastermodel/utils.py
|
73e067406f10b8b33a3b2b767829208b61585f60
|
[
"MIT"
] |
permissive
|
bparment1/rastermodel
|
8b742839ecf7b0d15b028cd42558719c1d35db1b
|
ddeea801caace4945d57b32c6fbc9b1d830ff4fb
|
refs/heads/master
| 2022-02-23T04:01:18.225968
| 2019-09-19T19:32:04
| 2019-09-19T19:32:04
| 192,760,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 960
|
py
|
###### Library used in this script
import numpy as np
#import matplotlib.pyplot as plt
#import matplotlib.cm as cm
#import matplotlib.colors as colors
#import seaborn as sns
import rasterio
import subprocess
import pandas as pd
import os, glob
from rasterio import plot
import geopandas as gpd
import georasters as gr
import gdal
import rasterio
import descartes
import pysal as ps
from cartopy import crs as ccrs
from pyproj import Proj
from osgeo import osr
#from shapely.geometry import Point
#from collections import OrderedDict
#import webcolors
import sklearn
from sklearn.linear_model import LinearRegression
from sklearn import metrics
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
#debugusing pdb
import pdb
from sklearn.ensemble import RandomForestRegressor
from sklearn import svm
from sklearn.neural_network import MLPRegressor
from itertools import repeat
|
[
"meshghi@uoregon.edu"
] |
meshghi@uoregon.edu
|
b8c43baef01e0c73c3f1db61fec5adc9cea51ac3
|
f4a66b3428ac2e66f5cf7b1c9bf95767cc375ce0
|
/vu_speech/custom_admin/admin.py
|
e0ce4a1d35514ffbff982bd7a782add63c6b4f55
|
[] |
no_license
|
arnobpl/vu-speech
|
4763e42f67acacd4ad3ab77c6489e14f01838d10
|
1b96905f2ce69fabf7673d3d0e08216ca530753a
|
refs/heads/master
| 2023-05-07T17:57:57.549115
| 2021-05-07T15:01:23
| 2021-05-12T16:27:20
| 342,685,847
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,411
|
py
|
import json
from django.contrib import admin
from django.core.serializers.json import DjangoJSONEncoder
from django.http import JsonResponse
from django.urls import path
# Register your models here.
class BaseChartDataAdmin(admin.ModelAdmin):
    """Admin base class that renders a chart on the change-list page.

    Subclasses override ``chart_data`` to supply the plotted queryset and may
    tweak the ``chart_*`` class attributes below. The data is both injected on
    initial page load and exposed via a ``chart_data/`` JSON endpoint for
    dynamic refreshes from JavaScript.
    """

    change_list_template = 'admin/chart_data/change_list.html'
    # Legend label for the dataset; when left as None it is derived from the
    # model's plural verbose name on first render.
    chart_label = None
    chart_background_color = 'rgba(220,20,20,0.5)'
    # When False, the regular change-list table is replaced by the chart alone.
    chart_table = True

    # Inject chart data on page load in the ChangeList view
    def changelist_view(self, request, extra_context=None):
        if self.chart_label is None:
            # BUGFIX: model options live on ``_meta`` -- the inner ``Meta``
            # class is stripped by the model metaclass, so the previous
            # ``hasattr(self.model, 'Meta')`` guard never passed and the label
            # silently stayed None. str() unwraps Django's lazy translation
            # proxy before lowercasing.
            self.chart_label = 'new ' + str(self.model._meta.verbose_name_plural).lower()
        chart_data = self.chart_data(request)
        as_json = json.dumps(list(chart_data), cls=DjangoJSONEncoder)
        extra_context = extra_context or {}
        extra_context['chart_data'] = as_json
        extra_context['chart_label'] = self.chart_label
        extra_context['chart_background_color'] = self.chart_background_color
        extra_context['chart_table'] = self.chart_table
        extra_context['chart_fetch_url'] = request.path + 'chart_data/'
        if not self.chart_table:
            extra_context['title'] = 'Select chart to view details'
        return super().changelist_view(request, extra_context=extra_context)

    def get_urls(self):
        """Prepend the JSON endpoint route to the default admin URLs."""
        urls = super().get_urls()
        extra_urls = [
            path('chart_data/', self.admin_site.admin_view(self.chart_data_endpoint))
        ]
        # NOTE! The custom URLs have to go before the default URLs, because they default ones match anything.
        return extra_urls + urls

    # JSON endpoint for generating chart data that is used for dynamic loading via JS.
    def chart_data_endpoint(self, request):
        chart_data = self.chart_data(request)
        # safe=False is required because the top-level JSON value is a list.
        return JsonResponse(list(chart_data), safe=False)

    def chart_data(self, request):
        """
        Return a queryset for plotting the chart. This queryset must include ``date``
        datetime field for x-axis and ``y`` number field for y-axis.
        """
        return self.get_queryset(request)
class NoWriteAdmin(admin.ModelAdmin):
    """Read-only ModelAdmin: every write permission is denied.

    Registering a model with this class lets staff browse records in the
    admin while blocking the add, edit and delete actions entirely.
    """

    def has_add_permission(self, request):
        """Adding objects through the admin is never allowed."""
        return False

    def has_delete_permission(self, request, obj=None):
        """Deleting objects through the admin is never allowed."""
        return False

    def has_change_permission(self, request, obj=None):
        """Editing objects through the admin is never allowed."""
        return False
|
[
"arnobpl@gmail.com"
] |
arnobpl@gmail.com
|
ed5bf40ffe91b06060eb3a57eac9e1d3c9d458a3
|
c01dad1a1b742423d59d18d0d25755605fc6efcd
|
/RNN/main.py
|
07b2e4caba5d33499bef3025be76dabe3e4eb6a8
|
[
"Apache-2.0"
] |
permissive
|
hjdw2/embedded
|
d0e76ad08dca33fcb90b14e3eeb1f332c4dd1af7
|
f1991a56588903ac89747c9cefe28d11ad8e65bb
|
refs/heads/master
| 2022-07-12T02:50:25.986500
| 2020-05-12T01:50:14
| 2020-05-12T01:50:14
| 261,961,895
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,203
|
py
|
from __future__ import print_function
import argparse
import time
import math
import torch
import torch.nn as nn
from torch.autograd import Variable
import multiprocessing as mp
from collections import deque
from data import Corpus
from utils import *
from models import *
from train import *
def main():
    """Train the hierarchical LSTM language model and optionally checkpoint it.

    Parses hyperparameters from the command line, batches the corpus, builds
    the lower/upper/local RNNs, trains them with ``LC_Train``, and saves the
    three state dicts under ``args.save_path`` when ``--save-model`` is set.
    """
    # Local import: ``os`` is used below but is not imported at the top of
    # this file (it may or may not be re-exported by the star imports).
    import os

    parser = argparse.ArgumentParser(description='PyTorch PennTreeBank RNN Language Model')
    parser.add_argument('--data', type=str, default='input', help='location of the data corpus')
    parser.add_argument('--epochs', type=int, default=10, help='upper epoch limit')
    parser.add_argument('--batch_size', type=int, default=20, help='batch size')
    parser.add_argument('--lr', type=float, default=20, help='initial learning rate')
    parser.add_argument('--clip', type=float, default=0.25, help='gradient clipping')
    parser.add_argument('--emsize', type=int, default=128, help='size of word embeddings')
    parser.add_argument('--nlayers', type=int, default=1, help='number of layers')
    parser.add_argument('--nhid', type=int, default=1500, help='number of hidden units per layer')
    parser.add_argument('--bptt', type=int, default=35, help='sequence length')
    parser.add_argument('--log-interval', type=int, default=100, help='report interval')
    parser.add_argument('--seed', type=int, default=1, help='random seed')
    parser.add_argument('--no-cuda', default=False, help='disables CUDA training')
    parser.add_argument('--save-model', default=True, help='For Saving the current Model')
    parser.add_argument('--save_path', type=str, default='checkpoints', help='Folder to save checkpoints and log.')
    parser.add_argument('--time_sleep_iteration', type=int, default=0, help='Time sleep for prevetioning from overhitting CPU or GPU.')
    args = parser.parse_args()

    torch.manual_seed(args.seed)
    use_cuda = not args.no_cuda and torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")

    # Data
    print('==> Preparing data..')
    # BUG FIX: the corpus path was hard-coded to 'input', silently ignoring
    # the parsed --data argument (whose default is 'input', so the default
    # behavior is unchanged).
    corpus = Corpus(args.data)
    ntokens = len(corpus.dictionary)
    train_data = batchify(corpus.train, args.batch_size)
    val_data = batchify(corpus.valid, args.batch_size)
    test_data = batchify(corpus.test, args.batch_size)

    # Model
    print('==> Building model..')
    lower_network = RNN_lower('LSTM', ntokens, args.emsize, args.nhid, args.nlayers)
    upper_network = RNN_upper('LSTM', ntokens, args.nhid, args.nhid, args.nlayers)
    local_network = RNN_local(ntokens, args.nhid, args.nhid, args.nlayers)

    LC_Train(lower_network, upper_network, local_network, train_data, val_data, test_data, args, device, corpus, ntokens)

    if args.save_model:
        print('Saving..')
        # Checkpoint all three sub-networks together so they can be restored
        # as a consistent set.
        state = {
            'lower_network': lower_network.state_dict(),
            'upper_network': upper_network.state_dict(),
            'local_network': local_network.state_dict(),
        }
        if not os.path.isdir(args.save_path):
            os.mkdir(args.save_path)
        torch.save(state, args.save_path + '/ckpt.pth')
if __name__ == '__main__':
    # Use the 'spawn' start method (safer with CUDA in worker processes);
    # a RuntimeError means a start method was already set — ignore it.
    try:
        mp.set_start_method('spawn')
    except RuntimeError:
        pass
    main()
|
[
"noreply@github.com"
] |
hjdw2.noreply@github.com
|
e470af9f6da97e2b821fbaa5ffedbfbf354b415a
|
e9435342722ccc3bdf1475f1e8ed49166d940875
|
/ch8/preprocess_test.py
|
4222308d752c46cbcec771adbd530c3c298c574d
|
[] |
no_license
|
SongJongbeen/chatbot
|
81761de94be6fe3b557a2758122dd23e22108ad4
|
0a2fcccd91d56e26991b772a18a071f45f1467c4
|
refs/heads/main
| 2023-07-10T06:56:01.910701
| 2021-08-17T07:17:41
| 2021-08-17T07:17:41
| 386,115,295
| 0
| 1
| null | 2021-07-17T12:28:28
| 2021-07-15T00:39:57
|
Python
|
UTF-8
|
Python
| false
| false
| 934
|
py
|
# from utils.Preprocess import Preprocess
# from tensorflow.keras import preprocessing
#
# sent = "내일 오전 10시에 짬뽕 주문하고 싶어ㅋㅋ"
# p = Preprocess(word2index_dic='../train_tools/dict/chatbot_dict.bin',
# userdic = '../utils/user_dic.tsv')
#
# pos = p.pos(sent)
# keywords = p.get_keywords(pos, without_tag=False)
#
# print(keywords)
#
# # w2i = p.get_wordidx_sequence(keywords)
# # sequences = [w2i]
# #
# # MAX_SEQ_LEN = 15 # 임베딩 벡터 크기
# # padded_seqs = preprocessing.sequence.pad_sequences(sequences, maxlen=MAX_SEQ_LEN, padding='post')
# #
# # print(keywords)
# # print(sequences)
# # print(padded_seqs)
from Preprocess import Preprocess
# Sample sentence: "I want to order sweet-and-sour pork at 10 AM tomorrow."
sent = "내일 오전 10시에 탕수육 주문하고 싶어"

# POS-tag the sentence, then extract keywords twice: once keeping the tags,
# once with tags stripped.
preprocessor = Preprocess(userdic='user_dic.tsv')
tagged = preprocessor.pos(sent)

keywords_with_tags = preprocessor.get_keywords(tagged, without_tag=False)
print(keywords_with_tags)

keywords_only = preprocessor.get_keywords(tagged, without_tag=True)
print(keywords_only)
|
[
"1041489@gmail.com"
] |
1041489@gmail.com
|
8edf87b3a7118ec90cf504ea3618047131ed2a49
|
a30012860abb799620d361adac1037468dc7cc08
|
/api/python/lib/ezconfiguration/properties.py
|
787eb71e5882025688ed0659f1b785d6886fd296
|
[
"Apache-2.0"
] |
permissive
|
ezbake/ezbake-configuration
|
991a2f22fc7e63512e5b2aeeab52665a380e2bc3
|
2ebb281ca2e5683e0e178db3f287b16c76e5a1a1
|
refs/heads/master
| 2021-01-01T06:00:20.303421
| 2014-10-15T12:48:41
| 2014-10-15T12:48:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,480
|
py
|
# Copyright (C) 2013-2014 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
'''
import os
from threading import RLock
from ezconfiguration.security.CryptoImplementations import TextCrytoImplementation
from ezconfiguration.utils.propertyencryptionutil import isEncryptedValue, decryptPropertyValue, encryptPropertyValue
from ezconfiguration.utils import openshiftutil
class DuplicatePropertyException(RuntimeError):
    '''
    Exception class for duplicate properties collisions.

    Raised when merging two property sets that share one or more keys while
    overriding is disallowed; the message lists the colliding key names.
    '''
    def __init__(self, collisions):
        # ``collisions`` is a set of key names; stringify it for the message.
        super(DuplicatePropertyException, self).__init__(str(collisions))
class EzProperties(dict):
    '''
    This class extends dict to add convenience methods to get primitives from strings,
    deal with encrypted values. If you are going to be dealing with encrypted properties
    then you are going to have to make sure to the set the TextCryptoProvider eg:

        ezProperties = EzProperties()
        ezProperties.setTextCryptoImplementer(
            SharedSecretTextCryptoImplementation(os.getenv("SHARED_SECRET")))

    NOTE: this module targets Python 2 (``basestring``/``long``/``viewkeys``).
    '''

    def __init__(self, *args):
        super(EzProperties, self).__init__(*args)
        self._cryptoImplementer = None
        # Guards swaps of the crypto implementer across threads.
        self.__lock = RLock()

    def setTextCryptoImplementer(self, implementer):
        '''
        Set the crypto implementation to use when encrypting and decrypting values

        Args:
            implementer (TextCrytoImplementation): a text crypto implementation to use

        Raises:
            TypeError: if implementer is not a TextCrytoImplementation
        '''
        if not isinstance(implementer, TextCrytoImplementation):
            raise TypeError('implementer must be of type TextCrytoImplementation')
        with self.__lock:
            self._cryptoImplementer = implementer

    def getTextCryptoImplementer(self):
        '''
        Get the crypto implementer that this object is using to encrypt and decrypt values

        Returns:
            TextCrytoImplementation: a text crypto implementation (or None if unset)
        '''
        return self._cryptoImplementer

    def getProperty(self, key, defaultValue=None):
        '''
        Obtains the property value for the specified key decrypting it if needed.

        Args:
            key (str): the property key
            defaultValue (str, optional): the default value to return if property doesn't exist
        '''
        return self.get(key, defaultValue)

    def setProperty(self, key, value, isEncrypted=False):
        '''
        Set a property value

        Args:
            key (str): the key to be placed in this properties object
            value (str): the value corresponding to the key
            isEncrypted (bool, optional): whether or not we should encrypt the property. Defaults to False
        '''
        self[key] = encryptPropertyValue(value, self._cryptoImplementer) if isEncrypted else value

    def getBoolean(self, propertyName, defaultValue):
        '''
        Get a property as a bool, if the property doesn't exist or can't be converted
        then we return the default value

        Args:
            propertyName (str): name of the property we are looking for (the key)
            defaultValue (bool): value to return if the key doesn't exist or can't be converted

        Returns:
            (bool): the parsed bool, or the default value
        '''
        value = self.getProperty(propertyName, defaultValue)
        if isinstance(value, bool):
            return value
        if isinstance(value, basestring):
            value = value.strip().lower()
            if value == 'true' or value == 't' or value == '1':
                return True
            elif value == 'false' or value == 'f' or value == '0':
                return False
        return defaultValue

    def getDouble(self, propertyName, defaultValue):
        '''
        Get a property as a float (a Python float is a C double); delegates to getFloat.

        Args:
            propertyName (str): name of the property we are looking for (the key)
            defaultValue (float): value to return if the key doesn't exist or can't be converted

        Returns:
            (float): the parsed float, or the default value
        '''
        return self.getFloat(propertyName, defaultValue)

    def getFloat(self, propertyName, defaultValue):
        '''
        Get a property as a float, if the property doesn't exist or can't be converted
        then we return the default value

        Args:
            propertyName (str): name of the property we are looking for (the key)
            defaultValue (float): value to return if the key doesn't exist or can't be converted

        Returns:
            (float): the parsed float, or the default value
        '''
        try:
            return float(self.getProperty(propertyName, defaultValue))
        # BUG FIX: also catch TypeError — float(None) (missing key with a
        # None default) raises TypeError, not ValueError, and used to escape.
        except (ValueError, TypeError):
            return defaultValue

    def getInteger(self, propertyName, defaultValue):
        '''
        Get a property as an int, if the property doesn't exist or can't be converted
        then we return the default value

        Args:
            propertyName (str): name of the property we are looking for (the key)
            defaultValue (int): value to return if the key doesn't exist or can't be converted

        Returns:
            (int): the parsed int, or the default value
        '''
        try:
            return int(self.getProperty(propertyName, defaultValue))
        # BUG FIX: int(None) raises TypeError; treat it like a parse failure.
        except (ValueError, TypeError):
            return defaultValue

    def getLong(self, propertyName, defaultValue):
        '''
        Get a property as a long, if the property doesn't exist or can't be converted
        then we return the default value

        Args:
            propertyName (str): name of the property we are looking for (the key)
            defaultValue (long): value to return if the key doesn't exist or can't be converted

        Returns:
            (long): the parsed long, or the default value
        '''
        try:
            return long(self.getProperty(propertyName, defaultValue))
        # BUG FIX: long(None) raises TypeError; treat it like a parse failure.
        except (ValueError, TypeError):
            return defaultValue

    def getCollisions(self, toCheck):
        '''
        Check to see what properties would "collide" (have the same key name)

        Args:
            toCheck (dict): the properties that we want to compare our properties with

        Returns:
            set: a set of strings which are the keys that overlap
        '''
        if not isinstance(toCheck, dict):
            raise TypeError('toCheck must be of type dict')
        return self.viewkeys() & toCheck.viewkeys()

    def getPath(self, propertyName, defaultValue):
        '''
        Get file system path represented by the property name.
        If we're in an openshift container, prepend the openshift repo directory
        (unless the path is already absolute).
        '''
        path = self.getProperty(propertyName, defaultValue)
        if path is None:
            return defaultValue
        if not openshiftutil.inOpenShiftContainer() or os.path.isabs(path):
            return path
        return os.path.join(openshiftutil.getRepoDir(), path)

    def mergeProperties(self, toMerge, shouldOverride=True):
        '''
        Merge one set of properties with our properties.

        Args:
            toMerge (dict): properties that we want to merge into our own
            shouldOverride (bool, optional): whether or not we should just override. Default is True

        Raises:
            DuplicatePropertyException: if shouldOverride is False and keys collide
        '''
        if not isinstance(toMerge, dict):
            raise TypeError('toMerge must be of type dict')
        if not shouldOverride:
            collisions = self.getCollisions(toMerge)
            if len(collisions) > 0:
                raise DuplicatePropertyException(collisions)
        self.update(toMerge)

    def __decrypt(self, value):
        # Pass plain values through untouched; only decrypt marked values.
        if not isEncryptedValue(value):
            return value
        return decryptPropertyValue(value, self._cryptoImplementer)

    def __getitem__(self, *args):
        return self.__decrypt(super(EzProperties, self).__getitem__(*args))

    def get(self, *args):
        return self.__decrypt(super(EzProperties, self).get(*args))

    def pop(self, *args):
        # Typo fixed in message ("EzPropreties" -> "EzProperties").
        raise NotImplementedError('EzProperties does not support the pop api')

    def popitem(self):
        raise NotImplementedError('EzProperties does not support the popitem api')
|
[
"jhastings@42six.com"
] |
jhastings@42six.com
|
6ecaf15aff563b360820e6b498b118457ca332e9
|
2832041359eaa910488b2cd748975060f5252c89
|
/messages.py
|
36b4da3f65f9de238860417438e554f893fc2a89
|
[] |
no_license
|
olegbelyaev/likeabot
|
b67124eeab3ce2172c6540c0de540c8d37a0c2e6
|
f1f3d0ec363095868f37ea7966f0c9ab04e26755
|
refs/heads/master
| 2022-11-13T12:57:17.172306
| 2020-07-05T21:57:27
| 2020-07-05T21:57:27
| 277,381,134
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 93
|
py
|
# Canned reply strings used by the bot.
HELLO_MESSAGE = 'hi'  # greeting reply
VERSION_OF_BOT = '1.0'  # version string reported to users
AUTHOR = 'this bot was created by oleg ggwp gang'  # author credit line
|
[
"noreply@github.com"
] |
olegbelyaev.noreply@github.com
|
4781d99de8b6254524080f6844332daf943beb29
|
ba222782f45e0a1f267821bf022d30d05c7e33ef
|
/scripts/generate_async_api.py
|
73aed1c116171be75042a3bfad5c2246930d22bc
|
[
"Apache-2.0"
] |
permissive
|
DonatelloJiang/playwright-python
|
ae89f15207a7504dabc2fd4cc94329ff0b5066b9
|
fa7d908d8a1374b1a67f6fc7c2f91d27a7205753
|
refs/heads/master
| 2023-02-12T03:45:19.408255
| 2021-01-09T02:33:35
| 2021-01-09T02:33:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,643
|
py
|
#!/usr/bin/env python
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import re
from types import FunctionType
from typing import Any, get_type_hints # type: ignore
from playwright._impl._helper import to_snake_case
from scripts.documentation_provider import DocumentationProvider
from scripts.generate_api import (
all_types,
api_globals,
arguments,
header,
process_type,
return_type,
return_value,
short_name,
signature,
)
documentation_provider = DocumentationProvider()
def generate(t: Any) -> None:
    """Print the async wrapper class source for implementation class *t* to stdout."""
    print("")
    class_name = short_name(t)
    base_class = t.__bases__[0].__name__
    # Impl classes rooted at ChannelOwner/object wrap AsyncBase; anything else
    # keeps (the wrapper of) its original base class.
    base_sync_class = (
        "AsyncBase"
        if base_class == "ChannelOwner" or base_class == "object"
        else base_class
    )
    print(f"class {class_name}({base_sync_class}):")
    print("")
    print(f"    def __init__(self, obj: {class_name}Impl):")
    print("        super().__init__(obj)")
    # Annotated class attributes become simple delegating properties.
    for [name, type] in get_type_hints(t, api_globals).items():
        print("")
        print("    @property")
        print(f"    def {name}(self) -> {process_type(type)}:")
        documentation_provider.print_entry(class_name, name, {"return": type})
        [prefix, suffix] = return_value(type)
        prefix = "        return " + prefix + f"self._impl_obj.{name}"
        print(f"{prefix}{suffix}")
    # Python properties on the impl class become delegating properties too.
    for [name, value] in t.__dict__.items():
        if name.startswith("_"):
            continue
        if not name.startswith("_") and str(value).startswith("<property"):
            value = value.fget
            print("")
            print("    @property")
            print(
                f"    def {name}({signature(value, len(name) + 9)}) -> {return_type(value)}:"
            )
            documentation_provider.print_entry(
                class_name, name, get_type_hints(value, api_globals)
            )
            [prefix, suffix] = return_value(
                get_type_hints(value, api_globals)["return"]
            )
            prefix = "        return " + prefix + f"self._impl_obj.{name}"
            print(f"{prefix}{arguments(value, len(prefix))}{suffix}")
    # Plain methods: wrap the impl call with start/succeeded/failed API logging.
    # ``expect_*`` methods and ``remove_listener`` are handled separately below.
    for [name, value] in t.__dict__.items():
        if (
            not name.startswith("_")
            and isinstance(value, FunctionType)
            and "expect_" not in name
            and "remove_listener" != name
        ):
            is_async = inspect.iscoroutinefunction(value)
            print("")
            async_prefix = "async " if is_async else ""
            await_prefix = "await " if is_async else ""
            print(
                f"    {async_prefix}def {name}({signature(value, len(name) + 9)}) -> {return_type(value)}:"
            )
            documentation_provider.print_entry(
                class_name, name, get_type_hints(value, api_globals)
            )
            [prefix, suffix] = return_value(
                get_type_hints(value, api_globals)["return"]
            )
            prefix = prefix + f"{await_prefix}self._impl_obj.{name}("
            suffix = ")" + suffix
            # NOTE: "succeded" below is a typo in the *generated* log message;
            # left untouched here because it is runtime output.
            print(
                f"""
        try:
            log_api("=> {to_snake_case(class_name)}.{name} started")
            result = {prefix}{arguments(value, len(prefix))}{suffix}
            log_api("<= {to_snake_case(class_name)}.{name} succeded")
            return result
        except Exception as e:
            log_api("<= {to_snake_case(class_name)}.{name} failed")
            raise e"""
            )
        # expect_* methods become event context-manager factories.
        if "expect_" in name:
            print("")
            return_type_value = return_type(value)
            return_type_value = re.sub(r"\"([^\"]+)Impl\"", r"\1", return_type_value)
            # Derive the event name from the method name, e.g.
            # expect_console_message -> consolemessage -> console.
            event_name = re.sub(r"expect_(.*)", r"\1", name)
            event_name = re.sub(r"_", "", event_name)
            event_name = re.sub(r"consolemessage", "console", event_name)
            print(
                f"""    def {name}({signature(value, len(name) + 9)}) -> Async{return_type_value}:
        \"\"\"{class_name}.{name}

        Returns context manager that waits for ``event`` to fire upon exit. It passes event's value
        into the ``predicate`` function and waits for the predicate to return a truthy value. Will throw
        an error if the page is closed before the ``event`` is fired.

        async with page.expect_{event_name}() as event_info:
            await page.click("button")
        value = event_info.value

        Parameters
        ----------
        predicate : Optional[typing.Callable[[Any], bool]]
            Predicate receiving event data.
        timeout : Optional[int]
            Maximum wait time in milliseconds, defaults to 30 seconds, pass `0` to disable the timeout.
            The default value can be changed by using the browserContext.set_default_timeout(timeout) or
            page.set_default_timeout(timeout) methods.
        \"\"\""""
            )
            # Choose the impl wait_for_* call matching the event kind.
            wait_for_method = "wait_for_event(event, predicate, timeout)"
            if event_name == "request":
                wait_for_method = "wait_for_request(url_or_predicate, timeout)"
            elif event_name == "response":
                wait_for_method = "wait_for_response(url_or_predicate, timeout)"
            elif event_name == "loadstate":
                wait_for_method = "wait_for_load_state(state, timeout)"
            elif event_name == "navigation":
                wait_for_method = "wait_for_navigation(url, wait_until, timeout)"
            elif event_name != "event":
                print(f'        event = "{event_name}"')
            print(
                f"        return AsyncEventContextManager(self._impl_obj.{wait_for_method})"
            )
    print("")
    print(f"mapping.register({class_name}Impl, {class_name})")
def main() -> None:
    """Emit the complete generated async API module to stdout."""
    print(header)
    print(
        "from playwright._impl._async_base import AsyncEventContextManager, AsyncBase, mapping"
    )
    print("NoneType = type(None)")
    # Generate one wrapper class per implementation type, in declared order.
    for impl_class in all_types:
        generate(impl_class)
    documentation_provider.print_remainder()


if __name__ == "__main__":  # pragma: no cover
    main()
|
[
"noreply@github.com"
] |
DonatelloJiang.noreply@github.com
|
3cc931eccfb596a8705f26100b644a126938ad30
|
17cc70b4c70b76ad3fcfc7bc3af4b2f4d6d3740b
|
/tests/workflow/tasks/qchem.py
|
0226c5b9944c271e8d94e2979c4f1be0bda123a9
|
[
"MIT"
] |
permissive
|
kijanac/materia
|
82b8cf7391729109784a4f703ba0bcf06f80b617
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
refs/heads/main
| 2021-07-11T18:21:13.371228
| 2021-05-06T19:52:00
| 2021-05-06T19:52:00
| 237,714,956
| 0
| 0
|
MIT
| 2021-03-05T15:30:28
| 2020-02-02T03:53:32
|
Python
|
UTF-8
|
Python
| false
| false
| 4,818
|
py
|
import materia
import textwrap
import unittest.mock as mock
def test_qchem_rttddft():
    """Verify WriteQChemTDSCF serializes settings into TDSCF.prm.

    NOTE(review): despite its name, this test is a byte-for-byte duplicate of
    ``test_write_qchem_tdscf`` below and never exercises an RT-TDDFT task —
    presumably a copy-paste placeholder; confirm and replace with a real
    RT-TDDFT test.
    """
    # Representative TDSCF parameter block.
    settings = materia.Settings(
        dt=0.02,
        Stabilize=0,
        TCLOn=0,
        MaxIter=100,
        ApplyImpulse=1,
        ApplyCw=0,
        FieldFreq=0.7,
        Tau=0.07,
        FieldAmplitude=0.001,
        ExDir=1.0,
        EyDir=1.0,
        EzDir=1.0,
        Print=0,
        StatusEvery=10,
        SaveDipoles=1,
        DipolesEvery=2,
        SavePopulations=0,
        SaveFockEnergies=0,
        WriteDensities=0,
        SaveEvery=500,
        FourierEvery=5000,
        MMUT=1,
        LFLPPC=0,
    )
    # Expected file contents written to TDSCF.prm.
    out_str = textwrap.dedent(
        """ dt=0.02
        Stabilize=0
        TCLOn=0
        MaxIter=100
        ApplyImpulse=1
        ApplyCw=0
        FieldFreq=0.7
        Tau=0.07
        FieldAmplitude=0.001
        ExDir=1.0
        EyDir=1.0
        EzDir=1.0
        Print=0
        StatusEvery=10
        SaveDipoles=1
        DipolesEvery=2
        SavePopulations=0
        SaveFockEnergies=0
        WriteDensities=0
        SaveEvery=500
        FourierEvery=5000
        MMUT=1
        LFLPPC=0"""
    )
    # FIXME: make this test work on Windows using pathlib
    # Patch the filesystem so nothing is actually created on disk.
    mock_open = mock.mock_open()
    # mock_expand = mock.MagicMock(side_effect=lambda s: s)
    mock_os_makedirs = mock.MagicMock(side_effect=lambda s: s)

    with mock.patch("builtins.open", mock_open):
        with mock.patch("os.makedirs", mock_os_makedirs):
            materia.WriteQChemTDSCF(
                settings=settings, work_directory="/mock/path/to/dir"
            ).run()

    # mock_expand.assert_called_once_with("/mock/path")
    mock_os_makedirs.assert_called_once_with("/mock/path/to/dir")
    mock_open.assert_called_once_with("/mock/path/to/dir/TDSCF.prm", "w")
    mock_open().write.assert_called_once_with(out_str)
def test_write_qchem_tdscf():
    """Verify WriteQChemTDSCF serializes settings into TDSCF.prm.

    The filesystem (``open`` and ``os.makedirs``) is mocked so the task's
    directory creation and file write can be asserted without touching disk.
    """
    # Representative TDSCF parameter block.
    settings = materia.Settings(
        dt=0.02,
        Stabilize=0,
        TCLOn=0,
        MaxIter=100,
        ApplyImpulse=1,
        ApplyCw=0,
        FieldFreq=0.7,
        Tau=0.07,
        FieldAmplitude=0.001,
        ExDir=1.0,
        EyDir=1.0,
        EzDir=1.0,
        Print=0,
        StatusEvery=10,
        SaveDipoles=1,
        DipolesEvery=2,
        SavePopulations=0,
        SaveFockEnergies=0,
        WriteDensities=0,
        SaveEvery=500,
        FourierEvery=5000,
        MMUT=1,
        LFLPPC=0,
    )
    # Expected file contents written to TDSCF.prm.
    out_str = textwrap.dedent(
        """ dt=0.02
        Stabilize=0
        TCLOn=0
        MaxIter=100
        ApplyImpulse=1
        ApplyCw=0
        FieldFreq=0.7
        Tau=0.07
        FieldAmplitude=0.001
        ExDir=1.0
        EyDir=1.0
        EzDir=1.0
        Print=0
        StatusEvery=10
        SaveDipoles=1
        DipolesEvery=2
        SavePopulations=0
        SaveFockEnergies=0
        WriteDensities=0
        SaveEvery=500
        FourierEvery=5000
        MMUT=1
        LFLPPC=0"""
    )
    # FIXME: make this test work on Windows using pathlib
    # Patch the filesystem so nothing is actually created on disk.
    mock_open = mock.mock_open()
    # mock_expand = mock.MagicMock(side_effect=lambda s: s)
    mock_os_makedirs = mock.MagicMock(side_effect=lambda s: s)

    with mock.patch("builtins.open", mock_open):
        with mock.patch("os.makedirs", mock_os_makedirs):
            materia.WriteQChemTDSCF(
                settings=settings, work_directory="/mock/path/to/dir"
            ).run()

    # mock_expand.assert_called_once_with("/mock/path")
    mock_os_makedirs.assert_called_once_with("/mock/path/to/dir")
    mock_open.assert_called_once_with("/mock/path/to/dir/TDSCF.prm", "w")
    mock_open().write.assert_called_once_with(out_str)
|
[
"kijana@mit.edu"
] |
kijana@mit.edu
|
316c02f8d33733a6471ba9ee57338c014e9809ed
|
dd76697bc5fccb3637fc6056e395f96075f03d8c
|
/Day-009/exercise-3-auction.py
|
3e068fd79bd3507c9bcf4255aa09d2b6ca3313e5
|
[] |
no_license
|
georgggg/python-bootcamp
|
449f759ee339864e8e3bd4d00e06f32292616a9c
|
0edacb45bb05bf5622c457568a350ebb7ad4451b
|
refs/heads/master
| 2023-08-21T12:02:32.546186
| 2021-09-16T02:17:29
| 2021-09-16T02:17:29
| 358,429,006
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 864
|
py
|
from clear_library import clear
#HINT: You can call clear() to clear the output in the console.
from art import logo
print(logo)

keep_going = True
bids = {}
winner = ""
winning_amount = 0

# Collect bids until the operator types 'no' (or anything unrecognized).
while keep_going:
    bidder = input("What is your name? ").lower()
    amount = float(input("What is your bid? $"))
    bids[bidder] = amount

    answer = input("Type 'yes' to register more bids or 'no' to stop accepting bids ").lower()
    if answer == "yes":
        clear()
    elif answer == "no":
        keep_going = False
    else:
        print("You provided an invalid input, I am finishing the auction now.")
        keep_going = False

# Find the highest bid; strict '>' keeps the earliest bidder on a tie,
# matching the original behavior.
for bidder, amount in bids.items():
    if amount > winning_amount:
        winner = bidder
        winning_amount = amount

if winning_amount == 0:
    print("No one wins this auction")
else:
    print(f"{winner} won this auction with a bid of ${winning_amount}")
|
[
"giorgggg.napone@gmail.com"
] |
giorgggg.napone@gmail.com
|
52f6bf0e2777b76196fcc0f081013bd6f87368a3
|
0daeb1e9b81538e5784df932f25b72d2dfebb527
|
/app/db/models.py
|
3106f52af469ffd8187cd99ca66e8fccd67a97e9
|
[] |
no_license
|
termmerge/backend
|
94c8ed4b1e1f6d42fc3000db4f534f444208df4a
|
436a76690c087b506a03298011b02ccb278b8df2
|
refs/heads/master
| 2021-01-22T19:41:43.883055
| 2017-03-18T21:35:14
| 2017-03-18T21:35:14
| 85,229,183
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,041
|
py
|
from sqlalchemy import \
create_engine, \
Column, ForeignKey, \
MetaData
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from os import environ
# The connection URL is assembled from environment variables so credentials
# stay out of source control.
# NOTE(review): the 'postgres://' scheme is rejected by newer SQLAlchemy
# releases in favour of 'postgresql://' — confirm the pinned version accepts it.
db_engine = create_engine(
    'postgres://{}:{}@{}/{}'.format(
        environ.get("POSTGRES_USER"),
        environ.get("POSTGRES_PASS"),
        environ.get("POSTGRES_HOST"),
        environ.get("POSTGRES_DB")
    )
)

# Shared metadata, session factory, and declarative base — all bound to the
# engine at import time.
metadata = MetaData(bind=db_engine)
SessionClass = sessionmaker(bind=db_engine)
Base = declarative_base(bind=db_engine, metadata=metadata)
class User(Base):
    """A registered account: login credentials plus display name."""

    __tablename__ = "user"

    id = Column(postgresql.INTEGER, primary_key=True, autoincrement=True, nullable=False)
    name = Column(postgresql.TEXT, nullable=False)
    # Login identifier; uniqueness is enforced at the database level.
    username = Column(postgresql.TEXT, unique=True, nullable=False)
    # NOTE(review): stored as opaque text — hashing (if any) happens elsewhere.
    password = Column(postgresql.TEXT, nullable=False)
    created_at = Column(postgresql.TIMESTAMP, nullable=False)
class Report(Base):
    """A convergence report submitted by a user."""

    __tablename__ = "report"

    id = Column(postgresql.INTEGER, primary_key=True, autoincrement=True, nullable=False)
    # Owning user (FK to user.id).
    user_id = Column(postgresql.INTEGER, ForeignKey("user.id"), nullable=False)
    created_at = Column(postgresql.TIMESTAMP, nullable=False)
    # Serialized convergence payload; opaque text as far as the schema goes.
    converge = Column(postgresql.TEXT, nullable=False)
class WordTimeline(Base):
    """Per-report word timeline rows, keyed by (report_id, branch, epoch)."""

    __tablename__ = "word_timeline"

    # Composite primary key: report_id + branch + epoch.
    report_id = Column(postgresql.INTEGER, ForeignKey("report.id"), primary_key=True, nullable=False)
    word = Column(postgresql.TEXT, nullable=False)
    branch = Column(postgresql.INTEGER, primary_key=True, nullable=False)
    epoch = Column(postgresql.INTEGER, primary_key=True, nullable=False)
|
[
"alastairparagas@gmail.com"
] |
alastairparagas@gmail.com
|
c8bc144fd4a0b4a4827eb792d625d9ba544a7810
|
29cb65dbd79ec0a396289ad838649ea8c50872f7
|
/main.py
|
9cdeb8afc200a304b0dcefb8f52a31f183790928
|
[] |
no_license
|
Introduction-to-Programming-OSOWSKI/3-7-last-letter-JustinInthaly21
|
644d0085b9dfb0f0b5576868ef6f14a51c6b0151
|
59d5c2e1d588d8a73dadc22951df5aebab6d5247
|
refs/heads/master
| 2023-03-24T22:02:39.595979
| 2021-01-26T20:49:03
| 2021-01-26T20:49:03
| 333,210,870
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 64
|
py
|
def lastLetter(w):
    """Return the final character of the sequence ``w``.

    Raises IndexError when ``w`` is empty (unchanged from the original).
    """
    # Negative indexing replaces ``w[len(w) - 1]``; a stray no-op ``len(w)``
    # statement was removed.
    return w[-1]
|
[
"825134@mystma.org"
] |
825134@mystma.org
|
ea62b37e3d046ce86e29d628bd31460c525fd6d7
|
1d928c3f90d4a0a9a3919a804597aa0a4aab19a3
|
/python/spaCy/2016/12/tokenizer_exceptions.py
|
6cf144b44974a963fb3695866040e07a2a302a64
|
[
"MIT"
] |
permissive
|
rosoareslv/SED99
|
d8b2ff5811e7f0ffc59be066a5a0349a92cbb845
|
a062c118f12b93172e31e8ca115ce3f871b64461
|
refs/heads/main
| 2023-02-22T21:59:02.703005
| 2021-01-28T19:40:51
| 2021-01-28T19:40:51
| 306,497,459
| 1
| 1
| null | 2020-11-24T20:56:18
| 2020-10-23T01:18:07
| null |
UTF-8
|
Python
| false
| false
| 720
|
py
|
# encoding: utf8
from __future__ import unicode_literals
from ..symbols import *
from ..language_data import PRON_LEMMA
# No full-form tokenizer exceptions defined for this language yet.
TOKENIZER_EXCEPTIONS = {
}

# Swedish abbreviations that must be kept as single tokens, including their
# internal/trailing periods (e.g. "t.ex." = "for example").
ORTH_ONLY = [
    "ang.",
    "anm.",
    "bil.",
    "bl.a.",
    "dvs.",
    "e.Kr.",
    "el.",
    "e.d.",
    "eng.",
    "etc.",
    "exkl.",
    "f.d.",
    "fid.",
    "f.Kr.",
    "forts.",
    "fr.o.m.",
    "f.ö.",
    "förf.",
    "inkl.",
    "jur.",
    "kl.",
    "kr.",
    "lat.",
    "m.a.o.",
    "max.",
    "m.fl.",
    "min.",
    "m.m.",
    "obs.",
    "o.d.",
    "osv.",
    "p.g.a.",
    "ref.",
    "resp.",
    "s.",
    "s.a.s.",
    "s.k.",
    "st.",
    "s:t",
    "t.ex.",
    "t.o.m.",
    "ung.",
    "äv.",
    "övers."
]
|
[
"rodrigosoaresilva@gmail.com"
] |
rodrigosoaresilva@gmail.com
|
facbccd8226ba680e7647576246385ecb89a2e65
|
a1e8b3efafcf3171d57f48472b666993e15e6129
|
/scraper/settings.py
|
eabfbf4d21705a60caf4d45a7271fb8a28818e7f
|
[
"MIT"
] |
permissive
|
digawp/MyScraper
|
1217f1a2c02c16849c132dfe38150497a7a592c1
|
1f0bcb47a1b81002bf70f0869949e16ab10c90e6
|
refs/heads/master
| 2021-03-27T10:19:18.095727
| 2017-05-09T08:02:27
| 2017-05-10T05:23:51
| 82,699,513
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,777
|
py
|
# -*- coding: utf-8 -*-
# Scrapy settings for scraper project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'scraper'

SPIDER_MODULES = ['scraper.spiders']
NEWSPIDER_MODULE = 'scraper.spiders'
# Persist crawl state on disk so an interrupted crawl can be resumed.
JOBDIR = 'jobs/crunchbase'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# Firefox
# USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0'
# Edge
# Impersonate a desktop Edge browser.
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393'

# Obey robots.txt rules
# NOTE(review): robots.txt is deliberately ignored — confirm this is
# acceptable for the target site's terms of use.
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# 6-second delay between requests to the same site (politeness/ban avoidance).
DOWNLOAD_DELAY = 6
# The download delay setting will honor only one of:
# NOTE: re-assigned (to the same value) in the Scrapoxy section at the bottom
# of this file.
CONCURRENT_REQUESTS_PER_DOMAIN = 1
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
# COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
# 'Upgrade-Insecure-Requests': '1',
# 'Connection': 'keep-alive',
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
# }
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'scraper.middlewares.ScraperSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'scraper.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
# Route every scraped item through the project pipeline (priority 300).
ITEM_PIPELINES = {
    'scraper.pipelines.ScraperPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# Cache every response on disk; EXPIRATION_SECS = 0 means cached entries never
# expire. HTTP 416 responses are excluded from the cache.
HTTPCACHE_ENABLED = True
HTTPCACHE_EXPIRATION_SECS = 0
HTTPCACHE_DIR = 'httpcache'
HTTPCACHE_IGNORE_HTTP_CODES = [416]
HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
##########################################################
# Uncomment the following if using scrapy-rotating-proxies
##########################################################
# DOWNLOADER_MIDDLEWARES = {
# 'rotating_proxies.middlewares.RotatingProxyMiddleware': 610,
# 'rotating_proxies.middlewares.BanDetectionMiddleware': 620,
# }
# def load_lines(path):
# with open(path, 'rb') as f:
# return [line.strip() for line in
# f.read().decode('utf8').splitlines()
# if line.strip()]
# ROTATING_PROXY_LIST = load_lines('proxies.txt')
############################################
## Uncomment the following if using Scrapoxy
############################################
# NOTE(review): re-assignment — the same value is already set earlier in this file.
CONCURRENT_REQUESTS_PER_DOMAIN = 1
RETRY_TIMES = 3
# PROXY
# All traffic is funnelled through the local Scrapoxy master.
PROXY = 'http://127.0.0.1:8888/?noconnect'
# SCRAPOXY
API_SCRAPOXY = 'http://127.0.0.1:8889/api'
API_SCRAPOXY_PASSWORD = 'password'
# BLACKLISTING
# Responses with these status codes cause the emitting proxy instance to be blacklisted.
BLACKLIST_HTTP_STATUS_CODES = [ 500, 416 ]
# NOTE(review): this dict replaces any DOWNLOADER_MIDDLEWARES defined earlier
# (including the commented-out rotating-proxies block above).
DOWNLOADER_MIDDLEWARES = {
    'scrapoxy.downloadmiddlewares.proxy.ProxyMiddleware': 100,
    'scrapoxy.downloadmiddlewares.wait.WaitMiddleware': 101,
    'scrapoxy.downloadmiddlewares.scale.ScaleMiddleware': 102,
    # Disable Scrapy's built-in proxy middleware so Scrapoxy's takes over.
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': None,
    'scrapoxy.downloadmiddlewares.blacklist.BlacklistDownloaderMiddleware': 950,
}
|
[
"digawp@gmail.com"
] |
digawp@gmail.com
|
4a8e362ba59d666b12090b79750312bf1c69b322
|
6589ceeae7853ce40d79ce1ed79d7d1dd393724b
|
/manage.py
|
174492cb7932d69782ee5b45bec1a22510695bc1
|
[] |
no_license
|
anshikam97/e-commerce-django-project
|
810ea3101e2120e4e4ce1c45293d4440dd0157e8
|
40f6a7c66f415aef453f43bcd68f7be83334970f
|
refs/heads/master
| 2023-03-30T14:03:51.168724
| 2021-04-08T07:31:04
| 2021-04-08T07:31:04
| 355,803,225
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 537
|
py
|
#!/usr/bin/env python
"""Command-line entry point for Django's administrative tasks."""
import os
import sys


def _main():
    """Point Django at this project's settings and dispatch to its CLI."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'learn.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    _main()
|
[
"39756723+anshikam97@users.noreply.github.com"
] |
39756723+anshikam97@users.noreply.github.com
|
6f7916128485e55485ac5c878f35b7cc780f27c3
|
c4f9fa0671164bc8299864a531f535afcae2a924
|
/multimodal_model.py
|
035355af63eccbe6741476f832895b06a9f13be6
|
[
"MIT"
] |
permissive
|
mdanilevicz/maize_early_yield_prediction
|
7ae45177975e3a5fb630ca68eef485f69f209bd9
|
e1090e6555a544a13bec19c974d628efccbcbeca
|
refs/heads/main
| 2023-08-23T18:10:47.523660
| 2021-10-25T08:26:01
| 2021-10-25T08:26:01
| 388,364,227
| 3
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,229
|
py
|
# +
# Import the libraries
from fastai.vision.all import *
import fastai
from fastai.tabular.all import *
from fastai.data.load import _FakeLoader, _loaders
from glob import glob
import torch
import pandas as pd
import numpy as np
import os
# Custom functions
from msi_utils import *
from fold_utils import *
from multimodal_utisl import *
# -
# Module-level slot written by the forward hook below and read in
# TabVis.forward. (A `global` statement at module scope is a no-op; kept as a
# declaration of intent.)
global glb_tab_logits
def get_tab_logits(self, inp, out):
    # Forward hook with the standard (module, input, output) signature:
    # stashes the hooked layer's *input* tuple in the module global.
    global glb_tab_logits
    glb_tab_logits = inp
# Module-level slot written by the forward hook below and read in
# TabVis.forward. (A `global` statement at module scope is a no-op; kept as a
# declaration of intent.)
global glb_vis_logits
def get_vis_logits(self, inp, out):
    # Forward hook with the standard (module, input, output) signature:
    # stashes the hooked layer's *input* tuple in the module global.
    global glb_vis_logits
    glb_vis_logits = inp
class TabVis(nn.Module):
    """Two-branch (tabular + image) regressor with a late-fusion head.

    Wraps a fastai tabular model and a vision backbone. Forward hooks capture
    the activations feeding each branch's final linear layer; those captures
    are concatenated and passed through `mixed_reg`, so one forward pass
    returns three predictions: (tabular, visual, fused).

    NOTE(review): the fusion sizes assume the tabular penultimate layer yields
    100 features and the vision one 512 (100 + 512 = 612) — confirm against
    the wrapped models.
    """
    # Modify the architecture here if you want more or less layers at the fusion module
    def __init__(self, tab_model, vis_model, num_classes=1):
        super(TabVis, self).__init__()
        self.tab_model = tab_model
        self.vis_model = vis_model
        # Add the fusion module
        self.mixed_reg = nn.Sequential(nn.Linear(612,612),
                                      nn.ReLU(inplace=True),
                                      nn.Linear(612, num_classes))
        # receive the weights from tab and spectral modules
        self.tab_reg = nn.Linear(100, num_classes)  # NOTE(review): defined but unused in forward()
        self.vis_reg = nn.Linear(512, num_classes)  # NOTE(review): defined but unused in forward()
        # register hook that will grab the module's weights
        self.tab_handle = self.tab_model.layers[2][0].register_forward_hook(get_tab_logits)
        self.vis_handle = self.vis_model[11].register_forward_hook(get_vis_logits)
    def remove_my_hooks(self):
        # Detach both capture hooks (e.g. before export, or once the global
        # side effects are no longer wanted).
        self.tab_handle.remove()
        self.vis_handle.remove()
        return None
    def forward(self, x_cat, x_cont, x_im):
        """Run both branches and the fusion head.

        Relies on the module globals `glb_tab_logits`/`glb_vis_logits` being
        refreshed by the forward hooks during the two submodel calls below, so
        the call order matters.
        """
        # Tabular Regressor
        tab_pred = self.tab_model(x_cat, x_cont)
        # Spectral Regressor
        vis_pred = self.vis_model(x_im)
        # Logits
        tab_logits = glb_tab_logits[0] # Only grabbling weights, not bias'
        vis_logits = glb_vis_logits[0] # Only grabbling weights, not bias'
        mixed = torch.cat((tab_logits, vis_logits), dim=1)
        # Mixed classifier block
        mixed_pred = self.mixed_reg(mixed)
        return (tab_pred, vis_pred, mixed_pred)
class GradientBlending(nn.Module):
    """Weighted sum of per-head RMSE losses for the multimodal model.

    Expects `xb` to be the (tab_pred, visual_pred, fused_pred) triple returned
    by `TabVis.forward` and `yb` the regression target. Each head's RMSE is
    scaled by `loss_scale`, weighted, and summed.
    """
    def __init__(self, tab_weight=0.0, visual_weight=0.0, tab_vis_weight=1.0, loss_scale=1.0):
        "Expects weights for each model, the combined model, and an overall scale"
        # BUG FIX: the original called super(myGradientBlending, self).__init__(),
        # but no class named `myGradientBlending` exists, so every instantiation
        # raised NameError. Use the zero-argument form.
        super().__init__()
        self.tab_weight = tab_weight
        self.visual_weight = visual_weight
        self.tab_vis_weight = tab_vis_weight
        self.scale = loss_scale
    def remove_my_hooks(self):
        # NOTE(review): `tab_handle`/`vis_handle` are attributes of TabVis, not
        # of this loss — they are never assigned here, so calling this raises
        # AttributeError. Kept as-is to preserve the public interface; confirm
        # with the authors whether it can be removed.
        self.tab_handle.remove()
        self.vis_handle.remove()
        #self.print_handle.remove()
        return None
    def forward(self, xb, yb):
        """Return the weighted, scaled sum of the three heads' RMSE losses."""
        tab_out, visual_out, tv_out = xb
        targ = yb
        # Add some hook here to log the modules losses in a csv
        "Gathers `self.loss` for each model, weighs, then sums"
        t_loss = root_mean_squared_error(tab_out, targ) * self.scale
        v_loss = root_mean_squared_error(visual_out, targ) * self.scale
        tv_loss = root_mean_squared_error(tv_out, targ) * self.scale
        weighted_t_loss = t_loss * self.tab_weight
        weighted_v_loss = v_loss * self.visual_weight
        weighted_tv_loss = tv_loss * self.tab_vis_weight
        loss = weighted_t_loss + weighted_v_loss + weighted_tv_loss
        return loss
# Metrics
def t_rmse(inp, targ):
    """RMSE of the tabular head: compares `inp[0]` (flattened) against `targ`."""
    tab_pred = inp[0].flatten()
    flat_pred, flat_targ = flatten_check(tab_pred, targ)
    return root_mean_squared_error(flat_pred, flat_targ)
def v_rmse(inp, targ):
    """RMSE of the visual head: compares `inp[1]` (flattened) against `targ`."""
    vis_pred = inp[1].flatten()
    flat_pred, flat_targ = flatten_check(vis_pred, targ)
    return root_mean_squared_error(flat_pred, flat_targ)
def tv_rmse(inp, targ):
    """RMSE of the fused head: compares `inp[2]` (flattened) against `targ`."""
    fused_pred = inp[2].flatten()
    flat_pred, flat_targ = flatten_check(fused_pred, targ)
    return root_mean_squared_error(flat_pred, flat_targ)
def weighted_RMSEp(inp, targ, w_t=0.333, w_v=0.333, w_tv=0.333):
    """Percent RMSE of the fused head, normalised by the target's value range.

    NOTE(review): reads a module-level ``df`` (expects a 'Yield' column) that
    is not defined in this file — this raises NameError unless the importing
    module supplies it; confirm where ``df`` is meant to come from.
    NOTE(review): the ``w_t``/``w_v``/``w_tv`` parameters are accepted but
    never used.
    """
    # normalised by the max -min
    delta = df['Yield'].max() - df['Yield'].min()
    tv_inp = (inp[2].flatten())
    rmsep = root_mean_squared_error(*flatten_check(tv_inp,targ)) / delta
    return rmsep * 100
|
[
"monica.danilevicz@gmail.com"
] |
monica.danilevicz@gmail.com
|
6249ed7986e6f6289fcf9ae8b0a86a12eb328d11
|
50371ec1039b3c4bff8b764b5eec3128023e2cae
|
/genero/views.py
|
a9bbf6702398246be80cbdc809443e5b96473ef0
|
[] |
no_license
|
juniorjrml/IGTIFlixWeb
|
ddd6f4384ad142242d44286e26f00798432232c0
|
d03160a4f45c4220ef93a1856e18a1087a1841e8
|
refs/heads/master
| 2023-01-30T21:43:16.725590
| 2020-12-17T21:05:28
| 2020-12-17T21:05:28
| 306,674,555
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,559
|
py
|
from django.shortcuts import render
from . import forms
from . import models
from django.http import HttpResponseNotAllowed
def cadastro(request):
    """Render the genre list page; on POST, attempt to create a new genre."""
    genero_form = forms.GeneroForm()
    if request.method == 'POST':
        genero_form = forms.GeneroForm(request.POST)
        if genero_form.is_valid():
            genero_form.save(commit=True)
        else:
            print("formulário não é válido")
    context = {
        'form': genero_form,
        'generos_records': models.Genero.objects.order_by('descricao'),
    }
    return render(request, 'genero/genero.html', context)
def delete(request, id):
    """Delete the Genero with the given primary key and re-render the list.

    On failure responds with HTTP 405.
    """
    try:
        # Keep only the statement that can realistically fail inside the try.
        models.Genero.objects.filter(id=id).delete()
    except Exception:
        # BUG FIX: the original used a bare `except:` (which also swallows
        # SystemExit/KeyboardInterrupt) and called HttpResponseNotAllowed()
        # without the required `permitted_methods` argument — itself a
        # TypeError, so the intended error response was never produced.
        return HttpResponseNotAllowed(['GET'])
    form = forms.GeneroForm()
    generos_list = models.Genero.objects.order_by('descricao')
    data_dict = {'form': form, 'generos_records': generos_list}
    return render(request, 'genero/genero.html', data_dict)
def update(request, id):
    """Edit an existing genre: GET shows a pre-filled form, POST saves it."""
    item = models.Genero.objects.get(id=id)
    if request.method == "GET":
        prefilled = forms.GeneroForm(initial={'descricao': item.descricao})
        return render(request, 'genero/genero_upd.html', {'form': prefilled})
    submitted = forms.GeneroForm(request.POST)
    item.descricao = submitted.data['descricao']
    item.save()
    context = {
        'form': forms.GeneroForm(),
        'generos_records': models.Genero.objects.order_by('descricao'),
    }
    return render(request, 'genero/../redirect.html', context)
|
[
"jancarlolcj@gmail.com"
] |
jancarlolcj@gmail.com
|
2576a0b4a3d534130187cea2aa4f8e24e83df3c5
|
f82757475ea13965581c2147ff57123b361c5d62
|
/gi-stubs/repository/LibvirtGConfig/CapabilitiesGuestDomainClass.py
|
207f38ddbaec803324795fd4bfa621a30e1eedb8
|
[] |
no_license
|
ttys3/pygobject-stubs
|
9b15d1b473db06f47e5ffba5ad0a31d6d1becb57
|
d0e6e93399212aada4386d2ce80344eb9a31db48
|
refs/heads/master
| 2022-09-23T12:58:44.526554
| 2020-06-06T04:15:00
| 2020-06-06T04:15:00
| 269,693,287
| 8
| 2
| null | 2020-06-05T15:57:54
| 2020-06-05T15:57:54
| null |
UTF-8
|
Python
| false
| false
| 4,764
|
py
|
# encoding: utf-8
# module gi.repository.LibvirtGConfig
# from /usr/lib64/girepository-1.0/LibvirtGConfig-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gobject as __gobject
# NOTE: auto-generated introspection stub — the method bodies below are
# placeholders; the real implementations come from the LibvirtGConfig-1.0
# typelib at runtime. Do not edit by hand.
class CapabilitiesGuestDomainClass(__gi.Struct):
    """
    :Constructors:
    ::
        CapabilitiesGuestDomainClass()
    """
    def __delattr__(self, *args, **kwargs): # real signature unknown
        """ Implement delattr(self, name). """
        pass
    def __dir__(self, *args, **kwargs): # real signature unknown
        """ Default dir() implementation. """
        pass
    def __eq__(self, *args, **kwargs): # real signature unknown
        """ Return self==value. """
        pass
    def __format__(self, *args, **kwargs): # real signature unknown
        """ Default object formatter. """
        pass
    def __getattribute__(self, *args, **kwargs): # real signature unknown
        """ Return getattr(self, name). """
        pass
    def __ge__(self, *args, **kwargs): # real signature unknown
        """ Return self>=value. """
        pass
    def __gt__(self, *args, **kwargs): # real signature unknown
        """ Return self>value. """
        pass
    def __hash__(self, *args, **kwargs): # real signature unknown
        """ Return hash(self). """
        pass
    def __init_subclass__(self, *args, **kwargs): # real signature unknown
        """
        This method is called when a class is subclassed.
        The default implementation does nothing. It may be
        overridden to extend subclasses.
        """
        pass
    def __init__(self): # real signature unknown; restored from __doc__
        pass
    def __le__(self, *args, **kwargs): # real signature unknown
        """ Return self<=value. """
        pass
    def __lt__(self, *args, **kwargs): # real signature unknown
        """ Return self<value. """
        pass
    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass
    def __ne__(self, *args, **kwargs): # real signature unknown
        """ Return self!=value. """
        pass
    def __reduce_ex__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass
    def __reduce__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass
    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass
    def __setattr__(self, *args, **kwargs): # real signature unknown
        """ Implement setattr(self, name, value). """
        pass
    def __sizeof__(self, *args, **kwargs): # real signature unknown
        """ Size of object in memory, in bytes. """
        pass
    def __str__(self, *args, **kwargs): # real signature unknown
        """ Return str(self). """
        pass
    def __subclasshook__(self, *args, **kwargs): # real signature unknown
        """
        Abstract classes can override this to customize issubclass().
        This is invoked early on by abc.ABCMeta.__subclasscheck__().
        It should return True, False or NotImplemented. If it returns
        NotImplemented, the normal algorithm is used. Otherwise, it
        overrides the normal algorithm (and the outcome is cached).
        """
        pass
    def __weakref__(self, *args, **kwargs): # real signature unknown
        pass
    padding = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    parent_class = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    __class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
    __dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(CapabilitiesGuestDomainClass), '__module__': 'gi.repository.LibvirtGConfig', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'CapabilitiesGuestDomainClass' objects>, '__weakref__': <attribute '__weakref__' of 'CapabilitiesGuestDomainClass' objects>, '__doc__': None, 'parent_class': <property object at 0x7fa8bf3b39a0>, 'padding': <property object at 0x7fa8bf3b3a90>})"
    __gtype__ = None # (!) real value is '<GType void (4)>'
    __info__ = StructInfo(CapabilitiesGuestDomainClass)
|
[
"ttys3@outlook.com"
] |
ttys3@outlook.com
|
a70c27df042b2926854d59e73db71126c8aa9c0e
|
a98f3e82f30085851b8c7cb9cb576a645cdc369e
|
/script/bead02.py
|
23d6924c201112445cc0a0d0be1e1eab6b6d1e8b
|
[] |
no_license
|
marko1777/School
|
03a07834af6a370ef0c8a2ecb8c5d9d759a1b9d6
|
c19fc32f883aebcdc0a12fbf9b72f6f898546e66
|
refs/heads/master
| 2021-06-22T12:36:18.722167
| 2017-08-03T15:44:38
| 2017-08-03T15:44:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,272
|
py
|
#! /usr/bin/env python
"""Translate every ``*.prog`` file in the working directory into a ``.py`` file,
rewriting CIKLUS -> for, ELAGAZAS -> if, ``[[`` -> ``:`` + indent, ``;`` -> newline."""
import re
import fileinput  # NOTE(review): unused, kept because other tooling may expect it
import glob
# Regex fragments describing one source line.
sequence = ".*"
manual = "(\[\[).*(\]\])"
loop = "(CIKLUS)" + sequence + manual
branching = "(ELAGAZAS)" + sequence + manual
for prog_path in glob.glob("*.prog"):
    # FIX: use `with` so both handles are closed even if a line fails to
    # translate (the original used bare open()/close() and shadowed the
    # built-in name `file`).
    out_path = prog_path.split(".")[0] + ".py"
    with open(prog_path, "r") as in_file, open(out_path, "w") as out_file:
        for raw_line in in_file:
            line = raw_line.strip()
            match_value = loop + "|" + branching + "|" + sequence
            line_match = re.match(match_value, line)
            temp_string = line_match.group(0)
            if line_match.group(1) == "CIKLUS":
                temp_string = temp_string.replace("CIKLUS", "for")
            if line_match.group(4) == "ELAGAZAS":
                temp_string = temp_string.replace("ELAGAZAS", "if")
            if temp_string.find("[[") != -1:
                # Block opener: start an indented suite.
                temp_string = temp_string.replace("[[", ":\n    ")
                while temp_string.find(";") != -1:
                    temp_string = temp_string.replace(";", "\n    ")
            else:
                temp_string = temp_string.replace(";", "\n")
            prev_temp_string = temp_string
            temp_string = temp_string.replace("]]", "\n")
            # Append a newline only when no block terminator was consumed.
            if prev_temp_string == temp_string:
                out_file.write(temp_string + "\n")
            else:
                out_file.write(temp_string)
|
[
"noreply@github.com"
] |
marko1777.noreply@github.com
|
64c4ec41ef59225ecd8f657f6f5fcb6a4058108e
|
7555197be824ba7c5da992b7370cc46e5a83ffe3
|
/docs/conf.py
|
2e0a5b486a8ff9c7f8f29f412641dfe8537d254f
|
[
"MIT"
] |
permissive
|
piotrbajger/bibtexmagic
|
453038a1bfbdcd428c2f6f31156ed8f56ce7c778
|
6577002d7822beb5cb04cc2605c9adc5a87632c3
|
refs/heads/master
| 2020-04-27T09:07:35.720866
| 2019-03-19T08:09:16
| 2019-03-19T08:09:16
| 174,201,652
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,969
|
py
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Make the documented package importable for sphinx.ext.autodoc.
# NOTE(review): this prepends the *grandparent* of this docs directory
# ('..', '..'); if the package sits one level above docs/ (the repository
# root), a single '..' would be expected — confirm against the project layout.
sys.path.insert(0,
                os.path.join(os.path.dirname(__file__), '..', '..'))
# -- Project information -----------------------------------------------------
# Project metadata surfaced in the rendered docs and version substitutions.
project = 'BibTexMagic'
copyright = '2018, Piotr Bajger'
author = 'Piotr Bajger'
# The short X.Y version
version = '0.1'
# The full version, including alpha/beta/rc tags
release = '0.1alpha'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Only first-party Sphinx extensions: docstring extraction (autodoc), doctest
# execution, and documentation-coverage reporting — no third-party deps.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.coverage',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# None -> English; set from the command line for translated builds.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Lightweight default theme shipped with Sphinx.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'BibTexMagicdoc'
# -- Options for LaTeX output ------------------------------------------------
# All LaTeX knobs are left at their Sphinx defaults (the dict is a template).
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'BibTexMagic.tex', 'BibTexMagic Documentation',
     'Piotr Bajger', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'bibtexmagic', 'BibTexMagic Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'BibTexMagic', 'BibTexMagic Documentation',
     author, 'BibTexMagic', 'One line description of project.',
     'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
|
[
"piotr.bajger@hotmail.com"
] |
piotr.bajger@hotmail.com
|
f86b69725bc17be1ec3889284eaa4827b4530088
|
37f1563cdacf4b37b5b927b892538218aae79c77
|
/medium/dfs/numDistinctIslands.py
|
e64b1e67ecc0648115b9cd6988585380ea18c0a9
|
[] |
no_license
|
unsortedtosorted/elgoog
|
9dee49a20f981305910a8924d86e8f2a16fe14c2
|
5be9fab24c0c1fd9d5dc7a7bdaca105f1ca873ee
|
refs/heads/master
| 2020-04-15T00:51:12.114249
| 2019-05-19T04:37:24
| 2019-05-19T04:37:24
| 164,254,079
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,966
|
py
|
"""
694. Number of Distinct Islands
1. first get r,c list of all islands
2. get absolute shape of each island
3. compare shapes return result
"""
class Solution(object):
    """LeetCode 694 — Number of Distinct Islands."""
    def numDistinctIslands(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int

        Count islands of 1-cells that are distinct up to translation.

        Each island is canonicalised as the frozenset of its cells' offsets
        from the island's scan-order origin (its lexicographically smallest
        cell), so two islands compare equal iff one translates onto the other.

        FIX: the original keyed shapes by ``tuple(set_of_offsets)``, which
        depends on set iteration order rather than on the offsets themselves;
        frozenset keys are order-independent. Also removes unused bookkeeping
        (``m``, ``rs``, manual sorting).
        """
        seen = set()

        def _trace(r, c, r0, c0, shape):
            # DFS over the 4-connected island, recording offsets from (r0, c0).
            seen.add((r, c))
            shape.add((r - r0, c - c0))
            for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
                if (0 <= nr < len(grid) and 0 <= nc < len(grid[nr])
                        and grid[nr][nc] == 1 and (nr, nc) not in seen):
                    _trace(nr, nc, r0, c0, shape)

        shapes = set()
        for i, row in enumerate(grid):
            for j, cell in enumerate(row):
                if cell == 1 and (i, j) not in seen:
                    offsets = set()
                    _trace(i, j, i, j, offsets)
                    shapes.add(frozenset(offsets))
        return len(shapes)
|
[
"noreply@github.com"
] |
unsortedtosorted.noreply@github.com
|
c78ef5cc9f53a08c6310e505b04a124c9c7a95df
|
7dc5fba7e2fa9f4c4b874eb79fb504d924813895
|
/fiasco/util/util.py
|
0711caaa985f12870131a7bd29040db8926fbc3c
|
[
"BSD-3-Clause"
] |
permissive
|
eblur/fiasco
|
d335901c97a716cacd541266ebc1b2301932e4e8
|
4ab14756f285e3551bd52cf47a1c9b49eda8b798
|
refs/heads/master
| 2020-03-14T17:49:15.406452
| 2018-04-27T07:49:15
| 2018-04-27T07:49:15
| 131,729,003
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,584
|
py
|
"""
Basic utilities
"""
import os
import sys
import warnings
import configparser
from distutils.util import strtobool
from builtins import input
# Per-user configuration/data directory (~/.fiasco).
FIASCO_HOME = os.path.join(os.environ['HOME'], '.fiasco')
# Public API of this module.
__all__ = ['setup_paths', 'get_masterlist']
def setup_paths():
    """
    Parse the ``fiascorc`` file and return the database path configuration.

    Returns a dict with keys ``ascii_dbase_root``, ``hdf5_dbase_root`` and
    ``use_remote_data`` (bool). When remote data is enabled, the rc file must
    also provide ``remote_domain`` and ``remote_endpoint``.

    Raises
    ------
    ValueError
        If ``use_remote_data`` is not a recognised boolean string, or remote
        data is requested without ``remote_domain``/``remote_endpoint``.
    """
    paths = {}
    rc_path = os.path.join(FIASCO_HOME, 'fiascorc')
    if os.path.isfile(rc_path):
        config = configparser.ConfigParser()
        config.read(rc_path)
        if 'database' in config:
            paths = dict(config['database'])
    if 'ascii_dbase_root' not in paths:
        paths['ascii_dbase_root'] = os.path.join(FIASCO_HOME, 'chianti_dbase')
    if 'hdf5_dbase_root' not in paths:
        paths['hdf5_dbase_root'] = os.path.join(FIASCO_HOME, 'chianti_dbase.h5')
    if 'use_remote_data' not in paths:
        paths['use_remote_data'] = False
    else:
        # FIX: inline replacement for distutils.util.strtobool — distutils was
        # deprecated by PEP 632 and removed in Python 3.12. Same semantics.
        value = str(paths['use_remote_data']).lower()
        if value in ('y', 'yes', 't', 'true', 'on', '1'):
            paths['use_remote_data'] = True
        elif value in ('n', 'no', 'f', 'false', 'off', '0'):
            paths['use_remote_data'] = False
        else:
            raise ValueError("invalid truth value %r" % (value,))
    # If using remote data, need endpoint and domain.
    # FIX: previously plain `assert`s, which are silently stripped under -O.
    if paths['use_remote_data']:
        if 'remote_domain' not in paths:
            raise ValueError("'remote_domain' is required when 'use_remote_data' is set")
        if 'remote_endpoint' not in paths:
            raise ValueError("'remote_endpoint' is required when 'use_remote_data' is set")
    return paths
def get_masterlist(ascii_dbase_root):
    """
    Walk the raw CHIANTI ASCII filetree and return every filename grouped by
    category: one list per non-ion category plus a flat ``ion_files`` list.
    Hidden ("dot") files are skipped throughout.
    """
    skip_dirs = ['version_3', 'deprecated', 'masterlist', 'ioneq', 'dem', 'ancillary_data', 'ip',
                 'abundance', 'continuum', 'instrument_responses']
    # Collect every file associated with ions: any directory whose path or
    # immediate subdirectories do not mention a skipped category.
    ion_files = []
    for root, sub, files in os.walk(ascii_dbase_root):
        skip_here = any(sd in root for sd in skip_dirs) or any(sd in sub for sd in skip_dirs)
        if not skip_here:
            ion_files.extend(f for f in files if f[0] != '.')
    def _collect(subdir):
        # Paths relative to the category directory, hidden files excluded.
        base = os.path.join(ascii_dbase_root, subdir)
        found = []
        for root, _, files in os.walk(base):
            found.extend(os.path.relpath(os.path.join(root, f), base)
                         for f in files if f[0] != '.')
        return found
    all_files = {f'{sd}_files': _collect(sd)
                 for sd in ['abundance', 'ioneq', 'ip', 'continuum']}
    all_files['ion_files'] = ion_files
    return all_files
def query_yes_no(question, default="yes"):
    """
    Ask a yes/no question via raw_input() and return the answer as a bool.

    ``question`` is presented to the user; ``default`` is the presumed answer
    if the user just hits <Enter> — "yes" (the default), "no", or None
    (meaning an explicit answer is required). Re-prompts until a valid answer
    is given.

    See `this gist <https://gist.github.com/hrouault/1358474>`_
    """
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError(f"invalid default answer: '{default}'")
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if choice == '' and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
|
[
"wtb2@rice.edu"
] |
wtb2@rice.edu
|
544d0a0526b74cb8a5db16160be5d4c749e8f462
|
a0dc64d08410cd728ba5e76d097b99b2d5dbee6d
|
/mysite/urls.py
|
dd93b9750eb7e9ebb1d0ccfd9fe481c2bb6e4688
|
[] |
no_license
|
hathairathat/my-first-blog
|
7ed2eb87900e42d899d1a3c87153098f5f6ad89e
|
e7600d54e06ce798f9c822ca89d0acaae87592fd
|
refs/heads/master
| 2020-03-18T06:47:19.584337
| 2018-05-22T15:16:33
| 2018-05-22T15:16:33
| 134,415,294
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 825
|
py
|
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
# Route the admin UI first; everything else falls through to the blog app's
# URLconf (the empty pattern r'' matches any path, so it must stay last).
urlpatterns = [
    url(r'^admin/',admin.site.urls),
    url(r'', include('blog.urls')),
]
|
[
"hathairat.hat@outlook.com"
] |
hathairat.hat@outlook.com
|
1ef9e0b3ef9071d1e0d7ed97d780cb795172ddab
|
6ffcc7486c653b86c16de8479be9c3340f867ecf
|
/pages/loginpage.py
|
82600caa7e79a9e61d545f33e82bb740d4b56546
|
[] |
no_license
|
SrihariRamachandradas/Framework_POM_9
|
69aa75112735bc49b07630865f338a50cdd6745d
|
84a8705542c5c1ea8d6b768ecc0de2fa2d9623ff
|
refs/heads/master
| 2020-05-15T23:19:43.160290
| 2019-05-11T14:42:09
| 2019-05-11T14:42:09
| 182,549,922
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,006
|
py
|
from pages.webgeneric import WebGeneric
from testdata.ExcelUtil import *
class LoginPage(WebGeneric):
    """Page object for the login screen.

    Element locators are fixed; the username/password values are read from
    ``Login.xlsx`` (sheet ``setup``) at call time via ``select_cell_val``.
    """
    def __init__(self, driver):
        WebGeneric.__init__(self, driver)
        # self.driver = driver
        self.un = "j_username"   # username field (located by id)
        self.pwd = "j_password"  # password field (located by name)
        self.login = "Submit"    # login button (located by name)
        # NOTE(review): binds the helper to a *module-level* global; creating a
        # second LoginPage silently rebinds `wb` for every user of this module —
        # consider an instance attribute instead.
        global wb
        wb = WebGeneric(driver)
    def enter_un(self):
        """Type the username from the spreadsheet into the username field."""
        # self.driver.find_element_by_name(self.un).send_keys(UN)
        # wb.enter(self.un, UN)
        un = select_cell_val("Login.xlsx", "setup", "Un")
        wb.enter('id', self.un, un)
    def enter_pwd(self):
        """Type the password from the spreadsheet into the password field."""
        # self.driver.find_element_by_name(self.pwd).send_keys(PWD)
        # wb.enter(self.pwd, PWD)
        pwd = select_cell_val("Login.xlsx", "setup", "Pwd")
        wb.enter('name', self.pwd, pwd)
    def click_login(self):
        """Click the login button and record a pass/fail entry in the report."""
        # self.driver.find_element_by_xpath("//*[text()='Login ']").click()
        # wb.click(self.login)
        wb.click('name', self.login)
        # NOTE(review): compares "A" with "A" — an unconditional pass;
        # presumably a placeholder, confirm intended expected/actual values.
        wb.report_pass_fail("A", "A")
|
[
"harish.python.com@gmail.com"
] |
harish.python.com@gmail.com
|
7606fb0bfbb5324ab73c4868b8964e12200402ce
|
c1839af89d32fb2bc602255d2dbecfcbfb138ba0
|
/picoscope/ps5000.py
|
797dbb73d06334a8598da4820c8f22f10e22b284
|
[
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
abrasive/pico-python
|
2e20aa04fffe5782dddd40c0d9611c6bddced15d
|
7c81be915618f659dc5b21ad5ceb404cea842f52
|
refs/heads/master
| 2022-11-10T08:30:38.511506
| 2020-06-24T04:36:17
| 2020-06-24T04:36:17
| 225,305,661
| 0
| 0
|
NOASSERTION
| 2019-12-02T06:52:23
| 2019-12-02T06:52:22
| null |
UTF-8
|
Python
| false
| false
| 23,761
|
py
|
# This is the instrument-specific file for the PS5000 series of instruments.
#
# pico-python is Copyright (c) 2013-2014 By:
# Colin O'Flynn <coflynn@newae.com>
# Mark Harfouche <mark.harfouche@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
This is the low level driver file for a specific Picoscope.
By this, I mean if parameters want to get passed as strings, they should be
handled by PSBase
All functions here should take things as close to integers as possible, the
only exception here is for array parameters. Array parameters should be passed
in a pythonic way through numpy since the PSBase class should not be aware of
the specifics behind how the clib is called.
The functions should not have any default values as these should be handled
by PSBase.
"""
# 3.0 compatibility
# see http://docs.python.org/2/library/__future__.html
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import math
# to load the proper dll
import platform
# Do not import or use ill definied data types
# such as short int or long
# use the values specified in the h file
# float is always defined as 32 bits
# double is defined as 64 bits
from ctypes import byref, POINTER, create_string_buffer, c_float, \
c_int16, c_int32, c_uint32, c_uint64, c_void_p
from ctypes import c_int32 as c_enum
from picoscope.picobase import _PicoscopeBase
class PS5000(_PicoscopeBase):
    """Low-level ctypes bindings for the PicoScope 5000 (ps5000) driver.

    Each ``_lowLevel*`` method translates already-normalized arguments into
    a single call on the ps5000 shared library; array parameters are numpy
    arrays passed by pointer.  Argument parsing and defaults are handled by
    ``_PicoscopeBase``.
    """

    LIBNAME = "ps5000"

    # ADC count limits for the scope inputs.
    MAX_VALUE = 32521
    MIN_VALUE = -32521

    # EXT/AUX seems to have an input impedance of 50 ohm (PS6403B)
    EXT_MAX_VALUE = 32767
    EXT_MIN_VALUE = -32767
    EXT_RANGE_VOLTS = 1

    # The 10V and 20V ranges are only allowed in high impedance modes
    CHANNEL_RANGE = [
        {"rangeV": 10E-3, "apivalue": 1, "rangeStr": "10 mV"},
        {"rangeV": 20E-3, "apivalue": 2, "rangeStr": "20 mV"},
        {"rangeV": 50E-3, "apivalue": 3, "rangeStr": "50 mV"},
        {"rangeV": 100E-3, "apivalue": 4, "rangeStr": "100 mV"},
        {"rangeV": 200E-3, "apivalue": 5, "rangeStr": "200 mV"},
        {"rangeV": 1.0, "apivalue": 6, "rangeStr": "1 V"},
        {"rangeV": 2.0, "apivalue": 7, "rangeStr": "2 V"},
        {"rangeV": 5.0, "apivalue": 8, "rangeStr": "5 V"},
        {"rangeV": 10.0, "apivalue": 9, "rangeStr": "10 V"},
        {"rangeV": 20.0, "apivalue": 10, "rangeStr": "20 V"},
        {"rangeV": 50.0, "apivalue": 11, "rangeStr": "50 V"}]

    NUM_CHANNELS = 4
    CHANNELS = {"A": 0, "B": 1, "C": 2, "D": 3,
                "External": 4, "MaxChannels": 4, "TriggerAux": 5}

    CHANNEL_COUPLINGS = {"DC": 1, "AC": 0}

    WAVE_TYPES = {"Sine": 0, "Square": 1, "Triangle": 2,
                  "RampUp": 3, "RampDown": 4,
                  "Sinc": 5, "Gaussian": 6, "HalfSine": 7, "DCVoltage": 8,
                  "WhiteNoise": 9}

    SWEEP_TYPES = {"Up": 0, "Down": 1, "UpDown": 2, "DownUp": 3}

    SIGGEN_TRIGGER_TYPES = {"Rising": 0, "Falling": 1,
                            "GateHigh": 2, "GateLow": 3}
    SIGGEN_TRIGGER_SOURCES = {"None": 0, "ScopeTrig": 1, "AuxIn": 2,
                              "ExtIn": 3, "SoftTrig": 4, "TriggerRaw": 5}

    # This is actually different depending on the AB/CD models
    # I wonder how we could detect the difference between the oscilloscopes
    # I believe we can obtain this information from the setInfo function
    # by reading the hardware version
    # for the PS6403B version, the hardware version is "1 1",
    # an other possibility is that the PS6403B shows up as 6403 when using
    # VARIANT_INFO and others show up as PS6403X where X = A,C or D
    AWGPhaseAccumulatorSize = 32
    AWGBufferAddressWidth = 14
    AWGMaxSamples = 2 ** AWGBufferAddressWidth

    AWGDACInterval = 5E-9  # in seconds
    AWGDACFrequency = 1 / AWGDACInterval

    # Note this is NOT what is written in the Programming guide as of version
    # 10_5_0_28
    # This issue was acknowledged in this thread
    # http://www.picotech.com/support/topic13217.html
    AWGMaxVal = 0x0FFF
    AWGMinVal = 0x0000

    AWG_INDEX_MODES = {"Single": 0, "Dual": 1, "Quad": 2}

    def __init__(self, serialNumber=None, connect=True):
        """Load the platform-appropriate ps5000 shared library."""
        if platform.system() == 'Linux':
            from ctypes import cdll
            # ok I don't know what is wrong with my installer,
            # but I need to include .so.2
            self.lib = cdll.LoadLibrary("lib" + self.LIBNAME + ".so.2")
        elif platform.system() == 'Darwin':
            from picoscope.darwin_utils import LoadLibraryDarwin
            self.lib = LoadLibraryDarwin("lib" + self.LIBNAME + ".dylib")
        else:
            from ctypes import windll
            self.lib = windll.LoadLibrary(str(self.LIBNAME + ".dll"))

        super(PS5000, self).__init__(serialNumber, connect)

    def _lowLevelOpenUnit(self, sn):
        """Open the scope (optionally by serial number) and store the handle."""
        c_handle = c_int16()
        if sn is not None:
            serialNullTermStr = create_string_buffer(sn)
        else:
            serialNullTermStr = None
        # Passing None is the same as passing NULL
        m = self.lib.ps5000OpenUnit(byref(c_handle), serialNullTermStr)
        self.checkResult(m)
        self.handle = c_handle.value

    def _lowLevelOpenUnitAsync(self, sn):
        """Begin opening the scope asynchronously; return the driver status."""
        c_status = c_int16()
        if sn is not None:
            serialNullTermStr = create_string_buffer(sn)
        else:
            serialNullTermStr = None
        # Passing None is the same as passing NULL
        m = self.lib.ps5000OpenUnitAsync(byref(c_status), serialNullTermStr)
        self.checkResult(m)
        return c_status.value

    def _lowLevelOpenUnitProgress(self):
        """Poll an async open; return (progressPercent, complete)."""
        complete = c_int16()
        progressPercent = c_int16()
        handle = c_int16()

        m = self.lib.ps5000OpenUnitProgress(byref(handle),
                                            byref(progressPercent),
                                            byref(complete))
        self.checkResult(m)

        if complete.value != 0:
            self.handle = handle.value

        # if we only wanted to return one value, we could do something like
        # progressPercent = progressPercent * (1 - 0.1 * complete)
        return (progressPercent.value, complete.value)

    def _lowLevelCloseUnit(self):
        m = self.lib.ps5000CloseUnit(c_int16(self.handle))
        self.checkResult(m)

    def _lowLevelEnumerateUnits(self):
        """Return the list of serial numbers of connected ps5000 units."""
        count = c_int16(0)
        m = self.lib.ps5000EnumerateUnits(byref(count), None, None)
        self.checkResult(m)
        # a serial number is roughly 8 characters
        # an extra character for the comma
        # and an extra one for the space after the comma?
        # the extra two also work for the null termination
        serialLth = c_int16(count.value * (8 + 2))
        serials = create_string_buffer(serialLth.value + 1)

        m = self.lib.ps5000EnumerateUnits(byref(count), serials,
                                          byref(serialLth))
        self.checkResult(m)

        serialList = str(serials.value.decode('utf-8')).split(',')
        serialList = [x.strip() for x in serialList]
        return serialList

    def _lowLevelSetChannel(self, chNum, enabled, coupling, VRange, VOffset,
                            BWLimited):
        m = self.lib.ps5000SetChannel(c_int16(self.handle), c_enum(chNum),
                                      c_int16(enabled), c_enum(coupling),
                                      c_enum(VRange), c_float(VOffset),
                                      c_enum(BWLimited))  # 2 for PS6404
        self.checkResult(m)

    def _lowLevelStop(self):
        m = self.lib.ps5000Stop(c_int16(self.handle))
        self.checkResult(m)

    def _lowLevelGetUnitInfo(self, info):
        """Return the requested unit-info string (e.g. variant, serial)."""
        s = create_string_buffer(256)
        requiredSize = c_int16(0)

        # BUG FIX: this used to call ps6000GetUnitInfo (copy-paste from the
        # PS6000 driver); the retry below already used the ps5000 entry point.
        m = self.lib.ps5000GetUnitInfo(c_int16(self.handle), byref(s),
                                       c_int16(len(s)), byref(requiredSize),
                                       c_enum(info))
        self.checkResult(m)
        if requiredSize.value > len(s):
            # Buffer was too small: retry with the size the driver asked for.
            s = create_string_buffer(requiredSize.value + 1)
            m = self.lib.ps5000GetUnitInfo(c_int16(self.handle), byref(s),
                                           c_int16(len(s)),
                                           byref(requiredSize), c_enum(info))
            self.checkResult(m)

        # should this be ascii instead?
        # I think they are equivalent...
        return s.value.decode('utf-8')

    def _lowLevelFlashLed(self, times):
        m = self.lib.ps5000FlashLed(c_int16(self.handle), c_int16(times))
        self.checkResult(m)

    def _lowLevelSetSimpleTrigger(self, enabled, trigsrc, threshold_adc,
                                  direction, delay, timeout_ms):
        m = self.lib.ps5000SetSimpleTrigger(
            c_int16(self.handle), c_int16(enabled),
            c_enum(trigsrc), c_int16(threshold_adc),
            c_enum(direction), c_uint32(delay), c_int16(timeout_ms))
        self.checkResult(m)

    def _lowLevelRunBlock(self, numPreTrigSamples, numPostTrigSamples,
                          timebase, oversample, segmentIndex):
        """Start a block capture; return the driver's busy-time estimate (ms)."""
        timeIndisposedMs = c_int32()
        m = self.lib.ps5000RunBlock(
            c_int16(self.handle), c_uint32(numPreTrigSamples),
            c_uint32(numPostTrigSamples), c_uint32(timebase),
            c_int16(oversample), byref(timeIndisposedMs),
            c_uint32(segmentIndex), c_void_p(), c_void_p())
        self.checkResult(m)
        return timeIndisposedMs.value

    def _lowLevelIsReady(self):
        """Return True when the current block capture has finished."""
        ready = c_int16()
        m = self.lib.ps5000IsReady(c_int16(self.handle), byref(ready))
        self.checkResult(m)
        return bool(ready.value)

    def _lowLevelGetTimebase(self, tb, noSamples, oversample, segmentIndex):
        """Return (timeIntervalSeconds, maxSamples)."""
        maxSamples = c_int32()
        sampleRate = c_float()

        m = self.lib.ps5000GetTimebase2(c_int16(self.handle), c_uint32(tb),
                                        c_uint32(noSamples), byref(sampleRate),
                                        c_int16(oversample), byref(maxSamples),
                                        c_uint32(segmentIndex))
        self.checkResult(m)

        # The driver reports the interval in nanoseconds.
        return (sampleRate.value / 1.0E9, maxSamples.value)

    def getTimeBaseNum(self, sampleTimeS):
        """Return sample time in seconds to timebase as int for API calls."""
        maxSampleTime = (((2 ** 32 - 1) - 2) / 125000000)

        if sampleTimeS < 8E-9:
            # Timebases 0..2 are 2**tb nanoseconds.
            timebase = math.floor(math.log(sampleTimeS * 1E9, 2))
            timebase = max(timebase, 0)
        else:
            # Otherwise in range 2^32-1
            if sampleTimeS > maxSampleTime:
                sampleTimeS = maxSampleTime
            timebase = math.floor((sampleTimeS * 125000000) + 2)

        # is this cast needed?
        timebase = int(timebase)
        return timebase

    def getTimestepFromTimebase(self, timebase):
        """Return timebase to sampletime as seconds."""
        if timebase < 3:
            dt = 2. ** timebase / 1E9
        else:
            dt = (timebase - 2) / 125000000.
        return dt

    def _lowLevelSetDataBuffer(self, channel, data, downSampleMode,
                               segmentIndex):
        """Set the data buffer.

        Be sure to call _lowLevelClearDataBuffer
        when you are done with the data array
        or else subsequent calls to GetValue will still use the same array.

        segmentIndex is unused, but required by other versions of the API
        (eg PS5000a)
        """
        dataPtr = data.ctypes.data_as(POINTER(c_int16))
        numSamples = len(data)

        m = self.lib.ps5000SetDataBuffer(c_int16(self.handle), c_enum(channel),
                                         dataPtr, c_uint32(numSamples),
                                         c_enum(downSampleMode))
        self.checkResult(m)

    def _lowLevelClearDataBuffer(self, channel, segmentIndex):
        """Detach the buffer by registering a NULL pointer for the channel."""
        m = self.lib.ps5000SetDataBuffer(c_int16(self.handle), c_enum(channel),
                                         c_void_p(), c_uint32(0), c_enum(0))
        self.checkResult(m)

    def _lowLevelGetValues(self, numSamples, startIndex, downSampleRatio,
                           downSampleMode, segmentIndex):
        """Fetch captured data; return (numSamplesReturned, overflow mask)."""
        numSamplesReturned = c_uint32()
        numSamplesReturned.value = numSamples
        overflow = c_int16()
        m = self.lib.ps5000GetValues(
            c_int16(self.handle), c_uint32(startIndex),
            byref(numSamplesReturned), c_uint32(downSampleRatio),
            c_enum(downSampleMode), c_uint32(segmentIndex),
            byref(overflow))
        self.checkResult(m)
        return (numSamplesReturned.value, overflow.value)

    ####################################################################
    # Untested functions below                                         #
    #                                                                  #
    ####################################################################

    def _lowLevelSetAWGSimpleDeltaPhase(self, waveform, deltaPhase,
                                        offsetVoltage, pkToPk, indexMode,
                                        shots, triggerType, triggerSource):
        """Waveform should be an array of shorts."""
        waveformPtr = waveform.ctypes.data_as(POINTER(c_int16))

        m = self.lib.ps5000SetSigGenArbitrary(
            c_int16(self.handle),
            c_uint32(int(offsetVoltage * 1E6)),  # offset voltage in microvolts
            c_uint32(int(pkToPk * 1E6)),         # pkToPk in microvolts
            c_uint32(int(deltaPhase)),           # startDeltaPhase
            c_uint32(int(deltaPhase)),           # stopDeltaPhase
            c_uint32(0),                         # deltaPhaseIncrement
            c_uint32(0),                         # dwellCount
            waveformPtr,                         # arbitraryWaveform
            c_int32(len(waveform)),              # arbitraryWaveformSize
            c_enum(0),                           # sweepType for deltaPhase
            c_enum(0),   # operation (adding random noise and whatnot)
            c_enum(indexMode),                   # single, dual, quad
            c_uint32(shots),
            c_uint32(0),                         # sweeps
            c_uint32(triggerType),
            c_uint32(triggerSource),
            c_int16(0))                          # extInThreshold
        self.checkResult(m)

    def _lowLevelSetSigGenBuiltInSimple(self, offsetVoltage, pkToPk, waveType,
                                        frequency, shots, triggerType,
                                        triggerSource, stopFreq, increment,
                                        dwellTime, sweepType, numSweeps):
        # TODO, I just noticed that V2 exists
        # Maybe change to V2 in the future
        if stopFreq is None:
            stopFreq = frequency

        m = self.lib.ps5000SetSigGenBuiltIn(
            c_int16(self.handle),
            c_int32(int(offsetVoltage * 1000000)),
            c_int32(int(pkToPk * 1000000)),
            c_int16(waveType),
            c_float(frequency), c_float(stopFreq),
            c_float(increment), c_float(dwellTime),
            c_enum(sweepType), c_enum(0),
            c_uint32(shots), c_uint32(numSweeps),
            c_enum(triggerType), c_enum(triggerSource),
            c_int16(0))
        self.checkResult(m)

    def _lowLevelGetAnalogueOffset(self, range, coupling):
        """Return (maximumVoltage, minimumVoltage) offset limits for a range."""
        # TODO, populate properties with this function
        maximumVoltage = c_float()
        minimumVoltage = c_float()

        m = self.lib.ps5000GetAnalogueOffset(
            c_int16(self.handle), c_enum(range), c_enum(coupling),
            byref(maximumVoltage), byref(minimumVoltage))
        self.checkResult(m)

        return (maximumVoltage.value, minimumVoltage.value)

    def _lowLevelGetMaxDownSampleRatio(self, noOfUnaggregatedSamples,
                                      downSampleRatioMode, segmentIndex):
        maxDownSampleRatio = c_uint32()

        m = self.lib.ps5000GetMaxDownSampleRatio(
            c_int16(self.handle), c_uint32(noOfUnaggregatedSamples),
            byref(maxDownSampleRatio),
            c_enum(downSampleRatioMode), c_uint32(segmentIndex))
        self.checkResult(m)

        return maxDownSampleRatio.value

    def _lowLevelGetNoOfCaptures(self):
        nCaptures = c_uint32()

        m = self.lib.ps5000GetNoOfCaptures(c_int16(self.handle),
                                           byref(nCaptures))
        self.checkResult(m)

        return nCaptures.value

    def _lowLevelGetTriggerTimeOffset(self, segmentIndex):
        """Return the trigger time offset for a segment, converted to seconds."""
        time = c_uint64()
        timeUnits = c_enum()

        m = self.lib.ps5000GetTriggerTimeOffset64(
            c_int16(self.handle), byref(time),
            byref(timeUnits), c_uint32(segmentIndex))
        self.checkResult(m)

        # Scale factors indexed by the driver's PS5000_TIME_UNITS enum.
        if timeUnits.value == 0:    # PS5000_FS
            return time.value * 1E-15
        elif timeUnits.value == 1:  # PS5000_PS
            return time.value * 1E-12
        elif timeUnits.value == 2:  # PS5000_NS
            return time.value * 1E-9
        elif timeUnits.value == 3:  # PS5000_US
            return time.value * 1E-6
        elif timeUnits.value == 4:  # PS5000_MS
            return time.value * 1E-3
        elif timeUnits.value == 5:  # PS5000_S
            return time.value * 1E0
        else:
            raise TypeError("Unknown timeUnits %d" % timeUnits.value)

    def _lowLevelMemorySegments(self, nSegments):
        """Divide capture memory into nSegments; return samples per segment."""
        nMaxSamples = c_uint32()

        m = self.lib.ps5000MemorySegments(c_int16(self.handle),
                                          c_uint32(nSegments),
                                          byref(nMaxSamples))
        self.checkResult(m)

        return nMaxSamples.value

    def _lowLevelSetDataBuffers(self, channel, bufferMax, bufferMin,
                                downSampleRatioMode):
        bufferMaxPtr = bufferMax.ctypes.data_as(POINTER(c_int16))
        bufferMinPtr = bufferMin.ctypes.data_as(POINTER(c_int16))
        bufferLth = len(bufferMax)

        m = self.lib.ps5000SetDataBuffers(c_int16(self.handle),
                                          c_enum(channel),
                                          bufferMaxPtr, bufferMinPtr,
                                          c_uint32(bufferLth),
                                          c_enum(downSampleRatioMode))
        self.checkResult(m)

    def _lowLevelClearDataBuffers(self, channel):
        m = self.lib.ps5000SetDataBuffers(
            c_int16(self.handle), c_enum(channel),
            c_void_p(), c_void_p(), c_uint32(0), c_enum(0))
        self.checkResult(m)

    # Bulk values.
    # These would be nice, but the user would have to provide us
    # with an array.
    # we would have to make sure that it is contiguous amongst other things
    def _lowLevelGetValuesBulk(self,
                               numSamples, fromSegmentIndex, toSegmentIndex,
                               downSampleRatio, downSampleRatioMode,
                               overflow):
        noOfSamples = c_uint32(numSamples)

        m = self.lib.ps5000GetValuesBulk(
            c_int16(self.handle),
            byref(noOfSamples),
            c_uint32(fromSegmentIndex), c_uint32(toSegmentIndex),
            c_uint32(downSampleRatio), c_enum(downSampleRatioMode),
            overflow.ctypes.data_as(POINTER(c_int16))
            )
        self.checkResult(m)
        return noOfSamples.value

    def _lowLevelSetDataBufferBulk(self, channel, buffer, waveform,
                                   downSampleRatioMode):
        bufferPtr = buffer.ctypes.data_as(POINTER(c_int16))
        bufferLth = len(buffer)

        m = self.lib.ps5000SetDataBufferBulk(
            c_int16(self.handle),
            c_enum(channel), bufferPtr, c_uint32(bufferLth),
            c_uint32(waveform), c_enum(downSampleRatioMode))
        self.checkResult(m)

    def _lowLevelSetDataBuffersBulk(self, channel, bufferMax, bufferMin,
                                    waveform, downSampleRatioMode):
        bufferMaxPtr = bufferMax.ctypes.data_as(POINTER(c_int16))
        bufferMinPtr = bufferMin.ctypes.data_as(POINTER(c_int16))

        bufferLth = len(bufferMax)

        m = self.lib.ps5000SetDataBuffersBulk(
            c_int16(self.handle), c_enum(channel),
            bufferMaxPtr, bufferMinPtr, c_uint32(bufferLth),
            c_uint32(waveform), c_enum(downSampleRatioMode))
        self.checkResult(m)

    def _lowLevelSetNoOfCaptures(self, nCaptures):
        m = self.lib.ps5000SetNoOfCaptures(c_int16(self.handle),
                                           c_uint32(nCaptures))
        self.checkResult(m)

    # ETS Functions
    # BUG FIX: the placeholder methods below were missing `self`, so any
    # bound call (self._lowLevelSetEts()) would have raised TypeError.
    def _lowLevelSetEts(self):
        pass

    def _lowLevelSetEtsTimeBuffer(self):
        pass

    def _lowLevelSetEtsTimeBuffers(self):
        pass

    def _lowLevelSetExternalClock(self):
        pass

    # Complicated triggering
    # need to understand structs for this one to work
    def _lowLevelIsTriggerOrPulseWidthQualifierEnabled(self):
        pass

    def _lowLevelGetValuesTriggerTimeOffsetBulk(self):
        pass

    def _lowLevelSetTriggerChannelConditions(self):
        pass

    def _lowLevelSetTriggerChannelDirections(self):
        pass

    def _lowLevelSetTriggerChannelProperties(self):
        pass

    def _lowLevelSetPulseWidthQualifier(self):
        pass

    def _lowLevelSetTriggerDelay(self):
        pass

    # Async functions
    # would be nice, but we would have to learn to implement callbacks
    def _lowLevelGetValuesAsync(self):
        pass

    def _lowLevelGetValuesBulkAsync(self):
        pass

    # overlapped functions
    def _lowLevelGetValuesOverlapped(self):
        pass

    def _lowLevelGetValuesOverlappedBulk(self):
        pass

    # Streaming related functions
    def _lowLevelGetStreamingLatestValues(self):
        pass

    def _lowLevelNoOfStreamingValues(self):
        noOfValues = c_uint32()

        m = self.lib.ps5000NoOfStreamingValues(c_int16(self.handle),
                                               byref(noOfValues))
        self.checkResult(m)

        return noOfValues.value

    def _lowLevelRunStreaming(self):
        pass

    def _lowLevelStreamingReady(self):
        pass
|
[
"johny@neuromancer.sk"
] |
johny@neuromancer.sk
|
944be11f0ca162fad718542a259881f7887e4afd
|
301ac466f179ec437c82e42af8f133a1cd8caa50
|
/plot_last_month_run.py
|
46f22592fc41e4480e312a199a0f81d351e8d749
|
[] |
no_license
|
jayhayes21/Python
|
b7ed40d55a30bc823e474ed421c98ca1d772fdc1
|
5f73a39d85c8c49d9f3299a4ae2b927590578254
|
refs/heads/master
| 2021-07-25T20:02:29.784621
| 2020-09-06T23:13:27
| 2020-09-06T23:13:27
| 216,238,484
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 133
|
py
|
import running_miles_graph
# Plot the most recent month of running mileage.
# find_last_element() presumably returns the latest recorded data point —
# verify against running_miles_graph's implementation.
last_element = running_miles_graph.find_last_element()
running_miles_graph.plot_last_month(last_element)
|
[
"hayeswj@mail.uc.edu"
] |
hayeswj@mail.uc.edu
|
926aed03b5771e01fd62b52a10f109c378c175a7
|
7c25d42c0ef2f05190adbaa0de48c83d59e643ec
|
/game_functions.py
|
656b4046bae0ce01091224b7c2d3f16d58f0348c
|
[] |
no_license
|
LiMichael1/Alien_Invasion
|
91888ee211d4839e87f6aaf25aeaba7cce3a417e
|
0a42ceb7110215d72b4a65eae231944fd85b8304
|
refs/heads/master
| 2020-07-31T21:14:52.440947
| 2019-09-25T22:56:35
| 2019-09-25T22:56:35
| 210,756,092
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,536
|
py
|
import sys
from time import sleep
import pygame
from bullet import Bullet
from alien import Alien
def check_keydown_events(event, ai_settings, screen, ship, bullets):
    """Respond to key presses: movement, firing, and quitting."""
    pressed = event.key
    if pressed == pygame.K_q:
        # 'q' quits the game immediately.
        sys.exit()
    elif pressed == pygame.K_SPACE:
        fire_bullet(ai_settings, screen, ship, bullets)
    elif pressed == pygame.K_RIGHT:
        ship.moving_right = True
    elif pressed == pygame.K_LEFT:
        ship.moving_left = True
def check_keyup_events(event, ship):
    """Stop the ship's movement when an arrow key is released."""
    released = event.key
    if released == pygame.K_RIGHT:
        ship.moving_right = False
    elif released == pygame.K_LEFT:
        ship.moving_left = False
def check_events(ai_settings, screen, stats, sb, play_button, ship, aliens, bullets):
    """Dispatch every pending pygame event to the appropriate handler."""
    for event in pygame.event.get():
        kind = event.type
        if kind == pygame.QUIT:
            sys.exit()
        elif kind == pygame.KEYDOWN:
            check_keydown_events(event, ai_settings, screen, ship, bullets)
        elif kind == pygame.KEYUP:
            check_keyup_events(event, ship)
        elif kind == pygame.MOUSEBUTTONDOWN:
            # A click may be a press of the Play button.
            mouse_x, mouse_y = pygame.mouse.get_pos()
            check_play_button(ai_settings, screen, stats, sb, play_button,
                              ship, aliens, bullets, mouse_x, mouse_y)
def check_play_button(ai_settings, screen, stats, sb, play_button, ship,
                      aliens, bullets, mouse_x, mouse_y):
    """ Start a new game when the player clicks Play.

    Only acts when the click lands on the button AND no game is running;
    resets settings, stats, scoreboard, and sprites, in that order.
    """
    button_clicked = play_button.rect.collidepoint(mouse_x, mouse_y)
    if button_clicked and not stats.game_active:
        # Reset the game settings
        ai_settings.initialize_dynamic_settings()
        # Hide the mouse cursor
        pygame.mouse.set_visible(False)
        # Reset the game Statistics
        stats.reset_stats()
        stats.game_active = True
        # Reset the scoreboard images
        sb.prep_score()
        sb.prep_high_score()
        sb.prep_level()
        sb.prep_ships()
        # Empty the list of aliens and bullets
        aliens.empty()
        bullets.empty()
        # Create a new fleet and center the ship
        create_fleet(ai_settings, screen, ship, aliens)
        ship.center_ship()
def update_screen(ai_settings, screen, stats, sb, ship, aliens, bullets,
                  play_button):
    """Redraw everything back-to-front, then flip the display buffer."""
    # Background first, so earlier frames are erased.
    screen.fill(ai_settings.bg_color)
    # Bullets are drawn beneath the ship and aliens.
    for shot in bullets.sprites():
        shot.draw_bullets()
    ship.blitme()
    aliens.draw(screen)
    # Score overlay.
    sb.show_score()
    # The Play button only appears between games.
    if not stats.game_active:
        play_button.draw_button()
    # Show the freshly drawn frame.
    pygame.display.flip()
def update_bullets(ai_settings, screen, stats, sb, ship, aliens, bullets):
    """Advance all bullets, prune off-screen ones, and resolve hits."""
    bullets.update()
    # Iterate over a copy so removal is safe while looping.
    for spent in bullets.copy():
        if spent.rect.bottom <= 0:
            bullets.remove(spent)
    check_bullet_alien_collisions(ai_settings, screen, stats, sb, ship,
                                  aliens, bullets)
def check_bullet_alien_collisions(ai_settings, screen, stats, sb, ship,
                                  aliens, bullets):
    """Handle bullet-alien collisions and start a new level when cleared.

    Removes every colliding bullet and alien, credits points for each alien
    destroyed, and — once the whole fleet is gone — empties the bullets,
    speeds the game up, bumps the level, and builds a fresh fleet.
    """
    # groupcollide maps each colliding bullet to the list of aliens it hit;
    # True, True removes both sides from their groups.
    collisions = pygame.sprite.groupcollide(bullets, aliens, True, True)
    if collisions:
        # BUG FIX: the loop variable used to be named `aliens`, shadowing the
        # alien-group parameter, so the empty-fleet check below inspected the
        # last collision list and create_fleet repopulated the wrong object.
        for hit_aliens in collisions.values():
            stats.score += ai_settings.alien_points * len(hit_aliens)
            sb.prep_score()
        check_high_score(stats, sb)
    if len(aliens) == 0:
        # if entire fleet is destroyed, start a new level
        bullets.empty()
        ai_settings.increase_speed()
        # Increase level
        stats.level += 1
        sb.prep_level()
        create_fleet(ai_settings, screen, ship, aliens)
def fire_bullet(ai_settings, screen, ship, bullets):
    """Add a new bullet to the group unless the on-screen cap is reached."""
    if len(bullets) >= ai_settings.bullets_allowed:
        # Cap reached: silently ignore the request.
        return
    bullets.add(Bullet(ai_settings, screen, ship))
def get_number_aliens_x(ai_settings, alien_width):
    """Return how many aliens fit in one row, one alien-width apart.

    One alien-width margin is kept on each side of the screen.
    """
    usable_width = ai_settings.screen_width - (2 * alien_width)
    return int(usable_width / (2 * alien_width))
def get_number_rows(ai_settings, ship_height, alien_height):
    """Return how many alien rows fit above the ship.

    Reserves three alien-heights of headroom plus the ship's height.
    """
    usable_height = (ai_settings.screen_height -
                     (3 * alien_height) - ship_height)
    return int(usable_height / (2 * alien_height))
def create_alien(ai_settings, screen, aliens, alien_number, row_number):
    """Create one alien at grid slot (alien_number, row_number)."""
    alien = Alien(ai_settings, screen)
    width = alien.rect.width
    # One-width left margin, two widths per column.
    alien.x = width * (1 + 2 * alien_number)
    alien.rect.x = alien.x
    # One-height top margin, two heights per row.
    alien.rect.y = alien.rect.height * (1 + 2 * row_number)
    aliens.add(alien)
def create_fleet(ai_settings, screen, ship, aliens):
    """Populate the alien group with a full grid-shaped fleet.

    A throwaway alien provides the sprite dimensions used for spacing.
    """
    prototype = Alien(ai_settings, screen)
    columns = get_number_aliens_x(ai_settings, prototype.rect.width)
    rows = get_number_rows(ai_settings, ship.rect.height,
                           prototype.rect.height)
    for row_number in range(rows):
        for alien_number in range(columns):
            create_alien(ai_settings, screen, aliens, alien_number,
                         row_number)
def check_fleet_edges(ai_settings, aliens):
    """Drop and reverse the fleet as soon as any alien touches an edge."""
    # any() short-circuits, matching the original break-on-first behavior.
    if any(alien.check_edges() for alien in aliens.sprites()):
        change_fleet_direction(ai_settings, aliens)
def change_fleet_direction(ai_settings, aliens):
    """Drop every alien one step and reverse the fleet's horizontal motion."""
    drop = ai_settings.fleet_drop_speed
    for alien in aliens.sprites():
        alien.rect.y += drop
    ai_settings.fleet_direction = -ai_settings.fleet_direction
def ship_hit(ai_settings, screen, stats, sb, ship, aliens, bullets):
    """Respond to ship being hit by alien.

    Consumes one spare ship and restarts the level, or ends the game
    when no ships remain.
    """
    if stats.ships_left > 0:
        # Decrement ships_left
        stats.ships_left -= 1
        # Update scoreboard
        sb.prep_ships()
        # Empty the list of aliens and bullets
        aliens.empty()
        bullets.empty()
        # Create a new fleet and center the ship
        create_fleet(ai_settings, screen, ship, aliens)
        ship.center_ship()
        # Pause so the player notices the hit before play resumes.
        sleep(0.5)
    else:
        # Out of ships: stop the game and restore the cursor.
        stats.game_active = False
        pygame.mouse.set_visible(True)
def check_aliens_bottom(ai_settings, screen, stats, sb, ship, aliens, bullets):
    """Treat any alien reaching the bottom edge the same as a ship hit."""
    floor = screen.get_rect().bottom
    for alien in aliens.sprites():
        if alien.rect.bottom >= floor:
            ship_hit(ai_settings, screen, stats, sb, ship, aliens, bullets)
            # One hit per frame is enough.
            break
def update_aliens(ai_settings, screen, stats, sb, ship, aliens, bullets):
    """Move the fleet one step, then resolve ship and bottom collisions."""
    # Reverse/drop first if the fleet is touching an edge.
    check_fleet_edges(ai_settings, aliens)
    aliens.update()
    # An alien touching the ship costs a life.
    if pygame.sprite.spritecollideany(ship, aliens):
        ship_hit(ai_settings, screen, stats, sb, ship, aliens, bullets)
    # So does an alien slipping past to the bottom of the screen.
    check_aliens_bottom(ai_settings, screen, stats, sb, ship, aliens, bullets)
def check_high_score(stats, sb):
    """Promote the current score to high score when it exceeds the record."""
    if stats.score <= stats.high_score:
        return
    stats.high_score = stats.score
    sb.prep_high_score()
|
[
"limichael1099419@gmail.com"
] |
limichael1099419@gmail.com
|
22c036c18f859b80d85efce8d539d0fdedd41a65
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02741/s822820391.py
|
a096a15ca173bd27563de44625b5ad31db1cf60f
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 141
|
py
|
# Precomputed answer table, 1-indexed by K (valid for K in 1..32).
# Presumably the sequence required by the judge problem — verify against
# the problem statement.
L = [1, 1, 1, 2, 1, 2, 1, 5, 2, 2, 1, 5, 1, 2, 1, 14, 1, 5, 1, 5, 2, 2, 1, 15, 2, 2, 5, 4, 1, 4, 1, 51]
K = int(input())
# K is 1-based; shift by one for Python's 0-based indexing.
r = L[K-1]
print(r)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
0d48829619502ec6558d28c6fc9451b587607ef1
|
ed5fff0774d62953f53fa799fd83765b9296c8e4
|
/modou.py
|
92bd1c103d4ccee55241e16182bedc2ad7410bcc
|
[] |
no_license
|
wangfeng3769/pymodou
|
57cca6fc1686ba115cd2a1021e02999ddfa454e3
|
fa769ef18da3a66b199e6126da00e48ec7278288
|
refs/heads/master
| 2020-04-06T04:40:17.519086
| 2014-12-11T14:25:30
| 2014-12-11T14:25:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,222
|
py
|
import json
import urllib
import requests
from local_conf import *
# https://github.com/modouwifi/doc-modouwifi-api
class Request(object):
    """Callable proxy that maps attribute chains onto modou HTTP API paths.

    Accessing e.g. ``req.system__get_version_info`` records the API path
    ``system/get_version_info`` (double underscores become slashes); calling
    the object then issues the HTTP request.  Python 2 only (print
    statements, urllib.urlencode).
    """
    API = 'http://modouwifi.net/api/'
    def __init__(self, modou, method='get'):
        # modou: owning Modou instance (holds the login cookie).
        # method: 'get' or 'post'.
        self._modou = modou
        self._method = method
    def __getattr__(self, *args, **kwargs):
        #print args, kwargs
        name = args[0]
        if name.startswith('_'):
            # Private names fall through to normal attribute resolution.
            return super(Request, self).__getattr__(*args, **kwargs)
        else:
            # Record the API path; '__' in the attribute name means '/'.
            self._api_path = name.replace('__', '/')
            return self
    def __call__(self, *args, **kwargs):
        #print args, kwargs, self._method, self._api_path
        data = kwargs
        if self._modou._userid:
            # Reuse the session cookie obtained from auth/login.
            cookies = dict(userid=self._modou._userid)
        else:
            cookies = {}
        api_url = self.API + '%s' % self._api_path
        if self._method == 'post':
            # POST bodies are sent as JSON.
            r = requests.post(api_url, data=json.dumps(data), cookies=cookies)
        else:
            # GET arguments become the query string.
            query = urllib.urlencode(data)
            if query:
                api_url += '?%s' % query
            #print api_url
            r = requests.get(api_url, cookies=cookies)
        if r.status_code == 200:
            result = r.json()
            if self._api_path == 'auth/login':
                # A successful login (code 0) yields a userid cookie that
                # authenticates all subsequent calls.
                code = result['code']
                if code!=0:
                    msg = result['msg']
                    print code, msg
                else:
                    userid = r.cookies['userid']
                    self._modou._userid = userid
            return result
        else:
            # Non-200: report the status; implicitly returns None.
            print r.status_code
class Modou(object):
    """Entry point for the modou wifi router HTTP API.

    ``modou.get.<path>()`` / ``modou.post.<path>()`` build Request proxies;
    ``__`` in attribute names maps to ``/`` in the URL.  Python 2 only.
    """
    def __init__(self):
        # Session cookie; set by a successful auth/login call.
        self._userid = None
    def __getattr__(self, *args, **kwargs):
        #print args, kwargs
        name = args[0]
        if name == 'post':
            return Request(self, method='post')
        elif name == 'get':
            return Request(self)
        elif name.startswith('_'):
            # Private names fall through to normal attribute resolution.
            print name
            return super(Modou, self).__getattr__(*args, **kwargs)
        # Any other attribute access is a programming error.
        assert False
if __name__ == '__main__':
    # Log in with the password from local_conf, then configure the
    # modou-weather plugin if it is installed.
    modou = Modou()
    modou.post.auth__login(password=PASSWORD)
    #modou.get.system__get_version_info()
    ##modou.get.system__upgrade_get_latest_version() # not supported yet
    #modou.get.system__check_upgrade_global_status()
    #modou.get.system__check_remote_version_upgrade()
    #modou.get.system__get_ucode()
    #modou.get.security__get_config()
    #modou.get.security__check_permission()
    #modou.get.wan__get_info()
    #modou.get.wan__is_internet_available()
    #modou.get.system__get_cable_connection()
    #modou.get.plugin__installed_plugins()
    #modou.post.commands__run(cmd='echo "Hello, Modou."')
    ##modou.get.screenshot() # very very slow
    r = modou.get.plugin__installed_plugins()
    for plugin in r['plugins']:
        if plugin['name'] == 'modou-weather':
            #print json.dumps(plugin, indent=4)
            #_r = modou.get.plugin__plugin_status(id=plugin['id'])
            # Push the weather plugin's config; values mirror the plugin's
            # own schema (names are escaped Chinese UI labels).
            modou.post.plugin__config__set(package_id=plugin['package_id'], data={
                "city_name": {
                    "group_id": "main_info_display",
                    "type": {
                        "max": 16,
                        "class": "STRING",
                        "min": 2
                    },
                    "name": "\u57ce\u5e02\u540d\u79f0",
                    "value": "**\u6b64\u7248\u672c\u4e0d\u652f\u6301**",
                    "id": "city_name"
                },
                "state_auto_update": {
                    "group_id": "main_info_display",
                    "type": {
                        "class": "BOOL"
                    },
                    "name": "\u81ea\u52a8\u66f4\u65b0",
                    "value": False,
                    "id": "state_auto_update"
                },
                "city_id": {
                    "group_id": "main_info_display",
                    "type": {
                        "max": 101340904,
                        "class": "INT",
                        "min": 101010100
                    },
                    "name": "\u57ce\u5e02\u4ee3\u7801",
                    # http://www.cnblogs.com/wf225/p/4090737.html
                    "value": 101010100,
                    "id": "city_id"
                }
            })
            # Read the config back to confirm the write.
            _r = modou.get.plugin__config__get(id=plugin['package_id'], type='TP')
            #print json.dumps(_r, indent=4)
|
[
"twinsant@gmail.com"
] |
twinsant@gmail.com
|
829567adc7aa41b48c01d0389a98678a8bc03857
|
7e4451710cc94fe422483785582e0b209ddb0f14
|
/backend/exampleapp/migrations/0001_initial.py
|
576ebba8acb68df1297c574291884736de9b5db2
|
[
"MIT"
] |
permissive
|
cstoneham/sample-django-app
|
5368b8b41df43f8147abbdf5fb3cf4d699e35a9a
|
0d6c4bfb559e7a088bf7f316b0846e69f133773e
|
refs/heads/master
| 2022-12-15T00:40:06.014430
| 2020-09-13T23:47:31
| 2020-09-13T23:47:31
| 295,247,260
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,081
|
py
|
# Generated by Django 2.2.16 on 2020-09-13 18:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for exampleapp: poll Question and its Choice rows."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.CharField(max_length=200)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
            ],
        ),
        migrations.CreateModel(
            name='Choice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.CharField(max_length=200)),
                ('votes', models.IntegerField(default=0)),
                # Deleting a Question cascades to its Choices.
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='exampleapp.Question')),
            ],
        ),
    ]
|
[
"cstoneha@cstoneha-mn1.linkedin.biz"
] |
cstoneha@cstoneha-mn1.linkedin.biz
|
0f50c4e549e087a9a584a000138b55ea5e47ae30
|
e72c9c9eb16286655a9856f69a61d2d4866bb31d
|
/dwa_local/build/dwa/catkin_generated/pkg.develspace.context.pc.py
|
9e8910933b7790ff285402d03152dbbeb03ec49a
|
[] |
no_license
|
lukeprotag/dwa_localplanner
|
a8050ff7207efd8d66d565201f7fdd859d758d4f
|
dabd5c015e5767815178cb454ee207ff04eb9959
|
refs/heads/main
| 2023-03-09T19:55:30.271296
| 2021-01-23T09:44:13
| 2021-01-23T09:44:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 362
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
# All values below were substituted by CMake for the "dwa" package's
# devel space; the empty-string conditionals are part of the template.
CATKIN_PACKAGE_PREFIX = ""
# Semicolon-separated lists; an empty substitution yields an empty list.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "dwa"
PROJECT_SPACE_DIR = "/home/czp/dwa_local/devel"
PROJECT_VERSION = "0.0.0"
|
[
"czjypczp@163.com"
] |
czjypczp@163.com
|
258ff636d09a4ba02f1a390d496a7104cfdea05f
|
d10b81bb886311ed9e7a0e02f4182a9a32ef8708
|
/resize.py
|
424a36cf70d7caae24469ca6ce873904d0375fa6
|
[] |
no_license
|
xinyuang/CV_tool
|
29c19a6b7d7f21d0d3e6bc65a987d053e9e07eb6
|
d26fb941d2da0f57e113dd7cea0c2aa043ecd2ed
|
refs/heads/master
| 2021-10-25T05:20:39.463255
| 2019-04-01T21:45:00
| 2019-04-01T21:45:00
| 108,859,598
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 845
|
py
|
from PIL import Image
h = 448
foreground = Image.open('0.jpg')
foreground = foreground.resize((h,h),Image.ANTIALIAS)
foreground.save('smallestCLASS.jpg')
size = 299
pf = '/media/xigu/DensoML/combination_data/test_1000_per_class'
tf = '/media/xigu/DensoML/MDP_2018/1000_emo_test'
for class_folder in os.listdir(pf):
tf_path = os.path.join(tf,class_folder)
if not os.path.exists(tf_path):
os.makedirs(tf_path)
for img_file in os.listdir(os.path.join(pf,class_folder)):
if img_file == '_face_position':
continue
img_path = os.path.join(pf,class_folder,img_file)
print(img_path)
img = cv2.imread(img_path)
new_img = cv2.resize(img,(size,size))
print(os.path.join(tf_path,img_file))
cv2.imwrite(os.path.join(tf_path,img_file),new_img)
|
[
"noreply@github.com"
] |
xinyuang.noreply@github.com
|
5a28764caee17859a67a324ced762fc9f4ca344f
|
e3690021648f922fe42d7d03d9548d25aa76f781
|
/app/drf_project/asgi.py
|
470d113b8a51a8c2617f7c2c3a9b14de21073907
|
[] |
no_license
|
Shah-imran/Test-Driven-Development-with-Django-REST-Framework-and-Docker
|
3eaae9211ff7e351958745f8e707e5612dc149a1
|
6b227cb6a5524ccc2cd2b91655341d92f1368f2a
|
refs/heads/main
| 2023-02-09T01:00:52.581790
| 2021-01-02T17:37:20
| 2021-01-02T17:37:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
"""
ASGI config for drf_project project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "drf_project.settings")
application = get_asgi_application()
|
[
"uweaime@gmail.com"
] |
uweaime@gmail.com
|
2db78bd8725def10983991faf071f0953ce9530f
|
c846a5217adf1f21999bffa7a32a8b7bdb3102ec
|
/silver/level2/수열의 합.py
|
d5ab3da5e03a450325d4f2479df1248f9b4eb7d3
|
[] |
no_license
|
123qpq/Beakjoon
|
c833433bb5e4237014ac0b84862aa7e15ec16ee8
|
fc191fc81c0d312bc306abd1a3b98c2a990223e5
|
refs/heads/main
| 2023-06-27T18:46:38.746673
| 2021-07-19T13:56:45
| 2021-07-19T13:56:45
| 343,278,319
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 510
|
py
|
n, l = map(int, input().split())
for i in range(l, 101):
if i % 2 == 1 and n % i == 0:
temp = n//i - (i-1)//2
if temp < 0:
print(-1)
break
for x in range(i):
print(temp + x, end = ' ')
break
elif i % 2 == 0 and n / i - int(n/i)== 0.5:
temp = int(n/i-0.5) - (i//2-1)
if temp < 0:
print(-1)
break
for x in range(i):
print(temp + x, end = ' ')
break
else:
print(-1)
|
[
"45002168+123qpq@users.noreply.github.com"
] |
45002168+123qpq@users.noreply.github.com
|
4ddc03fe83a70e247ca6962102c76c0060a4bf47
|
bef57d5b9d75a23f9b1b9d0aba2e10b126649b35
|
/35 Search_Insert_Position.py
|
a6dadd347abb1122f5478fec1469b94f830a5c87
|
[] |
no_license
|
elani0/leetcode-python-solutions
|
acc3a76da77028f2cbb80165fbc8fea5ca06e8bf
|
d6045053db1b58c34e9731dafed6f439501ec77d
|
refs/heads/master
| 2021-01-18T22:22:36.641800
| 2017-04-03T07:50:54
| 2017-04-03T07:50:54
| 87,049,952
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 18 19:21:05 2017
@author: Elani
"""
#35 Search Insert Position
"""
Given a sorted array and a target value, return the index if the target is found. If not, return the index where it would be if it were inserted in order.
You may assume no duplicates in the array.
Here are few examples.
[1,3,5,6], 5 → 2
[1,3,5,6], 2 → 1
[1,3,5,6], 7 → 4
[1,3,5,6], 0 → 0
"""
class Solution(object):
def searchInsert(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
for i in xrange(len(nums)):
if target <= nums[i]:
return i
return len(nums)
|
[
"elami_001@126.com"
] |
elami_001@126.com
|
b145ab9ba46987621d9fe358b91e4e7b238e0ceb
|
b72e41c71b8d3572e05920a10baf40a3a80f15e3
|
/非常棒的代码工作助手
|
4b564055dc98b299c9907502a22e3ac3be643a1e
|
[] |
no_license
|
ligaochao/pythonWheel
|
7a8ee853bb8ea8420757376f5aee1eb7fc37f139
|
4f86b31472e22de6aeb52149af38cb771264ec25
|
refs/heads/master
| 2021-05-09T05:41:50.482635
| 2020-03-03T07:18:04
| 2020-03-03T07:18:04
| 119,317,168
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,609
|
#!/usr/bin/env python3
import tkinter as tk
import threading
import pyperclip
import pyautogui
from pynput.keyboard import Controller, Key, Listener
from pynput.mouse import Button, Listener as ListenerMouse, Controller
from pynput import mouse
from pynput import keyboard
from collections import Counter
from tkinter import ttk
import win32gui
import pymysql
import time
import easygui as g
import tools
lastSentence = None
#这里记录字符的连续长度
strList = []
#这里记录排序后的数组
copyList = []
#这里记录窗口变化
fileUrlOld = None
#这里记录要被替换的字符串
replaced_string = None
replaced_string1 = None #分中英文
#这里记录的是实际值
realS = ""
#这里记录上一个按键值
firstBu=None
#这里记录全局的标签
label = "功能"
#
contentList = []
hwnd_title = dict()
"""
智能提示
"""
def intelligent_Tips(sentence,label):
global contentList
stock_identification = 0 #0:快捷输入库 1:重要信息库 2:代码库, 3 日常记录库
# if str(sentence) != "":
writing_Template_query_all_sql = "SELECT * from code_query WHERE content LIKE '%BBBB%' and label = '"+label+"' ORDER BY count DESC "
# 查询数据库 list
all_record = queryMysql(writing_Template_query_all_sql.replace("BBBB",str(sentence).replace("\n","").replace("\r","").replace(" ","")))
print(all_record)
# print(writing_Template_query_all_sql.replace("BBBB",str(sentence).replace("\n","").replace("\r","")))
contentList = []
for obj in all_record:
contentList.append(obj.get("content"))
return contentList
#把数据库查询结果转换为json
def sql_fetch_json(cursor: pymysql.cursors.Cursor):
"""
Convert the pymysql SELECT result to json format
:param cursor:
:return:
"""
keys = []
for column in cursor.description:
keys.append(column[0])
key_number = len(keys)
json_data = []
for row in cursor.fetchall():
print(row) #java
# item = dict()
item = {}
for q in range(key_number):
item[keys[q]] = row[q]
json_data.append(item)
return json_data
#查询通用接口
def queryMysql(sql,dbName="personal_product"):
# 打开数据库连接
db = pymysql.connect("localhost", "root", "lgc057171", dbName, charset='utf8' )
# 使用cursor()方法获取操作游标
cursor = db.cursor()
# 使用execute方法执行SQL语句
cursor.execute(sql)
# 使用 fetchone() 方法获取一条数据
# data = cursor.fetchone()
data = sql_fetch_json(cursor)
# print(data)
# 关闭数据库连接
db.close()
return data
# 插入,更新,删除通用接口
def saveMysql(sql,dbName="personal_product"):
# 打开数据库连接
db = pymysql.connect("localhost", "root", "lgc057171", dbName, charset='utf8' )
# 使用cursor()方法获取操作游标
cur= db.cursor()
# 使用execute方法执行SQL语句
try:
cur.execute(sql)
#提交
db.commit()
except Exception as e:
#错误回滚
print(e)
db.rollback()
finally:
db.close()
return True
def get_all_hwnd(hwnd,mouse):
if win32gui.IsWindow(hwnd) and win32gui.IsWindowEnabled(hwnd) and win32gui.IsWindowVisible(hwnd):
hwnd_title.update({hwnd:win32gui.GetWindowText(hwnd)})
def getHwnd():
appList = []
# print(hwnd_title.items())
for h,t in hwnd_title.items():
if t is not "":
t = t.replace("*","").replace(" - Notepad++","")
t = t.replace("•","").strip()
tList = t.split(" ")
if t != "tk":
appList.append(tList[0].replace("(mysite) - Sublime Text (UNREGISTERED)",""))
if "tk" == appList[0]:
print("试试看有没有走到这个地方"+str(appList[1]))
#
return appList[1]
return appList[0]
test_list = []
def on_copy(sentence):
global copyList,test_list,label
if len(sentence)>10:
return None
test_list = test_list + intelligent_Tips(sentence,label)
copyList1 = []
value = sentence
# value = event.widget.get()
# value = value.strip().lower()
if value == '':
data = test_list
copyList1 = test_list
else:
data = []
index = 1
for item in test_list:
if value in item.lower():
data.append(str(index)+","+item)
copyList1.append(item)
index = index + 1
copyList = copyList1
listbox_update(data)
def on_save(sentence):
global actionFalg,realS
result = g.buttonbox(msg=str(sentence)[0:300],title="保存到代码库",choices=("功能管理","功能","重要网址","python","java","sql","js","java知识点","html","css","取消"))
print("选择:"+str(result))
#result = pyautogui.confirm(text=str(sentence)[0:300],title='保存到代码库',buttons=["功能管理","功能","python","java","sql","js","java知识点","html","css","取消"])
record_insert_sql = "INSERT INTO code_query (label,content) VALUES ('AAAA','BBBB')"
if str(result).find("取消") > -1:
return None
if str(result).find("功能处理") > -1:
# pyautogui.hotkey('ctrl','c')
# sentence = pyperclip.paste()
realS = sentence
# if sentence != copyContent:
actionFalg = False
root.attributes("-alpha", 1)
root.overrideredirect(True)
root.geometry("+"+str(mouseX)+"+"+str(int(mouseY)+20))
return None
# print("实际的result"+str(result))
if result != None:
saveMysql(record_insert_sql.replace("AAAA",result.replace("\\", "\\\\").replace("'","\\'")).replace("BBBB",str(sentence).replace("\\", "\\\\").replace("'","\\'")))
pyautogui.alert(text='录入成功',title='',button='OK')
def on_keyrelease(sentence):
global copyList,fileUrlOld,test_list,realS,label
copyList1 = []
win32gui.EnumWindows(get_all_hwnd, 0)
# print("当前最新句柄"+getHwnd())
fileUrl = getHwnd()
if fileUrlOld != fileUrl or fileUrlOld == fileUrl:
fileUrlOld = fileUrl
test_list = []
# try:
abcList = []
print(fileUrl)
try:
with open(fileUrl,"r+",encoding = "utf-8") as f:
for obj in f:
obj = obj.replace("\n","")
if obj != "":
objList = obj.split(" ")
for obj1 in objList:
if obj1 not in abcList:
number = len(sentence)
if obj.find(";"*len(sentence)) > -1 or obj.find(";"*len(sentence)) > -1:
# print("实际的字符串0是"+str(obj))
sentence = obj.replace(";",";").replace(";"*len(sentence),"").strip()
# print("实际的字符串1是"+str(sentence))
sentence = sentence[len(sentence)-number:len(sentence)]
# print("实际的字符串2是"+str(sentence))
realS = sentence
# print("实际的字符串3是"+str(sentence))
abcList.append(obj1.replace(";"*len(sentence),"").replace(";"*len(sentence),""))
except Exception as e:
print(e)
test_list = test_list + intelligent_Tips(sentence,label)
test_list = test_list+abcList
# except Exception as e:
# print("baocuo"+str(e))
# value = event.widget.get()
# print(test_list)
value = sentence
if value == '':
data = test_list
copyList1 = test_list
else:
data = []
index = 1 #
for item in test_list:#get3
# print(value)
if item.lower().find(value.lower())>-1 :
# print(str(index)+"---"+str(item.lower()))
data.append(str(index)+","+item)
copyList1.append(item)
index = index + 1
copyList = copyList1
listbox_update(data)
def on_keySpace(sentence):
global copyList,fileUrlOld,test_list,realS,label
test_list = test_list + intelligent_Tips(sentence,label)
copyList1 = []
value = sentence
if value == '':
data = test_list
copyList1 = test_list
else:
data = []
index = 1 #
for item in test_list:#get3
# print(value)
if item.lower().find(value.lower())>-1 :
# print(str(index)+"---"+str(item.lower()))
data.append(str(index)+","+item)
copyList1.append(item)
index = index + 1
copyList = copyList1
listbox_update(data)
def listbox_update(data):
# delete previous data
listbox.delete(0, 'end')
# sorting data #我1
# data = sorted(data, key=str.lower)List
# put new data
if data != None and data != []:
for item in data:
listbox.insert('end', item)
else:
listbox.insert('end', "查询结果为空")
def on_select(event):
global label,actionFalg,clickFlag
#这里点击则展示全部的信息
clickFlag = False
content = ""
try:
content = str(event.widget.get(event.widget.curselection()))
except Exception as e:
print(e)
if content == "":
return False
if label == "功能":
# 模拟快捷键,热键
print("进入功能区")
root.attributes("-alpha", 0)
# pyautogui.hotkey('ctrl', 'c')
#获取粘贴板内容
if content == "取消":
actionFalg = True
return False
sentence = pyperclip.paste()
#独立的功能区域
functional_area(content,sentence)
#这里为了实现功能后,还能继续使用
actionFalg = True
# if content == "翻译":
# print("进入翻译了---"+sentence)
# elif content == "格式化Json":
# pass
code_query_update_sql = "UPDATE code_query SET count = count + 1 WHERE content = 'BBBB'"
saveMysql(code_query_update_sql.replace("BBBB",content.replace("\\", "\\\\").replace("'","\\'")))
else:
root.geometry("+1500+100")
content =",".join(content.split(",")[1:])
print(content)
#这边可以拓展一系列功能
# print("!!!!!!!!!!!!!!!!!!!!!!!!!")
# result1 = pyautogui.confirm(text=content,title='功能处理',buttons=["复制","翻译","查询"])
result1 = g.buttonbox(msg=content[0:300],title="",choices=("复制","翻译","查询","取消"))
print(result1)
if None != result1 and str(result1).find("复制")>-1:
pyperclip.copy(content)
if None != result1 and str(result1).find("取消") == -1:
print("进来了----"+content)
code_query_update_sql = "UPDATE code_query SET count = count + 1 WHERE content = 'BBBB'"
saveMysql(code_query_update_sql.replace("BBBB",content.replace("\\", "\\\\").replace("'","\\'")))
# display element selected on list
# print('(event) previous:', event.widget.get('active'))
# print('(event) current:', event.widget.get(event.widget.curselection()))
# print('---')
# def on_select(event):
#修改文件内容
def alter(file,old_str,new_str):
"""
替换文件中的字符串
:param file:文件名
:param old_str:就字符串
:param new_str:新字符串
:return:
"""
file_data = ""
# print("新字符串是"+new_str)
with open(file, "r", encoding="utf-8") as f:
for line in f:
if old_str in line:
line = line.replace(old_str,new_str)
file_data += line
with open(file,"w",encoding="utf-8") as f:
f.write(file_data)
def fun_timer(obj,flag):
global lastSentence,copyList,replaced_string,replaced_string1,realS
# print('Hello Timer!')
sentence = pyperclip.paste()
if obj in ["1","2","3","4","5","6","7","8","9","0"] and flag == "1":
try:
# print(copyList)
pyperclip.copy(copyList[int(obj)-1])
copyResult = pyperclip.paste()
# print("老字符串为"+str(realS+replaced_string+str(obj)))
if replaced_string !=None and fileUrlOld != None:
pyautogui.hotkey('ctrl', 's')
alter(fileUrlOld,realS+replaced_string+str(obj),copyResult)
alter(fileUrlOld,realS+replaced_string1+str(obj),copyResult)
# alter(fileUrlOld,replaced_string1,copyResult)
# alter(fileUrlOld,replaced_string,copyResult)
# copyList = []
listbox_update([])
except Exception as e:
print("1111"+str(e))
if flag == "2":
sentence = "".join(strList[len(strList)-2*int(obj):len(strList)-int(obj)])
lastSentence = None
if lastSentence != sentence:
lastSentence = sentence
on_keyrelease(sentence)
def go(*args): #处理事件,*args表示可变参数
global label,realS
label = str(comboxlist.get()).strip()
if label == "功能":
listbox_update(intelligent_Tips("",label))
if label != "功能":
if realS != None:
# try:
print("真实值是"+str(realS))
queryList = intelligent_Tips(realS,label)
newList = []
index = 1
for obj in queryList:
newList.append(str(index)+","+obj)
print("查询的结果为"+str(queryList))
listbox_update(newList)
# except Exception as e:
# print(e)
root.overrideredirect(False)
root.geometry("+1500+100")
print(comboxlist.get()) #打印选中的值
root = tk.Tk()
comvalue=tk.StringVar()#窗体自带的文本,新建一个值
comboxlist=ttk.Combobox(root,textvariable=comvalue) #初始化
comboxlist["values"]=("功能","python","重要网址","java","sql","js","java知识点","html","css","取消")
comboxlist.current(0) #选择第一个
comboxlist.bind("<<ComboboxSelected>>",go) #绑定事件,(下拉列表框被选中时,绑定go()函数)
comboxlist.pack()
# entry = tk.Entry(root)
# entry.pack()
# entry.bind('<KeyRelease>', on_keyrelease)
def word_count_in_str(string, keyword):
return len(string.split(keyword))-1
# 监听按压
def on_press(key):
global strList,replaced_string,replaced_string1,firstBu,test_list,label,keyAction,clickFlag,realS
# keyAction = True
keyValue = ""
try:
keyValue = key.char
except Exception as e:
keyValue = str(key)
if label != "功能":
# print(keyValue,firstBu)
if keyValue not in ["Key.space","Key.enter","Key.ctrl_l","Key.right","Key.shift_r","Key.shift","Key.alt_l"]:
if keyValue not in ["1","2","3","4","5","6","7","8","9","0"] :
# print(keyValue)
if keyValue.find("Key.") == -1:
strList.append(keyValue)
if len(strList) > 40:
strList = strList[len(strList)-40:len(strList)]
# print("累加的字符串为"+str(strList))
elif firstBu != "Key.shift_r":
test_list = []
strdd = "".join(strList)
# print(strList)
if strdd.find(";;")>-1 or strdd.find(";;")>-1:
if ";" in strList[len(strList)-3:len(strList)] or ";" in strList[len(strList)-3:len(strList)]:
fun_timer(keyValue,"1")
if keyValue in [";"]:
result = 0
if len(strList)>10:
result = word_count_in_str("".join(strList[len(strList)-10:len(strList)]),";")
else:
result = word_count_in_str("".join(strList),";")
# print("字符串为"+str("".join(strList[len(strList)-5:len(strList)])))
# print("出现次数"+str(result))
if int(result)>1:
test_list = []
pyautogui.hotkey('ctrl', 's')
fun_timer(str(result),"2")
replaced_string = ";"*int(result)
replaced_string1 = ";"*int(result)
if firstBu == "Key.shift_r" and keyValue in ["1","2","3","4","5","6","7","8","9","0"]:
pass
# elif firstBu == "Key.ctrl_l" and keyValue == "c" or firstBu == "c" and keyValue == "Key.ctrl_l":
# # pyautogui.hotkey('ctrl', 'c')
# time.sleep(1)
# sentence = pyperclip.paste()
# #print("拷贝的内容是---"+sentence)
# test_list = []
# firstBu = None
# keyValue = None
# # print("进来这个分支了--"+str(sentence))
# on_copy(sentence)
if firstBu == "Key.ctrl_l" and keyValue == "q" or firstBu == "q" and keyValue == "Key.ctrl_l":
print("进来了111")
pyautogui.hotkey('ctrl', 'c')
sentence = pyperclip.paste()
on_save(sentence)
else:
firstBu = keyValue
else:
print("这里是啥子"+keyValue)
# 不晓得为啥多了个C
if firstBu == "Key.ctrl_l" and keyValue == "Key.space" or firstBu == "Key.space" and keyValue == "Key.ctrl_l":
print("进入space功能")
firstBu = None
keyValue = None
pyautogui.hotkey('ctrl', 'c')
sentence = pyperclip.paste()
realS = sentence
on_keySpace(sentence)
root.attributes("-alpha", 1)
if firstBu == "Key.ctrl_l" and keyValue == "v" or firstBu == "v" and keyValue == "Key.ctrl_l":
clickFlag = False
if firstBu == "Key.ctrl_l" and keyValue == "q" or firstBu == "q" and keyValue == "Key.ctrl_l":
# print("进来了222")
firstBu = None
keyValue = None
pyautogui.hotkey('ctrl', 'c')
sentence = pyperclip.paste()
on_save(sentence)
else:
firstBu = keyValue
# pass
# 监听按压
def on_release(key):
pass
# 开始监听
def start_listen():
with Listener(on_press=on_press,on_release=on_release) as listener:
listener.join()
# 左键
shoot = 0
# 右键
scope = 1
"""
鼠标事件
"""
last_time = 0
"""
鼠标的坐标
"""
mouseX = 0
mouseY = 0
#执行判断
actionFalg = True
#判断拷贝内容是否修改了
copyContent = None
#加一个键盘判断,如果有任何的键盘操作,则都无法进入鼠标的操作
keyAction = False
#处理标识,如果经过了处理就修改标识,否则,不复制
clickFlag = False
#这里新增一个标识,来标记用户鼠标左键的状态
leftStatus = False
#鼠标持续时间
continueClickTime = 0
def mouse_click(x, y, button, pressed):
global last_time,copyContent,keyAction,clickFlag,leftStatus,continueClickTime
# print("简历了")
global shoot
global scope
global label
global actionFalg
if pressed:
leftStatus = True
# 点击右键
if button == Button.right:
scope = 0
# 点击左键
if button == Button.left:
last_time = 0
shoot = 1
# print(pressed)
if pressed == False:
leftStatus = False
# root.attributes("-alpha", 0)
actionFalg = True
continueTime = last_time
last_time = 0
print(button)
if label == "功能" and continueTime > 5 and button != Button.right and continueClickTime >=1:
pass
# print("进来了"+str(continueClickTime))
# #当复制一次的时候没有操作,证明不需要
# if actionFalg ==False:
# actionFalg = True
# root.attributes("-alpha", 0)
# else:
# sentence = pyperclip.paste()
# if sentence != copyContent:
# copyContent = sentence
# actionFalg = False
# root.attributes("-alpha", 1)
# root.overrideredirect(True)
# root.geometry("+"+str(mouseX)+"+"+str(int(mouseY)+20))
# keyAction = True
# if clickFlag == False:
# pyautogui.hotkey('ctrl','c')
# clickFlag = True
# keyAction = False
# else:
# # print("进来了3")
# pyautogui.hotkey('ctrl','c')
# sentence = pyperclip.paste()
# if sentence != copyContent:
# copyContent = sentence
# actionFalg = False
# root.attributes("-alpha", 1)
# root.overrideredirect(True)
# root.geometry("+"+str(mouseX)+"+"+str(int(mouseY)+20))
if not pressed and button == Button.left:
shoot = 0
def on_move(x, y):
global last_time,mouseX,mouseY
mouseX = x
mouseY = y
last_time = last_time + 1
"""
监听事件方法
"""
def mouseListener():
with ListenerMouse(on_click=mouse_click,on_move = on_move) as listener1:
listener1.join()
import threading
t= threading.Thread(target = start_listen)
t.start()
t1= threading.Thread(target = mouseListener)
t1.start()
oldMouseX = None
oldMouseY = None
#这里主要用来监视复制停留时间
def clickTime():
global mouseX,mouseY,leftStatus,continueClickTime,oldMouseX,oldMouseY,label,realS
while True:
time.sleep(0.5)
if leftStatus:
# print("进来了")
if oldMouseX == mouseX and oldMouseY == mouseY:
continueClickTime = continueClickTime + 1
continueTime = last_time
if label == "功能" and continueTime > 5 and continueClickTime >=1:
root.attributes("-alpha", 1)
root.overrideredirect(True)
root.geometry("+"+str(mouseX)+"+"+str(int(mouseY)+20))
pyautogui.hotkey('ctrl','c')
realS = pyperclip.paste()
else:
oldMouseX = mouseX
oldMouseY = mouseY
continueClickTime = 0
else:
continueClickTime = 0
#新启动一个线程,监控按压时长,当时间超过1秒的时候,认为
t2 = threading.Thread(target = clickTime)
t2.start()
#这里是功能大集合
def functional_area(content,sentence):
global realS
try:
if content == "快捷保存":
on_save(realS)
else:
tools.uncommon_Function_Complete(content,"",sentence)
except Exception as e:
print(e)
root.geometry("+1800+100")
listbox = tk.Listbox(root,height=15,width=23)
listbox.pack()
#listbox.bind('<Double-Button-1>', on_select)
listbox.bind('<<ListboxSelect>>', on_select)
listD = listbox_update(intelligent_Tips("","功能"))
print(listD)
if listD != None and listD != []:
for obj in listD:
listbox.insert('end', obj)
# listbox_update(listbox_update(intelligent_Tips("","功能")))
root.geometry("+1500+100")
root.attributes("-alpha", 0)
root.wm_attributes('-topmost',1)
root.mainloop()
|
[
"noreply@github.com"
] |
ligaochao.noreply@github.com
|
|
62e0bfb4ebd5b21b398ef19d68dc1a2f5e57662d
|
45e376ae66b78b17788b1d3575b334b2cb1d0b1c
|
/tests/cloudformation/checks/resource/aws/test_AppSyncFieldLevelLogs.py
|
55c1467de63607624517adf634fb55bcbea9d014
|
[
"Apache-2.0"
] |
permissive
|
bridgecrewio/checkov
|
aeb8febed2ed90e61d5755f8f9d80b125362644d
|
e64cbd27ffb6f09c2c9f081b45b7a821a3aa1a4d
|
refs/heads/main
| 2023-08-31T06:57:21.990147
| 2023-08-30T23:01:47
| 2023-08-30T23:01:47
| 224,386,599
| 5,929
| 1,056
|
Apache-2.0
| 2023-09-14T20:10:23
| 2019-11-27T08:55:14
|
Python
|
UTF-8
|
Python
| false
| false
| 1,280
|
py
|
import unittest
from pathlib import Path
from checkov.cloudformation.checks.resource.aws.AppSyncFieldLevelLogs import check
from checkov.cloudformation.runner import Runner
from checkov.runner_filter import RunnerFilter
class TestAppSyncLogging(unittest.TestCase):
def test_summary(self):
test_files_dir = Path(__file__).parent / "example_AppSyncFieldLevelLogs"
report = Runner().run(root_folder=str(test_files_dir), runner_filter=RunnerFilter(checks=[check.id]))
summary = report.get_summary()
passing_resources = {
"AWS::AppSync::GraphQLApi.All",
"AWS::AppSync::GraphQLApi.Error",
}
failing_resources = {
"AWS::AppSync::GraphQLApi.None",
}
passed_check_resources = {c.resource for c in report.passed_checks}
failed_check_resources = {c.resource for c in report.failed_checks}
self.assertEqual(summary["passed"], 2)
self.assertEqual(summary["failed"], 1)
self.assertEqual(summary["skipped"], 0)
self.assertEqual(summary["parsing_errors"], 0)
self.assertEqual(passing_resources, passed_check_resources)
self.assertEqual(failing_resources, failed_check_resources)
if __name__ == "__main__":
unittest.main()
|
[
"anton.gruebel@gmail.com"
] |
anton.gruebel@gmail.com
|
f9c25b37415e4ac45615315a53d4f3a9a4e16bcf
|
adf55054f064d17a0d5f53a70a9d00cd2b1eeb81
|
/bill/migrations/0002_auto_20210429_0934.py
|
00a6f0ffa2edeee114486db1ee4316f96ea9ae3f
|
[] |
no_license
|
shaneebavaisiar/Billing-Project
|
8d2d8209679b810d79032c3255d10abdeb3ea460
|
8bb8705e1bc56cebb6aea486a4f089188078e15b
|
refs/heads/master
| 2023-06-15T18:18:10.051789
| 2021-07-11T06:48:15
| 2021-07-11T06:48:15
| 384,879,442
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 358
|
py
|
# Generated by Django 3.1.7 on 2021-04-29 04:04
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('bill', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='purchase',
old_name='price',
new_name='purchase_price',
),
]
|
[
"shaneebavaisiar007@gmail.com"
] |
shaneebavaisiar007@gmail.com
|
412184eee836bb38d482cd511bc67bf94ebc227e
|
300a3b25ceb1718b91748c38de9a470a02d67341
|
/src/products/migrations/0001_initial.py
|
de772ec35898e5741571775a59643b08fee79aea
|
[] |
no_license
|
jwilsontt/sagaboi_ecommerce
|
34b83fffd89506ad48ea126fec414df05b82537d
|
afca7bac6053b6fba55517adf4b56e0c6c24b399
|
refs/heads/master
| 2020-03-25T01:00:23.976983
| 2018-09-08T04:20:57
| 2018-09-08T04:20:57
| 143,217,400
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 607
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-06-12 17:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120)),
('description', models.TextField()),
],
),
]
|
[
"jwilsontt@gmail.com"
] |
jwilsontt@gmail.com
|
6a034b3761d46d90639ff3d61431848ed48e5b5d
|
e23a4f57ce5474d468258e5e63b9e23fb6011188
|
/140_gui/pyqt_pyside/examples/PyQt_PySide_book/006_Working with graphics/003_Working with Images/579. QPixmap_fill.py
|
f6d34a67c190d77929e0d11675d2579c29dc73b4
|
[] |
no_license
|
syurskyi/Python_Topics
|
52851ecce000cb751a3b986408efe32f0b4c0835
|
be331826b490b73f0a176e6abed86ef68ff2dd2b
|
refs/heads/master
| 2023-06-08T19:29:16.214395
| 2023-05-29T17:09:11
| 2023-05-29T17:09:11
| 220,583,118
| 3
| 2
| null | 2023-02-16T03:08:10
| 2019-11-09T02:58:47
|
Python
|
UTF-8
|
Python
| false
| false
| 1,831
|
py
|
# -*- coding: utf-8 -*-
from PyQt4 import QtCore, QtGui
class MyWindow(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
self.resize(600, 600)
painter = QtGui.QPainter()
self.pix = QtGui.QPixmap(300, 300)
self.pix.fill(color=QtGui.QColor("#00ff00"))
painter.begin(self.pix)
black = QtCore.Qt.black
white = QtCore.Qt.white
red = QtCore.Qt.red
painter.setRenderHint(QtGui.QPainter.Antialiasing)
painter.setPen(QtGui.QPen(black))
painter.setBrush(QtGui.QBrush(white))
painter.drawRect(3, 3, 294, 294)
painter.setPen(QtGui.QPen(red, 2, style=QtCore.Qt.SolidLine))
painter.setBrush(QtGui.QBrush(QtCore.Qt.green,
style=QtCore.Qt.Dense5Pattern))
painter.drawRect(50, 50, 80, 80)
painter.setBrush(QtGui.QBrush(QtCore.Qt.green,
style=QtCore.Qt.CrossPattern))
painter.drawRect(QtCore.QRect(150, 50, 80, 80))
painter.setBrush(QtGui.QBrush(QtCore.Qt.green,
style=QtCore.Qt.DiagCrossPattern))
painter.drawRect(QtCore.QRectF(50., 150., 80., 80.))
painter.setPen(QtGui.QPen(red, 0, style=QtCore.Qt.NoPen))
painter.setBrush(QtGui.QBrush(QtCore.Qt.green,
style=QtCore.Qt.SolidPattern))
painter.drawRect(QtCore.QRect(150, 150, 80, 80))
painter.end()
def paintEvent(self, e):
painter = QtGui.QPainter(self)
painter.drawPixmap(0, 0, self.pix)
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
window = MyWindow()
window.setWindowTitle("Класс QPixmap")
window.show()
sys.exit(app.exec_())
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
e024b573d2b6109c5cad612a56f2aafcd84d2d56
|
a87cd0df7e808bcbb42c3b69299002b3623977ed
|
/Python_Tools/02_RESOLVE_ORA-01704.py
|
cc51e90dda9496d1612b91d71c2b1e77f60b92cc
|
[] |
no_license
|
gaolc/MyPython
|
b2a731f619d229815fb2e21e0365314925ec79d4
|
60d8f37e9faa814de0065cfea39a72126c8b88fa
|
refs/heads/master
| 2021-06-20T14:25:49.389613
| 2020-12-17T08:31:17
| 2020-12-17T08:31:17
| 148,150,259
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,863
|
py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import os
import sys
import time
import datetime
import platform
MAX_LINE_NUM=0
#set change line position
LINE_POSITION=2000
CONNECTOR="'\n||'"
CHANGE=0
def count_time(func):
def wrapper(*args,**kwargs):
starTime=time.time()
func(*args,**kwargs)
endTime=time.time()
spend_time=int(endTime-starTime)
print ('{0} INFO : The time of spend is {1} seconds .'.format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),spend_time))
return wrapper
def getMaxLineNum(filename):
global MAX_LINE_NUM
if platform.system()=='Linux':
r=os.popen('wc -l '+filename).readlines()
MAX_LINE_NUM=int(r[0].split(" ")[0])
elif platform.system()=='Windows':
with open(filename) as f:
text=f.read()
MAX_LINE_NUM=len(text.splitlines())
else:
print ("Warning : Please user Linux or Windows !")
return MAX_LINE_NUM
def init():
time_now=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if os.path.isfile(sys.argv[1]+'-new'):
print ('{0} INFO : File {1}-new exits ,need to delete !'.format(time_now,sys.argv[1]))
os.remove(sys.argv[1]+'-new')
else :
print ("{0} INFO : File {1}-new not exits ,don't need to delete !".format(time_now,sys.argv[1]))
class CHANGE_LINE():
def __init__(self,data_file):
self.data_file=data_file
@count_time
def alterLine(self,callback=object):
self.curr_line_num=0
trigger_bar_update=MAX_LINE_NUM/70
if trigger_bar_update == 0 :
trigger_bar_update=1
t1=int(time.time())
with open(self.data_file,'r') as file :
for line in file :
self.curr_line_num+=1
line=line.strip('\n')
if len(line) > LINE_POSITION :
global CHANGE
CHANGE+=1
c_time=len(line)/LINE_POSITION
y=len(line)%LINE_POSITION
message=''
if c_time == 1 and y != 0:
message+=line[0:LINE_POSITION]+CONNECTOR+line[LINE_POSITION:]
elif c_time>1 and y == 0 :
for i in range(0,c_time):
if i < c_time-1 :
message+=line[i*LINE_POSITION:(i+1)*LINE_POSITION]+CONNECTOR
else :
message+=line[i*LINE_POSITION:(i+1)*LINE_POSITION]
elif c_time>1 and y != 0 :
for i in range(0,c_time):
if i < c_time - 1 :
message+=line[i*LINE_POSITION:(i+1)*LINE_POSITION]+CONNECTOR
else :
message+=line[i*LINE_POSITION:(i+1)*LINE_POSITION]+CONNECTOR+line[(i+1)*LINE_POSITION:]
with open(self.data_file+'-new','a') as temp_f:
print >> temp_f,(message)
else :
with open(self.data_file+'-new','a') as temp_f:
print >> temp_f,(line)
if self.curr_line_num % trigger_bar_update ==0 or self.curr_line_num == MAX_LINE_NUM:
callback(current_step=self.curr_line_num)
class PrcgressBar():
max_arrow=70
def __init__(self,max_step):
self.max_step=max_step
self.curret_step=1
def updateBar(self,current_step=None):
if current_step is not None :
self.current_step=current_step
num_arrow=int(self.current_step*self.max_arrow / self.max_step)
num_line=self.max_arrow - num_arrow
percent=self.current_step * 100.0 / self.max_step
progress_bar='[' + '>' * num_arrow + '-' * num_line + ']' +'%.2f' %percent + '%' + '\r'
progress_bar_n='[' + '>' * num_arrow + '-' * num_line + ']' +'%.2f' %percent + '%' + '\n'
if current_step<self.max_step :
sys.stdout.write(progress_bar)
else :
sys.stdout.write(progress_bar_n)
sys.stdout.flush()
if __name__=='__main__':
if len(sys.argv[:]) != 2:
print ('Usage : python {0} <filename> '.format(sys.argv[0]))
sys.exit(1)
else :
file_name=sys.argv[1]
init()
MAX_LINE_NUM=getMaxLineNum(file_name)
print ('{0} INFO : {1} has {2} lines .'.format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),file_name,MAX_LINE_NUM))
barObj=PrcgressBar(MAX_LINE_NUM)
chgLine=CHANGE_LINE(file_name)
chgLine.alterLine(callback=barObj.updateBar)
print ('{0} INFO : This time has changed {1} lines .'.format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),CHANGE))
|
[
"240949806@qq.com"
] |
240949806@qq.com
|
d1f1e51bd4778f138a058c541ed474b9d28ad28e
|
443a3644224a1c2e52e2bb18ff213f7626cbb37e
|
/src/hw_week1_Maheswari_Jyoti.py
|
6886371536466b9fab9ac94b5e8b628fc9daee52
|
[] |
no_license
|
jyotipmahes/HW1_MSDS603
|
22f1fbcf743b2d81722ba46fbfe5cdbfdef8845a
|
5839aa3b6cbb08c2febd9cd6496bb27ff13d5084
|
refs/heads/master
| 2020-05-01T08:18:11.296397
| 2019-03-24T07:24:25
| 2019-03-24T07:24:25
| 177,374,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,731
|
py
|
import paramiko
from os.path import expanduser
from user_definition import *
git_repo_name = "HW1_MSDS603"
git_user_id = "jyotipmahes"
# ## Assumption : Anaconda, Git (configured)
def ssh_client():
"""Return ssh client object"""
return paramiko.SSHClient()
def ssh_connection(ssh, ec2_address, user, key_file):
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(ec2_address, username=user,
key_filename=expanduser("~") + key_file)
return ssh
def create_or_update_environment(ssh):
stdin, stdout, stderr = \
ssh.exec_command("conda env create -f "
"~/HW1_MSDS603/venv/environment.yml")
if (b'already exists' in stderr.read()):
stdin, stdout, stderr = \
ssh.exec_command("conda env update -f "
"~/HW1_MSDS603/venv/environment.yml")
def git_clone(ssh):
# ---- HOMEWORK ----- #
stdin, stdout, stderr = ssh.exec_command("git --version")
if (b"" is stderr.read()):
git_clone_command = "git clone https://github.com/" + \
git_user_id + "/" + git_repo_name + ".git"
stdin, stdout, stderr = ssh.exec_command(git_clone_command)
if (b'already exists' in stderr.read()):
path_change = "cd /home/ec2-user/"+git_repo_name+"/"
git_pull_command = "git pull origin master"
stdin, stdout, stderr = ssh.exec_command(path_change +
";" + git_pull_command)
def main():
ssh = ssh_client()
ssh_connection(ssh, ec2_address, user, key_file)
git_clone(ssh)
create_or_update_environment(ssh)
if __name__ == '__main__':
main()
|
[
"manish1352@gmail.com"
] |
manish1352@gmail.com
|
fa7594c9fdb0146e11aa47b8885825f522b575d5
|
3d19e1a316de4d6d96471c64332fff7acfaf1308
|
/Users/M/MSuman/films.py
|
f258b48df7ac4fe9277c78d918119010501cafa0
|
[] |
no_license
|
BerilBBJ/scraperwiki-scraper-vault
|
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
|
65ea6a943cc348a9caf3782b900b36446f7e137d
|
refs/heads/master
| 2021-12-02T23:55:58.481210
| 2013-09-30T17:02:59
| 2013-09-30T17:02:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,034
|
py
|
###############################################################################
# START HERE: Tutorial 2: Basic scraping and saving to the data store.
# Follow the actions listed in BLOCK CAPITALS below.
###############################################################################
import scraperwiki
html = scraperwiki.scrape('http://www.guardian.co.uk/news/datablog/2010/oct/16/greatest-films-of-all-time')
print html
# -----------------------------------------------------------------------------
# 1. Parse the raw HTML to get the interesting bits - the part inside <td> tags.
# -- UNCOMMENT THE 6 LINES BELOW (i.e. delete the # at the start of the lines)
# -- CLICK THE 'RUN' BUTTON BELOW
# Check the 'Console' tab again, and you'll see how we're extracting
# the HTML that was inside <td></td> tags.
# We use BeautifulSoup, which is a Python library especially for scraping.
# -----------------------------------------------------------------------------
from BeautifulSoup import BeautifulSoup
soup = BeautifulSoup(html) # turn our HTML into a BeautifulSoup object
tds = soup.findAll('td') # get all the <td> tags
for td in tds:
print td # the full HTML tag
# print td.text # just the text inside the HTML tag
# -----------------------------------------------------------------------------
# 2. Save the data in the ScraperWiki datastore.
# -- UNCOMMENT THE TWO LINES BELOW
# -- CLICK THE 'RUN' BUTTON BELOW
# Check the 'Data' tab - here you'll see the data saved in the ScraperWiki store.
# -----------------------------------------------------------------------------
for td in tds:
record = { "td" : td.text } # column name and value
scraperwiki.datastore.save(["td"], record) # save the records one by one
# -----------------------------------------------------------------------------
# Go back to the Tutorials page and continue to Tutorial 3 to learn about
# more complex scraping methods.
# -----------------------------------------------------------------------------
###############################################################################
# START HERE: Tutorial 2: Basic scraping and saving to the data store.
# Follow the actions listed in BLOCK CAPITALS below.
###############################################################################
import scraperwiki
html = scraperwiki.scrape('http://www.guardian.co.uk/news/datablog/2010/oct/16/greatest-films-of-all-time')
print html
# -----------------------------------------------------------------------------
# 1. Parse the raw HTML to get the interesting bits - the part inside <td> tags.
# -- UNCOMMENT THE 6 LINES BELOW (i.e. delete the # at the start of the lines)
# -- CLICK THE 'RUN' BUTTON BELOW
# Check the 'Console' tab again, and you'll see how we're extracting
# the HTML that was inside <td></td> tags.
# We use BeautifulSoup, which is a Python library especially for scraping.
# -----------------------------------------------------------------------------
from BeautifulSoup import BeautifulSoup
soup = BeautifulSoup(html) # turn our HTML into a BeautifulSoup object
tds = soup.findAll('td') # get all the <td> tags
for td in tds:
print td # the full HTML tag
# print td.text # just the text inside the HTML tag
# -----------------------------------------------------------------------------
# 2. Save the data in the ScraperWiki datastore.
# -- UNCOMMENT THE TWO LINES BELOW
# -- CLICK THE 'RUN' BUTTON BELOW
# Check the 'Data' tab - here you'll see the data saved in the ScraperWiki store.
# -----------------------------------------------------------------------------
for td in tds:
record = { "td" : td.text } # column name and value
scraperwiki.datastore.save(["td"], record) # save the records one by one
# -----------------------------------------------------------------------------
# Go back to the Tutorials page and continue to Tutorial 3 to learn about
# more complex scraping methods.
# -----------------------------------------------------------------------------
|
[
"pallih@kaninka.net"
] |
pallih@kaninka.net
|
f42269c7589701a7c82e1784aeb2354d653b7e9a
|
726f9aba85e336a5b3e19573060393eae4c665f2
|
/PythonPracticePrograms/Basic Program/Check_Even_Odd_Number.py
|
c4d1327207eeee752cd18cf46b10087bd7bf811b
|
[] |
no_license
|
dipikarpawarr/TQ_Python_Programming
|
318f9747d197364b2c6d7b875130f94b5b6407a1
|
0f0631a32571ed1a0c79d6f0ed2cc646c67ae2f5
|
refs/heads/main
| 2023-06-26T13:51:02.129544
| 2021-08-02T14:25:33
| 2021-08-02T14:25:33
| 387,700,665
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 177
|
py
|
# Write a python program to check whether a number is even or odd
num = int(input("Enter the number = "))
if num%2 == 0:
print("Even Number")
else:
print("Odd Number")
|
[
"dipikarpawar.dp@gmail.com"
] |
dipikarpawar.dp@gmail.com
|
e53a61a84b28cb1a541f6a118aece937a7866d9c
|
7ef5c1307622c96cdf4c8750f9dba16daf5c840a
|
/app/views.py
|
133fd4380f5d58a99cbd146eac3cc25f3b81939c
|
[] |
no_license
|
javiconrad13/info3180-project2
|
2ed691fadfd60968b795d846276fce58f4b6b41c
|
e20262a8da16a5c1f7f7b7304e88be259028a3cf
|
refs/heads/master
| 2021-01-20T06:29:07.484060
| 2017-05-01T00:33:34
| 2017-05-01T00:33:34
| 87,746,188
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,573
|
py
|
import os, json, requests, hashlib, uuid
from app import app, db
from flask import render_template, request, redirect, url_for, jsonify, g, session, flash
from flask_login import LoginManager, login_user, logout_user, current_user, login_required
from app.models import myprofile, mywish
from app.forms import LoginForm, ProfileForm, WishForm, ShareForm
from bs4 import BeautifulSoup
import BeautifulSoup
import urlparse
from app.email import send_email, mail
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
app.config['SECRET_KEY'] = 'super-secret'
@lm.user_loader
def load_user(id):
return myprofile.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
@app.route('/')
def home():
"""Render website's home page."""
if g.user.is_authenticated:
return redirect('/api/user/' + str(g.user.hashed) + '/wishlist')
return render_template('home.html')
@app.route('/api/user/login', methods=['POST','GET'])
def login():
error=None
form = LoginForm(request.form)
if request.method == 'POST':
attempted_email = request.form['email']
attempted_password = request.form['password']
db_creds = myprofile.query.filter_by(email=attempted_email).first()
if db_creds == None:
error = 'Email Address Does Not Exist.'
return render_template("home.html",error=error,form=form)
else:
db_email = db_creds.email
db_password = db_creds.password
db_id = db_creds.hashed
if attempted_email == db_email and attempted_password == db_password:
login_user(db_creds)
flash('Welcome, you are logged in :)', 'success')
return redirect('/api/user/' + str(db_id) + '/wishlist')
else:
error = 'Invalid credentials :('
return render_template("home.html",error=error,form=form)
form = LoginForm()
return render_template("home.html",error=error,form=form)
@app.route('/logout')
def logout():
logout_user()
flash('LOGGED OUT: Please log in to continue', 'info')
return redirect('/')
@app.route('/api/user/register', methods = ['POST','GET'])
def newprofile():
error=None
form = ProfileForm()
if request.method == 'POST':
firstname = request.form['firstname']
lastname = request.form['lastname']
sex = request.form['sex']
age = int(request.form['age'])
email = request.form['email']
password = request.form['password']
salt = uuid.uuid4().hex
salty = hashlib.sha256(salt.encode() + password.encode()).hexdigest() + ':' + salt
hash_object = hashlib.sha256(email + salty)
hashed = hash_object.hexdigest()
newProfile = myprofile(firstname=firstname, lastname=lastname, email=email, password=password, sex=sex, age=age, hashed=hashed)
db.session.add(newProfile)
db.session.commit()
login_user(newProfile)
flash('Profile for '+ firstname +' added','success')
return redirect('/')
form = ProfileForm()
return render_template('registration.html',form=form,error=error)
@app.route('/api/user/<userid>')
@login_required
def profile_view(userid):
if g.user.is_authenticated:
form = WishForm()
profile_vars = {'id':g.user.userid, 'email':g.user.email, 'age':g.user.age, 'firstname':g.user.firstname, 'lastname':g.user.lastname, 'sex':g.user.sex, 'hashed':g.user.hashed}
return render_template('addWish.html',form=form,profile=profile_vars)
@app.route('/api/user/<id>/wishlist', methods = ['POST','GET'])
@login_required
def wishlist(id):
profile = myprofile.query.filter_by(hashed=id).first()
profile_vars = {'id':profile.userid, 'email':profile.email, 'age':profile.age, 'firstname':profile.firstname, 'lastname':profile.lastname, 'sex':profile.sex, 'hashed':g.user.hashed}
form = WishForm()
if request.method == 'POST':
title = request.form['title']
description = request.form['description']
url = request.form['url']
newWish = mywish(userid=g.user.userid, title=title, description=description, description_url=url)
db.session.add(newWish)
flash('Item Added1')
db.session.commit()
flash('Item Added2')
return redirect(url_for('getPics',wishid=newWish.wishid))
if request.method == "GET":
wish = mywish.query.filter_by(userid=profile.userid)
wishes = []
for wishy in wish:
wish_vars = {'wishid':wishy.wishid, 'userid':wishy.userid, 'title':wishy.title, 'desc':wishy.description, 'descurl':wishy.description_url, 'thumbs':wishy.thumbnail_url}
wishes.append(wish_vars)
return render_template('profile_view.html', wish=wishes, profile=profile_vars)
return render_template('addWish.html',form=form,profile=profile_vars)
@app.route('/api/thumbnail/process/<wishid>')
@login_required
def getPics(wishid):
wish = mywish.query.filter_by(wishid=wishid).first()
wish_vars = {'wishid':wish.wishid, 'userid':g.user.userid, 'title':wish.title, 'desc':wish.description, 'descurl':wish.description_url, 'thumbs':wish.thumbnail_url}
profile_vars = {'id':g.user.userid, 'email':g.user.email, 'age':g.user.age, 'firstname':g.user.firstname, 'lastname':g.user.lastname, 'sex':g.user.sex, 'hashed':g.user.hashed}
url = wish.description_url
soup = BeautifulSoup.BeautifulSoup(requests.get(url).text)
images = []
og_image = (soup.find('meta', property='og:image') or soup.find('meta', attrs={'name': 'og:image'}))
if og_image and og_image['content']:
images.append(urlparse.urljoin(url, og_image['content']))
thumbnail_spec = soup.find('link', rel='image_src')
if thumbnail_spec and thumbnail_spec['href']:
images.append(urlparse.urljoin(url, thumbnail_spec['href']))
for img in soup.findAll("img", src=True):
images.append(urlparse.urljoin(url, img['src']))
#flash('Item successfully added :) ')
#eturn redirect("/")
return render_template('pickimage.html',images=images,wish=wish_vars,profile=profile_vars)
@app.route("/api/user/delete/wishlist/jj/<wishid>", methods=['POST'])
def deleteitem(wishid):
print "jeloo"
user = mywish.query.get(wishid)
flash("" + user.title + ' DELETED', 'danger')
db.session.delete(user)
db.session.commit()
return redirect('/api/user/' + str(g.user.hashed) + '/wishlist')
@app.route('/addpic/<wishid>', methods=['POST'])
@login_required
def wishpic(wishid):
user = mywish.query.get(wishid)
user.thumbnail_url = request.json['thumbs']
db.session.commit()
flash('Item successfully added :) ')
if user.thumbnail_url == request.json['thumbs']:
flash('Item successfully added :) ')
return redirect('/api/user/' + str(g.user.hashed) + '/wishlist')
else:
flash("Wish not added, some error occurred.")
return redirect('/api/user/' + str(g.user.hashed) + '/wishlist')
@app.route('/api/user/sharing/<userid>', methods=['POST', 'GET'])
def sharing(userid):
form = ShareForm()
profile = myprofile.query.filter_by(hashed=userid).first()
if request.method == 'POST':
email = request.form['email']
subject = "Please see my wishlist"
send_email(email, subject, profile.hashed)
flash('YOU HAVE MADE A WISH', 'success')
return redirect("/")
return render_template('sharing.html', form=form, profile= profile)
@app.route('/about/')
def about():
"""Render the website's about page."""
return render_template('about.html')
###
# The functions below should be applicable to all Flask apps.
###
@app.route('/api/thumbnails/', methods=["GET"])
def thumbnailss():
url = "http://s5.photobucket.com/"
#url = request.args.get('url')
soup = BeautifulSoup.BeautifulSoup(requests.get(url).text)
images = BeautifulSoup.BeautifulSoup(requests.get(url).text).findAll("img")
imagelist = []
og_image = (soup.find('meta', property='og:image') or soup.find('meta', attrs={'name': 'og:image'}))
if og_image and og_image['content']:
imagelist.append(urlparse.urljoin(url, og_image['content']))
thumbnail_spec = soup.find('link', rel='image_src')
if thumbnail_spec and thumbnail_spec['href']:
imagelist.append(urlparse.urljoin(url, thumbnail_spec['href']))
for img in images:
if "sprite" not in img["src"]:
imagelist.append(urlparse.urljoin(url, img['src']))
if(len(imagelist)>0):
response = jsonify({'error':'null', "data":{"thumbnails":imagelist},"message":"Success"})
else:
response = jsonify({'error':'1','data':{},'message':'Unable to extract thumbnails'})
return response
@app.route('/<file_name>.txt')
def send_text_file(file_name):
"""Send your static text file."""
file_dot_text = file_name + '.txt'
return app.send_static_file(file_dot_text)
@app.after_request
def add_header(response):
"""
Add headers to both force latest IE rendering engine or Chrome Frame,
and also to cache the rendered page for 10 minutes.
"""
response.headers['X-UA-Compatible'] = 'IE=Edge,chrome=1'
response.headers['Cache-Control'] = 'public, max-age=0'
return response
@app.errorhandler(404)
def page_not_found(error):
"""Custom 404 page."""
return render_template('404.html'), 404
if __name__ == '__main__':
app.run()
|
[
"javier.mac13@gmail.com"
] |
javier.mac13@gmail.com
|
8be98d807ab85f325cfa81d857d065d110ecb869
|
aa565bd7705c82cfb55d312ed4e2ce48430a1baf
|
/PythonMaintenanceTools/PythonMaintenanceTools/LogHandle.py
|
b8b5faf110e8553f5afe0ccce7403c5d0610c03e
|
[] |
no_license
|
SealOfFire/PythonMaintenanceTools
|
85da9664a0bcfe143e1d285a4bac207d222f921b
|
b7166de97fc9b1ba54c02f835d823aa6307d3c85
|
refs/heads/master
| 2020-01-28T04:23:06.046660
| 2016-05-18T01:21:24
| 2016-05-18T01:21:24
| 57,197,998
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,157
|
py
|
#coding:utf-8
import logging
import logging.handlers
class Logger():
"""
日志处理
"""
fileName = "maintenace.log"
fmt = '[%(asctime)s] - [%(levelname)s] - %(message)s'
logging.basicConfig(level=logging.NOTSET, format=fmt)
filehandler = logging.handlers.TimedRotatingFileHandler(fileName, when="M", interval=1, backupCount=0, encoding=None, delay=False, utc=False)
filehandler.suffix = "%Y%m%d.log"
filehandler.setFormatter(logging.Formatter(fmt))
logging.getLogger('').addHandler(filehandler)
logger = logging.getLogger()
logger.debug("log initialize finish")
'''
@classmethod
def __init__(self):
logger = logging.getLogger('web');
logger.debug("日志初始化完成")
'''
@classmethod
def msg(self, msg):
return "%s" % (msg)
@classmethod
def info(self, msg):
self.logger.info(Logger.msg(msg))
@classmethod
def debug(self, msg):
self.logger.debug(Logger.msg(msg))
@classmethod
def critical(self, msg):
self.logger.critical(Logger.msg(msg))
@classmethod
def error(self, msg):
self.logger.error(Logger.msg(msg))
@classmethod
def warning(self, msg):
self.logger.warning(Logger.msg(msg))
|
[
"sealoffire@hotmail.com"
] |
sealoffire@hotmail.com
|
27df235be75c7acc5609472abca7e701f25ae43b
|
af86911c54081303beac522608809e49407d3425
|
/accounts/api/urls.py
|
275de0d9bc7c23697bc826fb0ca47332d6b91550
|
[] |
no_license
|
s6eskand/hackthe6ix-live-demo
|
0366f66b842d2f4c96479572d472bcc6731e9c82
|
f203a81348024b8c7c8768fee61ed64fb35746c0
|
refs/heads/master
| 2022-12-05T08:59:28.313552
| 2020-08-22T00:37:46
| 2020-08-22T00:37:46
| 288,350,315
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 393
|
py
|
from django.urls import path, include
from .views import (
RegisterAPI,
LoginAPI,
UserAPI
)
from knox import views as knox_views
urlpatterns = [
path('', include('knox.urls')),
path('/register', RegisterAPI.as_view()),
path('/login', LoginAPI.as_view()),
path('/user', UserAPI.as_view()),
path('/logout', knox_views.LogoutView.as_view(), name="knox_logout"),
]
|
[
"sam.eskandar00@hotmail.com"
] |
sam.eskandar00@hotmail.com
|
5933c43ca08b198b28a8c7ecf6365fd2d86af56a
|
f90a6f4caa7484e763e48b8f0688d9a7dbbfc325
|
/utils/reflect_pad2d.py
|
40a501ceed97a0d37458a0bb534a828d3bbdfecb
|
[
"MIT"
] |
permissive
|
zhangguobin/fast-neural-style
|
77309a6e145a3847a138485c3e5a259c02fb699d
|
eefdcc3b6ba5e92bbad24cd20999c71dde4df072
|
refs/heads/master
| 2020-05-03T03:01:05.859365
| 2019-05-19T11:23:45
| 2019-05-19T11:23:45
| 178,386,501
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 210
|
py
|
import tensorflow as tf
def reflect_pad2d(x, H0=40, H1=40, W0=40, W1=40):
'NHWC format is expected for x'
paddings = tf.constant([[0,0],[H0,H1],[W0,W1],[0,0]])
return tf.pad(x, paddings, "REFLECT")
|
[
"zzggbb@gmail.com"
] |
zzggbb@gmail.com
|
a23e390e78c1834478f7d5451229c4f42b862a44
|
c5a379c8aeb60e77630906a812a5e530ec42eff7
|
/researchpy/correlation.py
|
a013ef37999e3bdf62b49c0a16cefa877c704142
|
[
"MIT"
] |
permissive
|
TheBiggerGuy/researchpy
|
63f3ed334042285aeeca22085ced65e4a7ef33bb
|
c94025c160824897cee39d74b66ed01cc68bde4e
|
refs/heads/master
| 2020-12-07T04:35:39.314655
| 2019-12-03T16:54:27
| 2019-12-03T16:54:27
| 232,633,158
| 0
| 0
|
NOASSERTION
| 2020-01-08T18:39:46
| 2020-01-08T18:39:45
| null |
UTF-8
|
Python
| false
| false
| 3,047
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 6 12:18:47 2018
@author: bryantcm
"""
import pandas
import numpy
import scipy.stats
import itertools
def corr_case(dataframe, method = None):
df = dataframe.dropna(how = 'any')._get_numeric_data()
dfcols = pandas.DataFrame(columns= df.columns)
# Getting r an p value dataframes ready
r_vals = dfcols.transpose().join(dfcols, how = 'outer')
p_vals = dfcols.transpose().join(dfcols, how = 'outer')
length = str(len(df))
# Setting test
if method == None:
test = scipy.stats.pearsonr
test_name = "Pearson"
elif method == "spearman":
test = scipy.stats.spearmanr
test_name = "Spearman Rank"
elif method == "kendall":
test = scipy. stats.kendalltau
test_name = "Kendall's Tau-b"
# Rounding values for the r and p value dataframes
for r in df.columns:
for c in df.columns:
r_vals[r][c] = round(test(df[r], df[c])[0], 4)
for r in df.columns:
for c in df.columns:
p_vals[r][c] = format(test(df[r], df[c])[1], '.4f')
# Getting the testing information dataframe ready
info = pandas.DataFrame(numpy.zeros(shape= (1,1)),
columns = [f"{test_name} correlation test using list-wise deletion"])
info.iloc[0,0] = f"Total observations used = {length}"
return info, r_vals, p_vals
def corr_pair(dataframe, method= None):
df = dataframe
correlations = {}
pvalues = {}
length = {}
columns = df.columns.tolist()
# Setting test
if method == None:
test = scipy.stats.pearsonr
test_name = "Pearson"
elif method == "spearman":
test = scipy.stats.spearmanr
test_name = "Spearman Rank"
elif method == "kendall":
test = scipy.stats.kendalltau
test_name = "Kendall's Tau-b"
# Iterrating through the Pandas series and performing the correlation
# analysis
for col1, col2 in itertools.combinations(columns, 2):
sub = df[[col1,col2]].dropna(how= "any")
correlations[col1 + " " + "&" + " " + col2] = format(test(sub.loc[:, col1], sub.loc[:, col2])[0], '.4f')
pvalues[col1 + " " + "&" + " " + col2] = format(test(sub.loc[:, col1], sub.loc[:, col2])[1], '.4f')
length[col1 + " " + "&" + " " + col2] = len(df[[col1,col2]].dropna(how= "any"))
corrs = pandas.DataFrame.from_dict(correlations, orient= "index")
corrs.columns = ["r value"]
pvals = pandas.DataFrame.from_dict(pvalues, orient= "index")
pvals.columns = ["p-value"]
l = pandas.DataFrame.from_dict(length, orient= "index")
l.columns = ["N"]
results = corrs.join([pvals,l])
return results
|
[
"noreply@github.com"
] |
TheBiggerGuy.noreply@github.com
|
322f7fe23c47903d990d42ba2d7b714ea4796660
|
b7543edb78cf1856daaca2fdfaf6f7737bb31f78
|
/SpMarket/apps/sp_goods/urls.py
|
dcc300d2046c0b55f1f0f70d44a344d454361076
|
[] |
no_license
|
wxl-yin/supermarket
|
fdb19644957008defd0d41c772f77dbe4829326a
|
19de5d276d3078aa1bca9390adb5b9a34dafeebd
|
refs/heads/master
| 2022-12-02T14:32:03.450613
| 2020-07-07T01:21:11
| 2020-07-07T01:21:11
| 158,045,995
| 0
| 0
| null | 2022-11-22T03:07:48
| 2018-11-18T03:20:40
|
CSS
|
UTF-8
|
Python
| false
| false
| 278
|
py
|
from django.conf.urls import url
from sp_goods.views import index, category, detail
urlpatterns = [
url(r'^$', index, name="首页"),
url(r'^category/(?P<cate_id>\d+)/(?P<order>\d)/$', category, name="分类"),
url(r'^(?P<id>\d+).html$', detail, name="详情"),
]
|
[
"yqcdwg@163.com"
] |
yqcdwg@163.com
|
c7958d614ef9d332d0006bf9408df7df41ebffa3
|
25cf15f81982348cdee729baa5c6c8ca19ab4506
|
/location/migrations/0004_auto_20180816_2307.py
|
bcdd0795f597c0d245bead09c38ac747fa52ac78
|
[] |
no_license
|
HarunColic/ZizaRepo
|
ca962f42cbb3a521e3121174d6bf615187dfb67c
|
79cd051b88a39d678abd8aa329fd7cfdca40cb42
|
refs/heads/master
| 2020-03-26T15:17:32.182469
| 2020-03-03T12:00:46
| 2020-03-03T12:00:46
| 145,034,327
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 519
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-08-16 23:07
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('location', '0003_auto_20180816_2306'),
]
operations = [
migrations.AlterField(
model_name='city',
name='created_at',
field=models.DateTimeField(default=datetime.datetime(2018, 8, 16, 23, 7, 50, 345866)),
),
]
|
[
"haruncolic@hotmail.com"
] |
haruncolic@hotmail.com
|
b5160583d7d4b9d481df3e07c3c3a83f75d3c46d
|
201cfbb7d5bb7c554b74f07165ff8ec628bed388
|
/Rubiks.py
|
3209db18e3e32b4c9891f4f3c4e36f446ec986a1
|
[] |
no_license
|
alswitz/Python-Projects
|
001a10a818254c89bb72e14b0fd1e7f6de1a2d8f
|
c0ee7cb635d6edfea51da835b6fa44e5e2cfce26
|
refs/heads/master
| 2021-01-11T19:59:54.969480
| 2017-04-30T20:30:58
| 2017-04-30T20:30:58
| 79,442,355
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,286
|
py
|
def main():
#Initialize our variables for usage.
cubeHead = []
squareMast = []
advTwister = []
intTurner = []
avgMover = []
pathetic = []
#Temporary array for in loop manipulation
file = open("timings.txt", "r")
for line in file:
temp = line.split(",")
name = temp[0].rstrip("\n")
num = float(temp[1].rstrip("\n")) #Strip the valuable components for comparison purposes.
if num < 9.99:
cubeHead.append(name)
elif num == 10 or num < 19.99:
squareMast.append(name)
elif num == 20 or num < 29.99:
advTwister.append(name)
elif num == 30 or num < 39.99:
intTurner.append(name)
elif num == 40 or num < 59.99:
avgMover.append(name)
else:
pathetic.append(name)
print("--- OFFICIAL RANKINGS --- ")
print ("\nCube Head (0-9.99): ")
print cubeHead
print ("\nSquare Master(10-19.99): ")
print squareMast
print ("\nAdvanced Twister(20-29.99): ")
print advTwister
print ("\nIntermediate Turner(30-39.99): ")
print intTurner
print ("\nAverage Mover(40-59.99)")
print avgMover
print ("\nPathetic(60+)")
print pathetic
blah = raw_input("\nMission complete. ")
main()
|
[
"alexanderswitz09@gmail.com"
] |
alexanderswitz09@gmail.com
|
f2acdc260cfc71e9805b9f71e021ffb0e8ad4cfe
|
e4959d5a980f5597589175e956bcef0fad350e09
|
/check_wind.py
|
28d7e9347ebce48697346ed6b5f63c183e61cdaa
|
[] |
no_license
|
mbanders/simple_wind_monitor
|
75bb4a7cc1f6de7e1d5e71a1c847d81a8e76fbbf
|
e591eb27b376c2f753312628597800b35ab46165
|
refs/heads/master
| 2021-07-19T12:29:44.660099
| 2017-10-27T12:57:47
| 2017-10-27T12:58:08
| 108,546,074
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 967
|
py
|
#!/usr/bin/env python3
import datetime
import re
import requests
airport = 'KAUS'
metar_base = 'https://aviationweather.gov/adds/dataserver_current/httpparam?'
metar_params = 'dataSource=metars&requestType=retrieve&format=xml&hoursBeforeNow=3&mostRecent=true&stationString=%s' % airport
metar_url = metar_base + metar_params
raw = requests.get(metar_url).text
wind_kt = re.search('<wind_speed_kt>(.*)</wind_speed_kt>', raw)
if wind_kt is not None:
wind_mph = round(1.15*float(wind_kt.group(1)), 1)
darksky_base = 'https://darksky.net/forecast/'
darksky_params = '30.1449,-97.6708/us12/en'
darksky_url = darksky_base + darksky_params
raw = requests.get(darksky_url).text
result = re.search(r'Wind.*?num swip\">(\d+)</span', raw, flags=re.S|re.M)
if result is not None:
darksky_wind_mph = round(float(result.group(1)), 1)
timestamp_str = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
print('%s, %s, %s' % (timestamp_str, wind_mph, darksky_wind_mph))
|
[
"mbanderson@uwalumni.com"
] |
mbanderson@uwalumni.com
|
bbb3ec74512af6739fe39b1066189c187c8bdcb5
|
e108d14f6a57b549c906dbaf62d6d08df8f0d561
|
/manage.py
|
87ded52f8e86cc8fb7e35c11e335aba921a4881b
|
[] |
no_license
|
thraxil/pixelvore
|
4305e4b9602f0b74ff707f240eed5c3f74b6164c
|
0f3e97759a10edc318b0d25927100a58f5bdb525
|
refs/heads/master
| 2021-11-16T23:35:34.914895
| 2021-11-12T00:04:36
| 2021-11-12T08:29:15
| 1,564,337
| 0
| 0
| null | 2021-11-15T12:55:15
| 2011-04-03T20:53:33
|
Python
|
UTF-8
|
Python
| false
| false
| 241
|
py
|
#!ve/bin/python
import os, sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pixelvore.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
[
"anders@columbia.edu"
] |
anders@columbia.edu
|
f178a3c4790dcb0e5d6b7eb52add398b1af18979
|
d931d1e0c14083fe23616e583dde03fde44892cc
|
/AtCoder_Beginner_Contest_156/A - Beginner.py
|
11d45ff1d2bf99a43be156f468439fc2dd47f1af
|
[] |
no_license
|
l3ickey/AtCoder
|
52379393299881bae3cdd9c0026a230068ad8b20
|
2740799ae5bfe08c31e1c735b127b82e9dd79b6a
|
refs/heads/master
| 2023-01-30T13:14:32.555692
| 2020-12-20T14:13:57
| 2020-12-20T14:13:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 82
|
py
|
n, r = map(int, input().split())
if (n < 10):
r = r + (100 * (10 - n))
print(r)
|
[
"47290651+FunabikiKeisuke@users.noreply.github.com"
] |
47290651+FunabikiKeisuke@users.noreply.github.com
|
e3555ac5b8855b7a4e5dd7b65d7f65672f1e3c45
|
55f4b7ae0cb97a71fe06e3231bd9f48e5383aa39
|
/exe03.py
|
d5a7de4432ca51aa0c6c0fed4c28638ab61c89bd
|
[] |
no_license
|
andreplacet/reiforcement-python-tasks-2
|
fc26d760a3307005d177c27e4e4fe60974b9009c
|
8aab66cd4e8396e3a603d2d96056bf755399a9aa
|
refs/heads/master
| 2022-12-25T12:31:09.457195
| 2020-10-14T23:32:57
| 2020-10-14T23:32:57
| 304,159,647
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 221
|
py
|
#Exercicio 3
sexo = str(input('Informe seu sexo: [M]asculino / [F]eminino: ')).upper()
if sexo[0] == 'M':
print('Sexo masculino!')
elif sexo[0] == 'F':
print('Sexo Feminino!')
else:
print('Sexo Invalido! ')
|
[
"andreplacet@gmail.com"
] |
andreplacet@gmail.com
|
4e4a1e7c3eb6ea886988f377c59444a994c60b96
|
0b727372ceadf0449f6b166f4d531cc4eb75f80f
|
/polls/models.py
|
0fff0f22c15cd7e2c4a1926fe0f7f9ab7ca02957
|
[] |
no_license
|
JHP-tido/DSIpract3Django
|
47bb916e63cacbb4e50dc3be6008f932bf098a0c
|
954b951ced2fe7102b5ae64366c4402d866578c4
|
refs/heads/master
| 2021-01-16T18:42:16.410334
| 2013-03-06T15:00:43
| 2013-03-06T15:00:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 529
|
py
|
from django.db import models
import datetime
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __unicode__(self):
return self.question
def was_published_today(self):
return self.pub_date.date() == datetime.date.today()
class Choice(models.Model):
poll = models.ForeignKey(Poll)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
def __unicode__(self):
return self.choice
# Create your models here.
|
[
"JHP-tido@hotmail.com"
] |
JHP-tido@hotmail.com
|
f77ec20f284ab8e85d2d93d6ff0fc544a72a79fd
|
0d6dfdb80b91c2100e2a5b24855f04daf8203811
|
/sentiment_rnn.py
|
a58323dad5927cddef368b3face6c7b264dc4de8
|
[] |
no_license
|
holdenlee/treeflow
|
21785a322e0a49e7eb9c70986d68d5ba805e11aa
|
e74a1f64f7920c2fa05d7135916698c32c064421
|
refs/heads/master
| 2021-01-11T09:13:03.369907
| 2016-12-24T19:09:42
| 2016-12-24T19:09:42
| 77,258,929
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 696
|
py
|
import tensorflow as tf
import numpy as np
from treeflow import treeflow, treeflow_unfold
#treeflow(fs, child_inds, leaves_inds, leaves, node_inputs = [], has_outputs = False, is_list_fn = False, def_size=10, ans_type=tf.float32, output_type=tf.float32, degree=2)
"""
* `T` is tensor
"""
def rtn(T, b, f, child_inds, leaves_inds, leaves, *node_inputs):
def tcombine(x1, x2, *inps):
#https://www.tensorflow.org/api_docs/python/math_ops/matrix_math_functions#matmul
y = tf.tanh(tf.add(tf.matmul(tf.matmul(tf.transpose(x1), T), x2), b))
o = f(y, *inp)
return (y, o)
return treeflow(tcombine, child_inds, leaves_inds, leaves, *node_inputs)
|
[
"holdenl@princeton.edu"
] |
holdenl@princeton.edu
|
45f64ab70cffaf28e04ee0ad75e2af391b7b7df4
|
bcb231b3f162e4d2d97569189dd241e74ad8dd73
|
/children/views.py
|
ccf28279ae4b572d22d905aace97f3ce55c2fe1c
|
[] |
no_license
|
Code-Institute-Submissions/ngo-project
|
bd2cdc2fdde8671939de733492dc7af2d241449f
|
118fde025b6d0b5e055637cb116229f4e286062f
|
refs/heads/master
| 2020-09-11T02:51:15.500692
| 2019-11-15T11:40:03
| 2019-11-15T11:40:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 608
|
py
|
from django.shortcuts import render
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from .models import Child
# Create your views here.
def all_children(request):
    """Render a paginated listing of every Child record, six per page."""
    page_number = request.GET.get("page")
    paginator = Paginator(Child.objects.all(), 6)
    try:
        page_of_children = paginator.page(page_number)
    except PageNotAnInteger:
        # Missing or non-numeric ?page= parameter: show the first page.
        page_of_children = paginator.page(1)
    except EmptyPage:
        # Out-of-range page number: clamp to the last available page.
        page_of_children = paginator.page(paginator.num_pages)
    return render(request, "children.html", {"children": page_of_children})
|
[
"ubuntu@ip-172-31-35-122.ec2.internal"
] |
ubuntu@ip-172-31-35-122.ec2.internal
|
dfe4ee355c3664deef382bd69afcb51e20d4fdc9
|
e74b8138a4914d52abda6478d0e6001a62a1d050
|
/Archived bots/ChallengerBot_v7.py
|
7396579c80dd07e8b88141e781062b39f5b44d8e
|
[] |
no_license
|
adrienball/halite
|
69cfee7b9e9093421a966b7d60c7481fdee9e4f1
|
0662c9a438a796693c59bd2ac3d4194f3b966f5d
|
refs/heads/master
| 2021-03-24T09:31:17.308424
| 2017-02-18T17:50:54
| 2017-02-18T17:50:54
| 78,957,826
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,293
|
py
|
import hlt
from hlt import NORTH, EAST, SOUTH, WEST, STILL, Move, Square
import math
# Handshake with the game engine: receive our player id and the initial map.
myID, game_map = hlt.get_init()
# Highest production on the map, used to normalise square opportunity scores.
productions = [square.production for square in game_map]
max_production = max(productions)
hlt.send_init("ChallengerBot_v7")
def distance_to_enemy(square, direction):
    """Count friendly squares in `direction` before non-owned territory.

    The walk is capped at half the smaller map dimension so the frontier
    search never wraps all the way around the torus.
    """
    cap = min(game_map.width, game_map.height) / 2
    steps = 0
    probe = square
    while probe.owner == myID and steps < cap:
        steps += 1
        probe = game_map.get_target(probe, direction)
    return steps
def assign_move(square):
    """Choose a move for one owned square: best-opportunity direction,
    falling back to STILL when the best move is not yet winnable."""
    if square.strength == 0:
        # Zero-strength pieces cannot capture anything; wait and grow.
        return Move(square, STILL)
    # max() returns the first direction achieving the best score, matching
    # the original stable descending sort's tie-breaking order.
    best_direction = max(
        (NORTH, EAST, SOUTH, WEST, STILL),
        key=lambda d: direction_opportunity(square, d),
    )
    destination = game_map.get_target(square, best_direction)
    if is_move_possible(square, destination):
        return Move(square, best_direction)
    return Move(square, STILL)
def is_move_possible(source_square, target_square):
    """A move is allowed onto our own territory, or onto any square the
    source is at least strong enough to capture."""
    if target_square.owner == myID:
        return True
    return source_square.strength >= target_square.strength
def direction_opportunity(square, direction):
    """Dispatch to the staying-put or moving opportunity heuristic."""
    return (still_opportunity(square) if direction == STILL
            else move_opportunity(square, direction))
# TODO: improve opportunity definition by taking into account distance to frontier
def still_opportunity(square):
    """Value of staying put: the square's own opportunity discounted by how
    deep inside our territory it sits (deep squares gain little by waiting)."""
    frontier_gap = max(distance_to_enemy(square, d)
                       for d in (NORTH, EAST, SOUTH, WEST))
    half_span = min(game_map.width, game_map.height) / 2
    return (square_opportunity(square.strength, square.production)
            * (1 - frontier_gap / half_span))
def square_opportunity(strength, production):
    """Heuristic value of a square: normalised production per unit of
    normalised strength (both shifted by one to avoid division by zero).

    Two earlier formulas (1 - s/p and (1 - s) * p) were computed and
    discarded in the original; only the ratio variant was ever returned,
    so the dead computations are removed here.
    """
    normalized_strength = (strength + 1) / 256
    normalized_production = (production + 1) / (max_production + 1)
    return normalized_production / normalized_strength
def move_opportunity(square, direction):
    """Value of moving in `direction`: an exponentially-decayed sum of the
    opportunity of each non-owned square along that ray, discounted by how
    far away the frontier is.

    NOTE(review): `total_weight` accumulates over ALL squares scanned (the
    full map width), not only the non-owned ones — presumably intentional
    averaging; confirm before changing.
    """
    # TODO: this can be simplified by computing the closed form of the sums
    distance = distance_to_enemy(square, direction)
    opportunity = 0
    current_weight = 1
    # Geometric decay per step: e^(-1/2) per square travelled.
    decay_factor = math.exp(-1 / 2.0)
    total_weight = 0
    current_square = square
    for i in range(game_map.width):
        neighbor = game_map.get_target(current_square, direction)
        if neighbor.owner != myID:
            opportunity += square_opportunity(neighbor.strength, neighbor.production) * current_weight
        current_square = neighbor
        total_weight += current_weight
        current_weight *= decay_factor
    return (opportunity / total_weight) * (
        1 - distance / (min(game_map.width, game_map.height) / 2))
# Main game loop: each frame, read the map and emit one move per owned square.
while True:
    game_map.get_frame()
    moves = [assign_move(square) for square in game_map if square.owner == myID]
    hlt.send_frame(moves)
|
[
"adrien.ball@snips.net"
] |
adrien.ball@snips.net
|
63f823588430813437dab8781e59e8148e6c5d7c
|
7b7a1221e9b472c690313186408a6a56ee94f97a
|
/public/static/svgedit/extras/update-langs.py
|
26d883905b41144f7dab17b9a978c7b84050aa46
|
[
"MIT"
] |
permissive
|
wanyanyan/judy
|
9e6ef7f9ac2ba434e04816b233807561216dead2
|
553cfe388477fcd4b561caf621afb837ab40c814
|
refs/heads/master
| 2020-06-25T11:46:12.499258
| 2017-01-11T03:22:53
| 2017-01-11T03:22:53
| 74,532,314
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,421
|
py
|
#!/usr/bin/env python
# -*- coding: iso-8859-15 -*-
"""
This is a helper script for the svg-edit project, useful for managing
all the language files
Licensed under the MIT License as is the rest of the project
Requires Python 2.6
Copyright (c) 2010 Jeff Schiller
"""
import os
import json
from types import DictType
def changeTooltipTarget(j):
    """
    Moves the tooltip target for some tools.

    Appends '_tool' to each entry whose id is listed in `tools`, so the
    tooltip attaches to the tool wrapper element rather than the input.
    Entries without an 'id' key are left untouched.
    """
    tools = ['rect_width', 'rect_height']
    for row in j:
        try:
            # Renamed from `id`, which shadowed the builtin; the try block
            # now covers only the lookup that can actually raise KeyError.
            row_id = row['id']
        except KeyError:
            continue
        if row_id in tools:
            row['id'] = row_id + '_tool'
def updateMainMenu(j):
    """
    Converts title into textContent for items in the main menu.
    """
    menu_ids = ['tool_clear', 'tool_open', 'tool_save', 'tool_docprops']
    for entry in j:
        try:
            if entry['id'] in menu_ids:
                # Rename the 'title' key to 'textContent' in place.
                entry[u'textContent'] = entry.pop('title')
        except KeyError:
            # No 'id' (or no 'title') on this entry: skip it silently.
            pass
def ourPrettyPrint(j):
    """
    Outputs a string representation of the JSON object passed in
    formatted properly for our lang.XX.js files.

    Rows with an 'id' are emitted one per line; the single dict without an
    'id' (the js_strings table) is emitted last, indented.
    NOTE: this file targets Python 2 — sorting a list of mixed dicts with
    j.sort() relies on Python 2 comparison semantics.
    """
    s = '[' + os.linesep
    js_strings = None
    j.sort()
    for row in j:
        try:
            # Probe for 'id' only; rows lacking it fall into the except arm.
            ids = row['id']
            row_string = json.dumps(row, sort_keys=True, ensure_ascii=False)
            s += row_string + ',' + os.linesep
        except KeyError:
            # The id-less dict is the js_strings block; keep the last one seen.
            if type(row) is DictType:
                js_strings = row
    s += json.dumps(js_strings, sort_keys=True, ensure_ascii=False, indent=1) + os.linesep
    s += ']'
    return s
def processFile(filename):
    """
    Loads the given lang.XX.js file, processes it and saves it
    back to the file system.

    Invalid JSON is reported but does not abort the batch run.
    (Python 2 print statements — this script requires Python 2.)
    """
    in_string = open('../editor/locale/' + filename, 'r').read()
    try:
        j = json.loads(in_string)
        # process the JSON object here
        changeTooltipTarget(j)
        # now write it out back to the file
        s = ourPrettyPrint(j).encode("UTF-8")
        open('../editor/locale/' + filename, 'w').write(s)
        print "Updated " + filename
    except ValueError:
        print "ERROR! " + filename + " was not valid JSON, please fix it!"
if __name__ == '__main__':
    # get list of all lang files and process them
    # (files are recognised purely by the "lang" filename prefix)
    for file_name in os.listdir('../editor/locale/'):
        if file_name[:4] == "lang":
            processFile(file_name)
|
[
"wanyanya@126.com"
] |
wanyanya@126.com
|
dba414ab17db1a2b279188a1337ffecac5c63517
|
94446072f4097b95f0a771748b9b5be308bda58b
|
/测试函数/test_name_function.py
|
4f42b1564bd42683deaecd6e03240f736ab9863c
|
[] |
no_license
|
lj1064201288/Python-Notebook
|
55eb527ffc2a9fa65aee49fb10b571f0526e5113
|
03745ebb536157f4b747c2d96d951a281b9461cf
|
refs/heads/master
| 2020-03-26T03:54:06.945888
| 2018-08-25T01:51:40
| 2018-08-25T01:51:40
| 144,475,137
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 646
|
py
|
import unittest
from name_function import get_formatted_name
class NamesTestCase(unittest.TestCase):
    """Tests for name_function.py."""

    def test_first_last_name(self):
        """Can two-part names like 'Janis Joplin' be formatted correctly?"""
        self.assertEqual(get_formatted_name('janis', 'joplin'),
                         'Janis Joplin')

    def test_first_last_middle_name(self):
        """Can three-part names like 'Wolfgang Amadeus Mozart' be formatted correctly?"""
        self.assertEqual(get_formatted_name('wofgang', 'mozart', 'amadeus'),
                         'Wofgang Amadeus Mozart')
unittest.main()
|
[
"1064201288@qq.com"
] |
1064201288@qq.com
|
8e023b8d712aa0d3d50dd30d21d4abfe5b71e3bf
|
040c49dd116ad6ff69a61a60c9fc2dc3d17da6d0
|
/guiFrame.py
|
9a87810f88306c919a7e796bbf271fff6bdd90ee
|
[] |
no_license
|
raulsanika015/python
|
0dba12d62a32b7cbcd1c50c018a6ab4f56cf9472
|
cf2e73192e180efe3357cbc5477b9abe1469f818
|
refs/heads/master
| 2020-08-13T10:43:57.942184
| 2019-10-14T13:38:45
| 2019-10-14T13:38:45
| 214,956,298
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 524
|
py
|
from tkinter import *

# Demo window: a top frame holding three left-packed colour buttons and a
# bottom frame holding a single black button.
root = Tk()

top_frame = Frame(root)
top_frame.pack()

bottom_frame = Frame(root)
bottom_frame.pack(side=BOTTOM)

# Colour buttons, packed left-to-right in the top frame.
for label, colour in (("Red", "red"), ("Brown", "brown"), ("Blue", "blue")):
    Button(top_frame, text=label, fg=colour).pack(side=LEFT)

Button(bottom_frame, text="Black", fg="black").pack(side=BOTTOM)

root.mainloop()
|
[
"noreply@github.com"
] |
raulsanika015.noreply@github.com
|
c9b819700f9d15af24deae52ad543be49d1c99e5
|
21ba6d42f7f26779322a149d0af25f3acbb07682
|
/Repository/Price Prediction/prediction-compare.py
|
2531d8309b4e5894b92d4b343b2a699ca79da1ae
|
[] |
no_license
|
Bolanle/G54MIP
|
6f1fa37b1a9da0477c3b22c9f957cbaf2249b764
|
02913adc86088bbbdab23c6d508a7c91bcb0d110
|
refs/heads/master
| 2021-01-18T16:41:17.783175
| 2015-05-07T10:31:19
| 2015-05-07T10:31:19
| 24,644,928
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,688
|
py
|
from collections import Counter
__author__ = 'Ester'
import os
import warnings
import pandas
import numpy as np
from sklearn.svm import SVC, LinearSVC
from sklearn.metrics import f1_score, accuracy_score, recall_score, precision_score
from sklearn.preprocessing import MinMaxScaler
from svm_hmm import SVMHMM
from sklearn.hmm import GMMHMM
from sklearn.decomposition import PCA
import matplotlib.pyplot as plot
from sklearn import cross_validation, metrics
from operator import mul
from functools import reduce
warnings.filterwarnings("ignore", category=DeprecationWarning)
def get_stock_data():
    """Load every per-company CSV from the three data directories.

    Returns a dict mapping company name -> tuple of DataFrames in a fixed
    order: (technical-indicator data, sentiment data, sentiment-SVM data),
    because the directories are scanned in that order.

    The original had the same loop copy-pasted three times; this is the
    deduplicated version.  A company missing from an earlier directory now
    simply gets a shorter tuple instead of raising KeyError.
    """
    companies_data = dict()
    for rel_path in ("SVM-data", "Sentiment Data", "SVM-Sent Data"):
        for filename in os.listdir(rel_path):
            if ".csv" not in filename:
                continue
            company_name = filename.replace(".csv", "")
            company_file = pandas.read_csv(os.path.join(rel_path, filename))
            companies_data.setdefault(company_name, ())
            companies_data[company_name] += (company_file,)
    return companies_data
def convert_to_numpy_array(data, length):
    """Stack `data` rows into a 2-D array of `length` columns, or into a
    1-D array when length == 1.

    Each row may be a numpy array (converted via .tolist()) or any plain
    sequence/scalar.  A dummy zero row seeds the stack and is sliced off at
    the end, which also forces dtype promotion across all rows — this
    mirrors the original behaviour exactly for non-empty input.

    Fixed: the original called np.vstack once per row (O(n^2) copying);
    rows are now collected first and stacked once.  For empty `data` the
    original returned leftover dummy zeros; this version returns an empty
    array of the right column count.
    """
    if length != 1:
        rows = [[0] * length]  # dummy first row, dropped below
        for row in data:
            try:
                rows.append(row.tolist())
            except AttributeError:
                rows.append(row)
        stacked = np.vstack(rows)
    else:
        values = [0]  # dummy first element, dropped below
        for row in data:
            values.append(row)
        stacked = np.array(values)
    return stacked[1:, ]
def train_classifier(x_train, y_train, c, random_state):
    """Fit a linear, probability-enabled SVC on one training window and
    print its training-set score."""
    svc = SVC(C=c, class_weight="auto", kernel="linear",
              random_state=random_state, tol=0.00001, max_iter=-1,
              probability=True)
    svc.fit(x_train, y_train)
    print("Score:", svc.score(x_train, y_train), )  # end=", ")
    return svc
def get_transmat(y_train, index_transformer, count):
    """Estimate a row-stochastic transition matrix from a label sequence.

    `index_transformer` maps raw labels to row/column indices.  Transition
    counts are normalised per row with 2-decimal rounding; the final column
    then absorbs the rounding remainder so every row sums to exactly 1.
    In the binary case (count == 2) transitions into label 0 are skipped.
    """
    counts = np.zeros((count, count))
    for prev_label, next_label in zip(y_train[:-1], y_train[1:]):
        if next_label == 0 and count == 2:
            # Unlabelled target day in the binary case: ignore.
            continue
        counts[index_transformer[prev_label], index_transformer[next_label]] += 1
    transmat = np.zeros((count, count))
    row_totals = counts.sum(axis=1)
    for i in range(count):
        for j in range(count - 1):
            transmat[i, j] = round(counts[i, j] / row_totals[i], 2)
    # Last column = whatever is left, so each row sums to exactly 1.
    accumulated = transmat.sum(axis=1)
    for i in range(count):
        transmat[i, count - 1] = 1 - accumulated[i]
    return transmat
def build_report(x_data, y_labels, classifier, cross_val_iterator):
    """Cross-validate `classifier` and aggregate its evaluation scores.

    Returns a tuple of (summed confusion matrix, mean F1, mean precision,
    mean weighted recall, label support Counter, mean accuracy) across the
    iterator's folds.
    """
    summed_cm = np.zeros((2, 2))
    f1_total = precision_total = recall_total = accuracy_total = float()
    support = Counter(y_labels)
    for train, test in cross_val_iterator:
        x_train, x_test = x_data[train], x_data[test]
        y_train, y_test = y_labels[train], y_labels[test]
        y_pred = classifier.fit(x_train, y_train).predict(x_test)
        summed_cm += metrics.confusion_matrix(y_test, y_pred)
        f1_total += metrics.f1_score(y_test, y_pred)
        precision_total += metrics.precision_score(y_test, y_pred)
        recall_total += metrics.recall_score(y_test, y_pred, average='weighted')
        accuracy_total += metrics.accuracy_score(y_test, y_pred)
    folds = cross_val_iterator.n_folds
    return (summed_cm, f1_total / folds, precision_total / folds,
            recall_total / folds, support, accuracy_total / folds)
def return_charts(close, predictions, actual, should_draw_normal):
    """Plot cumulative returns of the raw market vs. the trading model.

    The model's per-day return is the market return made positive when the
    prediction was right and negative when it was wrong.  NOTE(review):
    `simple_returns_pre` is mutated in place inside the loop, so each
    cumulative product mixes adjusted past days with the current one — the
    ordering of these statements is load-bearing.
    When `should_draw_normal` is True the market line is drawn too and the
    figure is shown (used for the second, sentiment-based model).
    """
    # plot.plot(close, label="close")
    return_values_ft = [1]
    return_values_pre = [1]
    # calculate simple returns
    simple_returns = []
    simple_returns_pre = []
    for i in range(0, len(close) - 1):
        simple_returns_pre.append(((close[i + 1] - close[i]) / close[i]))
    simple_returns = [1 + rt for rt in simple_returns_pre]
    for i in range(0, len(predictions)):
        # Market return up to (and including) day i.
        current_return = reduce(mul, simple_returns[:i + 1])
        if predictions[i] == actual[i]:
            simple_returns_pre[i] = 1 + abs(simple_returns_pre[i])
        if predictions[i] != actual[i]:
            simple_returns_pre[i] = 1 - abs(simple_returns_pre[i])
        # Model return: product of the adjusted per-day factors.
        agg_return = reduce(mul, simple_returns_pre[:i + 1])
        return_values_ft.append(current_return)
        return_values_pre.append(agg_return)
    if should_draw_normal:
        plot.plot(return_values_ft, label="market return", linewidth=2.5, color="blueviolet")
        plot.plot(return_values_pre, '--', label="news-based model", linewidth=2.5, color="black")
    else:
        plot.plot(return_values_pre, '+', label="technical indicators model", linewidth=2.5, color="black")
    plot.xlabel("Trading Days")
    plot.ylabel("Return")
    plot.legend(loc="upper left")
    if should_draw_normal:
        plot.show()
def _rolling_predict(x_data, y_data, window, prediction_window, c_values):
    """Walk-forward prediction over 120 trading days.

    For each day, retrain an SVC on the preceding `window` days using that
    day's pre-tuned C value and keep the classifier's prediction for the
    day itself (the last element of the test-window predictions).
    """
    predictions = []
    for day, c in zip(range(window, window + 120), c_values):
        x_train = x_data[day - window: day]
        y_train = y_data[day - window: day]
        # Test slice ends on the day being predicted.
        x_test = x_data[day - prediction_window + 1: day + 1]
        classifier = train_classifier(x_train, y_train, c=c, random_state=100)
        predictions.append(classifier.predict(x_test)[-1])
    return predictions


def _print_metrics(actual, predictions):
    """Print F-measure, accuracy, recall and precision for the predictions."""
    print("F-Measure:", f1_score(actual, predictions))
    print("Accuracy:", accuracy_score(actual, predictions))
    print("Recall:", recall_score(actual, predictions))
    print("Precision: ", precision_score(actual, predictions))


def main():
    """Compare walk-forward price-direction prediction for one company:
    technical indicators alone versus indicators plus news sentiment.

    Prints per-window training scores and final metrics for each model and
    charts cumulative returns.  The original body duplicated the whole
    evaluation loop and metrics block verbatim; both halves now share the
    `_rolling_predict` / `_print_metrics` helpers, and dead locals
    (unused HMM inputs, transition matrices) plus the large commented-out
    SVMHMM experiment were removed.
    """
    companies_data = get_stock_data()
    company = "chevron"
    svm_file, sentiment_file, sent_svm = companies_data[company]
    print(company)
    print("*" * 100)
    window = 320            # training-window length in days
    prediction_window = 20  # test-slice length ending on the target day

    # ---------- Model 1: technical indicators only ----------
    x_data = convert_to_numpy_array(svm_file.values[20:, 2:14], 12)[229:, :]
    y_data = convert_to_numpy_array(svm_file.values[20:, 14], 1)[229:]
    scalar = MinMaxScaler(feature_range=(-1, 1), copy=False)
    x_data = scalar.fit_transform(x_data)
    x_data = PCA(n_components=6).fit_transform(x_data, y_data)
    actual = list(y_data[window: window + 120].flatten())
    # Pre-tuned per-day C values for this company.
    c_values = [49, 37, 11, 33, 11, 7, 10, 4, 3, 5, 8, 1, 3, 90, 7, 5, 2, 2, 2, 1, 5, 3, 6, 5, 4, 10, 3, 19, 4, 19, 34,
                4, 40, 1, 4, 1, 1, 4, 7, 9, 23, 15, 22, 13, 58, 1, 2, 1, 3, 3, 4, 1, 1, 1, 3, 4, 4, 10, 15, 3, 1, 2, 6,
                1, 8, 1, 11, 1, 1, 4, 1, 1, 1, 1, 1, 1, 1, 8, 1, 2, 1, 4, 16, 4, 1, 1, 33, 54, 1, 1, 1, 1, 1, 1, 1, 1,
                1, 1, 1, 2, 61, 9, 7, 10, 20, 3, 1, 2, 2, 3, 8, 3, 3, 55, 10, 12, 6, 8, 3, 2]
    predictions = _rolling_predict(x_data, y_data, window, prediction_window, c_values)
    _print_metrics(actual, predictions)
    return_charts(convert_to_numpy_array(svm_file.values[20:, 1], 1)[-121:],
                  predictions, actual, False)

    # ---------- Model 2: technical indicators + news sentiment ----------
    sentiment_data = convert_to_numpy_array(sentiment_file.values, 2)
    # Sentiment coverage starts later than the price data: pad the gap with
    # neutral (0, 0) rows so the two series align row-for-row.
    data_difference = (svm_file.shape[0] - sentiment_data.shape[0] - 20)
    prepend_no_sentiment_data = np.array([[0, 0]] * data_difference)
    sentiment_data = np.vstack((prepend_no_sentiment_data, sentiment_data))
    # The last 120 days use the dedicated sentiment-SVM scores instead.
    sentiment_data[-120:] = convert_to_numpy_array(sent_svm.values, 2)
    x_data = convert_to_numpy_array(svm_file.values[20:, 2:14], 12)
    x_data = np.column_stack((x_data[229:, :], sentiment_data[229:, :]))
    y_data = convert_to_numpy_array(svm_file.values[20:, 14], 1)[229:]
    scalar = MinMaxScaler(feature_range=(-1, 1), copy=False)
    x_data = scalar.fit_transform(x_data)
    x_data = PCA(n_components=6).fit_transform(x_data, y_data)
    actual = list(y_data[window: window + 120].flatten())
    c_values = [26, 46, 23, 84, 72, 28, 76, 4, 35, 20, 13, 11, 3, 58, 68, 10, 33, 29, 12, 8, 6, 71, 7, 46, 10, 6, 90,
                55, 17, 47, 56, 11, 31, 67, 6, 3, 1, 1, 25, 1, 2, 7, 3, 3, 3, 2, 1, 2, 2, 4, 3, 47, 8, 64, 4, 32, 2, 33,
                2, 2, 2, 7, 2, 85, 94, 68, 2, 2, 1, 2, 2, 77, 83, 26, 19, 9, 97, 69, 2, 97, 5, 90, 83, 3, 3, 3, 55, 2,
                2, 1, 1, 1, 5, 1, 3, 1, 25, 10, 11, 26, 14, 8, 7, 13, 88, 5, 8, 8, 8, 65, 13, 41, 20, 14, 9, 12, 4, 1,
                1, 1]
    predictions = _rolling_predict(x_data, y_data, window, prediction_window, c_values)
    _print_metrics(actual, predictions)
    return_charts(convert_to_numpy_array(svm_file.values[20:, 1], 1)[-121:],
                  predictions, actual, True)
if __name__ == "__main__":
main()
|
[
"onifade.esther@gmail.com"
] |
onifade.esther@gmail.com
|
8fefa67098f64d73674240e6c164dd8075fd97fc
|
f1b2552da228ba03ec895122b090c0f6c3234c56
|
/game_10_with_design.py
|
e37b1581a9ede527ba6251640d6f45c06105715f
|
[
"MIT"
] |
permissive
|
codinggrace/text_based_adventure_game
|
2458967a6a7fb2138fe8926714f67d32c88445d2
|
c0944f57d790dcd9be161b04b1372014d2c77d49
|
refs/heads/master
| 2023-04-20T19:24:44.167863
| 2021-04-28T17:45:20
| 2021-04-28T17:45:20
| 274,380,326
| 39
| 38
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,633
|
py
|
import os
# Let's clean it up (or refactor), and create a function to ask for your name and return it.
# This will keep main() as clean as possible.
# New code starts at line 147
#
# Run this code a few times and see what happens with different choices.
# It's good to test all options and see if that's what you expected.
##### ACTIONS #####
def you_died(why):
    """Show the game-over banner plus the reason `why`, then quit the game.

    `why` is a string describing how the player died.
    """
    print_game_over()
    print(why + ". Good job!")
    # Death ends the entire program immediately.
    exit(0)
### END ACTIONS ###
### CHARACTERS ###
def guard():
    """The sleeping-guard encounter blocking the exit door.

    Winning order: 'run' first (the startled guard looks the wrong way),
    then 'door' to slip past and escape — at which point this function
    returns.  Taking the door before distracting the guard, or running a
    second time, is fatal (you_died exits the program).
    """
    print_guard()
    print("You approach the guard, he's still sleeping.")
    print("Suddenly you knocked a wooden cask with a mug on it... CRASSH!")
    print("\nOi, what you doing 'ere?")
    guard_distracted = False
    while True:
        choice = input("[run | door] > ").lower()
        if choice == "run":
            if guard_distracted:
                you_died("Guard was faster than he looks and your world goes dark...")
            print("Guard jumps up and looks the other way, missing you entirely.")
            guard_distracted = True
        elif choice == "door":
            if not guard_distracted:
                you_died("Guard was faster than he looks and your world goes dark...")
            print("You just slipped through the door before the guard realised it.")
            print("You are now outside, home free! Huzzah!")
            return
        else:
            print("Not sure what you meant there... try again.")
# END CHARACTERS #
##### ROOMS #####
def blissful_ignorance_of_illusion_room():
    """The blue-door room: a treasure chest and a sleeping guard.

    Investigating the chest optionally yields loot, then leads to the
    guard encounter.  NOTE(review): the final `else` pairs with the inner
    `if choice == "1"` — a player who never approaches the chest falls out
    of this function with no guard encounter at all; confirm whether that
    is intended.
    """
    print_chest()
    # The treasure found if the player opens the chest.
    treasure_chest = ["diamonds", "gold", "silver", "sword"]
    print("You see a room with a wooden treasure chest on the left, and a sleeping guard on the right in front of the door")
    # Ask player what to do.
    action = input("What do you do? > ")
    # Any mention of the chest counts as approaching it.
    if action.lower() in ["treasure", "chest", "left"]:
        print("Oooh, treasure!")
        print("Open it? Press '1'")
        print("Leave it alone. Press '2'")
        choice = input("> ")
        if choice == "1":
            print("Let's see what's in here... /grins")
            print("The chest creaks open, and the guard is still sleeping. That's one heavy sleeper!")
            print("You find some")
            # List every treasure found in the chest.
            for treasure in treasure_chest:
                print(treasure)
            # So much treasure, what to do? Take it or leave it.
            print("What do you want to do?")
            print("Take all {} treasure, press '1'".format(len(treasure_chest)))
            print("Leave it, press '2'")
            treasure_choice = input("> ")
            if treasure_choice == "1":
                print("\tWoohoo! Bounty and a shiney new sword. /drops your crappy sword in the empty treasure chest.")
                print("\tYou just received [{}]".format(", ".join(treasure_chest)))
            elif treasure_choice == "2":
                print("It will still be here (I hope), right after I get past this guard")
            # Whether the treasure was taken or left, the guard encounter follows.
            guard()
        else:
            # Chest left unopened: go straight to the guard encounter.
            print("The guard is more interesting, let's go that way!")
            guard()
def painful_truth_of_reality_room():
    """The red-door room: meet Cthulhu, then flee (restart) or die."""
    print_monster()
    print("There you see the great evil Cthulhu.")
    print("He, it, whatever stares at you and you go insane.")
    print("Do you flee for your life or eat your head?")
    answer = input("> ")
    if "flee" not in answer:
        # Anything that is not fleeing is fatal (you_died exits the program).
        you_died("You died. Well, that was tasty!")
    # Fleeing sends the player back to the starting two-door room.
    start_adventure()
### END ROOMS ###
def get_player_name():
    """Prompt for the player's name and return the name the game will use.

    Confirming the joke prompt — or giving any unrecognised answer —
    renames the player to "Rainbow Unicorn"; declining keeps the typed
    name.
    """
    name = input("What's your name? > ")
    alt_name = "Rainbow Unicorn"
    answer = input("Your name is {}, is that correct? [Y|N] > ".format(alt_name.upper())).lower()
    if answer in ["y", "yes"]:
        name = alt_name
        print("You are fun, {}! Let's begin our adventure!".format(name.upper()))
    elif answer in ["n", "no"]:
        print("Ok, picky. {} it is. Let's get started on our adventure.".format(name.upper()))
    else:
        # Non-Y/N answers also get the joke name.
        print("Trying to be funny? Well, you will now be called {} anyway.".format(alt_name.upper()))
        name = alt_name
    return name
def start_adventure():
    """The starting room: choose the red or blue door; anything else loses."""
    print_dungeon()
    print("You enter a room, and you see a red door to your left and a blue door to your right.")
    door_picked = input("Do you pick the red door or blue door? > ")
    rooms = {
        "red": painful_truth_of_reality_room,
        "blue": blissful_ignorance_of_illusion_room,
    }
    if door_picked in rooms:
        rooms[door_picked]()
    else:
        print("Sorry, it's either 'red' or 'blue' as the answer. You're the weakest link, goodbye!")
def main():
    """Entry point: clear the screen, greet the player, run the adventure."""
    os.system("clear")
    player_name = get_player_name()
    # Style guidance: http://legacy.python.org/dev/peps/pep-0008/
    # Possible extensions: taunting/talking to the guard, sword fights with
    # hit points, small puzzles during encounters, handling "right"/"guard"
    # in the chest room, or refactoring the branching into a finite state
    # machine (object-oriented; a lesson for another day).
    start_adventure()
    print("\nThe end\n")
    print("Thanks for playing, {}".format(player_name.upper()))
def print_dungeon():
    """Print the two-door dungeon ASCII banner (art must stay byte-exact)."""
    print()
    print(" _________________________________________________________")
    print(" /| -_- _- |\ ")
    print("/ |_-_- _ -_- _- -| \ ")
    print(" | _- _-- | ")
    print(" | , |")
    print(" | .-'````````'. '(` .-'```````'-. |")
    print(" | .` | `. `)' .` | `. | ")
    print(" | / | () \ U / | () \ |")
    print(" | | | ; | o T o | | ; | |")
    print(" | | | ; | . | . | | ; | |")
    print(" | | | ; | . | . | | ; | |")
    print(" | | | ; | .|. | | ; | |")
    print(" | | |____;_________| | | |____;_________| | ")
    print(" | | / __ ; - | ! | / `'() _ - | |")
    print(" | | / __ () -| - | / __-- - | |")
    print(" | | / __-- _ | _- _ - | / __--_ | |")
    print(" |__|/__________________|___________|/__________________|__|")
    print(" / _ - lc \ ")
    print("/ -_- _ - _- _--- -_- -_ \ ")
    print()
def print_monster():
    """Print the Cthulhu ASCII banner (art must stay byte-exact)."""
    print()
    print("            |    |                   ")
    print("           \ /  \ /                  ")
    print("          -= .'> =-  -= <'. =-       ")
    print("             '.'.      .'.'          ")
    print("               '.'.  .'.'            ")
    print("                 '.'.----^----.'.'   ")
    print("                  /'==========='\    ")
    print("      .           /  .-.   .-.  \  .  ")
    print("     :'.\         '.O.') ('.O.' /.': ")
    print("      '. |         |           | .'  ")
    print("        '|        /      \      |'   ")
    print("          \      (o'o)      /        ")
    print("          |\      /|                 ")
    print("          \('._________.')/          ")
    print("           '. \/|_|_|\/ .'           ")
    print("             /'._______.'\  lc       ")
    print()
def print_chest():
    """Print the treasure-chest ASCII banner (art must stay byte-exact)."""
    print()
    print("             _.--.                   ")
    print("         _.-'_:-'||                  ")
    print("     _.-'_.-::::'||                  ")
    print("    _.-:'_.-::::::' ||               ")
    print("  .'`-.-:::::::'     ||              ")
    print(" /.'`;|:::::::'      ||_             ")
    print("||   ||::::::'     _.;._'-._         ")
    print("||   ||:::::'  _.-!oo @.!-._'-.      ")
    print("('.  ||:::::.-!()oo @!()@.-'_.|      ")
    print(" '.'-;|:.-'.&$@.& ()$%-'o.'-U||      ")
    print("   `>'-.!@%()@'@_%-'_.-o _.|'||      ")
    print("    ||-._'-.@.-'_.-' _.-o  |'||      ")
    print("    ||=[ '-._.-+U/.-'    o |'||      ")
    print("    || '-.]=|| |'|      o  |'||      ")
    print("    ||      || |'|        _| ';      ")
    print("    ||      || |'|    _.-'_.-'       ")
    print("    |'-._   || |'|_.-'_.-'           ")
    print("     '-._'-.|| |' `_.-'              ")
    print("         '-.||_/.-'                  ")
    print()
def print_guard():
    """Print the guard ASCII banner (art must stay byte-exact)."""
    print()
    print("                  ___I___             ")
    print("                 /=  |  #\            ")
    print("                /.__-| __ \           ")
    print("                |/ _\_/_ \|           ")
    print("                (( __ \__))           ")
    print("             __ ((()))))()) __        ")
    print("           ,' |()))))(((()|# `.       ")
    print("          /   |^))()))))(^|   =\      ")
    print("         /    /^v^(())()()v^;'  .\    ")
    print("         |__.'^v^v^))))))^v^v`.__|    ")
    print("        /_ ' \______(()_____(   |     ")
    print("   _..-'   _//_____[xxx]_____\.-|     ")
    print("  /,_#\.=-' /v^v^v^v^v^v^v^v^| _|     ")
    print("  \)|)      v^v^v^v^v^v^v^v^v| _|     ")
    print("   ||       :v^v^v^v^v^v`.-' |#  \,   ")
    print("   ||       v^v^v^v`_/\__,--.|\_=_/   ")
    print("   ><       :v^v____|  \_____|_       ")
    print("   ,        v^      /  \ /           ")
    print("   //\_||_)\  `/_..-._\ )_...__\      ")
    print("  || \/ #|  |_='_(  | =_(_            ")
    print("  ||  _/\_  |   / =\  /  ' =\         ")
    print("  \\\/  \/ )/ gnv |=____#|  '=....#|  ")
    print()
def print_game_over():
    """Print the big GAME OVER ASCII banner (art must stay byte-exact)."""
    print()
    print("  _____                         ____                 ")
    print(" / ____|                       / __ \                ")
    print("| |  __  __ _ _ __ ___   ___  | |  | |_   _____ _ __ ")
    print("| | |_ |/ _` | '_ ` _ \ / _ \ | |  | \ \ / / _ \ '__|")
    print("| |__| | (_| | | | | | |  __/ | |__| |\ V /  __/ |   ")
    print(" \_____|\__,_|_| |_| |_|\___|  \____/  \_/ \___|_|   ")
    print()
# Start the game when run as a script.
if __name__ == '__main__':
    main()
|
[
"noreply@github.com"
] |
codinggrace.noreply@github.com
|
ba80a3767effb9210d98abe979d2f5eee9574674
|
0b91308ff675c680baa6ee31859a15e0ac9ca50d
|
/trio_typing/__init__.pyi
|
19775a6f653f3bdbc2c631b469bd9cae6a008f78
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
oakkitten/trio-typing
|
81f9c3fb3809525b9f85139c8756b78a673dbdc8
|
6f30046f327e9c4dcd541ed5ac5e1a0a2f8ab834
|
refs/heads/master
| 2020-07-22T23:43:37.148535
| 2019-05-10T19:04:00
| 2019-05-10T19:04:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,794
|
pyi
|
import sys
import trio
from abc import abstractmethod, abstractproperty, ABCMeta
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
FrozenSet,
Generic,
Optional,
Type,
TypeVar,
Union,
overload,
)
from types import CodeType, FrameType, TracebackType
from typing_extensions import Protocol
from mypy_extensions import NamedArg, VarArg
# Public API of this stub package.
__all__ = [
    "Nursery",
    "TaskStatus",
    "takes_callable_and_args",
    "AsyncGenerator",
    "CompatAsyncGenerator",
]
# Type variables shared by the stubs below.
T = TypeVar("T")
T_co = TypeVar("T_co", covariant=True)
T_co2 = TypeVar("T_co2", covariant=True)
T_contra = TypeVar("T_contra", contravariant=True)
def takes_callable_and_args(fn: T) -> T:
    # Identity decorator: marks functions whose (async_fn, *args) pairs the
    # mypy plugin should type-check together.  No runtime effect.
    return fn
class TaskStatus(Protocol[T_contra]):
    """Structural type of the `task_status` object handed to tasks started
    via Nursery.start; `started()` reports the startup value back."""
    def started(self, value: T_contra = ...) -> None: ...
class Nursery:
    """Stub of a trio nursery: spawns child tasks via ``start_soon``/``start``."""
    # Cancelling this scope cancels every task in the nursery.
    cancel_scope: trio.CancelScope
    @property
    def child_tasks(self) -> FrozenSet[trio.hazmat.Task]: ...
    @property
    def parent_task(self) -> trio.hazmat.Task: ...
    @takes_callable_and_args
    def start_soon(
        self,
        async_fn: Union[
            # List these explicitly instead of Callable[..., Awaitable[None]]
            # so that even without the plugin we catch cases of passing a
            # function with keyword-only arguments to start_soon().
            Callable[[], Awaitable[None]],
            Callable[[Any], Awaitable[None]],
            Callable[[Any, Any], Awaitable[None]],
            Callable[[Any, Any, Any], Awaitable[None]],
            Callable[[Any, Any, Any, Any], Awaitable[None]],
            Callable[[VarArg()], Awaitable[None]],
        ],
        *args: Any,
        name: object = None,
    ) -> None: ...
    @takes_callable_and_args
    async def start(
        self,
        async_fn: Union[
            # List these explicitly instead of Callable[..., Awaitable[None]]
            # so that even without the plugin we can infer the return type
            # of start(), and fail when a function is passed that doesn't
            # accept task_status.
            Callable[[NamedArg(TaskStatus[T], "task_status")], Awaitable[None]],
            Callable[[Any, NamedArg(TaskStatus[T], "task_status")], Awaitable[None]],
            Callable[
                [Any, Any, NamedArg(TaskStatus[T], "task_status")], Awaitable[None]
            ],
            Callable[
                [Any, Any, Any, NamedArg(TaskStatus[T], "task_status")], Awaitable[None]
            ],
            Callable[
                [Any, Any, Any, Any, NamedArg(TaskStatus[T], "task_status")],
                Awaitable[None],
            ],
            Callable[
                [VarArg(), NamedArg(TaskStatus[T], "task_status")], Awaitable[None]
            ],
        ],
        *args: Any,
        name: object = None,
    ) -> T: ...
# On Python 3.6+ re-export typing.AsyncGenerator; older interpreters get an
# equivalent abstract definition declared inline below.
if sys.version_info >= (3, 6):
    from typing import AsyncGenerator as AsyncGenerator
else:
    class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra]):
        """Backport of ``typing.AsyncGenerator`` for pre-3.6 interpreters."""
        @abstractmethod
        async def __anext__(self) -> T_co: ...
        @abstractmethod
        async def asend(self, value: T_contra) -> T_co: ...
        @abstractmethod
        async def athrow(
            self,
            exc_type: Type[BaseException],
            exc_value: Optional[BaseException] = ...,
            exc_traceback: Optional[TracebackType] = ...,
        ) -> T_co: ...
        @abstractmethod
        async def aclose(self) -> None: ...
        @abstractmethod
        def __aiter__(self) -> AsyncGenerator[T_co, T_contra]: ...
        # Introspection attributes mirroring the runtime async-generator object.
        @property
        def ag_await(self) -> Any: ...
        @property
        def ag_code(self) -> CodeType: ...
        @property
        def ag_frame(self) -> FrameType: ...
        @property
        def ag_running(self) -> bool: ...
class CompatAsyncGenerator(
    AsyncGenerator[T_co, T_contra], Generic[T_co, T_contra, T_co2], metaclass=ABCMeta
):
    """Async generator with a third type parameter, kept compatible with the
    typeshed stubs (see the ``aclose`` note below)."""
    async def __anext__(self) -> T_co: ...
    async def asend(self, value: T_contra) -> T_co: ...
    async def athrow(
        self,
        exc_type: Type[BaseException],
        exc_value: Optional[BaseException] = ...,
        exc_traceback: Optional[TracebackType] = ...,
    ) -> T_co: ...
    # aclose() should return None but the stubs in typeshed for 3.6+ say
    # it returns the YieldType, so we need to use Any to be compatible
    # (https://github.com/python/typeshed/issues/2785)
    async def aclose(self) -> Any: ...
    def __aiter__(self) -> AsyncGenerator[T_co, T_contra]: ...
    @property
    def ag_await(self) -> Any: ...
    @property
    def ag_code(self) -> CodeType: ...
    @property
    def ag_frame(self) -> FrameType: ...
    @property
    def ag_running(self) -> bool: ...
class YieldType(Generic[T_co]):
    """Marker naming an async generator's yield type; presumably consumed by
    the accompanying type-checker plugin -- TODO confirm."""
    pass
class SendType(Generic[T_contra]):
    """Marker naming an async generator's send type; presumably consumed by
    the accompanying type-checker plugin -- TODO confirm."""
    pass
|
[
"oremanj@gmail.com"
] |
oremanj@gmail.com
|
262ed87099fcaae6af2892a97066214b1d732ebf
|
a267a254f20d07702ba9050aa2508f7cff97c7ea
|
/optforwardtest.py
|
e8bf12eaf404f908fe7901b203b5c471c5b9c0af
|
[] |
no_license
|
fjfabz/ridgecvtest
|
8c3f30284dd3f064fe7fbf8d1d63cc2a4aeaaddf
|
26bf940eda7c149ba1deff803ff71b4014cfdbba
|
refs/heads/master
| 2020-05-17T15:18:08.711734
| 2018-03-21T06:39:58
| 2018-03-21T06:39:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 33,022
|
py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division
__author__ = 'LiNing'
from sigknn import *
from sigsvm import *
import os, sys, datetime, glob
import argparse, logging
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
import pylab as pl
import matplotlib.pyplot as plt
import cPickle as pickle
from pprint import pprint
import multiprocessing as mp
import csv
import talib
from talib import abstract, common, func
from sklearn import preprocessing
from sklearn.preprocessing import MinMaxScaler, StandardScaler, Normalizer
from sklearn.multiclass import OneVsRestClassifier
from sklearn.neighbors import NearestNeighbors, KNeighborsClassifier, KNeighborsRegressor
from sklearn.svm import SVC, LinearSVC
from sklearn.decomposition import PCA
from sklearn.feature_selection import RFE, RFECV
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import f_classif, chi2, f_regression, SelectPercentile, SelectFpr, SelectFdr, SelectFwe, GenericUnivariateSelect
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn import cross_validation
from sklearn.cross_validation import train_test_split, KFold, StratifiedKFold, cross_val_score
from sklearn import metrics
from sklearn.metrics import confusion_matrix, classification_report, precision_score, recall_score, f1_score, accuracy_score, make_scorer
from sklearn.metrics import mean_absolute_error, mean_squared_error, median_absolute_error, explained_variance_score, r2_score
import optunity, optunity.metrics
from optunity.constraints import wrap_constraints
from zigzag import peak_valley_pivots, max_drawdown
## set global variables ##
# Directory containing this script; its ./lib subdirectory is appended to the
# import path so helper modules can be imported.
APP_DIR = os.path.split(__file__)[0]
sys.path.append(os.path.join(APP_DIR, 'lib'))
# Script name without extension; used as the logger name below.
APP_NAME = os.path.split(os.path.splitext(__file__)[0])[1]
## set logger ##
def init_log(logger_name=''):
    """Create and return an INFO-level logger that writes every record to
    both a date-stamped log file (truncated on each run) and stdout."""
    log = logging.getLogger(logger_name)
    log.setLevel(logging.INFO)
    record_format = logging.Formatter(
        '%(asctime)s-%(name)s-%(levelname)s-%(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    # Two destinations: a file named after today's date, then the console.
    file_target = logging.FileHandler(
        '%s.log' % (datetime.datetime.now().strftime('%Y-%m-%d')), mode='w')
    console_target = logging.StreamHandler(sys.stdout)
    for destination in (file_target, console_target):
        destination.setFormatter(record_format)
        log.addHandler(destination)
    return log
## app initiation here
# Module-level logger shared by every function in this script.
logger = init_log(APP_NAME)
def isDayAcross(timestamp, diff_n=1):
    """Return True when *timestamp* lies within the last ``diff_n`` minutes
    before the 15:15 session close, or falls exactly on the 11:30 morning
    close -- i.e. a prediction made now would straddle a session boundary."""
    clock = timestamp.time()
    cutoff = datetime.time(15, 15 - diff_n)
    morning_close = datetime.time(11, 30)
    return clock > cutoff or clock == morning_close
def isInTradingTime(timestamp):
    """Return True iff *timestamp* lies in either trading session,
    09:15-11:30 or 13:00-15:15, boundaries inclusive."""
    clock = timestamp.time()
    sessions = (
        (datetime.time(9, 15), datetime.time(11, 30)),
        (datetime.time(13, 0), datetime.time(15, 15)),
    )
    return any(begin <= clock <= end for begin, end in sessions)
def setTimeResample(rs, delta = '5min'):
    """Resample an OHLCV frame onto *delta*-sized time bars.

    NOTE(review): uses the legacy ``Series.resample(..., how=...)`` API,
    which was removed in modern pandas -- keep pandas pinned accordingly or
    port to ``.resample(delta).first()`` etc.
    """
    # open = first tick of each bar, high/low = extremes, close = last tick,
    # volume = total traded within the bar.
    newopen = rs.open.resample(delta, how = 'first')
    newhigh = rs.high.resample(delta, how = 'max')
    newlow = rs.low.resample(delta, how = 'min')
    newclose = rs.close.resample(delta, how = 'last')
    newvolume = rs.volume.resample(delta, how = 'sum')
    newrs = pd.DataFrame({'open':newopen, 'high':newhigh, 'low':newlow, 'close':newclose, 'volume':newvolume})
    # Bars containing no ticks come back as NaN rows; drop them.
    return newrs.dropna()
def setNumDeltaReverse(rs, delta = 5):
    """Aggregate every *delta* consecutive rows into one OHLCV bar, grouping
    from the newest row backwards so the most recent bar is always complete.

    NOTE(review): relies on the ``.ix`` indexer removed in pandas 1.0.
    """
    ## from end to towards begin to resample
    rs = rs.sort_index(ascending = False)
    # Start offsets of each delta-sized group within the reversed frame;
    # a partial leftover group at the old end is dropped.
    index_array = np.arange(0, rs.shape[0]-delta+1, delta)
    # print index_array
    newopen = [rs.open.ix[i+delta-1] for i in index_array] # the last
    newhigh = [max(rs.high.ix[i:i+delta]) for i in index_array]
    newlow = [min(rs.low.ix[i:i+delta]) for i in index_array]
    newclose = [rs.close.ix[i] for i in index_array] # the first
    newvolume = [sum(rs.volume.ix[i:i+delta]) for i in index_array]
    newrs = pd.DataFrame({'open':newopen, 'high':newhigh, 'low':newlow, 'close':newclose, 'volume':newvolume})
    # Each bar keeps the timestamp of its newest constituent row.
    newrs.index = rs.index[[i for i in index_array]]
    newrs = newrs.sort_index(ascending = True)
    return newrs.dropna()
def loadTimeSeries(csv_path, rs_num = 10000, nrows = 10000, resample_time = 5):
    """Read *nrows* rows starting ``rs_num`` rows from the end of a tick CSV
    and return a resampled OHLCV DataFrame indexed by timestamp.

    resample_time may be a pandas offset string (time-based bars) or an int
    (fixed row-count bars, aggregated newest-first); anything else yields None.

    NOTE(review): relies on the ``.ix`` indexer removed in pandas 1.0.
    """
    # Count the file's rows so we can skip straight to its tail.
    lines = sum(1 for _ in csv.reader(open(csv_path)))
    ## ------------------------------------------------------------------------------------
    assert rs_num >= nrows
    rs = pd.read_csv(csv_path, header = None, skiprows = lines-rs_num, nrows = nrows)
    # rs = pd.read_csv(csv_path, header = None, skiprows = lines-rs_num, nrows = 10000)
    # # rs = pd.read_csv(csv_path, skiprows = lines-rs_num, nrows = 9999)
    ## ------------------------------------------------------------------------------------
    # print rs
    rs = rs.ix[:, 0:6] ## 0,1,2,3,4,5,6 is the column index, not number
    rs.columns = ['Date', 'Time', 'open', 'high', 'low', 'close', 'volume']
    rs.index = pd.to_datetime(rs.Date+' '+rs.Time)
    ######################################################################
    '''remove the same timestamp'''
    # Keep only the first row of each run of identical consecutive timestamps.
    position_dict = {}
    k, last_index = 0, None
    for index in rs.index:
        if index != last_index:
            position_dict[index] = k
        k += 1
        last_index = index
    rs = rs.ix[sorted(position_dict.values())]
    ######################################################################
    # index = [item for item in rs.index if isInTradingTime(item)]
    # rs = rs.ix[index]
    del rs['Date']
    del rs['Time']
    # print rs
    ######################################################################
    # data = rs ## no resample training data for forwardtest when skipping by resample step
    if isinstance(resample_time, str):
        data = setTimeResample(rs, delta = resample_time)
    elif isinstance(resample_time, int):
        data = setNumDeltaReverse(rs, delta = resample_time)
    else:
        data = None
    # print data
    ######################################################################
    return data
def target_define(data, threshold = 1):
    """Attach classification targets to *data* in place and return it.

    ``pos`` is 1 where ``diff`` exceeds *threshold*, ``neg`` is -1 where
    ``diff`` falls below ``-threshold``, and ``label`` is their sum, giving a
    ternary target in {-1, 0, 1}.
    """
    up_moves = data['diff'] > threshold
    down_moves = data['diff'] < -threshold
    data['pos'] = up_moves.astype(int)
    data['neg'] = -down_moves.astype(int)
    data['label'] = data['pos'] + data['neg']
    return data
def threshold_define(data, zero_propotion = 0.1):
    """Pick the |diff| value at rank ``zero_propotion`` of the sorted
    magnitudes, so that roughly that fraction of rows will later be
    labelled 0 by ``target_define``."""
    #######################################################################################
    magnitudes = np.abs(data['diff'])
    ranked = sorted(magnitudes)
    cut = int(len(ranked) * zero_propotion)
    #######################################################################################
    # print 'threshold:', ranked[cut]
    return ranked[cut]
def mean_classification(dataset_window, test_size):
    """Naive mean-reversion predictor: for each of the last *test_size*
    points, predict the sign of (mean of all earlier points - point value).

    NOTE(review): uses the ``.ix`` indexer removed in pandas 1.0.
    """
    diff_pred = []
    for i in np.arange(test_size, 0 , -1):
        # Training window: everything strictly before the i-th point from the end.
        X_train = dataset_window.shift(i-1).dropna()
        X_test = dataset_window.ix[-i]
        mean_train = np.mean(X_train)
        diff_test = mean_train-X_test
        diff_pred.append(diff_test)
    # Predicted labels in {-1, 0, 1}.
    y_pred = np.sign(diff_pred)
    #######################################################################################
    return y_pred
def max_drawdown_num(X):
    '''
    Return the absolute (price-unit) value of the maximum drawdown of sequence X.

    Fixed/generalized: accepts any iterable of numbers (not only indexables)
    and returns 0 for an empty sequence instead of raising IndexError on X[0].

    Note
    ----
    If the sequence is strictly increasing, 0 is returned.
    '''
    mdd = 0
    peak = None
    for x in X:
        # The first element initializes the running peak.
        if peak is None or x > peak:
            peak = x
        # Drawdown in absolute units, not relative to the peak.
        dd = peak - x
        if dd > mdd:
            mdd = dd
    return mdd
def max_drawdown_rate(X):
    '''
    Return the maximum drawdown of sequence X as a fraction of the running peak.

    Fixed/generalized: accepts any iterable of numbers (not only indexables)
    and returns 0 for an empty sequence instead of raising IndexError on X[0].

    Note
    ----
    If the sequence is strictly increasing, 0 is returned.
    '''
    mdd = 0
    peak = None
    for x in X:
        if peak is None or x > peak:
            peak = x
        # Relative drawdown; still raises ZeroDivisionError when peak == 0,
        # matching the original behaviour.
        dd = (peak - x) / peak
        if dd > mdd:
            mdd = dd
    return mdd
def resultplot_backup(op):
    """Older array-based variant of ``resultplot``: read the forwardtest
    results TSV from ``op.fn_out`` and save a 3-panel figure (cumulative
    signal, average signal, accuracy) to ``op.pn_out``."""
    rs = pd.read_csv(op.fn_out, sep='\t')
    '''here you must calculate diff using the next one close and current close!!!'''
    rs['diff'] = rs.close.diff(periods=1).shift(-1).fillna(0)
    # print rs
    ## ------------------------------------------------------------------------------------
    predict = np.array(rs['predict'])
    # 1 where a non-zero (long/short) prediction was made, else 0.
    predict_bool = (predict!=0).astype(int)
    # print 'predict times:', sum(predict_bool)
    diff = np.array(rs['diff'])
    # Per-row P&L proxy: prediction sign times next-period price change.
    siglist = predict*diff
    correctlist = predict_bool & (siglist>=0).astype(int)
    ## plot
    plt.figure()
    plt.subplot(311)
    sigsumlist = np.cumsum(siglist)
    # print 'the last sigsum:', sigsumlist[-1]
    ## ------------------------------------------------------------------------------------
    # i = 0
    # while sigsumlist[i] == 0:
    #     sigsumlist[i] = 0.001
    #     i += 1
    # up_down_thresh = 10
    # pivots = peak_valley_pivots(sigsumlist, up_down_thresh, -up_down_thresh)
    # plt.plot(sigsumlist[pivots!=0], 'k:', label='$pivots$')
    ## ------------------------------------------------------------------------------------
    plt.plot(sigsumlist, 'k-', label='$sigsum$')
    plt.legend()
    plt.ylabel('sigsum')
    plt.ylim(-500, 3000)
    plt.subplot(312)
    averagesiglist = np.cumsum(siglist)/np.cumsum(predict_bool)
    # print 'the last averagesig:', averagesiglist[-1]
    plt.plot(averagesiglist, label='$averagesig$')
    plt.legend()
    plt.ylabel('averagesig')
    plt.ylim(0, 1)
    plt.subplot(313)
    accuracylist = np.cumsum(correctlist)/np.cumsum(predict_bool)
    # print 'the last accuracy:', accuracylist[-1]
    plt.plot(accuracylist, label='$accuracy$')
    plt.legend()
    plt.ylabel('accuracy')
    plt.ylim(0.4, 0.8)
    plt.figtext(0.15, 0.95, 'predict times:{}'.format(sum(predict_bool)), color='green')
    plt.figtext(0.4, 0.95, 'mdd num:{:.2f}'.format(max_drawdown_num(sigsumlist)), color='green')
    # plt.figtext(0.7, 0.95, 'mdd rate:{:.2f}'.format(max_drawdown_rate(sigsumlist[sigsumlist>200])), color='green')
    plt.figtext(0.15, 0.91, 'last sigsum:{:.1f}'.format(sigsumlist[-1]), color='green')
    plt.figtext(0.4, 0.91, 'last averagesig:{:.3f}'.format(averagesiglist[-1]), color='green')
    plt.figtext(0.7, 0.91, 'last accuracy:{:.2f}'.format(accuracylist[-1]), color='green')
    # Ensure the output directory exists before saving the figure.
    fig_dir = os.path.split(op.pn_out)[0]
    if not os.path.exists(fig_dir):
        os.makedirs(fig_dir)
    plt.savefig(op.pn_out)
    plt.close()
def resultplot(op):
    """Read the forwardtest results TSV from ``op.fn_out`` (columns: index,
    predict, close) and save a 3-panel figure (cumulative signal, average
    signal, accuracy) to ``op.pn_out``."""
    #######################################################################################
    # rs = pd.read_csv(op.fn_out, index_col=0, sep='\t')
    rs = pd.read_csv(op.fn_out, sep='\t')
    rs.index = pd.to_datetime(rs['index'])
    del rs['index']
    # print rs
    ## ------------------------------------------------------------------------------------
    '''here you must calculate diff using the next one close and current close!!!'''
    rs['diff'] = rs.close.diff(periods=1).shift(-1).fillna(0)
    # Per-row P&L proxy: prediction sign times next-period price change.
    rs['sig'] = rs['predict']*rs['diff']
    rs['sigsum'] = np.cumsum(rs['sig'])
    # print rs
    ## ------------------------------------------------------------------------------------
    rs['predict_bool'] = (rs['predict']!=0).astype(int)
    rs['correct'] = rs['predict_bool'] & (rs['sig']>=0).astype(int)
    # print rs
    ## ------------------------------------------------------------------------------------
    ## plot
    plt.figure()
    plt.subplot(311)
    # print 'the last sigsum:', rs['sigsum'][-1]
    plt.plot(rs['sigsum'], 'k-', label='$sigsum$')
    plt.legend()
    plt.ylabel('sigsum')
    plt.ylim(-500, 3000)
    plt.subplot(312)
    rs['averagesig'] = rs['sigsum']/np.cumsum(rs['predict_bool'])
    # print 'the last averagesig:', rs['averagesig'][-1]
    plt.plot(rs['averagesig'], label='$averagesig$')
    plt.legend()
    plt.ylabel('averagesig')
    plt.ylim(0, 1)
    plt.subplot(313)
    rs['accuracy'] = np.cumsum(rs['correct'])/np.cumsum(rs['predict_bool'])
    # rs['accuracy'] = np.cumsum((rs['sig']>0).astype(int))/np.cumsum((rs['sig']!=0).astype(int))
    # print 'the last accuracy:', rs['accuracy'][-1]
    plt.plot(rs['accuracy'], label='$accuracy$')
    plt.legend()
    plt.ylabel('accuracy')
    plt.ylim(0.4, 0.8)
    plt.figtext(0.15, 0.95, 'predict times:{}'.format(sum(rs['predict_bool'])), color='green')
    plt.figtext(0.4, 0.95, 'mdd num:{:.2f}'.format(max_drawdown_num(rs['sigsum'])), color='green')
    # sigsum_threshold = 200
    # plt.figtext(0.7, 0.95, 'mdd rate:{:.2f}%'.format(max_drawdown_rate(rs['sigsum'][rs['sigsum']>sigsum_threshold])*100), color='green')
    plt.figtext(0.15, 0.91, 'last sigsum:{:.1f}'.format(rs['sigsum'][-1]), color='green')
    plt.figtext(0.4, 0.91, 'last averagesig:{:.3f}'.format(rs['averagesig'][-1]), color='green')
    plt.figtext(0.7, 0.91, 'last accuracy:{:.2f}%'.format(rs['accuracy'][-1]*100), color='green')
    # Ensure the output directory exists before saving the figure.
    fig_dir = os.path.split(op.pn_out)[0]
    if not os.path.exists(fig_dir):
        os.makedirs(fig_dir)
    plt.savefig(op.pn_out)
    plt.close()
    # print rs
    #######################################################################################
    # ## resample siglist for specified periods
    # sig_resample = pd.DataFrame()
    # resample_time = '1D' ## you can change the resample time, for example, 5Min, 1D, 1W, 1M
    # sig_resample['sig'] = rs.sig.resample(resample_time, how='sum').fillna(0)
    # print sig_resample
    # ## plot
    # plt.figure()
    # plt.plot(sig_resample['sig'], 'k.', label='$sig$')
    # acc = sum((sig_resample['sig']>0).astype(int))/sum((sig_resample['sig']!=0).astype(int))
    # print 'accuracy(only for sig!=0):{:.2f}%'.format(acc*100)
    # plt.figtext(0.35, 0.93, 'accuracy(only for sig!=0):{:.2f}%'.format(acc*100), color='green')
    # plt.savefig('sig_{}.png'.format(resample_time))
    # plt.close()
def closeplot(op):
    """Plot the full close-price series from ``op.fn_in`` and save it to close.png."""
    source = op.fn_in
    row_count = sum(1 for _ in csv.reader(open(source)))
    # Load every row at 1-period resolution (no aggregation).
    series = loadTimeSeries(source, rs_num = row_count, nrows = row_count, resample_time = 1)
    plt.plot(series.close)
    plt.savefig('close.png')
def sigingknn_talib(params):
    """One forwardtest step: load data ending ``rs_num`` rows from the file's
    end, build TA-Lib features, KNN-classify, and return a one-row DataFrame
    with the prediction and close for the newest point.

    params = (csv_path, rs_num, nrows, resample_time, diff_n).

    NOTE(review): relies on the removed pandas ``.ix`` indexer and on helpers
    (feature_gen_talib, getFibonacciList, knn_classification) from the
    star-imported sigknn module.
    """
    #######################################################################################
    starttime = datetime.datetime.now()
    csv_path, rs_num, nrows, resample_time, diff_n = params
    #######################################################################################
    '''1. DATA PREPARING'''
    #######################################################################################
    # rs_num, nrows = 10000, 10000
    data = loadTimeSeries(csv_path, rs_num = rs_num, nrows = nrows, resample_time = resample_time)
    ## ------------------------------------------------------------------------------------
    assert diff_n % resample_time == 0
    # Target: close change over the next diff_n minutes (in resampled periods).
    data['diff'] = data.close.diff(periods=int(diff_n/resample_time)).shift(-int(diff_n/resample_time)).fillna(0)
    zero_propotion = 0
    threshold = threshold_define(data, zero_propotion)
    data = target_define(data, threshold)
    # print data.head(10)
    #######################################################################################
    '''2. FEATURE EXTRACTION'''
    #######################################################################################
    X = feature_gen_talib(data, termlist = getFibonacciList(144))
    # X = getTermFeatures(data, windowsize = 17, termlist = getFibonacciList(144))
    ## ------------------------------------------------------------------------------------
    ## TODO(here you can do feature selection)
    # my_pca = PCA(n_components = 100).fit(X)
    # X = my_pca.transform(X)
    index = X.index
    #######################################################################################
    # '''change definition of 'diff', use relative close change instead of absolute close change'''
    # data['diff'] = data['diff']/data['close'] ## (close2-close1)/close1
    yset, closeset, diffset = data['label'].ix[index], data['close'].ix[index], data['diff'].ix[index]
    #######################################################################################
    window_size = int(len(index))
    logger.info('sigingknn_talib >> window_size:{}'.format(window_size))
    yset_window = yset.ix[-window_size:]
    closeset_window = closeset.ix[-window_size:]
    diffset_window = diffset.ix[-window_size:]
    ## ------------------------------------------------------------------------------------
    # Hold out the last diff_n minutes' worth of points as the test slice.
    test_size = int(diff_n/resample_time)
    y_train = yset_window.ix[:-test_size]
    y_test = yset_window.ix[-test_size:]
    diff_train = diffset_window.ix[:-test_size]
    test_index = closeset_window.index[-test_size:]
    test_close = closeset_window.ix[-test_size:]
    #######################################################################################
    '''3. CLASSIFICATION'''
    #######################################################################################
    dataset_window = X.ix[-window_size:]
    X_train = dataset_window.ix[:-test_size]
    X_test = dataset_window.ix[-test_size:]
    y_pred = knn_classification(X_train, X_test, y_train, diff_train)
    #######################################################################################
    y_pred = y_pred.astype(int)
    ## no transaction in the final diff_n minutes of trading
    assert len(test_index) == len(y_pred)
    for i in range(len(test_index)):
        if isDayAcross(test_index[i], diff_n):
            y_pred[i] = 0
        else:
            pass
    logger.info('sigingknn_talib >> {}, {}, {}'.format(test_index[-1], y_pred[-1], test_close[-1]))
    #######################################################################################
    endtime = datetime.datetime.now()
    logger.info('sigingknn_talib >> consuming time:{}'.format(endtime-starttime))
    # Only the newest point's prediction is returned to the caller.
    return pd.DataFrame({'predict': y_pred[-1], 'close':test_close[-1]}, index=[test_index[-1]])
def sigingknn(params):
    """One forwardtest step using unit/short/long-term feature sets and a
    KNN classifier per feature set, combined by agreement; returns a one-row
    DataFrame with the prediction and close for the newest point.

    params = (csv_path, rs_num, nrows, resample_time, diff_n).

    NOTE(review): relies on the removed pandas ``.ix`` indexer and on helpers
    (getUnitTermFeatures, getShortTermFeatures, getLongTermFeatures,
    getFibonacciList, knn_classification, Vote) from the star-imported
    sigknn module.
    """
    #######################################################################################
    starttime = datetime.datetime.now()
    csv_path, rs_num, nrows, resample_time, diff_n = params
    #######################################################################################
    '''1. DATA PREPARING'''
    #######################################################################################
    # rs_num, nrows = 10000, 10000
    data = loadTimeSeries(csv_path, rs_num = rs_num, nrows = nrows, resample_time = resample_time)
    ## ------------------------------------------------------------------------------------
    assert diff_n % resample_time == 0
    # Target: close change over the next diff_n minutes (in resampled periods).
    data['diff'] = data.close.diff(periods=int(diff_n/resample_time)).shift(-int(diff_n/resample_time)).fillna(0)
    zero_propotion = 0
    threshold = threshold_define(data, zero_propotion)
    data = target_define(data, threshold)
    # print data.head(10)
    #######################################################################################
    '''2. FEATURE EXTRACTION'''
    #######################################################################################
    X_ut = getUnitTermFeatures(data)
    X_st = getShortTermFeatures(data, windowsize = 17)
    X_lt = getLongTermFeatures(data, termlist = getFibonacciList(144))
    ## ------------------------------------------------------------------------------------
    '''you can change feature and index here'''
    # Keep only timestamps present in all three feature sets.
    index = sorted(list(set(X_ut.index) & set(X_st.index) & set(X_lt.index)))
    X_ut, X_st, X_lt = X_ut.ix[index], X_st.ix[index], X_lt.ix[index]
    #######################################################################################
    # '''change definition of 'diff', use relative close change instead of absolute close change'''
    # data['diff'] = data['diff']/data['close'] ## (close2-close1)/close1
    yset, closeset, diffset = data['label'].ix[index], data['close'].ix[index], data['diff'].ix[index]
    #######################################################################################
    window_size = int(len(index))
    logger.info('sigingknn >> window_size:{}'.format(window_size))
    yset_window = yset.ix[-window_size:]
    closeset_window = closeset.ix[-window_size:]
    diffset_window = diffset.ix[-window_size:]
    ## ------------------------------------------------------------------------------------
    # Hold out the last diff_n minutes' worth of points as the test slice.
    test_size = int(diff_n/resample_time)
    y_train = yset_window.ix[:-test_size]
    y_test = yset_window.ix[-test_size:]
    diff_train = diffset_window.ix[:-test_size]
    test_index = closeset_window.index[-test_size:]
    test_close = closeset_window.ix[-test_size:]
    #######################################################################################
    '''3. CLASSIFICATION'''
    #######################################################################################
    '''multi_feature'''
    # X = pd.concat([X_ut, X_st, X_lt], axis=0)
    # dataset_window = X.ix[-window_size:]
    # X_train = dataset_window.ix[:-test_size]
    # X_test = dataset_window.ix[-test_size:]
    # y_pred = knn_classification(X_train, X_test, y_train, diff_train)
    #######################################################################################
    '''multi_classification'''
    # One KNN classifier per feature horizon.
    X_ut_train = X_ut.ix[-window_size:].ix[:-test_size]
    X_ut_test = X_ut.ix[-window_size:].ix[-test_size:]
    y_pred_ut = knn_classification(X_ut_train, X_ut_test, y_train, diff_train)
    X_st_train = X_st.ix[-window_size:].ix[:-test_size]
    X_st_test = X_st.ix[-window_size:].ix[-test_size:]
    y_pred_st = knn_classification(X_st_train, X_st_test, y_train, diff_train)
    X_lt_train = X_lt.ix[-window_size:].ix[:-test_size]
    X_lt_test = X_lt.ix[-window_size:].ix[-test_size:]
    y_pred_lt = knn_classification(X_lt_train, X_lt_test, y_train, diff_train)
    ## ------------------------------------------------------------------------------------
    '''you can change classification here'''
    ## ------------------ one classifiers: -----------------------------------------------
    # y_pred = y_pred_st
    ## ------------------ two classifiers: just calculate sum ----------------------------
    # y_pred = np.sign(y_pred_st+y_pred_lt)
    # Trade only when the short-term and long-term classifiers agree exactly.
    y_pred = np.sign(y_pred_st+y_pred_lt)*(1-np.sign(np.abs(y_pred_st-y_pred_lt)))
    ## ------------------ three or more classification: Vote -----------------------------
    # y_pred = Vote(np.array([y_pred_ut, y_pred_st, y_pred_lt]).T)
    # y_pred = np.sign(y_pred_ut+y_pred_st+y_pred_lt)*\
    #          (1-np.sign(np.abs(y_pred_st-y_pred_lt)+np.abs(y_pred_lt-y_pred_ut)+np.abs(y_pred_ut-y_pred_st)))
    #######################################################################################
    y_pred = y_pred.astype(int)
    ## no transaction in the final diff_n minutes of trading
    assert len(test_index) == len(y_pred)
    for i in range(len(test_index)):
        if isDayAcross(test_index[i], diff_n):
            y_pred[i] = 0
        else:
            pass
    logger.info('sigingknn >> {}, {}, {}'.format(test_index[-1], y_pred[-1], test_close[-1]))
    #######################################################################################
    endtime = datetime.datetime.now()
    logger.info('sigingknn >> consuming time:{}'.format(endtime-starttime))
    # Only the newest point's prediction is returned to the caller.
    return pd.DataFrame({'predict': y_pred[-1], 'close':test_close[-1]}, index=[test_index[-1]])
def sigingsvm(params):
    """One forwardtest step: load data ending ``rs_num`` rows from the file's
    end, build features, SVM-classify, and return a one-row DataFrame with
    the prediction and close for the newest point.

    params = (csv_path, rs_num, nrows, resample_time, diff_n).

    NOTE(review): relies on the removed pandas ``.ix`` indexer and on helpers
    (feature_gen, svm_classification) from the star-imported sigsvm module.
    """
    #######################################################################################
    starttime = datetime.datetime.now()
    csv_path, rs_num, nrows, resample_time, diff_n = params
    #######################################################################################
    '''1. DATA PREPARING'''
    #######################################################################################
    # rs_num, nrows = 10000, 10000
    data = loadTimeSeries(csv_path, rs_num = rs_num, nrows = nrows, resample_time = resample_time)
    ## ------------------------------------------------------------------------------------
    assert diff_n % resample_time == 0
    # Target: close change over the next diff_n minutes (in resampled periods).
    data['diff'] = data.close.diff(periods=int(diff_n/resample_time)).shift(-int(diff_n/resample_time)).fillna(0)
    zero_propotion = 0.06
    threshold = threshold_define(data, zero_propotion)
    data = target_define(data, threshold)
    # print data.head(10)
    #######################################################################################
    '''2. FEATURE EXTRACTION'''
    #######################################################################################
    X = feature_gen(data)
    # Strip raw price/target columns so only engineered features remain.
    X = X.drop(['open', 'high', 'low', 'close', 'volume', 'pos', 'neg', 'diff', 'label'], axis = 1)
    index = X.index
    #######################################################################################
    # '''change definition of 'diff', use relative close change instead of absolute close change'''
    # data['diff'] = data['diff']/data['close'] ## (close2-close1)/close1
    yset, closeset, diffset = data['label'].ix[index], data['close'].ix[index], data['diff'].ix[index]
    #######################################################################################
    window_size = int(len(index))
    logger.info('sigingsvm >> window_size:{}'.format(window_size))
    yset_window = yset.ix[-window_size:]
    closeset_window = closeset.ix[-window_size:]
    diffset_window = diffset.ix[-window_size:]
    ## ------------------------------------------------------------------------------------
    # Hold out the last diff_n minutes' worth of points as the test slice.
    test_size = int(diff_n/resample_time)
    y_train = yset_window.ix[:-test_size]
    y_test = yset_window.ix[-test_size:]
    diff_train = diffset_window.ix[:-test_size]
    test_index = closeset_window.index[-test_size:]
    test_close = closeset_window.ix[-test_size:]
    #######################################################################################
    '''3. CLASSIFICATION'''
    #######################################################################################
    dataset_window = X.ix[-window_size:]
    X_train = dataset_window.ix[:-test_size]
    X_test = dataset_window.ix[-test_size:]
    y_pred = svm_classification(X_train, X_test, y_train, diff_train)
    #######################################################################################
    y_pred = y_pred.astype(int)
    ## no transaction in the final diff_n minutes of trading
    assert len(test_index) == len(y_pred)
    for i in range(len(test_index)):
        if isDayAcross(test_index[i], diff_n):
            y_pred[i] = 0
        else:
            pass
    logger.info('sigingsvm >> {}, {}, {}'.format(test_index[-1], y_pred[-1], test_close[-1]))
    #######################################################################################
    endtime = datetime.datetime.now()
    logger.info('sigingsvm >> consuming time:{}'.format(endtime-starttime))
    # Only the newest point's prediction is returned to the caller.
    return pd.DataFrame({'predict': y_pred[-1], 'close':test_close[-1]}, index=[test_index[-1]])
def xx(op):
    """Drive the walk-forward test: fan the per-window prediction jobs out to
    a multiprocessing pool, collect the one-row results, and write them as a
    TSV to ``op.fn_out``.

    NOTE(review): uses the long-removed pandas ``drop_duplicates(take_last=...)``
    and ``DataFrame.sort()`` APIs.
    """
    classifier = op.classifier
    csv_path = op.fn_in
    diff_n = op.diff_n
    # resample_time = diff_n ## str or int, for example, '1min' or 1
    resample_time = 1 ## str or int, for example, '1min' or 1
    lines = sum(1 for _ in csv.reader(open(csv_path)))
    logger.info('xx >> {} lines in csv file'.format(lines))
    ## ------------------------------------------------------------------------------------
    # Training-window length depends on the classifier's cost profile.
    if classifier == 'knn':
        nrows = int(lines*0.5) ## you can change the reading length here for forwardtest
    elif classifier == 'svm':
        nrows = min(int(lines*0.1), 800) ## you can change the reading length here for forwardtest
    elif classifier == 'knn_talib':
        nrows = int(lines*0.5) ## you can change the reading length here for forwardtest
    else:
        nrows = None
    ## ------------------------------------------------------------------------------------
    # predict_periods = 1000
    predict_periods = op.l_predict
    assert lines >= nrows+predict_periods
    step = diff_n ## because you predict label of the diff-th point in sliding window, you must use the next diff-th point to calculate diff and sigsum
    start = nrows
    end = nrows+predict_periods ## use 'end = nrows+1' just for the last point!!!
    rs_num_list = reversed(range(start, end, step))
    #### predict the last predict_periods periods, and predict int(1+(predict_periods-1)/step) points!!!
    logger.info('xx >> {} periods and {} points to predict'.format(predict_periods, int(1+(predict_periods-1)/step)))
    p = mp.Pool(op.n_job)
    ## ------------------------------------------------------------------------------------
    if classifier == 'knn':
        result = p.map(sigingknn, [(csv_path, rs_num, nrows, resample_time, diff_n) for rs_num in rs_num_list]) ## begin to read csv at 'lines-rs_num' line
    elif classifier == 'svm':
        result = p.map(sigingsvm, [(csv_path, rs_num, nrows, resample_time, diff_n) for rs_num in rs_num_list]) ## begin to read csv at 'lines-rs_num' line
    elif classifier == 'knn_talib':
        result = p.map(sigingknn_talib, [(csv_path, rs_num, nrows, resample_time, diff_n) for rs_num in rs_num_list]) ## begin to read csv at 'lines-rs_num' line
    else:
        result = []
    ## ------------------------------------------------------------------------------------
    # Drop failed jobs, deduplicate by timestamp (keeping the latest), sort.
    results = pd.concat([i for i in result if type(i) == pd.DataFrame])
    results = results.reset_index().drop_duplicates(subset='index', take_last=True).set_index('index').sort()
    # print results
    results_dir = os.path.split(op.fn_out)[0]
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    results.to_csv(op.fn_out, index=1, sep='\t', header=1)
    #######################################################################################
    # resultplot(op)
# Script entry point: parse CLI options, then delegate to the speedtest
# variant of the forwardtest driver.
if __name__ == '__main__':
    ## parse argument ##
    description = '' # A description of what the program does
    epilog = '' # Text following the argument descriptions
    parser = argparse.ArgumentParser(description=description, epilog=epilog, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-i', '--input', dest='fn_in', help='input file name.', default='./data/IF.csv') # change default
    parser.add_argument('-of', '--output_file', dest='fn_out', help='output file name.', default='./results/results.csv') # change default
    parser.add_argument('-op', '--output_pic', dest='pn_out', help='output pic name.', default='./results/results.png') # change default
    parser.add_argument('-nj', '--n_job', dest='n_job', help='job number.', type=int, default='1') # change default
    parser.add_argument('-lp', '--l_predict', dest='l_predict', help='predict periods.', type=int, default='10') # change default
    parser.add_argument('-dn', '--diff_n', dest='diff_n', help='diff_n number.', type=int, default='1') # change default
    parser.add_argument('-cf', '--classifier', dest='classifier', help='classifier.', default='knn') # change default
    op = parser.parse_args()
    ## record log:NOTSET<DEBUG<INFO<WARNING<ERROR<CRITICAL
    # logging.debug('debug')
    # logging.info('info')
    # logging.warn('warn')
    # logging.error('error')
    # logging.critical('critical')
    logger.info('main >> runing {} with parameters: {}'.format(APP_NAME, op))
    ## ------------------------------------------------------------------------------------
    '''Speed Up'''
    # xx(op)
    from speedtest import xxtest
    xxtest(op)
    # resultplot(op)
    # closeplot(op)
'''
optforwardtest system
command:
python optforwardtest.py -i ./data/IF.csv -of ./results/results.csv -op ./results/results.png -nj 1 -lp 10 -dn 1 -cl knn
help:
-i the input file
-of the output file which records time index, predict label, and close
-op the output file which describes sigsum and accuracy
-nj number of jobs to run in parallel
-lp length of periods to predict, not number of points to predict
-dn length of periods to shift
-cl classifier
'''
|
[
"lining0806@gmail.com"
] |
lining0806@gmail.com
|
e541abef00103a62f33255d9084922d3be63eabf
|
3a46ee7a74e2b8e57935110737de2279ab3f45f1
|
/automation/amazon_books.py
|
08e15ed341e82907b41237027de0ae403c0e074b
|
[] |
no_license
|
Shivashankar101/PycharmProjects
|
6c30685a00eb498b3e23a5fbbfe56d47797d9fcd
|
d28e63ea417ace1927dec733bea9567339b6ff16
|
refs/heads/main
| 2023-03-07T01:49:47.011595
| 2021-02-21T09:26:07
| 2021-02-21T09:26:07
| 330,922,566
| 0
| 0
| null | 2021-02-20T06:00:04
| 2021-01-19T09:08:39
|
Python
|
UTF-8
|
Python
| false
| false
| 1,920
|
py
|
import csv
from selenium import webdriver
from bs4 import BeautifulSoup
import time
template = 'https://www.amazon.in/s?k={}g&ref=nb_sb_noss'
def get_url(search_item):
    """Build the Amazon search URL for *search_item*.

    Spaces become '+' for the query string; an unfilled '&page={}' slot is
    appended so callers can paginate with .format(page).

    NOTE(review): the module-level `template` appends a stray 'g' right after
    the query placeholder — verify that is intended.
    """
    query = search_item.replace(' ', '+')
    return template.format(query) + '&page={}'
def get_record(item):
    """Extract one book record from a single search-result element.

    Parameters
    ----------
    item : a bs4 element for one 's-search-result' card.

    Returns
    -------
    tuple
        (description, author, rating, reviews, price, url); every field that
        cannot be scraped is returned as a single space.
    """
    try:
        # description
        description = item.h2.a.text.strip()
        url = 'https://www.amazon.in//' + item.h2.a.get('href')
        author_line = item.find('div', {'class': 'a-row a-size-base a-color-secondary'})
        author = author_line.a.text.strip()
        # star-rating
        rating = item.i.text
        reviews = author_line.find('span', {'class': 'a-size-base', 'dir': 'auto'}).text
        # price
        price = item.find('span', {'class': 'a-price'}).text
    except AttributeError:
        description = ' '
        author = ' '
        rating = ' '
        reviews = ' '
        price = ' '
        # BUG FIX: `url` was not reset here; if the h2 lookup failed, building
        # the result tuple below raised NameError on the undefined `url`.
        url = ' '
    result = (description, author, rating, reviews, price, url)
    return result
def scrap_data(search_item):
    """Scrape up to 20 Amazon result pages for *search_item* into results.csv.

    Drives a Safari WebDriver through each paginated search URL, parses every
    's-search-result' card with BeautifulSoup, and writes one CSV row per book.
    """
    url = get_url(search_item)
    driver = webdriver.Safari()
    records = []
    for page in range(1, 21):  # pages 1..20
        driver.get(url.format(page))
        driver.maximize_window()
        driver.implicitly_wait(10)
        soup = BeautifulSoup(driver.page_source, 'html.parser')
        res = soup.find_all('div', {'data-component-type': 's-search-result'})
        for item in res:
            time.sleep(1)  # throttle between cards to be polite to the server
            record = get_record(item)
            if record:
                records.append(record)
    driver.quit()
    ## saving data to .CSV file
    with open('results.csv', 'w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(['description', 'author', 'rating', 'reviews', 'price', 'url'])
        writer.writerows(records)


# Module-level side effect: the scrape runs on import/execution.
scrap_data('books on programming')
|
[
"shivashankarawati@gmail.com"
] |
shivashankarawati@gmail.com
|
16c4dc330380e021d8ad37830b8fba414fa2bcc4
|
e1e95bb143246839e2c9cd64c45ed53e2009239d
|
/Eleonor/codes/FactureOO.py
|
074be7f7e69045954e60f780d878c5d5e0a3f01c
|
[] |
no_license
|
Dalois-30/Eleonor1
|
f34a012dcaf7baae5ba90c3f3291754b3ef0705b
|
6d9b2366393db1859d52c9339bd14f981fa02168
|
refs/heads/main
| 2023-04-07T00:13:16.578001
| 2021-04-06T14:33:08
| 2021-04-06T14:33:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,171
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 22 17:07:06 2021
@author: toor
"""
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 20 16:43:50 2021
@author: Paradoxe
"""
import os
#import django
from tkinter import *
import psycopg2
import tkinter.messagebox #conclure
from datetime import date, datetime
import time
import math, random
#print(django.get_version())
# Database connection parameters.
DATABASE = "kali_db2"   # database name
USER = "kali"           # database owner
PASSWORD = "kali"       # access password
HOST = "localhost"      # server address (local here)

# Establish the connection and create the cursor at import time.
try:
    con = psycopg2.connect("host=%s dbname=%s user=%s password=%s" % (HOST, DATABASE, USER, PASSWORD))
except Exception as err:
    print('La connexion a la base de donnée a échoué : \n'\
          'Erreur détecté :\n%s' % err)
    echec = 1
else:
    cursor = con.cursor()  # create the cursor
    echec = 0

if echec:
    # BUG FIX: the original called sys.exit() without importing `sys`, which
    # raised NameError instead of exiting cleanly on connection failure.
    raise SystemExit(1)
class Facture(Tk):
    """Invoice window: renders the final bill in a Toplevel and persists it.

    The constructor builds the invoice text; the "Imprimer" button dumps it
    to a text file under factures/ and inserts the sale into the Factures
    and Contenir tables.
    """

    def __init__(self, root, numBoutique, numclient, nomclient, numcaissier, produitlist, numero_produit, quantitelist, prixlist, prixtt):
        # Keep parent window and sale data for later persistence.
        self.root = root
        self.fen=Toplevel(self.root)
        self.prixtt = prixtt
        self.produitlist = produitlist
        self.numclient=numclient
        self.num_caissier=numcaissier
        self.fen.title("Gestion des ventes")
        self.fen.geometry("1010x642+140+20")
        # Background canvas with the invoice artwork.
        self.canfond=Canvas(self.fen, width=1025,height=655)
        self.img=PhotoImage(file="images/imeleobfact.png")
        self.canfond.create_image(0,0,anchor=NW,image=self.img)
        self.canfond.config(bg="black")
        self.canfond.place(x=-2,y=-1.7)
        # Random 4-digit invoice number. NOTE(review): collisions are possible —
        # confirm uniqueness is enforced elsewhere (e.g. DB constraint).
        x=random.randint(1000,9999)
        self.num_facture = x
        self.numero_produit = numero_produit
        self.quantitelist = quantitelist
        self.prixlist = prixlist
        #==================================frames===============================
        DataFrame = Frame(self.fen, bd=-1, width=10, height=50, bg="black")
        DataFrame.place(x=160,y=140)
        titre = Label(self.canfond, font=('arial', 30, 'bold'), bg="black",fg="gold", text="Informations finales de facture")
        titre.place(x=210,y=40)
        # Scrollable text widget holding the invoice body.
        scrolf = Scrollbar(DataFrame, orient=VERTICAL)
        self.txtarea = Text(DataFrame, yscrollcommand=scrolf.set)
        scrolf.pack(side=RIGHT,fill=Y)
        scrolf.config(command=self.txtarea.yview())
        self.txtarea.pack(side=BOTTOM, fil=BOTH)
        # Header: timestamp, shop, invoice/client/cashier numbers.
        self.txtarea.insert(END, "\t\t\t\t\t "+time.strftime("%A %d %B %Y %H:%M:%S"))
        self.txtarea.insert(END, "\n\n \t\t\t\tBoutique Numero" + str(numBoutique))
        self.txtarea.insert(END, "\n\n================================================================================")
        self.txtarea.insert(END, "\n\n Facture N° : "+str(self.num_facture))
        self.txtarea.insert(END, "\n Numero du client : 000"+str(self.numclient))
        self.txtarea.insert(END, "\n Nom du client : "+str(nomclient))
        self.txtarea.insert(END, "\n Numero du caissiers : 000"+str(numcaissier))
        self.txtarea.insert(END, "\n\n================================================================================")
        self.txtarea.insert(END, "\n Ref")
        self.txtarea.insert(END, "\t Produits")
        self.txtarea.insert(END, "\t\t Quantité")
        self.txtarea.insert(END, "\t\t Prix Unitaire ")
        self.txtarea.insert(END, "\n================================================================================")
        # One line per product: reference, name, quantity, unit price.
        i = 0
        while(i<len(produitlist)):
            self.txtarea.insert(END, "\n "+str(numero_produit[i]))
            self.txtarea.insert(END, "\t "+produitlist[i])
            self.txtarea.insert(END, "\t\t\t "+str(quantitelist[i]))
            self.txtarea.insert(END, "\t\t\t "+str(prixlist[i]))
            i+=1
        self.txtarea.insert(END, "\n\n\n\tPrix total : "+str(prixtt)+" FCFA" )
        self.butImprimer = Button(self.fen, text="Imprimer",font=('arial', 30, 'bold'), bg="black",fg="gold", command = self.imprimer)
        self.butImprimer.place(x=547, y=465)

    def imprimer(self):
        """Confirm, dump the invoice text to a file and persist the sale.

        NOTE(review): all SQL below is built by string concatenation — this is
        vulnerable to SQL injection; parameterized queries should be used.
        """
        question = messagebox.askyesno("Enregistrement","Voulez vous enregistrer la facture?")
        if question > 0:
            # Snapshot of the invoice text widget (minus the trailing newline).
            self.data = self.txtarea.get("1.0","end-1c")
            # Ensure the factures/ output directory exists.
            if os.path.isdir('factures'):
                #pass
                print("heureux")
            else:
                os.mkdir("factures")
                print("joyeux")
            print("sa continu?")
            # Write the invoice text file, named after the current timestamp.
            f1=open("factures/Facture du "+str(f'{datetime.now():%d-%m-%Y %H:%M:%S}')+".txt", "w")
            f1.write(self.data)
            # NOTE(review): missing parentheses — f1.close is never called.
            f1.close
            # Insert the invoice header row.
            tab=psycopg2.connect("host=%s dbname=%s user=%s password=%s" % (HOST, DATABASE, USER,PASSWORD))
            cursor=tab.cursor()
            cursor.execute("INSERT INTO Factures (num_facture, prix_total,date, num_client,num_caissier) VALUES ('"+str(self.num_facture)+"', '"+str(self.prixtt)+"', '"+str(datetime.now())+"', '"+str(self.numclient)+"', '"+str(self.num_caissier)+"')")
            #cursor.execute("INSERT INTO Factures (num_facture, prix_total,date, num_client,num_caissier) VALUES ('"+str(self.num_facture)+"', '"+str(self.prixtt)+"', '"+'13-03-2021'+"', '"+str(self.numclient)+"', '"+str(self.num_caissier)+"')")
            tab.commit()
            tab.close()
            self.fen.destroy()
            # Insert one Contenir row per product on the invoice.
            j = 0
            while (j<len(self.produitlist)):
                con=psycopg2.connect("host=%s dbname=%s user=%s password=%s" % (HOST, DATABASE, USER,PASSWORD))
                cursor=con.cursor()
                cursor.execute("INSERT INTO Contenir (quantite, prix_unitaire, num_produit, num_facture) VALUES ('"+str(self.quantitelist[j])+"','"+str(self.prixlist[j])+"', '"+str(self.numero_produit[j])+"', '"+str(self.num_facture)+"')")
                j+=1
                con.commit()
                con.close()
            self.root.destroy()
        else:
            return
#if os.path.isdir('/home/wtlx/dossier1'): verifier si un dossier existe
#os.mkdir('myDirectory') creer un dossier
"""def testC2():
obj = Facturation(fen,1,56,"claude",89,produit,numero,qt,prixl,59350)
#obj.remplissage(1,54,"claude",89,produit,numero,qt,prixl,59350)"""
"""fen = Tk()
bouton_new = Button(fen, width=10, height=1, text="Connexion", command=testC2)
bouton_new.pack()
produit=["tomate","pomme","orange"]
numero = [8,9,3]
qt=[10,15,7]
prixl=[800,900,750]
#obj = Facturation(fen)
#obj.remplissage(1,54,"claude",89,produit,numero,qt,prixl,59350)
fen.mainloop()"""
|
[
"claudetapi@yahoo.com"
] |
claudetapi@yahoo.com
|
b1da344207265003e044c700a5073e037dc06e3f
|
08ee36e0bb1c250f7f2dfda12c1a73d1984cd2bc
|
/src/mnistk/networks/resnetstyle_95.py
|
52381dd5557fa0bbec007c65e2b5abeff1a8cf33
|
[] |
no_license
|
ahgamut/mnistk
|
58dadffad204602d425b18549e9b3d245dbf5486
|
19a661185e6d82996624fc6fcc03de7ad9213eb0
|
refs/heads/master
| 2021-11-04T07:36:07.394100
| 2021-10-27T18:37:12
| 2021-10-27T18:37:12
| 227,103,881
| 2
| 1
| null | 2020-02-19T22:07:24
| 2019-12-10T11:33:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,347
|
py
|
# -*- coding: utf-8 -*-
"""
resnetstyle_95.py
:copyright: (c) 2019 by Gautham Venkatasubramanian.
:license: MIT
"""
import torch
from torch import nn
from torchvision.models.resnet import BasicBlock
class ResNetStyle_95(nn.Module):
    """MNIST classifier mixing plain convolutions with ResNet BasicBlocks.

    Layer chain: 17x17 conv -> two BasicBlocks -> 11x11 conv -> 2x2 conv ->
    linear -> log-softmax over the 10 digit classes. Input is a flat
    (batch, 784) tensor reshaped to (batch, 1, 28, 28).
    """

    def __init__(self):
        nn.Module.__init__(self)
        # 28x28 input, 17x17 kernel, no padding -> 12x12 maps, 50 channels.
        self.f0 = nn.Conv2d(in_channels=1, out_channels=50, kernel_size=(17, 17), stride=(1, 1), padding=(0, 0), dilation=(1, 1), groups=1, bias=True, padding_mode='zeros')
        # Two residual blocks; stride 1 keeps the 12x12 spatial size.
        self.f1 = BasicBlock(inplanes=50, planes=50)
        self.f2 = BasicBlock(inplanes=50, planes=50)
        # 12x12 -> 2x2, 12 channels.
        self.f3 = nn.Conv2d(in_channels=50, out_channels=12, kernel_size=(11, 11), stride=(1, 1), padding=(0, 0), dilation=(1, 1), groups=1, bias=False, padding_mode='zeros')
        # 2x2 -> 1x1, 10 channels: one value per class before the linear head.
        self.f4 = nn.Conv2d(in_channels=12, out_channels=10, kernel_size=(2, 2), stride=(1, 1), padding=(0, 0), dilation=(1, 1), groups=1, bias=False, padding_mode='zeros')
        self.f5 = nn.Linear(in_features=10, out_features=10, bias=True)
        self.f6 = nn.LogSoftmax(dim=1)

    def forward(self, *inputs):
        # Only the first positional input is used.
        x = inputs[0]
        x = x.view(x.shape[0],1,28,28)
        x = self.f0(x)
        x = self.f1(x)
        x = self.f2(x)
        x = self.f3(x)
        x = self.f4(x)
        # (batch, 10, 1, 1) -> (batch, 10) for the linear layer.
        x = x.view(x.shape[0],10)
        x = self.f5(x)
        x = self.f6(x)
        return x
|
[
"41098605+ahgamut@users.noreply.github.com"
] |
41098605+ahgamut@users.noreply.github.com
|
d90be07dcd1745206432452d98e1ce1ced901c93
|
8d0757b66cd276455645d0271d2aaae394638ce0
|
/lib/tasks/scripts/load_post_histories_from_psaw.py
|
78fb7863b4db64167b70df49ad69eb3fb2eabe8c
|
[] |
no_license
|
shrik450/gru_recreation
|
95d756df9da7c227643e58bfc906e4e9d7c2108a
|
2d6cedbd7f07351367310fd02a4c25179a475715
|
refs/heads/master
| 2022-12-29T03:08:18.736886
| 2020-10-10T16:15:34
| 2020-10-10T16:15:34
| 294,592,730
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 729
|
py
|
import json
import datetime as dt
from psaw import PushshiftAPI
api = PushshiftAPI()

# Author names, one per line; strip the trailing newline from each.
# BUG FIX: the input handle was opened without ever being closed (and shadowed
# the `file` builtin) — use a context manager instead.
with open("hate_authors.txt", "r") as authors_file:
    authors = [author[:-1] for author in authors_file.readlines()]

# BUG FIX: the counter previously started at 1 and was incremented before the
# progress message, so every message (and the final total) over-counted by one.
processed_authors_count = 0

with open("ps-post-histories", "w") as out_file:
    for author in authors:
        # Up to 500 most recent submissions per author.
        posts = api.search_submissions(author=author, limit=500)
        # Deduplicate into the set of subreddits this author posted to.
        subreddits = set([post.d_["subreddit"] for post in posts])
        output = {author: list(subreddits)}
        # One JSON object per line: {author: [subreddits...]}.
        print(json.dumps(output), file=out_file, end="\n")
        processed_authors_count += 1
        print(f"Processed {processed_authors_count} authors...", end="\r")
        if processed_authors_count % 10 == 0:
            out_file.flush()  # checkpoint so a crash loses at most 10 authors

print(f"Processed {processed_authors_count} authors.", end="\n")
|
[
"shrik450@gmail.com"
] |
shrik450@gmail.com
|
d441d48133d054454e4b073fdba64d94513e826e
|
559d35264d84f40cda478442710ed01b9927aed1
|
/app.py
|
7a7cb2eb849a678f98c697201884f2aabd05306c
|
[] |
no_license
|
cauchymike/temi_go
|
5bbfc463d16d313ba50580ea08b50673479633f4
|
deff410bcd4fe8a3cac2476fa075d69fd8b00fa7
|
refs/heads/main
| 2022-12-20T08:18:04.299527
| 2020-10-09T01:16:47
| 2020-10-09T01:16:47
| 302,466,983
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 81
|
py
|
from temibot import app

# Launch the Flask app only when run directly (debug disabled).
if __name__ =="__main__":
    app.run(debug =False)
|
[
"noreply@github.com"
] |
cauchymike.noreply@github.com
|
4e3127f178684e21906555e033ffcbf9ebd032e8
|
490f13d332d93b14431a5da8e075774bcc6aee3b
|
/quotation/urls.py
|
b7ce3b6e9f9a918eab182325538b75b12509034d
|
[] |
no_license
|
rickyakilimali/ria
|
c9769825fc2b1f1514906b1ac4c30c2e8fe25dfd
|
4c54e4326ff312e231eac6484d09476d61fb564a
|
refs/heads/master
| 2021-08-24T11:39:49.940193
| 2017-12-09T15:13:35
| 2017-12-09T15:13:35
| 113,436,642
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
from django.conf.urls import url
# NOTE(review): TemplateView is imported but unused in this module.
from django.views.generic.base import TemplateView
from .views import MaQuotationView

# URL routes for the quotation app.
urlpatterns = [
    url(r'^ma-quotation/$', MaQuotationView.as_view(), name='ma-quotation'),
]
|
[
"jusciamua@gmail.com"
] |
jusciamua@gmail.com
|
2b95f7153977ca0d45bb1fc024b05b2a6ac9c972
|
182de0ee6ac69d269bc0f90f927e29c7079494c6
|
/data_electorate/data_electorate/settings.py
|
48de898c301a496c567a5662c5ed17290b8f77f4
|
[] |
no_license
|
lucassalcarde/data-ibge-tse
|
2301647ebab817bb706c248e37331e8f2d8cdb6d
|
8b479a07e939bbe474c06fe29a52c085365a0a46
|
refs/heads/master
| 2022-11-20T04:40:57.161242
| 2019-10-23T17:19:40
| 2019-10-23T17:19:40
| 217,048,696
| 0
| 0
| null | 2022-11-04T19:37:42
| 2019-10-23T12:12:44
|
Python
|
UTF-8
|
Python
| false
| false
| 3,307
|
py
|
# -*- coding: utf-8 -*-
# Scrapy settings for data_electorate project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# Project identity and spider discovery paths.
BOT_NAME = 'data_electorate'
SPIDER_MODULES = ['data_electorate.spiders']
NEWSPIDER_MODULE = 'data_electorate.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'data_electorate (+http://www.yourdomain.com)'
# A desktop-Chrome user agent is used instead of the default bot string.
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
# COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'data_electorate.middlewares.DataElectorateSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'data_electorate.middlewares.DataElectorateDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    # Pipeline priority 300 (range 0-1000; lower values run first).
    'data_electorate.pipelines.DataElectoratePipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
|
[
"lucassalcarde@gmail.com"
] |
lucassalcarde@gmail.com
|
534d2c99d85e1cde09020b20275186f6e96c3704
|
b00ff8b5adc27762209859c7d4d84c54652be9fa
|
/users/views.py
|
d387992c92e27af2bd97eacbe80b75e404266787
|
[] |
no_license
|
frankieGitHub/myCardFlask
|
608db76470e12ddb6e317a4898365878965c832a
|
94a90ff61aa257fa62647ab96ca01266b0df0dc7
|
refs/heads/master
| 2020-03-28T07:39:56.651188
| 2018-09-08T09:36:15
| 2018-09-08T09:36:15
| 147,916,152
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,666
|
py
|
from flask import Flask, jsonify, request
from common.views import APIView
from users.auth import auth_required
from users.resources import UserResource
class UserAPIView(APIView):
    """CRUD endpoint for users (Python 2 code: bare `print` statements).

    GET/PUT/DELETE require authentication: @auth_required injects the current
    user as the `user` keyword argument. POST is unauthenticated (sign-up).
    """

    @auth_required
    def get(self, user=None, **kwargs):
        # Return the authenticated user's own record.
        return self.json_response(data=user.to_serializable_dict())

    def post(self):
        # Create a user from the JSON request body.
        print 'hello post'
        response = {}
        user_resource = UserResource(request.json)
        if user_resource.is_valid():
            try:
                user_resource.add()
                response['user'] = user_resource.to_serializable_dict()
            except Exception as error:
                # NOTE(review): errors are printed and swallowed — the caller
                # receives an empty payload with no failure indication.
                print error
                pass
        return self.json_response(data=response)

    @auth_required
    def put(self, user=None, **kwargs):
        # Update the authenticated user's record from the JSON request body.
        response = {}
        user_resource = UserResource(request.json, model=user)
        if user_resource.is_valid():
            try:
                user_resource.update()
                response['user'] = user_resource.to_serializable_dict()
            except Exception as error:
                # NOTE(review): swallowed error, as in post().
                print error
                pass
        return self.json_response(data=response)

    @auth_required
    def delete(self, user=None, **kwargs):
        # Delete the authenticated user's record.
        response = {}
        try:
            user.delete()
            response['ok'] = 'record deleted'
        except Exception as error:
            # NOTE(review): swallowed error, as in post().
            print error
            pass
        return self.json_response(data=response)
app = Flask(__name__)
# Route '<ENDPOINT>/users/' to the class-based view.
app.add_url_rule('{0}/users/'.format(UserAPIView.ENDPOINT), view_func=UserAPIView.as_view('users'))


def run_app():
    # Development server on all interfaces, port 8000, with debug enabled.
    app.run(host='0.0.0.0', port=8000, debug=True)
|
[
"frankie_lrh@163.com"
] |
frankie_lrh@163.com
|
b3238114e24ec787a5ce0c4b2519570ab8b75ccd
|
14aed8f5a144bd8b3833e7a9d5a1c8fddaeb1590
|
/SparkLearning/Solutions/PairRDD_RegularRDD.py
|
7da13d75acef5b3fe4fabf3c171560a52a58b630
|
[] |
no_license
|
TorpidCoder/BigData-Autoscaling
|
2cc393fcb05b9b09a637302770bdf367f162425f
|
7ee3538c7ccf877fd6868698891f29dcd9947df4
|
refs/heads/master
| 2021-06-22T11:02:27.667140
| 2020-12-04T04:14:10
| 2020-12-04T04:14:10
| 159,915,768
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 463
|
py
|
__author__ = "ResearchInMotion"

import findspark
findspark.init()  # locate the local Spark installation so pyspark imports work

from pyspark import SparkContext , SparkConf

# Local SparkContext using all cores, with quiet logging.
sparkconf = SparkConf().setMaster("local[*]").setAppName("test")
sparkcont = SparkContext(conf=sparkconf)
sparkcont.setLogLevel("ERROR")

# Build a pair RDD of (name, age) from "name age" strings and print the pairs.
data = ["sahil 26" , "aardhana 27"]
rdddata = sparkcont.parallelize(data)
pairrdd = rdddata.map(lambda line : (line.split(" ")[0],line.split(" ")[1])).collect()
for key , value in pairrdd:
    print(key,value)
|
[
"thisissahilnagpal@gmail.com"
] |
thisissahilnagpal@gmail.com
|
b0a7c23572aa0b77efc89cdac036e3494c42f6ec
|
a35878a2a370804e7c11d3c3a1fccfe99891b0e1
|
/venv/Scripts/pip3.7-script.py
|
672c1a3a6439056a9dfac3a0bc56828c91d466d5
|
[] |
no_license
|
parmarsuraj99/Saar
|
d005da2dba3b4e594f7497018bedd918856c5db0
|
bd9e7e7294550be36ec9a2768194a75e12927bc1
|
refs/heads/master
| 2020-04-20T13:56:21.558282
| 2019-10-04T04:31:52
| 2019-10-04T04:31:52
| 168,883,505
| 1
| 3
| null | 2019-10-04T04:31:54
| 2019-02-02T22:06:27
|
Python
|
UTF-8
|
Python
| false
| false
| 388
|
py
|
#!E:\Python\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.7'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.7')()
)
|
[
"parmarsuraj99@gmail.com"
] |
parmarsuraj99@gmail.com
|
a9b5389399e46c13c19544252d9a80caf805f169
|
91bfaf7c6f7d0dfad6cfeb697f94e1fd21837ee4
|
/python/gyakorlofeladatok/2.feladat.py
|
c4c09bdec2e52cfddae49a3eafe481ac4859d310
|
[] |
no_license
|
jantayg/13SZF2
|
49c5ed1e462a16ea92c5edaa984fc2a90096a394
|
e77b6b8ff4e954b02b9402a4ddbb29af2800d1f2
|
refs/heads/master
| 2023-05-20T06:13:25.379622
| 2021-06-08T09:03:21
| 2021-06-08T09:03:21
| 353,932,565
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 280
|
py
|
# Read three numbers and print the largest one.
szam1 = (input("első szám: "))
szam2 = (input("második szám: "))
szam3 = (input("harmadik szám: "))
# BUG FIX: the original condition `szam1 > szam2 and szam3` never tested
# szam3 (a non-empty string is always truthy), and comparing the raw strings
# ranks them lexicographically ("9" > "10"). Compare as integers instead.
if int(szam1) > int(szam2) and int(szam1) > int(szam3):
    print("a legnagyobb szám: " + szam1)
elif int(szam2) > int(szam3):
    print("a legnagyobb szám: " + szam2)
else:
    print("a legnagyobb szám: " + szam3)
|
[
"jantayg@icloud.com"
] |
jantayg@icloud.com
|
73166b6628fffe4b8974514dd61779e899b0ebd0
|
e49822ef4837a0642127cb95c3424bdc613fdd85
|
/fifa_crawler.py
|
9c61e007d1039d90a5533110857edff77b041964
|
[] |
no_license
|
tchrys/Football-Stats-Generator-for-ML
|
aad1db05575dfeb1aff15da5bddee1fe2a3bdb07
|
2dbd1ae2b7242d129ac5d8de7faefd61a28bac23
|
refs/heads/master
| 2020-07-30T08:24:05.213575
| 2019-09-22T14:59:00
| 2019-09-22T14:59:00
| 210,153,708
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,791
|
py
|
from bs4 import BeautifulSoup as soup
import urllib
import ast
import io
# this file represents a crawler for fifaindex.com. It has three big functions:
# getAllTeams - it constructs the link regarding to country and year and
# access all teams from that page.
# getTeamInfo - it founds the table with team's players and get the hyperlink
# for each player
# getPlayerInfo: player abiliies start with ball control label, we must be
# careful beacuse we can have a goalkeeper and not a field player
# I've used BeautifulSoup as html parser.
# Fifaindex didn't change page design in last years so I hope this will be
# useful for FIFA 20 too
# output files have json format
currentYear = 2019
leagues = {'england' : 13, 'spain' : 53, 'france' : 16, 'germany' : 19,
'italy' : 31, 'netherlands' : 10, 'portugal' : 308}
site_url = 'https://www.fifaindex.com'
def workRate(wkr):
    """Map a FIFA work-rate label to a numeric score.

    'High' -> 3, 'Medium' -> 2, anything else -> 1.
    """
    scores = {'High': 3, 'Medium': 2}
    return scores.get(wkr, 1)
qualities = {'Ball Skills' : 3, 'Defence' : 4, 'Mental' : 5, 'Passing' : 6,
'Physical' : 7, 'Shooting' : 8, 'Goalkeeper' : 9}
def getPlayerInfo(my_url):
    """Scrape one fifaindex player page; return (stats_dict, player_name).

    The dict holds name/height/weight/position plus one entry per ability
    label (e.g. 'Ball Control'); goalkeepers get only the GK ability card.
    """
    req = urllib.request.Request(my_url, headers={'User-Agent' : "Magic Browser"})
    con = urllib.request.urlopen( req )
    page_html = con.read()
    con.close()
    page_soup = soup(page_html, "html.parser")
    res = dict()
    # Page title looks like "<Name> FIFA ..." — keep the part before "FIFA".
    name = page_soup.h1.text.split("FIFA")[0]
    res['name'] = page_soup.h1.text.split("FIFA")[0]
    generalInfo = page_soup.findAll("div", {"class" : "card-body"})
    genStats = generalInfo[1].findAll("p", {"class" : ""})
    res['height'] = int(genStats[0].span.span.text.split(" ")[0])  # e.g. "183 cm"
    res['weight'] = int(genStats[1].span.span.text.split(" ")[0])  # e.g. "80 kg"
    res['position'] = genStats[5].span.text
    allStats = page_soup.find_all("div", {"class" : "card mb-5"})
    # Ability cards start at the first card-body containing "Ball Control";
    # six field-player cards follow it.
    start = 2
    while "Ball Control" not in str(generalInfo[start].text):
        start += 1
    end = start + 6
    if res['position'] == 'GK':
        # Goalkeepers: skip the six field-player cards, read only the GK card.
        start = end
        end += 1
    for i in range(start, end):
        stats = allStats[i].find_all('p')
        for j in range(len(stats)):
            # Each <p> text is "<Label words> <rating>": last token is the
            # rating, the rest re-joined is the ability label.
            single_stat = stats[j].text.split(" ")
            ovr = single_stat[len(single_stat) - 1]
            string = ''
            for k in range(len(single_stat) - 1):
                string += single_stat[k] + ' '
            string = string[:-1]
            res[string] = ovr
    # Drop "Composure" — presumably not present for every player; verify.
    res.pop("Composure", None)
    return res, name
def getTeamInfo(my_url):
    """Scrape one team page; return {'name': ..., 'players': {name: stats}}.

    Follows the hyperlink of every row in the team's players table and
    scrapes each player page via getPlayerInfo.
    """
    print(my_url)
    req = urllib.request.Request(my_url, headers={'User-Agent' : "Magic Browser"})
    con = urllib.request.urlopen( req )
    page_html = con.read()
    con.close()
    page_soup = soup(page_html, "html.parser")
    res = dict()
    # Team name is the page title up to the word "FIFA".
    res['name'] = page_soup.h1.text.split("FIFA")[0]
    players = dict()
    players_html = page_soup.findAll("table",
                                     {"class" : "table table-players table-striped"})
    # Skip row 0 (the table header); one player per remaining row.
    rows = players_html[0].findAll('tr')
    for i in range(1, len(rows)):
        link = site_url + rows[i].a['href']
        playerinfo, playername = getPlayerInfo(link)
        players[playername] = playerinfo
    res['players'] = players
    return res
def getUrl(country, year):
    """Build the fifaindex team-listing URL for a league and year.

    Archive years insert a 'fifaNN' path segment (last two digits of the
    year); the current year uses the bare /teams/ path.
    """
    archive_segment = '' if year == currentYear else 'fifa' + str(year)[-2:]
    return ('https://www.fifaindex.com/teams/'
            + archive_segment
            + '/?league=' + str(leagues[country])
            + '&order=desc')
def getAllTeams(country, year):
    """Scrape every team of a league/year and dump each to teams_<country>/<year>/.

    Each team file contains the repr() of the dict built by getTeamInfo and
    can be loaded back with readFromFile.
    """
    my_url = getUrl(country, year)
    # Current-year team hrefs have one fewer path segment than archive pages,
    # so the slug sits at a different index when splitting on '/'.
    split_index = -2 if year == currentYear else -3
    req = urllib.request.Request(my_url, headers={'User-Agent' : "Magic Browser"})
    con = urllib.request.urlopen( req )
    page_html = con.read()
    con.close()
    page_soup = soup(page_html, "html.parser")
    teams_table = page_soup.findAll("table",
                                    {"class" : "table table-striped table-teams"})
    # Skip row 0 (the table header); one team per remaining row.
    rows = teams_table[0].findAll('tr')
    for i in range(1, len(rows)):
        team = rows[i].a['href'].split("/")[split_index]
        link = site_url + rows[i].a['href']
        team_dict = getTeamInfo(link)
        # Windows-style path separators — presumably run on Windows; verify.
        path = 'teams_' + country + '\\' + str(year) + '\\' + team
        with io.open(path, "w", encoding="utf-8") as f:
            f.write(str(team_dict))
def readFromFile(filename):
    """Load a team dict previously written by getAllTeams.

    The file contains the repr() of a Python dict; ast.literal_eval parses it
    safely (literals only — no arbitrary code execution, unlike eval).

    BUG FIX: the original opened the file without ever closing it; the
    with-statement guarantees the handle is released even if parsing fails.
    """
    with open(filename, "r") as fl:
        return ast.literal_eval(fl.read())
# use example
#getAllTeams('italy', 2019)
#for country in ['germany', 'france']:
# for year in range(2011, 2020):
# getAllTeams(country, year)
|
[
"tanasescuchrys@gmail.com"
] |
tanasescuchrys@gmail.com
|
7d4ec6bb3cb595bb0184c6b167533d881f5550f3
|
28958794f32fd2b5852a4b904325e0fd647a73e9
|
/modules/mmconnect.py
|
de6e504e114c7037d522ab6f08aae663bd8fb808
|
[] |
no_license
|
gruvin/mmxmodem
|
23decdd5d9150f8807233f6560113efca3d8ff89
|
15db54eec2e584ee2f0a95cab7e613dbc0bd46c2
|
refs/heads/master
| 2021-01-01T05:32:17.623290
| 2015-05-17T11:15:06
| 2015-05-17T11:15:06
| 34,551,096
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,422
|
py
|
import sys
import serial
from time import time, sleep
import logging
log = logging.getLogger()
SOH='\x01'
STX='\x02'
ACK='\x06'
NAK='\x15'
CAN='\x18'
log.debug('opening serial port')
def mmconnect(sp):
    """Open serial port `sp` and synchronise with a Maximite command prompt.

    Returns the open serial.Serial object; calls quit() on any failure.
    (Python 2 code: bare `print` statements.)
    """
    try:
        s = serial.Serial(
            port=sp,
            baudrate=115200,
            bytesize=8,
            parity='N',
            stopbits=1,
            timeout=3,
            rtscts=1,
            # dsrdtr=1
        )
    except:
        print 'ERROR: Could not access serial port ' + sp + '\n'
        quit()
    # Phase 1: wait for the banner line containing "Maximite" (up to 5 tries).
    found = 0
    for a in range(1, 6):
        log.debug('Waiting for "... Maximite ..." [%d]', a)
        hello = ''
        timeout = time() + 2.0
        while (1):
            log.debug('time: %f', time())
            if time() > timeout:
                log.debug('TIMEOUT');
                break
            if s.inWaiting():
                c = s.read()
                # sys.stdout.write(c)
                if c == NAK or c == ACK or c == STX or c == SOH:
                    # Control bytes mean an xmodem transfer is in progress —
                    # cancel it before retrying the banner read.
                    log.debug('Detected xmodem session. Attempting to cancel it ...')
                    for i in range(1, 5):
                        s.write(CAN)
                        sleep(0.1)
                    sleep(1.0)
                    s.readline()
                    hello=''
                    break
                elif c == '\r':
                    # End of line: go check the accumulated text.
                    break
                else:
                    hello += c
        if hello.find('Maximite') != -1:
            found = 1
            break
        # No banner yet: nudge the device and reopen the port for a clean retry.
        s.write(NAK)
        s.flushInput()
        s.close()
        log.debug('closing serial port')
        sleep(1.0)
        log.debug('re-opening serial port')
        s.open()
    if found == 0:
        print 'Expected "... Maximite ..." but got "' + hello + '"\nAborting.\n\n'
        quit()
    log.info('Maximite connected. Checking for command prompt ...')
    # now ensure that we are at a cmd prompt
    # Phase 2: send Ctrl-C, then a PRINT command whose echoed output cannot
    # match itself ('"m""m""x"' prints 'mmx'), and look for that marker.
    found = 0
    for n in range(1, 2):
        s.write('\003');
        sleep(0.2)
        s.flushInput();
        s.write('print "m""m""x"\r')
        sleep(0.2)
        s.readline()
        hello = s.readline()
        log.debug('prompt? %s', hello)
        if hello.find('mmx') != -1:
            found = 1
            break
        sleep(1.0)
        s.flushInput()
    if found == 0:
        print 'Cannot seem to get to a command prompt :-(\nAborting\n\n'
        quit()
    sleep(0.2)
    return s
|
[
"gruvin@gmail.com"
] |
gruvin@gmail.com
|
200f686b4da3cb5b583d6c94769b27385b223a24
|
0819516b5ebbf82967b8bca36e6c9185f0570aa2
|
/examples/layer_2/threaded/network_down_detectors_no_runner.py
|
cfbb65c0ccd48387d3f2d1dc44fb4c1c005db494
|
[
"BSD-3-Clause"
] |
permissive
|
rosekdrd/moler
|
4fa395b54f6f5813168f1ea35e97877a59efed8c
|
d06bc226cf794edbedea2e863365150759564ae5
|
refs/heads/master
| 2020-06-26T13:59:58.367831
| 2019-07-30T08:00:32
| 2019-07-30T08:00:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,697
|
py
|
# -*- coding: utf-8 -*-
"""
threaded.network_down_detectors_no_runner.py
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A fully-functional connection-observer using socket & threading.
Works on Python 2.7 as well as on 3.6
This is Layer_2 (half of it) example:
uses Moler provided external-IO TCP implementation (moler.io.raw.tcp.ThradedTcp)
that integrates with Moler's connection
This example demonstrates multiple connection observers working
on multiple connections.
Shows following concepts:
- multiple observers may observe single connection
- each one is focused on different data (processing decomposition)
- client code may run observers on different connections
- client code may "start" observers in sequence
"""
__author__ = 'Grzegorz Latuszek'
__copyright__ = 'Copyright (C) 2018, Nokia'
__email__ = 'grzegorz.latuszek@nokia.com'
import logging
import sys
import os
import threading
import time
from moler.connection import ObservableConnection
from moler.io.raw import tcp
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) # allow finding modules in examples/
from network_toggle_observers import NetworkDownDetector, NetworkUpDetector
# ===================== Moler's connection-observer usage ======================
def ping_observing_task(ext_io_connection, ping_ip):
    """
    Here external-IO connection is abstract - we don't know its type.
    What we know is just that it has .moler_connection attribute.

    Watches ping output on the connection: first detects the network going
    down, then swaps observers to detect it coming back up, within a fixed
    10-second observation window.
    """
    logger = logging.getLogger('moler.user.app-code')
    conn_addr = str(ext_io_connection)

    # Layer 2 of Moler's usage (ext_io_connection):
    # 1. create observers
    net_down_detector = NetworkDownDetector(ping_ip)
    net_drop_found = False
    net_up_detector = NetworkUpDetector(ping_ip)
    moler_conn = ext_io_connection.moler_connection
    # 2. virtually "start" observer by making it data-listener
    moler_conn.subscribe(net_down_detector.data_received)

    info = '{} on {} using {}'.format(ping_ip, conn_addr, net_down_detector)
    logger.debug('observe ' + info)

    with ext_io_connection.open():
        observing_timeout = 10
        start_time = time.time()
        while time.time() < start_time + observing_timeout:
            # anytime new data comes it may change status of observer
            if not net_drop_found and net_down_detector.done():
                net_drop_found = True
                net_down_time = net_down_detector.result()
                timestamp = time.strftime("%H:%M:%S", time.localtime(net_down_time))
                logger.debug('Network {} is down from {}'.format(ping_ip, timestamp))
                # 3. virtually "stop" that observer
                moler_conn.unsubscribe(net_down_detector.data_received)
                # 4. and start subsequent one (to know when net is back "up")
                info = '{} on {} using {}'.format(ping_ip, conn_addr, net_up_detector)
                logger.debug('observe ' + info)
                moler_conn.subscribe(net_up_detector.data_received)
            if net_up_detector.done():
                net_up_time = net_up_detector.result()
                timestamp = time.strftime("%H:%M:%S", time.localtime(net_up_time))
                logger.debug('Network {} is back "up" from {}'.format(ping_ip, timestamp))
                # 5. virtually "stop" that observer
                moler_conn.unsubscribe(net_up_detector.data_received)
                break
            # Poll every 200 ms until timeout or the "up" event is seen.
            time.sleep(0.2)
# ==============================================================================
def main(connections2observe4ip):
    """Spawn one observing client per (tcp-address, ping-ip) pair, wait for all."""
    client_threads = []
    for address, ping_ip in connections2observe4ip:
        host, port = address
        # 1. create Moler's connection that knows encoding
        moler_conn = ObservableConnection(decoder=lambda data: data.decode("utf-8"))
        # 2. create external-IO connection gluing to Moler's connection
        conn_logger = logging.getLogger(
            'threaded.tcp-connection({}:{})'.format(host, port))
        tcp_connection = tcp.ThreadedTcp(moler_connection=moler_conn,
                                         port=port, host=host,
                                         logger=conn_logger)
        worker = threading.Thread(target=ping_observing_task,
                                  args=(tcp_connection, ping_ip))
        worker.start()
        client_threads.append(worker)
    # await observers job to be done
    for worker in client_threads:
        worker.join()
# ==============================================================================
if __name__ == '__main__':
    # Script entry point: start simulated ping TCP servers, run the
    # observing clients against them, then shut the servers down.
    from threaded_ping_server import start_ping_servers, stop_ping_servers

    # Log everything (DEBUG) to stderr so the example's full trace is visible.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s |%(name)-40s |%(message)s',
        datefmt='%H:%M:%S',
        stream=sys.stderr,
    )
    # (tcp-server-address, ip-being-pinged) pairs; one client thread per pair.
    connections2observe4ip = [(('localhost', 5671), '10.0.2.15'),
                              (('localhost', 5672), '10.0.2.16')]
    servers = start_ping_servers(connections2observe4ip)
    main(connections2observe4ip)
    stop_ping_servers(servers)
'''
LOG OUTPUT
12:06:50 |threaded.ping.tcp-server(5671) |Ping Sim started at tcp://localhost:5671
12:06:50 |threaded.ping.tcp-server(5672) |Ping Sim started at tcp://localhost:5672
12:06:50 |moler.user.app-code |observe 10.0.2.15 on tcp://localhost:5671 using NetworkDownDetector(id:46425312)
12:06:50 |moler.user.app-code |observe 10.0.2.16 on tcp://localhost:5672 using NetworkDownDetector(id:46425704)
12:06:50 |threaded.tcp-connection(localhost:5671) |connecting to tcp://localhost:5671
12:06:50 |threaded.tcp-connection(localhost:5672) |connecting to tcp://localhost:5672
12:06:50 |threaded.tcp-connection(localhost:5671) |connection tcp://localhost:5671 is open
12:06:50 |threaded.ping.tcp-server(5671 -> 62735) |connection accepted - client at tcp://127.0.0.1:62735
12:06:50 |threaded.tcp-connection(localhost:5672) |connection tcp://localhost:5672 is open
12:06:50 |threaded.tcp-connection(localhost:5671) |< b'\n'
12:06:50 |threaded.ping.tcp-server(5672 -> 62736) |connection accepted - client at tcp://127.0.0.1:62736
12:06:50 |threaded.tcp-connection(localhost:5672) |< b'\n'
12:06:51 |threaded.tcp-connection(localhost:5672) |< b'greg@debian:~$ ping 10.0.2.16\n'
12:06:51 |threaded.tcp-connection(localhost:5671) |< b'greg@debian:~$ ping 10.0.2.15\n'
12:06:52 |threaded.tcp-connection(localhost:5672) |< b'PING 10.0.2.16 (10.0.2.16) 56(84) bytes of data.\n'
12:06:52 |threaded.tcp-connection(localhost:5671) |< b'PING 10.0.2.15 (10.0.2.15) 56(84) bytes of data.\n'
12:06:53 |threaded.tcp-connection(localhost:5672) |< b'64 bytes from 10.0.2.16: icmp_req=1 ttl=64 time=0.080 ms\n'
12:06:53 |threaded.tcp-connection(localhost:5671) |< b'64 bytes from 10.0.2.15: icmp_req=1 ttl=64 time=0.080 ms\n'
12:06:54 |threaded.tcp-connection(localhost:5672) |< b'64 bytes from 10.0.2.16: icmp_req=2 ttl=64 time=0.037 ms\n'
12:06:54 |threaded.tcp-connection(localhost:5671) |< b'64 bytes from 10.0.2.15: icmp_req=2 ttl=64 time=0.037 ms\n'
12:06:55 |threaded.tcp-connection(localhost:5672) |< b'64 bytes from 10.0.2.16: icmp_req=3 ttl=64 time=0.045 ms\n'
12:06:55 |threaded.tcp-connection(localhost:5671) |< b'64 bytes from 10.0.2.15: icmp_req=3 ttl=64 time=0.045 ms\n'
12:06:56 |threaded.tcp-connection(localhost:5672) |< b'ping: sendmsg: Network is unreachable\n'
12:06:56 |moler.NetworkDownDetector(id:46425704) |Network 10.0.2.16 is down!
12:06:56 |moler.user.app-code |Network 10.0.2.16 is down from 12:06:56
12:06:56 |moler.user.app-code |observe 10.0.2.16 on tcp://localhost:5672 using NetworkUpDetector(id:46426096)
12:06:56 |threaded.tcp-connection(localhost:5671) |< b'ping: sendmsg: Network is unreachable\n'
12:06:56 |moler.NetworkDownDetector(id:46425312) |Network 10.0.2.15 is down!
12:06:56 |moler.user.app-code |Network 10.0.2.15 is down from 12:06:56
12:06:56 |moler.user.app-code |observe 10.0.2.15 on tcp://localhost:5671 using NetworkUpDetector(id:46425480)
12:06:57 |threaded.tcp-connection(localhost:5672) |< b'ping: sendmsg: Network is unreachable\n'
12:06:57 |threaded.tcp-connection(localhost:5671) |< b'ping: sendmsg: Network is unreachable\n'
12:06:58 |threaded.tcp-connection(localhost:5672) |< b'ping: sendmsg: Network is unreachable\n'
12:06:58 |threaded.tcp-connection(localhost:5671) |< b'ping: sendmsg: Network is unreachable\n'
12:06:59 |threaded.tcp-connection(localhost:5672) |< b'64 bytes from 10.0.2.16: icmp_req=7 ttl=64 time=0.123 ms\n'
12:06:59 |moler.NetworkUpDetector(id:46426096) |Network 10.0.2.16 is up!
12:06:59 |threaded.tcp-connection(localhost:5671) |< b'64 bytes from 10.0.2.15: icmp_req=7 ttl=64 time=0.123 ms\n'
12:06:59 |moler.NetworkUpDetector(id:46425480) |Network 10.0.2.15 is up!
12:06:59 |moler.user.app-code |Network 10.0.2.15 is back "up" from 12:06:59
12:06:59 |threaded.tcp-connection(localhost:5671) |connection tcp://localhost:5671 is closed
12:06:59 |moler.user.app-code |Network 10.0.2.16 is back "up" from 12:06:59
12:06:59 |threaded.tcp-connection(localhost:5672) |connection tcp://localhost:5672 is closed
12:07:00 |threaded.ping.tcp-server(5671) |Ping Sim: ... bye
12:07:00 |threaded.ping.tcp-server(5672) |Ping Sim: ... bye
12:07:01 |threaded.ping.tcp-server(5672 -> 62736) |Connection closed
12:07:01 |threaded.ping.tcp-server(5671 -> 62735) |Connection closed
'''
|
[
"grzegorz.latuszek@nokia.com"
] |
grzegorz.latuszek@nokia.com
|
2b5d91deecaab4c21ab1f2aa574664b0600d821b
|
b80189c10ead7cc7f04282c52a3424459fe1af9b
|
/lesson2/serializers.py
|
05651443ab71cb2bbdb89e88c5f5b56ed27a21da
|
[] |
no_license
|
turganaliev/django_rf
|
4586961b66171f43b010842a2ea2990b5e1f1d14
|
c2cb59c0d414112a7f851f7df1a41f6155c6f3be
|
refs/heads/main
| 2023-03-12T12:01:22.390129
| 2021-03-02T12:09:26
| 2021-03-02T12:09:26
| 342,930,006
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 814
|
py
|
from rest_framework import serializers
from lesson2.models import CarBrand, Car
class CarListSerializer(serializers.ModelSerializer):
    """Serializes every model field of a Car (used for car lists)."""

    class Meta:
        model = Car
        fields = '__all__'
class CarBrandListSerializer(serializers.ModelSerializer):
    """A brand together with its nested cars (brand list endpoints)."""

    # Nested representation of the brand's related cars.
    cars = CarListSerializer(many=True)

    class Meta:
        model = CarBrand
        fields = ['id', 'name', 'country', 'cars']
class CarBrandSerializer(serializers.ModelSerializer):
    """Flat brand representation: id, name and country only."""

    class Meta:
        model = CarBrand
        fields = ['id', 'name', 'country']
class CarWithBrandSerializer(serializers.ModelSerializer):
    """A car whose brand is flattened to just the brand's name."""

    brand = serializers.SerializerMethodField()

    class Meta:
        model = Car
        fields = ['id', 'model', 'brand']

    def get_brand(self, obj):
        # Resolver for the SerializerMethodField above.
        return obj.brand.name
|
[
"izatturganaliev@Izats-MacBook-Air.local"
] |
izatturganaliev@Izats-MacBook-Air.local
|
b7f049329b8231816e82d0935ee42713218c7f5f
|
f075e81dd84cb8220eaf800a873bcc784072421b
|
/bokeh-master/tests/integration/interaction/test_tools.py
|
0c5d67ccae109ef675f53be4bd3507121b191727
|
[] |
no_license
|
tommycarpi/py-tracy
|
ef3912f26d2a9fc3a52baf1bc546b915f5faa380
|
f10916e9219e902187fa4be2e874da83bb7251c7
|
refs/heads/master
| 2021-01-18T19:01:37.174060
| 2016-01-24T15:22:59
| 2016-01-24T15:22:59
| 47,195,069
| 8
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,640
|
py
|
from __future__ import absolute_import
from bokeh.io import save
from bokeh.models import BoxSelectTool, ColumnDataSource, CustomJS
from bokeh.plotting import figure
from selenium.webdriver.common.action_chains import ActionChains
import pytest
pytestmark = pytest.mark.integration
def test_box_select(output_file_url, selenium):
    """End-to-end check that a box_select drag selects exactly the glyphs
    inside the dragged region (only the 'middle' point here)."""
    PLOT_DIM = 600

    source = ColumnDataSource(dict(
        x=[1, 2, 3],
        y=[3, 2, 3],
        name=['top_left', 'middle', 'top_right'],
    ))

    # Make plot and add a taptool callback that generates an alert
    plot = figure(tools='box_select', height=PLOT_DIM, width=PLOT_DIM,
                  x_range=[1, 3], y_range=[1, 3])
    plot.circle(x='x', y='y', radius=0.2, source=source)
    # The JS callback raises a browser alert naming the selected points,
    # which selenium reads back at the end of the test.
    source.callback = CustomJS(code="""
        var indices = cb_obj.get('selected')['1d'].indices,
            data = cb_obj.get('data'),
            selected_names = '';
        Bokeh.$.each(indices, function(i, index) {
            selected_names += data['name'][index];
        });
        alert(selected_names);
        """)

    # Save the plot and start the test
    save(plot)
    selenium.get(output_file_url)

    # Drag a box zoom around middle point
    # (from 25% to 75% of the canvas; encloses only the point at (2, 2))
    canvas = selenium.find_element_by_tag_name('canvas')

    actions = ActionChains(selenium)
    actions.move_to_element_with_offset(canvas, PLOT_DIM * 0.25, PLOT_DIM * 0.25)
    actions.click_and_hold()
    actions.move_by_offset(PLOT_DIM * 0.5, PLOT_DIM * 0.5)
    actions.release()
    actions.perform()

    # Get the alert from box select and assert that the middle item is selected
    alert = selenium.switch_to_alert()
    assert alert.text == 'middle'
|
[
"tommy@tommy.local"
] |
tommy@tommy.local
|
2c7bba85d135de825a21fed533f6e623f05365a4
|
fd15aff8a2249a27e61a053fd24da0ea85ac4a42
|
/Lib/test/test_bz2.py
|
f158b901b9c90ab675115a5aa40af2ae0d5a4d35
|
[
"MIT"
] |
permissive
|
Henrike100/EpicoCompiladores
|
b8d362ac3f87031ff49635b04defc800e19b5688
|
432f3a3371cdfe232208781f7b180241c5c22fac
|
refs/heads/master
| 2022-11-06T02:54:41.467006
| 2019-06-28T15:48:47
| 2019-06-28T15:48:47
| 194,295,124
| 0
| 1
|
MIT
| 2022-10-13T19:14:02
| 2019-06-28T15:22:27
|
Python
|
UTF-8
|
Python
| false
| false
| 37,571
|
py
|
from test import support
from test.support import bigmemtest, _4G
import unittest
from io import BytesIO, DEFAULT_BUFFER_SIZE
import os
import pickle
import glob
import tempfile
import pathlib
import random
import shutil
import subprocess
import threading
from test.support import unlink
import _compression
import sys
# Skip tests if the bz2 module doesn't exist.
bz2 = support.import_module('bz2')
from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor
# Lazily-probed flag: None = not checked yet, then True/False.
has_cmdline_bunzip2 = None

def ext_decompress(data):
    """Decompress bzip2 *data* with the external ``bunzip2`` tool when it is
    installed, falling back to the bz2 module otherwise.

    Using an independent decompressor cross-checks our compressor output.
    The availability probe runs once and is cached in the module-level flag.
    """
    global has_cmdline_bunzip2
    if has_cmdline_bunzip2 is None:
        has_cmdline_bunzip2 = shutil.which('bunzip2') is not None
    if not has_cmdline_bunzip2:
        return bz2.decompress(data)
    return subprocess.check_output(['bunzip2'], input=data)
class BaseTest(unittest.TestCase):
"Base for other testcases."
TEXT_LINES = [
b'root:x:0:0:root:/root:/bin/bash\n',
b'bin:x:1:1:bin:/bin:\n',
b'daemon:x:2:2:daemon:/sbin:\n',
b'adm:x:3:4:adm:/var/adm:\n',
b'lp:x:4:7:lp:/var/spool/lpd:\n',
b'sync:x:5:0:sync:/sbin:/bin/sync\n',
b'shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\n',
b'halt:x:7:0:halt:/sbin:/sbin/halt\n',
b'mail:x:8:12:mail:/var/spool/mail:\n',
b'news:x:9:13:news:/var/spool/news:\n',
b'uucp:x:10:14:uucp:/var/spool/uucp:\n',
b'operator:x:11:0:operator:/root:\n',
b'games:x:12:100:games:/usr/games:\n',
b'gopher:x:13:30:gopher:/usr/lib/gopher-data:\n',
b'ftp:x:14:50:FTP User:/var/ftp:/bin/bash\n',
b'nobody:x:65534:65534:Nobody:/home:\n',
b'postfix:x:100:101:postfix:/var/spool/postfix:\n',
b'niemeyer:x:500:500::/home/niemeyer:/bin/bash\n',
b'postgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\n',
b'mysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\n',
b'www:x:103:104::/var/www:/bin/false\n',
]
TEXT = b''.join(TEXT_LINES)
DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
EMPTY_DATA = b'BZh9\x17rE8P\x90\x00\x00\x00\x00'
BAD_DATA = b'this is not a valid bzip2 file'
# Some tests need more than one block of uncompressed data. Since one block
# is at least 100,000 bytes, we gather some data dynamically and compress it.
# Note that this assumes that compression works correctly, so we cannot
# simply use the bigger test data for all tests.
test_size = 0
BIG_TEXT = bytearray(128*1024)
for fname in glob.glob(os.path.join(os.path.dirname(__file__), '*.py')):
with open(fname, 'rb') as fh:
test_size += fh.readinto(memoryview(BIG_TEXT)[test_size:])
if test_size > 128*1024:
break
BIG_DATA = bz2.compress(BIG_TEXT, compresslevel=1)
def setUp(self):
fd, self.filename = tempfile.mkstemp()
os.close(fd)
def tearDown(self):
unlink(self.filename)
class BZ2FileTest(BaseTest):
"Test the BZ2File class."
def createTempFile(self, streams=1, suffix=b""):
with open(self.filename, "wb") as f:
f.write(self.DATA * streams)
f.write(suffix)
def testBadArgs(self):
self.assertRaises(TypeError, BZ2File, 123.456)
self.assertRaises(ValueError, BZ2File, os.devnull, "z")
self.assertRaises(ValueError, BZ2File, os.devnull, "rx")
self.assertRaises(ValueError, BZ2File, os.devnull, "rbt")
self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=0)
self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=10)
def testRead(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, float())
self.assertEqual(bz2f.read(), self.TEXT)
def testReadBadFile(self):
self.createTempFile(streams=0, suffix=self.BAD_DATA)
with BZ2File(self.filename) as bz2f:
self.assertRaises(OSError, bz2f.read)
def testReadMultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, float())
self.assertEqual(bz2f.read(), self.TEXT * 5)
def testReadMonkeyMultiStream(self):
# Test BZ2File.read() on a multi-stream archive where a stream
# boundary coincides with the end of the raw read buffer.
buffer_size = _compression.BUFFER_SIZE
_compression.BUFFER_SIZE = len(self.DATA)
try:
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, float())
self.assertEqual(bz2f.read(), self.TEXT * 5)
finally:
_compression.BUFFER_SIZE = buffer_size
def testReadTrailingJunk(self):
self.createTempFile(suffix=self.BAD_DATA)
with BZ2File(self.filename) as bz2f:
self.assertEqual(bz2f.read(), self.TEXT)
def testReadMultiStreamTrailingJunk(self):
self.createTempFile(streams=5, suffix=self.BAD_DATA)
with BZ2File(self.filename) as bz2f:
self.assertEqual(bz2f.read(), self.TEXT * 5)
def testRead0(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, float())
self.assertEqual(bz2f.read(0), b"")
def testReadChunk10(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
text = b''
while True:
str = bz2f.read(10)
if not str:
break
text += str
self.assertEqual(text, self.TEXT)
def testReadChunk10MultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
text = b''
while True:
str = bz2f.read(10)
if not str:
break
text += str
self.assertEqual(text, self.TEXT * 5)
def testRead100(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertEqual(bz2f.read(100), self.TEXT[:100])
def testPeek(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
pdata = bz2f.peek()
self.assertNotEqual(len(pdata), 0)
self.assertTrue(self.TEXT.startswith(pdata))
self.assertEqual(bz2f.read(), self.TEXT)
def testReadInto(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
n = 128
b = bytearray(n)
self.assertEqual(bz2f.readinto(b), n)
self.assertEqual(b, self.TEXT[:n])
n = len(self.TEXT) - n
b = bytearray(len(self.TEXT))
self.assertEqual(bz2f.readinto(b), n)
self.assertEqual(b[:n], self.TEXT[-n:])
def testReadLine(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.readline, None)
for line in self.TEXT_LINES:
self.assertEqual(bz2f.readline(), line)
def testReadLineMultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.readline, None)
for line in self.TEXT_LINES * 5:
self.assertEqual(bz2f.readline(), line)
def testReadLines(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.readlines, None)
self.assertEqual(bz2f.readlines(), self.TEXT_LINES)
def testReadLinesMultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.readlines, None)
self.assertEqual(bz2f.readlines(), self.TEXT_LINES * 5)
def testIterator(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertEqual(list(iter(bz2f)), self.TEXT_LINES)
def testIteratorMultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
self.assertEqual(list(iter(bz2f)), self.TEXT_LINES * 5)
def testClosedIteratorDeadlock(self):
# Issue #3309: Iteration on a closed BZ2File should release the lock.
self.createTempFile()
bz2f = BZ2File(self.filename)
bz2f.close()
self.assertRaises(ValueError, next, bz2f)
# This call will deadlock if the above call failed to release the lock.
self.assertRaises(ValueError, bz2f.readlines)
def testWrite(self):
with BZ2File(self.filename, "w") as bz2f:
self.assertRaises(TypeError, bz2f.write)
bz2f.write(self.TEXT)
with open(self.filename, 'rb') as f:
self.assertEqual(ext_decompress(f.read()), self.TEXT)
def testWriteChunks10(self):
with BZ2File(self.filename, "w") as bz2f:
n = 0
while True:
str = self.TEXT[n*10:(n+1)*10]
if not str:
break
bz2f.write(str)
n += 1
with open(self.filename, 'rb') as f:
self.assertEqual(ext_decompress(f.read()), self.TEXT)
def testWriteNonDefaultCompressLevel(self):
expected = bz2.compress(self.TEXT, compresslevel=5)
with BZ2File(self.filename, "w", compresslevel=5) as bz2f:
bz2f.write(self.TEXT)
with open(self.filename, "rb") as f:
self.assertEqual(f.read(), expected)
def testWriteLines(self):
with BZ2File(self.filename, "w") as bz2f:
self.assertRaises(TypeError, bz2f.writelines)
bz2f.writelines(self.TEXT_LINES)
# Issue #1535500: Calling writelines() on a closed BZ2File
# should raise an exception.
self.assertRaises(ValueError, bz2f.writelines, ["a"])
with open(self.filename, 'rb') as f:
self.assertEqual(ext_decompress(f.read()), self.TEXT)
def testWriteMethodsOnReadOnlyFile(self):
with BZ2File(self.filename, "w") as bz2f:
bz2f.write(b"abc")
with BZ2File(self.filename, "r") as bz2f:
self.assertRaises(OSError, bz2f.write, b"a")
self.assertRaises(OSError, bz2f.writelines, [b"a"])
def testAppend(self):
with BZ2File(self.filename, "w") as bz2f:
self.assertRaises(TypeError, bz2f.write)
bz2f.write(self.TEXT)
with BZ2File(self.filename, "a") as bz2f:
self.assertRaises(TypeError, bz2f.write)
bz2f.write(self.TEXT)
with open(self.filename, 'rb') as f:
self.assertEqual(ext_decompress(f.read()), self.TEXT * 2)
def testSeekForward(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.seek)
bz2f.seek(150)
self.assertEqual(bz2f.read(), self.TEXT[150:])
def testSeekForwardAcrossStreams(self):
self.createTempFile(streams=2)
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.seek)
bz2f.seek(len(self.TEXT) + 150)
self.assertEqual(bz2f.read(), self.TEXT[150:])
def testSeekBackwards(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.read(500)
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[500-150:])
def testSeekBackwardsAcrossStreams(self):
self.createTempFile(streams=2)
with BZ2File(self.filename) as bz2f:
readto = len(self.TEXT) + 100
while readto > 0:
readto -= len(bz2f.read(readto))
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[100-150:] + self.TEXT)
def testSeekBackwardsFromEnd(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(-150, 2)
self.assertEqual(bz2f.read(), self.TEXT[len(self.TEXT)-150:])
def testSeekBackwardsFromEndAcrossStreams(self):
self.createTempFile(streams=2)
with BZ2File(self.filename) as bz2f:
bz2f.seek(-1000, 2)
self.assertEqual(bz2f.read(), (self.TEXT * 2)[-1000:])
def testSeekPostEnd(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(150000)
self.assertEqual(bz2f.tell(), len(self.TEXT))
self.assertEqual(bz2f.read(), b"")
def testSeekPostEndMultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
bz2f.seek(150000)
self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
self.assertEqual(bz2f.read(), b"")
def testSeekPostEndTwice(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(150000)
bz2f.seek(150000)
self.assertEqual(bz2f.tell(), len(self.TEXT))
self.assertEqual(bz2f.read(), b"")
def testSeekPostEndTwiceMultiStream(self):
self.createTempFile(streams=5)
with BZ2File(self.filename) as bz2f:
bz2f.seek(150000)
bz2f.seek(150000)
self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
self.assertEqual(bz2f.read(), b"")
def testSeekPreStart(self):
self.createTempFile()
with BZ2File(self.filename) as bz2f:
bz2f.seek(-150)
self.assertEqual(bz2f.tell(), 0)
self.assertEqual(bz2f.read(), self.TEXT)
def testSeekPreStartMultiStream(self):
self.createTempFile(streams=2)
with BZ2File(self.filename) as bz2f:
bz2f.seek(-150)
self.assertEqual(bz2f.tell(), 0)
self.assertEqual(bz2f.read(), self.TEXT * 2)
def testFileno(self):
self.createTempFile()
with open(self.filename, 'rb') as rawf:
bz2f = BZ2File(rawf)
try:
self.assertEqual(bz2f.fileno(), rawf.fileno())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.fileno)
def testSeekable(self):
bz2f = BZ2File(BytesIO(self.DATA))
try:
self.assertTrue(bz2f.seekable())
bz2f.read()
self.assertTrue(bz2f.seekable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.seekable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
src = BytesIO(self.DATA)
src.seekable = lambda: False
bz2f = BZ2File(src)
try:
self.assertFalse(bz2f.seekable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
def testReadable(self):
bz2f = BZ2File(BytesIO(self.DATA))
try:
self.assertTrue(bz2f.readable())
bz2f.read()
self.assertTrue(bz2f.readable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.readable)
bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.readable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.readable)
def testWritable(self):
bz2f = BZ2File(BytesIO(self.DATA))
try:
self.assertFalse(bz2f.writable())
bz2f.read()
self.assertFalse(bz2f.writable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.writable)
bz2f = BZ2File(BytesIO(), "w")
try:
self.assertTrue(bz2f.writable())
finally:
bz2f.close()
self.assertRaises(ValueError, bz2f.writable)
def testOpenDel(self):
self.createTempFile()
for i in range(10000):
o = BZ2File(self.filename)
del o
def testOpenNonexistent(self):
self.assertRaises(OSError, BZ2File, "/non/existent")
def testReadlinesNoNewline(self):
# Issue #1191043: readlines() fails on a file containing no newline.
data = b'BZh91AY&SY\xd9b\x89]\x00\x00\x00\x03\x80\x04\x00\x02\x00\x0c\x00 \x00!\x9ah3M\x13<]\xc9\x14\xe1BCe\x8a%t'
with open(self.filename, "wb") as f:
f.write(data)
with BZ2File(self.filename) as bz2f:
lines = bz2f.readlines()
self.assertEqual(lines, [b'Test'])
with BZ2File(self.filename) as bz2f:
xlines = list(bz2f.readlines())
self.assertEqual(xlines, [b'Test'])
def testContextProtocol(self):
f = None
with BZ2File(self.filename, "wb") as f:
f.write(b"xxx")
f = BZ2File(self.filename, "rb")
f.close()
try:
with f:
pass
except ValueError:
pass
else:
self.fail("__enter__ on a closed file didn't raise an exception")
try:
with BZ2File(self.filename, "wb") as f:
1/0
except ZeroDivisionError:
pass
else:
self.fail("1/0 didn't raise an exception")
def testThreading(self):
# Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
nthreads = 10
with BZ2File(self.filename, 'wb') as f:
def comp():
for i in range(5):
f.write(data)
threads = [threading.Thread(target=comp) for i in range(nthreads)]
with support.start_threads(threads):
pass
def testMixedIterationAndReads(self):
self.createTempFile()
linelen = len(self.TEXT_LINES[0])
halflen = linelen // 2
with BZ2File(self.filename) as bz2f:
bz2f.read(halflen)
self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
self.assertEqual(bz2f.read(), self.TEXT[linelen:])
with BZ2File(self.filename) as bz2f:
bz2f.readline()
self.assertEqual(next(bz2f), self.TEXT_LINES[1])
self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
with BZ2File(self.filename) as bz2f:
bz2f.readlines()
self.assertRaises(StopIteration, next, bz2f)
self.assertEqual(bz2f.readlines(), [])
def testMultiStreamOrdering(self):
# Test the ordering of streams when reading a multi-stream archive.
data1 = b"foo" * 1000
data2 = b"bar" * 1000
with BZ2File(self.filename, "w") as bz2f:
bz2f.write(data1)
with BZ2File(self.filename, "a") as bz2f:
bz2f.write(data2)
with BZ2File(self.filename) as bz2f:
self.assertEqual(bz2f.read(), data1 + data2)
def testOpenBytesFilename(self):
str_filename = self.filename
try:
bytes_filename = str_filename.encode("ascii")
except UnicodeEncodeError:
self.skipTest("Temporary file name needs to be ASCII")
with BZ2File(bytes_filename, "wb") as f:
f.write(self.DATA)
with BZ2File(bytes_filename, "rb") as f:
self.assertEqual(f.read(), self.DATA)
# Sanity check that we are actually operating on the right file.
with BZ2File(str_filename, "rb") as f:
self.assertEqual(f.read(), self.DATA)
def testOpenPathLikeFilename(self):
filename = pathlib.Path(self.filename)
with BZ2File(filename, "wb") as f:
f.write(self.DATA)
with BZ2File(filename, "rb") as f:
self.assertEqual(f.read(), self.DATA)
def testDecompressLimited(self):
"""Decompressed data buffering should be limited"""
bomb = bz2.compress(b'\0' * int(2e6), compresslevel=9)
self.assertLess(len(bomb), _compression.BUFFER_SIZE)
decomp = BZ2File(BytesIO(bomb))
self.assertEqual(decomp.read(1), b'\0')
max_decomp = 1 + DEFAULT_BUFFER_SIZE
self.assertLessEqual(decomp._buffer.raw.tell(), max_decomp,
"Excessive amount of data was decompressed")
# Tests for a BZ2File wrapping another file object:
def testReadBytesIO(self):
with BytesIO(self.DATA) as bio:
with BZ2File(bio) as bz2f:
self.assertRaises(TypeError, bz2f.read, float())
self.assertEqual(bz2f.read(), self.TEXT)
self.assertFalse(bio.closed)
def testPeekBytesIO(self):
with BytesIO(self.DATA) as bio:
with BZ2File(bio) as bz2f:
pdata = bz2f.peek()
self.assertNotEqual(len(pdata), 0)
self.assertTrue(self.TEXT.startswith(pdata))
self.assertEqual(bz2f.read(), self.TEXT)
def testWriteBytesIO(self):
with BytesIO() as bio:
with BZ2File(bio, "w") as bz2f:
self.assertRaises(TypeError, bz2f.write)
bz2f.write(self.TEXT)
self.assertEqual(ext_decompress(bio.getvalue()), self.TEXT)
self.assertFalse(bio.closed)
def testSeekForwardBytesIO(self):
with BytesIO(self.DATA) as bio:
with BZ2File(bio) as bz2f:
self.assertRaises(TypeError, bz2f.seek)
bz2f.seek(150)
self.assertEqual(bz2f.read(), self.TEXT[150:])
def testSeekBackwardsBytesIO(self):
with BytesIO(self.DATA) as bio:
with BZ2File(bio) as bz2f:
bz2f.read(500)
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[500-150:])
def test_read_truncated(self):
# Drop the eos_magic field (6 bytes) and CRC (4 bytes).
truncated = self.DATA[:-10]
with BZ2File(BytesIO(truncated)) as f:
self.assertRaises(EOFError, f.read)
with BZ2File(BytesIO(truncated)) as f:
self.assertEqual(f.read(len(self.TEXT)), self.TEXT)
self.assertRaises(EOFError, f.read, 1)
# Incomplete 4-byte file header, and block header of at least 146 bits.
for i in range(22):
with BZ2File(BytesIO(truncated[:i])) as f:
self.assertRaises(EOFError, f.read, 1)
class BZ2CompressorTest(BaseTest):
def testCompress(self):
bz2c = BZ2Compressor()
self.assertRaises(TypeError, bz2c.compress)
data = bz2c.compress(self.TEXT)
data += bz2c.flush()
self.assertEqual(ext_decompress(data), self.TEXT)
def testCompressEmptyString(self):
bz2c = BZ2Compressor()
data = bz2c.compress(b'')
data += bz2c.flush()
self.assertEqual(data, self.EMPTY_DATA)
def testCompressChunks10(self):
bz2c = BZ2Compressor()
n = 0
data = b''
while True:
str = self.TEXT[n*10:(n+1)*10]
if not str:
break
data += bz2c.compress(str)
n += 1
data += bz2c.flush()
self.assertEqual(ext_decompress(data), self.TEXT)
@bigmemtest(size=_4G + 100, memuse=2)
def testCompress4G(self, size):
# "Test BZ2Compressor.compress()/flush() with >4GiB input"
bz2c = BZ2Compressor()
data = b"x" * size
try:
compressed = bz2c.compress(data)
compressed += bz2c.flush()
finally:
data = None # Release memory
data = bz2.decompress(compressed)
try:
self.assertEqual(len(data), size)
self.assertEqual(len(data.strip(b"x")), 0)
finally:
data = None
def testPickle(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.assertRaises(TypeError):
pickle.dumps(BZ2Compressor(), proto)
class BZ2DecompressorTest(BaseTest):
def test_Constructor(self):
self.assertRaises(TypeError, BZ2Decompressor, 42)
def testDecompress(self):
bz2d = BZ2Decompressor()
self.assertRaises(TypeError, bz2d.decompress)
text = bz2d.decompress(self.DATA)
self.assertEqual(text, self.TEXT)
def testDecompressChunks10(self):
bz2d = BZ2Decompressor()
text = b''
n = 0
while True:
str = self.DATA[n*10:(n+1)*10]
if not str:
break
text += bz2d.decompress(str)
n += 1
self.assertEqual(text, self.TEXT)
def testDecompressUnusedData(self):
bz2d = BZ2Decompressor()
unused_data = b"this is unused data"
text = bz2d.decompress(self.DATA+unused_data)
self.assertEqual(text, self.TEXT)
self.assertEqual(bz2d.unused_data, unused_data)
def testEOFError(self):
bz2d = BZ2Decompressor()
text = bz2d.decompress(self.DATA)
self.assertRaises(EOFError, bz2d.decompress, b"anything")
self.assertRaises(EOFError, bz2d.decompress, b"")
@bigmemtest(size=_4G + 100, memuse=3.3)
def testDecompress4G(self, size):
# "Test BZ2Decompressor.decompress() with >4GiB input"
blocksize = 10 * 1024 * 1024
block = random.getrandbits(blocksize * 8).to_bytes(blocksize, 'little')
try:
data = block * (size // blocksize + 1)
compressed = bz2.compress(data)
bz2d = BZ2Decompressor()
decompressed = bz2d.decompress(compressed)
self.assertTrue(decompressed == data)
finally:
data = None
compressed = None
decompressed = None
def testPickle(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.assertRaises(TypeError):
pickle.dumps(BZ2Decompressor(), proto)
    def testDecompressorChunksMaxsize(self):
        """Exercise BZ2Decompressor.decompress(max_length=...): each call's
        output must be capped at max_length, and needs_input must report
        whether buffered input is still pending."""
        bzd = BZ2Decompressor()
        max_length = 100
        out = []

        # Feed some input (all but the last 64 bytes of the archive)
        len_ = len(self.BIG_DATA) - 64
        out.append(bzd.decompress(self.BIG_DATA[:len_],
                                  max_length=max_length))
        self.assertFalse(bzd.needs_input)
        self.assertEqual(len(out[-1]), max_length)

        # Retrieve more data without providing more input
        out.append(bzd.decompress(b'', max_length=max_length))
        self.assertFalse(bzd.needs_input)
        self.assertEqual(len(out[-1]), max_length)

        # Retrieve more data while providing more input
        out.append(bzd.decompress(self.BIG_DATA[len_:],
                                  max_length=max_length))
        self.assertLessEqual(len(out[-1]), max_length)

        # Retrieve remaining uncompressed data, capped chunk by chunk
        while not bzd.eof:
            out.append(bzd.decompress(b'', max_length=max_length))
            self.assertLessEqual(len(out[-1]), max_length)

        # The concatenated chunks must equal the original plaintext
        out = b"".join(out)
        self.assertEqual(out, self.BIG_TEXT)
        self.assertEqual(bzd.unused_data, b"")
def test_decompressor_inputbuf_1(self):
# Test reusing input buffer after moving existing
# contents to beginning
bzd = BZ2Decompressor()
out = []
# Create input buffer and fill it
self.assertEqual(bzd.decompress(self.DATA[:100],
max_length=0), b'')
# Retrieve some results, freeing capacity at beginning
# of input buffer
out.append(bzd.decompress(b'', 2))
# Add more data that fits into input buffer after
# moving existing data to beginning
out.append(bzd.decompress(self.DATA[100:105], 15))
# Decompress rest of data
out.append(bzd.decompress(self.DATA[105:]))
self.assertEqual(b''.join(out), self.TEXT)
    def test_decompressor_inputbuf_2(self):
        # Test reusing input buffer by appending data at the
        # end right away
        bzd = BZ2Decompressor()
        out = []
        # Create input buffer and empty it
        # (max_length=0 buffers the input; the b'' call drains it)
        self.assertEqual(bzd.decompress(self.DATA[:200],
                                        max_length=0), b'')
        out.append(bzd.decompress(b''))
        # Fill buffer with new data
        out.append(bzd.decompress(self.DATA[200:280], 2))
        # Append some more data, not enough to require resize
        out.append(bzd.decompress(self.DATA[280:300], 2))
        # Decompress rest of data
        out.append(bzd.decompress(self.DATA[300:]))
        self.assertEqual(b''.join(out), self.TEXT)
    def test_decompressor_inputbuf_3(self):
        # Test reusing input buffer after extending it
        bzd = BZ2Decompressor()
        out = []
        # Create almost full input buffer
        # (small max_length keeps most of the compressed data buffered)
        out.append(bzd.decompress(self.DATA[:200], 5))
        # Add even more data to it, requiring resize
        out.append(bzd.decompress(self.DATA[200:300], 5))
        # Decompress rest of data
        out.append(bzd.decompress(self.DATA[300:]))
        self.assertEqual(b''.join(out), self.TEXT)
def test_failure(self):
bzd = BZ2Decompressor()
self.assertRaises(Exception, bzd.decompress, self.BAD_DATA * 30)
# Previously, a second call could crash due to internal inconsistency
self.assertRaises(Exception, bzd.decompress, self.BAD_DATA * 30)
    @support.refcount_test
    def test_refleaks_in___init__(self):
        # Re-running __init__ on a live decompressor must not leak
        # references (only measurable on refcount-tracking debug builds,
        # hence the @support.refcount_test guard).
        gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount')
        bzd = BZ2Decompressor()
        refs_before = gettotalrefcount()
        for i in range(100):
            bzd.__init__()
        # delta=10 allows unrelated interpreter refcount noise.
        self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10)
class CompressDecompressTest(BaseTest):
    """Round-trip checks for the module-level bz2.compress()/decompress()."""
    def testCompress(self):
        self.assertEqual(ext_decompress(bz2.compress(self.TEXT)), self.TEXT)
    def testCompressEmptyString(self):
        self.assertEqual(bz2.compress(b''), self.EMPTY_DATA)
    def testDecompress(self):
        self.assertEqual(bz2.decompress(self.DATA), self.TEXT)
    def testDecompressEmpty(self):
        self.assertEqual(bz2.decompress(b""), b"")
    def testDecompressToEmptyString(self):
        self.assertEqual(bz2.decompress(self.EMPTY_DATA), b'')
    def testDecompressIncomplete(self):
        # A truncated stream must raise ValueError.
        self.assertRaises(ValueError, bz2.decompress, self.DATA[:-10])
    def testDecompressBadData(self):
        self.assertRaises(OSError, bz2.decompress, self.BAD_DATA)
    def testDecompressMultiStream(self):
        self.assertEqual(bz2.decompress(self.DATA * 5), self.TEXT * 5)
    def testDecompressTrailingJunk(self):
        self.assertEqual(bz2.decompress(self.DATA + self.BAD_DATA), self.TEXT)
    def testDecompressMultiStreamTrailingJunk(self):
        self.assertEqual(bz2.decompress(self.DATA * 5 + self.BAD_DATA), self.TEXT * 5)
class OpenTest(BaseTest):
    "Test the open function."
    def open(self, *args, **kwargs):
        # Indirection so subclasses can substitute another open().
        return bz2.open(*args, **kwargs)
    def test_binary_modes(self):
        # "xb" (exclusive create) requires the file not to exist yet.
        for mode in ("wb", "xb"):
            if mode == "xb":
                unlink(self.filename)
            with self.open(self.filename, mode) as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT)
            with self.open(self.filename, "rb") as f:
                self.assertEqual(f.read(), self.TEXT)
            # Appending must produce a second concatenated stream.
            with self.open(self.filename, "ab") as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT * 2)
    def test_implicit_binary_modes(self):
        # Test implicit binary modes (no "b" or "t" in mode string).
        for mode in ("w", "x"):
            if mode == "x":
                unlink(self.filename)
            with self.open(self.filename, mode) as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT)
            with self.open(self.filename, "r") as f:
                self.assertEqual(f.read(), self.TEXT)
            with self.open(self.filename, "a") as f:
                f.write(self.TEXT)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read())
                self.assertEqual(file_data, self.TEXT * 2)
    def test_text_modes(self):
        # Text mode translates "\n" to the platform line separator on write.
        text = self.TEXT.decode("ascii")
        text_native_eol = text.replace("\n", os.linesep)
        for mode in ("wt", "xt"):
            if mode == "xt":
                unlink(self.filename)
            with self.open(self.filename, mode) as f:
                f.write(text)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read()).decode("ascii")
                self.assertEqual(file_data, text_native_eol)
            with self.open(self.filename, "rt") as f:
                self.assertEqual(f.read(), text)
            with self.open(self.filename, "at") as f:
                f.write(text)
            with open(self.filename, "rb") as f:
                file_data = ext_decompress(f.read()).decode("ascii")
                self.assertEqual(file_data, text_native_eol * 2)
    def test_x_mode(self):
        # Exclusive-create modes must fail when the file already exists.
        for mode in ("x", "xb", "xt"):
            unlink(self.filename)
            with self.open(self.filename, mode) as f:
                pass
            with self.assertRaises(FileExistsError):
                with self.open(self.filename, mode) as f:
                    pass
    def test_fileobj(self):
        # open() also accepts an existing file object instead of a path.
        with self.open(BytesIO(self.DATA), "r") as f:
            self.assertEqual(f.read(), self.TEXT)
        with self.open(BytesIO(self.DATA), "rb") as f:
            self.assertEqual(f.read(), self.TEXT)
        text = self.TEXT.decode("ascii")
        with self.open(BytesIO(self.DATA), "rt") as f:
            self.assertEqual(f.read(), text)
    def test_bad_params(self):
        # Test invalid parameter combinations.
        # "b" and "t" are mutually exclusive; encoding/errors/newline are
        # only meaningful in text mode.
        self.assertRaises(ValueError,
                          self.open, self.filename, "wbt")
        self.assertRaises(ValueError,
                          self.open, self.filename, "xbt")
        self.assertRaises(ValueError,
                          self.open, self.filename, "rb", encoding="utf-8")
        self.assertRaises(ValueError,
                          self.open, self.filename, "rb", errors="ignore")
        self.assertRaises(ValueError,
                          self.open, self.filename, "rb", newline="\n")
    def test_encoding(self):
        # Test non-default encoding.
        text = self.TEXT.decode("ascii")
        text_native_eol = text.replace("\n", os.linesep)
        with self.open(self.filename, "wt", encoding="utf-16-le") as f:
            f.write(text)
        with open(self.filename, "rb") as f:
            file_data = ext_decompress(f.read()).decode("utf-16-le")
            self.assertEqual(file_data, text_native_eol)
        with self.open(self.filename, "rt", encoding="utf-16-le") as f:
            self.assertEqual(f.read(), text)
    def test_encoding_error_handler(self):
        # Test with non-default encoding error handler.
        # The invalid 0xff byte must be silently dropped by errors="ignore".
        with self.open(self.filename, "wb") as f:
            f.write(b"foo\xffbar")
        with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \
                as f:
            self.assertEqual(f.read(), "foobar")
    def test_newline(self):
        # Test with explicit newline (universal newline mode disabled).
        # Written with "\n", read with newline="\r": nothing splits, so
        # the whole text comes back as a single line.
        text = self.TEXT.decode("ascii")
        with self.open(self.filename, "wt", newline="\n") as f:
            f.write(text)
        with self.open(self.filename, "rt", newline="\r") as f:
            self.assertEqual(f.readlines(), [text])
def test_main():
    """Run the whole bz2 test suite, then reap any spawned child processes."""
    all_cases = (
        BZ2FileTest,
        BZ2CompressorTest,
        BZ2DecompressorTest,
        CompressDecompressTest,
        OpenTest,
    )
    support.run_unittest(*all_cases)
    support.reap_children()
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    test_main()
|
[
"henrikem100@gmail.com"
] |
henrikem100@gmail.com
|
ef9cecc91e7b99f22752fd0266c4724f94f43454
|
5c8eae715c3dd605d383686d1777f6f7795c1b63
|
/e-cidade/bin/oo2pdf/DocumentConverter.py
|
23d8ec8c62f8dcfac3bfee82c7f3a7c983656fed
|
[] |
no_license
|
soarescbm/e-cidade
|
6e6b7cefdfc8d4d497e97c514d37dd330f6b1867
|
58d9c980900cd7448d58270111d105bc011a917e
|
refs/heads/master
| 2021-04-24T18:02:27.939759
| 2020-04-11T18:48:07
| 2020-04-11T18:48:07
| 254,710,850
| 2
| 2
| null | 2020-04-11T18:30:19
| 2020-04-10T18:47:36
|
PHP
|
UTF-8
|
Python
| false
| false
| 4,563
|
py
|
#
# PyODConverter (Python OpenDocument Converter) v0.9.1 - 2008-03-01
#
# This script converts a document from one office format to another by
# connecting to an OpenOffice.org instance via Python-UNO bridge.
#
# Copyright (C) 2008 Mirko Nasato <mirko@artofsolving.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
#
# TCP port on which the headless OpenOffice.org instance is expected to listen.
DEFAULT_OPENOFFICE_PORT = 8100
import uno
from os.path import abspath, splitext
from com.sun.star.beans import PropertyValue
from com.sun.star.connection import NoConnectException
# Document family identifiers; the family of the *input* document decides
# which export filter applies for a given output extension.
FAMILY_PRESENTATION = "Presentation"
FAMILY_SPREADSHEET = "Spreadsheet"
FAMILY_TEXT = "Text"
# Maps an input file extension to its document family.
FAMILY_BY_EXTENSION = {
    "odt": FAMILY_TEXT,
    "sxw": FAMILY_TEXT,
    "doc": FAMILY_TEXT,
    "rtf": FAMILY_TEXT,
    "txt": FAMILY_TEXT,
    "wpd": FAMILY_TEXT,
    "html": FAMILY_TEXT,
    "ods": FAMILY_SPREADSHEET,
    "sxc": FAMILY_SPREADSHEET,
    "xls": FAMILY_SPREADSHEET,
    "odp": FAMILY_PRESENTATION,
    "sxi": FAMILY_PRESENTATION,
    "ppt": FAMILY_PRESENTATION
}
# Maps output extension -> {family: OpenOffice.org export filter name}.
FILTER_BY_EXTENSION = {
    "pdf": {
        FAMILY_TEXT: "writer_pdf_Export",
        FAMILY_SPREADSHEET: "calc_pdf_Export",
        FAMILY_PRESENTATION: "impress_pdf_Export"
    },
    "html": {
        FAMILY_TEXT: "HTML (StarWriter)",
        FAMILY_SPREADSHEET: "HTML (StarCalc)",
        FAMILY_PRESENTATION: "impress_html_Export"
    },
    "odt": { FAMILY_TEXT: "writer8" },
    "doc": { FAMILY_TEXT: "MS Word 97" },
    "rtf": { FAMILY_TEXT: "Rich Text Format" },
    "txt": { FAMILY_TEXT: "Text" },
    "ods": { FAMILY_SPREADSHEET: "calc8" },
    "xls": { FAMILY_SPREADSHEET: "MS Excel 97" },
    "odp": { FAMILY_PRESENTATION: "impress8" },
    "ppt": { FAMILY_PRESENTATION: "MS PowerPoint 97" },
    "swf": { FAMILY_PRESENTATION: "impress_flash_Export" }
}
class DocumentConversionException(Exception):
    """Raised when a document cannot be converted (unknown format,
    unsupported conversion, or no reachable OpenOffice.org instance)."""

    def __init__(self, message):
        self.message = message

    def __str__(self):
        return self.message
def _unoProps(**args):
    """Build a tuple of UNO PropertyValue objects from keyword arguments."""
    props = []
    for name, value in args.items():
        prop = PropertyValue()
        prop.Name = name
        prop.Value = value
        props.append(prop)
    return tuple(props)
class DocumentConverter:
    """Converts documents between office formats by driving a running
    OpenOffice.org instance over the UNO socket bridge.

    NOTE: this file is Python 2 code (``raise X, msg`` syntax)."""
    def __init__(self, port=DEFAULT_OPENOFFICE_PORT):
        # Resolve the remote component context of a headless OO.org
        # instance listening on localhost:<port>.
        localContext = uno.getComponentContext()
        resolver = localContext.ServiceManager.createInstanceWithContext("com.sun.star.bridge.UnoUrlResolver", localContext)
        try:
            context = resolver.resolve("uno:socket,host=localhost,port=%s;urp;StarOffice.ComponentContext" % port)
        except NoConnectException:
            raise DocumentConversionException, "failed to connect to OpenOffice.org on port %s" % port
        self.desktop = context.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", context)
    def convert(self, inputFile, outputFile):
        # The export filter is derived from the input family and the
        # requested output extension.
        inputExt = self._fileExt(inputFile)
        outputExt = self._fileExt(outputFile)
        filterName = self._filterName(inputExt, outputExt)
        inputUrl = self._fileUrl(inputFile)
        outputUrl = self._fileUrl(outputFile)
        # Hidden=True avoids any UI; ReadOnly=True protects the source file.
        document = self.desktop.loadComponentFromURL(inputUrl, "_blank", 0, _unoProps(Hidden=True, ReadOnly=True))
        try:
            document.refresh()
        except AttributeError:
            # Some document types have no refresh() method.
            pass
        document.storeToURL(outputUrl, _unoProps(FilterName=filterName))
        document.close(True)
    def _filterName(self, inputExt, outputExt):
        # input extension -> family, then family + output extension -> filter.
        try:
            family = FAMILY_BY_EXTENSION[inputExt]
        except KeyError:
            raise DocumentConversionException, "unknown input format: '%s'" % inputExt
        try:
            filterByFamily = FILTER_BY_EXTENSION[outputExt]
        except KeyError:
            raise DocumentConversionException, "unknown output format: '%s'" % outputExt
        try:
            return filterByFamily[family]
        except KeyError:
            raise DocumentConversionException, "unsupported conversion: from '%s' to '%s'" % (inputExt, outputExt)
    def _fileExt(self, path):
        # Lower-cased extension without the leading dot.
        # NOTE(review): splitext() never returns None, so the guard below
        # always passes; a file with no extension returns '' here.
        ext = splitext(path)[1]
        if ext is not None:
            return ext[1:].lower()
    def _fileUrl(self, path):
        # UNO requires file:// URLs, not plain paths.
        return uno.systemPathToFileUrl(abspath(path))
if __name__ == "__main__":
    # Command-line interface: DocumentConverter.py <input-file> <output-file>
    # Exits 255 on bad usage, 1 on conversion failure.
    from sys import argv, exit
    if len(argv) < 3:
        print "USAGE: " + argv[0] + " <input-file> <output-file>"
        exit(255)
    try:
        converter = DocumentConverter()
        converter.convert(argv[1], argv[2])
    except DocumentConversionException, exception:
        print "ERROR! " + str(exception)
        exit(1)
|
[
"soarescbm@gmail.com"
] |
soarescbm@gmail.com
|
5215b5aaa450988a9cc261e3c68a0353ab39e5bc
|
935a5dbec1e56a615f9d426c4cd8f31ed709ad45
|
/utils/evaluate.py
|
be8f0d85683b96c0a00f44705b6135de31fced99
|
[] |
no_license
|
PandasCute/BDCI_Car_2018
|
1f94a858f114506335ef3e3ebb48256035ed83ba
|
1d8947fea362103ee6ca46133bfad3536fd5301f
|
refs/heads/master
| 2020-04-09T04:04:42.729789
| 2018-12-05T03:58:13
| 2018-12-05T03:58:13
| 160,009,414
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,642
|
py
|
import numpy as np
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, precision_recall_fscore_support
def score_list(predicted, golden):
    """Score predicted label sequences against gold label sequences.

    Args:
        predicted: list of per-token label lists, one per example.
        golden: list of gold per-token label lists, same length as predicted.

    Returns:
        (precision, recall, f1, accuracy): P/R/F1 are computed by sklearn
        over the flattened token labels; accuracy is the fraction of
        sequences predicted exactly right.
    """
    assert len(predicted) == len(golden)
    # Sequence-level accuracy: the whole sequence must match exactly.
    correct = sum(1 for p, g in zip(predicted, golden) if p == g)
    acc = correct / len(golden)
    # Token-level metrics over the flattened sequences.
    predicted_all = [l for p in predicted for l in p]
    golden_all = [l for g in golden for l in g]
    precision = precision_score(golden_all, predicted_all)
    recall = recall_score(golden_all, predicted_all)
    f1 = f1_score(golden_all, predicted_all)
    # NOTE: a second f1_score with swapped arguments was computed here and
    # never used; it has been removed.
    return precision, recall, f1, acc
def score(predicted, golden):
    """Like score_list(), but each gold entry is indexed as g[0].tolist().

    Assumes each element of ``golden`` is a tensor-like batch of shape
    (1, seq_len) -- TODO confirm against the caller.

    Returns:
        (precision, recall, f1, accuracy) -- see score_list().
    """
    assert len(predicted) == len(golden)
    # Sequence-level accuracy against the first (only) gold row per entry.
    correct = sum(1 for p, g in zip(predicted, golden) if p == g[0].tolist())
    acc = correct / len(golden)
    # Token-level metrics over the flattened sequences.
    predicted_all = [l for p in predicted for l in p]
    golden_all = [l for g in golden for l in g[0].tolist()]
    precision = precision_score(golden_all, predicted_all)
    recall = recall_score(golden_all, predicted_all)
    f1 = f1_score(golden_all, predicted_all)
    # NOTE: a second f1_score with swapped arguments was computed here and
    # never used; it has been removed.
    return precision, recall, f1, acc
def label_analysis(predicted, golden):
    """Per-label precision/recall/F1 plus positive rate for multi-label output.

    Each gold entry is taken as g[0].tolist() (a (1, n_labels) batch --
    presumably a tensor; verify against caller). Returns a list of
    [precision, recall, f1, positive_rate] rows, one per label column.
    """
    assert len(predicted) == len(golden)
    gold_mat = np.asarray([g[0].tolist() for g in golden])
    pred_mat = np.asarray(predicted)
    rslt = []
    for col in range(gold_mat.shape[1]):
        gold_col = gold_mat[:, col]
        pred_col = pred_mat[:, col]
        rslt.append([precision_score(gold_col, pred_col),
                     recall_score(gold_col, pred_col),
                     f1_score(gold_col, pred_col),
                     sum(gold_col) / gold_mat.shape[0]])
    return rslt
def score2(predicted, golden):
    # Multi-class scoring with micro-averaged P/R/F via sklearn.
    # NOTE(review): the accuracy loop compares p directly while the metric
    # section indexes p[0].tolist() -- confirm whether callers pass plain
    # lists or (1, n) tensor batches in ``predicted``.
    assert len(predicted) == len(golden)
    correct = 0
    for p, g in zip(predicted, golden):
        if p == g[0].tolist():
            correct += 1
    acc = correct/len(golden)
    predicted_all = [p[0].tolist() for p in predicted]
    golden_all = [g[0].tolist() for g in golden]
    # Micro average pools all classes before computing P/R/F.
    p, r, f, _ = precision_recall_fscore_support(golden_all, predicted_all, average='micro')
    return p, r, f, acc
def label_analysis2(predicted, golden, label_num):
    """Per-class [precision, recall, f1, support] for labels 0..label_num-1.

    Each gold entry is taken as g[0].tolist() (1-element batch).
    """
    assert len(predicted) == len(golden)
    golden_all = [g[0].tolist() for g in golden]
    stats = precision_recall_fscore_support(golden_all, list(predicted),
                                            labels=list(range(label_num)))
    # Transpose (P, R, F, support) arrays into one row per class.
    return [list(row) for row in zip(*stats)]
def score_aspect(predict_list, true_list):
    """Span-level precision/recall/F1 for aspect-term extraction.

    Labels appear to use 0 = aspect start and 1 = aspect continuation
    (anything else = outside) -- TODO confirm against the tagging scheme
    used by the caller.

    correct   counts predicted spans that match a gold span exactly;
    predicted counts predicted span starts (label 0);
    relevant  counts gold spans; the 1e-6 terms guard divide-by-zero.
    """
    correct = 0
    predicted = 0
    relevant = 0
    i = 0
    j = 0
    pairs = []
    while i < len(true_list):
        true_seq = true_list[i]
        predict = predict_list[i]
        for num in range(len(true_seq)):
            if true_seq[num] == 0:
                if num < len(true_seq) - 1:
                    # if true_seq[num + 1] == '0' or true_seq[num + 1] == '1':
                    if true_seq[num + 1] != 1:
                        # Single-token gold span: prediction must start here
                        # and not continue into the next token.
                        # if predict[num] == '1':
                        if predict[num] == 0 and predict[num + 1] != 1:
                            # if predict[num] == '1' and predict[num + 1] != '1':
                            correct += 1
                            # predicted += 1
                            relevant += 1
                        else:
                            relevant += 1
                    else:
                        # Multi-token gold span: walk the continuation tokens
                        # and require the prediction to match the full span.
                        if predict[num] == 0:
                            for j in range(num + 1, len(true_seq)):
                                if true_seq[j] == 1:
                                    if predict[j] == 1 and j < len(predict) - 1:
                                        # if predict[j] == '1' and j < len(predict) - 1:
                                        continue
                                    elif predict[j] == 1 and j == len(predict) - 1:
                                        # elif predict[j] == '1' and j == len(predict) - 1:
                                        correct += 1
                                        relevant += 1
                                    else:
                                        relevant += 1
                                        break
                                else:
                                    if predict[j] != 1:
                                        # if predict[j] != '1':
                                        correct += 1
                                        # predicted += 1
                                        relevant += 1
                                        break
                                    else:
                                        relevant += 1
                        else:
                            relevant += 1
                else:
                    # Gold span starting at the last token.
                    if predict[num] == 0:
                        correct += 1
                        # predicted += 1
                        relevant += 1
                    else:
                        relevant += 1
        # Every predicted span start counts toward the precision denominator.
        for num in range(len(predict)):
            if predict[num] == 0:
                predicted += 1
        i += 1
    precision = float(correct) / (predicted + 1e-6)
    recall = float(correct) / (relevant + 1e-6)
    f1 = 2 * precision * recall / (precision + recall + 1e-6)
    return precision, recall, f1
|
[
"fanzf@nlp.nju.edu.cn"
] |
fanzf@nlp.nju.edu.cn
|
6cf7f2a5e872e4cb93934c85e98ff767a3ca6331
|
5803c356bdf5b1a0bde429937f10d947a8b5afdf
|
/BimSetup.py
|
b47523425873cff050db711b92b6373276a9463c
|
[] |
no_license
|
fengjixiong/BIM_Workbench
|
0a1500af344ccd19d606b21544009dc0dc3ed19b
|
7ba04c0f7e0da8c8c14bf622f3d2611cd8295452
|
refs/heads/master
| 2020-04-03T01:59:27.390302
| 2018-10-26T17:56:33
| 2018-10-26T17:56:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,555
|
py
|
#***************************************************************************
#* *
#* Copyright (c) 2017 Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
"""This module contains FreeCAD commands for the BIM workbench"""
import os,FreeCAD,FreeCADGui
def QT_TRANSLATE_NOOP(ctx, txt):
    """Dummy stand-in for the Qt translator: returns the text unchanged."""
    return txt
class BIM_Setup:
    """FreeCAD GUI command showing a simplified preferences dialog that
    applies common BIM-workflow settings (units, grid, text, dimensions,
    colors, default workbench) to the FreeCAD parameter tree."""

    def GetResources(self):
        """Icon / menu text / tooltip metadata used by FreeCADGui.addCommand."""
        return {'Pixmap'  : ":icons/preferences-system.svg",
                'MenuText': QT_TRANSLATE_NOOP("BIM_Setup", "BIM Setup..."),
                'ToolTip' : QT_TRANSLATE_NOOP("BIM_Setup", "Set some common FreeCAD preferences for BIM workflow")}

    def Activated(self):
        """Show the setup dialog prefilled from current preferences; on OK,
        write the chosen values back and sync the Draft working plane/toolbar."""
        TECHDRAWDIMFACTOR = 0.16 # How many times TechDraw dim arrows are smaller than Draft
        # load dialog
        from PySide import QtGui
        form = FreeCADGui.PySideUic.loadUi(os.path.join(os.path.dirname(__file__),"dialogSetup.ui"))
        # center the dialog over FreeCAD window
        mw = FreeCADGui.getMainWindow()
        form.move(mw.frameGeometry().topLeft() + mw.rect().center() - form.rect().center())
        # fill values from current settings
        unit = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Units").GetInt("UserSchema",0)
        unit = [0,2,3,3,1,4,0][unit] # less choices in our simplified dialog
        form.settingUnits.setCurrentIndex(unit)
        decimals = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Units").GetInt("Decimals",2)
        form.settingDecimals.setValue(decimals)
        grid = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetFloat("gridSpacing",10)
        grid = FreeCAD.Units.Quantity(grid,FreeCAD.Units.Length).UserString
        form.settingGrid.setText(grid)
        wp = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetInt("defaultWP",0)
        form.settingWP.setCurrentIndex(wp)
        tsize = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetFloat("textheight",10)
        tsize = FreeCAD.Units.Quantity(tsize,FreeCAD.Units.Length).UserString
        form.settingText.setText(tsize)
        font = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetString("textfont","Sans")
        form.settingFont.setCurrentFont(QtGui.QFont(font))
        linewidth = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").GetInt("DefautShapeLineWidth",2)
        form.settingLinewidth.setValue(linewidth)
        dimstyle = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetInt("dimsymbol",0)
        dimstyle = [0,0,1,2,3][dimstyle] # less choices in our simplified dialog
        form.settingDimstyle.setCurrentIndex(dimstyle)
        asize = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetFloat("arrowsize",5)
        asize = FreeCAD.Units.Quantity(asize,FreeCAD.Units.Length).UserString
        form.settingArrowsize.setText(asize)
        author = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").GetString("prefAuthor","")
        form.settingAuthor.setText(author)
        lic = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").GetInt("prefLicenseType",0)
        lic = [0,1,2,1,3,4,1,0,0,0][lic] # less choices in our simplified dialog
        form.settingLicense.setCurrentIndex(lic)
        bimdefault = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/General").GetString("AutoloadModule","")
        form.settingWorkbench.setChecked(bimdefault == "BIMWorkbench")
        newdoc = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").GetBool("CreateNewDoc",False)
        form.settingNewdocument.setChecked(newdoc)
        bkp = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").GetInt("CountBackupFiles",2)
        form.settingBackupfiles.setValue(bkp)
        # TODO - antialiasing?
        colTop = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").GetUnsigned("BackgroundColor2",775244287)
        form.colorButtonTop.setProperty("color",getPrefColor(colTop))
        colBottom = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").GetUnsigned("BackgroundColor3",1905041919)
        form.colorButtonBottom.setProperty("color",getPrefColor(colBottom))
        colFace = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").GetUnsigned("DefaultShapeColor",4294967295)
        form.colorButtonFaces.setProperty("color",getPrefColor(colFace))
        colLine = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").GetUnsigned("DefaultShapeLineColor",255)
        form.colorButtonLines.setProperty("color",getPrefColor(colLine))
        colHelp = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch").GetUnsigned("ColorHelpers",674321151)
        form.colorButtonHelpers.setProperty("color",getPrefColor(colHelp))
        colConst = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").GetUnsigned("constructioncolor",746455039)
        form.colorButtonConstruction.setProperty("color",getPrefColor(colConst))
        # check missing addons
        # (narrowed from bare except: only a failed import means "not installed")
        form.labelMissingWorkbenches.hide()
        m = []
        try:
            import RebarTools
        except ImportError:
            m.append("Reinforcement")
        try:
            import BIMServer
        except ImportError:
            m.append("WebTools")
        try:
            import CommandsFrame
        except ImportError:
            m.append("Flamingo")
        try:
            import FastenerBase
        except ImportError:
            m.append("Fasteners")
        if m:
            form.labelMissingWorkbenches.setText("Tip: Some additional workbenches are not installed, that extend BIM functionality: <b>"+",".join(m)+"</b>. You can install them from menu Tools -> Addon manager.")
            form.labelMissingWorkbenches.show()
        # show dialog and exit if cancelled
        result = form.exec_()
        if not result:
            return
        # set preference values
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/BIM").SetBool("FirstTime",False)
        unit = form.settingUnits.currentIndex()
        unit = [0,4,1,3,5][unit] # less choices in our simplified dialog
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Units").SetInt("UserSchema",unit)
        decimals = form.settingDecimals.value()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Units").SetInt("Decimals",decimals)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/TechDraw/Dimensions").SetBool("UseGlobalDecimals",True)
        grid = form.settingGrid.text()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Sketcher/General").SetString("GridSize",str(grid)) # Also set sketcher grid
        grid = FreeCAD.Units.Quantity(grid).Value
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetFloat("gridSpacing",grid)
        wp = form.settingWP.currentIndex()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetInt("defaultWP",wp)
        tsize = form.settingText.text()
        tsize = FreeCAD.Units.Quantity(tsize).Value
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetFloat("textheight",tsize)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/TechDraw/Dimensions").SetFloat("FontSize",tsize) # TODO - check if this needs a mult factor?
        font = form.settingFont.currentFont().family()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetString("textfont",font)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/TechDraw/Labels").SetString("LabelFont",font)
        linewidth = form.settingLinewidth.value()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").SetInt("DefautShapeLineWidth",linewidth)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetInt("linewidth",linewidth)
        # TODO - TechDraw default line styles
        dimstyle = form.settingDimstyle.currentIndex()
        ddimstyle = [0,2,3,4][dimstyle] # less choices in our simplified dialog
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetInt("dimsymbol",ddimstyle)
        tdimstyle = [3,0,2,2][dimstyle] # TechDraw has different order than Draft
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/TechDraw/Dimensions").SetInt("dimsymbol",tdimstyle)
        asize = form.settingArrowsize.text()
        asize = FreeCAD.Units.Quantity(asize).Value
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetFloat("arrowsize",asize)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/TechDraw/Dimensions").SetFloat("ArrowSize",asize*TECHDRAWDIMFACTOR)
        author = form.settingAuthor.text()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").SetString("prefAuthor",author)
        lic = form.settingLicense.currentIndex()
        lic = [0,1,2,4,5][lic] # less choices in our simplified dialog
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").SetInt("prefLicenseType",lic)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").SetString("prefLicenseUrl","") # TODO - set correct license URL
        bimdefault = form.settingWorkbench.isChecked()
        if bimdefault:
            FreeCAD.ParamGet("User parameter:BaseApp/Preferences/General").SetString("AutoloadModule","BIMWorkbench")
        newdoc = form.settingNewdocument.isChecked()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").SetBool("CreateNewDoc",newdoc)
        bkp = form.settingBackupfiles.value()
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Document").SetInt("CountBackupFiles",bkp)
        # QColor.rgb() is 0xAARRGGBB; shifting left 8 bits yields FreeCAD's
        # 0xRRGGBBAA packed unsigned format.
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").SetUnsigned("BackgroundColor2",form.colorButtonTop.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").SetUnsigned("BackgroundColor3",form.colorButtonBottom.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").SetUnsigned("DefaultShapeColor",form.colorButtonFaces.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetUnsigned("color",form.colorButtonFaces.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").SetUnsigned("DefaultShapeLineColor",form.colorButtonLines.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch").SetUnsigned("ColorHelpers",form.colorButtonHelpers.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Draft").SetUnsigned("constructioncolor",form.colorButtonConstruction.property("color").rgb()<<8)
        FreeCAD.ParamGet("User parameter:BaseApp/Preferences/View").SetUnsigned("ConstructionColor",form.colorButtonConstruction.property("color").rgb()<<8)
        # set the working plane
        # BUGFIX: "Vector" was used unqualified but is never imported in this
        # module (only os, FreeCAD, FreeCADGui are); qualify as FreeCAD.Vector
        # so these branches don't raise NameError.
        if hasattr(FreeCAD,"DraftWorkingPlane") and hasattr(FreeCADGui,"draftToolBar"):
            if wp == 1:
                FreeCAD.DraftWorkingPlane.alignToPointAndAxis(FreeCAD.Vector(0,0,0), FreeCAD.Vector(0,0,1), 0)
                FreeCADGui.draftToolBar.wplabel.setText("Top(XY)")
            elif wp == 2:
                FreeCAD.DraftWorkingPlane.alignToPointAndAxis(FreeCAD.Vector(0,0,0), FreeCAD.Vector(0,1,0), 0)
                FreeCADGui.draftToolBar.wplabel.setText("Front(XZ)")
            elif wp == 3:
                FreeCAD.DraftWorkingPlane.alignToPointAndAxis(FreeCAD.Vector(0,0,0), FreeCAD.Vector(1,0,0), 0)
                FreeCADGui.draftToolBar.wplabel.setText("Side(YZ)")
            else:
                FreeCADGui.draftToolBar.wplabel.setText("Auto")
        # set Draft toolbar
        if hasattr(FreeCADGui,"draftToolBar"):
            FreeCADGui.draftToolBar.widthButton.setValue(linewidth)
            FreeCADGui.draftToolBar.fontsizeButton.setValue(tsize)
def getPrefColor(color):
    """Convert a FreeCAD packed unsigned colour (0xRRGGBBAA) to a QColor."""
    from PySide import QtGui
    red, green, blue = (((color >> shift) & 0xFF) / 255.0
                        for shift in (24, 16, 8))
    return QtGui.QColor.fromRgbF(red, green, blue)
# Register the command with FreeCAD's GUI so it appears in the workbench menu.
FreeCADGui.addCommand('BIM_Setup',BIM_Setup())
|
[
"yorik@uncreated.net"
] |
yorik@uncreated.net
|
a01fbfabc6dedb2dd92c73ae03b9ef2bbe1e3a2a
|
4dff4681ea924faabd3c64c4ce425972768ad2a9
|
/image_classification/utils/file_processing.py
|
df35949dd745dc1fe173c541ef4aa60a2d64f343
|
[] |
no_license
|
Wprofessor/pytorch-learning-notes
|
0b1b038fa5feca330d28b7f24a30c01cd7a2bf5c
|
166dd1f31f4d3c4ce73f13077cf6619bbff91635
|
refs/heads/master
| 2022-01-08T10:03:54.590611
| 2019-05-15T03:01:35
| 2019-05-15T03:01:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,224
|
py
|
# -*-coding: utf-8 -*-
"""
@Project: IntelligentManufacture
@File : file_processing.py
@Author : panjq
@E-mail : pan_jinquan@163.com
@Date : 2019-02-14 15:08:19
"""
import glob
import os
import os,shutil
import numpy as np
import pandas as pd
def write_data(filename, content_list, mode='w'):
    """Write list-of-lists data to a text file, one space-joined row per line.

    :param filename: output file path
    :param content_list: rows to save, each a list of values
    :param mode: file mode, 'w' (overwrite) or 'a' (append)
    :return: None
    """
    with open(filename, mode=mode, encoding='utf-8') as f:
        for row in content_list:
            f.write(" ".join('%s' % item for item in row) + "\n")
def write_list_data(filename, list_data, mode='w'):
    """Write each element of a list to a text file on its own line.

    :param filename: output file path
    :param list_data: elements to save (stringified with str())
    :param mode: file mode, 'w' (overwrite) or 'a' (append)
    :return: None
    """
    with open(filename, mode=mode, encoding='utf-8') as f:
        f.writelines(str(item) + "\n" for item in list_data)
def read_data(filename, split=" ", convertNum=True):
    """Read a txt file into a list of rows.

    :param filename: file path
    :param split: field separator; None returns raw (rstripped) lines
    :param convertNum: convert numeric-looking fields to int/float
    :return: list of lines, or list of field lists when split is given

    Note: only trailing whitespace/newlines are stripped from each line
    (rstrip); leading whitespace is preserved.
    """
    with open(filename, mode="r", encoding='utf-8') as f:
        raw_lines = f.readlines()
    if split is None:
        return [line.rstrip() for line in raw_lines]
    rows = [line.rstrip().split(split) for line in raw_lines]
    if convertNum:
        def _coerce(token):
            # is_int/is_float are module-level helpers defined below.
            if is_int(token):
                return int(token)
            if is_float(token):
                return float(token)
            return token
        rows = [[_coerce(token) for token in row] for row in rows]
    return rows
def is_int(str):
    """Return True if the value parses as an integer, else False.

    NOTE: the parameter name shadows the builtin ``str``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    try:
        int(str)
    except (ValueError, TypeError):
        # TypeError covers non-convertible inputs such as None, which
        # previously escaped uncaught.
        return False
    # int() succeeded, so the isinstance check the original performed
    # was tautologically True.
    return True
def is_float(str):
    """Return True if the value parses as a float (integers qualify too).

    NOTE: the parameter name shadows the builtin ``str``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    try:
        float(str)
    except (ValueError, TypeError):
        # TypeError covers non-convertible inputs such as None, which
        # previously escaped uncaught.
        return False
    return True
def list2str(content_list):
    """Render each row (a list of values) as a single space-joined string."""
    return [" ".join('%s' % item for item in row) for row in content_list]
def get_images_list(image_dir, postfix=['*.jpg'], basename=False):
    """List files in image_dir matching the given glob patterns.

    :param image_dir: directory to search (non-recursive)
    :param postfix: glob patterns, e.g. ['*.jpg', '*.png']
    :param basename: if True, return bare file names instead of full paths
    :return: sorted list of matching paths (or names)
    """
    matches = []
    for pattern in postfix:
        matches.extend(glob.glob(os.path.join(image_dir, pattern)))
    matches.sort()
    if basename:
        matches = get_basename(matches)
    return matches
def get_basename(file_list):
    """Strip directories, returning just the file name of each path."""
    return [os.path.basename(path) for path in file_list]
def copyfile(srcfile, dstfile):
    """Copy srcfile to dstfile, creating the destination directory if needed.

    Prints a message and does nothing when srcfile does not exist.
    """
    if not os.path.isfile(srcfile):
        print("%s not exist!" % (srcfile))
        return
    dst_dir = os.path.split(dstfile)[0]
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    shutil.copyfile(srcfile, dstfile)
def merge_list(data1, data2):
    """Concatenate corresponding elements of two equal-length lists.

    :return: merged list, or None (implicitly) when lengths differ,
             matching the original contract.
    """
    if len(data1) != len(data2):
        return
    return [a + b for a, b in zip(data1, data2)]
def split_list(data, split_index=1):
    """Split every element of data into a prefix/suffix pair at split_index.

    :param data: sequence of sliceable items (lists, strings, ...)
    :param split_index: position to cut at
    :return: (prefixes, suffixes) -- two parallel lists of slices
    """
    prefixes = [item[:split_index] for item in data]
    suffixes = [item[split_index:] for item in data]
    return prefixes, suffixes
def getFilePathList(file_dir):
    """Recursively collect every file path under file_dir, including
    files in subdirectories."""
    filePath_list = []
    for dirpath, _dirnames, filenames in os.walk(file_dir):
        filePath_list.extend(os.path.join(dirpath, name) for name in filenames)
    return filePath_list
def get_files_list(file_dir, postfix='ALL'):
    '''
    Return every file under file_dir (recursively) whose extension matches
    *postfix*; 'ALL' keeps every file.
    :param file_dir: root directory to search
    :param postfix: extension such as 'jpg' or '.png'; 'ALL' disables filtering
    :return: sorted list of file paths
    '''
    # Normalize '.jpg' / 'jpg' to the bare extension.
    wanted_ext = postfix.split('.')[-1]
    all_paths = getFilePathList(file_dir)
    if wanted_ext == 'ALL':
        file_list = all_paths
    else:
        file_list = [path for path in all_paths
                     if os.path.basename(path).split('.')[-1] == wanted_ext]
    file_list.sort()
    return file_list
def gen_files_labels(files_dir, postfix='ALL'):
    '''
    List every file under files_dir together with a label derived from its
    parent directory name (one sub-folder per class, folder name = label).
    :param files_dir: root directory, one sub-folder per class
    :param postfix: extension filter forwarded to get_files_list
    :return: (filePath_list, label_list) — paths and their matching labels
    '''
    filePath_list = get_files_list(files_dir, postfix=postfix)
    print("files nums:{}".format(len(filePath_list)))
    # The immediate parent folder name serves as the sample's label.
    label_list = [filePath.split(os.sep)[-2] for filePath in filePath_list]
    labels_set = list(set(label_list))
    print("labels:{}".format(labels_set))
    return filePath_list, label_list
def decode_label(label_list, name_table):
    '''
    Translate integer labels back to their names via name_table.
    :param label_list: iterable of label indices
    :param name_table: lookup table, index -> name
    :return: list of names, one per label
    '''
    return [name_table[label] for label in label_list]
def encode_label(name_list, name_table, unknow=0):
    '''
    Translate names to integer labels via name_table.
    :param name_list: iterable of names to encode
    :param name_table: lookup table, index -> name
    :param unknow: label assigned to names absent from name_table; index 0 is
        usually background, so unknown names default to it
    :return: list of label indices, one per name
    '''
    return [name_table.index(name) if name in name_table else unknow
            for name in name_list]
if __name__ == '__main__':
    # Smoke test: write two records to disk and read them back.
    filename = 'test.txt'
    records = [['1.jpg', 'dog', 200, 300, 1.0], ['2.jpg', 'dog', 20, 30, -2]]
    print("w_data=", records)
    write_data(filename, records, mode='w')
    loaded = read_data(filename)
    print('r_data=', loaded)
|
[
"pan_jinquan@163.com"
] |
pan_jinquan@163.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.