hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
649fc66bebfabac4f317bf9a2446146f166a9e2e | 1,549 | py | Python | crawls/ProxyPool/proxy_spiders/spider_ip181.py | NCU-NLP/news_feed | af2097c5b815c45c6089824759b50c64e51955e4 | [
"MIT"
] | 2 | 2017-11-22T02:51:25.000Z | 2017-11-27T10:50:22.000Z | crawls/ProxyPool/proxy_spiders/spider_ip181.py | NCU-NLP/news_feed | af2097c5b815c45c6089824759b50c64e51955e4 | [
"MIT"
] | 4 | 2017-11-12T14:13:16.000Z | 2021-06-01T21:56:17.000Z | crawls/ProxyPool/proxy_spiders/spider_ip181.py | NCU-NLP/news_feed | af2097c5b815c45c6089824759b50c64e51955e4 | [
"MIT"
] | 7 | 2017-11-01T12:46:17.000Z | 2020-05-14T01:20:45.000Z | import requests
import re
import logging
import time
import threading
from bs4 import BeautifulSoup
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "en-US,en;q=0.5",
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0"}
def get_current_time():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    # strftime without an explicit time tuple defaults to time.localtime()
    return time.strftime('%Y-%m-%d %X')
def crawl():
    """Scrape proxy 'ip:port' strings from www.ip181.com.

    Fetches the first two listing pages, parses the proxy table and returns
    a list of 'ip:port' strings with all whitespace characters stripped.
    Pages that fail to download or parse are skipped with a logged error.
    """
    urls = ['http://www.ip181.com/daili/2.html', 'http://www.ip181.com/daili/1.html']
    result = []
    for pageurl in urls:
        try:
            html = requests.get(pageurl, headers=headers, timeout=30).text
            table = BeautifulSoup(html, 'lxml').find('table', {'class': 'ctable'}).find_all('tr')
        except Exception as e:
            # network failure or unexpected page layout: skip this page
            print('[%s][Spider][ip181]Error:' % get_current_time(), e)
            continue
        for item in table[1:]:  # table[0] is the header row
            try:
                tds = item.find_all('td')
                ip = tds[0].get_text()
                port = tds[1].get_text()
            except (AttributeError, IndexError):
                # was a bare 'except'; narrowed so KeyboardInterrupt/SystemExit propagate
                continue
            line = ip + ':' + port
            result.append(line.replace('\r', '').replace('\n', '').replace('\t', '').replace(' ', ''))
    print('[%s][Spider][ip181]OK!' % get_current_time(), 'Crawled IP Count:', len(result))
    return result
class SpiderIP181(threading.Thread):
    """Background thread that runs the ip181 crawler.

    After the thread finishes, the scraped proxy list is available as
    ``self.result``.
    """

    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        self.result = crawl()
| 31.612245 | 102 | 0.574564 | 154 | 0.099419 | 0 | 0 | 0 | 0 | 0 | 0 | 443 | 0.285991 |
64a06148dea41ab8bba1f28dcb0466b88c322b88 | 3,670 | py | Python | toolbox/docker/utils.py | avatao-content/challenge-engine | 38700b96895f12f310e6cf266b9c24b639dbad40 | [
"Apache-2.0"
] | 23 | 2017-08-15T08:18:27.000Z | 2021-05-16T20:38:30.000Z | toolbox/docker/utils.py | avatao-content/challenge-engine | 38700b96895f12f310e6cf266b9c24b639dbad40 | [
"Apache-2.0"
] | 7 | 2017-08-31T18:18:04.000Z | 2019-10-20T00:07:10.000Z | toolbox/docker/utils.py | avatao-content/challenge-engine | 38700b96895f12f310e6cf266b9c24b639dbad40 | [
"Apache-2.0"
] | 9 | 2017-08-31T18:37:01.000Z | 2020-02-11T08:30:46.000Z | import os
import subprocess
from glob import glob
from typing import Any, Dict, Iterable, List, Tuple
from toolbox.config.docker import DOCKER_REGISTRY, DOCKER_REGISTRY_MIRRORS, WHITELISTED_DOCKER_REGISTRIES
from toolbox.utils import run_cmd, fatal_error
def get_image_url(image: str) -> str:
    """Qualify a bare image name with the default registry, or validate an
    already-qualified one against the registry whitelist.

    Aborts via fatal_error when the registry prefix is not whitelisted.
    """
    if '/' not in image:
        return DOCKER_REGISTRY + '/' + image
    if any(image.startswith(registry + '/') for registry in WHITELISTED_DOCKER_REGISTRIES):
        return image
    fatal_error("Invalid image: %s Registry not in whitelist: %s", image, WHITELISTED_DOCKER_REGISTRIES)
def get_challenge_image_url(
    repo_name: str, repo_branch: str, short_name: str, crp_config_item: Dict[str, Any]
) -> str:
    """Build (or pass through) the docker image URL for one challenge container."""
    # config.yml may pin an explicit image; otherwise derive "<repo>:<tag>"
    image = crp_config_item.get('image')
    if not image:
        tag = short_name if repo_branch == 'master' else '-'.join((short_name, repo_branch))
        image = ':'.join((repo_name, tag))
    return get_image_url(image)
def pull_images(images: List[str], raise_errors: bool = False):
    """docker-pull every image; error handling is delegated to run_cmd."""
    for image_url in images:
        run_cmd(['docker', 'pull', image_url], raise_errors=raise_errors)
def push_images(images: List[str]):
    """docker-push every image in *images*."""
    for image_url in images:
        run_cmd(['docker', 'push', image_url])
def strip_image_registry(image: str) -> str:
    """Drop a whitelisted registry prefix from *image*; abort on unknown registries."""
    if '/' not in image:
        return image
    for registry in WHITELISTED_DOCKER_REGISTRIES:
        prefix = registry + '/'
        if image.startswith(prefix):
            return image[len(prefix):]
    fatal_error("Invalid image to strip: %s Registry not in whitelist: %s", image, WHITELISTED_DOCKER_REGISTRIES)
def mirror_images(images: List[str]):
    """Re-tag each image for every mirror registry, push it, then drop the local tag."""
    for image in images:
        bare_image = strip_image_registry(image)
        for mirror in DOCKER_REGISTRY_MIRRORS:
            mirrored = mirror + '/' + bare_image
            try:
                run_cmd(['docker', 'tag', image, mirrored])
                run_cmd(['docker', 'push', mirrored])
            finally:
                # always remove the temporary local tag, even when the push fails
                run_cmd(['docker', 'rmi', mirrored])
def yield_dockerfiles(
    repo_path: str, repo_name: str, repo_branch: str, crp_config: Dict[str, Dict]
) -> Iterable[Tuple[str, str]]:
    """Yield (dockerfile_path, image_url) for every */Dockerfile in the repo."""
    for dockerfile in glob(os.path.join(repo_path, '*', 'Dockerfile')):
        # the challenge's short name is the directory holding the Dockerfile
        short_name = os.path.basename(os.path.dirname(dockerfile))
        yield dockerfile, get_challenge_image_url(repo_name, repo_branch, short_name, crp_config[short_name])
def yield_all_image_urls(
    repo_path: str, repo_name: str, repo_branch: str, crp_config: Dict[str, Dict]
) -> Iterable[Tuple[str, str, bool]]:
    """Yield (short_name, image_url, is_built_from_this_repo) for every container."""
    built = {image for _, image in yield_dockerfiles(repo_path, repo_name, repo_branch, crp_config)}
    for short_name, crp_config_item in crp_config.items():
        image = get_challenge_image_url(repo_name, repo_branch, short_name, crp_config_item)
        yield short_name, image, image in built
def sorted_container_configs(crp_config: Dict[str, Dict]) -> List[Tuple[str, Dict]]:
    """Order container configs for startup.

    The solvable must come first (it shares its volumes and namespaces),
    then the controller if defined, then every other container in their
    original order (sorted() is stable).
    """
    priority = {"solvable": 0, "controller": 1}
    return sorted(crp_config.items(), key=lambda entry: priority.get(entry[0], 2))
def image_exists(image: str) -> bool:
    """Return True if *image* is present in the local docker daemon.

    `docker images -q` prints the image ID (or nothing), so a non-empty
    output means the image exists.
    """
    image_output = subprocess.check_output(['docker', 'images', '-q', image]).decode('utf-8').rstrip()
    return bool(image_output)
| 33.669725 | 113 | 0.668937 | 0 | 0 | 869 | 0.236785 | 0 | 0 | 0 | 0 | 463 | 0.126158 |
64a2067aa9929d774bfeaaacf40147f2b7919a09 | 7,194 | py | Python | complex_neural_net.py | mehdihosseinimoghadam/Complex-Neural-Networks | 7f1135d5c6e23113c43f5a9d9aa3d257bf0770de | [
"OLDAP-2.2.1"
] | null | null | null | complex_neural_net.py | mehdihosseinimoghadam/Complex-Neural-Networks | 7f1135d5c6e23113c43f5a9d9aa3d257bf0770de | [
"OLDAP-2.2.1"
] | null | null | null | complex_neural_net.py | mehdihosseinimoghadam/Complex-Neural-Networks | 7f1135d5c6e23113c43f5a9d9aa3d257bf0770de | [
"OLDAP-2.2.1"
] | null | null | null | """
Complex Valued Neural Layers From Scratch
Programmed by Mehdi Hosseini Moghadam
* MIT Licence
* 2022-02-15 Last Update
"""
from torch import nn
import torch
##__________________________________Complex Linear Layer __________________________________________
class CLinear(nn.Module):
    """Complex-valued linear layer.

    The last dimension of the input holds [real, imaginary]; the layer
    applies (W_re + i*W_im) to (x_re + i*x_im) using two real Linear
    sub-layers with Xavier-initialised weights.
    """

    def __init__(self, in_channels, out_channels, **kwargs):
        super(CLinear, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.re_linear = nn.Linear(in_channels, out_channels, **kwargs)
        self.im_linear = nn.Linear(in_channels, out_channels, **kwargs)
        nn.init.xavier_uniform_(self.re_linear.weight)
        nn.init.xavier_uniform_(self.im_linear.weight)

    def forward(self, x):
        real, imag = x[..., 0], x[..., 1]
        # complex multiply: (A + iB)(u + iv) = (Au - Bv) + i(Av + Bu)
        out_real = self.re_linear(real) - self.im_linear(imag)
        out_imag = self.im_linear(real) + self.re_linear(imag)
        return torch.stack((out_real, out_imag), dim=-1)
##______________________________________Complex Convolution 2d_____________________________________________
class CConv2d(nn.Module):
    """Complex-valued 2D convolution.

    Input is (N, C, H, W, 2) with real/imaginary parts in the last
    dimension; each part is convolved by a real Conv2d and combined
    following the complex multiplication rule.
    """

    def __init__(self, in_channels, out_channels, **kwargs):
        super(CConv2d, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.re_conv = nn.Conv2d(in_channels, out_channels, **kwargs)
        self.im_conv = nn.Conv2d(in_channels, out_channels, **kwargs)
        nn.init.xavier_uniform_(self.re_conv.weight)
        nn.init.xavier_uniform_(self.im_conv.weight)

    def forward(self, x):
        real, imag = x[..., 0], x[..., 1]
        return torch.stack(
            (self.re_conv(real) - self.im_conv(imag),
             self.im_conv(real) + self.re_conv(imag)),
            dim=-1,
        )
##___________________________________Complex Convolution Transpose 2d_______________________________________________
class CConvTrans2d(nn.Module):
    """Complex-valued transposed 2D convolution (same real/imag layout as CConv2d)."""

    def __init__(self, in_channels, out_channels, **kwargs):
        super(CConvTrans2d, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.re_Tconv = nn.ConvTranspose2d(in_channels, out_channels, **kwargs)
        self.im_Tconv = nn.ConvTranspose2d(in_channels, out_channels, **kwargs)
        nn.init.xavier_uniform_(self.re_Tconv.weight)
        nn.init.xavier_uniform_(self.im_Tconv.weight)

    def forward(self, x):
        real, imag = x[..., 0], x[..., 1]
        return torch.stack(
            (self.re_Tconv(real) - self.im_Tconv(imag),
             self.im_Tconv(real) + self.re_Tconv(imag)),
            dim=-1,
        )
##___________________________Complex BatchNorm Layer____________________________________
class CBatchnorm(nn.Module):
    """Naive complex batch norm: real and imaginary parts are normalised
    independently by their own BatchNorm2d.

    Bug fix: the imaginary part is now normalised by ``im_batch``; the
    original code reused ``re_batch`` for both parts, leaving ``im_batch``
    unused (its learned affine parameters and running statistics were never
    updated).
    """
    def __init__(self, in_channels):
        super(CBatchnorm, self).__init__()
        self.in_channels = in_channels
        self.re_batch = nn.BatchNorm2d(in_channels)
        self.im_batch = nn.BatchNorm2d(in_channels)

    def forward(self, x):
        x_re = x[..., 0]  # real plane, shape (N, C, H, W)
        x_im = x[..., 1]  # imaginary plane
        out_re = self.re_batch(x_re)
        out_im = self.im_batch(x_im)  # was: self.re_batch(x_im)
        out = torch.stack([out_re, out_im], -1)
        return out
##_______________________Complex Convolutional Block_______________________________________
class CconvBlock(nn.Module):
    """Complex conv block: CConv2d -> CBatchnorm -> LeakyReLU."""

    def __init__(self, in_channels, out_channels, **kwargs):
        super(CconvBlock, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.CConv2d = CConv2d(in_channels, out_channels, **kwargs)
        self.CBatchnorm = CBatchnorm(out_channels)
        self.leaky_relu = nn.LeakyReLU()

    def forward(self, x):
        # conv, normalise, then activate (activation applied to both parts)
        return self.leaky_relu(self.CBatchnorm(self.CConv2d(x)))
##__________________________________Complex Convolutional Transpose Block________________________________________
class CConvTransBlock(nn.Module):
    """Complex transposed-conv block: CConvTrans2d -> CBatchnorm -> LeakyReLU.

    When ``last_layer`` is True, normalisation/activation is replaced by a
    bounded output: each element's magnitude is squashed with tanh while its
    sign ("phase") is preserved.
    """
    def __init__(self, in_channels, out_channels, last_layer=False, **kwargs):
        super(CConvTransBlock, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.last_layer = last_layer
        self.CConvTrans2d = CConvTrans2d(self.in_channels, self.out_channels, **kwargs)
        self.CBatchnorm = CBatchnorm(self.out_channels)
        self.leaky_relu = nn.LeakyReLU()
    def forward(self, x):
        conved = self.CConvTrans2d(x)
        if not self.last_layer:
            normed = self.CBatchnorm(conved)
            activated = self.leaky_relu(normed)
            return activated
        else:
            # element-wise sign; the epsilon guards against division by zero
            m_phase = conved/(torch.abs(conved)+1e-8)
            # element-wise magnitude squashed into (0, 1)
            m_mag = torch.tanh(torch.abs(conved))
            # NOTE(review): "phase"/"magnitude" are computed per real/imag
            # component, not as a true complex modulus -- confirm intended.
            out = m_phase * m_mag
            return out
##______________________Complex LSTM Layer_________________________________________________
class CLSTM(nn.Module):
    """Complex-valued LSTM built from two real LSTMs (re_LSTM, im_LSTM).

    ``x``, ``h0`` and ``c0`` carry real/imaginary parts at indices 0/1 of
    their last dimension; outputs follow the same convention.

    NOTE(review): the imaginary branch below feeds x_re to re_LSTM and x_im
    to im_LSTM, whereas the other layers in this file (e.g. CLinear) compute
    out_im = re(x_im) + im(x_re).  Confirm whether the swapped inputs are
    intentional before relying on this layer.
    """
    def __init__(self, in_channels, hidden_size, num_layers, **kwargs):
        super(CLSTM, self).__init__()
        self.in_channels = in_channels
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.re_LSTM = nn.LSTM(self.in_channels, self.hidden_size, self.num_layers , **kwargs)
        self.im_LSTM = nn.LSTM(self.in_channels, self.hidden_size, self.num_layers, **kwargs)
    def forward(self, x, h0, c0):
        x_re = x[..., 0]
        x_im = x[..., 1]
        # real part: re(x_re) - im(x_im), with matching real/imag initial states
        out_re1, (hn_re1, cn_re1) = self.re_LSTM(x_re, (h0[...,0], c0[...,0]))
        out_re2, (hn_re2, cn_re2) = self.im_LSTM(x_im, (h0[...,1], c0[...,1]))
        out_re = out_re1 - out_re2
        hn_re = hn_re1 - hn_re2
        cn_re = cn_re1 - cn_re2
        # imaginary part (see NOTE in the class docstring about the inputs)
        out_im1, (hn_im1, cn_im1) = self.re_LSTM(x_re, (h0[...,1], c0[...,1]))
        out_im2, (hn_im2, cn_im2) = self.im_LSTM(x_im, (h0[...,0], c0[...,0]))
        out_im = out_im1 + out_im2
        hn_im = hn_im1 + hn_im2
        cn_im = cn_im1 + cn_im2
        out = torch.stack([out_re, out_im], -1)
        hn = torch.stack([hn_re, hn_im], -1)
        cn = torch.stack([cn_re, cn_im], -1)
        return out, (hn, cn)
##_______________________________Complex MaxPooling 2d Layer___________________
class CMaxPool2d(nn.Module):
    """Max-pools the real and imaginary planes independently."""

    def __init__(self, kernel_size, **kwargs):
        super(CMaxPool2d, self).__init__()
        self.kernel_size = kernel_size
        self.CMax_re = nn.MaxPool2d(kernel_size, **kwargs)
        self.CMax_im = nn.MaxPool2d(kernel_size, **kwargs)

    def forward(self, x):
        pooled_re = self.CMax_re(x[..., 0])
        pooled_im = self.CMax_im(x[..., 1])
        return torch.stack((pooled_re, pooled_im), dim=-1)
##________________________________Complex Average Pooling 2d Layer_____________________________
class CAvgPool2d(nn.Module):
    """Average-pools the real and imaginary planes independently.

    (The sub-layers keep the legacy ``CMax_*`` attribute names for
    compatibility with existing checkpoints/callers.)
    """

    def __init__(self, kernel_size, **kwargs):
        super(CAvgPool2d, self).__init__()
        self.kernel_size = kernel_size
        self.CMax_re = nn.AvgPool2d(kernel_size, **kwargs)
        self.CMax_im = nn.AvgPool2d(kernel_size, **kwargs)

    def forward(self, x):
        pooled_re = self.CMax_re(x[..., 0])
        pooled_im = self.CMax_im(x[..., 1])
        return torch.stack((pooled_re, pooled_im), dim=-1)
| 27.458015 | 118 | 0.678343 | 6,386 | 0.887684 | 0 | 0 | 0 | 0 | 0 | 0 | 1,018 | 0.141507 |
64a6d9297b2d82f455c0e98224ae042ba6dbe984 | 1,939 | py | Python | scripts/sample_script.py | TheConfused/LinkedIn | 83e75ed18c54ebc1bed55ee55f69d580a2cb1b73 | [
"MIT"
] | null | null | null | scripts/sample_script.py | TheConfused/LinkedIn | 83e75ed18c54ebc1bed55ee55f69d580a2cb1b73 | [
"MIT"
] | null | null | null | scripts/sample_script.py | TheConfused/LinkedIn | 83e75ed18c54ebc1bed55ee55f69d580a2cb1b73 | [
"MIT"
] | null | null | null | from simplelinkedin import LinkedIn
def run_script(settings):
    """Log in to LinkedIn and run the invitation/follow automation.

    :param settings: dict of LINKEDIN_* configuration values (credentials,
        browser/driver selection, preferred-user files, cron options)

    NOTE(review): this sample runs the manual steps AND smart_follow_unfollow
    (which, per the comment below, repeats the same work), then sets and
    immediately removes the cron jobs -- presumably illustrative rather than
    a production flow.
    """
    with LinkedIn(
        username=settings.get("LINKEDIN_USER"),
        password=settings.get("LINKEDIN_PASSWORD"),
        browser=settings.get("LINKEDIN_BROWSER"),
        driver_path=settings.get("LINKEDIN_BROWSER_DRIVER"),
        headless=bool(settings.get("LINKEDIN_BROWSER_HEADLESS")),
    ) as ln:
        # all the steps manually
        ln.login()
        # ln.remove_sent_invitations(older_than_days=14)
        # cap new invitations so the weekly limit is never exceeded
        ln.send_invitations(
            max_invitation=max(ln.WEEKLY_MAX_INVITATION - ln.invitations_sent_last_week, 0),
            min_mutual=10,
            max_mutual=450,
            preferred_users=["Quant"],
            not_preferred_users=["Sportsman"],
            view_profile=True,
        )
        ln.accept_invitations()
        # OR
        # run smart follow-unfollow method (without setting cron jobs) which essentially does the same thing as
        # all the above steps
        ln.smart_follow_unfollow(
            users_preferred=settings.get("LINKEDIN_PREFERRED_USER") or [],
            users_not_preferred=settings.get("LINKEDIN_NOT_PREFERRED_USER") or [],
        )
        # setting and un-setting cron
        # set cron
        ln.set_smart_cron(settings)
        # remove existing cron jobs
        ln.remove_cron_jobs(settings=settings)
if __name__ == "__main__":
    import os
    # Example settings: credentials come from the environment; the rest are
    # machine-specific paths, feature toggles and user-list files.
    sett = {
        "LINKEDIN_USER": os.getenv("LINKEDIN_USER"),
        "LINKEDIN_PASSWORD": os.getenv("LINKEDIN_PASSWORD"),
        "LINKEDIN_BROWSER": "Chrome",
        "LINKEDIN_BROWSER_DRIVER": "/Users/dayhatt/workspace/drivers/chromedriver",
        "LINKEDIN_BROWSER_HEADLESS": 0,
        "LINKEDIN_BROWSER_CRON": 0,
        "LINKEDIN_CRON_USER": "dayhatt",
        "LINKEDIN_PREFERRED_USER": "./data/user_preferred.txt",
        "LINKEDIN_NOT_PREFERRED_USER": "./data/user_not_preferred.txt",
    }
    run_script(settings=sett)
| 32.864407 | 111 | 0.647241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 809 | 0.417225 |
64a85288f25d878a7f78992ddf71bbcdba23b115 | 4,561 | py | Python | F20/SVM_3beat/HeatMap.py | rmorse7/TCH_Arrhythmias_F20 | eb739ab68288d012c9af7f5c21c16776f947ac09 | [
"MIT"
] | null | null | null | F20/SVM_3beat/HeatMap.py | rmorse7/TCH_Arrhythmias_F20 | eb739ab68288d012c9af7f5c21c16776f947ac09 | [
"MIT"
] | null | null | null | F20/SVM_3beat/HeatMap.py | rmorse7/TCH_Arrhythmias_F20 | eb739ab68288d012c9af7f5c21c16776f947ac09 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
time_max_normalized = 1.25
normalized_amplitude = 400
num_bins_x = 50
num_bins_y = 50
# ----------------------------------------------------
# Based off code from D2K MIC group from Spring 2020. Rewritten and optimized for beat displaying by Ricky Morse,
# D2K Arrhythmias Team Fall 2020.
# ----------------------------------------------------
def get_histogram(segments, resample_size=100, num_bins_y=50, amp=400, log=True):
    """Accumulate beat segments into a (resample_size, num_bins_y) density map.

    Each segment is linearly resampled to ``resample_size`` time steps and
    each resampled value is bucketed into one of ``num_bins_y`` amplitude
    bins spanning roughly [-(amp + 1), amp + 1].  Cell counts are normalised
    by the number of segments.  With ``log=True``, zero cells are floored to
    half the smallest nonzero probability and the log of the map is returned.

    :param segments: list of (T, 2) arrays of [time, value] rows
    :param resample_size: number of time samples after resampling
    :param num_bins_y: number of amplitude bins
    :param amp: expected absolute amplitude bound of the normalised signal
    :param log: return log-likelihood instead of raw likelihood
    """
    bin_width = 2. * (amp + 1) / num_bins_y
    hist = np.zeros((resample_size, num_bins_y))
    for segment in segments:
        grid = np.linspace(segment[0, 0], segment[-1, 0], resample_size)
        values = np.interp(grid, segment[:, 0], segment[:, 1])
        cols = ((values // bin_width) + (num_bins_y // 2)).astype(int)
        for row, col in enumerate(cols):
            try:
                hist[row, col] += 1
            except:
                print('Histogram index issue w/: ', (row, col))
    hist = hist / len(segments)
    if log:
        # floor zero cells so np.log stays finite everywhere
        nonzero = hist[hist != 0]
        hist[hist == 0] = np.min(nonzero) * 0.5
        hist = np.log(hist)
    return hist
def plot_histogram(hist, title):
    """Render a beat histogram as a heatmap with a log-likelihood colorbar.

    NOTE(review): the row_stack below rotates the time axis by 40 samples
    (wrap-around) before plotting -- presumably to centre the beat; confirm
    against the upstream segmentation offset.
    """
    # Modify heatmap
    hist = np.row_stack((hist[40:, :], hist[:40, :]))
    plt.imshow(np.rot90(hist))
    plt.xlabel("Samples")
    plt.ylabel("Bin Number")
    # negative total => hist is in log space (currently unused; see the
    # commented-out title logic below)
    log = True if np.sum(hist) < 0 else False
    # if log:
    #     title = "ECG Beat Log-likelihood"
    # else:
    #     title = "ECG Beat Likelihood"
    plt.title(title)
    clb = plt.colorbar()
    clb.set_label('log likelihood', labelpad=15, rotation=270)
    plt.show()
def get_segments_likelihood(hist, segments, num_parameters=1, amp=400):
    """Return the likelihood of each segment under *hist* as an (N, P) array.

    N is the number of segments and P = ``num_parameters``: each segment's
    per-sample likelihood vector is split into P chunks, which are summed
    (log-likelihood histograms) or multiplied (raw likelihoods).

    :param hist: (resample_size, num_bins_y) map produced by get_histogram
    :param segments: list of (T, 2) arrays of [time, value] rows
    :param num_parameters: number of output chunks; should be <= hist.shape[0]
    :param amp: amplitude bound used when *hist* was built (default matches
        the module-level ``normalized_amplitude`` of 400)
    """
    resample_size, num_bins_y = hist.shape
    y_bin_width = 2. * (amp + 1) / num_bins_y
    feature_matrix = np.zeros((len(segments), resample_size))
    for i, segment in enumerate(segments):
        x = np.linspace(segment[0, 0], segment[-1, 0], resample_size)
        y = np.interp(x, segment[:, 0], segment[:, 1])
        # Bug fix: use the same bin offset as get_histogram (num_bins_y // 2).
        # The previous hard-coded "+ 24" was off by one for the default 50
        # bins and sent the lowest amplitudes to the top bin via negative
        # (wrap-around) indexing.
        bins = ((y // y_bin_width) + (num_bins_y // 2)).astype(int)
        feature_matrix[i, :] = hist[np.arange(resample_size), bins]
    # Heuristic carried over from the original code: a negative total means
    # the histogram is in log space (sum chunks) rather than raw (multiply).
    log = np.sum(feature_matrix) < 0
    split_arr = np.array_split(feature_matrix, num_parameters, axis=1)
    reshaped_matrix = np.zeros((len(segments), num_parameters))
    for j, chunk in enumerate(split_arr):
        reshaped_matrix[:, j] = np.sum(chunk, axis=1) if log else np.prod(chunk, axis=1)
    return reshaped_matrix
def resample_segments(segments, resample_size):
    """Linearly resample every segment to ``resample_size`` points.

    Returns an (N, resample_size) array where row i holds the interpolated
    lead values of segments[i] on an evenly spaced time grid.
    """
    out = np.zeros((len(segments), resample_size))
    for row, segment in enumerate(segments):
        grid = np.linspace(segment[0, 0], segment[-1, 0], resample_size)
        out[row, :] = np.interp(grid, segment[:, 0], segment[:, 1])
    return out
| 37.385246 | 114 | 0.611269 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,730 | 0.379303 |
64aa2bc5220e8017e409c846fc02f3f094df8aea | 3,106 | py | Python | models/image/greenFluorescenceQuantifier.py | marinarobin/uwaterloo-igem-2018 | dd2d3227975c51c31e923c0e262b4fc07b44b73a | [
"MIT"
] | 3 | 2018-05-15T00:46:37.000Z | 2018-09-20T22:50:52.000Z | models/image/greenFluorescenceQuantifier.py | marinarobin/uwaterloo-igem-2018 | dd2d3227975c51c31e923c0e262b4fc07b44b73a | [
"MIT"
] | 1 | 2018-03-22T19:30:24.000Z | 2018-03-22T19:30:24.000Z | models/image/greenFluorescenceQuantifier.py | marinarobin/uwaterloo-igem-2018 | dd2d3227975c51c31e923c0e262b4fc07b44b73a | [
"MIT"
] | 3 | 2018-10-01T21:19:23.000Z | 2018-10-13T19:04:28.000Z | # Max Reed
# August 22, 2018
# A program designed for the UW iGEM Robots Subsubteam within the Math Subteam. It is meant to
# help quantify the amount of green fluorescence visible in an image. We have a very bright blue
# LED and a band pass filter that blocks blue light but lets through green light (and also red
# light I think). If you put the filter in front of a camera while shining the blue LED on bacterial
# samples, you can get pictures of the "green fluorescence" of your samples. Visually, it is
# possible to distinguish between pictures of "high fluorescence" samples and "low fluorescence"
# samples, but it is good to have a program to make the analysis quantitative.
from scipy import misc
# this python program should be put in the same directory as whatever images you want to analyze.
# you then enter the names of your images here.
# NOTE(review): this script is Python 2 (print statement) and relies on
# scipy.misc.imread, which was removed in SciPy 1.2 -- it needs an old
# environment or a port to imageio/Pillow.
# File stems (without the '.png' extension) of the sample photos to analyze.
fileNames = ["11","12","13","14","21","22","23","24"]
for name in fileNames:
    currentImage = misc.imread(name + '.png') # the image gets read in as a 3D array.
    # the 3rd dimension (the second embedded array) has the values:
    # [red value, green value, blue value, 255]
    # i don't know why 255 is added on at the end.
    # (the trailing 255 is presumably the alpha channel of an RGBA image)
    # this is a magical command that flattens the array so now it has the dimensions:
    # (height in pixels * width in pixels)x(4)
    rgb_flat_list = [item for sublist in currentImage for item in sublist]
    # the rest of this is just finding the average rgb value for each image and outputting that.
    # the average blue value is probably useless. the average green value might actually get too much
    # bleed over from blue light, meaning the average red value actually gives the best idea of how
    # much green fluorescence there is. this is just a guess though (supported by a single experiment
    # that i performed on August 21st, 2018).
    # accumulate per-channel sums over all pixels, then divide by the pixel count
    totIntensity = [0, 0, 0]
    for j in range(len(rgb_flat_list)):
        for k in range(3):
            totIntensity[k] = totIntensity[k] + rgb_flat_list[j][k]
    for k in range(3):
        totIntensity[k] = totIntensity[k] / (1.0 * len(rgb_flat_list))
    print "Average (r,g,b) for {} : ({}/255, {}/255, {}/255)".format(
        name,
        round(totIntensity[0], 1),
        round(totIntensity[1], 1),
        round(totIntensity[2], 1)
    )
# It's pretty dumb to include this here but this is the output from my first experiment on August 21st:
# Average (r,g,b) for 11: (77.1/255, 224.9/255, 191.9/255)
# Average (r,g,b) for 12: (121.5/255, 232.5/255, 198.1/255)
# Average (r,g,b) for 13: (59.4/255, 216.8/255, 183.9/255)
# Average (r,g,b) for 14: (118.1/255, 233.4/255, 200.9/255)
# Average (r,g,b) for 21: (136.8/255, 240.7/255, 220.0/255)
# Average (r,g,b) for 22: (114.3/255, 227.6/255, 200.5/255)
# Average (r,g,b) for 23: (61.0/255, 207.2/255, 178.7/255)
# Average (r,g,b) for 24: (66.6/255, 209.0/255, 179.7/255)
# 12, 14, 21, and 22 were the high fluorescence samples. The major flaw in this experiment was that I didn't normalize
# for optical density, though all samples should've had an OD of about 1.
| 53.551724 | 118 | 0.68255 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,408 | 0.775274 |
64aa32eb7118f5a70eea293de71708246ff3bf6d | 405 | py | Python | src/domain/image.py | wlsouza/py-video-maker | 7e39b884db8f6159489157e077705a68433a48c9 | [
"MIT"
] | 3 | 2020-02-16T18:28:29.000Z | 2020-07-16T22:53:08.000Z | src/domain/image.py | wlsouza/py-video-maker | 7e39b884db8f6159489157e077705a68433a48c9 | [
"MIT"
] | 10 | 2020-02-16T01:26:39.000Z | 2022-03-12T00:58:39.000Z | src/domain/image.py | wlsouza/py-video-maker | 7e39b884db8f6159489157e077705a68433a48c9 | [
"MIT"
] | null | null | null | # !usr/bin/python
# -*- coding: UTF-8 -*-
class Image:
    """Lightweight value object describing a downloaded image."""

    def __init__(self, name, path, url):
        # name: file name; path: directory prefix (expected to already end
        # with a separator); url: remote source of the image
        self.name = name
        self.path = path
        self.url = url

    @property
    def full_path(self):
        """Directory prefix joined with the file name by plain concatenation."""
        return self.path + self.name

    def __str__(self):
        return self.name

    def __repr__(self):
        return ' {{ Name: {}, \nPath: {}, \nURL:{} }}'.format(self.name, self.path, self.url)
| 19.285714 | 81 | 0.553086 | 360 | 0.888889 | 0 | 0 | 71 | 0.175309 | 0 | 0 | 106 | 0.261728 |
64aa9e0fb2abbc8acf4ca4d645c0e94f3277a488 | 152 | py | Python | GreyMatter/open_firefox.py | nayangupta824/Melissa-Web | cd669a60bf5642145904b6e7e2c4f3de2d4874c1 | [
"MIT"
] | 20 | 2015-12-09T13:14:25.000Z | 2020-05-14T05:08:31.000Z | Using_Internet_to_Gather_Information/Using_Internet_to_Gather_Information/Melissa-Core/GreyMatter/open_firefox.py | Abhidalakoti/Project1 | e7b6bae3cb96f543d04d33cdb5015b2698af283e | [
"MIT"
] | 1 | 2021-03-26T00:28:00.000Z | 2021-03-26T00:28:00.000Z | Using_Internet_to_Gather_Information/Using_Internet_to_Gather_Information/Melissa-Core/GreyMatter/open_firefox.py | Abhidalakoti/Project1 | e7b6bae3cb96f543d04d33cdb5015b2698af283e | [
"MIT"
] | 24 | 2015-12-16T13:15:32.000Z | 2021-02-21T17:29:44.000Z | from selenium import webdriver
from SenseCells.tts import tts
def open_firefox():
    """Announce the action via text-to-speech, then launch a new Firefox window."""
    tts('Aye aye captain, opening Firefox')
    webdriver.Firefox()
| 19 | 43 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.223684 |
64abd26bdea2dd2cb38f2c7f7fbbe13c0579ff77 | 126 | py | Python | gym_quoridor/__init__.py | Xe-Xo/GymQuoridor | ab65c9f698fa2ceaf4728a241fcf181b09dbfd9d | [
"MIT"
] | 1 | 2020-06-27T08:19:23.000Z | 2020-06-27T08:19:23.000Z | gym_quoridor/__init__.py | Xe-Xo/GymQuoridor | ab65c9f698fa2ceaf4728a241fcf181b09dbfd9d | [
"MIT"
] | null | null | null | gym_quoridor/__init__.py | Xe-Xo/GymQuoridor | ab65c9f698fa2ceaf4728a241fcf181b09dbfd9d | [
"MIT"
] | null | null | null | from gym.envs.registration import register
# Register the Quoridor environment with Gym so it can be instantiated via
# gym.make('quoridor-v0'); entry_point names the implementing class.
register(
    id='quoridor-v0',
    entry_point='gym_quoridor.envs:QuoridorEnv',
)
64acc512b5825b962fd5ff32c2724cf9fe2368b1 | 482 | py | Python | Python3/OTUS/lesson04/lesson04-6.py | neon1ks/Study | 5d40171cf3bf5e8d3a95539e91f5afec54d1daf3 | [
"MIT"
] | null | null | null | Python3/OTUS/lesson04/lesson04-6.py | neon1ks/Study | 5d40171cf3bf5e8d3a95539e91f5afec54d1daf3 | [
"MIT"
] | null | null | null | Python3/OTUS/lesson04/lesson04-6.py | neon1ks/Study | 5d40171cf3bf5e8d3a95539e91f5afec54d1daf3 | [
"MIT"
] | null | null | null | from http.server import BaseHTTPRequestHandler, HTTPServer
class MyHandler(BaseHTTPRequestHandler):
    """Minimal handler that answers every GET with a static HTML greeting."""

    def do_GET(self):
        """Reply 200 with a small HTML body."""
        print('In GET request')
        self.send_response(200)
        # Bug fix: the MIME type separator is '/', not '-'; with 'text-html'
        # browsers would treat the body as an unknown content type.
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        self.wfile.write('<h1>Hello World!</h1>'.encode())
# Bind the demo server to localhost:8082 and serve until Ctrl-C.
httpd = HTTPServer(('localhost', 8082), MyHandler)
print('Starting...')
try:
    httpd.serve_forever()
except KeyboardInterrupt:
    print('Bye!')
| 20.083333 | 58 | 0.670124 | 269 | 0.558091 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.195021 |
64ada6a04d190562d8e72db15ae3de0750f25de1 | 920 | py | Python | pacote-download/ex111/utilidadescev/moeda/__init__.py | nkonai/Curso-em-video-Python | c05a60b3daa7d448e1e7f0d4d23f62df5d2c8df2 | [
"MIT"
] | null | null | null | pacote-download/ex111/utilidadescev/moeda/__init__.py | nkonai/Curso-em-video-Python | c05a60b3daa7d448e1e7f0d4d23f62df5d2c8df2 | [
"MIT"
] | null | null | null | pacote-download/ex111/utilidadescev/moeda/__init__.py | nkonai/Curso-em-video-Python | c05a60b3daa7d448e1e7f0d4d23f62df5d2c8df2 | [
"MIT"
] | null | null | null | def metade(preco,sit):
if sit==True:
return (f'R${preco/2}')
else:
return preco/2
def dobro(preco, sit):
    """Return double of *preco*.

    :param preco: numeric price
    :param sit: when True, return the value formatted as an 'R$...' string
    :return: number, or formatted string when sit is True
    """
    valor = preco * 2
    if sit:  # idiomatic truth test instead of 'sit == True'
        return f'R${valor}'
    return valor
def aumentar(preco, r, sit):
    """Increase *preco* by *r* percent.

    :param preco: numeric price
    :param r: percentage increase
    :param sit: when True, return the value formatted as an 'R$...' string
    :return: float, or formatted string when sit is True
    """
    valor = preco * (100 + r) / 100
    if sit:  # idiomatic truth test instead of 'sit == True'
        return f'R${valor}'
    return valor
def diminuir(preco, r, sit):
    """Decrease *preco* by *r* percent.

    :param preco: numeric price
    :param r: percentage reduction
    :param sit: when True, return the value formatted as an 'R$...' string
    :return: float, or formatted string when sit is True
    """
    valor = preco * (100 - r) / 100
    if sit:  # idiomatic truth test instead of 'sit == True'
        return f'R${valor}'
    return valor
def moeda(preco):
    """Format *preco* with the 'R$' currency prefix."""
    return 'R$' + str(preco)
def resumo(p=0, r=10, q=5):
    """Print a formatted price summary: value, double, half, raise, discount.

    :param p: price to analyse
    :param r: percentage increase passed to aumentar()
    :param q: percentage reduction passed to diminuir()
    """
    print('-'*30)
    print('RESUMO DO VALOR'.center(30))
    print('-' * 30)
    # every helper is called with sit=True so values print as 'R$...' strings
    print(f'Preco analisado: \t{moeda(p):>10}')
    print(f'Dobro do preco: \t{dobro(p, True):>10}')
    print(f'Metade do preco: \t{metade(p, True):>10}')
    print(f'{r}% de aumento: \t{aumentar(p, r, True):>10}')
    print(f'{q}% de reducao: \t{diminuir(p, q, True):>10}')
64adfd451b09f6dc065a668f9264ad3f97a22f44 | 397 | py | Python | test/unit/tools/filter_modules/filtermod.py | ramezrawas/galaxy-1 | c03748dd49c060a68d07bce56eae33e0ba154414 | [
"CC-BY-3.0"
] | 6 | 2018-11-03T22:43:35.000Z | 2022-02-15T17:51:33.000Z | test/unit/tools/filter_modules/filtermod.py | igorhollaender/OBSOLETE_sirv_dashboard | 85aec60b80ef6f561d89398e3da5963d3d0f2aa4 | [
"CC-BY-3.0"
] | 7 | 2016-12-07T22:19:37.000Z | 2019-01-30T15:04:26.000Z | test/unit/tools/filter_modules/filtermod.py | igorhollaender/OBSOLETE_sirv_dashboard | 85aec60b80ef6f561d89398e3da5963d3d0f2aa4 | [
"CC-BY-3.0"
] | 10 | 2017-04-10T21:40:22.000Z | 2022-02-21T16:50:10.000Z | """ Test filters used by test_toolbox_filters.py.
"""
def filter_tool(context, tool):
    """Tool filter used by test_toolbox_filters.py; rejects every tool."""
    return False
def filter_section(context, section):
    """Section filter used by test_toolbox_filters.py; rejects every section."""
    return False
def filter_label_1(context, label):
    """First label filter used by test_toolbox_filters.py; rejects every label."""
    return False
def filter_label_2(context, label):
    """Second label filter used by test_toolbox_filters.py; rejects every label."""
    return False
| 17.26087 | 49 | 0.654912 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 150 | 0.377834 |
64ae05324f3dae6df96c35bcbece7b74feb328fc | 120 | py | Python | countd.py | sushmitajaiswal/PythonPrograms | d4fb1b36953185e2f8dd866798ca6965a52563a9 | [
"MIT"
] | null | null | null | countd.py | sushmitajaiswal/PythonPrograms | d4fb1b36953185e2f8dd866798ca6965a52563a9 | [
"MIT"
] | null | null | null | countd.py | sushmitajaiswal/PythonPrograms | d4fb1b36953185e2f8dd866798ca6965a52563a9 | [
"MIT"
] | null | null | null | word=input("enter any word:")
d={}
for x in word:
d[x]=d.get(x,0)+1
for k,v in d.items():
print(k,"occured",v,"times") | 20 | 29 | 0.608333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.275 |
64ae3d6f62b7608b160f2bb48636489376bec131 | 4,004 | py | Python | body/test/test_hello_utils.py | soumith/stretch_body | 998cec78fae67090060d4474c3029f9db45cde85 | [
"RSA-MD"
] | null | null | null | body/test/test_hello_utils.py | soumith/stretch_body | 998cec78fae67090060d4474c3029f9db45cde85 | [
"RSA-MD"
] | 1 | 2021-08-29T21:42:17.000Z | 2021-08-30T05:58:15.000Z | body/test/test_hello_utils.py | soumith/stretch_body | 998cec78fae67090060d4474c3029f9db45cde85 | [
"RSA-MD"
] | 3 | 2021-08-20T22:51:57.000Z | 2021-09-02T17:05:25.000Z | import unittest
import stretch_body.hello_utils
import time
import warnings
class TestHelloUtils(unittest.TestCase):
    """Unit tests for the helpers in stretch_body.hello_utils."""

    def test_yaml_file_released(self):
        """Verify reading a fleet YAML does not leak the file handle."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            # read yaml, generating a ResourceWarning if the file is not released
            yaml = stretch_body.hello_utils.read_fleet_yaml('stretch_re1_user_params.yaml')
            self.assertTrue(len(w) == 0)

    def test_reading_invalid_yaml(self):
        """Verify that read_fleet_yaml returns empty dict on invalid file.
        """
        read_params = stretch_body.hello_utils.read_fleet_yaml('invalid_file123.yaml')
        self.assertEqual(read_params, {})
        read_params1 = stretch_body.hello_utils.read_fleet_yaml('')
        self.assertEqual(read_params1, {})

    def test_overwriting_params(self):
        """Test the behavior of the overwrite_dict method.
        """
        # disjoint keys: overrider's keys are added
        dee1 = {'param1': 1}
        der1 = {'param2': 2}
        stretch_body.hello_utils.overwrite_dict(dee1, der1)
        self.assertEqual(dee1, {'param1': 1, 'param2': 2})

        # same key, same (scalar) type: value is replaced
        dee2 = {'param1': 'to_override'}
        der2 = {'param1': 'over'}
        stretch_body.hello_utils.overwrite_dict(dee2, der2)
        self.assertEqual(dee2, {'param1': 'over'})

        # nested dicts are merged recursively, untouched keys survive
        dee3 = {'param1': {'motion': 'to_override', 'no_change': 1}}
        der3 = {'param1': {'motion': 'over'}}
        stretch_body.hello_utils.overwrite_dict(dee3, der3)
        self.assertEqual(dee3, {'param1': {'motion': 'over', 'no_change': 1}})

        # mismatched types (dict over scalar) must NOT overwrite
        dee4 = {'param1': {'motion': 'same', 'no_change': 1}}
        der4 = {'param1': {'motion': {}}}
        stretch_body.hello_utils.overwrite_dict(dee4, der4)
        self.assertEqual(dee4, {'param1': {'motion': 'same', 'no_change': 1}})

        # mismatched types (scalar over dict) must NOT overwrite either
        dee5 = {'param1': {'motion': {}, 'no_change': 1}}
        der5 = {'param1': {'motion': 2}}
        stretch_body.hello_utils.overwrite_dict(dee5, der5)
        self.assertEqual(dee5, {'param1': {'motion': {}, 'no_change': 1}})

    def test_overwriting_vs_updating_params(self):
        """Verify the difference between overwrite_dict and updating a dict.
        """
        # overwrite_dict merges nested levels ...
        overider1 = {"robot": {"motion": {"max": 100}}}
        overidee1 = {"robot": {"motion": {"min": -100}}}
        stretch_body.hello_utils.overwrite_dict(overidee1, overider1)
        self.assertEqual(overidee1, {"robot": {"motion": {"max": 100, "min": -100}}})

        # ... while dict.update clobbers the whole top-level value
        overider2 = {"robot": {"motion": {"max": 100}}}
        overidee2 = {"robot": {"motion": {"min": -100}}}
        overidee2.update(overider2)
        self.assertNotEqual(overidee1, overidee2)

    def test_pretty_print_dict(self):
        """Smoke test: pretty_print_dict must not raise on nested dicts."""
        dict1 = {"param1": 1, "param2": 2}
        stretch_body.hello_utils.pretty_print_dict("params", dict1)
        dict2 = {"robot": {"motion": {"max": 100, "min": -100}, "retry": True}}
        stretch_body.hello_utils.pretty_print_dict("Stretch", dict2)

    def test_create_time_string(self):
        """Verify time strings match.

        Rebuilt and compared up to twice so the test does not fail
        spuriously when the wall clock ticks over a second between our
        localtime() call and create_time_string()'s.
        """
        for _ in range(2):
            t = time.localtime()
            expected_time_string = str(t.tm_year) + str(t.tm_mon).zfill(2) + str(t.tm_mday).zfill(2) + str(t.tm_hour).zfill(2) + str(t.tm_min).zfill(2) + str(t.tm_sec).zfill(2)
            actual_time_string = stretch_body.hello_utils.create_time_string()
            if expected_time_string == actual_time_string:
                break
        self.assertEqual(expected_time_string, actual_time_string)

    def test_get_stretch_directory(self):
        """get_stretch_directory falls back to /tmp/ when HELLO_FLEET_PATH is unset."""
        import os
        if os.environ.get('HELLO_FLEET_PATH', None) is not None:
            self.assertNotEqual(stretch_body.hello_utils.get_stretch_directory(), "/tmp/")
            original_fleet_path = os.environ['HELLO_FLEET_PATH']
            del os.environ['HELLO_FLEET_PATH']
            try:
                self.assertEqual(stretch_body.hello_utils.get_stretch_directory(), "/tmp/")
            finally:
                # always restore the env var so a failed assertion cannot
                # poison the environment for later tests (original leaked it)
                os.environ['HELLO_FLEET_PATH'] = original_fleet_path
        else:
            self.assertEqual(stretch_body.hello_utils.get_stretch_directory(), "/tmp/")
| 43.053763 | 172 | 0.630619 | 3,924 | 0.98002 | 0 | 0 | 0 | 0 | 0 | 0 | 997 | 0.249001 |
64af3bbda0149dc749dbd37a20c934c3da8f8100 | 24,483 | py | Python | 2019/016_cielab_color_space/check_boundary_characteristic.py | toru-ver4/sample_code | 9165b4cb07a3cb1b3b5a7f6b3a329be081bddabe | [
"BSD-3-Clause"
] | 19 | 2019-11-12T23:34:35.000Z | 2022-03-08T13:21:03.000Z | 2019/016_cielab_color_space/check_boundary_characteristic.py | colour-science/sample_code | 8bda35b674d770da5a0e6c210634a77691527fce | [
"BSD-3-Clause"
] | 101 | 2019-08-12T01:20:13.000Z | 2022-03-18T12:17:01.000Z | 2019/016_cielab_color_space/check_boundary_characteristic.py | colour-science/sample_code | 8bda35b674d770da5a0e6c210634a77691527fce | [
"BSD-3-Clause"
] | 3 | 2020-06-08T09:48:08.000Z | 2022-03-09T15:35:51.000Z | # -*- coding: utf-8 -*-
"""
XYZ空間やL*a*b*空間の境界線の確認
======================================
XYZ空間やL*a*b*空間の境界線を確認する
"""
# import standard libraries
import os
import ctypes
# import third-party libraries
import numpy as np
import matplotlib.pyplot as plt
from colour import xyY_to_XYZ, XYZ_to_RGB, read_image, write_image, LUT3D,\
RGB_to_XYZ, XYZ_to_xyY
from colour.models import BT709_COLOURSPACE
from sympy import symbols, solve
from multiprocessing import Pool, cpu_count, Array
from mpl_toolkits.mplot3d import Axes3D
import test_pattern_generator2 as tpg
import cv2
# import matplotlib as mpl
# mpl.use('Agg')
# import my libraries
import plot_utility as pu
import color_space as cs
import test_pattern_generator2 as tpg
# information
__author__ = 'Toru Yoshihara'
__copyright__ = 'Copyright (C) 2019 - Toru Yoshihara'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Toru Yoshihara'
__email__ = 'toru.ver.11 at-sign gmail.com'
__all__ = []
# global variables
y_sample = 64    # number of luminance (Y) samples over [0.0, 1.0]
h_sample = 1024  # number of hue-angle samples over [0, 2*pi]
# Flat (y_sample * h_sample) result buffer for the solved boundary chroma,
# shared with the multiprocessing Pool workers (written by solve_chroma_thread).
shared_array = Array(
    typecode_or_type=ctypes.c_float,
    size_or_initializer=y_sample*h_sample)
def calc_xyY(large_y, hue):
    """Sample a ray through the D65 white point on the xy plane.

    1024 chroma values in [-0.2, 1.0] are swept along direction `hue`
    (radians), all at luminance `large_y`.

    :return: (xyY stack of shape (1, 1024, 3), x samples, y samples)
    """
    n = 1024
    chroma = np.linspace(-0.2, 1.0, n)
    x = chroma * np.cos(hue) + cs.D65[0]
    y = chroma * np.sin(hue) + cs.D65[1]
    luminance = np.full(n, large_y, dtype=np.float64)
    xyY = np.dstack((x, y, luminance))
    return xyY, x, y
def plot_rgb_around_large_xyz_boundary(rgb, x, y, large_y, hue):
    """Plot the R/G/B values along the sampling line (vs its x coordinate)
    and save the figure to ./blog_img/ with '=' and '°' stripped from the
    file name."""
    title_str =\
        "Y={:.02f}, Angle={:.01f}°".format(large_y, hue * 360 / (2 * np.pi))
    ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(10, 8),
        graph_title=title_str,
        graph_title_size=None,
        xlabel="x",
        ylabel="RGB Value",
        axis_label_size=None,
        legend_size=17,
        xlim=None,
        ylim=(-0.1, 1.1),
        xtick=[0.0, 0.185, 0.3, 0.467, 0.6],
        ytick=None,
        xtick_size=None, ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None)
    ax1.plot(x, rgb[..., 0].flatten(), '-r', label="Red")
    ax1.plot(x, rgb[..., 1].flatten(), '-g', label="Green")
    ax1.plot(x, rgb[..., 2].flatten(), '-b', label="Blue")
    plt.legend(loc='upper right')
    # sanitize the title into a filesystem-friendly file name
    fname = "./blog_img/" + title_str + ".png"
    fname = fname.replace("=", "_")
    fname = fname.replace("°", "")
    plt.savefig(fname, bbox_inches='tight', pad_inches=0.1)
    plt.show()
def plot_line_with_chromaticity_diagram(
        x, y, large_y, hue,
        rate=480/755.0*2, xmin=0.0, xmax=0.8, ymin=0.0, ymax=0.9):
    """
    Plot the sampling line on top of the CIE1931 chromaticity diagram
    (spectral locus + BT.709 gamut) and save it to ./blog_img/.
    """
    title_str =\
        "Y={:.02f}, Angle={:.01f}°".format(large_y, hue * 360 / (2 * np.pi))
    # prepare plot data
    # ---------------------------------
    xy_image = tpg.get_chromaticity_image(
        xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax)
    cmf_xy = tpg._get_cmfs_xy()
    bt709_gamut, _ = tpg.get_primaries(name=cs.BT709)
    xlim = (min(0, xmin), max(0.8, xmax))
    ylim = (min(0, ymin), max(0.9, ymax))
    ax1 = pu.plot_1_graph(fontsize=20 * rate,
                          figsize=((xmax - xmin) * 10 * rate,
                                   (ymax - ymin) * 10 * rate),
                          graph_title="CIE1931 Chromaticity Diagram",
                          graph_title_size=None,
                          xlabel='x', ylabel='y',
                          axis_label_size=None,
                          legend_size=18 * rate,
                          xlim=xlim, ylim=ylim,
                          xtick=[x * 0.1 + xmin for x in
                                 range(int((xlim[1] - xlim[0])/0.1) + 1)],
                          ytick=[x * 0.1 + ymin for x in
                                 range(int((ylim[1] - ylim[0])/0.1) + 1)],
                          xtick_size=17 * rate,
                          ytick_size=17 * rate,
                          linewidth=4 * rate,
                          minor_xtick_num=2,
                          minor_ytick_num=2)
    # spectral locus plus the straight segment closing it
    ax1.plot(cmf_xy[..., 0], cmf_xy[..., 1], '-k', lw=3.5*rate, label=None)
    ax1.plot((cmf_xy[-1, 0], cmf_xy[0, 0]), (cmf_xy[-1, 1], cmf_xy[0, 1]),
             '-k', lw=3.5*rate, label=None)
    ax1.plot(bt709_gamut[:, 0], bt709_gamut[:, 1],
             c=tpg.UNIVERSAL_COLOR_LIST[2], label="BT.709", lw=2.75*rate)
    ax1.plot(x, y, 'k-', label="Line", lw=2.75*rate)
    ax1.imshow(xy_image, extent=(xmin, xmax, ymin, ymax))
    plt.legend(loc='upper right')
    # sanitize the title into a filesystem-friendly file name
    fname = "./blog_img/chroma_" + title_str + ".png"
    fname = fname.replace("=", "_")
    fname = fname.replace("°", "")
    plt.savefig(fname, bbox_inches='tight', pad_inches=0.1)
    plt.show()
def plot_chromaticity_diagram(
        rate=480/755.0*2, xmin=0.0, xmax=0.8, ymin=0.0, ymax=0.9):
    """
    Plot the CIE1931 chromaticity diagram with the BT.709 gamut and two
    annotated sample points A=(0.35, 0.2) and B=(0.45, 0.2); saved to
    ./blog_img/.
    """
    title_str = "Chromaticity Diagram"
    # prepare plot data
    # ---------------------------------
    xy_image = tpg.get_chromaticity_image(
        xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax)
    cmf_xy = tpg._get_cmfs_xy()
    bt709_gamut, _ = tpg.get_primaries(name=cs.BT709)
    xlim = (min(0, xmin), max(0.8, xmax))
    ylim = (min(0, ymin), max(0.9, ymax))
    ax1 = pu.plot_1_graph(fontsize=20 * rate,
                          figsize=((xmax - xmin) * 10 * rate,
                                   (ymax - ymin) * 10 * rate),
                          graph_title="CIE1931 Chromaticity Diagram",
                          graph_title_size=None,
                          xlabel='x', ylabel='y',
                          axis_label_size=None,
                          legend_size=18 * rate,
                          xlim=xlim, ylim=ylim,
                          xtick=[x * 0.1 + xmin for x in
                                 range(int((xlim[1] - xlim[0])/0.1) + 1)],
                          ytick=[x * 0.1 + ymin for x in
                                 range(int((ylim[1] - ylim[0])/0.1) + 1)],
                          xtick_size=17 * rate,
                          ytick_size=17 * rate,
                          linewidth=4 * rate,
                          minor_xtick_num=2,
                          minor_ytick_num=2)
    # spectral locus plus the straight segment closing it
    ax1.plot(cmf_xy[..., 0], cmf_xy[..., 1], '-k', lw=3.5*rate, label=None)
    ax1.plot((cmf_xy[-1, 0], cmf_xy[0, 0]), (cmf_xy[-1, 1], cmf_xy[0, 1]),
             '-k', lw=3.5*rate, label=None)
    ax1.plot(bt709_gamut[:, 0], bt709_gamut[:, 1],
             c=tpg.UNIVERSAL_COLOR_LIST[2], label="BT.709", lw=2.75*rate)
    # the two sample points discussed in calc_point_ab_rgb()
    ax1.plot([0.35], [0.2], 'o', c="#000000", ms=12, label="A")
    ax1.plot([0.45], [0.2], 'o', c="#808080", ms=12, label="B")
    arrowprops = dict(
        facecolor='#A0A0A0', shrink=0.0, headwidth=8, headlength=10,
        width=2)
    ax1.annotate("A", xy=[0.35, 0.2], xytext=[0.45, 0.3], xycoords='data',
                 textcoords='data', ha='left', va='bottom',
                 arrowprops=arrowprops)
    ax1.annotate("B", xy=[0.45, 0.2], xytext=[0.55, 0.3], xycoords='data',
                 textcoords='data', ha='left', va='bottom',
                 arrowprops=arrowprops)
    ax1.imshow(xy_image, extent=(xmin, xmax, ymin, ymax))
    plt.legend(loc='upper right')
    fname = "./blog_img/" + title_str + ".png"
    plt.savefig(fname, bbox_inches='tight', pad_inches=0.1)
    plt.show()
def apply_matrix(src, mtx):
    """Multiply the 3x3 matrix *mtx* onto the 3-vector *src*.

    Works for plain numbers, numpy arrays and sympy expressions alike.

    :param src: sequence of 3 components
    :param mtx: 3x3 nested sequence
    :return: tuple of the 3 resulting components
    """
    return tuple(
        sum(src[col] * mtx[row][col] for col in range(3))
        for row in range(3)
    )
def xyY_to_XYZ_local(x, y, large_y):
    """
    Convert xyY (chromaticity + luminance) to XYZ tristimulus values.

    Accepts scalars, numpy arrays of shape (1, N, 3) components, or sympy
    expressions. Note: divides by y/x and y/z, so x == 0, y == 0 or
    x + y == 1 raise ZeroDivisionError.

    :return: (X, Y, Z) with Y passed through unchanged
    """
    z = 1 - x - y
    large_x = large_y / (y / x)
    large_z = large_y / (y / z)
    return large_x, large_y, large_z
def get_rgb_formula(large_y, hue):
    """Symbolically solve, along a ray at angle `hue`, the chroma values
    where each linear BT.709 channel crosses 0 or 1, and print the
    positive ones scaled by cos(hue) (their x offsets).
    NOTE(review): unlike solve_chroma(), the ray here starts at the origin,
    not at D65 — confirm that is intentional.
    """
    c = symbols('c', real=True)
    x = c * np.cos(hue)
    y = c * np.sin(hue)
    large_x, large_y, large_z = xyY_to_XYZ_local(x, y, large_y)
    mtx = BT709_COLOURSPACE.XYZ_to_RGB_matrix
    r, g, b = apply_matrix([large_x, large_y, large_z], mtx)
    # roots of each channel against its lower (0) and upper (1) limits
    result = []
    result.extend(solve(r + 0))
    result.extend(solve(g + 0))
    result.extend(solve(b + 0))
    result.extend(solve(r - 1))
    result.extend(solve(g - 1))
    result.extend(solve(b - 1))
    result = np.array(result)
    print(result[result > 0] * np.cos(hue))
def calc_point_ab_rgb():
    """Print the linear BT.709 RGB of the two sample points A=(0.35, 0.2)
    and B=(0.45, 0.2) used in plot_chromaticity_diagram(), at Y=0.05."""
    ab = np.array([[[0.35, 0.2, 0.05], [0.45, 0.2, 0.05]]])
    rgb = XYZ_to_RGB(
        xyY_to_XYZ(ab), cs.D65, cs.D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    print(rgb)
def calc_point_xyY_rgb():
    """Same A/B points as calc_point_ab_rgb(), printed with 3-decimal
    formatting.
    NOTE(review): defines a local D65 literal instead of cs.D65 — verify
    both hold the same coordinates."""
    D65 = [0.3127, 0.329]
    a = np.array([0.35, 0.2, 0.05])
    b = np.array([0.45, 0.2, 0.05])
    a_rgb = XYZ_to_RGB(
        xyY_to_XYZ(a), D65, D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    b_rgb = XYZ_to_RGB(
        xyY_to_XYZ(b), D65, D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    print("Point A = {:.3f}, {:.3f}, {:.3f}".format(a_rgb[0], a_rgb[1], a_rgb[2]))
    print("Point B = {:.3f}, {:.3f}, {:.3f}".format(b_rgb[0], b_rgb[1], b_rgb[2]))
def check_large_xyz_boundary(large_y, hue):
    """
    Inspect how RGB values behave across the gamut boundary in XYZ space:
    plot the RGB curves along a sampling line, draw the line on the
    chromaticity diagram, then print the symbolic boundary solutions.
    """
    xyY, x, y = calc_xyY(large_y=large_y, hue=hue)
    rgb = XYZ_to_RGB(
        xyY_to_XYZ(xyY), cs.D65, cs.D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    plot_rgb_around_large_xyz_boundary(rgb, x, y, large_y=large_y, hue=hue)
    plot_line_with_chromaticity_diagram(x, y, large_y, hue)
    get_rgb_formula(large_y, hue)
def solve_chroma(large_y, hue):
    """Solve for the BT.709 gamut-boundary chroma at one (Y, hue) pair.

    A ray x = C*cos(hue)+D65x, y = C*sin(hue)+D65y at luminance `large_y`
    is converted symbolically to RGB; the boundary is the smallest
    non-negative C at which any channel reaches 0 or 1.

    :return: boundary chroma as a float, or 0.0 when sympy finds no
             non-negative crossing (e.g. where the gamut collapses)
    """
    c = symbols('c', real=True)
    x = c * np.cos(hue) + cs.D65[0]
    y = c * np.sin(hue) + cs.D65[1]
    large_xyz = xyY_to_XYZ_local(x, y, large_y)
    mtx = BT709_COLOURSPACE.XYZ_to_RGB_matrix
    r, g, b = apply_matrix([large_xyz[0], large_xyz[1], large_xyz[2]], mtx)
    # collect every chroma at which a channel crosses its 0.0 or 1.0 limit
    crossings = []
    for channel in (r, g, b):
        crossings.extend(solve(channel + 0))
        crossings.extend(solve(channel - 1))
    # Keep the first boundary hit along the positive direction of the ray.
    # NOTE: the original compared a numpy array against [] ("chroma != []"),
    # which is ambiguous/deprecated in numpy, and np.min crashed when every
    # root was negative; a plain Python min with a guard avoids both.
    candidates = [value for value in crossings if value >= 0]
    return float(min(candidates)) if candidates else 0.0
def solve_chroma_thread(args):
    """Pool worker: args is (flat_index, large_y, hue); the solved chroma
    is written into the module-level shared_array at flat_index."""
    idx, large_y, hue = args
    chroma = solve_chroma(large_y, hue)
    # original log labels said "y_idx"/"h_idx" but printed the y/hue VALUES;
    # label them for what they are
    print("y={}, hue={}".format(large_y, hue))
    shared_array[idx] = chroma
def calc_all_chroma():
    """Solve the boundary chroma for every (Y, hue) grid point in parallel.

    Results land in the module-level `shared_array` (flat, luminance-major)
    and are returned reshaped to (y_sample, h_sample).
    NOTE(review): a fresh Pool is spawned per luminance level; one pool for
    the whole grid would cut process start-up overhead.
    """
    large_y = np.linspace(0.0, 1.0, y_sample)
    hue = np.linspace(0.0, 2*np.pi, h_sample)
    for y_idx, y in enumerate(large_y):
        with Pool(cpu_count()) as pool:
            args = []
            for h_idx, h in enumerate(hue):
                idx = h_sample * y_idx + h_idx
                args.append([idx, y, h])
            pool.map(solve_chroma_thread, args)
    chroma = np.array(shared_array[:]).reshape((y_sample, h_sample))
    return chroma
def plot_xy_plane(y_idx, chroma):
    """Scatter the gamut boundary on the xy plane at one luminance level.

    y_idx indexes the luminance (Y = y_idx / (y_sample - 1)); `chroma`
    holds the boundary chroma for each of the h_sample hue angles. The
    frame is written to ./xy_plane_seq/no_<y_idx>.png.
    """
    graph_name = "./xy_plane_seq/no_{:04d}.png".format(y_idx)
    rad = np.linspace(0, 2 * np.pi, h_sample)
    large_y = 1.0 * y_idx / (y_sample - 1)
    if y_idx < (y_sample - 1):
        x = chroma * np.cos(rad) + cs.D65[0]
        y = chroma * np.sin(rad) + cs.D65[1]
    else:
        # at the top luminance level only the D65 white point is plotted
        x = np.ones_like(chroma) * cs.D65[0]
        y = np.ones_like(chroma) * cs.D65[1]
    ly = np.ones_like(y) * large_y
    large_xyz = xyY_to_XYZ(np.dstack((x, y, ly)))
    rgb = XYZ_to_RGB(
        large_xyz, cs.D65, cs.D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    rgb = rgb.reshape((rgb.shape[1], rgb.shape[2]))
    # clip out-of-gamut values and gamma-encode (1/2.4) for display
    rgb = np.clip(rgb, 0.0, 1.0) ** (1/2.4)
    cmf_xy = tpg._get_cmfs_xy()
    ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(8, 9),
        graph_title="Y={:.03f} xy plane".format(large_y),
        graph_title_size=None,
        xlabel="x", ylabel="y",
        axis_label_size=None,
        legend_size=17,
        xlim=(0.0, 0.8),
        ylim=(0.0, 0.9),
        xtick=[x * 0.1 for x in range(9)],
        ytick=[x * 0.1 for x in range(10)],
        xtick_size=None, ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None)
    ax1.patch.set_facecolor("#B0B0B0")
    ax1.plot(cmf_xy[..., 0], cmf_xy[..., 1], '-k', lw=3, label=None)
    ax1.plot((cmf_xy[-1, 0], cmf_xy[0, 0]), (cmf_xy[-1, 1], cmf_xy[0, 1]),
             '-k', lw=3, label=None)
    # ax1.plot(x, y, 'k-')
    ax1.scatter(x, y, c=rgb)
    # plt.legend(loc='upper left')
    plt.savefig(graph_name, bbox_inches='tight', pad_inches=0.1)
    print("plot y_idx={}".format(y_idx))
    # plt.show()
def plot_xy_plane_thread(args):
    # Pool.map adapter: unpack [y_idx, chroma_row] into plot_xy_plane().
    plot_xy_plane(*args)
def plot_xy_pane_all(chroma):
    """Render the xy-plane frame for every luminance level in parallel.
    (The historical 'pane' typo in the name is kept for callers.)"""
    args = []
    with Pool(cpu_count()) as pool:
        for y_idx in range(y_sample):
            args.append([y_idx, chroma[y_idx]])
            # plot_xy_plane(y_idx, chroma[y_idx])
        pool.map(plot_xy_plane_thread, args)
def plot_xyY_plane_seq(chroma):
    """Render the cumulative 3D color-volume frame for every luminance
    level in parallel (each worker gets the full chroma table)."""
    args = []
    with Pool(cpu_count()) as pool:
        for y_idx in range(y_sample):
            args.append([y_idx, chroma])
            # plot_xy_plane(y_idx, chroma[y_idx])
        pool.map(plot_xyY_plane_seq_thread, args)
def plot_xyY_plane_seq_thread(args):
    # Pool.map adapter: unpack [y_idx, chroma] into plot_xyY_color_volume_seq().
    plot_xyY_color_volume_seq(*args)
def plot_xyY_color_volume(chroma):
    """Render the BT.709 color volume in xyY space.

    Writes two images to ./blog_img/: a wireframe pass and a colored
    scatter pass over the same 3D axes.

    :param chroma: (y_sample, h_sample) table of gamut-boundary chroma
    """
    rad = np.linspace(0, 2 * np.pi, h_sample)
    large_y = np.linspace(0, 1, y_sample).reshape((y_sample, 1))
    # work on a copy: the original zeroed the top (Y=1.0) row in place,
    # silently mutating the caller's array
    chroma = chroma.copy()
    chroma[-1, :] = 0.0
    x = chroma * np.cos(rad) + cs.D65[0]
    y = chroma * np.sin(rad) + cs.D65[1]
    ly = np.ones_like(y) * large_y
    large_xyz = xyY_to_XYZ(np.dstack((x, y, ly)))
    rgb = XYZ_to_RGB(
        large_xyz, cs.D65, cs.D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    # clip out-of-gamut values and gamma-encode (1/2.4) for display
    rgb = np.clip(rgb, 0.0, 1.0) ** (1/2.4)
    rgb = rgb.reshape((y_sample * h_sample, 3))
    fig = plt.figure()
    ax = Axes3D(fig)
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("Y")
    ax.set_xlim(0.0, 0.8)
    ax.set_ylim(0.0, 0.9)
    ax.view_init(elev=20, azim=-120)
    ax.plot(x.flatten(), y.flatten(), ly.flatten())
    plt.savefig("./blog_img/xyY_Color_Volume_wire.png",
                bbox_inches='tight', pad_inches=0.1)
    ax.scatter(x.flatten(), y.flatten(), ly.flatten(),
               marker='o', linestyle='-', c=rgb)
    plt.savefig("./blog_img/xyY_Color_Volume_color.png",
                bbox_inches='tight', pad_inches=0.1)
def plot_xyY_color_volume_seq(y_idx, chroma):
    """Render the color volume built up from Y=0 through the y_idx-th
    level, then hstack the frame with the matching xy-plane image.
    NOTE(review): `chroma[-1, :] = 0.0` writes into the caller's array;
    harmless via Pool (workers get pickled copies) but visible to a
    direct caller.
    """
    graph_name_0 = "./xy_plane_seq/no_{:04d}.png".format(y_idx)
    graph_name_1 = "./xyY_color_volume_seq/no_{:04d}.png".format(y_idx)
    rad = np.linspace(0, 2 * np.pi, h_sample)
    large_y = np.linspace(0, 1, y_sample).reshape((y_sample, 1))
    large_y = large_y[:y_idx+1]
    chroma[-1, :] = 0.0
    x = chroma[:y_idx+1] * np.cos(rad) + cs.D65[0]
    y = chroma[:y_idx+1] * np.sin(rad) + cs.D65[1]
    ly = np.ones_like(y) * large_y
    large_xyz = xyY_to_XYZ(np.dstack((x, y, ly)))
    print(large_xyz.shape)
    rgb = XYZ_to_RGB(
        large_xyz, cs.D65, cs.D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    # clip out-of-gamut values and gamma-encode (1/2.4) for display
    rgb = np.clip(rgb, 0.0, 1.0) ** (1/2.4)
    rgb = rgb.reshape(((y_idx + 1) * h_sample, 3))
    fig = plt.figure(figsize=(9, 9))
    ax = Axes3D(fig)
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("Y")
    ax.set_title("Y={:.03f} xyY Color Volume".format(large_y[-1, 0]),
                 fontsize=18)
    ax.set_xlim(0.0, 0.8)
    ax.set_ylim(0.0, 0.9)
    ax.set_zlim(0.0, 1.1)
    ax.view_init(elev=20, azim=-120)
    ax.scatter(x.flatten(), y.flatten(), ly.flatten(),
               marker='o', c=rgb, zorder=1)
    plt.savefig(graph_name_1, bbox_inches='tight', pad_inches=0.1)
    resize_and_hstack(graph_name_0, graph_name_1)
def resize_and_hstack(fname1, fname2):
    """Scale the shorter image up to the taller one's height, stack both
    side by side, and OVERWRITE `fname2` with the result (destructive)."""
    print(fname1, fname2)
    img_0 = read_image(fname1)
    img_1 = read_image(fname2)
    if img_0.shape[0] > img_1.shape[0]:
        static_img = img_0
        resize_img = img_1
    else:
        static_img = img_1
        resize_img = img_0
    rate = static_img.shape[0] / resize_img.shape[0]
    # cv2.resize expects the size as (width, height)
    dst_size = (int(resize_img.shape[1] * rate),
                int(resize_img.shape[0] * rate))
    resize_img = cv2.resize(resize_img, dst_size)
    img = np.hstack((resize_img, static_img))
    write_image(img, fname2)
def make_rgb_figure(y_idx=y_sample//2):
    """For one luminance level, plot the RGB-vs-chroma curves for every
    hue angle (in parallel), marking the precomputed gamut boundary
    loaded from cie1931_chroma.npy."""
    large_y = np.linspace(0, 1, y_sample)[y_idx]
    hue_array = np.linspace(0.0, 2*np.pi, h_sample)
    chroma = np.load("cie1931_chroma.npy")
    args = []
    for h_idx, hue in enumerate(hue_array):
        xyY, x, y = calc_xyY(large_y=large_y, hue=hue)
        rgb = XYZ_to_RGB(xyY_to_XYZ(xyY), cs.D65, cs.D65,
                         BT709_COLOURSPACE.XYZ_to_RGB_matrix)
        args.append([rgb, large_y, hue, chroma[y_idx][h_idx], h_idx])
        # plot_rgb_formula(rgb, large_y=large_y, hue=hue)
    with Pool(cpu_count()) as pool:
        pool.map(plot_rgb_formula_thread, args)
def plot_rgb_formula_thread(args):
    # Pool.map adapter: unpack [rgb, large_y, hue, chroma, h_idx].
    plot_rgb_formula(*args)
def plot_rgb_formula(rgb, large_y, hue, chroma, h_idx):
    """Plot the RGB channel curves against chroma C for one (Y, hue) pair,
    with a dashed vertical line at the solved gamut-boundary chroma.
    Saved to ./formula_seq/ keyed by the hue index."""
    # must match the chroma sampling used in calc_xyY()
    sample_num = 1024
    c = np.linspace(-0.2, 1.0, sample_num)
    title_str =\
        "Y={:.02f}, H={:.01f}°".format(large_y, hue * 360 / (2 * np.pi))
    ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(8, 8),
        graph_title=title_str,
        graph_title_size=None,
        xlabel="C",
        ylabel="RGB Value",
        axis_label_size=None,
        legend_size=17,
        xlim=(-0.2, 0.8),
        ylim=(-0.2, 1.2),
        xtick=None,
        ytick=None,
        xtick_size=None, ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None)
    # vertical dashed marker spanning the full y range at the boundary
    chroma_x = np.array([chroma, chroma])
    chroma_y = np.array([-0.2, 1.2])
    ax1.plot(c, rgb[..., 0].flatten(), '-r', label="Red")
    ax1.plot(c, rgb[..., 1].flatten(), '-g', label="Green")
    ax1.plot(c, rgb[..., 2].flatten(), '-b', label="Blue")
    ax1.plot(chroma_x, chroma_y, '--k', label='gamut boundary')
    plt.legend(loc='upper right')
    fname_str = "./formula_seq/Y_{:.02f}_Angle_{:04d}.png"
    fname = fname_str.format(large_y, h_idx)
    plt.savefig(fname, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
def plot_xy_plane_little_by_little_seq(y_idx=y_sample//2):
    """Animate the boundary sweep at a fixed luminance: one frame per hue
    index, rendered in parallel."""
    hue_array = np.linspace(0.0, 2*np.pi, h_sample)
    chroma = np.load("cie1931_chroma.npy")
    args = []
    for h_idx, hue in enumerate(hue_array):
        args.append([y_idx, chroma[y_idx], h_idx])
        # plot_xy_plane_little_by_little(y_idx, chroma[y_idx], h_idx)
    with Pool(cpu_count()) as pool:
        pool.map(plot_xy_plane_little_by_little_thread, args)
def plot_xy_plane_little_by_little_thread(args):
    # Pool.map adapter: unpack [y_idx, chroma_row, h_idx].
    plot_xy_plane_little_by_little(*args)
def plot_xy_plane_little_by_little(y_idx, chroma, h_idx):
    """Render one animation frame: the boundary points up to hue index
    h_idx at a fixed luminance, plus a line from D65 to the current hue.
    Written to ./xy_plane_seq/no_<h_idx>.png."""
    graph_name = "./xy_plane_seq/no_{:04d}.png".format(h_idx)
    hue = h_idx / (h_sample - 1) * 360
    rad = np.linspace(0, 2 * np.pi, h_sample)
    large_y = 1.0 * y_idx / (y_sample - 1)
    if y_idx < (y_sample - 1):
        x = chroma * np.cos(rad) + cs.D65[0]
        y = chroma * np.sin(rad) + cs.D65[1]
    else:
        # at the top luminance level only the D65 white point is plotted
        x = np.ones_like(chroma) * cs.D65[0]
        y = np.ones_like(chroma) * cs.D65[1]
    ly = np.ones_like(y) * large_y
    large_xyz = xyY_to_XYZ(np.dstack((x, y, ly)))
    rgb = XYZ_to_RGB(
        large_xyz, cs.D65, cs.D65, BT709_COLOURSPACE.XYZ_to_RGB_matrix)
    rgb = rgb.reshape((rgb.shape[1], rgb.shape[2]))
    # clip out-of-gamut values and gamma-encode (1/2.4) for display
    rgb = np.clip(rgb, 0.0, 1.0) ** (1/2.4)
    cmf_xy = tpg._get_cmfs_xy()
    # keep only the hues swept so far for this frame
    x = x[:h_idx+1]
    y = y[:h_idx+1]
    rgb = rgb[:h_idx+1]
    line_x = np.array([cs.D65[0], x[-1]])
    line_y = np.array([cs.D65[1], y[-1]])
    ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(8, 9),
        graph_title="Y={:.03f}, H={:.01f}° xy plane".format(large_y, hue),
        graph_title_size=None,
        xlabel="x", ylabel="y",
        axis_label_size=None,
        legend_size=17,
        xlim=(0.0, 0.8),
        ylim=(0.0, 0.9),
        xtick=[x * 0.1 for x in range(9)],
        ytick=[x * 0.1 for x in range(10)],
        xtick_size=None, ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None)
    ax1.patch.set_facecolor("#D0D0D0")
    ax1.plot(cmf_xy[..., 0], cmf_xy[..., 1], '-k', lw=3, label=None)
    ax1.plot((cmf_xy[-1, 0], cmf_xy[0, 0]), (cmf_xy[-1, 1], cmf_xy[0, 1]),
             '-k', lw=3, label=None)
    # ax1.plot(x, y, 'k-')
    ax1.plot(line_x, line_y, '-k', lw=2, zorder=1)
    ax1.scatter(x, y, c=rgb, zorder=2)
    # plt.legend(loc='upper left')
    plt.savefig(graph_name, bbox_inches='tight', pad_inches=0.1)
    print("plot y_idx={}".format(y_idx))
    # plt.show()
def concat_graph():
    """Pair the formula frames with the xy-plane frames (matched by sorted
    file name) and write the hstacked results to ./xy_plane_seq_concat/."""
    fname0_dir = "./formula_seq"
    fname1_dir = "./xy_plane_seq"
    fname2_dir = "./xy_plane_seq_concat"
    fname0_list = sorted(os.listdir(fname0_dir))
    fname1_list = sorted(os.listdir(fname1_dir))
    args = []
    for fname0, fname1 in zip(fname0_list, fname1_list):
        fname_0 = os.path.join(fname0_dir, fname0)
        fname_1 = os.path.join(fname1_dir, fname1)
        fname_2 = os.path.join(fname2_dir, fname0)
        args.append([fname_0, fname_1, fname_2])
        # resize_and_hstack(fname_0, fname_1)
    with Pool(cpu_count()) as pool:
        pool.map(concat_graph_thread, args)
def resize_and_hstack2(fname1, fname2, fname3):
    """Variant of resize_and_hstack() that writes the combined image to a
    separate output path `fname3` instead of overwriting `fname2`."""
    print(fname1, fname2)
    img_0 = read_image(fname1)
    img_1 = read_image(fname2)
    if img_0.shape[0] > img_1.shape[0]:
        static_img = img_0
        resize_img = img_1
    else:
        static_img = img_1
        resize_img = img_0
    rate = static_img.shape[0] / resize_img.shape[0]
    # cv2.resize expects the size as (width, height)
    dst_size = (int(resize_img.shape[1] * rate),
                int(resize_img.shape[0] * rate))
    resize_img = cv2.resize(resize_img, dst_size)
    img = np.hstack((resize_img, static_img))
    write_image(img, fname3)
def concat_graph_thread(args):
    # Pool.map adapter: unpack [fname_0, fname_1, fname_2].
    resize_and_hstack2(*args)
def verify_xy_gamut_boundary(sample=256):
    """Cross-check the solved boundary: sample an RGB cube via a linear
    3D LUT, keep the points inside a thin luminance slab around selected
    Y levels, and overlay them on the analytic boundary curve."""
    idx_list = np.array([8, 16, 24, 32, 48, 56])
    # idx_list = np.array([16])
    delta_large_y = 0.001
    y_list = idx_list / (y_sample - 1)
    chroma = np.load("cie1931_chroma.npy")
    rgb = LUT3D.linear_table(sample).reshape((1, sample ** 3, 3))
    xyz = RGB_to_XYZ(rgb, cs.D65, cs.D65, BT709_COLOURSPACE.RGB_to_XYZ_matrix)
    xyY = XYZ_to_xyY(xyz)
    for idx, y in enumerate(y_list):
        # keep samples whose luminance falls in (y, y + delta_large_y)
        ok_idx = (y < xyY[:, :, 2]) & (xyY[:, :, 2] < (y + delta_large_y))
        verify_xy_gamut_boundary_plot(
            idx, y, xyY[ok_idx], rgb[ok_idx], chroma[idx_list[idx]])
def verify_xy_gamut_boundary_plot(y_idx, large_y, xyY, rgb, chroma):
    """Overlay brute-force RGB-cube samples (scatter) on the analytic
    gamut-boundary curve at one luminance; saved to ./blog_img/.
    NOTE(review): the y_idx parameter is currently unused in the body."""
    rad = np.linspace(0, 2 * np.pi, h_sample)
    x = chroma * np.cos(rad) + cs.D65[0]
    y = chroma * np.sin(rad) + cs.D65[1]
    cmf_xy = tpg._get_cmfs_xy()
    fname = "./blog_img/verify_y_{:.03f}.png".format(large_y)
    ax1 = pu.plot_1_graph(
        fontsize=20,
        figsize=(8, 9),
        graph_title="Y={:.03f} xy plane".format(large_y),
        graph_title_size=None,
        xlabel="x", ylabel="y",
        axis_label_size=None,
        legend_size=17,
        xlim=(0.0, 0.8),
        ylim=(0.0, 0.9),
        xtick=[x * 0.1 for x in range(9)],
        ytick=[x * 0.1 for x in range(10)],
        xtick_size=None, ytick_size=None,
        linewidth=3,
        minor_xtick_num=None,
        minor_ytick_num=None)
    ax1.patch.set_facecolor("#D0D0D0")
    ax1.plot(cmf_xy[..., 0], cmf_xy[..., 1], '-k', lw=3, label=None)
    ax1.plot(x, y, c='k', lw=3, label='Gamut Boundary', alpha=0.5)
    ax1.plot((cmf_xy[-1, 0], cmf_xy[0, 0]), (cmf_xy[-1, 1], cmf_xy[0, 1]),
             '-k', lw=3, label=None)
    ax1.scatter(xyY[..., 0], xyY[..., 1], c=rgb, s=2)
    plt.savefig(fname, bbox_inches='tight', pad_inches=0.1)
    # plt.show()
def main_func():
    """Entry point. The pipeline stages below are toggled by hand —
    uncomment the step to (re)run; typical order is: solve the chroma
    grid, save/load the .npy cache, plot planes/volumes, then verify."""
    # check_large_xyz_boundary(large_y=0.51, hue=90/360*2*np.pi)
    # calc_point_ab_rgb()
    # calc_point_xyY_rgb()
    # solve_chroma(0.1, 45/360*2*np.pi)
    # chroma = calc_all_chroma()
    # np.save("cie1931_chroma.npy", chroma)
    # chroma = np.load("cie1931_chroma.npy")
    # plot_xy_pane_all(chroma)
    # plot_xyY_plane_seq(chroma)
    # plot_xyY_color_volume(chroma)
    # make_rgb_figure(y_idx=y_sample//4)
    # plot_xy_plane_little_by_little_seq(y_idx=y_sample//4)
    # concat_graph()
    verify_xy_gamut_boundary()
if __name__ == '__main__':
    # run from the script's own folder so the relative output paths
    # (./blog_img, ./xy_plane_seq, ...) resolve regardless of the caller's cwd
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    main_func()
| 33.492476 | 82 | 0.579668 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,408 | 0.13768 |
64afdabe7a49d85a55002e8219642eede03635c1 | 288 | py | Python | score.py | salisu14/python-tut | 4defc38924e064657daa46647e8979cde733f9f4 | [
"MIT"
] | 5 | 2021-04-16T01:29:21.000Z | 2021-12-24T10:03:54.000Z | score.py | salisu14/python-tut | 4defc38924e064657daa46647e8979cde733f9f4 | [
"MIT"
] | null | null | null | score.py | salisu14/python-tut | 4defc38924e064657daa46647e8979cde733f9f4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Prompt for three scores, then report their total and two-decimal average.
scores = [
    int(input('Enter first score: ')),
    int(input('Enter second score: ')),
    int(input('Enter third score: ')),
]
score = sum(scores)
average = round(score / 3.0, 2)
print(f'Total Score: {score}')
print(f'Average Score: {average}')
| 16.941176 | 43 | 0.65625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 136 | 0.472222 |
64b06d39035e35021652b98bd685aa742c7ae018 | 66 | py | Python | source/tweaks/__init__.py | mverleg/svsite | 5c9dbcacf81020cf0c1960e337bdd33113acd597 | [
"BSD-3-Clause"
] | null | null | null | source/tweaks/__init__.py | mverleg/svsite | 5c9dbcacf81020cf0c1960e337bdd33113acd597 | [
"BSD-3-Clause"
] | 142 | 2015-06-05T07:53:09.000Z | 2020-03-31T18:37:07.000Z | source/tweaks/__init__.py | mdilli/svsite | 5c9dbcacf81020cf0c1960e337bdd33113acd597 | [
"BSD-3-Clause"
] | null | null | null | """
Changes to the defaults of ``django``, ``djangocms``, etc.
""" | 22 | 58 | 0.606061 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 66 | 1 |
64b0e622a1042d009df7256126b96fb6332feade | 1,250 | py | Python | companies/forms.py | wsoliveira/borsocontrole | 61a4e2aac738a766b6919e30c08fc967fe96fb40 | [
"MIT"
] | null | null | null | companies/forms.py | wsoliveira/borsocontrole | 61a4e2aac738a766b6919e30c08fc967fe96fb40 | [
"MIT"
] | 12 | 2020-04-14T11:54:50.000Z | 2021-06-10T18:54:44.000Z | companies/forms.py | wsoliveira/borsocontrole | 61a4e2aac738a766b6919e30c08fc967fe96fb40 | [
"MIT"
] | null | null | null | from django.forms import ModelForm
from .models import bc_sector, bc_company, bc_company_code
from administrators.models import bc_admin_type_investiment
class SectorForm(ModelForm):
    """Create/edit form for bc_sector (name, description, active flag)."""
    class Meta:
        model = bc_sector
        fields = ['name','description','is_active']
class CompanyForm(ModelForm):
    """Create/edit form for bc_company; sector choices limited to active sectors."""
    class Meta:
        model = bc_company
        fields = ['name','identification','is_new_market','bc_sector']
    def __init__(self, *args, **kwargs):
        super(CompanyForm, self).__init__(*args, **kwargs)
        # relabel the FK field and hide inactive sectors from the dropdown
        self.fields['bc_sector'].label = 'Sector'
        self.fields['bc_sector'].queryset = bc_sector.objects.filter(is_active=True)
class CompanyCodeForm(ModelForm):
    """Create/edit form for bc_company_code; both FK dropdowns show only active rows."""
    class Meta:
        model = bc_company_code
        fields = ['name','bc_admin_type_investiment','bc_company']
    def __init__(self, *args, **kwargs):
        super(CompanyCodeForm, self).__init__(*args, **kwargs)
        # friendlier labels plus active-only querysets for both FK fields
        self.fields['bc_admin_type_investiment'].label = 'Type of Investiment'
        self.fields['bc_admin_type_investiment'].queryset = bc_admin_type_investiment.objects.filter(is_active=True)
        self.fields['bc_company'].label = 'Company'
        self.fields['bc_company'].queryset = bc_company.objects.filter(is_active=True)
64b20b9ed2c7c1d8565510f985e4c1edaa94157e | 4,271 | py | Python | docker-image/locust-tasks/fakerutil.py | eklicious/mlocust-app | 04a7599503d9b6bac027dd4c473ad6c7be753362 | [
"Apache-2.0"
] | null | null | null | docker-image/locust-tasks/fakerutil.py | eklicious/mlocust-app | 04a7599503d9b6bac027dd4c473ad6c7be753362 | [
"Apache-2.0"
] | null | null | null | docker-image/locust-tasks/fakerutil.py | eklicious/mlocust-app | 04a7599503d9b6bac027dd4c473ad6c7be753362 | [
"Apache-2.0"
] | null | null | null | import pymongo
from bson.json_util import loads, dumps
from bson import json_util
import csv
import sys
import uuid
import os
import itertools
from faker import Faker
from collections import defaultdict
import json
import datetime
from deepmerge import Merger
import random
import re
############################################################################################################
# This is a utility that takes in the model name. The model file name has a suffix that indicates
# how many records should be added to the list before returning the results.
# The objective is to send back the largest list possible without killing the mLocust
# worker CPU/RAM
############################################################################################################
stripProp = lambda str: re.sub(r'\s+', '', (str[0].upper() + str[1:].strip('()')))
fake = Faker()
# This serializer isn't needed anymore as long as we use faker.datetime.datetime instead of datetime.date
# I'll keep this serializer code in here in case someone in the future needs to use it for another data type that isn't native...
def ser(o):
    """json.dumps ``default`` hook: render date/datetime objects as their
    str() form; anything else falls through and yields None."""
    return str(o) if isinstance(o, datetime.date) else None
def procpath(path, counts, generator):
    """Recursively walk a path, generating a partial tree with just this path's random contents"""
    # SECURITY NOTE: `generator` comes straight from the model CSV and is
    # fed to eval() below — only ever run trusted model files.
    stripped = stripProp(path[0])
    if len(path) == 1:
        # Base case. Generate a random value by running the Python expression in the text file
        return { stripped: eval(generator) }
    elif path[0].endswith('()'):
        # Lists are slightly more complex. We generate a list of the length specified in the
        # counts map. Note that what we pass recursively is _the exact same path_, but we strip
        # off the ()s, which will cause us to hit the `else` block below on recursion.
        return {
            stripped: [ procpath([ path[0].strip('()') ] + path[1:], counts, generator)[stripped] for X in range(0, counts[stripped]) ]
        }
    else:
        # Return an object, of the specified type, populated recursively.
        return {
            # stripped: {
            stripped: procpath(path[1:], counts, generator)
            # }
        }
def zipmerge(the_merger, path, base, nxt):
    """deepmerge list strategy: merge two lists element-wise.

    Assumes both lists have the same length (raises IndexError otherwise,
    matching the original index-based loop). `path` is unused but required
    by the deepmerge strategy signature.
    """
    return [the_merger.merge(item, nxt[pos]) for pos, item in enumerate(base)]
def ID(key):
    # Sequential id generator used from the eval()'d expressions: bumps the
    # module-level counter for `key` and returns e.g. "member1", "member2"...
    # Relies on the global id_map / starting_id_minus_1, so counters are
    # per-process — presumably acceptable for this loader; confirm if ids
    # must be globally unique across mLocust workers.
    id_map[key] += 1
    return key + str(id_map[key]+starting_id_minus_1)
# A deep merger using our custom list merge strategy: dicts merge key-wise,
# lists merge element-wise via zipmerge; type/fallback conflicts override.
merger = Merger([
    (dict, "merge"),
    (list, zipmerge)
], [ "override" ], [ "override" ])
# This field is used for an incremental field, e.g. ID. We can't really control this using mLocust so we'll always default to 0.
# Not every loader file may require this.
starting_id_minus_1 = 0
id_map = defaultdict(int)  # per-key counters backing ID()
def bulkFetch(model):
    """Generate a list of fake documents for the given model spec.

    `model` is "<name>.<ext>.<count>", e.g. "members.csv.100": the template
    is read from models/<model> and <count> documents are built per call.
    Per the header note, the batch size should be tuned so the returned
    list does not exhaust the mLocust worker's CPU/RAM.
    """
    # from the model name, derive the template file name along with bulk insert count
    # example model would be members.csv.100
    arr = model.split('.')
    template = "models/" + model
    bulkCount = int(arr[2])
    # instantiate a new list
    l = []
    for J in range(0, bulkCount): # iterate through the bulk insert count
        # A dictionary that will provide consistent, random list lengths
        counts = defaultdict(lambda: random.randint(1, 5))
        data = {}
        with open(template) as csvfile:
            # skip the header row, then one property path per row
            propreader = csv.reader(itertools.islice(csvfile, 1, None))
            for row in propreader:
                path = row[0].split('.')
                # print(path)
                # row[3] is the generator expression eval()'d by procpath
                partial = procpath(path, counts, row[3])
                # print(partial);
                # Merge partial trees.
                data = merger.merge(data, partial)
                # print(data);
        # print(data);
        # To JSON!
        # Old debugging statements that used the custom deserializer. Non-issue if you use native bson/json data types
        # print("%s\t%s\t%s"%(str(id), str(idempotencyKey), json.dumps(obj, default=ser)))
        # print(json.dumps(obj, default=ser))
        # Add the object to our list
        l.append(data)
    return l
| 39.183486 | 135 | 0.619527 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,268 | 0.531023 |
64b21da399686c13e0023dde485bb17649e24185 | 3,554 | py | Python | examples/utils.py | hadar-solver/hadar | 9e656212f255cf5c0a1af87b02c2a9e5450806b5 | [
"Apache-2.0"
] | 1 | 2021-01-08T14:31:37.000Z | 2021-01-08T14:31:37.000Z | examples/utils.py | hadar-simulator/hadar | 9e656212f255cf5c0a1af87b02c2a9e5450806b5 | [
"Apache-2.0"
] | 86 | 2020-01-23T09:43:26.000Z | 2020-12-03T13:56:31.000Z | examples/utils.py | hadar-solver/hadar | 9e656212f255cf5c0a1af87b02c2a9e5450806b5 | [
"Apache-2.0"
] | 1 | 2020-01-23T15:05:50.000Z | 2020-01-23T15:05:50.000Z | # Copyright (c) 2019-2020, RTE (https://www.rte-france.com)
# See AUTHORS.txt
# This Source Code Form is subject to the terms of the Apache License, version 2.0.
# If a copy of the Apache License, version 2.0 was not distributed with this file, you can obtain one at http://www.apache.org/licenses/LICENSE-2.0.
# SPDX-License-Identifier: Apache-2.0
# This file is part of hadar-simulator, a python adequacy library for everyone.
import sys
import click
from typing import List
import nbformat
import os
from nbconvert import RSTExporter
from nbconvert.preprocessors import ExecutePreprocessor
# Module-wide converter and executor shared by the helper functions below.
exporter = RSTExporter()
# In-process notebook runner; 600 s timeout, keeps ipywidgets state in the
# notebook so the exported document shows the final widget values.
ep = ExecutePreprocessor(timeout=600, kernel_name="python3", store_widget_state=True)
def open_nb(name: str, src: str) -> nbformat:
    """Load the notebook called *name* from the source directory *src*.

    :param name: name of the notebook (also the name of its sub-directory)
    :param src: root directory holding one sub-directory per notebook
    :return: the notebook parsed as an nbformat v4 object
    """
    print("Reading...", end=" ")
    path = "{src}/{name}/{name}.ipynb".format(name=name, src=src)
    notebook = nbformat.read(path, as_version=4)
    print("OK", end=" ")
    return notebook
def execute(nb: nbformat, name: str, src: str) -> nbformat:
    """Run every cell of *nb* and keep the computed widget state.

    :param nb: notebook object to execute
    :param name: notebook name, used to build the working directory
    :param src: notebook source directory, used to build the working directory
    :return: the same notebook object, now carrying cell outputs
    """
    print("Executing...", end=" ")
    working_dir = "%s/%s/" % (src, name)
    ep.preprocess(nb, {"metadata": {"path": working_dir}})
    print("OK", end=" ")
    return nb
def copy_image(name: str, export: str, src: str):
    """Relocate the .png images sitting next to a notebook into its export folder.

    NOTE(review): despite the name, ``os.rename`` *moves* the files (the
    source copy disappears) and fails across filesystems — confirm this is
    the intended behaviour before renaming/changing it.

    :param name: notebook name (sub-directory in both *src* and *export*)
    :param export: export root directory
    :param src: source root directory
    :return: None
    """
    source_dir = "%s/%s" % (src, name)
    target_dir = "%s/%s" % (export, name)
    for entry in os.listdir(source_dir):
        if entry.split(".")[-1] in ["png"]:
            os.rename("%s/%s" % (source_dir, entry), "%s/%s" % (target_dir, entry))
def to_export(nb: nbformat, name: str, export: str):
    """Convert an executed notebook to reStructuredText under *export*.

    Creates ``export/name/`` on demand and writes ``name.rst`` into it.

    :param nb: notebook object carrying the execution results
    :param name: notebook name
    :param export: export root directory
    :return: None
    """
    print("Exporting...", end=" ")
    rst, _ = exporter.from_notebook_node(nb)
    target = "%s/%s" % (export, name)
    if not os.path.exists(target):
        os.makedirs(target)
    with open("%s/%s.rst" % (target, name), "w") as handle:
        handle.write(rst)
    print("OK", end=" ")
def list_notebook(src: str) -> List[str]:
    """Return the sub-directories of *src* that contain a notebook file.

    A directory ``d`` qualifies when ``src/d/d.ipynb`` exists.

    :param src: root directory to scan
    :return: qualifying directory names, in ``os.listdir`` order
    """
    found = []
    for candidate in os.listdir(src):
        if os.path.isfile("{src}/{name}/{name}.ipynb".format(name=candidate, src=src)):
            found.append(candidate)
    return found
@click.command("Check and export notebooks")
@click.option("--src", nargs=1, help="Notebook directory")
@click.option("--check", nargs=1, help="check notebook according to result file given")
@click.option("--export", nargs=1, help="export notebooks to directory given")
def main(src: str, check: str, export: str):
    """Execute every notebook found under *src*; optionally check and export it."""
    for nb_name in list_notebook(src):
        print("{:30}".format(nb_name), ":", end="")
        nb = open_nb(nb_name, src)
        nb = execute(nb, nb_name, src)
        if check:
            pass  # Implement check
        if export:
            to_export(nb, nb_name, export)
            copy_image(nb_name, export, src)
        print("")
# Script entry point: parse CLI options and run the notebook pipeline.
if __name__ == "__main__":
    main()
| 28.894309 | 149 | 0.627462 | 0 | 0 | 0 | 0 | 641 | 0.18036 | 0 | 0 | 1,729 | 0.486494 |
64b279e328c6e5af418118b043a2d65021c5d603 | 1,230 | py | Python | lukkari/timerange.py | JuEeHa/lukkari | 699caf7686d1b060927dfdc7dac2a7452f2371d5 | [
"Unlicense"
] | null | null | null | lukkari/timerange.py | JuEeHa/lukkari | 699caf7686d1b060927dfdc7dac2a7452f2371d5 | [
"Unlicense"
] | null | null | null | lukkari/timerange.py | JuEeHa/lukkari | 699caf7686d1b060927dfdc7dac2a7452f2371d5 | [
"Unlicense"
] | null | null | null | import datetime
class Timerange:
    """A half-open time interval ``[start, start + length)``.

    *start* is a ``datetime.datetime`` and *length* a ``datetime.timedelta``.
    """

    def __init__(self, start, length):
        self.start = start
        self.length = length

    def range(self):
        """Return the ``(start, end)`` endpoints of the interval."""
        return (self.start, self.start + self.length)

    def __contains__(self, day):
        # Inclusive of start, exclusive of end.
        offset = day - self.start
        return datetime.timedelta(seconds=0) <= offset < self.length

    def overlaps(self, other):
        """True when the two ranges share at least one instant."""
        earlier, later = (self, other) if self.start <= other.start else (other, self)
        return later.start < earlier.start + earlier.length

    def __repr__(self):
        return f'Timerange({self.start!r}, {self.length!r})'

    def __str__(self):
        first, last = self.range()
        return '%s - %s' % (first.strftime('%H:%M'), last.strftime('%H:%M'))

    def __eq__(self, other):
        return (self.start, self.length) == (other.start, other.length)

    def __ne__(self, other):
        return not self == other
def between(start, end):
    """Build the ``Timerange`` spanning two wall-clock times on one day.

    :param start: ``(hour, minute)`` pair for the start of the range.
    :param end: ``(hour, minute)`` pair for the end; must be after *start*.
    :return: a ``Timerange`` anchored on 1970-01-01.
    :raises ValueError: if a pair is malformed or *end* is not after *start*.
    """
    # Validate explicitly instead of with ``assert`` so the checks are not
    # stripped when Python runs with the -O optimization flag.
    if len(start) != 2 or len(end) != 2:
        raise ValueError('start and end must be (hour, minute) pairs')
    start_hour, start_minute = start
    end_hour, end_minute = end
    start_obj = datetime.datetime(1970, 1, 1, start_hour, start_minute)
    end_obj = datetime.datetime(1970, 1, 1, end_hour, end_minute)
    if end_obj - start_obj <= datetime.timedelta(seconds=0):
        raise ValueError('end must be strictly after start')
    return Timerange(start_obj, end_obj - start_obj)
| 27.954545 | 73 | 0.696748 | 836 | 0.679675 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.028455 |
64b296594edc9672e5df42017e0febcf11f1fb78 | 243 | py | Python | renlabs/sudoku/cli.py | swork/sudoku | 7dcc33bf6572ada4d3d92f21278d19b5237c1d83 | [
"MIT"
] | null | null | null | renlabs/sudoku/cli.py | swork/sudoku | 7dcc33bf6572ada4d3d92f21278d19b5237c1d83 | [
"MIT"
] | null | null | null | renlabs/sudoku/cli.py | swork/sudoku | 7dcc33bf6572ada4d3d92f21278d19b5237c1d83 | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
import logging, sys, os
from . import main
logger = logging.getLogger(__name__ if not __name__ == '__main__' else os.path.basename(__file__))
if __name__ == '__main__':
logging.basicConfig()
sys.exit(main())
| 22.090909 | 98 | 0.707819 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.176955 |
64b5326f176464feb08959dbb46da61e35edf533 | 873 | py | Python | Python 基础教程/1.5.1 列表.py | shao1chuan/pythonbook | cd9877d04e1e11422d38cc051e368d3d9ce2ab45 | [
"MulanPSL-1.0"
] | 95 | 2020-10-11T04:45:46.000Z | 2022-02-25T01:50:40.000Z | Python 基础教程/1.5.1 列表.py | shao1chuan/pythonbook | cd9877d04e1e11422d38cc051e368d3d9ce2ab45 | [
"MulanPSL-1.0"
] | null | null | null | Python 基础教程/1.5.1 列表.py | shao1chuan/pythonbook | cd9877d04e1e11422d38cc051e368d3d9ce2ab45 | [
"MulanPSL-1.0"
] | 30 | 2020-11-05T09:01:00.000Z | 2022-03-08T05:58:55.000Z | # 插入
# Insertion: concatenation, append, insert at an index, extend.
print('插入'*15)
x = [1,2,3]
print(x)
x = x+ [4]
x.append(5)
print(x)
x.insert(3,'w')
x.extend(['a','b'])
print(x*3)
# Deletion: del by index, pop by index, remove by value.
print("删除"*15)
y = ["a","b","c","d",'e','f']
del y[2]
print(y)
y.pop(0)
print(y)
y.remove('f')
print(y)
# Element access and counting.
print("列表元素访问与计数"*5)
x =[1,2,3,3,4,5]
print(x.count(3),x.index(2))
# Sorting.
print("列表排序"*10)
x = [1,2,4,5,6,34,22,55,22,11,24,56,78]
import random as r
r.shuffle(x)
print(x)
x.reverse()
print("reverse",x)
x.sort(reverse = True)
print('sort ',x)
# The built-in sorted() sorts into a *new* list and leaves the original
# untouched (reversed() likewise returns a new iterator).
# NOTE(review): the two return values below are discarded, so these calls
# have no observable effect here.
sorted(x)
reversed(x)
# Packing two lists together with zip.
print("打包"*10)
a = [1,2,3]
b = [4,5,6]
print(list(zip(a,b)))
# Enumerate yields (index, element) pairs.
print("枚举"*10)
for item in enumerate('abcdef'):
    print(item)
# Three ways to traverse a list.
print("遍历列表的三种方式"*10)
a = ['a','b','c','d','e','f']
for i in a:
    print(i)
for i in range(len(a)):
    print(i,a[i])
for i,ele in enumerate(a):
    print(i,ele)
| 12.125 | 39 | 0.578465 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 381 | 0.361137 |
64b62172678af5d0df137277a87c6dc84666d9fe | 1,820 | py | Python | custom_components/vektiva_smarwi/smarwi_control.py | wongcz/vektiva_smarwi_ha | 1243905a3236cedc5e8d6c55f578e71b673b7a6e | [
"Apache-2.0"
] | 3 | 2020-11-05T15:40:22.000Z | 2021-02-07T02:01:59.000Z | custom_components/vektiva_smarwi/smarwi_control.py | wongcz/vektiva_smarwi_ha | 1243905a3236cedc5e8d6c55f578e71b673b7a6e | [
"Apache-2.0"
] | 2 | 2021-03-30T08:24:49.000Z | 2022-02-07T10:39:40.000Z | custom_components/vektiva_smarwi/smarwi_control.py | wongcz/vektiva_smarwi_ha | 1243905a3236cedc5e8d6c55f578e71b673b7a6e | [
"Apache-2.0"
] | 1 | 2021-05-06T21:32:55.000Z | 2021-05-06T21:32:55.000Z | import aiohttp
class SmarwiControl:
    """Groups several SMARWI window-opener hosts behind one control object."""

    def __init__(self, hosts):
        """Initialize from a comma-separated string of host names."""
        self.hosts = [x.strip() for x in hosts.split(',')]
        # Display title: the first dotted label of every host, comma-joined.
        self.title = ', '.join([x.split('.')[0] for x in self.hosts])

    async def authenticate(self) -> bool:
        """Test if we can authenticate with the hosts.

        Returns True only when every host answers a status request.
        """
        # Initialize before the loop so an empty host list yields True
        # instead of an UnboundLocalError (the original only assigned
        # ``result`` inside the loop body).
        result = True
        try:
            for host in self.hosts:
                ctl = SmarwiControlItem(host)
                await ctl.get_status()
        except Exception:
            # ``except Exception`` instead of a bare ``except:`` so
            # KeyboardInterrupt/SystemExit still propagate.
            result = False
        return result

    def list(self) -> list:
        """Return one SmarwiControlItem per configured host."""
        return [SmarwiControlItem(host) for host in self.hosts]
class SmarwiControlItem:
    """Drives a single SMARWI host through its local HTTP command API."""

    def __init__(self, host):
        self.host = host
        # Short display name: the first dotted label of the host name.
        self.name = host.split('.')[0]

    async def __request(self, path):
        # One-shot GET against the device; non-200 responses are errors.
        async with aiohttp.ClientSession() as session:
            async with session.get(f"http://{self.host}/{path}") as resp:
                if resp.status != 200:
                    raise ValueError(f"Request failed with {resp.status}/{resp.reason}")
                return await resp.text()

    async def open(self):
        """Open the window fully (100%)."""
        await self.__request("cmd/open/100")

    async def set_position(self, pos: int):
        """Move to *pos* percent open; values of 1 or below close instead."""
        if pos <= 1:
            await self.close()
        else:
            await self.__request("cmd/open/{}".format(pos))

    async def close(self):
        """Close the window."""
        await self.__request("cmd/close")

    async def get_status(self):
        """Fetch the device status and parse it into a dict."""
        raw = await self.__request("statusn")
        status = {}
        for line in raw.split('\n'):
            parts = line.split(':', maxsplit=1)
            status[parts[0]] = parts[1]
        return status
| 29.354839 | 88 | 0.557143 | 1,800 | 0.989011 | 0 | 0 | 0 | 0 | 1,246 | 0.684615 | 322 | 0.176923 |
64b6386728a5037f9d572fa1e9fcc3acf2c312a4 | 25,806 | py | Python | add_ops.py | SoftDevX/MJDB | 386bbb464fbd24c02f0d87da30e0dd11da65b764 | [
"MIT"
] | null | null | null | add_ops.py | SoftDevX/MJDB | 386bbb464fbd24c02f0d87da30e0dd11da65b764 | [
"MIT"
] | null | null | null | add_ops.py | SoftDevX/MJDB | 386bbb464fbd24c02f0d87da30e0dd11da65b764 | [
"MIT"
] | null | null | null | from PyQt5 import QtWidgets, QtCore, QtPrintSupport
from PyQt5.QtCore import QDate, QTime, Qt, QTimer, QRectF
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtPrintSupport import QPrinter
from database import MyCursor
from PyQt5 import QtGui
from datetime import datetime, timedelta
from annulation import Annulation, RemEntry
from ops import Ui_zd
#GLOBALS
# SWITCH tracks which confirmation dialog is pending (see AddOps.add_ops):
# 0 = none, 1 = remark dialog (RemEntry), 2 = annulation dialog (Annulation).
SWITCH = 0
class AddOps(QWidget, Ui_zd):
    """Operations window (frameless PyQt5 dialog).

    Lets the operator record debit/credit entries against an account and
    sub-account, lists the day's entries in a colour-coded table, keeps the
    sub-account / account / overall balances up to date (persisting the
    account balance to the Accounts table), and prints a receipt for a
    selected table row.
    """

    def __init__(self, parent=None):
        super(AddOps, self).__init__(parent)
        self.setWindowModality(Qt.ApplicationModal)
        self.setupUi(self)
        self.setWindowTitle('Opérations')
        self.m = MyCursor()
        # Frameless, translucent window; the title bar is a custom QFrame.
        flags = QtCore.Qt.WindowFlags(QtCore.Qt.FramelessWindowHint)
        self.setWindowFlags(flags)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
        self.setWindowIcon(QIcon('MJDB_ICON.ico'))
        # Resolve the widgets created by setupUi by their object names.
        self.acc_list = self.findChild(QComboBox, 'acc_combo')
        self.sacc_list = self.findChild(QComboBox, 'acc_combo2')
        self.debit = self.findChild(QLineEdit, 'debit')
        self.credit = self.findChild(QLineEdit, 'credit')
        self.insert = self.findChild(QPushButton, 'new_op')
        self.con = self.findChild(QRadioButton, 'con')
        self.non_con = self.findChild(QRadioButton, 'non_con')
        self.w_debit = self.findChild(QLabel, 'w_debit')
        self.w_credit = self.findChild(QLabel, 'w_credit')
        self.date = self.findChild(QLabel, 'date')
        self.time = self.findChild(QLabel, 'time')
        self.ops_table = self.findChild(QTableWidget, 'ops_table')
        self.user = self.findChild(QLabel, 'user')
        self.wrong = self.findChild(QLabel, 'wrong')
        self.g_ballance = self.findChild(QLineEdit, 'b_sc')
        self.g_ballance_ = self.findChild(QLineEdit, 'b_g')
        self.general = self.findChild(QLineEdit, 'general_b')
        self.title = self.findChild(QFrame, 'title_bar')
        self.colse_btn = self.findChild(QPushButton, 'close_btn')
        self.minimize = self.findChild(QPushButton, 'mini')
        self.date_label = self.findChild(QLabel, 'today')
        self.annulation = self.findChild(QCheckBox, 'annulation')
        # Wire buttons to their slots.
        self.insert.clicked.connect(self.add_ops)
        self.colse_btn.clicked.connect(lambda: self.close())
        self.minimize.clicked.connect(lambda: self.showMinimized())
        self.remarque_btn.clicked.connect(self.handlePrint)
        self.ops_table.setSortingEnabled(False)
        now = QDate.currentDate()
        self.date.setText(now.toString(Qt.ISODate))
        # Clock label refreshed every second via displaytime().
        timer = QTimer(self)
        timer.timeout.connect(self.displaytime)
        timer.start(1000)
        # Populate the account combo box from the Accounts table.
        self.m = MyCursor()
        self.m.mycursor.execute('SELECT name FROM Accounts')
        for x in self.m.mycursor.fetchall():
            self.acc_list.addItems(x)
        self.acc_list.currentIndexChanged.connect(self.repeat)
        self.sacc_list.currentIndexChanged.connect(self.repeat1)
        self.con.setChecked(True)
        self.general_()

        def moveWindow(event):
            # Drag-to-move support for the frameless window; dragPos is
            # refreshed by mousePressEvent below.
            if event.buttons() == Qt.LeftButton:
                self.move(self.pos() + event.globalPos() - self.dragPos)
                self.dragPos = event.globalPos()
                event.accept()

        self.title_bar.mouseMoveEvent = moveWindow
        self.debit.textChanged.connect(self.debitChanged)
        self.credit.textChanged.connect(self.creditChanged)
        # Select-all on click so typing replaces the previous amount.
        self.debit.mousePressEvent = lambda _: self.debit.selectAll()
        self.credit.mousePressEvent = lambda _: self.credit.selectAll()
        # CELL CLICKED
        self.ops_table.cellClicked.connect(self.cellOpClicked)

    def handlePrint(self):
        """Show the print dialog and, on acceptance, paint the receipt."""
        printer = QPrinter(QPrinter.HighResolution)
        dialog = QtPrintSupport.QPrintDialog(printer, self)
        if dialog.exec_() == QtPrintSupport.QPrintDialog.Accepted:
            self.handlePaintRequest()

    def handlePaintRequest(self):
        """Paint a receipt for the last selected table row onto the printer."""
        rows = self.ops_table.selectionModel().selectedRows()
        # Iterate only to keep the *last* selected row index in self.index.
        for self.index in sorted(rows):
            pass
        if len(rows) > 0:
            # Cache the seven cells of the selected row as instance attributes.
            for row in range(self.index.row(), self.index.row() + 1):
                self.twi0 = self.ops_table.item(row, 0)
                self.twi8 = self.ops_table.item(row, 1)
                self.twi16 = self.ops_table.item(row, 2)
                self.twi32 = self.ops_table.item(row, 3)
                self.twi64 = self.ops_table.item(row, 4)
                self.twi128 = self.ops_table.item(row, 5)
                self.twi256 = self.ops_table.item(row, 6)
        # NOTE(review): when nothing is selected the drawText calls below rely
        # on self.twi* left over from a previous selection — confirm intended.
        datetime2 = datetime.now().date().strftime('%d/%m/%Y')
        datetime1 = datetime.now().strftime('%H:%M:%S')
        printer = QPrinter()
        printer.setOrientation(getattr(QPrinter, "Landscape"))
        printer.setPaperSize(QPrinter.A5)
        painter = QtGui.QPainter()
        # Start painter
        painter.begin(printer)
        # Grab a widget you want to print
        printer.setFullPage(True)
        # Draw grabbed pixmap: the receipt background image, then the text
        # fields at fixed coordinates matching the template.
        painter.setRenderHint(QPainter.SmoothPixmapTransform)
        painter.drawImage(0, 0, QImage('recus-01.jpg'))
        painter.setPen(QColor('black'))
        font = QFont('Mongolian Baiti')
        font1 = QFont('Mongolian Baiti')
        font.setPointSize(15)
        font1.setPointSize(12)
        painter.setFont(QFont(font1))
        painter.drawText(170, 50, datetime2)
        painter.drawText(170, 77, datetime1)
        painter.drawText(140, 102, self.user.text())
        painter.drawText(15, 320, self.acc_combo.currentText())
        painter.drawText(15, 375, self.twi0.text())
        painter.drawText(500, 330, self.twi8.text())
        painter.drawText(560, 372, self.twi32.text())
        painter.drawText(455, 417, self.twi64.text())
        painter.drawText(850, 417, self.twi16.text())
        painter.setFont(QFont(font))
        painter.drawText(405, 525, self.twi128.text())
        painter.drawText(705, 525, self.twi256.text())
        painter.end()

    def cellOpClicked(self):
        """Clicking a table cell switches the stacked panel to its first page."""
        self.stackedWidget.setCurrentIndex(0)

    def general_(self):
        """Recompute the overall balance over confirmed ops and colour it."""
        self.m = MyCursor()
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE type IN ('C', 'C / Annulation')")
        result4 = self.m.mycursor.fetchone()[0]
        self.o = MyCursor()
        self.o.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE type IN ('C', 'C / Annulation')")
        result5 = self.o.mycursor.fetchone()[0]
        if result4 is None and result5 is None:
            # No confirmed operations yet: leave the field untouched.
            pass
        else:
            # Credits are stored negative, so debit + credit is the net total.
            re = result4 + result5
            formatted_re = "{:,.2f}".format(re)
            if re < 0:
                self.general.setStyleSheet("""QLineEdit{border-radius:10px;
                color: rgb(255, 0, 0);}""")
                self.general.setText(formatted_re + ' DH')
            elif re > 0:
                self.general.setStyleSheet("""QLineEdit{border-radius:10px;
                color: rgb(0, 170, 0);}""")
                self.general.setText(formatted_re + ' DH')
            elif re == 0:
                self.general.setStyleSheet("""QLineEdit{border-radius:10px;
                color: rgb(0, 0, 0);}""")
                self.general.setText(formatted_re + ' DH')

    def debitChanged(self):
        """Keep the debit field numeric: an emptied field becomes '0'."""
        if self.debit.text() == '':
            self.debit.setText('0')
            self.debit.selectAll()

    def creditChanged(self):
        """Keep the credit field numeric: an emptied field becomes '0'."""
        if self.credit.text() == '':
            self.credit.setText('0')
            self.credit.selectAll()

    def mousePressEvent(self, event):
        # Remember the press position; moveWindow (title-bar drag) uses it.
        self.dragPos = event.globalPos()

    def displaytime(self):
        """Timer slot: refresh the clock label once a second."""
        time = QTime.currentTime()
        self.time.setText(time.toString(Qt.DefaultLocaleLongDate))

    def style_table(self):
        """Format the debit/credit columns as 'x,xxx.xx DH' and colour them
        (red for negative credit, green for positive debit, black for zero)."""
        for x in range(self.ops_table.rowCount()):
            g = float(self.ops_table.item(x, 6).text())
            y = float(self.ops_table.item(x, 5).text())
            if g < 0:
                formatted_float_debit = "{:,.2f}".format(g)
                self.ops_table.setItem(x, 6, QtWidgets.QTableWidgetItem(str(formatted_float_debit + ' DH')))
                self.ops_table.item(x, 6).setForeground(QtGui.QColor(255, 0, 0))
            elif y > 0:
                formatted_float_debit1 = "{:,.2f}".format(y)
                self.ops_table.setItem(x, 5, QtWidgets.QTableWidgetItem(str(formatted_float_debit1 + ' DH')))
                self.ops_table.item(x, 5).setForeground(QtGui.QColor(0, 170, 0))
            elif g == 0 or y == 0:
                formatted_float_debit = "{:,.2f}".format(g)
                formatted_float_debit1 = "{:,.2f}".format(y)
                self.ops_table.setItem(x, 6, QtWidgets.QTableWidgetItem(str(formatted_float_debit + ' DH')))
                self.ops_table.item(x, 6).setForeground(QtGui.QColor(0, 0, 0))
                self.ops_table.setItem(x, 5, QtWidgets.QTableWidgetItem(str(formatted_float_debit1 + ' DH')))
                self.ops_table.item(x, 5).setForeground(QtGui.QColor(0, 0, 0))

    def fill_table(self):
        """Reload the table for the selected account/sub-account over the last
        day, then recompute and colour the sub-account balance field."""
        if self.sacc_list.currentText() == 'Tout':
            # All sub-accounts of the selected account.
            time_r = datetime.now().strftime('%H-%M-%S')
            r = " ".join([self.date_label.text(), str(time_r)])
            date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
            self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
            WHERE acc = %s and created between %s and %s""",
                                    (self.acc_list.currentText(), date - timedelta(1), datetime.now()))
            f = self.m.mycursor.fetchall()
            self.ops_table.setRowCount(0)
            for column_number, row_data in enumerate(f):
                self.ops_table.insertRow(column_number)
                for row_number, data in enumerate(row_data):
                    self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
            self.style_table()
        elif self.sacc_list.currentText() == 'Selectionnez un sous-compte...':
            # Placeholder entry behaves like 'Tout': show the whole account.
            time_r = datetime.now().strftime('%H-%M-%S')
            r = " ".join([self.date_label.text(), str(time_r)])
            date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
            self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
            WHERE acc = %s and created between %s and %s""",
                                    (self.acc_list.currentText(), date - timedelta(1), datetime.now()))
            f = self.m.mycursor.fetchall()
            self.ops_table.setRowCount(0)
            for column_number, row_data in enumerate(f):
                self.ops_table.insertRow(column_number)
                for row_number, data in enumerate(row_data):
                    self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
            self.style_table()
        else:
            # A concrete sub-account: filter on opID as well.
            time_r = datetime.now().strftime('%H-%M-%S')
            r = " ".join([self.date_label.text(), str(time_r)])
            date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
            self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
            WHERE opID = %s and acc=%s and created between %s and %s""",
                                    (self.sacc_list.currentText(), self.acc_list.currentText(), date - timedelta(1), datetime.now()))
            f = self.m.mycursor.fetchall()
            self.ops_table.setRowCount(0)
            for column_number, row_data in enumerate(f):
                self.ops_table.insertRow(column_number)
                for row_number, data in enumerate(row_data):
                    self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
            self.style_table()
        # Sub-account balance over the same one-day window.
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        self.m = MyCursor()
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(),
                                date - timedelta(1), datetime.now().date(),))
        result = self.m.mycursor.fetchone()[0]
        self.o = MyCursor()
        self.o.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(),
                                date - timedelta(1), datetime.now().date(),))
        result1 = self.o.mycursor.fetchone()[0]
        if result is None and result1 is None:
            l_sc = 0
            self.g_ballance.setStyleSheet("""border-radius:10px;
            color: rgb(0, 0, 0);""")
            formatted_float_debit = "{:,.2f}".format(l_sc)
            self.g_ballance.setText(str(formatted_float_debit) + ' DH')
        else:
            l_sc = result + result1
            # NOTE(review): an exactly-zero non-None balance keeps the field's
            # previous text/colour — no l_sc == 0 branch here.
            if l_sc < 0:
                self.g_ballance.setStyleSheet("""border-radius:10px;
                color: rgb(255, 0, 0);""")
                formatted_float_debit = "{:,.2f}".format(l_sc)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
            elif l_sc > 0:
                self.g_ballance.setStyleSheet("""border-radius:10px;
                color: rgb(0, 170, 0);""")
                formatted_float_debit = "{:,.2f}".format(l_sc)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')

    def repeat1(self):
        """Slot: a sub-account was selected — refresh the table."""
        self.fill_table()

    def soldeSCompte(self):
        """Compute, colour and return the sub-account balance over the last
        day; implicitly returns None when no matching rows exist."""
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        self.m = MyCursor()
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(), datetime.now().date() - timedelta(1), datetime.now().date(),))
        result = self.m.mycursor.fetchone()[0]
        self.m.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE opID=%s and acc=%s and created between %s and %s", (self.sacc_list.currentText(), self.acc_list.currentText(), datetime.now().date() - timedelta(1), datetime.now().date(),))
        result1 = self.m.mycursor.fetchone()[0]
        if result is None and result1 is None:
            # NOTE(review): 'rbg' looks like a typo for 'rgb' — this style
            # string is almost certainly ignored by Qt. Kept as-is here.
            self.g_ballance.setStyleSheet("color: rbg(0, 0, 0);")
            fr1 = 0
            formatted_float_debit = "{:,.2f}".format(fr1)
            self.g_ballance.setText(str(formatted_float_debit) + ' DH')
        else:
            self.fr12 = result + result1
            if self.fr12 < 0:
                self.g_ballance.setStyleSheet("color: rgb(255, 0, 0);")
                formatted_float_debit = "{:,.2f}".format(self.fr12)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
            elif self.fr12 > 0:
                self.g_ballance.setStyleSheet("color: rgb(0, 170, 0);")
                formatted_float_debit = "{:,.2f}".format(self.fr12)
                self.g_ballance.setText(str(formatted_float_debit) + ' DH')
            # Callers (addfunc) rely on the None return of the branch above.
            return self.fr12

    def soldeCompte(self):
        """Compute the all-time account balance, persist it to
        Accounts.balance, and return it (None when the account has no ops)."""
        self.m.mycursor.execute("SELECT SUM(debit) FROM Ops WHERE acc=%s", (self.acc_list.currentText(),))
        result2 = self.m.mycursor.fetchone()[0]
        self.m.mycursor.execute("SELECT SUM(credit) FROM Ops WHERE acc=%s", (self.acc_list.currentText(),))
        result3 = self.m.mycursor.fetchone()[0]
        if result2 is None and result3 is None:
            pass
        else:
            self.fr = result2 + result3
            self.m.mycursor.execute("UPDATE Accounts SET balance=%s WHERE name=%s", (self.fr, self.acc_list.currentText(),))
            self.m.db.commit()
            return self.fr

    def keyPressEvent(self, event):
        """Enter/Return behaves like clicking the 'new operation' button."""
        # NOTE(review): RemEntry() is instantiated only to read .switch, and
        # the value is unused; the elif shares the first branch's condition so
        # it can never run — likely leftover code. Kept as-is (doc-only pass).
        r = RemEntry()
        switch_rem = r.switch
        if event.key() == Qt.Key_Return:
            self.add_ops()
        elif event.key() == Qt.Key_Return and SWITCH == 2:
            self.anul()

    def addfunc(self, sacc, user, debit1, credit1, anul, motif):
        """Insert one operation row and refresh table, balances and fields.

        sacc/user: sub-account and user names; anul: suffix appended to the
        op type ('' or ' / Annulation'); motif: free-text remark.
        NOTE(review): the debit1/credit1 parameters are immediately
        overwritten from the input fields below.
        """
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        debit1 = float(self.debit.text())
        # Credits are stored negative.
        credit1 = (float(self.credit.text()) * (-1))
        if self.soldeCompte() is None and self.soldeSCompte() is None:
            # First operation ever for this account: both running balances
            # start from this entry's net amount.
            m = MyCursor()
            m.mycursor.execute(
                "INSERT INTO Ops (opID, userID, created, type, debit, credit, soldeCompte, soldeSCompte, acc, motif) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                (sacc, user, date, self.type + anul, debit1, credit1, debit1+credit1, debit1+credit1,
                 str(self.acc_list.currentText()), motif,))
            m.db.commit()
        else:
            if self.soldeSCompte() is None:
                # First operation for this sub-account only.
                m = MyCursor()
                m.mycursor.execute(
                    "INSERT INTO Ops (opID, userID, created, type, debit, credit, soldeCompte, soldeSCompte, acc, motif) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                    (sacc, user, date, self.type + anul, debit1, credit1, self.soldeCompte()+debit1+credit1, debit1+credit1,
                     str(self.acc_list.currentText()), motif,))
                m.db.commit()
            else:
                # Both running balances extend their previous totals.
                m = MyCursor()
                m.mycursor.execute(
                    "INSERT INTO Ops (opID, userID, created, type, debit, credit, soldeCompte, soldeSCompte, acc, motif) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                    (sacc, user, date, self.type + anul, debit1, credit1, self.soldeCompte()+debit1+credit1, self.soldeSCompte()+debit1+credit1,
                     str(self.acc_list.currentText()), motif,))
                m.db.commit()
        # Recompute balances (also persists the account balance) and refresh.
        self.soldeSCompte()
        self.soldeCompte()
        m = MyCursor()
        m.mycursor.execute('SELECT opID, reference, userID, created, type ,debit, credit FROM ops WHERE opID = %s',
                           (self.sacc_list.currentText(),))
        f = m.mycursor.fetchall()
        self.ops_table.setRowCount(0)
        for column_number, row_data in enumerate(f):
            self.ops_table.insertRow(column_number)
            for row_number, data in enumerate(row_data):
                self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
        for x in range(self.ops_table.rowCount()):
            self.ops_table.item(x, 6).setForeground(QtGui.QColor(255, 0, 0))
            self.ops_table.item(x, 5).setForeground(QtGui.QColor(0, 170, 0))
        self.account_fill()
        self.fill_table()
        self.debit.selectAll()

    def anul(self):
        """Confirm an annulation entry: requires a motif in the dialog."""
        debit1 = float(self.debit.text())
        credit1 = (float(self.credit.text()) * (-1))
        sacc = self.sacc_list.currentText()
        user = self.user.text()
        motif = self.a.motif.toPlainText()
        if motif == '':
            # Flag the missing motif inside the Annulation dialog.
            self.a.wrong.setText('*')
        else:
            self.addfunc(sacc, user, debit1, credit1, ' / Annulation', motif)
            self.general_()
            self.a.close()
            self.fill_ops()

    def remarque_op(self):
        """Confirm a normal entry: a motif is required only when the account's
        Accounts.Obl flag is set."""
        debit1 = float(self.debit.text())
        credit1 = (float(self.credit.text()) * (-1))
        sacc = self.sacc_list.currentText()
        user = self.user.text()
        motif = self.rem.motif.toPlainText()
        self.m = MyCursor()
        self.m.mycursor.execute("SELECT Obl FROM Accounts WHERE name=%s", (self.acc_list.currentText(),))
        r = ''.join(map(str, self.m.mycursor.fetchone()))
        if r == 'None':
            # Obl is NULL: motif optional.
            self.addfunc(sacc, user, debit1, credit1, '', motif)
            self.general_()
            self.rem.close()
            self.fill_ops()
        elif r == '0':
            # Obl is 0: motif optional.
            self.addfunc(sacc, user, debit1, credit1, '', motif)
            self.general_()
            self.rem.close()
            self.fill_ops()
        else:
            # Obl set: refuse an empty motif.
            if motif == '':
                self.rem.wrong.setText("*")
            else:
                self.addfunc(sacc, user, debit1, credit1, '', motif)
                self.general_()
                self.rem.close()
                self.fill_ops()

    def fill_ops(self):
        """Refresh the table and reset the amount fields for the next entry."""
        self.fill_table()
        self.debit.selectAll()
        self.debit.clear()
        self.credit.clear()
        self.debit.setFocus()

    def add_ops(self):
        """Validate the amount fields and open the confirmation dialog
        (Annulation or RemEntry, depending on the checkbox)."""
        global SWITCH
        try:
            self.debit.setFocus()
            self.debit.selectAll()
            # str.islower() is True only when the text contains at least one
            # letter, so 'test_char is False' means the field is letter-free.
            test = self.debit.text().lower()
            test_char = test.islower()
            test2 = self.credit.text().lower()
            test_char2 = test2.islower()
            if self.con.isChecked():
                self.type = self.con.text()
            elif self.non_con.isChecked():
                self.type = self.non_con.text()
            if self.sacc_list.currentText() != 'Tout':
                # Exactly one of debit/credit must be non-zero.
                if self.debit.text() != '0' and self.credit.text() == '0' and self.sacc_list.currentText() != 'Selectionnez un sous-compte...' and test_char is False and test_char2 is False:
                    if self.annulation.isChecked():
                        self.a = Annulation()
                        self.a.show()
                        self.a.val.clicked.connect(self.anul)
                        SWITCH = 2
                    else:
                        self.rem = RemEntry()
                        self.rem.show()
                        self.rem.val.clicked.connect(self.remarque_op)
                        SWITCH = 1
                elif self.debit.text() == '0' and self.credit.text() != '0' and test_char is False and test_char2 is False:
                    if self.annulation.isChecked():
                        self.a = Annulation()
                        self.a.show()
                        self.a.val.clicked.connect(self.anul)
                        SWITCH = 2
                    else:
                        self.rem = RemEntry()
                        self.rem.show()
                        self.rem.val.clicked.connect(self.remarque_op)
                        SWITCH = 1
                else:
                    pass
            except Exception:
                msg = QMessageBox()
                msg.setWindowTitle('Erreur')
                msg.setText("Erreur Inconnu! CODE D'ERREUR (0001)")
                msg.setIcon(QMessageBox.Critical)
                msg.exec_()

    def account_fill(self):
        """Show the stored Accounts.balance of the selected account, coloured
        red/green/black by sign."""
        if self.acc_list.currentText() == 'Selectionnez un compte...':
            self.g_ballance.clear()
            self.g_ballance_.clear()
        else:
            self.m.mycursor.execute('SELECT balance FROM Accounts WHERE name=%s',
                                    (self.acc_list.currentText(),))
            l_ = self.m.mycursor.fetchone()[0]
            if l_ is None:
                pass
            else:
                if l_ < 0:
                    self.g_ballance_.setStyleSheet("""border-radius:10px;
                    color: rgb(255, 0, 0);""")
                    formatted_float_debit = "{:,.2f}".format(l_)
                    self.g_ballance_.setText(str(formatted_float_debit) + ' DH')
                elif l_ > 0:
                    self.g_ballance_.setStyleSheet("""border-radius:10px;
                    color: rgb(0, 170, 0);""")
                    formatted_float_debit = "{:,.2f}".format(l_)
                    self.g_ballance_.setText(str(formatted_float_debit) + ' DH')
                elif l_ == 0:
                    self.g_ballance_.setStyleSheet("""border-radius:10px;
                    color: rgb(0, 0, 0);""")
                    formatted_float_debit = "{:,.2f}".format(l_)
                    self.g_ballance_.setText(str(formatted_float_debit) + ' DH')

    def repeat(self):
        """Slot: the account changed — reload sub-accounts and the table."""
        self.sacc_list.clear()
        self.m.mycursor.execute('SELECT name FROM SAccounts WHERE accountID=%s', (self.acc_list.currentText(),))
        for x in self.m.mycursor.fetchall():
            self.sacc_list.addItems(x)
        self.sacc_list.insertItem(0, 'Tout')
        self.account_fill()
        time_r = datetime.now().strftime('%H-%M-%S')
        r = " ".join([self.date_label.text(), str(time_r)])
        date = datetime.strptime(r, '%d/%m/%Y %H-%M-%S')
        # NOTE(review): unlike fill_table this window starts at `date`
        # (no `- timedelta(1)`) — confirm whether that is intentional.
        self.m.mycursor.execute("""SELECT opID, reference, userID, created, type ,debit, credit FROM Ops
        WHERE acc = %s and created between %s and %s""",
                                (self.acc_list.currentText(), date, datetime.now()))
        f = self.m.mycursor.fetchall()
        self.ops_table.setRowCount(0)
        for column_number, row_data in enumerate(f):
            self.ops_table.insertRow(column_number)
            for row_number, data in enumerate(row_data):
                self.ops_table.setItem(column_number, row_number, QtWidgets.QTableWidgetItem(str(data)))
        self.style_table()
        self.fill_table()
self.fill_table()
# Manual smoke test: launch the operations window standalone.
if __name__ == '__main__':
    import sys
    app = QtWidgets.QApplication(sys.argv)
    window = AddOps()
    window.show()
    sys.exit(app.exec())
| 45.036649 | 240 | 0.556305 | 25,259 | 0.978765 | 0 | 0 | 0 | 0 | 0 | 0 | 3,834 | 0.148564 |
64b6dc2967551718c352e4ed6c6a33338fe3045c | 3,971 | py | Python | packages/tractography/tensor_gen.py | justi/m2g | 09e8b889889ee8d8fb08b9b6fcd726fb3d901644 | [
"Apache-2.0"
] | 12 | 2015-03-11T22:07:17.000Z | 2016-01-29T21:24:29.000Z | packages/tractography/tensor_gen.py | youngmook/m2g | 09e8b889889ee8d8fb08b9b6fcd726fb3d901644 | [
"Apache-2.0"
] | 213 | 2015-01-30T16:02:57.000Z | 2016-01-29T21:45:02.000Z | packages/tractography/tensor_gen.py | youngmook/m2g | 09e8b889889ee8d8fb08b9b6fcd726fb3d901644 | [
"Apache-2.0"
] | 5 | 2015-02-04T13:58:12.000Z | 2016-01-29T21:24:46.000Z | #!/usr/bin/env python
# Copyright 2015 Open Connectome Project (http://openconnecto.me)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# dti2tens.py
# Created by Greg Kiar on 2015-01-09.
# Email: gkiar@jhu.edu
# Copyright (c) 2015. All rights reserved.
# Load necessary packages
from argparse import ArgumentParser
from os import system
from os.path import basename, splitext
def make_tens(dti, grad, bval, mask, scheme, dti_bfloat, tensors): #, fa, md, eigs):
"""
Computes tensors from DTI image
We leverage Camino's tensor estimation tool to compute the tensors at each voxel within the DTI volumes. The tensors are computed using standard methods of estimation: performing multiple linear regression on the equation relating the diffusion direction matrix of the voxel, the b-vectors, and the voxel intensities across different imposed b-fields.
Camino's dtfit documentation: http://cmic.cs.ucl.ac.uk/camino/index.php?n=Man.Dtfit
**Positional Arguments**
DTI Image: [.nii; nifti image]
- Corrected DTI volume.
B-vectors: [.grad; ASCII file]
- Field direction vectors for each volume in DTI image.
B-values: [.b; ASCII file]
- List of b-values corresponding to the b-vectors.
Brain mask: [.nii; nifti image]
- Binary labels identifying region of the image which contains brain tissue.
**Returns**
Tensors: [.Bdouble; big-endian double]
- List of tensors for each voxel in the source DTI image.
"""
# Create scheme file
system('fsl2scheme -bvecfile '+grad+' -bvalfile '+bval+' -bscale 1 > '+scheme)
# Maps the DTI image to a Camino compatible Bfloat format
system('image2voxel -4dimage '+dti+' -outputfile '+dti_bfloat)
# Produce tensors from image
system('dtfit '+dti_bfloat+' '+scheme+' -bgmask '+mask+' -outputfile '+tensors)
# In order to visualize, and just 'cause it's fun anyways, we get some stats
#[fa_base, ext] = splitext(basename(fa))
#[md_base, ext] = splitext(basename(md))
#system('for PROG in '+fa+' '+md+'; do cat '+tensors+' | ${PROG} | voxel2image -outputroot ${PROG} -header '+dti+'; done')
# We also need the eigen system to visualize
#system('cat '+tensors+' | dteig > '+eigs)
def main():
    """Command-line entry point: collect the seven file paths and run make_tens."""
    arg_specs = (
        ("dti", "The DTI image, not skull stripped (.nii)"),
        ("grad", "The gradient directions corresponding to the DTI image (bvec)"),
        ("bval", "The bvalue file corresponding to the DTI image (bval)"),
        ("mask", "The brain mask of the DTI image (.nii, .nii.gz)"),
        ("scheme", "The scheme file (.scheme)"),
        ("dti_bfloat", "The Bfloat format equivalent of the DTI image (.Bfloat)"),
        ("tensors", "The produced tensors in Bdouble format (.Bdouble)"),
    )
    parser = ArgumentParser(description="")
    for arg_name, help_text in arg_specs:
        parser.add_argument(arg_name, action="store", help=help_text)
    result = parser.parse_args()
    make_tens(result.dti, result.grad, result.bval, result.mask,
              result.scheme, result.dti_bfloat, result.tensors)
if __name__ == '__main__':
    main()
| 43.163043 | 352 | 0.723244 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,136 | 0.789726 |
64b7672cb62dac687e85a4289526c29c26d01251 | 4,055 | py | Python | ml2/ltl/ltl_spec/ltl_spec_encoder.py | reactive-systems/ml2 | c5ecaf07816b48a76ce4f7a3255fcf3baa78fd5c | [
"MIT"
] | 2 | 2021-06-30T14:00:11.000Z | 2022-02-02T14:12:26.000Z | ml2/ltl/ltl_spec/ltl_spec_encoder.py | reactive-systems/ml2 | c5ecaf07816b48a76ce4f7a3255fcf3baa78fd5c | [
"MIT"
] | null | null | null | ml2/ltl/ltl_spec/ltl_spec_encoder.py | reactive-systems/ml2 | c5ecaf07816b48a76ce4f7a3255fcf3baa78fd5c | [
"MIT"
] | 1 | 2021-07-01T13:38:33.000Z | 2021-07-01T13:38:33.000Z | """LTL specification encoder"""
import tensorflow as tf
from ...data.encoder import PAD_TOKEN
from ..ltl_encoder import LTLTreeEncoder
class LTLSpecTreeEncoder(LTLTreeEncoder):
    """Encodes a whole LTL specification as a single formula tree."""
    def encode(self, spec):
        # Delegates to the generic tree encoder using the spec's single
        # combined formula string (spec is assumed to expose
        # formula_str -- TODO confirm against the LTLSpec class).
        return super().encode(spec.formula_str)
class LTLSpecGuaranteeEncoder(LTLTreeEncoder):
    """Encodes each guarantee of an LTL specification as its own padded tree.

    Produces a fixed-size (num_guarantees, pad) id matrix plus a
    (num_guarantees, pad, tpe_pad) tree positional encoding; unused
    guarantee slots are filled with PAD_TOKEN rows / zero TPEs.
    """
    def __init__(self, guarantee_pad, num_guarantees, **kwargs):
        # num_guarantees: fixed number of guarantee slots in the output tensor.
        self.num_guarantees = num_guarantees
        self.guarantee_ids = []
        self.guarantee_padded_tpe = []
        # guarantee_pad becomes the per-guarantee token padding (self.pad).
        super().__init__(pad=guarantee_pad, **kwargs)
    @property
    def tensor_spec(self):
        # TensorSpec pair matching the shapes produced by `tensor` below.
        guarantee_spec = tf.TensorSpec(shape=(self.num_guarantees, self.pad),
                                       dtype=self.tf_dtype)
        tpe_spec = tf.TensorSpec(shape=(self.num_guarantees, self.pad,
                                        self.tpe_pad),
                                 dtype=self.tf_dtype)
        return (guarantee_spec, tpe_spec)
    @property
    def tensor(self):
        # Constant tensors built from the most recent encode() call.
        guarantee_tensor = tf.constant(self.guarantee_ids, dtype=self.tf_dtype)
        tpe_tensor = tf.constant(self.guarantee_padded_tpe, dtype=self.tf_dtype)
        return (guarantee_tensor, tpe_tensor)
    def encode(self, spec):
        # Returns False (with self.error set) when the spec does not fit;
        # True on success, leaving results in guarantee_ids / guarantee_padded_tpe.
        if len(spec.guarantees) > self.num_guarantees:
            self.error = 'Num guarantees'
            return False
        self.guarantee_ids = []
        self.guarantee_padded_tpe = []
        for guarantee in spec.guarantees:
            # super().encode fills self.ids / self.padded_tpe as a side effect.
            if not super().encode(guarantee):
                return False
            self.guarantee_ids.append(self.ids)
            self.guarantee_padded_tpe.append(self.padded_tpe)
        # Fill the remaining slots with padding rows (only possible once a
        # vocabulary is attached).
        for _ in range(len(spec.guarantees), self.num_guarantees):
            if self.vocabulary:
                self.guarantee_ids.append(
                    self.vocabulary.tokens_to_ids([PAD_TOKEN] * self.pad))
                self.guarantee_padded_tpe.append([[0] * self.tpe_pad] *
                                                 self.pad)
        return True
class LTLSpecPropertyEncoder(LTLTreeEncoder):
    """Encodes assumptions followed by guarantees, one padded tree per property.

    Like LTLSpecGuaranteeEncoder, but assumptions and guarantees share the
    same num_properties slots; a start token is emitted only for the
    assumption part (encode_start toggling below).
    """
    def __init__(self, property_pad, num_properties, **kwargs):
        # num_properties: total slots for assumptions + guarantees combined.
        self.num_properties = num_properties
        self.property_ids = []
        self.property_padded_tpe = []
        super().__init__(start=True, pad=property_pad, **kwargs)
    @property
    def tensor_spec(self):
        # TensorSpec pair matching the shapes produced by `tensor` below.
        property_spec = tf.TensorSpec(shape=(self.num_properties, self.pad),
                                      dtype=self.tf_dtype)
        tpe_spec = tf.TensorSpec(shape=(self.num_properties, self.pad,
                                        self.tpe_pad),
                                 dtype=self.tf_dtype)
        return (property_spec, tpe_spec)
    @property
    def tensor(self):
        # Constant tensors built from the most recent encode() call.
        property_tensor = tf.constant(self.property_ids, dtype=self.tf_dtype)
        tpe_tensor = tf.constant(self.property_padded_tpe, dtype=self.tf_dtype)
        return (property_tensor, tpe_tensor)
    def encode(self, spec):
        # Returns False (with self.error set) when assumptions + guarantees
        # exceed the available slots; True on success.
        if len(spec.assumptions + spec.guarantees) > self.num_properties:
            self.error = 'Num properties'
            return False
        self.property_ids = []
        self.property_padded_tpe = []
        # Start token is enabled for the first assumption only; it is
        # switched off inside the loop after each assumption is encoded.
        self.encode_start = True
        for assumption in spec.assumptions:
            if not super().encode(assumption):
                return False
            self.property_ids.append(self.ids)
            self.property_padded_tpe.append(self.padded_tpe)
            self.encode_start = False
        for guarantee in spec.guarantees:
            if not super().encode(guarantee):
                return False
            self.property_ids.append(self.ids)
            self.property_padded_tpe.append(self.padded_tpe)
        # Pad the unused slots (only possible once a vocabulary is attached).
        for _ in range(len(spec.assumptions + spec.guarantees),
                       self.num_properties):
            if self.vocabulary:
                self.property_ids.append(
                    self.vocabulary.tokens_to_ids([PAD_TOKEN] * self.pad))
                self.property_padded_tpe.append([[0] * self.tpe_pad] * self.pad)
        return True
| 38.254717 | 80 | 0.605672 | 3,909 | 0.963995 | 0 | 0 | 1,261 | 0.310974 | 0 | 0 | 63 | 0.015536 |
64b82d4b5f5b18d5d4af06617964ece62fbc4a17 | 5,781 | py | Python | pulse_lib/segments/data_classes/data_HVI_variables.py | NicoHendrickx/pulse_lib | 94cd9cf7a8d4d422b86c0759dd2eb7c3466d6413 | [
"MIT"
] | null | null | null | pulse_lib/segments/data_classes/data_HVI_variables.py | NicoHendrickx/pulse_lib | 94cd9cf7a8d4d422b86c0759dd2eb7c3466d6413 | [
"MIT"
] | 26 | 2020-04-06T09:33:39.000Z | 2022-02-18T14:08:22.000Z | pulse_lib/segments/data_classes/data_HVI_variables.py | NicoHendrickx/pulse_lib | 94cd9cf7a8d4d422b86c0759dd2eb7c3466d6413 | [
"MIT"
] | 3 | 2020-03-31T11:56:23.000Z | 2021-12-06T13:42:50.000Z | """
data class for markers.
"""
from pulse_lib.segments.data_classes.data_generic import parent_data
import copy
class marker_HVI_variable(parent_data):
    """Container for HVI marker variables attached to a pulse segment.

    Two dictionaries are kept: ``my_time_data`` for markers interpreted as
    times (ns) and ``my_amp_data`` for markers interpreted as amplitudes
    (mV).  ``end_time`` tracks the effective segment length.
    NOTE(review): ``start_time`` is read/copied below but never set in this
    __init__ -- presumably parent_data initialises it; confirm.
    """
    def __init__(self):
        """Initialise an empty marker object (no markers, zero length)."""
        super().__init__()
        self.my_time_data = dict()   # name -> time marker value (ns)
        self.my_amp_data = dict()    # name -> amplitude marker value (mV)
        self.end_time = 0            # effective end time of this segment (ns)
    @property
    def HVI_markers(self):
        # Merged view of all markers; amplitude entries win on name clashes.
        return {**self.my_time_data, **self.my_amp_data}
    def __getitem__(self, *item):
        # Look the variable up among time markers first, then amplitudes.
        # (Membership tests replace the original bare `except:` clauses,
        # which would also have swallowed KeyboardInterrupt etc.)
        key = item[0]
        if key in self.my_time_data:
            return self.my_time_data[key]
        if key in self.my_amp_data:
            return self.my_amp_data[key]
        raise ValueError("Asking for HVI variable {}. But this variable is not present in the current data set.".format(key))
    def add_HVI_marker(self, name, amplitude, time):
        """
        add a marker

        Args:
            name (str) : variable name for the HVI marker
            amplitude (float) : amplitude of the marker (in case of a time, unit is in ns, else mV)
            time (bool) : True if the marker needs to be interpreted as a time.
        """
        if time:
            self.my_time_data[name] = amplitude
        else:
            self.my_amp_data[name] = amplitude
    def reset_time(self, time = None, extend_only = False):
        """
        reset the effective start time. See online manual in pulse building instructions to understand this command.

        Args:
            time (double) : new time that will become time zero
            extend_only (bool) : accepted for interface compatibility; not
                used by markers.
        """
        self.start_time = self.total_time
        if time is not None:
            self.start_time = time
        # Never let the segment end before it starts.
        if self.start_time > self.end_time:
            self.end_time = self.start_time
    def wait(self, time):
        """
        Wait after marker for x ns.

        Args:
            time (double) : time in ns to wait
        """
        self.end_time += time
    @property
    def total_time(self):
        '''get the total time of this segment (ns).'''
        return self.end_time
    def slice_time(self, start, end):
        """
        apply slice operation on this marker.

        Args:
            start (double) : start time of the marker
            end (double) : stop time of the marker
        NOTE(review): only shifts time markers by -start; `end` is ignored
        and no markers are dropped -- confirm this is intended.
        """
        for key in self.my_time_data:
            self.my_time_data[key] -= start
    def get_vmin(self, sample_rate = 1e9):
        # Markers carry no voltage range.
        return 0
    def get_vmax(self, sample_rate = 1e9):
        # Markers carry no voltage range.
        return 0
    def integrate_waveform(self, sample_rate):
        """
        as markers are connected to matched inputs, we do not need to compensate, hence no integration of waveforms is needed.
        """
        return 0
    def append(self, other, time = None):
        '''
        Append two segments to each other, where the other segment is placed after the first segment. Time is the total time of the first segment.

        Args:
            other (marker_HVI_variable) : other pulse data object to be appended
            time (double/None) : length that the first segment should be.
        '''
        end_time = self.total_time
        if time is not None:
            end_time = time
        self.slice_time(0, end_time)
        # Time markers of `other` are shifted by the first segment's length;
        # amplitude markers are time-independent and merged as-is.
        other_shifted = other._shift_all_time(end_time)
        self.my_time_data.update(other_shifted.my_time_data)
        self.my_amp_data.update(other.my_amp_data)
    def __copy__(self):
        """Return a shallow copy of this marker object."""
        my_copy = marker_HVI_variable()
        my_copy.my_amp_data = copy.copy(self.my_amp_data)
        my_copy.my_time_data = copy.copy(self.my_time_data)
        my_copy.start_time = copy.copy(self.start_time)
        my_copy.end_time = copy.copy(self.end_time)
        return my_copy
    def _shift_all_time(self, time_shift):
        '''
        Make a copy of all the data and shift all the time markers.

        Args:
            time_shift (double) : non-negative shift to apply
        Returns:
            data_copy_shifted (marker_HVI_variable) : shifted copy of own data
        '''
        if time_shift < 0:
            raise ValueError("when shifting time, you cannot make negative times. Apply a positive shift.")
        data_copy_shifted = copy.copy(self)
        for key in data_copy_shifted.my_time_data:
            data_copy_shifted.my_time_data[key] += time_shift
        return data_copy_shifted
    def __add__(self, other):
        """
        add other marker to this one

        Args:
            other (marker_HVI_variable) : other marker object you want to add
        """
        if not isinstance(other, marker_HVI_variable):
            raise ValueError("only HVI makers can be added to HVI makers. No other types allowed.")
        # Merge both dictionaries; entries from `other` win on name clashes.
        new_data = marker_HVI_variable()
        new_data.my_time_data = {**self.my_time_data, **other.my_time_data}
        new_data.my_amp_data = {**self.my_amp_data, **other.my_amp_data}
        new_data.start_time = self.start_time
        # Keep the later of the two end times.
        new_data.end_time = self.end_time
        if other.total_time > self.total_time:
            new_data.end_time = other.end_time
        return new_data
    def __mul__(self, other):
        raise ValueError("No multiplication support for markers ...")
    def __repr__(self):
        return "=== raw data in HVI variable object ===\n\namplitude data ::\n" + str(self.my_amp_data) + "\ntime dep data ::\n" + str(self.my_time_data)
    def _render(self, sample_rate, ref_channel_states):
        '''
        make a full rendering of the waveform at a predetermined sample rate.
        '''
        raise ValueError("Rendering of HVI marker is currently not supported.")
| 30.426316 | 153 | 0.60474 | 5,664 | 0.979761 | 0 | 0 | 225 | 0.038921 | 0 | 0 | 2,441 | 0.422245 |
64b83e5317efd2af4f7c1ca229a67c582a84f39a | 2,066 | py | Python | variant_coverage/collapse_bcs.py | cindyyeh/barseq | 273ff891a8dd4c6a73209a5a7f4be04da3b3e6b5 | [
"MIT"
] | 1 | 2022-03-23T20:09:08.000Z | 2022-03-23T20:09:08.000Z | variant_coverage/collapse_bcs.py | cindyyeh/barseq | 273ff891a8dd4c6a73209a5a7f4be04da3b3e6b5 | [
"MIT"
] | 1 | 2022-03-24T18:03:32.000Z | 2022-03-24T18:03:32.000Z | variant_coverage/collapse_bcs.py | cindyyeh/barseq | 273ff891a8dd4c6a73209a5a7f4be04da3b3e6b5 | [
"MIT"
] | null | null | null | '''
Written by Chiann-Ling Cindy Yeh, updated 11/30/2021
This script groups barcodes by variants and counts how many reads are associated with each variant; works with maps where a barcode is being mapped to more than one gene (e.g., Ste5 and Ste7 on the same amplicon with the barcode)
First argument: path to tab-delimited barcode-variant map (first column barcode, other columns contain variant info. doesn't necessarily have to be a sequence (can be unique identifier). aka output from PacRAT https://github.com/dunhamlab/PacRAT )
Second argument: path to tab-delimited barcode-counts file (first column barcode, sequence column counts, aka the output of misc_scripts/count_unique_bcs.py)
Third argument: name of output file
Output file:
column 1: variant
column 2: number of barcodes matched to this variant from pacbio
column 3: number of barcodes matched to this variant from pacbio that are also found in barseq counts
column 4: number of read counts for this variant over all barcodes
'''
import sys

# --- Pass 1: barcode -> variant map (argv[1]) --------------------------------
# Build the reverse mapping variant -> [barcodes].  A row with more than two
# columns means the barcode maps to several genes; those columns are joined
# with ';' to form one composite variant key (same behaviour as before).
var_bc_dict = {}
with open(sys.argv[1], "r") as var_map_infile:
    for raw_line in var_map_infile:
        fields = raw_line.strip().split()
        bc = fields[0]
        if len(fields) > 2:
            var = ";".join(fields[1:])
        else:
            var = fields[1]
        var_bc_dict.setdefault(var, []).append(bc)
print(len(var_bc_dict))

# --- Pass 2: barcode -> read counts (argv[2]) --------------------------------
bc_count_dict = {}
with open(sys.argv[2], "r") as bc_count_file:
    for raw_line in bc_count_file:
        fields = raw_line.strip().split()
        bc_count_dict[fields[0]] = int(fields[1])
print(len(bc_count_dict))

# --- Output (argv[3]) --------------------------------------------------------
# One tab-separated row per variant: variant, number of barcodes mapped to
# it, number of those barcodes present in the count file, and the summed
# read count.  Using `with` guarantees the output file is flushed and
# closed (the original never closed it explicitly).
var_counts_dict = {}
with open(sys.argv[3], "w+") as outfile:
    for key, barcodes in var_bc_dict.items():
        counts = 0
        num_bcs_counted = 0
        for bc in barcodes:
            if bc in bc_count_dict:
                counts += bc_count_dict[bc]
                num_bcs_counted += 1
        # Kept for parity with the original script (useful when run
        # interactively); not read anywhere below.
        var_counts_dict[key] = counts
        outfile.write(key + "\t" + str(len(barcodes)) + "\t" + str(num_bcs_counted) + "\t" + str(counts) + "\n")
| 32.793651 | 247 | 0.734753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,138 | 0.550823 |
64b8c4d0cf9e1949005c5b1ea8a056e52ed57d58 | 2,504 | py | Python | AutoBonsai/AutoBonsai.py | WolfgangAxel/Random-Projects | 12764d96be3fa162abc5451fdd07db7481200a07 | [
"MIT"
] | 1 | 2017-08-17T19:50:11.000Z | 2017-08-17T19:50:11.000Z | AutoBonsai/AutoBonsai.py | WolfgangAxel/Random-Projects | 12764d96be3fa162abc5451fdd07db7481200a07 | [
"MIT"
] | null | null | null | AutoBonsai/AutoBonsai.py | WolfgangAxel/Random-Projects | 12764d96be3fa162abc5451fdd07db7481200a07 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# AutoBonsai.py
#
# Copyright 2016 keaton <keaton@MissionControl>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
## Variables (all durations in seconds)
sleepTime = 30*60 # pause between schedule checks (twice an hour)
wateringSleep = 6*60*60 # interval between water-level checks (4 times a day)
lightOnTime = 10*60*60 # daily grow-light on-time (10 hours)
## Definitions
def addEvent(date,commands):
    """
    adds an event to the schedule and persists it to disk

    'date' should be a time in seconds since epoch
    'commands' will be passed through exec when 'date' is past (can be string or array of strings)
    """
    global Schedule
    # Accept a single command string for convenience.
    if not isinstance(commands, list):
        commands = [commands]
    Schedule.append([date, commands])
    # Keep the schedule ordered by firing time (epoch seconds) so
    # checkEvent() only ever needs to inspect the head of the list.
    Schedule = sorted(Schedule, key=lambda event: event[0])
    saveArray(MYDIR+"/MyFiles/Schedule.list", Schedule)
def checkEvent():
    """
    Fires every event whose scheduled time has passed, persisting the
    shrinking schedule after each one.

    The original implementation recursed after every fired event; a loop
    gives identical behaviour without risking the recursion limit on a
    long backlog.
    SECURITY NOTE(review): commands are run through exec() -- only trusted
    strings must ever be added to the schedule.
    """
    global Schedule
    while Schedule and time() >= Schedule[0][0]:
        for command in Schedule[0][1]:
            exec(command)
        Schedule = Schedule[1:]
        saveArray(MYDIR+"/MyFiles/Schedule.list", Schedule)
def notifyMe(title,message):
    # Sends a push notification through the simplepush.io HTTP API.
    # SECURITY NOTE(review): the simplepush key is hard-coded and therefore
    # published with the source; consider moving it to a config file kept
    # out of version control.
    post('https://api.simplepush.io/send',data={'key':'36h2Me', 'title':str(title), 'msg':str(message)})
## Modules
from time import time,sleep
from os import path
from requests import post
from sys import path as moduleDir
MYDIR = path.dirname(path.realpath(__file__))
moduleDir.append(MYDIR+"/Modules")
from MyMods import *
from PlantUtils import *
Schedule = []
try:
Schedule = loadArray(MYDIR+"/Schedule.list")
except:
print "Schedule not found."
if __name__ == '__main__':
print "hi!"
if Schedule == []:
print "Schedule not found or empty. Initiating and scheduling all care commands"
beginCaring()
while True:
sleep(sleepTime)
checkEvent()
| 28.454545 | 101 | 0.717652 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,487 | 0.59385 |
64bb02fc468eb1c3e175fb0282d1289dd4088e79 | 1,504 | py | Python | CameraApp.py | hemameh/License-Plate-Recognition | f182184c19b17522450bc228a4c710290f3da84f | [
"MIT"
] | null | null | null | CameraApp.py | hemameh/License-Plate-Recognition | f182184c19b17522450bc228a4c710290f3da84f | [
"MIT"
] | null | null | null | CameraApp.py | hemameh/License-Plate-Recognition | f182184c19b17522450bc228a4c710290f3da84f | [
"MIT"
] | null | null | null | from imutils.video import WebcamVideoStream
from FaceRecognition import *
from face_recognition import compare_faces
import cv2
# Live face-recognition loop: grab webcam frames, run detection/recognition
# on every SKIP_FRAME_RATIO-th frame, and overlay the recognised names.
SKIP_FRAME_RATIO = 2
# Known reference faces loaded from disk and encoded once at startup.
images, greys, names = load_faces("knownfaces")
knownfaces = encodeDataset(images, greys)
# Numeric ids parallel to `names` -- assumes at most 9 known faces; TODO confirm.
ID = [1,2,3,4,5,6,7,8,9]
face_locations = []
face_names = []
process_frame_count = 1
vs = WebcamVideoStream().start()
while True:
    frame = vs.read()
    # Only every SKIP_FRAME_RATIO-th frame is processed; in between, the
    # most recent detections/names are reused for drawing.
    if(process_frame_count % SKIP_FRAME_RATIO == 0):
        temp_frame, grey_frame = preprocess(frame)
        face_locations = detectfaces(grey_frame)
        face_names = []
        id_iden = []
        for face in face_locations:
            #facealigned, rect = alignface(temp_frame,grey_frame,face)
            facelandmark = predictface(grey_frame,face)
            faceencoded = encodeface(temp_frame, facelandmark)
            matchfaces = compare_faces(knownfaces, faceencoded)
            name = "Unknown"
            id = "id"
            # Last matching reference wins if several match.
            for i,match in enumerate(matchfaces):
                if(match == True):
                    name = names[i]
                    id = ID[i]
            face_names.append(name)
            id_iden.append(id)
    process_frame_count += 1
    # Draw labels from the latest processed frame and log each sighting.
    for i,location in enumerate(face_locations):
        frame = draw_name(frame,face_names[i],location)
        insertName(None,None,id_iden[i])
    cv2.imshow('Video', frame)
    # 'q' quits the viewer.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
vs.stop()
cv2.destroyAllWindows() | 29.490196 | 71 | 0.609043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.0625 |
64bd36f6fcd20ce41e9f8fade95cc48e8c9d5d1e | 1,172 | py | Python | tests/test_genericls.py | scsides/usgscsm | 4a69e01015faddd7d0bdeb15097cc02a9cae09be | [
"Unlicense"
] | null | null | null | tests/test_genericls.py | scsides/usgscsm | 4a69e01015faddd7d0bdeb15097cc02a9cae09be | [
"Unlicense"
] | null | null | null | tests/test_genericls.py | scsides/usgscsm | 4a69e01015faddd7d0bdeb15097cc02a9cae09be | [
"Unlicense"
] | null | null | null | import os
import json
import pytest
from cycsm import isd
import cycsm.csm as csm
import usgscam as cam
data_path = os.path.dirname(__file__)
class TestGenericLs:
    # Exercises the generic line-scan camera model against reference
    # image/ground coordinate pairs.  The `ctx_model` fixture is defined
    # elsewhere (conftest.py, presumably) -- not visible in this file.
    @pytest.mark.parametrize('image, ground',[
        ((2500, 9216, 0), (-73589.5516508502, 562548.342040933, 2372508.44060771))
    ])
    def test_image_to_ground(self, ctx_model, image, ground):
        # NOTE(review): every assertion below is commented out, so this test
        # currently passes vacuously -- it only checks that imageToGround()
        # runs without raising.
        gx, gy, gz = ground
        x, y, z = ctx_model.imageToGround(*image)
        #TODO: Get this test up and running.
        #print(x, y, z)
        #assert False
        #assert x == pytest.approx(gx, rel=1)
        #assert y == pytest.approx(gy, rel=1)
        #assert z == pytest.approx(gz, rel=1)
    # NOTE(review): the inverse-direction test is disabled wholesale.
    #@pytest.mark.parametrize('image, ground',[
    #    ((512, 512, 0), (-73589.5516508502, 562548.342040933, 2372508.44060771)),
    #    ((100, 100, 0), (-48020.2164819883, 539322.805489926, 2378549.41724731))
    #])
    #def test_ground_to_image(self, model, image, ground):
    #    y, x = model.groundToImage(*ground)
    #    ix, iy, _ = image
    #
    #    assert x == pytest.approx(ix)
    #    assert y == pytest.approx(iy)
| 32.555556 | 104 | 0.585324 | 946 | 0.807167 | 0 | 0 | 523 | 0.446246 | 0 | 0 | 620 | 0.52901 |
64bddadefa8eae379589362085331c22326167a4 | 9,761 | py | Python | glacierbackup/tests/test_init.py | arkhebuz/GlacierBackup | 9f2ccd51e70d860269ac107dfbbf7c4fed51b7d4 | [
"MIT"
] | null | null | null | glacierbackup/tests/test_init.py | arkhebuz/GlacierBackup | 9f2ccd51e70d860269ac107dfbbf7c4fed51b7d4 | [
"MIT"
] | null | null | null | glacierbackup/tests/test_init.py | arkhebuz/GlacierBackup | 9f2ccd51e70d860269ac107dfbbf7c4fed51b7d4 | [
"MIT"
] | null | null | null | import pytest
import json
import os
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
from glacierbackup.command import _construct_argparse_parser
from glacierbackup.jobs import BackupJob
from glacierbackup.database import GBDatabase, GBDatabaseError
def _create_test_backup_files_and_dirs(parent_dir):
root_test_dir = os.path.join(parent_dir, '__test_backup_structure')
os.mkdir(root_test_dir)
dir1 = os.path.join(root_test_dir, 'dir1')
os.mkdir(dir1)
dir2 = os.path.join(root_test_dir, 'dir2')
os.mkdir(dir2)
for char in 'a10':
for adir in (dir1, dir2):
with open(os.path.join(adir, char), 'w') as f:
f.write(char*10**5)
file_list_loc = os.path.join(root_test_dir, '__test_backup_file_list.txt')
paths = [root_test_dir+'/dir1/'+char for char in 'a10']
paths += [root_test_dir+'/dir2/'+char for char in 'a10']
with open(file_list_loc, 'w') as f:
for p in paths:
f.write(p)
f.write('\n')
return file_list_loc
class TemplateSetupTeardown(object):
    # Shared pytest fixture base: builds a throwaway GlacierBackup working
    # area under ~/.glacierbackup/__TESTS (database dir, temp dir, a sample
    # JSON config and a small backup file tree) and removes it afterwards.
    # Subclasses must provide a `keys` dict with AWS credentials before
    # setup_class runs.
    @classmethod
    def setup_class(cls):
        # Startup config
        cls.database_dir = os.path.join(os.path.expanduser('~'), '.glacierbackup/__TESTS')
        cls.database_path = os.path.join(cls.database_dir, 'GB_database.sqlite')
        tmp_dir = os.path.join(cls.database_dir, 'TMP')
        try:
            os.makedirs(tmp_dir)
        except FileExistsError:
            # Leftovers from a previous (aborted) run are reused.
            pass
        # Baseline backup-set configuration written to the sample conf file
        # and compared against values read back from the database in tests.
        cls._cnf = {
            "set_id" : 0,
            "compression_algorithm" : "lzma",
            "temporary_dir": tmp_dir,
            "database_dir": cls.database_dir,
            "public_key": "None",
            "region_name": "eu-west-1",
            "vault_name": "TestVault1",
            "aws_access_key_id": cls.keys["aws_access_key_id"],
            "aws_secret_access_key": cls.keys["aws_secret_access_key"]
        }
        cls.sample_conf_file = os.path.join(cls.database_dir, '_init_conf.json')
        with open(cls.sample_conf_file, 'w') as f:
            json.dump(cls._cnf, f, indent=2)
        # Six dummy files + a file list, used by the register/backup tests.
        cls.list_of_files_path = _create_test_backup_files_and_dirs(cls.database_dir)
    @classmethod
    def teardown_class(cls):
        import shutil
        # Remove the entire throwaway working area.
        shutil.rmtree(cls.database_dir)
class TestGBDatabase(TemplateSetupTeardown):
    """ Tests directly glacierbackup.database.GBDatabase """
    # Credentials are loaded at class-definition time from a local
    # __keys.json file (must exist in the working directory).
    with open("__keys.json", 'r') as f:
        keys = json.load(f)
    def teardown_method(self, method):
        # Each test starts from a fresh (nonexistent) database file.
        if os.path.isfile(self.database_path):
            os.remove(self.database_path)
    def test_operation_protection_on_disconnected_database(self):
        # Operations before connect() must raise rather than fail silently.
        DB = GBDatabase(self.database_path)
        with pytest.raises(GBDatabaseError):
            DB.change('Select * from Jobs')
    def test_database_initialization(self):
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        # initialize() alone does not leave an open connection.
        with pytest.raises(GBDatabaseError):
            CONFIG = DB.read_config_from_db(set_id=0)
        DB.connect()
        CONFIG = DB.read_config_from_db(set_id=0)
        # The stored config must round-trip unchanged.
        for k in self._cnf.keys():
            assert CONFIG[k] == self._cnf[k]
    def test_multiple_close_calls(self):
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        # Should rise no errors
        DB.close()
        DB.connect()
        DB.close()
        DB.close()
        DB.close()
        del DB
    def test_writing_to_Files(self):
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            v = ('abs', 1, 0, 1)
            DB.change('INSERT INTO Files (abs_path, registration_date, file_exists, registered) VALUES (?,?,?,?)', v)
        # Reopen to prove the row was committed, then compare field by field.
        with GBDatabase(self.database_path) as DB:
            row = DB.fetch_row('SELECT * FROM Files WHERE abs_path=?', (v[0],))
            for i, val in enumerate(row):
                assert v[i] == val
    def test_writing_two_identical_abs_path_to_Files(self):
        # abs_path must be unique in Files; the second insert must fail.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            v = ('abs', 1, 0, 1)
            DB.change('INSERT INTO Files (abs_path, registration_date, file_exists, registered) VALUES (?,?,?,?)', v)
            import sqlite3
            with pytest.raises(sqlite3.IntegrityError):
                DB.change('INSERT INTO Files (abs_path, registration_date, file_exists, registered) VALUES (?,?,?,?)', v)
    def test_writing_to_Backups(self):
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            vs = [('abs', 1, 'abc', 1),
                  ('abs', 2, 'ccc', 2),
                  ('abs', 3, 'ddd', 3),]
            DB.change_many('INSERT INTO Backups (abs_path, mod_date, sha256, job_id) VALUES (?,?,?,?)', vs)
        # Reopen and verify all rows round-trip in insertion order.
        with GBDatabase(self.database_path) as DB:
            rows = DB.fetch_all('SELECT * FROM Backups')
            for written, read in zip(vs, rows):
                for v1, v2 in zip(written, read):
                    assert v1 == v2
    def test_writing_duplicates_to_Backups(self):
        # Fully identical rows violate a uniqueness constraint on Backups.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            vs = [('abs', 1, 'abc', 1),
                  ('abs', 1, 'abc', 1),
                  ('abs', 1, 'abc', 1),]
            import sqlite3
            with pytest.raises(sqlite3.IntegrityError):
                DB.change_many('INSERT INTO Backups (abs_path, mod_date, sha256, job_id) VALUES (?,?,?,?)', vs)
    def test_writing_nonsense_to_Backups(self):
        # Inconsistent rows (same path/hash with conflicting dates) must be
        # rejected by the schema's constraints as well.
        DB = GBDatabase(self.database_path)
        DB.initialize(self._cnf)
        with DB:
            vs = [('abs', 1, 'abc', 1),
                  ('abs', 1, 'cba', 1),
                  ('abs', 3, 'abc', 1),]
            import sqlite3
            with pytest.raises(sqlite3.IntegrityError):
                DB.change_many('INSERT INTO Backups (abs_path, mod_date, sha256, job_id) VALUES (?,?,?,?)', vs)
@pytest.mark.incremental
class TestsFunctional(TemplateSetupTeardown):
    """Possible test scenarios often depend on the AG Database state,
    which is created and altered by executing actions/test cases
    earlier. They also simultaniusly depend on the files (i.e.
    backed_files -> archive -> encryped_archive). This could be mitigated
    by preparing entire state from scratch, independently from glacierbackup
    funcionality, by writing custom straight-to-the-state setups or state
    injection. However it would require substantial effort at the very
    early stage of development when things are mostly in flux, so a fixed
    sequence of rather functional than strictly unit tests is used for now.
    It still has the benefit of automated execution and can better pin down
    a faillure location.

    Note that the test order below roughly mimics how glacierbackup would
    be used from CLI. """
    # Credentials loaded at class-definition time from a local __keys.json.
    with open("__keys.json", 'r') as f:
        keys = json.load(f)
    def test_initialization_from_cmd_args(self):
        # `glacierbackup init --genkeys <conf>`: creates the database and an
        # RSA keypair, storing the public key in the config.
        parser = _construct_argparse_parser()
        args = parser.parse_args(['init', '--genkeys', self.sample_conf_file])
        args.func(args)
        DB = GBDatabase(os.path.join(self.database_dir, 'GB_database.sqlite'))
        DB.connect()
        CONFIG = DB.read_config_from_db()
        assert CONFIG['vault_name'] == self._cnf['vault_name']
        assert CONFIG['database_dir'] == self._cnf['database_dir']
        assert CONFIG['temporary_dir'] == self._cnf['temporary_dir']
        assert len(CONFIG['public_key']) > 100 # TODO: Should do better...
        assert os.path.isfile(os.path.join(self.database_dir, 'GB_RSA_private.pem'))
    def test_registering_files_by_file_list(self):
        # `glacierbackup register --filelist ...`: all six files from the
        # fixture tree must land in the Files table.
        database_path = os.path.join(self.database_dir, 'GB_database.sqlite')
        parser = _construct_argparse_parser()
        args = parser.parse_args(['register', '--database', database_path,
                                  '--filelist', self.list_of_files_path])
        args.func(args)
        import sqlite3
        conn = sqlite3.connect(database_path)
        c = conn.cursor()
        c.execute('SELECT * FROM FILES')
        files_in_db = c.fetchall()
        conn.close()
        assert len(files_in_db) == 6
    def test_backup_job_initialization(self):
        # Stored on the class so the following incremental tests can share
        # the same BackupJob instance.
        database_path = os.path.join(self.database_dir, 'GB_database.sqlite')
        self.__class__.DB = GBDatabase(database_path)
        self.__class__.DB.connect()
        self.__class__.BJ = BackupJob(self.__class__.DB, 'asdf')
    def test_backup_job_checkout_files(self):
        self.__class__.BJ.checkout_files()
    def test_backup_job_archive_packing(self):
        self.__class__.BJ.archive_files()
        assert os.path.isfile(self.__class__.BJ.archive)
    def test_backup_job_archive_contents(self):
        # Every path from the file list must be present in the tar archive
        # (tar strips the leading '/', hence the re-prefixing below).
        import tarfile
        tarf = tarfile.open(self.__class__.BJ.archive, 'r')
        with open(self.list_of_files_path, 'r') as f:
            paths1 = [s.strip() for s in f.readlines()]
        paths2 = ['/'+t for t in tarf.getnames()]
        for p in paths1:
            assert p in paths2
    def test_backup_job_encrypt_archive(self):
        self.__class__.BJ.encrypt_archive()
        assert os.path.isfile(self.__class__.BJ.encrypted_archive)
    def test_archive_decryption(self):
        # TODO(review): not implemented yet -- passes vacuously.
        pass
    def test_backup_job_upload_into_glacier(self):
        # Disabled to avoid real AWS Glacier uploads during test runs.
        #~ self.__class__.BJ.upload_into_glacier()
        pass
| 39.358871 | 121 | 0.608647 | 8,619 | 0.883004 | 0 | 0 | 4,854 | 0.497285 | 0 | 0 | 2,432 | 0.249155 |
64be755a7b18906a904b88788aa463f48ea5cce4 | 772 | py | Python | exercicios-python/ex039.py | anavesilva/python-introduction | d85fb9381e348262584fd2942e4818ee822adbe5 | [
"MIT"
] | null | null | null | exercicios-python/ex039.py | anavesilva/python-introduction | d85fb9381e348262584fd2942e4818ee822adbe5 | [
"MIT"
] | null | null | null | exercicios-python/ex039.py | anavesilva/python-introduction | d85fb9381e348262584fd2942e4818ee822adbe5 | [
"MIT"
] | null | null | null | from datetime import date
ano = int(input('Ano de nascimento: '))
print('[ 1 ] Masculino [ 2 ] Feminino')
sexo = int(input('Qual o seu sexo: '))
hoje = date.today().year
idade = hoje - ano
if idade < 18 and sexo == 1:
print('Você tem hoje {} anos e deverá se alistar ao serviço militar em {} anos, no ano de {}.'.format(idade, 18 - idade, ano + 18))
elif idade == 18 and sexo == 1:
print('Você tem hoje {} anos e portanto deverá de alistar ao serviço militar já neste ano de {}.'.format(idade, hoje))
elif idade > 18 and sexo == 1:
print('Você tem {} anos e o seu tempo de alistamento expirou à {} anos, no ano de {}.'.format(idade, idade - 18, ano + 18))
else:
print('Como você é do sexo feminino, não será obrigatório o alistamento no serviço militar.') | 55.142857 | 135 | 0.665803 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 433 | 0.550191 |
64c147ac5042253d20a565d2edd5202d18d6d421 | 484 | py | Python | ignition/utils/tests/test_enum.py | IgnitionProject/ignition | 0eeb3a7878d828bc3c06d2cb2dd781e17776a8a6 | [
"BSD-2-Clause-FreeBSD"
] | 7 | 2015-01-25T18:15:48.000Z | 2022-03-09T17:39:12.000Z | ignition/utils/tests/test_enum.py | IgnitionProject/ignition | 0eeb3a7878d828bc3c06d2cb2dd781e17776a8a6 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ignition/utils/tests/test_enum.py | IgnitionProject/ignition | 0eeb3a7878d828bc3c06d2cb2dd781e17776a8a6 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2019-11-14T01:27:23.000Z | 2019-11-14T01:27:23.000Z | from ignition import Enum
def test_Enum ():
Colours = Enum('red', 'blue', 'green')
Weekdays = Enum('mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun')
pizza_night = Weekdays[4]
game_night = Weekdays['mon']
shirt_colour = Colours.green
assert(pizza_night == Weekdays.fri)
assert(game_night == Weekdays[0])
assert(shirt_colour > Colours.red)
assert(shirt_colour != "green")
multi = Enum(('foo', 'bar'), 'baz')
assert(multi.foo == multi.bar)
| 26.888889 | 68 | 0.617769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.165289 |
64c1d3647fe939dffae3a2fb85734866b5491ca0 | 1,331 | py | Python | map1.py | Dimitrios-Tsiaras/portofolio | c56de7e8ccc2f9dca12017b6139bab2b3cc8f41f | [
"MIT"
] | null | null | null | map1.py | Dimitrios-Tsiaras/portofolio | c56de7e8ccc2f9dca12017b6139bab2b3cc8f41f | [
"MIT"
] | null | null | null | map1.py | Dimitrios-Tsiaras/portofolio | c56de7e8ccc2f9dca12017b6139bab2b3cc8f41f | [
"MIT"
] | null | null | null | import pandas
import folium
data = pandas.read_csv("Volcanoes.txt")
lat = list(data['LAT'])
lon = list(data['LON'])
elev = list(data['ELEV'])
name = list(data["NAME"])
html = """
Volcano name:<br>
<a href="https://www.google.com/search?q=%%22%s%%22" target="_blank">%s</a><br>
Height: %s m
"""
def color_producer(elevation):
    """Map a volcano elevation (metres) to a marker colour.

    green  : below 1000 m
    orange : 1000 m up to (but excluding) 3000 m
    red    : 3000 m and above
    """
    if elevation >= 3000:
        return 'red'
    if elevation >= 1000:
        return 'orange'
    return 'green'
# Base map centred over Kansas, USA; "Stamen Terrain" tiles show relief.
# Renamed from `map`, which shadowed the builtin.
volcano_map = folium.Map(location=[38.58,-99.09], zoom_start=6, tiles="Stamen Terrain")

# Layer 1: one circle marker per volcano, coloured by elevation.  The loop
# variable is `vol_name` so the module-level `name` list is not clobbered.
fgv = folium.FeatureGroup(name='Volcanoes')
for lt, ln, el, vol_name in zip(lat, lon, elev, name):
    # Popup links to a Google search for the volcano's name.
    iframe = folium.IFrame(html=html % (vol_name, vol_name, el), width=200, height=100)
    fgv.add_child(folium.CircleMarker(location=[lt, ln], radius=6, popup=folium.Popup(iframe),
    fill_color = color_producer(el), color ='grey', fill_opacity=0.9))

# Layer 2: country polygons coloured by 2005 population.  Reading the file
# through a context manager closes the handle (the original
# open(...).read() expression leaked it).
with open('world.json', 'r', encoding='utf-8-sig') as geo_file:
    world_geojson = geo_file.read()
fgp = folium.FeatureGroup(name='Population')
fgp.add_child(folium.GeoJson(data=world_geojson,
    style_function=lambda x: {'fillColor':'green' if x['properties']['POP2005'] < 10000000
    else 'yellow' if 10000000 <= x['properties']['POP2005'] < 20000000 else 'red'}))

volcano_map.add_child(fgv)
volcano_map.add_child(fgp)
# Layer control lets the user toggle the two overlays in the browser.
volcano_map.add_child(folium.LayerControl())
volcano_map.save("Map1.html")
64c21f54cec5604ae2bfe98e4820b018dfebed5a | 2,905 | py | Python | src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/private_endpoint_connection_py3.py | limingu/azure-cli-extensions | 1bc29f089f4da42ab8905e440f2f46d6b5b0aa97 | [
"MIT"
] | 2 | 2021-06-05T17:51:26.000Z | 2021-11-17T11:17:56.000Z | src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/private_endpoint_connection_py3.py | limingu/azure-cli-extensions | 1bc29f089f4da42ab8905e440f2f46d6b5b0aa97 | [
"MIT"
] | 1 | 2020-06-12T01:39:40.000Z | 2020-06-12T01:39:40.000Z | src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/private_endpoint_connection_py3.py | anpaz-msft/azure-cli-extensions | 847fd487fe61e83f2a4163a9393edc9555267bc2 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .proxy_resource_py3 import ProxyResource
class PrivateEndpointConnection(ProxyResource):
"""A private endpoint connection.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Fully qualified resource Id for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. Ex-
Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
:vartype type: str
:param private_endpoint: Private endpoint which the connection belongs to.
:type private_endpoint:
~azure.mgmt.cosmosdb.models.PrivateEndpointProperty
:param private_link_service_connection_state: Connection State of the
Private Endpoint Connection.
:type private_link_service_connection_state:
~azure.mgmt.cosmosdb.models.PrivateLinkServiceConnectionStateProperty
:param group_id: Group id of the private endpoint.
:type group_id: str
:param provisioning_state: Provisioning state of the private endpoint.
:type provisioning_state: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpointProperty'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionStateProperty'},
'group_id': {'key': 'properties.groupId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(self, *, private_endpoint=None, private_link_service_connection_state=None, group_id: str=None, provisioning_state: str=None, **kwargs) -> None:
super(PrivateEndpointConnection, self).__init__(**kwargs)
self.private_endpoint = private_endpoint
self.private_link_service_connection_state = private_link_service_connection_state
self.group_id = group_id
self.provisioning_state = provisioning_state
| 45.390625 | 161 | 0.678141 | 2,382 | 0.819966 | 0 | 0 | 0 | 0 | 0 | 0 | 2,081 | 0.716351 |
64c3a770ec74f61297b7a81dce5e68acdf82a4ee | 576 | py | Python | pdfmajor/interpreter/commands/state/PDFGraphicState/PDFColor.py | asosnovsky/pdfmajor | 7e24c64b5b4fdc84c12b2f78dcaab0e1aa07f4ad | [
"MIT"
] | 23 | 2019-01-13T23:32:24.000Z | 2021-07-08T04:29:15.000Z | pdfmajor/interpreter/commands/state/PDFGraphicState/PDFColor.py | asosnovsky/pdfmajor | 7e24c64b5b4fdc84c12b2f78dcaab0e1aa07f4ad | [
"MIT"
] | 3 | 2019-08-09T18:42:01.000Z | 2019-12-13T15:43:24.000Z | pdfmajor/interpreter/commands/state/PDFGraphicState/PDFColor.py | asosnovsky/pdfmajor | 7e24c64b5b4fdc84c12b2f78dcaab0e1aa07f4ad | [
"MIT"
] | 2 | 2020-01-09T11:18:20.000Z | 2020-03-24T06:02:30.000Z | from typing import Tuple, Optional
from .PDFColorSpace import PDFColorSpace
class PDFColor:
def __init__(self, color_space: Optional[PDFColorSpace], *values: float):
self.color_space: Optional[PDFColorSpace] = color_space
self.values: Tuple[float, ...] = values
def copy(self):
return self.__class__(
self.color_space,
*self.values
)
def __repr__(self):
return f"""<PDFColor space="{
None if self.color_space is None else self.color_space.name
}" values={self.values}/>"""
| 30.315789 | 77 | 0.633681 | 498 | 0.864583 | 0 | 0 | 0 | 0 | 0 | 0 | 131 | 0.227431 |
64c3b70bd4a11694d8806b5762ea9c6780b09649 | 1,858 | py | Python | oauth2/serializers.py | tinyms/bopress | 6182c8940ebeb1f7a26c0e1aa62528b9f090b2d9 | [
"Apache-2.0"
] | null | null | null | oauth2/serializers.py | tinyms/bopress | 6182c8940ebeb1f7a26c0e1aa62528b9f090b2d9 | [
"Apache-2.0"
] | null | null | null | oauth2/serializers.py | tinyms/bopress | 6182c8940ebeb1f7a26c0e1aa62528b9f090b2d9 | [
"Apache-2.0"
] | null | null | null | from rest_framework import serializers
from oauth2.models import UserInfo, CompanyInfo, CompanyEmployee, AppCommerceLicense, AppGrantAuthorization
class OAuth2VerifySerializer(serializers.Serializer):
def create(self, validated_data):
return None
def update(self, instance, validated_data):
return instance
code = serializers.CharField(max_length=255, required=True)
class UserInfoSerializer(serializers.HyperlinkedModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = UserInfo
fields = ('union_id', 'nick_name', 'gender', 'city', 'province', 'country', 'avatar_url', 'app_id',
'mobile', 'name', 'address', 'identity_card', 'timestamp', 'user')
class CompanyInfoSerializer(serializers.HyperlinkedModelSerializer):
owner = serializers.ReadOnlyField(source='owner.name')
class Meta:
model = CompanyInfo
fields = ('company_name', 'name', 'mobile', 'logo', 'address', 'phone', 'summary', 'description', 'owner')
class CompanyEmployeeSerializer(serializers.HyperlinkedModelSerializer):
company_name = serializers.ReadOnlyField(source='company.company_name')
employee_name = serializers.ReadOnlyField(source='employee.name')
class Meta:
model = CompanyEmployee
fields = ('company', 'employee', 'level')
class AppCommerceLicenseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = AppCommerceLicense
fields = ('level', 'name', 'price', 'position')
class AppGrantAuthorizationSerializer(serializers.HyperlinkedModelSerializer):
company_name = serializers.ReadOnlyField(source='company.company_name')
class Meta:
model = AppGrantAuthorization
fields = ('app_id', 'app_name', 'start_time', 'end_time', 'level', 'company')
| 33.781818 | 114 | 0.717976 | 1,692 | 0.910657 | 0 | 0 | 0 | 0 | 0 | 0 | 406 | 0.218515 |
64c3cb8887f6597d3c4a2e14e75f1a485f354d20 | 12,288 | py | Python | edm_web1/app/task/utils/contexts.py | zhouli121018/nodejsgm | 0ccbc8acf61badc812f684dd39253d55c99f08eb | [
"MIT"
] | null | null | null | edm_web1/app/task/utils/contexts.py | zhouli121018/nodejsgm | 0ccbc8acf61badc812f684dd39253d55c99f08eb | [
"MIT"
] | 18 | 2020-06-05T18:17:40.000Z | 2022-03-11T23:25:21.000Z | edm_web1/app/task/utils/contexts.py | zhouli121018/nodejsgm | 0ccbc8acf61badc812f684dd39253d55c99f08eb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
import time
import random
from django.db import connections
from django.db.models import Q
from django.http import Http404
from django_redis import get_redis_connection
from app.core.models import Services, CustomerMailbox, CustomerDomain, CustomerDomainMailboxRel, CustomerTrackDomain
from app.address.models import MailList, TaskMailList
from app.address.utils.vars import get_addr_var_fields
from app.template.models import SendTemplate
from app.task.models import SendTask, SendTaskReplyto, SendTaskTpl
from app.task.models import HOUR_SPEED
from app.track.models import StatTask, StatError
######################################################
# 获取任务相关的服务状态
def get_user_service(request):
try:
svs_obj = Services.objects.get(customer=request.user)
if svs_obj.is_share_flag in ('1', '2'):
custQtyValid = svs_obj.qty_valid
is_service_disabled = True if svs_obj.disabled == '1' else False
if svs_obj.is_share_flag == '4':
custQtyValid = request.user.parent.service().qty_valid
is_service_disabled = True if request.user.parent.service().disabled == '1' else False
if svs_obj.is_share_flag == '3':
custQtyValid1 = request.user.parent.service().qty_valid
custQtyValid2 = svs_obj.limit_qty
custQtyValid = custQtyValid1 if custQtyValid1 < custQtyValid2 else custQtyValid2
is_service_disabled = True if request.user.parent.service().disabled == '1' else False
except:
custQtyValid = 0
is_service_disabled = False
return custQtyValid, is_service_disabled
######################################################
# 获取 添加任务的初始化信息
def get_task_add_context(request):
# 任务名称
send_name = '{}-{}-{}'.format(time.strftime('%Y%m%d%H%M%S'), request.user.id, random.randint(10, 100))
# 地址池
maillist_objs = MailList.objects.filter(
Q(customer=request.user) | Q(sub_share_maillist__user=request.user)).filter(
isvalid=True, is_smtp=False).order_by('-id')[:500]
# maillist_objs = MailList.objects.filter(customer=request.user, isvalid=True, is_smtp=False).order_by('-id')[:300]
# 获取域名
domain_list = CustomerMailbox.objects.filter(
customer=request.user, disabled='0').values_list('domain', flat=True).distinct()
domain_objs = CustomerDomain.objects.filter(domain__in=list(domain_list), customer_id__in=[0, request.user.id])
# 共享域名获取
ctype = CustomerDomainMailboxRel.objects.get_content_type('domain')
share_domain_ids = CustomerDomainMailboxRel.objects.filter(customer=request.user, content_type=ctype).values_list(
'object_id', flat=True)
share_domain_objs = CustomerDomain.objects.filter(customer=request.user.parent, id__in=share_domain_ids)
# 获取跟踪域名
track_domain_list = CustomerTrackDomain.objects.filter(customer=request.user).order_by('-id')
track_domain = track_domain_list[0].domain if track_domain_list else None
# 复制
task_id = request.GET.get('task_id', '')
task_obj = None
if task_id:
task_obj = SendTask.objects.filter(user=request.user, id=task_id).first()
task_copy_template_ids = task_obj.get_copy_template_ids() if task_obj else []
c_send_qty_type = 'all'
c_send_qty = 0
c_send_qty_start = 0
c_send_domain = 'all'
c_send_fullname = None
c_send_replyto = None
c_track_status = 0
c_track_domain = None
c_is_need_receipt = False
# 发送速度
c_hour_speed = 5000
# AB 发送
c_is_ab = False
c_ab_appraise_qty = 5000
c_ab_content_limit = 2
c_send_maillist_ids = []
if task_obj:
if task_obj.send_maillist_id == 0:
c_send_maillist_ids = TaskMailList.objects.filter(send=task_obj).values_list("maillist_id", flat=True)
else:
c_send_maillist_ids = [task_obj.send_maillist_id]
c_send_qty = task_obj.send_qty
c_send_qty_start = task_obj.send_qty_start
if c_send_qty_start != 0:
c_send_qty_type = 'limit'
c_send_qty_start = c_send_qty_start if c_send_qty_start else 1
if task_obj.send_acct_type == 'domain':
c_send_domain = task_obj.send_acct_domain
c_send_fullname = task_obj.send_fullname
c_send_replyto = task_obj.send_replyto
c_track_status = task_obj.track_status
c_track_domain = task_obj.track_domain
c_is_need_receipt = task_obj.is_need_receipt
c_hour_speed = task_obj.hour_speed
c_is_ab = task_obj.is_ab
c_ab_appraise_qty = task_obj.ab_appraise_qty
c_ab_content_limit = task_obj.ab_content_limit
# 失败地址重发(拒绝投递重发)
errtype = request.GET.get('errtype', '')
status = request.GET.get('status', '')
retry_flag = False
if status == 'retry' and errtype == '5' and task_obj:
if task_obj.send_maillist_id == 0:
T_send_maillist_ids = list(TaskMailList.objects.filter(send=task_obj).values_list("maillist_id", flat=True))
else:
T_send_maillist_ids = [task_obj.send_maillist_id]
if not T_send_maillist_ids:
raise Http404
retry_flag = True
_mobj = MailList.objects.create(
customer=request.user,
subject=u'{}_失败重发'.format(task_obj.send_name)
)
c_send_maillist_id = _mobj.id
stats = StatTask.objects.filter(
customer=request.user, task_ident=task_obj.send_name).values_list(
'id', flat=True)
lists = StatError.objects.filter(
customer=request.user, task_id__in=stats).filter(
error_type='5').values_list(
'recipient', flat=True).distinct()
_mobj.count_real = len(lists)
_mobj.save()
created = time.strftime("%Y-%m-%d %H:%M:%S")
res = [(c_send_maillist_id, r, created) for r in lists]
sql = "INSERT INTO `mm-pool`.`ml_subscriber_{}` (list_id, address, created) VALUES (%s, %s, %s)".format(
request.user.id)
cr = connections['mm-pool'].cursor()
cr.executemany(sql, res)
var_lists = get_addr_var_fields(cr, request.user.id)
select_var_str = ','.join(var_lists)
update_var_str = ', '.join('t1.{0}=t2.{0}'.format(varT) for varT in var_lists)
update_sql = """
UPDATE ml_subscriber_{0} t1, (
SELECT address, list_id, fullname, sex, birthday, phone, area, {3}
FROM ml_subscriber_{0} WHERE list_id in ({1})
) t2
SET t1.fullname=t2.fullname, t1.sex=t2.sex, t1.birthday=t2.birthday, t1.phone=t2.phone, t1.area=t2.area, {4}
WHERE t1.address = t2.address AND t1.list_id={2};
""".format(request.user.id, ",".join(map(str, T_send_maillist_ids)), c_send_maillist_id, select_var_str, update_var_str)
cr.execute(update_sql)
task_copy_template_ids = []
c_send_maillist_ids = [c_send_maillist_id]
# 指定地址
replyto_obj, _c = SendTaskReplyto.objects.get_or_create(user=request.user)
c_send_replyto = c_send_replyto if c_send_replyto else replyto_obj.send_replyto
track_domain = c_track_domain if c_track_domain else track_domain
template_ids = ','.join(map(str, task_copy_template_ids)) if task_copy_template_ids else request.GET.get(
'template_ids', '')
select_template_ids = []
if template_ids:
select_template_ids = map(int, template_ids.split(','))
select_template_ids = task_copy_template_ids if task_copy_template_ids else select_template_ids
# 加载模板
template_existed_1, load_template_existed = False, True
# lists = SendTemplate.objects.filter(user=request.user, isvalid=True, result__in=['green', 'yellow', 'red_pass'])
lists = SendTemplate.objects.filter(
Q(user=request.user) | Q(sub_share_template__user=request.user)).filter(
isvalid=True, result__in=['green', 'yellow', 'red_pass'])
template_lists = lists.filter(id__in=select_template_ids)
if template_lists:
template_existed_1 = True
exclude_template_existed = lists.exclude(id__in=select_template_ids).exists()
template_existed = True if ( template_existed_1 or exclude_template_existed ) else False
# 获取测试通道数量
redis = get_redis_connection()
test_channal_qty = redis.hget('channel:cfg', 'qty') or 30
context = {
'send_name': send_name,
'template_lists': template_lists,
'maillist_objs': maillist_objs,
'domain_objs': domain_objs,
'share_domain_objs': share_domain_objs,
'track_domain_list': track_domain_list,
'track_domain': track_domain,
# 'custQtyValid': custQtyValid,
# 'is_service_disabled': is_service_disabled,
'template_ids': template_ids,
'template_existed': template_existed,
'exclude_template_existed': exclude_template_existed,
"c_send_maillist_ids": c_send_maillist_ids,
'c_send_qty_type': c_send_qty_type,
'c_send_qty': c_send_qty,
'c_send_qty_start': c_send_qty_start,
'c_send_domain': c_send_domain,
'c_send_fullname': c_send_fullname,
'c_send_replyto': c_send_replyto,
'c_track_status': c_track_status,
'c_is_need_receipt': c_is_need_receipt,
'retry_flag': retry_flag,
'test_channal_qty': test_channal_qty,
# 发送速度
"c_hour_speed": c_hour_speed,
"hour_speeds": HOUR_SPEED,
# AB 发送
"c_is_ab": c_is_ab,
"c_ab_appraise_qty": c_ab_appraise_qty,
"c_ab_content_limit": c_ab_content_limit,
}
return context
######################################################
# 获取 修改任务的初始化信息
def get_task_modify_context(request, task_id):
# 地址池
# maillist_objs = MailList.objects.filter(customer=request.user, is_smtp=False, isvalid=True)[:300]
maillist_objs = MailList.objects.filter(
Q(customer=request.user) | Q(sub_share_maillist__user=request.user)).filter(
isvalid=True, is_smtp=False).order_by('-id')[:500]
# 加载模板
select_template_ids = SendTaskTpl.objects.filter(task_id=task_id).values_list('template_id', flat=True)
template_existed_1, load_template_existed = False, True
template_ids_str = ''
if select_template_ids:
template_ids_str = ','.join(map(str, select_template_ids))
lists = SendTemplate.objects.filter(
Q(user=request.user) | Q(sub_share_template__user=request.user)).filter(
result__in=['green', 'yellow', 'red_pass']).filter(isvalid=True)
template_lists = lists.filter(id__in=select_template_ids)
if template_lists:
template_existed_1 = True
exclude_template_existed = lists.exclude(id__in=select_template_ids).exists()
template_existed = True if ( template_existed_1 or exclude_template_existed ) else False
# 获取域名
domain_list = CustomerMailbox.objects.filter(
customer=request.user, disabled='0').values_list('domain', flat=True).distinct()
domain_objs = CustomerDomain.objects.filter(domain__in=list(domain_list), customer_id__in=[0, request.user.id])
# domain_objs = CustomerDomain.objects.filter(customer=request.user, status='Y')
# 共享域名获取
ctype = CustomerDomainMailboxRel.objects.get_content_type('domain')
share_domain_ids = CustomerDomainMailboxRel.objects.filter(customer=request.user, content_type=ctype).values_list(
'object_id', flat=True)
share_domain_objs = CustomerDomain.objects.filter(customer=request.user.parent, id__in=share_domain_ids)
# 获取跟踪域名
track_domain_list = CustomerTrackDomain.objects.filter(customer=request.user).order_by('-id')
context = {
# 'task_obj': obj,
'maillist_objs': maillist_objs,
'domain_objs': domain_objs,
'share_domain_objs': share_domain_objs,
# 'c_send_maillist_ids': task_tools.get_modify_maillistid(obj),
'track_domain_list': track_domain_list,
# 'custQtyValid': custQtyValid,
# 'is_service_disabled': is_service_disabled,
'template_lists': template_lists,
'template_ids': select_template_ids,
'template_existed': template_existed,
'exclude_template_existed': exclude_template_existed,
'template_ids_str': template_ids_str,
# 发送速度
"hour_speeds": HOUR_SPEED,
}
return context
| 42.815331 | 128 | 0.684163 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,736 | 0.218008 |
64c51cd27d512080218a2d740ce5b99c9116f45a | 1,632 | py | Python | unit_tests/test_objects.py | carlos3dx/hash_code_practice | 6ab27074e2183f511348afcc10827cbefa6891af | [
"MIT"
] | 1 | 2021-02-13T16:02:55.000Z | 2021-02-13T16:02:55.000Z | unit_tests/test_objects.py | carlos3dx/hash_code_practice | 6ab27074e2183f511348afcc10827cbefa6891af | [
"MIT"
] | null | null | null | unit_tests/test_objects.py | carlos3dx/hash_code_practice | 6ab27074e2183f511348afcc10827cbefa6891af | [
"MIT"
] | null | null | null | from components.competition import Competition
from components.pizzeria import Pizzeria
class TestObjects:
def test_two_identical_pizzeria_objects(self):
p1 = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
p2 = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
assert p1 == p2
def test_two_different_pizzeria_objects(self):
p1 = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
p2 = Pizzeria({'p1': [1, 2, 3, 0], 'p2': [4, 5, 6]}, 7)
assert p1 != p2
def test_pizzeria_against_other_object(self):
p1 = Pizzeria({'p1': [1, 2, 3], 'p2': [4, 5, 6]}, 6)
p2 = 'mock'
assert p1 != p2
def test_two_identical_competition_objects(self):
c1 = Competition(3, 4, 5)
c2 = Competition(3, 4, 5)
assert c1 == c2
def test_two_different_competition_objects(self):
c1 = Competition(5, 4, 3)
c2 = Competition(3, 4, 5)
assert c1 != c2
def test_competition_against_other_object(self):
c1 = Competition(5, 4, 3)
c2 = 'mock'
assert c1 != c2
def test_pizzeria_reverse_index_properly_constructed(self):
pizzas = {0: [0, 1, 2],
1: [3, 4, 5],
2: [6, 3, 1],
3: [4, 3, 5],
4: [6, 5]}
expected_reverse_index = {
0: [0],
1: [0, 2],
2: [0],
3: [1, 2, 3],
4: [1, 3],
5: [1, 3, 4],
6: [2, 4]
}
pizzeria = Pizzeria(pizzas, 7)
assert pizzeria.ingredients_reverse_index == expected_reverse_index
| 30.222222 | 75 | 0.501225 | 1,541 | 0.94424 | 0 | 0 | 0 | 0 | 0 | 0 | 52 | 0.031863 |
64c63c1ab341cb60c3239568cdb0617b55e8a74e | 1,979 | py | Python | restrictive_growth_strings.py | oversj96/PyKabe | 031c11b995b5e226a4aed1fbcb31f876661c4c0a | [
"MIT"
] | 1 | 2020-03-12T14:07:12.000Z | 2020-03-12T14:07:12.000Z | restrictive_growth_strings.py | oversj96/PyKabe | 031c11b995b5e226a4aed1fbcb31f876661c4c0a | [
"MIT"
] | null | null | null | restrictive_growth_strings.py | oversj96/PyKabe | 031c11b995b5e226a4aed1fbcb31f876661c4c0a | [
"MIT"
] | null | null | null | """restrictive_growth_strings.py: Constructs the lexicographic
restrictive growth string representation of all the way to partition a set,
given the length of the set."""
__author__ = "Justin Overstreet"
__copyright__ = "oversj96.github.io"
def set_to_zero(working_set, index):
"""Given a set and an index, set all elements to 0 after the index."""
if index == len(working_set) - 1:
return working_set
else:
for i in range(index + 1, len(working_set)):
working_set[i] = 0
return working_set
def update_b_row(a_row, b_row):
"""Update b row to reflect the max value of a sequence range in row a"""
for i in range(1, len(a_row)):
b_row[i - 1] = max(a_row[:i]) + 1
return b_row
def restrictive_growth_strings(length):
"""Returns the set of all partitions of a set in a lexicographic integer format.
The algorithm is based on Donald Knuth's volume 4A on combinatoric algorithms.
Algorithm H."""
n = length - 1
a_string = [0 for i in range(0, length)]
b_string = [1 for i in range(0, n)]
lexico_string = [a_string.copy()]
incrementable = True
while incrementable:
incrementable = False
for index in range(n, 0, -1):
if a_string[index] < n and a_string[index] < b_string[index - 1]:
incrementable = True
a_string[index] += 1
a_string = set_to_zero(a_string, index)
b_string = update_b_row(a_string, b_string)
lexico_string.append(a_string.copy())
break
return lexico_string
if __name__ == "__main__":
"""A quick way to check if the module works correctly is to compare
the partition count to the bell number of the correct 'n' size.
For a set of 7 elements, the partition count should be 877 by Bell's numbers."""
strings = restrictive_growth_strings(7)
for string in strings:
print(string)
print(len(strings))
| 35.339286 | 84 | 0.649318 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 766 | 0.387064 |
64c641b81944ce773cccc5c59fdd5fec1026b816 | 22,358 | py | Python | examples/pvmldemo.py | claudio-unipv/pvml | 9f583c2658218003bba2efa70cdbb7499b22d843 | [
"MIT"
] | 3 | 2020-03-30T11:17:17.000Z | 2021-05-15T10:27:30.000Z | examples/pvmldemo.py | claudio-unipv/pvml | 9f583c2658218003bba2efa70cdbb7499b22d843 | [
"MIT"
] | null | null | null | examples/pvmldemo.py | claudio-unipv/pvml | 9f583c2658218003bba2efa70cdbb7499b22d843 | [
"MIT"
] | 2 | 2020-07-15T09:42:42.000Z | 2021-09-14T07:24:50.000Z | #!/usr/bin/env python3
import pvml
import numpy as np
import matplotlib.pyplot as plt
import argparse
from itertools import zip_longest
# Map from the command-line names (the --normalization option) to the
# corresponding pvml feature-normalization functions.  "none" is the
# identity: with a single array it unpacks it, so call sites can treat
# it uniformly with the pvml functions.
_NORMALIZATION = {
    "none": lambda *X: (X[0] if len(X) == 1 else X),
    "meanvar": pvml.meanvar_normalization,
    "minmax": pvml.minmax_normalization,
    "maxabs": pvml.maxabs_normalization,
    "l2": pvml.l2_normalization,
    "l1": pvml.l1_normalization,
    "whitening": pvml.whitening,
    "pca": pvml.pca
}
def parse_args():
    """Parse and return the command-line arguments of the demo.

    Returns the argparse namespace.  Note that '--lambda' is stored as
    'lambda_' and '--class' as 'class_' to avoid clashing with Python
    keywords.
    """
    # Fixed: the text must be passed as 'description'.  The first
    # positional parameter of ArgumentParser is 'prog' (the program
    # name shown in the usage line), so passing the description there
    # garbled the --help output.
    parser = argparse.ArgumentParser(description="Classification demo")
    a = parser.add_argument
    a("-r", "--lr", type=float, default=0.01,
      help="learning rate (%(default)g)")
    a("-l", "--lambda", type=float, dest="lambda_", default=0,
      help="regularization coefficient (%(default)g)")
    a("-s", "--steps", type=int, default=10000,
      help="maximum number of training iterations (%(default)d)")
    a("-p", "--plot-every", type=int, default=100,
      help="frequency of plotting training data (%(default)d)")
    a("-t", "--test", help="test set")
    a("-f", "--features", help="Comma-separated feature columns")
    a("-n", "--normalization", choices=_NORMALIZATION.keys(),
      default="none", help="Feature normalization")
    a("-c", "--class", type=int, default=-1, dest="class_",
      help="Class column")
    a("--seed", type=int, default=171956,
      help="Random seed")
    a("--confusion-matrix", "-C", action="store_true",
      help="Show the confusion matrix.")
    a("--dump", action="store_true",
      help="Save the decision boundary and other data")
    a("--nodraw", action="store_true",
      help="Skip drawing the plots")
    a("-m", "--model", choices=_MODELS.keys(), default="logreg",
      help="Classification model")
    a("-k", "--kernel", choices=["rbf", "polynomial"], default="rbf",
      help="Kernel function")
    a("--kernel-param", type=float, default=2,
      help="Parameter of the kernel")
    a("--knn-k", type=int, default=0, help="KNN neighbors (default auto)")
    a("--classtree-minsize", type=int, default=1,
      help="Classification tree minimum node size (%(default)d)")
    a("--classtree-diversity", default="gini",
      choices=["gini", "entropy", "error"],
      help="Classification tree diversity function (%(default)s)")
    a("--classtree-cv", type=int, default=5,
      help="Cross-validation folds used for pruning (%(default)d)")
    a("--mlp-hidden", default="",
      help="Comma-separated list of number of hidden neurons")
    a("--mlp-momentum", type=float, default=0.99,
      help="Momentum term (%(default)g)")
    a("--mlp-batch", type=int,
      help="Batch size (default: use all training data)")
    a("train", help="training set")
    return parser.parse_args()
class DemoModel:
    """Base class for all the demo classifiers.

    It implements the training loop with periodic evaluation,
    plotting and (optional) dumping of the results.  Concrete models
    override train_step, inference and, optionally, loss.
    """

    def __init__(self, args, binary, iterative=True):
        """Store the relevant command-line options.

        binary signals that the model handles only two classes;
        iterative is False for closed-form models that complete
        training in a single call to train_step.
        """
        self.lr = args.lr
        self.lambda_ = args.lambda_
        self.binary = binary
        self.iterative = iterative
        self.plot_every = args.plot_every
        self.draw = not args.nodraw
        self.confusion_matrix = args.confusion_matrix
        self.dump = args.dump

    def train(self, X, Y, Xtest, Ytest, steps):
        """Run the training loop with periodic evaluation.

        Trains for at most `steps` iterations, pausing every
        `plot_every` iterations to measure accuracy and loss on the
        training data (and on the test data, when given), refresh the
        plots and print the progress.  Xtest and Ytest may be None.
        """
        st = self.plot_every
        iterations = []
        train_acc = []
        test_acc = []
        train_loss = []
        test_loss = []
        print("Step Train", "" if Xtest is None else "Test")
        for step in range(st, steps + st, st):
            self.train_step(X, Y, st)
            iterations.append(step)
            Z, P = self.inference(X)
            train_acc.append(100 * (Z == Y).mean())
            train_loss.append(self.loss(Y, P))
            if Xtest is not None:
                Ztest, Ptest = self.inference(Xtest)
                test_acc.append(100 * (Ztest == Ytest).mean())
                test_loss.append(self.loss(Ytest, Ptest))
            self.plot_curves(0, "Accuracy (%)", iterations, train_acc,
                             test_acc)
            self.plot_curves(1, "Loss", iterations, train_loss, test_loss)
            self.plot_confusion(4, "Confusion matrix (train)", Z, Y)
            # Data and decision boundaries can only be drawn for
            # two-dimensional features.
            if X.shape[1] == 2:
                self.plot_data(2, "Training set", X, Y)
                if Xtest is not None:
                    self.plot_data(3, "Test set", Xtest, Ytest)
            if Xtest is None:
                print("{} {:.2f}%".format(step, train_acc[-1]))
            else:
                self.plot_confusion(5, "Confusion matrix (test)", Ztest, Ytest)
                print("{} {:.2f}% {:.2f}%".format(step, train_acc[-1],
                                                  test_acc[-1]))
            plt.pause(0.0001)
            # Stop after one pass for closed-form models, or as soon
            # as the user closes figure 0.
            if not self.iterative or (self.draw and not plt.fignum_exists(0)):
                break
        if self.dump:
            # Save the training curves as columns of a text file
            # (missing test values are left empty).
            with open("dump.txt", "wt") as f:
                for t in zip_longest(iterations, train_acc, test_acc,
                                     train_loss, test_loss):
                    row = (x if x is not None else "" for x in t)
                    print("{} {} {} {} {}".format(*row), file=f)

    def plot_curves(self, fignum, title, iters, train, test):
        """Plot the training and test curves on the given figure.

        None entries (produced by models without a loss) are filtered
        out; nothing is drawn when both curves end up empty.
        """
        train = [x for x in train if x is not None]
        test = [x for x in test if x is not None]
        if not self.draw or (not train and not test):
            return
        plt.figure(fignum)
        plt.clf()
        plt.title(title)
        plt.xlabel("Iterations")
        if train:
            plt.plot(iters, train)
        if test:
            plt.plot(iters, test)
            plt.legend(["train", "test"])

    def plot_data(self, fignum, title, X, Y, resolution=200):
        """Scatter the 2D data and draw the decision boundary.

        The boundary is traced by evaluating the model on a
        resolution x resolution grid covering the axes of the scatter
        plot.
        """
        if not self.draw:
            return
        plt.figure(fignum)
        plt.clf()
        plt.title(title)
        plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.coolwarm)
        xmin, xmax = plt.gca().get_xlim()
        ymin, ymax = plt.gca().get_ylim()
        ax = np.linspace(xmin, xmax, resolution)
        ay = np.linspace(ymin, ymax, resolution)
        gx, gy = np.meshgrid(ax, ay)
        data = np.vstack((gx.reshape(-1), gy.reshape(-1))).T
        v = self.inference(data)[1]
        if v.ndim == 1:
            # Binary scores: the boundary is the 0.5 level set.
            v = v.reshape(gx.shape)
            plt.contour(gx, gy, v, [0.5], cmap=plt.cm.coolwarm)
            self.dump_contour(gx, gy, v - 0.5, title)
        elif v.shape[1] == 2:
            # Two columns of scores: contour the zero level of their
            # difference.
            v = v[:, 0] - v[:, 1]
            v = v.reshape(gx.shape)
            plt.contour(gx, gy, v, [0.0], cmap=plt.cm.coolwarm)
            self.dump_contour(gx, gy, v, title)
        else:
            # Multiclass: draw the boundaries between consecutive
            # predicted classes.
            values = np.arange(v.shape[1] - 1) + 0.5
            v = v.argmax(1)
            v = v.reshape(gx.shape)
            plt.contour(gx, gy, v, values, cmap=plt.cm.coolwarm)

    def dump_contour(self, gx, gy, v, title):
        """Save the grid of decision values to a text file (if --dump)."""
        if self.dump:
            with open(f"contour-{title}.txt".replace(" ", "_"), "w") as f:
                for i in range(v.shape[0]):
                    for j in range(v.shape[1]):
                        print(gx[i, j], gy[i, j], v[i, j], file=f)
                    print(file=f)

    def plot_confusion(self, fignum, title, predictions, labels):
        """Draw a row-normalized (percentage) confusion matrix."""
        if not self.draw or not self.confusion_matrix:
            return
        klasses = max(predictions.max(), labels.max()) + 1
        plt.figure(fignum)
        plt.clf()
        plt.title(title)
        # Count the (label, prediction) pairs, then normalize each row
        # to percentages (guarding against empty rows).
        cmat = np.bincount(klasses * labels + predictions,
                           minlength=klasses ** 2)
        cmat = cmat.reshape(klasses, klasses)
        cmat = 100 * cmat / np.maximum(1, cmat.sum(1, keepdims=True))
        im = plt.imshow(cmat, vmin=0, vmax=100, cmap="OrRd")
        plt.gca().set_xticks(np.arange(klasses))
        plt.gca().set_yticks(np.arange(klasses))
        colors = ("black", "white")
        for i in range(klasses):
            for j in range(klasses):
                val = cmat[i, j]
                # White text on dark (high-valued) cells for contrast.
                color = (colors[0] if val < 50 else colors[1])
                im.axes.text(j, i, "%.1f" % val, color=color,
                             horizontalalignment="center",
                             verticalalignment="center")

    def train_step(self, X, Y, steps):
        """Train the model for the given number of steps (subclass hook)."""
        pass

    def inference(self, X):
        """Return predicted labels and scores (subclass hook)."""
        pass

    def loss(self, Y, P):
        """Return the training loss, or None when undefined (subclass hook)."""
        pass
# Registry mapping command-line model names to their classes.
_MODELS = {}


def _register_model(name):
    """Return a class decorator that registers a model under *name*."""
    def decorator(cls):
        _MODELS[name] = cls
        return cls
    return decorator
@_register_model("logreg")
class LogisticRegressionModel(DemoModel):
def __init__(self, args):
super().__init__(args, True)
self.w = None
self.b = 0
def train_step(self, X, Y, steps):
ret = pvml.logreg_train(X, Y, lr=self.lr,
lambda_=self.lambda_, steps=steps,
init_w=self.w, init_b=self.b)
self.w, self.b = ret
def inference(self, X):
P = pvml.logreg_inference(X, self.w, self.b)
return (P > 0.5).astype(int), P
def loss(self, Y, P):
return pvml.binary_cross_entropy(Y, P)
@_register_model("logreg_l1")
class LogisticRegressionL1Model(LogisticRegressionModel):
def train_step(self, X, Y, steps):
ret = pvml.logreg_l1_train(X, Y, lr=self.lr,
lambda_=self.lambda_, steps=steps,
init_w=self.w, init_b=self.b)
self.w, self.b = ret
@_register_model("ksvm")
class KernelSVMModel(DemoModel):
def __init__(self, args):
super().__init__(args, True)
self.alpha = None
self.b = 0
self.Xtrain = None
self.kfun = args.kernel
self.kparam = args.kernel_param
def train_step(self, X, Y, steps):
self.Xtrain = X
ret = pvml.ksvm_train(X, Y, self.kfun, self.kparam,
lr=self.lr, lambda_=self.lambda_,
steps=steps, init_alpha=self.alpha,
init_b=self.b)
self.alpha, self.b = ret
def inference(self, X):
ret = pvml.ksvm_inference(X, self.Xtrain, self.alpha, self.b,
self.kfun, self.kparam)
labels, logits = ret
return labels, logits + 0.5
def loss(self, Y, P):
return pvml.hinge_loss(Y, P - 0.5)
@_register_model("svm")
class LinearSVMModel(DemoModel):
def __init__(self, args):
super().__init__(args, True)
self.w = None
self.b = 0
def train_step(self, X, Y, steps):
ret = pvml.svm_train(X, Y, lr=self.lr, lambda_=self.lambda_,
steps=steps, init_w=self.w,
init_b=self.b)
self.w, self.b = ret
def inference(self, X):
labels, logits = pvml.svm_inference(X, self.w, self.b)
return labels, logits + 0.5
def loss(self, Y, P):
return pvml.hinge_loss(Y, P - 0.5)
@_register_model("multinomial")
class MultinomialLogisticRegressionModel(DemoModel):
def __init__(self, args):
super().__init__(args, False)
self.w = None
self.b = None
def train_step(self, X, Y, steps):
self.w, self.b = pvml.multinomial_logreg_train(
X, Y, lr=self.lr,
lambda_=self.lambda_,
steps=steps, init_w=self.w,
init_b=self.b)
def inference(self, X):
P = pvml.multinomial_logreg_inference(X, self.w, self.b)
Z = np.argmax(P, 1)
return Z, P
def loss(self, Y, P):
return pvml.cross_entropy(Y, P)
@_register_model("ovo_svm")
class OvoSVMModel(DemoModel):
def __init__(self, args):
super().__init__(args, False)
self.W = None
self.b = None
def train_step(self, X, Y, steps):
ret = pvml.one_vs_one_svm_train(X, Y, lr=self.lr, lambda_=self.lambda_,
steps=steps, init_w=self.W,
init_b=self.b)
self.W, self.b = ret
def inference(self, X):
return pvml.one_vs_one_svm_inference(X, self.W, self.b)
@_register_model("ovr_svm")
class OvrSVMModel(DemoModel):
def __init__(self, args):
super().__init__(args, False)
self.W = None
self.b = None
def train_step(self, X, Y, steps):
ret = pvml.one_vs_rest_svm_train(X, Y, lr=self.lr, lambda_=self.lambda_,
steps=steps, init_w=self.W,
init_b=self.b)
self.W, self.b = ret
def inference(self, X):
return pvml.one_vs_rest_svm_inference(X, self.W, self.b)
@_register_model("ovo_ksvm")
class OvoKSVMModel(DemoModel):
def __init__(self, args):
super().__init__(args, False)
self.Xtrain = None
self.alpha = None
self.b = None
self.kfun = args.kernel
self.kparam = args.kernel_param
def train_step(self, X, Y, steps):
self.Xtrain = X.copy()
ret = pvml.one_vs_one_ksvm_train(X, Y, self.kfun, self.kparam,
lr=self.lr, lambda_=self.lambda_,
steps=steps, init_alpha=self.alpha,
init_b=self.b)
self.alpha, self.b = ret
def inference(self, X):
return pvml.one_vs_one_ksvm_inference(X, self.Xtrain, self.alpha, self.b,
self.kfun, self.kparam)
@_register_model("ovr_ksvm")
class OvrKSVMModel(DemoModel):
    """Kernel SVM trained with the one-vs-rest multiclass strategy."""

    def __init__(self, args):
        super().__init__(args, False)
        self.Xtrain = None
        # Dual coefficients and bias, created by the first train_step.
        self.alpha = self.b = None
        self.kfun = args.kernel
        self.kparam = args.kernel_param

    def train_step(self, X, Y, steps):
        """Continue kernel-SVM training for `steps` iterations."""
        # The kernel machine needs the training samples again at inference.
        self.Xtrain = X.copy()
        self.alpha, self.b = pvml.one_vs_rest_ksvm_train(
            X, Y, self.kfun, self.kparam,
            lr=self.lr, lambda_=self.lambda_,
            steps=steps, init_alpha=self.alpha, init_b=self.b)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return pvml.one_vs_rest_ksvm_inference(
            X, self.Xtrain, self.alpha, self.b, self.kfun, self.kparam)
@_register_model("hgda")
class HeteroscedasticGDA(DemoModel):
    """Gaussian discriminant analysis with per-class (inverse) covariances."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.means = None
        # Bug fix: this attribute was initialized as `icovs` but written and
        # read as `invcovs` by train_step/inference; use one consistent name.
        self.invcovs = None
        self.priors = None

    def train_step(self, X, Y, steps):
        """Estimate means, inverse covariances and priors (single-shot)."""
        ret = pvml.hgda_train(X, Y)
        self.means, self.invcovs, self.priors = ret

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        labels, scores = pvml.hgda_inference(X, self.means, self.invcovs,
                                             self.priors)
        return labels, scores
@_register_model("ogda")
class OmoscedasticGDA(DemoModel):
    """GDA variant with a linear decision rule (pvml.ogda_*)."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.w = self.b = None

    def train_step(self, X, Y, steps):
        """Fit the linear parameters in a single shot (`steps` is ignored)."""
        self.w, self.b = pvml.ogda_train(X, Y)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return pvml.ogda_inference(X, self.w, self.b)
@_register_model("mindist")
class MinimumDistanceClassifier(DemoModel):
    """Classify each sample by the nearest class mean."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.means = None

    def train_step(self, X, Y, steps):
        """Compute the per-class means (single-shot training)."""
        self.means = pvml.mindist_train(X, Y)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return pvml.mindist_inference(X, self.means)
@_register_model("categorical_nb")
class CategoricalNaiveBayes(DemoModel):
    """Naive Bayes classifier for categorical features."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.probs = None
        self.priors = None

    def train_step(self, X, Y, steps):
        """Estimate conditional probabilities and class priors (single-shot)."""
        ret = pvml.categorical_naive_bayes_train(X, Y)
        self.probs, self.priors = ret

    def inference(self, X):
        """Return (labels, scores) for the samples in X.

        Fixed: the result was unpacked into unused locals before being
        returned; return the tuple directly instead.
        """
        return pvml.categorical_naive_bayes_inference(X, self.probs,
                                                      self.priors)
@_register_model("multinomial_nb")
class MultinomialNaiveBayes(DemoModel):
    """Naive Bayes classifier for multinomial (count) features."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.w = None
        self.b = None

    def train_step(self, X, Y, steps):
        """Estimate the linear parameters (single-shot training)."""
        ret = pvml.multinomial_naive_bayes_train(X, Y)
        self.w, self.b = ret

    def inference(self, X):
        """Return (labels, scores) for the samples in X.

        Fixed: the result was unpacked into unused locals before being
        returned; return the tuple directly instead.
        """
        return pvml.multinomial_naive_bayes_inference(X, self.w, self.b)
@_register_model("gaussian_nb")
class GaussianNaiveBayes(DemoModel):
    """Naive Bayes with per-feature Gaussian likelihoods."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.means = None
        self.vars = None
        self.priors = None

    def train_step(self, X, Y, steps):
        """Estimate means, variances and priors (single-shot training)."""
        self.means, self.vars, self.priors = \
            pvml.gaussian_naive_bayes_train(X, Y)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return pvml.gaussian_naive_bayes_inference(X, self.means, self.vars,
                                                   self.priors)
@_register_model("classtree")
class ClassificationTree(DemoModel):
    """Decision-tree classifier wrapping pvml.ClassificationTree."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.tree = pvml.ClassificationTree()
        # Tree-growing options taken from the command line.
        self.minsize = args.classtree_minsize
        self.cv = args.classtree_cv
        self.diversity = args.classtree_diversity

    def train_step(self, X, Y, steps):
        """Grow (and optionally prune) the tree in one shot."""
        self.tree.train(X, Y, minsize=self.minsize,
                        diversity=self.diversity, pruning_cv=self.cv)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return self.tree.inference(X)
@_register_model("perceptron")
class Perceptron(DemoModel):
    """Classic binary perceptron."""

    def __init__(self, args):
        super().__init__(args, True)
        self.w = None
        self.b = 0

    def train_step(self, X, Y, steps):
        """Run `steps` perceptron updates, resuming from the current weights."""
        self.w, self.b = pvml.perceptron_train(X, Y, steps, init_w=self.w,
                                               init_b=self.b)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return pvml.perceptron_inference(X, self.w, self.b)
@_register_model("knn")
class KNN(DemoModel):
    """K-nearest-neighbors classifier; training just memorizes the data."""

    def __init__(self, args):
        super().__init__(args, False, False)
        self.X = None
        self.Y = None
        self.k = args.knn_k

    def train_step(self, X, Y, steps):
        """Store the training set and, when k < 1, auto-select k."""
        self.X = X.copy()
        self.Y = Y.copy()
        if self.k >= 1:
            return
        print("Select K... ", end="", flush=True)
        self.k, acc = pvml.knn_select_k(X, Y)
        print("{} ({:.3f}%)".format(self.k, acc * 100))

    def inference(self, X):
        """Classify X by vote among the k nearest stored samples."""
        return pvml.knn_inference(X, self.X, self.Y, self.k)
@_register_model("kmeans")
class KMeans(DemoModel):
    """K-means clustering presented as a classifier.

    The number of clusters tracks the number of classes seen in the
    training labels; centroids are reordered to best match those labels.
    """
    def __init__(self, args):
        super().__init__(args, False)
        self.k = 2
        self.centroids = None
    def train_step(self, X, Y, steps):
        """Run `steps` k-means iterations, growing k when new classes appear."""
        new_k = Y.max() + 1
        if new_k > self.k:
            # If the classes change centroids are reset
            self.centroids = None
            self.k = new_k
        self.centroids = pvml.kmeans_train(X, self.k, steps=steps,
                                           init_centroids=self.centroids)
        self._sort_centroids(X, Y)
    def inference(self, X):
        """Return (cluster labels, scores) for the samples in X."""
        ret = pvml.kmeans_inference(X, self.centroids)
        return ret
    def _sort_centroids(self, X, Y):
        # K-means labels do not correspond to training labels. A
        # categorical classifier is used to reorder the centroids to
        # minimize the error.
        P, _ = pvml.kmeans_inference(X, self.centroids)
        probs, priors = pvml.categorical_naive_bayes_train(P[:, None], Y)
        # Score every cluster index with the fitted naive Bayes model...
        YK = np.arange(self.k)[:, None]
        Q, _ = pvml.categorical_naive_bayes_inference(YK, probs, priors)
        # ...and reorder the centroids by the predicted class of each cluster.
        ii = np.argsort(Q)
        self.centroids = self.centroids[ii, :]
@_register_model("mlp")
class MultiLayerPerceptron(DemoModel):
    """Fully connected neural network backed by pvml.MLP."""

    def __init__(self, args):
        super().__init__(args, False)
        self.net = None
        # Hidden-layer sizes parsed from a comma-separated option string.
        self.hidden = [int(sz) for sz in args.mlp_hidden.split(",")
                       if sz.strip()]
        self.momentum = args.mlp_momentum
        self.batch = args.mlp_batch

    def train_step(self, X, Y, steps):
        """Train for `steps` iterations, building the network on first use."""
        if self.net is None:
            # Input size from the data, output size from the label range.
            counts = [X.shape[1]] + self.hidden + [Y.max() + 1]
            self.net = pvml.MLP(counts)
        self.net.train(X, Y, lr=self.lr, lambda_=self.lambda_,
                       momentum=self.momentum, steps=steps,
                       batch=self.batch)

    def inference(self, X):
        """Return (labels, scores) for the samples in X."""
        return self.net.inference(X)

    def loss(self, Y, P):
        """Delegate the loss computation to the network."""
        return self.net.loss(Y, P)
def select_features(X, Y, features, class_):
    """Optionally reduce X to a subset of columns and/or replace the labels.

    `features` is a comma-separated string of column indices (or None);
    `class_` indexes a column of the joined matrix [X | Y] to use as labels
    (-1, the last column, is the original Y).
    """
    if features is None and class_ == -1:
        return X, Y
    if features is None:
        # NOTE(review): this default drops the last column of X; presumably
        # it assumes the chosen class column replaces one feature — confirm.
        features = np.arange(X.shape[1] - 1)
    else:
        features = np.array(list(map(int, features.split(","))))
    # Work on [X | Y] so `class_` can address the label column as well.
    data = np.concatenate((X, Y[:, None]), 1)
    X = data[:, features]
    Y = data[:, class_]
    return X, Y
def normalization(X, Xtest, fun):
    """Normalize the data with the function registered under `fun`.

    Returns (X, Xtest); Xtest stays None when there is no test set.
    """
    normalize = _NORMALIZATION[fun]
    if Xtest is None:
        return normalize(X), None
    return normalize(X, Xtest)
def main():
    """Load the data, build the selected model, and train it interactively."""
    args = parse_args()
    np.random.seed(args.seed)
    X, Y = pvml.load_dataset(args.train)
    print("Training set loaded: {} samples, {} features, {} classes".format(
        X.shape[0], X.shape[1], Y.max() + 1))
    X, Y = select_features(X, Y, args.features, args.class_)
    if args.test:
        Xtest, Ytest = pvml.load_dataset(args.test)
        print("Test set loaded: {} samples, {} features, {} classes".format(
            Xtest.shape[0], Xtest.shape[1], Ytest.max() + 1))
        Xtest, Ytest = select_features(Xtest, Ytest, args.features,
                                       args.class_)
    else:
        Xtest, Ytest = None, None
    X, Xtest = normalization(X, Xtest, args.normalization)
    # Instantiate the model class registered under args.model.
    model = _MODELS[args.model](args)
    if model.binary:
        # Binary models get 0/1 labels (class 0 vs. everything else).
        Y = (Y > 0).astype(int)
        if Ytest is not None:
            Ytest = (Ytest > 0).astype(int)
    # Interactive plotting so figures refresh while training progresses.
    plt.ion()
    model.train(X, Y, Xtest, Ytest, args.steps)
    plt.ioff()
    print("TRAINING COMPLETED")
    plt.show()
# Script entry point.
if __name__ == "__main__":
    main()
| 33.47006 | 82 | 0.556311 | 17,295 | 0.773549 | 0 | 0 | 12,413 | 0.555193 | 0 | 0 | 2,127 | 0.095134 |
64c72358d0521f48443317121d32c2e809620cb3 | 12,125 | py | Python | sucestest.py | hugegene/Area-Under-Load | 007d95e96b0c445793adef1a3fe0988a3da0b444 | [
"MIT"
] | null | null | null | sucestest.py | hugegene/Area-Under-Load | 007d95e96b0c445793adef1a3fe0988a3da0b444 | [
"MIT"
] | null | null | null | sucestest.py | hugegene/Area-Under-Load | 007d95e96b0c445793adef1a3fe0988a3da0b444 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 11 13:46:58 2019
@author: bdgecyt
"""
import cv2
import math
from time import time
import numpy as np
import wrapper
from operator import itemgetter
# Shared mutable state for the interactive OpenCV calibration UI.
boxes = []   # clicked [x, y] endpoints; two entries per drawn line
xCount = 0
yCount = 0
iter = 0     # number of completed mouse drags (note: shadows builtin iter)
img = 0      # current image; replaced by a numpy array in __main__
def on_mouse(event, x, y, flags, params):
    """OpenCV mouse callback: record drag start/end points into `boxes`.

    Each completed drag appends two [x, y] points to the global `boxes`
    list and increments the global `iter` counter.  `flags`/`params` are
    part of the required callback signature and are unused.

    Fixed: removed an unused `t = time()` local, a no-op `global img`
    declaration, and dead commented-out code.
    """
    global iter
    if event == cv2.EVENT_LBUTTONDOWN:
        print('Start Mouse Position: '+str(x)+', '+str(y))
        sbox = [x, y]
        boxes.append(sbox)
    elif event == cv2.EVENT_LBUTTONUP:
        print('End Mouse Position: '+str(x)+', '+str(y))
        ebox = [x, y]
        boxes.append(ebox)
        iter += 1
def split(start, end, segments):
    """Return `segments` + 1 evenly spaced points from start to end, inclusive."""
    dx = (end[0] - start[0]) / float(segments)
    dy = (end[1] - start[1]) / float(segments)
    middle = [[start[0] + i * dx, start[1] + i * dy]
              for i in range(1, segments)]
    return [start] + middle + [end]
def line_intersection(line1, line2):
    """Return (x, y) where two infinite lines cross.

    Each line is a pair of points.  Raises Exception for parallel lines.
    """
    def cross(a, b):
        # 2-D cross product (determinant).
        return a[0] * b[1] - a[1] * b[0]

    xdiff = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
    ydiff = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
    denom = cross(xdiff, ydiff)
    if denom == 0:
        raise Exception('lines do not intersect')
    d = (cross(*line1), cross(*line2))
    return cross(d, xdiff) / denom, cross(d, ydiff) / denom
def norm(point1, point2):
    """Euclidean distance between two 2-D points."""
    dx = point1[0] - point2[0]
    dy = point1[1] - point2[1]
    return math.sqrt(dx * dx + dy * dy)
def orderptinline(pts, vp):
    """Rank points by distance from the vanishing point.

    Returns the indices of `pts` as a concatenated string, ordered from
    the farthest point to the nearest one.
    """
    distances = [norm(p, vp) for p in pts]
    order = np.argsort(distances)[::-1]
    return ''.join(str(i) for i in order)
def getborderpt(line1, line2):
    # Thin alias: the border point is simply the intersection of the lines.
    return line_intersection(line1, line2)
def findAnglebetVP(line, vp):
    """Angle in degrees at line[1] between line[0] and the vanishing point."""
    a = np.array(line[0])
    b = np.array(line[1])
    c = np.array(vp)
    ba, bc = a - b, c - b
    cos_angle = np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc))
    return np.degrees(np.arccos(cos_angle))
def estimatelength(order,a1,a2,r1,r2, Vanish, response):
    """Estimate the real length of segment (a1, a2) from cross-ratios.

    `order` encodes the ordering (by distance from `Vanish`) of the points
    a1, a2, r1, r2 as produced by orderptinline; (r1, r2) is the reference
    segment of known real length `response`.  Unknown orderings return the
    sentinel 99999.

    NOTE(review): the second `elif order == "0213"` branch duplicates the
    first condition and is therefore unreachable — it was presumably meant
    to test a different ordering (e.g. "0231"); confirm the intended key.
    """
    if order == "0123":
        reflength = (norm(a1, r2)/norm(a1, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
        ref2length = (norm(a2, r2)/norm(a2, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
        finallength = reflength-ref2length
    elif order == "0213":
        reflength = (norm(a1, r2)/norm(a1, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
        ref2length = response/(norm(r1, r2)/norm(r1, Vanish))/(norm(a2,r2)/norm(a2,Vanish))
        finallength = reflength - ref2length
    elif order == "0213":
        # Unreachable: same condition as the branch above (see NOTE).
        reflength = (norm(a1, r2)/norm(a1, Vanish))/(norm(r1,r2)/norm(r1,Vanish))*response
        ref2length = response/((norm(r1, a2)/norm(r1, Vanish))/(norm(r2,a2)/norm(r2,Vanish))-1)
        finallength = reflength + ref2length
    elif order == "2031":
        reflength = response/(norm(r1, r2)/norm(r1, Vanish))/(norm(a1,r2)/norm(a1,Vanish))
        ref2length = reflength/((norm(a1, a2)/norm(a1, Vanish))/(norm(r2,a2)/norm(r2,Vanish))-1)
        finallength = reflength + ref2length
    elif order == "2301":
        reflength = response/((norm(r1, a1)/norm(r1, Vanish))/(norm(r2,a1)/norm(r2,Vanish))-1)
        # NOTE(review): the mixed a1/a2 terms below differ from the pattern
        # of the other branches — confirm this is intentional.
        ref2length = (reflength +response)/((norm(r1, a2)/norm(r1, Vanish))/(norm(a1,a2)/norm(a2,Vanish))-1)
        finallength = ref2length
    else:
        # Sentinel for unhandled orderings.
        finallength = 99999
    return finallength
def calibrateframe(img, findref = False):
    """Detect the three vanishing points of `img`; optionally read the
    user-provided reference/estimate segments and their real lengths.

    Returns (zVanish, xVanish, yVanish) when findref is False, otherwise
    (response, response2, estimateline, referenceline, refV, ortV,
    zVanish, xVanish, yVanish).

    Cleanup: removed the unused `count` local and the dead commented-out
    interactive drawing loop (the segment endpoints are expected to be in
    the global `boxes` already), and made the two angle branches an
    explicit if/elif.
    """
    vps = wrapper.dealAImage(img, "data/result/", True, True, True)
    vps = [[i[0], i[1]] for i in vps]
    print(vps)
    print(vps)
    # Sort the three vanishing points by their y coordinate.
    vps = sorted(vps, key=itemgetter(1))
    print(vps)
    print(boxes)
    xVanish = vps[0]
    print("x vanishing pt:" + str(xVanish))
    yVanish = vps[1]
    print("y vanishing pt:" + str(yVanish))
    zVanish = vps[2]
    print("z vanishing pt:" + str(zVanish))
    if findref:
        # The first drawn segment (boxes[0:2]) is the reference object.
        referenceline = [boxes[0], boxes[1]]
        referenceline.sort(key=lambda x: norm(x, xVanish), reverse=False)
        ang1 = findAnglebetVP(referenceline, xVanish)
        print("angles between reference line and xVanish:" + str(ang1))
        referenceline.sort(key=lambda x: norm(x, yVanish), reverse=False)
        ang2 = findAnglebetVP(referenceline, yVanish)
        print("angles between reference line and yVanish:" + str(ang2))
        # Choose refV/ortV between the two upper vanishing points based on
        # the measured angles.
        if ang1 > ang2:
            print("ref vp is Y vanishing point" )
            refV = yVanish
            ortV = xVanish
        elif ang2 > ang1:
            print("ref vp is X vanishing point" )
            refV = xVanish
            ortV = yVanish
        # NOTE(review): when ang1 == ang2 neither branch assigns refV and
        # the sort below raises NameError — confirm how to handle a tie.
        referenceline.sort(key=lambda x: norm(x, refV), reverse=True)
        # The second drawn segment (boxes[2:4]) is the object to measure.
        estimateline = [boxes[2], boxes[3]]
        estimateline.sort(key=lambda x: norm(x, refV), reverse=True)
        response = float(input("Please enter length of reference object: "))
        response2 = float(input("Please enter length of measured object: "))
        return response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish
    else:
        return zVanish, xVanish, yVanish
def drawfallarea(img, refV, ortV, zVanish, correctpt, correct2pt):
    """Draw (in red) the projected fall-area outline behind the measured line.

    NOTE(review): this reads the module-level `img_shape`, which is only
    assigned inside the __main__ block — confirm before reusing elsewhere.
    """
    # Fixed anchor point expressed as fractions of the image size.
    nextpt= [int(0.78*img_shape[1]),
             int(0.615*img_shape[0])]
    droptoVP3 = [nextpt, zVanish]
    print("vp3")
    print(droptoVP3)
    # Drop the anchor point to the bottom image border along z.
    bordervp3= line_intersection(droptoVP3, [(0, img_shape[0]),(img_shape[1], img_shape[0])])
    dropline3 = [nextpt, bordervp3]
    ptB = line_intersection(dropline3, [correctpt, ortV])
    cv2.line(img,(int(correctpt[0]), int(correctpt[1])), (int(ptB[0]), int(ptB[1])),color=(0,0,255),thickness=2)
    # Close the outline through ptC.
    backline1 = [correct2pt, ortV]
    backline2 = [ptB, refV]
    ptC= line_intersection(backline1, backline2)
    cv2.line(img,(int(correct2pt[0]), int(correct2pt[1])), (int(ptC[0]), int(ptC[1])),color=(0,0,255),thickness=2)
    cv2.line(img,(int(ptB[0]), int(ptB[1])), (int(ptC[0]), int(ptC[1])),color=(0,0,255),thickness=2)
def processframe(img, response, response2, estimateline, referenceline, refV,
                 ortV, zVanish, xVanish, yVanish, img_shape):
    """Annotate `img` with the best height estimate for the measured object.

    Samples 50 points along the drop line of the estimate segment, computes
    a cross-ratio length at each against the reference segment of known
    length `response`, and highlights the sample whose length is closest to
    the requested `response2`.

    Cleanup: hoisted the repeated np.argmin computation and removed dead
    commented-out debug code.
    """
    droptoVP1 = [estimateline[0], zVanish]
    droptoVP2 = [estimateline[1], zVanish]
    print("vp1")
    print(droptoVP1)
    # Intersect the drop lines with the bottom image border.
    bottom = [(0, img_shape[0]), (img_shape[1], img_shape[0])]
    bordervp1 = line_intersection(droptoVP1, bottom)
    bordervp2 = line_intersection(droptoVP2, bottom)
    dropline1 = [estimateline[0], bordervp1]
    dropline2 = [estimateline[1], bordervp2]
    refline1 = [referenceline[0], ortV]
    refline2 = [referenceline[1], ortV]
    print("breaking drop line to segments")
    dropline1seg = split(dropline1[0], dropline1[1], 50)
    finallengths = []
    dropline2pts = []
    for pt in dropline1seg:
        cv2.circle(img, (int(pt[0]), int(pt[1])), 3, (0, 255, 255), -1)
        # Project pt through the reference vanishing point onto the other
        # drop line and the two reference lines.
        intersectDropline2 = line_intersection([pt, refV], dropline2)
        dropline2pts += [intersectDropline2]
        intersectRefline1 = line_intersection([pt, refV], refline1)
        intersectRefline2 = line_intersection([pt, refV], refline2)
        cv2.circle(img, (int(intersectDropline2[0]), int(intersectDropline2[1])), 3, (255, 0, 0), -1)
        cv2.circle(img, (int(intersectRefline1[0]), int(intersectRefline1[1])), 3, (0, 255, 0), -1)
        cv2.circle(img, (int(intersectRefline2[0]), int(intersectRefline2[1])), 3, (0, 0, 255), -1)
        ordered = orderptinline([pt, intersectDropline2, intersectRefline1, intersectRefline2], refV)
        finallength = estimatelength(ordered, pt, intersectDropline2,
                                     intersectRefline1, intersectRefline2,
                                     refV, response)
        finallengths += [finallength]
    # Pick the sample whose estimated length best matches the requested one.
    measurements = [abs(response2 - length) for length in finallengths]
    best = np.argmin(measurements)   # hoisted: previously recomputed 3 times
    correctpt = dropline1seg[best]
    correct2pt = dropline2pts[best]
    cv2.line(img, (int(estimateline[0][0]), int(estimateline[0][1])), (int(estimateline[1][0]), int(estimateline[1][1])), color=(0, 255, 255), thickness=2)
    cv2.line(img, (int(correctpt[0]), int(correctpt[1])), (int(correct2pt[0]), int(correct2pt[1])), color=(0, 0, 255), thickness=2)
    drawfallarea(img, refV, ortV, zVanish, correctpt, correct2pt)
    print("nearest measurement:" +str( finallengths[best] ) )
    # Draw rays from the image center to each vanishing point.
    if zVanish:
        cv2.line(img, (int(0.5*img.shape[1]), int(0.5*img.shape[0])), (int(zVanish[0]), int(zVanish[1])), color=(0, 255, 255), thickness=2)
    if xVanish:
        cv2.line(img, (int(0.5*img.shape[1]), int(0.5*img.shape[0])), (int(xVanish[0]), int(xVanish[1])), color=(0, 255, 255), thickness=2)
    if yVanish:
        cv2.line(img, (int(0.5*img.shape[1]), int(0.5*img.shape[0])), (int(yVanish[0]), int(yVanish[1])), color=(0, 255, 255), thickness=2)
if __name__ == "__main__":
    # Load the test image once; its shape drives the drawing geometry.
    img = cv2.imread('data\\18.jpg')
#    img = cv2.resize(img, None, fx = 0.3,fy = 0.3)
    img_shape = img.shape
    # Calibrate interactively: detect the vanishing points and ask the user
    # for the reference/measured segments and their real lengths.
    response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish = calibrateframe(img, findref = True)
    # Redraw the annotated frame until ESC (key code 27) is pressed.
    while(True):
        print(img.shape)
        # Reload a clean copy each frame so annotations do not accumulate.
        img = cv2.imread('data\\18.jpg')
#        img = cv2.resize(img, None, fx = 0.3,fy = 0.3)
        processframe(img, response, response2, estimateline, referenceline, refV, ortV, zVanish, xVanish, yVanish, img_shape)
        cv2.imshow('points image', img)
        if cv2.waitKey(1) == 27:
            cv2.destroyAllWindows()
            break
# print(img.shape) | 36.854103 | 186 | 0.600742 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,110 | 0.256495 |
64c8811a09b8f045e85e90b5c5ecbc4676c90930 | 1,148 | py | Python | good_spot/places/migrations/0086_auto_20180813_1225.py | jasmine92122/NightClubBackend | 7f59129b78baaba0e0c25de2b493033b858f1b00 | [
"MIT"
] | null | null | null | good_spot/places/migrations/0086_auto_20180813_1225.py | jasmine92122/NightClubBackend | 7f59129b78baaba0e0c25de2b493033b858f1b00 | [
"MIT"
] | 5 | 2020-02-12T03:13:11.000Z | 2022-01-13T01:41:14.000Z | good_spot/places/migrations/0086_auto_20180813_1225.py | jasmine92122/NightClubBackend | 7f59129b78baaba0e0c25de2b493033b858f1b00 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-08-13 12:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('places', '0085_placetype_name_plural'),
]
operations = [
migrations.AddField(
model_name='placetype',
name='name_plural_en',
field=models.CharField(blank=True, max_length=20, null=True, verbose_name='Place type'),
),
migrations.AddField(
model_name='placetype',
name='name_plural_fr',
field=models.CharField(blank=True, max_length=20, null=True, verbose_name='Place type'),
),
migrations.AddField(
model_name='placetype',
name='name_plural_ru',
field=models.CharField(blank=True, max_length=20, null=True, verbose_name='Place type'),
),
migrations.AddField(
model_name='placetype',
name='name_plural_uk',
field=models.CharField(blank=True, max_length=20, null=True, verbose_name='Place type'),
),
]
| 31.888889 | 100 | 0.611498 | 990 | 0.862369 | 0 | 0 | 0 | 0 | 0 | 0 | 263 | 0.229094 |
64c9216d8fa2a1253a7570d4c6809f00d17bb600 | 4,665 | py | Python | src/pspnet/run.py | jefequien/PSPNet-Keras | bad76c4c397b127c1d82bff31cb8ada39d39a230 | [
"MIT"
] | 4 | 2019-09-29T06:13:17.000Z | 2020-06-06T10:21:49.000Z | src/pspnet/run.py | jefequien/PSPNet-Keras | bad76c4c397b127c1d82bff31cb8ada39d39a230 | [
"MIT"
] | null | null | null | src/pspnet/run.py | jefequien/PSPNet-Keras | bad76c4c397b127c1d82bff31cb8ada39d39a230 | [
"MIT"
] | 1 | 2020-12-22T08:30:25.000Z | 2020-12-22T08:30:25.000Z | import os
from os.path import join, isfile, isdir, dirname, basename
from os import environ, makedirs
import sys
import argparse
import numpy as np
import h5py
from scipy import misc
from keras import backend as K
import tensorflow as tf
from pspnet import PSPNet50
import utils
from utils import image_utils
from utils.datasource import DataSource
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--project', type=str, required=True, help="Project name")
parser.add_argument('-r', '--randomize', action='store_true', default=False, help="Randomize image list")
parser.add_argument('-c', '--checkpoint', type=str, help='Checkpoint to use')
parser.add_argument('-s', '--scale', type=str, default='normal',
help='Scale to use',
choices=['normal',
'medium',
'big',
'single'])
parser.add_argument('--start', type=int, default=0)
parser.add_argument('--end', type=int, default=None)
parser.add_argument('--id', default="0")
args = parser.parse_args()
environ["CUDA_VISIBLE_DEVICES"] = args.id
config = utils.get_config(args.project)
datasource = DataSource(config)
im_list = utils.open_im_list(config["im_list"])
im_list = im_list[args.start:args.end]
if args.randomize:
random.seed(3)
random.shuffle(im_list)
# Output directory
root_result = "../predictions/softmax_default/{}".format(args.scale)
if args.checkpoint is not None:
model = basename(dirname(args.checkpoint))
version = basename(args.checkpoint).split('-')[0]
root_result = "predictions/{}/{}/{}".format(model, version, args.scale)
print "Outputting to ", root_result
root_mask = os.path.join(root_result, 'category_mask')
root_prob = os.path.join(root_result, 'prob_mask')
root_maxprob = os.path.join(root_result, 'max_prob')
root_allprob = os.path.join(root_result, 'all_prob')
sess = tf.Session()
K.set_session(sess)
with sess.as_default():
print(args)
pspnet = PSPNet50(checkpoint=args.checkpoint)
for im in im_list:
print im
fn_maxprob = os.path.join(root_maxprob, im.replace('.jpg', '.h5'))
fn_mask = os.path.join(root_mask, im.replace('.jpg', '.png'))
fn_prob = os.path.join(root_prob, im)
fn_allprob = os.path.join(root_allprob, im.replace('.jpg', '.h5'))
if os.path.exists(fn_allprob):
print "Already done."
continue
# make paths if not exist
if not os.path.exists(dirname(fn_maxprob)):
os.makedirs(dirname(fn_maxprob))
if not os.path.exists(dirname(fn_mask)):
os.makedirs(dirname(fn_mask))
if not os.path.exists(dirname(fn_prob)):
os.makedirs(dirname(fn_prob))
if not os.path.exists(dirname(fn_allprob)):
os.makedirs(dirname(fn_allprob))
img, _ = datasource.get_image(im)
probs = None
if args.scale == "single":
probs = pspnet.predict(img)
elif args.scale == "normal":
img_s = image_utils.scale_maxside(img, maxside=512)
probs_s = pspnet.predict_sliding(img_s)
probs = image_utils.scale(probs_s, img.shape)
elif args.scale == "medium":
img_s = image_utils.scale_maxside(img, maxside=1028)
probs_s = pspnet.predict_sliding(img_s)
probs = image_utils.scale(probs_s, img.shape)
elif args.scale == "big":
img_s = image_utils.scale_maxside(img, maxside=2048)
probs_s = pspnet.predict_sliding(img_s)
probs = image_utils.scale(probs_s, img.shape)
# probs is 150 x h x w
probs = np.transpose(probs, (2,0,1))
# Write output
pred_mask = np.array(np.argmax(probs, axis=0) + 1, dtype='uint8')
prob_mask = np.array(np.max(probs, axis=0)*255, dtype='uint8')
max_prob = np.max(probs, axis=(1,2))
all_prob = np.array(probs*255+0.5, dtype='uint8')
# write to file
misc.imsave(fn_mask, pred_mask)
misc.imsave(fn_prob, prob_mask)
with h5py.File(fn_maxprob, 'w') as f:
f.create_dataset('maxprob', data=max_prob)
with h5py.File(fn_allprob, 'w') as f:
f.create_dataset('allprob', data=all_prob)
| 37.926829 | 109 | 0.591854 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 586 | 0.125616 |
64c991ec20357872015ddf9bd0d5384cff6e68c4 | 2,177 | py | Python | tests/test_fake_dataframe_factory.py | damklis/fakedatafactory | d80c238f14b80b0cc928b0bad9bffdb8e9810b49 | [
"BSD-2-Clause"
] | null | null | null | tests/test_fake_dataframe_factory.py | damklis/fakedatafactory | d80c238f14b80b0cc928b0bad9bffdb8e9810b49 | [
"BSD-2-Clause"
] | null | null | null | tests/test_fake_dataframe_factory.py | damklis/fakedatafactory | d80c238f14b80b0cc928b0bad9bffdb8e9810b49 | [
"BSD-2-Clause"
] | null | null | null | import pytest
import pandas as pd
from pandas.testing import assert_frame_equal
from fakedatafactory.fakerow import fake_rows
from fakedatafactory.fakerow.fake_row_base import FakeRowBase
from fakedatafactory.fake_data_factory import (
FakeDataFactory, _FakeRowParser
)
@pytest.fixture()
def example_fake_row():
    """Yield a minimal FakeRowBase subclass with fixed attribute values."""
    class ExampleFakeRow(FakeRowBase):
        def __init__(self):
            self.first_name = "example_first_name"
            self.last_name = "example_last_name"
            self.id = 100
            self.domain = "gmail.com"
    # Yield the class itself (not an instance); tests instantiate it.
    yield ExampleFakeRow
@pytest.fixture()
def example_pandas_df():
    """Yield a 10-row DataFrame matching ExampleFakeRow's attributes."""
    row = {
        "first_name": "example_first_name",
        "last_name": "example_last_name",
        "id": 100,
        "domain": "gmail.com"
    }
    yield pd.DataFrame([row] * 10)
@pytest.fixture()
def module():
    # The fake_rows module doubles as the registry the parser scans.
    yield fake_rows
def test_fakerowparser_parse_name(module):
    """parse_name strips the FakeRow suffix and lowercases the prefix."""
    parser = _FakeRowParser(module)
    assert parser.parse_name("ExampleFakeRow") == "example"
def test_fakerowparser_turn_to_object(example_fake_row, module):
    """turn_to_object instantiates the FakeRow class registered by name."""
    parser = _FakeRowParser(module)
    # Register the example class on the module after the parser is built,
    # matching the original test's ordering.
    fake_rows.ExampleFakeRow = example_fake_row()
    result = parser.turn_to_object("ExampleFakeRow")
    assert isinstance(result, FakeRowBase)
    assert isinstance(result, example_fake_row)
def test_list_available_dataframe_types():
    """The factory exposes a non-empty list of dataframe type names."""
    available = FakeDataFactory().list_available_dataframe_types()
    assert isinstance(available, list)
    assert len(available) > 0
def test_generate_fake_dataframe(example_fake_row, example_pandas_df):
    """Generating 10 'example' rows matches the expected DataFrame."""
    fake_rows.ExampleFakeRow = example_fake_row()
    factory = FakeDataFactory()
    generated = factory.generate_fake_dataframe("example", 10)
    assert_frame_equal(generated, example_pandas_df)
def test_generate_fake_dataframe_with_wrong_type():
    """An unknown type name raises ValueError.

    Fixed: the factory is now constructed outside the pytest.raises block
    so a constructor failure cannot masquerade as the expected error.
    """
    fake_df_factory = FakeDataFactory()
    with pytest.raises(ValueError):
        fake_df_factory.generate_fake_dataframe(
            "wrong_type",
            10
        )
| 22.677083 | 70 | 0.706017 | 227 | 0.104272 | 556 | 0.255397 | 610 | 0.280202 | 0 | 0 | 197 | 0.090492 |
64cc7023daad78ab9f6d1601c3d2b688eb891aad | 2,300 | py | Python | examples/1-1-3-segment_vs_box/plot.py | hyperpower/CarpioPlus | 68cc6c976d6c3ba6adec847a94c344be3f4690aa | [
"MIT"
] | null | null | null | examples/1-1-3-segment_vs_box/plot.py | hyperpower/CarpioPlus | 68cc6c976d6c3ba6adec847a94c344be3f4690aa | [
"MIT"
] | 1 | 2018-06-18T03:52:56.000Z | 2018-06-18T03:52:56.000Z | examples/1-1-3-segment_vs_box/plot.py | hyperpower/CarpioPlus | 68cc6c976d6c3ba6adec847a94c344be3f4690aa | [
"MIT"
] | null | null | null | import matplotlib
import matplotlib.pyplot as plt
import os, sys
import numpy as np
import string
import math
import operator
import multiprocessing
from multiprocessing import Pool
# Render all figure text with LaTeX using a 14pt serif font.
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['font.family'] = 'serif'
matplotlib.rcParams['font.size'] = 14
# Directory layout, all resolved relative to this script's location.
PATH_CASES = os.path.abspath(os.path.join(__file__, "../.."))
PATH_THIS = os.path.abspath(os.path.join(__file__, "../"))
PATH_DATA = os.path.abspath(os.path.join(PATH_THIS, "data"))
PATH_FIG = os.path.abspath(os.path.join(PATH_THIS, "fig"))
PATH_PROJECT = os.path.abspath(os.path.join(PATH_CASES, "../"))
PATH_PYTOOLS = os.path.abspath(os.path.join(PATH_CASES, "pytools"))
# Make the shared pytools helpers importable.
sys.path.append(PATH_PYTOOLS)
import filetool as FT
# plot figure for illustration
def figure_1():
    """Draw the box/line illustration and save it as fig/illustration.png."""
    plt.figure(figsize=(6,6))
    # NOTE(review): `frame` is never used and this axes may be redundant
    # with plt.axes() below — confirm before removing (it could change the
    # saved figure's composition).
    frame = plt.gca()
    ax = plt.axes()
    ax.set_aspect("equal")
    # `box` and `line` are the module-level arrays defined below.
    plot_box_line(ax, box, line)
    plot_annotation(plt)
    plt.axis('off')
    plt.savefig(PATH_FIG + "/" +"illustration.png")
# Unit-square outline (closed polyline: the first vertex is repeated).
box = np.array([[0.0, 0.0],
                [1.0, 0.0],
                [1.0, 1.0],
                [0.0, 1.0],
                [0.0, 0.0]])
# Segment that crosses the box from above its top edge to below its bottom.
line = np.array([[0.2,1.2],[0.8,-0.3]])
def plot_box_line(ax, box, line):
    """Draw the box outline and the segment on the given axes.

    Fixed: removed a leftover debug print of the box x-coordinates.
    """
    ax.plot(box[:, 0], box[:, 1], "-")
    ax.plot(line[:, 0], line[:, 1])
def plot_annotation(plt):
    """Label the box and the line with small arrows."""
    arrowprops = dict(arrowstyle = "->",
                      connectionstyle = "arc3")
    labels = [("Box", (1.0, 0.9), (1.2, 1.0)),
              ("Line", (0.4, 0.7), (0.5, 0.8))]
    for text, target, text_pos in labels:
        plt.annotate(text, xy=target,
                     xytext=text_pos,
                     va="center",
                     ha="center",
                     arrowprops=arrowprops)
def make_gif(fn_prefix, filename):
    """Assemble fig/<fn_prefix>_*.png frames into fig/<filename>.gif and
    then delete the individual frame files."""
    # Build the gif with ImageMagick's `convert`.
    os.system("convert -delay 5 -loop 0 ./fig/%s_*.png ./fig/%s.gif" % (fn_prefix, filename))
    # Remove the now-redundant frames.
    for frame in FT.select_files1(PATH_FIG, fn_prefix):
        os.system("rm " + PATH_FIG + "/" + frame)
def main():
    # Render the static illustration, then pack the "lb" frames into a gif.
    figure_1()
    make_gif("lb", "lb")
if __name__ == '__main__':
main() | 27.710843 | 93 | 0.55087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 304 | 0.132174 |
64cd64bfd6bfba4b56677bdef2a44e0de4c57683 | 3,020 | py | Python | module_pytorch/OWN.py | xy1999729/OthogonalWN | 534623cfd093a93f5dafdff9cc9de4a208f0710c | [
"BSD-2-Clause"
] | 47 | 2017-09-21T09:12:45.000Z | 2022-03-30T10:07:17.000Z | module_pytorch/OWN.py | xy1999729/OthogonalWN | 534623cfd093a93f5dafdff9cc9de4a208f0710c | [
"BSD-2-Clause"
] | 4 | 2018-05-29T04:45:04.000Z | 2021-07-27T13:44:18.000Z | module_pytorch/OWN.py | xy1999729/OthogonalWN | 534623cfd093a93f5dafdff9cc9de4a208f0710c | [
"BSD-2-Clause"
] | 13 | 2017-12-06T07:38:20.000Z | 2022-01-02T00:31:16.000Z | """
Orthogonal Weight Normalization: Solution to Optimization over Multiple Dependent Stiefel Manifolds in Deep Neural Networks
AAAI 2018
Authors: Lei Huang
"""
import torch.nn
import torch.nn.functional as F
from torch.nn import Parameter
from torch.autograd import Variable
from typing import List
from torch.autograd.function import once_differentiable
__all__ = ['OWN_Conv2d']
# norm functions--------------------------------
class IdentityModule(torch.nn.Module):
    """A no-op module: forward returns its input unchanged."""

    def __init__(self, *args, **kwargs):
        # Extra arguments are accepted (and ignored) so this module can be
        # dropped in wherever a normalization module is expected.
        super().__init__()

    def forward(self, input: torch.Tensor):
        return input
class OWNNorm(torch.nn.Module):
    """Orthogonal weight normalization.

    Splits the weight's output channels into `norm_groups` groups and
    applies a ZCA-style whitening (U diag(eig^-1/2) U^T) to each group, so
    the centered, flattened filters within a group become orthonormal.
    """

    def __init__(self, norm_groups=1, *args, **kwargs):
        super(OWNNorm, self).__init__()
        self.norm_groups = norm_groups

    def matrix_power3(self, Input):
        # Helper: Input^3 via two batched matmuls (used by Newton-iteration
        # variants of the whitening).
        B = torch.bmm(Input, Input)
        return torch.bmm(B, Input)

    def forward(self, weight: torch.Tensor):
        assert weight.shape[0] % self.norm_groups == 0
        # Rows of Z are the flattened filters, split into groups.
        Z = weight.view(self.norm_groups, weight.shape[0] // self.norm_groups, -1)
        Zc = Z - Z.mean(dim=-1, keepdim=True)
        S = torch.matmul(Zc, Zc.transpose(1, 2))  # per-group Gram matrix
        # Fix: the buffer is fully overwritten in the loop below, so there is
        # no need to pay for torch.randn (which also consumed RNG state);
        # empty_like keeps the dtype/device without the random fill.
        wm = torch.empty_like(S)
        for i in range(self.norm_groups):
            U, Eig, _ = S[i].svd()
            Scales = Eig.rsqrt().diag()
            wm[i] = U.mm(Scales).mm(U.t())        # S^-1/2 (symmetric)
        W = wm.matmul(Zc)
        return W.view_as(weight)

    def extra_repr(self):
        fmt_str = ['OWN:']
        if self.norm_groups > 1:
            fmt_str.append('groups={}'.format(self.norm_groups))
        return ', '.join(fmt_str)
class OWN_Conv2d(torch.nn.Conv2d):
    """Conv2d whose weight is orthogonalized by OWNNorm and rescaled before
    every convolution."""
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True,
                 norm_groups=1, NScale=1.414, adjustScale=False):
        super(OWN_Conv2d, self).__init__(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias)
        print('OWN_conv:----norm_groups:', norm_groups, '---NScale:', NScale, '---adjust:', adjustScale)
        self.weight_normalization = OWNNorm(norm_groups=norm_groups)
        # Per-output-channel gain applied after orthogonalization.
        self.scale_ = torch.ones(out_channels, 1, 1, 1).fill_(NScale)
        if adjustScale:
            # Learnable gain.
            self.WNScale = Parameter(self.scale_)
        else:
            # self.scale = Variable(self.scale, requires_grad=False)
            # Fixed gain registered as a non-trainable buffer.
            self.register_buffer('WNScale', self.scale_)
    def forward(self, input_f: torch.Tensor) -> torch.Tensor:
        # Orthogonalize the raw weight, apply the gain, then convolve.
        weight_q = self.weight_normalization(self.weight)
        weight_q = weight_q * self.WNScale
        out = F.conv2d(input_f, weight_q, self.bias, self.stride, self.padding, self.dilation, self.groups)
        return out
if __name__ == '__main__':
    # Smoke test: whiten a random 4-filter weight in two groups.  The
    # printed Gram matrix z_ z_^T should be near-identity within each group.
    oni_ = OWNNorm(norm_groups=2)
    print(oni_)
    w_ = torch.randn(4, 4, 3, 3)
    w_.requires_grad_()
    y_ = oni_(w_)
    z_ = y_.view(w_.size(0), -1)
    print(z_.matmul(z_.t()))
    # Check that the whitening is differentiable end to end.
    y_.sum().backward()
    print('w grad', w_.grad.size())
| 33.932584 | 123 | 0.638079 | 2,300 | 0.761589 | 0 | 0 | 0 | 0 | 0 | 0 | 397 | 0.131457 |
64cdcaff8bfef6cb8ac2031be3de787a65f14e15 | 1,781 | py | Python | src/visions/lib/relations/string_to_datetime.py | sweersr/visions | 1af04235cb77bec52e4923627dfbf968ed1a584d | [
"BSD-4-Clause"
] | null | null | null | src/visions/lib/relations/string_to_datetime.py | sweersr/visions | 1af04235cb77bec52e4923627dfbf968ed1a584d | [
"BSD-4-Clause"
] | null | null | null | src/visions/lib/relations/string_to_datetime.py | sweersr/visions | 1af04235cb77bec52e4923627dfbf968ed1a584d | [
"BSD-4-Clause"
] | null | null | null | import pandas as pd
from visions import visions_string, visions_datetime
from visions.core.model import TypeRelation
from visions.core.model.relations import InferenceRelation
from visions.utils.coercion import test_utils
def to_datetime_year_week(series):
"""Convert a series of the format YYYY/UU (year, week) to datetime.
A '0' is added as day dummy value, as pandas requires a day value to parse.
Args:
series: the Series to parse
Returns:
A datetime series
Examples:
>>> series = pd.Series(['2018/47', '2018/12', '2018/03'])
>>> parsed_series = to_datetime_year_week(series)
>>> print(parsed_series.dt.week)
0 47
1 12
2 3
dtype: int64
"""
return pd.to_datetime(series + "0", format="%Y/%U%w")
def to_datetime_year_month_day(series):
"""Convert a series of the format YYYYMMDD (year, month, day) to datetime.
Args:
series: the Series to parse
Returns:
A datetime series
Examples:
>>> series = pd.Series(['20181201', '20181202', '20181203'])
>>> parsed_series = to_datetime_year_week(series)
>>> print(parsed_series.dt.day)
0 1
1 2
2 3
dtype: int64
"""
return pd.to_datetime(series, format="%Y%m%d")
def get_string_datetime_type_relation(func):
return InferenceRelation(
relationship=test_utils.coercion_test(func),
transformer=func,
related_type=visions_string,
type=visions_datetime,
)
def string_to_datetime_year_week():
return get_string_datetime_type_relation(to_datetime_year_week)
def string_to_datetime_year_month_day():
return get_string_datetime_type_relation(to_datetime_year_month_day)
| 26.58209 | 79 | 0.665357 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 928 | 0.521056 |
64cf76b2d8601ee102da83689b123357da9bdff9 | 2,237 | py | Python | thirdparty/ffmpeg/tools/python/convert.py | yashrajsingh1998/ApraPipes1 | ec93095613f4345d6044c7012f2d8c3b99f65f03 | [
"MIT"
] | 30 | 2022-03-10T16:34:13.000Z | 2022-03-29T09:32:35.000Z | thirdparty/ffmpeg/tools/python/convert.py | yashrajsingh1998/ApraPipes1 | ec93095613f4345d6044c7012f2d8c3b99f65f03 | [
"MIT"
] | 2 | 2021-07-24T19:31:37.000Z | 2022-02-14T05:25:19.000Z | thirdparty/ffmpeg/tools/python/convert.py | yashrajsingh1998/ApraPipes1 | ec93095613f4345d6044c7012f2d8c3b99f65f03 | [
"MIT"
] | 6 | 2021-09-24T04:58:43.000Z | 2022-03-10T10:02:27.000Z | # Copyright (c) 2019 Guo Yejun
#
# This file is part of FFmpeg.
#
# FFmpeg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# FFmpeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with FFmpeg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ==============================================================================
# verified with Python 3.5.2 on Ubuntu 16.04
import argparse
import os
from convert_from_tensorflow import *
def get_arguments():
parser = argparse.ArgumentParser(description='generate native mode model with weights from deep learning model')
parser.add_argument('--outdir', type=str, default='./', help='where to put generated files')
parser.add_argument('--infmt', type=str, default='tensorflow', help='format of the deep learning model')
parser.add_argument('infile', help='path to the deep learning model with weights')
parser.add_argument('--dump4tb', type=str, default='no', help='dump file for visualization in tensorboard')
return parser.parse_args()
def main():
args = get_arguments()
if not os.path.isfile(args.infile):
print('the specified input file %s does not exist' % args.infile)
exit(1)
if not os.path.exists(args.outdir):
print('create output directory %s' % args.outdir)
os.mkdir(args.outdir)
basefile = os.path.split(args.infile)[1]
basefile = os.path.splitext(basefile)[0]
outfile = os.path.join(args.outdir, basefile) + '.model'
dump4tb = False
if args.dump4tb.lower() in ('yes', 'true', 't', 'y', '1'):
dump4tb = True
if args.infmt == 'tensorflow':
convert_from_tensorflow(args.infile, outfile, dump4tb)
if __name__ == '__main__':
main()
| 39.245614 | 116 | 0.685293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,282 | 0.573089 |
64cfaf05b6963c5ebf29624e1caab4a4a9f80d75 | 839 | py | Python | custom/icds/rules/util.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2020-05-05T13:10:01.000Z | 2020-05-05T13:10:01.000Z | custom/icds/rules/util.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2019-12-09T14:00:14.000Z | 2019-12-09T14:00:14.000Z | custom/icds/rules/util.py | MaciejChoromanski/commcare-hq | fd7f65362d56d73b75a2c20d2afeabbc70876867 | [
"BSD-3-Clause"
] | 5 | 2015-11-30T13:12:45.000Z | 2019-07-01T19:27:07.000Z | from __future__ import absolute_import
from __future__ import unicode_literals
import pytz
import re
import six
from corehq.util.python_compatibility import soft_assert_type_text
from corehq.util.timezones.conversions import ServerTime
from datetime import datetime, date
def get_date(value):
if isinstance(value, date):
if isinstance(value, datetime):
return value.date()
return value
if not isinstance(value, six.string_types):
raise TypeError("Expected date, datetime, or string")
soft_assert_type_text(value)
if not re.match(r'^\d{4}-\d{2}-\d{2}', value):
raise ValueError("Expected a date string")
return datetime.strptime(value, '%Y-%m-%d').date()
def todays_date(utc_now):
return ServerTime(utc_now).user_time(pytz.timezone('Asia/Kolkata')).done().date()
| 27.064516 | 85 | 0.722288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 105 | 0.125149 |
64d3d2d5c2079d93b8675451c183f60e97e4020f | 4,670 | py | Python | applications/popart/resnext_inference/resnext_inference_launch.py | Splendon/examples | ed4a8a01857b6ddca49559141acf5d0986eb01e1 | [
"MIT"
] | null | null | null | applications/popart/resnext_inference/resnext_inference_launch.py | Splendon/examples | ed4a8a01857b6ddca49559141acf5d0986eb01e1 | [
"MIT"
] | null | null | null | applications/popart/resnext_inference/resnext_inference_launch.py | Splendon/examples | ed4a8a01857b6ddca49559141acf5d0986eb01e1 | [
"MIT"
] | null | null | null | # Copyright 2019 Graphcore Ltd.
import time
from datetime import datetime
import os
from absl import app, flags
import subprocess
import re
import statistics
"""
This program launches subprocesses to handle data loading and resnext101 inference.
It can also be used to perform inference on other ONNX CNNs that take ImageNet sized input images.
To adapt, download a different ONNX model from the Python package `pretrainedmodels` via get_model.py,
or save your own model to models/<model_name>/<model_name>_<batch_size>.onnx
Then, run with the flag --model_name <model_name> --batch_size <batch_size>
"""
def launch_resnext_subprocess(i, f):
# parse flags into list of strings to pass through to subprocesses
# give the i_th process the i_th dataset
data_sub_dir = FLAGS.data_dir + f"{i}"
args = FLAGS.flags_into_string().split('\n')
print(f"\n\nRunning subprocess {i}: \t ")
print(" ".join(["python3", "resnext101.py",
"--data_sub_dir", data_sub_dir] + args))
return subprocess.Popen(["python3", "resnext101.py", "--data_sub_dir", data_sub_dir] + args, stdout=f, stderr=f)
FLAGS = flags.FLAGS
flags.DEFINE_integer("batch_size", 6, "Batch size (per device)")
flags.DEFINE_integer(
"num_ipus", 8, "Number of IPUs to be used. One IPU runs one compute process.")
flags.DEFINE_string("data_dir", "datasets/",
"Parent directory containing subdirectory dataset(s). Number of subdirs should equal num_ipus")
flags.DEFINE_integer("num_workers", 12, "Number of threads per dataloader")
flags.DEFINE_integer("batches_per_step", 1500,
"Number of batches to fetch on the host ready for streaming onto the device, reducing host IO")
flags.DEFINE_boolean(
"profile", False, "Saves a GCProfile memory report. Use for debugging")
flags.DEFINE_string("model_name", "resnext101_32x4d",
"model name. Used to locate ONNX protobuf in models/")
flags.DEFINE_bool("synthetic", False, "Use synthetic data created on the IPU for inference")
flags.DEFINE_integer(
"iterations", 1, "Number of iterations to run if using synthetic data. Each iteration uses one `batches_per_step` x `batch_size` x `H` x `W` x `C` sized input tensor.")
def main(argv):
FLAGS = flags.FLAGS
log_str = f"""
Number of subprocesses created: {FLAGS.num_ipus}
Per subprocess:
\t Batch size: {FLAGS.batch_size}
\t Number of batches prepared by the host at a time: {FLAGS.batches_per_step}
"""
print(log_str)
procs = []
log_files = []
timestamp = datetime.now().strftime("%H-%M-%S")
if not os.path.exists("logs"):
os.mkdir("logs")
os.mkdir(f"logs/{timestamp}")
for i in range(FLAGS.num_ipus):
f = open(f"logs/{timestamp}/log_{i}", "w")
p = launch_resnext_subprocess(i, f)
# sleep to prevent race conditions on acquiring IPUs
time.sleep(1)
# log
log_files.append(f)
procs.append(p)
exit_codes = [p.wait() for p in procs]
print(f"All processes finished with exit codes: {exit_codes}")
for f in log_files:
f.close()
regex_throughput = re.compile("Compute .* sec .* (.*) images/sec.")
regex_latency = re.compile("Total (.*).* sec. Preprocessing")
throughputs = []
latencies = []
for i in range(FLAGS.num_ipus):
sub_throughputs = []
sub_latencies = []
with open(f"logs/{timestamp}/log_{i}") as f:
for line in f:
match = regex_throughput.search(line)
match_lat = regex_latency.search(line)
if match:
res = match.group(1)
sub_throughputs.append(float(res))
if match_lat:
res = match_lat.group(1)
sub_latencies.append(float(res))
throughputs.append(sub_throughputs)
latencies.append(sub_latencies)
sums_throughputs = [sum(l) for l in zip(*throughputs)]
mean_latencies = [statistics.mean(l) for l in zip(*latencies)]
stats = zip(mean_latencies, sums_throughputs)
start = 2 if len(sums_throughputs) >= 4 else 0
for (duration, through) in list(stats)[start:]:
report_string = "Total {:<8.3} sec.".format(duration)
report_string += " Preprocessing {:<8.3} sec ({:4.3}%).".format(
duration, 95.) # just for the output
report_string += " Compute {:<8.3} sec ({:4.3}%).".format(
duration, 95.)
report_string += " {:5f} images/sec.".format(int(through))
print(report_string)
if __name__ == '__main__':
app.run(main)
| 40.258621 | 172 | 0.64561 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,136 | 0.457388 |
64d58b123468ff248ecdc2ae08b02067e31303dc | 32 | py | Python | src/ultimateml/dummy.py | EmilMachine/ultimateml | f5c58e882b120bb99e4a56ea3f9ac5a636ae3a00 | [
"MIT"
] | null | null | null | src/ultimateml/dummy.py | EmilMachine/ultimateml | f5c58e882b120bb99e4a56ea3f9ac5a636ae3a00 | [
"MIT"
] | null | null | null | src/ultimateml/dummy.py | EmilMachine/ultimateml | f5c58e882b120bb99e4a56ea3f9ac5a636ae3a00 | [
"MIT"
] | null | null | null |
def fancyfunction():
return 42 | 10.666667 | 20 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
64d790a54760f91efa2b838551ebcdc4ea2e8758 | 72 | py | Python | src/main.py | EfficientElevator/Simulation | fdbb479aa907c26999715e132aff0a40d9bdf892 | [
"MIT"
] | 1 | 2020-01-30T16:29:51.000Z | 2020-01-30T16:29:51.000Z | src/main.py | EfficientElevator/Simulation | fdbb479aa907c26999715e132aff0a40d9bdf892 | [
"MIT"
] | 17 | 2020-01-14T16:48:53.000Z | 2020-04-20T17:58:44.000Z | src/main.py | EfficientElevator28/Simulation | fdbb479aa907c26999715e132aff0a40d9bdf892 | [
"MIT"
] | null | null | null | """
Author: Sean Toll
Test simulation functionality
"""
print("Test")
| 9 | 29 | 0.694444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.847222 |
64d7a8053972b4804e4b495adaa95ceebb6347ae | 350 | py | Python | tests/preprocessing/test_preprocessing_check.py | lshtm-gis/WHO_PHSM_Cleaning | 5892673922fc555fb86d6e0be548b48c7dc66814 | [
"MIT"
] | null | null | null | tests/preprocessing/test_preprocessing_check.py | lshtm-gis/WHO_PHSM_Cleaning | 5892673922fc555fb86d6e0be548b48c7dc66814 | [
"MIT"
] | 123 | 2020-10-12T11:06:27.000Z | 2021-04-28T15:32:29.000Z | tests/preprocessing/test_preprocessing_check.py | lshtm-gis/WHO_PHSM_Cleaning | 5892673922fc555fb86d6e0be548b48c7dc66814 | [
"MIT"
] | null | null | null | import pytest
import pandas as pd
from src.preprocess import check
class Test_check_column_names:
def test_check_column_names(self):
records = pd.DataFrame({'a': [1]})
config = pd.DataFrame({'column': ['a'], 'dataset': ['ACAPS']})
res = check.check_column_names(records, config, log=False)
assert res is None
| 23.333333 | 70 | 0.662857 | 281 | 0.802857 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0.085714 |
64d8895cf78d6da53f81000994752388d727b1b9 | 5,312 | py | Python | Lib/site-packages/proboscis/sorting.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 25 | 2015-03-03T08:36:12.000Z | 2021-02-10T16:52:17.000Z | Lib/site-packages/proboscis/sorting.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 6 | 2015-01-09T16:54:32.000Z | 2018-08-30T15:18:53.000Z | Lib/site-packages/proboscis/sorting.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 14 | 2015-02-10T16:00:09.000Z | 2020-04-26T14:11:24.000Z | # Copyright (c) 2011 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This module is home to Proboscis's sorting algorithms.
"""
from collections import deque
class Dependent(object):
def __init__(self, case, critical):
self.case = case
self.critical = critical
class TestNode:
"""Representation of a TestEntry used in sorting."""
def __init__(self, case):
self.case = case
self.dependencies = []
self.dependents = []
def add_dependency(self, node, is_critical):
"""Adds a bidirectional link between this node and a dependency.
This also informs the dependency TestEntry of its dependent. It is
intuitive to specify dependencies when writing tests, so we have
to wait until this phase to determine the dependents of the TestEntry.
"""
# TODO: Could this be sped up by using a set?
if node in self.dependencies:
return
self.dependencies.append(node)
node.dependents.append(self)
node.case.dependents.append(Dependent(self.case, is_critical))
@property
def has_no_dependencies(self):
return len(self.dependencies) == 0
def pop_dependent(self):
"""Removes and returns a dependent from this nodes dependent list.
This act of destruction is one reason why this second representation
of a TestEntry is necessary.
"""
dependent = self.dependents.pop()
dependent.dependencies.remove(self)
return dependent
class TestGraph:
"""Used to sort the tests in a registry in the correct order.
As it sorts, it also adds dependent information to the TestEntries, which
means calling it twice messes stuff up.
"""
def __init__(self, groups, entries, cases):
self.nodes = []
self.entries = entries
self.groups = groups
for case in cases:
self.nodes.append(TestNode(case))
for node in self.nodes:
n_info = node.case.entry.info
for dependency_group in n_info.runs_after_groups:
d_group_nodes = self.nodes_for_group(dependency_group)
for dependency_group_node in d_group_nodes:
node.add_dependency(dependency_group_node, False)
for dependency_group in n_info.depends_on_groups:
d_group_nodes = self.nodes_for_group(dependency_group)
for dependency_group_node in d_group_nodes:
node.add_dependency(dependency_group_node, True)
for dependency in n_info.runs_after:
d_nodes = self.nodes_for_class_or_function(dependency)
for dependency_node in d_nodes:
node.add_dependency(dependency_node, False)
for dependency in n_info.depends_on:
d_nodes = self.nodes_for_class_or_function(dependency)
for dependency_node in d_nodes:
node.add_dependency(dependency_node, True)
def nodes_for_class_or_function(self, test_home):
"""Returns nodes attached to the given class."""
search_homes = [test_home]
if hasattr(test_home, '_proboscis_entry_'):
if hasattr(test_home._proboscis_entry_, 'children'):
children = test_home._proboscis_entry_.children
search_homes += [child.home for child in children]
search_set = set(search_homes)
return (n for n in self.nodes \
if search_set.intersection(n.case.entry.homes))
def nodes_for_group(self, group_name):
"""Returns nodes attached to the given group."""
group = self.groups[group_name]
entries = group.entries
return [node for node in self.nodes if node.case.entry in entries]
def sort(self):
"""Returns a sorted list of entries.
Dismantles this graph's list of nodes and adds dependent information
to the list of TestEntries (in other words, don't call this twice).
"""
independent_nodes = deque((n for n in self.nodes
if n.has_no_dependencies))
ordered_nodes = [] # The new list
while independent_nodes:
i_node = independent_nodes.popleft()
ordered_nodes.append(i_node)
while i_node.dependents:
d_node = i_node.pop_dependent()
if d_node.has_no_dependencies:
independent_nodes.appendleft(d_node)
# Search for a cycle
for node in self.nodes:
if not node.has_no_dependencies:
raise RuntimeError("Cycle found on node " + str(node.case))
return list((n.case for n in ordered_nodes))
| 37.942857 | 78 | 0.645331 | 4,582 | 0.862575 | 0 | 0 | 87 | 0.016378 | 0 | 0 | 1,850 | 0.348268 |
64da1b100ea51354dfbc6cebf59ad3bf8b64417b | 565 | py | Python | backoffice/migrations/0019_auto_20211010_1210.py | Psemp/artsetforme_public | 240bb8ef22c0589f168b24c0ee5ed8e9030fe94a | [
"MIT"
] | null | null | null | backoffice/migrations/0019_auto_20211010_1210.py | Psemp/artsetforme_public | 240bb8ef22c0589f168b24c0ee5ed8e9030fe94a | [
"MIT"
] | 1 | 2021-10-08T22:20:09.000Z | 2021-10-08T22:20:09.000Z | backoffice/migrations/0019_auto_20211010_1210.py | Psemp/artsetforme_public | 240bb8ef22c0589f168b24c0ee5ed8e9030fe94a | [
"MIT"
] | null | null | null | # Generated by Django 3.2.7 on 2021-10-10 10:10
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('backoffice', '0018_auto_20210929_1112'),
]
operations = [
migrations.RemoveField(
model_name='newsletter',
name='attachment',
),
migrations.RemoveField(
model_name='newsletter',
name='image_body_1',
),
migrations.RemoveField(
model_name='newsletter',
name='image_body_2',
),
]
| 21.730769 | 50 | 0.564602 | 480 | 0.849558 | 0 | 0 | 0 | 0 | 0 | 0 | 160 | 0.283186 |
64da549fe78588f6bf34e8fda4bcb42b15aa1631 | 4,874 | py | Python | official/cv/yolov3_darknet53/eval.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | official/cv/yolov3_darknet53/eval.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | official/cv/yolov3_darknet53/eval.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z | # Copyright 2020-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""YoloV3 eval."""
import os
import datetime
import time
import mindspore as ms
from src.yolo import YOLOV3DarkNet53
from src.logger import get_logger
from src.yolo_dataset import create_yolo_dataset
from src.util import DetectionEngine
from model_utils.config import config
# only useful for huawei cloud modelarts.
from model_utils.moxing_adapter import moxing_wrapper, modelarts_pre_process
def conver_testing_shape(args):
"""Convert testing shape to list."""
testing_shape = [int(args.testing_shape), int(args.testing_shape)]
return testing_shape
def load_parameters(network, file_name):
config.logger.info("yolov3 pretrained network model: %s", file_name)
param_dict = ms.load_checkpoint(file_name)
param_dict_new = {}
for key, values in param_dict.items():
if key.startswith('moments.'):
continue
elif key.startswith('yolo_network.'):
param_dict_new[key[13:]] = values
else:
param_dict_new[key] = values
ms.load_param_into_net(network, param_dict_new)
config.logger.info('load_model %s success', file_name)
@moxing_wrapper(pre_process=modelarts_pre_process)
def run_test():
"""The function of eval."""
start_time = time.time()
config.data_root = os.path.join(config.data_dir, 'val2014')
config.annFile = os.path.join(config.data_dir, 'annotations/instances_val2014.json')
devid = int(os.getenv('DEVICE_ID')) if os.getenv('DEVICE_ID') else 0
ms.set_context(mode=ms.GRAPH_MODE, device_target=config.device_target, save_graphs=False, device_id=devid)
# logger
config.outputs_dir = os.path.join(config.log_path,
datetime.datetime.now().strftime('%Y-%m-%d_time_%H_%M_%S'))
rank_id = int(os.environ.get('RANK_ID')) if os.environ.get('RANK_ID') else 0
config.logger = get_logger(config.outputs_dir, rank_id)
ms.reset_auto_parallel_context()
parallel_mode = ms.ParallelMode.STAND_ALONE
ms.set_auto_parallel_context(parallel_mode=parallel_mode, gradients_mean=True, device_num=1)
config.logger.info('Creating Network....')
network = YOLOV3DarkNet53(is_training=False)
if os.path.isfile(config.pretrained):
load_parameters(network, config.pretrained)
else:
raise FileNotFoundError(f"{config.pretrained} not exists or not a pre-trained file.")
if config.testing_shape:
config.test_img_shape = conver_testing_shape(config)
ds = create_yolo_dataset(config.data_root, config.annFile, is_training=False,
batch_size=config.per_batch_size, device_num=1,
rank=rank_id, shuffle=False, config=config)
config.logger.info('testing shape : %s', config.test_img_shape)
config.logger.info('totol %d images to eval', ds.get_dataset_size() * config.per_batch_size)
network.set_train(False)
# init detection engine
detection = DetectionEngine(config)
config.logger.info('Start inference....')
for i, data in enumerate(ds.create_dict_iterator(num_epochs=1)):
image = data["image"]
image_shape = data["image_shape"]
image_id = data["img_id"]
output_big, output_me, output_small = network(image)
output_big = output_big.asnumpy()
output_me = output_me.asnumpy()
output_small = output_small.asnumpy()
image_id = image_id.asnumpy()
image_shape = image_shape.asnumpy()
detection.detect([output_small, output_me, output_big], config.per_batch_size, image_shape, image_id)
if i % 50 == 0:
config.logger.info('Processing... {:.2f}% '.format(i / ds.get_dataset_size() * 100))
config.logger.info('Calculating mAP...')
detection.do_nms_for_results()
result_file_path = detection.write_result()
config.logger.info('result file path: %s', result_file_path)
eval_result = detection.get_eval_result()
cost_time = time.time() - start_time
eval_print_str = '\n=============coco eval result=========\n' + eval_result
config.logger.info(eval_print_str)
config.logger.info('testing cost time %.2f h', cost_time / 3600.)
if __name__ == "__main__":
run_test()
| 38.078125 | 110 | 0.693886 | 0 | 0 | 0 | 0 | 3,039 | 0.623513 | 0 | 0 | 1,327 | 0.272261 |
64db5e195c76fd45e4c6390d88a97be172fa087d | 901 | py | Python | ims/upload/browser/factories.py | imsweb/ims.upload | b1143ed231b1a9a22f3f53596c0c8b90bda1a0d0 | [
"MIT"
] | null | null | null | ims/upload/browser/factories.py | imsweb/ims.upload | b1143ed231b1a9a22f3f53596c0c8b90bda1a0d0 | [
"MIT"
] | null | null | null | ims/upload/browser/factories.py | imsweb/ims.upload | b1143ed231b1a9a22f3f53596c0c8b90bda1a0d0 | [
"MIT"
] | null | null | null | import plone.api
from plone.app.content.browser.folderfactories import FolderFactoriesView
class UploadFolderFactoriesView(FolderFactoriesView):
""" Replaces the link for Add File dropdown with a link to our upload page """
def addable_types(self, include=None):
addables = super(UploadFolderFactoriesView,
self).addable_types(include)
hijack = plone.api.portal.get_registry_record(
'ims.upload.interfaces.IChunkSettings.hijack')
if hijack:
upload_types = ('File', 'Image')
for upload_type in upload_types:
upload_add = [a for a in addables if a['id'] == upload_type]
if upload_add:
upload_add = upload_add[0]
upload_add[
'action'] = '%s/@@upload' % self.add_context().absolute_url()
return addables
| 39.173913 | 85 | 0.612653 | 807 | 0.895671 | 0 | 0 | 0 | 0 | 0 | 0 | 161 | 0.17869 |
64db9da9deb9de8fdc1b27e59b80498e4bf37e2d | 1,446 | py | Python | literal/apps/order/tests/test_validators.py | spanickroon/Text-From-Photo-Django-API | e1ef79c90a443cc3e606dec9e1c531aa5943ca59 | [
"MIT"
] | null | null | null | literal/apps/order/tests/test_validators.py | spanickroon/Text-From-Photo-Django-API | e1ef79c90a443cc3e606dec9e1c531aa5943ca59 | [
"MIT"
] | null | null | null | literal/apps/order/tests/test_validators.py | spanickroon/Text-From-Photo-Django-API | e1ef79c90a443cc3e606dec9e1c531aa5943ca59 | [
"MIT"
] | 1 | 2021-06-08T18:06:21.000Z | 2021-06-08T18:06:21.000Z | from unittest import mock
from django.test import TestCase
from parameterized import param, parameterized
from rest_framework import validators
from apps.order.constants import (
EXTENSION_ERROR_MESSAGE,
MAX_IMAGE_SIZE,
MAX_IMAGE_SIZE_ERROR_MESSAGE,
)
from apps.order.validators import FileValidator
class FileValidatorTestCase(TestCase):
def setUp(self) -> None:
super().setUp()
self.validator = FileValidator
def test__image_validator__success(self):
file = mock.MagicMock()
file.name = "test.jpg"
file.size = 1
actual_file = self.validator.image_validator(file=file)
self.assertEqual(actual_file.name, file.name)
self.assertEqual(actual_file.size, file.size)
@parameterized.expand(
[
param(
EXTENSION_ERROR_MESSAGE,
name="test.pdf",
size=1,
error=validators.ValidationError,
),
param(
MAX_IMAGE_SIZE_ERROR_MESSAGE,
name="test.jpg",
size=MAX_IMAGE_SIZE + 1,
error=validators.ValidationError,
),
]
)
def test__image_validator__raise_exceptions(self, _, name, size, error):
file = mock.MagicMock()
file.name = name
file.size = size
with self.assertRaises(error):
self.validator.image_validator(file=file)
| 27.283019 | 76 | 0.618949 | 1,129 | 0.780775 | 0 | 0 | 687 | 0.475104 | 0 | 0 | 30 | 0.020747 |
64dbc268e94b0e86f76dd10af1901fafdc87b38b | 2,000 | py | Python | gocardless_pro/services/billing_request_flows_service.py | gocardless/gocardless-pro-python | e6763fba5326ff56f4ba417ddd7828c03e059be5 | [
"MIT"
] | 30 | 2015-07-08T21:10:10.000Z | 2022-02-17T10:08:55.000Z | gocardless_pro/services/billing_request_flows_service.py | gocardless/gocardless-pro-python | e6763fba5326ff56f4ba417ddd7828c03e059be5 | [
"MIT"
] | 21 | 2015-12-14T02:24:52.000Z | 2022-02-05T15:56:00.000Z | gocardless_pro/services/billing_request_flows_service.py | gocardless/gocardless-pro-python | e6763fba5326ff56f4ba417ddd7828c03e059be5 | [
"MIT"
] | 19 | 2016-02-10T15:57:42.000Z | 2022-02-05T10:21:05.000Z | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class BillingRequestFlowsService(base_service.BaseService):
"""Service class that provides access to the billing_request_flows
endpoints of the GoCardless Pro API.
"""
RESOURCE_CLASS = resources.BillingRequestFlow
RESOURCE_NAME = 'billing_request_flows'
def create(self,params=None, headers=None):
"""Create a billing request flow.
Creates a new billing request flow.
Args:
params (dict, optional): Request body.
Returns:
ListResponse of BillingRequestFlow instances
"""
path = '/billing_request_flows'
if params is not None:
params = {self._envelope_key(): params}
response = self._perform_request('POST', path, params, headers,
retry_failures=True)
return self._resource_for(response)
def initialise(self,identity,params=None, headers=None):
"""Initialise a billing request flow.
Returns the flow having generated a fresh session token which can be
used to power
integrations that manipulate the flow.
Args:
identity (string): Unique identifier, beginning with "BRQ".
params (dict, optional): Request body.
Returns:
ListResponse of BillingRequestFlow instances
"""
path = self._sub_url_params('/billing_request_flows/:identity/actions/initialise', {
'identity': identity,
})
if params is not None:
params = {'data': params}
response = self._perform_request('POST', path, params, headers,
retry_failures=False)
return self._resource_for(response)
| 30.30303 | 92 | 0.618 | 1,776 | 0.888 | 0 | 0 | 0 | 0 | 0 | 0 | 999 | 0.4995 |
64de2e1c5f7f8c520e37623f47bbfb1a25b6e79b | 1,915 | py | Python | src/pathrev/cli.py | pathwayforte/reproducibility-survey | dd1059a14270414240d7dace4c6446e52321c0cf | [
"Apache-2.0"
] | 1 | 2019-07-29T14:54:26.000Z | 2019-07-29T14:54:26.000Z | src/pathrev/cli.py | pathwayforte/reproducibility-survey | dd1059a14270414240d7dace4c6446e52321c0cf | [
"Apache-2.0"
] | 4 | 2019-07-04T11:39:34.000Z | 2019-09-28T06:31:38.000Z | src/pathrev/cli.py | pathwayforte/reproducibility-survey | dd1059a14270414240d7dace4c6446e52321c0cf | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Command line interface."""
import logging
import click
from pathrev.pipeline import (
do_gsea, do_preranked,
)
logger = logging.getLogger(__name__)
@click.group(help='pathrev')
def main():
"""Run pathrev."""
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(name)s - %(message)s")
matrix_option = click.option(
'-m', '--matrix',
help="path to matrix",
type=click.Path(file_okay=True, dir_okay=False, exists=True),
required=True
)
rnk_option = click.option(
'-r', '--rnk',
help="path to rank file",
type=click.Path(file_okay=True, dir_okay=False, exists=True),
required=True
)
phenotype_option = click.option(
'-c', '--cls',
help="path to cls file",
type=click.Path(file_okay=True, dir_okay=False, exists=True),
required=True
)
gene_set_option = click.option(
'-g', '--gmt',
help="path to gmt file",
type=click.Path(file_okay=True, dir_okay=False, exists=True),
required=True
)
out_dir_option = click.option(
'-o', '--out_dir',
help="path to output directory",
type=click.Path(file_okay=False, dir_okay=True, exists=False),
required=True
)
@main.command()
@matrix_option
@phenotype_option
@gene_set_option
@out_dir_option
def gsea(matrix, cls, gmt, out_dir):
"""Run normal GSEA with a matrix file."""
click.echo("Running GSEA on {} with {}, {} and outputting to {}".format(matrix, cls, gmt, out_dir))
do_gsea(matrix, cls, gmt, out_dir)
click.echo('Done with GSEA analysis')
@main.command()
@rnk_option
@gene_set_option
@out_dir_option
def prerank(rnk, gmt, out_dir):
"""Run prerank GSEA with a sorted rank file."""
click.echo("Running GSEA-PreRanked on {} with {} and outputting to {}".format(rnk, gmt, out_dir))
do_preranked(rnk, gmt, out_dir)
click.echo('Done with prerank analysis')
if __name__ == '__main__':
main()
| 25.533333 | 103 | 0.662141 | 0 | 0 | 0 | 0 | 826 | 0.431332 | 0 | 0 | 555 | 0.289817 |
64de3949ccd17a2f6665718ed1ecbe21a3a14929 | 5,842 | py | Python | parsl/monitoring/monitoring_base.py | benclifford/parsl | 21f8681882779050d2e074591e95ada43789748f | [
"Apache-2.0"
] | null | null | null | parsl/monitoring/monitoring_base.py | benclifford/parsl | 21f8681882779050d2e074591e95ada43789748f | [
"Apache-2.0"
] | null | null | null | parsl/monitoring/monitoring_base.py | benclifford/parsl | 21f8681882779050d2e074591e95ada43789748f | [
"Apache-2.0"
] | null | null | null | import logging
from parsl.monitoring.handler import DatabaseHandler
from parsl.monitoring.handler import RemoteHandler
from parsl.utils import RepresentationMixin
class NullHandler(logging.Handler):
    """No-op handler installed as the library default.

    Mirrors :class:`logging.NullHandler`: every record handed to it is
    silently discarded, so the library stays quiet unless the host
    application configures logging itself.
    """
    def emit(self, record):
        # Deliberately drop the record.
        return
class MonitoringStore(RepresentationMixin):
    """Base configuration describing where monitoring data lives.

    Holds the visualization endpoint and the logging-server endpoint;
    concrete stores (e.g. :class:`Database`) extend this with backend
    specific settings.
    """
    def __init__(self,
                 host=None,
                 port=None,
                 logging_server_host='localhost',
                 logging_server_port=9595):
        """
        Parameters
        ----------
        host : str
            The hostname for running the visualization interface.
        port : int
            The port for the visualization interface.
        logging_server_host : str
            The hostname for the logging server.
        logging_server_port : int
            The port for the logging server.
        """
        # Stored verbatim; consumers read these attributes directly
        # (see get_parsl_logger below).
        self.host = host
        self.port = port
        self.logging_server_host = logging_server_host
        self.logging_server_port = logging_server_port
class Database(MonitoringStore):
    """Monitoring store backed by a SQL database.

    Note: ``RepresentationMixin`` is already inherited through
    :class:`MonitoringStore`, so the previously repeated base class was
    redundant and has been dropped (the MRO is unchanged).
    """
    def __init__(self,
                 connection_string=None, **kwargs):
        """ Initializes a monitoring configuration class.

        Parameters
        ----------
        connection_string : str, optional
            Database connection string that defines how to connect to the database. If not set, DFK init will use a sqlite3
            database inside the rundir.
        """
        # Endpoint settings (host/port/logging server) are handled by the base class.
        super().__init__(**kwargs)
        self.connection_string = connection_string
class VisualizationServer(RepresentationMixin):
    """Configuration for the monitoring visualization web interface."""
    def __init__(self,
                 host='http://localhost',
                 port=8899):
        """
        Parameters
        ----------
        host : str
            The hostname for running the visualization interface.
            NOTE(review): the default includes the ``http://`` scheme while
            the port is kept separately — confirm how callers join the two.
        port : int
            The port for the visualization interface
        """
        self.host = host
        self.port = port
class Monitoring(RepresentationMixin):
    """ This is a config class for monitoring. """
    def __init__(self,
                 store=None,
                 visualization_server=None,
                 monitoring_interval=15,
                 workflow_name=None,
                 version='1.0.0'):
        """ Initializes a monitoring configuration class.

        Parameters
        ----------
        store : MonitoringStore, optional
            Where monitoring data is kept (e.g. a :class:`Database`).
        visualization_server : VisualizationServer, optional
            Endpoint configuration for the visualization interface.
        monitoring_interval : float, optional
            The amount of time in seconds to sleep in between resource monitoring logs per task.
        workflow_name : str, optional
            Name to record as the workflow base name, defaults to the name of the parsl script file if left as None.
        version : str, optional
            Optional workflow identification to distinguish between workflows with the same name, not used internally only for display to user.

        Example
        -------
        .. code-block:: python

            import parsl
            from parsl.config import Config
            from parsl.executors.threads import ThreadPoolExecutor
            from parsl.monitoring.monitoring_base import (
                Monitoring, Database, VisualizationServer)

            config = Config(
                executors=[ThreadPoolExecutor()],
                monitoring_config=Monitoring(
                    store=Database(
                        connection_string='sqlite:///monitoring.db'
                    ),
                    visualization_server=VisualizationServer(
                        host='http://localhost',
                        port=8899
                    )
                )
            )
            parsl.load(config)
        """
        self.store = store
        self.visualization_server = visualization_server
        self.version = version
        self.monitoring_interval = monitoring_interval
        self.workflow_name = workflow_name
        # for now just set this to none but can be used to present the dashboard location to user
        self.dashboard_link = None
def get_parsl_logger(
        logger_name='parsl_monitor_logger',
        is_logging_server=False,
        monitoring_config=None,
        **kwargs):
    """Return a logger wired up for Parsl monitoring.

    Parameters
    ----------
    logger_name : str, optional
        Name of the logger to use. Prevents adding repeat handlers or incorrect handlers
    is_logging_server : Bool, optional
        Used internally to determine which handler to return when using local db logging
    monitoring_config : MonitoringConfig, optional
        Pass in a logger class object to use for generating loggers.

    Returns
    -------
    logging.logger object

    Raises
    ------
    ValueError
        If a monitoring config is supplied without a store.
    """
    logger = logging.getLogger(logger_name)
    if monitoring_config is None:
        # No monitoring configured: stay silent by default.
        logger.addHandler(NullHandler())
        return logger
    if monitoring_config.store is None:
        raise ValueError('No MonitoringStore defined')
    if is_logging_server:
        # Server side: persist records received on the server straight
        # into the monitoring store.
        handler = DatabaseHandler(monitoring_config.store.connection_string)
    else:
        # Client side: forward records to the logging server.
        handler = RemoteHandler(monitoring_config.store.logging_server_host,
                                monitoring_config.store.logging_server_port)
    # The two branches previously duplicated the getLogger/setLevel/addHandler
    # sequence; logging.getLogger(name) always returns the same object, so it
    # is hoisted out here.
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
    return logger
| 32.455556 | 143 | 0.613317 | 3,897 | 0.667066 | 0 | 0 | 0 | 0 | 0 | 0 | 3,365 | 0.576001 |
64de72b84736cad3940b04d3b19539c316ab87ad | 591 | py | Python | music/class_/time/atom.py | jedhsu/music | dea68c4a82296cd4910e786f533b2cbf861377c3 | [
"MIT"
] | null | null | null | music/class_/time/atom.py | jedhsu/music | dea68c4a82296cd4910e786f533b2cbf861377c3 | [
"MIT"
] | null | null | null | music/class_/time/atom.py | jedhsu/music | dea68c4a82296cd4910e786f533b2cbf861377c3 | [
"MIT"
] | null | null | null | """
*Atom*
Smallest controllable unit of space.
"""
from dataclasses import dataclass
from typing import Sequence
from ._space import Space
from .quark import Quark
__all__ = ["Atom"]
@dataclass
class Atom(
    tuple[Quark],
    Space,
):
    # 1-based position of this atom within its parent sequence; the tuple
    # part of the instance stores the quarks themselves.
    index: int

    def __new__(
            cls,
            beats: Sequence[Quark],
            index: int,
    ):
        # tuple is immutable, so its contents must be supplied in __new__.
        # The original implementation tried to populate it from __init__ via
        # a discarded super().__new__(tuple, beats) call, which made
        # Atom(beats, index) raise TypeError (tuple.__new__ received the
        # extra 'index' argument) and would have left the tuple empty.
        return super().__new__(cls, beats)

    def __init__(
            self,
            beats: Sequence[Quark],
            index: int,
    ):
        assert index > 0, "Index must be positive"
        self.index = index

    def __repr__(self) -> str:
        return f"Atom-{self.index}"
| 14.414634 | 50 | 0.568528 | 380 | 0.642978 | 0 | 0 | 391 | 0.661591 | 0 | 0 | 110 | 0.186125 |
64deb8cf784d9d6dcf55594be2839d484d08aa59 | 1,599 | py | Python | Main/bakend.py | avinsit123/Precog_Intern_Assignment | 07024fff780eea861998ec781d6e75c47effaa80 | [
"MIT"
] | null | null | null | Main/bakend.py | avinsit123/Precog_Intern_Assignment | 07024fff780eea861998ec781d6e75c47effaa80 | [
"MIT"
] | null | null | null | Main/bakend.py | avinsit123/Precog_Intern_Assignment | 07024fff780eea861998ec781d6e75c47effaa80 | [
"MIT"
] | null | null | null | from flask import Flask,request,render_template
import numpy as np
from Reccomending_functions import item_item_cf,user_user_cf,rank_matrix_factorize
from Database_connector import fetch_from_database
import random
#ML Packages
# Movie ids most recently shown on the index page; filled by index() and
# read back by recommend() to map form fields onto movies.
# NOTE(review): module-level state is shared across all users/requests —
# concurrent visitors will clobber each other's selections.
asd = []
app = Flask(__name__)
@app.route('/')
def index():
    """Render twelve randomly chosen movies for the user to rate."""
    global asd
    # Draw 12 distinct movie ids from 1..300.  random.sample performs the
    # same uniform without-replacement draw as the previous
    # build-list/shuffle/slice sequence, in one call.
    movies_list = random.sample(range(1, 301), 12)
    # Remember the selection so /recommendations can map the submitted
    # form fields back to these movie ids.
    asd = movies_list
    display_list = fetch_from_database(movies_list)
    return render_template("Display_movies.html", display_list=display_list)
@app.route('/recommendations',methods=['POST','GET'])
def recommend():
    """Collect the submitted ratings and render recommended movies."""
    if request.method != 'POST':
        return "Bye-Bye"
    movies_list = asd
    # One row of 301 slots so movie ids 1..300 can be used as column
    # indices directly (slot 0 stays unused).
    user_ratings = np.zeros((1,301))
    # Form fields are named movie1..movie12 in display order.
    for i, movie_id in enumerate(movies_list):
        user_ratings[0][movie_id] = request.form['movie' + str(i + 1)]
    method = request.form['recco_method']
    if method == "uucf":
        recommendend_movies_list = user_user_cf(user_ratings, movies_list)
    elif method == "iicf":
        recommendend_movies_list = item_item_cf(user_ratings, movies_list)
    elif method == "rf":
        recommendend_movies_list = rank_matrix_factorize(user_ratings, movies_list)
    else:
        # Previously an unrecognised method fell through to a NameError on
        # recommendend_movies_list; fail fast with a clear response instead.
        return "Unknown recommendation method", 400
    print(user_ratings)
    # Cast numpy integers to plain ints before the database lookup.
    sasa = [int(movie_id) for movie_id in recommendend_movies_list]
    movie_details = fetch_from_database(sasa)
    return render_template("Display Recommendations.html", movie_details=movie_details)
# Run the Flask development server when executed directly.
# NOTE(review): debug=True with host 0.0.0.0 exposes the debugger to the
# network — development use only.
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
| 35.533333 | 86 | 0.729206 | 0 | 0 | 0 | 0 | 1,268 | 0.792996 | 0 | 0 | 194 | 0.121326 |
b37955d5f66a497d8fb24f9f904d68db40a6f837 | 37,903 | py | Python | wcpan/telegram/api.py | legnaleurc/wcpan.telegram | b6b0d827c72673777a7e18a1879eb43c6e35f665 | [
"MIT"
] | 7 | 2016-10-22T05:38:37.000Z | 2021-04-29T09:54:59.000Z | wcpan/telegram/api.py | legnaleurc/wcpan.telegram | b6b0d827c72673777a7e18a1879eb43c6e35f665 | [
"MIT"
] | 7 | 2017-04-23T19:50:53.000Z | 2018-12-05T02:16:41.000Z | wcpan/telegram/api.py | legnaleurc/wcpan.telegram | b6b0d827c72673777a7e18a1879eb43c6e35f665 | [
"MIT"
] | 7 | 2016-11-09T14:44:49.000Z | 2019-08-05T12:42:19.000Z | import json
from typing import List, Awaitable, Union
from tornado import httpclient as thc, web as tw, httputil as thu
from . import types, util
# URL template for every Bot API call; filled in by BotClient._get_api_url.
_API_TEMPLATE = 'https://api.telegram.org/bot{api_token}/{api_method}'
# Union of every keyboard/markup type the Bot API accepts for the
# ``reply_markup`` parameter of send_* methods.
ReplyMarkup = Union[
    types.InlineKeyboardMarkup,
    types.ReplyKeyboardMarkup,
    types.ReplyKeyboardRemove,
    types.ForceReply,
]
class BotClient(object):
    """Thin asynchronous wrapper over the Telegram Bot HTTP API.

    Each public coroutine mirrors one Bot API method: it collects the
    caller's arguments into a dict (optional parameters are included only
    when not None), issues an HTTP request through Tornado's
    AsyncHTTPClient, and wraps the decoded ``result`` payload in the
    matching ``types`` class.  Methods that may upload a
    ``types.InputFile`` switch to a multipart POST; everything else is
    sent as GET query parameters.

    NOTE(review): the ``-> Awaitable[X]`` return annotations describe the
    type of the *call expression*; on an ``async def`` the conventional
    annotation is the awaited type ``X`` itself.  Left unchanged here to
    match the rest of the file.
    """
    def __init__(self, api_token: str) -> None:
        # Fail early on an empty or None token rather than on first request.
        self._api_token = api_token
        if not self._api_token:
            raise BotError('invalid API token')
    async def get_updates(self, offset: int = None, limit: int = None,
                          timeout: int = None, allowed_updates: List[str] = None
                          ) -> Awaitable[List[types.Update]]:
        args = {}
        if offset is not None:
            args['offset'] = offset
        if limit is not None:
            args['limit'] = limit
        if timeout is not None:
            args['timeout'] = timeout
        if allowed_updates is not None:
            args['allowed_updates'] = allowed_updates
        data = await self._get('getUpdates', args)
        return [types.Update(u) for u in data]
    async def set_webhook(self, url: str, certificate: types.InputFile = None,
                          max_connections: int = None,
                          allowed_updates: List[str] = None) -> Awaitable[bool]:
        args = {
            'url': '' if not url else str(url),
        }
        if certificate is not None:
            args['certificate'] = certificate
        if max_connections is not None:
            args['max_connections'] = max_connections
        if allowed_updates is not None:
            args['allowed_updates'] = allowed_updates
        # A certificate is a file upload, so it must travel as multipart POST.
        if isinstance(certificate, types.InputFile):
            data = await self._post('setWebhook', args)
        else:
            data = await self._get('setWebhook', args)
        return data
    async def delete_webhook(self) -> Awaitable[bool]:
        data = await self._get('deleteWebhook')
        return data
    async def get_webhook_info(self) -> Awaitable[types.WebhookInfo]:
        data = await self._get('getWebhookInfo')
        return types.WebhookInfo(data)
    async def get_me(self) -> Awaitable[types.User]:
        data = await self._get('getMe')
        return types.User(data)
    async def send_message(self, chat_id: Union[int, str], text: str,
                           parse_mode: str = None,
                           disable_web_page_preview: bool = None,
                           disable_notification: bool = None,
                           reply_to_message_id: int = None,
                           reply_markup: ReplyMarkup = None
                           ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'text': text,
        }
        if parse_mode is not None:
            args['parse_mode'] = parse_mode
        if disable_web_page_preview is not None:
            args['disable_web_page_preview'] = disable_web_page_preview
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('sendMessage', args)
        return types.Message(data)
    async def forward_message(self, chat_id: Union[int, str],
                              from_chat_id: Union[int, str], message_id: int,
                              disable_notification: bool = None,
                              ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'from_chat_id': from_chat_id,
            'message_id': message_id,
        }
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        data = await self._get('forwardMessage', args)
        return types.Message(data)
    async def send_photo(self, chat_id: Union[int, str],
                         photo: Union[types.InputFile, str],
                         caption: str = None, disable_notification: bool = None,
                         reply_to_message_id: int = None,
                         reply_markup: ReplyMarkup = None
                         ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'photo': photo,
        }
        if caption is not None:
            args['caption'] = caption
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        # A str photo is a file_id or URL (GET); an InputFile is uploaded (POST).
        if isinstance(photo, str):
            data = await self._get('sendPhoto', args)
        else:
            data = await self._post('sendPhoto', args)
        return types.Message(data)
    async def send_audio(self, chat_id: Union[int, str],
                         audio: Union[types.InputFile, str],
                         caption: str = None, duration: int = None,
                         performer: str = None, title: str = None,
                         disable_notification: bool = None,
                         reply_to_message_id: int = None,
                         reply_markup: ReplyMarkup = None
                         ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'audio': audio,
        }
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if caption is not None:
            args['caption'] = caption
        if duration is not None:
            args['duration'] = duration
        if performer is not None:
            args['performer'] = performer
        if title is not None:
            args['title'] = title
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        if isinstance(audio, str):
            data = await self._get('sendAudio', args)
        else:
            data = await self._post('sendAudio', args)
        return types.Message(data)
    async def send_document(self, chat_id: Union[int, str],
                            document: Union[types.InputFile, str],
                            caption: str = None,
                            disable_notification: bool = None,
                            reply_to_message_id: int = None,
                            reply_markup: ReplyMarkup = None
                            ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'document': document,
        }
        if caption is not None:
            args['caption'] = caption
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        if isinstance(document, str):
            data = await self._get('sendDocument', args)
        else:
            data = await self._post('sendDocument', args)
        return types.Message(data)
    async def send_video(self, chat_id: Union[int, str],
                         video: Union[types.InputFile, str],
                         duration: int = None, width: int = None,
                         height: int = None, caption: str = None,
                         disable_notification: bool = None,
                         reply_to_message_id: int = None,
                         reply_markup: ReplyMarkup = None
                         ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'video': video,
        }
        if duration is not None:
            args['duration'] = duration
        if width is not None:
            args['width'] = width
        if height is not None:
            args['height'] = height
        if caption is not None:
            args['caption'] = caption
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        if isinstance(video, str):
            data = await self._get('sendVideo', args)
        else:
            data = await self._post('sendVideo', args)
        return types.Message(data)
    async def send_voice(self, chat_id: Union[int, str],
                         voice: Union[types.InputFile, str],
                         caption: str = None, duration: int = None,
                         disable_notification: bool = None,
                         reply_to_message_id: int = None,
                         reply_markup: ReplyMarkup = None
                         ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'voice': voice,
        }
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if caption is not None:
            args['caption'] = caption
        if duration is not None:
            args['duration'] = duration
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        if isinstance(voice, str):
            data = await self._get('sendVoice', args)
        else:
            data = await self._post('sendVoice', args)
        return types.Message(data)
    async def send_video_note(self, chat_id: Union[int, str],
                              video_note: Union[types.InputFile, str],
                              duration: int = None, length: int = None,
                              disable_notification: bool = None,
                              reply_to_message_id: int = None,
                              reply_markup: ReplyMarkup = None
                              ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'video_note': video_note,
        }
        if duration is not None:
            args['duration'] = duration
        if length is not None:
            args['length'] = length
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        if isinstance(video_note, str):
            data = await self._get('sendVideoNote', args)
        else:
            data = await self._post('sendVideoNote', args)
        return types.Message(data)
    async def send_location(self, chat_id: Union[int, str], latitude: float,
                            longitude: float, disable_notification: bool = None,
                            reply_to_message_id: int = None,
                            reply_markup: ReplyMarkup = None
                            ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'latitude': latitude,
            'longitude': longitude,
        }
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('sendLocation', args)
        return types.Message(data)
    async def send_venue(self, chat_id: Union[int, str], latitude: float,
                         longitude: float, title: str, address: str,
                         foursquare_id: str = None,
                         disable_notification: bool = None,
                         reply_to_message_id: int = None,
                         reply_markup: ReplyMarkup = None
                         ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'latitude': latitude,
            'longitude': longitude,
            'title': title,
            'address': address,
        }
        if foursquare_id is not None:
            args['foursquare_id'] = foursquare_id
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('sendVenue', args)
        return types.Message(data)
    async def send_contact(self, chat_id: Union[int, str], phone_number: str,
                           first_name: str, last_name: str = None,
                           disable_notification: bool = None,
                           reply_to_message_id: int = None,
                           reply_markup: ReplyMarkup = None
                           ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'phone_number': phone_number,
            'first_name': first_name,
        }
        if last_name is not None:
            args['last_name'] = last_name
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('sendContact', args)
        return types.Message(data)
    async def send_chat_action(self, chat_id: Union[int, str],
                               action: str) -> Awaitable[bool]:
        args = {
            'chat_id': chat_id,
            'action': action,
        }
        data = await self._get('sendChatAction', args)
        return data
    async def get_user_profile_photos(self, user_id: int, offset: int = None,
                                      limit: int = None
                                      ) -> Awaitable[types.UserProfilePhotos]:
        args = {
            'user_id': user_id,
        }
        if offset is not None:
            args['offset'] = offset
        if limit is not None:
            args['limit'] = limit
        data = await self._get('getUserProfilePhotos', args)
        return types.UserProfilePhotos(data)
    async def get_file(self, file_id: str) -> Awaitable[types.File]:
        args = {
            'file_id': file_id,
        }
        data = await self._get('getFile', args)
        return types.File(data)
    async def kick_chat_member(self, chat_id: Union[int, str],
                               user_id: int) -> Awaitable[bool]:
        args = {
            'chat_id': chat_id,
            'user_id': user_id,
        }
        data = await self._get('kickChatMember', args)
        return data
    async def leave_chat(self, chat_id: Union[int, str]) -> Awaitable[bool]:
        args = {
            'chat_id': chat_id,
        }
        data = await self._get('leaveChat', args)
        return data
    async def unban_chat_member(self, chat_id: Union[int, str],
                                user_id: int) -> Awaitable[bool]:
        args = {
            'chat_id': chat_id,
            'user_id': user_id,
        }
        data = await self._get('unbanChatMember', args)
        return data
    async def get_chat(self, chat_id: Union[int, str]) -> Awaitable[types.Chat]:
        args = {
            'chat_id': chat_id,
        }
        data = await self._get('getChat', args)
        return types.Chat(data)
    async def get_chat_administrators(self, chat_id: Union[int, str]
                                      ) -> Awaitable[List[types.ChatMember]]:
        args = {
            'chat_id': chat_id,
        }
        data = await self._get('getChatAdministrators', args)
        return [types.ChatMember(_) for _ in data]
    async def get_chat_members_count(self, chat_id: Union[int, str]) -> Awaitable[int]:
        args = {
            'chat_id': chat_id,
        }
        data = await self._get('getChatMembersCount', args)
        return data
    async def get_chat_member(self, chat_id: Union[int, str],
                              user_id: int) -> Awaitable[types.ChatMember]:
        args = {
            'chat_id': chat_id,
            'user_id': user_id,
        }
        data = await self._get('getChatMember', args)
        return types.ChatMember(data)
    async def answer_callback_query(self, callback_query_id: str,
                                    text: str = None, show_alert: bool = None,
                                    url: str = None, cache_time: int = None
                                    ) -> Awaitable[bool]:
        args = {
            'callback_query_id': callback_query_id,
        }
        if text is not None:
            args['text'] = text
        if show_alert is not None:
            args['show_alert'] = show_alert
        if url is not None:
            args['url'] = url
        if cache_time is not None:
            args['cache_time'] = cache_time
        data = await self._get('answerCallbackQuery', args)
        return data
    async def edit_message_text(self, text: str,
                                chat_id: Union[int, str] = None,
                                message_id: int = None,
                                inline_message_id: str = None,
                                parse_mode: str = None,
                                disable_web_page_preview: bool = None,
                                reply_markup: types.InlineKeyboardMarkup = None
                                ) -> Awaitable[Union[types.Message, bool]]:
        args = {
            'text': text,
        }
        if chat_id is not None:
            args['chat_id'] = chat_id
        if message_id is not None:
            args['message_id'] = message_id
        if inline_message_id is not None:
            args['inline_message_id'] = inline_message_id
        if parse_mode is not None:
            args['parse_mode'] = parse_mode
        if disable_web_page_preview is not None:
            args['disable_web_page_preview'] = disable_web_page_preview
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('editMessageText', args)
        # For inline messages the API returns True instead of a Message.
        if isinstance(data, bool):
            return data
        return types.Message(data)
    async def edit_message_caption(self, chat_id: Union[int, str] = None,
                                   message_id: int = None,
                                   inline_message_id: str = None,
                                   caption: str = None,
                                   reply_markup:
                                   types.InlineKeyboardMarkup = None
                                   ) -> Awaitable[Union[types.Message, bool]]:
        args = {}
        if chat_id is not None:
            args['chat_id'] = chat_id
        if message_id is not None:
            args['message_id'] = message_id
        if inline_message_id is not None:
            args['inline_message_id'] = inline_message_id
        if caption is not None:
            args['caption'] = caption
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('editMessageCaption', args)
        if isinstance(data, bool):
            return data
        return types.Message(data)
    async def edit_message_reply_markup(self, chat_id: Union[int, str] = None,
                                        message_id: int = None,
                                        inline_message_id: str = None,
                                        reply_markup:
                                        types.InlineKeyboardMarkup = None
                                        ) -> Awaitable[
            Union[types.Message, bool]]:
        args = {}
        if chat_id is not None:
            args['chat_id'] = chat_id
        if message_id is not None:
            args['message_id'] = message_id
        if inline_message_id is not None:
            args['inline_message_id'] = inline_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('editMessageReplyMarkup', args)
        if isinstance(data, bool):
            return data
        return types.Message(data)
    async def delete_message(self, chat_id: Union[int, str],
                             message_id: int) -> Awaitable[bool]:
        args = {
            'chat_id': chat_id,
            'message_id': message_id,
        }
        data = await self._get('deleteMessage', args)
        return data
    async def send_sticker(self, chat_id: Union[int, str],
                           sticker: Union[types.InputFile, str],
                           disable_notification: bool = None,
                           reply_to_message_id: int = None,
                           reply_markup: ReplyMarkup = None
                           ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'sticker': sticker,
        }
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        if isinstance(sticker, str):
            data = await self._get('sendSticker', args)
        else:
            data = await self._post('sendSticker', args)
        return types.Message(data)
    async def get_sticker_set(self, name: str) -> Awaitable[types.StickerSet]:
        args = {
            'name': name,
        }
        data = await self._get('getStickerSet', args)
        return types.StickerSet(data)
    async def upload_sticker_file(self, user_id: int,
                                  png_sticker: types.InputFile
                                  ) -> Awaitable[types.File]:
        args = {
            'user_id': user_id,
            'png_sticker': png_sticker,
        }
        data = await self._post('uploadStickerFile', args)
        return types.File(data)
    async def create_new_sticker_set(self, user_id: int, name: str, title: str,
                                     png_sticker: Union[types.InputFile, str],
                                     emojis: str, contains_masks: bool = None,
                                     mask_position: types.MaskPosition = None
                                     ) -> Awaitable[bool]:
        args = {
            'user_id': user_id,
            'name': name,
            'title': title,
            'png_sticker': png_sticker,
            'emojis': emojis,
        }
        if contains_masks is not None:
            args['contains_masks'] = contains_masks
        if mask_position is not None:
            args['mask_position'] = mask_position
        if isinstance(png_sticker, str):
            data = await self._get('createNewStickerSet', args)
        else:
            data = await self._post('createNewStickerSet', args)
        return data
    async def add_sticker_to_set(self, user_id: int, name: str,
                                 png_sticker: Union[types.InputFile, str],
                                 emojis: str,
                                 mask_position: types.MaskPosition = None
                                 ) -> Awaitable[types.Message]:
        args = {
            'user_id': user_id,
            'name': name,
            'png_sticker': png_sticker,
            'emojis': emojis,
        }
        if mask_position is not None:
            args['mask_position'] = mask_position
        if isinstance(png_sticker, str):
            data = await self._get('addStickerToSet', args)
        else:
            data = await self._post('addStickerToSet', args)
        return data
    async def set_sticker_position_in_set(self, sticker: str,
                                          position: int) -> Awaitable[bool]:
        args = {
            'sticker': sticker,
            'position': position,
        }
        data = await self._get('setStickerPositionInSet', args)
        return data
    async def delete_sticker_from_set(self, sticker: str) -> Awaitable[bool]:
        args = {
            'sticker': sticker,
        }
        data = await self._get('deleteStickerFromSet', args)
        return data
    async def answer_inline_query(self, inline_query_id: str,
                                  results: List[types.InlineQueryResult],
                                  cache_time: int = None,
                                  is_personal: bool = None,
                                  next_offset: str = None,
                                  switch_pm_text: str = None,
                                  switch_pm_parameter: str = None
                                  ) -> Awaitable[bool]:
        args = {
            'inline_query_id': inline_query_id,
            'results': results,
        }
        if cache_time is not None:
            args['cache_time'] = cache_time
        if is_personal is not None:
            args['is_personal'] = is_personal
        if next_offset is not None:
            args['next_offset'] = next_offset
        if switch_pm_text is not None:
            args['switch_pm_text'] = switch_pm_text
        if switch_pm_parameter is not None:
            args['switch_pm_parameter'] = switch_pm_parameter
        data = await self._post('answerInlineQuery', args)
        return data
    async def send_game(self, chat_id: int, game_short_name: str,
                        disable_notification: bool = None,
                        reply_to_message_id: int = None,
                        reply_markup: types.InlineKeyboardMarkup = None,
                        ) -> Awaitable[types.Message]:
        args = {
            'chat_id': chat_id,
            'game_short_name': game_short_name,
        }
        if disable_notification is not None:
            args['disable_notification'] = disable_notification
        if reply_to_message_id is not None:
            args['reply_to_message_id'] = reply_to_message_id
        if reply_markup is not None:
            args['reply_markup'] = reply_markup
        data = await self._get('sendGame', args)
        return types.Message(data)
    async def set_game_score(self, user_id: int, score: int, force: bool = None,
                             disable_edit_message: bool = None,
                             chat_id: int = None, message_id: int = None,
                             inline_message_id: str = None
                             ) -> Awaitable[Union[types.Message, bool]]:
        args = {
            'user_id': user_id,
            'score': score,
        }
        if force is not None:
            args['force'] = force
        if disable_edit_message is not None:
            args['disable_edit_message'] = disable_edit_message
        if chat_id is not None:
            args['chat_id'] = chat_id
        if message_id is not None:
            args['message_id'] = message_id
        if inline_message_id is not None:
            args['inline_message_id'] = inline_message_id
        data = await self._get('setGameScore', args)
        if isinstance(data, bool):
            return data
        return types.Message(data)
    async def get_game_high_scores(self, user_id: int, chat_id: int = None,
                                   message_id: int = None,
                                   inline_message_id: str = None
                                   ) -> Awaitable[List[types.GameHighScore]]:
        args = {
            'user_id': user_id,
        }
        if chat_id is not None:
            args['chat_id'] = chat_id
        if message_id is not None:
            args['message_id'] = message_id
        if inline_message_id is not None:
            args['inline_message_id'] = inline_message_id
        data = await self._get('getGameHighScores', args)
        return [types.GameHighScore(_) for _ in data]
    def _get_api_url(self, api_method):
        # Build the full endpoint URL for one Bot API method.
        return _API_TEMPLATE.format(api_token=self._api_token,
                                    api_method=api_method)
    @staticmethod
    def _parse_response(response):
        # Decode a Bot API response and unwrap its 'result' payload,
        # raising BotError when the API reports ok == false.
        data = response.body.decode('utf-8')
        data = json.loads(data)
        if not data['ok']:
            raise BotError(data['description'])
        return data['result']
    async def _get(self, api_method, args=None):
        # Issue a GET request with *args* encoded as the query string.
        url = self._get_api_url(api_method)
        if args is not None:
            args = util.normalize_args(args)
            url = thu.url_concat(url, args)
        link = thc.AsyncHTTPClient()
        request = thc.HTTPRequest(url)
        response = await link.fetch(request)
        return self._parse_response(response)
    async def _post(self, api_method, args):
        # Issue a multipart/form-data POST (used for file uploads).
        # request_timeout=0.0 disables Tornado's request timeout so large
        # uploads are not aborted mid-transfer.
        url = self._get_api_url(api_method)
        args = util.normalize_args(args)
        content_type, stream = util.generate_multipart_formdata(args.items())
        link = thc.AsyncHTTPClient()
        request = thc.HTTPRequest(url, method='POST', headers={
            'Content-Type': content_type,
        }, body_producer=stream, request_timeout=0.0)
        response = await link.fetch(request)
        return self._parse_response(response)
class _DispatcherMixin(object):
def __init__(self, *args, **kwargs) -> None:
super(_DispatcherMixin, self).__init__()
async def on_text(self, message: types.Message) -> None:
pass
async def on_audio(self, message: types.Message) -> None:
pass
async def on_document(self, message: types.Message) -> None:
pass
async def on_game(self, message: types.Message) -> None:
pass
async def on_photo(self, message: types.Message) -> None:
pass
async def on_sticker(self, message: types.Message) -> None:
pass
async def on_video(self, message: types.Message) -> None:
pass
async def on_voice(self, message: types.Message) -> None:
pass
async def on_video_note(self, message: types.Message) -> None:
pass
async def on_caption(self, message: types.Message) -> None:
pass
async def on_contact(self, message: types.Message) -> None:
pass
async def on_location(self, message: types.Message) -> None:
pass
async def on_venue(self, message: types.Message) -> None:
pass
async def on_new_chat_members(self, message: types.Message) -> None:
pass
async def on_left_chat_member(self, message: types.Message) -> None:
pass
async def on_new_chat_title(self, message: types.Message) -> None:
pass
async def on_new_chat_photo(self, message: types.Message) -> None:
pass
async def on_delete_chat_photo(self, message: types.Message) -> None:
pass
async def on_group_chat_created(self, message: types.Message) -> None:
pass
async def on_supergroup_chat_created(self, message: types.Message) -> None:
pass
async def on_channel_chat_created(self, message: types.Message) -> None:
pass
async def on_pinned_message(self, message: types.Message) -> None:
pass
async def _receive_message(self, message: types.Message) -> None:
if message.text is not None:
await self.on_text(message)
elif message.audio is not None:
await self.on_audio(message)
elif message.document is not None:
await self.on_document(message)
elif message.game is not None:
await self.on_game(message)
elif message.photo is not None:
await self.on_photo(message)
elif message.sticker is not None:
await self.on_sticker(message)
elif message.video is not None:
await self.on_video(message)
elif message.voice is not None:
await self.on_voice(message)
elif message.video_note is not None:
await self.on_video_note(message)
elif message.caption is not None:
await self.on_caption(message)
elif message.contact is not None:
await self.on_contact(message)
elif message.location is not None:
await self.on_location(message)
elif message.venue is not None:
await self.on_venue(message)
elif message.new_chat_members is not None:
await self.on_new_chat_members(message)
elif message.left_chat_member is not None:
await self.on_left_chat_member(message)
elif message.new_chat_title is not None:
await self.on_new_chat_title(message)
elif message.new_chat_photo is not None:
await self.on_new_chat_photo(message)
elif message.delete_chat_photo is not None:
await self.on_delete_chat_photo(message)
elif message.group_chat_created is not None:
await self.on_group_chat_created(message)
elif message.supergroup_chat_created is not None:
await self.on_supergroup_chat_created(message)
elif message.channel_chat_created is not None:
await self.on_channel_chat_created(message)
elif message.pinned_message is not None:
await self.on_pinned_message(message)
else:
raise BotError('unknown message type')
async def _receive_callback_query(self, callback_query: types.CallbackQuery) -> None:
if callback_query.data is not None:
await self.on_callback_data(callback_query)
elif callback_query.game_short_name is not None:
await self.on_game_short_name(callback_query)
async def on_callback_data(self, callback_query: types.CallbackQuery) -> None:
    """Hook for callback queries carrying a data payload; no-op unless overridden."""

async def on_game_short_name(self, callback_query: types.CallbackQuery) -> None:
    """Hook for callback queries carrying a game short name; no-op unless overridden."""
async def _receive_inline_query(self, inline_query: types.InlineQuery) -> None:
await self.on_inline_query(inline_query)
async def on_inline_query(self, inline_query: types.InlineQuery) -> None:
pass
class BotAgent(_DispatcherMixin):
    """Drives update delivery for a bot, via long polling or a webhook."""

    def __init__(self, api_token: str, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Low-level HTTP client used for every API call below.
        self._api = BotClient(api_token)

    @property
    def client(self) -> BotClient:
        """The underlying low-level API client."""
        return self._api

    async def get_updates(self,
                          timeout: int = 0) -> Awaitable[List[types.Update]]:
        """Drain all pending updates, paging with the update-id offset.

        NOTE(review): on an async def the return annotation describes the
        awaited value, so plain List[types.Update] would be more accurate.
        """
        offset = 0
        updates = []
        while True:
            us = await self._api.get_updates(offset, timeout=timeout)  # type: List[types.Update]
            updates.extend(us)
            if not us:
                break
            # Acknowledge everything received so far by advancing past the
            # last update id seen.
            offset = us[-1].update_id + 1
        return updates

    async def get_user_profile_photos(self, user_id: int
                                      ) -> Awaitable[types.UserProfilePhotos]:
        """Collect a user's profile photos across pages into one result.

        NOTE(review): the pagination below looks suspect — ``ps`` is a
        UserProfilePhotos object, so ``if not ps`` and ``ps[-1][-1].file_id``
        rely on it supporting truthiness/indexing, and using a file_id as the
        next offset is unusual (offsets are normally photo counts). Confirm
        against the types module before relying on multi-page results.
        """
        offset = 0
        photos = []
        while True:
            ps = await self._api.get_user_profile_photos(user_id, offset)  # type: types.UserProfilePhotos
            total = ps.total_count
            photos.extend(ps.photos)
            if not ps:
                break
            offset = ps[-1][-1].file_id + 1
        return types.UserProfilePhotos({
            'total_count': total,
            'photos': photos,
        })

    async def poll(self, timeout: int) -> Awaitable[None]:
        """Long-poll for updates forever, dispatching each to the mixin hooks."""
        # remove previous webhook first — polling and webhooks are mutually
        # exclusive on the Bot API
        ok = False
        while not ok:
            ok = await self._api.delete_webhook()
        # forever
        while True:
            try:
                updates = await self.get_updates(timeout)  # type: List[types.Update]
                for update in updates:
                    if update.message is not None:
                        await self._receive_message(update.message)
                    elif update.callback_query is not None:
                        await self._receive_callback_query(update.callback_query)
                    elif update.inline_query is not None:
                        await self._receive_inline_query(update.inline_query)
            except thc.HTTPError as e:
                # 599 is Tornado's local connection/timeout pseudo-status;
                # presumably long-poll timeouts are tolerated here and simply
                # restart the loop — anything else propagates.
                if e.code != 599:
                    raise

    async def listen(self, hook_url: str) -> Awaitable[None]:
        """Register a webhook so the API pushes updates to ``hook_url``."""
        await self._api.set_webhook(url=hook_url)

    async def close(self):
        """Tear down: deregister any webhook."""
        await self._api.delete_webhook()
class BotHookHandler(tw.RequestHandler, _DispatcherMixin):
    """Webhook endpoint: decodes pushed updates and dispatches them."""

    async def post(self):
        payload = json.loads(self.request.body.decode('utf-8'))
        update = types.Update(payload)
        # Probe fields in priority order; at most one receiver runs.
        for field, receive in (
                ('message', self._receive_message),
                ('callback_query', self._receive_callback_query),
                ('inline_query', self._receive_inline_query)):
            value = getattr(update, field)
            if value is not None:
                await receive(value)
                break
class BotError(Exception):
    """Framework-level error wrapping a human-readable description."""

    def __init__(self, description):
        # Stored on the instance (not passed to Exception.__init__) so
        # args stays empty, exactly as before; __str__ renders it.
        self.description = description

    def __str__(self):
        return self.description
| 37.379684 | 106 | 0.552779 | 37,526 | 0.990054 | 0 | 0 | 299 | 0.007889 | 35,866 | 0.946258 | 3,232 | 0.08527 |
b37b44efa43ac0b0a452bc4955eec6b97cf0f90c | 6,151 | py | Python | pyapns_client/exceptions.py | rissicay/pyapns_client | 3e8bc035c3319e8ba0a7d84db7ba6c7f10628ea4 | [
"MIT"
] | 17 | 2019-04-03T23:13:08.000Z | 2022-03-25T03:24:38.000Z | pyapns_client/exceptions.py | rissicay/pyapns_client | 3e8bc035c3319e8ba0a7d84db7ba6c7f10628ea4 | [
"MIT"
] | 7 | 2021-02-28T19:13:38.000Z | 2022-01-26T19:21:43.000Z | pyapns_client/exceptions.py | rissicay/pyapns_client | 3e8bc035c3319e8ba0a7d84db7ba6c7f10628ea4 | [
"MIT"
] | 4 | 2021-02-23T02:07:46.000Z | 2022-03-29T15:11:31.000Z | import pytz
from datetime import datetime
# BASE
class APNSException(Exception):
    """
    The base class for all exceptions.
    """

    def __init__(self, status_code, apns_id):
        super().__init__()

        # The HTTP status code returned by APNs.
        # A 200 value indicates that the notification was successfully sent.
        # For a list of other possible status codes, see table 6-4 in the Apple Local
        # and Remote Notification Programming Guide.
        self.status_code = status_code

        # The APNs ApnsID value from the Notification. If you didn't set an ApnsID on the
        # Notification, this will be a new unique UUID which has been created by APNs.
        self.apns_id = apns_id
class APNSDeviceException(APNSException):
    """
    The device token is suspect: flag it as potentially invalid, and remove it
    immediately when the concrete error is UnregisteredException.
    """


class APNSServerException(APNSException):
    """
    Transient server-side failure: retry the request later.
    """


class APNSProgrammingException(APNSException):
    """
    The request itself is malformed: fix the calling code, then try again.
    """
# CONNECTION
class APNSConnectionException(APNSServerException):
    """
    Raised when the connection to the APNs servers itself fails, so no HTTP
    status code or APNs id is available.
    """

    def __init__(self):
        super().__init__(status_code=None, apns_id=None)
# APNS REASONS
class BadCollapseIdException(APNSProgrammingException):
    """
    The collapse identifier is longer than the maximum allowed size.
    """


class BadDeviceTokenException(APNSDeviceException):
    """
    The device token was rejected. Check that the request carries a valid
    token and that the token matches the environment in use.
    """


class BadExpirationDateException(APNSProgrammingException):
    """
    The apns-expiration header value is invalid.
    """


class BadMessageIdException(APNSProgrammingException):
    """
    The apns-id header value is invalid.
    """


class BadPriorityException(APNSProgrammingException):
    """
    The apns-priority header value is invalid.
    """


class BadTopicException(APNSProgrammingException):
    """
    The apns-topic header value is invalid.
    """


class DeviceTokenNotForTopicException(APNSDeviceException):
    """
    The device token does not belong to the specified topic.
    """


class DuplicateHeadersException(APNSProgrammingException):
    """
    At least one request header appeared more than once.
    """


class IdleTimeoutException(APNSServerException):
    """
    The connection timed out while idle.
    """


class InvalidPushTypeException(APNSProgrammingException):
    """
    The apns-push-type header value is invalid.
    """


class MissingDeviceTokenException(APNSProgrammingException):
    """
    No device token was given in the request :path. Check that the :path
    header actually contains the token.
    """


class MissingTopicException(APNSProgrammingException):
    """
    The apns-topic header was required but absent. It is mandatory whenever
    the client connects with a certificate that supports multiple topics.
    """


class PayloadEmptyException(APNSProgrammingException):
    """
    The message payload contained no data.
    """


class TopicDisallowedException(APNSProgrammingException):
    """
    Pushing to this topic is not permitted.
    """


class BadCertificateException(APNSProgrammingException):
    """
    The client certificate was rejected.
    """


class BadCertificateEnvironmentException(APNSProgrammingException):
    """
    The client certificate belongs to the wrong environment.
    """


class ExpiredProviderTokenException(APNSServerException):
    """
    The provider token has gone stale; generate a fresh one.
    """


class ForbiddenException(APNSProgrammingException):
    """
    The requested action is not permitted.
    """


class InvalidProviderTokenException(APNSProgrammingException):
    """
    The provider token is invalid, or its signature could not be verified.
    """


class MissingProviderTokenException(APNSProgrammingException):
    """
    The connection used no provider certificate, and either the Authorization
    header was missing or no provider token was supplied.
    """


class BadPathException(APNSProgrammingException):
    """
    The request carried an invalid :path value.
    """


class MethodNotAllowedException(APNSProgrammingException):
    """
    The request :method was something other than POST.
    """
class UnregisteredException(APNSDeviceException):
    """
    The device token is no longer active for the specified topic and should
    be removed. APNs reports this with HTTP/2 status 410; see Table 8-4.
    """

    def __init__(self, status_code, apns_id, timestamp):
        super().__init__(status_code=status_code, apns_id=apns_id)
        # Milliseconds-since-epoch moment at which APNs last confirmed the
        # token was no longer valid for the topic (meaningful for status 410).
        self.timestamp = timestamp

    @property
    def timestamp_datetime(self):
        """The timestamp as an aware UTC datetime, or None when unset."""
        if self.timestamp:
            return datetime.fromtimestamp(self.timestamp / 1000, tz=pytz.utc)
        return None
class PayloadTooLargeException(APNSProgrammingException):
    """
    The message payload exceeded the size limit. See Creating the Remote
    Notification Payload for the maximum payload size.
    """


class TooManyProviderTokenUpdatesException(APNSServerException):
    """
    The provider token is being refreshed too frequently.
    """


class TooManyRequestsException(APNSServerException):
    """
    Too many consecutive requests targeted the same device token.
    """


class InternalServerErrorException(APNSServerException):
    """
    APNs hit an internal server error.
    """


class ServiceUnavailableException(APNSServerException):
    """
    The service is currently unavailable.
    """


class ShutdownException(APNSServerException):
    """
    The server is in the process of shutting down.
    """
| 20.101307 | 191 | 0.691595 | 5,969 | 0.970411 | 0 | 0 | 172 | 0.027963 | 0 | 0 | 3,054 | 0.496505 |
b37b80ca95b3bea2079a71c9c9e266ed64bede4c | 2,223 | py | Python | vor/backends/name/census_1990.py | ryankanno/vor | f4a2a61118a1000bbeaa921cd248657ac3643f90 | [
"MIT"
] | null | null | null | vor/backends/name/census_1990.py | ryankanno/vor | f4a2a61118a1000bbeaa921cd248657ac3643f90 | [
"MIT"
] | null | null | null | vor/backends/name/census_1990.py | ryankanno/vor | f4a2a61118a1000bbeaa921cd248657ac3643f90 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .base import NameProvider
from .base import PROVIDERS
from vor.human import Gender
from py_utilities.decorators import run_once
import os
import random
class Census1990Provider(NameProvider):
    """Name provider backed by the 1990 US census frequency tables."""

    # Cache-key fragments; namespaced per provider via _get_prefixed_key.
    KEYS = {
        'provider': 'Census1990',
        'm_first': 'male:first_name',
        'f_first': 'female:first_name',
        'last': 'all:last_name'
    }

    def __init__(self):
        super(Census1990Provider, self).__init__()
        self.load_provider_data()

    @run_once
    def load_provider_data(self):
        """Parse the census data files once and cache the names in PROVIDERS."""
        for (data_source_path, key) in self._get_data_source_keys():
            names = []
            with open(data_source_path) as data_file:
                for line in data_file:
                    # Each row is "<name> <freq> <cum_freq> <rank>"; keep the name.
                    name, _, _, _ = line.split()
                    names.append(name.upper())
            PROVIDERS[self._get_prefixed_key(key)] = names

    def get_first_name(self, gender):
        """Return a random first name for the given Gender, title-cased."""
        key = self._get_first_name_key_from_gender(gender)
        return random.choice(PROVIDERS[key]).title()

    def get_last_name(self):
        """Return a random last name, title-cased."""
        key = self._get_last_name_key()
        return random.choice(PROVIDERS[key]).title()

    def _get_prefixed_key(self, key):
        # e.g. "Census1990:all:last_name"
        return "{0}:{1}".format(self.KEYS['provider'], key)

    def _get_first_name_key_from_gender(self, gender):
        """Map a Gender enum value to its cache key; raises on unknown values."""
        if gender == Gender.Male:
            return self._get_prefixed_key(self.KEYS['m_first'])
        elif gender == Gender.Female:
            return self._get_prefixed_key(self.KEYS['f_first'])
        else:
            raise ValueError("Unsupported value in gender enum")

    def _get_last_name_key(self):
        return self._get_prefixed_key(self.KEYS['last'])

    def _get_data_source_keys(self):
        """Return (absolute_data_path, cache_key) pairs for the census files.

        FIX: the lambda previously assigned to ``full_path`` (PEP 8 E731)
        is replaced by joining directly in the comprehension.
        """
        cwd = os.path.dirname(os.path.realpath(__file__))
        source_keys = [
            ('../../data/1990_census/dist.all.last', self.KEYS['last']),
            ('../../data/1990_census/dist.male.first', self.KEYS['m_first']),
            ('../../data/1990_census/dist.female.first', self.KEYS['f_first'])
        ]
        # Resolve each relative source against this module's directory.
        return [(os.path.join(cwd, source), keys) for source, keys in source_keys]
# vim: filetype=python
| 32.691176 | 78 | 0.623932 | 1,992 | 0.896086 | 0 | 0 | 387 | 0.174089 | 0 | 0 | 384 | 0.17274 |
b37b8a1795a3e0841f69cbb9cac07ad783e0862d | 1,701 | py | Python | lib/source.py | imamol/license_automation | 845aa213a23afa05f70a2d3d25284626a2f07b5e | [
"Adobe-Glyph"
] | null | null | null | lib/source.py | imamol/license_automation | 845aa213a23afa05f70a2d3d25284626a2f07b5e | [
"Adobe-Glyph"
] | null | null | null | lib/source.py | imamol/license_automation | 845aa213a23afa05f70a2d3d25284626a2f07b5e | [
"Adobe-Glyph"
] | null | null | null | import requests
from bs4 import BeautifulSoup
def verfiy_package_names():
    """Check package names from remaining_list.csv against packages.ubuntu.com.

    For each package the xenial page is fetched; packages whose page is
    missing map to "" in the returned dict, otherwise to the source-package
    name scraped from the page.

    Returns:
        dict: package name -> source package name ("" when the page errors).
    """
    with open('final_output/remaining_list.csv') as file:
        orig_packages = file.readlines()

    bad_count = 0
    good_count = 0
    count = 0  # packages whose source name equals the package name
    dict_packages = {}
    for package in orig_packages:
        package = package.strip()
        # timeout so a dead host cannot hang the whole run forever
        r = requests.get('https://packages.ubuntu.com/xenial/' + package,
                         timeout=30)
        # BUG FIX: any 4xx/5xx status is a miss; the original `> 400`
        # wrongly treated an HTTP 400 response as a hit and parsed it.
        if r.status_code >= 400:
            bad_count += 1
            dict_packages[package] = ""
        else:
            good_count += 1
            src_name = _get_package_name(r)
            if package == src_name:
                count += 1
            dict_packages[package] = src_name
    return dict_packages
def _get_package_name(page):
    """Scrape the source package name from a packages.ubuntu.com page.

    Reads the first <p> element and extracts the text between the
    'Download Source Package' marker and the following colon.
    """
    soup = BeautifulSoup(page.text, 'html.parser')
    first_paragraph = soup.find_all('p')[0].get_text()
    return _find_between(first_paragraph, 'Download Source Package', ':').strip()
def _find_between(s, first, last):
try:
start = s.index(first) + len(first)
end = s.index(last, start)
return s[start:end]
except ValueError:
return ""
| 32.09434 | 79 | 0.471487 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 320 | 0.188125 |
b37c632709a6f52e9270bc4114337fa833a78ab7 | 232 | py | Python | Uche Clare/Phase 1/Python Basic 2/Day 17/Task 2.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 6 | 2020-05-23T19:53:25.000Z | 2021-05-08T20:21:30.000Z | Uche Clare/Phase 1/Python Basic 2/Day 17/Task 2.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 8 | 2020-05-14T18:53:12.000Z | 2020-07-03T00:06:20.000Z | Uche Clare/Phase 1/Python Basic 2/Day 17/Task 2.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 39 | 2020-05-10T20:55:02.000Z | 2020-09-12T17:40:59.000Z | #Write a Python program to create all possible strings by using 'a', 'e', 'i', 'o', 'u'. Use the characters exactly once.
import random

# Arrange the five vowels in a random order, using each exactly once.
letters = ['a', 'e', 'i', 'o', 'u']
random.shuffle(letters)
char = "".join(letters)
print(char)
| 23.2 | 121 | 0.642241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 138 | 0.594828 |
b37c7961a918b47e8c39d8b6b6cd69f43d49c54f | 287 | py | Python | pdf_generator/views.py | ianvieira/django-pdf-generator | 65b89e0cbf9aee890336e9eb86785ce3171f82a3 | [
"MIT"
] | null | null | null | pdf_generator/views.py | ianvieira/django-pdf-generator | 65b89e0cbf9aee890336e9eb86785ce3171f82a3 | [
"MIT"
] | null | null | null | pdf_generator/views.py | ianvieira/django-pdf-generator | 65b89e0cbf9aee890336e9eb86785ce3171f82a3 | [
"MIT"
] | 2 | 2019-12-08T09:24:31.000Z | 2020-09-01T12:49:42.000Z | import os
from django.shortcuts import render
from .settings import pdf_settings
def pdf_html(request, html_key):
    """Render the generated template '<html_key>.html' and delete it afterwards.

    NOTE(review): the template file is removed from
    pdf_settings.TEMPLATES_DIR right after rendering — presumably these are
    one-shot generated templates; confirm nothing else needs the file.
    """
    filename = '%s.html' % html_key
    response = render(request, 'pdf_generator/%s' % filename)
    os.remove(os.path.join(pdf_settings.TEMPLATES_DIR, filename))
    return response
b37dc2263269bc3ed0d80ad7589428053b74a855 | 18,942 | py | Python | netw_preprocess.py | lynx-delta/adapted_u-net_dense_crfs | 215ea62b64757e55ab54bce2da2db323f836d892 | [
"MIT"
] | null | null | null | netw_preprocess.py | lynx-delta/adapted_u-net_dense_crfs | 215ea62b64757e55ab54bce2da2db323f836d892 | [
"MIT"
] | null | null | null | netw_preprocess.py | lynx-delta/adapted_u-net_dense_crfs | 215ea62b64757e55ab54bce2da2db323f836d892 | [
"MIT"
] | 2 | 2020-09-03T04:19:03.000Z | 2020-12-16T09:18:25.000Z |
from os import listdir, makedirs
from os.path import isfile, isdir, join
import random
import warnings
warnings.filterwarnings("ignore")
import numpy as np
from skimage import io
from skimage import transform as trf
class DataPreprocessor():
    '''Class for data preprocessing'''

    def __init__(self, main_path, folder_name):
        '''Locate the input-data and label-data folders under
        main_path/folder_name and record one path list per subfolder.'''
        self.main_path = main_path
        self.folder_name = folder_name
        self.path = join(self.main_path, self.folder_name)
        subdir = [f for f in listdir(self.path) if isdir(join(self.path, f))]
        # The two top-level folders are told apart by name: the one whose
        # name mentions data/input holds the inputs, the other the labels.
        if any(x in subdir[0] for x in ['data', 'input', 'Data', 'Input']):
            data_folder_path = join(self.path, subdir[0])
            label_folder_path = join(self.path, subdir[1])
        else:
            data_folder_path = join(self.path, subdir[1])
            label_folder_path = join(self.path, subdir[0])
        # FIX: the identical folder-scan logic was duplicated for the input
        # and label sides; it is now shared via a private helper.
        self.input_path = self._collect_folder_paths(data_folder_path)
        self.label_path = self._collect_folder_paths(label_folder_path)

    @staticmethod
    def _collect_folder_paths(folder_path):
        '''Return [[folder_path]] when the folder has no subfolders,
        otherwise one single-element [subfolder_path] entry per subfolder
        (same list-of-lists shape the rest of the class expects).'''
        subdir = [f for f in listdir(folder_path)
                  if isdir(join(folder_path, f))]
        if not subdir:
            return [[folder_path]]
        return [[join(folder_path, name)] for name in subdir]
def select_patches(self, n_patches, patch_shape=(256, 256),
                   new_folder_ext='patches', change_dir=False):
    '''Crop ``n_patches`` random patches out of every image and save them
    into mirrored "<folder>_<new_folder_ext>" directories.

    Parameters:
        n_patches: patches to cut per sample.
        patch_shape: (rows, cols) size of each patch.
        new_folder_ext: suffix for the generated output folders.
        change_dir: when True, repoint self.input_path/label_path/path at
            the new patch folders afterwards.
    '''
    self._init_folders()
    # Initialize new directories (for patches)
    patch_input_path = self._new_dir(self.input_path, self.folder_name,
                                     self.folder_name + '_' + new_folder_ext)
    patch_label_path = self._new_dir(self.label_path, self.folder_name,
                                     self.folder_name + '_' + new_folder_ext)
    # Patch selection. `prog` tracks the sample index so that the same
    # random coordinates are reused across all folders of one sample
    # (keeping inputs and labels aligned).
    prog = -1
    for file, counter in self._get_file_generator_patches(patch_input_path,
                                                          patch_label_path):
        # Generate random top-left corners once per sample
        if prog != counter:
            # NOTE(review): '{}\{}' uses a backslash separator — Windows-only.
            image = io.imread('{}\{}'.format(file[0], file[1]))
            row, col = random.randint(0, (
                image.shape[0]-patch_shape[0])-1
                ), random.randint(0, (
                image.shape[1]-patch_shape[1])-1)
            # NOTE(review): set((row, col)) builds a set of the TWO INTS, not
            # of the coordinate pair, so the first pair is never tracked for
            # duplicates — probably set([(row, col)]) was intended; confirm.
            pair_seen = set((row, col))
            rand_pair = [[row, col]]
            for n in range(1, n_patches):
                row, col = random.randint(0, (
                    image.shape[0]-patch_shape[0])-1
                    ), random.randint(0, (
                    image.shape[1]-patch_shape[1])-1)
                # re-draw until the pair has not been used for this sample
                while (row, col) in pair_seen:
                    row, col = random.randint(0, (
                        image.shape[0]-patch_shape[0])-1
                        ), random.randint(0, (
                        image.shape[1]-patch_shape[1])-1)
                pair_seen.add((row, col))
                rand_pair.append([row, col])
            prog = counter
        # Perform random patch selection (crop + save) for this file
        self._random_patch_selection(file, rand_pair, patch_shape)
    if change_dir == True:
        self.input_path = patch_input_path
        self.label_path = patch_label_path
        self.path = self.path.replace(self.folder_name,
                                      self.folder_name + '_' + new_folder_ext)
def augment_data(self, rotate_prop=0.2, hflip_prop=0.2, vflip_prop=0.2,
                 scale_prop=0.2, scale_factor=1.2, shear_prop=0.2,
                 shear_factor=0.3):
    '''Augment a proportion of the dataset with rotation, flips, zoom and
    shear; augmented copies are written next to the originals with a
    prefix (rot_/hfl_/vfl_/scl_/shr_).

    NOTE(review): file indices are drawn with random.randint, i.e. with
    replacement — the same sample can be augmented more than once, and the
    output name would then collide/overwrite; confirm this is intended.
    '''
    self._init_folders()
    data_len = len(self.input_data[0])
    # Rotation (180 degrees)
    if rotate_prop is not None and rotate_prop > 0.0:
        n_files = int(data_len*rotate_prop)
        file_list = [random.randint(0, data_len-1) for n in range(0, n_files)]
        # Perform rotation
        for file, counter in self._get_file_generator(file_list):
            image = io.imread('{}\{}'.format(file[0], file[1]))
            rotate_tf = trf.SimilarityTransform(rotation=np.deg2rad(180))
            new_imag = trf.warp(image, inverse_map=rotate_tf, mode='reflect',
                                preserve_range=True).astype(image.dtype)
            io.imsave(fname='{}\{}'.format(
                file[0], 'rot_' + str(
                    counter) + '_' + file[1]), arr=new_imag)
    # Horizontal flip
    if hflip_prop is not None and hflip_prop > 0.0:
        n_files = int(data_len*hflip_prop)
        file_list = [random.randint(0, data_len-1) for n in range(0, n_files)]
        # Perform horizontal flip (reverse columns)
        for file, counter in self._get_file_generator(file_list):
            image = io.imread('{}\{}'.format(file[0], file[1]))
            new_imag = image[:, ::-1]
            io.imsave(fname='{}\{}'.format(
                file[0], 'hfl_' + str(
                    counter) + '_' + file[1]), arr=new_imag)
    # Vertical flip
    if vflip_prop is not None and vflip_prop > 0.0:
        n_files = int(data_len*vflip_prop)
        file_list = [random.randint(0, data_len-1) for n in range(0, n_files)]
        # Perform vertical flip (reverse rows)
        for file, counter in self._get_file_generator(file_list):
            image = io.imread('{}\{}'.format(file[0], file[1]))
            new_imag = image[::-1, :]
            io.imsave(fname='{}\{}'.format(
                file[0], 'vfl_' + str(
                    counter) + '_' + file[1]), arr=new_imag)
    # Zooming: rescale then center-crop back to the original size
    if scale_factor is None or scale_factor < 1:
        scale_factor = 1.0
    if scale_prop is not None and scale_prop > 0.0:
        n_files = int(data_len*scale_prop)
        file_list = [random.randint(0, data_len-1) for n in range(0, n_files)]
        # Perform zoom
        for file, counter in self._get_file_generator(file_list):
            image = io.imread('{}\{}'.format(file[0], file[1]))
            new_imag = trf.rescale(
                image, scale_factor, mode='reflect', preserve_range=True)
            left = int((new_imag.shape[0]-image.shape[0])/2)
            right = left+image.shape[0]
            bottom = int((new_imag.shape[1]-image.shape[0])/2)
            top = bottom+image.shape[1]
            crop_imag = new_imag[bottom:top, left:right].astype(image.dtype)
            io.imsave(fname='{}\{}'.format(
                file[0], 'scl_' + str(
                    counter) + '_' + file[1]), arr=crop_imag)
    # Shearing (affine transform); factor clamped to a sane [0.1, 0.5] range
    if shear_factor is None or shear_factor < 0.1 or shear_factor > 0.5:
        shear_factor = 0.3
    if shear_prop is not None and shear_prop > 0.0:
        n_files = int(data_len*shear_prop)
        file_list = [random.randint(0, data_len-1) for n in range(0, n_files)]
        # Perform shear
        for file, counter in self._get_file_generator(file_list):
            image = io.imread('{}\{}'.format(file[0], file[1]))
            affine_tf = trf.AffineTransform(shear=shear_factor)
            new_imag = trf.warp(image, inverse_map=affine_tf, mode='reflect',
                                preserve_range=True).astype(image.dtype)
            io.imsave(fname='{}\{}'.format(
                file[0], 'shr_' + str(
                    counter) + '_' + file[1]), arr=new_imag)
def data_to_array(self, n_classes, normalization='max', filename='dataset'):
    '''Fetch data from folder and save data (X) array
    and label (Y) array in one common .npz-file.

    Parameters:
        n_classes: number of one-hot label channels to produce.
        normalization: 'max' or 'mean' per-image normalization (see
            _get_input_data).
        filename: basename of the .npz file written into self.path.
    '''
    self._init_folders()
    # Load first data set (to determine input shape)
    single_input = self._get_input_data(0, normalization)
    # Initialize array for input data
    data_X = np.zeros(
        (len(self.input_data[0]), *single_input.shape), dtype='float32')
    # Fill input data array
    for i in range(data_X.shape[0]):
        data_X[i, :, :, :] = self._get_input_data(i, normalization)
    # Determine label input type: one file with all classes vs. one
    # binary file per class
    if len(self.label_path) == 1:
        one_hot_encoder = self._get_label_data_single
    else:
        one_hot_encoder = self._get_label_data_mult
    # Initialize array for label data
    data_Y = np.zeros(
        (len(self.label_data[0]), single_input.shape[0],
         single_input.shape[1],
         n_classes),
        dtype='float32')
    # Fill label data array
    for i in range(data_Y.shape[0]):
        data_Y[i, :, :, :] = one_hot_encoder(i, n_classes)
    # Save datasets, labelsets to file
    np.savez(join(self.path, filename + '.npz'), data_X=data_X, data_Y=data_Y)
    del data_X, data_Y  # free up memory
def _new_dir(self, old_path, old_folder_name, new_folder_name):
'''Create new directories'''
new_path = []
for item in old_path:
temp_str = item[0]
temp_dir = temp_str.replace(old_folder_name, new_folder_name)
makedirs(temp_dir)
new_path.append([temp_dir])
return new_path
def _init_folders(self):
    '''List the files in every input/label folder into self.input_data /
    self.label_data (list of filename lists, parallel to the path lists),
    and verify all folders hold the same number of files.'''
    # Get files in input data folders
    self.input_data = []
    for i in range(0, len(self.input_path)):
        self.input_data.append([f for f in listdir(
            self.input_path[i][0]) if isfile(
                join(self.input_path[i][0], f))])
    # Get files in label data folders
    self.label_data = []
    for i in range(0, len(self.label_path)):
        self.label_data.append([f for f in listdir(
            self.label_path[i][0]) if isfile(
                join(self.label_path[i][0], f))])
    # Check if all folders contain the same number of files
    it = iter(self.input_data + self.label_data)
    len_entry = len(next(it))
    if not all(len(n) == len_entry for n in it):
        raise ValueError('Not all lists have same length!')
def _get_file_generator_patches(self, patch_input_path, patch_label_path):
    '''Generator for patch selection: yields ((src_folder, filename,
    dst_patch_folder), sample_index) for every sample, visiting all input
    folders then all label folders of each sample before moving on.
    sample_index (i // n_folders) groups the entries of one sample so the
    caller can reuse the same random crops for inputs and labels.'''
    n_folders = len(self.input_path + self.label_path)
    n_files = len(self.input_data[0])
    # Repeat the folder lists so index i cycles through the folders while
    # the file list below advances one sample every n_folders entries.
    temp_path = n_files * (self.input_path + self.label_path)
    temp_patch_path = n_files * (patch_input_path + patch_label_path)
    temp_files = [(self.input_data + self.label_data)[j][i]
                  for i in range(0, n_files)
                  for j in range(0, n_folders)]
    for i in range(0, len(temp_files)):
        yield (temp_path[i][0], temp_files[i],
               temp_patch_path[i][0]), divmod(i, n_folders)[0]
def _get_file_generator(self, file_list):
'''Generator to retrieve file names'''
n_folders = len(self.input_path + self.label_path)
temp_path = len(file_list) * (self.input_path + self.label_path)
temp_files = [(self.input_data + self.label_data)[j][i]
for i in file_list
for j in range(0, n_folders)]
for i in range(0, len(temp_files)):
yield (temp_path[i][0], temp_files[i]), divmod(i, n_folders)[0]
def _random_patch_selection(self, file, rand_pair, patch_shape):
    '''Crop the patches whose top-left corners are listed in ``rand_pair``
    out of the image (file[0]/file[1]) and save each as
    "patch_<n>_<filename>" into the destination folder file[2].'''
    # NOTE(review): '{}\{}' uses a backslash separator — Windows-only.
    image = io.imread('{}\{}'.format(file[0], file[1]))
    for n in range(0, len(rand_pair)):
        # Crop patch n
        patch_n = image[rand_pair[n][0]:(rand_pair[n][0] + patch_shape[0]),
                        rand_pair[n][1]:(rand_pair[n][1] + patch_shape[1])
                        ].astype(image.dtype)
        # Save patch n
        io.imsave(fname='{}\{}'.format(
            file[2], 'patch_' + str(n) + '_' + file[1]), arr=patch_n)
def _get_input_data(self, k, normalization):
    '''Read sample ``k`` from every input folder, normalize each image,
    and stack them along the channel axis into one array.

    normalization: 'max' divides by the image maximum, 'mean' by its mean;
    any other value leaves the image untouched.'''
    input_arrays = []
    for h in range(0, len(self.input_data)):
        image = io.imread('{}\{}'.format(
            self.input_path[h][0], self.input_data[h][k])
            )
        # Normalize image
        if normalization == 'max':
            image = image / image.max()
        elif normalization == 'mean':
            image = image / image.mean()
        # Grayscale images get an explicit single channel axis.
        if len(image.shape) != 3:
            image = image[:, :, np.newaxis]
        input_arrays.append(image)
    single_input = np.concatenate(input_arrays, axis=2)
    return single_input
def _get_label_data_single(self, k, n_classes):
    '''Read label files and perform one-hot encoding,
    single file which contains all classes.

    Pixel value g selects class g; the resulting channels are reordered so
    the background class (value 0) ends up in the LAST channel.'''
    image = io.imread('{}\{}'.format(
        self.label_path[0][0], self.label_data[0][k]))
    if len(image.shape) != 3:
        image = image[:, :, np.newaxis]
    if image.max() >= 255:  # if image is black / white (8, 16 or 24 bit)
        image = image / image.max()
    image = np.around(image)  # round float type data (data augmentation)
    one_hot_labels = []
    for g in range(0, n_classes):
        # Binary mask of pixels belonging to class g.
        class_i = np.ones(image.shape, dtype='float32') * (image == g)
        one_hot_labels.append(class_i)
    # Move background class to last index
    one_hot_labels = one_hot_labels[1:] + [one_hot_labels[0]]
    return np.concatenate(one_hot_labels, axis=2)
def _get_label_data_mult(self, k, _):
    '''Read label files and perform one-hot encoding,
    multiple files with 2 classes each (one common class (background) is
    present in every image).

    The second parameter (n_classes) is unused here: the class count is
    implied by the number of label folders. Overlapping foreground pixels
    are assigned to the earliest folder; the final channel is whatever is
    left over (background).'''
    label_arrays = []
    for g in range(0, len(self.label_data)):
        image = io.imread('{}\{}'.format(
            self.label_path[g][0], self.label_data[g][k]))
        if len(image.shape) != 3:
            image = image[:, :, np.newaxis]
        if image.max() >= 255:  # if image is black / white (8, 16 or 24 bit)
            image = image / image.max()
        image = np.around(image)  # round float type data (data augmentation)
        label_arrays.append(image)
    # Load first label image
    imag_ref = label_arrays[0]
    # Perform one-hot encoding: subtract the already-claimed pixels from
    # each subsequent mask so the channels stay mutually exclusive.
    one_hot_labels = [imag_ref]
    for g in range(1, len(self.label_data)):
        imag_mult = imag_ref * label_arrays[g]
        imag_corr = label_arrays[g] - imag_mult
        one_hot_labels.append(imag_corr)
        imag_ref = imag_ref + imag_corr
    # Remaining (unclaimed) pixels form the background channel.
    one_hot_labels.append(np.ones(imag_ref.shape)-imag_ref)
    return np.concatenate(one_hot_labels, axis=2)
def predict_convert(main_path, folder_name, normalization='max',
                    filename='dataset'):
    '''
    Data coverter (to .npz file) for images to predict
    main_path: path to main folder
    folder_name: data folder with images (can contain subfolders)
    normalization: divides image by image max or mean
                   default = 'max'
    filename: filename of created .npz file
              default = 'dataset'
    '''
    # Initialize directory: one entry per subfolder, or the folder itself
    # when it has no subfolders (same list-of-lists shape as the class uses)
    path = join(main_path, folder_name)
    subdir = [f for f in listdir(path) if isdir(join(path, f))]
    input_path = []
    if len(subdir) == 0:
        input_path.append([path])
    else:
        for i in range(0, len(subdir)):
            temp_path = join(path, subdir[i])
            input_path.append([temp_path])
    # Initialize folders: collect the filenames in every folder
    input_data = []
    for i in range(0, len(input_path)):
        input_data.append([f for f in listdir(input_path[i][0]) if isfile(
            join(input_path[i][0], f))])
    # Check if all folders contain the same number of files
    it = iter(input_data)
    len_entry = len(next(it))
    if not all(len(n) == len_entry for n in it):
        raise ValueError('Not all lists have same length!')
    # Load first data set (to determine input shape)
    single_input = get_data(0, input_path, input_data, normalization)
    data_X = np.zeros((len(input_data[0]), *single_input.shape),
                      dtype='float32')
    # Fill input data array
    for i in range(data_X.shape[0]):
        data_X[i, :, :, :] = get_data(i, input_path, input_data, normalization)
    # Save dataset to file
    np.savez(join(path, filename + '.npz'), data_X=data_X)
    del data_X  # free up memory
def get_data(k, input_path, input_data, normalization):
    '''Read sample ``k`` from every folder, normalize each image ('max' or
    'mean'; anything else leaves it untouched) and stack the images along
    the channel axis into one numpy array.'''
    input_arrays = []
    for h in range(0, len(input_data)):
        image = io.imread('{}\{}'.format(
            input_path[h][0], input_data[h][k]))
        # Normalize image
        if normalization == 'max':
            image = image / image.max()
        elif normalization == 'mean':
            image = image / image.mean()
        # Grayscale images get an explicit single channel axis.
        if len(image.shape) != 3:
            image = image[:, :, np.newaxis]
        input_arrays.append(image)
    single_input = np.concatenate(input_arrays, axis=2)
    return single_input
if __name__ == "__main__":
    import sys
    # CLI entry point: forwards positional args to predict_convert
    # (main_path, folder_name[, normalization[, filename]]).
    predict_convert(*sys.argv[1:])
| 47.833333 | 84 | 0.53025 | 16,174 | 0.85387 | 1,226 | 0.064724 | 0 | 0 | 0 | 0 | 2,876 | 0.151832 |
b37e55272ef98aa2ee817e1085b7b26d1e87865a | 197 | py | Python | FusionIIIT/applications/academic_procedures/urls.py | paras11agarwal/FusionIIIT | 8332811dcec475176c15e6a3a00e5bdd9074fa49 | [
"Apache-2.0"
] | null | null | null | FusionIIIT/applications/academic_procedures/urls.py | paras11agarwal/FusionIIIT | 8332811dcec475176c15e6a3a00e5bdd9074fa49 | [
"Apache-2.0"
] | 6 | 2020-02-12T00:40:35.000Z | 2022-03-11T23:19:42.000Z | FusionIIIT/applications/academic_procedures/urls.py | paras11agarwal/FusionIIIT | 8332811dcec475176c15e6a3a00e5bdd9074fa49 | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'addCourse/', views.add_course, name='addCourse'),
url(r'^dropCourse/', views.drop_course, name='dropCourse'),
]
| 21.888889 | 63 | 0.700508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 0.258883 |
b38293c6c3550a45c1beb78e6252cdaf75198804 | 2,226 | py | Python | playground/distributions/tanh_gaussian.py | brandontrabucco/playground | 069be961aaecb45d75f12f4a71cfa65d7152ea8a | [
"MIT"
] | 3 | 2019-12-06T19:22:22.000Z | 2020-01-20T01:57:26.000Z | playground/distributions/tanh_gaussian.py | brandontrabucco/playground | 069be961aaecb45d75f12f4a71cfa65d7152ea8a | [
"MIT"
] | null | null | null | playground/distributions/tanh_gaussian.py | brandontrabucco/playground | 069be961aaecb45d75f12f4a71cfa65d7152ea8a | [
"MIT"
] | null | null | null | """Author: Brandon Trabucco, Copyright 2019, MIT License"""
from playground.distributions.gaussian import Gaussian
import tensorflow as tf
import math
class TanhGaussian(Gaussian):
    """Gaussian distribution squashed through tanh (e.g. for bounded actions).

    Sampling draws from the underlying Gaussian, applies tanh, and corrects
    the log-density by the log|det Jacobian| of the tanh transform, computed
    with the numerically stable softplus identity
        log(1 - tanh(x)^2) = 2 * (log 2 - x - softplus(-2x)).

    FIX: that correction term was copy-pasted three times (sample,
    expected_value, log_prob); it now lives in one private helper.
    """

    def __init__(
            self,
            model,
            std=1.0,
            tau=0.01,
            optimizer_class=tf.keras.optimizers.Adam,
            optimizer_kwargs=None,
    ):
        Gaussian.__init__(
            self,
            model,
            std=std,
            tau=tau,
            optimizer_class=optimizer_class,
            optimizer_kwargs=optimizer_kwargs)

    @staticmethod
    def _tanh_log_det_jacobian(gaussian_samples):
        """Sum over the last axis of log|d tanh(x)/dx| at the given samples."""
        return tf.reduce_sum(
            2.0 * (tf.math.log(2.0) - gaussian_samples - tf.math.softplus(
                -2.0 * gaussian_samples)), axis=(-1))

    def sample(
            self,
            *inputs
    ):
        # sample from the underlying gaussian distribution
        gaussian_samples, log_probs = Gaussian.sample(self, *inputs)
        # squash through tanh and apply the change-of-variables correction
        tanh_samples = tf.tanh(gaussian_samples)
        return tanh_samples, log_probs - self._tanh_log_det_jacobian(
            gaussian_samples)

    def expected_value(
            self,
            *inputs
    ):
        # expected value of the underlying gaussian distribution
        gaussian_samples, log_probs = Gaussian.expected_value(self, *inputs)
        # squash through tanh and apply the change-of-variables correction
        tanh_samples = tf.tanh(gaussian_samples)
        return tanh_samples, log_probs - self._tanh_log_det_jacobian(
            gaussian_samples)

    def log_prob(
            self,
            tanh_samples,
            *inputs
    ):
        # invert the tanh (clipped away from +/-1 for numerical stability)
        gaussian_samples = tf.math.atanh(
            tf.clip_by_value(tanh_samples, -0.999, 0.999))
        # density under the gaussian, corrected by the tanh log-det term
        log_probs = Gaussian.log_prob(self, gaussian_samples, *inputs)
        return log_probs - self._tanh_log_det_jacobian(gaussian_samples)
| 30.493151 | 76 | 0.603324 | 2,070 | 0.929919 | 0 | 0 | 0 | 0 | 0 | 0 | 469 | 0.210692 |
b382e7cca2b06372222238d82ac27e4fe771b94d | 52,650 | py | Python | tests/test_aiotapioca.py | ilindrey/aiotapioca-wrapper | 9fc84b8b5c7e11df3ee9a3c8aa6615bc94948524 | [
"MIT"
] | null | null | null | tests/test_aiotapioca.py | ilindrey/aiotapioca-wrapper | 9fc84b8b5c7e11df3ee9a3c8aa6615bc94948524 | [
"MIT"
] | null | null | null | tests/test_aiotapioca.py | ilindrey/aiotapioca-wrapper | 9fc84b8b5c7e11df3ee9a3c8aa6615bc94948524 | [
"MIT"
] | null | null | null | import pickle
from collections import OrderedDict
from itertools import product
import orjson
import pytest
import pytest_asyncio
import xmltodict
from aiohttp.client_reqrep import ClientResponse
from pydantic import BaseModel
from yarl import URL
from aiotapioca.adapters import TapiocaAdapter, generate_wrapper_from_adapter
from aiotapioca.aiotapioca import TapiocaClient, TapiocaClientExecutor
from aiotapioca.exceptions import ClientError, ServerError
from aiotapioca.serializers import SimpleSerializer
from .callbacks import callback_201, callback_401
from .clients import (
ClassParserClient,
CustomModel,
CustomModelDT,
DictParserClient,
FailTokenRefreshClient,
FuncParserClient,
NoneSemaphoreClient,
PydanticDefaultClientAdapter,
PydanticForcedClient,
RetryRequestClient,
RootModel,
RootModelDT,
SimpleClient,
StaticMethodParserClient,
TokenRefreshByDefaultClient,
TokenRefreshClient,
XMLClient,
)
@pytest_asyncio.fixture
async def retry_request_client():
    """Yield a RetryRequestClient managed by its async context manager."""
    async with RetryRequestClient() as c:
        yield c
@pytest_asyncio.fixture
async def xml_client():
    """Yield an XMLClient managed by its async context manager."""
    async with XMLClient() as c:
        yield c
@pytest_asyncio.fixture
async def token_refresh_by_default_client():
    """Yield a client whose adapter refreshes tokens by default."""
    async with TokenRefreshByDefaultClient(token="token") as c:
        yield c
@pytest.fixture
def refresh_token_possible_false_values():
    """Yield values that must NOT be treated as ``refresh_token=True``."""
    yield False, None, 1, 0, "511", -22, 41, [], tuple(), {}, set(), [41], {
        "key": "value"
    }
def check_response(response, data, status=200, refresh_data=None):
    """Assert ``response`` is a TapiocaClient wrapping the given data and status."""
    # calling a TapiocaClient yields its TapiocaClientExecutor
    executor = response()
    assert type(response) == TapiocaClient
    assert type(executor) == TapiocaClientExecutor
    assert executor.data == data
    assert executor.refresh_data == refresh_data
    assert isinstance(executor.response, ClientResponse)
    assert executor.status == status
async def check_pages_responses(
    response, total_pages=1, max_pages=None, max_items=None
):
    """Verify the first page's structure, then count items yielded by ``pages()``.

    ``total_pages`` is the expected number of yielded items given the
    ``max_pages`` / ``max_items`` limits passed through to the iterator.
    """
    result_response = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }
    for resp, data in result_response.items():
        check_response(resp, data)

    iterations_count = 0
    async for item in response().pages(max_pages=max_pages, max_items=max_items):
        result_page = {item: {"key": "value"}, item.key: "value"}
        for resp, data in result_page.items():
            check_response(resp, data)
        iterations_count += 1

    assert iterations_count == total_pages
"""
test TapiocaClient
"""
def test_adapter_class_default_attributes():
    """TapiocaAdapter ships with the documented default class attributes."""
    assert isinstance(TapiocaAdapter.refresh_token, bool)
    assert TapiocaAdapter.refresh_token is False
    assert isinstance(TapiocaAdapter.semaphore, int)
    assert TapiocaAdapter.semaphore == 10
    assert isinstance(TapiocaAdapter.serializer_class, object)
    assert TapiocaAdapter.serializer_class == SimpleSerializer
def test_fill_url_template(client):
    """The ``{id}`` placeholder in a resource URL is filled from kwargs."""
    user_resource = client.user(id="123")
    assert user_resource.data == "https://api.example.org/user/123/"
def test_fill_another_root_url_template(client):
    """A resource may declare its own API root, overriding the default one."""
    another = client.another_root()
    assert another.data == "https://api.another.com/another-root/"
def test_calling_len_on_tapioca_list(client):
    """len() of a wrapped list reports the underlying list's length."""
    wrapped = client._wrap_in_tapioca(list(range(3)))
    assert len(wrapped) == 3
def test_iterated_client_items_should_be_tapioca_instances(client):
    """Every item yielded when iterating a wrapped list is a TapiocaClient."""
    for element in client._wrap_in_tapioca([0, 1, 2]):
        assert isinstance(element, TapiocaClient)
def test_iterated_client_items_should_contain_list_items(client):
    """Iterating a wrapped list yields wrappers around the original values."""
    wrapped = client._wrap_in_tapioca(list(range(3)))
    for expected, element in zip(range(3), wrapped):
        assert element().data == expected
async def test_in_operator(mocked, client):
    """``in`` checks membership against the response's top-level keys."""
    mocked.get(
        client.test().data,
        body='{"data": 1, "other": 2}',
        status=200,
        content_type="application/json",
    )

    response = await client.test().get()

    assert "data" in response
    assert "other" in response
    assert "wat" not in response
async def test_transform_camelCase_in_snake_case(mocked, client):
    """camelCase response keys are reachable via snake_case attribute access."""
    next_url = "http://api.example.org/next_batch"

    response_data = {
        "data": {
            "key_snake": "value",
            "camelCase": "data in camel case",
            "NormalCamelCase": "data in camel case",
        },
        "paging": {"next": "%s" % next_url},
    }

    mocked.add(
        client.test().data,
        body=orjson.dumps(response_data),
        status=200,
        content_type="application/json",
    )

    response = await client.test().get()

    assert response.data.key_snake().data == "value"
    assert response.data.camel_case().data == "data in camel case"
    assert response.data.normal_camel_case().data == "data in camel case"
async def test_should_be_able_to_access_by_index(mocked, client):
    """List responses support integer indexing on the wrapper."""
    mocked.get(
        client.test().data,
        body='["a", "b", "c"]',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()

    assert response[0]().data == "a"
    assert response[1]().data == "b"
    assert response[2]().data == "c"
async def test_accessing_index_out_of_bounds_should_raise_index_error(mocked, client):
    """Indexing past the end of a list response raises IndexError."""
    mocked.get(
        client.test().data,
        body='["a", "b", "c"]',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()

    with pytest.raises(IndexError):
        response[3]
async def test_accessing_empty_list_should_raise_index_error(mocked, client):
    """Indexing an empty list response raises IndexError."""
    mocked.get(
        client.test().data, body="[]", status=200, content_type="application/json"
    )
    response = await client.test().get()

    with pytest.raises(IndexError):
        response[3]
def test_fill_url_from_default_params():
    """URL templates are filled from ``default_url_params`` set at client creation."""
    simple_client = SimpleClient(default_url_params={"id": 123})
    expected_url = "https://api.example.org/user/123/"
    assert simple_client.user().data == expected_url
async def test_is_pickleable(mocked):
    """A client survives a pickle round-trip and can still page through results."""
    pickle_client = pickle.loads(pickle.dumps(SimpleClient()))

    # ensure requests keep working after pickle:
    next_url = "http://api.example.org/next_batch"

    mocked.get(
        pickle_client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )

    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )

    async with pickle_client:
        response = await pickle_client.test().get()

        iterations_count = 0
        async for item in response().pages():
            assert "value" in item.key().data
            iterations_count += 1
        assert iterations_count == 2
"""
test TapiocaExecutor
"""
def test_resource_executor_data_should_be_composed_url(client):
    """Before any request, a resource's ``data`` is its composed URL."""
    assert client.test().data == "https://api.example.org/test/"
def test_docs(client):
    """The resource docstring is generated from the resource mapping fields."""
    # first line of __doc__ is the header; the rest must mirror the mapping
    assert "\n".join(client.resource.__doc__.split("\n")[1:]) == (
        "Resource: " + client.resource._resource["resource"] + "\n"
        "Docs: " + client.resource._resource["docs"] + "\n"
        "Foo: " + client.resource._resource["foo"] + "\n"
        "Spam: " + client.resource._resource["spam"]
    )
def test_access_data_attributres_through_executor(client):
    """dict methods such as ``items`` are reachable through the executor."""
    wrapped = client._wrap_in_tapioca({"test": "value"})
    items_wrapper = wrapped().items()
    assert isinstance(items_wrapper, TapiocaClient)
    assert dict(items_wrapper().data) == {"test": "value"}
def test_is_possible_to_reverse_a_list_through_executor(client):
    """In-place list methods such as ``reverse`` are proxied by the executor."""
    wrapped = client._wrap_in_tapioca([0, 1, 2])
    wrapped().reverse()
    assert wrapped().data == [2, 1, 0]
def test_cannot__getittem__(client):
    """Indexing the executor (rather than the client wrapper) is not allowed."""
    executor = client._wrap_in_tapioca([0, 1, 2])()
    with pytest.raises(Exception):
        executor[0]
def test_cannot_iterate(client):
    """Iterating the executor (rather than the client wrapper) is not allowed."""
    executor = client._wrap_in_tapioca([0, 1, 2])()
    with pytest.raises(Exception):
        for _ in executor:
            pass
def test_dir_call_returns_executor_methods(client):
    """dir() on an executor exposes its public request/navigation API."""
    executor_dir = dir(client._wrap_in_tapioca([0, 1, 2])())
    expected_names = (
        "data",
        "response",
        "get",
        "post",
        "post_batch",
        "pages",
        "open_docs",
        "open_in_browser",
    )
    for name in expected_names:
        assert name in executor_dir
async def test_response_executor_object_has_a_response(mocked, client):
    """After a request, the executor exposes the raw aiohttp response."""
    next_url = "http://api.example.org/next_batch"

    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    executor = response()

    # both the public property and the private attribute hold the response
    assert executor.response is not None
    assert executor._response is not None
    assert executor.response.status == 200
    assert executor._response.status == 200
def test_raises_error_if_executor_does_not_have_a_response_object(client):
    """Reading ``response`` before any request was made raises."""
    executor = client()
    with pytest.raises(Exception):
        executor.response
async def test_response_executor_has_a_status_code(mocked, client):
    """The executor exposes the HTTP status of the last response."""
    mocked.get(
        client.test().data,
        body='{"data": {"key": "value"}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()

    assert response().status == 200
"""
test TapiocaExecutor requests
"""
def test_when_executor_has_no_response(client):
    """Accessing ``response`` with no request made raises a descriptive error."""
    with pytest.raises(Exception) as context:
        client.test().response
    # pytest's ExceptionInfo stores the exception in ``.value``
    # (``.exception`` is unittest's API and does not exist here)
    exception = context.value
    assert "has no response" in str(exception)
async def test_access_response_field(mocked, client):
    """Attribute access on the response navigates into the payload."""
    mocked.get(
        client.test().data,
        body='{"data": {"key": "value"}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()

    response_data = response.data()

    assert response_data.data == {"key": "value"}
async def test_carries_request_kwargs_over_calls(mocked, client):
    """Request kwargs are propagated to wrappers created by navigation."""
    mocked.get(
        client.test().data,
        body='{"data": {"key": "value"}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()

    request_kwargs = response.data.key()._request_kwargs

    assert "url" in request_kwargs
    assert "data" in request_kwargs
    assert "headers" in request_kwargs
async def test_thrown_tapioca_exception_with_client_error_data(mocked, client):
    """A 4xx response raises ClientError carrying the error payload."""
    mocked.get(
        client.test().data,
        body='{"error": "bad request test"}',
        status=400,
        content_type="application/json",
    )
    with pytest.raises(ClientError) as client_exception:
        await client.test().get()
    assert "bad request test" in client_exception.value.args
async def test_thrown_tapioca_exception_with_server_error_data(mocked, client):
    """A 5xx response raises ServerError carrying the error payload."""
    mocked.get(
        client.test().data,
        body='{"error": "server error test"}',
        status=500,
        content_type="application/json",
    )
    with pytest.raises(ServerError) as server_exception:
        await client.test().get()
    assert "server error test" in server_exception.value.args
async def test_retry_request(mocked, retry_request_client):
    """Failed requests are retried until a success or the retry budget runs out.

    Mock registration order matters: each registered mock answers exactly
    one request, so the client must consume all failures before the success.
    """
    # 10 consecutive 400s followed by a success: still succeeds
    for _ in range(10):
        mocked.get(
            retry_request_client.test().data,
            body='{"error": "bad request test"}',
            status=400,
            content_type="application/json",
        )

    mocked.get(
        retry_request_client.test().data,
        body='{"data": "success!"}',
        status=200,
        content_type="application/json",
    )

    response = await retry_request_client.test().get()
    assert response.data().data == "success!"

    # fewer failures than the retry budget: also succeeds
    for _ in range(3):
        mocked.get(
            retry_request_client.test().data,
            body='{"error": "bad request test"}',
            status=400,
            content_type="application/json",
        )

    mocked.get(
        retry_request_client.test().data,
        body='{"data": "success!"}',
        status=200,
        content_type="application/json",
    )

    response = await retry_request_client.test().get()
    assert response.data().data == "success!"

    # a status the adapter does not retry (403) propagates immediately
    for _ in range(3):
        mocked.get(
            retry_request_client.test().data,
            body='{"error": "bad request test"}',
            status=403,
            content_type="application/json",
        )

    with pytest.raises(ClientError):
        await retry_request_client.test().get()
async def test_requests(mocked, client):
    """Every HTTP verb works, with and without an explicit per-call semaphore."""
    semaphores = (3, None)
    types_request = ("get", "post", "put", "patch", "delete")

    for semaphore, type_request in product(semaphores, types_request):
        executor = client.test()
        # GET responds 200; writing verbs respond 201
        status = 200 if type_request == "get" else 201
        mocked_method = getattr(mocked, type_request)
        executor_method = getattr(executor, type_request)
        mocked_method(
            executor.data,
            body='{"data": {"key": "value"}}',
            status=status,
            content_type="application/json",
        )
        kwargs = {}
        if semaphore:
            kwargs.update({"semaphore": semaphore})
        response = await executor_method(**kwargs)
        result_response = {
            response: {"data": {"key": "value"}},
            response.data: {"key": "value"},
            response.data.key: "value",
        }
        for response, data in result_response.items():
            check_response(response, data, status)
async def test_batch_requests(mocked, client):
    """``*_batch`` methods fire one request per payload row and collect results."""
    response_data = [
        {"data": {"key": "value"}},
        {"data": {"key": "value"}},
        {"data": {"key": "value"}},
    ]

    semaphores = (3, None)
    types_request = ("post", "put", "patch", "delete")

    for semaphore, type_request in product(semaphores, types_request):
        executor = client.test()
        mocked_method = getattr(mocked, type_request)
        executor_method = getattr(executor, type_request + "_batch")
        # one mock per batch row
        for data_row in response_data:
            mocked_method(
                executor.data,
                body=orjson.dumps(data_row),
                status=201,
                content_type="application/json",
            )
        kwargs = dict(data=response_data)
        if semaphore:
            kwargs.update({"semaphore": semaphore})
        results = await executor_method(**kwargs)
        for i, response in enumerate(results):
            result_response = {
                response: response_data[i],
                response.data: response_data[i]["data"],
                response.data.key: response_data[i]["data"]["key"],
            }
            for resp, data in result_response.items():
                check_response(resp, data, 201)
        assert len(results) == len(response_data)
async def test_as_api_params_requests(mocked):
    """A semaphore given as a client api param is carried through every request."""
    semaphores = (4, None, False)
    types_request = ("get", "post", "put", "patch", "delete")

    for semaphore, type_request in product(semaphores, types_request):
        async with SimpleClient(semaphore=semaphore) as simple_client:
            executor = simple_client.test()
            status = 200 if type_request == "get" else 201
            mocked_method = getattr(mocked, type_request)
            executor_method = getattr(executor, type_request)
            mocked_method(
                executor.data,
                body='{"data": {"key": "value"}}',
                status=status,
                content_type="application/json",
            )
            kwargs = dict()
            response = await executor_method(**kwargs)
            result_response = {
                response: {"data": {"key": "value"}},
                response.data: {"key": "value"},
                response.data.key: "value",
            }
            for response, data in result_response.items():
                check_response(response, data, status)
            # the client-level semaphore must survive into the response wrapper
            assert response()._api_params.get("semaphore") == semaphore
async def test_as_api_params_batch_requests(mocked):
    """Client-level semaphore api params are carried through batch requests too."""
    response_data = [
        {"data": {"key": "value"}},
        {"data": {"key": "value"}},
        {"data": {"key": "value"}},
    ]

    semaphores = (4, None, False)
    types_request = ("post", "put", "patch", "delete")

    for semaphore, type_request in product(semaphores, types_request):
        async with SimpleClient(semaphore=semaphore) as simple_client:
            executor = simple_client.test()
            mocked_method = getattr(mocked, type_request)
            executor_method = getattr(executor, type_request + "_batch")
            # one mock per batch row
            for data_row in response_data:
                mocked_method(
                    executor.data,
                    body=orjson.dumps(data_row),
                    status=201,
                    content_type="application/json",
                )
            kwargs = dict(data=response_data)
            if semaphore:
                kwargs.update({"semaphore": semaphore})
            results = await executor_method(**kwargs)
            for i, response in enumerate(results):
                result_response = {
                    response: response_data[i],
                    response.data: response_data[i]["data"],
                    response.data.key: response_data[i]["data"]["key"],
                }
                for resp, data in result_response.items():
                    check_response(resp, data, 201)
                    assert resp()._api_params.get("semaphore") == semaphore
            assert len(results) == len(response_data)
async def test_failed_semaphore(mocked):
    """A client configured with an invalid (None) semaphore fails with TypeError."""
    async with NoneSemaphoreClient() as none_semaphore_client:
        mocked.get(
            none_semaphore_client.test().data,
            body='{"data": {"key": "value"}}',
            status=200,
            content_type="application/json",
        )
        with pytest.raises(TypeError):
            await none_semaphore_client.test().get()
"""
test iterator features
"""
async def test_simple_pages_iterator(mocked, client):
    """``pages()`` follows the paging.next link until it is empty."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=2)
async def test_simple_pages_with_max_pages_iterator(mocked, client):
    """``max_pages`` caps the number of fetched pages, not items per page.

    3 pages are consumed: 1 + 3 + 3 items = 7 yielded items total.
    """
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": "%s"}}'
        % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": "%s"}}'
        % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=7, max_pages=3)
async def test_simple_pages_with_max_items_iterator(mocked, client):
    """``max_items`` caps the total number of yielded items across pages."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": "%s"}}'
        % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": "%s"}}'
        % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=3, max_items=3)
async def test_simple_pages_with_max_pages_and_max_items_iterator(mocked, client):
    """When both limits are set, iteration stops at whichever is hit first."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}, {"key": "value"}, {"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=3, max_pages=2, max_items=3)
async def test_simple_pages_max_pages_zero_iterator(mocked, client):
    """``max_pages=0`` yields nothing at all."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.add(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=0, max_pages=0)
async def test_simple_pages_max_items_zero_iterator(mocked, client):
    """``max_items=0`` yields nothing at all."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=0, max_items=0)
async def test_simple_pages_max_pages_ans_max_items_zero_iterator(mocked, client):
    """Both limits at zero yields nothing at all."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )
    response = await client.test().get()
    await check_pages_responses(response, total_pages=0, max_pages=0, max_items=0)
async def test_pages_iterator_with_client_error(mocked, client):
    """A 4xx on a next-page fetch aborts iteration with ClientError."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    # third page fails with 408 -> iteration must raise after 2 items
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=408,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )

    response = await client.test().get()

    result_response = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }

    for resp, data in result_response.items():
        check_response(resp, data)

    iterations_count = 0
    with pytest.raises(ClientError):
        async for item in response().pages():
            result_page = {item: {"key": "value"}, item.key: "value"}
            for resp, data in result_page.items():
                check_response(resp, data)
            iterations_count += 1
    assert iterations_count == 2
async def test_pages_iterator_with_server_error(mocked, client):
    """A 5xx on a next-page fetch aborts iteration with ServerError."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    # third page fails with 504 -> iteration must raise after 2 items
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=504,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )

    response = await client.test().get()

    result_response = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }

    for resp, data in result_response.items():
        check_response(resp, data)

    iterations_count = 0
    with pytest.raises(ServerError):
        async for item in response().pages():
            result_page = {item: {"key": "value"}, item.key: "value"}
            for resp, data in result_page.items():
                check_response(resp, data)
            iterations_count += 1
    assert iterations_count == 2
async def test_pages_iterator_with_error_on_single_page(mocked, client):
    """A non-error odd page (204, empty item) does not abort the iteration."""
    next_url = "http://api.example.org/next_batch"
    mocked.get(
        client.test().data,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": "%s"}}' % next_url,
        status=200,
        content_type="application/json",
    )
    # third page answers 204 with an empty item but still links onward
    mocked.get(
        next_url,
        body='{"data": [{}], "paging": {"next": "%s"}}' % next_url,
        status=204,
        content_type="application/json",
    )
    mocked.get(
        next_url,
        body='{"data": [{"key": "value"}], "paging": {"next": ""}}',
        status=200,
        content_type="application/json",
    )

    response = await client.test().get()

    result_response = {
        response: {
            "data": [{"key": "value"}],
            "paging": {"next": "http://api.example.org/next_batch"},
        },
        response.data: [{"key": "value"}],
        response.paging: {"next": "http://api.example.org/next_batch"},
        response.paging.next: "http://api.example.org/next_batch",
    }

    for resp, data in result_response.items():
        check_response(resp, data)

    iterations_count = 0
    async for item in response().pages():
        if iterations_count == 2:
            # the 204 page yields an empty dict item
            status = 204
            result_page = {item: dict()}
        else:
            status = 200
            result_page = {item: {"key": "value"}, item.key: "value"}
        for resp, data in result_page.items():
            check_response(resp, data, status)
        iterations_count += 1
    assert iterations_count == 4
"""
test XML requests
"""
async def test_xml_post_string(mocked, xml_client):
    """A raw XML string body is sent UTF-8 encoded, unchanged."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="application/json",
    )

    data = '<tag1 attr1="val1">' "<tag2>text1</tag2>" "<tag3>text2</tag3>" "</tag1>"

    await xml_client.test().post(data=data)

    request_body = mocked.requests[("POST", URL(xml_client.test().data))][0].kwargs[
        "data"
    ]

    assert request_body == data.encode("utf-8")
async def test_xml_post_dict(mocked, xml_client):
    """A dict body is serialized to XML via xmltodict.unparse before sending."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="application/json",
    )

    data = OrderedDict(
        [
            (
                "tag1",
                OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")]),
            )
        ]
    )

    await xml_client.test().post(data=data)

    request_body = mocked.requests[("POST", URL(xml_client.test().data))][0].kwargs[
        "data"
    ]

    assert request_body == xmltodict.unparse(data).encode("utf-8")
async def test_xml_post_dict_passes_unparse_param(mocked, xml_client):
    """``xmltodict_unparse__*`` kwargs are forwarded to xmltodict.unparse."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="application/json",
    )

    data = OrderedDict(
        [
            (
                "tag1",
                OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")]),
            )
        ]
    )

    await xml_client.test().post(data=data, xmltodict_unparse__full_document=False)

    request_body = mocked.requests[("POST", URL(xml_client.test().data))][0].kwargs[
        "data"
    ]

    assert request_body == xmltodict.unparse(data, full_document=False).encode("utf-8")
async def test_xml_returns_text_if_response_not_xml(mocked, xml_client):
    """Non-XML responses are returned as plain text under the 'text' key."""
    mocked.post(
        xml_client.test().data,
        body="Any response",
        status=200,
        content_type="any content",
    )

    data = OrderedDict(
        [
            (
                "tag1",
                OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")]),
            )
        ]
    )

    response = await xml_client.test().post(data=data)

    assert "Any response" == response().data["text"]
async def test_xml_post_dict_returns_dict_if_response_xml(mocked, xml_client):
    """XML responses are parsed into a dict via xmltodict.parse."""
    xml_body = '<tag1 attr1="val1">text1</tag1>'
    mocked.post(
        xml_client.test().data,
        body=xml_body,
        status=200,
        content_type="application/xml",
    )

    data = OrderedDict(
        [
            (
                "tag1",
                OrderedDict([("@attr1", "val1"), ("tag2", "text1"), ("tag3", "text2")]),
            )
        ]
    )

    response = await xml_client.test().post(data=data)

    assert response().data == xmltodict.parse(xml_body)
"""
test token refreshing
"""
async def test_not_token_refresh_client_propagates_client_error(mocked, client):
    """A client without token refreshing propagates 401 as ClientError."""
    no_refresh_client = client

    mocked.post(
        no_refresh_client.test().data,
        callback=callback_401,
        content_type="application/json",
    )
    with pytest.raises(ClientError):
        await no_refresh_client.test().post()
async def test_disable_token_refreshing(mocked, refresh_token_possible_false_values):
    """Refreshing stays disabled by default and for every non-True flag value."""
    # default: no refresh flag at all
    async with TokenRefreshClient(token="token") as token_refreshing_client:
        mocked.post(
            token_refreshing_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )

        with pytest.raises(ClientError):
            await token_refreshing_client.test().post()

    for refresh_token in refresh_token_possible_false_values:
        # flag passed at client construction time
        async with TokenRefreshClient(
            token="token", refresh_token=refresh_token
        ) as token_refreshing_client:
            mocked.post(
                token_refreshing_client.test().data,
                callback=callback_401,
                content_type="application/json",
            )
            with pytest.raises(ClientError):
                await token_refreshing_client.test().post()

        # flag passed per request
        async with TokenRefreshClient(token="token") as token_refreshing_client:
            mocked.post(
                token_refreshing_client.test().data,
                callback=callback_401,
                content_type="application/json",
            )
            with pytest.raises(ClientError):
                await token_refreshing_client.test().post(refresh_token=refresh_token)
async def test_token_expired_automatically_refresh_authentication(mocked):
    """A 401 triggers one token refresh + retry; a second 401 is not re-refreshed."""
    # flag passed per request
    async with TokenRefreshClient(token="token") as token_refresh_client:
        mocked.post(
            token_refresh_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        mocked.post(
            token_refresh_client.test().data,
            callback=callback_201,
            content_type="application/json",
        )
        response = await token_refresh_client.test().post(refresh_token=True)

        # refresh_authentication method should be able to update api_params
        assert response._api_params["token"] == "new_token"

        mocked.post(
            token_refresh_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        mocked.post(
            token_refresh_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )

        # check that the refresh_token flag is not cyclic
        with pytest.raises(ClientError):
            await token_refresh_client.test().post(refresh_token=True)

    # flag passed at client construction time
    async with TokenRefreshClient(
        token="token", refresh_token=True
    ) as token_refresh_client:

        mocked.post(
            token_refresh_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        mocked.post(
            token_refresh_client.test().data,
            callback=callback_201,
            content_type="application/json",
        )

        response = await token_refresh_client.test().post()

        # refresh_authentication method should be able to update api_params
        assert response._api_params["token"] == "new_token"

        mocked.post(
            token_refresh_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        mocked.post(
            token_refresh_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )

        # check that the refresh_token flag is not cyclic
        with pytest.raises(ClientError):
            await token_refresh_client.test().post()
async def test_token_expired_automatically_refresh_authentication_by_default(
    mocked, token_refresh_by_default_client
):
    """Adapters with refresh-by-default retry once after a 401 without any flag."""
    mocked.post(
        token_refresh_by_default_client.test().data,
        callback=callback_401,
        content_type="application/json",
    )
    mocked.post(
        token_refresh_by_default_client.test().data,
        callback=callback_201,
        content_type="application/json",
    )

    response = await token_refresh_by_default_client.test().post()

    # refresh_authentication method should be able to update api_params
    assert response._api_params["token"] == "new_token"

    mocked.post(
        token_refresh_by_default_client.test().data,
        callback=callback_401,
        content_type="application/json",
    )
    mocked.post(
        token_refresh_by_default_client.test().data,
        callback=callback_401,
        content_type="application/json",
    )

    # check that the refresh_token flag is not cyclic
    with pytest.raises(ClientError):
        await token_refresh_by_default_client.test().post()
async def test_raises_error_if_refresh_authentication_method_returns_false_value(
    mocked, refresh_token_possible_false_values
):
    """If refresh_authentication returns a falsy value the 401 still propagates."""
    # default flag
    async with FailTokenRefreshClient(token="token") as fail_client:
        mocked.post(
            fail_client.test().data,
            callback=callback_401,
            content_type="application/json",
        )
        with pytest.raises(ClientError):
            await fail_client.test().post()

    for refresh_token in (True, *refresh_token_possible_false_values):
        # flag passed at client construction time
        async with FailTokenRefreshClient(
            token="token", refresh_token=refresh_token
        ) as fail_client:
            mocked.post(
                fail_client.test().data,
                callback=callback_401,
                content_type="application/json",
            )
            with pytest.raises(ClientError):
                await fail_client.test().post()

        # flag passed per request
        async with FailTokenRefreshClient(token="token") as fail_client:
            mocked.post(
                fail_client.test().data,
                callback=callback_401,
                content_type="application/json",
            )
            with pytest.raises(ClientError):
                await fail_client.test().post(refresh_token=refresh_token)
"""
Test PydanticAdapterMixin.
"""
async def test_pydantic_model_not_found(mocked):
    """A resource without a configured pydantic model raises ValueError."""
    async with PydanticForcedClient() as client:
        mocked.get(
            client.test_not_found().data,
            body="{}",
            status=200,
            content_type="application/json",
        )
        with pytest.raises(ValueError):
            await client.test_not_found().get()
async def test_bad_pydantic_model(mocked):
    """The route configured with a bad pydantic model raises ValueError
    when its response is parsed."""
    async with PydanticForcedClient() as api:
        mocked.get(
            api.test_bad_pydantic_model().data,
            body="{}",
            content_type="application/json",
            status=200,
        )
        with pytest.raises(ValueError):
            await api.test_bad_pydantic_model().get()
async def test_bad_dataclass_model(mocked):
    """The route configured with a bad dataclass model raises TypeError
    when its response is parsed."""
    async with PydanticForcedClient() as api:
        mocked.get(
            api.test_bad_dataclass_model().data,
            body="{}",
            content_type="application/json",
            status=200,
        )
        with pytest.raises(TypeError):
            await api.test_bad_dataclass_model().get()
async def test_pydantic_mixin_response_to_native(mocked):
    """Check the native Python type of ``response().data`` for every
    combination of the pydantic adapter flags (validate_data_received,
    validate_data_sending, extract_root, convert_to_dict), against model,
    root-model, dataclass and root-dataclass endpoints.
    """
    response_body_root = (
        '[{"key1": "value1", "key2": 123}, {"key1": "value2", "key2": 321}]'
    )
    response_body = '{"data": %s}' % response_body_root
    validate_data_received_list = [True, False]
    validate_data_sending_list = [True, False]
    extract_root_list = [True, False]
    convert_to_dict_list = [True, False]
    # Cartesian product: 16 adapter configurations are exercised in total.
    for validate_received, validate_sending, extract, convert in product(
        validate_data_received_list,
        validate_data_sending_list,
        extract_root_list,
        convert_to_dict_list,
    ):
        # A fresh adapter subclass is built per combination ("Pidantic" is
        # a typo kept from the original, the name is local to this test).
        class PidanticClientAdapter(PydanticDefaultClientAdapter):
            validate_data_received = validate_received
            validate_data_sending = validate_sending
            extract_root = extract
            convert_to_dict = convert
        PydanticClient = generate_wrapper_from_adapter(PidanticClientAdapter)
        async with PydanticClient() as client:
            # --- plain pydantic model endpoint ---
            mocked.get(
                client.test().data,
                body=response_body,
                status=200,
                content_type="application/json",
            )
            response = await client.test().get()
            # Without validation (or with dict conversion) data stays a dict;
            # otherwise it is a pydantic BaseModel instance.
            if convert or not validate_received:
                assert isinstance(response().data, dict)
                assert response().data == orjson.loads(response_body)
            else:
                assert isinstance(response().data, BaseModel)
                assert response().data.dict() == orjson.loads(response_body)
            # --- root-model endpoint (JSON array payload) ---
            mocked.get(
                client.test_root().data,
                body=response_body_root,
                status=200,
                content_type="application/json",
            )
            response = await client.test_root().get()
            data = response().data
            if extract:
                assert isinstance(data, list)
            else:
                # Unwrap the pydantic __root__ container manually when the
                # adapter was told not to extract it.
                if not validate_received:
                    assert isinstance(data, list)
                elif convert:
                    assert isinstance(data, dict)
                    data = data["__root__"]
                else:
                    assert isinstance(data, BaseModel)
                    data = data.__root__
            for response_data, expected_data in zip(
                data, orjson.loads(response_body_root)
            ):
                if convert or not validate_received:
                    assert isinstance(response_data, dict)
                    assert response_data == expected_data
                else:
                    assert isinstance(response_data, BaseModel)
                    assert response_data.dict() == expected_data
            # --- pydantic dataclass endpoint ---
            mocked.get(
                client.test_dataclass().data,
                body=response_body,
                status=200,
                content_type="application/json",
            )
            response = await client.test_dataclass().get()
            if convert or not validate_received:
                assert isinstance(response().data, dict)
                assert response().data == orjson.loads(response_body)
            else:
                assert isinstance(response().data, BaseModel)
                assert response().data.dict() == orjson.loads(response_body)
            # --- root pydantic dataclass endpoint ---
            mocked.get(
                client.test_dataclass_root().data,
                body=response_body_root,
                status=200,
                content_type="application/json",
            )
            response = await client.test_dataclass_root().get()
            data = response().data
            if extract:
                assert isinstance(data, list)
            else:
                if not validate_received:
                    assert isinstance(data, list)
                elif convert:
                    assert isinstance(data, dict)
                    data = data["__root__"]
                else:
                    assert isinstance(data, BaseModel)
                    data = data.__root__
            for response_data, expected_data in zip(
                data, orjson.loads(response_body_root)
            ):
                if convert or not validate_received:
                    assert isinstance(response_data, dict)
                    assert response_data == expected_data
                else:
                    assert isinstance(response_data, BaseModel)
                    assert response_data.dict() == expected_data
async def test_pydantic_mixin_format_data_to_request(mocked):
    """Check that outgoing request data is accepted both as plain dicts
    (validate_data_sending=True) and as already-built pydantic model /
    dataclass instances (validate_data_sending=False), for single posts
    and batched root posts, across all adapter flag combinations.
    """
    response_body_root = (
        '[{"key1": "value1", "key2": 123}, {"key1": "value2", "key2": 321}]'
    )
    response_body = '{"data": %s}' % response_body_root
    validate_data_received_list = [True, False]
    validate_data_sending_list = [True, False]
    extract_root_list = [True, False]
    convert_to_dict_list = [True, False]
    # Cartesian product: 16 adapter configurations are exercised in total.
    for validate_received, validate_sending, extract, convert in product(
        validate_data_received_list,
        validate_data_sending_list,
        extract_root_list,
        convert_to_dict_list,
    ):
        # A fresh adapter subclass is built per combination ("Pidantic" is
        # a typo kept from the original, the name is local to this test).
        class PidanticClientAdapter(PydanticDefaultClientAdapter):
            validate_data_received = validate_received
            validate_data_sending = validate_sending
            extract_root = extract
            convert_to_dict = convert
        PydanticClient = generate_wrapper_from_adapter(PidanticClientAdapter)
        async with PydanticClient() as client:
            # --- single post with a pydantic model payload ---
            mocked.post(
                client.test().data,
                body='{"id": 100500}',
                status=200,
                content_type="application/json",
            )
            if validate_sending:
                # Raw dict is validated/serialised by the adapter.
                data = orjson.loads(response_body)
                response = await client.test().post(data=data)
                assert response().data == {"id": 100500}
            else:
                # Pre-built model instance is sent as-is.
                data = CustomModel.parse_raw(response_body)
                response = await client.test().post(data=data)
                assert response().data == {"id": 100500}
            # --- batched post against the root-model endpoint; one mocked
            # response must be queued per batch element ---
            if validate_sending:
                data = orjson.loads(response_body_root)
                for _ in range(len(data)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data)
                assert len(responses) == len(data)
                for response in responses:
                    assert response().data == {"id": 100500}
            else:
                data = RootModel.parse_raw(response_body_root)
                for _ in range(len(data.__root__)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data.__root__)
                assert len(responses) == len(data.__root__)
                for response in responses:
                    assert response().data == {"id": 100500}
            # --- same pair of checks for the pydantic dataclass variants ---
            mocked.post(
                client.test().data,
                body='{"id": 100500}',
                status=200,
                content_type="application/json",
            )
            if validate_sending:
                data = orjson.loads(response_body)
                response = await client.test_dataclass().post(data=data)
                assert response().data == {"id": 100500}
            else:
                # Pydantic dataclasses expose the model via __pydantic_model__.
                data = CustomModelDT.__pydantic_model__.parse_raw(response_body)
                response = await client.test_dataclass().post(data=data)
                assert response().data == {"id": 100500}
            if validate_sending:
                data = orjson.loads(response_body_root)
                for _ in range(len(data)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data)
                assert len(responses) == len(data)
                for response in responses:
                    assert response().data == {"id": 100500}
            else:
                data = RootModelDT.__pydantic_model__.parse_raw(response_body_root)
                for _ in range(len(data.__root__)):
                    mocked.post(
                        client.test_root().data,
                        body='{"id": 100500}',
                        status=200,
                        content_type="application/json",
                    )
                responses = await client.test_root().post_batch(data=data.__root__)
                assert len(responses) == len(data.__root__)
                for response in responses:
                    assert response().data == {"id": 100500}
    # Finally: a forced-model adapter with all validation off still batches
    # plain dicts correctly.
    class PidanticClientAdapter(PydanticDefaultClientAdapter):
        forced_to_have_model = True
        validate_data_sending = False
        validate_data_received = False
    PydanticClient = generate_wrapper_from_adapter(PidanticClientAdapter)
    async with PydanticClient() as client:
        data = orjson.loads(response_body_root)
        for _ in range(len(data)):
            mocked.post(
                client.test_root().data,
                body='{"id": 100500}',
                status=200,
                content_type="application/json",
            )
        responses = await client.test_root().post_batch(data=data)
        assert len(responses) == len(data)
        for response in responses:
            assert response().data == {"id": 100500}
class TestParsers:
    """Tests for the response-parser hooks: plain functions, staticmethods,
    parser classes, and dicts mixing all three styles."""

    @pytest_asyncio.fixture
    async def func_parser_client(self):
        async with FuncParserClient() as client:
            yield client

    @pytest_asyncio.fixture
    async def static_method_parser_client(self):
        async with StaticMethodParserClient() as client:
            yield client

    @pytest_asyncio.fixture
    async def class_parser_client(self):
        async with ClassParserClient() as client:
            yield client

    @pytest_asyncio.fixture
    async def dict_parser_client(self):
        async with DictParserClient() as client:
            yield client

    @staticmethod
    def _mock_abc(mocked, request):
        """Register a mocked GET returning the JSON list ["a", "b", "c"]."""
        mocked.get(
            request.data,
            body='["a", "b", "c"]',
            status=200,
            content_type="application/json",
        )

    async def test_parsers_not_found(self, mocked, func_parser_client):
        self._mock_abc(mocked, func_parser_client.test())
        response = await func_parser_client.test().get()
        # Unknown parser names fall through to a plain AttributeError.
        with pytest.raises(AttributeError):
            response().blablabla()

    async def test_func_parser(self, mocked, func_parser_client):
        self._mock_abc(mocked, func_parser_client.test())
        response = await func_parser_client.test().get()
        assert response().foo_parser() == ["a", "b", "c"]
        for index, letter in enumerate(["a", "b", "c"]):
            assert response().foo_parser(index) == letter
        with pytest.raises(IndexError):
            response().foo_parser(3)

    async def test_static_method_parser(self, mocked, static_method_parser_client):
        self._mock_abc(mocked, static_method_parser_client.test())
        response = await static_method_parser_client.test().get()
        assert response().foo() == ["a", "b", "c"]
        for index, letter in enumerate(["a", "b", "c"]):
            assert response().foo(index) == letter
        with pytest.raises(IndexError):
            response().foo(3)

    async def test_class_parser(self, mocked, class_parser_client):
        self._mock_abc(mocked, class_parser_client.test())
        response = await class_parser_client.test().get()
        parser = response().foo_parser()
        assert parser.bar() == ["a", "b", "c"]
        for index, letter in enumerate(["a", "b", "c"]):
            assert parser.bar(index) == letter
        with pytest.raises(IndexError):
            parser.bar(3)

    async def test_dict_parser(self, mocked, dict_parser_client):
        self._mock_abc(mocked, dict_parser_client.test())
        response = await dict_parser_client.test().get()
        for name in ("func_parser", "static_method_parser"):
            assert getattr(response(), name)() == ["a", "b", "c"]
            assert getattr(response(), name)(1) == "b"
        assert response().class_parser().bar() == ["a", "b", "c"]
        assert response().class_parser().bar(1) == "b"
| 30.363322 | 105 | 0.586667 | 4,036 | 0.076657 | 890 | 0.016904 | 1,090 | 0.020703 | 47,023 | 0.893124 | 8,153 | 0.154853 |
b383570d14d763cb45ddf1b73edff1f299247c91 | 1,331 | py | Python | sandal.py | ImaduddinAMajid/webshop-scraper | e8d153153a49f1b5f482ae1e483687238c786aab | [
"MIT"
] | null | null | null | sandal.py | ImaduddinAMajid/webshop-scraper | e8d153153a49f1b5f482ae1e483687238c786aab | [
"MIT"
] | 1 | 2022-03-02T14:58:57.000Z | 2022-03-02T14:58:57.000Z | sandal.py | imaduddinamajid/webshop-scraper | e8d153153a49f1b5f482ae1e483687238c786aab | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import json
import scrapy
class SandalSpider(scrapy.Spider):
name = "sandal"
api_url = "https://www.skoringen.dk/sandaler-dame/?page={}"
start_urls = [api_url.format(1)]
def parse(self, response):
if response.status == 404:
return
sandals = response.css("div.item__price")
for sandal in sandals:
detail_url = sandal.css("a::attr(href)").extract_first()
detail_url = response.urljoin(detail_url)
yield scrapy.Request(url=detail_url, callback=self.parse_details)
# pagination handler
page_number = (
int(response.css("div.col::attr(data-pageindex)").extract_first()) + 1
)
yield scrapy.Request(url=self.api_url.format(page_number))
def parse_details(self, response):
yield {
"beskrivelse": response.css(
"div.item-description__content::text"
).extract_first(),
"materiale": response.xpath(
'//div[contains(.//text(), "Materiale")]//div[contains(@class, "item-pim__value")]//text()'
).extract_first(),
"sål": response.xpath(
'//div[contains(.//text(), "Sål")]//div[contains(@class, "item-pim__value")]//text()'
).extract_first(),
}
| 34.128205 | 107 | 0.574756 | 1,280 | 0.96024 | 1,113 | 0.834959 | 0 | 0 | 0 | 0 | 407 | 0.305326 |
b383c1f2704d2ae2a9e5fdb01fe6f31ca5410042 | 374 | py | Python | app/services/mail/events.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | app/services/mail/events.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | app/services/mail/events.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | import logging
from fastapi import FastAPI
from fastapi_mail import ConnectionConfig as MailConnectionSettings
from .state import MailState
__all__ = ['init_mail']
logger = logging.getLogger(__name__)
def init_mail(app: FastAPI, settings: MailConnectionSettings) -> None:
app.state.mail = MailState(settings)
logger.info('Mail state (sender) has been set.')
| 20.777778 | 70 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.122995 |
b3858ba61fb7c623b550e2ed98ab06225b6691e0 | 689 | py | Python | csv2json.py | gaguevaras/lideresviz | f51e7a4e71579b9788372258677b74f7298e0054 | [
"MIT"
] | null | null | null | csv2json.py | gaguevaras/lideresviz | f51e7a4e71579b9788372258677b74f7298e0054 | [
"MIT"
] | null | null | null | csv2json.py | gaguevaras/lideresviz | f51e7a4e71579b9788372258677b74f7298e0054 | [
"MIT"
] | null | null | null | import csv
import json
# Open the CSV
f = open( 'events.csv', 'rU' )
# Change each fieldname to the appropriate field name. I know, so difficult.
reader = csv.DictReader( f, fieldnames = ("id","Nombre","datasketch","pacifista","ojoalapaz","indepaz","ONU","Defensoria","Unidad de Victimas","Somos Defensores","Cinep. Noche y Niebla","CODHES","Colectivo de Abogados José Alvear Restrepo Cajar","Género","Fecha","Municipio","Departamento","Tipo de líder","Cargo","Móvil","Otros","Fuentes"))
# Parse the CSV into JSON
out = json.dumps( [ row for row in reader ] )
print ("JSON parsed!")
# Save the JSON
f = open( 'events.json', 'w')
f.write(out)
print ("JSON saved!")
| 45.933333 | 343 | 0.674891 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 477 | 0.688312 |
b3859f2b30854f47917ba9cb42b44404ed3ba402 | 8,035 | py | Python | installation/templates/configuration/auth.py | piwaniuk/critic | 28ed20bb8032d7cc5aa23de98da51e619fd84164 | [
"Apache-2.0"
] | 216 | 2015-01-05T12:48:10.000Z | 2022-03-08T00:12:23.000Z | installation/templates/configuration/auth.py | piwaniuk/critic | 28ed20bb8032d7cc5aa23de98da51e619fd84164 | [
"Apache-2.0"
] | 55 | 2015-02-28T12:10:26.000Z | 2020-11-18T17:45:16.000Z | installation/templates/configuration/auth.py | piwaniuk/critic | 28ed20bb8032d7cc5aa23de98da51e619fd84164 | [
"Apache-2.0"
] | 34 | 2015-05-02T15:15:10.000Z | 2020-06-15T19:20:37.000Z | # -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# NOTE: this file is a template — every %(installation.config...)r token is
# substituted by the Critic installer before the module becomes valid Python.
# Accepted password hash schemes. They need to be supported by the passlib
# Python package; see http://packages.python.org/passlib for details.
PASSWORD_HASH_SCHEMES = %(installation.config.password_hash_schemes)r
# Default password hash scheme. Must be included in PASSWORD_HASH_SCHEMES.
DEFAULT_PASSWORD_HASH_SCHEME = %(installation.config.default_password_hash_scheme)r
# (Approximate) minimum password hash time in seconds. Higher means safer
# passwords (more difficult to decrypt using brute-force) but slower sign-in
# operation.
MINIMUM_PASSWORD_HASH_TIME = %(installation.config.minimum_password_hash_time)r
# Calibrated minimum rounds per password hash scheme.
MINIMUM_ROUNDS = %(installation.config.minimum_rounds)r
# External authentication providers (OAuth). %(...)r tokens are filled in
# by the installer from the interactive configuration.
PROVIDERS = {
    # GitHub OAuth-based authentication.
    "github": {
        "enabled": %(installation.config.provider_github.enabled)r,
        # Allow authenticated user to create a Critic user.
        "allow_user_registration": %(installation.config.provider_github.allow_user_registration)r,
        # Verify user email addresses provided by GitHub.
        "verify_email_addresses": %(installation.config.provider_github.verify_email_addresses)r,
        # Client ID and secret. These are generated by registering an
        # application at https://github.com/settings/applications/new.
        "client_id": %(installation.config.provider_github.client_id)r,
        "client_secret": %(installation.config.provider_github.client_secret)r,
        # Bypass /createuser on first sign in, creating a user automatically.
        "bypass_createuser": %(installation.config.provider_github.bypass_createuser)r,
        # Authentication callback URI. This same URI must be provided
        # to GitHub when registering the application. The path
        # component must be "/oauth/github".
        "redirect_uri": %(installation.config.provider_github.redirect_uri)r
    },
    # Google OAuth-based authentication.
    "google": {
        "enabled": %(installation.config.provider_google.enabled)r,
        # Allow authenticated user to create a Critic user.
        "allow_user_registration": %(installation.config.provider_google.allow_user_registration)r,
        # Verify user email addresses provided by Google.
        "verify_email_addresses": %(installation.config.provider_google.verify_email_addresses)r,
        # Client ID and secret. These are generated by creating a project at
        # https://cloud.google.com/console/project, and then creating an OAuth2
        # client id using the project administration UI.
        "client_id": %(installation.config.provider_google.client_id)r,
        "client_secret": %(installation.config.provider_google.client_secret)r,
        # Bypass /createuser on first sign in, creating a user automatically.
        "bypass_createuser": %(installation.config.provider_google.bypass_createuser)r,
        # Authentication callback URI. This same URI must be provided
        # to Google when creating the OAuth2 client id. The path
        # component must be "/oauth/google".
        "redirect_uri": %(installation.config.provider_google.redirect_uri)r
    },
}
# Authentication databases. Note the literal %% sequences below: this file
# is itself a format-string template, so "%%(username)s" becomes the
# single-percent "%(username)s" Python format string after installation.
DATABASES = {
    # Using Critic's own user database for authentication.
    "internal": {},
    # Using an LDAP database for authentication.
    "ldap": {
        # Input fields.
        #
        # Each element is a tuple containing:
        # [0]: True if the field should use <input type=password>
        # [1]: Internal field identifier
        # [2]: Field label
        # [3]: (Optional) Longer description / help text
        "fields": [
            (False, "username", "Username:"),
            (True, "password", "Password:"),
        ],
        # LDAP server URL.
        "url": "%(installation.config.ldap_url)s",
        # Use TLS when connecting to LDAP server.
        "use_tls": True,
        # Credentials field.
        #
        # Identifier of the field whose value will be used as the credentials
        # (e.g. password) in the bind request used for authentication.
        "credentials": "password",
        # The following two values are all interpreted as Python format strings
        # that can reference field values, e.g. using "%%(username)s". The input
        # values will have been escaped for safe usage in LDAP expressions.
        # LDAP search base.
        "search_base": "%(installation.config.ldap_search_base)s",
        # LDAP search filter.
        "search_filter": "(uid=%%(username)s)",
        # The following settings control if and how Critic user records are
        # created after successful authentication of a user.
        # If true, Critic user records are created automatically if
        # authentication succeeds but a matching record is not found.
        "create_user": %(installation.config.ldap_create_user)r,
        # User name LDAP attribute.
        #
        # This is the LDAP attribute whose value is used as the Critic username,
        # both when looking for an existing user record and when creating a new
        # one (if one isn't found.)
        #
        # If the attribute is missing or empty it will be considered an
        # authentication error.
        "username_attribute": "%(installation.config.ldap_username_attribute)s",
        # Full name LDAP attribute.
        #
        # This is the LDAP attribute to use as the (initial) full name when
        # creating a new Critic user record. It is not used if an existing user
        # record is found.
        #
        # If the attribute is missing or empty, the user is created with the
        # username as full name.
        "fullname_attribute": "%(installation.config.ldap_fullname_attribute)s",
        # Email LDAP attribute.
        #
        # This is the LDAP attribute to use as the (initial) primary email
        # address when creating a new Critic user record. It is not used if an
        # existing user record is found.
        #
        # If the attribute is missing or empty, the user is created with no
        # primary email address.
        "email_attribute": "%(installation.config.ldap_email_attribute)s",
        # List of required LDAP groups.
        #
        # If the list is empty, no group membership is required.
        "require_groups": [
            # {
            # # Distinguished name of the required group.
            # "dn": "cn=SomeGroup,ou=Groups,dc=example,dc=com",
            #
            # # Group attribute containing the list of members.
            # "members_attribute": "memberUid",
            #
            # # Value to search for in the list of members.
            # #
            # # The value is interpreted as a Python format string, and can
            # # reference field values. It can also reference the
            # # distinguished name of the user signing in as "%%(dn)s".
            # "member_value": "%%(username)s",
            # },
        ],
        # Maximum age of cached successful authentication attempts, in seconds.
        # If set to zero, caching is disabled altogether.
        "cache_max_age": %(installation.config.ldap_cache_max_age)r,
    },
}
# Which entry of DATABASES is active (installer-substituted).
DATABASE = %(installation.config.auth_database)r
# NOTE(review): flag toggling access-token support; its semantics are
# defined elsewhere in Critic.
ENABLE_ACCESS_TOKENS = %(installation.config.enable_access_tokens)r
| 41.205128 | 99 | 0.667953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,434 | 0.676207 |
b385fd76815079de8928aba898ca084d02c692a5 | 4,024 | py | Python | reports/migrations/0001_initial.py | medfiras/Bazinga | 2f77b70a3fe627410ddf0a5be0f074de5e0dccdd | [
"Apache-2.0"
] | null | null | null | reports/migrations/0001_initial.py | medfiras/Bazinga | 2f77b70a3fe627410ddf0a5be0f074de5e0dccdd | [
"Apache-2.0"
] | 1 | 2015-05-31T10:42:36.000Z | 2015-11-03T17:52:06.000Z | reports/migrations/0001_initial.py | medfiras/Bazinga | 2f77b70a3fe627410ddf0a5be0f074de5e0dccdd | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the reports app: Activity, Campaign (shown as
    "initiative"), NGReport and NGReportComment.

    Auto-generated by ``makemigrations``. Do not edit applied migrations
    beyond comments — the recorded history must stay stable.
    """

    dependencies = [
        ('events', '__first__'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Lookup table of report activities.
        migrations.CreateModel(
            name='Activity',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('active', models.BooleanField(default=True)),
            ],
            options={
                'ordering': ['name'],
                'verbose_name': 'activity',
                'verbose_name_plural': 'activities',
            },
            bases=(models.Model,),
        ),
        # Lookup table of campaigns, displayed to users as "initiatives".
        migrations.CreateModel(
            name='Campaign',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('active', models.BooleanField(default=True)),
            ],
            options={
                'ordering': ['name'],
                'verbose_name': 'initiative',
                'verbose_name_plural': 'initiatives',
            },
            bases=(models.Model,),
        ),
        # The main report record, linked to activity/campaign/event/users.
        migrations.CreateModel(
            name='NGReport',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('report_date', models.DateField(db_index=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('longitude', models.FloatField(null=True)),
                ('latitude', models.FloatField(null=True)),
                ('location', models.CharField(default=b'', max_length=150, blank=True)),
                ('is_passive', models.BooleanField(default=False)),
                ('link', models.URLField(default=b'', max_length=500, blank=True)),
                ('link_description', models.CharField(default=b'', max_length=500, blank=True)),
                ('activity_description', models.TextField(default=b'', blank=True)),
                ('verified_activity', models.BooleanField(default=False, verbose_name=b'I have verified this activity')),
                ('country', models.CharField(default=b'', max_length=50, blank=True)),
                ('activity', models.ForeignKey(related_name='ng_reports', to='reports.Activity')),
                ('campaign', models.ForeignKey(related_name='ng_reports', blank=True, to='reports.Campaign', null=True)),
                ('event', models.ForeignKey(blank=True, to='events.Event', null=True)),
                ('mentor', models.ForeignKey(related_name='ng_reports_mentored', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
                ('user', models.ForeignKey(related_name='ng_reports', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-report_date', '-created_on'],
                'get_latest_by': 'report_date',
            },
            bases=(models.Model,),
        ),
        # Free-text comments attached to a report by a user.
        migrations.CreateModel(
            name='NGReportComment',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('comment', models.TextField()),
                ('report', models.ForeignKey(to='reports.NGReport')),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['id'],
            },
            bases=(models.Model,),
        ),
    ]
| 45.727273 | 168 | 0.560636 | 3,849 | 0.956511 | 0 | 0 | 0 | 0 | 0 | 0 | 779 | 0.193588 |
b38646db57ad70e650771a76187c2e5cd7c63f38 | 3,278 | py | Python | Utils/Classes/osuuser.py | HeapUnderfl0w/Phaazebot | 54e637bd4bc213b8efdaf23d5f331f2569e96843 | [
"MIT"
] | null | null | null | Utils/Classes/osuuser.py | HeapUnderfl0w/Phaazebot | 54e637bd4bc213b8efdaf23d5f331f2569e96843 | [
"MIT"
] | null | null | null | Utils/Classes/osuuser.py | HeapUnderfl0w/Phaazebot | 54e637bd4bc213b8efdaf23d5f331f2569e96843 | [
"MIT"
] | null | null | null | import datetime
from Utils.Classes.undefined import UNDEFINED
from Utils.Classes.apiclass import APIClass
class OsuUser(APIClass):
"""
Represents a osu! user with all its stats in a specific game mode
"""
def __repr__(self):
return f"<{self.__class__.__name__} name='{self.username}' mode='{self.mode}'>"
def __init__(self, data:dict, mode:str="0"):
self.mode_number:str = mode
self.user_id:str = data.get("user_id", UNDEFINED)
self.username:str = data.get("username", UNDEFINED)
self.JoinDate:datetime.datetime = datetime.datetime.fromisoformat( data.get("join_date", "1970-01-01 00:00:00") )
self.count300:str = data.get("count300", UNDEFINED)
self.count100:str = data.get("count100", UNDEFINED)
self.count50:str = data.get("count50", UNDEFINED)
self.playcount:str = data.get("playcount", UNDEFINED)
self.ranked_score:str = data.get("ranked_score", UNDEFINED)
self.total_score:str = data.get("total_score", UNDEFINED)
self.pp_rank:str = data.get("pp_rank", UNDEFINED)
self.pp_country_rank:str = data.get("pp_country_rank", UNDEFINED)
self.level:str = data.get("level", UNDEFINED)
self.pp_raw:str = data.get("pp_raw", UNDEFINED)
self.accuracy:str = data.get("accuracy", UNDEFINED)
self.count_rank_ssh:str = data.get("count_rank_ssh", UNDEFINED)
self.count_rank_ss:str = data.get("count_rank_ss", UNDEFINED)
self.count_rank_sh:str = data.get("count_rank_sh", UNDEFINED)
self.count_rank_s:str = data.get("count_rank_s", UNDEFINED)
self.count_rank_a:str = data.get("count_rank_a", UNDEFINED)
self.country:str = data.get("country", UNDEFINED)
self.total_seconds_played:str = data.get("total_seconds_played", UNDEFINED)
@property
def mode(self) -> str:
if self.mode_number == "0": return "osu!"
elif self.mode_number == "1": return "osu!taiko"
elif self.mode_number == "2": return "osu!ctb"
elif self.mode_number == "3": return "osu!mania"
else: return "Unknown"
def toJSON(self, count_objects:bool=True, ranks:bool=True) -> dict:
""" Returns a json save dict representation of all values for API, storage, etc... """
j:dict = dict()
j["mode"] = self.toString(self.mode)
j["user_id"] = self.toString(self.user_id)
j["username"] = self.toString(self.username)
j["join_date"] = self.toString(self.JoinDate)
j["playcount"] = self.toString(self.playcount)
j["country"] = self.toString(self.country)
j["ranked_score"] = self.toString(self.ranked_score)
j["total_score"] = self.toString(self.total_score)
j["pp_rank"] = self.toString(self.pp_rank)
j["pp_country_rank"] = self.toString(self.pp_country_rank)
j["level"] = self.toString(self.level)
j["pp_raw"] = self.toString(self.pp_raw)
j["accuracy"] = self.toString(self.accuracy)
j["total_seconds_played"] = self.toString(self.total_seconds_played)
if count_objects:
j["count300"] = self.toString(self.count300)
j["count100"] = self.toString(self.count100)
j["count50"] = self.toString(self.count50)
if ranks:
j["count_rank_ssh"] = self.toString(self.count_rank_ssh)
j["count_rank_ss"] = self.toString(self.count_rank_ss)
j["count_rank_sh"] = self.toString(self.count_rank_sh)
j["count_rank_s"] = self.toString(self.count_rank_s)
j["count_rank_a"] = self.toString(self.count_rank_a)
return j
| 38.564706 | 115 | 0.719341 | 3,170 | 0.967053 | 0 | 0 | 253 | 0.077181 | 0 | 0 | 828 | 0.252593 |
b386e11c2fec38fedadad6827bce0761aa62162c | 2,613 | py | Python | Lib/site-packages/fair_identifiers_client/helpers.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | Lib/site-packages/fair_identifiers_client/helpers.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | Lib/site-packages/fair_identifiers_client/helpers.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | 1 | 2020-03-12T20:48:04.000Z | 2020-03-12T20:48:04.000Z | import json
# Pattern (and code) taken from:
# https://gist.github.com/mivade/384c2c41c3a29c637cb6c603d4197f9f
FILE_SPECIFIER = 'file://'


def argument(*name_or_flags, **kwargs):
    """Convenience function to properly format arguments to pass to the
    subcommand decorator.

    Returns a ``(flags, options)`` pair ready to be splatted into
    ``argparse.ArgumentParser.add_argument``.
    """
    # list(...) replaces the manual append loop of the original.
    return list(name_or_flags), kwargs
def subcommand(args, parent, **kwargs):
    """Decorator factory that registers the wrapped function as a subcommand.

    The subcommand name is the function's name with underscores turned into
    dashes; *args* is a list of ``(flags, options)`` pairs as produced by
    :func:`argument`. Extra *kwargs* are forwarded to ``add_parser``.
    """
    def decorator(func):
        parser = parent.add_parser(
            func.__name__.replace('_', '-'),
            description=func.__doc__,
            **kwargs)
        for flags, options in args:
            parser.add_argument(*flags, **options)
        # parse_args() exposes the handler via the ``func`` attribute.
        parser.set_defaults(func=func)
        return func
    return decorator
# CLI bookkeeping keys that must never reach the API layer.
_internal_arg_names = ['func', 'subcommand', 'identifier']


def clear_internal_args(args):
    """Strip internal CLI keys from *args* in place and return it."""
    for name in _internal_arg_names:
        # dict.pop with a default never raises, so missing keys are fine.
        args.pop(name, None)
    return args
def load_metadata(argument):
    """Parse JSON metadata given either inline or as a ``file://`` path.

    ``file://some/path.json`` loads and parses the named file; any other
    value is treated as a literal JSON string.
    """
    if argument.startswith(FILE_SPECIFIER):
        # Slice off only the leading scheme: the original str.replace()
        # also removed any 'file://' occurring later inside the path.
        fname = argument[len(FILE_SPECIFIER):]
        with open(fname) as f:
            metadata = json.load(f)
    else:
        metadata = json.loads(argument)
    return metadata
def set_checksum_args(arguments):
    """
    Argparse parses checksums as {'checksum_sha256': '<sha256_hash>'}
    Reshape them in place into the structure the Identifiers Service
    understands:
        "checksums": [
            {"function": "sha256", "value": "<sha256_hash>"},
            ...
        ]
    Note: This modifies the values in 'arguments' and returns it.
    """
    collected = []
    # Snapshot the keys so popping while iterating is safe.
    for key in list(arguments.keys()):
        if key.startswith('checksum') and arguments[key] is not None:
            collected.append({
                'function': key.replace('checksum_', '').replace('_', '-'),
                'value': arguments.pop(key),
            })
    if collected:
        arguments['checksums'] = collected
    return arguments
def parse_none_values(values, none_value='None'):
    """Translate sentinel strings into their designated 'none' values.

    *values* is an iterable of ``(name, value, none_replacement)`` triples.
    A value equal to *none_value* (or the one-element list ``[none_value]``)
    maps to its replacement; other truthy values pass through; falsy values
    are dropped entirely.
    """
    parsed = {}
    for name, value, replacement in values:
        is_sentinel = (
            (isinstance(value, str) and value == none_value)
            or (isinstance(value, list) and value == [none_value])
        )
        if is_sentinel:
            parsed[name] = replacement
        elif value:
            parsed[name] = value
    return parsed
| 27.505263 | 78 | 0.629162 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 757 | 0.289705 |
b387c4f1ebc295636e6bf20a9ec8555d73ca50a5 | 914 | py | Python | Python/GenericStructures/Stack.py | adeeconometrics/Algorithms | ee8e994ed91193a405ccd037d2f054dc67828dc6 | [
"MIT"
] | null | null | null | Python/GenericStructures/Stack.py | adeeconometrics/Algorithms | ee8e994ed91193a405ccd037d2f054dc67828dc6 | [
"MIT"
] | null | null | null | Python/GenericStructures/Stack.py | adeeconometrics/Algorithms | ee8e994ed91193a405ccd037d2f054dc67828dc6 | [
"MIT"
] | null | null | null | from typing import TypeVar, Generic
T = TypeVar("T")
class Node(Generic[T]):
    """Singly linked node holding one value of type T."""

    # Quoted annotation: ``Node`` is not bound yet while the class body
    # executes, so an unquoted ``Node[T]`` raises NameError at import time
    # (no ``from __future__ import annotations`` in this module).
    next: "Node[T]" = None  # following node, or None at the end of the chain

    def __init__(self, data: T) -> None:
        self.data = data
class Stack(Generic[T]):
    """LIFO stack backed by a singly linked list of ``Node`` objects."""

    top: "Node[T]" = None  # most recently pushed node, or None when empty
    size: int = 0          # number of stored elements

    def push(self, data: int) -> None:
        """Push *data* onto the top of the stack."""
        node = Node[T](data)
        node.next = self.top
        self.top = node
        self.size += 1

    def pop(self) -> None:
        """Remove the top element.

        Raises:
            Exception: If the stack is empty.
        """
        if self.is_empty():
            # The original message was truncated ("Stack is already").
            raise Exception("Stack is already empty")
        self.top = self.top.next
        self.size -= 1

    def display(self) -> None:
        """Print every element from top to bottom, one per line.

        The original loop stopped when ``ptr.next`` was None, which
        skipped the bottom element and crashed on an empty stack;
        iterating until the pointer itself is None fixes both.
        """
        ptr = self.top
        while ptr is not None:
            print(ptr.data)
            ptr = ptr.next

    def is_empty(self) -> bool:
        """Return True when the stack holds no elements."""
        return self.size == 0
if __name__ == "__main__":
    # Demo: push 0..8, discard the most recent value, then show what's left.
    stack = Stack()
    for value in range(9):
        stack.push(value)
    stack.pop()
    stack.display()
| 19.869565 | 48 | 0.493435 | 718 | 0.785558 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 0.033917 |
b3880d96be8593094b3b17d77e39b7880c82ec2e | 634 | py | Python | content/ft_para_todos/load_content.py | mobilizaFT/mobiliza-ft-site | 5700459f7f35c30cf8000e1fc5464e9b2b9e4503 | [
"MIT"
] | 1 | 2021-05-02T17:16:14.000Z | 2021-05-02T17:16:14.000Z | content/ft_para_todos/load_content.py | mobilizaFT/mobiliza-ft-site | 5700459f7f35c30cf8000e1fc5464e9b2b9e4503 | [
"MIT"
] | 11 | 2020-04-08T03:00:06.000Z | 2021-03-31T19:51:35.000Z | content/ft_para_todos/load_content.py | mobilizaFT/mobiliza-ft-site | 5700459f7f35c30cf8000e1fc5464e9b2b9e4503 | [
"MIT"
] | 1 | 2020-04-06T10:58:11.000Z | 2020-04-06T10:58:11.000Z | from markdown2 import markdown
from lxml import html
import os
def load_ft_para_todos_posts(dict):
    """Parse the blog posts markdown file and store the extracted fields.

    Renders ``posts.md`` to HTML, pulls each field out of its heading
    level (h1=authors ... h6=actions) and stores the single-entry list
    under ``dict['FT_PARA_TODOS_CONTENT']``.

    Args:
        dict: Mutable mapping updated in place (name kept for backward
              compatibility; it shadows the builtin).
    """
    posts_dir = 'content/ft_para_todos/blog/'
    filename = posts_dir + 'posts.md'
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).read() leaked the handle).
    with open(filename, 'r') as source:
        content = html.fromstring(markdown(source.read()))
    # Each field lives in a dedicated heading level of the rendered HTML.
    fields = {
        "authors": '//h1/text()',
        "dates": '//h2/text()',
        "titles": '//h3/text()',
        "briefings": '//h4/text()',
        "links": '//h5/text()',
        "actions": '//h6/text()',
    }
    all_data = [{name: content.xpath(query) for name, query in fields.items()}]
    dict['FT_PARA_TODOS_CONTENT'] = all_data
b388d8673e156c073bc0e747b23586e086fb3fd0 | 2,906 | py | Python | sorteio.py | laurocjs/criptomigo | 871bcd051376f5ffe7fe64cae80401da69ae171f | [
"MIT"
] | null | null | null | sorteio.py | laurocjs/criptomigo | 871bcd051376f5ffe7fe64cae80401da69ae171f | [
"MIT"
] | null | null | null | sorteio.py | laurocjs/criptomigo | 871bcd051376f5ffe7fe64cae80401da69ae171f | [
"MIT"
] | null | null | null | # coding= utf-8
import random
from Crypto.PublicKey import RSA
# Função para sortear os pares
def sorteiaPares(listaDeParticipantes): # Receives a list of participant names
    # Draws Secret Santa pairs: returns a dict mapping giver -> receiver,
    # guaranteeing nobody draws themselves. (Python 2 code: print statement,
    # xrange.)
    dictSorteado = {} # Dict to be returned (giver -> receiver)
    numeroDeParticipantes = len(listaDeParticipantes) # Just to keep the code cleaner and more readable
    if numeroDeParticipantes < 2:
        print "Você deve ter pelo menos dois participantes!!"
        return
    # Generate a list of N random numbers from 0 to N-1, N being the number
    # of participants. To keep the assignment valid, the first number must
    # not be 0; if it is, swap it with another random position.
    sorteio = random.sample(xrange(numeroDeParticipantes), numeroDeParticipantes)
    if sorteio[0] == 0:
        rand = random.randint(1, numeroDeParticipantes-1)
        sorteio[0] = sorteio[rand]
        sorteio[rand] = 0
    # Distribute so that every participant receives some other random participant
    iterator = 0
    for numero in sorteio:
        if iterator == numero: # The person drew themselves
            # In that case, swap with the person before them in the list
            dictSorteado[listaDeParticipantes[iterator]] = dictSorteado[listaDeParticipantes[iterator-1]]
            dictSorteado[listaDeParticipantes[iterator-1]] = listaDeParticipantes[numero]
        else:
            dictSorteado[listaDeParticipantes[iterator]] = listaDeParticipantes[numero]
        iterator += 1
    return dictSorteado
# Encrypts each participant's drawn name with that participant's public key
def criptografaSorteio(dictDeChaves, dictSorteado):
    """For every participant, encrypt the name of the person they drew
    with their own public RSA key and write it to sorteio/<participant>.

    Args:
        dictDeChaves: maps participant name -> public key (exported key text).
        dictSorteado: maps participant name -> drawn participant name.
    """
    for participante in dictDeChaves:
        # Bug fix: the original iterated and read the module-level global
        # dictDeParticipantes instead of the dictDeChaves parameter
        # (it only worked because the caller happened to pass that global).
        pubKeyObj = RSA.importKey(dictDeChaves[participante])
        msg = dictSorteado[participante]
        emsg = pubKeyObj.encrypt(msg, 'x')[0]
        caminho = "sorteio/" + participante
        with open(caminho, "w") as text_file:
            text_file.write(emsg)
# Program start:
# Build the participant list however you prefer.
# The most basic form is:
listaDeParticipantes = [] # A list of participants
# Reading from a file or a directory is also an option
dictDeParticipantes = {} # An empty dict
# For each participant, read their key file and map Participant -> Public Key
for participante in listaDeParticipantes:
    with open("chaves/pubKey" + participante, mode='r') as file:
        key = file.read()
        dictDeParticipantes[participante] = key
dictSorteado = sorteiaPares(listaDeParticipantes) # Receives the dict mapping giver -> receiver
criptografaSorteio(dictDeParticipantes, dictSorteado)
b38ea18fde8a979df788072d40b24291c6dbc34a | 1,632 | py | Python | src/ds_algs/list_binary_tree.py | E1mir/PySandbox | 44b39b98a41add433f0815cd3cde4d7554629eea | [
"MIT"
] | null | null | null | src/ds_algs/list_binary_tree.py | E1mir/PySandbox | 44b39b98a41add433f0815cd3cde4d7554629eea | [
"MIT"
] | null | null | null | src/ds_algs/list_binary_tree.py | E1mir/PySandbox | 44b39b98a41add433f0815cd3cde4d7554629eea | [
"MIT"
] | null | null | null | def binary_tree(r):
"""
:param r: This is root node
:return: returns tree
"""
return [r, [], []]
def insert_left(root, new_branch):
    """Insert *new_branch* as the new left child of *root*.

    An existing left subtree is pushed down to become the left child of
    the inserted branch.

    :param root: tree to modify (mutated in place)
    :param new_branch: root value for the new left child
    :return: the modified root
    """
    old_left = root.pop(1)
    pushed_down = old_left if len(old_left) > 1 else []
    root.insert(1, [new_branch, pushed_down, []])
    return root
def insert_right(root, new_branch):
    """Insert *new_branch* as the new right child of *root*.

    An existing right subtree is pushed down to become the right child of
    the inserted branch.

    :param root: tree to modify (mutated in place)
    :param new_branch: root value for the new right child
    :return: the modified root
    """
    old_right = root.pop(2)
    pushed_down = old_right if len(old_right) > 1 else []
    root.insert(2, [new_branch, [], pushed_down])
    return root
def get_root_val(root):
    """Return the value stored at the root of *root*.

    :param root: tree in the [root, left, right] list representation
    :return: the root value
    """
    # Slot 0 of the list representation holds the node value.
    root_value = root[0]
    return root_value
def set_root_val(root, new_val):
    """
    Replace the value stored at the root, in place.

    :param root: current tree root (mutated)
    :param new_val: new value for the root
    :return: None — the original docstring claimed the updated root was
        returned, but the function mutates in place and returns nothing
    """
    root[0] = new_val
def get_left_child(root):
    """Return the left subtree of *root*.

    :param root: tree in the [root, left, right] list representation
    :return: the left-child list (shared, not copied)
    """
    left_subtree = root[1]
    return left_subtree
def get_right_child(root):
    """Return the right subtree of *root*.

    :param root: tree in the [root, left, right] list representation
    :return: the right-child list (shared, not copied)
    """
    right_subtree = root[2]
    return right_subtree
if __name__ == '__main__':
    # Demo run: build a tree, grow both sides, then print the subtrees.
    tree = binary_tree(3)
    print(insert_left(tree, 5))
    print(insert_left(tree, 6))
    print(insert_right(tree, 7))
    print(insert_right(tree, 8))
    left = get_left_child(tree)
    print(left)
    right = get_right_child(tree)
    print(right)
| 19.2 | 51 | 0.583333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 728 | 0.446078 |
b38fe55eecc2cb179f46a5d77a08782440d72905 | 1,396 | py | Python | research/recommend/DIEN/src/utils.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | research/recommend/DIEN/src/utils.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | research/recommend/DIEN/src/utils.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
'''
calculate AUC
'''
def calc_auc(raw_arr):
'''
calculate AUC
:param raw_arr:
:return:
'''
arr = sorted(raw_arr, key=lambda d: d[0], reverse=True)
pos, neg = 0., 0.
for record in arr:
if abs(record[1] - 1.) < 0.000001:
pos += 1
else:
neg += 1
fp, tp = 0., 0.
xy_arr = []
for record in arr:
if abs(record[1] - 1.) < 0.000001:
tp += 1
else:
fp += 1
xy_arr.append([fp / neg, tp / pos])
auc = 0.
prev_x = 0.
prev_y = 0.
for x, y in xy_arr:
if x != prev_x:
auc += ((x - prev_x) * (y + prev_y) / 2.)
prev_x = x
prev_y = y
return auc
| 26.339623 | 78 | 0.547278 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 736 | 0.527221 |
b390b46232762e8aa8342658d28e1d8f4b83336f | 446 | py | Python | tests/network_speedtest.py | cloud-mon/server-test | 1175ec5426eed6455600ef45a5217e5145e6d203 | [
"MIT"
] | null | null | null | tests/network_speedtest.py | cloud-mon/server-test | 1175ec5426eed6455600ef45a5217e5145e6d203 | [
"MIT"
] | 1 | 2020-07-02T06:42:26.000Z | 2020-07-02T06:42:26.000Z | tests/network_speedtest.py | cloud-mon/server-test | 1175ec5426eed6455600ef45a5217e5145e6d203 | [
"MIT"
] | null | null | null | import speedtest
def perform_test():
    """Run a network speed test against the best available server.

    Prints server name plus upload/download rates in MBit/s and returns
    the speedtest results object.
    """
    tester = speedtest.Speedtest()
    best = tester.get_best_server()
    print('Best server: ')
    print(best['name'])
    print('Perform upload app:')
    upload_bps = tester.upload()
    print('Done:' + str(upload_bps / 1024 / 1024) + ' MBit/s')
    print('Perform download app:')
    download_bps = tester.download()
    print('Done:' + str(download_bps / 1024 / 1024) + ' MBit/s')
    print(tester.results)
    return tester.results
| 22.3 | 58 | 0.609865 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 97 | 0.217489 |
b391e1dcf55a8792f4b2a7b65eefb9b385e989a1 | 1,552 | py | Python | ferris/controllers/oauth.py | palladius/gae-ferris-ricc | e6d9d8d4aadeae10eb258b94b6fe5912c8630b36 | [
"MIT"
] | 2 | 2015-03-04T07:05:57.000Z | 2015-03-04T07:06:00.000Z | ferris/controllers/oauth.py | palladius/gae-ferris-ricc | e6d9d8d4aadeae10eb258b94b6fe5912c8630b36 | [
"MIT"
] | null | null | null | ferris/controllers/oauth.py | palladius/gae-ferris-ricc | e6d9d8d4aadeae10eb258b94b6fe5912c8630b36 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from google.appengine.ext import ndb
from ferris.core.controller import Controller, route, route_with
from oauth2client.client import OAuth2WebServerFlow
from ferris.core.oauth2.user_credentials import UserCredentials as OAuth2UserCredentials
from ferris.core import settings
class Oauth(Controller):
    """Ferris controller implementing the Google OAuth2 web-server flow."""

    @route
    def start(self, session):
        """Begin the OAuth2 dance for the given session key (urlsafe).

        Builds an OAuth2WebServerFlow from the app settings, stores it on
        the session entity and redirects the user to the consent page.
        """
        config = settings.get('oauth2')
        session = ndb.Key(urlsafe=session).get()
        callback_uri = self.uri(action='callback', _full=True)
        flow = OAuth2WebServerFlow(
            client_id=config['client_id'],
            client_secret=config['client_secret'],
            scope=session.scopes,
            redirect_uri=callback_uri)
        # Carry the session key through the OAuth round-trip in 'state'.
        flow.params['state'] = session.key.urlsafe()
        if session.admin or session.force_prompt:
            # Force the consent screen to be shown again.
            flow.params['approval_prompt'] = 'force'
        uri = flow.step1_get_authorize_url()
        session.flow = flow
        session.put()
        return self.redirect(uri)

    @route_with(template='/oauth2callback')
    def callback(self):
        """Handle the provider redirect: exchange the auth code for
        credentials, persist them, then redirect to the original page."""
        session = ndb.Key(urlsafe=self.request.params['state']).get()
        credentials = session.flow.step2_exchange(self.request.params['code'])
        OAuth2UserCredentials.create(
            user=self.user,
            scopes=session.scopes,
            credentials=credentials,
            admin=session.admin
        )
        session.key.delete()  # No need for the session any longer
        return self.redirect(str(session.redirect))
| 29.846154 | 88 | 0.666881 | 1,234 | 0.795103 | 0 | 0 | 1,198 | 0.771907 | 0 | 0 | 141 | 0.090851 |
b39336da5d14f41b5e11922d81cebc45de1a648c | 588 | py | Python | data_scripts/weeds.py | bayerhealth/bayerhealth | c860dc105494bab3a00798322476c3ab034cceb9 | [
"MIT"
] | null | null | null | data_scripts/weeds.py | bayerhealth/bayerhealth | c860dc105494bab3a00798322476c3ab034cceb9 | [
"MIT"
] | null | null | null | data_scripts/weeds.py | bayerhealth/bayerhealth | c860dc105494bab3a00798322476c3ab034cceb9 | [
"MIT"
] | 1 | 2021-11-24T12:45:03.000Z | 2021-11-24T12:45:03.000Z |
import os
import pandas as pd
import shutil
# Build train/val folders arranged as <split>/<Species>/<image file>
# from the DeepWeeds label CSVs (Keras-style directory layout).
os.chdir("../Downloads/DeepWeeds_Images_256")
# exist_ok replaces the previous bare try/except-pass: reruns are still
# tolerated, but real errors (permissions etc.) are no longer swallowed.
os.makedirs("train", exist_ok=True)
os.makedirs("val", exist_ok=True)
train = pd.read_csv("../train_set_labels.csv")
val = pd.read_csv("../test_set_labels.csv")
print(train)
for _, row in train.iterrows():
    os.makedirs("train/" + str(row.Species), exist_ok=True)
    shutil.copyfile(row.Label, "train/" + row.Species + "/" + row.Label)
for _, row in val.iterrows():
    os.makedirs("val/" + str(row.Species), exist_ok=True)
    shutil.copyfile(row.Label, "val/" + row.Species + "/" + row.Label)
b393bdceee5141732cb897079744e23870c97773 | 491 | py | Python | auctionbot/users/migrations/0006_auto_20180218_1311.py | netvigator/auctions | f88bcce800b60083a5d1a6f272c51bb540b8342a | [
"MIT"
] | null | null | null | auctionbot/users/migrations/0006_auto_20180218_1311.py | netvigator/auctions | f88bcce800b60083a5d1a6f272c51bb540b8342a | [
"MIT"
] | 13 | 2019-12-12T03:07:55.000Z | 2022-03-07T12:59:27.000Z | auctionbot/users/migrations/0006_auto_20180218_1311.py | netvigator/auctions | f88bcce800b60083a5d1a6f272c51bb540b8342a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-18 06:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter the User.iMarket field definition
    (PositiveIntegerField, default 1, verbose name 'ebay market (default)')."""

    dependencies = [
        ('users', '0005_auto_20180107_2152'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='iMarket',
            field=models.PositiveIntegerField(default=1, verbose_name='ebay market (default)'),
        ),
    ]
| 23.380952 | 95 | 0.631365 | 335 | 0.682281 | 0 | 0 | 0 | 0 | 0 | 0 | 139 | 0.283096 |
b393bdd519461bf7d2b02ee148f39cad73098a46 | 2,295 | py | Python | search_engines/http_client.py | soxoj/async-search-scraper | 322733e77c3c40bc24b16a30dfe36a21fc9f32b9 | [
"MIT"
] | 5 | 2022-01-10T15:23:40.000Z | 2022-02-25T15:11:24.000Z | search_engines/http_client.py | soxoj/Search-Engines-Scraper | 322733e77c3c40bc24b16a30dfe36a21fc9f32b9 | [
"MIT"
] | null | null | null | search_engines/http_client.py | soxoj/Search-Engines-Scraper | 322733e77c3c40bc24b16a30dfe36a21fc9f32b9 | [
"MIT"
] | 2 | 2022-02-08T23:27:39.000Z | 2022-02-25T16:23:59.000Z | import aiohttp
from collections import namedtuple
from aiohttp_socks import ProxyConnector
from .config import TIMEOUT, PROXY, USER_AGENT
from . import utils as utl
class HttpClient(object):
    '''Performs HTTP requests. An `aiohttp` wrapper, essentially.'''
    def __init__(self, timeout=TIMEOUT, proxy=PROXY):
        '''Create the client session, optionally routed through a proxy.'''
        if proxy:
            # Route all traffic through the configured SOCKS/HTTP proxy.
            connector = ProxyConnector.from_url(proxy)
            self.session = aiohttp.ClientSession(connector=connector)
        else:
            self.session = aiohttp.ClientSession()
        self.headers = {
            'User-Agent': USER_AGENT,
            'Accept-Language': 'en-GB,en;q=0.5',
        }
        self.timeout = timeout
        # Lightweight (status, body) container returned by get()/post().
        self.response = namedtuple('response', ['http', 'html'])

    async def close(self):
        '''Close the underlying aiohttp session.'''
        await self.session.close()

    async def get(self, page):
        '''Submits a HTTP GET request.'''
        page = self._quote(page)
        try:
            req = await self.session.get(page, headers=self.headers, timeout=self.timeout)
            text = await req.text()
            self.headers['Referer'] = page
        # Bug fix: the public exception name is aiohttp.ClientError;
        # aiohttp.client_exception does not exist, so the original except
        # clause itself raised AttributeError when a request failed.
        except aiohttp.ClientError as e:
            return self.response(http=0, html=e.__doc__)
        return self.response(http=req.status, html=text)

    async def post(self, page, data):
        '''Submits a HTTP POST request.'''
        page = self._quote(page)
        try:
            req = await self.session.post(page, data=data, headers=self.headers, timeout=self.timeout)
            text = await req.text()
            self.headers['Referer'] = page
        # Same fix as in get(): aiohttp.ClientError is the correct name.
        except aiohttp.ClientError as e:
            return self.response(http=0, html=e.__doc__)
        return self.response(http=req.status, html=text)

    def _quote(self, url):
        '''URL-encodes URLs, but only if not already encoded.'''
        if utl.decode_bytes(utl.unquote_url(url)) == utl.decode_bytes(url):
            url = utl.quote_url(url)
        return url

    def _set_proxy(self, proxy):
        '''Returns HTTP or SOCKS proxies dictionary.'''
        if proxy:
            if not utl.is_url(proxy):
                raise ValueError('Invalid proxy format!')
            proxy = {'http':proxy, 'https':proxy}
        return proxy
| 34.772727 | 103 | 0.588671 | 2,113 | 0.920697 | 0 | 0 | 0 | 0 | 1,008 | 0.439216 | 319 | 0.138998 |
b396c971b024674d61bcb82714f34a20eb7078c7 | 2,176 | py | Python | Python Advanced/Advanced/Multidimensional Lists/Exercise - 2/Task07.py | IvanTodorovBG/SoftUni | 7b667f6905d9f695ab1484efbb02b6715f6d569e | [
"MIT"
] | 1 | 2022-03-16T10:23:04.000Z | 2022-03-16T10:23:04.000Z | Python Advanced/Advanced/Multidimensional Lists/Exercise - 2/Task07.py | IvanTodorovBG/SoftUni | 7b667f6905d9f695ab1484efbb02b6715f6d569e | [
"MIT"
] | null | null | null | Python Advanced/Advanced/Multidimensional Lists/Exercise - 2/Task07.py | IvanTodorovBG/SoftUni | 7b667f6905d9f695ab1484efbb02b6715f6d569e | [
"MIT"
] | null | null | null | def is_valid(r, c, size):
if 0 <= r < size and 0 <= c < size:
return True
return False
# Santa walks an n x n board delivering presents: "V" = nice kid,
# "X" = naughty kid (cookie stolen in the blast radius), "C" = cookie
# that triggers deliveries to all four neighbours, "S" = Santa.
count_presents = int(input())
n = int(input())
matrix = []
nice_kids_count = 0
given_to_nice = 0
for _ in range(n):
    data = input().split()
    matrix.append(data)
    nice_kids_count += data.count("V")
# Placeholders only; both are always overwritten by the scan below because
# the board contains an "S". (Assigning the `int` type object is unusual —
# None would express the intent better, but behaviour is identical here.)
santa_row = int
santa_col = int
for row in range(n):
    for col in range(n):
        if matrix[row][col] == "S":
            santa_row = row
            santa_col = col
# Movement deltas keyed by the command words read from input.
all_directions = {
    "up": (-1, 0),
    "down": (1, 0),
    "left": (0, -1),
    "right": (0, 1)
}
while True:
    command = input()
    if command == "Christmas morning":
        break
    next_row = santa_row + all_directions[command][0]
    next_col = santa_col + all_directions[command][1]
    if is_valid(next_row, next_col, n):
        if matrix[next_row][next_col] == "V":
            # Stepping on a nice kid delivers one present.
            given_to_nice += 1
            count_presents -= 1
        elif matrix[next_row][next_col] == "C":
            # A cookie: deliver to every valid neighbour of the cookie cell.
            for direction in all_directions:
                next_step_row = next_row + all_directions[direction][0]
                next_step_col = next_col + all_directions[direction][1]
                if is_valid(next_step_row, next_step_col, n):
                    if matrix[next_step_row][next_step_col] == "V":
                        given_to_nice += 1
                        count_presents -= 1
                    elif matrix[next_step_row][next_step_col] == "X":
                        # Naughty kid still consumes a present here.
                        count_presents -= 1
                    # Visited neighbour cells are cleared either way.
                    matrix[next_step_row][next_step_col] = "-"
                    if count_presents == 0:
                        break
        # Move Santa: clear the old cell, mark the new one.
        matrix[santa_row][santa_col] = "-"
        matrix[next_row][next_col] = "S"
        santa_row = next_row
        santa_col = next_col
    if count_presents == 0:
        break
if count_presents == 0 and nice_kids_count != given_to_nice:
    print("Santa ran out of presents!")
for sublist in matrix:
    print(" ".join(sublist))
if nice_kids_count == given_to_nice:
    print(f"Good job, Santa! {given_to_nice} happy nice kid/s.")
else:
    print(f"No presents for {nice_kids_count - given_to_nice} nice kid/s.")
b3972e6ffb6c4aa24b8e72c30d0d7fe2844a3e14 | 2,042 | py | Python | Cloud Services and Computing/Amazon AWS/Lambdas/Lambdas-Practice-Web-Examples/aws-tutorial-code-master/textract/bounding-box-kv-tables/flask-app/app.py | okara83/Becoming-a-Data-Scientist | f09a15f7f239b96b77a2f080c403b2f3e95c9650 | [
"MIT"
] | null | null | null | Cloud Services and Computing/Amazon AWS/Lambdas/Lambdas-Practice-Web-Examples/aws-tutorial-code-master/textract/bounding-box-kv-tables/flask-app/app.py | okara83/Becoming-a-Data-Scientist | f09a15f7f239b96b77a2f080c403b2f3e95c9650 | [
"MIT"
] | null | null | null | Cloud Services and Computing/Amazon AWS/Lambdas/Lambdas-Practice-Web-Examples/aws-tutorial-code-master/textract/bounding-box-kv-tables/flask-app/app.py | okara83/Becoming-a-Data-Scientist | f09a15f7f239b96b77a2f080c403b2f3e95c9650 | [
"MIT"
] | 2 | 2022-02-09T15:41:33.000Z | 2022-02-11T07:47:40.000Z | import os
import time
from flask import Flask, render_template, request
from aws_requests_auth.aws_auth import AWSRequestsAuth
import requests
import uuid
import base64
import shutil
from config import Config
app = Flask(__name__)
config = Config()
@app.route("/", methods=["GET", "POST"])
def index():
    """Serve the upload form; on upload, push the image to S3 via a
    presigned POST, poll for the processed result and render both the
    original (inlined base64) and processed image."""
    if "uploadFile" in request.files:
        # Reset the scratch directory for this request's processed image.
        try:
            shutil.rmtree("static/temp")
        except OSError:
            # Narrowed from a bare except: still best-effort (missing dir,
            # busy files), but no longer traps KeyboardInterrupt/SystemExit.
            pass
        os.makedirs("static/temp", exist_ok=True)
        uploaded_file = request.files.get("uploadFile", None)
        uploaded_file = uploaded_file.read()
        file_name = f"{uuid.uuid4().hex}.png"
        # Ask the API for a presigned S3 POST, then upload the raw bytes.
        endpoint = f"{config.ENDPOINT}/upload?filename={file_name}"
        response = requests.get(endpoint, auth=sign())
        response = response.json()
        files = {"file": (file_name, uploaded_file)}
        http_response = requests.post(
            response["url"], data=response["fields"], files=files
        )
        # Block until the backend has produced the processed image.
        full_filename = download_processed_file(file_name)
        with open(f"static/temp/{file_name}", "wb") as f:
            f.write(full_filename)
        processed_image = os.path.join("static/temp", file_name)
        # Inline the original upload so the template can show it directly.
        uploaded_file = base64.b64encode(uploaded_file).decode("utf-8")
    else:
        processed_image = None
        uploaded_file = None
    return render_template(
        "home.html", processed_image=processed_image, uploaded_file=uploaded_file
    )
def sign():
    """Build SigV4 request auth for the API Gateway endpoint.

    Returns an AWSRequestsAuth object suitable for requests' ``auth=``
    parameter, configured from the app Config credentials.
    """
    auth = AWSRequestsAuth(
        aws_access_key=config.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=config.AWS_SECRET_ACCESS_KEY,
        aws_host=config.HOST,
        aws_region="us-east-1",
        aws_service="execute-api",
    )
    return auth
def download_processed_file(file_name):
    """Poll the API once per second until the processed file is ready,
    then download and return its raw bytes.

    NOTE(review): there is no timeout or retry cap — if processing never
    completes this loops forever; consider bounding the polling.
    """
    while True:
        endpoint = f"{config.ENDPOINT}/download?filename={file_name}"
        response = requests.get(endpoint, auth=sign())
        if response.status_code == 200:
            # The endpoint returns a (presigned) URL as plain text.
            response = requests.get(response.text)
            return response.content
        time.sleep(1)
| 29.171429 | 81 | 0.651812 | 0 | 0 | 0 | 0 | 1,186 | 0.580803 | 0 | 0 | 291 | 0.142507 |
b397b968bf5b0884131d6ce2a3bdfa2c376131cc | 792 | py | Python | tests/integration/test_project.py | gitter-badger/a2ml | 1d9ef6657645b61c64090284ed8fadb1a68b932c | [
"Apache-2.0"
] | 30 | 2019-07-01T13:23:27.000Z | 2022-03-16T21:19:33.000Z | tests/integration/test_project.py | arita37/a2ml | 3e92bede2c2ef6e63be74560cc6b904d3ec9d931 | [
"Apache-2.0"
] | 234 | 2019-07-04T13:56:15.000Z | 2021-11-04T10:12:55.000Z | tests/integration/test_project.py | arita37/a2ml | 3e92bede2c2ef6e63be74560cc6b904d3ec9d931 | [
"Apache-2.0"
] | 13 | 2019-07-04T14:00:34.000Z | 2020-07-13T11:18:44.000Z | import pytest
from a2ml.cmdl.cmdl import cmdl
class TestProjectCLI():
    """End-to-end CLI test for the a2ml project commands.

    Uses pytest fixtures: ``runner`` (Click CliRunner), ``log`` (captured
    log records) and ``project`` (project setup).
    """
    def test_create_list_delete(self, runner, log, project):
        # Ensure a clean slate, then exercise select -> create -> list -> delete.
        result = runner.invoke(cmdl, ['project', 'delete', 'cli-integration-test'])
        result = runner.invoke(cmdl, ['project', 'select', 'cli-integration-test'])
        assert log.messages[-1] == '[auger] Selected Project cli-integration-test'
        result = runner.invoke(cmdl, ['project', 'create'])
        assert log.messages[-1] == '[auger] Created Project cli-integration-test'
        result = runner.invoke(cmdl, ['project', 'list'])
        assert 'cli-integration-test' in str(log.messages)
        result = runner.invoke(cmdl, ['project', 'delete'])
        assert log.messages[-1] == '[auger] Deleted Project cli-integration-test'
b3985cdea908a7ea476bd235e7bfc5f58bee0b36 | 609 | py | Python | pyat/tools.py | matt-hayden/PyAt | 00a011a935287e15fd63fd1a842fb839f4fd1b4d | [
"Unlicense"
] | null | null | null | pyat/tools.py | matt-hayden/PyAt | 00a011a935287e15fd63fd1a842fb839f4fd1b4d | [
"Unlicense"
] | null | null | null | pyat/tools.py | matt-hayden/PyAt | 00a011a935287e15fd63fd1a842fb839f4fd1b4d | [
"Unlicense"
] | null | null | null |
import subprocess
import sys
from . import debug, info, warning, error, fatal
from . import AT, BATCH
from .jobs import _get_jobs
# Name of the grep executable to spawn; upper-case .EXE form on Windows.
if sys.platform.startswith('win'):
	GREP='GREP.EXE'
else:
	GREP='grep'
def search(grep_args, at_encoding='UTF-8'):
	"""Grep through the script of every queued at(1) job.

	Pipes each job's script into a grep subprocess and yields matching
	output lines, labelled with the job id (via grep's --label/-H).

	:param grep_args: extra arguments appended to the grep command line
	:param at_encoding: encoding used when piping the script to grep
	"""
	for job in _get_jobs():
		jid, started, queue, owner = job
		label = str(jid)
		# --label names the stdin "file" so -H prefixes matches with the job id.
		proc = subprocess.Popen([GREP, '--label='+label, '-H']+grep_args,
								stdin=subprocess.PIPE,
								stdout=subprocess.PIPE)
		contents = '\n'.join(job.get_script())
		out, _ = proc.communicate(contents.encode(at_encoding))
		if out:
			yield from out.decode().splitlines()
| 23.423077 | 67 | 0.688013 | 0 | 0 | 402 | 0.660099 | 0 | 0 | 0 | 0 | 46 | 0.075534 |
b3985f565a3b58ad025d4c71f174f2a90abe2aa7 | 12,172 | py | Python | tests/test_main_window.py | Ceystyle/easyp2p | 99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc | [
"MIT"
] | 4 | 2019-07-18T10:58:28.000Z | 2021-11-18T16:57:45.000Z | tests/test_main_window.py | Ceystyle/easyp2p | 99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc | [
"MIT"
] | 1 | 2019-07-05T09:21:47.000Z | 2019-07-05T09:21:47.000Z | tests/test_main_window.py | Ceystyle/easyp2p | 99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc | [
"MIT"
] | 2 | 2019-07-05T08:56:34.000Z | 2020-06-09T10:03:42.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Niko Sandschneider
"""Module containing all tests for the main window of easyp2p."""
from datetime import date, timedelta
import os
import sys
import unittest.mock
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication, QCheckBox, QLineEdit
import easyp2p.platforms
from easyp2p.ui.main_window import MainWindow
QT_APP = QApplication(sys.argv)
class MainWindowTests(unittest.TestCase):
"""Test the main window of easyp2p."""
PLATFORMS = {pl for pl in dir(easyp2p.platforms) if pl[0].isupper()}
    def setUp(self) -> None:
        """Create the GUI."""
        # A fresh MainWindow per test keeps widget state isolated.
        self.form = MainWindow(QT_APP)
    def set_date_combo_boxes(
            self, start_month: int, start_year: int, end_month: int,
            end_year: int) -> None:
        """
        Helper method to set the indices of the date combo boxes.

        Args:
            start_month: Index of start month combo box entry.
            start_year: Index of start year combo box entry.
            end_month: Index of end month combo box entry.
            end_year: Index of end year combo box entry.
        """
        self.form.combo_box_start_month.setCurrentIndex(start_month)
        self.form.combo_box_start_year.setCurrentIndex(start_year)
        self.form.combo_box_end_month.setCurrentIndex(end_month)
        self.form.combo_box_end_year.setCurrentIndex(end_year)
        # Call the slot explicitly so dependent state is refreshed
        # (presumably setCurrentIndex alone does not fire it — the
        # "activated" signal is user-interaction driven in Qt).
        self.form.on_combo_box_start_year_activated()
    def test_defaults(self) -> None:
        """Test GUI in default state."""
        # All check boxes are unchecked in default state
        for check_box in self.form.group_box_platforms.findChildren(QCheckBox):
            self.assertFalse(check_box.isChecked())
        # Check if date_range is correct: it defaults to the full
        # previous calendar month.
        end_last_month = date.today().replace(day=1) - timedelta(days=1)
        date_range = (end_last_month.replace(day=1), end_last_month)
        self.assertEqual(date_range, self.form.date_range)
        # Check if date combo boxes are correct
        self.assertEqual(
            QLocale().monthName(date_range[0].month, 1),
            self.form.combo_box_start_month.currentText())
        self.assertEqual(
            str(date_range[0].year),
            self.form.combo_box_start_year.currentText())
        self.assertEqual(
            QLocale().monthName(date_range[1].month, 1),
            self.form.combo_box_end_month.currentText())
        self.assertEqual(
            str(date_range[1].year),
            self.form.combo_box_end_year.currentText())
        # Check if output file name is set correctly (dates as ddmmyyyy)
        self.assertEqual(
            self.form.line_edit_output_file.text(), os.path.join(
                self.form.settings.directory,
                f'P2P_Results_{date_range[0].strftime("%d%m%Y")}-'
                f'{date_range[1].strftime("%d%m%Y")}.xlsx'))
    def test_select_all_platforms(self) -> None:
        """Test the Select All Platforms checkbox."""
        # Toggle the 'Select all platforms' checkbox; this should cascade
        # to every platform check box in the group box.
        self.form.check_box_select_all.setChecked(True)
        # Test that all platform check boxes are checked
        for check_box in self.form.group_box_platforms.findChildren(QCheckBox):
            self.assertTrue(check_box.isChecked())
    def test_get_platforms_no_platform_checked_true(self) -> None:
        """Test get_platforms if no platform is selected and checked==True."""
        # checked=True means "only the ticked platforms" -> empty set here.
        platforms = self.form.get_platforms(True)
        self.assertEqual(platforms, set())
    def test_get_platforms_all_platforms_checked_true(self) -> None:
        """
        Test get_platforms if all platforms are selected and checked==True.
        """
        # Selecting all via the master checkbox -> full platform set.
        self.form.check_box_select_all.setChecked(True)
        platforms = self.form.get_platforms(True)
        self.assertEqual(platforms, self.PLATFORMS)
    def test_get_platforms_three_platforms_selected_checked_true(self) -> None:
        """
        Test get_platforms if three platforms are selected and checked==True.
        """
        self.form.check_box_bondora.setChecked(True)
        self.form.check_box_mintos.setChecked(True)
        self.form.check_box_twino.setChecked(True)
        # checked=True -> exactly the three ticked platforms.
        platforms = self.form.get_platforms(True)
        self.assertEqual(platforms, {'Bondora', 'Mintos', 'Twino'})
    def test_get_platforms_three_platforms_selected_checked_false(self) -> None:
        """
        Test get_platforms if three platforms are selected and checked==False.
        """
        self.form.check_box_bondora.setChecked(True)
        self.form.check_box_mintos.setChecked(True)
        self.form.check_box_twino.setChecked(True)
        # checked=False -> all known platforms regardless of selection.
        platforms = self.form.get_platforms(False)
        self.assertEqual(platforms, self.PLATFORMS)
    def test_get_platforms_checked_false(self) -> None:
        """Test get_platforms if checked==False."""
        # With nothing selected, checked=False still returns every platform.
        platforms = self.form.get_platforms(False)
        self.assertEqual(platforms, self.PLATFORMS)
    def test_select_all_platforms_twice(self) -> None:
        """Test toggling the Select All Platforms checkbox on and off."""
        # Toggle the 'Select all platforms' checkbox
        self.form.check_box_select_all.setChecked(True)
        # Untoggle the 'Select all platforms' checkbox again
        self.form.check_box_select_all.setChecked(False)
        # Test that all platform check boxes are unchecked again
        for check_box in self.form.group_box_platforms.findChildren(QCheckBox):
            self.assertFalse(check_box.isChecked())
    def test_output_file_on_date_change(self) -> None:
        """Test output file name after a date change."""
        old_output_file = self.form.line_edit_output_file.text()
        # Change start and end date (indices map to May 2010 - Nov 2015).
        self.set_date_combo_boxes(4, 0, 10, 5)
        new_output_file = self.form.line_edit_output_file.text()
        self.assertNotEqual(new_output_file, old_output_file)
        self.assertEqual(
            os.path.join(
                self.form.settings.directory,
                'P2P_Results_01052010-30112015.xlsx'),
            new_output_file)
    def test_output_file_on_date_change_after_user_change(self) -> None:
        """Test output file after date change if user already changed file."""
        # QLineEdit.setText is called unbound so the widget's own change
        # handler does not mark this as a programmatic update.
        QLineEdit.setText(self.form.line_edit_output_file, 'Test.xlsx')
        self.form.output_file_changed = True
        # Change start and end date
        self.set_date_combo_boxes(4, 0, 10, 5)
        # Check that the output file name was not changed
        self.assertEqual(self.form.line_edit_output_file.text(), 'Test.xlsx')
    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    @unittest.mock.patch('easyp2p.ui.main_window.QMessageBox.warning')
    def test_no_platform_selected(self, mock_warning, mock_dialog) -> None:
        """Test clicking start without any selected platform."""
        # Note: decorators apply bottom-up, so mock_warning is the
        # QMessageBox patch and mock_dialog the ProgressWindow patch.
        self.form.push_button_start.click()
        # Check that QMessageBox was opened and ProgressWindow was not
        mock_warning.assert_called_once_with(
            self.form,
            'No P2P platform selected!',
            'Please choose at least one P2P platform!')
        self.assertFalse(mock_dialog.called)
    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    @unittest.mock.patch('easyp2p.ui.main_window.QMessageBox.warning')
    def test_end_date_before_start_date(
            self, mock_warning, mock_dialog) -> None:
        """Test clicking start with end date set before start date."""
        # Start indices chosen so the start date lies after the end date.
        self.set_date_combo_boxes(5, 6, 11, 5)
        self.form.push_button_start.click()
        # Check that QMessageBox was opened and ProgressWindow was not
        mock_warning.assert_called_once_with(
            self.form,
            'Start date is after end date!',
            'Start date must be before end date!')
        self.assertFalse(mock_dialog.called, 'ProgressWindow was opened!')
    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    def test_push_start_button_with_bondora_selected(self, mock_dialog) -> None:
        """Test pushing start button after selecting Bondora."""
        self.form.check_box_bondora.setChecked(True)
        # Date indices map to Sep 2018 - Feb 2019.
        self.set_date_combo_boxes(8, 8, 1, 9)
        QLineEdit.setText(self.form.line_edit_output_file, 'Test.xlsx')
        self.form.push_button_start.click()
        # Check that ProgressWindow opened
        mock_dialog.assert_called_once_with(self.form.settings)
        # Check that all settings are correct
        self.assertEqual(self.form.settings.platforms, {'Bondora'})
        self.assertEqual(
            self.form.settings.date_range,
            (date(2018, 9, 1), date(2019, 2, 28)))
        self.assertEqual(self.form.settings.output_file, 'Test.xlsx')
    @unittest.mock.patch('easyp2p.ui.main_window.ProgressWindow')
    def test_push_start_button_with_increasing_number_of_platforms_selected(
            self, mock_dialog) -> None:
        """
        Test push start button with increasing number of selected platforms.
        """
        self.set_date_combo_boxes(8, 8, 1, 9)
        QLineEdit.setText(self.form.line_edit_output_file, 'Test.xlsx')
        selected_platforms = set()
        # Tick one platform at a time and re-run start after each tick.
        for platform in self.PLATFORMS:
            check_box = getattr(self.form, 'check_box_' + platform.lower())
            check_box.setChecked(True)
            selected_platforms.add(platform)
            self.form.push_button_start.click()
            # Check that ProgressWindow opened
            mock_dialog.assert_called_once_with(self.form.settings)
            # Reset so the next iteration's assert_called_once_with holds.
            mock_dialog.reset_mock()
            # Check that all settings are correct
            self.assertEqual(self.form.settings.platforms, selected_platforms)
            self.assertEqual(
                self.form.settings.date_range,
                (date(2018, 9, 1), date(2019, 2, 28)))
            self.assertEqual(self.form.settings.output_file, 'Test.xlsx')
@unittest.mock.patch('easyp2p.ui.main_window.SettingsWindow')
def test_push_tool_button_settings(self, mock_dialog) -> None:
"""Test pushing settings button."""
self.form.tool_button_settings.click()
# Check that SettingsWindow opened
mock_dialog.assert_called_once_with(
self.form.get_platforms(False), self.form.settings)
def test_change_language_to_german(self) -> None:
"""Test changing the language to German."""
self.form.action_german.trigger()
all_months = {
self.form.combo_box_start_month.itemText(i) for i in
range(self.form.combo_box_start_month.count())}
all_months_expected = {
QLocale('de_de').monthName(i, 1) for i in range(1, 13)}
self.assertEqual('Startdatum', self.form.groupBox_start_date.title())
self.assertEqual(all_months_expected, all_months)
def test_change_language_to_german_to_english(self) -> None:
"""Test changing the language to German and then back to English."""
self.form.action_german.trigger()
self.form.action_english.trigger()
all_months = {
self.form.combo_box_start_month.itemText(i) for i in
range(self.form.combo_box_start_month.count())}
all_months_expected = {
QLocale('en_US').monthName(i, 1) for i in range(1, 13)}
self.assertEqual(self.form.groupBox_start_date.title(), 'Start date')
self.assertEqual(all_months, all_months_expected)
def test_change_language_to_german_after_date_update(self) -> None:
"""
Test changing the language to German if the dates have been changed.
"""
self.set_date_combo_boxes(4, 7, 11, 8)
self.form.action_german.trigger()
self.assertEqual(
QLocale('de_de').monthName(5, 1),
self.form.combo_box_start_month.currentText())
self.assertEqual(
'2017', self.form.combo_box_start_year.currentText())
self.assertEqual(
QLocale('de_de').monthName(12, 1),
self.form.combo_box_end_month.currentText())
self.assertEqual(
'2018', self.form.combo_box_end_year.currentText())
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.main()
| 41.684932 | 80 | 0.669898 | 11,701 | 0.961305 | 0 | 0 | 3,678 | 0.302169 | 0 | 0 | 3,234 | 0.265692 |
b39916cb7995122f663f739553c92cd8a79865f1 | 4,955 | py | Python | pineboolib/fllegacy/flserialport.py | juanjosepablos/pineboo | f6ce515aec6e0139821bb9c1d62536d9fb50dae4 | [
"MIT"
] | null | null | null | pineboolib/fllegacy/flserialport.py | juanjosepablos/pineboo | f6ce515aec6e0139821bb9c1d62536d9fb50dae4 | [
"MIT"
] | 1 | 2017-10-30T22:00:48.000Z | 2017-11-11T19:34:32.000Z | pineboolib/fllegacy/flserialport.py | juanjosepablos/pineboo | f6ce515aec6e0139821bb9c1d62536d9fb50dae4 | [
"MIT"
] | 1 | 2017-10-30T20:16:38.000Z | 2017-10-30T20:16:38.000Z | """Flserial por module."""
from PyQt5 import QtCore
from pineboolib.core import decorators
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from PyQt5 import QtSerialPort # type: ignore [attr-defined]
class BaudRateType(object):
    """Baud rate constants (bits per second) for FLSerialPort.

    Rates that are supported carry their numeric baud value; a value of
    -1 marks a rate this wrapper does not support.
    """

    BAUD50: int = -1
    BAUD75: int = -1
    BAUD110: int = -1
    BAUD134: int = -1
    BAUD150: int = -1
    BAUD200: int = -1
    BAUD300: int = -1
    BAUD600: int = -1
    BAUD1200: int = 1200
    BAUD1800: int = -1
    BAUD2400: int = 2400
    BAUD4800: int = 4800
    BAUD9600: int = 9600
    BAUD14400: int = -1
    BAUD19200: int = 19200
    BAUD38400: int = 38400
    BAUD56000: int = -1
    BAUD57600: int = 57600
    BAUD76800: int = -1
    BAUD115200: int = 115200
    BAUD128000: int = -1
    BAUD256000: int = -1
class DataBitsType(object):
    """Number-of-data-bits constants for FLSerialPort (5 to 8 bits)."""

    DATA_5: int = 5
    DATA_6: int = 6
    DATA_7: int = 7
    DATA_8: int = 8
class ParityType(object):
    """Parity-check constants for FLSerialPort.

    The numeric values appear to mirror QSerialPort's Parity enum
    (NoParity=0, EvenParity=2, OddParity=3, ...) — confirm before
    passing values from other sources.
    """

    PAR_NONE: int = 0
    PAR_EVEN: int = 2
    PAR_ODD: int = 3
    PAR_SPACE: int = 4
    PAR_MARK: int = 5
class StopBitType(object):
    """Stop-bit constants for FLSerialPort.

    Note STOP_1_5 is 3, not a midpoint value — presumably matching
    QSerialPort's StopBits enum (OneAndHalfStop=3); verify before use.
    """

    STOP_1: int = 1
    STOP_1_5: int = 3
    STOP_2: int = 2
class FlowType(object):
    """Flow-control constants for FLSerialPort (off / hardware / XON-XOFF)."""

    FLOW_OFF: int = 0
    FLOW_HARDWARE: int = 1
    FLOW_XONXOFF: int = 2
class FLSerialPort(QtCore.QObject, BaudRateType, DataBitsType, ParityType, StopBitType, FlowType):
    """Serial port wrapper exposing a legacy FacturaLUX-style API.

    Delegates to a ``QtSerialPort.QSerialPort`` instance and mixes in
    the constant classes so callers can use e.g. ``port.BAUD9600``.
    """

    # Underlying Qt serial port; never created on iOS (see __init__).
    _obj: "QtSerialPort.QSerialPort"

    def __init__(self, port_name: str) -> None:
        """Create the wrapper for the serial device named *port_name*.

        On iOS serial ports are unavailable: an information box is shown
        and ``_obj`` is left unset, so any later method call will raise
        AttributeError.
        """
        super().__init__()
        if str(QtCore.QSysInfo()) == "ios":
            from pineboolib.q3widgets.messagebox import MessageBox

            MessageBox.information(
                None,
                self.tr("Opción deshabilitada"),
                self.tr("FLSerialPort no está disponible para IOS"),
                MessageBox.Ok,
            )
            return
        else:
            # Imported lazily so the module can still be imported on
            # platforms without the QtSerialPort bindings.
            from PyQt5 import QtSerialPort  # type: ignore [attr-defined] # noqa: F821

            self._obj = QtSerialPort.QSerialPort(port_name)

    def setBaudRate(self, baud_rate: int) -> None:
        """Set the baud rate (one of the BaudRateType values)."""
        self._obj.setBaudRate(baud_rate)

    def baudRate(self) -> int:
        """Return the current baud rate."""
        return self._obj.baudRate()

    def setDataBits(self, data_bits: int) -> None:
        """Set the number of data bits (one of the DataBitsType values)."""
        self._obj.setDataBits(data_bits)

    def dataBits(self) -> int:
        """Return the current number of data bits."""
        return self._obj.dataBits()

    def setParity(self, parity: int) -> None:
        """Set the parity check mode (one of the ParityType values)."""
        self._obj.setParity(parity)

    def parity(self) -> int:
        """Return the current parity check mode."""
        return self._obj.parity()

    def setStopBits(self, stop_bit: int) -> None:
        """Set the number of stop bits (one of the StopBitType values)."""
        self._obj.setStopBits(stop_bit)

    def stopBits(self) -> int:
        """Return the current number of stop bits."""
        return self._obj.stopBits()

    def setFlowControl(self, flow: int) -> None:
        """Set the flow control mode (one of the FlowType values)."""
        self._obj.setFlowControl(flow)

    def flowControl(self) -> int:
        """Return the current flow control mode."""
        return self._obj.flowControl()

    def setTimeOut(self, sec: int = 0, milisec: int = 3000) -> None:
        """Wait up to *sec* seconds plus *milisec* milliseconds.

        Blocks until pending bytes are written and incoming data is
        ready, or the combined timeout elapses.
        """
        time = milisec
        if sec:
            time = sec * 1000 + time
        self._obj.waitForBytesWritten(time)
        self._obj.waitForReadyRead(time)

    def open(self) -> bool:
        """Open the port for reading and writing; return True on success."""
        # Fix: QSerialPort.open() requires an open-mode argument; the
        # previous no-argument call raised TypeError in PyQt5.
        return self._obj.open(QtCore.QIODevice.ReadWrite)

    def close(self) -> bool:
        """Close the port.

        NOTE(review): QIODevice.close() returns None, so despite the
        bool annotation this always yields a falsy value — confirm how
        callers use the result.
        """
        return self._obj.close()

    @decorators.NotImplementedWarn
    def writeText(self, data: str) -> None:
        """Send string data (stub, not implemented)."""
        pass

    @decorators.NotImplementedWarn
    def getch(self) -> int:
        """Read one character as an int (stub, not implemented)."""
        return 0

    @decorators.NotImplementedWarn
    def putch(self, ch: int) -> int:
        """Send one character (stub, not implemented)."""
        return 0

    @decorators.NotImplementedWarn
    def ungetch(self, ch: int) -> int:
        """Push one character back onto the input (stub, not implemented)."""
        return 0

    def size(self) -> int:
        """Return the size of data received."""
        return self._obj.size()

    def flush(self) -> bool:
        """Flush buffered data; return True if any data was written."""
        return self._obj.flush()

    @decorators.NotImplementedWarn
    def readBlock(self) -> int:
        """Read a data block (stub, not implemented)."""
        return 0

    def writeBlock(self) -> int:
        """Write a data block (stub; always returns 0, never delegates)."""
        return 0

    def bytesAvailable(self) -> int:
        """Return the number of bytes waiting to be read."""
        # Fix: delegate to the underlying port instead of the previous
        # hard-coded 0, which contradicted the documented behaviour.
        return self._obj.bytesAvailable()

    def setRts(self, b: bool) -> None:
        """Raise or lower the RTS (request-to-send) signal."""
        self._obj.setRequestToSend(b)

    def setDtr(self, b: bool) -> None:
        """Raise or lower the DTR (data-terminal-ready) signal."""
        self._obj.setDataTerminalReady(b)
| 24.651741 | 98 | 0.578204 | 4,729 | 0.954004 | 0 | 0 | 586 | 0.118217 | 0 | 0 | 974 | 0.19649 |
b39ac8a96e45d8646cdf0980fdb5a20cae752964 | 434 | py | Python | test4.py | JarkJiao/Python_learning_TestCase | cc77a7a20b01e230e0edd818532570a7d8853b03 | [
"MIT"
] | null | null | null | test4.py | JarkJiao/Python_learning_TestCase | cc77a7a20b01e230e0edd818532570a7d8853b03 | [
"MIT"
] | null | null | null | test4.py | JarkJiao/Python_learning_TestCase | cc77a7a20b01e230e0edd818532570a7d8853b03 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: UTF-8 -*-
year = int(raw_input('years:\n'))
month = int(raw_input('month:\n'))
day = int(raw_input('day:\n'))
months = (0,31,59,90,120,151,181,212,243,273,304,334)
if 0< month <=12:
sum = months[month-1]
else:
print '日期错误'
sum+=day
leap = 0
if (year %400==0) or ((year % 4 == 0) and (year % 100 ==0)):
leap+=1
if(leap == 1) and (month >2):
sum+=1
print 'it is the %dth day' % sum
| 18.083333 | 60 | 0.569124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 102 | 0.230769 |