| max_stars_repo_path (string, 3-269) | max_stars_repo_name (string, 4-119) | max_stars_count (int64, 0-191k) | id (string, 1-7) | content (string, 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
| workspaces.py | CC-Digital-Innovation/aws-workspaces-reboot | 1 | 12775251 |
# -*- coding: utf-8 -*-
'''
Python CLI wrapper for Amazon Workspaces API
Usage:
workspaces.py get
workspaces.py getallwsids
workspaces.py reboot <WorkspaceId>
workspaces.py test
workspaces.py nuke
Arguments:
WorkspaceId use 'get' to identify a workspace
Options:
-h --help Show this screen.
--version Show version.
Commands:
get Query workspaces
getallwsids Get all workspace IDs
reboot <WorkspaceId> Reboot a specific workspace
test Test run rebooting all workspaces
nuke Reboot all workspaces
'''
from docopt import docopt
from rich.console import Console
from rich.panel import Panel
import boto3
from botocore.config import Config
import configparser
# owned
__author__ = '<NAME>'
__copyright__ = 'Copyright 2021, Python CLI wrapper for Amazon Workspaces API'
__credits__ = ['<NAME>']
__license__ = 'MIT'
__version__ = '0.1.0'
__maintainer__ = '<NAME>'
__email__ = '<EMAIL>'
__status__ = 'Dev'
def main():
"""Things start here."""
arguments = docopt(
__doc__,
version='Python CLI wrapper for Amazon Workspaces API - v0.1.0')
if arguments['get']:
get()
elif arguments['getallwsids']:
getallwsids()
elif arguments['reboot']:
reboot(arguments['<WorkspaceId>'])
elif arguments['test']:
test()
elif arguments['nuke']:
nuke()
else:
exit("{0} is not a command. \
See 'workspaces.py --help'.".format(arguments['<command>']))
def aws():
"""Read and parse config file and create AWS API connection."""
config = configparser.ConfigParser()
config.read('config.ini')
config.sections()
my_config = Config(region_name=config['aws']['region'],
signature_version='v4',
retries={
'max_attempts': 10,
'mode': 'standard'
})
client = boto3.client(
'workspaces',
config=my_config,
aws_access_key_id=config['aws']['access_key_id'],
aws_secret_access_key=config['aws']['secret_access_key'])
return client
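# A minimal config.ini this helper expects (sketch only; the section and key
# names come from the lookups above, the values are placeholders):
#
# [aws]
# region = us-east-1
# access_key_id = YOUR_ACCESS_KEY_ID
# secret_access_key = YOUR_SECRET_ACCESS_KEY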
def get():
"""Get AWS Workspace instances."""
client = aws()
console.log(Panel('Getting Workspaces', title='INFO', style=info_fmt))
workspaces = client.describe_workspaces()['Workspaces']
# workspaceIds = [workspace['WorkspaceId'] for workspace in workspaces]
console.log(workspaces)
# console.log(workspaceIds)
def getallwsids():
"""Get all AWS Workspace instance IDs."""
client = aws()
paginator = client.get_paginator("describe_workspaces")
workspaceIds = []
for result in paginator.paginate():
if "Workspaces" not in result:
continue
for workspace in result["Workspaces"]:
# yield workspace['WorkspaceId']
workspaceIds.append(workspace['WorkspaceId'])
# console.log(workspaceIds)
return (workspaceIds)
def reboot(WorkspaceId):
"""Reboot a specific AWS Workspace instance."""
client = aws()
console.log(
Panel('Attempting reboot of workspaceId: ' + WorkspaceId,
title='INFO',
style=info_fmt))
response = client.reboot_workspaces(RebootWorkspaceRequests=[
{
'WorkspaceId': WorkspaceId
},
])
console.log(response)
def test():
"""Test run reboot all AWS Workspace instances."""
console.log(Panel('Test run rebooting All Workspaces', title='INFO',
style=info_fmt))
workspaceIds = getallwsids()
for WorkspaceId in workspaceIds:
console.log('Test run reboot of WorkspaceId: ' +
WorkspaceId, style=info_fmt)
def nuke():
"""Reboot all AWS Workspace instances."""
client = aws()
console.log(Panel('Rebooting All Workspaces', title='INFO',
style=info_fmt))
workspaceIds = getallwsids()
for WorkspaceId in workspaceIds:
console.log('Rebooting WorkspaceId: ' + WorkspaceId, style=info_fmt)
response = client.reboot_workspaces(RebootWorkspaceRequests=[
{
'WorkspaceId': WorkspaceId
},
])
console.log(response)
if __name__ == '__main__':
console = Console()
info_fmt = 'yellow'
main()
| 2.578125 | 3 |
| efficient_net_v2/model/efficient_net_v2.py | akashAD98/EfficientNetv2-with-Detectron2 | 0 | 12775252 |
#!/usr/bin/env python
from copy import deepcopy
import torch.nn as nn
from yacs.config import CfgNode as CN
from ..layers import ConvBNA, MBConv, FusedMBConv
class EfficientNetV2(nn.Module):
def __init__(self, cfg: CN, in_channels: int = 3):
super(EfficientNetV2, self).__init__()
# input_shape = cfg.get('INPUTS').get('SHAPE')
backbone = cfg['BACKBONE']
# assert len(input_shape) == 3
# in_channels = input_shape[0]
layers, in_channels = self.build(backbone, in_channels)
self.backbone = nn.Sequential(*layers)
try:
head = cfg['HEAD']
layers, in_channels = self.build(head, in_channels)
self.head = nn.Sequential(*layers)
except KeyError:
self.head = None
self.out_channels = in_channels
def build(self, nodes, in_channels):
layers = []
for index, (stage, node) in enumerate(nodes.items()):
for i in range(node.pop('LAYERS', 1)):
stride = node.get('STRIDE', 1) if i == 0 else 1
assert stride
layers.append(self.create_layer(node, in_channels, stride))
in_channels = node.get('CHANNELS')
return layers, in_channels
def create_layer(self, node: CN, in_channels: int, stride: int):
node = deepcopy(node)
ops = node.pop('OPS')
out_channels = node.pop('CHANNELS', None)
kernel_size = node.get('KERNEL')
expansion = node.get('EXPANSION')
se = node.get('SE', 0)
padding = node.get('PADDING', 0)
if ops == 'conv':
layer = ConvBNA(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
)
elif ops == 'mbconv':
layer = MBConv(
in_channels=in_channels,
expansion=expansion,
out_channels=out_channels,
knxn=kernel_size,
stride=stride,
reduction=se,
)
elif ops == 'fused_mbconv':
layer = FusedMBConv(
in_channels=in_channels,
expansion=expansion,
out_channels=out_channels,
knxn=kernel_size,
stride=stride,
reduction=se,
)
else:
layer = getattr(nn, ops)
if not issubclass(layer, nn.Module):
raise ValueError(f'Unknown layer type {ops}')
layer = layer(**node)
return layer
def forward(self, x):
x = self.backbone(x)
if self.head is not None:
x = self.head(x)
return x
def stage_forward(self, x):
s0 = self.backbone[0](x)
s1 = self.backbone[1:3](s0)
s2 = self.backbone[3:7](s1)
s3 = self.backbone[7:11](s2)
s4 = self.backbone[11:17](s3)
s5 = self.backbone[17:26](s4)
s6 = self.backbone[26:](s5)
return {
# 's0': s0,
# 's1': s1,
's2': s2,
's3': s3,
's4': s4,
's5': s5,
's6': s6,
}
@property
def stage_indices(self):
return {
's0': 1,
's1': 3,
's2': 7,
's3': 11,
's4': 17,
's5': 26,
's6': 41,
}
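# Sketch of the cfg layout that build() consumes, inferred from the
# node.get()/node.pop() calls above (stage names and values are illustrative):
#
# BACKBONE:
#   STAGE_0: {OPS: conv, CHANNELS: 24, KERNEL: 3, STRIDE: 2, PADDING: 1}
#   STAGE_1: {OPS: fused_mbconv, LAYERS: 2, CHANNELS: 24, KERNEL: 3, EXPANSION: 1}
#   STAGE_2: {OPS: mbconv, LAYERS: 4, CHANNELS: 48, KERNEL: 3, EXPANSION: 4, SE: 4, STRIDE: 2}
# HEAD:
#   CONV_HEAD: {OPS: conv, CHANNELS: 1280, KERNEL: 1}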
| 2.234375 | 2 |
| storm_control/sc_hardware/utility/corr_2d_gauss_c.py | shiwei23/STORM6 | 1 | 12775253 |
<gh_stars>1-10
#!/usr/bin/env python
"""
Fitting for offset using correlation with a 2D Gaussian.
Hazen 04/18
"""
import ctypes
import math
import numpy
from numpy.ctypeslib import ndpointer
import scipy
import scipy.optimize
# `dg` is used by Corr2DGaussPy.translate() below but was not imported in this
# snippet; drawGaussiansXY is assumed to come from the storm-analysis package.
import storm_analysis.simulator.draw_gaussians_c as dg
import tifffile
import storm_control.c_libraries.loadclib as loadclib
# Load C library.
c2dg = loadclib.loadCLibrary("corr_2d_gauss")
# corr2DData structure definition.
class corr2DData(ctypes.Structure):
_fields_ = [('n_checks', ctypes.c_int),
('n_updates', ctypes.c_int),
('size_x', ctypes.c_int),
('size_y', ctypes.c_int),
('stale_ddx', ctypes.c_int),
('stale_ddy', ctypes.c_int),
('stale_dx', ctypes.c_int),
('stale_dy', ctypes.c_int),
('stale_f', ctypes.c_int),
('stale_gi', ctypes.c_int),
('cx', ctypes.c_double),
('cy', ctypes.c_double),
('ddx', ctypes.c_double),
('ddy', ctypes.c_double),
('dx', ctypes.c_double),
('dy', ctypes.c_double),
('f', ctypes.c_double),
('last_x', ctypes.c_double),
('last_y', ctypes.c_double),
('sg_term', ctypes.c_double),
('g_im', ctypes.POINTER(ctypes.c_double)),
('gx', ctypes.POINTER(ctypes.c_double)),
('gy', ctypes.POINTER(ctypes.c_double)),
('r_im', ctypes.POINTER(ctypes.c_double)),
('xi', ctypes.POINTER(ctypes.c_double)),
('yi', ctypes.POINTER(ctypes.c_double))]
# C interface definition.
c2dg.cleanup.argtypes = [ctypes.POINTER(corr2DData)]
c2dg.ddx.argtypes = [ctypes.POINTER(corr2DData),
ctypes.c_double,
ctypes.c_double]
c2dg.ddx.restype = ctypes.c_double
c2dg.ddy.argtypes = [ctypes.POINTER(corr2DData),
ctypes.c_double,
ctypes.c_double]
c2dg.ddy.restype = ctypes.c_double
c2dg.dx.argtypes = [ctypes.POINTER(corr2DData),
ctypes.c_double,
ctypes.c_double]
c2dg.dx.restype = ctypes.c_double
c2dg.dy.argtypes = [ctypes.POINTER(corr2DData),
ctypes.c_double,
ctypes.c_double]
c2dg.dy.restype = ctypes.c_double
c2dg.fn.argtypes = [ctypes.POINTER(corr2DData),
ctypes.c_double,
ctypes.c_double]
c2dg.fn.restype = ctypes.c_double
c2dg.initialize.argtypes = [ctypes.c_double,
ctypes.c_int,
ctypes.c_int]
c2dg.initialize.restype = ctypes.POINTER(corr2DData)
c2dg.setImage.argtypes = [ctypes.POINTER(corr2DData),
ndpointer(dtype=numpy.float64)]
class Corr2DGaussC(object):
"""
This class optimizes the correlation between an image and a 2D
Gaussian.
C library wrapper implementation.
"""
def __init__(self, size = None, sigma = None, verbose = True, **kwds):
super(Corr2DGaussC, self).__init__(**kwds)
assert(len(size) == 2), "Size must have two elements."
self.verbose = verbose
self.x_size = size[0]
self.y_size = size[1]
self.c2d = c2dg.initialize(sigma, size[0], size[1])
def cleanup(self):
if self.verbose:
print("Lock fitting: {0:0d} checks, {1:0d} updates".format(self.c2d.contents.n_checks,
self.c2d.contents.n_updates))
c2dg.cleanup(self.c2d)
self.c2d = None
def ddx(self, x):
return c2dg.ddx(self.c2d, x[0], x[1])
def ddy(self, x):
return c2dg.ddy(self.c2d, x[0], x[1])
def dx(self, x):
return c2dg.dx(self.c2d, x[0], x[1])
def dy(self, x):
return c2dg.dy(self.c2d, x[0], x[1])
def func(self, x, sign = 1.0):
return sign * c2dg.fn(self.c2d, x[0], x[1])
def hessian(self, x, sign = 1.0):
dxdy = -sign * self.dx(x) * self.dy(x)
return numpy.array([[sign * self.ddx(x), dxdy],
[dxdy, sign * self.ddy(x)]])
def jacobian(self, x, sign = 1.0):
return sign * numpy.array([self.dx(x), self.dy(x)])
def setImage(self, image):
assert(image.shape[0] == self.x_size)
assert(image.shape[1] == self.y_size)
c_image = numpy.ascontiguousarray(image, dtype = numpy.float64)
c2dg.setImage(self.c2d, c_image)
class Corr2DGaussPy(object):
"""
This class optimizes the correlation between an image and a 2D
Gaussian.
Python implementation.
"""
def __init__(self, size = None, sigma = None):
"""
size - The size of the image.
sigma - The sigma of the Gaussian.
"""
super(Corr2DGaussPy, self).__init__()
assert(len(size) == 2), "Size must have two elements."
self.g_image = None
self.g_x = None
self.image = None
self.sigma = sigma
self.x_size = size[0]
self.y_size = size[1]
[self.xi,self.yi] = numpy.mgrid[-self.x_size/2.0:self.x_size/2.0,
-self.y_size/2.0:self.y_size/2.0]
self.xi += 0.5
self.yi += 0.5
self.cx = 0.5 * self.x_size - 0.5
self.cy = 0.5 * self.y_size - 0.5
def cleanup(self):
pass
def ddx(self, x, sign = 1.0):
g_image = self.translate(x)
t1 = (x[0] - self.xi)/(self.sigma*self.sigma)
t2 = 1.0/(self.sigma * self.sigma)
return sign * numpy.sum(self.image * g_image * (t1*t1 - t2))
def ddy(self, x, sign = 1.0):
g_image = self.translate(x)
t1 = (x[1] - self.yi)/(self.sigma*self.sigma)
t2 = 1.0/(self.sigma * self.sigma)
return sign * numpy.sum(self.image * g_image * (t1*t1 - t2))
def dx(self, x, sign = 1.0):
g_image = self.translate(x)
t1 = -(x[0] - self.xi)/(self.sigma*self.sigma)
return sign * numpy.sum(self.image * g_image * t1)
def dy(self, x, sign = 1.0):
g_image = self.translate(x)
t1 = -(x[1] - self.yi)/(self.sigma*self.sigma)
return sign * numpy.sum(self.image * g_image * t1)
def func(self, x, sign = 1.0):
g_image = self.translate(x)
return sign * numpy.sum(self.image * g_image)
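# The quantity being optimized is the correlation f(x) = sum_ij I_ij * G_ij(x),
# where G is a Gaussian centered at (cx + x[0], cy + x[1]). The dx/dy/ddx/ddy
# methods above follow by differentiating G under the sum, e.g.
# df/dx0 = sum_ij I_ij * G_ij * -(x0 - xi_ij) / sigma^2.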
def hessian(self, x, sign = 1.0):
dxdy = -sign * self.dx(x) * self.dy(x)
return numpy.array([[sign * self.ddx(x), dxdy],
[dxdy, sign * self.ddy(x)]])
def jacobian(self, x, sign = 1.0):
return sign * numpy.array([self.dx(x), self.dy(x)])
def setImage(self, image):
assert(image.shape[0] == self.x_size)
assert(image.shape[1] == self.y_size)
self.g_image = None
self.image = image
def translate(self, x):
if (self.g_image is None) or (not numpy.allclose(self.g_x, x, atol = 1.0e-12, rtol = 1.0e-12)):
self.g_image = dg.drawGaussiansXY((self.x_size, self.y_size),
numpy.array([self.cx + x[0]]),
numpy.array([self.cy + x[1]]),
sigma = self.sigma)
self.g_x = numpy.copy(x)
return self.g_image
class Corr2DGaussPyLM(Corr2DGaussPy):
"""
Optimize using a variant of the Levenberg-Marquardt algorithm.
"""
def __init__(self, max_reps = 100, tolerance = 1.0e-6, **kwds):
super(Corr2DGaussPyLM, self).__init__(**kwds)
self.fn_curr = None
self.fn_old = None
self.lm_lambda = 1.0
self.max_reps = max_reps
self.tolerance = tolerance
def hasConverged(self):
return (abs((self.fn_old - self.fn_curr)/self.fn_curr) < self.tolerance)
def update(self, x):
"""
Return the update vector at x.
"""
jac = self.jacobian(x, sign = -1.0)
hess = self.hessian(x, sign = -1.0)
for i in range(jac.size):
hess[i,i] += hess[i,i] * self.lm_lambda
delta = numpy.linalg.solve(hess, jac)
return delta
def maximize(self, dx = 0.0, dy = 0.0):
self.lm_lambda = 1.0
xo = numpy.array([dx, dy])
self.fn_curr = self.func(xo, sign = -1.0)
for i in range(self.max_reps):
xn = xo - self.update(xo)
fn = self.func(xn, sign = -1.0)
# If we did not improve increase lambda and try again.
if (fn > self.fn_curr):
self.lm_lambda = 2.0 * self.lm_lambda
continue
self.lm_lambda = 0.9 * self.lm_lambda
self.fn_old = self.fn_curr
self.fn_curr = fn
xo = xn
if self.hasConverged():
break
success = (i < (self.max_reps - 1))
return [xo, success, -self.fn_curr, 0]
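# Usage sketch (assumes `image` matches the fitter's size):
#   lm = Corr2DGaussPyLM(size=(9, 9), sigma=1.0)
#   lm.setImage(image)
#   [offset, success, fn_max, status] = lm.maximize()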
class Corr2DGaussCNCG(Corr2DGaussC):
"""
Optimize using Newton-CG (Python version).
"""
def maximize(self, dx = 0.0, dy = 0.0):
"""
Find the offset that optimizes the correlation of the
Gaussian with the reference image.
"""
x0 = numpy.array([dx, dy])
fit = scipy.optimize.minimize(self.func,
x0,
args=(-1.0,),
method='Newton-CG',
jac=self.jacobian,
hess=self.hessian,
options={'xtol': 1e-3, 'disp': False})
if (not fit.success) and (not (fit.status == 2)):
#if (not fit.success):
print("Maximization failed with:")
print(fit.message)
print("Status:", fit.status)
print("X:", fit.x)
print("Function value:", -fit.fun)
print()
return [fit.x, fit.success, -fit.fun, fit.status]
class Corr2DGaussPyNCG(Corr2DGaussPy):
"""
Optimize using Newton-CG (Python version).
"""
def maximize(self, dx = 0.0, dy = 0.0):
"""
Find the offset that optimizes the correlation of the
Gaussian with the reference image.
"""
x0 = numpy.array([dx, dy])
fit = scipy.optimize.minimize(self.func,
x0,
args=(-1.0,),
method='Newton-CG',
jac=self.jacobian,
hess=self.hessian,
options={'xtol': 1e-3, 'disp': False})
if (not fit.success) and (not (fit.status == 2)):
#if (not fit.success):
print("Maximization failed with:")
print(fit.message)
print("Status:", fit.status)
print("X:", fit.x)
print("Function value:", -fit.fun)
print()
return [fit.x, fit.success, -fit.fun, fit.status]
if (__name__ == "__main__"):
# The unit tests, if this was a unit.
if True:
# Test X/Y derivatives.
im_size = (9,9)
c2dg_py = Corr2DGaussPy(size = im_size, sigma = 1.0)
x = numpy.zeros(2)
image = c2dg_py.translate(x)
c2dg_py.setImage(image)
x = numpy.zeros(2)
dx = 1.0e-6
# Check X derivative.
for i in range(-3,4):
offset = 0.1 * i
x[0] = offset + dx
f1 = c2dg_py.func(x)
x[0] = offset
f2 = c2dg_py.func(x)
assert(abs((f1-f2)/dx - c2dg_py.dx(x)) < 1.0e-4)
# Check Y derivative.
for i in range(-3,4):
offset = 0.1 * i
x[1] = offset + dx
f1 = c2dg_py.func(x)
x[1] = offset
f2 = c2dg_py.func(x)
assert(abs((f1-f2)/dx - c2dg_py.dy(x)) < 1.0e-4)
if True:
# Test X/Y second derivatives.
im_size = (9,9)
c2dg_py = Corr2DGaussPy(size = im_size, sigma = 1.0)
x = numpy.zeros(2)
image = c2dg_py.translate(x)
c2dg_py.setImage(image)
dx = 1.0e-6
# Check X second derivative.
x = numpy.zeros(2)
for i in range(-3,4):
offset = 0.1 * i + 0.05
x[0] = offset + dx
f1 = c2dg_py.func(x)
x[0] = offset
f2 = c2dg_py.func(x)
x[0] = offset - dx
f3 = c2dg_py.func(x)
x[0] = offset
nddx = (f1 - 2.0 * f2 + f3)/(dx*dx)
assert(abs(nddx - c2dg_py.ddx(x)) < 0.02)
# Check Y second derivative.
x = numpy.zeros(2)
for i in range(-3,4):
offset = 0.1 * i + 0.05
x[1] = offset + dx
f1 = c2dg_py.func(x)
x[1] = offset
f2 = c2dg_py.func(x)
x[1] = offset - dx
f3 = c2dg_py.func(x)
x[1] = offset
nddy = (f1 - 2.0 * f2 + f3)/(dx*dx)
assert(abs(nddy - c2dg_py.ddy(x)) < 0.02)
if True:
# Test finding the correct offset (Python version).
im_size = (9,9)
c2dg_py = Corr2DGaussPyNCG(size = im_size, sigma = 1.0)
for i in range(-2,3):
disp = numpy.array([0.1*i, -0.2*i])
image = c2dg_py.translate(disp)
c2dg_py.setImage(image)
[dd, success, fn, status] = c2dg_py.maximize()
assert(success)
assert(numpy.allclose(dd, disp, atol = 1.0e-3, rtol = 1.0e-3))
if True:
# Test C version against Python version.
im_size = (9,10)
c2dg_py = Corr2DGaussPy(size = im_size, sigma = 1.0)
c2dg_c = Corr2DGaussC(size = im_size, sigma = 1.0)
x = numpy.zeros(2)
image = c2dg_py.translate(x)
c2dg_py.setImage(image)
c2dg_c.setImage(image)
assert(abs(c2dg_py.func(x) - c2dg_c.func(x)) < 1.0e-6)
x = numpy.zeros(2)
for i in range(-3,4):
x[0] = 0.1*i
assert(abs(c2dg_py.dx(x) - c2dg_c.dx(x)) < 1.0e-6)
x = numpy.zeros(2)
for i in range(-3,4):
x[1] = 0.1*i
assert(abs(c2dg_py.dy(x) - c2dg_c.dy(x)) < 1.0e-6)
x = numpy.zeros(2)
for i in range(-3,4):
x[1] = 0.1*i
assert(abs(c2dg_py.ddx(x) - c2dg_c.ddx(x)) < 1.0e-6)
x = numpy.zeros(2)
for i in range(-3,4):
x[1] = 0.1*i
assert(abs(c2dg_py.ddy(x) - c2dg_c.ddy(x)) < 1.0e-6)
if True:
# Test finding the correct offset (C version).
im_size = (9,9)
c2dg_c = Corr2DGaussCNCG(size = im_size, sigma = 1.0)
c2dg_py = Corr2DGaussPyNCG(size = im_size, sigma = 1.0)
for i in range(-2,3):
disp = numpy.array([0.1*i, -0.2*i])
image = c2dg_py.translate(disp)
c2dg_c.setImage(image)
[dd, success, fn, status] = c2dg_c.maximize()
assert(success)
assert(numpy.allclose(dd, disp, atol = 1.0e-3, rtol = 1.0e-3))
#
# The MIT License
#
# Copyright (c) 2018 <NAME>, Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
| 2.296875 | 2 |
| actusmp/model/term_group.py | CasperLabs/actus-mp | 0 | 12775254 |
import dataclasses
import typing
from actusmp.model.term_item import TermSet
@dataclasses.dataclass
class TermGroup():
group_id: str
name: str
term_set: TermSet
def __str__(self) -> str:
return f"term-group|{self.group_id}"
@dataclasses.dataclass
class TermGroupSet():
groups: typing.List[TermGroup]
def __iter__(self) -> typing.Iterator[TermGroup]:
return iter(sorted(self.groups, key=lambda i: i.name))
def __len__(self) -> int:
return len(self.groups)
def __str__(self) -> str:
return f"term-group-set|{len(self)}"
| 2.625 | 3 |
| car_class/car_class.py | Alweezy/bootcamp-18-day-2 | 0 | 12775255 |
<reponame>Alweezy/bootcamp-18-day-2
class Car(object):
# setting some default values
num_of_doors = 4
num_of_wheels = 4
def __init__(self, name='General', model='GM', car_type='saloon', speed=0):
self.name = name
self.model = model
self.car_type = car_type
self.speed = speed
if self.name == 'Porshe' or self.name == 'Koenigsegg':
self.num_of_doors = 2
elif self.car_type == 'trailer':
self.num_of_wheels = 8
def is_saloon(self):
'''
Determine between saloon and trailer
'''
if self.car_type != 'trailer':
return True
return False
def drive(self, speed):
'''
Check the car type and return appropriate speed
'''
if self.car_type == 'trailer':
self.speed = speed * 11
else:
self.speed = 10 ** speed
return self
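# Usage sketch:
#   car = Car('Koenigsegg')
#   car.num_of_doors    # -> 2
#   car.is_saloon()     # -> True
#   car.drive(3).speed  # -> 1000, i.e. 10 ** 3 for non-trailers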
| 3.796875 | 4 |
| common/utils.py | threathunterX/nebula_web | 2 | 12775256 |
# -*- coding: utf-8 -*-
import time
from os import path as opath
from datetime import datetime
import jinja2
executor = None # ThreadExecutor
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
Traceback (most recent call last):
...
AttributeError: 'a'
"""
def __getattr__(self, key):
try:
return self[key]
except KeyError, k:
raise AttributeError, k
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError, k
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def render(template_path, context):
"""
Assuming a template at /some/path/my_tpl.html, containing:
Hello {{ firstname }} {{ lastname }}!
>> context = {
'firstname': 'John',
'lastname': 'Doe'
}
>> result = render('/some/path/my_tpl.html', context)
>> print(result)
Hello <NAME>!
"""
path, filename = opath.split(template_path)
return jinja2.Environment(
loader=jinja2.FileSystemLoader(path or './')
).get_template(filename).render(context)
def get_hour_strs_fromtimestamp(fromtime, endtime ):
# fromtime, endtime is float timestamp
if fromtime >= endtime:
return []
ts = []
while fromtime < endtime:
ts.append(fromtime)
fromtime = fromtime + 3600
if ts and ts[-1] + 3600 < endtime:
ts.append(endtime)
return ts
def get_hour_strs(fromtime, endtime, f='%Y%m%d%H'):
timestamps = get_hour_strs_fromtimestamp(fromtime, endtime)
hours = [ datetime.fromtimestamp(_).strftime(f) for _ in timestamps]
return hours
def get_hour_start(point=None):
"""
Return the timestamp of the start of the hour containing the `point` timestamp; defaults to the start of the current hour.
"""
if point is None:
p = time.time()
else:
p = point
return (int(p) / 3600) * 3600
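# Example: get_hour_start(3700) -> 3600 (Python 2 integer division floors).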
def dict_merge(src_dict, dst_dict):
"""
Accumulate values from two dicts under matching keys.
Behavior for the different value types:
>>> s = dict(a=1,b='2')
>>> d = {'b': 3, 'c': 4}
>>> dict_merge(s,d)
>>> t = {'a': 1, 'b': 5, 'c': 4}
>>> s == t
True
>>> s = dict(a=set([1,2]), )
>>> d = dict(a=set([2, 3]),)
>>> dict_merge(s,d)
>>> t = {'a':set([1,2,3])}
>>> s == t
True
>>> s = dict(a={'a':1, 'b':2})
>>> d = dict(a={'a':1, 'b':2})
>>> dict_merge(s, d)
>>> t = dict(a={'a':2, 'b':4})
>>> s == t
True
"""
if src_dict is None:
return dst_dict
for k,v in dst_dict.iteritems():
if not src_dict.has_key(k):
src_dict[k] = v
else:
if isinstance(v, (basestring, int, float)):
src_dict[k] = int(v) + int(src_dict[k])
elif isinstance(v, set):
assert type(v) == type(src_dict[k]), 'key %s,dst_dict value: %s type: %s, src_dict value: %s type:%s' % (k, v, type(v), src_dict[k], type(src_dict[k]))
src_dict[k].update(v)
elif isinstance(v, dict):
assert type(v) == type(src_dict[k]), 'key %s,dst_dict value: %s type: %s, src_dict value: %s type:%s' % (k, v, type(v), src_dict[k], type(src_dict[k]))
dict_merge(src_dict[k], v)
| 2.84375 | 3 |
| pylighter/utils.py | stungkit/PyLighter | 13 | 12775257 |
import colorsys
import pkgutil
from dataclasses import dataclass
import pandas as pd
from pylighter import config
def text_parser(file_name, **kwargs):
"""Parse text and replace <% variable %> by the value of variable.
Parameters
----------
file_name : str
Path to the file
Returns
-------
file_content : str
The content of the file
"""
file_content = str(pkgutil.get_data("pylighter", file_name), "utf-8")
file_content = file_content.replace("\n", "")
for key in kwargs:
file_content = file_content.replace(f"<% {key} %>", kwargs[key])
return file_content
def js_add_el_to_div(class_name_source, class_name_target):
"""
Js function to append source element to the target one.
"""
js = f"const el = document.getElementsByClassName('{class_name_target}')[0];"
js += "if (el) "
js += f"el.appendChild(document.getElementsByClassName('{class_name_source}')[0])"
return js
def js_remove_el(el_id_class_name):
js = f"const el = document.getElementsByClassName('{el_id_class_name}')[0];"
js += "if (el) el.remove()"
return js
def chunk_html_display(text):
if text and text[-1] == " ":
text = text[:-1] + "␣"
return f"{text}"
def annotation_to_csv(corpus, labels, additional_outputs_values, file_path):
df = pd.DataFrame(data={"document": corpus, "labels": labels})
if additional_outputs_values is not None:
df = pd.concat([df, additional_outputs_values], axis=1)
df.to_csv(file_path, sep=";", index=False)
def assert_IOB2_format(labels_list):
for labels in labels_list:
previous_label = None
for label in labels:
assert label == "O" or label[:2] == "B-" or label[:2] == "I-"
if label == "O":
previous_label = None
continue
if label[:2] == "B-":
previous_label = label[2:]
else:
assert previous_label
assert previous_label == label[2:]
def assert_input_consistency(corpus, labels, start_index):
if labels:
assert len(corpus) == len(labels)
assert_IOB2_format(labels)
assert start_index >= 0
assert start_index < len(corpus)
def compute_selected_label_color(str_color_hex):
rgb = tuple(
int(str_color_hex.lstrip("#")[i : i + 2], 16) / 255 for i in (0, 2, 4) # noqa
)
hue, lightness, saturation = colorsys.rgb_to_hls(*rgb)
lightness *= 0.8
return f"hsl({int(hue*360)}, {int(saturation * 100)}%, {int(lightness * 100)}%)"
def wait_for_threads(threads):
for thread in threads:
thread.join()
@dataclass
class LabelColor:
name: str
text_color: str
background_color: str
class AdditionalOutputElement:
def __init__(self, name, display_type, description, default_value):
self.name = name
self.display_type = display_type
self.description = description
self.default_value = default_value
if display_type not in config.DISPLAY_ELEMENTS.keys():
raise ValueError(
f"display_type must one of those {config.DISPLAY_ELEMENTS.keys()}"
)
class PreloadedDisplays:
def __init__(
self,
):
self.previous = {}
self.current = {}
self.next = {}
def update(self, direction):
if direction == 1:
self.previous = self.current
self.current = self.next
self.next = {}
elif direction == -1:
self.next = self.current
self.current = self.previous
self.previous = {}
else:
self.next = {}
self.current = {}
self.previous = {}
| 2.96875 | 3 |
| Chapter 12/Implement_Trees_with_Lists5_4(b).py | bpbpublications/Advance-Core-Python-Programming | 0 | 12775258 |
<reponame>bpbpublications/Advance-Core-Python-Programming
class Tree:
def __init__(self,data):
self.tree = [data, [],[]]
def left_subtree(self,branch):
left_list = self.tree.pop(1)
if len(left_list) > 1:
branch.tree[1]=left_list
self.tree.insert(1,branch.tree)
else:
self.tree.insert(1,branch.tree)
def right_subtree(self,branch):
right_list = self.tree.pop(2)
if len(right_list) > 1:
branch.tree[2]=right_list
self.tree.insert(2,branch.tree)
else:
self.tree.insert(2,branch.tree)
#EXECUTION
print("Create Root Node")
root = Tree("Root_node")
print("Value of Root = ",root.tree)
print("Create Left Tree")
tree_left = Tree("Tree_Left")
root.left_subtree(tree_left)
print("Value of Tree_Left = ",root.tree)
print("Create Right Tree")
tree_right = Tree("Tree_Right")
root.right_subtree(tree_right)
print("Value of Tree_Right = ",root.tree)
print("Create Left Inbetween")
tree_inbtw = Tree("Tree left in between")
root.left_subtree(tree_inbtw)
print("Value of Tree_Left = ",root.tree)
print("Create TreeLL")
treell = Tree("TreeLL")
tree_left.left_subtree(treell)
print("Value of TREE = ",root.tree)
| 4.09375 | 4 |
| switchmap/snmp/mib_if.py | PalisadoesFoundation/switchmap-ng | 6 | 12775259 |
#!/usr/bin/env python3
"""Class interacts with devices supporting IfMIB. (32 Bit Counters)."""
from collections import defaultdict
from switchmap.snmp.base_query import Query
from switchmap.utils import general
def get_query():
"""Return this module's Query class."""
return IfQuery
def init_query(snmp_object):
"""Return initialize and return this module's Query class."""
return IfQuery(snmp_object)
class IfQuery(Query):
"""Class interacts with devices supporting IfMIB.
Args:
None
Returns:
None
Key Methods:
supported: Queries the device to determine whether the MIB is
supported using a known OID defined in the MIB. Returns True
if the device returns a response to the OID, False if not.
layer1: Returns all needed layer 1 MIB information from the device.
Keyed by ifIndex (primary key), OID's MIB name (secondary key)
"""
def __init__(self, snmp_object):
"""Function for intializing the class.
Args:
snmp_object: SNMP Interact class object from snmp_manager.py
Returns:
None
"""
# Define query object
self.snmp_object = snmp_object
# Get one OID entry in MIB (ifDescr)
test_oid = '.1.3.6.1.2.1.2.2.1.1'
super().__init__(snmp_object, test_oid, tags=['system', 'layer1'])
def system(self):
"""Get system data from device.
Args:
None
Returns:
final: Final results
"""
# Initialize key variables
final = defaultdict(lambda: defaultdict(dict))
# Return
final['IF-MIB']['ifStackStatus'] = self.ifstackstatus()
return final
def layer1(self):
"""Get layer 1 data from device using Layer 1 OIDs.
Args:
None
Returns:
final: Final results
"""
# Initialize key variables
final = defaultdict(lambda: defaultdict(dict))
# Get interface ifDescr data
_get_data('ifDescr', self.ifdescr, final)
# Get interface ifAlias data
_get_data('ifAlias', self.ifalias, final)
# Get interface ifSpeed data
_get_data('ifSpeed', self.ifspeed, final)
# Get interface ifOperStatus data
_get_data('ifOperStatus', self.ifoperstatus, final)
# Get interface ifAdminStatus data
_get_data('ifAdminStatus', self.ifadminstatus, final)
# Get interface ifType data
_get_data('ifType', self.iftype, final)
# Get interface ifName data
_get_data('ifName', self.ifname, final)
# Get interface ifIndex data
_get_data('ifIndex', self.ifindex, final)
# Get interface ifPhysAddress data
_get_data('ifPhysAddress', self.ifphysaddress, final)
# Get interface ifInOctets data
_get_data('ifInOctets', self.ifinoctets, final)
# Get interface ifOutOctets data
_get_data('ifOutOctets', self.ifoutoctets, final)
# Get interface ifInBroadcastPkts data
_get_data('ifInBroadcastPkts', self.ifinbroadcastpkts, final)
# Get interface ifOutBroadcastPkts data
_get_data('ifOutBroadcastPkts', self.ifoutbroadcastpkts, final)
# Get interface ifInMulticastPkts data
_get_data('ifInMulticastPkts', self.ifinmulticastpkts, final)
# Get interface ifOutMulticastPkts data
_get_data('ifOutMulticastPkts', self.ifoutmulticastpkts, final)
# Get interface ifLastChange data
_get_data('ifLastChange', self.iflastchange, final)
# Return
return final
def iflastchange(self, oidonly=False):
"""Return dict of IFMIB ifLastChange for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifLastChange using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.9'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifinoctets(self, safe=False, oidonly=False):
"""Return dict of IFMIB ifInOctets for each ifIndex for device.
Args:
safe: Do a failsafe walk if True
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifInOctets using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.10'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
if safe is False:
results = self.snmp_object.walk(oid, normalized=True)
else:
results = self.snmp_object.swalk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifoutoctets(self, safe=False, oidonly=False):
"""Return dict of IFMIB ifOutOctets for each ifIndex for device.
Args:
safe: Do a failsafe walk if True
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifOutOctets using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.16'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
if safe is False:
results = self.snmp_object.walk(oid, normalized=True)
else:
results = self.snmp_object.swalk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifdescr(self, safe=False, oidonly=False):
"""Return dict of IFMIB ifDescr for each ifIndex for device.
Args:
safe: Do a failsafe walk if True
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifDescr using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.2'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
if safe is False:
results = self.snmp_object.walk(oid, normalized=True)
else:
results = self.snmp_object.swalk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = str(bytes(value), encoding='utf-8')
# Return the interface descriptions
return data_dict
def iftype(self, oidonly=False):
"""Return dict of IFMIB ifType for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifType using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.3'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifspeed(self, oidonly=False):
"""Return dict of IFMIB ifSpeed for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifSpeed using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.5'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifadminstatus(self, oidonly=False):
"""Return dict of IFMIB ifAdminStatus for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifAdminStatus using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.7'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifoperstatus(self, oidonly=False):
"""Return dict of IFMIB ifOperStatus for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifOperStatus using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.8'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifalias(self, oidonly=False):
"""Return dict of IFMIB ifAlias for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifAlias using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.31.1.1.1.18'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = str(bytes(value), encoding='utf-8')
# Return the interface descriptions
return data_dict
def ifname(self, oidonly=False):
"""Return dict of IFMIB ifName for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifName using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.31.1.1.1.1'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = str(bytes(value), encoding='utf-8')
# Return the interface descriptions
return data_dict
def ifindex(self, oidonly=False):
"""Return dict of IFMIB ifindex for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifindex using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.1'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifphysaddress(self, oidonly=False):
"""Return dict of IFMIB ifPhysAddress for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifPhysAddress using the oid's last node as key
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.2.2.1.6'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID to get MAC address
data_dict[int(key)] = general.octetstr_2_string(value)
# Return the interface descriptions
return data_dict
def ifinmulticastpkts(self, oidonly=False):
"""Return dict of IFMIB ifInMulticastPkts for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifInMulticastPkts. Key = OID's last node.
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.31.1.1.1.2'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifoutmulticastpkts(self, oidonly=False):
"""Return dict of IFMIB ifOutMulticastPkts for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifOutMulticastPkts. Key = OID's last node.
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.31.1.1.1.4'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifinbroadcastpkts(self, oidonly=False):
"""Return dict of IFMIB ifInBroadcastPkts for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifInBroadcastPkts. Key = OID's last node.
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.31.1.1.1.3'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifoutbroadcastpkts(self, oidonly=False):
"""Return dict of IFMIB ifOutBroadcastPkts for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
data_dict: Dict of ifOutBroadcastPkts. Key = OID's last node.
"""
# Initialize key variables
data_dict = defaultdict(dict)
# Process OID
oid = '.1.3.6.1.2.1.31.1.1.1.5'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=True)
for key, value in results.items():
# Process OID
data_dict[int(key)] = value
# Return the interface descriptions
return data_dict
def ifstackstatus(self, oidonly=False):
"""Return dict of IFMIB ifStackStatus for each ifIndex for device.
Args:
oidonly: Return OID's value, not results, if True
Returns:
final: Dict of ifStackStatus keyed by the ifIndex of the
ifstacklowerlayer as primary, and ifstackhigherlayer as
secondary.
Summary:
According to the official IF-MIB file. ifStackStatus is a
"table containing information on the relationships
between the multiple sub-layers of network interfaces. In
particular, it contains information on which sub-layers run
'on top of' which other sub-layers, where each sub-layer
corresponds to a conceptual row in the ifTable. For
example, when the sub-layer with ifIndex value x runs over
the sub-layer with ifIndex value y, then this table
contains:
ifStackStatus.x.y=active
For each ifIndex value, I, which identifies an active
interface, there are always at least two instantiated rows
in this table associated with I. For one of these rows, I
is the value of ifStackHigherLayer; for the other, I is the
value of ifStackLowerLayer. (If I is not involved in
multiplexing, then these are the only two rows associated
with I.)
For example, two rows exist even for an interface which has
no others stacked on top or below it:
ifStackStatus.0.x=active
ifStackStatus.x.0=active"
In the case of Juniper equipment, VLAN information is only
visible on subinterfaces of the main interface. For example
interface ge-0/0/0 won't have VLAN information assigned to it
directly.
When a VLAN is assigned to this interface, a subinterface
ge-0/0/0.0 is automatically created with a non-Ethernet ifType.
VLAN related OIDs are only maintained for this new subinterface
only. This makes determining an interface's VLAN based on
Ethernet ifType more difficult. ifStackStatus maps the ifIndex of
the primary interface (ge-0/0/0) to the ifIndex of the secondary
interface (ge-0/0/0.0) which manages higher level protocols and
data structures such as VLANs and LLDP.
The primary interface is referred to as the
ifStackLowerLayer and the secondary subinterface is referred to
as the ifStackHigherLayer.
"""
# Initialize key variables
final = defaultdict(lambda: defaultdict(dict))
# Process OID
oid = '.1.3.6.1.2.1.31.1.2.1.3'
# Return OID value. Used for unittests
if oidonly is True:
return oid
# Process results
results = self.snmp_object.walk(oid, normalized=False)
for key in results.keys():
# Get higher and lower layer index values
nodes = key.split('.')
ifstackhigherlayer = int(nodes[-2])
ifstacklowerlayer = int(nodes[-1])
# Skip some values
if ifstacklowerlayer == 0:
continue
# Make primary key the lower layer interface ifIndex and the
# value a list of higher level interface ifIndexes.
if ifstacklowerlayer in final:
final[ifstacklowerlayer].append(ifstackhigherlayer)
else:
final[ifstacklowerlayer] = [ifstackhigherlayer]
# Return the interface descriptions
return final
def _get_data(title, func, dest):
"""Populate dest with data from the given function.
Args:
title: The name of the data
func: The function which will return the data
dest: a dict which will store the data
Returns:
dest: The modified destination dict
"""
# Get interface data
values = func()
for key, value in values.items():
dest[key][title] = value
return dest
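# Result shape sketch: layer1() returns a dict keyed by ifIndex, with each
# value keyed by MIB field name, e.g. (interface data here is illustrative):
#   {1: {'ifDescr': 'GigabitEthernet1/0/1', 'ifSpeed': 1000000000, ...}, ...}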
| 2.328125 | 2 |
| src/pygames/iotgalag/190201_main.py | jaeorin/Python | 0 | 12775260 |
import pygame
WHITE = (48, 48, 48)
displaywidth = 470
displayheight = 840
displayobj = None
clock = None
imgbackA = pygame.image.load('image/back.png')
imgbackB = imgbackA.copy()
def iotsetcaption(caption):
pygame.display.set_caption(caption)
def iotbackdraw(image, x, y):
global displayobj
displayobj.blit(image, (x, y))
def iotgo():
global displayobj
global clock
backAposy = 0
backBposy = -displayheight
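# Scrolling technique: two copies of the background move down together, with
# backB starting one screen-height above backA; once backA has scrolled a full
# screen height, both snap back to their start positions for a seamless loop.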
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
key = pygame.key.get_pressed()
if key[pygame.K_SPACE]:
running = False
backAposy = backAposy + 2
backBposy = backBposy + 2
if displayheight <= backAposy:
backAposy = 0
backBposy = -displayheight
displayobj.fill(WHITE)
iotbackdraw(imgbackA, 0, backAposy)
iotbackdraw(imgbackB, 0, backBposy)
pygame.display.update()
clock.tick(60)
pygame.quit()
def base():
global displayobj
global clock
pygame.init()
iotsetcaption("IoT Game")
displayobj = pygame.display.set_mode((displaywidth, displayheight))
clock = pygame.time.Clock()
iotgo()
base()
| 2.984375 | 3 |
| App/Login/views.py | msamunetogetoge/BookRecommendationApp | 0 | 12775261 |
import inspect
from django.http.response import JsonResponse
from django.shortcuts import render, redirect
from django.contrib.auth import login, logout
from django.http import HttpResponseBadRequest
from Login.models import M_User, T_Attr
from utils.make_display_data import make_user_config_data
from utils.need_login import need_login
import secrets
# Create your views here.
class SecretCode:
def __init__(self):
self.secret_code = ""
secretCode = SecretCode()
def index(request):
if request.method=="POST":
id = request.POST["id"]
password = request.POST["password"]
if M_User.objects.filter(username=id, password=password).exists():
user = M_User.objects.get(username=id, password=password)
login(request, user)
return redirect('/book/', permanent=True)
else:
return render(request, "index.html", {"msg" :"IDかPASSWORDが間違っています"})
else:
return render(request, "index.html")
def logout_user(request):
"""
Logs the user out.
"""
msg={"msg":""}
try:
logout(request)
msg={}
msg["msg"] = "ログアウトしました。"
return render(request, "index.html", msg)
except Exception:
msg = {"msg" :"ログアウトに失敗しました"}
return render(request, "index.html", msg)
def change_password(request):
"""
Change the user's password.
"""
msg={"msg":""}
if request.method == "POST":
if request.POST["secret_code"] != "" and request.POST["secret_code"] == secretCode.secret_code :
# password change logic
username = request.POST["username"]
u = M_User.objects.get(username= username)
u.password = request.POST["password"]
u.save()
msg["msg"] = "パスワードを変更しました。"
return render(request, "index.html", msg)
else:
msg["msg"] ="パスワード変更に失敗しました。"
return render(request, "change_password.html", msg)
else:
return render(request, "change_password.html")
def check_and_publish_code(request):
"""
Expected to be called via Ajax.
Password-change flow: enter an id -> if the id exists, show the password form -> change the password.
"""
if request.method=="POST":
id = request.POST["id"]
if M_User.objects.filter(username=id).exists():
secretCode.secret_code = secrets.token_hex(16)
return JsonResponse({"secret":secretCode.secret_code })
else:
return HttpResponseBadRequest(request)
else:
return redirect('/book/', permanent=True)
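# Flow sketch: the password-change page first POSTs the id here; on success the
# returned secret code is embedded in the form and must be POSTed back to
# change_password() along with the new password.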
def signup(request):
"""
Render the signup page.
"""
if request.method=="POST":
id = request.POST["id"]
password = request.POST["password"]
name = request.POST["name"]
email = request.POST["email"]
user = M_User(username = id, password = password, email = email, name = name)
if M_User.objects.filter(username=id).exists():
msg="既に存在しているidです。"
res ={"msg":msg}
return render(request, "signup.html", res)
else:
user.save()
msg={}
msg["msg"] = "ユーザー登録しました。\nログインしてください。"
return render(request, "index.html", msg)
return render(request, "signup.html")
@need_login(redirect_field_name='index.html', err_msg="サインアップ、ログインが必要です")
def user_config(request):
"""
config.htmlを表示する。
"""
data = make_user_config_data(username=request.user)
return render(request, "config.html", data)
# This part is planned to be moved to Ajax.
def register_attr(request):
"""
ajax 処理で使用する。お気に入りを追加して、画面に表示しなおす為に、データを返す
Returns:
[str]: 登録されたお気に入りの名称
"""
if request.method == "POST":
code = request.POST.get("code", None)
if code is None:
return HttpResponseBadRequest(request)
else :
string = request.POST.get("string", "")
if string != "" and not T_Attr.objects.filter(id=request.user, code=code, string=string).exists():
attr = T_Attr(id=request.user, code=code, string=string)
try:
attr.save()
data = {"string":string}
return JsonResponse(data)
except Exception as e:
return HttpResponseBadRequest(request)
return HttpResponseBadRequest(request)
def delete_attr(request):
"""
Deletes the T_Attr record, but only when the request is a POST.
"""
username = request.user
if request.method == "POST":
code = int(request.POST.get("code",-1))
string = request.POST.get("string","")
try:
attr = T_Attr.objects.get(id=username, code=code, string=string)
attr.delete()
data = make_user_config_data(username)
except Exception as e:
print(f"{inspect.currentframe().f_back.f_code.co_filename},{inspect.currentframe().f_back.f_lineno},{e}")
data = make_user_config_data(username)
data["msg"] = "削除に失敗"
return render(request, "config.html", data)
return render(request, "config.html", data)
else:
data = make_user_config_data(username)
return render(request, "config.html", data)
| 2.234375 | 2 |
| src/analysis/plot_network.py | deepqmc/deeperwin | 10 | 12775262 |
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
target = ["True", "False"]
el_decay = ["True", "False"]
error = np.array([[4.478, 3.483],
[3.647, 2.502]])
fig, ax = plt.subplots()
im = ax.imshow(error)
# We want to show all ticks...
ax.set_xticks(np.arange(len(el_decay)))
ax.set_yticks(np.arange(len(target)))
# ... and label them with the respective list entries
ax.set_xticklabels(el_decay)
ax.set_yticklabels(target)
ax.set_ylabel("Target electron")
ax.set_xlabel("Electron-Electron shift decay")
# Rotate the tick labels and set their alignment.
plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
rotation_mode="anchor")
# Loop over data dimensions and create text annotations.
for i in range(len(target)):
for j in range(len(el_decay)):
text = ax.text(j, i, error[i, j],
ha="center", va="center", color="w")
ax.set_title("Shift: Energy error (mHa) for Nitrogen")
fig.tight_layout()
plt.show()
| 2.953125 | 3 |
| MyApi/scrapingApp/models.py | Georgitanev/py_django_scrape | 0 | 12775263 |
<reponame>Georgitanev/py_django_scrape
""" model parliament1"""
from django.db import models
class Parliament1(models.Model):
""" model parliament1"""
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=60)
date_born = models.DateField(blank=True, null=True) # date
place_born = models.CharField(max_length=50, blank=True, null=True)
profession = models.CharField(max_length=80, blank=True, null=True)
lang = models.CharField(max_length=70, blank=True, null=True)
party = models.CharField(max_length=80, blank=True, null=True)
email = models.CharField(max_length=80, blank=True, null=True)
fb = models.CharField(max_length=80, blank=True, null=True) # facebook url
url = models.TextField(max_length=15, blank=True, null=True)
pp = models.TextField(max_length=10, blank=True, null=True) # political party
dob = models.TextField(max_length=15, blank=True, null=True) # date of birth
def __str__(self):
return self.name
class Meta:
""" db_table parliament1"""
db_table = "Parliament1"
| 2.765625 | 3 |
| models/db.py | hendrapaiton/mandalika | 1 | 12775264 |
# Import from system libraries
from flask_mongoengine import MongoEngine
# MongoEngine load to db variable
db = MongoEngine()
# Function to initialize db to app
def initialize_db(app):
db.init_app(app)
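# Usage sketch (assumes a Flask app with MONGODB_SETTINGS configured; the
# connection URI below is a placeholder):
#   from flask import Flask
#   app = Flask(__name__)
#   app.config['MONGODB_SETTINGS'] = {'host': 'mongodb://localhost/mandalika'}
#   initialize_db(app)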
| 2.25 | 2 |
| bcbio/variation/varscan.py | markdunning/bcbio-nextgen | 1 | 12775265 |
"""Provide variant calling with VarScan from TGI at Wash U.
http://varscan.sourceforge.net/
"""
import os
import sys
from bcbio import broad, utils
from bcbio.distributed.transaction import file_transaction, tx_tmpdir
from bcbio.pipeline import config_utils
from bcbio.provenance import do
from bcbio.variation import samtools, vcfutils
from bcbio.variation.vcfutils import (combine_variant_files, write_empty_vcf,
get_paired_bams, bgzip_and_index)
import pysam
def run_varscan(align_bams, items, ref_file, assoc_files,
region=None, out_file=None):
paired = get_paired_bams(align_bams, items)
if paired and paired.normal_bam and paired.tumor_bam:
call_file = samtools.shared_variantcall(_varscan_paired, "varscan",
align_bams, ref_file, items,
assoc_files, region, out_file)
else:
vcfutils.check_paired_problems(items)
call_file = samtools.shared_variantcall(_varscan_work, "varscan",
align_bams, ref_file,
items, assoc_files,
region, out_file)
return call_file
def _get_jvm_opts(config, tmp_dir):
"""Retrieve common options for running VarScan.
Handles jvm_opts, setting user and country to English to avoid issues
with different locales producing non-compliant VCF.
"""
resources = config_utils.get_resources("varscan", config)
jvm_opts = resources.get("jvm_opts", ["-Xms750m", "-Xmx2g"])
jvm_opts = config_utils.adjust_opts(jvm_opts,
{"algorithm": {"memory_adjust":
{"magnitude": 1.1, "direction": "decrease"}}})
jvm_opts += ["-Duser.language=en", "-Duser.country=US"]
jvm_opts += broad.get_default_jvm_opts(tmp_dir)
return " ".join(jvm_opts)
def _varscan_options_from_config(config):
"""Retrieve additional options for VarScan from the configuration.
"""
opts = ["--min-coverage 5", "--p-value 0.98", "--strand-filter 1"]
resources = config_utils.get_resources("varscan", config)
if resources.get("options"):
opts += [str(x) for x in resources["options"]]
return opts
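# Sketch of the bcbio resources block these two helpers read (the option
# values are illustrative, not defaults):
#
# resources:
#   varscan:
#     jvm_opts: ["-Xms750m", "-Xmx2g"]
#     options: ["--min-coverage", "8"]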
def spv_freq_filter(line, tumor_index):
"""Filter VarScan calls based on the SPV value and frequency.
Removes calls with SPV < 0.05 and a tumor FREQ > 0.35.
False positives dominate these higher frequency, low SPV calls. They appear
to be primarily non-somatic/germline variants not removed by other filters.
"""
if line.startswith("#CHROM"):
headers = [('##FILTER=<ID=SpvFreq,Description="High frequency (tumor FREQ > 0.35) '
'and low p-value for somatic (SPV < 0.05)">')]
return "\n".join(headers) + "\n" + line
elif line.startswith("#"):
return line
else:
parts = line.split("\t")
sample_ft = {a: v for (a, v) in zip(parts[8].split(":"), parts[9 + tumor_index].split(":"))}
freq = utils.safe_to_float(sample_ft.get("FREQ"))
spvs = [x for x in parts[7].split(";") if x.startswith("SPV=")]
spv = utils.safe_to_float(spvs[0].split("=")[-1] if spvs else None)
fname = None
if spv is not None and freq is not None:
if spv < 0.05 and freq > 0.35:
fname = "SpvFreq"
if fname:
if parts[6] in set([".", "PASS"]):
parts[6] = fname
else:
parts[6] += ";%s" % fname
line = "\t".join(parts)
return line
def _varscan_paired(align_bams, ref_file, items, target_regions, out_file):
"""Run a paired VarScan analysis, also known as "somatic". """
max_read_depth = "1000"
config = items[0]["config"]
paired = get_paired_bams(align_bams, items)
if not paired.normal_bam:
affected_batch = items[0]["metadata"]["batch"]
message = ("Batch {} requires both tumor and normal BAM files for"
" VarScan cancer calling").format(affected_batch)
raise ValueError(message)
if not utils.file_exists(out_file):
assert out_file.endswith(".vcf.gz"), "Expect bgzipped output to VarScan"
normal_mpileup_cl = samtools.prep_mpileup([paired.normal_bam], ref_file,
config, max_read_depth,
target_regions=target_regions,
want_bcf=False)
tumor_mpileup_cl = samtools.prep_mpileup([paired.tumor_bam], ref_file,
config, max_read_depth,
target_regions=target_regions,
want_bcf=False)
base, ext = utils.splitext_plus(out_file)
indel_file = base + "-indel.vcf"
snp_file = base + "-snp.vcf"
with file_transaction(config, indel_file, snp_file) as (tx_indel, tx_snp):
with tx_tmpdir(items[0]) as tmp_dir:
jvm_opts = _get_jvm_opts(config, tmp_dir)
opts = " ".join(_varscan_options_from_config(config))
remove_zerocoverage = r"{ ifne grep -v -P '\t0\t\t$' || true; }"
export = utils.local_path_export()
varscan_cmd = ("{export} varscan {jvm_opts} somatic "
"<({normal_mpileup_cl} | {remove_zerocoverage}) "
"<({tumor_mpileup_cl} | {remove_zerocoverage}) "
"--output-snp {tx_snp} --output-indel {tx_indel} "
"--output-vcf {opts} ")
# add minimum AF
min_af = float(utils.get_in(paired.tumor_config, ("algorithm",
"min_allele_fraction"), 10)) / 100.0
varscan_cmd += "--min-var-freq {min_af} "
do.run(varscan_cmd.format(**locals()), "Varscan", None, None)
to_combine = []
for fname in [snp_file, indel_file]:
if utils.file_exists(fname):
fix_file = "%s-fix.vcf.gz" % (utils.splitext_plus(fname)[0])
with file_transaction(config, fix_file) as tx_fix_file:
fix_ambig_ref = vcfutils.fix_ambiguous_cl()
fix_ambig_alt = vcfutils.fix_ambiguous_cl(5)
py_cl = os.path.join(os.path.dirname(sys.executable), "py")
normal_name = paired.normal_name
tumor_name = paired.tumor_name
cmd = ("cat {fname} | "
"{py_cl} -x 'bcbio.variation.varscan.fix_varscan_output(x,"
""" "{normal_name}", "{tumor_name}")' | """
"{fix_ambig_ref} | {fix_ambig_alt} | ifne vcfuniqalleles | "
"""{py_cl} -x 'bcbio.variation.vcfutils.add_contig_to_header(x, "{ref_file}")' | """
"""bcftools filter -m + -s REJECT -e "SS != '.' && SS != '2'" 2> /dev/null | """
"{py_cl} -x 'bcbio.variation.varscan.spv_freq_filter(x, 1)' | "
"bgzip -c > {tx_fix_file}")
do.run(cmd.format(**locals()), "Varscan paired fix")
to_combine.append(fix_file)
if not to_combine:
out_file = write_empty_vcf(out_file, config)
else:
out_file = combine_variant_files(to_combine,
out_file, ref_file, config,
region=target_regions)
if os.path.getsize(out_file) == 0:
write_empty_vcf(out_file)
if out_file.endswith(".gz"):
out_file = bgzip_and_index(out_file, config)
def fix_varscan_output(line, normal_name="", tumor_name=""):
"""Fix a varscan VCF line.
    Fixes the ALT column and also fixes floating point values
    output as strings to be Floats: FREQ, SSC.
This function was contributed by <NAME> <<EMAIL>>,
with minor modifications by <NAME> <<EMAIL>>.
"""
line = line.strip()
tofix = ("##INFO=<ID=SSC", "##FORMAT=<ID=FREQ")
    if line.startswith("##"):
if line.startswith(tofix):
line = line.replace('Number=1,Type=String',
'Number=1,Type=Float')
return line
line = line.split("\t")
if line[0].startswith("#CHROM"):
if tumor_name and normal_name:
mapping = {"NORMAL": normal_name, "TUMOR": tumor_name}
base_header = line[:9]
old_samples = line[9:]
if len(old_samples) == 0:
return "\t".join(line)
samples = [mapping[sample_name] for sample_name in old_samples]
assert len(old_samples) == len(samples)
return "\t".join(base_header + samples)
else:
return "\t".join(line)
try:
REF, ALT = line[3:5]
except ValueError:
return "\t".join(line)
def _normalize_freq(line, sample_i):
"""Ensure FREQ genotype value is float as defined in header.
"""
ft_parts = line[8].split(":")
dat = line[sample_i].split(":")
# Non-conforming no-call sample, don't try to fix FREQ
if len(dat) != len(ft_parts):
return line
freq_i = ft_parts.index("FREQ")
try:
dat[freq_i] = str(float(dat[freq_i].rstrip("%")) / 100)
except ValueError: # illegal binary characters -- set frequency to zero
dat[freq_i] = "0.0"
line[sample_i] = ":".join(dat)
return line
if len(line) > 9:
line = _normalize_freq(line, 9)
if len(line) > 10:
line = _normalize_freq(line, 10)
# HACK: The position of the SS= changes, so we just search for it
ss_vals = [item for item in line[7].split(";") if item.startswith("SS=")]
if len(ss_vals) > 0:
somatic_status = int(ss_vals[0].split("=")[1]) # Get the number
else:
somatic_status = None
if somatic_status == 5:
# "Unknown" states are broken in current versions of VarScan
# so we just bail out here for now
return
# fix FREQ for any additional samples -- multi-sample VarScan calling
if len(line) > 11:
for i in range(11, len(line)):
line = _normalize_freq(line, i)
#FIXME: VarScan also produces invalid REF records (e.g. CAA/A)
# This is not handled yet.
if "+" in ALT or "-" in ALT:
if "/" not in ALT:
if ALT[0] == "+":
R = REF
A = REF + ALT[1:]
elif ALT[0] == "-":
R = REF + ALT[1:]
A = REF
else:
Ins = [p[1:] for p in ALT.split("/") if p[0] == "+"]
Del = [p[1:] for p in ALT.split("/") if p[0] == "-"]
if len(Del):
REF += sorted(Del, key=lambda x: len(x))[-1]
A = ",".join([REF[::-1].replace(p[::-1], "", 1)[::-1]
for p in Del] + [REF + p for p in Ins])
R = REF
REF = R
ALT = A
else:
ALT = ALT.replace('/', ',')
line[3] = REF
line[4] = ALT
return "\t".join(line)
def _create_sample_list(in_bams, vcf_file):
"""Pull sample names from input BAMs and create input sample list.
"""
out_file = "%s-sample_list.txt" % os.path.splitext(vcf_file)[0]
with open(out_file, "w") as out_handle:
for in_bam in in_bams:
with pysam.Samfile(in_bam, "rb") as work_bam:
for rg in work_bam.header.get("RG", []):
out_handle.write("%s\n" % rg["SM"])
return out_file
def _varscan_work(align_bams, ref_file, items, target_regions, out_file):
"""Perform SNP and indel genotyping with VarScan.
"""
config = items[0]["config"]
orig_out_file = out_file
out_file = orig_out_file.replace(".vcf.gz", ".vcf")
max_read_depth = "1000"
sample_list = _create_sample_list(align_bams, out_file)
mpileup = samtools.prep_mpileup(align_bams, ref_file, config, max_read_depth,
target_regions=target_regions, want_bcf=False)
# VarScan fails to generate a header on files that start with
# zerocoverage calls; strip these with grep, we're not going to
# call on them
remove_zerocoverage = r"{ ifne grep -v -P '\t0\t\t$' || true; }"
# we use ifne from moreutils to ensure we process only on files with input, skipping otherwise
# http://manpages.ubuntu.com/manpages/natty/man1/ifne.1.html
with tx_tmpdir(items[0]) as tmp_dir:
jvm_opts = _get_jvm_opts(config, tmp_dir)
opts = " ".join(_varscan_options_from_config(config))
min_af = float(utils.get_in(config, ("algorithm", "min_allele_fraction"), 10)) / 100.0
fix_ambig_ref = vcfutils.fix_ambiguous_cl()
fix_ambig_alt = vcfutils.fix_ambiguous_cl(5)
py_cl = os.path.join(os.path.dirname(sys.executable), "py")
export = utils.local_path_export()
cmd = ("{export} {mpileup} | {remove_zerocoverage} | "
"ifne varscan {jvm_opts} mpileup2cns {opts} "
"--vcf-sample-list {sample_list} --min-var-freq {min_af} --output-vcf --variants | "
"""{py_cl} -x 'bcbio.variation.vcfutils.add_contig_to_header(x, "{ref_file}")' | """
"{py_cl} -x 'bcbio.variation.varscan.fix_varscan_output(x)' | "
"{fix_ambig_ref} | {fix_ambig_alt} | ifne vcfuniqalleles > {out_file}")
do.run(cmd.format(**locals()), "Varscan", None,
[do.file_exists(out_file)])
os.remove(sample_list)
# VarScan can create completely empty files in regions without
# variants, so we create a correctly formatted empty file
if os.path.getsize(out_file) == 0:
write_empty_vcf(out_file)
if orig_out_file.endswith(".gz"):
vcfutils.bgzip_and_index(out_file, config)
| 2.078125
| 2
|
frappe/patches/v12_0/copy_to_parent_for_tags.py
|
ektai/frappe3
| 0
|
12775266
|
<reponame>ektai/frappe3<filename>frappe/patches/v12_0/copy_to_parent_for_tags.py<gh_stars>0
import frappe
def execute():
frappe.db.sql("UPDATE `tabTag Link` SET parenttype=document_type")
frappe.db.sql("UPDATE `tabTag Link` SET parent=document_name")
| 1.023438
| 1
|
config/models.py
|
pablo-moreno/shitter-back
| 0
|
12775267
|
from django.db import models
class Deletable(models.Model):
deleted = models.BooleanField(default=False)
def delete(self, *args, **kwargs):
self.deleted = True
return self.save()
class Meta:
abstract = True
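# Hypothetical usage sketch (Post is an assumed example model, not part of this file):
#
# class Post(Deletable):
#     title = models.CharField(max_length=120)
#
# post.delete() flips deleted=True and saves instead of removing the row;
# hide soft-deleted rows with Post.objects.filter(deleted=False).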
| 2.203125
| 2
|
projetos em python/exercicio43.py
|
gustavo621/projetos-em-python
| 0
|
12775268
|
peso = float(input("enter your weight here (kg): "))
altura = float(input("enter your height: "))
imc = peso/altura**2
print("your BMI is {:3.2f}".format(imc))
if imc < 18.5:
    print("you are underweight")
elif 18.5 <= imc < 25:
    print("congratulations, you are at your ideal weight!")
elif 25 <= imc < 30:
    print("overweight")
elif 30 <= imc < 40:
    print("you are obese")
elif imc >= 40:
    print("morbid obesity!")
| 4
| 4
|
bridge/handler/__init__.py
|
Paula-Kli/IOT
| 0
|
12775269
|
<filename>bridge/handler/__init__.py
from .message_management import createDeviceInitializationMessage
from .message_handler import SerialHandler
| 1.203125
| 1
|
etl/extract_codebook.py
|
jbn/anes_recoder
| 0
|
12775270
|
<reponame>jbn/anes_recoder
#!/usr/bin/env python
import json
import os
import re
from collections import OrderedDict
import modpipe
VERSION_RE = re.compile(r"^RELEASE VERSION:\s+(\d+)")
LINE_SEP = "=" * 99 + "\n"
DATA_PATH = os.path.join("data", "raw", "anes_timeseries_cdf_codebook_var.txt")
OUTPUT_PATH = os.path.join("data", "clean", "anes_cb.json")
def defn_iterator(file_path):
version = None
lines = []
with open(file_path, errors='ignore') as fp:
# for line in fp:
# if version is None:
# version = VERSION_RE.search(line).group(1)
# yield {'version': version}
# elif line == LINE_SEP:
# break
for line in fp:
if line == LINE_SEP and lines:
yield lines
lines = []
else:
lines.append(line.rstrip())
if lines[-1] == "" and lines[-2] == "" and lines[-3] == "1":
yield lines[:-3]
general_notes, var_defs = [], OrderedDict()
version = None
with modpipe.ModPipe("codebook_pipeline") as pipe:
for lines in defn_iterator(DATA_PATH):
if 'version' in lines:
version = lines['version']
else:
res = pipe(lines.copy())
if '_general_note_lines' in res:
general_notes.append(res)
else:
var_defs[res['name']] = res
codebook = OrderedDict([('version', version)])
codebook['var_defs'] = var_defs
codebook['notes'] = general_notes
with open(OUTPUT_PATH, "w") as fp:
json.dump(codebook, fp)
| 2.140625
| 2
|
derl/alg/dqn_test.py
|
MichaelKonobeev/derl
| 5
|
12775271
|
<reponame>MichaelKonobeev/derl<gh_stars>1-10
# pylint: disable=missing-docstring
from derl.env.make_env import make as make_env
from derl.factory.dqn import DQNFactory
from derl.alg.test import AlgTestCase
class DQNTest(AlgTestCase):
def setUp(self):
super().setUp()
kwargs = DQNFactory.get_kwargs()
kwargs["storage_init_size"] = 42
self.env = make_env("SpaceInvadersNoFrameskip-v4",
nenvs=kwargs.get("nenvs"), seed=0)
self.alg = DQNFactory(**kwargs).make(self.env)
self.alg.model.to("cpu")
def test_interactions(self):
_ = next(self.alg.runner.run())
| 1.90625
| 2
|
Server.py
|
ht21992/Online-Board-Game
| 0
|
12775272
|
from socket import *
from threading import *
clients = set()
nicknames = []
def clientThread(clientSocket, clientAddress,nickname):
    while True:
        try:
            message = clientSocket.recv(1024).decode("utf-8")
            if not message:  # an empty read means the client disconnected
                clients.remove(clientSocket)
                print(clientAddress[0] + ":" + str(clientAddress[1]) + " disconnected")
                break
            print(f"{nickname} : {message}")
            if ":signal number" in message:
                print("yes")
                for client in clients:
                    if client is not clientSocket:
                        client.send((f'{nickname}:' + message).encode("utf-8"))
            else:
                for client in clients:
                    if client is not clientSocket:
                        client.send((f"{nickname} : " + message).encode("utf-8"))
        except ConnectionResetError:
            clients.discard(clientSocket)  # drop the dead socket and end the thread
            break
clientSocket.close()
hostSocket = socket(AF_INET, SOCK_STREAM)
hostSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR,1)
hostIp = "127.0.0.1"
portNumber = 7500
hostSocket.bind((hostIp, portNumber))
hostSocket.listen()
print ("Waiting for connection...")
if __name__=='__main__':
while True:
clientSocket, clientAddress = hostSocket.accept()
clients.add(clientSocket)
nickname = clientSocket.recv(1024).decode('utf-8')
# print("Nickname",nickname)
# nicknames.append(nickname)
print ("Connection established with: ", clientAddress[0] + ":" + str(clientAddress[1]))
thread = Thread(target=clientThread, args=(clientSocket, clientAddress,nickname ))
thread.start()
| 2.9375
| 3
|
debug/__init__.py
|
Kupoman/BlenderRealtimeEngineAddon
| 49
|
12775273
|
bl_info = {
"name": "RTE Debug",
"author": "<NAME>",
"blender": (2, 75, 0),
"location": "Info header, render engine menu",
"description": "Debug implementation of the Realtime Engine Framework",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"support": 'TESTING',
"category": "Render"}
if "bpy" in locals():
import imp
imp.reload(addon)
else:
import bpy
from .addon import DebugEngine
def register():
panels = [getattr(bpy.types, t) for t in dir(bpy.types) if 'PT' in t]
for panel in panels:
if hasattr(panel, 'COMPAT_ENGINES') and 'BLENDER_GAME' in panel.COMPAT_ENGINES:
panel.COMPAT_ENGINES.add('RTE_DEBUG')
bpy.utils.register_module(__name__)
def unregister():
panels = [getattr(bpy.types, t) for t in dir(bpy.types) if 'PT' in t]
for panel in panels:
if hasattr(panel, 'COMPAT_ENGINES') and 'RTE_FRAMEWORK' in panel.COMPAT_ENGINES:
panel.COMPAT_ENGINES.remove('RTE_DEBUG')
bpy.utils.unregister_module(__name__)
| 1.773438
| 2
|
flask_table/__init__.py
|
nullptrT/flask_table
| 215
|
12775274
|
from .table import Table, create_table
from .columns import (
Col,
BoolCol,
DateCol,
DatetimeCol,
LinkCol,
ButtonCol,
OptCol,
NestedTableCol,
BoolNaCol,
)
| 1.289063
| 1
|
fem_input.py
|
sepitto/OAPproject2
| 0
|
12775275
|
def get_input():
    EF1 = float(input("enter the element stiffness values: " + "\n" + "EF1 = "))
    EF2 = float(input("EF2 = "))
    F = float(input("enter the force value:" + "\n" + "F = "))
return EF1, EF2, F
| 3.5
| 4
|
build_segments.py
|
uxai/string-partitioner
| 1
|
12775276
|
import math
def segment_builder(arg, tail):
req_list = arg[0]
divisions = arg[1]
division_dec = divisions / 100
# number of segments to be created in the element passed
partition_count = int(math.ceil(100 / divisions))
if len(arg) == 3:
segments = arg[2]
else:
g = int(100 / divisions + 1)
segments = tuple([n for n in range(1, g)])
def builder(segments):
"""
Calculate the remainder of numbers that won't be evenly split
into groups, and calculates at which point the groups need to start
catering for the extra numbers.
Extra numbers are divided up into the last groups of the list given.
For example: if we have a list of [1, 2, 3, 4, 5] and we ask to split
the group in thirds, the ouput would be [1][2, 3][4, 5]
"""
remainder = int(len(req_list) % partition_count)
"""
Removes the remainder from the length of the passed in list, this way
we can continue with an even division. math.floor is used for precaution only.
"""
partition_size = math.floor(division_dec * (len(req_list) - remainder))
segment_storage = []
if remainder > 0:
"""
Calculates the number of extras from the length of the list passed.
Group extras: tells us when to start adding the extras to generated segments
"""
group_extras = partition_count - remainder
pos = 0 # keep track of position
            if not tail:
cycle = reversed(range(0, partition_count))
else:
cycle = range(0, partition_count)
for group in cycle:
if group < group_extras:
segment_storage.append([pos, pos + partition_size])
pos += partition_size
else:
segment_storage.append([pos, pos + partition_size + 1])
pos += partition_size + 1
else:
            if not tail:
cycle = reversed(range(0, len(req_list), partition_size))
else:
cycle = range(0, len(req_list), partition_size)
for ind in cycle:
segment_storage.append([ind, ind+partition_size])
return req_list[segment_storage[segments-1][0]:segment_storage[segments-1][1]]
if isinstance(segments, int) and segments * division_dec <= 1:
return builder(segments)
elif isinstance(segments, tuple):
multi_segment = []
for segment in segments:
multi_segment.append(builder(segment))
return multi_segment
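# Hypothetical usage sketch (values are assumptions, not from the original file):
# splitting a five-element list into ~50% segments mirrors the docstring example.
if __name__ == "__main__":
    print(segment_builder(([1, 2, 3, 4, 5], 50), False))  # -> [[1, 2, 3], [4, 5]]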
| 3.765625
| 4
|
models/classifier.py
|
AmanDaVinci/Universal-Sentence-Representations
| 0
|
12775277
|
<filename>models/classifier.py
import torch
import torch.nn as nn
class Classifier(nn.Module):
def __init__(self, encoder, encoded_dim):
super().__init__()
self.encoder = encoder
self.layers = nn.Sequential(
nn.Linear(4 * encoded_dim, 512),
nn.ReLU(),
nn.Linear(512, 3)
)
def forward(self, premise, hypothesis):
u = self.encoder(premise)
v = self.encoder(hypothesis)
diff = u - v
prod = u * v
x = torch.cat((u, v, diff, prod), dim=1)
return self.layers(x)
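# Hypothetical smoke test (the encoder and dimensions are assumptions, not from
# the original file): any module mapping a batch of token ids to a fixed-size
# vector works, e.g. an EmbeddingBag acting as a bag-of-words sentence encoder.
if __name__ == "__main__":
    enc = nn.EmbeddingBag(num_embeddings=100, embedding_dim=8)
    clf = Classifier(enc, encoded_dim=8)
    premise = torch.randint(0, 100, (2, 5))
    hypothesis = torch.randint(0, 100, (2, 5))
    print(clf(premise, hypothesis).shape)  # expected: torch.Size([2, 3])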
| 2.9375
| 3
|
rcommander_plain/src/rcommander_plain/rcommander_default.py
|
rummanwaqar/rcommander-core
| 4
|
12775278
|
<gh_stars>1-10
#!/usr/bin/python
import roslib; roslib.load_manifest('rcommander_plain')
import rcommander.rcommander as rc
import rospy
import tf
rospy.init_node('rcommander_plain', anonymous=True)
robot = None
tf = tf.TransformListener()
rc.run_rcommander(['default', 'default_frame', 'plain'], robot, tf)
| 1.773438
| 2
|
python/FastAPI examples/Streaming response example/api.py
|
andrewguest/code-snippets
| 1
|
12775279
|
import io
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from fastapi.staticfiles import StaticFiles
app = FastAPI()
# create a 'static files' directory
# create a '/static' prefix for all files
# serve files from the 'media/' directory under the '/static/' route
# /Big_Buck_Bunny_1080p.avi becomes '/static/Big_Buck_Bunny_1080p.avi'
# name='static' is used internally by FastAPI
app.mount("/static", StaticFiles(directory="media"), name="static")
@app.get("/")
async def main():
# open the movie file to stream it
movie = open("media/Big_Buck_Bunny_1080p.avi", "rb")
# return a stream response with the movie and a MIME type of 'video/avi'
return StreamingResponse(movie, media_type="video/avi")
| 3.125
| 3
|
tictactoe/board.py
|
iTigrisha/tic-tac-toe
| 0
|
12775280
|
<gh_stars>0
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return f"[{self.x},{self.y}]"
class X(Point):
def __init__(self, x, y):
super().__init__(x, y)
def __str__(self):
return "X" + super().__str__()
class O(Point):
def __init__(self, x, y):
super().__init__(x, y)
def __str__(self):
return "O" + super().__str__()
class Board:
def __init__(self):
self._board = [
[" ", " ", " "],
[" ", " ", " "],
[" ", " ", " "],
]
@property
def board(self):
return self._board
@board.setter
def board(self, val):
x = int(val[1]) - 1
y = int(val[2]) - 1
self._board[x][y] = val[0]
def __str__(self):
"""
| X | O | |
| O | | |
| | X | |
"""
board_str = ""
for line in self._board:
board_str += "| " + " | ".join(line) + " |\n"
return board_str
# if __name__ == "__main__":
# board = Board()
# print(board)
#
# x = X(x=1, y=1)
# print(X.__name__)
| 3.90625
| 4
|
swarmopt/mopso_agg.py
|
swarmopt/swarmopt
| 6
|
12775281
|
import numpy as np
import copy
class Particle:
def __init__(self, lb, ub):
"""Initialize the particle.
Attributes
----------
lb : float
lower bounds for initial values
ub : float
upper bounds for initial values
"""
self.lb = lb
self.ub = ub
self.position = np.random.uniform(lb, ub, size=lb.shape[0])
self.velocity = np.random.uniform(lb, ub, size=lb.shape[0])
self.fitness = None
self.pbest_position = self.position
self.pbest_fitness = float('inf')
def move(self):
self.position += self.velocity
class Swarm:
def __init__(self, function_list, n_particles, n_iterations,
lb, ub, w=0.7, c1=2.0, c2=2.0):
"""Initialize the swarm.
Attributes
        ----------
function_list : list
list of functions to optimize
n_particles : int
number of particles in swarm
n_iterations : int
number of optimization iterations
lb : float
lower bounds for initial values
ub : float
upper bounds for initial values
w : float
inertia weight
c1 : float
cognitive weight
c2 : float
social weight
"""
self.function_list = function_list
self.n_obj = len(function_list)
self.n_particles = n_particles
self.n_iterations = n_iterations
assert len(lb) == len(ub)
self.lb = np.array(lb)
self.ub = np.array(ub)
self.w = w
self.c1 = c1
self.c2 = c2
self.gbest_position = np.random.uniform(lb, ub, size=self.lb.shape[0])
self.gbest_fitness = float('inf')
self.population = []
self.iteration = 0
def reset_environment(self):
self.population = []
self.iteration = 0
def termination_check(self):
if self.iteration > self.n_iterations:
return False
else:
return True
def initialise_swarm(self):
for _ in range(self.n_particles):
self.population.append(Particle(self.lb, self.ub))
def eval_fitness(self, particle):
"""Evaluate particle fitness based on all functions in function_list"""
_fitness = 0
for func in self.function_list:
_fitness += func(particle.position)
particle.fitness = _fitness
def swarm_eval_fitness(self):
for particle in self.population:
self.eval_fitness(particle)
def update_velocity(self, particle):
inertia = self.w * particle.velocity
cognitive = (self.c1 * np.random.uniform()
* (particle.pbest_position - particle.position))
social = (self.c2 * np.random.uniform()
* (self.gbest_position - particle.position))
particle.velocity = inertia + cognitive + social
def swarm_update_velocity(self):
for particle in self.population:
self.update_velocity(particle)
def update_pbest(self, particle):
if particle.fitness < particle.pbest_fitness:
particle.pbest_fitness = particle.fitness
particle.pbest_position = particle.position
def update_gbest(self, particle):
if particle.fitness < self.gbest_fitness:
self.gbest_fitness = copy.deepcopy(particle.fitness)
self.gbest_position = copy.deepcopy(particle.position)
def swarm_update_best(self):
for particle in self.population:
self.update_pbest(particle)
self.update_gbest(particle)
def swarm_move(self):
for particle in self.population:
particle.move()
def optimise(self):
self.reset_environment()
self.initialise_swarm()
while self.termination_check():
self.swarm_eval_fitness()
self.swarm_update_best()
self.swarm_update_velocity()
self.swarm_move()
self.iteration += 1
if __name__ == '__main__':
print('MOPSO: Aggregating Approach')
def function_one(position):
return np.square(position[0])
def function_two(position):
return np.square(position[0] - 2)
function_list = [function_one, function_two]
n_particles = 30
n_iterations = 100
lb = [-100]
ub = [100]
swarm = Swarm(function_list=function_list,
n_particles=n_particles,
n_iterations=n_iterations,
lb=lb,
ub=ub)
swarm.optimise()
print('gbest_position: ', swarm.gbest_position)
print('gbest_fitness: ', swarm.gbest_fitness)
| 3.484375
| 3
|
player.py
|
ellyn/tronbots
| 2
|
12775282
|
import pygame
from pygame.locals import *
from constants import *
from copy import deepcopy
import numpy as np
from heuristic import *
class Player(object):
def __init__(self, color, player_num):
self.color = color
self.direction = UP
self.player_num = player_num
self.move_counter = 0 # Keeps track of movement to regulate growth rate
loc = P1_LOC if player_num == 1 else P2_LOC
self.segments = [Rect(loc[0], loc[1], CELL_WIDTH, CELL_WIDTH)]
def direction_valid(self,direction):
if (direction == UP and self.direction == DOWN):
return False
if (direction == LEFT and self.direction == RIGHT):
return False
if (direction == DOWN and self.direction == UP):
return False
if (direction == RIGHT and self.direction == LEFT):
return False
return True
def set_direction(self, direction):
if self.direction_valid(direction):
self.direction = direction
def set_color(self, color):
self.color = color
def clone(self, player=None, direction=None):
        if player is None:
player = self
cloned_player = deepcopy(player)
        if direction is not None:
cloned_player.direction = direction
cloned_player.move()
return cloned_player
def get_state(self, other_player):
        state = np.zeros((GAME_HEIGHT // CELL_WIDTH, GAME_WIDTH // CELL_WIDTH))
        for rect in self.segments:
            loc = rect.topleft
            x, y = loc[0] // CELL_WIDTH, loc[1] // CELL_WIDTH
            state[y, x] = FRIENDLY
        for rect in other_player.segments:
            loc = rect.topleft
            x, y = loc[0] // CELL_WIDTH, loc[1] // CELL_WIDTH
            state[y, x] = OPPONENT
        return state
def has_collided(self, other_player, head = None):
segments_to_check = self.segments[:]
        if head is None:
head = self.segments[0]
segments_to_check.pop(0)
head_loc = head.topleft
return (not (0 <= head_loc[0] <= GAME_WIDTH - CELL_WIDTH) or
not (0 <= head_loc[1] <= GAME_HEIGHT - CELL_WIDTH) or
head.collidelist(segments_to_check) != -1 or
head.collidelist(other_player.segments) != -1)
def draw(self, display_surface):
for segment in self.segments:
pygame.draw.rect(display_surface, self.color, segment)
def move(self):
head_loc = self.segments[0].topleft
delta = DIRECTION_DELTAS[self.direction]
new_x = head_loc[0] + delta['x'] * CELL_WIDTH
new_y = head_loc[1] + delta['y'] * CELL_WIDTH
head = Rect(new_x, new_y, CELL_WIDTH, CELL_WIDTH)
self.segments.insert(0, head)
self.move_counter = (self.move_counter + 1) % PLAYER_GROWTH_RATE
if self.move_counter == 0:
self.segments.pop() # Remove last segment of tail
""" Chooses the next move to make in the game.
Subclasses of Player (aka custom bots) should override this method.
other_player is a dict object with the following key/values:
direction: The other player's current direction (i.e. UP)
segments: Copy of list of segments of the other player
"""
def choose_move(self, other_player):
self.move()
| 3.015625
| 3
|
components/elm/src/external_models/sbetr/3rd-party/pfunit/bin/mods/pre/pre.py
|
meng630/GMD_E3SM_SCM
| 0
|
12775283
|
#!/usr/bin/env python
# python2 - Deprecated in python 2.7+
import imp
try:
imp.find_module('argparse')
found = True
except ImportError:
found = False
# Preferred for python 2.7+, python 3
# import importlib
# argparse_loader = importlib.find_loader('argparse')
# found = argparse_loader is not None
if found:
import argparse
else:
print('pre.py::Error. pFUnit requires argparse module provided by python version >= 2.7.')
print('Quitting!'); quit()
#####
from pre_If import *
from pre_Repeat import *
parser = argparse.ArgumentParser(description='A preproccessor for pfunit research')
parser.add_argument('--inFile')
parser.add_argument('--outFile')
args = parser.parse_args()
if __name__ == '__main__' :
result = pre(inFile=args.inFile, outFile=args.outFile)
if args.inFile :
if not args.outFile :
            print(result)
| 2.828125
| 3
|
sepaxml/validation.py
|
CaptainConsternant/python-sepaxml
| 53
|
12775284
|
<filename>sepaxml/validation.py<gh_stars>10-100
import os
class ValidationError(Exception):
pass
def try_valid_xml(xmlout, schema):
import xmlschema # xmlschema does some weird monkeypatching in etree, if we import it globally, things fail
try:
my_schema = xmlschema.XMLSchema(os.path.join(os.path.dirname(__file__), 'schemas', schema + '.xsd'))
my_schema.validate(xmlout.decode())
except xmlschema.XMLSchemaValidationError as e:
raise ValidationError(
"The output SEPA file contains validation errors. This is likely due to an illegal value in one of "
"your input fields."
) from e
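# Hypothetical usage sketch (the schema name is an assumption; sepaxml ships
# schemas such as pain.001.001.03 for SEPA credit transfers):
# try_valid_xml(generated_xml_bytes, "pain.001.001.03")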
| 2.5625
| 3
|
qolsys_client/mqtt_client.py
|
mzac/qolsys_client
| 7
|
12775285
|
import paho.mqtt.client as pmqtt
import paho.mqtt.subscribe as smqtt
import json
import time
import logging
class mqtt:
def __init__(self, broker: str, username: str, password: str, port=1883):
self.client = ""
self.broker = broker
self.port = port
self.username = username
self.password = password
self.connect()
def connect(self):
self.client = pmqtt.Client()
self.client.connect(host=self.broker, port=self.port)
def publish(self, topic:str, message:str):
if topic == "" or message == "":
raise Exception("Topic and Message required")
published = self.client.publish(topic, message)
while not published.is_published():
time.sleep(0.5)
print("published:", published.rc)
    def subscribe(self, topics: list, cb: callable):
        if not topics:
            raise Exception("Need a topic to listen to")
logging.debug("Starting the MQTT subscriber")
smqtt.callback(cb, topics, hostname=self.broker)
#subscribed.callback(cb,)
#self.client.subscribe(topic)
#self.client.on
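# Hypothetical usage sketch (broker, credentials, and topic are assumptions):
# m = mqtt(broker="localhost", username="user", password="secret")
# m.publish("qolsys/test", '{"event": "ping"}')
# m.subscribe(["qolsys/#"], cb=lambda client, userdata, msg: print(msg.payload))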
| 2.953125
| 3
|
strinks/api/shops/ichigo.py
|
Zeletochoy/strinks
| 1
|
12775286
|
import re
from typing import Iterator, Tuple
import requests
from bs4 import BeautifulSoup
from ...db.models import BeerDB
from ...db.tables import Shop as DBShop
from . import NoBeersError, NotABeerError, Shop, ShopBeer
DIGITS = set("0123456789")
def keep_until_japanese(text: str) -> str:
chars = []
for c in text:
if ord(c) < 0x3000: # first japanese characters
chars.append(c)
else:
break
return "".join(chars)
class IchiGoIchiAle(Shop):
short_name = "ichigo"
display_name = "<NAME>"
def _iter_pages(self) -> Iterator[BeautifulSoup]:
i = 1
while True:
url = f"https://151l.shop/?mode=grp&gid=1978037&sort=n&page={i}"
page = requests.get(url).text
yield BeautifulSoup(page, "html.parser")
i += 1
def _iter_page_beers(self, page_soup: BeautifulSoup) -> Iterator[Tuple[BeautifulSoup, str]]:
empty = True
for item in page_soup("li", class_="productlist_list"):
if item.find("span", class_="item_soldout") is not None:
continue
url = "https://151l.shop/" + item.find("a")["href"]
page = requests.get(url).text
yield BeautifulSoup(page, "html.parser"), url
empty = False
if empty:
raise NoBeersError
def _parse_beer_page(self, page_soup, url) -> ShopBeer:
title = page_soup.find("h2", class_="product_name").get_text().strip()
name_match = re.search(r"[((]([^))]*)[))]$", title)
if name_match is None:
raise NotABeerError
raw_name = name_match.group(1).strip()
price_text = page_soup.find("span", class_="product_price").get_text().strip()
price_match = re.search(r"税込([0-9,]+)円", price_text)
if price_match is None:
raise NotABeerError
price = int(price_match.group(1).replace(",", ""))
desc = page_soup.find("div", class_="product_explain").get_text()
ml_match = re.search(r"容量:(\d+)ml", desc.lower())
if ml_match is None:
raise NotABeerError
ml = int(ml_match.group(1))
image_url = page_soup.find("img", class_="product_img_main_img")["src"]
try:
return ShopBeer(
raw_name=raw_name,
url=url,
milliliters=ml,
price=price,
quantity=1,
image_url=image_url,
)
except UnboundLocalError:
raise NotABeerError
def iter_beers(self) -> Iterator[ShopBeer]:
for listing_page in self._iter_pages():
try:
for beer_page, url in self._iter_page_beers(listing_page):
try:
yield self._parse_beer_page(beer_page, url)
except NotABeerError:
continue
except Exception as e:
print(f"Unexpected exception while parsing page, skipping.\n{e}")
except NoBeersError:
break
def get_db_entry(self, db: BeerDB) -> DBShop:
return db.insert_shop(
name=self.display_name,
url="https://151l.shop/",
image_url="https://img21.shop-pro.jp/PA01423/875/PA01423875.png?cmsp_timestamp=20201017123822",
shipping_fee=950,
)
| 2.921875
| 3
|
sstcam_simulation/data/__init__.py
|
sstcam/sstcam-simulation
| 1
|
12775287
|
<filename>sstcam_simulation/data/__init__.py<gh_stars>1-10
from os.path import join, dirname
from os import environ
import requests
def get_data(path):
return join(dirname(__file__), path)
def download_camera_efficiency_data():
"""
Download the camera efficiency data from the mpi-hd CTA webserver
Obtains KONRAD_USERNAME and KONRAD_PASSWORD from the environment
"""
path = get_data("datasheet/p4eff_ASTRI-CHEC.lis")
r = requests.get(
'https://www.mpi-hd.mpg.de/hfm/CTA/MC/Prod4/Config/'
'Efficiencies/p4eff_ASTRI-CHEC.lis',
auth=(environ["KONRAD_USERNAME"], environ["KONRAD_PASSWORD"])
)
with open(path, 'wb') as f:
f.write(r.content)
if __name__ == '__main__':
download_camera_efficiency_data()
| 2.546875
| 3
|
Desafio73.py
|
VictorCastao/Curso-em-Video-Python
| 0
|
12775288
|
print('=' * 12 + 'Desafio 73' + '=' * 12)
tabelabrasileirao = (
"Flamengo", "Santos", "Palmeiras", "Grêmio", "Athletico-PR", "São Paulo", "Internacional", "Corinthians",
"Fortaleza", "Goiás", "Bahia", "Vasco", "Atlético-MG", "Fluminense", "Botafogo", "Ceará", "Cruzeiro", "CSA",
"Chapecoense", "Avaí")
print(f'The top 5 teams are: {tabelabrasileirao[:5]}')
print(f'The bottom 4 teams are: {tabelabrasileirao[16:]}')
print(f"""The teams in alphabetical order are:
{sorted(tabelabrasileirao)}""")
print(f'Chapecoense is in position {tabelabrasileirao.index("Chapecoense")+1}.')
| 3.6875
| 4
|
src/parser/scraper.py
|
TomMarti/LIL-CRAWLER
| 0
|
12775289
|
<reponame>TomMarti/LIL-CRAWLER
import requests
class Scraper:
def __init__(self):
self.name = "scraper"
@staticmethod
def scrape(url):
        r = None
        try:
            r = requests.get(url)
        except requests.RequestException:  # network failure: nothing to crawl
            return []
if r.status_code == 200:
return Scraper.parse(r.text)
else:
return []
@staticmethod
def parse(file):
tofind = "href"
filelenght = len(file) - 1
res = [i for i in range(len(file)) if file.startswith(tofind, i)]
newurls = []
for element in res:
ur = Scraper.get_url(file, element, filelenght)
if len(ur) > 0 and ur[0] == "h":
newurls.append(ur)
return newurls
@staticmethod
def get_url(file, position, filelenght):
url = ""
stop = False
i = 5 + position
iplusun = ""
        while not stop:
            url += iplusun
            try:
                iplusun = file[i + 1]
            except IndexError:  # ran off the end of the document
                return ""
if iplusun != '"':
i += 1
else:
stop = True
if iplusun == '\'':
stop = True
return url
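# Hypothetical usage sketch (the URL is an assumption, not from the original file):
# print(Scraper.scrape("https://example.com"))  # double-quoted hrefs starting with "h"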
| 3.015625
| 3
|
Hello_world/hello_world.py
|
elsuizo/Kivy_work
| 0
|
12775290
|
#= -------------------------------------------------------------------------
# @file hello_world.py
#
# @date 02/14/16 10:41:21
# @author <NAME>
# @email <EMAIL>
#
# @brief
#
# @detail
#
# Licence:
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
#---------------------------------------------------------------------------=#
import kivy
kivy.require('1.9.0')
from kivy.app import App
from kivy.uix.label import Label
class HelloApp(App):
def build(self):
return Label(text='Hello World')
if __name__ == "__main__":
HelloApp().run()
| 2.46875
| 2
|
gui_template.py
|
Llona/AJ-RamDisk
| 0
|
12775291
|
<gh_stars>0
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Oct 26 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class main_frame
###########################################################################
class main_frame ( wx.Frame ):
def __init__( self, parent ):
wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"AJ-RamDisk", pos = wx.DefaultPosition, size = wx.Size( 835,601 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
bSizer30 = wx.BoxSizer( wx.VERTICAL )
sbSizer8 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"label" ), wx.VERTICAL )
self.ramdrive_listctrl = wx.ListCtrl( sbSizer8.GetStaticBox(), wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_HRULES|wx.LC_REPORT|wx.LC_VRULES )
sbSizer8.Add( self.ramdrive_listctrl, 1, wx.ALL|wx.EXPAND, 5 )
bSizer30.Add( sbSizer8, 1, wx.ALL|wx.EXPAND, 5 )
bSizer33 = wx.BoxSizer( wx.HORIZONTAL )
self.add_button = wx.Button( self, wx.ID_ANY, u"Add", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer33.Add( self.add_button, 0, wx.ALL, 5 )
self.edit_button = wx.Button( self, wx.ID_ANY, u"Edit", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer33.Add( self.edit_button, 0, wx.ALL, 5 )
self.delete_button = wx.Button( self, wx.ID_ANY, u"Delete", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer33.Add( self.delete_button, 0, wx.ALL, 5 )
bSizer30.Add( bSizer33, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )
self.m_staticline6 = wx.StaticLine( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL )
bSizer30.Add( self.m_staticline6, 0, wx.EXPAND |wx.ALL, 5 )
self.SetSizer( bSizer30 )
self.Layout()
self.m_statusBar1 = self.CreateStatusBar( 1, wx.STB_SIZEGRIP, wx.ID_ANY )
self.Centre( wx.BOTH )
# Connect Events
self.add_button.Bind( wx.EVT_BUTTON, self.add_press )
self.edit_button.Bind( wx.EVT_BUTTON, self.edit_press )
self.delete_button.Bind( wx.EVT_BUTTON, self.del_press )
def __del__( self ):
pass
    # Virtual event handlers, override them in your derived class
def add_press( self, event ):
event.Skip()
def edit_press( self, event ):
event.Skip()
def del_press( self, event ):
event.Skip()
###########################################################################
## Class AddEditDiskFrame
###########################################################################
class AddEditDiskFrame ( wx.Frame ):
def __init__( self, parent ):
wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"Editor Ram Disk", pos = wx.DefaultPosition, size = wx.Size( 537,425 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
bSizer34 = wx.BoxSizer( wx.VERTICAL )
sbSizer9 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"Config" ), wx.VERTICAL )
bSizer35 = wx.BoxSizer( wx.VERTICAL )
bSizer36 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText17 = wx.StaticText( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Size:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText17.Wrap( -1 )
bSizer36.Add( self.m_staticText17, 0, wx.LEFT|wx.RIGHT|wx.TOP, 7 )
self.size_text = wx.TextCtrl( sbSizer9.GetStaticBox(), wx.ID_ANY, u"1", wx.DefaultPosition, wx.DefaultSize, wx.TE_RIGHT )
bSizer36.Add( self.size_text, 0, wx.ALL|wx.LEFT|wx.RIGHT|wx.TOP, 5 )
size_unit_choiceChoices = [ u"MB", u"GB" ]
self.size_unit_choice = wx.Choice( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, size_unit_choiceChoices, 0 )
self.size_unit_choice.SetSelection( 1 )
bSizer36.Add( self.size_unit_choice, 0, wx.ALL, 5 )
bSizer35.Add( bSizer36, 0, wx.EXPAND, 5 )
bSizer37 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText18 = wx.StaticText( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Drive:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText18.Wrap( -1 )
bSizer37.Add( self.m_staticText18, 0, wx.LEFT|wx.RIGHT|wx.TOP, 7 )
driver_choiceChoices = []
self.driver_choice = wx.Choice( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, driver_choiceChoices, 0 )
self.driver_choice.SetSelection( 0 )
bSizer37.Add( self.driver_choice, 0, wx.ALL, 5 )
bSizer35.Add( bSizer37, 0, wx.EXPAND, 5 )
bSizer371 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText181 = wx.StaticText( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Label:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText181.Wrap( -1 )
bSizer371.Add( self.m_staticText181, 0, wx.LEFT|wx.RIGHT|wx.TOP, 7 )
self.label_text = wx.TextCtrl( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer371.Add( self.label_text, 0, wx.ALL, 5 )
bSizer35.Add( bSizer371, 0, wx.EXPAND, 5 )
bSizer3711 = wx.BoxSizer( wx.HORIZONTAL )
self.store_img_checkbox = wx.CheckBox( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Store to HDD?", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer3711.Add( self.store_img_checkbox, 0, wx.ALL|wx.ALIGN_BOTTOM, 5 )
bSizer35.Add( bSizer3711, 1, wx.EXPAND, 5 )
self.m_staticline8 = wx.StaticLine( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL )
bSizer35.Add( self.m_staticline8, 0, wx.EXPAND |wx.ALL, 5 )
bSizer37111 = wx.BoxSizer( wx.HORIZONTAL )
self.store_all_checkbox = wx.CheckBox( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Store All?", wx.DefaultPosition, wx.DefaultSize, 0 )
self.store_all_checkbox.SetValue(True)
bSizer37111.Add( self.store_all_checkbox, 0, wx.ALIGN_CENTER|wx.LEFT, 5 )
        self.choice_folder_button = wx.Button( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Choose Folder", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer37111.Add( self.choice_folder_button, 0, wx.ALL, 5 )
bSizer35.Add( bSizer37111, 0, wx.EXPAND, 5 )
bSizer61 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText27 = wx.StaticText( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Path:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText27.Wrap( -1 )
bSizer61.Add( self.m_staticText27, 0, wx.LEFT|wx.RIGHT|wx.TOP, 5 )
self.img_path_text = wx.TextCtrl( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer61.Add( self.img_path_text, 1, wx.ALL, 5 )
self.choice_path_button = wx.Button( sbSizer9.GetStaticBox(), wx.ID_ANY, u"Path", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer61.Add( self.choice_path_button, 0, wx.ALL, 5 )
bSizer35.Add( bSizer61, 0, wx.EXPAND, 5 )
sbSizer9.Add( bSizer35, 1, wx.EXPAND, 5 )
bSizer34.Add( sbSizer9, 1, wx.ALL|wx.EXPAND, 5 )
self.m_staticline7 = wx.StaticLine( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL )
bSizer34.Add( self.m_staticline7, 0, wx.EXPAND |wx.ALL, 5 )
bSizer42 = wx.BoxSizer( wx.HORIZONTAL )
self.m_button20 = wx.Button( self, wx.ID_ANY, u"OK", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer42.Add( self.m_button20, 0, wx.ALL, 5 )
self.m_button22 = wx.Button( self, wx.ID_ANY, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer42.Add( self.m_button22, 0, wx.ALL, 5 )
bSizer34.Add( bSizer42, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )
self.SetSizer( bSizer34 )
self.Layout()
self.Centre( wx.BOTH )
# Connect Events
self.store_img_checkbox.Bind( wx.EVT_CHECKBOX, self.check_store_img )
self.store_all_checkbox.Bind( wx.EVT_CHECKBOX, self.check_store_all )
self.m_button20.Bind( wx.EVT_BUTTON, self.press_ok )
self.m_button22.Bind( wx.EVT_BUTTON, self.press_cancel )
def __del__( self ):
pass
    # Virtual event handlers, override them in your derived class
def check_store_img( self, event ):
event.Skip()
def check_store_all( self, event ):
event.Skip()
def press_ok( self, event ):
event.Skip()
def press_cancel( self, event ):
event.Skip()
###########################################################################
## Class SelectFolderFrame
###########################################################################
class SelectFolderFrame ( wx.Frame ):
def __init__( self, parent ):
wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 618,640 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
bSizer54 = wx.BoxSizer( wx.VERTICAL )
bSizer55 = wx.BoxSizer( wx.HORIZONTAL )
bSizer57 = wx.BoxSizer( wx.VERTICAL )
self.m_treeCtrl1 = wx.TreeCtrl( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TR_DEFAULT_STYLE )
bSizer57.Add( self.m_treeCtrl1, 1, wx.ALL|wx.EXPAND, 5 )
bSizer55.Add( bSizer57, 1, wx.EXPAND, 5 )
bSizer58 = wx.BoxSizer( wx.VERTICAL )
self.m_button27 = wx.Button( self, wx.ID_ANY, u"->", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer58.Add( self.m_button27, 0, wx.ALL, 5 )
self.m_button28 = wx.Button( self, wx.ID_ANY, u"<-", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer58.Add( self.m_button28, 0, wx.ALL, 5 )
bSizer55.Add( bSizer58, 0, wx.ALIGN_CENTER_VERTICAL, 5 )
bSizer59 = wx.BoxSizer( wx.VERTICAL )
m_listBox1Choices = []
self.m_listBox1 = wx.ListBox( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_listBox1Choices, 0 )
bSizer59.Add( self.m_listBox1, 1, wx.ALL|wx.EXPAND, 5 )
bSizer55.Add( bSizer59, 1, wx.EXPAND, 5 )
bSizer54.Add( bSizer55, 1, wx.EXPAND, 5 )
bSizer56 = wx.BoxSizer( wx.VERTICAL )
self.m_staticline9 = wx.StaticLine( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL )
bSizer56.Add( self.m_staticline9, 0, wx.EXPAND |wx.ALL, 5 )
bSizer60 = wx.BoxSizer( wx.HORIZONTAL )
self.m_button24 = wx.Button( self, wx.ID_ANY, u"OK", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer60.Add( self.m_button24, 0, wx.ALL|wx.ALIGN_RIGHT, 5 )
self.m_button29 = wx.Button( self, wx.ID_ANY, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer60.Add( self.m_button29, 0, wx.ALL, 5 )
bSizer56.Add( bSizer60, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer54.Add( bSizer56, 0, wx.EXPAND, 5 )
self.SetSizer( bSizer54 )
self.Layout()
self.Centre( wx.BOTH )
def __del__( self ):
pass
| 1.671875
| 2
|
app.py
|
kshitijsriv/vehicle_location_map_folium
| 0
|
12775292
|
import folium
from flask import Flask, render_template
import rethinkdb as rtdb
import os
from dotenv import load_dotenv
import requests
import json
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello World!'
def get_route_polyline(route):
# "http://routesapi.chartr.in/transit/dimts/get_transit_route_details?route=534UP"
url = f"http://routesapi.chartr.in/transit/dimts/get_transit_route_details?route={route}UP"
response = requests.get(url)
response_dict = json.loads(response.text)
polyline = list()
stop_names = list()
if response_dict['msg'] == 'Found':
stop_list = response_dict['transit_route'][0]['stops']
for stop in stop_list:
polyline.append((float(stop['lat']), float(stop['lon'])))
stop_names.append(stop['name'])
return True, polyline, stop_names
else:
return False, polyline, stop_names
def plot_map(bus_number, coords, ac, route=None):
    m = folium.Map(location=[28.630691, 77.217648], zoom_start=11)
    # both branches of the original ac check drew an identical red marker,
    # so one call covers AC and non-AC buses alike
    folium.Marker(coords, popup=bus_number, icon=folium.Icon(color='red')).add_to(m)
if route is not None:
got_polyline, route_polyline, stop_names = get_route_polyline(route)
if got_polyline:
folium.PolyLine(route_polyline, color='black', weight=1.5, opacity=1).add_to(m)
for idx, stop_coord in enumerate(route_polyline):
stop_name = stop_names[idx]
folium.CircleMarker(location=stop_coord, radius=4, popup=stop_name,
fill_color='blue', color='red', fill_opacity=1).add_to(m)
return m._repr_html_()
# view/DL1PC0588/534DOWN
@app.route("/view/<bus_number>/<route>", methods=["GET"])
def view(bus_number, route):
# rethinkdb connection
env_path = '.env'
load_dotenv(env_path)
rDB_name = os.getenv("rDB_name")
realtime_table = os.getenv("realtime_table")
host = os.getenv("host")
port = os.getenv("port")
r = rtdb.RethinkDB()
rconn = r.connect(host=host, port=port)
bus_data = json.loads(r.db(rDB_name).table(realtime_table).get(bus_number).to_json().run(rconn))
if bus_data is not None:
ac = bus_data['ac']
coordinates = bus_data['lat'], bus_data['lng']
folium_map = plot_map(bus_number, coordinates, ac, route=route)
return render_template('views/view.html', map=folium_map, bus=bus_number)
else:
return render_template('views/not_found.html')
if __name__ == '__main__':
app.run()
| 2.71875
| 3
|
venv/lib/python3.6/site-packages/ansible_collections/hetzner/hcloud/plugins/modules/hcloud_server_network.py
|
usegalaxy-no/usegalaxy
| 1
|
12775293
|
<reponame>usegalaxy-no/usegalaxy
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Hetzner Cloud GmbH <<EMAIL>>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: hcloud_server_network
short_description: Manage the relationship between Hetzner Cloud Networks and servers
description:
- Create and delete the relationship Hetzner Cloud Networks and servers
author:
- <NAME> (@lkaemmerling)
options:
network:
description:
- The name of the Hetzner Cloud Networks.
type: str
required: true
server:
description:
- The name of the Hetzner Cloud server.
type: str
required: true
ip:
description:
- The IP the server should have.
type: str
alias_ips:
description:
- Alias IPs the server has.
type: list
elements: str
state:
description:
- State of the server_network.
default: present
choices: [ absent, present ]
type: str
requirements:
- hcloud-python >= 1.3.0
extends_documentation_fragment:
- hetzner.hcloud.hcloud
'''
EXAMPLES = """
- name: Create a basic server network
hcloud_server_network:
network: my-network
server: my-server
state: present
- name: Create a server network and specify the ip address
hcloud_server_network:
network: my-network
server: my-server
ip: 10.0.0.1
state: present
- name: Create a server network and add alias ips
hcloud_server_network:
network: my-network
server: my-server
ip: 10.0.0.1
alias_ips:
- 10.1.0.1
- 10.2.0.1
state: present
- name: Ensure the server network is absent (remove if needed)
hcloud_server_network:
network: my-network
server: my-server
state: absent
"""
RETURN = """
hcloud_server_network:
description: The relationship between a server and a network
returned: always
type: complex
contains:
network:
description: Name of the Network
type: str
returned: always
sample: my-network
server:
description: Name of the server
type: str
returned: always
sample: my-server
ip:
description: IP of the server within the Network ip range
type: str
returned: always
sample: 10.0.0.8
alias_ips:
description: Alias IPs of the server within the Network ip range
type: str
returned: always
sample: [10.1.0.1, ...]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible_collections.hetzner.hcloud.plugins.module_utils.hcloud import Hcloud
try:
from hcloud import APIException
except ImportError:
APIException = None
class AnsibleHcloudServerNetwork(Hcloud):
def __init__(self, module):
Hcloud.__init__(self, module, "hcloud_server_network")
self.hcloud_network = None
self.hcloud_server = None
self.hcloud_server_network = None
def _prepare_result(self):
return {
"network": to_native(self.hcloud_network.name),
"server": to_native(self.hcloud_server.name),
"ip": to_native(self.hcloud_server_network.ip),
"alias_ips": self.hcloud_server_network.alias_ips,
}
def _get_server_and_network(self):
try:
self.hcloud_network = self.client.networks.get_by_name(self.module.params.get("network"))
self.hcloud_server = self.client.servers.get_by_name(self.module.params.get("server"))
self.hcloud_server_network = None
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_server_network(self):
for privateNet in self.hcloud_server.private_net:
if privateNet.network.id == self.hcloud_network.id:
self.hcloud_server_network = privateNet
def _create_server_network(self):
params = {
"network": self.hcloud_network
}
if self.module.params.get("ip") is not None:
params["ip"] = self.module.params.get("ip")
if self.module.params.get("alias_ips") is not None:
params["alias_ips"] = self.module.params.get("alias_ips")
if not self.module.check_mode:
try:
self.hcloud_server.attach_to_network(**params).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_server_and_network()
self._get_server_network()
def _update_server_network(self):
params = {
"network": self.hcloud_network
}
alias_ips = self.module.params.get("alias_ips")
        # list.sort() returns None, so compare sorted copies instead
        if alias_ips is not None and sorted(self.hcloud_server_network.alias_ips) != sorted(alias_ips):
params["alias_ips"] = alias_ips
if not self.module.check_mode:
try:
self.hcloud_server.change_alias_ips(**params).wait_until_finished()
except APIException as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_server_and_network()
self._get_server_network()
def present_server_network(self):
self._get_server_and_network()
self._get_server_network()
if self.hcloud_server_network is None:
self._create_server_network()
else:
self._update_server_network()
def delete_server_network(self):
self._get_server_and_network()
self._get_server_network()
if self.hcloud_server_network is not None and self.hcloud_server is not None:
if not self.module.check_mode:
try:
self.hcloud_server.detach_from_network(self.hcloud_server_network.network).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_server_network = None
@staticmethod
def define_module():
return AnsibleModule(
argument_spec=dict(
network={"type": "str", "required": True},
server={"type": "str", "required": True},
ip={"type": "str"},
alias_ips={"type": "list", "elements": "str"},
state={
"choices": ["absent", "present"],
"default": "present",
},
**Hcloud.base_module_arguments()
),
supports_check_mode=True,
)
def main():
module = AnsibleHcloudServerNetwork.define_module()
hcloud = AnsibleHcloudServerNetwork(module)
state = module.params["state"]
if state == "absent":
hcloud.delete_server_network()
elif state == "present":
hcloud.present_server_network()
module.exit_json(**hcloud.get_result())
if __name__ == "__main__":
main()
| 2.3125
| 2
|
Knight's Tour Puzzle/game.py
|
oxxio/hyperskill-projects-python
| 0
|
12775294
|
def input_dimension():
while True:
dimension = input("Enter your board dimensions: ").split()
len_x1 = 0
len_y1 = 0
if len(dimension) != 2:
print("Invalid dimensions!")
continue
try:
len_x1 = int(dimension[0])
len_y1 = int(dimension[1])
except ValueError:
print("Invalid dimensions!")
continue
if len_x1 <= 0 or len_y1 <= 0:
print("Invalid dimensions!")
else:
break
return len_x1, len_y1
def input_starting():
while True:
position = input("Enter the knight's starting position: ").split()
x1, y1 = 0, 0
if len(position) != 2:
print("Invalid dimensions!")
continue
try:
x1 = int(position[0])
y1 = int(position[1])
except ValueError:
print("Invalid dimensions!")
continue
if not 1 <= x1 <= len_x or not 1 <= y1 <= len_y:
print("Invalid dimensions!")
else:
break
return x1, y1
def create_board():
for _i in range(len_x):
current_row = []
for _j in range(len_y):
current_row.append("_")
board.append(current_row)
def print_board(board1):
max_len = len(str(len_x * len_y))
print(" " + "-" * (len_x * (max_len + 1) + 3))
for i in range(len_y, 0, -1):
s = ""
for j in range(1, len_x + 1):
if board1[j - 1][i - 1] != '_':
s += " " + " " * (max_len - len(board1[j - 1][i - 1])) + board1[j - 1][i - 1]
elif count(board1, j, i, 'X') != 0:
next_count = str(count(board1, j, i, '_'))
s += " " + " " * (max_len - len(next_count)) + next_count
else:
s += " " + "_" * max_len
print(f"{i}|{s} |")
print(" " + "-" * (len_x * (max_len + 1) + 3))
s = ''
for i in range(len_x):
s += " " * max_len + str(i + 1)
print(" " + s + " ")
print()
def count(board1, x1, y1, symbol):
value = 0
if x1 + 1 <= len_x and y1 + 2 <= len_y and board1[x1][y1 + 1] == symbol:
value += 1
if x1 + 1 <= len_x and y1 - 2 > 0 and board1[x1][y1 - 3] == symbol:
value += 1
if x1 - 1 > 0 and y1 + 2 <= len_y and board1[x1 - 2][y1 + 1] == symbol:
value += 1
if x1 - 1 > 0 and y1 - 2 > 0 and board1[x1 - 2][y1 - 3] == symbol:
value += 1
if x1 + 2 <= len_x and y1 + 1 <= len_y and board1[x1 + 1][y1] == symbol:
value += 1
if x1 + 2 <= len_x and y1 - 1 > 0 and board1[x1 + 1][y1 - 2] == symbol:
value += 1
if x1 - 2 > 0 and y1 + 1 <= len_y and board1[x1 - 3][y1] == symbol:
value += 1
if x1 - 2 > 0 and y1 - 1 > 0 and board1[x1 - 3][y1 - 2] == symbol:
value += 1
return value
def move(board1, new_x1, new_y1):
board2 = []
for i in range(len_x):
current_row = []
for j in range(len_y):
if board1[i][j] == 'X':
current_row.append('*')
else:
current_row.append(board1[i][j])
board2.append(current_row)
board2[new_x1 - 1][new_y1 - 1] = "X"
return board2
def next_step(board1, new_x1, new_y1, index):
board2 = []
for i in range(len_x):
current_row = []
for j in range(len_y):
current_row.append(board1[i][j])
board2.append(current_row)
board2[new_x1 - 1][new_y1 - 1] = str(index)
return board2
def check_solution(board1):
total = 0
for i in range(len_x):
for j in range(len_y):
if board1[i][j] == '_' and count(board1, i + 1, j + 1, 'X') != 0:
board2 = move(board1, i + 1, j + 1)
if check_solution(board2):
return True
elif board1[i][j] in '*X':
total += 1
return total == len_x * len_y
def play_game(board1):
print_board(board1)
invalid = False
count_squares = 1
while True:
movie = input("Invalid move! Enter your next move: " if invalid else 'Enter your next move: ').split()
new_x = int(movie[0])
new_y = int(movie[1])
if board1[new_x - 1][new_y - 1] != '_' or count(board1, new_x, new_y, 'X') == 0:
invalid = True
else:
invalid = False
board1 = move(board1, new_x, new_y)
count_squares += 1
if count(board1, new_x, new_y, '_') == 0:
if len_x * len_y == count_squares:
print('What a great tour! Congratulations!')
else:
print('No more possible moves!')
print(f'Your knight visited {count_squares} squares!')
break
print_board(board1)
def print_solution(board1):
board2 = fill_board(board1, 1)
print_board(board2)
def fill_board(board1, index):
for i in range(len_x):
for j in range(len_y):
if board1[i][j] == '_' and count(board1, i + 1, j + 1, str(index)) != 0:
board2 = next_step(board1, i + 1, j + 1, index + 1)
if index + 1 == len_x * len_y:
return board2
board3 = fill_board(board2, index + 1)
if board3 is not None:
return board3
return None
board = []
len_x, len_y = input_dimension()
create_board()
x, y = input_starting()
board[x - 1][y - 1] = "X"
while True:
try_puzzle = input('Do you want to try the puzzle? (y/n): ')
if try_puzzle == 'y':
if not check_solution(list(board)):
print('No solution exists!')
exit()
play_game(board)
break
elif try_puzzle == 'n':
if not check_solution(list(board)):
print('No solution exists!')
exit()
board[x - 1][y - 1] = "1"
print("Here's the solution!")
print_solution(board)
break
else:
        print('Invalid input!')
| 4.03125
| 4
|
keyserv/uuidgenerator.py
|
Kunin-AI/mini-key-server
| 0
|
12775295
|
from baseconv import BaseConverter, BASE16_ALPHABET
from uuid import UUID, uuid4
BASE = 16
HEX_DOUBLE_WORD_LENGTH = 8
HEX_DOUBLE_WORD_UPPER_BYTE = slice(-HEX_DOUBLE_WORD_LENGTH, -(HEX_DOUBLE_WORD_LENGTH - 2))
MAX_DOUBLE_WORD = (1 << 31)
OLD_BIT_FLAG = 0x80
NEW_BIT_FLAG_MASK = OLD_BIT_FLAG - 1
BASE16 = BaseConverter(BASE16_ALPHABET.lower())
class UUIDGenerator():
uuid = None
def __init__(self):
while True:
uuid = uuid4()
replacer = BASE16.encode(int(BASE16.decode(str(uuid)[HEX_DOUBLE_WORD_UPPER_BYTE])) & NEW_BIT_FLAG_MASK)
if int(replacer, BASE) >= 16:
break
self.uuid = UUID(str(uuid)[:-(HEX_DOUBLE_WORD_LENGTH)] + replacer + str(uuid)[-(HEX_DOUBLE_WORD_LENGTH-2):])
@staticmethod
def new_version(uuid):
return not bool(int(BASE16.decode(str(uuid)[HEX_DOUBLE_WORD_UPPER_BYTE])) & OLD_BIT_FLAG)
@staticmethod
def int_to_uuid(int_id):
int_id = int(int_id)
myid = str(uuid4())
replacer1 = BASE16.encode(MAX_DOUBLE_WORD - int_id)
replacer2 = BASE16.encode(int(BASE16.decode(myid[HEX_DOUBLE_WORD_UPPER_BYTE])) | OLD_BIT_FLAG)
return UUID(replacer1 + myid[HEX_DOUBLE_WORD_LENGTH:-(HEX_DOUBLE_WORD_LENGTH)] +
replacer2 + myid[-(HEX_DOUBLE_WORD_LENGTH-2):])
@staticmethod
def uuid_to_int(uuid):
inverse_id = int(BASE16.decode(uuid[:HEX_DOUBLE_WORD_LENGTH]))
return (MAX_DOUBLE_WORD - inverse_id)
@staticmethod
def str_to_uuid(str_uuid):
try:
return UUID(hex=str_uuid)
        except ValueError:
            raise ValueError("UUID (%s) provided is NOT a proper uuid" % str_uuid)
@staticmethod
def format_uuid_hex(uuid_str):
if len(uuid_str) != 32:
raise ValueError("UUID (%s provided is NOT a proper uuid" % uuid_str)
        return uuid_str[:8]+'-'+uuid_str[8:12]+'-'+uuid_str[12:16]+'-'+uuid_str[16:20]+'-'+uuid_str[20:]
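# Minimal usage sketch (added; not part of the original module): round-trip an
# integer id through the encoding above and back.
if __name__ == '__main__':
    u = UUIDGenerator.int_to_uuid(42)
    print(u, UUIDGenerator.new_version(u))   # int-encoded ids carry the old-style flag
    print(UUIDGenerator.uuid_to_int(str(u))) # -> 42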
| 2.8125
| 3
|
leetcode/python/41.first-missing-positive.py
|
phiysng/leetcode
| 3
|
12775296
|
from typing import List
class Solution:
def firstMissingPositive(self, nums: List[int]) -> int:
for i in range(len(nums)):
while nums[i] > 0 and nums[i] <= len(nums) and nums[nums[i] - 1] != nums[i]:
                # nums[i], nums[nums[i] - 1] = nums[nums[i] - 1], nums[i]
                """
                TODO: Python's tuple-swap sugar cannot be used here: the swap
                indices depend on the list's values, which change during the
                assignment itself.
                see https://stackoverflow.com/questions/14836228/is-there-a-standardized-method-to-swap-two-variables-in-python/14836456#14836456
                """
self.swap(nums, i, nums[i] - 1)
for i in range(len(nums)):
if nums[i] != i + 1:
return i + 1
return len(nums) + 1
def swap(self, nums: List[int], l: int, r: int) -> None:
nums[l], nums[r] = nums[r], nums[l]
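# Quick sanity checks (added sketch), mirroring the classic LeetCode examples:
if __name__ == "__main__":
    s = Solution()
    assert s.firstMissingPositive([1, 2, 0]) == 3
    assert s.firstMissingPositive([3, 4, -1, 1]) == 2
    assert s.firstMissingPositive([7, 8, 9, 11, 12]) == 1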
| 3.78125
| 4
|
python_scripts/LightUpTheNight.py
|
BenoitCarlier/OpenVoices
| 3
|
12775297
|
import board
import neopixel
import time
from time import sleep
pixel_pin = board.D18
num_pixels = 8
ORDER = neopixel.RGB
ColorDict = { "black":0x000000, "white":0x101010, "red":0x100000, "blue":0x000010, "green":0x001000, "yellow":0x101000, "orange":0x100600, "pink":0x100508, "teal":0x000808, "purple":0x080008}
#Example of colorString : "Fear:blue,Surprise:yellow"
def SetColors(colorString):
EmoDict = {}
colorsEmo = colorString.split(",")
for k in range(len(colorsEmo)):
oneColorEmo = colorsEmo[k].split(":")
#print(oneColorEmo, ColorDict(oneColorEmo[1]))
EmoDict[oneColorEmo[0]] = ColorDict[oneColorEmo[1]]
return EmoDict
# A single shared strip object: re-creating the NeoPixel in every call would
# reset all pixels to black and defeat the scrolling effect below.
pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=1, auto_write=False, pixel_order=ORDER)
def LightAll(color):
    pixels.fill(ColorDict[color])
    pixels.show()  # auto_write is False, so changes must be pushed explicitly
def LightLast(emotion, EmoDict):
    if len(pixels) > 1:
        # shift the existing colors down the strip; iterate in reverse so each
        # pixel takes its neighbour's old value instead of pixel 0's new one
        for k in range(len(pixels) - 1, 0, -1):
            pixels[k] = pixels[k-1]
    pixels[0] = EmoDict[emotion]
    print(pixels)
    pixels.show()
def play():
EmoDict = SetColors("neutral:white,surprise:yellow,happiness:orange,fear:blue,disgust:green,sad:purple")
LightAll("black")
while(True):
LightLast("happiness",EmoDict)
time.sleep(0.8)
LightLast("surprise",EmoDict)
time.sleep(0.8)
LightLast("fear",EmoDict)
time.sleep(0.8)
LightLast("neutral",EmoDict)
time.sleep(0.8)
LightLast("sad",EmoDict)
time.sleep(0.8)
return 0
play()
| 3.203125
| 3
|
websites/pic/db.py
|
hmumixaM/anything
| 0
|
12775298
|
<reponame>hmumixaM/anything<filename>websites/pic/db.py
import pymongo, re, random
uri = "mongodb+srv://hello:qweasdZxc1@jandan-l7bmq.gcp.mongodb.net/code?retryWrites=true&w=majority"
# client = pymongo.MongoClient(host='127.0.0.1', port=27017)
client = pymongo.MongoClient(uri)
# ooxx = client.jandan.comments
ooxx = client.code.comments
result = ooxx.find({'type':'ooxx'}).sort('oo', pymongo.DESCENDING)
def db():
num = random.randint(0, 500)
pattern = re.compile(r'http.*\.\w+')
gif = result[num]['content']
url = pattern.search(gif)[0]
return url
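if __name__ == "__main__":
    # Minimal smoke test (added sketch): fetch one random image URL.
    # Requires network access and valid credentials for the cluster above.
    print(db())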
| 2.59375
| 3
|
lasp_reu_python_tutorial_day1.py
|
michaelaye/LASP-REU-Python
| 1
|
12775299
|
# coding: utf-8
# # Using Python to investigate data
# # MANY important non-standard packages
# ## Which Python distribution?
# * Currently best supported is Anaconda by Continuum.io
# * Works on the major three operating systems
# * Comes with the most important science packages pre-installed.
# ## Getting started
# The following launches the Python interpreter in `interactive` mode:
# ```bash
# $ python
# Python 3.5.1 |Continuum Analytics, Inc.| (default, Dec 7 2015, 11:24:55)
# [GCC 4.2.1 (Apple Inc. build 5577)] on darwin
# Type "help", "copyright", "credits" or "license" for more information.
# >>> print('hello world!')
# hello world!
# ```
# ## To leave Python:
# ```bash
# >>> <Ctrl-D>
# $
# ```
# ## Run an existing script file with Python code
# This is running Python code `non-interactive`:
# ```bash
# $ python script_name.py
# [...output...]
# ```
# * Python itself can be run interactively, but offers few interactive features.
# * -> <NAME>, then at CU Boulder, invented IPython, a more powerful interactive environment for Python.
# ## Launching IPython
# Launching works the same way:
# ```bash
# $ ipython
# Python 3.5.1 |Continuum Analytics, Inc.| (default, Dec 7 2015, 11:24:55)
# Type "copyright", "credits" or "license" for more information.
#
# IPython 4.2.0 -- An enhanced Interactive Python.
# ? -> Introduction and overview of IPython's features.
# %quickref -> Quick reference.
# help -> Python's own help system.
# object? -> Details about 'object', use 'object??' for extra details.
# Automatic calling is: Smart
#
# In [1]:
# ```
# # Most important IPython features
# * Tab completion for Python modules
# * Tab completion for object's attributes ("introspection")
# * automatic reloading of the code you are working on
# # Latest technology jump: IPython notebook (Now Jupyter)
#
# * Cell-based interactivity
# * Combining code with output, including plot display **AND** documentation (like this!)
# * Very successful. Received Sloan Foundation funding twice (here to stay!)
# * Recently became language agnostic: JU_lia, PYT_hon, R (and many many more)
# # My recommendation
# * Work with IPython for quick things that don't need plots, or on a slow remote connection
# * Work with Jupyter notebook for interactive data analysis and to develop working code blocks
# * Put working code blocks together into script files for science production and run "non-interactive"
# # Launching Jupyter notebook
# ```bash
# $ jupyter notebook
# [I 16:27:34.880 NotebookApp] Serving notebooks from local directory: /Users/klay6683/src/RISE
# [I 16:27:34.880 NotebookApp] 0 active kernels
# [I 16:27:34.880 NotebookApp] The Jupyter Notebook is running at: http://localhost:8889/
# [I 16:27:34.881 NotebookApp] Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).
# ```
# * Will launch what is called a "notebook server" and open web browser with dash-board
# * This server pipes web browser cells to the underlying Python interpreter.
# # And here we are!
# ## Now let's do some Python!
# First, let's look at variables
# In[ ]:
a = 5
s = 'mystring' # both single and double quotes are okay.
# new comment
# Python is `dynamically typed`, meaning I can change the type of any variable at any time:
# In[ ]:
a = 'astring'
print(a) # not the last line, so if I want to see it, I need to use the print() function.
s = 23.1
s # note that if the last line of any Jupyter cell contains a printable object, it will be printed.
# Python is written in C and therefore shares many of its conventions.
#
# For examples, one equal sign `=` is used for value assignment, while two `==` are used for equality checking:
# In[ ]:
a = 5
a
# In[ ]:
a == 5
# Storing more than one item is done in `lists`.
#
# A list is only one of Python's `containers`, and it is very flexible, it can store any Python object.
# In[ ]:
mylist = [1, 3.4, 'hello', 3, 6]
mylist
# Each item can be accessed by an 0-based index (like in C):
# In[ ]:
print(mylist[0])
print(mylist[2])
len(mylist)
# One can get slices of lists by providing 2 indices, with the right limit being exclusive, not inclusive
# In[ ]:
i = 2
print(mylist[:i])
print(mylist[i:])
# ### Sensible multiplication
# Most Python objects can be multiplied, in the most logical sense depending on its type:
# In[ ]:
a = 5
s = 'mystring'
mylist = [1,2]
# In[ ]:
print(5*a)
print(2*s)
print(3*mylist)
# ### Conditional branching: if statement
# In[ ]:
temp = 80
if temp > 110:
print("It's too hot.")
elif temp > 95 and temp < 110: # test conditions are combined with `and`
print("It's okay.")
else:
print("Could be warmer.")
# See how I use double quotes here to avoid ambiguity with single quote in the string.
# ### Functions
#
# Functions are called with `()` to contain the arguments for a function.
#
# We already used one: `print()`
#
# Learn about a function's abilities using IPython's help system. It is accessed by adding a question mark to any function name. In Jupyter notebooks, a sub-window will open. When done reading, close it by pressing `q` or clicking the x icon:
# In[ ]:
get_ipython().magic('pinfo print')
# #### Making your own function
#
# This is very easy, with using the keyword `def` for "define":
# In[ ]:
def myfunc(something): # note how I don't care about the type here!
"""Print the length of `something` and print itself."""
print("Length:", len(something)) # all `something` needs to support is to have a length
print("You gave:", something)
# In[ ]:
myfunc('mystring')
# In[ ]:
myfunc(['a', 1, 2])
# In[ ]:
myfunc(5)
# The principle of not defining a required type for a function, but requiring an `ability`, is very important in Python and is called `duck typing`:
#
# > "In other words, don't check whether it IS-a duck: check whether it QUACKS-like-a duck, WALKS-like-a duck, etc, etc, depending on exactly what subset of duck-like behaviour you need to play your language-games with."
# ## Loops
#
# Loops are the kings of programming, because they are the main reason why we do programming:
#
# > Execute tasks repeatedly, stop when conditions are fulfilled.
#
# These conditions could be simply that data are exhausted, or a mathematical condition.
#
# 2 main loops exist: The more basic `while` loop, and the more advanced `for` loop.
#
# ### while loops
#
# `while` loops run until their conditional changes from True to False.
#
# The loop is only entered when the condition is True.
#
# Note how in Python sub blocks of code are defined simply by indentation and the previous line
# ending with a colon `:`.
# In[ ]:
i = 0
while i < 3: # this is the condition that is being checked.
print(i, end=' ')
i = i + 1 # very common statement, read it from right to left!
print('Done')
# In[ ]:
i < 3
# In[ ]:
i
# `while` loops are the most low-level loops; they can always be made to work, as you design the interruption criteria yourself.
#
# ### For loops
#
# `for` loops are designed to loop over containers, like lists.
#
# They know how to get each element and know when to stop:
# In[ ]:
mylist = [5, 'hello', 23.1]
for item in mylist:
print(item)
# The "`in`" keyword is a powerful and nicely readable concept in Python.
#
# In most cases, one can check membership of an element in a container with it:
# In[ ]:
5 in mylist
# ### The `range()` function
#
# `range()` is very useful for creating lists for you that you can loop over and work with.
#
# It has two different call signatures:
#
# * range(n) will create a list from 0 to n-1, with an increment of +1.
# * range(n1, n2, [step]) creates a list from n1 to n2-1, with again a default increment of 1
#
# Negative increments are also okay:
# In[ ]:
for i in range(10):
print(i, end=' ')
# In[ ]:
for i in range(2, 5):
print(i, end=' ')
# In[ ]:
for i in range(0, -5, -1):
print(i, end=' ')
# In[ ]:
get_ipython().magic('pinfo range')
# **IMPORTANT**
#
# Note that, for memory efficiency, `range()` does not create the full list up front, but returns a lazy, generator-like object.
#
# This is basically an abstract object that knows **HOW TO** create the requested list, but hasn't done it yet.
# In[ ]:
print(range(10))
# It takes either a loop (as above) or a conversion to a list to see the actual content:
# In[ ]:
print(list(range(10)))
# ### Combine `if` and `for`
#
# Let's combine `for` and `if` to write a mini program.
# The task is to scan a container of 1's and 0's and count how many 1's are there.
# In[ ]:
mylist = [0,1,1,0,0,1,0,1,0,0,1]
mylist
# In[ ]:
one_counter = 0
for value in mylist: # note the colon!
if value == 1: # note the indent for each `block` !
one_counter += 1 # this is the short version of a = a + 1
print("Found", one_counter, "ones.")
# ### Writing and reading files
#
# Need to get data in and out.
# In principle, I recommend using high-level readers from science packages.
#
# But you will need to know the principles of file opening nonetheless.
# In[ ]:
afile = open('testfile', 'w')
# In[ ]:
get_ipython().magic('pinfo afile.name')
# In[ ]:
afile.write('some text \n') # \n is the symbol to create a new line
afile.write('write some more \n')
# The `write` function of the `afile` object returns the length of what was just written.
#
# When done, the file needs to be closed!
#
# Otherwise, the content might not end up in the file, because it was still `cached`.
# In[ ]:
afile.close()
# I can call operating system commands with a leading exclamation mark:
# In[ ]:
get_ipython().system('cat testfile')
# #### reading the file
#
# One can also use a so called `context manager` and indented code block to indicate to Python when to automatically close the file:
# In[ ]:
with open('testfile', 'r') as afile:
print(afile.readlines())
# In[ ]:
with open('testfile', 'r') as afile:
print(afile.read())
# ### Tutorial practice
#
# Now you will practice a bit of Python yourself.
#
# I recommend joining forces in groups of two.
#
# This working technique is called "pair programming":
# One person is the "driver", typing in code, while the other person is the "navigator", reviewing everything that is being typed in.
# The roles should be frequently changed (or not, depending on preferences).
#
# This way you can discuss anything that's being worked on.
#
# Next session we will learn how to import all these powerful analysis packages into a Python session, learn about the most important science packages, and play with some real data.
| 2.40625
| 2
|
google/appengine/tools/devappserver2/devappserver2_test.py
|
micahstubbs/google_appengine
| 0
|
12775300
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for google.apphosting.tools.devappserver2.devappserver2."""
import argparse
import os
import platform
import unittest
import google
import mock
from google.appengine.tools.devappserver2 import devappserver2
class WinError(Exception):
pass
class FakeApplicationConfiguration(object):
def __init__(self, modules):
self.modules = modules
class FakeModuleConfiguration(object):
def __init__(self, module_name):
self.module_name = module_name
class CreateModuleToSettingTest(unittest.TestCase):
def setUp(self):
self.application_configuration = FakeApplicationConfiguration([
FakeModuleConfiguration('m1'), FakeModuleConfiguration('m2'),
FakeModuleConfiguration('m3')])
def test_none(self):
self.assertEquals(
{},
devappserver2.DevelopmentServer._create_module_to_setting(
None, self.application_configuration, '--option'))
def test_dict(self):
self.assertEquals(
{'m1': 3, 'm3': 1},
devappserver2.DevelopmentServer._create_module_to_setting(
{'m1': 3, 'm3': 1}, self.application_configuration, '--option'))
def test_single_value(self):
self.assertEquals(
{'m1': True, 'm2': True, 'm3': True},
devappserver2.DevelopmentServer._create_module_to_setting(
True, self.application_configuration, '--option'))
def test_dict_with_unknown_modules(self):
self.assertEquals(
{'m1': 3.5},
devappserver2.DevelopmentServer._create_module_to_setting(
{'m1': 3.5, 'm4': 2.7}, self.application_configuration, '--option'))
class DatastoreEmulatorSupportcheckTest(unittest.TestCase):
@mock.patch.object(os.path, 'exists', return_value=False)
@mock.patch.object(devappserver2.DevelopmentServer,
'_correct_datastore_emulator_cmd', return_value=None)
def test_fail_missing_emulator(self, mock_correction, unused_mock):
options = argparse.Namespace()
# Following flags simulate the scenario of invoking dev_appserver.py from
# google-cloud-sdk/platform/google_appengine
options.support_datastore_emulator = True
options.datastore_emulator_cmd = None
with self.assertRaises(devappserver2.MissingDatastoreEmulatorError) as ctx:
dev_server = devappserver2.DevelopmentServer()
dev_server._options = options
dev_server._check_datastore_emulator_support()
mock_correction.assert_called_once_with()
self.assertIn('Cannot find Cloud Datastore Emulator', ctx.exception.message)
class PlatformSupportCheckTest(unittest.TestCase):
def test_succeed_non_python3_windows(self):
with mock.patch.object(platform, 'system', return_value='Windows'):
devappserver2.DevelopmentServer._check_platform_support({'python2'})
platform.system.assert_not_called()
def test_succeed_python3_non_windows(self):
with mock.patch.object(platform, 'system', return_value='Linux'):
devappserver2.DevelopmentServer._check_platform_support({'python3'})
platform.system.assert_called_once_with()
def test_fail_python3_windows(self):
with mock.patch.object(platform, 'system', return_value='Windows'):
with self.assertRaises(OSError):
devappserver2.DevelopmentServer._check_platform_support(
{'python3', 'python2'})
platform.system.assert_called_once_with()
if __name__ == '__main__':
unittest.main()
| 2.390625
| 2
|
ndocker/ovs/__init__.py
|
codlin/ndocker
| 0
|
12775301
|
from __future__ import absolute_import
from .vsctl import VSCtl
from .vsctl import VSCtlCmdExecError
from .vsctl import VSCtlCmdParseError
| 1.09375
| 1
|
project/bbmdweb/urls.py
|
kanshao/bbmd_web_ehp
| 0
|
12775302
|
<reponame>kanshao/bbmd_web_ehp
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from . import api, views
router = DefaultRouter()
router.register(
r'run',
api.RunViewset, base_name="run")
router.register(
r'run/(?P<run_id>\d+)/models',
api.ModelSettingsViewset,
base_name="models")
router.register(
r'run/(?P<run_id>\d+)/bmds',
api.BMDAnalysisViewset,
base_name="bmds")
urlpatterns = [
# api
url(r'^api/', include(router.urls, namespace="api")),
# CRUD views
url(r'^create/$',
views.RunCreate.as_view(),
name="run_create"),
url(r'^run/(?P<uuid>[^/]+)/$',
views.RunDetail.as_view(),
name="run_detail"),
url(r'^run/(?P<uuid>[^/]+)/execute/$',
views.RunExecute.as_view(),
name="run_execute"),
url(r'^run/(?P<uuid>[^/]+)/update/$',
views.RunUpdate.as_view(),
name="run_update"),
url(r'^run/(?P<uuid>[^/]+)/delete/$',
views.RunDelete.as_view(),
name="run_delete"),
]
| 1.976563
| 2
|
users/urls.py
|
mdribera/noteworthy
| 1
|
12775303
|
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from . import views
app_name = 'users'
urlpatterns = [
# ex: /users/signup
url(r'^signup/', views.SignupView.as_view(), name='signup'),
# ex: /users/login
url(r'^login/', auth_views.login, name='login'),
# ex: /users/logout
url(r'^logout/', auth_views.logout, name='logout'),
# ex: /users/profile
url(r'^profile/', views.ProfileView.as_view(), name='profile'),
]
| 1.804688
| 2
|
queries.py
|
mhmtsker/SamplePortfolio
| 0
|
12775304
|
query = """select *
from prices
where date > '12.12.2012'"""
| 1.789063
| 2
|
tests/test_parser.py
|
kittinan/twitter_media_downloader
| 161
|
12775305
|
# coding: utf-8
"""
Unit tests for the parser module.
"""
from ..src.parser import parse_tweet
# pylint: disable=old-style-class,too-few-public-methods
class Struct:
"""Basic class to convert a struct to a dict."""
def __init__(self, **entries):
self.__dict__.update(entries)
USER = Struct(**{
'id_str': '456789',
'name': 'Super user',
'screen_name': 'superuser123',
})
TWEET = Struct(**{
'id_str': '123456',
'created_at': '2019-06-24 20:19:35',
'full_text': 'Hello world!',
'entities': {
'urls': [
{'expanded_url': 'https://instagram.com/test'},
{'expanded_url': 'https://www.google.com'},
{'expanded_url': 'https://periscope.tv/test'}
]
},
'user': USER,
'extended_entities': {
'media': [
{
'video_info': {
'variants': [
{
'bitrate': 123,
'url': 'video_123'
},
{
'bitrate': 789,
'url': 'video_789'
}
]
}
},
{
'media_url_https': 'video_789/video_thumb',
'sizes': ['thumb', 'large']
},
{
'media_url_https': 'my_image',
'sizes': ['thumb', 'large']
},
{
'media_url_https': 'other_image',
'sizes': ['thumb', 'medium']
}
]
}
})
TEXT_TWEET = Struct(**{
'id_str': '123456',
'created_at': '2019-06-24 20:19:35',
'user': USER,
'full_text': 'Hello world!'
})
RETWEET = Struct(**{
'id_str': '789',
'created_at': '2019-06-22 12:12:12',
'user': USER,
'retweeted_status': TWEET
})
def test_tweet():
"""Ensure that tweets with images and video are properly parsed."""
results = {
'tweets': 0,
'retweets': 0,
'media': []
}
parse_tweet(TWEET, True, 'large', results)
assert results['tweets'] == 1
assert results['retweets'] == 0
assert len(results['media']) == 1
assert results['media'][0]['tweet_id'] == '123456'
assert results['media'][0]['original_tweet_id'] == '123456'
assert results['media'][0]['text'] == ''
assert results['media'][0]['videos'] == ['video_789']
assert results['media'][0]['images'] == ['my_image:large', 'other_image']
assert results['media'][0]['urls']['periscope'] == ['https://periscope.tv/test']
assert results['media'][0]['urls']['instagram'] == ['https://instagram.com/test']
assert results['media'][0]['urls']['others'] == ['https://www.google.com']
def test_text_tweet():
"""Ensure that text tweets are properly parsed."""
results = {
'tweets': 0,
'retweets': 0,
'media': []
}
parse_tweet(TEXT_TWEET, True, 'large', results)
assert results['tweets'] == 1
assert results['retweets'] == 0
assert len(results['media']) == 1
assert results['media'][0]['tweet_id'] == '123456'
assert results['media'][0]['original_tweet_id'] == '123456'
assert results['media'][0]['text'] == 'Hello world!'
def test_retweet():
"""Ensure that retweets are properly parsed when enabled."""
results = {
'tweets': 0,
'retweets': 0,
'media': []
}
parse_tweet(RETWEET, True, 'large', results)
assert results['tweets'] == 0
assert results['retweets'] == 1
assert len(results['media']) == 1
assert results['media'][0]['tweet_id'] == '789'
assert results['media'][0]['original_tweet_id'] == '123456'
def test_retweet_disabled():
"""Ensure that retweets are not treated as such when they are disabled."""
results = {
'tweets': 0,
'retweets': 0,
'media': []
}
parse_tweet(RETWEET, False, 'large', results)
assert results['tweets'] == 1
assert results['retweets'] == 0
assert len(results['media']) == 1
assert results['media'][0]['tweet_id'] == '789'
assert results['media'][0]['original_tweet_id'] == '789'
| 2.859375
| 3
|
container/private/versions.bzl
|
alexeagle/rules_container
| 0
|
12775306
|
"""Mirror of release info
TODO: generate this file from GitHub API"""
# The integrity hashes can be computed with
# shasum -b -a 384 [downloaded file] | awk '{ print $1 }' | xxd -r -p | base64
TOOL_VERSIONS = {
"7.0.1-rc1": {
"darwin_arm64": "sha384-PMTl7GMV01JnwQ0yoURCuEVq+xUUlhayLzBFzqId8ebIBQ8g8aWnbiRX0e4xwdY1",
},
}
# shasum -b -a 384 /Users/thesayyn/Downloads/go-containerregistry_Darwin_arm64.tar.gz | awk '{ print $1 }' | xxd -r -p | base64
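# A hedged Python one-off (run outside Bazel; not Starlark) that produces the
# same "sha384-..." integrity string as the shasum pipeline above:
#   import base64, hashlib
#   print("sha384-" + base64.b64encode(hashlib.sha384(open(PATH, "rb").read()).digest()).decode())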
| 1.328125
| 1
|
2015/18/solve.py
|
lamperi/aoc
| 0
|
12775307
|
with open("input.txt") as file:
data = file.read()
m = [-1, 0, 1]
def neight(grid, x, y):
for a in m:
for b in m:
if a == b == 0:
continue
xx = x+a
yy = y+b
if 0 <= xx < len(grid) and 0 <= yy < len(grid[xx]):
yield grid[xx][yy]
def neight_on(grid, x, y):
return sum(1 for c in neight(grid, x, y) if c == '#')
def update_corners(grid):
new_grid = []
for x in range(len(grid)):
l = []
for y in range(len(grid[x])):
if x in (0, len(grid)-1) and y in (0, len(grid[x])-1):
l.append('#')
            else:  # keep the cell unchanged
                l.append(grid[x][y])
new_grid.append("".join(l))
return new_grid
# NOTE: corners are forced on only via update_corners() (part 2); forcing them
# here as well would corrupt the part 1 answer.
def update(grid):
    new_grid = []
    for x in range(len(grid)):
        l = []
        for y in range(len(grid[x])):
            on_count = neight_on(grid, x, y)
            if grid[x][y] == '#':  # on
                l.append('#' if on_count in (2, 3) else '.')
            else:  # off
                l.append('#' if on_count == 3 else '.')
        new_grid.append("".join(l))
    return new_grid
# TEST
grid = """.#.#.#
...##.
#....#
..#...
#.#..#
####..""".splitlines()
for i in range(4):
grid = update(grid)
print("\n".join(grid) + "\n")
# PART 1
grid = data.splitlines()
for i in range(100):
grid = update(grid)
s = sum(1 for row in grid for c in row if c == '#')
print(s)
# TEST 2
grid = """.#.#.#
...##.
#....#
..#...
#.#..#
####..""".splitlines()
grid = update_corners(grid)
for i in range(5):
grid = update_corners(update(grid))
print("\n".join(grid) + "\n")
# PART 2
grid = data.splitlines()
grid = update_corners(grid)
for i in range(100):
grid = update_corners(update(grid))
s = sum(1 for row in grid for c in row if c == '#')
print(s)
| 3.03125
| 3
|
webpages/admin.py
|
18praneeth/udayagiri-scl-maxo
| 8
|
12775308
|
<gh_stars>1-10
from django.contrib import admin
from .models import Contact
@admin.register(Contact)
class ContactAdmin(admin.ModelAdmin):
pass
| 1.28125
| 1
|
mobile_version_app/migrations/0001_initial.py
|
SakyaSumedh/mobile_version_app
| 5
|
12775309
|
<reponame>SakyaSumedh/mobile_version_app
# Generated by Django 2.1.7 on 2019-03-01 08:29
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MobileVersion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('device_type', models.CharField(choices=[('1', 'IOS'), ('2', 'ANDROID')], max_length=1)),
('app_version', models.CharField(max_length=10)),
('app_link', models.TextField()),
('optional_update', models.BooleanField(default=True)),
],
),
]
| 1.664063
| 2
|
examples/tests/embed/embed_division.py
|
chebee7i/ipython
| 2
|
12775310
|
<filename>examples/tests/embed/embed_division.py
"""This tests that future compiler flags are passed to the embedded IPython."""
from __future__ import division
from IPython import embed
embed(banner1='', header='check 1/2 == 0.5 in Python 2')
embed(banner1='', header='check 1/2 == 0 in Python 2', compile_flags=0)
| 1.65625
| 2
|
optimus/engines/pandas/functions.py
|
ironmussa/Optimus
| 1,045
|
12775311
|
<reponame>ironmussa/Optimus
import numpy as np
import pandas as pd
from optimus.engines.base.pandas.functions import PandasBaseFunctions
from optimus.engines.base.dataframe.functions import DataFrameBaseFunctions
class PandasFunctions(PandasBaseFunctions, DataFrameBaseFunctions):
_engine = pd
@staticmethod
def dask_to_compatible(dfd):
from optimus.helpers.converter import dask_dataframe_to_pandas
return dask_dataframe_to_pandas(dfd)
@staticmethod
def df_concat(df_list):
return pd.concat(df_list, axis=0, ignore_index=True)
@staticmethod
def new_df(*args, **kwargs):
return pd.DataFrame(*args, **kwargs)
def count_zeros(self, series, *args):
return int((self.to_float(series).values == 0).sum())
def kurtosis(self, series):
# use scipy to match function from dask.array.stats
from scipy.stats import kurtosis
return kurtosis(self.to_float(series.dropna()))
def skew(self, series):
# use scipy to match function from dask.array.stats
from scipy.stats import skew
return skew(self.to_float(series.dropna()))
def exp(self, series):
return np.exp(self.to_float(series))
def sqrt(self, series):
return np.sqrt(self.to_float(series))
def reciprocal(self, series):
return np.reciprocal(self.to_float(series))
def radians(self, series):
return np.radians(self.to_float(series))
def degrees(self, series):
return np.degrees(self.to_float(series))
def ln(self, series):
return np.log(self.to_float(series))
def log(self, series, base=10):
return np.log(self.to_float(series)) / np.log(base)
def sin(self, series):
return np.sin(self.to_float(series))
def cos(self, series):
return np.cos(self.to_float(series))
def tan(self, series):
return np.tan(self.to_float(series))
def asin(self, series):
return np.arcsin(self.to_float(series))
def acos(self, series):
return np.arccos(self.to_float(series))
def atan(self, series):
return np.arctan(self.to_float(series))
    def sinh(self, series):
        return np.sinh(self.to_float(series))
def cosh(self, series):
return np.cosh(self.to_float(series))
def tanh(self, series):
return np.tanh(self.to_float(series))
def asinh(self, series):
return np.arcsinh(self.to_float(series))
def acosh(self, series):
return np.arccosh(self.to_float(series))
def atanh(self, series):
return np.arctanh(self.to_float(series))
def floor(self, series):
return np.floor(self.to_float(series))
def ceil(self, series):
return np.ceil(self.to_float(series))
def normalize_chars(self, series):
return series.str.normalize("NFKD").str.encode('ascii', errors='ignore').str.decode('utf8')
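# Standalone check (added sketch) of the change-of-base identity used by
# log() above: log_b(x) == ln(x) / ln(b).
if __name__ == "__main__":
    x = np.array([1.0, 10.0, 100.0])
    assert np.allclose(np.log(x) / np.log(10), np.log10(x))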
| 2.578125
| 3
|
server/face_server/np_utils.py
|
yichenj/facegate
| 0
|
12775312
|
import numpy as np
def to_array(image):
array = np.array(image, dtype=np.float32)[..., :3]
array = array / 255.
return array
def l2_normalize(x, axis=0):
norm = np.linalg.norm(x, axis=axis, keepdims=True)
return x / norm
def distance(a, b):
# Euclidean distance
# return np.linalg.norm(a - b)
    # Cosine similarity (used here as the distance score): ||a|| and ||b|| are 1
    # because the embeddings are normalized, so there is no need to compute
    # np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
return np.dot(a, b)
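# Minimal usage sketch (added): identical normalized embeddings score ~1.0.
if __name__ == "__main__":
    a = l2_normalize(np.random.rand(128))
    b = l2_normalize(np.random.rand(128))
    print(distance(a, a))  # ~1.0
    print(distance(a, b))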
| 3.34375
| 3
|
scripts/perspective-effects.py
|
smoh/kinesis
| 6
|
12775313
|
<reponame>smoh/kinesis
"""Demonstrate perspective rotation/shear/expansion."""
#%%
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import astropy.coordinates as coord
import astropy.units as u
import kinesis as kn
import gapipes as gp
v0 = [-6.3, 45.2, 5.3]
v0 = [-6.3, 5.2, -45.2]
sigmav = 0.0
N = 100
d = 100
b0 = coord.ICRS(120 * u.deg, 25 * u.deg, d * u.pc).cartesian.xyz.value
# # get cartesian v0 with zero proper motions and only radial velocity
# b0tmp = coord.ICRS(
# 120 * u.deg,
# 25 * u.deg,
# d * u.pc,
# 0.0 * u.mas / u.yr,
# 0.0 * u.mas / u.yr,
# 10 * u.km / u.s,
# )
# v0 = b0tmp.velocity.d_xyz.value
# print(v0)
Rmax = 10 # pc
print(b0)
# coordinate object for the cluster center with velocities
# NOTE: astropy coordinates do not allow to mix spherical X with cartesian V!
cc = coord.ICRS(
*(b0 * u.pc),
*(v0 * u.km / u.s),
representation_type=coord.CartesianRepresentation,
differential_type=coord.CartesianDifferential
)
vra0 = cc.spherical.differentials["s"].d_lon.value * d / 1e3 * 4.74
vdec0 = cc.spherical.differentials["s"].d_lat.value * d / 1e3 * 4.74
# TODO: sample plane normal to b0 -- this will make it very clear I think...
cl = kn.Cluster(v0, sigmav, b0=b0).sample_sphere(N=N, Rmax=Rmax)
# hy = pd.read_csv("../data/hyades_full.csv").groupby('in_dr2').get_group(True)
# v0 = [-6.3, 45.2, 5.3]
# b0 = np.array([17.15474298, 41.28962638, 13.69105771])
# cl = kn.Cluster(v0, sigmav, b0=b0).sample_at(hy.g.icrs)
# vra0 = cl.icrs.pm_ra_cosdec.value * cl.icrs.distance.value /1e3*4.74
# vdec0 = cl.icrs.pm_dec.value * cl.icrs.distance.value /1e3*4.74
c = cl.members.truth.g
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 4))
q1 = ax1.quiver(c.icrs.cartesian.x, c.icrs.cartesian.y, c.vra, c.vdec)
ax1.quiverkey(q1, 0.9, 0.9, 50, "50", coordinates="axes")
q2 = ax2.quiver(c.icrs.cartesian.x, c.icrs.cartesian.y, c.vra - vra0, c.vdec - vdec0)
ax2.quiverkey(q2, 0.9, 0.9, 10, "10", coordinates="axes")
# radius_deg = 5
# ra_grid = np.linspace()
#%%
from scipy.spatial.transform import Rotation as R
def sample_circle(N=1, R=1):
theta = np.random.uniform(size=N) * 2 * np.pi
r = R * np.sqrt(np.random.uniform(size=N))
x = r * np.cos(theta)
y = r * np.sin(theta)
return np.vstack([x, y]).T
def sample_surface(x0=None, Rdeg=1, N=1):
"""
Rdegree (float): radius of patch in degrees
"""
Rrad = np.deg2rad(Rdeg)
assert np.shape(x0) == (3,), "`x0` has a wrong shape."
r, phi_prime, theta = coord.cartesian_to_spherical(
*x0
) # Returned angles are in radians.
r = r.value
phi = np.pi / 2.0 - phi_prime.value # to usual polar angle
theta = theta.value
thetas = np.random.uniform(low=theta - Rrad, high=theta + Rrad, size=N)
    # phi = arccos(nu); nu is uniform
    nu1, nu2 = np.cos(phi - Rrad), np.cos(phi + Rrad)
    nus = np.random.uniform(low=nu1, high=nu2, size=N)
    phis = np.arccos(nus)
xyz = coord.spherical_to_cartesian(r, np.pi / 2.0 - phis, thetas)
return np.vstack(list(map(lambda x: x.value, xyz))).T
from mpl_toolkits.mplot3d import axes3d
xi, yi, zi = sample_surface([10, -10, 10], 5, N=100).T
fig, ax = plt.subplots(1, 1, subplot_kw={"projection": "3d"}) # , 'aspect':'equal'})
phi = np.linspace(0, np.pi, 10)
theta = np.linspace(0, 2 * np.pi, 20)
x = np.outer(np.sin(theta), np.cos(phi)) * 10 * np.sqrt(3)
y = np.outer(np.sin(theta), np.sin(phi)) * 10 * np.sqrt(3)
z = np.outer(np.cos(theta), np.ones_like(phi)) * 10 * np.sqrt(3)
ax.plot_wireframe(x, y, z, color="k", rstride=1, cstride=1)
ax.scatter(xi, yi, zi, s=100, c="r", zorder=10)
# xy = sample_circle(N=1000).T
# fig, ax = plt.subplots()
# ax.scatter(*xy)
| 2.375
| 2
|
task/vms_report.py
|
midoks/vms
| 1
|
12775314
|
# coding: utf-8
#------------------------------
# Status reporting from the [slave] server
#------------------------------
import sys
import os
import json
import time
import threading
import subprocess
import shutil
sys.path.append("/usr/local/lib/python2.7/site-packages")
import psutil
root_dir = os.getcwd()
sys.path.append(root_dir + "/class/core")
reload(sys)
sys.setdefaultencoding('utf-8')
import db
import common
#------------Private Methods--------------
def updateStatus(sid, status):
common.M('video_tmp').where(
"id=?", (sid,)).setField('status', status)
def isMasterNode():
run_model = common.getSysKV('run_model')
run_is_master = common.getSysKV('run_is_master')
if (run_model == '1') or (run_is_master == '1'):
return True
return False
#------------Private Methods--------------
def reportData(data):
_list = common.M('node').field('id,port,name,ip').where(
'ismaster=?', (1,)).select()
if len(_list) > 0:
_url = "http://" + str(_list[0]['ip']) + \
":" + str(_list[0]['port'])
api_url = _url + "/async_master_api/reportData"
ret = common.httpPost(api_url, {
"mark": common.getSysKV('run_mark'),
"data": data,
'name': _list[0]['name']
})
rr = json.loads(ret)
return rr
def pingServer():
_list = common.M('node').field('id,port,name,ip').select()
for x in xrange(0, len(_list)):
_url = "http://" + str(_list[x]['ip']) + \
":" + str(_list[x]['port'])
api_url = _url + "/async_master_api/ping"
try:
ret = common.httpPost(api_url, {
"mark": common.getSysKV('run_mark'),
'name': _list[x]['name']
})
rr = json.loads(ret)
if rr['code'] == 0:
common.M('node').where(
'name=?', (_list[x]['name'],)).setField('status', 1)
except Exception as e:
common.M('node').where(
'name=?', (_list[x]['name'],)).setField('status', 0)
return True
def serverReport():
time_sleep = 3
while True:
if isMasterNode():
time.sleep(time_sleep)
continue
c = os.getloadavg()
data = {}
data['one'] = float(c[0])
data['five'] = float(c[1])
data['fifteen'] = float(c[2])
data['max'] = psutil.cpu_count() * 2
data['limit'] = data['max']
data['safe'] = data['max'] * 0.75
data['report_time'] = common.getDate()
r = reportData(data)
if r['code'] != 0:
                print('Sync failed! [%s]' % common.getDate())
time.sleep(time_sleep)
def serverPing():
while True:
pingServer()
time.sleep(3)
def startTask():
import time
try:
while True:
time.sleep(2)
except:
time.sleep(60)
startTask()
if __name__ == "__main__":
t = threading.Thread(target=serverReport)
t.setDaemon(True)
t.start()
t = threading.Thread(target=serverPing)
t.setDaemon(True)
t.start()
startTask()
| 2.09375
| 2
|
notes/__init__.py
|
ibutra/SpyDashServer
| 2
|
12775315
|
from spydashserver.plugins import PluginConfig
plugin_config = PluginConfig("notes", "notes.Notes", models='notes.models')
| 1.304688
| 1
|
programs/pgm06_02.py
|
danielsunzhongyuan/python_practice
| 0
|
12775316
|
#
# This file contains the Python code from Program 6.2 of
# "Data Structures and Algorithms
# with Object-Oriented Design Patterns in Python"
# by <NAME>.
#
# Copyright (c) 2003 by <NAME>, P.Eng. All rights reserved.
#
# http://www.brpreiss.com/books/opus7/programs/pgm06_02.txt
#
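# NOTE (added): `Stack` and `Array` are defined in earlier programs of the
# book. The minimal stand-ins below are sketches so this fragment runs on
# its own; they are not the book's implementations.
class Stack(object):
    def __init__(self):
        super(Stack, self).__init__()
        self._count = 0
class Array(list):
    def __init__(self, size=0):
        super(Array, self).__init__([None] * size)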
class StackAsArray(Stack):
def __init__(self, size = 0):
super(StackAsArray, self).__init__()
self._array = Array(size)
    def purge(self):
        # items occupy indices 0 .. _count - 1, so decrement before clearing
        while self._count > 0:
            self._count -= 1
            self._array[self._count] = None
#...
| 3.15625
| 3
|
handcam/scratch/stack_exchange_nn_transfer_plz_help.py
|
luketaverne/handcam
| 1
|
12775317
|
<filename>handcam/scratch/stack_exchange_nn_transfer_plz_help.py
from __future__ import print_function
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import re
import torch
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils import model_zoo
from nn_transfer import transfer
# from tensorflow.python.keras.models import Model
# from tensorflow.python.keras.layers import Input, Add, Activation, Dropout, Flatten, Dense,\
# Convolution2D, MaxPooling2D, AveragePooling2D, BatchNormalization
from keras.models import Model
from keras.layers import Input, Add, Activation, Dropout, Flatten, Dense, AveragePooling2D, MaxPooling2D, Reshape
from keras.layers.convolutional import Convolution2D
# Transfer tutorial <https://github.com/gzuidhof/nn-transfer/blob/master/example.ipynb>
## PyTorch Things
# convert numpy arrays to torch Variables
params = model_zoo.load_url('https://s3.amazonaws.com/modelzoo-networks/wide-resnet-50-2-export-5ae25d50.pth')
for k, v in sorted(params.items()):
print(k, tuple(v.shape))
params[k] = Variable(v, requires_grad=True)
print('\nTotal parameters:', sum(v.numel() for v in params.values()))
# PyTorch Model definition, from <https://github.com/szagoruyko/functional-zoo/blob/master/wide-resnet-50-2-export.ipynb>
def define_pytorch_model(params):
def conv2d(input, params, base, stride=1, pad=0):
return F.conv2d(input, params[base + '.weight'],
params[base + '.bias'], stride, pad)
def group(input, params, base, stride, n):
o = input
for i in range(0, n):
b_base = ('%s.block%d.conv') % (base, i)
x = o
o = conv2d(x, params, b_base + '0')
o = F.relu(o)
o = conv2d(o, params, b_base + '1', stride=i == 0 and stride or 1, pad=1)
o = F.relu(o)
o = conv2d(o, params, b_base + '2')
if i == 0:
o += conv2d(x, params, b_base + '_dim', stride=stride)
else:
o += x
o = F.relu(o)
return o
# determine network size by parameters
    blocks = [sum([re.match(r'group%d.block\d+.conv0.weight' % j, k) is not None
for k in params.keys()]) for j in range(4)]
def f(input, params):
o = F.conv2d(input, params['conv0.weight'], params['conv0.bias'], 2, 3)
o = F.relu(o)
o = F.max_pool2d(o, 3, 2, 1)
o_g0 = group(o, params, 'group0', 1, blocks[0])
o_g1 = group(o_g0, params, 'group1', 2, blocks[1])
o_g2 = group(o_g1, params, 'group2', 2, blocks[2])
o_g3 = group(o_g2, params, 'group3', 2, blocks[3])
o = F.avg_pool2d(input=o_g3, kernel_size=7, stride=1, padding=0)
o = o.view(o.size(0), -1)
o = F.linear(o, params['fc.weight'], params['fc.bias'])
return o
return f
## Keras Things
def define_keras_model(input_dim, nb_classes=1000):
def group(input, stride, n, group_num):
o = input
for i in range(0, n):
b_base = ('%s.block%d.conv') % ('group' + str(group_num), i)
kernel0 = 2 ** (7 + group_num)
kernel_1 = 2 ** (7 + group_num)
kernel_2 = 2 ** (8 + group_num)
kernel_dim = 2 ** (8 + group_num)
x = o
# conv0
o = Convolution2D(kernel0, (1, 1), padding='same', strides=(1, 1), kernel_initializer='he_normal',
use_bias=True, name=b_base + '0', activation='relu')(x)
# conv1
stride_0 = i == 0 and stride or 1 # lazy copy from pytorch loop
o = Convolution2D(kernel_1, (3, 3), padding='same', strides=(stride_0, stride_0),
kernel_initializer='he_normal',
use_bias=True, name=b_base + '1', activation='relu')(o)
# conv2
o = Convolution2D(kernel_2, (1, 1), padding='same', strides=(1, 1), kernel_initializer='he_normal',
use_bias=True, name=b_base + '2')(o)
# print(o.shape)
if i == 0:
o = Add()([o, Convolution2D(kernel_dim, (1, 1), padding='same', strides=(stride, stride),
kernel_initializer='he_normal',
use_bias=True, name=b_base + '_dim')(x)])
else:
o = Add()([o, x])
o = Activation('relu')(o)
return o
# input
ip = Input(shape=input_dim)
# conv0
x = Convolution2D(64, (7, 7), padding='same', strides=(2,2), kernel_initializer='he_normal',
use_bias=True, name='conv0', activation='relu')(ip)
# x = BatchNormalization(axis=channel_axis, momentum=0.1, epsilon=1e-5, gamma_initializer='uniform')(x)
# max pool
x = MaxPooling2D(pool_size=(3,3), strides=(2,2), padding='valid')(x)
# group0 (stride 1, n=3)
x = group(x,stride=1,n=3, group_num=0)
# group1 (stride 2, n=4)
x = group(x, stride=2, n=4, group_num=1)
# group2 (stride 2, n=6)
x = group(x, stride=2, n=6, group_num=2)
# group3 (stride 2, n=3)
x = group(x, stride=2, n=3, group_num=3)
# avgpool2d
x = AveragePooling2D((7, 7),strides=(1,1),padding='valid')(x)
x = Flatten()(x)
# x = Reshape((2048,))(x)
x = Dense(nb_classes, name='fc', use_bias=True)(x)
# x = Activation('linear')(x)
model = Model(ip, x)
return model
# Tensorflow
def define_tensorflow_model(inputs, params):
'''Bottleneck WRN-50-2 model definition
'''
def tr(v):
if v.ndim == 4:
return v.transpose(2, 3, 1, 0)
elif v.ndim == 2:
return v.transpose()
return v
params = {k: tf.constant(tr(v)) for k, v in params.items()}
def conv2d(x, params, name, stride=1, padding=0):
x = tf.pad(x, [[0, 0], [padding, padding], [padding, padding], [0, 0]])
z = tf.nn.conv2d(x, params['%s.weight' % name], [1, stride, stride, 1],
padding='VALID')
if '%s.bias' % name in params:
return tf.nn.bias_add(z, params['%s.bias' % name])
else:
return z
def group(input, params, base, stride, n):
o = input
for i in range(0, n):
b_base = ('%s.block%d.conv') % (base, i)
x = o
o = conv2d(x, params, b_base + '0')
o = tf.nn.relu(o)
o = conv2d(o, params, b_base + '1', stride=i == 0 and stride or 1, padding=1)
o = tf.nn.relu(o)
o = conv2d(o, params, b_base + '2')
if i == 0:
o += conv2d(x, params, b_base + '_dim', stride=stride)
else:
o += x
o = tf.nn.relu(o)
return o
# determine network size by parameters
    blocks = [sum([re.match(r'group%d.block\d+.conv0.weight' % j, k) is not None
for k in params.keys()]) for j in range(4)]
o = conv2d(inputs, params, 'conv0', 2, 3)
o = tf.nn.relu(o)
o = tf.pad(o, [[0, 0], [1, 1], [1, 1], [0, 0]])
o = tf.nn.max_pool(o, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='VALID')
o_g0 = group(o, params, 'group0', 1, blocks[0])
o_g1 = group(o_g0, params, 'group1', 2, blocks[1])
o_g2 = group(o_g1, params, 'group2', 2, blocks[2])
o_g3 = group(o_g2, params, 'group3', 2, blocks[3])
o = tf.nn.avg_pool(o_g3, ksize=[1, 7, 7, 1], strides=[1, 1, 1, 1], padding='VALID')
o = tf.reshape(o, [-1, 2048])
o = tf.matmul(o, params['fc.weight']) + params['fc.bias']
return o
# Define the PyTorch model
pytorch_model = define_pytorch_model(params)
# Define the tensorflow model
params_tf = {k: v.numpy() for k, v in model_zoo.load_url('https://s3.amazonaws.com/modelzoo-networks/wide-resnet-50-2-export-5ae25d50.pth').items()}
inputs_tf = tf.placeholder(tf.float32, shape=[None, 224, 224, 3])
tf_model = define_tensorflow_model(inputs_tf, params_tf)
# Define the Keras model, reloading the weights since we can't use get_state_dict() on functional pytorch models
input_state_dict_for_keras = model_zoo.load_url('https://s3.amazonaws.com/modelzoo-networks/wide-resnet-50-2-export-5ae25d50.pth')
keras_model = define_keras_model((224,224,3), nb_classes=1000)
# Transfer the pytorch weights to keras
transfer.pytorch_to_keras(pytorch_model, keras_model, state_dict=input_state_dict_for_keras)
# Create dummy data
torch.manual_seed(0)
data = torch.rand(6,3,224,224)
data_keras_and_tf = data.permute(0,2,3,1).numpy()
data_pytorch = Variable(data, requires_grad=False)
# Do a forward pass in all frameworks
pytorch_pred = pytorch_model(data_pytorch, params).data.numpy()
keras_pred = keras_model.predict(data_keras_and_tf)
sess = tf.Session()
tf_pred = sess.run(tf_model, feed_dict={inputs_tf: data_keras_and_tf})
assert keras_pred.shape == pytorch_pred.shape
# check that difference between PyTorch and Tensorflow is small
assert np.abs(tf_pred - pytorch_pred).max() < 1e-4
print(np.abs(keras_pred - pytorch_pred).max())
print(np.abs(keras_pred[...,::-1] - pytorch_pred).max())
print(np.abs(tf_pred - pytorch_pred).max())
plot_comparison = False
if plot_comparison:
plt.subplot(3, 1, 1)
plt.axis('Off')
plt.imshow(pytorch_pred[:,0:40])
plt.title('pytorch')
plt.subplot(3, 1, 2)
plt.axis('Off')
plt.imshow(tf_pred[:,0:40])
plt.title('tensorflow')
plt.subplot(3, 1, 3)
plt.axis('Off')
plt.imshow(keras_pred[:,0:40])
plt.title('keras')
plt.show()
plot_diff_images = True
if plot_diff_images:
data_keras_and_tf_flip_ch = keras_model.predict(data_keras_and_tf[...,::-1])
data_keras_and_tf_flip_lr = keras_model.predict(data_keras_and_tf[:,::-1,:,:])
data_keras_and_tf_flip_lr_ch = keras_model.predict(data_keras_and_tf[:, ::-1, :, ::-1])
data_keras_and_tf_flip_ud = keras_model.predict(data_keras_and_tf[:,:,::-1,:])
data_keras_and_tf_flip_ud_ch = keras_model.predict(data_keras_and_tf[:,:,::-1,::-1])
data_keras_and_tf_flip_lr_ud = keras_model.predict(data_keras_and_tf[:,::-1,::-1,:])
data_keras_and_tf_flip_lr_ud_ch = keras_model.predict(data_keras_and_tf[:,::-1,::-1,::-1])
swap_lr = data_keras_and_tf.transpose(0,2,1,3)
data_keras_and_tf_swap_lr = keras_model.predict(swap_lr)
data_keras_and_tf_flip_ch_swap_lr = keras_model.predict(swap_lr[..., ::-1])
data_keras_and_tf_flip_lr_swap_lr = keras_model.predict(swap_lr[:, ::-1, :, :])
data_keras_and_tf_flip_lr_ch_swap_lr = keras_model.predict(swap_lr[:, ::-1, :, ::-1])
data_keras_and_tf_flip_ud_swap_lr = keras_model.predict(swap_lr[:, :, ::-1, :])
data_keras_and_tf_flip_ud_ch_swap_lr = keras_model.predict(swap_lr[:, :, ::-1, ::-1])
data_keras_and_tf_flip_lr_ud_swap_lr = keras_model.predict(swap_lr[:, ::-1, ::-1, :])
data_keras_and_tf_flip_lr_ud_ch_swap_lr = keras_model.predict(swap_lr[:, ::-1, ::-1, ::-1])
print(np.abs(data_keras_and_tf_flip_ch - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_ch - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_ud - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_ud_ch - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_ud - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_ud_ch - pytorch_pred).max())
print(np.abs(data_keras_and_tf_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_ch_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_ch_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_ud_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_ud_ch_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_ud_swap_lr - pytorch_pred).max())
print(np.abs(data_keras_and_tf_flip_lr_ud_ch_swap_lr - pytorch_pred).max())
# plt.subplot(3, 1, 1)
# plt.axis('Off')
# plt.imshow(pytorch_pred[:, 0:40])
# plt.title('pytorch')
# plt.subplot(3, 1, 2)
# plt.axis('Off')
# plt.imshow(tf_pred[:, 0:40])
# plt.title('tensorflow')
# plt.subplot(3, 1, 3)
# plt.axis('Off')
# plt.imshow(keras_pred[:, 0:40])
# plt.title('keras')
# plt.show()
assert np.abs(keras_pred - pytorch_pred).max() < 1e-4 # This assertion will fail
print('done!')
| 2.359375
| 2
|
tests/app/dao/test_inbound_sms_keyword_dao.py
|
GouvQC/notification-api
| 1
|
12775318
|
<gh_stars>1-10
from datetime import datetime
from itertools import product
from freezegun import freeze_time
from app.dao.inbound_sms_keyword_dao import (
dao_get_inbound_sms_keyword_for_service,
dao_count_inbound_sms_keyword_for_service,
delete_inbound_sms_keyword_older_than_retention,
dao_get_inbound_sms_keyword_by_id
)
from tests.app.db import create_inbound_sms_keyword, create_service, create_service_data_retention
def test_get_all_inbound_sms_keyword(sample_service):
inbound = create_inbound_sms_keyword(sample_service)
res = dao_get_inbound_sms_keyword_for_service(sample_service.id)
assert len(res) == 1
assert res[0] == inbound
def test_get_all_inbound_sms_keyword_when_none_exist(sample_service):
res = dao_get_inbound_sms_keyword_for_service(sample_service.id)
assert len(res) == 0
def test_get_all_inbound_sms_keywords_limits_and_orders(sample_service):
with freeze_time('2017-01-01'):
create_inbound_sms_keyword(sample_service)
with freeze_time('2017-01-03'):
three = create_inbound_sms_keyword(sample_service)
with freeze_time('2017-01-02'):
two = create_inbound_sms_keyword(sample_service)
res = dao_get_inbound_sms_keyword_for_service(sample_service.id, limit=2)
assert len(res) == 2
assert res[0] == three
assert res[0].created_at == datetime(2017, 1, 3)
assert res[1] == two
assert res[1].created_at == datetime(2017, 1, 2)
def test_get_all_inbound_sms_keyword_filters_on_service(notify_db_session):
service_one = create_service(service_name='one')
service_two = create_service(service_name='two')
sms_one = create_inbound_sms_keyword(service_one)
create_inbound_sms_keyword(service_two)
res = dao_get_inbound_sms_keyword_for_service(service_one.id)
assert len(res) == 1
assert res[0] == sms_one
# This test assumes the local timezone is EST
def test_get_all_inbound_sms_keyword_filters_on_time(sample_service, notify_db_session):
create_inbound_sms_keyword(sample_service, created_at=datetime(2017, 8, 7, 3, 59)) # sunday evening
sms_two = create_inbound_sms_keyword(sample_service, created_at=datetime(2017, 8, 7, 4, 0)) # monday (7th) morning
with freeze_time('2017-08-14 12:00'):
res = dao_get_inbound_sms_keyword_for_service(sample_service.id, limit_days=7)
assert len(res) == 1
assert res[0] == sms_two
def test_count_inbound_sms_keyword_for_service(notify_db_session):
service_one = create_service(service_name='one')
service_two = create_service(service_name='two')
create_inbound_sms_keyword(service_one)
create_inbound_sms_keyword(service_one)
create_inbound_sms_keyword(service_two)
assert dao_count_inbound_sms_keyword_for_service(service_one.id, limit_days=1) == 2
# This test assumes the local timezone is EST
def test_count_inbound_sms_keyword_for_service_filters_messages_older_than_n_days(sample_service):
# test between evening sunday 2nd of june and morning of monday 3rd
create_inbound_sms_keyword(sample_service, created_at=datetime(2019, 6, 3, 3, 59))
create_inbound_sms_keyword(sample_service, created_at=datetime(2019, 6, 3, 3, 59))
create_inbound_sms_keyword(sample_service, created_at=datetime(2019, 6, 3, 4, 1))
with freeze_time('Monday 10th June 2019 12:00'):
assert dao_count_inbound_sms_keyword_for_service(sample_service.id, limit_days=7) == 1
@freeze_time("2017-06-08 12:00:00")
# This test assumes the local timezone is EST
def test_should_delete_inbound_sms_keyword_according_to_data_retention(notify_db_session):
no_retention_service = create_service(service_name='no retention')
short_retention_service = create_service(service_name='three days')
long_retention_service = create_service(service_name='thirty days')
services = [short_retention_service, no_retention_service, long_retention_service]
create_service_data_retention(long_retention_service, notification_type='sms', days_of_retention=30)
create_service_data_retention(short_retention_service, notification_type='sms', days_of_retention=3)
# email retention doesn't affect anything
create_service_data_retention(short_retention_service, notification_type='email', days_of_retention=4)
dates = [
datetime(2017, 6, 5, 3, 59), # older than three days
datetime(2017, 6, 1, 3, 59), # older than seven days
datetime(2017, 5, 1, 0, 0), # older than thirty days
]
for date, service in product(dates, services):
create_inbound_sms_keyword(service, created_at=date)
deleted_count = delete_inbound_sms_keyword_older_than_retention()
assert deleted_count == 6
assert {
x.created_at for x in dao_get_inbound_sms_keyword_for_service(short_retention_service.id)
} == set(dates[:1])
assert {
x.created_at for x in dao_get_inbound_sms_keyword_for_service(no_retention_service.id)
} == set(dates[:1])
assert {
x.created_at for x in dao_get_inbound_sms_keyword_for_service(long_retention_service.id)
} == set(dates[:1])
def test_get_inbound_sms_keyword_by_id_returns(sample_service):
inbound_sms_keyword = create_inbound_sms_keyword(service=sample_service)
inbound_from_db = dao_get_inbound_sms_keyword_by_id(inbound_sms_keyword.service.id, inbound_sms_keyword.id)
assert inbound_sms_keyword == inbound_from_db
| 2.1875
| 2
|
examples/favourites.py
|
ChrisPenner/tempered
| 34
|
12775319
|
<filename>examples/favourites.py
# You can write scripts in any language you like to level up your templates!
import sys
print(" and ".join(sys.argv[1:]) + ",")
print("These are a few of my favourite things")
| 2.203125
| 2
|
run_b_to_others.py
|
MnOpenProject/AutoVideoPub
| 25
|
12775320
|
<reponame>MnOpenProject/AutoVideoPub
''' Download my own Bilibili (B站) uploads and re-upload them to the other video platforms my account is logged in to '''
from from_mybilibili_to_others.main_script import main_func
if __name__ == '__main__':
main_func()
| 1.25
| 1
|
neoOkpara/Phase-1/Day2/currentDate.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
| 6
|
12775321
|
<reponame>CodedLadiesInnovateTech/-python-challenge-solutions<filename>neoOkpara/Phase-1/Day2/currentDate.py
from datetime import datetime
today = datetime.now()
show = "Current date and time : " + str(today)
print(show)
| 3.6875
| 4
|
algos/td3/core.py
|
DensoITLab/spinningup_in_pytorch
| 11
|
12775322
|
<gh_stars>10-100
import torch
import torch.nn as nn
import copy
class continuous_policy(nn.Module):
def __init__(self, act_dim, obs_dim, hidden_layer=(400,300)):
super().__init__()
layer = [nn.Linear(obs_dim, hidden_layer[0]), nn.ReLU()]
for i in range(1, len(hidden_layer)):
layer.append(nn.Linear(hidden_layer[i-1], hidden_layer[i]))
layer.append(nn.ReLU())
layer.append(nn.Linear(hidden_layer[-1], act_dim))
layer.append(nn.Tanh())
self.policy = nn.Sequential(*layer)
def forward(self, obs):
return self.policy(obs)
class q_function(nn.Module):
def __init__(self, obs_dim, hidden_layer=(400,300)):
super().__init__()
layer = [nn.Linear(obs_dim, hidden_layer[0]), nn.ReLU()]
for i in range(1, len(hidden_layer)):
layer.append(nn.Linear(hidden_layer[i-1], hidden_layer[i]))
layer.append(nn.ReLU())
layer.append(nn.Linear(hidden_layer[-1], 1))
self.policy = nn.Sequential(*layer)
def forward(self, obs):
return self.policy(obs)
class actor_critic(nn.Module):
def __init__(self, act_dim, obs_dim, hidden_layer=(400,300), act_limit=2):
super().__init__()
self.policy = continuous_policy(act_dim, obs_dim, hidden_layer)
self.q1 = q_function(obs_dim+act_dim, hidden_layer)
self.q2 = q_function(obs_dim+act_dim, hidden_layer)
self.act_limit = act_limit
for m in self.modules():
if isinstance(m, nn.Linear):
nn.init.xavier_normal_(m.weight)
nn.init.constant_(m.bias, 0)
self.policy_targ = continuous_policy(act_dim, obs_dim, hidden_layer)
self.q1_targ = q_function(obs_dim+act_dim, hidden_layer)
self.q2_targ = q_function(obs_dim+act_dim, hidden_layer)
self.copy_param()
def copy_param(self):
self.policy_targ.load_state_dict(self.policy.state_dict())
self.q1_targ.load_state_dict(self.q1.state_dict())
self.q2_targ.load_state_dict(self.q2.state_dict())
# for m_targ, m_main in zip(self.policy_targ.modules(), self.policy.modules()):
# if isinstance(m_targ, nn.Linear):
# m_targ.weight.data = m_main.weight.data
# m_targ.bias.data = m_main.bias.data
# for m_targ, m_main in zip(self.q_targ.modules(), self.q.modules()):
# if isinstance(m_targ, nn.Linear):
# m_targ.weight.data = m_main.weight.data
# m_targ.bias.data = m_main.bias.data
def get_action(self, obs, noise_scale):
pi = self.act_limit * self.policy(obs)
pi += noise_scale * torch.randn_like(pi)
pi.clamp_(max=self.act_limit, min=-self.act_limit)
return pi.squeeze()
def get_target_action(self, obs, noise_scale, clip_param):
pi = self.act_limit * self.policy_targ(obs)
eps = noise_scale * torch.randn_like(pi)
eps.clamp_(max=clip_param, min=-clip_param)
pi += eps
pi.clamp_(max=self.act_limit, min=-self.act_limit)
return pi.detach()
def update_target(self, rho):
# compute rho * targ_p + (1 - rho) * main_p
for poly_p, poly_targ_p in zip(self.policy.parameters(), self.policy_targ.parameters()):
poly_targ_p.data = rho * poly_targ_p.data + (1-rho) * poly_p.data
for q_p, q_targ_p in zip(self.q1.parameters(), self.q1_targ.parameters()):
q_targ_p.data = rho * q_targ_p.data + (1-rho) * q_p.data
for q_p, q_targ_p in zip(self.q2.parameters(), self.q2_targ.parameters()):
q_targ_p.data = rho * q_targ_p.data + (1-rho) * q_p.data
def compute_target(self, obs, pi, gamma, rewards, done):
# compute r + gamma * (1 - d) * Q(s', mu_targ(s'))
q1 = self.q1_targ(torch.cat([obs, pi], -1))
q2 = self.q2_targ(torch.cat([obs, pi], -1))
q = torch.min(q1, q2)
return (rewards + gamma * (1-done) * q.squeeze()).detach()
def q_function(self, obs, detach=True, action=None):
# compute Q(s, a) or Q(s, mu(s))
if action is None:
pi = self.act_limit * self.policy(obs)
else:
pi = action
if detach:
pi = pi.detach()
return self.q1(torch.cat([obs, pi], -1)).squeeze(), self.q2(torch.cat([obs, pi], -1)).squeeze()
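# --- Hypothetical usage sketch (not part of the original module); the
# dimensions below (obs_dim=3, act_dim=1, as in Pendulum-v0) are assumptions.
if __name__ == '__main__':
    ac = actor_critic(act_dim=1, obs_dim=3)
    obs = torch.randn(4, 3)
    act = ac.get_action(obs, noise_scale=0.1)       # noisy exploration actions
    q1, q2 = ac.q_function(obs, action=act.reshape(4, 1))
    ac.update_target(rho=0.995)                     # one polyak averaging step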
| 2.515625
| 3
|
vanilla/core.py
|
byrgazov/vanilla
| 0
|
12775323
|
<reponame>byrgazov/vanilla
import sys
import collections
import collections.abc
import functools
import importlib
import logging
import signal
import heapq
import time
from greenlet import getcurrent
from greenlet import greenlet
import vanilla.exception
import vanilla.message
import vanilla.poll
log = logging.getLogger(__name__)
class lazy:
def __init__(self, f):
self.f = f
def __get__(self, ob, type_=None):
value = self.f(ob)
setattr(ob, self.f.__name__, value)
return value
class Scheduler:
Item = collections.namedtuple('Item', ['due', 'action', 'args'])
def __init__(self):
self.count = 0
self.queue = []
self.removed = {}
def add(self, delay, action, *args):
due = time.time() + (delay / 1000.0)
item = self.Item(due, action, args)
heapq.heappush(self.queue, item)
self.count += 1
return item
def __len__(self):
return self.count
def remove(self, item):
self.removed[item] = True
self.count -= 1
def prune(self):
while True:
if self.queue[0] not in self.removed:
break
item = heapq.heappop(self.queue)
del self.removed[item]
def timeout(self):
self.prune()
return self.queue[0].due - time.time()
def pop(self):
self.prune()
item = heapq.heappop(self.queue)
self.count -= 1
return item.action, item.args
class Hub:
"""
    A Vanilla Hub is a handle to a self-contained world of interwoven
    coroutines. It includes an event loop which is responsible for scheduling
    which green thread should have context. Unlike most asynchronous libraries
    this Hub is explicit and must be passed to coroutines that need to interact
    with it. This is particularly nice for testing, as it makes it clear what's
    going on, and other tests can't inadvertently affect each other.
    """
def __init__(self):
self.log = logging.getLogger('%s.%s' % (__name__, self.__class__))
self.ready = collections.deque()
self.scheduled = Scheduler()
self.stopped = self.state()
self.registered = {}
self.poll = vanilla.poll.Poll()
self.loop = greenlet(self.main)
def __getattr__(self, name):
# facilitates dynamic plugin look up
try:
package = '.'.join(__name__.split('.')[:-1])
module = importlib.import_module('.'+name, package=package)
plugin = module.__plugin__(self)
setattr(self, name, plugin)
return plugin
except Exception as e:
log.exception(e)
raise AttributeError(
"'Hub' object has no attribute '{name}'\n"
"You may be trying to use a plugin named vanilla.{name}. "
"If you are, you still need to install it".format(
name=name))
def pipe(self):
"""
Returns a `Pipe`_ `Pair`_.
"""
return vanilla.message.Pipe(self)
def producer(self, f):
"""
Convenience to create a `Pipe`_. *f* is a callable that takes the
`Sender`_ end of this Pipe and the corresponding `Recver`_ is
returned::
def counter(sender):
i = 0
while True:
i += 1
sender.send(i)
recver = h.producer(counter)
recver.recv() # returns 1
recver.recv() # returns 2
"""
sender, recver = self.pipe()
self.spawn(f, sender)
return recver
def consumer(self, f):
# TODO: this isn't symmetric with producer. need to rethink
# TODO: don't form a closure
# TODO: test
sender, recver = self.pipe()
@self.spawn
def _():
for item in recver:
f(item)
return sender
def pulse(self, ms, item=True):
"""
Convenience to create a `Pipe`_ that will have *item* sent on it every
*ms* milliseconds. The `Recver`_ end of the Pipe is returned.
Note that since sends to a Pipe block until the Recver is ready, the
pulses will be throttled if the Recver is unable to keep up::
recver = h.pulse(500)
for _ in recver:
log.info('hello') # logs 'hello' every half a second
"""
@self.producer
def _(sender):
while True:
try:
self.sleep(ms)
except vanilla.exception.Halt:
break
sender.send(item)
sender.close()
return _
def trigger(self, f):
def consume(recver, f):
for item in recver:
f()
sender, recver = self.pipe()
self.spawn(consume, recver, f)
sender.trigger = functools.partial(sender.send, True)
return sender
def dealer(self):
"""
Returns a `Dealer`_ `Pair`_.
"""
return vanilla.message.Dealer(self)
def router(self):
"""
Returns a `Router`_ `Pair`_.
"""
return vanilla.message.Router(self)
def queue(self, size):
"""
Returns a `Queue`_ `Pair`_.
"""
return vanilla.message.Queue(self, size)
def channel(self, size=-1):
"""
::
send --\ +---------+ /--> recv
+-> | Channel | -+
send --/ +---------+ \--> recv
A Channel can have many senders and many recvers. By default it is
unbuffered, but you can create buffered Channels by specifying a size.
        They're structurally equivalent to channels in Go. The implementation
        is *literally* a `Router`_ piped to a `Dealer`_, with an optional
`Queue`_ in between.
"""
sender, recver = self.router()
if size > 0:
recver = recver.pipe(self.queue(size))
return vanilla.message.Pair(sender, recver.pipe(self.dealer()))
def serialize(self, func):
"""
Decorator to serialize access to a callable *f*
"""
router = self.router()
exc_marker = object()
@self.spawn
def _():
for args, kwargs, pipe in router.recver:
try:
result = func(*args, **kwargs)
                    if isinstance(result, collections.abc.Iterator):
raise TypeError(type(result))
except Exception:
# @todo: (?) Failure (call_stack + tb_frames)
result = (exc_marker,) + sys.exc_info()
pipe.send(result)
def send(*args, **kwargs):
# @todo: early params check
pipe = self.pipe()
router.send((args, kwargs, pipe))
result = pipe.recv()
if type(result) is tuple and len(result) == 4 and result[0] is exc_marker:
etype, evalue, tb = result[1:]
raise evalue.with_traceback(tb)
return result
return send
def broadcast(self):
return vanilla.message.Broadcast(self)
def state(self, state=vanilla.message.NoState):
"""
Returns a `State`_ `Pair`_.
*state* if supplied sets the intial state.
"""
return vanilla.message.State(self, state=state)
def value(self):
return vanilla.message.Value(self)
def select(self, ends, timeout=-1):
"""
An end is either a `Sender`_ or a `Recver`_. select takes a list of
*ends* and blocks until *one* of them is ready. The select will block
either forever, or until the optional *timeout* is reached. *timeout*
is in milliseconds.
        It returns a tuple of (*end*, *value*) where *end* is the end that has
        become ready. If the *end* is a `Recver`_, then it will have already
        been *recv*'d on, and the received item is available as *value*. For a
        `Sender`_, however, the sender is still in a ready state waiting for a
        *send* and *value* is None.
        For example, the following is an appliance that takes an upstream
        `Recver`_ and a downstream `Sender`_. Sending to its upstream will
        alter its current state. This state can be read at any time by
        receiving on its downstream::
def state(h, upstream, downstream):
current = None
while True:
end, value = h.select([upstream, downstream])
if end == upstream:
current = value
elif end == downstream:
end.send(current)
"""
        for end in ends:
            if end.ready:
                # a conditional expression avoids the `and/or` idiom, which
                # would turn falsy received values (0, '') into None
                return end, (end.recv() if isinstance(end, vanilla.message.Recver) else None)
for end in ends:
end.select()
try:
fired, item = self.pause(timeout=timeout)
finally:
for end in ends:
end.unselect()
return fired, item
def pause(self, timeout=-1):
if timeout > -1:
item = self.scheduled.add(
timeout,
getcurrent(),
vanilla.exception.Timeout('timeout: %s' % timeout))
assert getcurrent() != self.loop, "cannot pause the main loop"
resume = None
try:
resume = self.loop.switch()
finally:
if timeout > -1:
if isinstance(resume, vanilla.exception.Timeout):
raise resume
# since we didn't timeout, remove ourselves from scheduled
self.scheduled.remove(item)
# TODO: rework State's is set test to be more natural
if self.stopped.recver.ready:
raise vanilla.exception.Stop('Hub stopped while we were paused. There must be a deadlock.')
return resume
def switch_to(self, target, *a):
self.ready.append((getcurrent(), ()))
return target.switch(*a)
def throw_to(self, target, *a):
self.ready.append((getcurrent(), ()))
"""
if len(a) == 1 and isinstance(a[0], preserve_exception):
return target.throw(a[0].typ, a[0].val, a[0].tb)
"""
return target.throw(*a)
def spawn(self, f, *a):
"""
Schedules a new green thread to be created to run *f(\*a)* on the next
available tick::
def echo(pipe, s):
pipe.send(s)
p = h.pipe()
h.spawn(echo, p, 'hi')
p.recv() # returns 'hi'
"""
self.ready.append((f, a))
def spawn_later(self, ms, f, *a):
"""
Spawns a callable on a new green thread, scheduled for *ms*
milliseconds in the future::
def echo(pipe, s):
pipe.send(s)
p = h.pipe()
h.spawn_later(50, echo, p, 'hi')
p.recv() # returns 'hi' after 50ms
"""
self.scheduled.add(ms, f, *a)
def sleep(self, ms=1):
"""
Pauses the current green thread for *ms* milliseconds::
p = h.pipe()
@h.spawn
def _():
p.send('1')
h.sleep(50)
p.send('2')
p.recv() # returns '1'
p.recv() # returns '2' after 50 ms
"""
self.scheduled.add(ms, getcurrent())
self.loop.switch()
def register(self, fd, *masks):
ret = []
self.registered[fd] = {}
for mask in masks:
sender, recver = self.pipe()
self.registered[fd][mask] = sender
ret.append(recver)
self.poll.register(fd, *masks)
if len(ret) == 1:
return ret[0]
return ret
def unregister(self, fd):
if fd in self.registered:
masks = self.registered.pop(fd)
            try:
                self.poll.unregister(fd, *(masks.keys()))
            except Exception:
                # the fd may already be gone; unregistering is best-effort
                pass
for mask in masks:
masks[mask].close()
def stop(self):
self.sleep(1)
for fd, masks in list(self.registered.items()):
for mask, sender in masks.items():
sender.stop()
while self.scheduled:
task, a = self.scheduled.pop()
self.throw_to(task, vanilla.exception.Stop('stop'))
try:
self.stopped.recv()
except vanilla.exception.Halt:
return
def stop_on_term(self):
self.signal.subscribe(signal.SIGINT, signal.SIGTERM).recv()
self.stop()
def run_task(self, task, *a):
try:
if isinstance(task, greenlet):
task.switch(*a)
else:
greenlet(task).switch(*a)
        except Exception as e:
            self.log.warning('Exception leaked back to main loop', exc_info=e)
def dispatch_events(self, events):
for fd, mask in events:
if fd in self.registered:
masks = self.registered[fd]
if mask == vanilla.poll.POLLERR:
for sender in masks.values():
sender.close()
elif masks[mask].ready:
masks[mask].send(True)
def main(self):
"""
Scheduler steps:
- run ready until exhaustion
- if there's something scheduled
- run overdue scheduled immediately
- or if there's nothing registered, sleep until next scheduled
and then go back to ready
          - if there's nothing registered and nothing scheduled, we've
            deadlocked, so stop
- poll on registered, with timeout of next scheduled, if something
is scheduled
"""
while True:
while self.ready:
task, a = self.ready.popleft()
self.run_task(task, *a)
if self.scheduled:
timeout = self.scheduled.timeout()
# run overdue scheduled immediately
if timeout < 0:
task, a = self.scheduled.pop()
self.run_task(task, *a)
continue
# if nothing registered, just sleep until next scheduled
if not self.registered:
time.sleep(timeout)
task, a = self.scheduled.pop()
self.run_task(task, *a)
continue
else:
timeout = -1
# TODO: add better handling for deadlock
if not self.registered:
self.stopped.send(True)
return
# run poll
events = None
try:
events = self.poll.poll(timeout=timeout)
# IOError from a signal interrupt
except IOError:
pass
if events:
self.spawn(self.dispatch_events, events)
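# --- Hypothetical usage sketch (not part of the original module); mirrors the
# pipe/spawn pattern the docstrings describe, and assumes greenlet and the
# vanilla submodules are importable.
if __name__ == '__main__':
    h = Hub()
    sender, recver = h.pipe()
    h.spawn(sender.send, 'hi')
    print(recver.recv())  # prints 'hi'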
| 2.484375
| 2
|
deeplearning-holography/deeplearning-cgh/config.py
|
longqianh/3d-holography
| 4
|
12775324
|
<reponame>longqianh/3d-holography
import warnings


class DefaultConfig(object):
    env = 'default'  # visdom environment
    model = 'SimpleCGH'  # model to use; must match a name in models/__init__.py
    train_data_root = './data/training_set/'  # path to the training set
    # test_data_root = './data/test1'  # path to the test set
    load_model_path = None  # e.g. 'checkpoints/model.pth'; None means do not load a pretrained model
    batch_size = 10  # batch size
    use_gpu = False  # use GPU or not
    num_workers = 4  # how many workers for loading data
    print_freq = 20  # print info every N batch
    # debug_file = '/tmp/debug'  # if os.path.exists(debug_file): enter ipdb
    # result_file = 'result.csv'
    max_epoch = 10
    lr = 0.1  # initial learning rate
    lr_decay = 0.95  # when val_loss increases, lr = lr * lr_decay
    weight_decay = 1e-4  # weight decay (L2 regularization)
    def parse(self, kwargs):
        '''
        Update config attributes from the dict kwargs.
        '''
        # update configuration parameters
        for k, v in kwargs.items():
            if not hasattr(self, k):
                # warn (rather than raise) on unknown options
                warnings.warn("Warning: opt has no attribute %s" % k)
            setattr(self, k, v)
        # print the configuration
        print('user config:')
        for k, v in self.__class__.__dict__.items():
            if not k.startswith('__'):
                print(k, getattr(self, k))
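# --- Hypothetical usage sketch (not part of the original file): override a
# couple of options and print the resulting config.
if __name__ == '__main__':
    opt = DefaultConfig()
    opt.parse({'lr': 0.01, 'num_workers': 2})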
| 2.125
| 2
|
ddpg/pendulum_ddpg.py
|
Jash-2000/reinforcement_learning
| 97
|
12775325
|
# DDPG Pendulum-v0 example
# ---
# @author <NAME>
# @email luyiren [at] seas [dot] upenn [dot] edu
#
# MIT License
import tensorflow as tf
import numpy as np
import argparse
from ddpg import DDPG
from actor import ActorNetwork
from critic import CriticNetwork
from exp_replay import ExpReplay
from exp_replay import Step
from ou import OUProcess
import matplotlib.pyplot as plt
import sys
import gym
from gym import wrappers
parser = argparse.ArgumentParser(description=None)
parser.add_argument('-d', '--device', default='cpu', type=str, help='choose device: cpu/gpu')
parser.add_argument('-e', '--episodes', default=300, type=int, help='number of episodes')
parser.add_argument('-l', '--log_dir', default='/tmp/pendulum-log-0', type=str, help='log directory')
args = parser.parse_args()
print(args)
DEVICE = args.device
NUM_EPISODES = args.episodes
LOG_DIR=args.log_dir
ACTOR_LEARNING_RATE = 0.0001
CRITIC_LEARNING_RATE = 0.001
GAMMA = 0.99
TAU = 0.001
MEM_SIZE = 1000000
STATE_SIZE = 3
ACTION_SIZE = 1
BATCH_SIZE = 64
MAX_STEPS = 200
FAIL_PENALTY = 0
ACTION_RANGE = 1
EVALUATE_EVERY = 10
def summarize(cum_reward, i, summary_writer):
summary = tf.Summary()
summary.value.add(tag="cumulative reward", simple_value=cum_reward)
summary_writer.add_summary(summary, i)
summary_writer.flush()
def train(agent, env, sess):
  for i in range(NUM_EPISODES):
    cur_state = env.reset()
    cum_reward = 0
    # tensorboard summary
    summary_writer = tf.summary.FileWriter(LOG_DIR+'/train', graph=tf.get_default_graph())
    if (i % EVALUATE_EVERY) == 0:
      print('====evaluation====')
    for t in range(MAX_STEPS):
      if (i % EVALUATE_EVERY) == 0:
        env.render()
        action = agent.get_action(cur_state, sess)[0]
      else:
        # decaying noise (true division gives the intended decaying rate)
        action = agent.get_action_noise(cur_state, sess, rate=(NUM_EPISODES-i)/NUM_EPISODES)[0]
      next_state, reward, done, info = env.step(action)
      if done:
        cum_reward += reward
        agent.add_step(Step(cur_step=cur_state, action=action, next_step=next_state, reward=reward, done=done))
        print("Episode {} finished after {} timesteps, cum_reward: {}".format(i, t + 1, cum_reward))
        summarize(cum_reward, i, summary_writer)
        break
      cum_reward += reward
      agent.add_step(Step(cur_step=cur_state, action=action, next_step=next_state, reward=reward, done=done))
      cur_state = next_state
      if t == MAX_STEPS - 1:
        print("Episode {} finished after {} timesteps, cum_reward: {}".format(i, t + 1, cum_reward))
        print(action)
        summarize(cum_reward, i, summary_writer)
    agent.learn_batch(sess)
env = gym.make('Pendulum-v0')
# env = wrappers.Monitor(env, '/tmp/pendulum-experiment-0', force=True)
actor = ActorNetwork(state_size=STATE_SIZE, action_size=ACTION_SIZE, lr=ACTOR_LEARNING_RATE, tau=TAU)
critic = CriticNetwork(state_size=STATE_SIZE, action_size=ACTION_SIZE, lr=CRITIC_LEARNING_RATE, tau=TAU)
noise = OUProcess(ACTION_SIZE)
exprep = ExpReplay(mem_size=MEM_SIZE, start_mem=10000, state_size=[STATE_SIZE], kth=-1, batch_size=BATCH_SIZE)
sess = tf.Session()
with tf.device('/{}:0'.format(DEVICE)):
  agent = DDPG(actor=actor, critic=critic, exprep=exprep, noise=noise, action_bound=env.action_space.high)
# initialize_all_variables() is long deprecated; use the TF1 replacement
sess.run(tf.global_variables_initializer())
train(agent, env, sess)
| 2.3125
| 2
|
nicenquickplotlib/__init__.py
|
SengerM/nicenquickplotlib
| 2
|
12775326
|
<filename>nicenquickplotlib/__init__.py
name = "nicenquickplotlib"
from .nq_user_functions import *
| 1.023438
| 1
|
output/output.py
|
kusuwada/libcollector
| 3
|
12775327
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from abc import ABCMeta, abstractmethod
class Output(metaclass=ABCMeta):
    """Abstract base for output writers (py3 metaclass syntax; the py2-style
    __metaclass__ attribute has no effect under the python3 shebang above)."""

    def __init__(self, output, data=None, path=None):
        self.output = output
        self.data = data
        self.path = path

    @abstractmethod
    def write(self):
        pass
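# --- Hypothetical concrete subclass (an assumption, not part of the original):
# illustrates implementing the abstract write() hook.
class FileOutput(Output):
    def write(self):
        # write the collected data to self.path as text
        with open(self.path, 'w') as f:
            f.write(str(self.data))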
| 3.59375
| 4
|
src/ode_sys.py
|
BardiaMojra/dip
| 0
|
12775328
|
''' control systems - ode simulation
@link https://www.youtube.com/watch?v=yp5x8RMNi7o
'''
import numpy as np
from scipy.integrate import odeint
from matplotlib import pyplot as plt
def sys_ode(x, t):
# set system constants
c = 4 # damping constant
k = 2 # spring stiffness constant
m = 20 # point-mass
F = 5 # input force into the system
# compute state first derivative
dx1 = x[1]
dx2 = (F - c*x[1] - k*x[0])/m
return [dx1, dx2]
def sim():
# set constants
t_0 = 0
t_f = 60
period = 0.1
# set state initial condition
x_init = [0, 0]
# set a discrete time stamp
t = np.arange(t_0, t_f, period)
x = odeint(sys_ode, x_init, t)
x1 = x[:,0]
x2 = x[:,1]
plt.plot(t,x1)
plt.plot(t,x2)
plt.title('Mass-Spring-Damper System')
plt.xlabel('t')
plt.ylabel('x(t)')
plt.legend(['x1', 'x2'])
plt.grid()
plt.show()
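# Run the simulation when executed as a script (an addition; the original
# module defines sim() but never calls it).
if __name__ == '__main__':
    sim()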
| 3.328125
| 3
|
setup.py
|
cope-systems/bottle-cgi-server
| 0
|
12775329
|
<filename>setup.py
#!/usr/bin/env python
import os
from setuptools import setup
def read_reqs(fname):
reqs = []
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
for line in f.readlines():
cleaned = line.split("#")[0].strip()
if cleaned:
reqs.append(cleaned)
return reqs
REQUIREMENTS = read_reqs("requirements.txt")
DEV_REQUIREMENTS = read_reqs("dev-requirements.txt")
VERSION = '0.1.0'
setup(
name='bottle-cgi-server',
version=VERSION,
url='https://github.com/cope-systems/bottle-cgi-server',
download_url='https://github.com/cope-systems/bottle-cgi-server/archive/v{}.tar.gz'.format(VERSION),
description='CGI Server Plugin for Bottle',
long_description=None,
author='<NAME>',
author_email='<EMAIL>',
license='Apache 2.0',
platforms='any',
packages=["bottle_cgi_server"],
install_requires=REQUIREMENTS,
tests_require=DEV_REQUIREMENTS,
classifiers=[
'Environment :: Web Environment',
'Environment :: Plugins',
'Framework :: Bottle',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Software Development :: Libraries :: Python Modules'
],
include_package_data=True
)
| 2.078125
| 2
|
tests/integration/test_hooks.py
|
mobidevke/py-fineract
| 7
|
12775330
|
import random
from fineract.objects.hook import Hook
number = random.randint(0, 10000)
def test_create_hook(fineract):
events = [
{
'actionName': 'DISBURSE',
'entityName': 'LOAN'
},
{
'actionName': 'REPAYMENT',
'entityName': 'LOAN'
}
]
hook = Hook.create_web_hook(fineract.request_handler, 'Test ' + str(number),
'https://localhost:8443', events)
assert isinstance(hook, Hook)
def test_get_all_hooks(fineract):
hooks = [hook for hook in fineract.get_hooks()]
assert len(hooks) >= 1
def test_get_single_hook(fineract):
hooks = [hook for hook in fineract.get_hooks()]
assert fineract.get_hooks(hooks[0].id)
def test_hook_templates(fineract):
assert Hook.template(fineract.request_handler)
def test_hook_exists(fineract):
assert Hook.exists(fineract.request_handler, 'Test ' + str(number))
def test_get_hook_by_name(fineract):
assert Hook.get_by_name(fineract.request_handler, 'Test ' + str(number))
def test_get_hook_by_id(fineract):
hook = Hook.get_by_name(fineract.request_handler, 'Test ' + str(number))
assert Hook.get(fineract.request_handler, hook.id)
def test_hook_update(fineract):
events = [
{
'actionName': 'DISBURSE',
'entityName': 'LOAN'
}
]
hook = Hook.get_by_name(fineract.request_handler, 'Test ' + str(number))
hook = hook.update('https://localhost:8443', events)
assert len(hook.events) == 1
| 2.078125
| 2
|
ApkParse.py
|
jiania/android-apk-parser
| 4
|
12775331
|
<reponame>jiania/android-apk-parser
#coding=utf-8
'''
Created on 2015-05-18
@author: hzwangzhiwei
'''
import os
import re
import zipfile
class ApkParse(object):
    '''
    DEMO
    parse = ApkParse(filename, aapt_path)
    parse = ApkParse(u'C:\\Users\\hzwangzhiwei\\Desktop\\mgapp.apk', 'D:/adt_20140321/sdk/build-tools/android-4.4.2/')
    print(parse.name())
    print(parse.package())
    print(parse.version())
    print(parse.icon_path())
    print(parse.mv_icon_to('test_android.png'))
    '''
aapt_path = ''
aapt_content = None
apk_file_path = None
app_version = None
app_package = None
app_name = None
app_icon = None
def __init__(self, apk_file_path, aapt_path = ''):
'''
Constructor
'''
self.aapt_path = aapt_path
self.apk_file_path = apk_file_path
    def _get_aapt_content(self):
        self.aapt_content = os.popen(self.aapt_path + "aapt d badging " + self.apk_file_path).read()
        if self.aapt_content is None or self.aapt_content == '':
            self.aapt_content = ''
        print(self.aapt_content)
        return True
def _try_to_parse(self):
pack_version_success = False
name_icon_success = False
apk_pack_ver_reg = 'package\: name=\'(.*)\' (.*) versionName\=\'(.*)\''
re_pat = re.compile(apk_pack_ver_reg)
search_ret = re_pat.search(self.aapt_content)
if search_ret:
g = search_ret.groups()
if g and len(g) == 3:
self.app_package = g[0]
self.app_version = g[2]
pack_version_success = True
apk_name_icon_reg = 'application\: label=\'(.*)\' icon=\'(.*)\''
re_pat = re.compile(apk_name_icon_reg)
search_ret = re_pat.search(self.aapt_content)
if search_ret:
g = search_ret.groups()
if g and len(g) == 2:
self.app_name = g[0]
self.app_icon = g[1]
name_icon_success = True
return name_icon_success and pack_version_success
    def _check(self):
        if self.aapt_content is None:
            self._get_aapt_content()
        if self.app_name is not None and self.app_package is not None and self.app_version is not None and self.app_icon is not None:
            return
        self._try_to_parse()
def name(self):
self._check()
return self.app_name
def package(self):
self._check()
return self.app_package
def version(self):
self._check()
return self.app_version
def icon_path(self):
self._check()
return self.app_icon
def mv_icon_to(self, file_name):
icon_path = self.icon_path()
if icon_path:
zfile = zipfile.ZipFile(self.apk_file_path)
icon_file = open(file_name, "wb")
icon_file.write(zfile.read(icon_path))
icon_file.close()
zfile.close()
return True
return False
if __name__ == '__main__':
    parse = ApkParse(u'C:\\Users\\hzwangzhiwei\\Desktop\\mgapp.apk', 'D:/adt_20140321/sdk/build-tools/android-4.4.2/')
    print(parse.name())
    print(parse.package())
    print(parse.version())
    print(parse.icon_path())
    print(parse.mv_icon_to('test_android.png'))
| 2.28125
| 2
|
robot/robot_scenarios/collaborative_task3.py
|
mauricemager/multiagent_robot
| 0
|
12775332
|
<filename>robot/robot_scenarios/collaborative_task3.py
import numpy as np
from robot.robot_scenarios.collaborative_tasks import CollScenario
# np.random.seed(7)
class Scenario(CollScenario):
def reset_world(self, world):
"""Overwrite collaborative scenario reset method and add task specific initial states"""
super().reset_world(world)
# add task specific initial states
world.agents[1].state.angles[0] = np.pi * np.random.rand(1) + np.pi/2
world.objects[0].state.p_pos = world.agents[1].position_end_effector()
world.agents[0].state.angles[0] = 2 * np.random.rand(1) - 1
world.agents[0].state.angles[1] = (2 * np.random.rand(1) - 1 ) * np.pi/2
def reward(self, agent, world):
"""Overwrite collaborative reward function to learn task3 objective"""
# reward based on left agent's (a_0) distance to object
reward = np.linalg.norm(world.objects[0].state.p_pos - world.agents[0].get_joint_pos(world.num_joints)) + 0.5
# give positive reward when left agent has object (termination state)
if world.objects[0].state.who_grabbed == 'agent 0':
reward = 0.
# return negative reward
return -reward
| 2.90625
| 3
|
src/projects/models.py
|
bluesnailstw/flamingos
| 0
|
12775333
|
from django.db import models
from django.contrib.postgres.fields import JSONField, ArrayField
from users.models import User
from django.contrib.auth.models import Group
from django.conf import settings
from asset.models import Host, HostGroup
class Line(models.Model):
name = models.CharField(max_length=255, unique=True, verbose_name=u"产品线")
date_created = models.DateTimeField(verbose_name=u'创建时间', auto_now_add=True)
date_updated = models.DateTimeField(verbose_name=u'更新时间', auto_now=True)
def __str__(self):
return self.name
class Project(models.Model):
name = models.CharField(max_length=255, null=True)
user = models.ManyToManyField(User)
user_group = models.ForeignKey(Group, null=True, on_delete=models.SET_NULL)
host_group = models.ForeignKey(HostGroup, null=True, on_delete=models.SET_NULL)
sls = models.FilePathField(path=settings.SALT_STATE_DIRECTORY,
allow_files=False, allow_folders=True, recursive=True)
description = models.TextField(null=True)
tags = ArrayField(models.CharField(max_length=255), default=list)
status = models.IntegerField(null=True)
line = models.ForeignKey(Line, null=True, related_name=u"business", verbose_name=u"产品线",
on_delete=models.SET_NULL)
date_created = models.DateTimeField(verbose_name=u'创建时间', auto_now_add=True)
date_updated = models.DateTimeField(verbose_name=u'更新时间', auto_now=True)
def __str__(self):
return self.name
| 2
| 2
|
w11/gauss.py
|
cagriulas/algorithm-analysis-17
| 0
|
12775334
|
import numpy as np

# Solve A x = b by back substitution on the upper-triangular part of A.
a_matris = [[2, 0, 0],
            [0, 2, 0],
            [0, 0, 2]]
b_matris = [2, 4, 9]
u_a_matris = np.triu(a_matris)  # upper-triangular form of A
# back substitution, bottom row first
x3 = float(b_matris[2])/u_a_matris[2][2]
x2 = float(b_matris[1] - x3*u_a_matris[1][2])/u_a_matris[1][1]
x1 = float(b_matris[0] - x2*u_a_matris[0][1] - x3*u_a_matris[0][2])/u_a_matris[0][0]
print(x1, x2, x3)
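# Hypothetical check (not in the original): the hand-written back substitution
# should agree with NumPy's general solver.
assert np.allclose([x1, x2, x3], np.linalg.solve(u_a_matris, b_matris))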
| 3.140625
| 3
|
eastlake/des_piff.py
|
des-science/eastlake
| 1
|
12775335
|
import os
import logging
import galsim
import galsim.config
import piff
import numpy as np
import ngmix
if ngmix.__version__[0:2] == "v1":
NGMIX_V2 = False
from ngmix.fitting import LMSimple
from ngmix.admom import Admom
else:
NGMIX_V2 = True
from ngmix.fitting import Fitter
from ngmix.admom import AdmomFitter
from scipy.interpolate import CloughTocher2DInterpolator
logger = logging.getLogger(__name__)
# pixel scale used for fitting the Piff models
PIFF_SCALE = 0.25
class DES_Piff(object):
"""A wrapper for Piff to use with Galsim.
This wrapper uses ngmix to fit smooth models to the Piff PSF images. The
parameters of these models are then interpolated across the SE image
and used to generate a smooth approximation to the PSF.
Parameters
----------
file_name : str
The file with the Piff psf solution.
smooth : bool, optional
If True, then smooth the Piff PSFs. Default of False.
"""
_req_params = {'file_name': str}
_opt_params = {}
_single_params = []
_takes_rng = False
def __init__(self, file_name, smooth=False):
self.file_name = file_name
# Read the Piff file. This may fail if the Piff
# file is missing. We catch this and continue
# since if we're substituting in some different
# PSF model for rejectlisted piff files, we'll
# never actually use self._piff
try:
self._piff = piff.read(
os.path.expanduser(os.path.expandvars(file_name)))
except IOError:
print("failed to load Piff file, hopefully it's rejectlisted...")
self._piff = None
self._did_fit = False
self.smooth = smooth
def _fit_smooth_model(self):
dxy = 256
ny = 4096 // dxy + 1
nx = 2048 // dxy + 1
xloc = np.empty((ny, nx), dtype=np.float64)
yloc = np.empty((ny, nx), dtype=np.float64)
pars = np.empty((ny, nx, 3), dtype=np.float64)
for yi, yl in enumerate(np.linspace(1, 4096, ny)):
for xi, xl in enumerate(np.linspace(1, 2048, nx)):
rng = np.random.RandomState(seed=yi + nx * xi)
xloc[yi, xi] = xl
yloc[yi, xi] = yl
pos = galsim.PositionD(x=xl, y=yl)
gs_img = self._draw(pos).drawImage(
nx=19, ny=19, scale=PIFF_SCALE, method='sb')
img = gs_img.array
nse = np.std(
np.concatenate([img[0, :], img[-1, :]]))
obs = ngmix.Observation(
image=img,
weight=np.ones_like(img)/nse**2,
jacobian=ngmix.jacobian.DiagonalJacobian(
x=9, y=9, scale=PIFF_SCALE))
_g1 = np.nan
_g2 = np.nan
_T = np.nan
# there are some nutty PSFs
if gs_img.calculateFWHM() > 0.5:
for _ in range(5):
try:
if NGMIX_V2:
am = AdmomFitter(rng=rng)
res = am.go(obs, 0.3)
if res['flags'] != 0:
continue
lm = Fitter(model='turb')
lm_res = lm.go(obs, res['pars'])
if lm_res['flags'] == 0:
_g1 = lm_res['pars'][2]
_g2 = lm_res['pars'][3]
_T = lm_res['pars'][4]
break
else:
am = Admom(obs, rng=rng)
am.go(0.3)
res = am.get_result()
if res['flags'] != 0:
continue
lm = LMSimple(obs, 'turb')
lm.go(res['pars'])
lm_res = lm.get_result()
if lm_res['flags'] == 0:
_g1 = lm_res['pars'][2]
_g2 = lm_res['pars'][3]
_T = lm_res['pars'][4]
break
except ngmix.gexceptions.GMixRangeError:
pass
try:
irr, irc, icc = ngmix.moments.g2mom(_g1, _g2, _T)
# this is a fudge factor that gets the overall PSF FWHM
# correct
# the naive correction for the pixel size is
# a bit too small
pixel_var = PIFF_SCALE * PIFF_SCALE / 12 * 1.73
irr -= pixel_var
icc -= pixel_var
_g1, _g2, _T = ngmix.moments.mom2g(irr, irc, icc)
except Exception:
_g1 = np.nan
_g2 = np.nan
_T = np.nan
pars[yi, xi, 0] = _g1
pars[yi, xi, 1] = _g2
pars[yi, xi, 2] = _T
xloc = xloc.ravel()
yloc = yloc.ravel()
pos = np.stack([xloc, yloc], axis=1)
assert pos.shape == (xloc.shape[0], 2)
        # make interps; require at least 10 finite fits per parameter
        # (np.sum(msk) counts the finite entries; len(msk) is always the full
        # grid size, so the original check could never trigger)
        g1 = pars[:, :, 0].ravel()
        msk = np.isfinite(g1)
        if np.sum(msk) < 10:
            raise ValueError('DES Piff fitting failed too much!')
        if np.any(~msk):
            g1[~msk] = np.mean(g1[msk])
        self._g1int = CloughTocher2DInterpolator(
            pos, g1, fill_value=np.mean(g1[msk]))
        g2 = pars[:, :, 1].ravel()
        msk = np.isfinite(g2)
        if np.sum(msk) < 10:
            raise ValueError('DES Piff fitting failed too much!')
        if np.any(~msk):
            g2[~msk] = np.mean(g2[msk])
        self._g2int = CloughTocher2DInterpolator(
            pos, g2, fill_value=np.mean(g2[msk]))
        T = pars[:, :, 2].ravel()
        msk = np.isfinite(T)
        if np.sum(msk) < 10:
            raise ValueError('DES Piff fitting failed too much!')
        if np.any(~msk):
            T[~msk] = np.mean(T[msk])
        self._Tint = CloughTocher2DInterpolator(
            pos, T, fill_value=np.mean(T[msk]))
self._did_fit = True
def _draw(self, image_pos, wcs=None, n_pix=None,
x_interpolant='lanczos15', gsparams=None):
"""Get an image of the PSF at the given location.
Parameters
----------
image_pos : galsim.Position
The image position for the PSF.
wcs : galsim.BaseWCS or subclass, optional
The WCS to use to draw the PSF.
n_pix : int, optional
The image size to use when drawing without smoothing. Defaults to
53 pixels if not given
x_interpolant : str, optional
The interpolant to use.
        gsparams : galsim.GSParams, optional
            Optional galsim configuration data to pass along.
Returns
-------
psf : galsim.InterpolatedImage
The PSF at the image position.
"""
if wcs is not None:
if n_pix is not None:
n_pix = n_pix
else:
n_pix = 53
pixel_wcs = wcs.local(image_pos)
else:
n_pix = 19
pixel_wcs = galsim.PixelScale(PIFF_SCALE)
# nice and big image size here cause this has been a problem
image = galsim.ImageD(ncol=n_pix, nrow=n_pix, wcs=pixel_wcs)
psf = self.getPiff().draw(
image_pos.x,
image_pos.y,
image=image,
center=True,
)
psf = galsim.InterpolatedImage(
galsim.ImageD(psf.array), # make sure galsim is not keeping state
wcs=pixel_wcs,
gsparams=gsparams,
x_interpolant=x_interpolant
).withFlux(
1.0
)
return psf
def getPiff(self):
return self._piff
def getPSF(
self, image_pos, wcs=None,
smooth=False, n_pix=None, **kwargs
):
"""Get an image of the PSF at the given location.
Parameters
----------
image_pos : galsim.Position
The image position for the PSF.
wcs : galsim.BaseWCS or subclass, optional
The WCS to use to draw the PSF. Currently used only when smoothing
is turned off.
smooth : bool, optional
If True, then smooth the Piff PSFs. Default of False.
n_pix : int, optional
The image size to use when drawing without smoothing.
        **kwargs : extra keyword arguments
            These are all ignored.
Returns
-------
psf : galsim.GSObject
The PSF at the image position.
"""
if smooth or self.smooth:
if not self._did_fit:
self._fit_smooth_model()
arr = np.array([
np.clip(image_pos.x, 1, 2048),
np.clip(image_pos.y, 1, 4096)])
_g1 = self._g1int(arr)[0]
_g2 = self._g2int(arr)[0]
_T = self._Tint(arr)[0]
if np.any(np.isnan(np.array([_g1, _g2, _T]))):
logger.debug("Piff smooth fit params are NaN: %s %s %s %s", image_pos, _g1, _g2, _T)
raise RuntimeError("NaN smooth Piff params at %s!" % image_pos)
pars = np.array([0, 0, _g1, _g2, _T, 1])
obj = ngmix.gmix.make_gmix_model(pars, 'turb').make_galsim_object()
return obj.withFlux(1)
else:
return self._draw(image_pos, wcs=wcs, n_pix=n_pix)
class PiffLoader(galsim.config.InputLoader):
def getKwargs(self, config, base, logger):
req = {'file_name': str}
opt = {}
kwargs, safe = galsim.config.GetAllParams(
config, base, req=req, opt=opt)
return kwargs, safe
# add a config input section
galsim.config.RegisterInputType('des_piff', PiffLoader(DES_Piff))
# and a builder
def BuildDES_Piff(config, base, ignore, gsparams, logger):
des_piff = galsim.config.GetInputObj('des_piff', config, base, 'DES_Piff')
opt = {'flux': float,
'num': int,
'image_pos': galsim.PositionD,
'x_interpolant': str,
'smooth': bool}
params, safe = galsim.config.GetAllParams(
config, base, opt=opt, ignore=ignore)
if 'image_pos' in params:
image_pos = params['image_pos']
elif 'image_pos' in base:
image_pos = base['image_pos']
else:
raise galsim.GalSimConfigError(
"DES_Piff requested, but no image_pos defined in base.")
if 'wcs' not in base:
raise galsim.GalSimConfigError(
"DES_Piff requested, but no wcs defined in base.")
wcs = base['wcs']
if gsparams:
gsparams = galsim.GSParams(**gsparams)
else:
gsparams = None
psf = des_piff.getPSF(
image_pos,
wcs,
smooth=params.get('smooth', False),
gsparams=gsparams)
if 'flux' in params:
psf = psf.withFlux(params['flux'])
# we make sure to declare the returned object as not safe for reuse
can_be_reused = False
return psf, can_be_reused
def BuildDES_Piff_with_substitute(config, base, ignore, gsparams, logger):
# This builder usually just calls BuildDES_Piff, but can also
# be passed use_substitute = True, in which case it builds some
# other PSF. We use this for rejectlisted Piff files.
if "use_substitute" in config:
use_substitute = galsim.config.ParseValue(config, "use_substitute",
base, bool)[0]
else:
use_substitute = False
if use_substitute:
return (galsim.config.BuildGSObject(
config, "substitute_psf", base=base,
gsparams=gsparams, logger=logger))
else:
ignore += ["use_substitute", "substitute_psf"]
return BuildDES_Piff(config, base, ignore, gsparams, logger)
galsim.config.RegisterObjectType(
'DES_Piff', BuildDES_Piff_with_substitute, input_type='des_piff')
| 2.5
| 2
|
allink_core/apps/people/managers.py
|
allink/allink-core
| 5
|
12775336
|
# -*- coding: utf-8 -*-
from allink_core.core.models.managers import AllinkCategoryModelQuerySet
class AllinkPeopleQuerySet(AllinkCategoryModelQuerySet):
def title_asc(self, lang):
return self.active()\
.order_by('last_name', 'id')\
.distinct('last_name', 'id')
def title_desc(self, lang):
return self.active()\
.order_by('-last_name', 'id')\
.distinct('last_name', 'id')
def category(self):
return self.active()\
.order_by('categories__tree_id', 'categories__lft', 'last_name')\
.distinct()
AllinkPeopleManager = AllinkPeopleQuerySet.as_manager
| 2.203125
| 2
|
annotation_app/migrations/0005_sentences_sent_review_comments.py
|
meisin/annotation_project
| 0
|
12775337
|
<gh_stars>0
# Generated by Django 3.1.7 on 2021-04-07 23:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('annotation_app', '0004_auto_20210408_0728'),
]
operations = [
migrations.AddField(
model_name='sentences',
name='sent_review_comments',
field=models.TextField(help_text='Enter your review/comments on the annotation', max_length=1000, null=True),
),
]
| 1.703125
| 2
|
tests/util/common_test.py
|
adrianmo/python-ecsclient
| 0
|
12775338
|
<gh_stars>0
# Standard lib imports
import unittest
# Third party imports
# None
# Project level imports
from ecsclient.util.common import get_formatted_time_string
def suite():
test_suite = unittest.TestSuite()
test_suite.addTest(WhenTestingCommonFunctions())
return test_suite
class WhenTestingCommonFunctions(unittest.TestCase):
def setUp(self):
self.time_bucket_no_minute = '2014-11-18T00'
self.time_bucket_with_minute = '2014-11-18T00:01'
def test_should_get_properly_formatted_timestamp_no_minute(self):
self.assertEqual(self.time_bucket_no_minute,
get_formatted_time_string(2014, 11, 18, 0, None))
def test_should_get_properly_formatted_timestamp_with_minute(self):
self.assertEqual(self.time_bucket_with_minute,
get_formatted_time_string(2014, 11, 18, 0, 1))
def test_should_throw_value_error(self):
self.assertRaises(ValueError,
get_formatted_time_string, 2014, 11, 18, 'abc')
if __name__ == '__main__':
unittest.main()
| 2.640625
| 3
|
blog/admin.py
|
MysteryCoder456/Blog-App
| 3
|
12775339
|
from django.contrib import admin
from .models import *
admin.site.register(BlogList)
admin.site.register(Blog)
admin.site.register(Comment)
| 1.289063
| 1
|
backend/tests.py
|
Marcuse7/openschufa
| 46
|
12775340
|
import json
from io import BytesIO
def test_ping(app):
client = app.test_client()
resp = client.get('/ping')
data = json.loads(resp.data.decode())
assert resp.status_code == 200
assert 'records' in data['message']
assert 'success' in data['status']
def test_add_user(app):
"""Ensure a new user can be added to the database."""
with app.test_client() as client:
data = {
'name': 'test',
'foo': 'bar',
'image_1': (BytesIO(b'my file contents'), "image1.jpg")
}
response = client.post('/upload', content_type='multipart/form-data', data=data)
assert response.status_code == 204
| 2.5625
| 3
|
tools/answer_checker.py
|
CZ-NIC/deckard
| 30
|
12775341
|
"""Functions for sending DNS queries and checking recieved answers checking"""
# pylint: disable=C0301
# flake8: noqa
from ipaddress import IPv4Address, IPv6Address
import random
from typing import Iterable, Optional, Set, Union
import dns.message
import dns.name
import dns.flags
import pydnstest.matchpart
import pydnstest.mock_client
def unset_flag(message: dns.message.Message, flag: int) -> dns.message.Message:
"""Unsets given flag in given DNS message."""
message.flags &= ~flag
return message
def send_and_check(question: Union[dns.message.Message, bytes], # pylint: disable=R0913
expected: dns.message.Message,
server: Union[IPv4Address, IPv6Address],
match_fields: Set[str],
port: int = 53,
tcp: bool = False,
timeout: int = pydnstest.mock_client.SOCKET_OPERATION_TIMEOUT,
unset_flags: Iterable[int] = tuple()) -> bool:
"""Checks if DNS answer recieved for a question from a server matches expected one in specified
field. See pydnstest.matchpart for more information on match fields
Returns True on success, raises an exceptions on failure.
"""
print("Sending query:\n%s\n" % str(question))
answer = get_answer(question, server, port, tcp, timeout=timeout)
for flag in unset_flags:
answer = unset_flag(answer, flag)
print("Got answer:\n%s\n" % answer)
print("Matching:\n%s\n%s\n" % (match_fields, expected))
for field in match_fields:
pydnstest.matchpart.match_part(expected, answer, field)
return True
def get_answer(question: Union[dns.message.Message, bytes],
server: Union[IPv4Address, IPv6Address],
port: int = 53,
tcp: bool = False,
timeout: int = pydnstest.mock_client.SOCKET_OPERATION_TIMEOUT) -> dns.message.Message:
"""Get an DNS message with answer with specific query"""
sock = pydnstest.mock_client.setup_socket(str(server), port, tcp=tcp)
with sock:
pydnstest.mock_client.send_query(sock, question)
return pydnstest.mock_client.get_dns_message(sock, timeout=timeout)
def string_answer(question: Union[dns.message.Message, bytes],
server: Union[IPv4Address, IPv6Address],
port: int = 53,
tcp: bool = False) -> str:
"""Prints answer of a server. Good for generating tests."""
return get_answer(question, server, port, tcp).to_text()
def randomize_case(label: bytes) -> bytes:
"""Randomize case in a DNS name label"""
output = []
for byte in label:
if random.randint(0, 1):
output.append(bytes([byte]).swapcase())
else:
output.append(bytes([byte]))
return b''.join(output)
def make_random_case_query(name: str, *args, **kwargs) -> dns.message.Message:
    """Proxy for dns.message.make_query with rANdoM-cASe"""
    query = dns.message.make_query(name, *args, **kwargs)
    # rebuild the question name from randomized labels (rebinding the loop
    # variable, as the original did, would not modify the query)
    query.question[0].name = dns.name.Name(
        randomize_case(label) for label in query.question[0].name.labels)
    return query
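# --- Hypothetical usage sketch (not part of the original module); the server
# address below is a placeholder and a reachable resolver is assumed.
# query = make_random_case_query('example.com.', 'A')
# print(string_answer(query, IPv4Address('192.0.2.53')))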
| 2.8125
| 3
|
fantasyProjectHome/fantasyApp/urls.py
|
jaredtewodros/cfbFantasyApp
| 0
|
12775342
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('login/', views.login, name='login'),
]
| 1.617188
| 2
|
bokeh/server/server_backends.py
|
rothnic/bokeh
| 1
|
12775343
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2015, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import logging
logger = logging.getLogger(__name__)
import uuid
from bokeh.exceptions import UnauthorizedException
from flask import (
request, session, flash, redirect, url_for, render_template, jsonify
)
from .app import bokeh_app
from .models import user, docs, convenience
class AbstractAuthentication(object):
def current_user_name(self):
"""obtain current user name from the current request
current request is obtained from flask request thread local
object
"""
raise NotImplementedError
def login(self, username):
"""login the user, sets whatever request information is necessary
(usually, session['username'] = username)
"""
raise NotImplementedError
def logout(self):
"""logs out the user, sets whatever request information is necessary
usually, session.pop('username')
"""
raise NotImplementedError
def current_user(self):
"""returns bokeh User object from self.current_user_name
"""
username = self.current_user_name()
if username is None:
return None
bokehuser = user.User.load(bokeh_app.servermodel_storage, username)
return bokehuser
def login_get(self):
"""custom login view
"""
raise NotImplementedError
def login_post(self):
"""custom login submission. Request form will have
username, password, and possibly an api field.
api indicates that we are
submitting via python, and we should try to return error
codes rather than flash messages
"""
raise NotImplementedError
def login_from_apikey(self):
"""login URL using apikey. This is usually generated
by the python client
"""
raise NotImplementedError
def register_get(self):
"""custom register view
"""
raise NotImplementedError
def register_post(self):
"""custom register submission
request form will have username, password, password_confirm,
and possibly an api field. api indicates that we are
submitting via python, and we should try to return error
codes rather than flash messages
"""
raise NotImplementedError
def can_write_doc(self, docid):
"""whether or not a user can write to a doc
"""
raise NotImplementedError
def can_read_doc(self, docid):
"""whether or not a user can read a doc
"""
raise NotImplementedError
class SingleUserAuthentication(AbstractAuthentication):
def can_write_doc(self, doc_or_docid, temporary_docid=None, userobj=None):
return True
def can_read_doc(self, doc_or_docid, temporary_docid=None, userobj=None):
return True
def current_user_name(self):
return "defaultuser"
def current_user(self):
"""returns bokeh User object matching defaultuser
if the user does not exist, one will be created
"""
username = self.current_user_name()
bokehuser = user.User.load(bokeh_app.servermodel_storage, username)
if bokehuser is not None:
return bokehuser
bokehuser = user.new_user(bokeh_app.servermodel_storage, "defaultuser",
str(uuid.uuid4()), apikey='nokey', docs=[])
return bokehuser
class MultiUserAuthentication(AbstractAuthentication):
def can_write_doc(self, doc_or_docid, temporary_docid=None, userobj=None):
if not isinstance(doc_or_docid, docs.Doc):
doc = docs.Doc.load(bokeh_app.servermodel_storage, doc_or_docid)
else:
doc = doc_or_docid
if userobj is None:
userobj = self.current_user()
return convenience.can_write_from_request(doc, request, userobj,
temporary_docid=temporary_docid)
def can_read_doc(self, doc_or_docid, temporary_docid=None, userobj=None):
if not isinstance(doc_or_docid, docs.Doc):
doc = docs.Doc.load(bokeh_app.servermodel_storage, doc_or_docid)
else:
doc = doc_or_docid
if userobj is None:
userobj = self.current_user()
return convenience.can_read_from_request(doc, request, userobj)
def login(self, username):
session['username'] = username
def print_connection_info(self, bokehuser):
logger.info("connect using the following")
command = "output_server(docname, username='%s', userapikey='%s')"
command = command % (bokehuser.username, bokehuser.apikey)
logger.info(command)
def current_user_name(self):
# users can be authenticated by logging in (setting the session)
# or by setting fields in the http header (api keys, etc..)
username = session.get('username', None)
if username:
return username
else:
# check for auth via apis and headers
bokehuser = user.apiuser_from_request(bokeh_app, request)
if bokehuser:
return bokehuser.username
return None
def register_get(self):
return render_template("register.html", title="Register")
def login_get(self):
return render_template("login.html", title="Login")
def register_post_api(self):
username = request.values['username']
password = request.values['password']
try:
bokehuser = user.new_user(
bokeh_app.servermodel_storage, username, password
)
self.login(username)
self.print_connection_info(bokehuser)
except UnauthorizedException:
return jsonify(status=False,
error="user already exists")
return jsonify(status=True,
userapikey=bokehuser.apikey
)
def register_post(self):
if request.values.get('api', None):
return self.register_post_api()
username = request.values['username']
password = request.values['password']
password_confirm = request.values['password_confirm']
if password != password_confirm:
flash("password and confirmation do not match")
return redirect(url_for('.register_get'))
try:
bokehuser = user.new_user(
bokeh_app.servermodel_storage, username, password
)
self.login(username)
self.print_connection_info(bokehuser)
except UnauthorizedException:
flash("user already exists")
return redirect(url_for('.register_get'))
return redirect(url_for(".index"))
def login_post_api(self):
username = request.values['username']
password = request.values['password']
try:
bokehuser = user.auth_user(bokeh_app.servermodel_storage,
username,
password)
self.login(username)
self.print_connection_info(bokehuser)
except UnauthorizedException:
            return jsonify(status=False,
                           error="incorrect login")
return jsonify(status=True,
userapikey=bokehuser.apikey
)
def login_post(self):
if request.values.get('api', None):
return self.login_post_api()
username = request.values['username']
password = request.values['password']
try:
bokehuser = user.auth_user(bokeh_app.servermodel_storage,
username,
password=password)
self.login(username)
self.print_connection_info(bokehuser)
except UnauthorizedException:
            flash("incorrect login")
return redirect(url_for('.login_get'))
return redirect(url_for(".index"))
def login_from_apikey(self):
username = request.values.get('username')
apikey = request.values.get('userapikey')
try:
bokehuser = user.auth_user(bokeh_app.servermodel_storage,
username,
apikey=apikey)
self.login(username)
self.print_connection_info(bokehuser)
except UnauthorizedException:
flash("incorrect login")
return redirect(url_for('.login_get'))
return redirect(url_for(".index"))
def logout(self):
session.pop('username', None)
return redirect(url_for(".index"))
| 2.421875
| 2
|
examples/forms/boostrap.py
|
mulonemartin/kaira
| 3
|
12775344
|
from wtforms import StringField, validators
from kaira.app import App
from kaira.response import response
from kaira.wtf import KairaForm
app = App()
class SigninForm(KairaForm):
username = StringField('Username', [validators.Length(min=4, max=25)])
password = StringField('Password', [validators.Length(min=6, max=35)])
@app.route("/", methods=['GET', 'POST'])
def form_boostrap(request):
form = SigninForm(request)
if form.validate_on_submit():
return response.redirect('/done')
return response.template('boostrap.html', form=form)
@app.route("/done")
def done(request):
return response.text('Done!')
if __name__ == '__main__':
app.run(debug=True, host="0.0.0.0", port=8000)
| 2.265625
| 2
|
Exposure/match_histograms.py
|
Joevaen/Scikit-image_On_CT
| 0
|
12775345
|
# Adjust an image so that its cumulative histogram matches that of a reference
# image; each channel is matched independently.
import matplotlib.pyplot as plt
from skimage import io
from skimage.exposure import match_histograms
reference = io.imread('/home/qiao/PythonProjects/Scikit-image_On_CT/Test_Img/9.jpg')
image = io.imread('/home/qiao/PythonProjects/Scikit-image_On_CT/Test_Img/10.jpg')
matched = match_histograms(image, reference, multichannel=True)
fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(8, 3),
sharex=True, sharey=True)
for aa in (ax1, ax2, ax3):
aa.set_axis_off()
ax1.imshow(image)
ax1.set_title('Source')
ax2.imshow(reference)
ax2.set_title('Reference')
ax3.imshow(matched)
ax3.set_title('Matched')
plt.tight_layout()
plt.show()
| 2.90625
| 3
|
harpoon/src/plugins/Error/Types.py
|
xezzz/Harpoon
| 0
|
12775346
|
from discord.ext import commands
class NotCachedError(commands.CheckFailure):
pass
class PostParseError(commands.BadArgument):
def __init__(self, type, error):
super().__init__(None)
self.type = type
self.error = error
| 2.5
| 2
|
cascades.py
|
stmorse/cascades
| 1
|
12775347
|
##########################
# Implementation of Persistent Cascades algorithm
# described in [](https://stmorse.github.io/docs/BigD348.pdf)
# For usage see README
# For license see LICENSE
# Author: <NAME>
# Email: <EMAIL>
# License: MIT License (see LICENSE in top folder)
##########################
import os
import numpy as np
import multiprocessing as mp
import networkx as nx
import time as T
import copy
from utils import load_calls
from zss import Node, simple_distance
##################
# METHODS USED IN MP, CAN'T BE CLASS INSTANCE OBJECTS
def jaccard(im1, im2):
# |A\cap B|/|A\cup B| = |A\cap B|/(|A|+|B|-|A\cap B|)
im1 = np.asarray(im1)
im2 = np.asarray(im2)
intersection = len(np.intersect1d(im1, im2))
return intersection / float(len(im1) + len(im2) - intersection)
def build_sim_mx(trees, nodelists=None, method='zss', similarity=True, speedup=-1):
'''Returns a distance matrix of `trees` using metric `method`
trees : list of trees (Node)
method : `zss` is normalized TED, `jaccard` is on node lists, `both` gives both
and can take the speedup parameter
similarity : if False, returns the dissimilarity (1-sim)
speedup : [0,1]. Will set TED=0 when jaccard < speedup. (Zero = no speedup)
Returns sim_mx (or if `both`, returns TED then jaccard)
'''
    numt = len(trees)
    lists = [[c.label for c in t.iter()] for t in trees] if nodelists is None else nodelists
    sizes = [len(li) for li in lists]
    sim_mx = np.zeros((numt, numt))
    sim_mx2 = np.zeros((numt, numt)) if method=='both' else None
    for i in range(numt):
        for j in range(i, numt):
if i==j:
sim_mx[i,j] = 1
if method=='both':
sim_mx2[i,j] = 1
continue
if method=='both':
jacc = jaccard(lists[i], lists[j])
nted = 0
if jacc >= speedup:
ted = simple_distance(trees[i], trees[j])
nted = 1 - (2*ted / float(sizes[i]+sizes[j]+ted))
sim_mx[i,j], sim_mx[j,i] = nted, nted
sim_mx2[i,j], sim_mx2[j,i] = jacc, jacc
else:
val = 0
if method=='zss':
ted = simple_distance(trees[i], trees[j])
val = 1 - (2*ted / float(sizes[i]+sizes[j]+ted))
elif method=='jaccard':
val = jaccard(lists[i], lists[j])
                else:
                    print('Method unrecognized.')
                    return
sim_mx[i,j], sim_mx[j,i] = val, val
if method=='both':
if not similarity:
sim_mx = 1 - sim_mx
sim_mx2 = 1 - sim_mx2
return sim_mx, sim_mx2
else:
return sim_mx if similarity else (1 - sim_mx)
# mp method used by build_distmx_tgraph
def do_build(root, alltrees, mintree, minsize):
# get indices of periods with a tree
treex = [i for i, t in enumerate(alltrees) if type(t)==Node]
if len(treex) < mintree:
return None
# build lists of nodes in each tree
nlists = []
bigtreex = []
for x in treex:
li = [c.label for c in alltrees[x].iter()]
if len(li) < minsize:
continue
nlists.append(li)
bigtreex.append(x)
if len(nlists) < mintree: # if num big enough trees is too small
return None
nted_mx, jacc_mx = build_sim_mx([alltrees[v] for v in bigtreex],
nodelists=nlists, # send lists of nodes
method='both', # retrieve jaccard and nted
speedup=0.3) # only compute TED if jaccard >= 0.3
# build tree graph with pairwise comparisons
g = nx.Graph()
g.add_nodes_from(bigtreex)
for i, x in enumerate(bigtreex):
for j, y in enumerate(bigtreex):
if j <= i: continue
if nted_mx[i,j] > 0 or jacc_mx[i,j] > 0:
g.add_edge(x, y, nted=nted_mx[i,j], jacc=jacc_mx[i,j])
return (root, g)
###########################
class Cascades:
'''Loads call data, extracts cascades, and finds persistence classes.
Dependencies: numpy, multiprocessing, networkx, zss, utils
Callable methods:
build : build call matrix, extracts all cascades, creates similarity matrices
build_persistence_classes : takes tgraphs and creates persistence classes
Internal methods:
load_call_data : loads call data
extract_cascade : given root(s), extracts all cascades
build_sim_mx : given set of cascades, builds similarity matrix using Jaccard/TED
build_all_cascades : extracts all cascades using `extract_cascade`
do_build : multiprocessing worker method
build_distmx_tgraph : takes trees and creates tgraphs for each root node
'''
def __init__(self, calls=None, UTC=False, path='', city='', nMonths=1, startx=0, moyr=[]):
'''Initialize Cascades object.
`calls` is an optional numpy array of the raw call data.
`moyr` is list of tuples giving month/year combos. (Default is for the Portugal data.)'''
self.moyr = [(4,2006), (5,2006), (6,2006), (7,2006), (8,2006), (11,2006), (12,2006),
(1,2007), (2,2007), (3,2007), (4,2007), (5,2007), (6,2007)]
if len(moyr) > 0:
self.moyr = moyr
if calls is None:
self.calls = self.load_call_data(path+city+'/', nMonths=nMonths, startx=startx)
else:
self.calls = calls
self.UTC = UTC
def build(self, nsample=3000, mincalls=50, period=1, sample=[], exclude=[], mintree=10, minsize=3,
daybegins=4*60*60, multi=True, batchsize=1000, numproc=7, verbose=True):
'''Load call data, extract all call data, and build the similarity matrices for each
possible root. Store in vars `calls`, `allC`, and `tgraphs`.
'''
if len(sample) == 0:
self.allC = self.build_all_cascades(nsample=nsample, mincalls=mincalls, period=period,
exclude=exclude, daybegins=daybegins, verbose=verbose)
else:
self.allC = self.build_all_cascades(sample=sample, period=period, verbose=verbose)
self.tgraphs = self.build_distmx_tgraph(mintree=mintree, minsize=minsize,
multi=multi, batchsize=batchsize, numproc=numproc)
def build_persistence_classes(self, ell=0.8, dayrange=[], verbose=True):
'''Create (and return) list of lists for each root with maximal persistence classes.
Note: dayrange should be in terms of the periods, not the actual days.
Default is to do the entire dataset.'''
        if len(dayrange) == 0:
            dayrange = range(np.amax(self.calls[:, 6]))
# by root, list of lists
self.pers_class_nted = {}
self.pers_class_jacc = {}
START = T.time()
numg = len(self.tgraphs)
        for k, root in enumerate(self.tgraphs):
            if k % 10000 == 0 and verbose:
                print(k, numg, (T.time() - START))
# create a temporary version of tgraph[root] with low-wt edges removed
# find nted persistence classes
gt = self.tgraphs[root].copy()
for n in gt.nodes():
if n not in dayrange:
gt.remove_node(n)
for e in gt.edges():
if gt.edge[e[0]][e[1]]['nted'] < ell:
gt.remove_edge(e[0], e[1])
self.pers_class_nted[root] = list(nx.find_cliques(gt))
# find jaccard persistence classes
gj = self.tgraphs[root].copy()
for n in gj.nodes():
if n not in dayrange:
gj.remove_node(n)
for e in gj.edges():
if gj.edge[e[0]][e[1]]['jacc'] < ell:
gj.remove_edge(e[0], e[1])
self.pers_class_jacc[root] = list(nx.find_cliques(gj))
return self.pers_class_nted, self.pers_class_jacc
##################
def load_call_data(self, path, nMonths=1, startx=8):
'''Load raw call data (possibly multiple months) into single object.
`calls` must be structured caller, tower, callee, tower, time stamp,
duration, period (day).
Time stamp is structured with the last 5 digits giving the second past
midnight, and the leading 2-3 digits giving the 24-period past Jan 2006.
'''
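    # Worked example of the timestamp format (hypothetical values): timestamp 1234500
    # decomposes as day '12' (str(1234500)[:-5]) and second 34500 past midnight
    # (last 5 digits), i.e. 09:35:00 on the 12th 24-hour period of the dataset.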
calls = load_calls(path, sort=True,
month=self.moyr[startx][0], year=self.moyr[startx][1], withDays=True)
for m in range(1, nMonths): # if nMonths==1, will skip
calls2 = load_calls(path, sort=True, month=self.moyr[startx+m][0],
year=self.moyr[startx+m][1], withDays=True)
calls2[:,6] += np.amax(calls[:,6]) + 1
calls = np.vstack((calls, calls2))
return calls
#####
def extract_cascade(self, calls, roots, maxdepth=-1, maxhr=24, verbose=False):
        '''Extract time-respecting, non-repeating cascades for a single root or a list of roots.
        Uses the recursive internal method `subtree()`.
        roots : desired root(s) (user ids)
maxdepth : cutoff number of tiers (-1 for unlimited, default)
maxhr : cutoff time, starting from first base root call (in hrs)
verbose : debug output
'''
# sort calls[] by outgoing user
O = np.argsort( calls[:,0] )
calls = calls[O,:]
indx = np.cumsum( np.bincount( calls[:,0] ) )
indx = np.append([0],indx)
# now range(indx[u], indx[u+1]) gives the indices in calls[] of user u's calls
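        # e.g., if calls[:,0] == [0, 0, 2], then bincount -> [2, 0, 1], cumsum -> [2, 2, 3],
        # and indx == [0, 2, 2, 3]: user 0 -> rows [0, 2), user 1 -> none, user 2 -> row [2, 3)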
# Initialize the trees (indexed by root)
Cs = {}
        # Initialize dict mapping each node to its first call in the tree
        # structure: {node: (parentnode, time)}
        firsts = {}
# time cutoff, set as first call by root + maxsec
maxt = 0
def subtree(rootnode, curdepth):
root = rootnode.label
tau = rootnode.tau
# stop if we have reached maxdepth
if curdepth == maxdepth:
return
# if root didn't make any calls, return
try:
if indx[root] == indx[root+1]:
return
except IndexError:
                # print('Index out of bounds for root', root)
# this will occur if root didn't make any outgoing calls
# and has an id larger than any of the outgoing users
return
# get slice of all this root's outgoing calls,
# sort by time, and remove all before tau or after maxt
rcalls = calls[indx[root]:indx[root+1],:]
rcalls = rcalls[np.argsort(rcalls[:,4]),:]
rcalls = rcalls[(rcalls[:,4] > tau) & (rcalls[:,4] < maxt)]
# if no valid calls, return
if len(rcalls) == 0:
return
# loop thru root's calls and start new subtree
for i, c in enumerate(rcalls[:,2]):
# if child already reached, check if we remove old or skip
if c in firsts:
if (rcalls[i,4] < firsts[c][1]):
# existing spot was a later event, remove it
oc = firsts[c][0].get(int(c))
firsts[c][0].children.remove(oc)
else:
# existing spot was an earlier event, skip this child
continue
# update firsts
firsts[c] = (rootnode, rcalls[i,4])
# add child
child = Node(int(c), tau=rcalls[i,4])
rootnode.addkid(child)
subtree(child, curdepth+1)
## END def subtree
def order_children(rootnode):
if len(rootnode.children) == 0:
return
# order children of the rootnode
rootnode.children = sorted(Node.get_children(rootnode), key=lambda c: c.label)
for cnode in Node.get_children(rootnode):
order_children(cnode)
## END def order_children
# do build...
maxsec = maxhr * 60 * 60
for i, r in enumerate(roots):
try:
if indx[r] == indx[r+1]:
continue
except IndexError:
                # if verbose: print('Index out of bounds for root', r)
continue
r = int(r)
Cs[r] = Node(r, tau=-1)
t1 = np.amin(calls[indx[r]:indx[r+1],4])
maxt = t1 + maxsec
firsts = {r: (None, t1)}
subtree(Cs[r], 0)
# order children by label so tree edit distance is unbiased
        # if verbose: print('Ordering by label...', end=' ')
for r in Cs:
order_children(Cs[r])
        if verbose: print('(%d)' % (len(Cs)), end=' ')
return Cs
def build_all_cascades(self, nsample=50000, mincalls=10, period=1, sample=[],
exclude=[], daybegins=4*60*60, verbose=True):
        '''Extract all cascades for a random sample of users.
        Uses self.calls (form: caller, tow1, callee, tow2, time, duration, day).
        nsample -- size of random sample (-1 does all users with mincalls)
        mincalls -- min outgoing calls required in the dataset
        period -- number of days per cascade window. Start of day is 4am by default.
        '''
        calls = self.calls
        # daybegins = 4 * 60 * 60  # 4am is the "start of the day"
        print('\n%d total unique users.' % (len(np.unique(calls[:, 0]))))
        if len(sample) == 0:
            nCalls = np.bincount(calls[:, 0])
            users = np.where(nCalls >= mincalls)[0]
            users = np.setdiff1d(users, exclude)
            print('Found %d non-excluded users with enough outgoing calls.' % (len(users)))
            if nsample > 0:
                print('Sampling %d...' % (nsample))
                users = np.random.choice(users, nsample, replace=False)
        else:
            users = sample
            print('Sampling %d users (specified)' % (len(users)))
        numdays = np.amax(calls[:, 6])
        numperiods = numdays // period  # integer division: number of whole periods
        print('Num days: %d, Num periods (%d hrs): %d\n' % (numdays, period * 24, numperiods))
# all cascades, keyed by root. format: {root: [Node, 0, ...], ...}
allC = {}
        print('Extracting trees... (verbose format: period (trees))')
        START = T.time()
        for idx, p in enumerate(range(0, numdays - (period - 1) - 1, period)):
if self.UTC:
tcalls = calls[calls[:,6]==p]
                if len(tcalls) == 0:
                    if verbose: print('Skip %d (no data)' % (idx), end=' ')
                    continue
                if verbose: print('%d' % (idx), end=' ')
C = self.extract_cascade(tcalls,
users,
maxdepth=-1,
maxhr=(period*24),
verbose=True)
else:
day1 = calls[calls[:,6]==p]
                if len(day1) == 0:
                    if verbose: print('Skip %d (no data)' % (idx), end=' ')
                    continue
# the nasty string manipulation is to deal with the time format ...
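                # e.g. (hypothetical values): day1[0,4] == 1234500 and daybegins == 4*60*60 == 14400
                # give startsec = int('12' + '14400') = 1214400, i.e. 4am on day 12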
startsec = int(''.join([str(day1[0,4])[:-5], '{0:0>5}'.format(str(int(daybegins)))]))
endsec = startsec + 86400
                if verbose: print('%d' % (idx), end=' ')
C = self.extract_cascade(calls[(calls[:,4] > startsec) & (calls[:,4] < endsec)],
users,
maxdepth=-1,
maxhr=(period*24),
verbose=verbose)
            for tree in C:
                try:
                    allC[tree][idx] = C[tree]
                except KeyError:
                    allC[tree] = [0 for _ in range(numperiods)]
                    allC[tree][idx] = C[tree]
            if verbose: print(' .. ', end=' ')
            if idx % 5 == 0 and idx > 0 and verbose:
                print('Time %1.3f' % (T.time() - START))
        print('Complete.\n')
self.users = users
return allC
#########
def build_distmx_tgraph(self, mintree=3, minsize=3,
multi=True, batchsize=1000, numproc=4,
verbose=True):
        '''Build distance matrices and store them in a tree graph per root.'''
# all NTED and JACC similarity matrices, indexed by root
# all_nted = {}
# all_jacc = {}
# all tree graphs, indexed by root
all_tgraphs = {}
allC = self.allC
numt = len(self.allC)
START = T.time()
        numbatch = (numt // batchsize) + 1  # integer division; may overshoot by one batch
output = []
for k in range(numbatch):
            if verbose:
                print('[%d to %d). (Batch %d / %d). Total: %d. Time: %1.3f' %
                      (k * batchsize, (k + 1) * batchsize, k, numbatch, numt, (T.time() - START)))
if multi:
results = []
pool = mp.Pool(processes=numproc)
results = [pool.apply_async(do_build,
args=(root, self.allC[root], mintree, minsize, )) for \
i, root in enumerate(self.allC) \
if i >= (k*batchsize) and i < ((k+1)*batchsize)]
pool.close()
pool.join()
temp = [p.get() for p in results]
else:
temp = [do_build(root, self.allC[root], mintree, minsize) for \
i, root in enumerate(self.allC) \
if i >= (k*batchsize) and i < ((k+1)*batchsize)]
            output.extend([t for t in temp if t is not None])
        if verbose: print('Writing to tgraphs...')
        for g in output:
            all_tgraphs[g[0]] = g[1]
        if verbose:
            print('Complete. Total roots:', len(all_tgraphs))
            print('Total time:', T.time() - START)
            print('')
# return all_tgraphs
return all_tgraphs
################
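# Hedged usage sketch (not from the original source). Assumes `load_calls`, `Node`,
# and `do_build` are provided by the project's utils module, and that raw call data
# for a hypothetical city 'porto' lives under ./data/porto/ in the 7-column format.
if __name__ == '__main__':
    casc = Cascades(path='./data/', city='porto', nMonths=2)
    casc.build(nsample=1000, mincalls=20, numproc=4, verbose=True)
    nted_classes, jacc_classes = casc.build_persistence_classes(ell=0.8)
    print('Roots with persistence classes:', len(nted_classes))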
| 2.90625
| 3
|
apimetrics_agent/thread.py
|
APImetrics/Agent
| 0
|
12775348
|
import logging
import os
from datetime import datetime
import tempfile
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from apimetrics_agent import VERSION
from .controller import handle_api_request
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class APImetricsThread(object):
def __init__(self, config):
self.config = config
def handle_definition(self, definition, complete_cb):
logger.debug("handle_definition")
# exception_str = None
result = None
if not definition:
logger.error("definition not set")
complete_cb()
return
test_key_str = self.validate_data(definition)
if not test_key_str:
logger.error("Invalid request data: %s", definition)
complete_cb()
return
cert_file_name = None
key_file_name = None
if definition["request"].get("ssl_cert"):
with tempfile.NamedTemporaryFile(mode="w", delete=False) as cert:
cert.write(definition["request"]["ssl_cert"])
definition["_cert_file"] = cert.name
cert_file_name = cert.name
logger.debug("Using cert file %s", cert_file_name)
del definition["request"]["ssl_cert"]
if definition["request"].get("ssl_key"):
with tempfile.NamedTemporaryFile(mode="w", delete=False) as cert:
cert.write(definition["request"]["ssl_key"])
definition["_key_file"] = cert.name
key_file_name = cert.name
del definition["request"]["ssl_key"]
elif self.config.ssl_cert_file:
definition["_cert_file"] = self.config.ssl_cert_file
definition["_key_file"] = self.config.ssl_key_file
try:
complete_cb()
result = handle_api_request(definition)
except Exception as ex: # pylint: disable=W0703
logger.error("Exception in handle_api_request %s", ex)
result = {
"test_key_str": test_key_str,
"result_key_str": definition["result_key_str"],
"start_time": datetime.utcnow().isoformat(),
"request": definition["request"],
"response": None,
"exception": "Problem with test agent: {}".format(repr(ex)),
}
if definition.get("expected_trigger_time"):
result["expected_trigger_time"] = definition["expected_trigger_time"]
if definition.get("trigger_time"):
result["trigger_time"] = definition["trigger_time"]
if cert_file_name:
try:
os.remove(cert_file_name)
except FileNotFoundError:
pass
if key_file_name:
try:
os.remove(key_file_name)
except FileNotFoundError:
pass
res = self.send_result_to_gae(result, test_key_str)
logger.info("Got response %d %s", res.status_code, res.reason)
# logger.debug(res.data) #read(decode_content=True))
def validate_data(self, output):
logger.debug("validate_data")
if (
"access_token" in output
and self.config.access_token == output["access_token"]
):
return output["test_key_str"]
return None
def send_result_to_gae(self, result, test_key_str):
logger.debug("send_result_to_gae")
url = "{}/remote-api/1/test/{}/".format(self.config.host_url, test_key_str)
result["version"] = VERSION
session = requests.Session()
        retries = Retry(
            total=5,
            backoff_factor=1,
            allowed_methods=["POST"],  # renamed from method_whitelist in urllib3 1.26
            status_forcelist=[500, 501, 502, 503, 504],
        )
session.mount(self.config.host_url, HTTPAdapter(max_retries=retries))
logger.info("Calling %s %s proxy: %s", "POST", url, self.config.proxies)
return session.post(url, json=result, proxies=self.config.proxies, verify=False)
def handle_request(config, definition, complete_cb=None):
logger.debug("handle_request")
thread = APImetricsThread(config)
thread.handle_definition(definition, complete_cb)
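# Hedged usage sketch (not part of the original module). Assumes a config object
# exposing access_token, host_url, proxies, ssl_cert_file, and ssl_key_file, plus a
# definition dict shaped like the payloads handled above; all names are illustrative.
#
#   definition = {
#       "access_token": config.access_token,
#       "test_key_str": "test-123",          # hypothetical key
#       "result_key_str": "result-456",      # hypothetical key
#       "request": {"url": "https://example.com/api", "method": "GET"},
#   }
#   handle_request(config, definition, complete_cb=lambda: None)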
| 2.296875
| 2
|
Algorithms/Easy/989. Add to Array-Form of Integer/answer.py
|
KenWoo/Algorithm
| 0
|
12775349
|
from typing import List
class Solution:
def addToArrayForm(self, A: List[int], K: int) -> List[int]:
res = []
N = len(A)
S = str(K)
M = len(S)
i = N - 1
j = M - 1
carry = 0
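        # Walk both numbers from their least significant digits, summing with carry
        # exactly like grade-school column addition; digits accumulate in res reversed.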
while i >= 0 or j >= 0 or carry != 0:
v = carry
if i >= 0:
v += A[i]
if j >= 0:
v += int(S[j])
carry, rem = divmod(v, 10)
res.append(rem)
i -= 1
j -= 1
return list(reversed(res))
if __name__ == "__main__":
s = Solution()
result = s.addToArrayForm([2, 1, 5], 806)
print(result)
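    # expected output: [1, 0, 2, 1] since 215 + 806 == 1021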
| 3.265625
| 3
|
iexfinance/tests/stocks/test_market_movers.py
|
jto-d/iexfinance
| 653
|
12775350
|
import pandas as pd
import pytest
from iexfinance.stocks import (
get_market_gainers,
get_market_iex_percent,
get_market_iex_volume,
get_market_losers,
get_market_most_active,
)
class TestMarketMovers(object):
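    # Each movers endpoint nominally returns a 10-quote list; pytest.approx(10, 1)
    # (relative tolerance 1) accepts any length from 0 to 20, absorbing live-API variation.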
def test_market_gainers(self):
li = get_market_gainers()
assert isinstance(li, pd.DataFrame)
assert len(li) == pytest.approx(10, 1)
def test_market_losers(self):
li = get_market_losers()
assert isinstance(li, pd.DataFrame)
assert len(li) == pytest.approx(10, 1)
def test_market_most_active(self):
li = get_market_most_active()
assert isinstance(li, pd.DataFrame)
assert len(li) == pytest.approx(10, 1)
def test_market_iex_volume(self):
li = get_market_iex_volume()
assert isinstance(li, pd.DataFrame)
assert len(li) == pytest.approx(10, 1)
def test_market_iex_percent(self):
li = get_market_iex_percent()
assert isinstance(li, pd.DataFrame)
assert len(li) == pytest.approx(10, 1)
| 2.34375
| 2
|