Dataset schema (one row per source file; ranges are observed min – max, ⌀ marks nullable columns):
- hexsha: string (40 chars)
- size: int64 (3 – 1.03M)
- ext: string (10 classes)
- lang: string (1 class: Python)
- max_stars_repo_path: string (3 – 972 chars)
- max_stars_repo_name: string (6 – 130 chars)
- max_stars_repo_head_hexsha: string (40 – 78 chars)
- max_stars_repo_licenses: list (1 – 10 items)
- max_stars_count: int64 (1 – 191k) ⌀
- max_stars_repo_stars_event_min_datetime: string (24 chars) ⌀
- max_stars_repo_stars_event_max_datetime: string (24 chars) ⌀
- max_issues_repo_path: string (3 – 972 chars)
- max_issues_repo_name: string (6 – 130 chars)
- max_issues_repo_head_hexsha: string (40 – 78 chars)
- max_issues_repo_licenses: list (1 – 10 items)
- max_issues_count: int64 (1 – 116k) ⌀
- max_issues_repo_issues_event_min_datetime: string (24 chars) ⌀
- max_issues_repo_issues_event_max_datetime: string (24 chars) ⌀
- max_forks_repo_path: string (3 – 972 chars)
- max_forks_repo_name: string (6 – 130 chars)
- max_forks_repo_head_hexsha: string (40 – 78 chars)
- max_forks_repo_licenses: list (1 – 10 items)
- max_forks_count: int64 (1 – 105k) ⌀
- max_forks_repo_forks_event_min_datetime: string (24 chars) ⌀
- max_forks_repo_forks_event_max_datetime: string (24 chars) ⌀
- content: string (3 – 1.03M chars)
- avg_line_length: float64 (1.13 – 941k)
- max_line_length: int64 (2 – 941k)
- alphanum_fraction: float64 (0 – 1)
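As a rough sketch, rows with this schema can be inspected with the Hugging Face `datasets` library. The dataset name below is a placeholder assumption; the dump itself does not say which dataset it was exported from:

    from datasets import load_dataset

    # Stream rows so the full dataset is never downloaded at once.
    ds = load_dataset("some-org/python-source-files", split="train", streaming=True)  # hypothetical name
    row = next(iter(ds))
    print(row["hexsha"], row["size"], row["max_stars_repo_name"], row["max_stars_count"])
    print(row["content"][:200])  # first 200 characters of the stored source file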

hexsha 4ff11f80f26c7b2649dc72a51fbbd9bf3ebf5728 | size 6,605 | ext py | lang Python
max_stars:  ios/Vendor/SDWebImage/Vendors/libwebp/swig/libwebp.py @ yue-f/react-native-fast-image (head b7112d3872277066f643102530dfdbcb0a9aa0ee) | licenses ["MIT"] | count 4,538 | 2017-10-20T05:19:03.000Z → 2022-03-30T02:29:30.000Z
max_issues: same path/repo/head | licenses ["MIT"] | count 1,095 | 2016-04-10T18:15:33.000Z → 2022-03-31T18:21:20.000Z
max_forks:  same path/repo/head | licenses ["MIT"] | count 1,860 | 2017-10-20T05:22:35.000Z → 2022-03-27T10:54:14.000Z
content:
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.4
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_libwebp', [dirname(__file__)])
except ImportError:
import _libwebp
return _libwebp
if fp is not None:
try:
_mod = imp.load_module('_libwebp', fp, pathname, description)
finally:
fp.close()
return _mod
_libwebp = swig_import_helper()
del swig_import_helper
else:
import _libwebp
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
def WebPGetDecoderVersion():
"""WebPGetDecoderVersion() -> int"""
return _libwebp.WebPGetDecoderVersion()
def WebPGetInfo(*args):
"""WebPGetInfo(uint8_t data) -> (width, height)"""
return _libwebp.WebPGetInfo(*args)
def WebPDecodeRGB(*args):
"""WebPDecodeRGB(uint8_t data) -> (rgb, width, height)"""
return _libwebp.WebPDecodeRGB(*args)
def WebPDecodeRGBA(*args):
"""WebPDecodeRGBA(uint8_t data) -> (rgb, width, height)"""
return _libwebp.WebPDecodeRGBA(*args)
def WebPDecodeARGB(*args):
"""WebPDecodeARGB(uint8_t data) -> (rgb, width, height)"""
return _libwebp.WebPDecodeARGB(*args)
def WebPDecodeBGR(*args):
"""WebPDecodeBGR(uint8_t data) -> (rgb, width, height)"""
return _libwebp.WebPDecodeBGR(*args)
def WebPDecodeBGRA(*args):
"""WebPDecodeBGRA(uint8_t data) -> (rgb, width, height)"""
return _libwebp.WebPDecodeBGRA(*args)
def WebPGetEncoderVersion():
"""WebPGetEncoderVersion() -> int"""
return _libwebp.WebPGetEncoderVersion()
def wrap_WebPEncodeRGB(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeRGB(*args)
def wrap_WebPEncodeBGR(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeBGR(*args)
def wrap_WebPEncodeRGBA(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeRGBA(*args)
def wrap_WebPEncodeBGRA(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeBGRA(*args)
def wrap_WebPEncodeLosslessRGB(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeLosslessRGB(*args)
def wrap_WebPEncodeLosslessBGR(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeLosslessBGR(*args)
def wrap_WebPEncodeLosslessRGBA(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeLosslessRGBA(*args)
def wrap_WebPEncodeLosslessBGRA(*args):
"""private, do not call directly."""
return _libwebp.wrap_WebPEncodeLosslessBGRA(*args)
_UNUSED = 1
def WebPEncodeRGB(rgb, width, height, stride, quality_factor):
"""WebPEncodeRGB(uint8_t rgb, int width, int height, int stride, float quality_factor) -> lossy_webp"""
webp = wrap_WebPEncodeRGB(
rgb, _UNUSED, _UNUSED, width, height, stride, quality_factor)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeRGBA(rgb, width, height, stride, quality_factor):
"""WebPEncodeRGBA(uint8_t rgb, int width, int height, int stride, float quality_factor) -> lossy_webp"""
webp = wrap_WebPEncodeRGBA(
rgb, _UNUSED, _UNUSED, width, height, stride, quality_factor)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeBGR(rgb, width, height, stride, quality_factor):
"""WebPEncodeBGR(uint8_t rgb, int width, int height, int stride, float quality_factor) -> lossy_webp"""
webp = wrap_WebPEncodeBGR(
rgb, _UNUSED, _UNUSED, width, height, stride, quality_factor)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeBGRA(rgb, width, height, stride, quality_factor):
"""WebPEncodeBGRA(uint8_t rgb, int width, int height, int stride, float quality_factor) -> lossy_webp"""
webp = wrap_WebPEncodeBGRA(
rgb, _UNUSED, _UNUSED, width, height, stride, quality_factor)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeLosslessRGB(rgb, width, height, stride):
"""WebPEncodeLosslessRGB(uint8_t rgb, int width, int height, int stride) -> lossless_webp"""
webp = wrap_WebPEncodeLosslessRGB(rgb, _UNUSED, _UNUSED, width, height, stride)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeLosslessRGBA(rgb, width, height, stride):
"""WebPEncodeLosslessRGBA(uint8_t rgb, int width, int height, int stride) -> lossless_webp"""
webp = wrap_WebPEncodeLosslessRGBA(rgb, _UNUSED, _UNUSED, width, height, stride)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeLosslessBGR(rgb, width, height, stride):
"""WebPEncodeLosslessBGR(uint8_t rgb, int width, int height, int stride) -> lossless_webp"""
webp = wrap_WebPEncodeLosslessBGR(rgb, _UNUSED, _UNUSED, width, height, stride)
if len(webp[0]) == 0:
return None
return webp[0]
def WebPEncodeLosslessBGRA(rgb, width, height, stride):
"""WebPEncodeLosslessBGRA(uint8_t rgb, int width, int height, int stride) -> lossless_webp"""
webp = wrap_WebPEncodeLosslessBGRA(rgb, _UNUSED, _UNUSED, width, height, stride)
if len(webp[0]) == 0:
return None
return webp[0]
# This file is compatible with both classic and new-style classes.
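# A minimal round-trip sketch layered on the wrappers above (an illustrative
# addition, not part of the SWIG-generated module). It assumes the compiled
# _libwebp extension is importable and, per the docstrings, that the encoder
# takes a packed RGB buffer with stride = width * 3.
if __name__ == '__main__':
    width, height = 8, 8
    rgb = b'\x80' * (width * height * 3)  # flat mid-gray test image
    encoded = WebPEncodeRGB(rgb, width, height, width * 3, 80.0)
    if encoded is not None:
        decoded, w, h = WebPDecodeRGB(encoded)  # docstring: -> (rgb, width, height)
        assert (w, h) == (width, height)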
avg_line_length 33.190955 | max_line_length 106 | alphanum_fraction 0.702952

hexsha 94584c2d7eb4baca47aed40928af5e715132a577 | size 1,700 | ext py | lang Python
max_stars / max_issues / max_forks: test/votes_test.py @ shihab4t/social_media_api.py (head 7bb4d037032ca92986bc01295625fa368d5e12de) | licenses ["Unlicense"] | counts null | event datetimes null
content:
import pytest
from app import models
@pytest.fixture
def test_vote(test_posts, session, test_user):
new_vote = models.Vote(
post_id=test_posts[-1].id, user_id=test_user.get("id"))
session.add(new_vote)
session.commit()
def test_votes_on_post(authorized_client, test_posts):
res = authorized_client.post(
"/vote/",
json={
"post_id": test_posts[-1].id,
"dir": True
}
)
assert res.status_code == 201
def test_votes_twice_on_post(authorized_client, test_posts, test_vote):
res = authorized_client.post(
"/vote/",
json={
"post_id": test_posts[-1].id,
"dir": True
}
)
assert res.status_code == 409
def test_delete_vote(authorized_client, test_posts, test_vote):
res = authorized_client.post(
"/vote/",
json={
"post_id": test_posts[-1].id,
"dir": False
}
)
assert res.status_code == 201
def test_delete_vote_non_exist(authorized_client, test_posts):
res = authorized_client.post(
"/vote/",
json={
"post_id": test_posts[-1].id,
"dir": False
}
)
assert res.status_code == 404
def test_vote_post_non_exist(authorized_client, test_posts):
res = authorized_client.post(
"/vote/",
json={
"post_id": 23423423,
"dir": True
}
)
assert res.status_code == 404
def test_vote_unauthorized_client(client, test_posts):
res = client.post(
"/vote/",
json={
"post_id": 23423423,
"dir": True
}
)
assert res.status_code == 401
avg_line_length 20.481928 | max_line_length 71 | alphanum_fraction 0.57

hexsha f81ed65ba638257683b3ff0ff439cb1d9a79d639 | size 3,638 | ext py | lang Python
max_stars / max_issues / max_forks: backprop/main.py @ amangour30/BINC (head 08bdea69108fdb2e5db06d86c195ddec1ef0a71e) | licenses ["BSD-2-Clause"] | counts null | event datetimes null
content:
from activation_functions import sigmoid_function, tanh_function, linear_function,\
LReLU_function, ReLU_function, elliot_function, symmetric_elliot_function, softmax_function
from cost_functions import sum_squared_error, cross_entropy_cost, exponential_cost, hellinger_distance, softmax_cross_entropy_cost
from learning_algorithms import backpropagation, scaled_conjugate_gradient, scipyoptimize, resilient_backpropagation
from neuralnet import NeuralNet
from tools import Instance
import numpy as np
# Training sets
paper = 3
plastic = 3+3
metal = 10
lst = []
one = [1]
zero = [0]
for i in range(10):
filename = "file" + str(i) + ".txt"
fo = open(filename, "r")
data = fo.read().replace('\n', '').replace("[", "").replace("]", "")
data = np.fromstring(data, dtype=int, sep=',')
#~ if i == 0:
#~ print data
if i < paper :
lst.append(Instance(data, zero))
elif i < plastic:
lst.append(Instance(data, zero))
else :
lst.append(Instance(data, one))
training_one = []
for i in range(0,10):
if i != 8:
training_one.append(lst[i])
#~ fo = open("outputtrainingone.txt", "w")
#~ fo.write(str(training_one))
#~ print(str(training_one[0]))
settings = {
# Required settings
"cost_function" : sum_squared_error,
"n_inputs" : 65536, # Number of network input signals
"layers" : [ (20, tanh_function), (1, sigmoid_function) ],
# [ (number_of_neurons, activation_function) ]
# The last pair in you list describes the number of output signals
# Optional settings
"weights_low" : -0.1, # Lower bound on initial weight range
"weights_high" : 0.1, # Upper bound on initial weight range
"save_trained_network" : True, # Whether to write the trained weights to disk
"input_layer_dropout" : 0.0, # dropout fraction of the input layer
"hidden_layer_dropout" : 0.0, # dropout fraction in all hidden layers
}
# initialize the neural network
network = NeuralNet( settings )
# load a stored network configuration
# network = NeuralNet.load_from_file( "trained_configuration.pkl" )
# Train the network using backpropagation
backpropagation(
network,
training_one, # specify the training set
ERROR_LIMIT = 0.001, # define an acceptable error limit
#max_iterations = 100, # continues until the error limit is reach if this argument is skipped
# optional parameters
learning_rate = 0.03, # learning rate
momentum_factor = 0.4, # momentum
)
# Train the network using SciPy
scipyoptimize(
network,
training_one,
method = "Newton-CG",
ERROR_LIMIT = 1e-4
)
# Train the network using Scaled Conjugate Gradient
scaled_conjugate_gradient(
network,
training_one,
ERROR_LIMIT = 1e-4
)
# Train the network using resilient backpropagation
resilient_backpropagation(
network,
training_one, # specify the training set
ERROR_LIMIT = 1e-3, # define an acceptable error limit
#max_iterations = (), # continues until the error limit is reach if this argument is skipped
# optional parameters
weight_step_max = 50.,
weight_step_min = 0.,
start_step = 0.5,
learn_max = 1.2,
learn_min = 0.5
)
network.print_test( lst)
avg_line_length 33.685185 | max_line_length 131 | alphanum_fraction 0.619571

hexsha 68ede18589b502388c6b0502fe1ae33acaff92e2 | size 2,657 | ext py | lang Python
max_stars:  datasets/smart-crop.py @ akpun/writer-identification (head 959c438ddd4cdb19976e0197a78d4f6b616030f5) | licenses ["MIT"] | count 3 | 2020-09-16T01:59:46.000Z → 2021-08-12T08:46:14.000Z
max_issues: datasets/smart-crop.py @ ZenPro100/writer-identification (same head) | licenses ["MIT"] | count null | event datetimes null
max_forks:  datasets/smart-crop.py @ ZenPro100/writer-identification (same head) | licenses ["MIT"] | count 1 | 2021-02-28T16:07:33.000Z → 2021-02-28T16:07:33.000Z
content:
import os
import sys
from os.path import exists
import cv2
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
path = sys.argv[1]
new_path = sys.argv[2]
dataset = sys.argv[3]
def smart_crop(img):
# (1) Convert to gray, and threshold
s = img.shape
if len(s) > 2:
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
else:
gray = img
th, threshed = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)
# (2) Morph-op to remove noise
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (200, 200))
morphed = cv2.morphologyEx(threshed, cv2.MORPH_CROSS, kernel)
# (3) Find the max-area contour
cnts = cv2.findContours(morphed, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
cnt = sorted(cnts, key=cv2.contourArea)[-1]
# (4) Crop and save it
x, y, w, h = cv2.boundingRect(cnt)
dst = img[y:y + h, x:x + w]
return dst
def crop_iam(image):
image = cv2.imread(image, cv2.IMREAD_UNCHANGED)
result = image.copy()
# gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
thresh = cv2.threshold(image, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)[1]
# Get horizontal lines
horizontal_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (200, 1))
remove_horizontal = cv2.morphologyEx(thresh, cv2.MORPH_OPEN, horizontal_kernel, iterations=2)
cnts = cv2.findContours(remove_horizontal, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
# get coords of lines
if len(cnts) > 3:
_, y1, _, _ = cv2.boundingRect(cnts[2])
_, y2, _, _ = cv2.boundingRect(cnts[0])
else:
_, y1, _, _ = cv2.boundingRect(cnts[1])
_, y2, _, _ = cv2.boundingRect(cnts[0])
# remove lines
cv2.drawContours(result, cnts, -1, (255, 255, 255), 5)
# crop
result = result[y1:y2, 50:2400]
return result
def crop_firemaker(image):
    crop_img = cv2.imread(image, cv2.IMREAD_UNCHANGED)
    crop_img = crop_img[700:2800, :].copy()  # keep the fixed vertical band of the scan
    return crop_img
if __name__ == '__main__':
if not exists(new_path):
os.makedirs(new_path)
print("Started.")
for subdir, dirs, files in os.walk(path):
for image in tqdm(files):
name = os.path.join(subdir, image)
if dataset == 'firemaker':
crop_img = crop_firemaker(name)
elif dataset == 'iam':
crop_img = crop_iam(name)
else:
                crop_img = cv2.imread(name, cv2.IMREAD_UNCHANGED)
img = smart_crop(crop_img)
cv2.imwrite(os.path.join(new_path, image), img)
print("Finished")
avg_line_length 30.54023 | max_line_length 97 | alphanum_fraction 0.637185

hexsha dddfe496017440f105c0bcac6154748b9305ed6f | size 20 | ext py | lang Python
max_stars:  utils/exporters/blender/modules/msgpack/_version.py @ wenluzhizhi/threeEx (head 82b1795f9f73bb47fd3c49befc6606944f79d639) | licenses ["MIT"] | count 88 | 2015-01-09T02:21:24.000Z → 2021-11-11T02:54:08.000Z
max_issues: utils/exporters/blender/modules/msgpack/_version.py @ superguigui/three.js (head c18be1eca38a1f3c779e8dcb168edf06ee9441ad) | licenses ["MIT"] | count 1,054 | 2015-09-11T06:51:27.000Z → 2022-03-30T09:46:54.000Z
max_forks:  utils/exporters/blender/modules/msgpack/_version.py @ superguigui/three.js (same head) | licenses ["MIT"] | count 80 | 2015-12-09T12:41:52.000Z → 2022-02-16T11:46:42.000Z
content:
version = (0, 4, 2)
avg_line_length 10 | max_line_length 19 | alphanum_fraction 0.5

hexsha 330b76423b65b11a974c32faa840bdccef99ae87 | size 1,198 | ext py | lang Python
max_stars:  django/contrib/admindocs/middleware.py @ fizista/django (head 16f3a6a4c7bab11644d11c2be029374e5095cb56) | licenses ["BSD-3-Clause"] | count 2 | 2016-09-27T09:30:19.000Z → 2016-10-17T01:47:43.000Z
max_issues: env/lib/python2.7/site-packages/django/contrib/admindocs/middleware.py @ luiscarlosgph/nas (head e5acee61e8bbf12c34785fe971ce7df8dee775d4) | licenses ["MIT"] | count 10 | 2019-12-26T17:31:31.000Z → 2022-03-21T22:17:33.000Z
max_forks:  env/lib/python2.7/site-packages/django/contrib/admindocs/middleware.py @ luiscarlosgph/nas (same head) | licenses ["MIT"] | count 1 | 2020-05-25T08:55:19.000Z → 2020-05-25T08:55:19.000Z
content:
from django.conf import settings
from django import http
class XViewMiddleware(object):
"""
Adds an X-View header to internal HEAD requests -- used by the documentation system.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
"""
If the request method is HEAD and either the IP is internal or the
user is a logged-in staff member, quickly return with an x-header
indicating the view function. This is used by the documentation module
        to look up the view function for an arbitrary page.
"""
assert hasattr(request, 'user'), (
"The XView middleware requires authentication middleware to be "
"installed. Edit your MIDDLEWARE_CLASSES setting to insert "
"'django.contrib.auth.middleware.AuthenticationMiddleware'.")
if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or
(request.user.is_active and request.user.is_staff)):
response = http.HttpResponse()
response['X-View'] = "%s.%s" % (view_func.__module__, view_func.__name__)
return response
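# Illustrative check (an assumption about deployment, not part of the module):
# with the middleware enabled and the client address listed in INTERNAL_IPS,
# a HEAD request exposes which view served a URL, e.g.:
#
#   import urllib2
#   req = urllib2.Request('http://localhost:8000/')
#   req.get_method = lambda: 'HEAD'
#   print urllib2.urlopen(req).info().getheader('X-View')  # e.g. myapp.views.index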
avg_line_length 47.92 | max_line_length 100 | alphanum_fraction 0.652755

hexsha a1f75e536b0c8c9018a2be7251932840f2b6f221 | size 10,443 | ext py | lang Python
max_stars / max_issues / max_forks: datastage/admin/interactive.py @ mkozuharov/DataStage (head a61e67c901c2b43afa0fec6a99ef72152391a55f) | licenses ["MIT"] | counts null | event datetimes null
content:
# ---------------------------------------------------------------------
#
# Copyright (c) 2012 University of Oxford
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# ---------------------------------------------------------------------
import grp
import os
import pwd
import re
import socket
import struct
import subprocess
import sys
import libmount
from datastage.config import settings
from .menu_util import interactive, menu
from .util import check_pid
from .projects import projects_menu
from .users import users_menu
from .samba_config import SambaConfigurer
def get_ips():
addrs = (re.findall(r"addr: ?([\d:.a-f]+)", subprocess.check_output('/sbin/ifconfig')))
# Drop link-local addresses for IPv6, as they're uninteresting
addrs = set(addr for addr in addrs if not addr.startswith('fe80:'))
return addrs
def from_hex(value):
return ''.join(chr(int(value[i:i + 2], 16)) for i in range(0, len(value), 2))
def parse_addr(addr):
if all(c == '0' for c in addr):
return None # Listening on all interfaces
if len(addr) == 8:
return '.'.join(reversed([str(int(addr[i:i + 2], 16)) for i in range(0, 8, 2)]))
else:
# Turn it into a binary string
addr = from_hex(addr)
# Use the magic of byte-ordering to move the bytes around, producing a hex string
addr = ''.join(map('%08X'.__mod__, struct.unpack('<' + 'l' * 4, addr)))
# Create a list of parts of the address, e.g ['2001', 'a02', ...]
addr = [addr[i:i + 4].lstrip('0') or '0' for i in range(0, len(addr), 4)]
try:
longest_zero_run = max((j - i, i, j) for i in range(0, len(addr)) for j in range(i + 1, len(addr) + 1) if
all(x == '0' for x in addr[i:j]))
start, end = longest_zero_run[1:3]
return ':'.join(addr[:start]) + '::' + ':'.join(addr[end:])
except ValueError:
# No zeros in this address
return ':'.join(addr)
def parse_port(port):
return struct.unpack('>H', from_hex(port))[0]
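# Worked examples for the two decoders above (illustrative addition): /proc/net/tcp
# stores IPv4 addresses as little-endian hex and ports as big-endian hex, so
# parse_addr('0100007F') returns '127.0.0.1' and parse_port('1F90') returns 8080.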
def get_all_listening():
listening = []
for proto in ('tcp', 'udp', 'tcp6', 'udp6'):
with open(os.path.join('/proc/net/', proto)) as f:
sockets = [l.split()[1:3] for l in list(f)[1:]]
for socket in sockets:
(local_addr, local_port), (remote_addr, remote_port) = [x.split(':') for x in socket]
local_addr, remote_addr = map(parse_addr, (local_addr, remote_addr))
local_port, remote_port = map(parse_port, (local_port, remote_port))
if remote_addr is not None:
continue # This isn't a listening socket
listening.append((proto, local_addr, local_port))
return listening
def check_port_listening(addrs, port):
available_at = set()
for addr in addrs:
# Choose our IPv6 or IPv4 socket by inferring the address type.
proto = socket.AF_INET6 if ':' in addr else socket.AF_INET
s = socket.socket(proto, socket.SOCK_STREAM)
        s.settimeout(3)  # fail fast on unreachable addresses instead of blocking
try:
s.connect((addr, port))
s.shutdown(2)
available_at.add(addr)
except Exception:
pass
return available_at
def firewall_menu():
print "Hello!"
yield
def samba_menu():
print "SAMBA configuration"
yield
def config_menu():
def service_check(label, check_port, pid_filenames, service_name, firewall_ports):
actions = {}
listening_on = set()
for proto, addr, port in listening:
if port == check_port and proto.startswith('tcp'):
if addr is None:
listening_on |= ips
else:
listening_on.add(addr)
available_at = check_port_listening(listening_on, check_port)
print
pid = check_pid(*pid_filenames)
if pid:
print "%10s: Status: \033[92mRunning\033[0m (%d)" % (label, pid)
print " Listening on: %s" % ', '.join(sorted(listening_on))
print " Available at: %s" % ', '.join(sorted(available_at))
if listening_on != available_at:
print " Warning: Not available on all interfaces."
print " \033[95mAction: Type '%s' to tweak the firewall\033[0m" % service_name
actions[service_name] = update_firewall_service(*firewall_ports)
else:
print "%10s: Status: \033[91mNot running\033[0m" % label
print " \033[95mAction: Type '%s' to start %s\033[0m" % (service_name, label)
actions[service_name] = enable_service(service_name, label)
return actions
while True:
actions = {'refresh': lambda: None}
listening = get_all_listening()
ips = get_ips()
print
print "Status of some services"
actions.update(service_check('DataStage', settings.get('server:port'),
['/var/run/datastage.pid'],
'datastage', ['']))
actions.update(service_check('SSH', 22,
['/var/run/sshd.pid'],
'sshd', ['ssh/tcp']))
actions.update(service_check('Apache', 80,
['/var/run/apache2.pid', '/var/run/httpd/httpd.pid'],
'apache2', ['www/tcp']))
if os.path.exists('/etc/apache2/sites-enabled/000-default'):
print " Warning: Default site exists at /etc/apache2/sites-enabled/000-default"
print " \033[95mAction: Type 'defaultsite' to remove it and restart Apache\033[0m"
actions['defaultsite'] = remove_default_apache_site()
actions.update(service_check('Samba', 445,
['/var/run/samba/smbd.pid'],
'samba', ['netbios-ns/udp', 'netbios-dgm/udp',
'netbios-ssn/tcp', 'microsoft-ds/tcp']))
if SambaConfigurer.needs_configuring():
print " Warning: Samba is not configured to serve DataStage files"
print " \033[95mAction: Type 'confsamba' to configure and restart Samba\033[0m"
actions['confsamba'] = SambaConfigurer()
if FilesystemAttributes.needs_configuring():
print " Warning: The filesystem frpm which DataStage will serve data is missing mount options "
print " \033[95mAction: Type 'fs' to ensure the filesystem is mounted with acl and user_xattr options\033[0m"
actions['fs'] = FilesystemAttributes()
yield menu(actions)
def enable_service(name, label):
def f():
print "Enabling %s..." % label
subprocess.call(["service", name, "start"])
subprocess.call(["chkconfig", name, "on"])
print "%s enabled." % label
return f
def update_firewall_service(*names):
def f():
print "Tweaking the firewall"
for name in names:
subprocess.call(["/usr/sbin/ufw", "allow", name])
subprocess.call(["/usr/sbin/ufw", "enable"])
print "Tweaking complete"
return f
def remove_default_apache_site():
def f():
print "Removing default apache site and restarting apache"
os.unlink('/etc/apache2/sites-enabled/000-default')
subprocess.call(["service", "apache2", "restart"])
print "Done"
return f
class FilesystemAttributes(object):
OPTIONS = frozenset(['user_xattr', 'acl'])
@classmethod
def get_filesystem(cls):
return libmount.get_current_mounts().find_fs_containing(settings.DATA_DIRECTORY)
@classmethod
def needs_configuring(cls):
fs = cls.get_filesystem()
return not ('user_xattr' in fs.options and 'acl' in fs.options)
def __call__(self):
#print "Updating /etc/fstab"
#with libmount.FilesystemTable() as fstab:
# fs = fstab.find_fs_containing(settings.DATA_DIRECTORY)
# fs.options |= self.OPTIONS
# fstab.save()
print "Remounting the filesystem with the necessary options"
fs = self.get_filesystem()
options = fs.options | frozenset(['remount']) | self.OPTIONS
subprocess.call(['mount', fs.target, '-o', ','.join(options)])
print "Filesystem configuration done."
def main_menu():
print "Welcome to the interactive DataStage set-up system."
print "==================================================="
if os.getuid() != 0:
print "This utility must be run as root."
sys.exit(1)
while True:
print "========="
print "Main Menu"
print "========="
print "Using the config(c) option start/ensure that the system services are running to be able to use the datastage system."
print "Select users(u) to add/remove the datastage users."
print "Select projects(p) to add/remove research projects/groups."
yield menu({'config': config_menu,
'users': users_menu,
'projects': projects_menu})
def main():
interactive(main_menu())
if __name__ == '__main__':
main()
avg_line_length 37.564748 | max_line_length 139 | alphanum_fraction 0.583166

hexsha ee8efbd0714b4c5621af06add7bb465c31589944 | size 1,878 | ext py | lang Python
max_stars / max_issues / max_forks: tests/test_pass1.py @ hongquan/python-rapidjson (head bc0cd406d4fecde332467ace06cf1aca1947378a) | licenses ["MIT"] | counts null | event datetimes null
content:
# -*- coding: utf-8 -*-
# :Project: python-rapidjson -- Compliance tests
# :Author: John Anderson <sontek@gmail.com>
# :License: MIT License
# :Copyright: © 2015 John Anderson
# :Copyright: © 2017 Lele Gaifax
#
import pytest
# from http://json.org/JSON_checker/test/pass1.json
JSON = r'''
[
"JSON Test Pattern pass1",
{"object with 1 member":["array with 1 element"]},
{},
[],
-42,
true,
false,
null,
{
"integer": 1234567890,
"real": -9876.543210,
"e": 0.123456789e-12,
"E": 1.234567890E+34,
"": 23456789012E66,
"zero": 0,
"one": 1,
"space": " ",
"quote": "\"",
"backslash": "\\",
"controls": "\b\f\n\r\t",
"slash": "/ & \/",
"alpha": "abcdefghijklmnopqrstuvwyz",
"ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
"digit": "0123456789",
"special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?",
"hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
"true": true,
"false": false,
"null": null,
"array":[ ],
"object":{ },
"address": "50 St. James Street",
"url": "http://www.JSON.org/",
"comment": "// /* <!-- --",
"# -- --> */": " ",
" s p a c e d " :[1,2 , 3
,
4 , 5 , 6 ,7 ],"compact": [1,2,3,4,5,6,7],
"jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
"quotes": "" \u0022 %22 0x22 034 "",
"\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
: "A key can be any string"
},
0.5 ,98.6
,
99.44
,
1066,
1e1,
0.1e1,
1e-1,
1e00,2e+00,2e-00
,"rosebud"]
'''
@pytest.mark.unit
def test_parse(dumps, loads):
# test in/out equivalence and parsing
res = loads(JSON)
out = dumps(res)
assert res == loads(out)
avg_line_length 23.772152 | max_line_length 93 | alphanum_fraction 0.472311

hexsha 8f4faa9949db6e2466f7fb85e463b13ecc1419b4 | size 367 | ext py | lang Python
max_stars / max_issues / max_forks: manage.py @ changawa-antony/pitches-one-min (head 36dc274e6ae893e460f428297472ab829dc17102) | licenses ["MIT"] | counts null | event datetimes null
content:
from app import create_app, db
from flask_script import Manager, Server
from app.models import User, Role

# Creating app instance
app = create_app('development')
manager = Manager(app)
manager.add_command('server', Server)

@manager.shell
def make_shell_context():
    return dict(app=app, db=db, User=User, Role=Role)
if __name__ == '__main__':
manager.run()
avg_line_length 21.588235 | max_line_length 57 | alphanum_fraction 0.746594

hexsha 7843bdbd186f79caa123b3c633628a3c5d2a4133 | size 7,913 | ext py | lang Python
max_stars:  test/functional/wallet_hd.py @ AgenorCore/Agenor (head c022ba2f2d29cea98a6966205f881389707b558b) | licenses ["MIT"] | count 46 | 2021-04-11T20:15:51.000Z → 2021-06-02T16:13:11.000Z
max_issues: test/functional/wallet_hd.py @ criptorob/Agenor (head 38aa56e3b1cb75911bbb3fe63f4dab8fd243a85a) | licenses ["MIT"] | count 5 | 2021-04-24T13:08:45.000Z → 2021-11-24T14:28:55.000Z
max_forks:  test/functional/wallet_hd.py @ criptorob/Agenor (same head) | licenses ["MIT"] | count 9 | 2021-04-12T12:28:34.000Z → 2021-05-14T14:45:19.000Z
content:
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
import os
import shutil
from test_framework.test_framework import AgenorCoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
assert_raises_rpc_error
)
class WalletHDTest(AgenorCoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [[], ['-keypool=0']]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Make sure we use hd
if '-legacywallet' in self.nodes[0].extra_args:
self.log.info("Exiting HD test for non-HD wallets")
return
# keep masterkeyid
masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert_equal(len(masterkeyid), 40)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV= self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/44'/119'/0'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
#self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))
# Derive some HD addresses and remember the last
# Also send funds to each add
self.nodes[0].generate(101)
hd_add = None
NUM_HD_ADDS = 10
for i in range(NUM_HD_ADDS):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].getaddressinfo(hd_add)
assert_equal(hd_info["hdkeypath"], "m/44'/119'/0'/0'/"+str(i)+"'")
assert_equal(hd_info["hdseedid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV= self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/44'/119'/0'/1'/1'") #second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
# we need to delete the complete regtest directory
        # otherwise node1 would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallet.dat"))
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
for i in range(NUM_HD_ADDS):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
assert_equal(hd_info_2["hdkeypath"], "m/44'/119'/0'/0'/"+str(i)+"'")
assert_equal(hd_info_2["hdseedid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
connect_nodes(self.nodes[0], 1)
self.sync_all()
# Needs rescan
self.stop_node(1)
self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Try a RPC based rescan
self.stop_node(1)
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallet.dat"))
self.start_node(1, extra_args=self.extra_args[1])
connect_nodes(self.nodes[0], 1)
self.sync_all()
# Wallet automatically scans blocks older than key on startup
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
""" todo: Implement rescanblockchain
out = self.nodes[1].rescanblockchain(0, 1)
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], 1)
out = self.nodes[1].rescanblockchain()
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], self.nodes[1].getblockcount())
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
"""
# send a tx and make sure its using the internal chain for the changeoutput
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
keypath = ""
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:16], "m/44'/119'/0'/1'")
# Generate a new HD seed on node 1 and make sure it is set
orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
self.nodes[1].sethdseed()
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/44\'/119\'/0\'/0\'/0\'') # Make sure the new address is the first from the keypool
self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
# Set a new HD seed on node 1 without flushing the keypool
new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
orig_masterkeyid = new_masterkeyid
self.nodes[1].sethdseed(False, new_seed)
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/44\'/119\'/0\'/0\'/1\'') # Make sure the new address continues previous keypool
# Check that the next address is from the new seed
self.nodes[1].keypoolrefill(1)
next_addr = self.nodes[1].getnewaddress()
assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/44\'/119\'/0\'/0\'/0\'') # Make sure the new address is not from previous keypool
assert next_addr != addr
# Sethdseed parameter validity
assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
if __name__ == '__main__':
    WalletHDTest().main()
avg_line_length 47.957576 | max_line_length 159 | alphanum_fraction 0.654493

hexsha 14d61518c55b1bbbe3003a34e163df2df5c4e634 | size 1,347 | ext py | lang Python
max_stars:  myblog/blog/views.py @ goodwell42/DjangoTest (head ec2c302a1317c2bfc317d74c736d45a3235be434) | licenses ["MIT"] | count 1 | 2019-06-26T11:24:28.000Z → 2019-06-26T11:24:28.000Z
max_issues / max_forks: same path/repo/head | licenses ["MIT"] | counts null | event datetimes null
content:
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.http import HttpResponse
# Create your views here.
from . import models
def index(request):
articles = models.Article.objects.all()
return render(request, 'blog/index.html', {'articles': articles})
def article_page(request, article_id):
article = models.Article.objects.get(pk=article_id)
return render(request, 'blog/article_page.html', {'article': article})
def edit_page(request, article_id):
if str(article_id) == '0':
return render(request, 'blog/edit_page.html')
article = models.Article.objects.get(pk=article_id)
return render(request, 'blog/edit_page.html', {'article': article})
def edit_action(request):
title = request.POST.get('title', 'TITLE')
content = request.POST.get('content', 'CONTENT')
article_id = request.POST.get('article_id', '0')
if article_id == '0':
models.Article.objects.create(title=title, content=content)
# articles = models.Article.objects.all()
# return render(request, "blog/index.html", {'articles': articles})
return HttpResponseRedirect('/blog/')
article = models.Article.objects.get(pk=article_id)
article.title = title
article.content = content
article.save()
return HttpResponseRedirect('/blog/')
avg_line_length 32.071429 | max_line_length 75 | alphanum_fraction 0.701559

hexsha 8dca06668c6e0a8c1be15a1ebc7bbf7e5f720310 | size 298 | ext py | lang Python
max_stars:  python/WorksharingSession.Duration.py @ BIMpraxis/Journalysis (head af0c042b28d01ba5e44dafc2bbe9556434e897b8) | licenses ["MIT"] | count 26 | 2017-11-23T19:30:03.000Z → 2022-02-09T10:35:10.000Z
max_issues: same path/repo/head | licenses ["MIT"] | count 51 | 2017-11-16T15:02:32.000Z → 2022-03-01T13:51:58.000Z
max_forks:  same path/repo/head | licenses ["MIT"] | count 9 | 2017-11-20T09:20:01.000Z → 2021-09-15T13:08:30.000Z
content:
import clr
def process_input(func, input):
if isinstance(input, list): return [func(x) for x in input]
else: return func(input)
def WSSessionDuration(wssess):
if wssess.__repr__() == 'WorksharingSession': return wssess.Duration
else: return None
OUT = process_input(WSSessionDuration,IN[0])
avg_line_length 27.090909 | max_line_length 69 | alphanum_fraction 0.758389

hexsha 486605d894e7ad238b696df2ed380f62d04cf338 | size 4,732 | ext py | lang Python
max_stars:  tests/tests/correctness/EPLAnalytics/Flow_Manipulation/Slicer/Slicer_cor_006/run.py @ rpeach-sag/apama-industry-analytics-kit (head a3f6039915501d41251b6f7ec41b0cb8111baf7b) | licenses ["Apache-2.0"] | count 3 | 2019-09-02T18:21:22.000Z → 2020-04-17T16:34:57.000Z
max_issues / max_forks: same path/repo/head | licenses ["Apache-2.0"] | counts null | event datetimes null
content:
# $Copyright (c) 2015 Software AG, Darmstadt, Germany and/or Software AG USA Inc., Reston, VA, USA, and/or Terracotta Inc., San Francisco, CA, USA, and/or Software AG (Canada) Inc., Cambridge, Ontario, Canada, and/or, Software AG (UK) Ltd., Derby, United Kingdom, and/or Software A.G. (Israel) Ltd., Or-Yehuda, Israel and/or their licensors.$
# Use, reproduction, transfer, publication or disclosure is prohibited except as specifically provided for in your License Agreement with Software AG
from industry.framework.AnalyticsBaseTest import AnalyticsBaseTest
from pysys.constants import *
class PySysTest(AnalyticsBaseTest):
def execute(self):
# Start the correlator
correlator = self.startTest(inputLog="input.log")
self.injectAnalytic(correlator)
self.injectSlicer(correlator)
self.ready(correlator)
correlator.receive(filename='Output.evt', channels=['Output'])
correlator.send('Config.evt')
self.waitForSignal('correlator.out',
expr='Analytic Slicer started for inputDataNames',
condition='==1',
timeout=5)
correlator.send('Events.evt')
self.waitForSignal('input.log', expr='"Input",com.industry.analytics\.Data', condition='==2', timeout=15)
self.waitForSignal('Output.evt', expr='com.industry.analytics.Data.*', condition='==20', timeout=15)
def validate(self):
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",2,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",3,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",4,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",5,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",5,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",6,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",6,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",7,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",7,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",8,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",8,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",9,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",9,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",10,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",10,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s1",11,1.5,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",11,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",12,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",13,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.assertGrep('Output.evt', expr='com.industry.analytics.Data.*"Output","c","s2",14,2.55,"",1,2,3,{"sliceType":"slicecount","sliceValue":"10","timeInterval":"1"}')
self.checkSanity()
avg_line_length 100.680851 | max_line_length 343 | alphanum_fraction 0.679417

hexsha c4e2260a2c45542cb26e48363ac3c71df6cf0292 | size 2,321 | ext py | lang Python
max_stars:  ut_plm.py @ interstar/python-lookin-mel (head 2cd648f4311b5dbd6b31130eb69b39acd4580ddc) | licenses ["CNRI-Python"] | count 1 | 2019-10-14T21:55:35.000Z → 2019-10-14T21:55:35.000Z
max_issues / max_forks: same path/repo/head | licenses ["CNRI-Python"] | counts null | event datetimes null
content:
from plm import *
import unittest
class TestPLM(unittest.TestCase) :
def testOuterStruct(self):
data = """
html:
head/
body:
hello"""
desired = """<!DOCTYPE html>
<html>
<head/>
<body>
hello
</body>
</html>
"""
self.assertEquals(comp(data)[0],desired)
def testTitle(self):
data = """
html:
head:
title:This is a title"""
desired = """<!DOCTYPE html>
<html>
<head>
<title>This is a title</title>
</head>
</html>
"""
self.assertEquals(comp(data)[0],desired)
def testMeta(self) :
data = """
html:
head:
meta(charset="utf-8")/
body/
"""
desired = """<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
</head>
<body/>
</html>
"""
self.assertEquals(comp(data)[0],desired)
def testStyle(self) :
data = """
html:
head:
stylesheet:mystyles.css
importscript:libs/jquery.min.js
"""
desired = """<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" href="mystyles.css">
<script src="libs/jquery.min.js"></script>
</head>
</html>
"""
self.assertEquals(comp(data)[0],desired)
def testAll(self) :
data = """
html:
head:
title:This is a title
stylesheet:mystyles.css
importscript:libs/jquery.min.js
body:
div(#menu, .mainmenu):
menu
div(#content, .first, .big):
main
p:
img(width=533):http://pictures.com/img1
div(#footer, x=y):
p:
copyright
"""
desired = """<!DOCTYPE html>
<html>
<head>
<title>This is a title</title>
<link rel="stylesheet" href="mystyles.css">
<script src="libs/jquery.min.js"></script>
</head>
<body>
<div id="menu" class="mainmenu" >
menu
</div>
<div id="content" class="first big" >
main
<p>
<img width="533" src="http://pictures.com/img1"/>
</p>
</div>
<div x="y" id="footer" >
<p>
copyright
</p>
</div>
</body>
</html>
"""
self.assertEquals(comp(data)[0],desired)
if __name__ == "__main__" :
unittest.main()
avg_line_length 18.717742 | max_line_length 57 | alphanum_fraction 0.491168

hexsha 8133f71385edafcc9090887e1da40aecf31b84aa | size 3,729 | ext py | lang Python
max_stars / max_issues / max_forks: test/data/telemac2d/confluence/vnv_confluence.py @ meracan/meracan-api (head aff04f3d9d0dce46fe0b8ce89394ec22823a0ea4) | licenses ["MIT"] | counts null | event datetimes null
content:
"""
Validation script for confluence
"""
from vvytel.vnv_study import AbstractVnvStudy
from execution.telemac_cas import TelemacCas, get_dico
class VnvStudy(AbstractVnvStudy):
"""
Class for validation
"""
def _init(self):
"""
Defines the general parameter
"""
self.rank = 4
self.tags = ['telemac2d']
def _pre(self):
"""
Defining the studies
"""
# confluence scalar mode
self.add_study('vnv_seq',
'telemac2d',
't2d_confluence.cas')
# confluence parallel mode
cas = TelemacCas('t2d_confluence.cas', get_dico('telemac2d'))
cas.set('PARALLEL PROCESSORS', 4)
self.add_study('vnv_par',
'telemac2d',
't2d_confluence_par.cas',
cas=cas)
del cas
def _check_results(self):
"""
Post-treatment processes
"""
# Comparison with the last time frame of the reference file.
self.check_epsilons('vnv_seq:T2DRES',
'f2d_confluence.slf',
eps=[1.e-6])
# Comparison with the last time frame of the reference file.
self.check_epsilons('vnv_par:T2DRES',
'f2d_confluence.slf',
eps=[1.e-6])
# Comparison between sequential and parallel run.
self.check_epsilons('vnv_seq:T2DRES',
'vnv_par:T2DRES',
eps=[1.e-6])
def _post(self):
"""
Post-treatment processes
"""
from postel.plot_vnv import vnv_plot2d
# Getting files
res_vnv_seq_t2dgeo, _ = self.get_study_res('vnv_seq:T2DGEO', load_bnd=True)
res_vnv_seq_t2dres, _ = self.get_study_res('vnv_seq:T2DRES')
#Plotting mesh
vnv_plot2d('',
res_vnv_seq_t2dgeo,
plot_mesh=True,
fig_size=(9, 3),
fig_name='img/mesh',
annotate_bnd=True)
#Plotting mesh
vnv_plot2d('',
res_vnv_seq_t2dgeo,
xlim=[-6.5, -4.0],
ylim=[0., 1.1],
plot_mesh=True,
fig_size=(9, 3),
fig_name='img/mesh2')
# Plotting BOTTOM at 0
vnv_plot2d('BOTTOM',
res_vnv_seq_t2dres,
record=0,
filled_contours=True,
fig_size=(9, 3),
fig_name='img/bathy')
# Plotting WATER DEPTH at -1
vnv_plot2d('WATER DEPTH',
res_vnv_seq_t2dres,
record=-1,
filled_contours=True,
fig_size=(9, 3),
fig_name='img/waterDepth')
# Plotting VELOCITY at -1
vnv_plot2d('VELOCITY',
res_vnv_seq_t2dres,
record=-1,
filled_contours=True,
fig_size=(9, 3),
fig_name='img/velocity')
# Plotting VELOCITY at -1
vnv_plot2d('VELOCITY',
res_vnv_seq_t2dres,
xlim=[-6.5, -4.0],
ylim=[0., 1.1],
record=-1,
filled_contours=True,
grid_resolution=[50, 50],
streamlines=True,
streamlines_density=1.2,
fig_size=(9, 3),
fig_name='img/velocity_strm')
# Closing files
res_vnv_seq_t2dgeo.close()
res_vnv_seq_t2dres.close()
avg_line_length 28.25 | max_line_length 83 | alphanum_fraction 0.474122

hexsha 0f57a1a0c6fed2d8b8fb469ee36ed3a0eb531fcf | size 3,079 | ext py | lang Python
max_stars:  supports/pyload/src/pyload/plugins/addons/ExpertDecoders.py @ LuckyNicky/pycrawler (head 4b3fe2f6e8e51f236d95a64a89a44199e4e97743) | licenses ["Apache-2.0"] | count 1 | 2020-04-02T17:03:39.000Z → 2020-04-02T17:03:39.000Z
max_issues / max_forks: same path/repo/head | licenses ["Apache-2.0"] | counts null | event datetimes null
content:
# -*- coding: utf-8 -*-
import base64
import uuid
import pycurl
from ...core.network.http.exceptions import BadHeader
from ...core.network.request_factory import get_request
from ..base.addon import BaseAddon, threaded
class ExpertDecoders(BaseAddon):
__name__ = "ExpertDecoders"
__type__ = "addon"
__version__ = "0.12"
__status__ = "testing"
__pyload_version__ = "0.5"
__config__ = [
("enabled", "bool", "Activated", False),
("passkey", "password", "Access key", ""),
("check_client", "bool", "Don't use if client is connected", True),
]
__description__ = """Send captchas to expertdecoders.com"""
__license__ = "GPLv3"
__authors__ = [("RaNaN", "RaNaN@pyload.net"), ("zoidberg", "zoidberg@mujmail.cz")]
API_URL = "http://www.fasttypers.org/imagepost.ashx"
def get_credits(self):
res = self.load(
self.API_URL, post={"key": self.config.get("passkey"), "action": "balance"}
)
if res.isdigit():
self.log_info(self._("{} credits left").format(res))
self.info["credits"] = credits = int(res)
return credits
else:
self.log_error(res)
return 0
@threaded
def _process_captcha(self, task):
task.data["ticket"] = ticket = uuid.uuid4()
result = None
with open(task.captcha_params["file"], mode="rb") as fp:
data = fp.read()
with get_request() as req:
#: Raise timeout threshold
req.c.setopt(pycurl.LOW_SPEED_TIME, 80)
result = self.load(
self.API_URL,
post={
"action": "upload",
"key": self.config.get("passkey"),
"file": base64.b64encode(data),
"gen_task_id": ticket,
},
req=req,
)
self.log_debug(f"Result {ticket}: {result}")
task.set_result(result)
def captcha_task(self, task):
if not task.is_textual():
return False
if not self.config.get("passkey"):
return False
if self.pyload.is_client_connected() and self.config.get("check_client"):
return False
if self.get_credits() > 0:
task.handler.append(self)
task.set_waiting(100)
self._process_captcha(task)
else:
self.log_info(self._("Your ExpertDecoders Account has not enough credits"))
def captcha_invalid(self, task):
if "ticket" in task.data:
try:
res = self.load(
self.API_URL,
post={
"action": "refund",
"key": self.config.get("passkey"),
"gen_task_id": task.data["ticket"],
},
)
self.log_info(self._("Request refund"), res)
except BadHeader as exc:
self.log_error(self._("Could not send refund request"), exc)
avg_line_length 29.32381 | max_line_length 87 | alphanum_fraction 0.53264

hexsha 3554fc320a0e8ac9c889c5c28bbc0a3ea0065677 | size 1,571 | ext py | lang Python
max_stars / max_issues / max_forks: leap_year_finder.py @ Ronlin1/leap_year_finder (head baba75ec69fef40d5564276f222b95cb6edbd2c3) | licenses ["MIT"] | counts null | event datetimes null
content:
# Leap Year Finder in python
# TODO Upgrade this using Tkinter GUI in future
# Example of leap years from Google 2000, 2004, 2008, 2012,
# 2016, 2020, 2024, 2028, 2032, 2036, 2040, 2044, 2096, 2104 etc
# Over time, these extra 44+ minutes would also cause the seasons
# to drift in our calendar. For this reason, not every four years is a leap year.
# The rule is that if the year is divisible by 100 and not divisible by 400,
# leap year is skipped.
def leap_year():
"""
This functions seeks to return a leap year after user input << integer(4).
Rules for a leap year:
As you surely know, due to some astronomical reasons, years may be leap or common.
The former are 366 days long, while the latter are 365 days long.
Since the introduction of the Gregorian calendar (in 1582), the following rule is used to determine the kind of year:
-->if the year number isn't divisible by four, it's a common year;
-->otherwise, if the year number isn't divisible by 100, it's a leap year;
-->otherwise, if the year number isn't divisible by 400, it's a common year;
-->otherwise, it's a leap year.
:return: Year --> Integer
"""
year = int(input("Enter a year: "))
mess_1 = 'It\'s a common year!'
mess_2 = 'It\'s a leap year!'
if year <= 1582:
return f'{year} does not fall under Gregorian Calendar!!'
elif year % 4 != 0:
return mess_1
elif year % 100 != 0:
return mess_2
elif year % 400 != 0:
return mess_1
else:
return mess_2
print(leap_year())
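# Worked examples of the rule implemented above: 2024 -> leap (divisible by 4,
# not by 100); 1900 -> common (divisible by 100 but not by 400); 2000 -> leap
# (divisible by 400); 2023 -> common (not divisible by 4).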
avg_line_length 34.911111 | max_line_length 121 | alphanum_fraction 0.661999

hexsha fed101d5641b2358a6faa90cfb699d37b04d769d | size 708 | ext py | lang Python
max_stars:  examples/charts/file/area.py @ pyjsdev/googlemap_flask (head 9d0dd899a9cbf756b3d83c33e3d8a47e7db40cc5) | licenses ["BSD-3-Clause"] | count 2 | 2019-05-24T14:07:33.000Z → 2019-05-24T14:36:19.000Z
max_issues: same path/repo/head | licenses ["BSD-3-Clause"] | count null | event datetimes null
max_forks:  same path/repo/head | licenses ["BSD-3-Clause"] | count 1 | 2021-09-09T03:33:04.000Z → 2021-09-09T03:33:04.000Z
content:
from bokeh.charts import Area, show, output_file, defaults
from bokeh.layouts import row
defaults.width = 400
defaults.height = 400
# create some example data
data = dict(
python=[2, 3, 7, 5, 26, 221, 44, 233, 254, 265, 266, 267, 120, 111],
pypy=[12, 33, 47, 15, 126, 121, 144, 233, 254, 225, 226, 267, 110, 130],
jython=[22, 43, 10, 25, 26, 101, 114, 203, 194, 215, 201, 227, 139, 160],
)
area1 = Area(data, title="Area Chart", legend="top_left",
xlabel='time', ylabel='memory')
area2 = Area(data, title="Stacked Area Chart", legend="top_left",
stack=True, xlabel='time', ylabel='memory')
output_file("area.html", title="area.py example")
show(row(area1, area2))
| 30.782609
| 77
| 0.641243
|
e0dfb4e729883488b037f97b23a22c8293580f8f
| 9,103
|
py
|
Python
|
setup.py
|
Sanster/onnxruntime
|
c4e4abce73965ff7819d3183712ce40e2ca13cb4
|
[
"MIT"
] | null | null | null |
setup.py
|
Sanster/onnxruntime
|
c4e4abce73965ff7819d3183712ce40e2ca13cb4
|
[
"MIT"
] | null | null | null |
setup.py
|
Sanster/onnxruntime
|
c4e4abce73965ff7819d3183712ce40e2ca13cb4
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#--------------------------------------------------------------------------
from setuptools import setup, find_packages, Extension
from distutils import log as logger
from distutils.command.build_ext import build_ext as _build_ext
from glob import glob
from os import path, getcwd, environ, remove
from shutil import copyfile
import platform
import subprocess
import sys
import datetime
nightly_build = False
package_name = 'onnxruntime'
if '--use_tensorrt' in sys.argv:
package_name = 'onnxruntime-gpu-tensorrt'
sys.argv.remove('--use_tensorrt')
if '--nightly_build' in sys.argv:
package_name = 'ort-trt-nightly'
nightly_build = True
sys.argv.remove('--nightly_build')
elif '--use_cuda' in sys.argv:
package_name = 'onnxruntime-gpu'
sys.argv.remove('--use_cuda')
if '--nightly_build' in sys.argv:
package_name = 'ort-gpu-nightly'
nightly_build = True
sys.argv.remove('--nightly_build')
elif '--use_ngraph' in sys.argv:
package_name = 'onnxruntime-ngraph'
sys.argv.remove('--use_ngraph')
elif '--use_openvino' in sys.argv:
package_name = 'onnxruntime-openvino'
elif '--use_nuphar' in sys.argv:
package_name = 'onnxruntime-nuphar'
sys.argv.remove('--use_nuphar')
if '--nightly_build' in sys.argv:
package_name = 'ort-nightly'
nightly_build = True
sys.argv.remove('--nightly_build')
is_manylinux1 = environ.get('AUDITWHEEL_PLAT', None) in (
    'manylinux1_x86_64', 'manylinux2010_x86_64')
class build_ext(_build_ext):
def build_extension(self, ext):
dest_file = self.get_ext_fullpath(ext.name)
logger.info('copying %s -> %s', ext.sources[0], dest_file)
copyfile(ext.sources[0], dest_file)
try:
from wheel.bdist_wheel import bdist_wheel as _bdist_wheel
class bdist_wheel(_bdist_wheel):
def finalize_options(self):
_bdist_wheel.finalize_options(self)
if not is_manylinux1:
self.root_is_pure = False
def _rewrite_ld_preload(self, to_preload):
with open('onnxruntime/capi/_ld_preload.py', 'rt') as f:
ld_preload = f.read().splitlines()
with open('onnxruntime/capi/_ld_preload.py', 'wt') as f:
for line in ld_preload:
f.write(line)
f.write('\n')
if 'LD_PRELOAD_BEGIN_MARK' in line:
break
if len(to_preload) > 0:
f.write('from ctypes import CDLL, RTLD_GLOBAL\n')
for library in to_preload:
f.write('_{} = CDLL("{}", mode=RTLD_GLOBAL)\n'.format(library.split('.')[0], library))
def run(self):
if is_manylinux1:
source = 'onnxruntime/capi/onnxruntime_pybind11_state.so'
dest = 'onnxruntime/capi/onnxruntime_pybind11_state_manylinux1.so'
logger.info('copying %s -> %s', source, dest)
copyfile(source, dest)
result = subprocess.run(['patchelf', '--print-needed', dest], check=True, stdout=subprocess.PIPE, universal_newlines=True)
cuda_dependencies = ['libcublas.so', 'libcudnn.so', 'libcudart.so']
to_preload = []
args = ['patchelf', '--debug']
for line in result.stdout.split('\n'):
for dependency in cuda_dependencies:
if dependency in line:
to_preload.append(line)
args.extend(['--remove-needed', line])
args.append(dest)
if len(to_preload) > 0:
subprocess.run(args, check=True, stdout=subprocess.PIPE)
self._rewrite_ld_preload(to_preload)
_bdist_wheel.run(self)
if is_manylinux1:
file = glob(path.join(self.dist_dir, '*linux*.whl'))[0]
logger.info('repairing %s for manylinux1', file)
try:
subprocess.run(['auditwheel', 'repair', '-w', self.dist_dir, file], check=True, stdout=subprocess.PIPE)
finally:
logger.info('removing %s', file)
remove(file)
except ImportError as error:
print("Error importing dependencies:")
print(error)
bdist_wheel = None
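# Illustrative note (an assumption inferred from _rewrite_ld_preload above,
# not part of the original build; the library version is hypothetical): after
# a manylinux GPU build, the tail of onnxruntime/capi/_ld_preload.py would
# look roughly like
#
#     from ctypes import CDLL, RTLD_GLOBAL
#     _libcublas = CDLL("libcublas.so.10", mode=RTLD_GLOBAL)
#
# so the CUDA runtime libraries stripped from DT_NEEDED by patchelf are still
# loaded (globally) before the pybind11 extension module is imported.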
# Additional binaries
if platform.system() == 'Linux':
libs = ['onnxruntime_pybind11_state.so', 'libdnnl.so.1', 'libmklml_intel.so', 'libiomp5.so', 'mimalloc.so']
# nGraph Libs
libs.extend(['libngraph.so', 'libcodegen.so', 'libcpu_backend.so', 'libmkldnn.so', 'libtbb_debug.so', 'libtbb_debug.so.2', 'libtbb.so', 'libtbb.so.2'])
# Nuphar Libs
libs.extend(['libtvm.so.0.5.1'])
# Openvino Libs
libs.extend(['libcpu_extension.so'])
if nightly_build:
libs.extend(['libonnxruntime_pywrapper.so'])
elif platform.system() == "Darwin":
libs = ['onnxruntime_pybind11_state.so', 'libdnnl.1.dylib', 'mimalloc.so'] # TODO add libmklml and libiomp5 later.
if nightly_build:
libs.extend(['libonnxruntime_pywrapper.dylib'])
else:
libs = ['onnxruntime_pybind11_state.pyd', 'dnnl.dll', 'mklml.dll', 'libiomp5md.dll']
libs.extend(['ngraph.dll', 'cpu_backend.dll', 'tbb.dll', 'mimalloc-override.dll', 'mimalloc-redirect.dll', 'mimalloc-redirect32.dll'])
# Nuphar Libs
libs.extend(['tvm.dll'])
# Openvino Libs
libs.extend(['cpu_extension.dll'])
if nightly_build:
libs.extend(['onnxruntime_pywrapper.dll'])
if is_manylinux1:
data = ['capi/libonnxruntime_pywrapper.so'] if nightly_build else []
ext_modules = [
Extension(
'onnxruntime.capi.onnxruntime_pybind11_state',
['onnxruntime/capi/onnxruntime_pybind11_state_manylinux1.so'],
),
]
else:
data = [path.join('capi', x) for x in libs if path.isfile(path.join('onnxruntime', 'capi', x))]
ext_modules = []
python_modules_list = list()
if '--use_openvino' in sys.argv:
    # Add the Python modules required for the OpenVINO EP
python_modules_list.extend(['openvino_mo', 'openvino_emitter'])
sys.argv.remove('--use_openvino')
# Additional examples
examples_names = ["mul_1.onnx", "logreg_iris.onnx", "sigmoid.onnx"]
examples = [path.join('datasets', x) for x in examples_names]
# Extra files such as EULA and ThirdPartyNotices
extra = ["LICENSE", "ThirdPartyNotices.txt", "Privacy.md"]
if package_name == 'onnxruntime-nuphar':
extra.extend([path.join('nuphar', 'NUPHAR_CACHE_VERSION')])
# Description
README = path.join(getcwd(), "docs/python/README.rst")
if not path.exists(README):
this = path.dirname(__file__)
README = path.join(this, "docs/python/README.rst")
if not path.exists(README):
raise FileNotFoundError("Unable to find 'README.rst'")
with open(README) as f:
long_description = f.read()
version_number = ''
with open('VERSION_NUMBER') as f:
version_number = f.readline().strip()
if nightly_build:
    # https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables
date_suffix = environ.get('BUILD_BUILDNUMBER')
if date_suffix is None:
        # The following line is only for local testing
date_suffix = str(datetime.datetime.now().date().strftime("%Y%m%d"))
else:
date_suffix = date_suffix.replace('.','')
version_number = version_number + ".dev" + date_suffix
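    # Worked example (values are hypothetical): with VERSION_NUMBER '1.1.0'
    # and BUILD_BUILDNUMBER '20200102.3', the dot is stripped and the wheel
    # version becomes '1.1.0.dev202001023'.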
cmd_classes = {}
if bdist_wheel is not None:
cmd_classes['bdist_wheel'] = bdist_wheel
cmd_classes['build_ext'] = build_ext
# Setup
setup(
name=package_name,
version=version_number,
description='ONNX Runtime Python bindings',
long_description=long_description,
author='Microsoft Corporation',
author_email='onnx@microsoft.com',
cmdclass=cmd_classes,
license="MIT License",
packages=['onnxruntime',
'onnxruntime.backend',
'onnxruntime.capi',
'onnxruntime.datasets',
'onnxruntime.tools',
] + (['onnxruntime.nuphar'] if package_name == 'onnxruntime-nuphar' else []),
ext_modules=ext_modules,
package_data={
'onnxruntime': data + examples + extra,
},
py_modules=python_modules_list,
install_requires=[
'onnx>=1.2.3',
'numpy>=1.18.0'
],
    entry_points={
'console_scripts': [
'onnxruntime_test = onnxruntime.tools.onnxruntime_test:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
| 37.615702
| 153
| 0.625508
|
91e0549d3e1e10dabd75c78d2dda78c1a64e7330
| 4,866
|
py
|
Python
|
dashboard/src/perf_tests.py
|
cnulenka/fabric8-analytics-common
|
570bfa69d885543944f90decc23397f546385f69
|
[
"Apache-2.0"
] | null | null | null |
dashboard/src/perf_tests.py
|
cnulenka/fabric8-analytics-common
|
570bfa69d885543944f90decc23397f546385f69
|
[
"Apache-2.0"
] | null | null | null |
dashboard/src/perf_tests.py
|
cnulenka/fabric8-analytics-common
|
570bfa69d885543944f90decc23397f546385f69
|
[
"Apache-2.0"
] | null | null | null |
"""Module with class that handle performance test results and compute statistic."""
import csv
class PerfTests:
"""Class that handle performance test results."""
INPUT_FILES = {
"component analysis": {
"sequenced_calls_known_component":
"component_analysis_sequenced_calls_known_component.csv",
"sequenced_calls_unknown_component":
"component_analysis_sequenced_calls_unknown_component.csv",
"parallel_calls_known_component":
"component_analysis_parallel_calls_known_component.csv",
"parallel_calls_unknown_component":
"component_analysis_parallel_calls_unknown_component.csv",
},
"stack analysis": {
"sequenced_calls": "stack_analysis_sequenced_calls.csv",
"parallel_calls": "stack_analysis_parallel_calls.csv"
}
}
@staticmethod
def read_csv(filename, skip_first_line=False):
"""Read the given CSV file, parse it, and return as list of records."""
output = []
with open(filename, 'r') as fin:
csv_content = csv.reader(fin, delimiter=',')
if skip_first_line:
next(csv_content, None)
for row in csv_content:
output.append(row)
return output
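    # Illustrative usage (the file contents are hypothetical): a CSV file
    # holding "1,0.5\n2,0.7" is returned by read_csv(filename) as
    # [['1', '0.5'], ['2', '0.7']] -- the values stay strings until
    # compute_stat_for_result_set() casts them to float.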
def __init__(self):
"""Construct an instance of the class."""
self._results = {}
self._statistic = {}
def read_analysis_results(self, input_files):
"""Read the performence test results from the selected bundle of CSV files.
Typically the bundle would be 'stack analysis' or 'component analysis').
"""
results = {}
for name, filename in input_files.items():
results[name] = PerfTests.read_csv(filename)
return results
def read_results(self):
"""Read results generated by all performance tests."""
for analyse_type, input_files in PerfTests.INPUT_FILES.items():
self._results[analyse_type] = self.read_analysis_results(input_files)
@staticmethod
def compute_stat_for_result_set(results):
"""Compute the statistic for selected result set (that is - for 2D matrix of floats)."""
# let make statistic for all columns, even for columns where it does not
# make much sense to do so (like the 1st column that might contain run #)
COLUMN_MIN_VALUES = 2
COLUMN_MAX_VALUES = 3
COLUMN_AVG_VALUES = 4
n = len(results)
columns = len(results[0])
# initial values to be updated later
sum_val = [0] * columns
avg_val = [None] * columns
max_val = [float("-inf")] * columns
min_val = [float("inf")] * columns
# compute min, max, and sum
for result in results:
for column in range(columns):
r = float(result[column])
sum_val[column] += r
if r < min_val[column]:
min_val[column] = r
if max_val[column] < r:
max_val[column] = r
        # make sure we don't divide by zero
if n > 0:
for column in range(columns):
avg_val[column] = sum_val[column] / n
if columns == 1:
return {
"sum": sum_val[0],
"avg": avg_val[0],
"min": min_val[0],
"max": max_val[0],
}
else:
return {
"sum": sum_val[COLUMN_MAX_VALUES],
"avg": avg_val[COLUMN_AVG_VALUES],
"min": min_val[COLUMN_MIN_VALUES],
"max": max_val[COLUMN_MAX_VALUES],
}
    def compute_statistic_for_analysis(self, results):
        """Compute the basic statistics (min, max, sum, avg) for the selected set of results."""
        stat = {}
        for key, result_set in results.items():
            stat[key] = PerfTests.compute_stat_for_result_set(result_set)
        return stat
    def compute_statistic(self):
        """Compute statistics for all results generated by the performance tests."""
for analyse in PerfTests.INPUT_FILES.keys():
self._statistic[analyse] = self.compute_statistic_for_analysis(self._results[analyse])
@property
def results(self):
"""Getter for the 'results' attribute."""
return self._results
@property
def statistic(self):
"""Getter for the 'statistic' attribute."""
return self._statistic
if __name__ == "__main__":
# execute only if run as a script
perf_tests = PerfTests()
perf_tests.read_results()
perf_tests.compute_statistic()
print("Results:")
print(perf_tests.results)
print("-----------------------------------------")
print("Statistic:")
print(perf_tests.statistic)
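    # Hedged demo of the statistics helper on inline data (independent of the
    # CSV files read above); for a single-column result set the whole column
    # is summarized directly.
    demo_results = [[1.0], [2.0], [3.0]]
    # expected: {'sum': 6.0, 'avg': 2.0, 'min': 1.0, 'max': 3.0}
    print(PerfTests.compute_stat_for_result_set(demo_results))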
| 35.26087
| 98
| 0.590834
|
4a387e1e1860920951efa79124a664b60d7bccf9
| 4,093
|
py
|
Python
|
sdk/python/pulumi_azure_native/appplatform/v20210303preview/get_deployment.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/appplatform/v20210303preview/get_deployment.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/appplatform/v20210303preview/get_deployment.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetDeploymentResult',
'AwaitableGetDeploymentResult',
'get_deployment',
]
@pulumi.output_type
class GetDeploymentResult:
"""
Deployment resource payload
"""
def __init__(__self__, id=None, name=None, properties=None, sku=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource Id for the resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.DeploymentResourcePropertiesResponse':
"""
Properties of the Deployment resource
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
"""
Sku of the Deployment resource
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetDeploymentResult(GetDeploymentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetDeploymentResult(
id=self.id,
name=self.name,
properties=self.properties,
sku=self.sku,
type=self.type)
def get_deployment(app_name: Optional[str] = None,
deployment_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDeploymentResult:
"""
Deployment resource payload
:param str app_name: The name of the App resource.
:param str deployment_name: The name of the Deployment resource.
:param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param str service_name: The name of the Service resource.
"""
__args__ = dict()
__args__['appName'] = app_name
__args__['deploymentName'] = deployment_name
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:appplatform/v20210303preview:getDeployment', __args__, opts=opts, typ=GetDeploymentResult).value
return AwaitableGetDeploymentResult(
id=__ret__.id,
name=__ret__.name,
properties=__ret__.properties,
sku=__ret__.sku,
type=__ret__.type)
| 32.744
| 171
| 0.636697
|
603c19d33944ad4c4c9480d2af0f67865616a072
| 73,424
|
py
|
Python
|
packages/python/plotly/plotly/graph_objs/scatter3d/line/_colorbar.py
|
TitouenDCL/plotly.py
|
0c98391f575dab0e3f08ede907045cc72b3d40a4
|
[
"MIT"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
packages/python/plotly/plotly/graph_objs/scatter3d/line/_colorbar.py
|
TitouenDCL/plotly.py
|
0c98391f575dab0e3f08ede907045cc72b3d40a4
|
[
"MIT"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
packages/python/plotly/plotly/graph_objs/scatter3d/line/_colorbar.py
|
TitouenDCL/plotly.py
|
0c98391f575dab0e3f08ede907045cc72b3d40a4
|
[
"MIT"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class ColorBar(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scatter3d.line"
_path_str = "scatter3d.line.colorbar"
_valid_props = {
"bgcolor",
"bordercolor",
"borderwidth",
"dtick",
"exponentformat",
"len",
"lenmode",
"minexponent",
"nticks",
"outlinecolor",
"outlinewidth",
"separatethousands",
"showexponent",
"showticklabels",
"showtickprefix",
"showticksuffix",
"thickness",
"thicknessmode",
"tick0",
"tickangle",
"tickcolor",
"tickfont",
"tickformat",
"tickformatstopdefaults",
"tickformatstops",
"ticklabeloverflow",
"ticklabelposition",
"ticklen",
"tickmode",
"tickprefix",
"ticks",
"ticksuffix",
"ticktext",
"ticktextsrc",
"tickvals",
"tickvalssrc",
"tickwidth",
"title",
"titlefont",
"titleside",
"x",
"xanchor",
"xpad",
"y",
"yanchor",
"ypad",
}
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the color of padded area.
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the axis line color.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
# borderwidth
# -----------
@property
def borderwidth(self):
"""
        Sets the width (in px) of the border enclosing this color bar.
The 'borderwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["borderwidth"]
@borderwidth.setter
def borderwidth(self, val):
self["borderwidth"] = val
# dtick
# -----
@property
def dtick(self):
"""
Sets the step in-between ticks on this axis. Use with `tick0`.
Must be a positive number, or special strings available to
"log" and "date" axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick number. For
example, to set a tick mark at 1, 10, 100, 1000, ... set dtick
to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2.
To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special values;
"L<f>", where `f` is a positive number, gives ticks linearly
spaced in value (but not position). For example `tick0` = 0.1,
`dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To
show powers of 10 plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and
"D2". If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval between
ticks to one day, set `dtick` to 86400000.0. "date" also has
special values "M<n>" gives ticks spaced by a number of months.
`n` must be a positive integer. To set ticks on the 15th of
every third month, set `tick0` to "2000-01-15" and `dtick` to
"M3". To set ticks every 4 years, set `dtick` to "M48"
The 'dtick' property accepts values of any type
Returns
-------
Any
"""
return self["dtick"]
@dtick.setter
def dtick(self, val):
self["dtick"] = val
# exponentformat
# --------------
@property
def exponentformat(self):
"""
Determines a formatting rule for the tick exponents. For
example, consider the number 1,000,000,000. If "none", it
appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If "SI", 1G. If
"B", 1B.
The 'exponentformat' property is an enumeration that may be specified as:
- One of the following enumeration values:
['none', 'e', 'E', 'power', 'SI', 'B']
Returns
-------
Any
"""
return self["exponentformat"]
@exponentformat.setter
def exponentformat(self, val):
self["exponentformat"] = val
# len
# ---
@property
def len(self):
"""
        Sets the length of the color bar. This measure excludes the
        padding of both ends. That is, the color bar length is this
        length minus the padding on both ends.
The 'len' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["len"]
@len.setter
def len(self, val):
self["len"] = val
# lenmode
# -------
@property
def lenmode(self):
"""
Determines whether this color bar's length (i.e. the measure in
the color variation direction) is set in units of plot
"fraction" or in *pixels. Use `len` to set the value.
The 'lenmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self["lenmode"]
@lenmode.setter
def lenmode(self, val):
self["lenmode"] = val
# minexponent
# -----------
@property
def minexponent(self):
"""
Hide SI prefix for 10^n if |n| is below this number. This only
has an effect when `tickformat` is "SI" or "B".
The 'minexponent' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["minexponent"]
@minexponent.setter
def minexponent(self, val):
self["minexponent"] = val
# nticks
# ------
@property
def nticks(self):
"""
Specifies the maximum number of ticks for the particular axis.
The actual number of ticks will be chosen automatically to be
less than or equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
        The 'nticks' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self["nticks"]
@nticks.setter
def nticks(self, val):
self["nticks"] = val
# outlinecolor
# ------------
@property
def outlinecolor(self):
"""
Sets the axis line color.
The 'outlinecolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["outlinecolor"]
@outlinecolor.setter
def outlinecolor(self, val):
self["outlinecolor"] = val
# outlinewidth
# ------------
@property
def outlinewidth(self):
"""
Sets the width (in px) of the axis line.
The 'outlinewidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["outlinewidth"]
@outlinewidth.setter
def outlinewidth(self, val):
self["outlinewidth"] = val
# separatethousands
# -----------------
@property
def separatethousands(self):
"""
If "true", even 4-digit integers are separated
The 'separatethousands' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["separatethousands"]
@separatethousands.setter
def separatethousands(self, val):
self["separatethousands"] = val
# showexponent
# ------------
@property
def showexponent(self):
"""
If "all", all exponents are shown besides their significands.
If "first", only the exponent of the first tick is shown. If
"last", only the exponent of the last tick is shown. If "none",
no exponents appear.
The 'showexponent' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showexponent"]
@showexponent.setter
def showexponent(self, val):
self["showexponent"] = val
# showticklabels
# --------------
@property
def showticklabels(self):
"""
Determines whether or not the tick labels are drawn.
The 'showticklabels' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showticklabels"]
@showticklabels.setter
def showticklabels(self, val):
self["showticklabels"] = val
# showtickprefix
# --------------
@property
def showtickprefix(self):
"""
If "all", all tick labels are displayed with a prefix. If
"first", only the first tick is displayed with a prefix. If
"last", only the last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
The 'showtickprefix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showtickprefix"]
@showtickprefix.setter
def showtickprefix(self, val):
self["showtickprefix"] = val
# showticksuffix
# --------------
@property
def showticksuffix(self):
"""
Same as `showtickprefix` but for tick suffixes.
The 'showticksuffix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showticksuffix"]
@showticksuffix.setter
def showticksuffix(self, val):
self["showticksuffix"] = val
# thickness
# ---------
@property
def thickness(self):
"""
        Sets the thickness of the color bar. This measure excludes the
        size of the padding, ticks and labels.
The 'thickness' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["thickness"]
@thickness.setter
def thickness(self, val):
self["thickness"] = val
# thicknessmode
# -------------
@property
def thicknessmode(self):
"""
Determines whether this color bar's thickness (i.e. the measure
in the constant color direction) is set in units of plot
"fraction" or in "pixels". Use `thickness` to set the value.
The 'thicknessmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self["thicknessmode"]
@thicknessmode.setter
def thicknessmode(self, val):
self["thicknessmode"] = val
# tick0
# -----
@property
def tick0(self):
"""
Sets the placement of the first tick on this axis. Use with
`dtick`. If the axis `type` is "log", then you must take the
log of your starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when `dtick`=*L<f>* (see
`dtick` for more info). If the axis `type` is "date", it should
be a date string, like date data. If the axis `type` is
"category", it should be a number, using the scale where each
category is assigned a serial number from zero in the order it
appears.
The 'tick0' property accepts values of any type
Returns
-------
Any
"""
return self["tick0"]
@tick0.setter
def tick0(self, val):
self["tick0"] = val
# tickangle
# ---------
@property
def tickangle(self):
"""
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the tick
labels vertically.
        The 'tickangle' property is an angle (in degrees) that may be
specified as a number between -180 and 180. Numeric values outside this
range are converted to the equivalent value
(e.g. 270 is converted to -90).
Returns
-------
int|float
"""
return self["tickangle"]
@tickangle.setter
def tickangle(self, val):
self["tickangle"] = val
# tickcolor
# ---------
@property
def tickcolor(self):
"""
Sets the tick color.
The 'tickcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["tickcolor"]
@tickcolor.setter
def tickcolor(self, val):
self["tickcolor"] = val
# tickfont
# --------
@property
def tickfont(self):
"""
Sets the color bar's tick label font
The 'tickfont' property is an instance of Tickfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.line.colorbar.Tickfont`
- A dict of string/value properties that will be passed
to the Tickfont constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
plotly.graph_objs.scatter3d.line.colorbar.Tickfont
"""
return self["tickfont"]
@tickfont.setter
def tickfont(self, val):
self["tickfont"] = val
# tickformat
# ----------
@property
def tickformat(self):
"""
Sets the tick label formatting rule using d3 formatting mini-
languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display "09~15~23.46"
The 'tickformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["tickformat"]
@tickformat.setter
def tickformat(self, val):
self["tickformat"] = val
# tickformatstops
# ---------------
@property
def tickformatstops(self):
"""
The 'tickformatstops' property is a tuple of instances of
Tickformatstop that may be specified as:
- A list or tuple of instances of plotly.graph_objs.scatter3d.line.colorbar.Tickformatstop
- A list or tuple of dicts of string/value properties that
will be passed to the Tickformatstop constructor
Supported dict properties:
dtickrange
range [*min*, *max*], where "min", "max" -
dtick values which describe some zoom level, it
is possible to omit "min" or "max" value by
passing "null"
enabled
Determines whether or not this stop is used. If
`false`, this stop is ignored even within its
`dtickrange`.
name
When used in a template, named items are
created in the output figure in addition to any
items the figure already has in this array. You
can modify these items in the output figure by
making your own item with `templateitemname`
matching this `name` alongside your
modifications (including `visible: false` or
`enabled: false` to hide it). Has no effect
outside of a template.
templateitemname
Used to refer to a named item in this array in
the template. Named items from the template
will be created even without a matching item in
the input figure, but you can modify one by
making an item with `templateitemname` matching
its `name`, alongside your modifications
(including `visible: false` or `enabled: false`
to hide it). If there is no template or no
matching item, this item will be hidden unless
you explicitly show it with `visible: true`.
value
string - dtickformat for described zoom level,
the same as "tickformat"
Returns
-------
tuple[plotly.graph_objs.scatter3d.line.colorbar.Tickformatstop]
"""
return self["tickformatstops"]
@tickformatstops.setter
def tickformatstops(self, val):
self["tickformatstops"] = val
# tickformatstopdefaults
# ----------------------
@property
def tickformatstopdefaults(self):
"""
When used in a template (as layout.template.data.scatter3d.line
.colorbar.tickformatstopdefaults), sets the default property
values to use for elements of
scatter3d.line.colorbar.tickformatstops
The 'tickformatstopdefaults' property is an instance of Tickformatstop
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.line.colorbar.Tickformatstop`
- A dict of string/value properties that will be passed
to the Tickformatstop constructor
Supported dict properties:
Returns
-------
plotly.graph_objs.scatter3d.line.colorbar.Tickformatstop
"""
return self["tickformatstopdefaults"]
@tickformatstopdefaults.setter
def tickformatstopdefaults(self, val):
self["tickformatstopdefaults"] = val
# ticklabeloverflow
# -----------------
@property
def ticklabeloverflow(self):
"""
Determines how we handle tick labels that would overflow either
the graph div or the domain of the axis. The default value for
inside tick labels is *hide past domain*. In other cases the
default is *hide past div*.
The 'ticklabeloverflow' property is an enumeration that may be specified as:
- One of the following enumeration values:
['allow', 'hide past div', 'hide past domain']
Returns
-------
Any
"""
return self["ticklabeloverflow"]
@ticklabeloverflow.setter
def ticklabeloverflow(self, val):
self["ticklabeloverflow"] = val
# ticklabelposition
# -----------------
@property
def ticklabelposition(self):
"""
Determines where tick labels are drawn.
The 'ticklabelposition' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', 'outside top', 'inside top',
'outside bottom', 'inside bottom']
Returns
-------
Any
"""
return self["ticklabelposition"]
@ticklabelposition.setter
def ticklabelposition(self, val):
self["ticklabelposition"] = val
# ticklen
# -------
@property
def ticklen(self):
"""
Sets the tick length (in px).
The 'ticklen' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["ticklen"]
@ticklen.setter
def ticklen(self, val):
self["ticklen"] = val
# tickmode
# --------
@property
def tickmode(self):
"""
Sets the tick mode for this axis. If "auto", the number of
ticks is set via `nticks`. If "linear", the placement of the
ticks is determined by a starting position `tick0` and a tick
step `dtick` ("linear" is the default value if `tick0` and
`dtick` are provided). If "array", the placement of the ticks
is set via `tickvals` and the tick text is `ticktext`. ("array"
is the default value if `tickvals` is provided).
The 'tickmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['auto', 'linear', 'array']
Returns
-------
Any
"""
return self["tickmode"]
@tickmode.setter
def tickmode(self, val):
self["tickmode"] = val
# tickprefix
# ----------
@property
def tickprefix(self):
"""
Sets a tick label prefix.
The 'tickprefix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["tickprefix"]
@tickprefix.setter
def tickprefix(self, val):
self["tickprefix"] = val
# ticks
# -----
@property
def ticks(self):
"""
Determines whether ticks are drawn or not. If "", this axis'
ticks are not drawn. If "outside" ("inside"), this axis' are
drawn outside (inside) the axis lines.
The 'ticks' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', '']
Returns
-------
Any
"""
return self["ticks"]
@ticks.setter
def ticks(self, val):
self["ticks"] = val
# ticksuffix
# ----------
@property
def ticksuffix(self):
"""
Sets a tick label suffix.
The 'ticksuffix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["ticksuffix"]
@ticksuffix.setter
def ticksuffix(self, val):
self["ticksuffix"] = val
# ticktext
# --------
@property
def ticktext(self):
"""
Sets the text displayed at the ticks position via `tickvals`.
Only has an effect if `tickmode` is set to "array". Used with
`tickvals`.
The 'ticktext' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["ticktext"]
@ticktext.setter
def ticktext(self, val):
self["ticktext"] = val
# ticktextsrc
# -----------
@property
def ticktextsrc(self):
"""
        Sets the source reference on Chart Studio Cloud for ticktext.
The 'ticktextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["ticktextsrc"]
@ticktextsrc.setter
def ticktextsrc(self, val):
self["ticktextsrc"] = val
# tickvals
# --------
@property
def tickvals(self):
"""
Sets the values at which ticks on this axis appear. Only has an
effect if `tickmode` is set to "array". Used with `ticktext`.
The 'tickvals' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["tickvals"]
@tickvals.setter
def tickvals(self, val):
self["tickvals"] = val
# tickvalssrc
# -----------
@property
def tickvalssrc(self):
"""
        Sets the source reference on Chart Studio Cloud for tickvals.
The 'tickvalssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["tickvalssrc"]
@tickvalssrc.setter
def tickvalssrc(self, val):
self["tickvalssrc"] = val
# tickwidth
# ---------
@property
def tickwidth(self):
"""
Sets the tick width (in px).
The 'tickwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["tickwidth"]
@tickwidth.setter
def tickwidth(self, val):
self["tickwidth"] = val
# title
# -----
@property
def title(self):
"""
The 'title' property is an instance of Title
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.line.colorbar.Title`
- A dict of string/value properties that will be passed
to the Title constructor
Supported dict properties:
font
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
side
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
text
Sets the title of the color bar. Note that
before the existence of `title.text`, the
title's contents used to be defined as the
`title` attribute itself. This behavior has
been deprecated.
Returns
-------
plotly.graph_objs.scatter3d.line.colorbar.Title
"""
return self["title"]
@title.setter
def title(self, val):
self["title"] = val
# titlefont
# ---------
@property
def titlefont(self):
"""
Deprecated: Please use scatter3d.line.colorbar.title.font
instead. Sets this color bar's title font. Note that the
title's font used to be set by the now deprecated `titlefont`
attribute.
The 'font' property is an instance of Font
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.line.colorbar.title.Font`
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
"""
return self["titlefont"]
@titlefont.setter
def titlefont(self, val):
self["titlefont"] = val
# titleside
# ---------
@property
def titleside(self):
"""
Deprecated: Please use scatter3d.line.colorbar.title.side
instead. Determines the location of color bar's title with
respect to the color bar. Note that the title's location used
to be set by the now deprecated `titleside` attribute.
The 'side' property is an enumeration that may be specified as:
- One of the following enumeration values:
['right', 'top', 'bottom']
Returns
-------
"""
return self["titleside"]
@titleside.setter
def titleside(self, val):
self["titleside"] = val
# x
# -
@property
def x(self):
"""
Sets the x position of the color bar (in plot fraction).
The 'x' property is a number and may be specified as:
- An int or float in the interval [-2, 3]
Returns
-------
int|float
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# xanchor
# -------
@property
def xanchor(self):
"""
Sets this color bar's horizontal position anchor. This anchor
binds the `x` position to the "left", "center" or "right" of
the color bar.
The 'xanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'center', 'right']
Returns
-------
Any
"""
return self["xanchor"]
@xanchor.setter
def xanchor(self, val):
self["xanchor"] = val
# xpad
# ----
@property
def xpad(self):
"""
Sets the amount of padding (in px) along the x direction.
The 'xpad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["xpad"]
@xpad.setter
def xpad(self, val):
self["xpad"] = val
# y
# -
@property
def y(self):
"""
Sets the y position of the color bar (in plot fraction).
The 'y' property is a number and may be specified as:
- An int or float in the interval [-2, 3]
Returns
-------
int|float
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# yanchor
# -------
@property
def yanchor(self):
"""
        Sets this color bar's vertical position anchor. This anchor
binds the `y` position to the "top", "middle" or "bottom" of
the color bar.
The 'yanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top', 'middle', 'bottom']
Returns
-------
Any
"""
return self["yanchor"]
@yanchor.setter
def yanchor(self, val):
self["yanchor"] = val
# ypad
# ----
@property
def ypad(self):
"""
Sets the amount of padding (in px) along the y direction.
The 'ypad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["ypad"]
@ypad.setter
def ypad(self, val):
self["ypad"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
            Sets the width (in px) of the border enclosing this
            color bar.
dtick
Sets the step in-between ticks on this axis. Use with
`tick0`. Must be a positive number, or special strings
available to "log" and "date" axes. If the axis `type`
is "log", then ticks are set every 10^(n*dtick) where n
is the tick number. For example, to set a tick mark at
1, 10, 100, 1000, ... set dtick to 1. To set tick marks
at 1, 100, 10000, ... set dtick to 2. To set tick marks
at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special
values; "L<f>", where `f` is a positive number, gives
ticks linearly spaced in value (but not position). For
example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
small digits between, use "D1" (all digits) or "D2"
(only 2 and 5). `tick0` is ignored for "D1" and "D2".
If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to 86400000.0.
"date" also has special values "M<n>" gives ticks
spaced by a number of months. `n` must be a positive
integer. To set ticks on the 15th of every third month,
set `tick0` to "2000-01-15" and `dtick` to "M3". To set
ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick exponents.
For example, consider the number 1,000,000,000. If
"none", it appears as 1,000,000,000. If "e", 1e+9. If
"E", 1E+9. If "power", 1x10^9 (with 9 in a super
script). If "SI", 1G. If "B", 1B.
        len
            Sets the length of the color bar. This measure excludes
            the padding of both ends. That is, the color bar length
            is this length minus the padding on both ends.
        lenmode
            Determines whether this color bar's length (i.e. the
            measure in the color variation direction) is set in
            units of plot "fraction" or in "pixels". Use `len` to
            set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this number.
This only has an effect when `tickformat` is "SI" or
"B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks will be
chosen automatically to be less than or equal to
`nticks`. Has an effect only if `tickmode` is set to
"auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of the
first tick is shown. If "last", only the exponent of
the last tick is shown. If "none", no exponents appear.
showticklabels
Determines whether or not the tick labels are drawn.
showtickprefix
If "all", all tick labels are displayed with a prefix.
If "first", only the first tick is displayed with a
prefix. If "last", only the last tick is displayed with
a suffix. If "none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
            Sets the thickness of the color bar. This measure
            excludes the size of the padding, ticks and labels.
thicknessmode
Determines whether this color bar's thickness (i.e. the
measure in the constant color direction) is set in
units of plot "fraction" or in "pixels". Use
`thickness` to set the value.
tick0
Sets the placement of the first tick on this axis. Use
with `dtick`. If the axis `type` is "log", then you
must take the log of your starting tick (e.g. to set
the starting tick to 100, set the `tick0` to 2) except
when `dtick`=*L<f>* (see `dtick` for more info). If the
axis `type` is "date", it should be a date string, like
date data. If the axis `type` is "category", it should
be a number, using the scale where each category is
assigned a serial number from zero in the order it
appears.
tickangle
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the
tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display "09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.scatter3d.line.
colorbar.Tickformatstop` instances or dicts with
compatible properties
tickformatstopdefaults
When used in a template (as layout.template.data.scatte
r3d.line.colorbar.tickformatstopdefaults), sets the
default property values to use for elements of
scatter3d.line.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of the
axis. The default value for inside tick labels is *hide
past domain*. In other cases the default is *hide past
div*.
ticklabelposition
Determines where tick labels are drawn.
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto", the number
of ticks is set via `nticks`. If "linear", the
placement of the ticks is determined by a starting
position `tick0` and a tick step `dtick` ("linear" is
the default value if `tick0` and `dtick` are provided).
If "array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`. ("array" is
the default value if `tickvals` is provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If "", this
axis' ticks are not drawn. If "outside" ("inside"),
            this axis' ticks are drawn outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position via
`tickvals`. Only has an effect if `tickmode` is set to
"array". Used with `tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud for
            ticktext.
tickvals
Sets the values at which ticks on this axis appear.
Only has an effect if `tickmode` is set to "array".
Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud for
            tickvals.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.scatter3d.line.colorbar.Ti
tle` instance or dict with compatible properties
titlefont
Deprecated: Please use
scatter3d.line.colorbar.title.font instead. Sets this
color bar's title font. Note that the title's font used
to be set by the now deprecated `titlefont` attribute.
titleside
Deprecated: Please use
scatter3d.line.colorbar.title.side instead. Determines
            the location of the color bar's title with respect to the
color bar. Note that the title's location used to be
set by the now deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position anchor. This
anchor binds the `x` position to the "left", "center"
or "right" of the color bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
            Sets this color bar's vertical position anchor. This
anchor binds the `y` position to the "top", "middle" or
"bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
"""
_mapped_properties = {
"titlefont": ("title", "font"),
"titleside": ("title", "side"),
}
def __init__(
self,
arg=None,
bgcolor=None,
bordercolor=None,
borderwidth=None,
dtick=None,
exponentformat=None,
len=None,
lenmode=None,
minexponent=None,
nticks=None,
outlinecolor=None,
outlinewidth=None,
separatethousands=None,
showexponent=None,
showticklabels=None,
showtickprefix=None,
showticksuffix=None,
thickness=None,
thicknessmode=None,
tick0=None,
tickangle=None,
tickcolor=None,
tickfont=None,
tickformat=None,
tickformatstops=None,
tickformatstopdefaults=None,
ticklabeloverflow=None,
ticklabelposition=None,
ticklen=None,
tickmode=None,
tickprefix=None,
ticks=None,
ticksuffix=None,
ticktext=None,
ticktextsrc=None,
tickvals=None,
tickvalssrc=None,
tickwidth=None,
title=None,
titlefont=None,
titleside=None,
x=None,
xanchor=None,
xpad=None,
y=None,
yanchor=None,
ypad=None,
**kwargs
):
"""
Construct a new ColorBar object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.scatter3d.line.ColorBar`
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
            Sets the width (in px) of the border enclosing this
color bar.
dtick
Sets the step in-between ticks on this axis. Use with
`tick0`. Must be a positive number, or special strings
available to "log" and "date" axes. If the axis `type`
is "log", then ticks are set every 10^(n*dtick) where n
is the tick number. For example, to set a tick mark at
1, 10, 100, 1000, ... set dtick to 1. To set tick marks
at 1, 100, 10000, ... set dtick to 2. To set tick marks
at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special
values; "L<f>", where `f` is a positive number, gives
ticks linearly spaced in value (but not position). For
example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
small digits between, use "D1" (all digits) or "D2"
(only 2 and 5). `tick0` is ignored for "D1" and "D2".
If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to 86400000.0.
"date" also has special values "M<n>" gives ticks
spaced by a number of months. `n` must be a positive
integer. To set ticks on the 15th of every third month,
set `tick0` to "2000-01-15" and `dtick` to "M3". To set
ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick exponents.
For example, consider the number 1,000,000,000. If
"none", it appears as 1,000,000,000. If "e", 1e+9. If
"E", 1E+9. If "power", 1x10^9 (with 9 in a super
script). If "SI", 1G. If "B", 1B.
len
            Sets the length of the color bar. This measure excludes
the padding of both ends. That is, the color bar length
is this length minus the padding on both ends.
lenmode
Determines whether this color bar's length (i.e. the
measure in the color variation direction) is set in
            units of plot "fraction" or in "pixels". Use `len` to
set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this number.
            This only has an effect when `exponentformat` is "SI" or
"B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks will be
chosen automatically to be less than or equal to
`nticks`. Has an effect only if `tickmode` is set to
"auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of the
first tick is shown. If "last", only the exponent of
the last tick is shown. If "none", no exponents appear.
showticklabels
Determines whether or not the tick labels are drawn.
showtickprefix
If "all", all tick labels are displayed with a prefix.
If "first", only the first tick is displayed with a
prefix. If "last", only the last tick is displayed with
a suffix. If "none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
            Sets the thickness of the color bar. This measure
excludes the size of the padding, ticks and labels.
thicknessmode
Determines whether this color bar's thickness (i.e. the
measure in the constant color direction) is set in
units of plot "fraction" or in "pixels". Use
`thickness` to set the value.
tick0
Sets the placement of the first tick on this axis. Use
with `dtick`. If the axis `type` is "log", then you
must take the log of your starting tick (e.g. to set
the starting tick to 100, set the `tick0` to 2) except
when `dtick`=*L<f>* (see `dtick` for more info). If the
axis `type` is "date", it should be a date string, like
date data. If the axis `type` is "category", it should
be a number, using the scale where each category is
assigned a serial number from zero in the order it
appears.
tickangle
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the
tick labels vertically.
tickcolor
Sets the tick color.
tickfont
            Sets the color bar's tick label font.
tickformat
Sets the tick label formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display "09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.scatter3d.line.
colorbar.Tickformatstop` instances or dicts with
compatible properties
tickformatstopdefaults
When used in a template (as layout.template.data.scatte
r3d.line.colorbar.tickformatstopdefaults), sets the
default property values to use for elements of
scatter3d.line.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of the
axis. The default value for inside tick labels is *hide
past domain*. In other cases the default is *hide past
div*.
ticklabelposition
Determines where tick labels are drawn.
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto", the number
of ticks is set via `nticks`. If "linear", the
placement of the ticks is determined by a starting
position `tick0` and a tick step `dtick` ("linear" is
the default value if `tick0` and `dtick` are provided).
If "array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`. ("array" is
the default value if `tickvals` is provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If "", this
axis' ticks are not drawn. If "outside" ("inside"),
            this axis' ticks are drawn outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position via
`tickvals`. Only has an effect if `tickmode` is set to
"array". Used with `tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud for
            `ticktext`.
tickvals
Sets the values at which ticks on this axis appear.
Only has an effect if `tickmode` is set to "array".
Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud for
            `tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.scatter3d.line.colorbar.Ti
tle` instance or dict with compatible properties
titlefont
Deprecated: Please use
scatter3d.line.colorbar.title.font instead. Sets this
color bar's title font. Note that the title's font used
to be set by the now deprecated `titlefont` attribute.
titleside
Deprecated: Please use
scatter3d.line.colorbar.title.side instead. Determines
            the location of the color bar's title with respect to the
color bar. Note that the title's location used to be
set by the now deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position anchor. This
anchor binds the `x` position to the "left", "center"
or "right" of the color bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
            Sets this color bar's vertical position anchor. This
anchor binds the `y` position to the "top", "middle" or
"bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
ColorBar
"""
super(ColorBar, self).__init__("colorbar")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scatter3d.line.ColorBar
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scatter3d.line.ColorBar`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("bgcolor", None)
_v = bgcolor if bgcolor is not None else _v
if _v is not None:
self["bgcolor"] = _v
_v = arg.pop("bordercolor", None)
_v = bordercolor if bordercolor is not None else _v
if _v is not None:
self["bordercolor"] = _v
_v = arg.pop("borderwidth", None)
_v = borderwidth if borderwidth is not None else _v
if _v is not None:
self["borderwidth"] = _v
_v = arg.pop("dtick", None)
_v = dtick if dtick is not None else _v
if _v is not None:
self["dtick"] = _v
_v = arg.pop("exponentformat", None)
_v = exponentformat if exponentformat is not None else _v
if _v is not None:
self["exponentformat"] = _v
_v = arg.pop("len", None)
_v = len if len is not None else _v
if _v is not None:
self["len"] = _v
_v = arg.pop("lenmode", None)
_v = lenmode if lenmode is not None else _v
if _v is not None:
self["lenmode"] = _v
_v = arg.pop("minexponent", None)
_v = minexponent if minexponent is not None else _v
if _v is not None:
self["minexponent"] = _v
_v = arg.pop("nticks", None)
_v = nticks if nticks is not None else _v
if _v is not None:
self["nticks"] = _v
_v = arg.pop("outlinecolor", None)
_v = outlinecolor if outlinecolor is not None else _v
if _v is not None:
self["outlinecolor"] = _v
_v = arg.pop("outlinewidth", None)
_v = outlinewidth if outlinewidth is not None else _v
if _v is not None:
self["outlinewidth"] = _v
_v = arg.pop("separatethousands", None)
_v = separatethousands if separatethousands is not None else _v
if _v is not None:
self["separatethousands"] = _v
_v = arg.pop("showexponent", None)
_v = showexponent if showexponent is not None else _v
if _v is not None:
self["showexponent"] = _v
_v = arg.pop("showticklabels", None)
_v = showticklabels if showticklabels is not None else _v
if _v is not None:
self["showticklabels"] = _v
_v = arg.pop("showtickprefix", None)
_v = showtickprefix if showtickprefix is not None else _v
if _v is not None:
self["showtickprefix"] = _v
_v = arg.pop("showticksuffix", None)
_v = showticksuffix if showticksuffix is not None else _v
if _v is not None:
self["showticksuffix"] = _v
_v = arg.pop("thickness", None)
_v = thickness if thickness is not None else _v
if _v is not None:
self["thickness"] = _v
_v = arg.pop("thicknessmode", None)
_v = thicknessmode if thicknessmode is not None else _v
if _v is not None:
self["thicknessmode"] = _v
_v = arg.pop("tick0", None)
_v = tick0 if tick0 is not None else _v
if _v is not None:
self["tick0"] = _v
_v = arg.pop("tickangle", None)
_v = tickangle if tickangle is not None else _v
if _v is not None:
self["tickangle"] = _v
_v = arg.pop("tickcolor", None)
_v = tickcolor if tickcolor is not None else _v
if _v is not None:
self["tickcolor"] = _v
_v = arg.pop("tickfont", None)
_v = tickfont if tickfont is not None else _v
if _v is not None:
self["tickfont"] = _v
_v = arg.pop("tickformat", None)
_v = tickformat if tickformat is not None else _v
if _v is not None:
self["tickformat"] = _v
_v = arg.pop("tickformatstops", None)
_v = tickformatstops if tickformatstops is not None else _v
if _v is not None:
self["tickformatstops"] = _v
_v = arg.pop("tickformatstopdefaults", None)
_v = tickformatstopdefaults if tickformatstopdefaults is not None else _v
if _v is not None:
self["tickformatstopdefaults"] = _v
_v = arg.pop("ticklabeloverflow", None)
_v = ticklabeloverflow if ticklabeloverflow is not None else _v
if _v is not None:
self["ticklabeloverflow"] = _v
_v = arg.pop("ticklabelposition", None)
_v = ticklabelposition if ticklabelposition is not None else _v
if _v is not None:
self["ticklabelposition"] = _v
_v = arg.pop("ticklen", None)
_v = ticklen if ticklen is not None else _v
if _v is not None:
self["ticklen"] = _v
_v = arg.pop("tickmode", None)
_v = tickmode if tickmode is not None else _v
if _v is not None:
self["tickmode"] = _v
_v = arg.pop("tickprefix", None)
_v = tickprefix if tickprefix is not None else _v
if _v is not None:
self["tickprefix"] = _v
_v = arg.pop("ticks", None)
_v = ticks if ticks is not None else _v
if _v is not None:
self["ticks"] = _v
_v = arg.pop("ticksuffix", None)
_v = ticksuffix if ticksuffix is not None else _v
if _v is not None:
self["ticksuffix"] = _v
_v = arg.pop("ticktext", None)
_v = ticktext if ticktext is not None else _v
if _v is not None:
self["ticktext"] = _v
_v = arg.pop("ticktextsrc", None)
_v = ticktextsrc if ticktextsrc is not None else _v
if _v is not None:
self["ticktextsrc"] = _v
_v = arg.pop("tickvals", None)
_v = tickvals if tickvals is not None else _v
if _v is not None:
self["tickvals"] = _v
_v = arg.pop("tickvalssrc", None)
_v = tickvalssrc if tickvalssrc is not None else _v
if _v is not None:
self["tickvalssrc"] = _v
_v = arg.pop("tickwidth", None)
_v = tickwidth if tickwidth is not None else _v
if _v is not None:
self["tickwidth"] = _v
_v = arg.pop("title", None)
_v = title if title is not None else _v
if _v is not None:
self["title"] = _v
_v = arg.pop("titlefont", None)
_v = titlefont if titlefont is not None else _v
if _v is not None:
self["titlefont"] = _v
_v = arg.pop("titleside", None)
_v = titleside if titleside is not None else _v
if _v is not None:
self["titleside"] = _v
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("xanchor", None)
_v = xanchor if xanchor is not None else _v
if _v is not None:
self["xanchor"] = _v
_v = arg.pop("xpad", None)
_v = xpad if xpad is not None else _v
if _v is not None:
self["xpad"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("yanchor", None)
_v = yanchor if yanchor is not None else _v
if _v is not None:
self["yanchor"] = _v
_v = arg.pop("ypad", None)
_v = ypad if ypad is not None else _v
if _v is not None:
self["ypad"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
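# A minimal usage sketch (hypothetical data, assuming only that plotly is
# installed): it exercises the `tickmode`/`tick0`/`dtick` and `tickformat`
# options documented above on a scatter3d line color bar.
import plotly.graph_objects as go
fig = go.Figure(
    go.Scatter3d(
        x=[0, 1, 2, 3],
        y=[0, 1, 0, 1],
        z=[0, 1, 2, 3],
        mode="lines",
        line=dict(
            color=[0.0, 0.5, 1.0, 1.5],  # per-point values mapped to colors
            colorscale="Viridis",
            colorbar=dict(
                tickmode="linear",  # ticks start at `tick0`, step by `dtick`
                tick0=0.0,
                dtick=0.5,
                tickformat=".1f",  # d3-format mini-language
                title=dict(text="value"),
            ),
        ),
    )
)
fig.show()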
| 35.764247
| 100
| 0.558836
|
7ca964ad0ce1d200e945d3e8301d447113cbd3c9
| 23
|
py
|
Python
|
inaccel/sklearn/cluster/__init__.py
|
inaccel/scikit-learn
|
8549d8b70c26b7f5e3b80538b97b91c382a6ae45
|
[
"Apache-2.0"
] | 1
|
2021-03-23T14:56:06.000Z
|
2021-03-23T14:56:06.000Z
|
inaccel/sklearn/cluster/__init__.py
|
inaccel/scikit-learn
|
8549d8b70c26b7f5e3b80538b97b91c382a6ae45
|
[
"Apache-2.0"
] | null | null | null |
inaccel/sklearn/cluster/__init__.py
|
inaccel/scikit-learn
|
8549d8b70c26b7f5e3b80538b97b91c382a6ae45
|
[
"Apache-2.0"
] | null | null | null |
from ._kmeans import *
| 11.5
| 22
| 0.73913
|
a8e78e78716e7101c3a26beac99f8681a0e1d12b
| 395
|
py
|
Python
|
cogs/utils/mal_char_find.py
|
loomkoom/appuselfbotmain
|
507e5264a2c63195dd04f439e545a2783424bd21
|
[
"MIT"
] | null | null | null |
cogs/utils/mal_char_find.py
|
loomkoom/appuselfbotmain
|
507e5264a2c63195dd04f439e545a2783424bd21
|
[
"MIT"
] | null | null | null |
cogs/utils/mal_char_find.py
|
loomkoom/appuselfbotmain
|
507e5264a2c63195dd04f439e545a2783424bd21
|
[
"MIT"
] | null | null | null |
import sys
import asyncio
import tokage
list_of_ids = sys.argv[1:]
async def find_chars(all_ids):
tok = tokage.Client()
for id in all_ids:
character = await tok.get_character(id)
if character.name:
print(character.name + ' | ' + str(character.favorites) + '\n')
loop = asyncio.get_event_loop()
loop.run_until_complete(find_chars(list_of_ids))
loop.close()
| 23.235294
| 75
| 0.686076
|
5940bdbb242a0bbd94556eac60c43f723d787215
| 252
|
py
|
Python
|
docs/source/topics/processes/include/snippets/launch/launch_submit_dictionary.py
|
azadoks/aiida-core
|
b806b7fef8fc79090deccfe2019b77cb922e0581
|
[
"MIT",
"BSD-3-Clause"
] | 180
|
2019-07-12T07:45:26.000Z
|
2022-03-22T13:16:57.000Z
|
docs/source/topics/processes/include/snippets/launch/launch_submit_dictionary.py
|
azadoks/aiida-core
|
b806b7fef8fc79090deccfe2019b77cb922e0581
|
[
"MIT",
"BSD-3-Clause"
] | 2,466
|
2016-12-24T01:03:52.000Z
|
2019-07-04T13:41:08.000Z
|
docs/source/topics/processes/include/snippets/launch/launch_submit_dictionary.py
|
azadoks/aiida-core
|
b806b7fef8fc79090deccfe2019b77cb922e0581
|
[
"MIT",
"BSD-3-Clause"
] | 88
|
2019-07-06T01:42:39.000Z
|
2022-03-18T14:20:09.000Z
|
# -*- coding: utf-8 -*-
from aiida import orm
from aiida.engine import submit
from aiida.plugins import CalculationFactory
ArithmeticAddCalculation = CalculationFactory('core.arithmetic.add')
inputs = {
'x': orm.Int(1),
'y': orm.Int(2)
}
node = submit(ArithmeticAddCalculation, **inputs)
| 22.909091
| 68
| 0.698413
|
f7358ba007f332d22e83dabe5d8ad657102b156a
| 12,059
|
py
|
Python
|
SegFault/CosmoNet.py
|
NERSC/CosmoFlow
|
28937fad012b8bf854916527ebfc74f60de0ac26
|
[
"BSD-2-Clause"
] | 15
|
2018-09-17T09:51:18.000Z
|
2021-06-29T16:46:22.000Z
|
SegFault/CosmoNet.py
|
NERSC/CosmoFlow
|
28937fad012b8bf854916527ebfc74f60de0ac26
|
[
"BSD-2-Clause"
] | null | null | null |
SegFault/CosmoNet.py
|
NERSC/CosmoFlow
|
28937fad012b8bf854916527ebfc74f60de0ac26
|
[
"BSD-2-Clause"
] | 7
|
2018-03-07T08:50:13.000Z
|
2021-05-02T22:10:06.000Z
|
## For this variant, the order of ReLU and batch normalization is changed.
import tensorflow as tf
import numpy as np
from io_Cosmo import *
import hyper_parameters_Cosmo as hp
import time
from numpy import linalg as LA
def weight_variable(shape,name):
W = tf.get_variable(name,shape=shape, initializer=tf.contrib.layers.xavier_initializer())
return W
def bias_variable(shape):
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
def lrelu(x, alpha):
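    # Leaky ReLU: yields x for x > 0 and alpha * x otherwise, since
    # relu(x) - alpha * relu(-x) covers both branches.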
return tf.nn.relu(x) - alpha * tf.nn.relu(-x)
class CosmoNet:
def __init__(self,train_data,train_label, val_data = None, val_label = None, test_data = None, test_label = None, is_train = None):
self.train_data = train_data
self.train_label = train_label
self.val_data = val_data
self.val_label = val_label
self.test_data = test_data
self.test_label = test_label
self.is_train = is_train
self.num_parameters = 1
#initialize weight and bias
self.W = {}
self.b = {}
self.bn_param = {}
self.W['W_conv1'] = weight_variable([3, 3, 3, 1, 2],'w1')
self.b['b_conv1'] = bias_variable([2])
self.W['W_conv2'] = weight_variable([4, 4, 4, 2, 12],'w2')
self.b['b_conv2'] = bias_variable([12])
self.W['W_conv3'] = weight_variable([4,4,4,12,64],'w3')
self.b['b_conv3'] = bias_variable([64])
self.W['W_conv4'] = weight_variable([3,3,3,64,64],'w4')
self.b['b_conv4'] = bias_variable([64])
self.W['W_conv5'] = weight_variable([2,2,2,64,128],'w5')
self.b['b_conv5'] = bias_variable([128])
self.W['W_conv6'] = weight_variable([2,2,2,128,128],'w6')
self.b['b_conv6'] = bias_variable([128])
self.W['W_fc1'] = weight_variable([1024,1024],'w7')
self.b['b_fc1'] = bias_variable([1024])
self.W['W_fc2'] = weight_variable([1024,256],'w8')
self.b['b_fc2'] = bias_variable([256])
self.W['W_fc3'] = weight_variable([256,2],'w9')
self.b['b_fc3'] = bias_variable([2])
#Define some fuctions that might be used
def BatchNorm(self,inputT, IS_TRAINING, scope,reuse=None):
with tf.variable_scope(scope,'model',reuse = reuse):
return tf.contrib.layers.batch_norm(inputT, is_training=IS_TRAINING,center = True, scale = True,epsilon=0.0001,decay=0.99,scope=scope)
def deepNet(self,inputBatch,IS_TRAINING,keep_prob,scope,reuse):
# First convolutional layer
with tf.name_scope('conv1'):
h_conv1 = lrelu(self.BatchNorm(tf.nn.conv3d(inputBatch,self.W['W_conv1'],strides = [1,1,1,1,1],padding = 'VALID') + self.b['b_conv1'],IS_TRAINING = IS_TRAINING, scope = scope+str(1), reuse = reuse),hp.Model['LEAK_PARAMETER'])
with tf.name_scope('pool1'):
h_pool1 = tf.nn.avg_pool3d(h_conv1, ksize=[1,2,2,2,1], strides = [1,2,2,2,1], padding = 'VALID')
#Second convoluational layer
with tf.name_scope('conv2'):
h_conv2 = lrelu(self.BatchNorm(tf.nn.conv3d(h_pool1, self.W['W_conv2'],strides = [1,1,1,1,1],padding = 'VALID') + self.b['b_conv2'],IS_TRAINING=IS_TRAINING,scope = scope+str(2),reuse = reuse),hp.Model['LEAK_PARAMETER'])
with tf.name_scope('pool2'):
h_pool2 = tf.nn.avg_pool3d(h_conv2, ksize=[1,2,2,2,1], strides = [1,2,2,2,1], padding = 'VALID')
#Third convoluational layer
with tf.name_scope('conv3'):
h_conv3 = lrelu(self.BatchNorm(tf.nn.conv3d(h_pool2, self.W['W_conv3'],strides = [1,2,2,2,1],padding = 'VALID') + self.b['b_conv3'],IS_TRAINING=IS_TRAINING, scope = scope+str(3),reuse=reuse),hp.Model['LEAK_PARAMETER'])
#Fourth convoluational layer
with tf.name_scope('conv4'):
h_conv4 = lrelu(self.BatchNorm(tf.nn.conv3d(h_conv3, self.W['W_conv4'],strides = [1,1,1,1,1],padding = 'VALID') + self.b['b_conv4'],IS_TRAINING=IS_TRAINING,scope = scope+str(4),reuse=reuse),hp.Model['LEAK_PARAMETER'])
#Fifth convolutional layer
with tf.name_scope('conv5'):
h_conv5 = lrelu(self.BatchNorm(tf.nn.conv3d(h_conv4, self.W['W_conv5'],strides = [1,1,1,1,1],padding = 'VALID') + self.b['b_conv5'],IS_TRAINING=IS_TRAINING,scope = scope+str(5),reuse=reuse),hp.Model['LEAK_PARAMETER'])
#Sixth convolutional layer
with tf.name_scope('conv6'):
h_conv6 = lrelu(self.BatchNorm(tf.nn.conv3d(h_conv5, self.W['W_conv6'],strides = [1,1,1,1,1],padding = 'VALID') + self.b['b_conv6'],IS_TRAINING=IS_TRAINING,scope = scope+str(6),reuse=reuse),hp.Model['LEAK_PARAMETER'])
with tf.name_scope('fc1'):
h_conv6_flat = tf.reshape(h_conv6,[-1,1024])
h_fc1 = lrelu(tf.matmul(tf.nn.dropout(h_conv6_flat,keep_prob), self.W['W_fc1']) + self.b['b_fc1'],hp.Model['LEAK_PARAMETER'])
with tf.name_scope('fc2'):
h_fc2 = lrelu(tf.matmul(tf.nn.dropout(h_fc1,keep_prob), self.W['W_fc2']) + self.b['b_fc2'],hp.Model['LEAK_PARAMETER'])
with tf.name_scope('fc3'):
h_fc3 = tf.matmul(tf.nn.dropout(h_fc2,keep_prob), self.W['W_fc3']) + self.b['b_fc3']
return h_fc3
def loss(self):
with tf.name_scope('loss'):
predictions = self.deepNet(inputBatch = self.train_data,IS_TRAINING = True,keep_prob = hp.Model['DROP_OUT'],scope='conv_bn',reuse = None)
lossL1 = tf.reduce_mean(tf.abs(self.train_label-predictions))
for w in self.W:
lossL1 += hp.Model["REG_RATE"]*tf.nn.l2_loss(self.W[w])/self.num_parameters
return lossL1
def validation_loss(self):
val_predict = self.deepNet(inputBatch = self.val_data,IS_TRAINING = False,keep_prob = 1,scope='conv_bn',reuse=True)
val_true = self.val_label*tf.constant([2.905168635566176411e-02,4.023372385668218254e-02],dtype = tf.float32)+tf.constant([2.995679839999998983e-01,8.610806619999996636e-01],dtype = tf.float32)
lossL1Val = tf.reduce_mean(tf.abs(val_true-val_predict)/val_true)
return lossL1Val,val_true,val_predict
def train_loss(self):
train_predict = self.deepNet(inputBatch = self.train_data,IS_TRAINING = False,keep_prob = 1,scope='conv_bn',reuse=True)
train_true = self.train_label*tf.constant([2.905168635566176411e-02,4.023372385668218254e-02],dtype = tf.float32)+tf.constant([2.995679839999998983e-01,8.610806619999996636e-01],dtype = tf.float32)
lossL1Train = tf.reduce_mean(tf.abs(train_true-train_predict)/train_true)
return lossL1Train,train_true,train_predict
def test_loss(self):
test_predict = self.deepNet(inputBatch = self.test_data,IS_TRAINING = False,keep_prob = 1,scope='conv_bn',reuse=True)
test_true = self.test_label*tf.constant([2.905168635566176411e-02,4.023372385668218254e-02],dtype = tf.float32)+tf.constant([2.995679839999998983e-01,8.610806619999996636e-01],dtype = tf.float32)
lossL1Test = tf.reduce_mean(tf.abs(test_true-test_predict)/test_true)
return lossL1Test,test_true,test_predict
def optimize(self):
loss = self.loss()
with tf.name_scope('adam_optimizer'):
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
train_step = tf.train.AdamOptimizer(hp.Model['LEARNING_RATE']).minimize(loss)
lossL1Train,train_true,train_predict = self.train_loss()
return train_step, loss,lossL1Train,train_true,train_predict
def train(self):
train_step, loss, lossL1Train,train_true,train_predict = self.optimize()
lossL1Val,val_true,val_predict = self.validation_loss()
lossL1Test,test_true,test_predict = self.test_loss()
        saver = tf.train.Saver()  # was missing; needed below to checkpoint the best model
        config = tf.ConfigProto()
#used to save the model
global best_validation_accuracy
global last_improvement
global total_iterations
best_validation_accuracy = 1.0 #Best validation accuracy seen so far
last_improvement = 0 #Iteration-number for last improvement to validation accuracy.
require_improvement = hp.RUNPARAM['require_improvement'] #Stop optimization if no improvement found in this many iterations.
total_iterations = 0 #Counter for total number of iterations performed so far.
if(self.is_train):
print "training"
with tf.Session() as sess:
losses_train = []
losses_val = []
losses = []
val_accuracys = []
data_accuracys = []
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
for epoch in range(hp.RUNPARAM['num_epoch']):
                print(epoch)
save_path = os.path.join(hp.Path['Model_path'], 'best_validation')
total_iterations += 1
start_time = time.time()
loss_per_epoch_val = 0
loss_per_epoch_train = 0
for i in range(hp.RUNPARAM['batch_per_epoch']):
_,lossTrain,lossL1Train_,train_true_,train_predict_ = sess.run([train_step,loss,lossL1Train,train_true,train_predict])
loss_per_epoch_train +=lossL1Train_
losses.append(loss_per_epoch_train/hp.RUNPARAM['batch_per_epoch'])
losses_train.append(loss_per_epoch_train/hp.RUNPARAM['batch_per_epoch'])
for i in range(hp.RUNPARAM['batch_per_epoch_val']):
loss_,val_true_,val_predict_ = sess.run([lossL1Val,val_true,val_predict])
loss_per_epoch_val += loss_
losses_val.append(loss_per_epoch_val/hp.RUNPARAM['batch_per_epoch_val'])
if(loss_per_epoch_val/hp.RUNPARAM['batch_per_epoch_val'] < best_validation_accuracy):
best_validation_accuracy = loss_per_epoch_val/hp.RUNPARAM['batch_per_epoch_val']
last_improvement = total_iterations
saver.save(sess=sess, save_path=save_path)
print("Epoch {} took {:.3f}s".format(epoch, time.time() - start_time))
print " training loss: %.3f" %(loss_per_epoch_train/hp.RUNPARAM['batch_per_epoch'])
print " validation loss: %.3f" %(loss_per_epoch_val/hp.RUNPARAM['batch_per_epoch_val'])
print " best loss: %.3f"%best_validation_accuracy
np.savetxt(os.path.join(hp.Path['train_result'],'loss_train.txt'),losses_train)
np.savetxt(os.path.join(hp.Path['val_result'],'loss_val.txt'),losses_val)
np.savetxt(os.path.join(hp.Path['train_result'],'losses.txt'),losses)
if(total_iterations - last_improvement > require_improvement):
print ("No improvement found in a while, stopping optimization.")
break
            coord.request_stop()
            coord.join(threads)
if __name__ == "__main__":
NbodySimuDataBatch64, NbodySimuLabelBatch64 = readDataSet(filenames = [hp.Path['train_data']+str(i)+'.tfrecord' for i in range(0,4)])
NbodySimuDataBatch32, NbodySimuLabelBatch32 = tf.cast(NbodySimuDataBatch64,tf.float32),tf.cast(NbodySimuLabelBatch64,tf.float32)
    valDataBatch64, valLabelbatch64 = readDataSet(filenames=[hp.Path['val_data']+str(i)+".tfrecord" for i in range(4,5)])
    valDataBatch32, valLabelbatch32 = tf.cast(valDataBatch64,tf.float32),tf.cast(valLabelbatch64,tf.float32)
    testDataBatch64, testLabelbatch64 = readTestSet(filenames=[hp.Path['test_data']+str(i)+".tfrecord" for i in range(4,5)])
testDataBatch32, testLabelbatch32 = tf.cast(testDataBatch64,tf.float32),tf.cast(testLabelbatch64,tf.float32)
trainCosmo = CosmoNet(train_data=NbodySimuDataBatch32,train_label=NbodySimuLabelBatch32,val_data=valDataBatch32,val_label=valLabelbatch32,test_data=testDataBatch32,test_label=testLabelbatch32,is_train=True)
trainCosmo.train()
| 49.625514
| 237
| 0.657434
|
2d4083858ad922d2d7ef4872b00f3b939944d7f4
| 1,265
|
py
|
Python
|
Days/Day 23 - Coprocessor Conflagration/Part 1.py
|
jamesjiang52/Advent-of-Code-2017
|
94c85696e1335d7b5b00717a0e5f31c3653ba394
|
[
"MIT"
] | null | null | null |
Days/Day 23 - Coprocessor Conflagration/Part 1.py
|
jamesjiang52/Advent-of-Code-2017
|
94c85696e1335d7b5b00717a0e5f31c3653ba394
|
[
"MIT"
] | null | null | null |
Days/Day 23 - Coprocessor Conflagration/Part 1.py
|
jamesjiang52/Advent-of-Code-2017
|
94c85696e1335d7b5b00717a0e5f31c3653ba394
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 22 21:15:20 2017
@author: James Jiang
"""
def is_int(x):
try:
int(x)
return True
except ValueError:
return False
all_lines = [line.rstrip('\n') for line in open('Data.txt')]
all_instructions = []
for line in all_lines:
components = line.split(' ')
all_instructions.append(components)
dict_values = {}
for i in 'abcdefgh':
dict_values[i] = 0
def value(n):
    if is_int(n):
        return int(n)
    else:
        return dict_values[n]
total = 0
i = 0
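# Interpret the assembly-like program: `jnz` jumps, everything else advances
# one instruction; `total` counts how many times `mul` executes (the answer).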
while 0 <= i < len(all_instructions):
current_instruction = all_instructions[i]
if current_instruction[0] == 'set':
dict_values[current_instruction[1]] = value(current_instruction[2])
elif current_instruction[0] == 'sub':
dict_values[current_instruction[1]] -= value(current_instruction[2])
elif current_instruction[0] == 'mul':
dict_values[current_instruction[1]] *= value(current_instruction[2])
total += 1
if current_instruction[0] == 'jnz':
if value(current_instruction[1]) != 0:
i += value(current_instruction[2])
else:
i += 1
else:
i += 1
print(total)
| 23.867925
| 76
| 0.604743
|
e0dca093cc996b84adb33f386da443eef12c4665
| 591
|
py
|
Python
|
news_wall/blog/models.py
|
chiraag-kakar/news_wall
|
5f36dee6694125453a7620cf0883c5f5b35b2b36
|
[
"MIT"
] | 1
|
2022-01-24T13:49:16.000Z
|
2022-01-24T13:49:16.000Z
|
news_wall/blog/models.py
|
chiraag-kakar/news_wall
|
5f36dee6694125453a7620cf0883c5f5b35b2b36
|
[
"MIT"
] | null | null | null |
news_wall/blog/models.py
|
chiraag-kakar/news_wall
|
5f36dee6694125453a7620cf0883c5f5b35b2b36
|
[
"MIT"
] | 1
|
2021-03-11T19:52:44.000Z
|
2021-03-11T19:52:44.000Z
|
from django.db import models
from django.utils import timezone
from django.urls import reverse
from django.contrib.auth.models import User
# from tinymce.models import HTMLField
# Create your models here.
class Post(models.Model):
author = models.ForeignKey(User,on_delete=models.CASCADE)
title = models.CharField(max_length=30)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
def get_absolute_url(self):
return reverse("blogs:blog_detail", kwargs={"pk": self.pk})
def __str__(self):
return self.title
| 28.142857
| 67
| 0.730964
|
83afc6aa8688ee355f1d94a77ed8bb748b84cfc5
| 404
|
py
|
Python
|
urlregex.py
|
biebel24/py43-sample-code
|
2a252ae6352d93ed395246b2e39fcb93edb98a68
|
[
"BSD-3-Clause"
] | 1
|
2020-11-09T21:46:25.000Z
|
2020-11-09T21:46:25.000Z
|
urlregex.py
|
biebel24/py43-sample-code
|
2a252ae6352d93ed395246b2e39fcb93edb98a68
|
[
"BSD-3-Clause"
] | null | null | null |
urlregex.py
|
biebel24/py43-sample-code
|
2a252ae6352d93ed395246b2e39fcb93edb98a68
|
[
"BSD-3-Clause"
] | 1
|
2020-04-18T16:09:04.000Z
|
2020-04-18T16:09:04.000Z
|
# Search for link values within URL input
import urllib.request, urllib.parse, urllib.error
import re
import ssl
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
url = input('Enter - ')
html = urllib.request.urlopen(url, context=ctx).read()
links = re.findall(b'href="(http[s]?://.*?)"', html)
for link in links:
print(link.decode())
| 25.25
| 52
| 0.727723
|
8bf0ed39fae91a95280f3c357beba15cd8e4db6c
| 1,267
|
py
|
Python
|
utils/calculate_weights.py
|
DesmondLei/pytorch-deeplab-xception
|
97602b8095da95de46954d8dfa53307da271f565
|
[
"MIT"
] | null | null | null |
utils/calculate_weights.py
|
DesmondLei/pytorch-deeplab-xception
|
97602b8095da95de46954d8dfa53307da271f565
|
[
"MIT"
] | null | null | null |
utils/calculate_weights.py
|
DesmondLei/pytorch-deeplab-xception
|
97602b8095da95de46954d8dfa53307da271f565
|
[
"MIT"
] | null | null | null |
import os
from tqdm import tqdm
import numpy as np
from mypath import Path
def calculate_weigths_labels(dataset, dataloader, num_classes):
# Create an instance from the data loader
z = np.zeros((num_classes,))
# Initialize tqdm
tqdm_batch = tqdm(dataloader)
print('Calculating classes weights')
for sample in tqdm_batch:
y = sample['label']
y = y.detach().cpu().numpy()
mask = (y >= 0) & (y < num_classes)
labels = y[mask].astype(np.uint8)
count_l = np.bincount(labels, minlength=num_classes)
z += count_l
tqdm_batch.close()
total_frequency = np.sum(z)
class_weights = []
for frequency in z:
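        # Smoothed inverse-log frequency weighting (the ENet-style class
        # balancing heuristic, w = 1 / ln(c + p) with c = 1.02): rare classes
        # approach 1 / ln(1.02) ~= 50, ubiquitous ones 1 / ln(2.02) ~= 1.4.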
class_weight = 1 / (np.log(1.02 + (frequency / total_frequency)))
class_weights.append(class_weight)
ret = np.array(class_weights)
parameters_dir = "/work/scratch/lei/MyProject/t_chucai/models_and_parameters/parameters/classes_weights"
if not os.path.exists(parameters_dir):
os.makedirs(parameters_dir)
# classes_weights_path = os.path.join(Path.db_root_dir(dataset), dataset+'_classes_weights.npy')
classes_weights_path = os.path.join(parameters_dir, dataset + '_classes_weights.npy')
np.save(classes_weights_path, ret)
return ret
| 36.2
| 108
| 0.689029
|
b551811c0095df32f45f4c9b7434f82bf9cf0bb5
| 501
|
py
|
Python
|
shared/data/simulators/mixins/graph.py
|
DougMahoney/metatools
|
112340102962ff0c3e323564357cc4e848939cf7
|
[
"Apache-2.0"
] | 12
|
2020-04-10T07:09:24.000Z
|
2022-03-04T09:22:40.000Z
|
shared/data/simulators/mixins/graph.py
|
DougMahoney/metatools
|
112340102962ff0c3e323564357cc4e848939cf7
|
[
"Apache-2.0"
] | 5
|
2020-05-16T18:22:23.000Z
|
2022-03-29T13:19:27.000Z
|
shared/data/simulators/mixins/graph.py
|
DougMahoney/metatools
|
112340102962ff0c3e323564357cc4e848939cf7
|
[
"Apache-2.0"
] | 2
|
2020-12-10T15:17:40.000Z
|
2021-12-02T17:34:56.000Z
|
from transitions.extensions import GraphMachine
class GraphMixin(GraphMachine):
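    # Renders state diagrams via transitions' GraphMachine, but tweaks the
    # generated graph class so edges for the internal `_TRANSITION_CHECK`
    # trigger carry no label, keeping the diagram readable.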
def _init_graphviz_engine(self, use_pygraphviz):
Graph = super(GraphMixin, self)._init_graphviz_engine(use_pygraphviz)
class TweakedGraph(Graph):
_TRANSITION_CHECK = self._TRANSITION_CHECK
def _transition_label(self, tran):
if tran.get('trigger') == self._TRANSITION_CHECK:
return ''
else:
return super(TweakedGraph, self)._transition_label(tran)
return TweakedGraph
| 21.782609
| 71
| 0.742515
|
d495a8b3306fce3dc945f3f4d7c4b9264474c30c
| 1,630
|
py
|
Python
|
6_kyu/A Rule of Divisibility by 13.py
|
serembon/Codewars-Python-Kata
|
868f9f5deb991b2c0b7e27dce41a5a20805013dc
|
[
"MIT"
] | null | null | null |
6_kyu/A Rule of Divisibility by 13.py
|
serembon/Codewars-Python-Kata
|
868f9f5deb991b2c0b7e27dce41a5a20805013dc
|
[
"MIT"
] | null | null | null |
6_kyu/A Rule of Divisibility by 13.py
|
serembon/Codewars-Python-Kata
|
868f9f5deb991b2c0b7e27dce41a5a20805013dc
|
[
"MIT"
] | null | null | null |
"""
When you divide the successive powers of `10` by `13` you get the following remainders of the integer divisions:
`1, 10, 9, 12, 3, 4`.
Then the whole pattern repeats.
Hence the following method: multiply the right most digit of the number with the left most number in the sequence shown
above, the second right most digit with the second left most number in the sequence, and so on, cycling as needed. Sum
all these products. Repeat this process until the sequence of sums is stationary.
...........................................................................
Example: What is the remainder when `1234567` is divided by `13`?
`7×1 + 6×10 + 5×9 + 4×12 + 3×3 + 2×4 + 1×1 = 178`
We repeat the process with 178:
`8×1 + 7×10 + 1×9 = 87`
and again with 87:
`7×1 + 8×10 = 87`
...........................................................................
From now on the sequence is stationary and the remainder of `1234567` by `13` is the same as the remainder of `87` by
`13`: `9`.
Call `thirt` the function which processes this sequence of operations on an integer `n (>=0)`. `thirt` will return
the stationary number.
`thirt(1234567)` calculates 178, then 87, then 87 and returns `87`.
`thirt(321)` calculates 48, 48 and returns `48`
"""
# My solution
def thirt(n):
pattern = [1, 10, 9, 12, 3, 4]
s = 0
while True:
current_sum = 0
for index, digit in enumerate(str(n)[::-1]):
current_index = index % len(pattern)
current_sum += int(digit) * pattern[current_index]
if s == current_sum:
return s
s = current_sum
n = current_sum
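# A quick sanity check against the worked examples in the docstring above:
if __name__ == "__main__":
    assert thirt(1234567) == 87
    assert thirt(321) == 48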
| 37.906977
| 119
| 0.609202
|
bd6a8b4008c750f54b93a619b9b65f2191b7e0a2
| 7,645
|
py
|
Python
|
plugins/sophos_central/icon_sophos_central/util/api.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/sophos_central/icon_sophos_central/util/api.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/sophos_central/icon_sophos_central/util/api.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
import requests
from insightconnect_plugin_runtime.exceptions import PluginException
import json
class SophosCentralAPI:
def __init__(self, url, client_id, client_secret, tenant_id, version, logger):
self.url = url
self.client_id = client_id
self.client_secret = client_secret
self.tenant_id = tenant_id
self.version = version
self.logger = logger
def get_endpoint_id(self, entity):
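        # Resolve `entity` (hostname, endpoint ID, IPv4/IPv6 address or MAC)
        # to a Sophos endpoint ID by paging through the endpoints listing.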
endpoint_id = None
page_key = None
for index in range(9999):
get_agent = self.get_endpoints(page_key=page_key)
page_key = get_agent.get("pages", {}).get("nextKey", None)
for e in get_agent.get("items", []):
if e.get("hostname") == entity:
endpoint_id = e.get("id")
elif e.get("id") == entity:
endpoint_id = e.get("id")
elif entity in e.get("ipv4Addresses", []):
endpoint_id = e.get("id")
elif entity in e.get("macAddresses", []):
endpoint_id = e.get("id")
elif entity in e.get("ipv6Addresses", []):
endpoint_id = e.get("id")
            if page_key is None or index > get_agent.get("pages", {}).get("total", 0):
break
if endpoint_id is None:
raise PluginException(preset=PluginException.Preset.NOT_FOUND)
return endpoint_id
def tamper_status(self, endpoint_id):
return self._make_request("GET", f"/endpoint/v1/endpoints/{endpoint_id}/tamper-protection", "Tenant")
def get_blacklists(self, page: int = 1):
return self._make_request(
"GET",
"/endpoint/v1/settings/blocked-items",
"Tenant",
params={"page": page, "pageSize": 100, "pageTotal": True},
)
def unblacklist(self, uuid: str):
return self._make_request("DELETE", f"/endpoint/v1/settings/blocked-items/{uuid}", "Tenant")
def blacklist(self, hash: str, description: str):
return self._make_request(
"POST",
"/endpoint/v1/settings/blocked-items",
"Tenant",
json_data={"comment": description, "properties": {"sha256": hash}, "type": "sha256"},
)
def antivirus_scan(self, uuid: str):
return self._make_request("POST", f"/endpoint/v1/endpoints/{uuid}/scans", "Tenant", json_data={})
def get_alerts(self, since: str = None, key: str = None):
params = {"pageTotal": True}
if since:
params = {"from": since}
if key:
params = {"pageFromKey": key}
return self._make_request("GET", "/common/v1/alerts", "Tenant", params=params)
def get_endpoints(self, since=None, page_key=None):
params = {"pageTotal": True}
if since:
params = {"lastSeenAfter": since}
if page_key:
params = {"pageKey": page_key}
return self._make_request("GET", "/endpoint/v1/endpoints", "Tenant", params=params)
def whoami(self, access_token):
return self._call_api(
"GET",
"https://api.central.sophos.com/whoami/v1",
headers={"Authorization": f"Bearer {access_token}"},
)
def get_access_token(self):
token = self._call_api(
method="POST",
url="https://id.sophos.com/api/v2/oauth2/token",
headers={"Content-Type": "application/x-www-form-urlencoded"},
data={
"grant_type": "client_credentials",
"client_id": self.client_id,
"client_secret": self.client_secret,
"scope": "token",
},
)
return token.get("access_token")
def _make_request(self, method, path, key_type, params=None, json_data=None):
access_token = self.get_access_token()
whoami = self.whoami(access_token)
url = None
if self.tenant_id:
id_ = self.tenant_id
else:
id_ = whoami["id"]
url = whoami.get("apiHosts", {}).get("dataRegion")
if not url:
url = self.url
return self._call_api(
method,
f"{url}{path}",
params,
json_data,
headers={"Authorization": f"Bearer {access_token}", f"X-{key_type}-ID": id_},
)
def _call_api(self, method, url, params=None, json_data=None, data=None, headers=None):
response = {"text": ""}
if not headers:
headers = {}
headers["User-Agent"] = f"Rapid7 InsightConnect, Sophos Central:{self.version}"
try:
response = requests.request(method, url, json=json_data, data=data, params=params, headers=headers)
if response.status_code == 400:
raise PluginException(cause="Bad request.", assistance="The API client sent a malformed request.")
if response.status_code == 401:
raise PluginException(
cause="Unauthorized.",
assistance="The client needs to authenticate before making the API call. "
"Either your credentials are invalid or blacklisted,"
" or your JWT authorization token has expired.",
)
if response.status_code == 403:
raise PluginException(
cause="Forbidden.",
assistance="The client has authenticated but doesn't have permission "
"to perform the operation via the API.",
)
if response.status_code == 404:
raise PluginException(
cause="Not found.",
assistance="The requested resource wasn't found. The resource ID provided may be invalid, "
"or the resource may have been deleted, or is no longer addressable.",
)
if response.status_code == 409:
raise PluginException(
cause="Conflict.",
assistance="Request made conflicts with an existing resource. Please check the API documentation "
"or contact Support.",
)
if response.status_code == 451:
raise PluginException(
cause="Unavailable for Legal Reasons",
assistance="An example of a legal reason we can't serve an API is that the caller is located "
"in a country where United States export control restrictions apply, "
"and we are required by law not to handle such API calls.",
)
if response.status_code >= 500:
raise PluginException(preset=PluginException.Preset.SERVER_ERROR)
if response.status_code >= 400:
raise PluginException(preset=PluginException.Preset.UNKNOWN, data=response.text)
if 200 <= response.status_code < 300:
if response.text:
return response.json()
return {}
raise PluginException(preset=PluginException.Preset.UNKNOWN, data=response.text)
except json.decoder.JSONDecodeError as e:
self.logger.info(f"Invalid JSON: {e}")
raise PluginException(preset=PluginException.Preset.INVALID_JSON, data=response.text)
except requests.exceptions.HTTPError as e:
self.logger.info(f"Call to Sophos Central failed: {e}")
raise PluginException(preset=PluginException.Preset.UNKNOWN, data=response.text)
| 40.882353
| 118
| 0.566776
|
e4d09a51359739a816d0c88e762a34fe4d3d3672
| 721
|
py
|
Python
|
grr/server/grr_response_server/databases/mysql_blobs.py
|
dekoder/grr
|
27ba38dc0f5ad4f3e0cdbfb146a0a789e3b0d27b
|
[
"Apache-2.0"
] | 3
|
2018-09-30T01:31:29.000Z
|
2019-04-22T11:44:54.000Z
|
grr/server/grr_response_server/databases/mysql_blobs.py
|
tomchop/grr
|
27ba38dc0f5ad4f3e0cdbfb146a0a789e3b0d27b
|
[
"Apache-2.0"
] | 1
|
2022-03-02T09:58:05.000Z
|
2022-03-02T09:58:05.000Z
|
grr/server/grr_response_server/databases/mysql_blobs.py
|
tomchop/grr
|
27ba38dc0f5ad4f3e0cdbfb146a0a789e3b0d27b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""The MySQL database methods for blobs handling."""
class MySQLDBBlobsMixin(object):
"""MySQLDB mixin for blobs related functions."""
def WriteClientPathBlobReferences(self, references_by_path):
raise NotImplementedError()
def ReadClientPathBlobReferences(self, paths):
raise NotImplementedError()
def WriteBlobs(self, blob_id_data_pairs):
raise NotImplementedError()
def ReadBlobs(self, blob_ids):
raise NotImplementedError()
def CheckBlobsExist(self, blob_ids):
raise NotImplementedError()
def WriteHashBlobReferences(self, references_by_hash):
raise NotImplementedError()
def ReadHashBlobReferences(self, hashes):
raise NotImplementedError()
| 25.75
| 62
| 0.765603
|
b64640f1f6c9bf3610d4d8057a42787d684fdf18
| 21,057
|
py
|
Python
|
ironic/common/exception.py
|
isabella232/ironic
|
9a0bd8a774143e6f767aaa3031be6b70554bc332
|
[
"Apache-2.0"
] | 2
|
2019-06-17T21:37:53.000Z
|
2020-07-11T03:58:39.000Z
|
ironic/common/exception.py
|
openshift/ironic
|
9a0bd8a774143e6f767aaa3031be6b70554bc332
|
[
"Apache-2.0"
] | 1
|
2019-06-16T22:53:49.000Z
|
2019-09-16T09:37:35.000Z
|
ironic/common/exception.py
|
isabella232/ironic
|
9a0bd8a774143e6f767aaa3031be6b70554bc332
|
[
"Apache-2.0"
] | 6
|
2019-06-13T12:49:33.000Z
|
2021-04-17T16:33:19.000Z
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Ironic specific exceptions list."""
from ironic_lib.exception import IronicException
from oslo_log import log as logging
from six.moves import http_client
from ironic.common.i18n import _
LOG = logging.getLogger(__name__)
class NotAuthorized(IronicException):
_msg_fmt = _("Not authorized.")
code = http_client.FORBIDDEN
class OperationNotPermitted(NotAuthorized):
_msg_fmt = _("Operation not permitted.")
class Invalid(IronicException):
_msg_fmt = _("Unacceptable parameters.")
code = http_client.BAD_REQUEST
class Conflict(IronicException):
_msg_fmt = _('Conflict.')
code = http_client.CONFLICT
class TemporaryFailure(IronicException):
_msg_fmt = _("Resource temporarily unavailable, please retry.")
code = http_client.SERVICE_UNAVAILABLE
class NotAcceptable(IronicException):
# TODO(deva): We need to set response headers in the API for this exception
_msg_fmt = _("Request not acceptable.")
code = http_client.NOT_ACCEPTABLE
class InvalidState(Conflict):
_msg_fmt = _("Invalid resource state.")
class NodeAlreadyExists(Conflict):
_msg_fmt = _("A node with UUID %(uuid)s already exists.")
class MACAlreadyExists(Conflict):
_msg_fmt = _("A port with MAC address %(mac)s already exists.")
class ChassisAlreadyExists(Conflict):
_msg_fmt = _("A chassis with UUID %(uuid)s already exists.")
class PortAlreadyExists(Conflict):
_msg_fmt = _("A port with UUID %(uuid)s already exists.")
class PortgroupAlreadyExists(Conflict):
_msg_fmt = _("A portgroup with UUID %(uuid)s already exists.")
class PortgroupDuplicateName(Conflict):
_msg_fmt = _("A portgroup with name %(name)s already exists.")
class PortgroupMACAlreadyExists(Conflict):
_msg_fmt = _("A portgroup with MAC address %(mac)s already exists.")
class InstanceAssociated(Conflict):
_msg_fmt = _("Instance %(instance_uuid)s is already associated with a "
"node, it cannot be associated with this other node %(node)s")
class DuplicateName(Conflict):
_msg_fmt = _("A node with name %(name)s already exists.")
class VolumeConnectorAlreadyExists(Conflict):
_msg_fmt = _("A volume connector with UUID %(uuid)s already exists.")
class VolumeConnectorTypeAndIdAlreadyExists(Conflict):
_msg_fmt = _("A volume connector with type %(type)s and connector ID "
"%(connector_id)s already exists.")
class VolumeTargetAlreadyExists(Conflict):
_msg_fmt = _("A volume target with UUID %(uuid)s already exists.")
class VolumeTargetBootIndexAlreadyExists(Conflict):
_msg_fmt = _("A volume target with boot index '%(boot_index)s' "
"for the same node already exists.")
class VifAlreadyAttached(Conflict):
_msg_fmt = _("Unable to attach VIF because VIF %(vif)s is already "
"attached to Ironic %(object_type)s %(object_uuid)s")
class NoFreePhysicalPorts(Invalid):
_msg_fmt = _("Unable to attach VIF %(vif)s, not "
"enough free physical ports.")
class VifNotAttached(Invalid):
_msg_fmt = _("Unable to detach VIF %(vif)s from node %(node)s "
"because it is not attached to it.")
class InvalidUUID(Invalid):
_msg_fmt = _("Expected a UUID but received %(uuid)s.")
class InvalidUuidOrName(Invalid):
_msg_fmt = _("Expected a logical name or UUID but received %(name)s.")
class InvalidName(Invalid):
_msg_fmt = _("Expected a logical name but received %(name)s.")
class InvalidConductorGroup(Invalid):
_msg_fmt = _("Expected a conductor group but received %(group)s.")
class InvalidIdentity(Invalid):
_msg_fmt = _("Expected a UUID or int but received %(identity)s.")
class InvalidMAC(Invalid):
_msg_fmt = _("Expected a MAC address but received %(mac)s.")
class InvalidSwitchID(Invalid):
_msg_fmt = _("Expected a MAC address or OpenFlow datapath ID but "
"received %(switch_id)s.")
class InvalidDatapathID(Invalid):
_msg_fmt = _("Expected an OpenFlow datapath ID but received "
"%(datapath_id)s.")
class InvalidStateRequested(Invalid):
_msg_fmt = _('The requested action "%(action)s" can not be performed '
'on node "%(node)s" while it is in state "%(state)s".')
class PatchError(Invalid):
_msg_fmt = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")
class InstanceDeployFailure(IronicException):
_msg_fmt = _("Failed to deploy instance: %(reason)s")
class ImageUnacceptable(IronicException):
_msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class ImageConvertFailed(IronicException):
_msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
_msg_fmt = "%(err)s"
class MissingParameterValue(InvalidParameterValue):
_msg_fmt = "%(err)s"
class Duplicate(IronicException):
_msg_fmt = _("Resource already exists.")
class NotFound(IronicException):
_msg_fmt = _("Resource could not be found.")
code = http_client.NOT_FOUND
class DHCPLoadError(IronicException):
_msg_fmt = _("Failed to load DHCP provider %(dhcp_provider_name)s, "
"reason: %(reason)s")
# TODO(dtantsur): word "driver" is overused in class names here, and generally
# means stevedore driver, not ironic driver. Rename them in the future.
class DriverNotFound(NotFound):
_msg_fmt = _("Could not find the following driver(s) or hardware type(s): "
"%(driver_name)s.")
class DriverNotFoundInEntrypoint(DriverNotFound):
_msg_fmt = _("Could not find the following items in the "
"'%(entrypoint)s' entrypoint: %(names)s.")
class InterfaceNotFoundInEntrypoint(InvalidParameterValue):
_msg_fmt = _("Could not find the following interface in the "
"'%(entrypoint)s' entrypoint: %(iface)s. Valid interfaces "
"are %(valid)s.")
class IncompatibleInterface(InvalidParameterValue):
_msg_fmt = _("%(interface_type)s interface implementation "
"'%(interface_impl)s' is not supported by hardware type "
"%(hardware_type)s.")
class NoValidDefaultForInterface(InvalidParameterValue):
# NOTE(rloo): in the line below, there is no blank space after 'For'
# because node_info could be an empty string. If node_info
# is not empty, it should start with a space.
_msg_fmt = _("For%(node_info)s hardware type '%(driver)s', no default "
"value found for %(interface_type)s interface.")
class ImageNotFound(NotFound):
_msg_fmt = _("Image %(image_id)s could not be found.")
class NoValidHost(NotFound):
_msg_fmt = _("No valid host was found. Reason: %(reason)s")
class InstanceNotFound(NotFound):
_msg_fmt = _("Instance %(instance)s could not be found.")
class InputFileError(IronicException):
_msg_fmt = _("Error with file %(file_name)s. Reason: %(reason)s")
class NodeNotFound(NotFound):
_msg_fmt = _("Node %(node)s could not be found.")
class PortgroupNotFound(NotFound):
_msg_fmt = _("Portgroup %(portgroup)s could not be found.")
class PortgroupNotEmpty(Invalid):
_msg_fmt = _("Cannot complete the requested action because portgroup "
"%(portgroup)s contains ports.")
class NodeAssociated(InvalidState):
_msg_fmt = _("Node %(node)s is associated with instance %(instance)s.")
class PortNotFound(NotFound):
_msg_fmt = _("Port %(port)s could not be found.")
class FailedToUpdateDHCPOptOnPort(IronicException):
_msg_fmt = _("Update DHCP options on port: %(port_id)s failed.")
class FailedToCleanDHCPOpts(IronicException):
_msg_fmt = _("Clean up DHCP options on node: %(node)s failed.")
class FailedToGetIPAddressOnPort(IronicException):
_msg_fmt = _("Retrieve IP address on port: %(port_id)s failed.")
class InvalidIPv4Address(IronicException):
_msg_fmt = _("Invalid IPv4 address %(ip_address)s.")
class FailedToUpdateMacOnPort(IronicException):
_msg_fmt = _("Update MAC address on port: %(port_id)s failed.")
class ChassisNotFound(NotFound):
_msg_fmt = _("Chassis %(chassis)s could not be found.")
class VolumeConnectorNotFound(NotFound):
_msg_fmt = _("Volume connector %(connector)s could not be found.")
class VolumeTargetNotFound(NotFound):
_msg_fmt = _("Volume target %(target)s could not be found.")
class NoDriversLoaded(IronicException):
_msg_fmt = _("Conductor %(conductor)s cannot be started "
"because no hardware types were loaded.")
class ConductorNotFound(NotFound):
_msg_fmt = _("Conductor %(conductor)s could not be found.")
class ConductorAlreadyRegistered(IronicException):
_msg_fmt = _("Conductor %(conductor)s already registered.")
class ConductorHardwareInterfacesAlreadyRegistered(IronicException):
_msg_fmt = _("At least one of these (hardware type %(hardware_type)s, "
"interface type %(interface_type)s, interfaces "
"%(interfaces)s) combinations are already registered for "
"this conductor.")
class PowerStateFailure(InvalidState):
_msg_fmt = _("Failed to set node power state to %(pstate)s.")
class ExclusiveLockRequired(NotAuthorized):
_msg_fmt = _("An exclusive lock is required, "
"but the current context has a shared lock.")
class NodeMaintenanceFailure(Invalid):
_msg_fmt = _("Failed to toggle maintenance-mode flag "
"for node %(node)s: %(reason)s")
class NodeConsoleNotEnabled(Invalid):
_msg_fmt = _("Console access is not enabled on node %(node)s")
class NodeInMaintenance(Invalid):
_msg_fmt = _("The %(op)s operation can't be performed on node "
"%(node)s because it's in maintenance mode.")
class ChassisNotEmpty(Invalid):
_msg_fmt = _("Cannot complete the requested action because chassis "
"%(chassis)s contains nodes.")
class IPMIFailure(IronicException):
_msg_fmt = _("IPMI call failed: %(cmd)s.")
class UnsupportedDriverExtension(Invalid):
_msg_fmt = _('Driver %(driver)s does not support %(extension)s '
'(disabled or not implemented).')
class GlanceConnectionFailed(IronicException):
_msg_fmt = _("Connection to glance endpoint %(endpoint)s failed: "
"%(reason)s")
class ImageNotAuthorized(NotAuthorized):
_msg_fmt = _("Not authorized for image %(image_id)s.")
class InvalidImageRef(Invalid):
_msg_fmt = _("Invalid image href %(image_href)s.")
class ImageRefValidationFailed(IronicException):
_msg_fmt = _("Validation of image href %(image_href)s failed, "
"reason: %(reason)s")
class ImageDownloadFailed(IronicException):
_msg_fmt = _("Failed to download image %(image_href)s, reason: %(reason)s")
class KeystoneUnauthorized(IronicException):
_msg_fmt = _("Not authorized in Keystone.")
class KeystoneFailure(IronicException):
pass
class CatalogNotFound(IronicException):
_msg_fmt = _("Service type %(service_type)s with endpoint type "
"%(endpoint_type)s not found in keystone service catalog.")
class ServiceUnavailable(IronicException):
_msg_fmt = _("Connection failed")
class Forbidden(IronicException):
_msg_fmt = _("Requested OpenStack Images API is forbidden")
class BadRequest(IronicException):
pass
class InvalidEndpoint(IronicException):
_msg_fmt = _("The provided endpoint is invalid")
class CommunicationError(IronicException):
_msg_fmt = _("Unable to communicate with the server.")
class HTTPForbidden(NotAuthorized):
_msg_fmt = _("Access was denied to the following resource: %(resource)s")
class Unauthorized(IronicException):
pass
class HTTPNotFound(NotFound):
pass
class ConfigNotFound(IronicException):
_msg_fmt = _("Could not find config at %(path)s")
class NodeLocked(Conflict):
_msg_fmt = _("Node %(node)s is locked by host %(host)s, please retry "
"after the current operation is completed.")
class NodeNotLocked(Invalid):
_msg_fmt = _("Node %(node)s found not to be locked on release")
class NoFreeConductorWorker(TemporaryFailure):
_msg_fmt = _('Requested action cannot be performed due to lack of free '
'conductor workers.')
code = http_client.SERVICE_UNAVAILABLE
class VendorPassthruException(IronicException):
pass
class ConfigInvalid(IronicException):
_msg_fmt = _("Invalid configuration file. %(error_msg)s")
class DriverLoadError(IronicException):
_msg_fmt = _("Driver, hardware type or interface %(driver)s could not be "
"loaded. Reason: %(reason)s.")
class DriverOperationError(IronicException):
_msg_fmt = _("Runtime driver %(driver)s failure. Reason: %(reason)s.")
class ConsoleError(IronicException):
pass
class NoConsolePid(ConsoleError):
_msg_fmt = _("Could not find pid in pid file %(pid_path)s")
class ConsoleSubprocessFailed(ConsoleError):
_msg_fmt = _("Console subprocess failed to start. %(error)s")
class PasswordFileFailedToCreate(IronicException):
_msg_fmt = _("Failed to create the password file. %(error)s")
class IloOperationError(DriverOperationError):
_msg_fmt = _("%(operation)s failed, error: %(error)s")
class IloOperationNotSupported(DriverOperationError):
_msg_fmt = _("%(operation)s not supported. error: %(error)s")
class DracOperationError(DriverOperationError):
_msg_fmt = _('DRAC operation failed. Reason: %(error)s')
class FailedToGetSensorData(IronicException):
_msg_fmt = _("Failed to get sensor data for node %(node)s. "
"Error: %(error)s")
class FailedToParseSensorData(IronicException):
_msg_fmt = _("Failed to parse sensor data for node %(node)s. "
"Error: %(error)s")
class InsufficientDiskSpace(IronicException):
_msg_fmt = _("Disk volume where '%(path)s' is located doesn't have "
"enough disk space. Required %(required)d MiB, "
"only %(actual)d MiB available space present.")
class ImageCreationFailed(IronicException):
_msg_fmt = _('Creating %(image_type)s image failed: %(error)s')
class SwiftOperationError(IronicException):
_msg_fmt = _("Swift operation '%(operation)s' failed: %(error)s")
class SwiftObjectNotFoundError(SwiftOperationError):
_msg_fmt = _("Swift object %(obj)s from container %(container)s "
"not found. Operation '%(operation)s' failed.")
class SNMPFailure(DriverOperationError):
_msg_fmt = _("SNMP operation '%(operation)s' failed: %(error)s")
class FileSystemNotSupported(IronicException):
_msg_fmt = _("Failed to create a file system. "
"File system %(fs)s is not supported.")
class IRMCOperationError(DriverOperationError):
_msg_fmt = _('iRMC %(operation)s failed. Reason: %(error)s')
class IRMCSharedFileSystemNotMounted(DriverOperationError):
_msg_fmt = _("iRMC shared file system '%(share)s' is not mounted.")
class HardwareInspectionFailure(IronicException):
_msg_fmt = _("Failed to inspect hardware. Reason: %(error)s")
class NodeCleaningFailure(IronicException):
_msg_fmt = _("Failed to clean node %(node)s: %(reason)s")
class PathNotFound(IronicException):
_msg_fmt = _("Path %(dir)s does not exist.")
class DirectoryNotWritable(IronicException):
_msg_fmt = _("Directory %(dir)s is not writable.")
class UcsOperationError(DriverOperationError):
_msg_fmt = _("Cisco UCS client: operation %(operation)s failed for node"
" %(node)s. Reason: %(error)s")
class UcsConnectionError(IronicException):
_msg_fmt = _("Cisco UCS client: connection failed for node "
"%(node)s. Reason: %(error)s")
class ImageUploadFailed(IronicException):
_msg_fmt = _("Failed to upload %(image_name)s image to web server "
"%(web_server)s, reason: %(reason)s")
class CIMCException(DriverOperationError):
_msg_fmt = _("Cisco IMC exception occurred for node %(node)s: %(error)s")
class NodeTagNotFound(IronicException):
_msg_fmt = _("Node %(node_id)s doesn't have a tag '%(tag)s'")
class NetworkError(IronicException):
_msg_fmt = _("Network operation failure.")
class IncompleteLookup(Invalid):
_msg_fmt = _("At least one of 'addresses' and 'node_uuid' parameters "
"is required")
class NotificationSchemaObjectError(IronicException):
_msg_fmt = _("Expected object %(obj)s when populating notification payload"
" but got object %(source)s")
class NotificationSchemaKeyError(IronicException):
_msg_fmt = _("Object %(obj)s doesn't have the field \"%(field)s\" "
"required for populating notification schema key "
"\"%(key)s\"")
class NotificationPayloadError(IronicException):
_msg_fmt = _("Payload not populated when trying to send notification "
"\"%(class_name)s\"")
class StorageError(IronicException):
_msg_fmt = _("Storage operation failure.")
class RedfishError(DriverOperationError):
_msg_fmt = _("Redfish exception occurred. Error: %(error)s")
class RedfishConnectionError(RedfishError):
_msg_fmt = _("Redfish connection failed for node %(node)s: %(error)s")
class PortgroupPhysnetInconsistent(IronicException):
_msg_fmt = _("Port group %(portgroup)s has member ports with inconsistent "
"physical networks (%(physical_networks)s). All ports in a "
"port group must have the same physical network.")
class VifInvalidForAttach(Conflict):
_msg_fmt = _("Unable to attach VIF %(vif)s to node %(node)s. Reason: "
"%(reason)s")
class AgentAPIError(IronicException):
_msg_fmt = _('Agent API for node %(node)s returned HTTP status code '
'%(status)s with error: %(error)s')
class NodeTraitNotFound(NotFound):
_msg_fmt = _("Node %(node_id)s doesn't have a trait '%(trait)s'")
class InstanceRescueFailure(IronicException):
_msg_fmt = _('Failed to rescue instance %(instance)s for node '
'%(node)s: %(reason)s')
class InstanceUnrescueFailure(IronicException):
_msg_fmt = _('Failed to unrescue instance %(instance)s for node '
'%(node)s: %(reason)s')
class XClarityError(IronicException):
_msg_fmt = _("XClarity exception occurred. Error: %(error)s")
class BIOSSettingAlreadyExists(Conflict):
_msg_fmt = _('A BIOS setting %(name)s for node %(node)s already exists.')
class BIOSSettingNotFound(NotFound):
_msg_fmt = _("Node %(node)s doesn't have a BIOS setting '%(name)s'")
class BIOSSettingListNotFound(NotFound):
_msg_fmt = _("Node %(node)s doesn't have BIOS settings '%(names)s'")
class DatabaseVersionTooOld(IronicException):
_msg_fmt = _("Database version is too old")
class AgentConnectionFailed(IronicException):
_msg_fmt = _("Connection to agent failed: %(reason)s")
class NodeProtected(HTTPForbidden):
_msg_fmt = _("Node %(node)s is protected and cannot be undeployed, "
"rebuilt or deleted")
class AllocationNotFound(NotFound):
_msg_fmt = _("Allocation %(allocation)s could not be found.")
class AllocationDuplicateName(Conflict):
_msg_fmt = _("An allocation with name %(name)s already exists.")
class AllocationAlreadyExists(Conflict):
_msg_fmt = _("An allocation with UUID %(uuid)s already exists.")
class AllocationFailed(IronicException):
_msg_fmt = _("Failed to process allocation %(uuid)s: %(error)s.")
class DeployTemplateDuplicateName(Conflict):
_msg_fmt = _("A deploy template with name %(name)s already exists.")
class DeployTemplateAlreadyExists(Conflict):
_msg_fmt = _("A deploy template with UUID %(uuid)s already exists.")
class DeployTemplateNotFound(NotFound):
_msg_fmt = _("Deploy template %(template)s could not be found.")
class InvalidDeployTemplate(Invalid):
_msg_fmt = _("Deploy template invalid: %(err)s.")
class IBMCError(DriverOperationError):
_msg_fmt = _("IBMC exception occurred on node %(node)s. Error: %(error)s")
class IBMCConnectionError(IBMCError):
_msg_fmt = _("IBMC connection failed for node %(node)s: %(error)s")
| 29.20527
| 79
| 0.69953
|
09c4046ad477474c0c20f7a9d678e9b833b55f12
| 17,969
|
py
|
Python
|
mindarmour/adv_robustness/attacks/carlini_wagner.py
|
mindspore-ai/mindarmour
|
a5db0825fa06e4da870c0a850a18b374e8cdd086
|
[
"Apache-2.0"
] | 139
|
2020-03-28T02:37:07.000Z
|
2022-03-24T15:35:39.000Z
|
mindarmour/adv_robustness/attacks/carlini_wagner.py
|
mindspore-ai/mindarmour
|
a5db0825fa06e4da870c0a850a18b374e8cdd086
|
[
"Apache-2.0"
] | 2
|
2020-04-02T09:50:21.000Z
|
2020-05-09T06:52:57.000Z
|
mindarmour/adv_robustness/attacks/carlini_wagner.py
|
mindspore-ai/mindarmour
|
a5db0825fa06e4da870c0a850a18b374e8cdd086
|
[
"Apache-2.0"
] | 12
|
2020-03-28T02:52:42.000Z
|
2021-07-15T08:05:06.000Z
|
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Carlini-wagner Attack.
"""
import numpy as np
from mindspore import Tensor
from mindspore.nn import Cell
from mindarmour.utils.logger import LogUtil
from mindarmour.utils._check_param import check_numpy_param, check_model, \
check_pair_numpy_param, check_int_positive, check_param_type, \
check_param_multi_types, check_value_positive, check_equal_shape
from mindarmour.utils.util import GradWrap, jacobian_matrix
from .attack import Attack
LOGGER = LogUtil.get_instance()
TAG = 'CW'
def _best_logits_of_other_class(logits, target_class, value=1):
"""
    Choose the index of the largest logit, excluding the target class.
Args:
logits (numpy.ndarray): Predict logits of samples.
target_class (numpy.ndarray): Target labels.
        value (float): Value subtracted at the target-class index to mask it
            out of the argmax. Default: 1.
Returns:
        numpy.ndarray, the index of the largest logit excluding the target
        class.
Examples:
>>> other_class = _best_logits_of_other_class([[0.2, 0.3, 0.5],
>>> [0.3, 0.4, 0.3]], [2, 1])
"""
LOGGER.debug(TAG, "enter the func _best_logits_of_other_class.")
logits, target_class = check_pair_numpy_param('logits', logits,
'target_class', target_class)
res = np.zeros_like(logits)
for i in range(logits.shape[0]):
res[i][target_class[i]] = value
return np.argmax(logits - res, axis=1)
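# Worked example (comments only) of the masking trick above: with
# logits = [[0.2, 0.3, 0.5], [0.3, 0.4, 0.3]] and target_class = [2, 1],
# `res` holds `value` at each target index, so the argmax is taken over
# [0.2, 0.3, -0.5] and [0.3, -0.6, 0.3] (for value=1), returning [1, 0]:
# the strongest class other than the target for each sample.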
class CarliniWagnerL2Attack(Attack):
"""
The Carlini & Wagner attack using L2 norm.
References: `Nicholas Carlini, David Wagner: "Towards Evaluating
the Robustness of Neural Networks" <https://arxiv.org/abs/1608.04644>`_
Args:
network (Cell): Target model.
num_classes (int): Number of labels of model output, which should be
greater than zero.
box_min (float): Lower bound of input of the target model. Default: 0.
box_max (float): Upper bound of input of the target model. Default: 1.0.
bin_search_steps (int): The number of steps for the binary search
used to find the optimal trade-off constant between distance
and confidence. Default: 5.
max_iterations (int): The maximum number of iterations, which should be
greater than zero. Default: 1000.
confidence (float): Confidence of the output of adversarial examples.
Default: 0.
learning_rate (float): The learning rate for the attack algorithm.
Default: 5e-3.
initial_const (float): The initial trade-off constant to use to balance
the relative importance of perturbation norm and confidence
difference. Default: 1e-2.
abort_early_check_ratio (float): Check loss progress every ratio of
all iteration. Default: 5e-2.
targeted (bool): If True, targeted attack. If False, untargeted attack.
Default: False.
fast (bool): If True, return the first found adversarial example.
If False, return the adversarial samples with smaller
perturbations. Default: True.
        abort_early (bool): If True, Adam will be aborted if the loss hasn't
            decreased for some time. If False, Adam will keep running until
            the maximum number of iterations is reached. Default: True.
sparse (bool): If True, input labels are sparse-coded. If False,
input labels are onehot-coded. Default: True.
Examples:
        >>> attack = CarliniWagnerL2Attack(network, num_classes=10)
"""
def __init__(self, network, num_classes, box_min=0.0, box_max=1.0,
bin_search_steps=5, max_iterations=1000, confidence=0,
learning_rate=5e-3, initial_const=1e-2,
abort_early_check_ratio=5e-2, targeted=False,
fast=True, abort_early=True, sparse=True):
LOGGER.info(TAG, "init CW object.")
super(CarliniWagnerL2Attack, self).__init__()
self._network = check_model('network', network, Cell)
self._network.set_grad(True)
self._num_classes = check_int_positive('num_classes', num_classes)
self._min = check_param_type('box_min', box_min, float)
self._max = check_param_type('box_max', box_max, float)
        self._bin_search_steps = check_int_positive('bin_search_steps',
                                                    bin_search_steps)
self._max_iterations = check_int_positive('max_iterations',
max_iterations)
self._confidence = check_param_multi_types('confidence', confidence,
[int, float])
self._learning_rate = check_value_positive('learning_rate',
learning_rate)
self._initial_const = check_value_positive('initial_const',
initial_const)
self._abort_early = check_param_type('abort_early', abort_early, bool)
self._fast = check_param_type('fast', fast, bool)
self._abort_early_check_ratio = check_value_positive('abort_early_check_ratio',
abort_early_check_ratio)
self._targeted = check_param_type('targeted', targeted, bool)
self._net_grad = GradWrap(self._network)
self._sparse = check_param_type('sparse', sparse, bool)
self._dtype = None
def _loss_function(self, logits, new_x, org_x, org_or_target_class,
constant, confidence):
"""
Calculate the value of loss function and gradients of loss w.r.t inputs.
Args:
logits (numpy.ndarray): The output of network before softmax.
new_x (numpy.ndarray): Adversarial examples.
org_x (numpy.ndarray): Original benign input samples.
org_or_target_class (numpy.ndarray): Original/target labels.
constant (float): A trade-off constant to use to balance loss
and perturbation norm.
confidence (float): Confidence level of the output of adversarial
examples.
Returns:
numpy.ndarray, norm of perturbation, sum of the loss and the
norm, and gradients of the sum w.r.t inputs.
Raises:
ValueError: If loss is less than 0.
Examples:
            >>> L2_loss, total_loss, dldx = self._loss_function([0.2, 0.3,
            >>> 0.5], [0.1, 0.2, 0.2, 0.4], [0.12, 0.2, 0.25, 0.4], [1], 2, 0)
"""
LOGGER.debug(TAG, "enter the func _loss_function.")
logits = check_numpy_param('logits', logits)
org_x = check_numpy_param('org_x', org_x)
new_x, org_or_target_class = check_pair_numpy_param('new_x',
new_x,
'org_or_target_class',
org_or_target_class)
new_x, org_x = check_equal_shape('new_x', new_x, 'org_x', org_x)
other_class_index = _best_logits_of_other_class(
logits, org_or_target_class, value=np.inf)
loss1 = np.sum((new_x - org_x)**2,
axis=tuple(range(len(new_x.shape))[1:]))
loss2 = np.zeros_like(loss1, dtype=self._dtype)
loss2_grade = np.zeros_like(new_x, dtype=self._dtype)
jaco_grad = jacobian_matrix(self._net_grad, new_x, self._num_classes)
if self._targeted:
for i in range(org_or_target_class.shape[0]):
loss2[i] = max(0, logits[i][other_class_index[i]]
- logits[i][org_or_target_class[i]]
+ confidence)
loss2_grade[i] = constant[i]*(jaco_grad[other_class_index[
i]][i] - jaco_grad[org_or_target_class[i]][i])
else:
for i in range(org_or_target_class.shape[0]):
loss2[i] = max(0, logits[i][org_or_target_class[i]]
- logits[i][other_class_index[i]] + confidence)
loss2_grade[i] = constant[i]*(jaco_grad[org_or_target_class[
i]][i] - jaco_grad[other_class_index[i]][i])
total_loss = loss1 + constant*loss2
loss1_grade = 2*(new_x - org_x)
for i in range(org_or_target_class.shape[0]):
if loss2[i] < 0:
                msg = 'loss value should be greater than or equal to 0, ' \
                      'but got loss2 {}'.format(loss2[i])
LOGGER.error(TAG, msg)
raise ValueError(msg)
if loss2[i] == 0:
loss2_grade[i, ...] = 0
total_loss_grade = loss1_grade + loss2_grade
return loss1, total_loss, total_loss_grade
def _to_attack_space(self, inputs):
"""
Transform input data into attack space.
Args:
inputs (numpy.ndarray): Input data.
Returns:
numpy.ndarray, transformed data which belongs to attack space.
Examples:
>>> x_att = self._to_attack_space([0.2, 0.3, 0.3])
"""
LOGGER.debug(TAG, "enter the func _to_attack_space.")
inputs = check_numpy_param('inputs', inputs)
mean = (self._min + self._max) / 2
diff = (self._max - self._min) / 2
inputs = (inputs - mean) / diff
inputs = inputs*0.999999
return np.arctanh(inputs)
def _to_model_space(self, inputs):
"""
Transform input data into model space.
Args:
inputs (numpy.ndarray): Input data.
Returns:
numpy.ndarray, transformed data which belongs to model space
and the gradient of x_model w.r.t. x_att.
Examples:
            >>> x_model, the_grad = self._to_model_space([10, 21, 9])
"""
LOGGER.debug(TAG, "enter the func _to_model_space.")
inputs = check_numpy_param('inputs', inputs)
inputs = np.tanh(inputs)
the_grad = 1 - np.square(inputs)
mean = (self._min + self._max) / 2
diff = (self._max - self._min) / 2
inputs = inputs*diff + mean
the_grad = the_grad*diff
return inputs, the_grad
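    # Worked example (comments only) of the change of variables above,
    # assuming the default box [0, 1], i.e. mean = diff = 0.5: a pixel
    # x = 0.8 maps to attack space as arctanh((0.8 - 0.5) / 0.5 * 0.999999)
    # ~= arctanh(0.6) ~= 0.6931, and _to_model_space maps it back:
    # tanh(0.6931) * 0.5 + 0.5 ~= 0.8. The 0.999999 factor keeps arctanh away
    # from its singularities at +/-1, and the returned gradient
    # (1 - tanh(x)**2) * diff is the chain-rule term d(x_model)/d(x_att).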
def generate(self, inputs, labels):
"""
Generate adversarial examples based on input data and targeted labels.
Args:
inputs (numpy.ndarray): Input samples.
labels (numpy.ndarray): The ground truth label of input samples
or target labels.
Returns:
numpy.ndarray, generated adversarial examples.
Examples:
            >>> advs = attack.generate([[0.1, 0.2, 0.6], [0.3, 0, 0.4]], [1, 2])
"""
LOGGER.debug(TAG, "enter the func generate.")
inputs, labels = check_pair_numpy_param('inputs', inputs,
'labels', labels)
if not self._sparse:
labels = np.argmax(labels, axis=1)
self._dtype = inputs.dtype
att_original = self._to_attack_space(inputs)
reconstructed_original, _ = self._to_model_space(att_original)
# find an adversarial sample
const = np.ones_like(labels, dtype=self._dtype)*self._initial_const
lower_bound = np.zeros_like(labels, dtype=self._dtype)
upper_bound = np.ones_like(labels, dtype=self._dtype)*np.inf
adversarial_res = inputs.copy()
adversarial_loss = np.ones_like(labels, dtype=self._dtype)*np.inf
samples_num = labels.shape[0]
adv_flag = np.zeros_like(labels)
for binary_search_step in range(self._bin_search_steps):
if (binary_search_step == self._bin_search_steps - 1) and \
(self._bin_search_steps >= 10):
                const = np.minimum(1e10, upper_bound)  # element-wise over the batch
LOGGER.debug(TAG,
'starting optimization with const = %s',
str(const))
att_perturbation = np.zeros_like(att_original, dtype=self._dtype)
loss_at_previous_check = np.ones_like(labels, dtype=self._dtype)*np.inf
# create a new optimizer to minimize the perturbation
optimizer = _AdamOptimizer(att_perturbation.shape)
for iteration in range(self._max_iterations):
x_input, dxdp = self._to_model_space(
att_original + att_perturbation)
logits = self._network(Tensor(x_input)).asnumpy()
current_l2_loss, current_loss, dldx = self._loss_function(
logits, x_input, reconstructed_original,
labels, const, self._confidence)
                # check whether the attack succeeded (covers all examples)
if self._targeted:
is_adv = (np.argmax(logits, axis=1) == labels)
else:
is_adv = (np.argmax(logits, axis=1) != labels)
for i in range(samples_num):
if is_adv[i]:
adv_flag[i] = True
if current_l2_loss[i] < adversarial_loss[i]:
adversarial_res[i] = x_input[i]
adversarial_loss[i] = current_l2_loss[i]
if np.all(adv_flag):
if self._fast:
LOGGER.debug(TAG, "succeed find adversarial examples.")
msg = 'iteration: {}, logits_att: {}, ' \
'loss: {}, l2_dist: {}' \
.format(iteration,
np.argmax(logits, axis=1),
current_loss, current_l2_loss)
LOGGER.debug(TAG, msg)
return adversarial_res
dldx, inputs = check_equal_shape('dldx', dldx, 'inputs', inputs)
gradient = dldx*dxdp
att_perturbation += \
optimizer(gradient, self._learning_rate)
                # check whether iteration should stop early
flag = True
iter_check = iteration % (np.ceil(
self._max_iterations*self._abort_early_check_ratio))
if self._abort_early and iter_check == 0:
# check progress
for i in range(inputs.shape[0]):
if current_loss[i] <= .9999*loss_at_previous_check[i]:
flag = False
                    # stop Adam if no sample has made progress
if flag:
LOGGER.debug(TAG,
'step:%d, no progress yet, stop iteration',
binary_search_step)
break
loss_at_previous_check = current_loss
for i in range(samples_num):
# update bound based on search result
if adv_flag[i]:
LOGGER.debug(TAG,
'example %d, found adversarial with const=%f',
i, const[i])
upper_bound[i] = const[i]
else:
LOGGER.debug(TAG,
'example %d, failed to find adversarial'
' with const=%f',
i, const[i])
lower_bound[i] = const[i]
if upper_bound[i] == np.inf:
const[i] *= 10
else:
const[i] = (lower_bound[i] + upper_bound[i]) / 2
return adversarial_res
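# Worked example (comments only) of the trade-off constant schedule above for
# a single sample, assuming initial_const = 0.01: while no adversarial example
# is found the upper bound stays inf and const grows geometrically
# (0.01 -> 0.1 -> 1.0); once an attack succeeds at const = 1.0 the bounds
# become [0.1, 1.0] and subsequent steps bisect the interval
# (next const = 0.55), trading perturbation norm against attack success.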
class _AdamOptimizer:
"""
AdamOptimizer is used to calculate the optimum attack step.
Args:
shape (tuple): The shape of perturbations.
Examples:
>>> optimizer = _AdamOptimizer(att_perturbation.shape)
"""
def __init__(self, shape):
self._m = np.zeros(shape)
self._v = np.zeros(shape)
self._t = 0
def __call__(self, gradient, learning_rate=0.001,
beta1=0.9, beta2=0.999, epsilon=1e-8):
"""
Calculate the optimum perturbation for each iteration.
Args:
gradient (numpy.ndarray): The gradient of the loss w.r.t. to the
variable.
learning_rate (float): The learning rate in the current iteration.
Default: 0.001.
beta1 (float): Decay rate for calculating the exponentially
decaying average of past gradients. Default: 0.9.
beta2 (float): Decay rate for calculating the exponentially
decaying average of past squared gradients. Default: 0.999.
epsilon (float): Small value to avoid division by zero.
Default: 1e-8.
Returns:
numpy.ndarray, perturbations.
Examples:
>>> perturbs = optimizer([0.2, 0.1, 0.15], 0.005)
"""
gradient = check_numpy_param('gradient', gradient)
self._t += 1
self._m = beta1*self._m + (1 - beta1)*gradient
self._v = beta2*self._v + (1 - beta2)*gradient**2
alpha = learning_rate*np.sqrt(1 - beta2**self._t) / (1 - beta1**self._t)
pertur = -alpha*self._m / (np.sqrt(self._v) + epsilon)
return pertur
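# Worked example (comments only) of one step of the optimizer above, assuming
# the defaults (learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8) and
# a scalar gradient of 0.2 at t = 1:
#   m     = 0.1 * 0.2                              = 0.02
#   v     = 0.001 * 0.2**2                         = 4e-5
#   alpha = 0.001 * sqrt(1 - 0.999) / (1 - 0.9)    ~= 3.162e-4
#   step  = -alpha * 0.02 / (sqrt(4e-5) + 1e-8)    ~= -1e-3
# i.e. the first update has magnitude ~= learning_rate, a standard property of
# bias-corrected Adam.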
| 42.783333
| 87
| 0.57026
|
21f10458fa46380c0ae6194c82064d4b6843fc62
| 440
|
py
|
Python
|
company/migrations/0014_remitamandateactivationdata_mandate_requestid.py
|
minsystems/minloansng
|
225f7c553dc1c7180431c5b84250560b74b0e9cc
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
company/migrations/0014_remitamandateactivationdata_mandate_requestid.py
|
minsystems/minloansng
|
225f7c553dc1c7180431c5b84250560b74b0e9cc
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
company/migrations/0014_remitamandateactivationdata_mandate_requestid.py
|
minsystems/minloansng
|
225f7c553dc1c7180431c5b84250560b74b0e9cc
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
# Generated by Django 3.0.2 on 2020-10-29 06:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('company', '0013_auto_20201028_0939'),
]
operations = [
migrations.AddField(
model_name='remitamandateactivationdata',
name='mandate_requestId',
field=models.CharField(blank=True, max_length=200, null=True),
),
]
| 23.157895
| 74
| 0.634091
|
0d860a6f7563c01e077a430f9457c94301c4c071
| 796
|
py
|
Python
|
tests/fixtures/test_supplementary_files_json/content_08_expected.py
|
elifesciences/elife-tools
|
ee345bf0e6703ef0f7e718355e85730abbdfd117
|
[
"MIT"
] | 9
|
2015-04-16T08:13:31.000Z
|
2020-05-18T14:03:06.000Z
|
tests/fixtures/test_supplementary_files_json/content_08_expected.py
|
elifesciences/elife-tools
|
ee345bf0e6703ef0f7e718355e85730abbdfd117
|
[
"MIT"
] | 310
|
2015-02-11T00:30:09.000Z
|
2021-07-14T23:58:50.000Z
|
tests/fixtures/test_supplementary_files_json/content_08_expected.py
|
elifesciences/elife-tools
|
ee345bf0e6703ef0f7e718355e85730abbdfd117
|
[
"MIT"
] | 9
|
2015-02-04T01:21:28.000Z
|
2021-06-15T12:50:47.000Z
|
from collections import OrderedDict
expected = [
OrderedDict(
[
("doi", u"10.7554/eLife.26759.018"),
("id", u"supp1"),
("label", u"Supplementary file 1"),
("title", u"All primer sequences used in this study."),
("mediaType", u"application/docx"),
("uri", u"elife-26759-supp1-v2.docx"),
("filename", u"elife-26759-supp1-v2.docx"),
]
),
OrderedDict(
[
("doi", u"10.7554/eLife.26759.019"),
("id", u"transrepform"),
("label", u"Transparent reporting form"),
("mediaType", u"application/pdf"),
("uri", u"elife-26759-transrepform-v2.pdf"),
("filename", u"elife-26759-transrepform-v2.pdf"),
]
),
]
| 30.615385
| 67
| 0.496231
|
f22415c424b6b91485ab55cd7c84b8021042d756
| 999
|
py
|
Python
|
usaspending_api/references/tests/integration/filter_tree/conftest.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 217
|
2016-11-03T17:09:53.000Z
|
2022-03-10T04:17:54.000Z
|
usaspending_api/references/tests/integration/filter_tree/conftest.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 622
|
2016-09-02T19:18:23.000Z
|
2022-03-29T17:11:01.000Z
|
usaspending_api/references/tests/integration/filter_tree/conftest.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 93
|
2016-09-07T20:28:57.000Z
|
2022-02-25T00:25:27.000Z
|
from usaspending_api.references.tests.integration.filter_tree.tas.tas_data_fixtures import (
basic_agency,
cfo_agencies,
non_cfo_agencies,
unsupported_agencies,
multiple_federal_accounts,
agency_with_unsupported_fa,
multiple_tas,
fa_with_multiple_tas,
fa_with_unsupported_tas,
)
from usaspending_api.references.tests.integration.filter_tree.psc.psc_data_fixtures import (
basic_rnd,
basic_product,
product_with_count_above_one,
product_with_branching_count_above_one,
basic_service,
no_data,
rnd_special,
)
__all__ = [
"basic_agency",
"cfo_agencies",
"non_cfo_agencies",
"unsupported_agencies",
"multiple_federal_accounts",
"agency_with_unsupported_fa",
"multiple_tas",
"fa_with_multiple_tas",
"fa_with_unsupported_tas",
"basic_rnd",
"basic_product",
"product_with_count_above_one",
"product_with_branching_count_above_one",
"basic_service",
"no_data",
"rnd_special",
]
| 24.975
| 92
| 0.741742
|
99aecea1aba5a960c5cbfde0ec6f5241e9df977a
| 5,996
|
py
|
Python
|
tests/forms_tests/widget_tests/test_multiwidget.py
|
ioinfinity/django
|
b6a0ab523751c13ae3eaec102de70f58f73a0d94
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/forms_tests/widget_tests/test_multiwidget.py
|
ioinfinity/django
|
b6a0ab523751c13ae3eaec102de70f58f73a0d94
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2020-07-02T21:10:44.000Z
|
2020-07-02T21:11:21.000Z
|
tests/forms_tests/widget_tests/test_multiwidget.py
|
ioinfinity/django
|
b6a0ab523751c13ae3eaec102de70f58f73a0d94
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2020-08-11T18:46:32.000Z
|
2020-08-11T18:46:32.000Z
|
import copy
from datetime import datetime
from django.forms import (
CharField, FileInput, MultipleChoiceField, MultiValueField, MultiWidget,
RadioSelect, SelectMultiple, SplitDateTimeField, SplitDateTimeWidget,
TextInput,
)
from .base import WidgetTest
class MyMultiWidget(MultiWidget):
def decompress(self, value):
if value:
return value.split('__')
return ['', '']
class ComplexMultiWidget(MultiWidget):
def __init__(self, attrs=None):
widgets = (
TextInput(),
SelectMultiple(choices=WidgetTest.beatles),
SplitDateTimeWidget(),
)
super(ComplexMultiWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
data = value.split(',')
return [
data[0], list(data[1]), datetime.strptime(data[2], "%Y-%m-%d %H:%M:%S")
]
return [None, None, None]
def format_output(self, rendered_widgets):
return '\n'.join(rendered_widgets)
class ComplexField(MultiValueField):
def __init__(self, required=True, widget=None, label=None, initial=None):
fields = (
CharField(),
MultipleChoiceField(choices=WidgetTest.beatles),
SplitDateTimeField(),
)
super(ComplexField, self).__init__(
fields, required, widget, label, initial,
)
def compress(self, data_list):
if data_list:
return '%s,%s,%s' % (
data_list[0], ''.join(data_list[1]), data_list[2],
)
return None
class DeepCopyWidget(MultiWidget):
"""
Used to test MultiWidget.__deepcopy__().
"""
def __init__(self, choices=[]):
widgets = [
RadioSelect(choices=choices),
TextInput,
]
super(DeepCopyWidget, self).__init__(widgets)
def _set_choices(self, choices):
"""
When choices are set for this widget, we want to pass those along to
the Select widget.
"""
self.widgets[0].choices = choices
def _get_choices(self):
"""
The choices for this widget are the Select widget's choices.
"""
return self.widgets[0].choices
choices = property(_get_choices, _set_choices)
class MultiWidgetTest(WidgetTest):
def test_text_inputs(self):
widget = MyMultiWidget(
widgets=(
TextInput(attrs={'class': 'big'}),
TextInput(attrs={'class': 'small'}),
)
)
self.check_html(widget, 'name', ['john', 'lennon'], html=(
'<input type="text" class="big" value="john" name="name_0" />'
'<input type="text" class="small" value="lennon" name="name_1" />'
))
self.check_html(widget, 'name', 'john__lennon', html=(
'<input type="text" class="big" value="john" name="name_0" />'
'<input type="text" class="small" value="lennon" name="name_1" />'
))
self.check_html(widget, 'name', 'john__lennon', attrs={'id': 'foo'}, html=(
'<input id="foo_0" type="text" class="big" value="john" name="name_0" />'
'<input id="foo_1" type="text" class="small" value="lennon" name="name_1" />'
))
def test_constructor_attrs(self):
widget = MyMultiWidget(
widgets=(
TextInput(attrs={'class': 'big'}),
TextInput(attrs={'class': 'small'}),
),
attrs={'id': 'bar'},
)
self.check_html(widget, 'name', ['john', 'lennon'], html=(
'<input id="bar_0" type="text" class="big" value="john" name="name_0" />'
'<input id="bar_1" type="text" class="small" value="lennon" name="name_1" />'
))
def test_value_omitted_from_data(self):
widget = MyMultiWidget(widgets=(TextInput(), TextInput()))
self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True)
self.assertIs(widget.value_omitted_from_data({'field_0': 'x'}, {}, 'field'), False)
self.assertIs(widget.value_omitted_from_data({'field_1': 'y'}, {}, 'field'), False)
self.assertIs(widget.value_omitted_from_data({'field_0': 'x', 'field_1': 'y'}, {}, 'field'), False)
def test_needs_multipart_true(self):
"""
needs_multipart_form should be True if any widgets need it.
"""
widget = MyMultiWidget(widgets=(TextInput(), FileInput()))
self.assertTrue(widget.needs_multipart_form)
def test_needs_multipart_false(self):
"""
needs_multipart_form should be False if no widgets need it.
"""
widget = MyMultiWidget(widgets=(TextInput(), TextInput()))
self.assertFalse(widget.needs_multipart_form)
def test_nested_multiwidget(self):
"""
MultiWidgets can be composed of other MultiWidgets.
"""
widget = ComplexMultiWidget()
self.check_html(widget, 'name', 'some text,JP,2007-04-25 06:24:00', html=(
"""
<input type="text" name="name_0" value="some text" />
<select multiple="multiple" name="name_1">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
<input type="text" name="name_2_0" value="2007-04-25" />
<input type="text" name="name_2_1" value="06:24:00" />
"""
))
def test_deepcopy(self):
"""
MultiWidget should define __deepcopy__() (#12048).
"""
w1 = DeepCopyWidget(choices=[1, 2, 3])
w2 = copy.deepcopy(w1)
w2.choices = [4, 5, 6]
# w2 ought to be independent of w1, since MultiWidget ought
# to make a copy of its sub-widgets when it is copied.
self.assertEqual(w1.choices, [1, 2, 3])
| 35.064327
| 107
| 0.573716
|
09b5098af093e181fd49ca082a2f1273158163e6
| 1,646
|
py
|
Python
|
marrow/mongo/param/common.py
|
marrow/mongo
|
1a8e5fe9047b6a5bb2eaeea95f3dc085c737067f
|
[
"MIT"
] | 22
|
2016-03-22T05:32:26.000Z
|
2021-06-19T06:23:47.000Z
|
marrow/mongo/param/common.py
|
marrow/mongo
|
1a8e5fe9047b6a5bb2eaeea95f3dc085c737067f
|
[
"MIT"
] | 59
|
2016-02-19T22:21:33.000Z
|
2022-01-17T19:40:44.000Z
|
marrow/mongo/param/common.py
|
marrow/mongo
|
1a8e5fe9047b6a5bb2eaeea95f3dc085c737067f
|
[
"MIT"
] | 5
|
2016-06-24T06:55:44.000Z
|
2021-07-12T22:33:07.000Z
|
# encoding: utf-8
"""Parameterized support akin to Django's ORM or MongoEngine."""
from __future__ import unicode_literals
from ...package.loader import traverse
from ...schema.compat import odict
def _deferred_method(name, _named=None, **kw):
def _deferred_method_inner(self, other):
if _named:
if not len(_named) == len(other):
raise TypeError("Incorrect number of arguments.")
values = iter(other)
for i in _named:
kw[i] = next(values)
return getattr(self, name)(**kw)
return getattr(self, name)(other, **kw)
return _deferred_method_inner
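# Illustrative sketch (hypothetical method names, comments only): given a
# field object exposing a method `inc`, something like
#     __iadd__ = _deferred_method('inc')
# makes `field += 5` dispatch to `field.inc(5)`, while the _named form, e.g.
#     _deferred_method('range', _named=('low', 'high'))
# unpacks a two-element iterable into the keyword arguments low and high.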
def _operator_choice(conversion, lookup, **kw):
def _operator_choice_inner(self, other):
return lookup[conversion(other)](self, **kw)
return _operator_choice_inner
def _process_arguments(Document, prefixes, suffixes, arguments, passthrough=None):
for name, value in arguments.items():
prefix, _, nname = name.partition('__')
if prefix in prefixes:
name = nname
nname, _, suffix = name.rpartition('__')
if suffix in suffixes:
name = nname
field = traverse(Document, name.replace('__', '.')) # Find the target field.
if passthrough and not passthrough & {prefix, suffix}: # Typecast the value to MongoDB-safe as needed.
value = field._field.transformer.foreign(value, (field, Document)) # pylint:disable=protected-access
yield prefixes.get(prefix or None, None), suffixes.get(suffix, None), field, value
def _current_date(value):
if value in ('ts', 'timestamp'):
return {'$type': 'timestamp'}
return True
def _bit(op):
def bitwiseUpdate(value):
return odict({op: int(value)})
return bitwiseUpdate
| 25.323077
| 105
| 0.707776
|
a293076716bbefa051341e9857c15bd101c771e1
| 5,368
|
py
|
Python
|
dashboard/dashboard/pinpoint/models/change/patch.py
|
bopopescu/chromium72-third-party-catapult
|
774e1355b871e13bb858147a136e9cb476f55030
|
[
"BSD-3-Clause"
] | 1
|
2019-01-04T10:08:58.000Z
|
2019-01-04T10:08:58.000Z
|
dashboard/dashboard/pinpoint/models/change/patch.py
|
kind-john/catapult
|
29635376119833f172a58a48a3282d353ce55d2b
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/change/patch.py
|
kind-john/catapult
|
29635376119833f172a58a48a3282d353ce55d2b
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import re
import urlparse
from dashboard.services import gerrit_service
class GerritPatch(collections.namedtuple(
'GerritPatch', ('server', 'change', 'revision'))):
"""A patch in Gerrit.
change is a change ID of the format '<project>~<branch>~<Change-Id>' and
revision is a commit ID. Both are described in the Gerrit API documentation.
https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#ids
They must be in a canonical format so we can look up builds precisely.
"""
def __str__(self):
return self.revision[:7]
@property
def id_string(self):
return '%s/%s/%s' % (self.server, self.change, self.revision)
def BuildParameters(self):
patch_info = gerrit_service.GetChange(
self.server, self.change, fields=('ALL_REVISIONS',))
revision_info = patch_info['revisions'][self.revision]
return {
'patch_gerrit_url': self.server,
'patch_issue': patch_info['_number'],
'patch_project': patch_info['project'],
'patch_ref': revision_info['fetch']['http']['ref'],
'patch_repository_url': revision_info['fetch']['http']['url'],
'patch_set': revision_info['_number'],
'patch_storage': 'gerrit',
}
def AsDict(self):
patch_info = gerrit_service.GetChange(
self.server, self.change, fields=('ALL_REVISIONS', 'DETAILED_ACCOUNTS'))
# TODO: Cache this stuff in memcache.
revision_info = patch_info['revisions'][self.revision]
return {
'server': self.server,
'change': self.change,
'revision': self.revision,
'url': '%s/c/%s/+/%d/%d' % (
self.server, patch_info['project'],
patch_info['_number'], revision_info['_number']),
'subject': patch_info['subject'],
'time': revision_info['created'],
'author': revision_info['uploader']['email'],
}
@classmethod
def FromData(cls, data):
"""Creates a new GerritPatch from the given request data.
Args:
data: A patch URL string, for example:
https://chromium-review.googlesource.com/c/chromium/tools/build/+/679595
Or a dict containing {server, change, revision [optional]}.
change is a {change-id} as described in the Gerrit API documentation.
revision is a commit ID hash or numeric patch number.
If revision is omitted, it is the change's current revision.
Returns:
A GerritPatch.
Raises:
KeyError: The patch doesn't exist or doesn't have the given revision.
ValueError: The URL has an unrecognized format.
"""
if isinstance(data, basestring):
return cls.FromUrl(data)
else:
return cls.FromDict(data)
@classmethod
def FromUrl(cls, url):
"""Creates a new GerritPatch from the given URL.
Args:
url: A patch URL string, for example:
https://chromium-review.googlesource.com/c/chromium/tools/build/+/679595
Returns:
A GerritPatch.
Raises:
KeyError: The patch doesn't have the given revision.
ValueError: The URL has an unrecognized format.
"""
url_parts = urlparse.urlparse(url)
server = urlparse.urlunsplit(
(url_parts.scheme, url_parts.netloc, '', '', ''))
change_rev_match = re.match(r'^.*\/\+\/(\d+)(?:\/(\d+))?\/?$', url)
change_match = re.match(r'^\/(\d+)\/?$', url_parts.path)
if change_rev_match:
change = change_rev_match.group(1)
revision = change_rev_match.group(2)
elif change_match: # support URLs returned by the 'git cl issue' command
change = change_match.group(1)
revision = None
else:
raise ValueError('Unknown patch URL format: ' + url)
return cls.FromDict({
'server': server,
'change': int(change),
'revision': int(revision) if revision else None,
})
@classmethod
def FromDict(cls, data):
"""Creates a new GerritPatch from the given dict.
Args:
data: A dict containing {server, change, revision [optional]}.
change is a {change-id} as described in the Gerrit API documentation.
revision is a commit ID hash or numeric patch number.
If revision is omitted, it is the change's current revision.
Returns:
A GerritPatch.
Raises:
KeyError: The patch doesn't have the given revision.
"""
server = data['server']
change = data['change']
revision = data.get('revision')
# Look up the patch and convert everything to a canonical format.
try:
patch_info = gerrit_service.GetChange(
server, change, fields=('ALL_REVISIONS',))
except gerrit_service.NotFoundError as e:
raise KeyError(str(e))
change = patch_info['id']
# Revision can be a revision ID or numeric patch number.
if not revision:
revision = patch_info['current_revision']
for revision_id, revision_info in patch_info['revisions'].iteritems():
if revision == revision_id or revision == revision_info['_number']:
revision = revision_id
break
else:
raise KeyError('Patch revision not found: %s/%s revision %s' %
(server, change, revision))
return cls(server, change, revision)
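# Usage sketch (comments only, since FromDict calls the Gerrit API): for the
# URL https://chromium-review.googlesource.com/c/chromium/tools/build/+/679595/3
# the regex in FromUrl yields change='679595' and revision='3' (a numeric
# patch number); without the trailing '/3' the revision is None and the
# change's current revision is used instead.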
| 33.135802
| 80
| 0.652571
|
50c6b2abe3518a35a70df59797b9dbf2054e7f00
| 319
|
py
|
Python
|
Chapter03/07_gaussian_bilateral_filter.py
|
yanboyang713/OpenCV-3-x-with-Python-By-Example
|
7393745014e0108985b860cb9b45c1c72dc0180d
|
[
"MIT"
] | null | null | null |
Chapter03/07_gaussian_bilateral_filter.py
|
yanboyang713/OpenCV-3-x-with-Python-By-Example
|
7393745014e0108985b860cb9b45c1c72dc0180d
|
[
"MIT"
] | null | null | null |
Chapter03/07_gaussian_bilateral_filter.py
|
yanboyang713/OpenCV-3-x-with-Python-By-Example
|
7393745014e0108985b860cb9b45c1c72dc0180d
|
[
"MIT"
] | null | null | null |
import cv2
import numpy as np
img = cv2.imread('./images/blue_carpet.png')
# Gaussian blur with a 13x13 kernel; sigma 0 lets OpenCV derive it from the kernel size.
img_gaussian = cv2.GaussianBlur(img, (13,13), 0)
# Bilateral filter: neighbourhood diameter 13, sigmaColor=70, sigmaSpace=50.
img_bilateral = cv2.bilateralFilter(img, 13, 70, 50)
cv2.imshow('Input', img)
cv2.imshow('Gaussian filter', img_gaussian)
cv2.imshow('Bilateral filter', img_bilateral)
cv2.waitKey()
| 24.538462
| 53
| 0.721003
|
fe046d72b6aa2a4b65190ae384317067c1797537
| 6,291
|
py
|
Python
|
train.py
|
xindubawukong/LQ-Nets_Pytorch
|
32f20693be72223021b25473bb80a9936e5917ef
|
[
"MIT"
] | 6
|
2020-03-25T12:52:48.000Z
|
2020-07-03T00:18:05.000Z
|
train.py
|
xindubawukong/LQ-Nets_Pytorch
|
32f20693be72223021b25473bb80a9936e5917ef
|
[
"MIT"
] | null | null | null |
train.py
|
xindubawukong/LQ-Nets_Pytorch
|
32f20693be72223021b25473bb80a9936e5917ef
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import argparse
import time
import os
from dataset import get_dataset
from utils import *
import vgg
import vgg_quant
import resnet
import resnet_quant
def adjust_learning_rate(optimizer, history):
if not hasattr(adjust_learning_rate, 'lr_count'):
adjust_learning_rate.lr_count = 0
if not hasattr(adjust_learning_rate, 'last_time'):
adjust_learning_rate.last_time = 0
if len(history) > 3 and history[-1]['test_result'][0] < min([history[i - 4]['test_result'][0] for i in range(3)]):
if adjust_learning_rate.lr_count < 2 and adjust_learning_rate.last_time + 5 <= history[-1]['epoch']:
print('Bring down learning rate.')
adjust_learning_rate.lr_count += 1
adjust_learning_rate.last_time = history[-1]['epoch']
lr = optimizer.param_groups[0]['lr'] * 0.2
for param_group in optimizer.param_groups:
param_group['lr'] = lr
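# Schedule sketch (comments only): the heuristic above decays the learning
# rate by a factor of 5 (lr * 0.2) when the latest test top-1 accuracy
# (history[-1]) drops below the minimum of the three preceding epochs
# (history[-4:-1]), at most twice per run and with at least 5 epochs between
# consecutive decays.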
def inference(epoch, net, dataloader, optimizer, device, is_train=False):
if is_train:
net.train()
else:
net.eval()
disp_interval = 10
loss_func = torch.nn.CrossEntropyLoss()
loss_avg = AverageMeter()
top1_avg = AverageMeter()
top5_avg = AverageMeter()
start_time = time.time()
for step, (images, labels) in enumerate(dataloader):
images = images.to(device)
labels = labels.to(device)
net = net.to(device)
outputs = net(images)
top1, top5 = get_accuracy(outputs, labels)
loss = loss_func(outputs, labels)
loss_avg.update(loss.item(), images.shape[0])
top1_avg.update(top1.item(), images.shape[0])
top5_avg.update(top5.item(), images.shape[0])
if is_train:
optimizer.zero_grad()
loss.backward()
optimizer.step()
for m in net.modules():
if hasattr(m, 'record'):
if len(m.record) > 0:
new_basis = torch.cat(m.record).mean(dim=0).view(m.num_filters, m.nbit)
new_basis = new_basis.to(m.basis.device)
m.basis.data = m.basis.data * 0.9 + new_basis.data * 0.1
m.record = []
if step > 0 and step % disp_interval == 0:
duration = float(time.time() - start_time)
example_per_second = images.size(0) * disp_interval / duration
lr = optimizer.param_groups[0]['lr']
print("epoch[%.3d] step: %d top1: %f top5: %f loss: %.6f fps: %.3f lr: %.6f " %
(epoch, step, top1_avg.avg, top5_avg.avg, loss.item(), example_per_second, lr)
)
start_time = time.time()
return top1_avg.avg, top5_avg.avg, loss_avg.avg
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--device', type=str, help='cpu or cuda', default='cpu')
parser.add_argument('--gpu', type=str, help='comma separated list of GPU(s) to use.')
parser.add_argument('--model', type=str, help='vgg or resnet', default='vgg')
parser.add_argument('--dataset', type=str, help='cifar10 or imagenet', default='cifar10')
parser.add_argument('--max_epoch', type=int, help='max epochs', default=10)
parser.add_argument('--seed', type=int, help='random seed', default=0)
parser.add_argument('--batch_size', type=int, help='batch size', default=64)
parser.add_argument('--w_bit', type=int, help='weight quant bits', default=0)
parser.add_argument('--a_bit', type=int, help='activation quant bits', default=0)
parser.add_argument('--method', type=str, help='QEM or BP', default='QEM')
parser.add_argument('--lr', type=float, help='init learning rate', default=0.01)
args = parser.parse_args()
print('args:', args)
assert args.device in ['cpu', 'cuda']
if args.device == 'cuda' and args.gpu:
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
torch.manual_seed(args.seed)
assert args.method in ['QEM', 'BP']
assert args.w_bit <= 4
assert args.a_bit <= 4
if not os.path.exists('log'):
os.mkdir('log')
log_path = os.path.join('log', f'{time.strftime("%Y%m%d%H%M%S", time.localtime())}')
os.mkdir(log_path)
train_dataset, test_dataset = get_dataset(args.dataset)
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=2)
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False, num_workers=2)
num_classes = 10 if args.dataset == 'cifar10' else 1000
if args.model == 'vgg':
net = vgg_quant.vgg11_bn(pretrained=False, num_classes=num_classes, w_bit=args.w_bit, a_bit=args.a_bit, method=args.method)
else:
net = resnet_quant.resnet18(pretrained=False, num_classes=num_classes, w_bit=args.w_bit, a_bit=args.a_bit, method=args.method)
if args.device == 'cuda':
net = nn.DataParallel(net)
net = net.to(args.device)
optimizer = torch.optim.SGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=1e-4)
history = []
for epoch in range(args.max_epoch):
adjust_learning_rate(optimizer, history)
train_result = inference(epoch, net, train_dataloader, optimizer, args.device, is_train=True)
with torch.no_grad():
test_result = inference(epoch, net, test_dataloader, optimizer, args.device, is_train=False)
print('train_result: top1: {} top5: {} loss: {}'.format(*train_result))
print('test_result: top1: {} top5: {} loss: {}'.format(*test_result))
history.append({
'epoch': epoch,
'train_result': train_result,
'test_result': test_result,
'lr': optimizer.param_groups[0]['lr'],
})
info = {
'history': history,
'state_dict': net.state_dict(),
'args': args,
}
torch.save(info, os.path.join(log_path, f'epoch_{epoch}.pth'))
with open(os.path.join(log_path, 'aaa.txt'), 'w') as f:
f.write(f'args: {args}\n')
for t in history:
f.write(str(t) + '\n')
print(f'All results saved to {log_path}.\nBye~')
if __name__ == '__main__':
main()
| 41.388158
| 134
| 0.621841
|
2929d3d233cd92a685e94770c5c1caf34e18e171
| 5,324
|
py
|
Python
|
petastorm/fs_utils.py
|
VivekPanyam/petastorm
|
d8dcee4541f26d58195e9cb119ac5acf53d0a58d
|
[
"Apache-2.0"
] | null | null | null |
petastorm/fs_utils.py
|
VivekPanyam/petastorm
|
d8dcee4541f26d58195e9cb119ac5acf53d0a58d
|
[
"Apache-2.0"
] | null | null | null |
petastorm/fs_utils.py
|
VivekPanyam/petastorm
|
d8dcee4541f26d58195e9cb119ac5acf53d0a58d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017-2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pyarrow
import six
from six.moves.urllib.parse import urlparse
from petastorm.hdfs.namenode import HdfsNamenodeResolver, HdfsConnector
class FilesystemResolver(object):
"""Resolves a dataset URL, makes a connection via pyarrow, and provides a filesystem object."""
def __init__(self, dataset_url, hadoop_configuration=None, connector=HdfsConnector, hdfs_driver='libhdfs3'):
"""
Given a dataset URL and an optional hadoop configuration, parse and interpret the URL to
instantiate a pyarrow filesystem.
Interpretation of the URL ``scheme://hostname:port/path`` occurs in the following order:
1. If no ``scheme``, no longer supported, so raise an exception!
2. If ``scheme`` is ``file``, use local filesystem path.
3. If ``scheme`` is ``hdfs``:
a. Try the ``hostname`` as a namespace and attempt to connect to a name node.
b. If no host, connect to the default name node.
4. Next, try connecting directly to namenode ``hostname:port``.
5. Fail otherwise.
:param dataset_url: The hdfs URL or absolute path to the dataset
:param hadoop_configuration: an optional hadoop configuration
:param connector: the HDFS connector object to use (ONLY override for testing purposes)
"""
# Cache both the original URL and the resolved, urlparsed dataset_url
self._dataset_url = dataset_url
self._parsed_dataset_url = None
# Cache the instantiated filesystem object
self._filesystem = None
if isinstance(self._dataset_url, six.string_types):
self._parsed_dataset_url = urlparse(self._dataset_url)
else:
self._parsed_dataset_url = self._dataset_url
if not self._parsed_dataset_url.scheme:
# Case 1
            raise ValueError('ERROR! A scheme-less dataset url ({}) is no longer supported. '
                             'Please prepend "file://" for local filesystem.'.format(self._dataset_url))
elif self._parsed_dataset_url.scheme == 'file':
# Case 2: definitely local
self._filesystem = pyarrow.localfs
elif self._parsed_dataset_url.scheme == 'hdfs':
if hdfs_driver == 'libhdfs3':
# libhdfs3 does not do any namenode resolution itself so we do it manually. This is not necessary
# if using libhdfs
# Obtain singleton and force hadoop config evaluation
namenode_resolver = HdfsNamenodeResolver(hadoop_configuration)
# Since we can't tell for sure, first treat the URL as though it references a name service
if self._parsed_dataset_url.netloc:
# Case 3a: Use the portion of netloc before any port, which doesn't get lowercased
nameservice = self._parsed_dataset_url.netloc.split(':')[0]
namenodes = namenode_resolver.resolve_hdfs_name_service(nameservice)
if namenodes:
self._filesystem = connector.connect_to_either_namenode(namenodes)
if self._filesystem is None:
                        # Case 4: That didn't work; try the URL as a namenode host
self._filesystem = connector.hdfs_connect_namenode(self._parsed_dataset_url)
else:
# Case 3b: No netloc, so let's try to connect to default namenode
# HdfsNamenodeResolver will raise exception if it fails to connect.
nameservice, namenodes = namenode_resolver.resolve_default_hdfs_service()
filesystem = connector.connect_to_either_namenode(namenodes)
if filesystem is not None:
# Properly replace the parsed dataset URL once default namenode is confirmed
self._parsed_dataset_url = urlparse(
'hdfs://{}{}'.format(nameservice, self._parsed_dataset_url.path))
self._filesystem = filesystem
else:
self._filesystem = connector.hdfs_connect_namenode(self._parsed_dataset_url, hdfs_driver)
else:
# Case 5
raise ValueError('Unsupported scheme in dataset url {}. '
'Currently, only "file" and "hdfs" are supported.'.format(self._parsed_dataset_url.scheme))
def parsed_dataset_url(self):
"""
:return: The urlparse'd dataset_url
"""
return self._parsed_dataset_url
def filesystem(self):
"""
:return: The pyarrow filesystem object
"""
return self._filesystem
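# Usage sketch (comments only; '/tmp/dataset' is a hypothetical path): a local
# dataset resolves through Case 2 without touching HDFS, e.g.
#     resolver = FilesystemResolver('file:///tmp/dataset')
#     resolver.filesystem()                # pyarrow.localfs
#     resolver.parsed_dataset_url().path   # '/tmp/dataset'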
| 47.535714
| 120
| 0.644065
|
c7de194184a79683510dd4c302348745371631e2
| 5,846
|
py
|
Python
|
AP.py
|
ilyakava/pyfst
|
7c8daa5493e22a12bf0c37201c0887550036ab25
|
[
"MIT"
] | 3
|
2021-03-26T12:34:25.000Z
|
2022-02-23T09:35:44.000Z
|
AP.py
|
ilyakava/pyfst
|
7c8daa5493e22a12bf0c37201c0887550036ab25
|
[
"MIT"
] | 7
|
2020-02-18T21:26:59.000Z
|
2020-03-22T12:58:14.000Z
|
AP.py
|
ilyakava/pyfst
|
7c8daa5493e22a12bf0c37201c0887550036ab25
|
[
"MIT"
] | 5
|
2020-02-17T18:24:42.000Z
|
2022-02-23T09:34:19.000Z
|
"""Attribute Profiles
https://github.com/andreybicalho/ExtendedMorphologicalProfiles/blob/master/Remote%20Sensed%20Hyperspectral%20Image%20Classification%20with%20the%20Extended%20Morphological%20Profiles%20and%20Support%20Vector%20Machines.ipynb
"""
import numpy as np
import tensorflow as tf
# Opening and Closing by Reconstruction
from skimage.morphology import reconstruction
from skimage.morphology import erosion
from skimage.morphology import disk
from skimage import util
import pdb
def opening_by_reconstruction(image, se):
"""
Performs an Opening by Reconstruction.
Parameters:
image: 2D matrix.
se: structuring element
Returns:
2D matrix of the reconstructed image.
"""
eroded = erosion(image, se)
reconstructed = reconstruction(eroded, image)
return reconstructed
def closing_by_reconstruction(image, se):
"""
Performs a Closing by Reconstruction.
Parameters:
image: 2D matrix.
se: structuring element
Returns:
2D matrix of the reconstructed image.
"""
obr = opening_by_reconstruction(image, se)
obr_inverted = util.invert(obr)
obr_inverted_eroded = erosion(obr_inverted, se)
obr_inverted_eroded_rec = reconstruction(
obr_inverted_eroded, obr_inverted)
obr_inverted_eroded_rec_inverted = util.invert(obr_inverted_eroded_rec)
return obr_inverted_eroded_rec_inverted
def build_morphological_profiles(image, se_size=4, se_size_increment=2, num_openings_closings=4):
"""
Build the morphological profiles for a given image.
Parameters:
        image: 2d matrix, the spectral band the morphological profile is built from.
se_size: int, initial size of the structuring element (or kernel). Structuring Element used: disk
se_size_increment: int, structuring element increment step
num_openings_closings: int, number of openings and closings by reconstruction to perform.
Returns:
emp: 3d matrix with both spectral (from the base_image) and spatial information
"""
x, y = image.shape
cbr = np.zeros(shape=(x, y, num_openings_closings))
obr = np.zeros(shape=(x, y, num_openings_closings))
it = 0
tam = se_size
while it < num_openings_closings:
se = disk(tam)
temp = closing_by_reconstruction(image, se)
cbr[:, :, it] = temp[:, :]
temp = opening_by_reconstruction(image, se)
obr[:, :, it] = temp[:, :]
tam += se_size_increment
it += 1
mp = np.zeros(shape=(x, y, (num_openings_closings*2)+1))
cont = num_openings_closings - 1
for i in range(num_openings_closings):
mp[:, :, i] = cbr[:, :, cont]
cont = cont - 1
mp[:, :, num_openings_closings] = image[:, :]
cont = 0
for i in range(num_openings_closings+1, num_openings_closings*2+1):
mp[:, :, i] = obr[:, :, cont]
cont += 1
return mp
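# Worked layout example (comments only): with the defaults
# (se_size=4, se_size_increment=2, num_openings_closings=4) the profile has
# 2*4 + 1 = 9 channels, built with disk radii 4, 6, 8, 10 and ordered as
# [CBR(largest SE) ... CBR(smallest SE), original image,
#  OBR(smallest SE) ... OBR(largest SE)].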
def build_profile(base_image, se_size=4, se_size_increment=2, num_openings_closings=4):
"""
Build the extended morphological profiles for a given set of images.
Parameters:
base_image: 3d matrix, each 'channel' is considered for applying the morphological profile. It is the spectral information part of the EMP.
se_size: int, initial size of the structuring element (or kernel). Structuring Element used: disk
se_size_increment: int, structuring element increment step
num_openings_closings: int, number of openings and closings by reconstruction to perform.
Returns:
emp: 3d matrix with both spectral (from the base_image) and spatial information
"""
base_image_rows, base_image_columns, base_image_channels = base_image.shape
se_size = se_size
se_size_increment = se_size_increment
num_openings_closings = num_openings_closings
morphological_profile_size = (num_openings_closings * 2) + 1
emp_size = morphological_profile_size * base_image_channels
emp = np.zeros(
shape=(base_image_rows, base_image_columns, emp_size))
cont = 0
for i in range(base_image_channels):
# build MPs
mp_temp = build_morphological_profiles(
base_image[:, :, i], se_size, se_size_increment, num_openings_closings)
aux = morphological_profile_size * (i+1)
# build the EMP
cont_aux = 0
for k in range(cont, aux):
emp[:, :, k] = mp_temp[:, :, cont_aux]
cont_aux += 1
cont = morphological_profile_size * (i+1)
return emp
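# Size sketch (comments only): for a hypothetical 3-band base image with the
# defaults above, each band contributes a 9-channel morphological profile, so
# the EMP stacks 3 * 9 = 27 channels along the last axis.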
def aptoula_net(x_dict, dropout, reuse, is_training, n_classes):
"""
Based on:
Deep Learning With Attribute Profiles for Hyperspectral Image Classification
Erchan Aptoula, Murat Can Ozdemir, and Berrin Yanikoglu
bs=XX, dropout=0.5, input should be (batch,9,9,nbands)
"""
# Define a scope for reusing the variables
with tf.variable_scope('ConvNet', reuse=reuse):
x = x_dict['subimages']
# x should be (batch, h, w, channel)
conv1 = tf.layers.conv2d(x, 48, 5, activation=None)
conv1 = tf.nn.relu(conv1)
conv2 = tf.layers.conv2d(conv1, 96, 3, activation=None)
conv2 = tf.nn.relu(conv2)
conv3 = tf.layers.conv2d(conv2, 96, 3, activation=None)
conv3 = tf.nn.relu(conv3)
fc1 = tf.layers.dense(conv3, 1024)
fc1 = tf.nn.relu(fc1)
fc1 = tf.layers.dropout(fc1, rate=dropout, training=is_training)
fc2 = tf.layers.dense(fc1, 1024)
fc2 = tf.nn.relu(fc2)
fc2 = tf.layers.dropout(fc2, rate=dropout, training=is_training)
out = tf.layers.dense(fc2, n_classes)
return tf.squeeze(out, axis=(1,2))
| 35.005988
| 224
| 0.659938
|
1a364eb411c7203a99de080728de3b2f5760c115
| 363
|
py
|
Python
|
src/fosslight_reuse_example/test_run2/test_both_have_2.py
|
LGE-OSS/example
|
e0ee00b3bdb3c2b957235f712144402e77297b37
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2021-04-13T02:10:34.000Z
|
2021-04-13T02:10:34.000Z
|
src/fosslight_reuse_example/test_run1/test_both_have_2.py
|
LGE-OSS/example
|
e0ee00b3bdb3c2b957235f712144402e77297b37
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2021-06-23T03:01:05.000Z
|
2021-06-23T05:42:55.000Z
|
src/fosslight_reuse_example/test_run2/test_both_have_2.py
|
LGE-OSS/example
|
e0ee00b3bdb3c2b957235f712144402e77297b37
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2021-04-07T09:35:38.000Z
|
2021-04-07T09:35:38.000Z
|
# SPDX-FileCopyrightText: Copyright (c) 2011 LG Electronics Inc.
#
# SPDX-License-Identifier: GPL-3.0-only
import os
from fosslight_util.set_log import init_log
def main():
output_dir = "tests"
logger, _result_log = init_log(os.path.join(output_dir, "test_add_log.txt"))
logger.warning("TESTING - add mode")
if __name__ == '__main__':
main()
| 21.352941
| 80
| 0.713499
|
6d59fcb2d7c1c1485a0f6c2080dcd6baeab6ead7
| 1,897
|
py
|
Python
|
official/vision/beta/configs/backbones.py
|
gujralsanyam22/models
|
d96f8f043dbe2b5ca8ea1785f57df8faf68d8875
|
[
"Apache-2.0"
] | 2
|
2020-12-11T04:07:55.000Z
|
2020-12-11T04:08:11.000Z
|
official/vision/beta/configs/backbones.py
|
gujralsanyam22/models
|
d96f8f043dbe2b5ca8ea1785f57df8faf68d8875
|
[
"Apache-2.0"
] | null | null | null |
official/vision/beta/configs/backbones.py
|
gujralsanyam22/models
|
d96f8f043dbe2b5ca8ea1785f57df8faf68d8875
|
[
"Apache-2.0"
] | 2
|
2021-01-27T06:39:52.000Z
|
2021-03-05T06:08:08.000Z
|
# Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Backbones configurations."""
from typing import Optional
# Import libraries
import dataclasses
from official.modeling import hyperparams
@dataclasses.dataclass
class ResNet(hyperparams.Config):
"""ResNet config."""
model_id: int = 50
@dataclasses.dataclass
class EfficientNet(hyperparams.Config):
"""EfficientNet config."""
model_id: str = 'b0'
stochastic_depth_drop_rate: float = 0.0
se_ratio: float = 0.0
@dataclasses.dataclass
class SpineNet(hyperparams.Config):
"""SpineNet config."""
model_id: str = '49'
@dataclasses.dataclass
class RevNet(hyperparams.Config):
"""RevNet config."""
# Specifies the depth of RevNet.
model_id: int = 56
@dataclasses.dataclass
class Backbone(hyperparams.OneOfConfig):
"""Configuration for backbones.
Attributes:
    type: 'str', type of backbone to be used, one of the fields below.
resnet: resnet backbone config.
revnet: revnet backbone config.
efficientnet: efficientnet backbone config.
spinenet: spinenet backbone config.
"""
type: Optional[str] = None
resnet: ResNet = ResNet()
revnet: RevNet = RevNet()
efficientnet: EfficientNet = EfficientNet()
spinenet: SpineNet = SpineNet()
| 27.897059
| 80
| 0.710069
|
79e68b1bec7d49cdb2f26c8e41ac48727f692fee
| 10,442
|
py
|
Python
|
report_generator/partner/sponsor.py
|
tai271828/pycontw-postevent-report-generator
|
b0149644bde4929f1a2ca8f94bb7574a6d990354
|
[
"BSD-3-Clause"
] | 1
|
2020-03-17T13:45:50.000Z
|
2020-03-17T13:45:50.000Z
|
report_generator/partner/sponsor.py
|
tai271828/pycontw-postevent-report-generator
|
b0149644bde4929f1a2ca8f94bb7574a6d990354
|
[
"BSD-3-Clause"
] | 34
|
2019-10-25T09:05:36.000Z
|
2021-11-14T22:00:55.000Z
|
report_generator/partner/sponsor.py
|
tai271828/pycontw-postevent-report-generator
|
b0149644bde4929f1a2ca8f94bb7574a6d990354
|
[
"BSD-3-Clause"
] | 6
|
2019-10-24T11:23:04.000Z
|
2021-02-18T13:13:22.000Z
|
import logging
import report_generator.io.yaml as report_generatoryaml
logger = logging.getLogger("report_generator")
# resource_package = __name__
# resource_path_packages = '/'.join(('../data', 'packages.yaml'))
# resource_path_sponsors = '/'.join(('../data', 'sponsors.yaml'))
#
# template_packages = pkg_resources.resource_stream(resource_package,
# resource_path_packages)
#
# template_sponsors = pkg_resources.resource_stream(resource_package,
# resource_path_sponsors)
# yaml_packages = report_generatoryaml.read_yaml(template_packages.name)
# yaml_sponsors = report_generatoryaml.read_yaml(template_sponsors.name)
NA_CONTENT_MESSAGE = "Not Available from this Package"
class Sponsor:
def __init__(self, sponsor_name, package_yaml, sponsor_yaml):
yaml_packages = report_generatoryaml.read_yaml(package_yaml)
yaml_sponsors = report_generatoryaml.read_yaml(sponsor_yaml)
self.yaml_sponsors = yaml_sponsors
self.name = sponsor_name
self.package_name = yaml_sponsors[sponsor_name]["package"]
self.content = yaml_sponsors[self.name]
self.package_content_flag = yaml_packages[self.package_name]
self.package_content_generic_flag = yaml_packages["generic"]
@property
def flag_description(self):
return self.package_content_flag["description"]
@property
def description(self):
if self.flag_description:
return self.content["description"]
return NA_CONTENT_MESSAGE
@property
def if_one_true_promotion(self):
tree = self.package_content_flag["promotion"]
return self._if_one_true_in_2_fold(tree)
@property
def if_one_true_web(self):
tree = self.package_content_flag["promotion"]["web"]
return self._if_one_true_in_1_fold(tree)
@property
def flag_web_click(self):
return self.package_content_flag["promotion"]["web"]["click"]
@property
def web_click(self):
if self.flag_web_click:
return self.content["promotion"]["web"]["click"]
return NA_CONTENT_MESSAGE
@property
def flag_web_click_rank(self):
return self.package_content_flag["promotion"]["web"]["click_rank"]
@property
def web_click_portion(self):
if self.flag_web_click_rank:
clicks = self._get_all_sponsor_web_click()
click_target = self.content["promotion"]["web"]["click"]
percentage = click_target / float(sum(clicks))
return "{:.1%}".format(percentage)
return NA_CONTENT_MESSAGE
@property
def web_click_rank(self):
if self.flag_web_click_rank:
clicks = self._get_all_sponsor_web_click()
click_target = self.content["promotion"]["web"]["click"]
clicks_sorted = sorted(clicks, reverse=True)
idx = clicks_sorted.index(click_target)
rank = idx + 1
return rank
return NA_CONTENT_MESSAGE
@property
def if_one_true_facebook(self):
tree = self.package_content_flag["promotion"]["facebook"]
return self._if_one_true_in_1_fold(tree)
@property
def flag_facebook_url(self):
return self.package_content_flag["promotion"]["facebook"]["url"]
@property
def facebook_url(self):
if self.flag_facebook_url:
return self.content["promotion"]["facebook"]["url"]
return NA_CONTENT_MESSAGE
@property
def facebook_total_reached_people(self):
return sum(self._get_sponsor_fb_field("reach"))
@property
def flag_facebook_reach_rank(self):
return self.package_content_flag["promotion"]["facebook"]["reach_rank"]
@property
def facebook_total_reach_portion(self):
field = "reach"
if self.flag_facebook_reach_rank:
all_data = self._get_all_sponsor_fb_field(field)
target_data = self._get_sponsor_fb_field(field)
percentage = sum(target_data) / float(sum(all_data))
return "{:.1%}".format(percentage)
return NA_CONTENT_MESSAGE
@property
def facebook_total_reach_rank(self):
field = "reach"
if self.flag_facebook_reach_rank:
all_data = self._get_all_sponsor_fb_field(field)
target_data = self._get_sponsor_fb_field(field)
all_data_sorted = sorted(all_data, reverse=True)
idx = all_data_sorted.index(sum(target_data))
rank = idx + 1
return rank
return NA_CONTENT_MESSAGE
@property
def if_one_true_booth(self):
tree = self.package_content_flag["booth"]
return self._if_one_true_in_1_fold(tree)
@property
def flag_booth_participant(self):
return self.package_content_flag["booth"]["participant"]
@property
def booth_participant(self):
if self.flag_booth_participant:
return self.content["booth"]["participant"]
return NA_CONTENT_MESSAGE
@property
def flag_booth_participant_rank(self):
return self.package_content_flag["booth"]["participant_rank"]
@property
def booth_participant_portion(self):
if self.flag_booth_participant_rank:
data = self._get_all_sponsor_booth_participant()
data_target = self.content["booth"]["participant"]
percentage = data_target / float(sum(data))
return "{:.1%}".format(percentage)
return NA_CONTENT_MESSAGE
@property
def booth_participant_rank(self):
if self.flag_booth_participant_rank:
data = self._get_all_sponsor_booth_participant()
data_target = self.content["booth"]["participant"]
data_sorted = sorted(data, reverse=True)
idx = data_sorted.index(data_target)
rank = idx + 1
return rank
return NA_CONTENT_MESSAGE
@property
def if_one_true_workshop(self):
tree = self.package_content_flag["workshop"]
return self._if_one_true_in_1_fold(tree)
@property
def flag_workshop_pictures(self):
return self.package_content_flag["workshop"]["pictures"]
@property
def workshop_pictures(self):
if self.flag_workshop_pictures:
return self.content["workshop"]["pictures"]
return self.flag_workshop_pictures
@property
def flag_workshop_description(self):
return self.package_content_flag["workshop"]["description"]
@property
def workshop_description(self):
if self.flag_workshop_description:
return self.content["workshop"]["description"]
return NA_CONTENT_MESSAGE
@property
def flag_workshop_event_url(self):
return self.package_content_flag["workshop"]["event_url"]
@property
def workshop_event_url(self):
if self.flag_workshop_event_url:
return self.content["workshop"]["event_url"]
return NA_CONTENT_MESSAGE
def _if_one_true_in_1_fold(self, tree):
flag = False
for key in tree.keys():
if tree[key]:
flag = True
return flag
def _if_one_true_in_2_fold(self, tree):
flag = False
for entry in tree.keys():
for key in tree[entry].keys():
if tree[entry][key]:
flag = True
return flag
def _get_all_sponsor_web_click(self):
all_data = []
for sponsor in self.yaml_sponsors.keys():
spw = self.yaml_sponsors[sponsor]["promotion"]["web"]["click"]
all_data.append(spw)
return all_data
def _get_sponsor_fb_field(self, field):
all_data = []
for url in self.content["promotion"]["facebook"]["url"]:
data = self.content["promotion"]["facebook"]["url"][url][field]
all_data.append(data)
return all_data
def _get_all_sponsor_fb_field(self, field):
all_data = []
for sponsor in self.yaml_sponsors.keys():
all_data_each_sponsor = []
spf = self.yaml_sponsors[sponsor]["promotion"]["facebook"]["url"]
for url in spf:
data = spf[url][field]
all_data_each_sponsor.append(data)
all_data.append(sum(all_data_each_sponsor))
return all_data
def _get_all_sponsor_booth_participant(self):
all_data = []
for sponsor in self.yaml_sponsors.keys():
data = self.yaml_sponsors[sponsor]["booth"]["participant"]
all_data.append(data)
return all_data
def get_all_sponsors(package_yaml, sponsor_yaml):
yaml_sponsors = report_generatoryaml.read_yaml(sponsor_yaml)
sponsors = []
for entry in yaml_sponsors:
sponsor = Sponsor(entry, package_yaml, sponsor_yaml)
# TODO: to port these debug information to be a part of test scripts
# description = sponsor.description
#
# flag_promotion = sponsor.if_one_true_promotion
#
# flag_web = sponsor.if_one_true_web
# web_click = sponsor.web_click
# web_click_rank = sponsor.web_click_rank
#
# flag_facebook = sponsor.if_one_true_facebook
# flag_facebook_reach = sponsor.flag_facebook_reach
# facebook_reach = sponsor.facebook_reach
# flag_facebook_reach_rank = sponsor.flag_facebook_reach_rank
# facebook_reach_rank = sponsor.facebook_reach_rank
#
# flag_facebook_engagement = sponsor.flag_facebook_engagement
# facebook_engagement = sponsor.facebook_engagement
# flag_facebook_engagement_rank = sponsor.flag_facebook_engagement_rank
# facebook_engagement_rank = sponsor.facebook_engagement_rank
#
# flag_booth = sponsor.if_one_true_booth
# flag_booth_participant = sponsor.flag_booth_participant
# booth_participant = sponsor.boot_participant
# # flag_booth_participant_rank = sponsor.flag_booth_participant_rank
# # booth_participant_rank = sponsor.booth_participant_rank
#
# flag_workshop = sponsor.if_one_true_workshop
# workshop_pictures = sponsor.flag_workshop_pictures
# workshop_description = sponsor.workshop_description
# workshop_event_url = sponsor.workshop_event_url
# pass
sponsors.append(sponsor)
return sponsors
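# --- Added example (hedged sketch, not part of the original file) ---
# Hypothetical call site; the YAML paths and the helper name are
# illustrative assumptions.
def _demo_get_all_sponsors():
    sponsors = get_all_sponsors("data/packages.yaml", "data/sponsors.yaml")
    for sponsor in sponsors:
        print(sponsor.name, sponsor.package_name, sponsor.description)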
| 33.683871
| 79
| 0.659931
|
934b301a1a1c738eb40c33f35ed2f04dc250d9bb
| 391
|
py
|
Python
|
sistemaComercial/mercado/migrations/0003_carrinho_valor_carrinho.py
|
reglabel/SistemaComercial
|
ca33ebf84bf3b1961a533a6f852782ad1ee3c65c
|
[
"MIT"
] | null | null | null |
sistemaComercial/mercado/migrations/0003_carrinho_valor_carrinho.py
|
reglabel/SistemaComercial
|
ca33ebf84bf3b1961a533a6f852782ad1ee3c65c
|
[
"MIT"
] | null | null | null |
sistemaComercial/mercado/migrations/0003_carrinho_valor_carrinho.py
|
reglabel/SistemaComercial
|
ca33ebf84bf3b1961a533a6f852782ad1ee3c65c
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.7 on 2021-09-10 20:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mercado', '0002_auto_20210910_1525'),
]
operations = [
migrations.AddField(
model_name='carrinho',
name='valor_carrinho',
field=models.FloatField(default=0),
),
]
| 20.578947
| 47
| 0.603581
|
40947bf5f0bd2546637e33c4a11fc658467c0b1b
| 67
|
py
|
Python
|
spambayes/spambayes/__init__.py
|
alexeip0/spambayes
|
0c7d12cf4702ef82974ab16d2a36fa9ccedf185a
|
[
"PSF-2.0"
] | 13
|
2015-07-03T06:53:47.000Z
|
2021-02-19T13:21:56.000Z
|
spambayes/spambayes/__init__.py
|
alexeip0/spambayes
|
0c7d12cf4702ef82974ab16d2a36fa9ccedf185a
|
[
"PSF-2.0"
] | 5
|
2015-07-16T07:33:46.000Z
|
2021-04-05T20:27:52.000Z
|
spambayes/spambayes/__init__.py
|
alexeip0/spambayes
|
0c7d12cf4702ef82974ab16d2a36fa9ccedf185a
|
[
"PSF-2.0"
] | 20
|
2015-05-16T22:42:14.000Z
|
2021-09-18T17:46:40.000Z
|
# package marker.
__version__ = "1.1b3"
__date__ = "Nov 23, 2017"
| 13.4
| 25
| 0.671642
|
3c67eaf286b2cde2519bcb97fa0dd97232f8240c
| 2,288
|
py
|
Python
|
cloudbio/package/shared.py
|
pfpjs/cloudbiolinux
|
60c0dfeba0c18c2eb201f5e7fcc84bf0f43a284d
|
[
"MIT"
] | null | null | null |
cloudbio/package/shared.py
|
pfpjs/cloudbiolinux
|
60c0dfeba0c18c2eb201f5e7fcc84bf0f43a284d
|
[
"MIT"
] | null | null | null |
cloudbio/package/shared.py
|
pfpjs/cloudbiolinux
|
60c0dfeba0c18c2eb201f5e7fcc84bf0f43a284d
|
[
"MIT"
] | null | null | null |
"""Shared functionality useful for multiple package managers.
"""
import yaml
from fabric.api import *
from fabric.contrib.files import *
def _yaml_to_packages(yaml_file, to_install=None, subs_yaml_file=None, namesort=True):
"""Read a list of packages from a nested YAML configuration file.
"""
env.logger.info("Reading %s" % yaml_file)
with open(yaml_file) as in_handle:
full_data = yaml.load(in_handle)
if full_data is None:
full_data = {}
if subs_yaml_file is not None:
with open(subs_yaml_file) as in_handle:
subs = yaml.load(in_handle)
else:
subs = {}
# filter the data based on what we have configured to install
    data = [(k, v) for (k, v) in full_data.items()
if to_install is None or k in to_install]
data.sort()
packages = []
pkg_to_group = dict()
while len(data) > 0:
cur_key, cur_info = data.pop(0)
if cur_info:
if isinstance(cur_info, (list, tuple)):
packages.extend(_filter_subs_packages(cur_info, subs, namesort))
for p in cur_info:
pkg_to_group[p] = cur_key
elif isinstance(cur_info, dict):
                for key, val in cur_info.items():
# if we are okay, propagate with the top level key
if key == 'needs_64bit':
if env.is_64bit:
data.insert(0, (cur_key, val))
elif key.startswith(env.distribution):
if key.endswith(env.dist_name):
data.insert(0, (cur_key, val))
else:
data.insert(0, (cur_key, val))
else:
raise ValueError(cur_info)
env.logger.debug("Packages to install: {0}".format(",".join(packages)))
return packages, pkg_to_group
def _filter_subs_packages(initial, subs, namesort=True):
"""Rename and filter package list with subsitutions; for similar systems.
"""
final = []
for p in initial:
try:
new_p = subs[p]
except KeyError:
new_p = p
if new_p:
final.append(new_p)
if namesort:
final.sort()
return final
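# --- Added example (hedged, not part of the original file) ---
# Sketch of the nested YAML layout _yaml_to_packages understands; the group
# and package names below are illustrative assumptions.
#
#   minimal:
#     - git
#     - vim
#   needs_64bit_tools:
#     needs_64bit:
#       - bigmem-tool
#
# _yaml_to_packages("main.yaml", to_install=["minimal"]) would then return
# (["git", "vim"], {"git": "minimal", "vim": "minimal"}).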
| 36.31746
| 86
| 0.566434
|
44486c00c382de0ecb87ffbe56f1b1a615dbcf72
| 4,361
|
py
|
Python
|
applications/popart/bert/utils/device.py
|
kew96/GraphcoreExamples
|
22dc0d7e3755b0a7f16cdf694c6d10c0f91ee8eb
|
[
"MIT"
] | null | null | null |
applications/popart/bert/utils/device.py
|
kew96/GraphcoreExamples
|
22dc0d7e3755b0a7f16cdf694c6d10c0f91ee8eb
|
[
"MIT"
] | null | null | null |
applications/popart/bert/utils/device.py
|
kew96/GraphcoreExamples
|
22dc0d7e3755b0a7f16cdf694c6d10c0f91ee8eb
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 Graphcore Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import logging
import popart
import popdist
__all__ = ["acquire_device", "device_is_replicated"]
logger = logging.getLogger(__name__)
def _request_ipus(num_ipus):
return pow(2, math.ceil(math.log2(num_ipus)))
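# --- Added note (hedged, not part of the original file) ---
# _request_ipus rounds a request up to the next power of two, e.g.
# _request_ipus(3) == 4, _request_ipus(5) == 8, _request_ipus(8) == 8.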
def get_ipu_model(args, request_ipus):
opts = {"numIPUs": request_ipus}
if args.device_version is not None:
opts["ipuVersion"] = args.device_version
return get_device_manager(args).createIpuModelDevice(opts)
def get_offline_device(args, request_ipus):
opts = {
"numIPUs": request_ipus,
"syncPattern": get_sync_pattern(args)
}
if args.device_tiles:
opts["tilesPerIPU"] = args.device_tiles
if args.device_version:
opts["ipuVersion"] = args.device_version
return get_device_manager(args).createOfflineIPUDevice(opts)
def get_device_manager(args):
manager = popart.DeviceManager()
manager.setOnDemandAttachTimeout(args.device_ondemand_timeout)
return manager
def get_connection_type(args):
if args.device_connection_type == "ondemand":
return popart.DeviceConnectionType.OnDemand
return popart.DeviceConnectionType.Always
def device_is_replicated(args):
return args.replication_factor > 1 or (args.use_popdist and args.popdist_size > 1)
def get_sync_pattern(args):
if args.execution_mode == "PHASED" and args.phased_execution_type == "DUAL":
return popart.SyncPattern.ReplicaAndLadder
if args.execution_mode == "PIPELINE" and not device_is_replicated(args):
return popart.SyncPattern.SinglePipeline
return popart.SyncPattern.Full
def get_device_by_id(args, request_ipus):
device = get_device_manager(args).acquireDeviceById(
args.device_id,
pattern=get_sync_pattern(args),
connectionType=get_connection_type(args))
if device is not None and device.numIpus != request_ipus:
raise RuntimeError(f"Number of IPUs in device selected by id ({device.numIpus}) does not match"
f" the required IPUs from the model configuration ({request_ipus})")
return device
def get_available_device(args, request_ipus):
return get_device_manager(args).acquireAvailableDevice(
request_ipus,
pattern=get_sync_pattern(args),
connectionType=get_connection_type(args))
def get_popdist_device(args, request_ipus):
ipus_per_replica = request_ipus // args.replication_factor
if not popdist.checkNumIpusPerReplica(ipus_per_replica):
raise RuntimeError(f"The number IPUs per replica ({ipus_per_replica}) required for the model configuration"
f" does not match the specified popdist IPUs per replica ({popdist.getNumIpusPerReplica()})")
args.device_id = popdist.getDeviceId(ipus_per_replica)
return get_device_by_id(args, request_ipus)
def _acquire_device(args, num_ipus):
request_ipus = _request_ipus(num_ipus)
if args.use_popdist:
logger.info(f"Need {num_ipus} IPUs per instance. Requesting {request_ipus} IPUs per instance.")
else:
logger.info(f"Need {num_ipus} IPUs. Requesting {request_ipus} IPUs.")
if args.use_ipu_model:
return get_ipu_model(args, request_ipus)
if args.device_connection_type == "offline":
return get_offline_device(args, request_ipus)
if args.use_popdist:
return get_popdist_device(args, request_ipus)
if args.device_id is not None:
return get_device_by_id(args, request_ipus)
return get_available_device(args, request_ipus)
def acquire_device(args, num_ipus):
device = _acquire_device(args, num_ipus)
if device is None:
raise OSError("Failed to acquire IPU.")
logger.info(f"Acquired device: {device}")
return device
| 31.832117
| 120
| 0.731254
|
a265390455d551d099114b7debe00f02b4a0eb01
| 1,951
|
py
|
Python
|
benchmark/startPyquil882.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startPyquil882.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startPyquil882.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=5
# total number=40
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=3
prog += H(1) # number=4
prog += H(2) # number=5
prog += H(3) # number=6
prog += H(4) # number=21
prog += H(0) # number=1
prog += H(1) # number=2
prog += H(2) # number=7
prog += H(3) # number=8
prog += H(0) # number=34
prog += CZ(1,0) # number=35
prog += H(0) # number=36
prog += X(1) # number=29
prog += CNOT(1,0) # number=30
prog += X(0) # number=31
prog += CNOT(1,0) # number=32
prog += X(1) # number=33
prog += CNOT(1,0) # number=27
prog += CNOT(0,1) # number=37
prog += X(1) # number=38
prog += CNOT(0,1) # number=39
prog += CNOT(0,2) # number=22
prog += X(2) # number=23
prog += CNOT(0,2) # number=24
prog += H(3) # number=28
prog += X(3) # number=12
prog += X(0) # number=13
prog += X(1) # number=14
prog += X(2) # number=15
prog += X(3) # number=16
prog += H(0) # number=17
prog += H(1) # number=18
prog += H(2) # number=19
prog += H(3) # number=20
# circuit end
return prog
def summarise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('5q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil882.csv","w")
    print(summarise_results(bitstrings), file=writefile)
writefile.close()
| 24.3875
| 64
| 0.547924
|
c4404fbde13399d6583c5c0d8877ae512b1fde1f
| 115
|
py
|
Python
|
everstone/sql/__init__.py
|
scragly/everstone
|
a780cb59aaf2401749264d44acc0345b82a045df
|
[
"MIT"
] | 7
|
2021-03-26T04:42:49.000Z
|
2021-08-28T16:07:04.000Z
|
everstone/sql/__init__.py
|
scragly/everstone
|
a780cb59aaf2401749264d44acc0345b82a045df
|
[
"MIT"
] | null | null | null |
everstone/sql/__init__.py
|
scragly/everstone
|
a780cb59aaf2401749264d44acc0345b82a045df
|
[
"MIT"
] | null | null | null |
from .column import Column
from .constraints import Constraint
from .schema import Schema
from .table import Table
| 23
| 35
| 0.826087
|
3ec52e6e7a157c8fe378eb0f947798a037a07810
| 10,792
|
py
|
Python
|
osx/devkit/plug-ins/scripted/pyFootPrintNode.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | 10
|
2018-03-30T16:09:02.000Z
|
2021-12-07T07:29:19.000Z
|
osx/devkit/plug-ins/scripted/pyFootPrintNode.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | null | null | null |
osx/devkit/plug-ins/scripted/pyFootPrintNode.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | 9
|
2018-06-02T09:18:49.000Z
|
2021-12-20T09:24:35.000Z
|
#-
# ===========================================================================
# Copyright 2015 Autodesk, Inc. All rights reserved.
#
# Use of this software is subject to the terms of the Autodesk license
# agreement provided at the time of installation or download, or which
# otherwise accompanies this software in either electronic or hard copy form.
# ===========================================================================
#+
import sys
import maya.api.OpenMaya as om
import maya.api.OpenMayaUI as omui
import maya.api.OpenMayaAnim as oma
import maya.api.OpenMayaRender as omr
def maya_useNewAPI():
"""
The presence of this function tells Maya that the plugin produces, and
expects to be passed, objects created using the Maya Python API 2.0.
"""
pass
def matrixAsArray(matrix):
array = []
for i in range(16):
array.append(matrix[i])
return array
## Foot Data
##
sole = [ [ 0.00, 0.0, -0.70 ],
[ 0.04, 0.0, -0.69 ],
[ 0.09, 0.0, -0.65 ],
[ 0.13, 0.0, -0.61 ],
[ 0.16, 0.0, -0.54 ],
[ 0.17, 0.0, -0.46 ],
[ 0.17, 0.0, -0.35 ],
[ 0.16, 0.0, -0.25 ],
[ 0.15, 0.0, -0.14 ],
[ 0.13, 0.0, 0.00 ],
[ 0.00, 0.0, 0.00 ],
[ -0.13, 0.0, 0.00 ],
[ -0.15, 0.0, -0.14 ],
[ -0.16, 0.0, -0.25 ],
[ -0.17, 0.0, -0.35 ],
[ -0.17, 0.0, -0.46 ],
[ -0.16, 0.0, -0.54 ],
[ -0.13, 0.0, -0.61 ],
[ -0.09, 0.0, -0.65 ],
[ -0.04, 0.0, -0.69 ],
[ -0.00, 0.0, -0.70 ] ]
heel = [ [ 0.00, 0.0, 0.06 ],
[ 0.13, 0.0, 0.06 ],
[ 0.14, 0.0, 0.15 ],
[ 0.14, 0.0, 0.21 ],
[ 0.13, 0.0, 0.25 ],
[ 0.11, 0.0, 0.28 ],
[ 0.09, 0.0, 0.29 ],
[ 0.04, 0.0, 0.30 ],
[ 0.00, 0.0, 0.30 ],
[ -0.04, 0.0, 0.30 ],
[ -0.09, 0.0, 0.29 ],
[ -0.11, 0.0, 0.28 ],
[ -0.13, 0.0, 0.25 ],
[ -0.14, 0.0, 0.21 ],
[ -0.14, 0.0, 0.15 ],
[ -0.13, 0.0, 0.06 ],
[ -0.00, 0.0, 0.06 ] ]
soleCount = 21
heelCount = 17
#############################################################################
##
## Node implementation with standard viewport draw
##
#############################################################################
class footPrint(omui.MPxLocatorNode):
id = om.MTypeId( 0x80007 )
drawDbClassification = "drawdb/geometry/footPrint"
drawRegistrantId = "FootprintNodePlugin"
size = None ## The size of the foot
@staticmethod
def creator():
return footPrint()
@staticmethod
def initialize():
unitFn = om.MFnUnitAttribute()
footPrint.size = unitFn.create( "size", "sz", om.MFnUnitAttribute.kDistance )
unitFn.default = om.MDistance(1.0)
om.MPxNode.addAttribute( footPrint.size )
def __init__(self):
omui.MPxLocatorNode.__init__(self)
def compute(self, plug, data):
return None
def draw(self, view, path, style, status):
## Get the size
##
thisNode = self.thisMObject()
plug = om.MPlug( thisNode, footPrint.size )
sizeVal = plug.asMDistance()
multiplier = sizeVal.asCentimeters()
global sole, soleCount
global heel, heelCount
view.beginGL()
## drawing in VP1 views will be done using V1 Python APIs:
import maya.OpenMayaRender as v1omr
glRenderer = v1omr.MHardwareRenderer.theRenderer()
glFT = glRenderer.glFunctionTable()
if ( style == omui.M3dView.kFlatShaded ) or ( style == omui.M3dView.kGouraudShaded ):
## Push the color settings
##
glFT.glPushAttrib( v1omr.MGL_CURRENT_BIT )
# Show both faces
glFT.glDisable( v1omr.MGL_CULL_FACE )
if status == omui.M3dView.kActive:
view.setDrawColor( 13, omui.M3dView.kActiveColors )
else:
view.setDrawColor( 13, omui.M3dView.kDormantColors )
glFT.glBegin( v1omr.MGL_TRIANGLE_FAN )
for i in range(soleCount-1):
glFT.glVertex3f( sole[i][0] * multiplier, sole[i][1] * multiplier, sole[i][2] * multiplier )
glFT.glEnd()
glFT.glBegin( v1omr.MGL_TRIANGLE_FAN )
for i in range(heelCount-1):
glFT.glVertex3f( heel[i][0] * multiplier, heel[i][1] * multiplier, heel[i][2] * multiplier )
glFT.glEnd()
glFT.glPopAttrib()
## Draw the outline of the foot
##
glFT.glBegin( v1omr.MGL_LINES )
for i in range(soleCount-1):
glFT.glVertex3f( sole[i][0] * multiplier, sole[i][1] * multiplier, sole[i][2] * multiplier )
glFT.glVertex3f( sole[i+1][0] * multiplier, sole[i+1][1] * multiplier, sole[i+1][2] * multiplier )
for i in range(heelCount-1):
glFT.glVertex3f( heel[i][0] * multiplier, heel[i][1] * multiplier, heel[i][2] * multiplier )
glFT.glVertex3f( heel[i+1][0] * multiplier, heel[i+1][1] * multiplier, heel[i+1][2] * multiplier )
glFT.glEnd()
view.endGL()
## Draw the name of the footPrint
view.setDrawColor( om.MColor( (0.1, 0.8, 0.8, 1.0) ) )
view.drawText( "Footprint", om.MPoint( 0.0, 0.0, 0.0 ), omui.M3dView.kCenter )
def isBounded(self):
return True
def boundingBox(self):
## Get the size
##
thisNode = self.thisMObject()
plug = om.MPlug( thisNode, footPrint.size )
sizeVal = plug.asMDistance()
multiplier = sizeVal.asCentimeters()
corner1 = om.MPoint( -0.17, 0.0, -0.7 )
corner2 = om.MPoint( 0.17, 0.0, 0.3 )
corner1 *= multiplier
corner2 *= multiplier
return om.MBoundingBox( corner1, corner2 )
#############################################################################
##
## Viewport 2.0 override implementation
##
#############################################################################
class footPrintData(om.MUserData):
def __init__(self):
om.MUserData.__init__(self, False) ## don't delete after draw
self.fColor = om.MColor()
self.fSoleLineList = om.MPointArray()
self.fSoleTriangleList = om.MPointArray()
self.fHeelLineList = om.MPointArray()
self.fHeelTriangleList = om.MPointArray()
class footPrintDrawOverride(omr.MPxDrawOverride):
@staticmethod
def creator(obj):
return footPrintDrawOverride(obj)
@staticmethod
def draw(context, data):
return
def __init__(self, obj):
omr.MPxDrawOverride.__init__(self, obj, footPrintDrawOverride.draw)
## We want to perform custom bounding box drawing
## so return True so that the internal rendering code
## will not draw it for us.
self.mCustomBoxDraw = True
self.mCurrentBoundingBox = om.MBoundingBox()
def supportedDrawAPIs(self):
## this plugin supports both GL and DX
return omr.MRenderer.kOpenGL | omr.MRenderer.kDirectX11 | omr.MRenderer.kOpenGLCoreProfile
def isBounded(self, objPath, cameraPath):
return True
def boundingBox(self, objPath, cameraPath):
corner1 = om.MPoint( -0.17, 0.0, -0.7 )
corner2 = om.MPoint( 0.17, 0.0, 0.3 )
multiplier = self.getMultiplier(objPath)
corner1 *= multiplier
corner2 *= multiplier
self.mCurrentBoundingBox.clear()
self.mCurrentBoundingBox.expand( corner1 )
self.mCurrentBoundingBox.expand( corner2 )
return self.mCurrentBoundingBox
def disableInternalBoundingBoxDraw(self):
return self.mCustomBoxDraw
def prepareForDraw(self, objPath, cameraPath, frameContext, oldData):
## Retrieve data cache (create if does not exist)
data = oldData
if not isinstance(data, footPrintData):
data = footPrintData()
## compute data and cache it
global soleCount, sole
global heelCount, heel
fMultiplier = self.getMultiplier(objPath)
data.fSoleLineList.clear()
for i in range(soleCount):
data.fSoleLineList.append( om.MPoint(sole[i][0] * fMultiplier, sole[i][1] * fMultiplier, sole[i][2] * fMultiplier) )
data.fHeelLineList.clear()
for i in range(heelCount):
data.fHeelLineList.append( om.MPoint(heel[i][0] * fMultiplier, heel[i][1] * fMultiplier, heel[i][2] * fMultiplier) )
data.fSoleTriangleList.clear()
for i in range(1,soleCount-1):
data.fSoleTriangleList.append( om.MPoint(sole[0][0] * fMultiplier, sole[0][1] * fMultiplier, sole[0][2] * fMultiplier) )
data.fSoleTriangleList.append( om.MPoint(sole[i][0] * fMultiplier, sole[i][1] * fMultiplier, sole[i][2] * fMultiplier) )
data.fSoleTriangleList.append( om.MPoint(sole[i+1][0] * fMultiplier, sole[i+1][1] * fMultiplier, sole[i+1][2] * fMultiplier) )
data.fHeelTriangleList.clear()
for i in range(1,heelCount-1):
data.fHeelTriangleList.append( om.MPoint(heel[0][0] * fMultiplier, heel[0][1] * fMultiplier, heel[0][2] * fMultiplier) )
data.fHeelTriangleList.append( om.MPoint(heel[i][0] * fMultiplier, heel[i][1] * fMultiplier, heel[i][2] * fMultiplier) )
data.fHeelTriangleList.append( om.MPoint(heel[i+1][0] * fMultiplier, heel[i+1][1] * fMultiplier, heel[i+1][2] * fMultiplier) )
data.fColor = omr.MGeometryUtilities.wireframeColor(objPath)
return data
def hasUIDrawables(self):
return True
def addUIDrawables(self, objPath, drawManager, frameContext, data):
locatordata = data
if not isinstance(locatordata, footPrintData):
return
drawManager.beginDrawable()
##Draw the foot print solid/wireframe
drawManager.setColor( locatordata.fColor )
drawManager.setDepthPriority(5)
if (frameContext.getDisplayStyle() & omr.MFrameContext.kGouraudShaded):
drawManager.mesh(omr.MGeometry.kTriangles, locatordata.fSoleTriangleList)
drawManager.mesh(omr.MGeometry.kTriangles, locatordata.fHeelTriangleList)
drawManager.mesh(omr.MUIDrawManager.kClosedLine, locatordata.fSoleLineList)
drawManager.mesh(omr.MUIDrawManager.kClosedLine, locatordata.fHeelLineList)
## Draw a text "Foot"
pos = om.MPoint( 0.0, 0.0, 0.0 ) ## Position of the text
textColor = om.MColor( (0.1, 0.8, 0.8, 1.0) ) ## Text color
drawManager.setColor( textColor )
drawManager.setFontSize( omr.MUIDrawManager.kSmallFontSize )
drawManager.text(pos, "Footprint", omr.MUIDrawManager.kCenter )
drawManager.endDrawable()
def getMultiplier(self, objPath):
## Retrieve value of the size attribute from the node
footprintNode = objPath.node()
plug = om.MPlug(footprintNode, footPrint.size)
if not plug.isNull:
sizeVal = plug.asMDistance()
return sizeVal.asCentimeters()
return 1.0
def initializePlugin(obj):
plugin = om.MFnPlugin(obj, "Autodesk", "3.0", "Any")
try:
plugin.registerNode("footPrint", footPrint.id, footPrint.creator, footPrint.initialize, om.MPxNode.kLocatorNode, footPrint.drawDbClassification)
except:
sys.stderr.write("Failed to register node\n")
raise
try:
omr.MDrawRegistry.registerDrawOverrideCreator(footPrint.drawDbClassification, footPrint.drawRegistrantId, footPrintDrawOverride.creator)
except:
sys.stderr.write("Failed to register override\n")
raise
def uninitializePlugin(obj):
plugin = om.MFnPlugin(obj)
try:
plugin.deregisterNode(footPrint.id)
except:
sys.stderr.write("Failed to deregister node\n")
pass
try:
omr.MDrawRegistry.deregisterDrawOverrideCreator(footPrint.drawDbClassification, footPrint.drawRegistrantId)
except:
sys.stderr.write("Failed to deregister override\n")
pass
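# --- Added example (hedged, not part of the original file) ---
# Typical way to exercise the plug-in from a Maya Python session; the file
# path is an illustrative assumption:
#   import maya.cmds as cmds
#   cmds.loadPlugin("pyFootPrintNode.py")
#   cmds.createNode("footPrint")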
| 30.922636
| 146
| 0.650389
|
f936c08a15fef5ba5f55e8d918887d0261f7f899
| 3,223
|
py
|
Python
|
announcer/subscribers/ticket_custom.py
|
dokipen/trac-announcer-plugin
|
7ef4123a7508c5395c8008fa2a8478b1888b4f63
|
[
"BSD-3-Clause"
] | null | null | null |
announcer/subscribers/ticket_custom.py
|
dokipen/trac-announcer-plugin
|
7ef4123a7508c5395c8008fa2a8478b1888b4f63
|
[
"BSD-3-Clause"
] | 1
|
2018-06-11T14:48:06.000Z
|
2018-06-11T14:48:06.000Z
|
announcer/subscribers/ticket_custom.py
|
dokipen/trac-announcer-plugin
|
7ef4123a7508c5395c8008fa2a8478b1888b4f63
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009, Robert Corsaro
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <ORGANIZATION> nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
import re
from trac.core import Component, implements
from trac.ticket import model
from trac.web.chrome import add_warning
from trac.config import ListOption
from announcer.api import IAnnouncementSubscriber, istrue
from announcer.api import IAnnouncementPreferenceProvider
class TicketCustomFieldSubscriber(Component):
implements(IAnnouncementSubscriber)
custom_cc_fields = ListOption('announcer', 'custom_cc_fields',
doc="Field names that contain users that should be notified on "
"ticket changes")
def subscriptions(self, event):
if event.realm == 'ticket':
ticket = event.target
if event.category in ('changed', 'created', 'attachment added'):
for sub in self._get_membership(event.target):
yield sub
def _get_membership(self, ticket):
for field in self.custom_cc_fields:
subs = ticket[field] or ''
            for chunk in re.split(r'\s|,', subs):
chunk = chunk.strip()
if not chunk or chunk.startswith('@'):
continue
if '@' in chunk:
address = chunk
name = None
else:
name = chunk
address = None
if name or address:
self.log.debug("TicketCustomFieldSubscriber " \
"added '%s <%s>'"%(name,address))
                    yield ('email', name, bool(name), address)
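# --- Added example (hedged, not part of the original file) ---
# Hypothetical trac.ini snippet enabling this subscriber; the field names are
# illustrative assumptions:
#
#   [announcer]
#   custom_cc_fields = cc_engineers, cc_managers
#
# A ticket whose `cc_engineers` custom field contains "alice@example.com, bob"
# then yields one ('email', ...) subscription per entry; entries starting
# with '@' are skipped.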
| 44.150685
| 79
| 0.653739
|
ed1918b374d9031ba8897c20bc3ff14fc6135104
| 672
|
py
|
Python
|
backend/api/core/models.py
|
jacorea/ismp
|
81cf55559005753f3055165689889b18aec958ac
|
[
"CC0-1.0"
] | 3
|
2020-05-08T03:51:43.000Z
|
2020-06-13T23:12:26.000Z
|
backend/api/core/models.py
|
jacorea/ismp
|
81cf55559005753f3055165689889b18aec958ac
|
[
"CC0-1.0"
] | 15
|
2020-05-04T05:49:17.000Z
|
2020-06-01T21:31:03.000Z
|
backend/api/core/models.py
|
jacorea/ismp
|
81cf55559005753f3055165689889b18aec958ac
|
[
"CC0-1.0"
] | 11
|
2020-05-01T04:35:24.000Z
|
2020-05-28T17:17:21.000Z
|
from django.db import models
class TimestampedModel(models.Model):
# A timestamp representing when this object was created.
created_at = models.DateTimeField(auto_now_add=True)
# A timestamp representing when this object was last updated.
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
# By default, any model that inherits from `TimestampedModel` should
# be ordered in reverse-chronological order. We can override this on a
# per-model basis as needed, but reverse-chronological is a good
# default ordering for most models.
ordering = ['-created_at', '-updated_at']
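# --- Added example (hedged sketch, not part of the original file) ---
# A hypothetical concrete model inheriting the timestamps and the default
# reverse-chronological ordering (kept as a comment so this module defines
# no extra tables):
#
#   class Article(TimestampedModel):
#       title = models.CharField(max_length=255)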
| 35.368421
| 78
| 0.709821
|
78cb1f7df3c99047f363527476b7fd6f4a20e952
| 3,030
|
py
|
Python
|
SRC/Python/module/locistat.py
|
wenching/CRI-BIO-646-BMB-RKeenan
|
6dfda3f7f1d3f90b45609bcc656a372a87cc795d
|
[
"MIT"
] | null | null | null |
SRC/Python/module/locistat.py
|
wenching/CRI-BIO-646-BMB-RKeenan
|
6dfda3f7f1d3f90b45609bcc656a372a87cc795d
|
[
"MIT"
] | null | null | null |
SRC/Python/module/locistat.py
|
wenching/CRI-BIO-646-BMB-RKeenan
|
6dfda3f7f1d3f90b45609bcc656a372a87cc795d
|
[
"MIT"
] | null | null | null |
"""Loci Statistics"""
import os
import sys
import logging
import datetime
import lib.venn
import util.ddictfunc
SELF_FILE_PATH = os.path.realpath(__file__)
SELF_DIR_PATH = os.path.dirname(SELF_FILE_PATH)
SELF_FILE = os.path.basename(SELF_FILE_PATH)
def loci_stat(args, sw_cfg, task_cfg):
"""
Loci Statistics
    :param args: an argparse.Namespace object of main arguments {.log_file}
    :param sw_cfg: a dictionary of the corresponding software configuration {locistattool}
    :param task_cfg: a dictionary of the corresponding task configuration {main, featurecounts}
    :returns: corresponding code snippets, which will be written to shell_script_path if provided
    :raises KeyError: NA
"""
# logging
if args.log_file is None:
log_file_path = '{}.{}.log'.format(
SELF_FILE_PATH,
datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
else:
log_file_path = args.log_file
formatter = "%(asctime)-15s %(levelname)-8s %(message)s"
logging.basicConfig(
level=[logging.NOTSET, logging.DEBUG, logging.INFO,
logging.WARNING, logging.ERROR, logging.CRITICAL][1],
format=formatter,
filename=log_file_path)
for quantifier in task_cfg.keys():
if quantifier in ["main", "references"]: continue
logging.info("[ {} ] Quantifier: {}".format(SELF_FILE, quantifier))
for aligner in task_cfg[quantifier].keys():
if aligner in ["main"]: continue
logging.info("[ {} ] Aligner: {}\n".format(SELF_FILE, aligner))
for proj_comp in task_cfg[quantifier][aligner].keys():
if proj_comp in ["main"]: continue
logging.info("[ {} ] Comparison: {}".format(SELF_FILE, proj_comp))
out_dir_t_path = task_cfg[quantifier][aligner][proj_comp]['out_dir_path']
if not os.path.exists(out_dir_t_path):
logging.debug("MKDIR: [ $out_dir_t_path ]")
os.makedirs(out_dir_t_path, exist_ok=True)
task_cfg_comp = {
proj_comp: task_cfg[quantifier][aligner][proj_comp]
}
task_cfg_comp[proj_comp].update(
util.ddictfunc.subset(task_cfg[quantifier][aligner]['main'], ['anchor_file_path'])
)
#util.ddictfunc.pprint_ddicts(task_cfg_comp)
if(task_cfg['main']['application'] == "RNAseq"):
sw_cfg_tool = util.ddictfunc.subset(sw_cfg, ['venn'])
lib.venn.venn(args, sw_cfg_tool, task_cfg_comp)
logging.debug("[ {} ] Comparison: {} - DONE\n".format(SELF_FILE, proj_comp))
logging.debug("[ {} ] Aligner: {} - DONE\n".format(SELF_FILE, aligner))
logging.debug("[ {} ] Quantifier: {} - DONE\n".format(SELF_FILE, quantifier))
return
| 38.846154
| 114
| 0.587459
|
a74e61e9a135b059c558ae6cffd2081c54e95290
| 187
|
py
|
Python
|
setup.py
|
RaphaelOlivier/deepspeech.pytorch
|
eb73ef61807ab01fad3662ad03dfea8fd44439aa
|
[
"MIT"
] | 13
|
2022-01-25T01:26:56.000Z
|
2022-03-18T00:46:38.000Z
|
setup.py
|
RaphaelOlivier/deepspeech.pytorch
|
eb73ef61807ab01fad3662ad03dfea8fd44439aa
|
[
"MIT"
] | 1
|
2019-02-07T12:52:46.000Z
|
2019-02-07T12:52:46.000Z
|
setup.py
|
RaphaelOlivier/deepspeech.pytorch
|
eb73ef61807ab01fad3662ad03dfea8fd44439aa
|
[
"MIT"
] | 6
|
2020-11-26T07:57:25.000Z
|
2021-12-07T10:55:11.000Z
|
from setuptools import setup, find_packages
setup(name='deepspeech_pytorch',
version='0.1',
author='SeanNaren',
packages=find_packages(),
zip_safe=False,
)
| 20.777778
| 43
| 0.657754
|
6309c3727be6bdb5f5485c5576fad2daf00c20d3
| 1,953
|
py
|
Python
|
tech_project/lib/python2.7/site-packages/phonenumbers/data/region_CZ.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 27
|
2019-11-18T05:06:01.000Z
|
2021-02-28T19:38:09.000Z
|
tech_project/lib/python2.7/site-packages/phonenumbers/data/region_CZ.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 21
|
2020-12-29T21:29:31.000Z
|
2022-03-12T00:53:57.000Z
|
tech_project/lib/python2.7/site-packages/phonenumbers/data/region_CZ.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 6
|
2020-01-09T21:55:38.000Z
|
2021-09-17T01:22:48.000Z
|
"""Auto-generated file, do not edit by hand. CZ metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_CZ = PhoneMetadata(id='CZ', country_code=420, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='(?:[2-578]\\d|60)\\d{7}|9\\d{8,11}', possible_length=(9, 10, 11, 12)),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:2\\d|3[1257-9]|4[16-9]|5[13-9])\\d{7}', example_number='212345678', possible_length=(9,)),
mobile=PhoneNumberDesc(national_number_pattern='(?:60[1-8]|7(?:0[2-5]|[2379]\\d))\\d{6}', example_number='601123456', possible_length=(9,)),
toll_free=PhoneNumberDesc(national_number_pattern='800\\d{6}', example_number='800123456', possible_length=(9,)),
premium_rate=PhoneNumberDesc(national_number_pattern='9(?:0[05689]|76)\\d{6}', example_number='900123456', possible_length=(9,)),
shared_cost=PhoneNumberDesc(national_number_pattern='8[134]\\d{7}', example_number='811234567', possible_length=(9,)),
personal_number=PhoneNumberDesc(national_number_pattern='70[01]\\d{6}', example_number='700123456', possible_length=(9,)),
voip=PhoneNumberDesc(national_number_pattern='9[17]0\\d{6}', example_number='910123456', possible_length=(9,)),
uan=PhoneNumberDesc(national_number_pattern='9(?:5\\d|7[2-4])\\d{6}', example_number='972123456', possible_length=(9,)),
voicemail=PhoneNumberDesc(national_number_pattern='9(?:3\\d{9}|6\\d{7,10})', example_number='93123456789', possible_length=(9, 10, 11, 12)),
number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['[2-8]|9[015-7]']),
NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['9']),
NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['9'])],
mobile_number_portable_region=True)
| 102.789474
| 149
| 0.693804
|
924b43f75fca9b5ac66dd14fbdcbb74f19b4c0c6
| 46
|
py
|
Python
|
bigquery_dl/__main__.py
|
ROYALBEFF/bigquery-dl
|
21147d27d91f59568de0f420f72e32c0810b14a9
|
[
"MIT"
] | null | null | null |
bigquery_dl/__main__.py
|
ROYALBEFF/bigquery-dl
|
21147d27d91f59568de0f420f72e32c0810b14a9
|
[
"MIT"
] | null | null | null |
bigquery_dl/__main__.py
|
ROYALBEFF/bigquery-dl
|
21147d27d91f59568de0f420f72e32c0810b14a9
|
[
"MIT"
] | null | null | null |
from bigquery_dl import main
main()
| 11.5
| 37
| 0.804348
|
12d42408db8b9224ca5e55329a9030c28d1aa9bb
| 19,215
|
py
|
Python
|
mi/idk/test/test_package.py
|
cdobs/mi-instrument
|
99f9322a4afabc5dff9b0fad12166075efce838c
|
[
"BSD-2-Clause"
] | 1
|
2018-09-14T23:28:29.000Z
|
2018-09-14T23:28:29.000Z
|
mi/idk/test/test_package.py
|
cdobs/mi-instrument
|
99f9322a4afabc5dff9b0fad12166075efce838c
|
[
"BSD-2-Clause"
] | 33
|
2017-04-25T19:53:45.000Z
|
2022-03-18T17:42:18.000Z
|
mi/idk/test/test_package.py
|
cdobs/mi-instrument
|
99f9322a4afabc5dff9b0fad12166075efce838c
|
[
"BSD-2-Clause"
] | 31
|
2015-03-04T01:01:09.000Z
|
2020-10-28T14:42:12.000Z
|
# CURRENTLY UNUSED
# KEPT IN CASE WE WANT TO RESTORE THE PACKAGE BUILDING CODE
# #!/usr/bin/env python
#
# """
# @package mi.idk.test.test_package
# @file mi.idk/test/test_package.py
# @author Bill French
# @brief test file package process
# """
#
# __author__ = 'Bill French'
# __license__ = 'Apache 2.0'
#
# import sys
# import pkg_resources
#
# from shutil import rmtree
# from os.path import basename, dirname
# from os import makedirs, path, remove
# from os.path import exists
# from zipfile import ZipFile
# from shutil import copyfile
#
# from nose.plugins.attrib import attr
# from mock import Mock
# import unittest
# from mi.core.unit_test import MiUnitTest
# from time import sleep
#
# from mi.core.log import get_logger ; log = get_logger()
# #from mi.core.log import log
# from mi.idk.metadata import Metadata
# from mi.idk.driver_generator import DriverGenerator
#
# from mi.idk.exceptions import NotPython
# from mi.idk.exceptions import NoRoot
# from mi.idk.exceptions import FileNotFound
# from mi.idk.exceptions import ValidationFailure
#
# from mi.idk.config import Config
# from mi.idk.egg_generator import DriverFileList
# from mi.idk.egg_generator import DependencyList
# from mi.idk.egg_generator import EggGenerator
#
#
# ROOTDIR="/tmp/test_package.idk_test"
# # /tmp is a link on OS X
# if exists("/private/tmp"):
# ROOTDIR = "/private%s" % ROOTDIR
# TESTDIR="%s/mi/foo" % ROOTDIR
# TESTBASEDIR="%s/mi" % ROOTDIR
#
#
# @attr('UNIT', group='mi')
# class IDKPackageNose(MiUnitTest):
# """
# Base class for IDK Package Tests
# """
# def setUp(self):
# """
# Setup the test case
# """
# # Our test path needs to be in the python path for SnakeFood to work.
# sys.path = ["%s/../.." % TESTDIR] + sys.path
#
# self.write_basefile()
# self.write_implfile()
# self.write_nosefile()
# self.write_resfile()
#
# def write_basefile(self):
# """
# Create all of the base python modules. These files live in the same root
# and should be reported as internal dependencies
# """
#
# destdir = dirname(self.basefile())
# if not exists(destdir):
# makedirs(destdir)
#
# ofile = open(self.basefile(), "w")
# ofile.write( "class MiFoo():\n")
# ofile.write( " def __init__():\n")
# ofile.write( " pass\n\n")
# ofile.close()
#
# # base2.py is a simple python module with no dependencies
# initfile = self.basefile().replace("base.py", 'base2.py')
# ofile = open(initfile, "w")
# ofile.write( "import mi.base4\n")
# ofile.close()
#
# # base3.py has an external dependency
# initfile = self.basefile().replace("base.py", 'base3.py')
# ofile = open(initfile, "w")
# ofile.write( "import string\n\n")
# ofile.close()
#
# # base4.py has an circular dependency
# initfile = self.basefile().replace("base.py", 'base4.py')
# ofile = open(initfile, "w")
# ofile.write( "import base2\n\n")
# ofile.close()
#
# # We need out init file
# initfile = self.basefile().replace("base.py", '__init__.py')
# ofile = open(initfile, "w")
# ofile.write("")
# ofile.close()
#
# def write_implfile(self):
# """
# The impl.py file is the target of our test. All tests will report the
# dependencies of this file.
# """
# destdir = dirname(self.implfile())
# if not exists(destdir):
# makedirs(destdir)
#
# # Write a base file
# ofile = open(self.implfile(), "w")
#
# # Test various forms of import. MiFoo is a class defined in base.py
# # The rest are py file imports.
# ofile.write( "from mi.base import MiFoo\n")
# ofile.write( "import mi.base2\n")
# ofile.write( "from mi import base3\n\n")
# ofile.close()
#
# # Add a pyc file to ignore
# initfile = self.implfile().replace("impl.py", 'impl.pyc')
# ofile = open(initfile, "w")
# ofile.close()
#
# # Ensure we have an import in an __init__ py file
# initfile = self.implfile().replace("impl.py", '__init__.py')
# ofile = open(initfile, "w")
# ofile.close()
#
# def write_nosefile(self):
# """
# The test.py file is the target of our test. All tests will report the
# dependencies of this file.
# """
# destdir = dirname(self.nosefile())
# if not exists(destdir):
# makedirs(destdir)
#
# # Write a base test file
# ofile = open(self.nosefile(), "w")
# ofile.close()
#
# # Ensure we have an import in an __init__ py file
# initfile = self.nosefile().replace("test_process.py", '__init__.py')
# ofile = open(initfile, "w")
# ofile.close()
#
# def write_resfile(self):
# """
# The impl.py file is the target of our test. All tests will report the
# dependencies of this file.
# """
# destdir = dirname(self.resfile())
# #log.debug(self.resfile())
# if not exists(destdir):
# makedirs(destdir)
#
# # Write a base file
# ofile = open(self.resfile(), "w")
#
# # Test various forms of import. MiFoo is a class defined in base.py
# # The rest are py file imports.
# ofile.write( "hello world\n")
# ofile.close()
#
# def basefile(self):
# """
# The main base python file imported by the target file.
# """
# return "%s/%s" % (TESTBASEDIR, "base.py")
#
# def implfile(self):
# """
# The main python we will target for the tests
# """
# return "%s/%s" % (TESTDIR, "impl.py")
#
# def nosefile(self):
# """
# The main test python we will target for the tests
# """
# return "%s/%s" % (TESTDIR, "test/test_process.py")
#
# def resfile(self):
# """
# The main test resource we will target for the tests
# """
# return "%s/%s" % (TESTDIR, "res/test_file")
#
#
# @attr('UNIT', group='mi')
# class TestDependencyList(IDKPackageNose):
# """
# Test the DependencyList object that uses the snakefood module.
# """
# def test_exceptions(self):
# """
# Test all of the failure states for DependencyList
# """
# generator = None
# try:
# generator = DependencyList("this_file_does_not_exist.foo")
# except FileNotFound, e:
# self.assertTrue(e)
# self.assertFalse(generator)
#
# generator = None
# try:
# generator = DependencyList("/etc/hosts")
# except NotPython, e:
# self.assertTrue(e)
# self.assertFalse(generator)
#
#
# def test_internal_dependencies(self):
# """
# Test internal the dependency lists. This should include
# all of the files we created in setUp()
# """
# generator = DependencyList(self.implfile())
# root_list = generator.internal_roots()
# dep_list = generator.internal_dependencies()
#
# self.assertTrue(ROOTDIR in root_list)
#
# internal_deps = [
# "mi/base.py",
# "mi/base2.py",
# "mi/base3.py",
# "mi/base4.py",
# "mi/foo/impl.py",
# ]
#
# self.assertEqual(internal_deps, dep_list)
#
# def test_internal_dependencies_with_init(self):
# """
# Test internal the dependency lists. This should include
# all of the files we created in setUp()
# """
# generator = DependencyList(self.implfile(), include_internal_init = True)
# root_list = generator.internal_roots()
# dep_list = generator.internal_dependencies()
#
# self.assertTrue(ROOTDIR in root_list)
#
# internal_deps = [
# "mi/__init__.py",
# "mi/base.py",
# "mi/base2.py",
# "mi/base3.py",
# "mi/base4.py",
# "mi/foo/__init__.py",
# "mi/foo/impl.py",
# ]
#
# self.assertEqual(internal_deps, dep_list)
#
# def test_internal_test_dependencies_with_init(self):
# """
# Test internal the dependency lists for the unit test.
# """
# generator = DependencyList(self.nosefile(), include_internal_init = True)
# root_list = generator.internal_roots()
# dep_list = generator.internal_dependencies()
#
# self.assertTrue(ROOTDIR in root_list)
#
# internal_deps = [
# "mi/__init__.py",
# "mi/foo/__init__.py",
# "mi/foo/test/__init__.py",
# "mi/foo/test/test_process.py",
# ]
#
# self.assertEqual(internal_deps, dep_list)
#
#
# def test_external_dependencies(self):
# """
# Test external the dependency lists. This should exclude
# all of the files we created in setUp()
# """
# generator = DependencyList(self.implfile())
# root_list = generator.external_roots()
# dep_list = generator.external_dependencies()
#
# self.assertFalse(ROOTDIR in root_list)
#
# self.assertFalse("mi/base4.py" in dep_list)
# self.assertFalse("mi/base3.py" in dep_list)
# self.assertFalse("mi/base2.py" in dep_list)
# self.assertFalse("mi/foo/impl.py" in dep_list)
# self.assertFalse("mi/base.py" in dep_list)
# self.assertTrue("string.py" in dep_list)
#
# def test_all_dependencies(self):
# """
# Test the full dependency lists. This should exclude
# all of the files we created in setUp()
# """
# generator = DependencyList(self.implfile())
# root_list = generator.all_roots()
# dep_list = generator.all_dependencies()
#
# self.assertTrue(ROOTDIR in root_list)
#
# self.assertTrue("mi/base4.py" in dep_list)
# self.assertTrue("mi/base3.py" in dep_list)
# self.assertTrue("mi/base2.py" in dep_list)
# self.assertTrue("mi/foo/impl.py" in dep_list)
# self.assertTrue("mi/base.py" in dep_list)
# self.assertTrue("string.py" in dep_list)
#
#
# @attr('UNIT', group='mi')
# class TestDriverFileList(IDKPackageNose):
# """
# Test the driver file list object. The driver file list is what is
# stored in the driver egg
# """
# def test_extra_list(self):
# """
# Find all the files in the driver directory
# """
# rootdir = dirname(TESTDIR)
# filelist = DriverFileList(Metadata(), ROOTDIR, self.implfile(), self.nosefile())
# self.assertTrue(filelist)
#
# known_files = [
# '%s/res/test_file' % TESTDIR,
# ]
#
# files = filelist._extra_files()
#
# #log.debug(sorted(files))
# #log.debug(sorted(known_files))
#
# self.assertEqual(sorted(files), sorted(known_files))
#
#
# def test_list(self):
# """
# Test the full file manifest
# """
# filelist = DriverFileList(Metadata(), ROOTDIR, self.implfile(), self.nosefile())
# self.assertTrue(filelist)
#
# known_files = [
# 'mi/__init__.py',
# 'mi/base.py',
# 'mi/base2.py',
# 'mi/base3.py',
# 'mi/base4.py',
# 'mi/foo/__init__.py',
# 'mi/foo/impl.py',
# 'mi/foo/res/test_file',
# 'mi/foo/test/__init__.py',
# 'mi/foo/test/test_process.py'
# ]
#
# files = filelist.files()
# #log.debug( "F: %s" % files)
#
# self.assertEqual(sorted(files), sorted(known_files))
#
# @unittest.skip("skip until all baseclass work complete")
# def test_sbe37_list(self):
# metadata = Metadata('seabird', 'sbe37smb', 'ooicore')
# filelist = DriverFileList(metadata, Config().get('working_repo'))
# known_files = ['mi/instrument/seabird/sbe37smb/ooicore/comm_config.yml',
# 'mi/instrument/seabird/sbe37smb/ooicore/metadata.yml',
# 'mi/__init__.py',
# 'mi/core/__init__.py',
# 'mi/core/common.py',
# 'mi/core/exceptions.py',
# 'mi/core/instrument/__init__.py',
# 'mi/core/instrument/data_particle.py',
# 'mi/core/instrument/instrument_driver.py',
# 'mi/core/instrument/instrument_fsm.py',
# 'mi/core/instrument/instrument_protocol.py',
# 'mi/core/instrument/protocol_param_dict.py',
# 'mi/instrument/__init__.py',
# 'mi/instrument/seabird/__init__.py',
# 'mi/instrument/seabird/sbe37smb/__init__.py',
# 'mi/instrument/seabird/sbe37smb/ooicore/__init__.py',
# 'mi/instrument/seabird/sbe37smb/ooicore/driver.py',
# 'mi/core/instrument/driver_client.py',
# 'mi/core/instrument/driver_process.py',
# 'mi/core/instrument/zmq_driver_client.py',
# 'mi/core/instrument/zmq_driver_process.py',
# 'mi/idk/__init__.py',
# 'mi/idk/comm_config.py',
# 'mi/idk/common.py',
# 'mi/idk/config.py',
# 'mi/idk/exceptions.py',
# 'mi/idk/prompt.py',
# 'mi/core/log.py',
# 'mi/core/tcp_client.py',
# 'mi/core/unit_test.py',
# 'mi/idk/util.py',
# 'mi/idk/instrument_agent_client.py',
# 'mi/core/instrument/port_agent_client.py',
# 'mi/core/instrument/logger_client.py',
# 'mi/idk/unit_test.py',
# 'mi/instrument/seabird/sbe37smb/ooicore/test/__init__.py',
# 'mi/instrument/seabird/sbe37smb/ooicore/test/test_driver.py']
# self.maxDiff = None
# files = filelist.files()
# log.debug("FILES = " + str(sorted(files)))
# self.assertEqual(sorted(files), sorted(known_files))
#
# @attr('UNIT', group='mi')
# class TestDriverEggGenerator(IDKPackageNose):
# """
# Test the egg generation process
# """
# def setUp(self):
# IDKPackageNose.setUp(self)
#
# self._repo_dir = Config().get('working_repo')
# self._tmp_dir = Config().get('tmp_dir')
#
# self._metadata = Metadata('seabird', 'sbe37smb', 'ooicore', '.')
# self._generator = EggGenerator(self._metadata, self._repo_dir)
#
#         # Ensure the base build dir doesn't exist
# build_dir = path.join(self._generator._tmp_dir(), self._generator._build_name())
# if exists(build_dir):
# rmtree(build_dir)
# self._generator._generate_build_dir()
#
# def tearDown(self):
# IDKPackageNose.tearDown(self)
# if exists(self._generator._build_dir()):
# rmtree(self._generator._build_dir())
#
#
# def test_path(self):
# """
# Test the object paths
# """
# known_name = "%s_%s_%s_%s" % (
# self._metadata.driver_make,
# self._metadata.driver_model,
# self._metadata.driver_name,
# self._metadata.version.replace('.', '_'),
# )
#
# self.assertEqual(self._generator._tmp_dir(), self._tmp_dir)
# self.assertEqual(self._generator._setup_path(), path.join(self._tmp_dir,self._generator._build_name(),'setup.py'))
# self.assertEqual(self._generator._build_name(), known_name)
# self.assertEqual(self._generator._build_dir(), path.join(self._tmp_dir,self._generator._build_name()))
#
# def test_build_dir_create(self):
# """
# test to ensure that the build dir is created properly
# """
# build_dir_orig = self._generator._generate_build_dir()
# self.assertFalse(exists(build_dir_orig))
# makedirs(build_dir_orig)
# self.assertTrue(exists(build_dir_orig))
#
# build_dir = self._generator._generate_build_dir()
#
# rmtree(build_dir_orig, True)
# self.assertFalse(exists(build_dir_orig))
#
# self.assertEqual(build_dir, build_dir_orig)
#
#
# def test_version_verify(self):
# with self.assertRaises(ValidationFailure):
# self._generator._verify_version(0)
#
# with self.assertRaises(ValidationFailure):
# self._generator._verify_version("5.1")
#
# with self.assertRaises(ValidationFailure):
# self._generator._verify_version(-1)
#
# with self.assertRaises(ValidationFailure):
# self._generator._verify_version("-1.1.1")
#
# self._generator._verify_version("1.1.1")
#
#
# def test_egg_build(self):
# '''
# Build an egg with some python source files. Verify the
# egg was created properly and contains all expected files.
# @return:
# '''
# # files = [ 'mi/__init__.py',
# # 'mi/idk/__init__.py',
# # 'mi/idk/config.py',
# # 'res/config/mi-logging.yml',
# # 'res/config/__init__.py',
# # 'res/__init__.py'
# # ]
# #
# # egg_files = [
# # 'EGG-INFO/dependency_links.txt',
# # 'EGG-INFO/entry_points.txt',
# # 'EGG-INFO/PKG-INFO',
# # 'EGG-INFO/requires.txt',
# # 'EGG-INFO/SOURCES.txt',
# # 'EGG-INFO/top_level.txt',
# # 'EGG-INFO/zip-safe',
# # 'mi/main.py',
# # ]
# # log.error(repr(files))
# # egg_file = self._generator._build_egg(files)
# # self.assertTrue(exists(egg_file))
# #
# # # Verify that the files in the egg are what we expect.
# # zipped = ZipFile(egg_file)
# #
# # # this files is actually moved to mi/mi-logging.yml and appears
# # # in the egg_files list.
# # #files.remove('res/config/mi-logging.yml')
# #
# # log.debug("EGG FILES: %s", sorted(zipped.namelist()))
# # log.debug("EXP FILES: %s", sorted(files + egg_files))
# #
# # self.assertListEqual(sorted(zipped.namelist()), sorted(files + egg_files))
# pass
#
# def test_sbe37_egg(self):
# # egg_file = self._generator.save()
# # self.assertTrue(exists(egg_file))
# pass
| 35.256881
| 124
| 0.546448
|
491b796405d82ca7b8ec9958b006fe35eb6e1cf1
| 18,347
|
py
|
Python
|
pysmappee/mqtt.py
|
smappee/pysmappee
|
d8d8e0025507f89fcbf967ce0a338b4aba612d58
|
[
"MIT"
] | 1
|
2020-06-30T08:50:10.000Z
|
2020-06-30T08:50:10.000Z
|
pysmappee/mqtt.py
|
smappee/pysmappee
|
d8d8e0025507f89fcbf967ce0a338b4aba612d58
|
[
"MIT"
] | 6
|
2020-07-02T13:03:20.000Z
|
2021-06-09T22:48:00.000Z
|
pysmappee/mqtt.py
|
smappee/pysmappee
|
d8d8e0025507f89fcbf967ce0a338b4aba612d58
|
[
"MIT"
] | 5
|
2020-09-10T07:37:47.000Z
|
2022-03-29T04:40:11.000Z
|
"""Support for cloud and local Smappee MQTT."""
import json
import threading
import socket
import time
import traceback
import schedule
import uuid
from functools import wraps
import paho.mqtt.client as mqtt
from .config import config
TRACKING_INTERVAL = 60 * 5
HEARTBEAT_INTERVAL = 60 * 1
def tracking(func):
# Decorator to reactivate trackers
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self._kind == 'central':
if time.time() - self._last_tracking > TRACKING_INTERVAL:
self._publish_tracking()
if time.time() - self._last_heartbeat > HEARTBEAT_INTERVAL:
self._publish_heartbeat()
return func(*args, **kwargs)
return wrapper
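# Illustration of the throttle above (commented out so importing this module
# stays side-effect free): a dummy stand-in object shows that the wrapped
# call republishes tracking/heartbeat only once the intervals have elapsed.
# The _Dummy class is hypothetical, for demonstration only.
#
# class _Dummy:
#     _kind = 'central'
#     _last_tracking = 0
#     _last_heartbeat = 0
#     def _publish_tracking(self):
#         self._last_tracking = time.time()
#     def _publish_heartbeat(self):
#         self._last_heartbeat = time.time()
#     @tracking
#     def on_message(self):
#         pass
#
# d = _Dummy()
# d.on_message()  # first call publishes both; calls within the intervals do not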
class SmappeeMqtt(threading.Thread):
"""Smappee MQTT wrapper."""
def __init__(self, service_location, kind, farm):
self._client = None
self._service_location = service_location
self._kind = kind
self._farm = farm
self._client_id = f"pysmappee-{self._service_location.service_location_uuid}-{self._kind}-{uuid.uuid4()}"
self._last_tracking = 0
self._last_heartbeat = 0
threading.Thread.__init__(
self,
name=f'SmappeeMqttListener_{self._service_location.service_location_uuid}'
)
@property
def topic_prefix(self):
return f'servicelocation/{self._service_location.service_location_uuid}'
@tracking
def _on_connect(self, client, userdata, flags, rc):
if self._kind == 'local':
self._client.subscribe(topic='#')
else:
self._client.subscribe(topic=f'{self.topic_prefix}/#')
self._schedule_tracking_and_heartbeat()
def _schedule_tracking_and_heartbeat(self):
schedule.every(60).seconds.do(lambda: self._publish_tracking())
schedule.every(60).seconds.do(lambda: self._publish_heartbeat())
def _publish_tracking(self):
# turn OFF current tracking and restore
self._client.publish(
topic=f"{self.topic_prefix}/tracking",
payload=json.dumps({
"value": "OFF",
"clientId": self._client_id,
"serialNumber": self._service_location.device_serial_number,
"type": "RT_VALUES",
})
)
time.sleep(2)
self._client.publish(
topic=f"{self.topic_prefix}/tracking",
payload=json.dumps({
"value": "ON",
"clientId": self._client_id,
"serialNumber": self._service_location.device_serial_number,
"type": "RT_VALUES",
})
)
self._last_tracking = time.time()
def _publish_heartbeat(self):
self._client.publish(
topic=f"{self.topic_prefix}/homeassistant/heartbeat",
payload=json.dumps({
"serviceLocationId": self._service_location.service_location_id,
})
)
self._last_heartbeat = time.time()
def _on_disconnect(self, client, userdata, rc):
pass
@tracking
def _on_message(self, client, userdata, message):
try:
#print('{0} - Processing {1} MQTT message from topic {2} with value {3}'.format(self._service_location.service_location_id, self._kind, message.topic, message.payload))
# realtime central power values
if message.topic == f'{self.topic_prefix}/power':
power_data = json.loads(message.payload)
self._service_location._update_power_data(power_data=power_data)
# realtime local power values
elif message.topic == f'{self.topic_prefix}/realtime':
realtime_data = json.loads(message.payload)
self._service_location._update_realtime_data(realtime_data=realtime_data)
# powerquality
elif message.topic == f'{self.topic_prefix}/powerquality':
pass
# tracking and heartbeat
elif message.topic == f'{self.topic_prefix}/tracking':
pass
elif message.topic == f'{self.topic_prefix}/homeassistant/heartbeat':
pass
# config topics
elif message.topic == f'{self.topic_prefix}/config':
config_details = json.loads(message.payload)
self._service_location.firmware_version = config_details.get('firmwareVersion')
self._service_location._service_location_uuid = config_details.get('serviceLocationUuid')
self._service_location._service_location_id = config_details.get('serviceLocationId')
elif message.topic == f'{self.topic_prefix}/sensorConfig':
pass
elif message.topic == f'{self.topic_prefix}/homeControlConfig':
pass
# aggregated consumption values
elif message.topic == f'{self.topic_prefix}/aggregated':
pass
# presence topic
elif message.topic == f'{self.topic_prefix}/presence':
presence = json.loads(message.payload)
self._service_location.is_present = presence.get('value')
# trigger topic
elif message.topic == f'{self.topic_prefix}/trigger':
pass
elif message.topic == f'{self.topic_prefix}/trigger/appliance':
pass
elif message.topic == f'{self.topic_prefix}/triggerpush':
pass
elif message.topic == f'{self.topic_prefix}/triggervalue':
pass
# harmonic vectors
elif message.topic == f'{self.topic_prefix}/h1vector':
pass
# nilm
elif message.topic == f'{self.topic_prefix}/nilm':
pass
# controllable nodes (general messages)
elif message.topic == f'{self.topic_prefix}':
msg = json.loads(message.payload)
# turn ON/OFF comfort plug
if msg.get('messageType') == 1283:
id = msg['content']['controllableNodeId']
plug_state = msg['content']['action']
plug_state_since = int(msg['content']['timestamp'] / 1000)
self._service_location.set_actuator_state(id=id,
state=plug_state,
since=plug_state_since,
api=False)
# smart device and ETC topics
elif message.topic.startswith(f'{self.topic_prefix}/etc/'):
pass
# specific HASS.io topics
elif message.topic == f'{self.topic_prefix}/homeassistant/event':
pass
elif message.topic == f'{self.topic_prefix}/homeassistant/trigger/etc':
pass
elif message.topic.startswith(f'{self.topic_prefix}/outputmodule/'):
pass
elif message.topic == f'{self.topic_prefix}/scheduler':
pass
# actuator topics
elif message.topic.startswith(f'{self.topic_prefix}/plug/'):
plug_id = int(message.topic.split('/')[-2])
payload = json.loads(message.payload)
plug_state, plug_state_since = payload.get('value'), payload.get('since')
state_type = message.topic.split('/')[-1]
if state_type == 'state' and self._kind == 'central':  # TODO: remove the "and self._kind == 'central'" condition
self._service_location.set_actuator_state(id=plug_id,
state=plug_state,
since=plug_state_since,
api=False)
elif state_type == 'connectionState':
self._service_location.set_actuator_connection_state(id=plug_id,
connection_state=plug_state,
since=plug_state_since)
elif config['MQTT']['discovery']:
print(message.topic, message.payload)
except Exception:
traceback.print_exc()
def start(self):
self._client = mqtt.Client(client_id=self._client_id)
if self._kind == 'central':
self._client.username_pw_set(username=self._service_location.service_location_uuid,
password=self._service_location.service_location_uuid)
self._client.on_connect = lambda client, userdata, flags, rc: self._on_connect(client, userdata, flags, rc)
self._client.on_message = lambda client, userdata, message: self._on_message(client, userdata, message)
self._client.on_disconnect = lambda client, userdata, rc: self._on_disconnect(client, userdata, rc)
# self._client.tls_set(None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1)
if self._kind == 'central':
self._client.connect(host=config['MQTT'][self._farm]['host'],
port=config['MQTT'][self._farm]['port'])
elif self._kind == 'local':
try:
self._client.connect(host=f'smappee{self._service_location.device_serial_number}.local',
port=config['MQTT']['local']['port'])
except socket.gaierror as _:
# unable to connect to local Smappee device (host unavailable)
return
except socket.timeout as _:
return
self._client.loop_start()
def stop(self):
self._client.loop_stop()
class SmappeeLocalMqtt(threading.Thread):
"""Smappee local MQTT wrapper."""
def __init__(self, serial_number=None):
self._client = None
self.service_location = None
self._serial_number = serial_number
self._service_location_id = None
self._service_location_uuid = None
threading.Thread.__init__(
self,
name=f'SmappeeLocalMqttListener_{self._serial_number}'
)
self.realtime = {}
self.phase_type = None
self.measurements = {}
self.switch_sensors = []
self.smart_plugs = []
self.actuators_connection_state = {}
self.actuators_state = {}
self._timezone = None
@property
def topic_prefix(self):
return f'servicelocation/{self._service_location_uuid}'
def _on_connect(self, client, userdata, flags, rc):
self._client.subscribe(topic='#')
def _on_disconnect(self, client, userdata, rc):
pass
def _get_client_id(self):
return f"smappeeLocalMQTT-{self._serial_number}"
def _on_message(self, client, userdata, message):
try:
# realtime local power values
if message.topic.endswith('/realtime'):
self.realtime = json.loads(message.payload)
if self.service_location is not None:
self.service_location._update_realtime_data(realtime_data=self.realtime)
elif message.topic.endswith('/config'):
c = json.loads(message.payload)
self._timezone = c.get('timeZone')
self._service_location_id = c.get('serviceLocationId')
self._service_location_uuid = c.get('serviceLocationUuid')
self._serial_number = c.get('serialNumber')
elif message.topic.endswith('channelConfig'):
pass
elif message.topic.endswith('/channelConfigV2'):
self._channel_config = json.loads(message.payload)
self.phase_type = self._channel_config.get('dataProcessingSpecification', {}).get('phaseType', None)
# extract measurements from channelConfigV2
measurements_dict = {}
for m in self._channel_config.get('dataProcessingSpecification', {}).get('measurements', []):
if m.get('flow') == 'CONSUMPTION' and m.get('connectionType') == 'SUBMETER':
if m['name'] not in measurements_dict:
measurements_dict[m['name']] = []
measurements_dict[m['name']].append(m['publishIndex'])
elif m.get('flow') == 'CONSUMPTION' and m.get('connectionType') == 'GRID':
if 'Grid' not in measurements_dict:
measurements_dict['Grid'] = []
measurements_dict['Grid'].append(m['publishIndex'])
elif m.get('flow') == 'PRODUCTION' and m.get('connectionType') == 'GRID':
if 'Solar' not in measurements_dict:
measurements_dict['Solar'] = []
measurements_dict['Solar'].append(m['publishIndex'])
self.measurements = {}
for m_name, m_index in measurements_dict.items():
self.measurements[m_name] = list(set(m_index))
elif message.topic.endswith('/sensorConfig'):
pass
elif message.topic.endswith('/homeControlConfig'):
# switches
switches = json.loads(message.payload).get('switchActuators', [])
for switch in switches:
if switch['serialNumber'].startswith('4006'):
self.switch_sensors.append({
'nodeId': switch['nodeId'],
'name': switch['name'],
'serialNumber': switch['serialNumber']
})
# plugs
plugs = json.loads(message.payload).get('smartplugActuators', [])
for plug in plugs:
self.smart_plugs.append({
'nodeId': plug['nodeId'],
'name': plug['name']
})
elif message.topic.endswith('/presence'):
pass
elif message.topic.endswith('/aggregated'):
pass
elif message.topic.endswith('/aggregatedGW'):
pass
elif message.topic.endswith('/aggregatedSwitch'):
pass
elif message.topic.endswith('/etc/measuredvalues'):
pass
elif message.topic.endswith('/networkstatistics'):
pass
elif message.topic.endswith('/scheduler'):
pass
elif message.topic.endswith('/devices'):
pass
elif message.topic.endswith('/action/setcurrent'):
pass
elif message.topic.endswith('/trigger'):
pass
elif message.topic.endswith('/connectionState'):
actuator_id = int(message.topic.split('/')[-2])
self.actuators_connection_state[actuator_id] = json.loads(message.payload).get('value')
elif message.topic.endswith('/state'):
actuator_id = int(message.topic.split('/')[-2])
self.actuators_state[actuator_id] = json.loads(message.payload).get('value')
if self.service_location is not None:
self.service_location.set_actuator_state(
id=actuator_id,
state='{0}_{0}'.format(self.actuators_state[actuator_id]),
api=False
)
elif message.topic.endswith('/setstate'):
actuator_id = int(message.topic.split('/')[-2])
p = str(message.payload.decode('utf-8')).replace("\'", "\"")
self.actuators_state[actuator_id] = json.loads(p).get('value')
elif config['MQTT']['discovery']:
print('Processing MQTT message from topic {0} with value {1}'.format(message.topic, message.payload))
except Exception:
traceback.print_exc()
def set_actuator_state(self, service_location_id, actuator_id, state_id):
state = None
if state_id == 'ON_ON':
state = 'ON'
elif state_id == 'OFF_OFF':
state = 'OFF'
if state is not None:
self._client.publish(
topic="servicelocation/{0}/plug/{1}/setstate".format(self._service_location_uuid, actuator_id),
payload=json.dumps({"value": state})
)
def is_config_ready(self, timeout=60, interval=5):
c = 0
while c < timeout:
if self.phase_type is not None and self._serial_number is not None:
return self._serial_number
c += interval
time.sleep(interval)
def start_and_wait_for_config(self):
self.start()
return self.is_config_ready()
def start_attempt(self):
client = mqtt.Client(client_id='smappeeLocalMqttConnectionAttempt')
try:
client.connect(host=f'smappee{self._serial_number}.local', port=config['MQTT']['local']['port'])
except Exception:
return False
return True
def start(self):
self._client = mqtt.Client(client_id=self._get_client_id())
self._client.on_connect = lambda client, userdata, flags, rc: self._on_connect(client, userdata, flags, rc)
self._client.on_message = lambda client, userdata, message: self._on_message(client, userdata, message)
self._client.on_disconnect = lambda client, userdata, rc: self._on_disconnect(client, userdata, rc)
# self._client.tls_set(None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1)
try:
self._client.connect(host=f'smappee{self._serial_number}.local', port=config['MQTT']['local']['port'])
except socket.gaierror as _:
# unable to connect to local Smappee device (host unavailable)
return
except socket.timeout as _:
return
self._client.loop_start()
def stop(self):
self._client.loop_stop()
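# Hypothetical usage sketch (commented out; the serial number and actuator id
# below are placeholders, not real devices): discover and drive a local
# Smappee monitor over MQTT using only the API surface defined above.
#
# if __name__ == '__main__':
#     local = SmappeeLocalMqtt(serial_number='1234567890')
#     if local.start_attempt() and local.start_and_wait_for_config():
#         print(local.phase_type, local.measurements)
#         local.set_actuator_state(None, actuator_id=1, state_id='ON_ON')
#         local.stop()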
| 42.568445
| 180
| 0.564942
|
f48f1ef2dd22f73c7ad678ede68dcca81f4bd544
| 4,007
|
py
|
Python
|
syn/types/a/sequence.py
|
mbodenhamer/syn
|
aeaa3ad8a49bac8f50cf89b6f1fe97ad43d1d258
|
[
"MIT"
] | 1
|
2021-07-15T08:55:12.000Z
|
2021-07-15T08:55:12.000Z
|
syn/types/a/sequence.py
|
mbodenhamer/syn
|
aeaa3ad8a49bac8f50cf89b6f1fe97ad43d1d258
|
[
"MIT"
] | 7
|
2021-01-07T23:51:57.000Z
|
2021-12-13T19:50:57.000Z
|
syn/types/a/sequence.py
|
mbodenhamer/syn
|
aeaa3ad8a49bac8f50cf89b6f1fe97ad43d1d258
|
[
"MIT"
] | 2
|
2016-07-11T08:46:31.000Z
|
2017-12-13T13:30:51.000Z
|
import collections
try:  # collections.Sequence moved to collections.abc in Python 3.3
    from collections.abc import Sequence as _SequenceABC
except ImportError:  # Python 2 fallback
    from collections import Sequence as _SequenceABC
from syn.five import xrange
from syn.base_utils import rand_list, rand_tuple, get_fullname, tuple_prepend, \
get_typename, escape_for_eval
from .base import Type, hashable, deserialize, serialize, SER_KEYS, rstr, \
estr, primitive_form, collect
from syn.base_utils.rand import SEQ_TYPES, MAX_DEPTH, PRIMITIVE_TYPES
from .ne import DiffersAtIndex, DifferentLength
#-------------------------------------------------------------------------------
# Utilities
def list_enumval(x, **kwargs):
top_level = kwargs.get('top_level', True)
if top_level:
if x == 0:
return []
kwargs['top_level'] = False
return list_enumval(x - 1, **kwargs)
depth = kwargs.get('depth', 0)
max_depth = kwargs.get('max_depth', MAX_DEPTH)
types = kwargs.get('types', SEQ_TYPES)
if depth >= max_depth:
types = [t for t in types if t in PRIMITIVE_TYPES]
kwargs['depth'] = depth + 1
N = len(types)
i = x % N
j = x // N
l = j + 1
ret = []
for k in xrange(l):
i_k = (i + k) % N
x_k = j + (k // N)
item = Type.type_dispatch(types[i_k])._enumeration_value(x_k, **kwargs)
ret.append(item)
return ret
#-------------------------------------------------------------------------------
# Sequence
class Sequence(Type):
type = _SequenceABC
def _collect(self, func, **kwargs):
ret = [collect(item, func, **kwargs) for item in self.obj]
return func(ret, **kwargs)
@classmethod
def deserialize(cls, seq, **kwargs):
if not isinstance(seq, _SequenceABC):
return super(Sequence, cls).deserialize(seq, **kwargs)
ret = [deserialize(item, **kwargs) for item in seq]
return cls.type(ret)
def estr(self, **kwargs):
parts = [estr(item, **kwargs) for item in self.obj]
ret = '[' + ', '.join(parts) + ']'
ret = '{}({})'.format(get_typename(self.obj), ret)
return escape_for_eval(ret)
@classmethod
def _enumeration_value(cls, x, **kwargs):
return list_enumval(x, **kwargs)
def _find_ne(self, other, func, **kwargs):
for k, item in enumerate(self.obj):
if k >= len(other):
return DifferentLength(self.obj, other)
if not func(item, other[k]):
return DiffersAtIndex(self.obj, other, k)
return DifferentLength(self.obj, other)
def _hashable(self, **kwargs):
tup = tuple([hashable(item) for item in self.obj])
return tuple_prepend(get_fullname(self.obj), tup)
def _rstr(self, **kwargs):
# TODO: add pretty option
parts = [rstr(item, **kwargs) for item in self.obj]
ret = '[' + ', '.join(parts) + ']'
return ret
def _serialize(self, dct, **kwargs):
dct[SER_KEYS.args] = [[serialize(item, **kwargs) for item in self.obj]]
def _visit(self, k, **kwargs):
return self.obj[k]
def _visit_len(self, **kwargs):
return len(self.obj)
#-------------------------------------------------------------------------------
# Sequences
class List(Sequence):
type = list
@classmethod
def _generate(cls, **kwargs):
# TODO: pull types from registry (unless they are marked as
# excluded or included in an exclude parameter set)
return rand_list(**kwargs)
class Tuple(Sequence):
type = tuple
@classmethod
def _enumeration_value(cls, x, **kwargs):
return tuple(super(Tuple, cls)._enumeration_value(x, **kwargs))
@classmethod
def _generate(cls, **kwargs):
return rand_tuple(**kwargs)
def _rstr(self, **kwargs):
ret = super(Tuple, self)._rstr(**kwargs)[1:-1]
return '(' + ret + ')'
#-------------------------------------------------------------------------------
# __all__
__all__ = ('Sequence',
'List', 'Tuple')
#-------------------------------------------------------------------------------
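# Standalone illustration of the index arithmetic in list_enumval (commented
# out; assumes nothing beyond the code above, and N = 3 stands in for a
# hypothetical len(types)): enumeration index x selects a starting type
# i = x % N and a target length l = x // N + 1, so lengths grow as x sweeps
# past multiples of N.
#
# N = 3
# for x in range(7):
#     i, j = x % N, x // N
#     print(x, '-> start type', i, 'length', j + 1)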
| 29.036232
| 80
| 0.541802
|
9b9a86b08dbd6250f139b16f8ae2ab4e80669d2c
| 2,128
|
py
|
Python
|
nltk/metrics/spearman.py
|
dmcc/nltk
|
33c193d2de3876ca89fb08140557e16f01c79c6f
|
[
"Apache-2.0"
] | 1
|
2015-01-25T19:20:11.000Z
|
2015-01-25T19:20:11.000Z
|
nltk/metrics/spearman.py
|
dmcc/nltk
|
33c193d2de3876ca89fb08140557e16f01c79c6f
|
[
"Apache-2.0"
] | 1
|
2018-09-21T22:16:48.000Z
|
2018-09-21T22:16:48.000Z
|
nltk/metrics/spearman.py
|
dmcc/nltk
|
33c193d2de3876ca89fb08140557e16f01c79c6f
|
[
"Apache-2.0"
] | 1
|
2018-09-21T22:10:56.000Z
|
2018-09-21T22:10:56.000Z
|
# Natural Language Toolkit: Spearman Rank Correlation
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Joel Nothman <jnothman@student.usyd.edu.au>
# URL: <http://nltk.org>
# For license information, see LICENSE.TXT
"""
Tools for comparing ranked lists.
"""
def _rank_dists(ranks1, ranks2):
"""Finds the difference between the values in ranks1 and ranks2 for keys
present in both dicts. If the arguments are not dicts, they are converted
from (key, rank) sequences.
"""
ranks1 = dict(ranks1)
ranks2 = dict(ranks2)
for k in ranks1:
try:
yield k, ranks1[k] - ranks2[k]
except KeyError:
pass
def spearman_correlation(ranks1, ranks2):
"""Returns the Spearman correlation coefficient for two rankings, which
should be dicts or sequences of (key, rank). The coefficient ranges from
-1.0 (ranks are opposite) to 1.0 (ranks are identical), and is only
calculated for keys in both rankings (for meaningful results, remove keys
present in only one list before ranking)."""
n = 0
res = 0
for k, d in _rank_dists(ranks1, ranks2):
res += d * d
n += 1
try:
return 1 - (6 * float(res) / (n * (n*n - 1)))
except ZeroDivisionError:
# Result is undefined if only one item is ranked
return 0.0
def ranks_from_sequence(seq):
"""Given a sequence, yields each element with an increasing rank, suitable
for use as an argument to ``spearman_correlation``.
"""
return ((k, i) for i, k in enumerate(seq))
def ranks_from_scores(scores, rank_gap=1e-15):
"""Given a sequence of (key, score) tuples, yields each key with an
increasing rank, tying with previous key's rank if the difference between
their scores is less than rank_gap. Suitable for use as an argument to
``spearman_correlation``.
"""
prev_score = None
rank = 0
for i, (key, score) in enumerate(scores):
try:
if abs(score - prev_score) > rank_gap:
rank = i
except TypeError:
pass
yield key, rank
prev_score = score
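# Worked example (added for illustration): identical rankings give 1.0; fully
# reversed rankings give -1.0. For 'abc' vs 'cba' the rank differences are
# (-2, 0, 2), so rho = 1 - 6*8 / (3*(9-1)) = -1.0.
#
# >>> spearman_correlation(ranks_from_sequence('abc'), ranks_from_sequence('abc'))
# 1.0
# >>> spearman_correlation(ranks_from_sequence('abc'), ranks_from_sequence('cba'))
# -1.0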
| 30.84058
| 78
| 0.645207
|
bbdbaee7b415c6dd410e293838e1e17f568afaf1
| 884
|
py
|
Python
|
actions.py
|
srimtech07/rasa-chat
|
5a0f19bce3d3c9c172a57d5094e52f5ac4ff899c
|
[
"Apache-2.0"
] | 1
|
2020-05-31T12:35:11.000Z
|
2020-05-31T12:35:11.000Z
|
actions.py
|
srimtech07/rasa-chat
|
5a0f19bce3d3c9c172a57d5094e52f5ac4ff899c
|
[
"Apache-2.0"
] | null | null | null |
actions.py
|
srimtech07/rasa-chat
|
5a0f19bce3d3c9c172a57d5094e52f5ac4ff899c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core_sdk.events import SlotSet
import logging
import requests
import json
from rasa_core_sdk import Action
logger = logging.getLogger(__name__)
class ActionJoke(Action):
def name(self):
# define the name of the action which can then be included in training stories
return "action_joke"
def run(self, dispatcher, tracker, domain):
# what your action should do
request = json.loads(
requests.get("https://api.chucknorris.io/jokes/random").text
) # make an api call
joke = request["value"] # extract a joke from returned json response
dispatcher.utter_message(joke) # send the message back to the user
return []
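# Local smoke-test sketch (commented out; _StubDispatcher is hypothetical,
# standing in for Rasa's CollectingDispatcher, and the run needs network
# access to the joke API):
#
# class _StubDispatcher(object):
#     def utter_message(self, text):
#         print(text)
#
# if __name__ == "__main__":
#     ActionJoke().run(_StubDispatcher(), tracker=None, domain=None)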
| 30.482759
| 86
| 0.71267
|
cec00221aaf0f23ce9a14f0c9ee6a0c22494bfa0
| 5,179
|
py
|
Python
|
homeassistant/components/device_tracker/upc_connect.py
|
shire210/home-assistant
|
63cd8bbee6f1b74ae9c6c249ac820119a8a573d8
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/device_tracker/upc_connect.py
|
shire210/home-assistant
|
63cd8bbee6f1b74ae9c6c249ac820119a8a573d8
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/device_tracker/upc_connect.py
|
shire210/home-assistant
|
63cd8bbee6f1b74ae9c6c249ac820119a8a573d8
|
[
"Apache-2.0"
] | null | null | null |
"""
Support for UPC ConnectBox router.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.upc_connect/
"""
import asyncio
import logging
import xml.etree.ElementTree as ET
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
_LOGGER = logging.getLogger(__name__)
DEFAULT_IP = '192.168.0.1'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_IP): cv.string,
})
CMD_LOGIN = 15
CMD_LOGOUT = 16
CMD_DEVICES = 123
@asyncio.coroutine
def async_get_scanner(hass, config):
"""Return the UPC device scanner."""
scanner = UPCDeviceScanner(hass, config[DOMAIN])
success_init = yield from scanner.async_login()
return scanner if success_init else None
class UPCDeviceScanner(DeviceScanner):
"""This class queries a router running UPC ConnectBox firmware."""
def __init__(self, hass, config):
"""Initialize the scanner."""
self.hass = hass
self.host = config[CONF_HOST]
self.password = config[CONF_PASSWORD]
self.data = {}
self.token = None
self.headers = {
'X-Requested-With': 'XMLHttpRequest',
'Referer': "http://{}/index.html".format(self.host),
'User-Agent': ("Mozilla/5.0 (Windows NT 10.0; WOW64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/47.0.2526.106 Safari/537.36")
}
self.websession = async_get_clientsession(hass)
@asyncio.coroutine
def async_logout(event):
"""Logout from upc connect box."""
yield from self._async_ws_function(CMD_LOGOUT)
self.token = None
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, async_logout)
@asyncio.coroutine
def async_scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
if self.token is None:
reconnect = yield from self.async_login()
if not reconnect:
_LOGGER.error("Not connected to %s", self.host)
return []
raw = yield from self._async_ws_function(CMD_DEVICES)
try:
xml_root = ET.fromstring(raw)
return [mac.text for mac in xml_root.iter('MACAddr')]
except (ET.ParseError, TypeError):
_LOGGER.warning("Can't read device from %s", self.host)
self.token = None
return []
@asyncio.coroutine
def async_get_device_name(self, device):
"""The firmware doesn't save the name of the wireless device."""
return None
@asyncio.coroutine
def async_login(self):
"""Login into firmware and get first token."""
try:
# get first token
with async_timeout.timeout(10, loop=self.hass.loop):
response = yield from self.websession.get(
"http://{}/common_page/login.html".format(self.host)
)
yield from response.text()
self.token = response.cookies['sessionToken'].value
# login
data = yield from self._async_ws_function(CMD_LOGIN, {
'Username': 'NULL',
'Password': self.password,
})
# successfull?
return data is not None
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Can not load login page from %s", self.host)
return False
@asyncio.coroutine
def _async_ws_function(self, function, additional_form=None):
"""Execute a command on UPC firmware webservice."""
form_data = {
'token': self.token,
'fun': function
}
if additional_form:
form_data.update(additional_form)
redirects = function != CMD_DEVICES
try:
with async_timeout.timeout(10, loop=self.hass.loop):
response = yield from self.websession.post(
"http://{}/xml/getter.xml".format(self.host),
data=form_data,
headers=self.headers,
allow_redirects=redirects
)
# error?
if response.status != 200:
_LOGGER.warning("Receive http code %d", response.status)
self.token = None
return
# load data, store token for next request
self.token = response.cookies['sessionToken'].value
return (yield from response.text())
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Error on %s", function)
self.token = None
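# Parsing sketch for the getter.xml payload consumed by async_scan_devices
# (commented out; the XML shape below is illustrative only, not the
# ConnectBox's exact schema -- only the MACAddr elements matter to the scanner):
#
# sample = ("<LanUserTable>"
#           "<Entry><MACAddr>AA:BB:CC:DD:EE:FF</MACAddr></Entry>"
#           "<Entry><MACAddr>11:22:33:44:55:66</MACAddr></Entry>"
#           "</LanUserTable>")
# root = ET.fromstring(sample)
# assert [mac.text for mac in root.iter('MACAddr')] == \
#     ['AA:BB:CC:DD:EE:FF', '11:22:33:44:55:66']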
| 31.969136
| 76
| 0.608032
|
2c98bb622e91eebf3f5d0b97e0110ca81fe4fd18
| 374
|
py
|
Python
|
src/clean_tablets.py
|
frc1678/server-2021-public
|
d61e35f8385bf1debc9daaaed40208f6c783ed77
|
[
"MIT"
] | null | null | null |
src/clean_tablets.py
|
frc1678/server-2021-public
|
d61e35f8385bf1debc9daaaed40208f6c783ed77
|
[
"MIT"
] | null | null | null |
src/clean_tablets.py
|
frc1678/server-2021-public
|
d61e35f8385bf1debc9daaaed40208f6c783ed77
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2019 FRC Team 1678: Citrus Circuits
"""Changes font size of tablets for app consistency."""
from data_transfer import adb_communicator
import utils
FILE_PATH = utils.create_file_path('data/tablets')
utils.run_command(f'rm -R {FILE_PATH}', True)
utils.run_command(f'mkdir {FILE_PATH}', True)
adb_communicator.adb_font_size_enforcer()
| 31.166667
| 55
| 0.783422
|
44903edf42db855d8f0b2cb441a562313b1ef1e4
| 2,452
|
py
|
Python
|
peakdetect.py
|
antonjan/LSTM_morse
|
1028af4715c84fc2ff5bacd2e015cecbda2aa9a6
|
[
"MIT"
] | 45
|
2015-12-28T19:41:22.000Z
|
2021-11-01T20:09:42.000Z
|
peakdetect.py
|
antonjan/LSTM_morse
|
1028af4715c84fc2ff5bacd2e015cecbda2aa9a6
|
[
"MIT"
] | 13
|
2019-02-25T05:11:45.000Z
|
2022-02-09T23:30:11.000Z
|
peakdetect.py
|
antonjan/LSTM_morse
|
1028af4715c84fc2ff5bacd2e015cecbda2aa9a6
|
[
"MIT"
] | 24
|
2016-12-16T16:31:26.000Z
|
2021-08-16T03:30:55.000Z
|
import sys
from numpy import NaN, Inf, arange, isscalar, asarray, array
def peakdet(v, delta, x = None):
"""
Converted from MATLAB script at http://billauer.co.il/peakdet.html
Returns two arrays
function [maxtab, mintab]=peakdet(v, delta, x)
%PEAKDET Detect peaks in a vector
% [MAXTAB, MINTAB] = PEAKDET(V, DELTA) finds the local
% maxima and minima ("peaks") in the vector V.
% MAXTAB and MINTAB consists of two columns. Column 1
% contains indices in V, and column 2 the found values.
%
% With [MAXTAB, MINTAB] = PEAKDET(V, DELTA, X) the indices
% in MAXTAB and MINTAB are replaced with the corresponding
% X-values.
%
% A point is considered a maximum peak if it has the maximal
% value, and was preceded (to the left) by a value lower by
% DELTA.
% Eli Billauer, 3.4.05 (Explicitly not copyrighted).
% This function is released to the public domain; Any use is allowed.
"""
maxtab = []
mintab = []
if x is None:
x = arange(len(v))
v = asarray(v)
if len(v) != len(x):
sys.exit('Input vectors v and x must have same length')
if not isscalar(delta):
sys.exit('Input argument delta must be a scalar')
if delta <= 0:
sys.exit('Input argument delta must be positive')
mn, mx = Inf, -Inf
mnpos, mxpos = NaN, NaN
lookformax = True
for i in arange(len(v)):
this = v[i]
if this > mx:
mx = this
mxpos = x[i]
if this < mn:
mn = this
mnpos = x[i]
if lookformax:
if this < mx-delta:
maxtab.append((mxpos, mx))
mn = this
mnpos = x[i]
lookformax = False
else:
if this > mn+delta:
mintab.append((mnpos, mn))
mx = this
mxpos = x[i]
lookformax = True
return array(maxtab), array(mintab)
if __name__=="__main__":
from matplotlib.pyplot import plot, scatter, show
series = [0,0,0,2,0,0,0,-2,0,0,0,2,0,0,0,-2,0]
maxtab, mintab = peakdet(series,.3)
plot(series)
scatter(array(maxtab)[:,0], array(maxtab)[:,1], color='blue')
scatter(array(mintab)[:,0], array(mintab)[:,1], color='red')
show()
| 29.902439
| 73
| 0.53385
|
58a0079e2b1c33438e8a940607465850b2522edf
| 963
|
py
|
Python
|
atom/nucleus/python/test/test_account_allocation_mapping.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
atom/nucleus/python/test/test_account_allocation_mapping.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
atom/nucleus/python/test/test_account_allocation_mapping.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Hydrogen Atom API
The Hydrogen Atom API # noqa: E501
OpenAPI spec version: 1.7.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import nucleus_api
from nucleus_api.models.account_allocation_mapping import AccountAllocationMapping # noqa: E501
from nucleus_api.rest import ApiException
class TestAccountAllocationMapping(unittest.TestCase):
"""AccountAllocationMapping unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAccountAllocationMapping(self):
"""Test AccountAllocationMapping"""
# FIXME: construct object with mandatory attributes with example values
# model = nucleus_api.models.account_allocation_mapping.AccountAllocationMapping() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 23.487805
| 104
| 0.726895
|
ff565a9ffb6b560fc15cf8357e24b52f3ea3e21a
| 460
|
py
|
Python
|
setup.py
|
mvcisback/magnumSTL
|
e48d641118bc9c1fb28be2a38a55654441a78701
|
[
"BSD-3-Clause"
] | 1
|
2016-10-07T20:10:35.000Z
|
2016-10-07T20:10:35.000Z
|
setup.py
|
mvcisback/py-blustl
|
e48d641118bc9c1fb28be2a38a55654441a78701
|
[
"BSD-3-Clause"
] | 15
|
2016-07-01T04:46:09.000Z
|
2017-01-06T22:09:20.000Z
|
setup.py
|
mvcisback/py-blustl
|
e48d641118bc9c1fb28be2a38a55654441a78701
|
[
"BSD-3-Clause"
] | 5
|
2016-12-23T06:12:40.000Z
|
2017-01-10T01:58:27.000Z
|
from setuptools import setup, find_packages
setup(
name='magnumSTL',
version='0.1',
description='TODO',
url='http://github.com/mvcisback/magnumSTL',
author='Marcell Vazquez-Chanlatte',
author_email='marcell.vc@eecs.berkeley.edu',
license='MIT',
install_requires=[
'bidict',
'funcy',
'lenses',
'py-stl',
'optlang',
'pysmt',
'traces'
],
packages=find_packages(),
)
| 20.909091
| 48
| 0.578261
|
526351f88c896dafb6db0d11bf1f3c0a80187352
| 2,012
|
py
|
Python
|
src/main/python/testapi/test_fixture.py
|
photowey/pytest-in-action
|
e3ce03920cd51b418022cac5aca2b187494054a6
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/testapi/test_fixture.py
|
photowey/pytest-in-action
|
e3ce03920cd51b418022cac5aca2b187494054a6
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/testapi/test_fixture.py
|
photowey/pytest-in-action
|
e3ce03920cd51b418022cac5aca2b187494054a6
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*-
# ---------------------------------------------
# @file test_fixture.py
# @description test_fixture
# @author WcJun
# @date 2021/07/16
# ---------------------------------------------
import pytest
# ---------------------------------------------
# @pytest.fixture(scope='', params='', autouse='', ids='', name='')
# scope: function(default) | class | module | package/session
# params: parametrize -> [] | () | [{},{},...,{}] | ({},{},...,{})
# autouse: auto execution default: False
# ids: variable
# name: alias
# ---------------------------------------------
# @pytest.fixture(scope='function', autouse=True)
# def hello_fixture():
# """
# hello_fixture
# """
# print('\nhello_fixture::do pre()')
# yield
# print('\nhello_fixture::do post()')
# ---------------------------------------------
# @pytest.fixture(scope='function', autouse=False, params=['Java', 'Python', 'C++'])
# def hello_fixture(request):
# return request.param
# ---------------------------------------------
@pytest.fixture(
scope='function',
autouse=False,
params=['Java', 'Python', 'Go'], ids=['J', 'P', 'G'],
name='class_post_processor'
)
def hello_fixture(request):
print('\nyield::hello_fixture::do pre()')
yield request.param
print('\nyield::hello_fixture::do post()')
class TestFixture:
"""
test fixture
"""
def test_do_not_fixture(self):
"""
test_do_not_fixture
"""
print('\ntest_do_not_fixture')
def test_do_fixture(self, class_post_processor):
"""
test_do_fixture
"""
print('\ntest_do_fixture::' + str(class_post_processor))
def test_do_auto_fixture(self):
"""
test_do_auto_fixture
"""
print('\ntest_do_auto_fixture')
def test_do_global_fixture(self, global_class_post_processor):
"""
test_do_auto_fixture
"""
print('\ntest_do_global_fixture::' + str(global_class_post_processor))
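# ---------------------------------------------
# Note: global_class_post_processor is not defined in this module, so it is
# presumably provided by a conftest.py. A minimal sketch of what such a
# fixture could look like (an assumption, not the project's actual conftest):
#
# # conftest.py
# import pytest
#
# @pytest.fixture(scope='session', name='global_class_post_processor')
# def global_fixture():
#     yield 'global'
# ---------------------------------------------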
| 25.468354
| 84
| 0.518887
|
b570acc081cb58f82e7bda098df4e08830f2b70b
| 1,496
|
py
|
Python
|
Proyecto/compilation/utils.py
|
leynier/IA-Sim-Com
|
f6e99bb1aa4b02d5d558dc76a9bf802c3761e428
|
[
"MIT"
] | null | null | null |
Proyecto/compilation/utils.py
|
leynier/IA-Sim-Com
|
f6e99bb1aa4b02d5d558dc76a9bf802c3761e428
|
[
"MIT"
] | 1
|
2022-02-11T07:26:54.000Z
|
2022-02-11T07:26:54.000Z
|
Proyecto/compilation/utils.py
|
leynier/IA-Sim-Com
|
f6e99bb1aa4b02d5d558dc76a9bf802c3761e428
|
[
"MIT"
] | null | null | null |
from enum import Enum
from typing import List
from compilation.tokens import Token, TokenType
def split_lines(tokens: List[Token]) -> List[List[Token]]:
t_pointer = 0
current_line = []
lines = []
while t_pointer < len(tokens):
if tokens[t_pointer].token_type in [TokenType.T_IF, TokenType.T_ELIF, TokenType.T_ELSE, TokenType.T_WHILE, TokenType.T_METHOD, TokenType.T_RIDER, TokenType.T_BIKE]:
t_pointer = loop(tokens, t_pointer, current_line, TokenType.T_OPEN_BRACE)
elif tokens[t_pointer].token_type == TokenType.T_CLOSE_BRACE:
current_line = [tokens[t_pointer]]
#lines.append(current_line)
t_pointer += 1
if t_pointer < len(tokens) and (tokens[t_pointer].token_type == TokenType.T_ELIF or tokens[t_pointer].token_type == TokenType.T_ELSE):
t_pointer = loop(tokens, t_pointer, current_line, TokenType.T_OPEN_BRACE)
else:
t_pointer = loop(tokens, t_pointer, current_line, TokenType.T_SEMICOLON)
lines.append(current_line)
current_line = []
return lines
def loop(tokens: List[Token], t_pointer: int, current_line: List[Token], comparator: TokenType) -> int:
while t_pointer < len(tokens):
current_line.append(tokens[t_pointer])
if tokens[t_pointer].token_type == comparator:
return t_pointer + 1
t_pointer += 1
return t_pointer
class Region(Enum):
R_IF = 0
R_ELIF = 1
R_ELSE = 2
R_WHILE = 3
R_METHOD = 4
R_TYPE = 5
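# Usage sketch for split_lines (commented out; the Token constructor
# signature and the T_IDENTIFIER member below are assumptions made for
# illustration -- adapt to the definitions in compilation.tokens):
#
# tokens = [Token(TokenType.T_IDENTIFIER, 'x'),
#           Token(TokenType.T_SEMICOLON, ';'),
#           Token(TokenType.T_IDENTIFIER, 'y'),
#           Token(TokenType.T_SEMICOLON, ';')]
# print(split_lines(tokens))  # two lines, each ending at its semicolon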
| 35.619048
| 172
| 0.660428
|
40e047a2afd9b741dad650f196a58b15b7d27c7e
| 1,054
|
py
|
Python
|
dialog/__init__.py
|
audaciouscode/Dialog-Engine-Django
|
76c9063be1a5bf592142c10ea7d3af4649fa353b
|
[
"Apache-2.0"
] | null | null | null |
dialog/__init__.py
|
audaciouscode/Dialog-Engine-Django
|
76c9063be1a5bf592142c10ea7d3af4649fa353b
|
[
"Apache-2.0"
] | null | null | null |
dialog/__init__.py
|
audaciouscode/Dialog-Engine-Django
|
76c9063be1a5bf592142c10ea7d3af4649fa353b
|
[
"Apache-2.0"
] | null | null | null |
# pylint: disable=line-too-long
from .dialog_machine import DialogMachine, MISSING_NEXT_NODE_KEY
from .base_node import DialogError, MissingNextDialogNodeError, BaseNode, fetch_default_logger, DialogTransition
from .alert_node import AlertNode
from .begin_node import BeginNode
from .branching_conditions_node import BranchingConditionsNode
from .branching_prompt_node import BranchingPromptNode
from .custom_node import CustomNode
from .echo_node import EchoNode
from .embed_dialog_node import EmbedDialogNode
from .end_node import EndNode
from .external_choice_node import ExternalChoiceNode
from .http_response_branch_node import HttpResponseBranchNode
from .if_node import IfNode
from .interrupt_node import InterruptNode
from .interrupt_resume_node import InterruptResumeNode
from .loop_node import LoopNode
from .pause_node import PauseNode
from .prompt_node import PromptNode
from .random_branch_node import RandomBranchNode
from .record_variable_node import RecordVariableNode
from .time_elapsed_interrupt_node import TimeElapsedInterruptNode
| 40.538462
| 112
| 0.880455
|
68b05a76dc2af3d17233a90762edff7dfcde5063
| 589
|
py
|
Python
|
Easy/Lettercase Percentage Ratio/main.py
|
AstrorEnales/CodeEval
|
eae0fb471d27d3a83d544ff4a4651ed1a2076930
|
[
"MIT"
] | null | null | null |
Easy/Lettercase Percentage Ratio/main.py
|
AstrorEnales/CodeEval
|
eae0fb471d27d3a83d544ff4a4651ed1a2076930
|
[
"MIT"
] | null | null | null |
Easy/Lettercase Percentage Ratio/main.py
|
AstrorEnales/CodeEval
|
eae0fb471d27d3a83d544ff4a4651ed1a2076930
|
[
"MIT"
] | null | null | null |
import sys
import re
lines = open(sys.argv[1], 'r')
for line in lines:
line = line.replace('\n', '').replace('\r', '')
if len(line) > 0:
matchesLower = [match.start() for match in re.finditer('[a-z]', line)]
matchesUpper = [match.start() for match in re.finditer('[A-Z]', line)]
matchesLowerPercentage = len(matchesLower) / float(len(line)) * 100.0
matchesUpperPercentage = len(matchesUpper) / float(len(line)) * 100.0
print('lowercase: %0.2f uppercase: %0.2f' % (matchesLowerPercentage, matchesUpperPercentage))
lines.close()
| 42.071429
| 102
| 0.624788
|
d08daea1de81f8f48947af9ae1d3e253b3c34998
| 43,185
|
py
|
Python
|
azurelinuxagent/common/osutil/default.py
|
sundaxi/WALinuxAgent
|
416b8b10b27a4cb0d4988834d26af7aae6836d84
|
[
"Apache-2.0"
] | null | null | null |
azurelinuxagent/common/osutil/default.py
|
sundaxi/WALinuxAgent
|
416b8b10b27a4cb0d4988834d26af7aae6836d84
|
[
"Apache-2.0"
] | null | null | null |
azurelinuxagent/common/osutil/default.py
|
sundaxi/WALinuxAgent
|
416b8b10b27a4cb0d4988834d26af7aae6836d84
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2018 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import array
import base64
import datetime
import fcntl
import glob
import multiprocessing
import os
import platform
import pwd
import re
import shutil
import socket
import struct
import sys
import time
import azurelinuxagent.common.logger as logger
import azurelinuxagent.common.conf as conf
import azurelinuxagent.common.utils.fileutil as fileutil
import azurelinuxagent.common.utils.shellutil as shellutil
import azurelinuxagent.common.utils.textutil as textutil
from azurelinuxagent.common.exception import OSUtilError
from azurelinuxagent.common.future import ustr
from azurelinuxagent.common.utils.cryptutil import CryptUtil
from azurelinuxagent.common.utils.flexible_version import FlexibleVersion
__RULES_FILES__ = [ "/lib/udev/rules.d/75-persistent-net-generator.rules",
"/etc/udev/rules.d/70-persistent-net.rules" ]
"""
Define distro specific behavior. OSUtil class defines default behavior
for all distros. Each concrete distro classes could overwrite default behavior
if needed.
"""
IPTABLES_VERSION_PATTERN = re.compile(r"^[^\d\.]*([\d\.]+).*$")
IPTABLES_VERSION = "iptables --version"
IPTABLES_LOCKING_VERSION = FlexibleVersion('1.4.21')
FIREWALL_ACCEPT = "iptables {0} -t security -{1} OUTPUT -d {2} -p tcp -m owner --uid-owner {3} -j ACCEPT"
# Note:
# -- Initially "flight" the change to ACCEPT packets and develop a metric baseline
# A subsequent release will convert the ACCEPT to DROP
# FIREWALL_DROP = "iptables {0} -t security -{1} OUTPUT -d {2} -p tcp -m conntrack --ctstate INVALID,NEW -j ACCEPT"
FIREWALL_DROP = "iptables {0} -t security -{1} OUTPUT -d {2} -p tcp -m conntrack --ctstate INVALID,NEW -j DROP"
FIREWALL_LIST = "iptables {0} -t security -L -nxv"
FIREWALL_PACKETS = "iptables {0} -t security -L OUTPUT --zero OUTPUT -nxv"
FIREWALL_FLUSH = "iptables {0} -t security --flush"
# Precisely delete the rules created by the agent.
# this rule was used <= 2.2.25. This rule helped to validate our change, and determine impact.
FIREWALL_DELETE_CONNTRACK_ACCEPT = "iptables {0} -t security -D OUTPUT -d {1} -p tcp -m conntrack --ctstate INVALID,NEW -j ACCEPT"
FIREWALL_DELETE_OWNER_ACCEPT = "iptables {0} -t security -D OUTPUT -d {1} -p tcp -m owner --uid-owner {2} -j ACCEPT"
FIREWALL_DELETE_CONNTRACK_DROP = "iptables {0} -t security -D OUTPUT -d {1} -p tcp -m conntrack --ctstate INVALID,NEW -j DROP"
PACKET_PATTERN = r"^\s*(\d+)\s+(\d+)\s+DROP\s+.*{0}[^\d]*$"
_enable_firewall = True
DMIDECODE_CMD = 'dmidecode --string system-uuid'
PRODUCT_ID_FILE = '/sys/class/dmi/id/product_uuid'
UUID_PATTERN = re.compile(
r'^\s*[A-F0-9]{8}(?:\-[A-F0-9]{4}){3}\-[A-F0-9]{12}\s*$',
re.IGNORECASE)
IOCTL_SIOCGIFCONF = 0x8912
IOCTL_SIOCGIFFLAGS = 0x8913
IOCTL_SIOCGIFHWADDR = 0x8927
IFNAMSIZ = 16
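# Illustration of the rule templates and packet pattern above (commented out;
# 168.63.129.16 appears here only as a representative destination address):
#
# rule = FIREWALL_ACCEPT.format("-w", "A", "168.63.129.16", "0")
# # -> "iptables -w -t security -A OUTPUT -d 168.63.129.16 -p tcp
# #     -m owner --uid-owner 0 -j ACCEPT"
# m = re.match(PACKET_PATTERN.format("168.63.129.16"),
#              "   12   720 DROP  tcp -- * * 0.0.0.0/0  168.63.129.16")
# # m.group(1) == "12" dropped packets, m.group(2) == "720" bytes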
class DefaultOSUtil(object):
def __init__(self):
self.agent_conf_file_path = '/etc/waagent.conf'
self.selinux = None
self.disable_route_warning = False
def get_firewall_dropped_packets(self, dst_ip=None):
# If a previous attempt failed, do not retry
global _enable_firewall
if not _enable_firewall:
return 0
try:
wait = self.get_firewall_will_wait()
rc, output = shellutil.run_get_output(FIREWALL_PACKETS.format(wait), log_cmd=False)
if rc == 3:
# Transient error that we ignore. This code fires every loop
# of the daemon (60m), so we will get the value eventually.
return 0
if rc != 0:
return -1
pattern = re.compile(PACKET_PATTERN.format(dst_ip))
for line in output.split('\n'):
m = pattern.match(line)
if m is not None:
return int(m.group(1))
return 0
except Exception as e:
_enable_firewall = False
logger.warn("Unable to retrieve firewall packets dropped"
"{0}".format(ustr(e)))
return -1
def get_firewall_will_wait(self):
# Determine if iptables will serialize access
rc, output = shellutil.run_get_output(IPTABLES_VERSION)
if rc != 0:
msg = "Unable to determine version of iptables"
logger.warn(msg)
raise Exception(msg)
m = IPTABLES_VERSION_PATTERN.match(output)
if m is None:
msg = "iptables did not return version information"
logger.warn(msg)
raise Exception(msg)
wait = "-w" \
if FlexibleVersion(m.group(1)) >= IPTABLES_LOCKING_VERSION \
else ""
return wait
def _delete_rule(self, rule):
"""
Continually execute the delete operation until the return
code is non-zero or the limit has been reached.
"""
for i in range(1, 100):
rc = shellutil.run(rule, chk_err=False)
if rc == 1:
return
elif rc == 2:
raise Exception("invalid firewall deletion rule '{0}'".format(rule))
def remove_firewall(self, dst_ip=None, uid=None):
# If a previous attempt failed, do not retry
global _enable_firewall
if not _enable_firewall:
return False
try:
if dst_ip is None or uid is None:
msg = "Missing arguments to enable_firewall"
logger.warn(msg)
raise Exception(msg)
wait = self.get_firewall_will_wait()
# This rule was <= 2.2.25 only, and may still exist on some VMs. Until 2.2.25
# has aged out, keep this cleanup in place.
self._delete_rule(FIREWALL_DELETE_CONNTRACK_ACCEPT.format(wait, dst_ip))
self._delete_rule(FIREWALL_DELETE_OWNER_ACCEPT.format(wait, dst_ip, uid))
self._delete_rule(FIREWALL_DELETE_CONNTRACK_DROP.format(wait, dst_ip))
return True
except Exception as e:
_enable_firewall = False
logger.info("Unable to remove firewall -- "
"no further attempts will be made: "
"{0}".format(ustr(e)))
return False
def enable_firewall(self, dst_ip=None, uid=None):
# If a previous attempt failed, do not retry
global _enable_firewall
if not _enable_firewall:
return False
try:
if dst_ip is None or uid is None:
msg = "Missing arguments to enable_firewall"
logger.warn(msg)
raise Exception(msg)
wait = self.get_firewall_will_wait()
# If the DROP rule exists, make no changes
drop_rule = FIREWALL_DROP.format(wait, "C", dst_ip)
rc = shellutil.run(drop_rule, chk_err=False)
if rc == 0:
logger.verbose("Firewall appears established")
return True
elif rc == 2:
self.remove_firewall(dst_ip, uid)
msg = "please upgrade iptables to a version that supports the -C option"
logger.warn(msg)
raise Exception(msg)
# Otherwise, append both rules
accept_rule = FIREWALL_ACCEPT.format(wait, "A", dst_ip, uid)
drop_rule = FIREWALL_DROP.format(wait, "A", dst_ip)
if shellutil.run(accept_rule) != 0:
msg = "Unable to add ACCEPT firewall rule '{0}'".format(
accept_rule)
logger.warn(msg)
raise Exception(msg)
if shellutil.run(drop_rule) != 0:
msg = "Unable to add DROP firewall rule '{0}'".format(
drop_rule)
logger.warn(msg)
raise Exception(msg)
logger.info("Successfully added Azure fabric firewall rules")
rc, output = shellutil.run_get_output(FIREWALL_LIST.format(wait))
if rc == 0:
logger.info("Firewall rules:\n{0}".format(output))
else:
logger.warn("Listing firewall rules failed: {0}".format(output))
return True
except Exception as e:
_enable_firewall = False
logger.info("Unable to establish firewall -- "
"no further attempts will be made: "
"{0}".format(ustr(e)))
return False
def _correct_instance_id(self, id):
'''
Azure stores the instance ID with an incorrect byte ordering for the
first parts. For example, the ID returned by the metadata service:
D0DF4C54-4ECB-4A4B-9954-5BDF3ED5C3B8
will be found as:
544CDFD0-CB4E-4B4A-9954-5BDF3ED5C3B8
This code corrects the byte order such that it is consistent with
that returned by the metadata service.
'''
if not UUID_PATTERN.match(id):
return id
parts = id.split('-')
return '-'.join([
textutil.swap_hexstring(parts[0], width=2),
textutil.swap_hexstring(parts[1], width=2),
textutil.swap_hexstring(parts[2], width=2),
parts[3],
parts[4]
])
def is_current_instance_id(self, id_that):
'''
Compare two instance IDs for equality, but allow that some IDs
may have been persisted using the incorrect byte ordering.
'''
id_this = self.get_instance_id()
return id_that == id_this or \
id_that == self._correct_instance_id(id_this)
def get_agent_conf_file_path(self):
return self.agent_conf_file_path
def get_instance_id(self):
'''
Azure records a UUID as the instance ID
First check /sys/class/dmi/id/product_uuid.
If that is missing, then extracts from dmidecode
If nothing works (for old VMs), return the empty string
'''
if os.path.isfile(PRODUCT_ID_FILE):
s = fileutil.read_file(PRODUCT_ID_FILE).strip()
else:
rc, s = shellutil.run_get_output(DMIDECODE_CMD)
if rc != 0 or UUID_PATTERN.match(s) is None:
return ""
return self._correct_instance_id(s.strip())
def get_userentry(self, username):
try:
return pwd.getpwnam(username)
except KeyError:
return None
def is_sys_user(self, username):
"""
Check whether the user is a system user.
If reset sys user is allowed in conf, return False
Otherwise, check whether UID is less than UID_MIN
"""
if conf.get_allow_reset_sys_user():
return False
userentry = self.get_userentry(username)
uidmin = None
try:
uidmin_def = fileutil.get_line_startingwith("UID_MIN",
"/etc/login.defs")
if uidmin_def is not None:
uidmin = int(uidmin_def.split()[1])
except IOError as e:
pass
if uidmin is None:
uidmin = 100
if userentry is not None and userentry[2] < uidmin:
return True
else:
return False
def useradd(self, username, expiration=None):
"""
Create user account with 'username'
"""
userentry = self.get_userentry(username)
if userentry is not None:
logger.info("User {0} already exists, skip useradd", username)
return
if expiration is not None:
cmd = "useradd -m {0} -e {1}".format(username, expiration)
else:
cmd = "useradd -m {0}".format(username)
retcode, out = shellutil.run_get_output(cmd)
if retcode != 0:
raise OSUtilError(("Failed to create user account:{0}, "
"retcode:{1}, "
"output:{2}").format(username, retcode, out))
def chpasswd(self, username, password, crypt_id=6, salt_len=10):
if self.is_sys_user(username):
raise OSUtilError(("User {0} is a system user, "
"will not set password.").format(username))
passwd_hash = textutil.gen_password_hash(password, crypt_id, salt_len)
cmd = "usermod -p '{0}' {1}".format(passwd_hash, username)
ret, output = shellutil.run_get_output(cmd, log_cmd=False)
if ret != 0:
raise OSUtilError(("Failed to set password for {0}: {1}"
"").format(username, output))
def conf_sudoer(self, username, nopasswd=False, remove=False):
sudoers_dir = conf.get_sudoers_dir()
sudoers_wagent = os.path.join(sudoers_dir, 'waagent')
if not remove:
# for older distros create sudoers.d
if not os.path.isdir(sudoers_dir):
sudoers_file = os.path.join(sudoers_dir, '../sudoers')
# create the sudoers.d directory
os.mkdir(sudoers_dir)
# add the include of sudoers.d to the /etc/sudoers
sudoers = '\n#includedir ' + sudoers_dir + '\n'
fileutil.append_file(sudoers_file, sudoers)
sudoer = None
if nopasswd:
sudoer = "{0} ALL=(ALL) NOPASSWD: ALL".format(username)
else:
sudoer = "{0} ALL=(ALL) ALL".format(username)
if not os.path.isfile(sudoers_wagent) or \
fileutil.findstr_in_file(sudoers_wagent, sudoer) is False:
fileutil.append_file(sudoers_wagent, "{0}\n".format(sudoer))
fileutil.chmod(sudoers_wagent, 0o440)
else:
# remove user from sudoers
if os.path.isfile(sudoers_wagent):
try:
content = fileutil.read_file(sudoers_wagent)
sudoers = content.split("\n")
sudoers = [x for x in sudoers if username not in x]
fileutil.write_file(sudoers_wagent, "\n".join(sudoers))
except IOError as e:
raise OSUtilError("Failed to remove sudoer: {0}".format(e))
def del_root_password(self):
try:
passwd_file_path = conf.get_passwd_file_path()
passwd_content = fileutil.read_file(passwd_file_path)
passwd = passwd_content.split('\n')
new_passwd = [x for x in passwd if not x.startswith("root:")]
new_passwd.insert(0, "root:*LOCK*:14600::::::")
fileutil.write_file(passwd_file_path, "\n".join(new_passwd))
except IOError as e:
raise OSUtilError("Failed to delete root password:{0}".format(e))
def _norm_path(self, filepath):
home = conf.get_home_dir()
# Expand HOME variable if present in path
path = os.path.normpath(filepath.replace("$HOME", home))
return path
def deploy_ssh_keypair(self, username, keypair):
"""
Deploy id_rsa and id_rsa.pub
"""
path, thumbprint = keypair
path = self._norm_path(path)
dir_path = os.path.dirname(path)
fileutil.mkdir(dir_path, mode=0o700, owner=username)
lib_dir = conf.get_lib_dir()
prv_path = os.path.join(lib_dir, thumbprint + '.prv')
if not os.path.isfile(prv_path):
raise OSUtilError("Can't find {0}.prv".format(thumbprint))
shutil.copyfile(prv_path, path)
pub_path = path + '.pub'
crytputil = CryptUtil(conf.get_openssl_cmd())
pub = crytputil.get_pubkey_from_prv(prv_path)
fileutil.write_file(pub_path, pub)
self.set_selinux_context(pub_path, 'unconfined_u:object_r:ssh_home_t:s0')
self.set_selinux_context(path, 'unconfined_u:object_r:ssh_home_t:s0')
os.chmod(path, 0o644)
os.chmod(pub_path, 0o600)
def openssl_to_openssh(self, input_file, output_file):
cryptutil = CryptUtil(conf.get_openssl_cmd())
cryptutil.crt_to_ssh(input_file, output_file)
def deploy_ssh_pubkey(self, username, pubkey):
"""
Deploy authorized_key
"""
path, thumbprint, value = pubkey
if path is None:
raise OSUtilError("Public key path is None")
crytputil = CryptUtil(conf.get_openssl_cmd())
path = self._norm_path(path)
dir_path = os.path.dirname(path)
fileutil.mkdir(dir_path, mode=0o700, owner=username)
if value is not None:
if not value.startswith("ssh-"):
raise OSUtilError("Bad public key: {0}".format(value))
fileutil.write_file(path, value)
elif thumbprint is not None:
lib_dir = conf.get_lib_dir()
crt_path = os.path.join(lib_dir, thumbprint + '.crt')
if not os.path.isfile(crt_path):
raise OSUtilError("Can't find {0}.crt".format(thumbprint))
pub_path = os.path.join(lib_dir, thumbprint + '.pub')
pub = crytputil.get_pubkey_from_crt(crt_path)
fileutil.write_file(pub_path, pub)
self.set_selinux_context(pub_path,
'unconfined_u:object_r:ssh_home_t:s0')
self.openssl_to_openssh(pub_path, path)
fileutil.chmod(pub_path, 0o600)
else:
raise OSUtilError("SSH public key Fingerprint and Value are None")
self.set_selinux_context(path, 'unconfined_u:object_r:ssh_home_t:s0')
fileutil.chowner(path, username)
fileutil.chmod(path, 0o644)
def is_selinux_system(self):
"""
Checks and sets self.selinux = True if SELinux is available on system.
"""
        if self.selinux is None:
if shellutil.run("which getenforce", chk_err=False) == 0:
self.selinux = True
else:
self.selinux = False
return self.selinux
def is_selinux_enforcing(self):
"""
Calls shell command 'getenforce' and returns True if 'Enforcing'.
"""
if self.is_selinux_system():
output = shellutil.run_get_output("getenforce")[1]
return output.startswith("Enforcing")
else:
return False
def set_selinux_context(self, path, con):
"""
Calls shell 'chcon' with 'path' and 'con' context.
Returns exit result.
"""
if self.is_selinux_system():
if not os.path.exists(path):
logger.error("Path does not exist: {0}".format(path))
return 1
return shellutil.run('chcon ' + con + ' ' + path)
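    # For example, with con='unconfined_u:object_r:ssh_home_t:s0' and
    # path='/home/azureuser/.ssh/authorized_keys' (hypothetical values), the
    # shell command built above is:
    #   chcon unconfined_u:object_r:ssh_home_t:s0 /home/azureuser/.ssh/authorized_keys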
def conf_sshd(self, disable_password):
option = "no" if disable_password else "yes"
conf_file_path = conf.get_sshd_conf_file_path()
conf_file = fileutil.read_file(conf_file_path).split("\n")
textutil.set_ssh_config(conf_file, "PasswordAuthentication", option)
textutil.set_ssh_config(conf_file, "ChallengeResponseAuthentication", option)
textutil.set_ssh_config(conf_file, "ClientAliveInterval", str(conf.get_ssh_client_alive_interval()))
fileutil.write_file(conf_file_path, "\n".join(conf_file))
logger.info("{0} SSH password-based authentication methods."
.format("Disabled" if disable_password else "Enabled"))
logger.info("Configured SSH client probing to keep connections alive.")
def get_dvd_device(self, dev_dir='/dev'):
pattern = r'(sr[0-9]|hd[c-z]|cdrom[0-9]|cd[0-9])'
device_list = os.listdir(dev_dir)
for dvd in [re.match(pattern, dev) for dev in device_list]:
if dvd is not None:
return "/dev/{0}".format(dvd.group(0))
inner_detail = "The following devices were found, but none matched " \
"the pattern [{0}]: {1}\n".format(pattern, device_list)
raise OSUtilError(msg="Failed to get dvd device from {0}".format(dev_dir),
inner=inner_detail)
def mount_dvd(self,
max_retry=6,
chk_err=True,
dvd_device=None,
mount_point=None,
sleep_time=5):
if dvd_device is None:
dvd_device = self.get_dvd_device()
if mount_point is None:
mount_point = conf.get_dvd_mount_point()
mount_list = shellutil.run_get_output("mount")[1]
existing = self.get_mount_point(mount_list, dvd_device)
if existing is not None:
# already mounted
logger.info("{0} is already mounted at {1}", dvd_device, existing)
return
if not os.path.isdir(mount_point):
os.makedirs(mount_point)
err = ''
for retry in range(1, max_retry):
return_code, err = self.mount(dvd_device,
mount_point,
option="-o ro -t udf,iso9660",
chk_err=False)
if return_code == 0:
logger.info("Successfully mounted dvd")
return
else:
logger.warn(
"Mounting dvd failed [retry {0}/{1}, sleeping {2} sec]",
retry,
max_retry - 1,
sleep_time)
                if retry < max_retry - 1:  # do not sleep after the final attempt
time.sleep(sleep_time)
if chk_err:
raise OSUtilError("Failed to mount dvd device", inner=err)
def umount_dvd(self, chk_err=True, mount_point=None):
if mount_point is None:
mount_point = conf.get_dvd_mount_point()
return_code = self.umount(mount_point, chk_err=chk_err)
if chk_err and return_code != 0:
raise OSUtilError("Failed to unmount dvd device at {0}",
mount_point)
def eject_dvd(self, chk_err=True):
dvd = self.get_dvd_device()
retcode = shellutil.run("eject {0}".format(dvd))
if chk_err and retcode != 0:
raise OSUtilError("Failed to eject dvd: ret={0}".format(retcode))
def try_load_atapiix_mod(self):
try:
self.load_atapiix_mod()
except Exception as e:
logger.warn("Could not load ATAPI driver: {0}".format(e))
def load_atapiix_mod(self):
if self.is_atapiix_mod_loaded():
return
ret, kern_version = shellutil.run_get_output("uname -r")
if ret != 0:
raise Exception("Failed to call uname -r")
mod_path = os.path.join('/lib/modules',
kern_version.strip('\n'),
'kernel/drivers/ata/ata_piix.ko')
if not os.path.isfile(mod_path):
raise Exception("Can't find module file:{0}".format(mod_path))
ret, output = shellutil.run_get_output("insmod " + mod_path)
if ret != 0:
raise Exception("Error calling insmod for ATAPI CD-ROM driver")
if not self.is_atapiix_mod_loaded(max_retry=3):
raise Exception("Failed to load ATAPI CD-ROM driver")
def is_atapiix_mod_loaded(self, max_retry=1):
for retry in range(0, max_retry):
ret = shellutil.run("lsmod | grep ata_piix", chk_err=False)
if ret == 0:
logger.info("Module driver for ATAPI CD-ROM is already present.")
return True
if retry < max_retry - 1:
time.sleep(1)
return False
def mount(self, dvd, mount_point, option="", chk_err=True):
cmd = "mount {0} {1} {2}".format(option, dvd, mount_point)
retcode, err = shellutil.run_get_output(cmd, chk_err)
if retcode != 0:
detail = "[{0}] returned {1}: {2}".format(cmd, retcode, err)
err = detail
return retcode, err
def umount(self, mount_point, chk_err=True):
return shellutil.run("umount {0}".format(mount_point), chk_err=chk_err)
def allow_dhcp_broadcast(self):
        # Open the DHCP port if iptables is enabled.
        # Errors are suppressed: deleting the rule fails when it does not exist yet.
shellutil.run("iptables -D INPUT -p udp --dport 68 -j ACCEPT",
chk_err=False)
shellutil.run("iptables -I INPUT -p udp --dport 68 -j ACCEPT",
chk_err=False)
def remove_rules_files(self, rules_files=__RULES_FILES__):
lib_dir = conf.get_lib_dir()
for src in rules_files:
file_name = fileutil.base_name(src)
dest = os.path.join(lib_dir, file_name)
if os.path.isfile(dest):
os.remove(dest)
if os.path.isfile(src):
logger.warn("Move rules file {0} to {1}", file_name, dest)
shutil.move(src, dest)
def restore_rules_files(self, rules_files=__RULES_FILES__):
lib_dir = conf.get_lib_dir()
for dest in rules_files:
filename = fileutil.base_name(dest)
src = os.path.join(lib_dir, filename)
if os.path.isfile(dest):
continue
if os.path.isfile(src):
logger.warn("Move rules file {0} to {1}", filename, dest)
shutil.move(src, dest)
def get_mac_addr(self):
"""
Convenience function, returns mac addr bound to
first non-loopback interface.
"""
ifname = ''
while len(ifname) < 2:
ifname = self.get_first_if()[0]
addr = self.get_if_mac(ifname)
return textutil.hexstr_to_bytearray(addr)
def get_if_mac(self, ifname):
"""
Return the mac-address bound to the socket.
"""
sock = socket.socket(socket.AF_INET,
socket.SOCK_DGRAM,
socket.IPPROTO_UDP)
param = struct.pack('256s', (ifname[:15]+('\0'*241)).encode('latin-1'))
info = fcntl.ioctl(sock.fileno(), IOCTL_SIOCGIFHWADDR, param)
sock.close()
return ''.join(['%02X' % textutil.str_to_ord(char) for char in info[18:24]])
@staticmethod
def _get_struct_ifconf_size():
"""
Return the sizeof struct ifinfo. On 64-bit platforms the size is 40 bytes;
on 32-bit platforms the size is 32 bytes.
"""
python_arc = platform.architecture()[0]
struct_size = 32 if python_arc == '32bit' else 40
return struct_size
def _get_all_interfaces(self):
"""
Return a dictionary mapping from interface name to IPv4 address.
Interfaces without a name are ignored.
"""
        expected = 16  # upper bound on the number of interfaces to enumerate
struct_size = DefaultOSUtil._get_struct_ifconf_size()
array_size = expected * struct_size
buff = array.array('B', b'\0' * array_size)
param = struct.pack('iL', array_size, buff.buffer_info()[0])
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
ret = fcntl.ioctl(sock.fileno(), IOCTL_SIOCGIFCONF, param)
retsize = (struct.unpack('iL', ret)[0])
sock.close()
        if retsize == array_size:
            logger.warn(('SIOCGIFCONF returned a full buffer; there may be '
                         'more than {0} interfaces up.'), expected)
ifconf_buff = buff.tostring()
ifaces = {}
for i in range(0, array_size, struct_size):
iface = ifconf_buff[i:i+IFNAMSIZ].split(b'\0', 1)[0]
if len(iface) > 0:
iface_name = iface.decode('latin-1')
if iface_name not in ifaces:
ifaces[iface_name] = socket.inet_ntoa(ifconf_buff[i+20:i+24])
return ifaces
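    # Example return value (addresses are hypothetical):
    #   {'lo': '127.0.0.1', 'eth0': '10.0.0.4'}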
def get_first_if(self):
"""
        Return the interface name and IPv4 address of the "primary" interface
        or, failing that, of any active non-loopback interface.
"""
primary = self.get_primary_interface()
ifaces = self._get_all_interfaces()
if primary in ifaces:
return primary, ifaces[primary]
logger.warn('Primary interface {0} not found in ifconf list', primary)
for iface_name in ifaces.keys():
if not self.is_loopback(iface_name):
if not self.disable_route_warning:
logger.info("Choosing non-primary {0}".format(iface_name))
return iface_name, ifaces[iface_name]
logger.warn('No non-loopback interface found in ifconf list')
return '', ''
def get_primary_interface(self):
"""
Get the name of the primary interface, which is the one with the
default route attached to it; if there are multiple default routes,
the primary has the lowest Metric.
:return: the interface which has the default route
"""
# from linux/route.h
RTF_GATEWAY = 0x02
DEFAULT_DEST = "00000000"
hdr_iface = "Iface"
hdr_dest = "Destination"
hdr_flags = "Flags"
hdr_metric = "Metric"
idx_iface = -1
idx_dest = -1
idx_flags = -1
idx_metric = -1
primary = None
primary_metric = None
if not self.disable_route_warning:
logger.info("Examine /proc/net/route for primary interface")
with open('/proc/net/route') as routing_table:
idx = 0
for header in filter(lambda h: len(h) > 0, routing_table.readline().strip(" \n").split("\t")):
if header == hdr_iface:
idx_iface = idx
elif header == hdr_dest:
idx_dest = idx
elif header == hdr_flags:
idx_flags = idx
elif header == hdr_metric:
idx_metric = idx
idx = idx + 1
for entry in routing_table.readlines():
route = entry.strip(" \n").split("\t")
if route[idx_dest] == DEFAULT_DEST and int(route[idx_flags]) & RTF_GATEWAY == RTF_GATEWAY:
metric = int(route[idx_metric])
iface = route[idx_iface]
if primary is None or metric < primary_metric:
primary = iface
primary_metric = metric
if primary is None:
primary = ''
if not self.disable_route_warning:
with open('/proc/net/route') as routing_table_fh:
routing_table_text = routing_table_fh.read()
logger.warn('Could not determine primary interface, '
'please ensure /proc/net/route is correct')
logger.warn('Contents of /proc/net/route:\n{0}'.format(routing_table_text))
logger.warn('Primary interface examination will retry silently')
self.disable_route_warning = True
else:
logger.info('Primary interface is [{0}]'.format(primary))
self.disable_route_warning = False
return primary
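    # Illustrative /proc/net/route content (hypothetical values); the parser
    # above picks 'eth0' because its Destination is 00000000, its Flags include
    # RTF_GATEWAY (0x02) and its Metric is the lowest among default routes:
    #
    #   Iface  Destination  Gateway   Flags  RefCnt  Use  Metric  Mask
    #   eth0   00000000     0101000A  0003   0       0    0       00000000
    #   eth0   0001000A     00000000  0001   0       0    0       00FFFFFF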
def is_primary_interface(self, ifname):
"""
Indicate whether the specified interface is the primary.
:param ifname: the name of the interface - eth0, lo, etc.
:return: True if this interface binds the default route
"""
return self.get_primary_interface() == ifname
def is_loopback(self, ifname):
"""
Determine if a named interface is loopback.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
ifname_buff = ifname + ('\0'*256)
result = fcntl.ioctl(s.fileno(), IOCTL_SIOCGIFFLAGS, ifname_buff)
flags, = struct.unpack('H', result[16:18])
isloopback = flags & 8 == 8
if not self.disable_route_warning:
logger.info('interface [{0}] has flags [{1}], '
'is loopback [{2}]'.format(ifname, flags, isloopback))
s.close()
return isloopback
def get_dhcp_lease_endpoint(self):
"""
OS specific, this should return the decoded endpoint of
the wireserver from option 245 in the dhcp leases file
if it exists on disk.
:return: The endpoint if available, or None
"""
return None
@staticmethod
def get_endpoint_from_leases_path(pathglob):
"""
Try to discover and decode the wireserver endpoint in the
specified dhcp leases path.
:param pathglob: The path containing dhcp lease files
:return: The endpoint if available, otherwise None
"""
endpoint = None
HEADER_LEASE = "lease"
HEADER_OPTION = "option unknown-245"
HEADER_DNS = "option domain-name-servers"
HEADER_EXPIRE = "expire"
FOOTER_LEASE = "}"
FORMAT_DATETIME = "%Y/%m/%d %H:%M:%S"
logger.info("looking for leases in path [{0}]".format(pathglob))
for lease_file in glob.glob(pathglob):
leases = open(lease_file).read()
if HEADER_OPTION in leases:
cached_endpoint = None
has_option_245 = False
expired = True # assume expired
for line in leases.splitlines():
if line.startswith(HEADER_LEASE):
cached_endpoint = None
has_option_245 = False
expired = True
elif HEADER_DNS in line:
cached_endpoint = line.replace(HEADER_DNS, '').strip(" ;")
elif HEADER_OPTION in line:
has_option_245 = True
elif HEADER_EXPIRE in line:
if "never" in line:
expired = False
else:
try:
expire_string = line.split(" ", 4)[-1].strip(";")
expire_date = datetime.datetime.strptime(expire_string, FORMAT_DATETIME)
if expire_date > datetime.datetime.utcnow():
expired = False
                            except ValueError:
                                logger.error("could not parse expiry token '{0}'".format(line))
elif FOOTER_LEASE in line:
logger.info("dhcp entry:{0}, 245:{1}, expired:{2}".format(
cached_endpoint, has_option_245, expired))
if not expired and cached_endpoint is not None and has_option_245:
endpoint = cached_endpoint
logger.info("found endpoint [{0}]".format(endpoint))
# we want to return the last valid entry, so
# keep searching
if endpoint is not None:
logger.info("cached endpoint found [{0}]".format(endpoint))
else:
logger.info("cached endpoint not found")
return endpoint
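    # A dhclient lease entry this parser understands looks roughly like the
    # following (all values hypothetical; option 245 marks an Azure wireserver
    # lease, and the domain-name-servers value is taken as the endpoint):
    #
    #   lease {
    #     interface "eth0";
    #     option unknown-245 a8:3f:81:10;
    #     option domain-name-servers 168.63.129.16;
    #     expire 4 2038/01/19 03:14:07;
    #   }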
def is_missing_default_route(self):
routes = shellutil.run_get_output("route -n")[1]
for route in routes.split("\n"):
if route.startswith("0.0.0.0 ") or route.startswith("default "):
return False
return True
def get_if_name(self):
ifname = ''
while len(ifname) < 2:
ifname = self.get_first_if()[0]
return ifname
def get_ip4_addr(self):
return self.get_first_if()[1]
def set_route_for_dhcp_broadcast(self, ifname):
return shellutil.run("route add 255.255.255.255 dev {0}".format(ifname),
chk_err=False)
def remove_route_for_dhcp_broadcast(self, ifname):
shellutil.run("route del 255.255.255.255 dev {0}".format(ifname),
chk_err=False)
def is_dhcp_enabled(self):
return False
def stop_dhcp_service(self):
pass
def start_dhcp_service(self):
pass
def start_network(self):
pass
def start_agent_service(self):
pass
def stop_agent_service(self):
pass
def register_agent_service(self):
pass
def unregister_agent_service(self):
pass
def restart_ssh_service(self):
pass
def route_add(self, net, mask, gateway):
"""
Add specified route using /sbin/route add -net.
"""
cmd = ("/sbin/route add -net "
"{0} netmask {1} gw {2}").format(net, mask, gateway)
return shellutil.run(cmd, chk_err=False)
def get_dhcp_pid(self):
ret = shellutil.run_get_output("pidof dhclient", chk_err=False)
return ret[1] if ret[0] == 0 else None
def set_hostname(self, hostname):
fileutil.write_file('/etc/hostname', hostname)
shellutil.run("hostname {0}".format(hostname), chk_err=False)
def set_dhcp_hostname(self, hostname):
autosend = r'^[^#]*?send\s*host-name.*?(<hostname>|gethostname[(,)])'
dhclient_files = ['/etc/dhcp/dhclient.conf', '/etc/dhcp3/dhclient.conf', '/etc/dhclient.conf']
for conf_file in dhclient_files:
if not os.path.isfile(conf_file):
continue
if fileutil.findre_in_file(conf_file, autosend):
                # Return if automatic host-name sending is already configured
return
fileutil.update_conf_file(conf_file,
'send host-name',
'send host-name "{0}";'.format(hostname))
def restart_if(self, ifname, retries=3, wait=5):
retry_limit=retries+1
for attempt in range(1, retry_limit):
return_code=shellutil.run("ifdown {0} && ifup {0}".format(ifname))
if return_code == 0:
return
logger.warn("failed to restart {0}: return code {1}".format(ifname, return_code))
if attempt < retry_limit:
logger.info("retrying in {0} seconds".format(wait))
time.sleep(wait)
else:
logger.warn("exceeded restart retries")
def publish_hostname(self, hostname):
self.set_dhcp_hostname(hostname)
self.set_hostname_record(hostname)
ifname = self.get_if_name()
self.restart_if(ifname)
def set_scsi_disks_timeout(self, timeout):
for dev in os.listdir("/sys/block"):
if dev.startswith('sd'):
self.set_block_device_timeout(dev, timeout)
def set_block_device_timeout(self, dev, timeout):
if dev is not None and timeout is not None:
file_path = "/sys/block/{0}/device/timeout".format(dev)
content = fileutil.read_file(file_path)
original = content.splitlines()[0].rstrip()
if original != timeout:
fileutil.write_file(file_path, timeout)
logger.info("Set block dev timeout: {0} with timeout: {1}",
dev, timeout)
def get_mount_point(self, mountlist, device):
"""
Example of mountlist:
/dev/sda1 on / type ext4 (rw)
proc on /proc type proc (rw)
sysfs on /sys type sysfs (rw)
devpts on /dev/pts type devpts (rw,gid=5,mode=620)
tmpfs on /dev/shm type tmpfs
(rw,rootcontext="system_u:object_r:tmpfs_t:s0")
none on /proc/sys/fs/binfmt_misc type binfmt_misc (rw)
/dev/sdb1 on /mnt/resource type ext4 (rw)
"""
if (mountlist and device):
for entry in mountlist.split('\n'):
if(re.search(device, entry)):
tokens = entry.split()
#Return the 3rd column of this line
return tokens[2] if len(tokens) > 2 else None
return None
def device_for_ide_port(self, port_id):
"""
Return device name attached to ide port 'n'.
"""
if port_id > 3:
return None
g0 = "00000000"
if port_id > 1:
g0 = "00000001"
port_id = port_id - 2
device = None
path = "/sys/bus/vmbus/devices/"
if os.path.exists(path):
for vmbus in os.listdir(path):
deviceid = fileutil.read_file(os.path.join(path, vmbus, "device_id"))
guid = deviceid.lstrip('{').split('-')
if guid[0] == g0 and guid[1] == "000" + ustr(port_id):
for root, dirs, files in os.walk(path + vmbus):
if root.endswith("/block"):
device = dirs[0]
break
                        else:  # older distros
for d in dirs:
if ':' in d and "block" == d.split(':')[0]:
device = d.split(':')[1]
break
break
return device
def set_hostname_record(self, hostname):
fileutil.write_file(conf.get_published_hostname(), contents=hostname)
def get_hostname_record(self):
hostname_record = conf.get_published_hostname()
if not os.path.exists(hostname_record):
# this file is created at provisioning time with agents >= 2.2.3
hostname = socket.gethostname()
logger.info('Hostname record does not exist, '
'creating [{0}] with hostname [{1}]',
hostname_record,
hostname)
self.set_hostname_record(hostname)
record = fileutil.read_file(hostname_record)
return record
def del_account(self, username):
if self.is_sys_user(username):
logger.error("{0} is a system user. Will not delete it.", username)
shellutil.run("> /var/run/utmp")
shellutil.run("userdel -f -r " + username)
self.conf_sudoer(username, remove=True)
def decode_customdata(self, data):
return base64.b64decode(data).decode('utf-8')
def get_total_mem(self):
# Get total memory in bytes and divide by 1024**2 to get the value in MB.
return os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024**2)
def get_processor_cores(self):
return multiprocessing.cpu_count()
def check_pid_alive(self, pid):
return pid is not None and os.path.isdir(os.path.join('/proc', pid))
@property
def is_64bit(self):
return sys.maxsize > 2**32
| 39.116848
| 130
| 0.576195
|
70e97f0d378e46d2991cc6a4dcbe92406a86f86b
| 3,298
|
py
|
Python
|
benchmarks/f3_wrong_hints/scaling_software_termination/1-2Nested_false-termination_9.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | 3
|
2021-04-23T23:29:26.000Z
|
2022-03-23T10:00:30.000Z
|
benchmarks/f3_wrong_hints/scaling_software_termination/1-2Nested_false-termination_9.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | null | null | null |
benchmarks/f3_wrong_hints/scaling_software_termination/1-2Nested_false-termination_9.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | 1
|
2021-11-17T22:02:56.000Z
|
2021-11-17T22:02:56.000Z
|
from typing import Tuple, FrozenSet
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
import pysmt.typing as types
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
symbols = frozenset([pc, x, y])
m_1 = mgr.Int(-1)
n_locs = 3
max_int = n_locs
ints = []
pcs = []
x_pcs = []
for idx in range(n_locs):
num = mgr.Int(idx)
ints.append(num)
pcs.append(mgr.Equals(pc, num))
x_pcs.append(mgr.Equals(x_pc, num))
for idx in range(n_locs, max_int):
num = mgr.Int(idx)
ints.append(num)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
init = pcs[0]
cfg = []
# pc = 0 & (x >= 0) -> pc' = 1
cond = mgr.GE(x, ints[0])
cfg.append(mgr.Implies(mgr.And(pcs[0], cond), x_pcs[1]))
# pc = 0 & !(x >= 0) -> pc' = -1
cfg.append(mgr.Implies(mgr.And(pcs[0], mgr.Not(cond)), x_pcend))
# pc = 1 -> pc' = 2
cfg.append(mgr.Implies(pcs[1], x_pcs[2]))
# pc = 2 -> pc' = 0
cfg.append(mgr.Implies(pcs[2], x_pcs[0]))
# pc = -1 -> pc' = -1
cfg.append(mgr.Implies(pcend, x_pcend))
trans = []
same_x = mgr.Equals(x_x, x)
same_y = mgr.Equals(x_y, y)
same = mgr.And(same_x, same_y)
# pc = 0 -> same
trans.append(mgr.Implies(pcs[0], same))
# pc = 1 -> x' = x + y & same_y
trans.append(mgr.Implies(pcs[1],
mgr.And(mgr.Equals(x_x, mgr.Plus(x, y)),
same_y)))
# pc = 2 -> same_x & y' = y + 1
trans.append(mgr.Implies(pcs[2],
mgr.And(same_x,
mgr.Equals(x_y, mgr.Plus(y, ints[1])))))
# pc = end -> same
trans.append(mgr.Implies(pcend, same))
trans = mgr.And(*cfg, *trans)
fairness = mgr.Not(mgr.Equals(pc, m_1))
return symbols, init, trans, fairness
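# The transition system above is (roughly) a symbolic encoding of the loop:
#
#   while x >= 0:   # pc = 0
#       x = x + y   # pc = 1
#       y = y + 1   # pc = 2
#
# pc = -1 is the exit location; the fairness constraint asks for runs that
# never reach it, i.e. non-terminating executions.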
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
symbs = frozenset([pc, x, y])
m_100 = mgr.Int(-100)
m_1 = mgr.Int(-1)
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_4 = mgr.Int(4)
i_20 = mgr.Int(20)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
res = []
loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(x, y)))
loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
loc1.set_progress(2, mgr.GE(x_y, i_20))
loc2 = Location(env, mgr.TRUE())
loc2.set_progress(0, mgr.And(mgr.GE(x_y, m_100), mgr.LE(x_y, i_0)))
h_y = Hint("h_y4", env, frozenset([y]), symbs)
h_y.set_locs([loc0, loc1, loc2])
res.append(h_y)
return frozenset(res)
| 28.188034
| 77
| 0.557004
|
92c12a48b58663c93d9c8c38ba4153c231784199
| 10,920
|
py
|
Python
|
src/uproot/models/TBasket.py
|
klieret/uproot4
|
0cce6990b89db0ef7d47fc2857616ab2933c5d03
|
[
"BSD-3-Clause"
] | null | null | null |
src/uproot/models/TBasket.py
|
klieret/uproot4
|
0cce6990b89db0ef7d47fc2857616ab2933c5d03
|
[
"BSD-3-Clause"
] | null | null | null |
src/uproot/models/TBasket.py
|
klieret/uproot4
|
0cce6990b89db0ef7d47fc2857616ab2933c5d03
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
"""
This module defines a versionless model for ``TBasket``, including much of the
functionality of basket-reading.
Includes both "embedded" ``TBaskets`` (as a member of TBranch) and "free"
``TBaskets`` (top-level objects, located by ``TKeys``).
"""
from __future__ import absolute_import
import struct
import numpy
import uproot
_tbasket_format1 = struct.Struct(">ihiIhh")
_tbasket_format2 = struct.Struct(">Hiiii")
_tbasket_offsets_dtype = numpy.dtype(">i4")
class Model_TBasket(uproot.model.Model):
"""
A versionless :doc:`uproot.model.Model` for ``TBasket``.
Since this model is versionless and most of its functionality is internal
(not to be directly accessed by most users), it is defined on the model
instead of creating a behavior class to mix in functionality.
"""
def __repr__(self):
basket_num = self._basket_num if self._basket_num is not None else "(unknown)"
return "<TBasket {0} of {1} at 0x{2:012x}>".format(
basket_num, repr(self._parent.name), id(self)
)
@property
def raw_data(self):
"""
The raw but uncompressed data in the ``TBasket``, which combines data
content with entry offsets, if the latter exists.
If there are no entry offsets, this is identical to
:ref:`uproot.models.TBasket.Model_TBasket.data`.
"""
return self._raw_data
@property
def data(self):
"""
The uncompressed data content in the ``TBasket``, not including any
entry offsets, if they exist.
If there are no entry offsets, this is identical to
:ref:`uproot.models.TBasket.Model_TBasket.raw_data`.
"""
return self._data
@property
def byte_offsets(self):
"""
The index where each entry starts and stops in the
:ref:`uproot.models.TBasket.Model_TBasket.data`, not including header.
The first offset is ``0`` and the number of offsets is one greater than
the number of entries, such that the last offset is the length of
:ref:`uproot.models.TBasket.Model_TBasket.data`.
"""
return self._byte_offsets
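    # For example (hypothetical sizes): a basket whose three entries occupy
    # 4, 8 and 4 bytes of content has byte_offsets == [0, 4, 12, 16], and the
    # last offset equals len(self.data).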
def array(self, interpretation=None, library="ak"):
"""
The ``TBasket`` data and entry offsets as an array, given an
:doc:`uproot.interpretation.Interpretation` (or the ``TBranch`` parent's
:ref:`uproot.behaviors.TBranch.TBranch.interpretation`) and a
``library``.
"""
if interpretation is None:
interpretation = self._parent.interpretation
library = uproot.interpretation.library._regularize_library(library)
basket_array = interpretation.basket_array(
self.data,
self.byte_offsets,
self,
self._parent,
self._parent.context,
self._members["fKeylen"],
library,
)
return interpretation.final_array(
[basket_array],
0,
self.num_entries,
[0, self.num_entries],
library,
self._parent,
)
@property
def counts(self):
"""
The number of items in each entry as a NumPy array, derived from the
parent ``TBranch``'s
:ref:`uproot.behaviors.TBranch.TBranch.count_branch`. If there is
no such branch (e.g. the data are ``std::vector``), then this method
returns None.
"""
count_branch = self._parent.count_branch
if count_branch is not None:
entry_offsets = count_branch.entry_offsets
entry_start = entry_offsets[self._basket_num]
entry_stop = entry_offsets[self._basket_num + 1]
return count_branch.array(
entry_start=entry_start, entry_stop=entry_stop, library="np"
)
else:
return None
@property
def basket_num(self):
"""
The index of this ``TBasket`` within its ``TBranch``.
"""
return self._basket_num
@property
def entry_start_stop(self):
"""
The starting and stopping entry numbers for this ``TBasket``.
"""
return self._parent.basket_entry_start_stop(self._basket_num)
@property
def key_version(self):
"""
The instance version of the ``TKey`` for this ``TBasket`` (which is
deserialized along with the ``TBasket``, unlike normal objects).
"""
return self._key_version
@property
def num_entries(self):
"""
The number of entries in this ``TBasket``.
"""
return self._members["fNevBuf"]
@property
def is_embedded(self):
"""
If this ``TBasket`` is embedded within its ``TBranch`` (i.e. must be
deserialized as part of the ``TBranch``), then ``is_embedded`` is True.
If this ``TBasket`` is a free-standing object, then ``is_embedded`` is
False.
"""
return self._members["fNbytes"] <= self._members["fKeylen"]
@property
def uncompressed_bytes(self):
"""
The number of bytes for the uncompressed data, not including the header.
If the ``TBasket`` is uncompressed, this is equal to
:ref:`uproot.models.TBasket.Model_TBasket.compressed_bytes`.
"""
if self.is_embedded:
if self._byte_offsets is None:
return self._data.nbytes
else:
return self._data.nbytes + 4 + self.num_entries * 4
else:
return self._members["fObjlen"]
@property
def compressed_bytes(self):
"""
The number of bytes for the compressed data, not including the header
(which is always uncompressed).
If the ``TBasket`` is uncompressed, this is equal to
:ref:`uproot.models.TBasket.Model_TBasket.uncompressed_bytes`.
"""
if self.is_embedded:
if self._byte_offsets is None:
return self._data.nbytes
else:
return self._data.nbytes + 4 + self.num_entries * 4
else:
return self._members["fNbytes"] - self._members["fKeylen"]
@property
def block_compression_info(self):
"""
For compressed ``TBaskets``, a tuple of 3-tuples containing
``(compression type class, num compressed bytes, num uncompressed bytes)``
to describe the actual compression algorithms and sizes encountered in
each block of data.
For uncompressed ``TBaskets``, this is None.
"""
return self._block_compression_info
@property
def border(self):
"""
The byte position of the boundary between data content and entry offsets.
Equal to ``self.member("fLast") - self.member("fKeylen")``.
"""
return self._members["fLast"] - self._members["fKeylen"]
def read_numbytes_version(self, chunk, cursor, context):
pass
def read_members(self, chunk, cursor, context, file):
assert isinstance(self._parent, uproot.behaviors.TBranch.TBranch)
self._basket_num = context.get("basket_num")
(
self._members["fNbytes"],
self._key_version,
self._members["fObjlen"],
self._members["fDatime"],
self._members["fKeylen"],
self._members["fCycle"],
) = cursor.fields(chunk, _tbasket_format1, context)
# skip the class name, name, and title
cursor.move_to(
self._cursor.index + self._members["fKeylen"] - _tbasket_format2.size - 1
)
(
self._members["fVersion"],
self._members["fBufferSize"],
self._members["fNevBufSize"],
self._members["fNevBuf"],
self._members["fLast"],
) = cursor.fields(chunk, _tbasket_format2, context)
cursor.skip(1)
self._block_compression_info = None
if not context.get("read_basket", True):
self._byte_offsets = None
self._raw_data = None
self._data = None
return
if self.is_embedded:
# https://github.com/root-project/root/blob/0e6282a641b65bdf5ad832882e547ca990e8f1a5/tree/tree/inc/TBasket.h#L62-L65
maybe_entry_size = self._members["fNevBufSize"]
num_entries = self._members["fNevBuf"]
key_length = self._members["fKeylen"]
if maybe_entry_size * num_entries + key_length != self._members["fLast"]:
raw_byte_offsets = cursor.bytes(
chunk, 8 + self.num_entries * 4, context
).view(_tbasket_offsets_dtype)
cursor.skip(-4)
# subtracting fKeylen makes a new buffer and converts to native endian
self._byte_offsets = raw_byte_offsets[1:] - self._members["fKeylen"]
# so modifying it in place doesn't have non-local consequences
self._byte_offsets[-1] = self.border
else:
self._byte_offsets = None
# second key has no new information
cursor.skip(self._members["fKeylen"])
self._raw_data = None
self._data = cursor.bytes(chunk, self.border, context)
else:
if self.compressed_bytes != self.uncompressed_bytes:
self._block_compression_info = []
uncompressed = uproot.compression.decompress(
chunk,
cursor,
{},
self.compressed_bytes,
self.uncompressed_bytes,
self._block_compression_info,
)
self._block_compression_info = tuple(self._block_compression_info)
self._raw_data = uncompressed.get(
0,
self.uncompressed_bytes,
uproot.source.cursor.Cursor(0),
context,
)
else:
self._raw_data = cursor.bytes(chunk, self.uncompressed_bytes, context)
if self.border != self.uncompressed_bytes:
self._data = self._raw_data[: self.border]
raw_byte_offsets = self._raw_data[self.border :].view(
_tbasket_offsets_dtype
)
# subtracting fKeylen makes a new buffer and converts to native endian
self._byte_offsets = raw_byte_offsets[1:] - self._members["fKeylen"]
# so modifying it in place doesn't have non-local consequences
self._byte_offsets[-1] = self.border
else:
self._data = self._raw_data
self._byte_offsets = None
uproot.classes["TBasket"] = Model_TBasket
| 33.913043
| 128
| 0.593315
|
d6b1e84e2cb90eb1df32e3cb33e2567e267e39e8
| 9,889
|
py
|
Python
|
speaker_id.py
|
huda-irs/Project3
|
8700cafd145425c5ca0080ca79e7020078b8d8f9
|
[
"MIT"
] | null | null | null |
speaker_id.py
|
huda-irs/Project3
|
8700cafd145425c5ca0080ca79e7020078b8d8f9
|
[
"MIT"
] | null | null | null |
speaker_id.py
|
huda-irs/Project3
|
8700cafd145425c5ca0080ca79e7020078b8d8f9
|
[
"MIT"
] | null | null | null |
# speaker_id.py
# Mirco Ravanelli
# Mila - University of Montreal
# July 2018
# Description:
# This code performs a speaker_id experiments with SincNet.
# How to run it:
# python speaker_id.py --cfg=cfg/SincNet_TIMIT.cfg
import os
#import scipy.io.wavfile
import soundfile as sf
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
import sys
import numpy as np
from dnn_models import MLP,flip
from dnn_models import SincNet as CNN
from data_io import ReadList,read_conf,str_to_bool
def create_batches_rnd(batch_size,data_folder,wav_lst,N_snt,wlen,lab_dict,fact_amp):
# Initialization of the minibatch (batch_size,[0=>x_t,1=>x_t+N,1=>random_samp])
sig_batch=np.zeros([batch_size,wlen])
lab_batch=np.zeros(batch_size)
snt_id_arr=np.random.randint(N_snt, size=batch_size)
rand_amp_arr = np.random.uniform(1.0-fact_amp,1+fact_amp,batch_size)
for i in range(batch_size):
# select a random sentence from the list
#[fs,signal]=scipy.io.wavfile.read(data_folder+wav_lst[snt_id_arr[i]])
#signal=signal.astype(float)/32768
[signal, fs] = sf.read(data_folder+wav_lst[snt_id_arr[i]])
        # accessing a random chunk of the signal
snt_len=signal.shape[0]
snt_beg=np.random.randint(snt_len-wlen-1) #randint(0, snt_len-2*wlen-1)
snt_end=snt_beg+wlen
channels = len(signal.shape)
if channels == 2:
print('WARNING: stereo to mono: '+data_folder+wav_lst[snt_id_arr[i]])
signal = signal[:,0]
sig_batch[i,:]=signal[snt_beg:snt_end]*rand_amp_arr[i]
lab_batch[i]=lab_dict[wav_lst[snt_id_arr[i]]]
inp=Variable(torch.from_numpy(sig_batch).float().cuda().contiguous())
lab=Variable(torch.from_numpy(lab_batch).float().cuda().contiguous())
return inp,lab
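# Shape sketch (hypothetical cfg values): with batch_size=128, fs=16000 and
# cw_len=200 ms, wlen = int(16000 * 200 / 1000) = 3200, so create_batches_rnd
# returns inp of shape (128, 3200) and lab of shape (128,), both on the GPU.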
# Reading cfg file
options=read_conf()
#[data]
tr_lst=options.tr_lst
te_lst=options.te_lst
pt_file=options.pt_file
class_dict_file=options.lab_dict
data_folder=options.data_folder+'/'
output_folder=options.output_folder
#[windowing]
fs=int(options.fs)
cw_len=int(options.cw_len)
cw_shift=int(options.cw_shift)
#[cnn]
cnn_N_filt=list(map(int, options.cnn_N_filt.split(',')))
cnn_len_filt=list(map(int, options.cnn_len_filt.split(',')))
cnn_max_pool_len=list(map(int, options.cnn_max_pool_len.split(',')))
cnn_use_laynorm_inp=str_to_bool(options.cnn_use_laynorm_inp)
cnn_use_batchnorm_inp=str_to_bool(options.cnn_use_batchnorm_inp)
cnn_use_laynorm=list(map(str_to_bool, options.cnn_use_laynorm.split(',')))
cnn_use_batchnorm=list(map(str_to_bool, options.cnn_use_batchnorm.split(',')))
cnn_act=list(map(str, options.cnn_act.split(',')))
cnn_drop=list(map(float, options.cnn_drop.split(',')))
#[dnn]
fc_lay=list(map(int, options.fc_lay.split(',')))
fc_drop=list(map(float, options.fc_drop.split(',')))
fc_use_laynorm_inp=str_to_bool(options.fc_use_laynorm_inp)
fc_use_batchnorm_inp=str_to_bool(options.fc_use_batchnorm_inp)
fc_use_batchnorm=list(map(str_to_bool, options.fc_use_batchnorm.split(',')))
fc_use_laynorm=list(map(str_to_bool, options.fc_use_laynorm.split(',')))
fc_act=list(map(str, options.fc_act.split(',')))
#[class]
class_lay=list(map(int, options.class_lay.split(',')))
class_drop=list(map(float, options.class_drop.split(',')))
class_use_laynorm_inp=str_to_bool(options.class_use_laynorm_inp)
class_use_batchnorm_inp=str_to_bool(options.class_use_batchnorm_inp)
class_use_batchnorm=list(map(str_to_bool, options.class_use_batchnorm.split(',')))
class_use_laynorm=list(map(str_to_bool, options.class_use_laynorm.split(',')))
class_act=list(map(str, options.class_act.split(',')))
#[optimization]
lr=float(options.lr)
batch_size=int(options.batch_size)
N_epochs=int(options.N_epochs)
N_batches=int(options.N_batches)
N_eval_epoch=int(options.N_eval_epoch)
seed=int(options.seed)
# training list
wav_lst_tr=ReadList(tr_lst)
snt_tr=len(wav_lst_tr)
# test list
wav_lst_te=ReadList(te_lst)
snt_te=len(wav_lst_te)
# Folder creation
try:
os.stat(output_folder)
except:
os.mkdir(output_folder)
# setting seed
torch.manual_seed(seed)
np.random.seed(seed)
# loss function
cost = nn.NLLLoss()
# Converting context and shift in samples
wlen=int(fs*cw_len/1000.00)
wshift=int(fs*cw_shift/1000.00)
# Batch_dev
Batch_dev=128
# Feature extractor CNN
CNN_arch = {'input_dim': wlen,
'fs': fs,
'cnn_N_filt': cnn_N_filt,
'cnn_len_filt': cnn_len_filt,
'cnn_max_pool_len':cnn_max_pool_len,
'cnn_use_laynorm_inp': cnn_use_laynorm_inp,
'cnn_use_batchnorm_inp': cnn_use_batchnorm_inp,
'cnn_use_laynorm':cnn_use_laynorm,
'cnn_use_batchnorm':cnn_use_batchnorm,
'cnn_act': cnn_act,
'cnn_drop':cnn_drop,
}
CNN_net=CNN(CNN_arch)
CNN_net.cuda()
# Loading label dictionary
#lab_dict=np.load(class_dict_file).item()
# save np.load
np_load_old = np.load
# modify the default parameters of np.load
np.load = lambda *a,**k: np_load_old(*a, allow_pickle=True, **k)
# call load_data with allow_pickle implicitly set to true
lab_dict=np.load(class_dict_file).item()
# restore np.load for future normal usage
np.load = np_load_old
DNN1_arch = {'input_dim': CNN_net.out_dim,
'fc_lay': fc_lay,
'fc_drop': fc_drop,
'fc_use_batchnorm': fc_use_batchnorm,
'fc_use_laynorm': fc_use_laynorm,
'fc_use_laynorm_inp': fc_use_laynorm_inp,
'fc_use_batchnorm_inp':fc_use_batchnorm_inp,
'fc_act': fc_act,
}
DNN1_net=MLP(DNN1_arch)
DNN1_net.cuda()
DNN2_arch = {'input_dim':fc_lay[-1] ,
'fc_lay': class_lay,
'fc_drop': class_drop,
'fc_use_batchnorm': class_use_batchnorm,
'fc_use_laynorm': class_use_laynorm,
'fc_use_laynorm_inp': class_use_laynorm_inp,
'fc_use_batchnorm_inp':class_use_batchnorm_inp,
'fc_act': class_act,
}
DNN2_net=MLP(DNN2_arch)
DNN2_net.cuda()
if pt_file!='none':
checkpoint_load = torch.load(pt_file)
CNN_net.load_state_dict(checkpoint_load['CNN_model_par'])
DNN1_net.load_state_dict(checkpoint_load['DNN1_model_par'])
DNN2_net.load_state_dict(checkpoint_load['DNN2_model_par'])
optimizer_CNN = optim.RMSprop(CNN_net.parameters(), lr=lr,alpha=0.95, eps=1e-8)
optimizer_DNN1 = optim.RMSprop(DNN1_net.parameters(), lr=lr,alpha=0.95, eps=1e-8)
optimizer_DNN2 = optim.RMSprop(DNN2_net.parameters(), lr=lr,alpha=0.95, eps=1e-8)
for epoch in range(N_epochs):
test_flag=0
CNN_net.train()
DNN1_net.train()
DNN2_net.train()
loss_sum=0
err_sum=0
for i in range(N_batches):
[inp,lab]=create_batches_rnd(batch_size,data_folder,wav_lst_tr,snt_tr,wlen,lab_dict,0.2)
pout=DNN2_net(DNN1_net(CNN_net(inp)))
pred=torch.max(pout,dim=1)[1]
loss = cost(pout, lab.long())
err = torch.mean((pred!=lab.long()).float())
optimizer_CNN.zero_grad()
optimizer_DNN1.zero_grad()
optimizer_DNN2.zero_grad()
loss.backward()
optimizer_CNN.step()
optimizer_DNN1.step()
optimizer_DNN2.step()
loss_sum=loss_sum+loss.detach()
err_sum=err_sum+err.detach()
loss_tot=loss_sum/N_batches
err_tot=err_sum/N_batches
  # full validation pass, run every N_eval_epoch epochs
if epoch%N_eval_epoch==0:
CNN_net.eval()
DNN1_net.eval()
DNN2_net.eval()
test_flag=1
loss_sum=0
err_sum=0
err_sum_snt=0
with torch.no_grad():
for i in range(snt_te):
#[fs,signal]=scipy.io.wavfile.read(data_folder+wav_lst_te[i])
#signal=signal.astype(float)/32768
[signal, fs] = sf.read(data_folder+wav_lst_te[i])
signal=torch.from_numpy(signal).float().cuda().contiguous()
lab_batch=lab_dict[wav_lst_te[i]]
# split signals into chunks
beg_samp=0
end_samp=wlen
N_fr=int((signal.shape[0]-wlen)/(wshift))
sig_arr=torch.zeros([Batch_dev,wlen]).float().cuda().contiguous()
lab= Variable((torch.zeros(N_fr+1)+lab_batch).cuda().contiguous().long())
pout=Variable(torch.zeros(N_fr+1,class_lay[-1]).float().cuda().contiguous())
count_fr=0
count_fr_tot=0
while end_samp<signal.shape[0]:
sig_arr[count_fr,:]=signal[beg_samp:end_samp]
beg_samp=beg_samp+wshift
end_samp=beg_samp+wlen
count_fr=count_fr+1
count_fr_tot=count_fr_tot+1
if count_fr==Batch_dev:
inp=Variable(sig_arr)
pout[count_fr_tot-Batch_dev:count_fr_tot,:]=DNN2_net(DNN1_net(CNN_net(inp)))
count_fr=0
sig_arr=torch.zeros([Batch_dev,wlen]).float().cuda().contiguous()
if count_fr>0:
inp=Variable(sig_arr[0:count_fr])
pout[count_fr_tot-count_fr:count_fr_tot,:]=DNN2_net(DNN1_net(CNN_net(inp)))
pred=torch.max(pout,dim=1)[1]
loss = cost(pout, lab.long())
err = torch.mean((pred!=lab.long()).float())
[val,best_class]=torch.max(torch.sum(pout,dim=0),0)
err_sum_snt=err_sum_snt+(best_class!=lab[0]).float()
loss_sum=loss_sum+loss.detach()
err_sum=err_sum+err.detach()
err_tot_dev_snt=err_sum_snt/snt_te
loss_tot_dev=loss_sum/snt_te
err_tot_dev=err_sum/snt_te
print("epoch %i, loss_tr=%f err_tr=%f loss_te=%f err_te=%f err_te_snt=%f" % (epoch, loss_tot,err_tot,loss_tot_dev,err_tot_dev,err_tot_dev_snt))
with open(output_folder+"/res.res", "a") as res_file:
res_file.write("epoch %i, loss_tr=%f err_tr=%f loss_te=%f err_te=%f err_te_snt=%f\n" % (epoch, loss_tot,err_tot,loss_tot_dev,err_tot_dev,err_tot_dev_snt))
checkpoint={'CNN_model_par': CNN_net.state_dict(),
'DNN1_model_par': DNN1_net.state_dict(),
'DNN2_model_par': DNN2_net.state_dict(),
}
torch.save(checkpoint,output_folder+'/model_raw.pkl')
else:
print("epoch %i, loss_tr=%f err_tr=%f" % (epoch, loss_tot,err_tot))
| 28.09375
| 161
| 0.708262
|
3aa2e5746464ebb3591c70453d026b81a96c36dc
| 1,923
|
py
|
Python
|
tasks/tv_raffle_handler.py
|
paipaitou/bili2.0
|
73dbe4738706b05fce57106544e4a784ccb52760
|
[
"MIT"
] | null | null | null |
tasks/tv_raffle_handler.py
|
paipaitou/bili2.0
|
73dbe4738706b05fce57106544e4a784ccb52760
|
[
"MIT"
] | null | null | null |
tasks/tv_raffle_handler.py
|
paipaitou/bili2.0
|
73dbe4738706b05fce57106544e4a784ccb52760
|
[
"MIT"
] | null | null | null |
import bili_statistics
from reqs.tv_raffle_handler import TvRaffleHandlerReq
from tasks.utils import UtilsTask
from .task_func_decorator import normal
from .base_class import ForcedTask
class TvRaffleJoinTask(ForcedTask):
TASK_NAME = 'join_tv_raffle'
    # This check runs under the superuser account; the result is broadcast afterwards.
@staticmethod
async def check(user, real_roomid):
if not await UtilsTask.is_normal_room(user, real_roomid):
return None
json_rsp = await user.req_s(TvRaffleHandlerReq.check, user, real_roomid)
next_step_settings = []
for raffle in json_rsp['data']['gift']:
raffle_id = raffle['raffleId']
raffle_type = raffle['type']
max_wait = raffle['time'] - 10
            # skip raffle ids that were already seen (de-duplication)
if not bili_statistics.is_raffleid_duplicate(raffle_id):
                user.info(f'Confirmed TV raffle {raffle_id}', with_userid=False)
next_step_setting = (-2, (raffle['time_wait'], max_wait), real_roomid, raffle_id, raffle_type)
next_step_settings.append(next_step_setting)
bili_statistics.add2raffle_ids(raffle_id, 'TV')
return next_step_settings
@staticmethod
@normal
async def work(user, real_roomid, raffle_id, raffle_type):
json_rsp = await user.req_s(TvRaffleHandlerReq.join, user, real_roomid, raffle_id, raffle_type)
        bili_statistics.add2joined_raffles('TV raffle (total)', user.id)
code = json_rsp['code']
if not code:
data = json_rsp['data']
gift_name = data['award_name']
gift_num = data['award_num']
            user.info(f'TV raffle ({raffle_id}) result: {gift_name} x {gift_num}')
bili_statistics.add2results(gift_name, user.id, gift_num)
        elif code == -403 and '拒绝' in json_rsp['msg']:  # '拒绝' ("refused") must stay as-is to match the server reply
user.fall_in_jail()
else:
            user.info(f'TV raffle ({raffle_id}) result: {json_rsp}')
| 40.914894
| 110
| 0.643786
|
5ad9fd5958626a6c413522211822fee96b688c75
| 2,221
|
py
|
Python
|
virtual/lib/python3.6/site-packages/alembic/templates/pylons/env.py
|
mzazakeith/flask-blog
|
2833404cc5e96ffdbfb767f35b9caf2bdcce7997
|
[
"MIT"
] | 207
|
2018-10-01T08:53:01.000Z
|
2022-03-14T12:15:54.000Z
|
virtual/lib/python3.6/site-packages/alembic/templates/pylons/env.py
|
annstella/blog
|
1cdb7e7e7df028a84fae9b7d901116aae577589d
|
[
"MIT"
] | 32
|
2018-05-01T05:24:43.000Z
|
2022-03-11T23:20:39.000Z
|
virtual/lib/python3.6/site-packages/alembic/templates/pylons/env.py
|
annstella/blog
|
1cdb7e7e7df028a84fae9b7d901116aae577589d
|
[
"MIT"
] | 53
|
2019-03-12T16:50:21.000Z
|
2022-03-15T23:16:18.000Z
|
"""Pylons bootstrap environment.
Place 'pylons_config_file' into alembic.ini, and the application will
be loaded from there.
"""
from alembic import context
from paste.deploy import loadapp
from logging.config import fileConfig
from sqlalchemy.engine.base import Engine
try:
    # if a pylons app is already loaded, don't create a new one
from pylons import config as pylons_config
pylons_config['__file__']
except Exception:  # pylons config is not loaded; bootstrap from alembic.ini instead
config = context.config
# can use config['__file__'] here, i.e. the Pylons
# ini file, instead of alembic.ini
config_file = config.get_main_option('pylons_config_file')
fileConfig(config_file)
wsgi_app = loadapp('config:%s' % config_file, relative_to='.')
# customize this section for non-standard engine configurations.
meta = __import__("%s.model.meta" % wsgi_app.config['pylons.package']).model.meta
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(
url=meta.engine.url, target_metadata=target_metadata,
literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# specify here how the engine is acquired
# engine = meta.engine
raise NotImplementedError("Please specify engine connectivity here")
with engine.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| 28.113924
| 81
| 0.717244
|
6894b705cec8bc726dffebb450d66c1eb9ba98fb
| 914
|
py
|
Python
|
gcloud_dataproc/v02/run_script_GRCh38.py
|
NLSVTN/hail-elasticsearch-pipelines
|
8b895a2e46a33d347dd2a1024101a6d515027a03
|
[
"MIT"
] | null | null | null |
gcloud_dataproc/v02/run_script_GRCh38.py
|
NLSVTN/hail-elasticsearch-pipelines
|
8b895a2e46a33d347dd2a1024101a6d515027a03
|
[
"MIT"
] | null | null | null |
gcloud_dataproc/v02/run_script_GRCh38.py
|
NLSVTN/hail-elasticsearch-pipelines
|
8b895a2e46a33d347dd2a1024101a6d515027a03
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import argparse
import os
import random
import sys
from kubernetes.shell_utils import simple_run as run
unique_id = random.randint(10**5, 10**6 - 1)
random_cluster_name = "vep-grch38-%s" % unique_id
p = argparse.ArgumentParser()
p.add_argument("-c", "--cluster", default=random_cluster_name)
p.add_argument("script")
args, unparsed_args = p.parse_known_args()
cluster_name = args.cluster
script = args.script
script_args = " ".join(['"%s"' % arg for arg in unparsed_args])
os.chdir(os.path.join(os.path.dirname(__file__), "../.."))
run("python gcloud_dataproc/v02/create_cluster_GRCh38.py %(cluster_name)s 2 12" % locals())
if "-h" in sys.argv or "--help" in sys.argv:
run("python %(script)s -h" % locals())
sys.exit(0)
run((
"time ./gcloud_dataproc/submit.py "
"--hail-version 0.2 "
"--cluster %(cluster_name)s "
"%(script)s %(script_args)s") % locals())
| 24.702703
| 91
| 0.689278
|
4d3720041c316a3b4a763f8342d8c13dfb03b0d2
| 204
|
py
|
Python
|
Module05/debug.py
|
geiyer/cis189-python
|
2b85ff66277b337aab9f6a7a6fa1c86dccf0178c
|
[
"MIT"
] | 2
|
2021-02-24T00:32:36.000Z
|
2021-04-21T00:09:36.000Z
|
Module05/debug.py
|
geiyer/cis189-python
|
2b85ff66277b337aab9f6a7a6fa1c86dccf0178c
|
[
"MIT"
] | null | null | null |
Module05/debug.py
|
geiyer/cis189-python
|
2b85ff66277b337aab9f6a7a6fa1c86dccf0178c
|
[
"MIT"
] | 2
|
2021-03-30T23:37:49.000Z
|
2021-04-21T00:08:32.000Z
|
def print_to_number(number):
""" Prints to the number value passed in, beginning at 1"""
for counter in range(1,number):
print (counter)
if __name__ == "__main__":
print_to_number(5)
| 25.5
| 63
| 0.666667
|
12ee15dcf7ae3ee454aabc308ad18944c68650be
| 5,908
|
py
|
Python
|
nnclicker/preprocess.py
|
wbwvos/IRNeuralClickModels
|
942ccc23f1615473a30d18e2ee55381f7bcea170
|
[
"MIT"
] | null | null | null |
nnclicker/preprocess.py
|
wbwvos/IRNeuralClickModels
|
942ccc23f1615473a30d18e2ee55381f7bcea170
|
[
"MIT"
] | null | null | null |
nnclicker/preprocess.py
|
wbwvos/IRNeuralClickModels
|
942ccc23f1615473a30d18e2ee55381f7bcea170
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os.path
import cPickle as pickle
from utils import get_index_from_click_pattern
from utils import dict_batch_writer
__author__ = 'Wolf Vos, Casper Thuis, Alexander van Someren, Jeroen Rooijmans'
class NNclickParser(object):
"""
A Parser for the Yandex database, available from the Yandex Personalized
Web Search Challenge from Kaggle.
(https://www.kaggle.com/c/yandex-personalized-web-search-challenge)
"""
def __init__(self):
self.TOTAL_NUMBER_OF_QUERIES = 65172853
self.query_sessions = None
self.query_docs = None
self.queries = None
self.docs = None
self.users = None
def parse(self, session_filename, sessions_start=None, sessions_max=None):
"""
        Function that parses a file containing search sessions,
        formatted according to the Yandex Personalized Web Search Database
        (https://www.kaggle.com/c/yandex-personalized-web-search-challenge/data),
        and stores a list of search sessions.
Session file contains either query of click actions:
Format query action:
SessionID; TimePassed; TypeOfRecord; SERPID; QueryID; ListOfTerms;
ListOfURLsAndDomains
Format search action:
SessionID; TimePassed; TypeOfRecord; SERPID; URLID
        :param session_filename: name of the file containing search sessions
        :param sessions_start: session id from which the parser starts parsing
        :param sessions_max: maximal number of search sessions that are parsed;
            if not set, all search sessions are parsed and returned
"""
sessions_file = open(session_filename, "r")
sessions = []
session_id = None
for line in sessions_file:
            if sessions_max and (sessions_max - (sessions_start or 0)) <= len(sessions):
break
entry_array = line.strip().split("\t")
# continue until session_start is reached
            if sessions_start and sessions_start > int(entry_array[0]):
continue
if len(entry_array) <= 5 and entry_array[1] == "M":
user_id = entry_array[3]
# check if line is query action
if len(entry_array) >= 6 and entry_array[2] == "Q":
click_pattern = 10 * [0]
session_id = entry_array[0]
query_id = entry_array[4]
doc_urls = [comb.split(",")[0] for comb in entry_array[6::]]
session = {"query_id": query_id,
"doc_urls": doc_urls,
"click_pattern": click_pattern,
"user_id": user_id}
sessions.append(session)
# if we have found a query, check if line is click action
if session_id and len(entry_array) == 5 and entry_array[2] == "C":
if entry_array[0] == session_id:
clicked_doc = entry_array[4]
if clicked_doc in doc_urls:
click_pattern[doc_urls.index(clicked_doc)] = 1
# store sessions
self.sessions = sessions
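    # Example query line from the session log (tab-separated, values are
    # hypothetical) as consumed by parse() above:
    #   744899  1  Q  0  13337  1176,2709  5681483,3685932  1545154,879209 ...
    # i.e. SessionID, TimePassed, 'Q', SERPID, QueryID, ListOfTerms, then one
    # "URL,Domain" pair per result.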
def write_sessions(self, filename):
"""
Function that writes list of search sessions to pickle file
"""
with open(filename, "w") as f:
pickle.dump(self.sessions, f, -1)
def write_query_docs(self, filename):
"""
Function that writes query doc dicts to pickle file
"""
# print "Number of documents: " + str(len(self.docs))
dict_batch_writer(self.query_docs, filename + "-qd")
dict_batch_writer(self.queries, filename + "-q")
dict_batch_writer(self.docs, filename + "-d")
def load_sessions(self, filename):
"""
Function that loads list of search sessions from pickle file
"""
if os.path.isfile(filename):
with open(filename, "rb") as f:
query_sessions = pickle.load(f)
self.query_sessions = query_sessions
def load_query_docs(self, filename):
"""
Function that loads dics with query documents from pickle file
"""
if os.path.isfile(filename):
with open(filename, "rb") as f:
query_docs = pickle.load(f)
self.query_docs = query_docs
def create_data_dicts(self):
"""
Function that creates dictionaries to store the preprocessed data
"""
self.query_docs = {}
self.queries = {}
self.docs = {}
for query in self.query_sessions:
query_doc = self.query_docs.get(query["query_id"], {})
serps = query_doc.get("serps", [])
serps.append({
"doc_ids": query["doc_urls"],
"click_pattern": query["click_pattern"]
})
query_doc["serps"] = serps
# append index to query representation (for set 2)
query_indices = self.queries.get(query["query_id"], [])
query_indices.append(get_index_from_click_pattern(query["click_pattern"]))
self.queries[query["query_id"]] = query_indices
# append user id
query_doc['user_id'] = query['user_id']
for (doc_location, doc_id) in enumerate(query["doc_urls"]):
index = get_index_from_click_pattern(query["click_pattern"], doc_location)
# append index to query-document representation
query_doc_indices = query_doc.get(doc_id, [])
query_doc_indices.append(index)
query_doc[doc_id] = query_doc_indices
# append index to document representation (for set 3)
doc_indices = self.docs.get(doc_id, [])
doc_indices.append(index)
self.docs[doc_id] = doc_indices
self.query_docs[query["query_id"]] = query_doc
| 37.157233
| 90
| 0.593433
|
6aa8ce37b53e338abf62ecdfddbf79639858690e
| 4,296
|
py
|
Python
|
research/delf/delf/python/examples/match_images.py
|
xychu/models
|
0344c5503ee55e24f0de7f37336a6e08f10976fd
|
[
"Apache-2.0"
] | 79
|
2019-03-02T17:40:25.000Z
|
2021-08-17T13:22:03.000Z
|
research/delf/delf/python/examples/match_images.py
|
xychu/models
|
0344c5503ee55e24f0de7f37336a6e08f10976fd
|
[
"Apache-2.0"
] | 7
|
2019-01-07T16:49:27.000Z
|
2020-04-28T16:48:52.000Z
|
research/delf/delf/python/examples/match_images.py
|
xychu/models
|
0344c5503ee55e24f0de7f37336a6e08f10976fd
|
[
"Apache-2.0"
] | 43
|
2018-12-28T15:01:44.000Z
|
2022-02-15T06:23:05.000Z
|
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Matches two images using their DELF features.
The matching is done using feature-based nearest-neighbor search, followed by
geometric verification using RANSAC.
The DELF features can be extracted using the extract_features.py script.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
from scipy.spatial import cKDTree
from skimage.feature import plot_matches
from skimage.measure import ransac
from skimage.transform import AffineTransform
import tensorflow as tf
from tensorflow.python.platform import app
from delf import feature_io
cmd_args = None
_DISTANCE_THRESHOLD = 0.8
def main(unused_argv):
tf.logging.set_verbosity(tf.logging.INFO)
# Read features.
locations_1, _, descriptors_1, _, _ = feature_io.ReadFromFile(
cmd_args.features_1_path)
num_features_1 = locations_1.shape[0]
tf.logging.info("Loaded image 1's %d features" % num_features_1)
locations_2, _, descriptors_2, _, _ = feature_io.ReadFromFile(
cmd_args.features_2_path)
num_features_2 = locations_2.shape[0]
tf.logging.info("Loaded image 2's %d features" % num_features_2)
# Find nearest-neighbor matches using a KD tree.
d1_tree = cKDTree(descriptors_1)
_, indices = d1_tree.query(
descriptors_2, distance_upper_bound=_DISTANCE_THRESHOLD)
# Select feature locations for putative matches.
locations_2_to_use = np.array([
locations_2[i,]
for i in range(num_features_2)
if indices[i] != num_features_1
])
locations_1_to_use = np.array([
locations_1[indices[i],]
for i in range(num_features_2)
if indices[i] != num_features_1
])
# Perform geometric verification using RANSAC.
_, inliers = ransac(
(locations_1_to_use, locations_2_to_use),
AffineTransform,
min_samples=3,
residual_threshold=20,
max_trials=1000)
tf.logging.info('Found %d inliers' % sum(inliers))
# Visualize correspondences, and save to file.
_, ax = plt.subplots()
img_1 = mpimg.imread(cmd_args.image_1_path)
img_2 = mpimg.imread(cmd_args.image_2_path)
inlier_idxs = np.nonzero(inliers)[0]
plot_matches(
ax,
img_1,
img_2,
locations_1_to_use,
locations_2_to_use,
np.column_stack((inlier_idxs, inlier_idxs)),
matches_color='b')
ax.axis('off')
ax.set_title('DELF correspondences')
plt.savefig(cmd_args.output_image)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument(
'--image_1_path',
type=str,
default='test_images/image_1.jpg',
help="""
Path to test image 1.
""")
parser.add_argument(
'--image_2_path',
type=str,
default='test_images/image_2.jpg',
help="""
Path to test image 2.
""")
parser.add_argument(
'--features_1_path',
type=str,
default='test_features/image_1.delf',
help="""
Path to DELF features from image 1.
""")
parser.add_argument(
'--features_2_path',
type=str,
default='test_features/image_2.delf',
help="""
Path to DELF features from image 2.
""")
parser.add_argument(
'--output_image',
type=str,
default='test_match.png',
help="""
Path where an image showing the matches will be saved.
""")
cmd_args, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
| 29.627586
| 80
| 0.693901
|
d3f0c293287a3c90c4544793259b31067c455599
| 19,425
|
py
|
Python
|
pyuavcan/transport/udp/_udp.py
|
WshgL/pyuavcan
|
f2b8d2d743f09ad4af8d62fc96d8f0b013aeb8b0
|
[
"MIT"
] | null | null | null |
pyuavcan/transport/udp/_udp.py
|
WshgL/pyuavcan
|
f2b8d2d743f09ad4af8d62fc96d8f0b013aeb8b0
|
[
"MIT"
] | null | null | null |
pyuavcan/transport/udp/_udp.py
|
WshgL/pyuavcan
|
f2b8d2d743f09ad4af8d62fc96d8f0b013aeb8b0
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2019 UAVCAN Consortium
# This software is distributed under the terms of the MIT License.
# Author: Pavel Kirienko <pavel@uavcan.org>
import copy
import typing
import asyncio
import logging
import warnings
import ipaddress
import dataclasses
import pyuavcan
from ._session import UDPInputSession, SelectiveUDPInputSession, PromiscuousUDPInputSession
from ._session import UDPOutputSession
from ._frame import UDPFrame
from ._ip import SocketFactory, Sniffer, LinkLayerCapture, unicast_ip_to_node_id, node_id_to_unicast_ip
from ._socket_reader import SocketReader, SocketReaderStatistics
from ._tracer import UDPTracer, UDPCapture
_logger = logging.getLogger(__name__)
@dataclasses.dataclass
class UDPTransportStatistics(pyuavcan.transport.TransportStatistics):
received_datagrams: typing.Dict[pyuavcan.transport.DataSpecifier, SocketReaderStatistics] = dataclasses.field(
default_factory=dict
)
"""
Basic input session statistics: instances of :class:`SocketReaderStatistics` keyed by data specifier.
"""
class UDPTransport(pyuavcan.transport.Transport):
"""
The UAVCAN/UDP (IP v4/v6) transport is designed for low-latency, high-throughput, high-reliability
vehicular networks based on Ethernet.
Please read the module documentation for details.
"""
TRANSFER_ID_MODULO = UDPFrame.TRANSFER_ID_MASK + 1
VALID_MTU_RANGE = 1200, 9000
"""
The minimum is based on the IPv6 specification, which guarantees that the path MTU is at least 1280 bytes large.
This value is also acceptable for virtually all IPv4 local or real-time networks.
Lower MTU values shall not be used because they may lead to multi-frame transfer fragmentation where this is
not expected by the designer, possibly violating the real-time constraints.
A conventional Ethernet jumbo frame can carry up to 9 KiB (9216 bytes).
These are the application-level MTU values, so we take overheads into account.
"""
VALID_SERVICE_TRANSFER_MULTIPLIER_RANGE = (1, 5)
def __init__(
self,
local_ip_address: typing.Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address],
local_node_id: typing.Optional[int] = -1,
*,
mtu: int = min(VALID_MTU_RANGE),
service_transfer_multiplier: int = 1,
loop: typing.Optional[asyncio.AbstractEventLoop] = None,
anonymous: bool = False,
):
"""
:param local_ip_address: Specifies which local IP address to use for this transport.
This setting also implicitly specifies the network interface to use.
All output sockets will be bound (see ``bind()``) to the specified local address.
If the specified address is not available locally, the transport will fail with
:class:`pyuavcan.transport.InvalidMediaConfigurationError`.
For use on the loopback interface, any IP address from the loopback range can be used;
for example, ``127.0.0.123``.
This generally does not work with physical interfaces;
for example, if a host has one physical interface at ``192.168.1.200``,
an attempt to run a node at ``192.168.1.201`` will trigger the media configuration error
because ``bind()`` will fail with ``EADDRNOTAVAIL``.
One can change the node-ID of a physical transport by altering the network
interface configuration in the underlying operating system itself.
Using ``INADDR_ANY`` here (i.e., ``0.0.0.0`` for IPv4) is not expected to work reliably or be portable
because this configuration is, generally, incompatible with multicast sockets (even in the anonymous mode).
In order to set up even a listening multicast socket, it is necessary to specify the correct local
address such that the underlying IP stack is aware of which interface to receive multicast packets from.
When the anonymous mode is enabled, it is quite possible to snoop on the network even if there is
another node running locally on the same interface
(because sockets are initialized with ``SO_REUSEADDR`` and ``SO_REUSEPORT``, when available).
:param local_node_id: As explained previously, the node-ID is part of the IP address,
but this parameter allows one to use the UDP transport in anonymous mode or easily build the
node IP address from a subnet address (like ``127.42.0.0``) and a node-ID.
- If the value is negative, the node-ID equals the 16 least significant bits of the ``local_ip_address``.
This is the default behavior.
- If the value is None, an anonymous instance will be constructed,
where the transport will reject any attempt to create an output session.
The transport instance will also report its own :attr:`local_node_id` as None.
The UAVCAN/UDP transport does not support anonymous transfers by design.
- If the value is a non-negative integer, the 16 least significant bits of the ``local_ip_address``
are replaced with this value.
Examples:
+-----------------------+-------------------+----------------------------+--------------------------+
| ``local_ip_address`` | ``local_node_id`` | Local IP address | Local node-ID |
+=======================+===================+============================+==========================+
| 127.42.1.200 | (default) | 127.42.1.200 | 456 (from IP address) |
+-----------------------+-------------------+----------------------------+--------------------------+
| 127.42.1.200 | 42 | 127.42.0.42 | 42 |
+-----------------------+-------------------+----------------------------+--------------------------+
| 127.42.0.0 | 42 | 127.42.0.42 | 42 |
+-----------------------+-------------------+----------------------------+--------------------------+
| 127.42.1.200 | None | 127.42.1.200 | anonymous |
+-----------------------+-------------------+----------------------------+--------------------------+
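        A construction sketch mirroring the table above (the addresses are
        illustrative, taken from the loopback examples in this docstring)::

            UDPTransport('127.42.1.200')                       # node-ID 456, from the IP
            UDPTransport('127.42.0.0', local_node_id=42)       # node-ID 42
            UDPTransport('127.42.1.200', local_node_id=None)   # anonymous, input only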
:param mtu: The application-level MTU for outgoing packets.
In other words, this is the maximum number of serialized bytes per UAVCAN/UDP frame.
Transfers where the number of payload bytes does not exceed this value will be single-frame transfers,
otherwise, multi-frame transfers will be used.
This setting affects only outgoing frames;
the MTU of incoming frames is fixed at a sufficiently large value to accept any meaningful UDP frame.
The default value is the smallest valid value for reasons of compatibility.
:param service_transfer_multiplier: Deterministic data loss mitigation is disabled by default.
This parameter specifies the number of times each outgoing service transfer will be repeated.
This setting does not affect message transfers.
:param loop: Deprecated.
:param anonymous: DEPRECATED and scheduled for removal; replace with ``local_node_id=None``.
"""
if anonymous: # Backward compatibility. Will be removed.
local_node_id = None
warnings.warn("Parameter 'anonymous' is deprecated. Use 'local_node_id=None' instead.", DeprecationWarning)
if loop:
warnings.warn("The loop parameter is deprecated.", DeprecationWarning)
if not isinstance(local_ip_address, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
local_ip_address = ipaddress.ip_address(local_ip_address)
assert not isinstance(local_ip_address, str)
if local_node_id is not None and local_node_id >= 0:
local_ip_address = node_id_to_unicast_ip(local_ip_address, local_node_id)
self._sock_factory = SocketFactory.new(local_ip_address)
self._anonymous = local_node_id is None
self._mtu = int(mtu)
self._srv_multiplier = int(service_transfer_multiplier)
low, high = self.VALID_SERVICE_TRANSFER_MULTIPLIER_RANGE
if not (low <= self._srv_multiplier <= high):
raise ValueError(f"Invalid service transfer multiplier: {self._srv_multiplier}")
low, high = self.VALID_MTU_RANGE
if not (low <= self._mtu <= high):
raise ValueError(f"Invalid MTU: {self._mtu} bytes")
self._socket_reader_registry: typing.Dict[pyuavcan.transport.DataSpecifier, SocketReader] = {}
self._input_registry: typing.Dict[pyuavcan.transport.InputSessionSpecifier, UDPInputSession] = {}
self._output_registry: typing.Dict[pyuavcan.transport.OutputSessionSpecifier, UDPOutputSession] = {}
self._sniffer: typing.Optional[Sniffer] = None
self._capture_handlers: typing.List[pyuavcan.transport.CaptureCallback] = []
self._closed = False
self._statistics = UDPTransportStatistics()
assert (local_node_id is None) or (local_node_id < 0) or (self.local_node_id == local_node_id)
assert (self.local_node_id is None) or (0 <= self.local_node_id <= 0xFFFF)
_logger.debug("%s: Initialized with local node-ID %s", self, self.local_node_id)
@property
def protocol_parameters(self) -> pyuavcan.transport.ProtocolParameters:
return pyuavcan.transport.ProtocolParameters(
transfer_id_modulo=self.TRANSFER_ID_MODULO,
max_nodes=self._sock_factory.max_nodes,
mtu=self._mtu,
)
@property
def local_node_id(self) -> typing.Optional[int]:
addr = self._sock_factory.local_ip_address
return None if self._anonymous else unicast_ip_to_node_id(addr, addr)
def close(self) -> None:
self._closed = True
for s in (*self.input_sessions, *self.output_sessions):
try:
s.close()
except Exception as ex: # pragma: no cover
_logger.exception("%s: Failed to close %r: %s", self, s, ex)
if self._sniffer is not None:
self._sniffer.close()
self._sniffer = None
def get_input_session(
self, specifier: pyuavcan.transport.InputSessionSpecifier, payload_metadata: pyuavcan.transport.PayloadMetadata
) -> UDPInputSession:
self._ensure_not_closed()
if specifier not in self._input_registry:
self._setup_input_session(specifier, payload_metadata)
assert specifier.data_specifier in self._socket_reader_registry
out = self._input_registry[specifier]
assert isinstance(out, UDPInputSession)
assert out.specifier == specifier
return out
def get_output_session(
self, specifier: pyuavcan.transport.OutputSessionSpecifier, payload_metadata: pyuavcan.transport.PayloadMetadata
) -> UDPOutputSession:
self._ensure_not_closed()
if specifier not in self._output_registry:
if self.local_node_id is None:
# In UAVCAN/UDP, the anonymous mode is somewhat bolted-on.
            # The underlying protocol (IP) does not have the concept of an anonymous packet.
# We add it artificially as an implementation detail of this library.
raise pyuavcan.transport.OperationNotDefinedForAnonymousNodeError(
"Cannot create an output session instance because this UAVCAN/UDP transport instance is "
"configured in the anonymous mode."
)
def finalizer() -> None:
del self._output_registry[specifier]
multiplier = (
self._srv_multiplier
if isinstance(specifier.data_specifier, pyuavcan.transport.ServiceDataSpecifier)
else 1
)
sock = self._sock_factory.make_output_socket(specifier.remote_node_id, specifier.data_specifier)
self._output_registry[specifier] = UDPOutputSession(
specifier=specifier,
payload_metadata=payload_metadata,
mtu=self._mtu,
multiplier=multiplier,
sock=sock,
finalizer=finalizer,
)
out = self._output_registry[specifier]
assert isinstance(out, UDPOutputSession)
assert out.specifier == specifier
return out
def sample_statistics(self) -> UDPTransportStatistics:
return copy.copy(self._statistics)
@property
def input_sessions(self) -> typing.Sequence[UDPInputSession]:
return list(self._input_registry.values())
@property
def output_sessions(self) -> typing.Sequence[UDPOutputSession]:
return list(self._output_registry.values())
@property
def local_ip_address(self) -> typing.Union[ipaddress.IPv4Address, ipaddress.IPv6Address]:
return self._sock_factory.local_ip_address
def begin_capture(self, handler: pyuavcan.transport.CaptureCallback) -> None:
"""
Reported events are of type :class:`UDPCapture`.
In order for the network capture to work, the local machine should be connected to a SPAN port of the switch.
See https://en.wikipedia.org/wiki/Port_mirroring and read the documentation for your networking hardware.
Additional preconditions must be met depending on the platform:
- On GNU/Linux, network capture requires that either the process is executed by root,
or the raw packet capture capability ``CAP_NET_RAW`` is enabled.
For more info read ``man 7 capabilities`` and consider checking the docs for Wireshark/libpcap.
- On Windows, Npcap needs to be installed and configured; see https://nmap.org/npcap/.
Packets that do not originate from the current UAVCAN/UDP subnet (configured on this transport instance)
are not reported via this interface.
This restriction is critical because there may be other UAVCAN/UDP networks running on the same physical
L2 network segregated by different subnets, so that if foreign packets were not dropped,
conflicts would occur.
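        A minimal usage sketch; the handler will receive :class:`UDPCapture` events::

            def on_capture(cap: pyuavcan.transport.Capture) -> None:
                print(cap)

            transport.begin_capture(on_capture)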
"""
self._ensure_not_closed()
if self._sniffer is None:
_logger.debug("%s: Starting UDP/IP packet capture (hope you have permissions)", self)
self._sniffer = self._sock_factory.make_sniffer(self._process_capture)
self._capture_handlers.append(handler)
@property
def capture_active(self) -> bool:
return self._sniffer is not None
@staticmethod
def make_tracer() -> UDPTracer:
"""
See :class:`UDPTracer`.
"""
return UDPTracer()
async def spoof(self, transfer: pyuavcan.transport.AlienTransfer, monotonic_deadline: float) -> bool:
"""
Not implemented yet. Always raises :class:`NotImplementedError`.
When implemented, this method will rely on libpcap to emit spoofed link-layer packets.
"""
raise NotImplementedError
def _setup_input_session(
self, specifier: pyuavcan.transport.InputSessionSpecifier, payload_metadata: pyuavcan.transport.PayloadMetadata
) -> None:
"""
In order to set up a new input session, we have to link together a lot of objects. Tricky.
Also, the setup and teardown actions shall be atomic. Hence the separate method.
"""
assert specifier not in self._input_registry
try:
if specifier.data_specifier not in self._socket_reader_registry:
_logger.debug(
"%r: Setting up new socket reader for %s. Existing entries at the moment: %s",
self,
specifier.data_specifier,
self._socket_reader_registry,
)
self._socket_reader_registry[specifier.data_specifier] = SocketReader(
sock=self._sock_factory.make_input_socket(specifier.data_specifier),
local_ip_address=self._sock_factory.local_ip_address,
anonymous=self._anonymous,
statistics=self._statistics.received_datagrams.setdefault(
specifier.data_specifier, SocketReaderStatistics()
),
)
cls: typing.Union[typing.Type[PromiscuousUDPInputSession], typing.Type[SelectiveUDPInputSession]] = (
PromiscuousUDPInputSession if specifier.is_promiscuous else SelectiveUDPInputSession
)
session = cls(
specifier=specifier,
payload_metadata=payload_metadata,
finalizer=lambda: self._teardown_input_session(specifier),
)
self._socket_reader_registry[specifier.data_specifier].add_listener(
specifier.remote_node_id, session._process_frame # pylint: disable=protected-access
)
except Exception:
self._teardown_input_session(specifier) # Rollback to ensure atomicity.
raise
self._input_registry[specifier] = session
def _teardown_input_session(self, specifier: pyuavcan.transport.InputSessionSpecifier) -> None:
"""
The finalizer may be invoked at any point during the setup process,
so it must be able to deconstruct the pipeline even if it is not fully set up.
This is why we have these try-except everywhere. Who knew that atomic transactions can be so messy?
"""
# Unregister the session first.
try:
del self._input_registry[specifier]
except LookupError:
pass
# Remove the session from the list of socket reader listeners.
try:
demux = self._socket_reader_registry[specifier.data_specifier]
except LookupError:
pass # The reader has not been set up yet, nothing to do.
else:
try:
demux.remove_listener(specifier.remote_node_id)
except LookupError:
pass
# Destroy the reader if there are no listeners left.
if not demux.has_listeners:
try:
_logger.debug("%r: Destroying %r for %s", self, demux, specifier.data_specifier)
demux.close()
finally:
del self._socket_reader_registry[specifier.data_specifier]
def _process_capture(self, capture: LinkLayerCapture) -> None:
"""This handler may be invoked from a different thread (the capture thread)."""
pyuavcan.util.broadcast(self._capture_handlers)(UDPCapture(capture.timestamp, capture.packet))
def _ensure_not_closed(self) -> None:
if self._closed:
raise pyuavcan.transport.ResourceClosedError(f"{self} is closed")
def _get_repr_fields(self) -> typing.Tuple[typing.List[typing.Any], typing.Dict[str, typing.Any]]:
return [repr(str(self.local_ip_address))], {
"local_node_id": self.local_node_id,
"service_transfer_multiplier": self._srv_multiplier,
"mtu": self._mtu,
}
| 49.553571
| 120
| 0.639794
|
b82e94cfcec7a71d4366d98e0676ac1744960d21
| 1,005
|
py
|
Python
|
GeneticAlgorithm/StringChromosome.py
|
Bastianleaf/TravellingSalesman
|
fafd7bb2e2ac79abc23bc261899e7d89cd0d8e9e
|
[
"Apache-2.0"
] | null | null | null |
GeneticAlgorithm/StringChromosome.py
|
Bastianleaf/TravellingSalesman
|
fafd7bb2e2ac79abc23bc261899e7d89cd0d8e9e
|
[
"Apache-2.0"
] | null | null | null |
GeneticAlgorithm/StringChromosome.py
|
Bastianleaf/TravellingSalesman
|
fafd7bb2e2ac79abc23bc261899e7d89cd0d8e9e
|
[
"Apache-2.0"
] | null | null | null |
import random
class StringChromosome:
def __init__(self, value):
self.value = value
self.mutation_rate = 0.01
self.score = 0
def evaluate_fitness(self, solution):
score = 0
for a, b in zip(self.value, solution):
if a.value == b:
score += 1
self.score = score
def set_mutation_rate(self, rate):
"""
		Sets the mutation rate.
		:param rate: the new mutation rate
"""
self.mutation_rate = rate
def mutation(self):
if random.uniform(0, 1) < self.mutation_rate:
index = random.randint(0, len(self.value) - 1)
self.value[index].mutate()
			# The gene mutates in place, so no list reconstruction is needed here.
def reproduction(self, gen):
random_index = random.randint(0, len(self.value) - 1)
gen_value = []
for i in range(0, random_index):
gen_value.append(self.value[i])
for j in range(random_index, len(self.value)):
gen_value.append(gen.value[j])
child = StringChromosome(gen_value)
child.mutation()
return child
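# Usage sketch (assumes a gene type exposing `.value` and an in-place `.mutate()`,
# as required by evaluate_fitness() and mutation() above):
#   parent_a = StringChromosome(genes_a)
#   parent_b = StringChromosome(genes_b)
#   child = parent_a.reproduction(parent_b)  # one-point crossover, then maybe mutation
#   child.evaluate_fitness("target string")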
| 22.840909
| 81
| 0.674627
|
1b987f84fc9b243958a35404cb6f88bd5094434f
| 724
|
py
|
Python
|
Python-Programs/vowel_consonant_detection/vowel_consonant_detection.py
|
adityaverma121/Simple-Programs
|
8450560b97f89e0fa3da16a623ad35c0b26409c9
|
[
"MIT"
] | 71
|
2021-09-30T11:25:12.000Z
|
2021-10-03T11:33:22.000Z
|
Python-Programs/vowel_consonant_detection/vowel_consonant_detection.py
|
adityaverma121/Simple-Programs
|
8450560b97f89e0fa3da16a623ad35c0b26409c9
|
[
"MIT"
] | 186
|
2021-09-30T12:25:16.000Z
|
2021-10-03T13:45:04.000Z
|
Python-Programs/vowel_consonant_detection/vowel_consonant_detection.py
|
adityaverma121/Simple-Programs
|
8450560b97f89e0fa3da16a623ad35c0b26409c9
|
[
"MIT"
] | 385
|
2021-09-30T11:34:23.000Z
|
2021-10-03T13:41:00.000Z
|
# This code counts the vowels and consonants in a string
string = input("Please insert a string : ").lower()
vowel_counts = {}
consonant_counts = {}
# Define vowels and consonants
vowels = ["a", "e", "i", "o", "u"]
consonants = [
"b",
"c",
"d",
"f",
"g",
"h",
"j",
"k",
"l",
"m",
"n",
"p",
"q",
"r",
"s",
"t",
"v",
"w",
"y",
"z",
]
for char in string:
    if char in vowels:
        vowel_counts[char] = string.count(char)
    elif char in consonants:
        consonant_counts[char] = string.count(char)
print(f"\nVowel Count : {vowel_counts}")
print(f"Consonant Count : {consonant_counts}")
| 16.837209
| 59
| 0.517956
|
b05b2fbf1c7a48fba6b9123ded939059b7615e60
| 549
|
py
|
Python
|
web/election/context_processors.py
|
mhamzaerol/kisaweb
|
6852bbfb018f14df6caeaf75c4087553df6a1357
|
[
"MIT"
] | 2
|
2020-11-06T10:32:52.000Z
|
2021-11-05T06:56:08.000Z
|
web/election/context_processors.py
|
mhamzaerol/kisaweb
|
6852bbfb018f14df6caeaf75c4087553df6a1357
|
[
"MIT"
] | 29
|
2020-10-09T18:29:11.000Z
|
2022-03-12T14:23:36.000Z
|
web/election/context_processors.py
|
mhamzaerol/kisaweb
|
6852bbfb018f14df6caeaf75c4087553df6a1357
|
[
"MIT"
] | 11
|
2020-10-26T03:59:47.000Z
|
2021-10-04T07:03:44.000Z
|
from django.shortcuts import HttpResponse
from django.utils import timezone
from election.models import Election
# def navbar_election_link_visible(request):
# latest_election = Election.objects.latest('start_datetime')
# semyear = str(latest_election).replace(' ', '-')
# now = timezone.now()
# # visible = now >= latest_election.start_datetime and now <= latest_election.end_datetime
# visible = True
# return {
# 'election_link_visible': visible,
# 'election_semyear': semyear,
# }
# return True
| 32.294118
| 95
| 0.695811
|
d819a53c0e15b68b43e26894d70ac6acdf0591e4
| 3,861
|
py
|
Python
|
tests/generic_relations/models.py
|
pomarec/django
|
98514849dce07acfaa224a90a784bba9d97249e5
|
[
"BSD-3-Clause"
] | 166
|
2015-01-07T08:23:17.000Z
|
2022-02-23T00:09:44.000Z
|
tests/generic_relations/models.py
|
pomarec/django
|
98514849dce07acfaa224a90a784bba9d97249e5
|
[
"BSD-3-Clause"
] | 107
|
2015-01-19T22:11:09.000Z
|
2021-09-18T19:29:44.000Z
|
tests/generic_relations/models.py
|
pomarec/django
|
98514849dce07acfaa224a90a784bba9d97249e5
|
[
"BSD-3-Clause"
] | 89
|
2015-01-08T19:52:16.000Z
|
2021-12-17T11:26:53.000Z
|
"""
34. Generic relations
Generic relations let an object have a foreign key to any object through a
content-type/object-id field. A ``GenericForeignKey`` field can point to any
object, be it animal, vegetable, or mineral.
The canonical example is tags (although this example implementation is *far*
from complete).
"""
from __future__ import unicode_literals
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class TaggedItem(models.Model):
"""A tag on an item."""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey()
class Meta:
ordering = ["tag", "content_type__name"]
def __str__(self):
return self.tag
class ValuableTaggedItem(TaggedItem):
value = models.PositiveIntegerField()
@python_2_unicode_compatible
class Comparison(models.Model):
"""
A model that tests having multiple GenericForeignKeys
"""
comparative = models.CharField(max_length=50)
content_type1 = models.ForeignKey(ContentType, related_name="comparative1_set")
object_id1 = models.PositiveIntegerField()
content_type2 = models.ForeignKey(ContentType, related_name="comparative2_set")
object_id2 = models.PositiveIntegerField()
first_obj = generic.GenericForeignKey(ct_field="content_type1", fk_field="object_id1")
other_obj = generic.GenericForeignKey(ct_field="content_type2", fk_field="object_id2")
def __str__(self):
return "%s is %s than %s" % (self.first_obj, self.comparative, self.other_obj)
@python_2_unicode_compatible
class Animal(models.Model):
common_name = models.CharField(max_length=150)
latin_name = models.CharField(max_length=150)
tags = generic.GenericRelation(TaggedItem)
comparisons = generic.GenericRelation(Comparison,
object_id_field="object_id1",
content_type_field="content_type1")
def __str__(self):
return self.common_name
@python_2_unicode_compatible
class Vegetable(models.Model):
name = models.CharField(max_length=150)
is_yucky = models.BooleanField(default=True)
tags = generic.GenericRelation(TaggedItem)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Mineral(models.Model):
name = models.CharField(max_length=150)
hardness = models.PositiveSmallIntegerField()
# note the lack of an explicit GenericRelation here...
def __str__(self):
return self.name
class GeckoManager(models.Manager):
def get_queryset(self):
return super(GeckoManager, self).get_queryset().filter(has_tail=True)
class Gecko(models.Model):
has_tail = models.BooleanField(default=False)
objects = GeckoManager()
# To test fix for #11263
class Rock(Mineral):
tags = generic.GenericRelation(TaggedItem)
class ManualPK(models.Model):
id = models.IntegerField(primary_key=True)
tags = generic.GenericRelation(TaggedItem)
class ForProxyModelModel(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
obj = generic.GenericForeignKey(for_concrete_model=False)
title = models.CharField(max_length=255, null=True)
class ForConcreteModelModel(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
obj = generic.GenericForeignKey()
class ConcreteRelatedModel(models.Model):
bases = generic.GenericRelation(ForProxyModelModel, for_concrete_model=False)
class ProxyRelatedModel(ConcreteRelatedModel):
class Meta:
proxy = True
| 31.137097
| 90
| 0.741518
|
3b60d324ffe318f2d5281c809daf880640025ab1
| 38
|
py
|
Python
|
UNET_GM_DATASET/UNET_Quant_6_0_0_4/gmchallenge_unet.py
|
hossein1387/U-Net-Fixed-Point-Quantization-for-Medical-Image-Segmentation
|
f919935b975461052d5196372a837356379f973a
|
[
"MIT"
] | 69
|
2019-07-28T06:53:55.000Z
|
2022-03-22T01:07:28.000Z
|
UNET_GM_DATASET/UNET_Quant_Ternary/gmchallenge_unet.py
|
hossein1387/Fixed-Point-U-Net-Quantization-for-Medical-Image-Segmentation
|
4835a3d08b38617b952023f19bf59679126f5f59
|
[
"MIT"
] | 6
|
2019-08-02T07:47:09.000Z
|
2021-06-11T14:35:53.000Z
|
UNET_GM_DATASET/UNET_Quant_Ternary/gmchallenge_unet.py
|
hossein1387/Fixed-Point-U-Net-Quantization-for-Medical-Image-Segmentation
|
4835a3d08b38617b952023f19bf59679126f5f59
|
[
"MIT"
] | 20
|
2019-08-10T07:08:56.000Z
|
2021-10-09T17:02:30.000Z
|
../UNET_Quant_Base/gmchallenge_unet.py
| 38
| 38
| 0.868421
|
a96f6f01fa7d44e9ccbc35c2874f120a46bcb855
| 916
|
py
|
Python
|
src/ss_sys.py
|
BardiaMojra/dip
|
201bd14c13052b81967e051444f4e5c08c72631a
|
[
"MIT"
] | null | null | null |
src/ss_sys.py
|
BardiaMojra/dip
|
201bd14c13052b81967e051444f4e5c08c72631a
|
[
"MIT"
] | null | null | null |
src/ss_sys.py
|
BardiaMojra/dip
|
201bd14c13052b81967e051444f4e5c08c72631a
|
[
"MIT"
] | null | null | null |
''' control systems - ODE simulation
@link https://www.youtube.com/watch?v=yp5x8RMNi7o
'''
import numpy as np
from matplotlib import pyplot as plt
import control
def ss_sys(x, t, F, c, k, m):
    # Mass-spring-damper dynamics m*x'' + c*x' + k*x = F,
    # with state x = [position, velocity]; returns the state derivative.
    # (Constants are now parameters; the original relied on undefined globals.)
    dx1 = x[1]
    dx2 = (F - c*x[1] - k*x[0])/m
    return [dx1, dx2]
def sim():
# set constants
t_0 = 0
t_f = 60
dt = 0.1
# set a discrete time stamp
t = np.arange(t_0, t_f, dt)
# set system constants
c = 4 # damping constant
k = 2 # spring stiffness constant
m = 20 # point-mass
F = 5 # input force into the system
# set state initial condition
x_init = [0, 0]
# set up state space matrices describing the control system
    A = [[0, 1], [-k/m, -c/m]]  # damping enters with a negative sign
B = [[0], [1/m]]
C = [[1, 0]]
D = [0] # feedforward vector
# instantiate the system and run the simulation
    sys = control.ss(A, B, C, D)  # continuous-time model; dt above is only the time grid step
    t, y, x = control.forced_response(sys, t, F, return_x=True)
return t, y, x
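# Usage sketch (assumes the python-control package is installed):
#   t, y, x = sim()
#   plt.plot(t, y)
#   plt.xlabel('time [s]')
#   plt.ylabel('position')
#   plt.show()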
| 19.489362
| 61
| 0.617904
|
fa15f34cfaeea349a0ecd0ba78adf370dd141c1b
| 929
|
py
|
Python
|
paystack/views/webhook.py
|
Nyior/django-rest-paystack
|
fd74dd26703fe4ce63664736c2063ace7020f71a
|
[
"MIT"
] | 9
|
2021-12-12T17:59:15.000Z
|
2022-02-05T17:13:46.000Z
|
paystack/views/webhook.py
|
Nyior/django-rest-paystack
|
fd74dd26703fe4ce63664736c2063ace7020f71a
|
[
"MIT"
] | null | null | null |
paystack/views/webhook.py
|
Nyior/django-rest-paystack
|
fd74dd26703fe4ce63664736c2063ace7020f71a
|
[
"MIT"
] | 1
|
2021-12-21T18:57:03.000Z
|
2021-12-21T18:57:03.000Z
|
from rest_framework.views import APIView
from paystack.services import WebhookService
from paystack.utils import return_okay_response
class WebhookFacadeView(APIView):
"""
    Exists for extensibility reasons. Users might want to capture
    the data returned from Paystack and act on it,
    e.g. retrieve the user tied to the payment
    (usually passed as metadata in this package)
    and clear the user's cart or create an order for that user.
"""
authentication_classes = []
permission_classes = []
def post(self, request):
webhook_service = WebhookService(request)
return (
webhook_service.webhook_handler()
) # This returns raw JSON data from Paystack
class WebhookView(WebhookFacadeView):
def post(self, request):
webhook_data = super().post(request)
        return return_okay_response(webhook_data)  # Returns an instance of JsonResponse
| 29.967742
| 77
| 0.7169
|
c89d68af1a1f477a7708d6b72be14ec554c38cb6
| 4,602
|
py
|
Python
|
examples/remarketing/add_conversion_based_user_list.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | 285
|
2018-10-05T16:47:58.000Z
|
2022-03-31T00:58:39.000Z
|
examples/remarketing/add_conversion_based_user_list.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | 425
|
2018-09-10T13:32:41.000Z
|
2022-03-31T14:50:05.000Z
|
examples/remarketing/add_conversion_based_user_list.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | 369
|
2018-11-28T07:01:00.000Z
|
2022-03-28T09:53:22.000Z
|
#!/usr/bin/env python
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates a basic user list based on conversion actions.
The given conversion action IDs will be associated with the new user list.
A user will be added to the list upon triggering one or more of the actions,
e.g. by visiting a site or making a purchase.
"""
import argparse
import sys
from uuid import uuid4
from google.ads.googleads.client import GoogleAdsClient
from google.ads.googleads.errors import GoogleAdsException
# [START add_conversion_based_user_list]
def main(client, customer_id, conversion_action_ids):
"""Creates a combination user list.
Args:
client: The Google Ads client.
customer_id: The customer ID for which to add the user list.
conversion_action_ids: The IDs of the conversion actions for the basic
user list.
"""
# Get the UserListService and ConversionActionService clients.
user_list_service = client.get_service("UserListService")
conversion_action_service = client.get_service("ConversionActionService")
# Create a list of UserListActionInfo objects for the given conversion
# actions. These specify the conversion actions that, when triggered, will
# cause a user to be added to a UserList.
user_list_action_info_list = []
for conversion_action_id in conversion_action_ids:
user_list_action_info = client.get_type("UserListActionInfo")
user_list_action_info.conversion_action = (
conversion_action_service.conversion_action_path(
customer_id, conversion_action_id
)
)
user_list_action_info_list.append(user_list_action_info)
# Create a UserListOperation and populate the UserList.
user_list_operation = client.get_type("UserListOperation")
user_list = user_list_operation.create
user_list.name = f"Example BasicUserList #{uuid4()}"
user_list.description = (
"A list of people who have triggered one or more conversion actions"
)
user_list.membership_status = client.enums.UserListMembershipStatusEnum.OPEN
user_list.membership_life_span = 365
# The basic user list info object contains the conversion action info.
user_list.basic_user_list.actions.extend(user_list_action_info_list)
# Issue a mutate request to add the user list, then print the results.
response = user_list_service.mutate_user_lists(
customer_id=customer_id, operations=[user_list_operation]
)
print(
"Created basic user list with resource name "
f"'{response.results[0].resource_name}.'"
)
# [END add_conversion_based_user_list]
if __name__ == "__main__":
# GoogleAdsClient will read the google-ads.yaml configuration file in the
# home directory if none is specified.
googleads_client = GoogleAdsClient.load_from_storage(version="v8")
parser = argparse.ArgumentParser(
description="Creates a basic user list based on conversion actions."
)
# The following argument(s) should be provided to run the example.
parser.add_argument(
"-c",
"--customer_id",
type=str,
required=True,
help="The Google Ads customer ID.",
)
parser.add_argument(
"-a",
"--conversion_action_ids",
nargs="+",
type=str,
required=True,
help="The IDs of the conversion actions for the basic user list.",
)
args = parser.parse_args()
try:
main(googleads_client, args.customer_id, args.conversion_action_ids)
except GoogleAdsException as ex:
print(
f'Request with ID "{ex.request_id}" failed with status '
f'"{ex.error.code().name}" and includes the following errors:'
)
for error in ex.failure.errors:
print(f'\tError with message "{error.message}".')
if error.location:
for field_path_element in error.location.field_path_elements:
print(f"\t\tOn field: {field_path_element.field_name}")
sys.exit(1)
| 38.672269
| 80
| 0.707084
|
675bc06d1ca04dbcfb3d3cda0d7e919795b51d37
| 3,472
|
py
|
Python
|
lega/ingest.py
|
jbygdell/LocalEGA
|
e0e5f9ee5c7e18cd4fbb1c8e89a77832c7c122b9
|
[
"Apache-2.0"
] | null | null | null |
lega/ingest.py
|
jbygdell/LocalEGA
|
e0e5f9ee5c7e18cd4fbb1c8e89a77832c7c122b9
|
[
"Apache-2.0"
] | null | null | null |
lega/ingest.py
|
jbygdell/LocalEGA
|
e0e5f9ee5c7e18cd4fbb1c8e89a77832c7c122b9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Worker reading messages from the ``files`` queue, splitting the Crypt4GH header from the remainder of the file.
The header is stored in the database and the remainder is sent to the backend storage:
either a regular file system or an S3 object store.
It is possible to start several workers.
When a message is consumed, it must at least contain the following fields:
* ``filepath``
* ``user``
Upon completion, a message is sent to the local exchange with the
routing key ``archived``.
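A minimal example message (the values are illustrative)::

    {"filepath": "dir/subdir/file.c4gh", "user": "john@elixir-europe.org"}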
"""
import sys
import logging
from functools import partial
from legacryptor.crypt4gh import get_header
from .conf import CONF
from .utils import db, exceptions, sanitize_user_id, storage
from .utils.amqp import consume, publish, get_connection
LOG = logging.getLogger(__name__)
@db.catch_error
@db.crypt4gh_to_user_errors
def work(fs, inbox_fs, channel, data):
"""Read a message, split the header and send the remainder to the backend store."""
filepath = data['filepath']
LOG.info(f"Processing {filepath}")
# Remove the host part of the user name
user_id = sanitize_user_id(data['user'])
    # Keep a copy of the original message: the error-handling decorators rely on it.
    # `data` will be augmented below, so we preserve the original first.
org_msg = data.copy()
data['org_msg'] = org_msg
# Insert in database
file_id = db.insert_file(filepath, user_id)
data['file_id'] = file_id # must be there: database error uses it
# Instantiate the inbox backend
inbox = inbox_fs(user_id)
LOG.info("Inbox backend: %s", inbox)
# Check if file is in inbox
if not inbox.exists(filepath):
raise exceptions.NotFoundInInbox(filepath) # return early
# Ok, we have the file in the inbox
# Record in database
db.mark_in_progress(file_id)
# Sending a progress message to CentralEGA
org_msg['status'] = 'PROCESSING'
LOG.debug(f'Sending message to CentralEGA: {data}')
publish(org_msg, channel, 'cega', 'files.processing')
org_msg.pop('status', None)
# Strip the header out and copy the rest of the file to the vault
LOG.debug('Opening %s', filepath)
with inbox.open(filepath, 'rb') as infile:
LOG.debug(f'Reading header | file_id: {file_id}')
beginning, header = get_header(infile)
header_hex = (beginning+header).hex()
data['header'] = header_hex
db.store_header(file_id, header_hex) # header bytes will be .hex()
target = fs.location(file_id)
LOG.info(f'[{fs.__class__.__name__}] Moving the rest of {filepath} to {target}')
target_size = fs.copy(infile, target) # It will copy the rest only
LOG.info(f'Vault copying completed. Updating database')
db.set_archived(file_id, target, target_size)
data['vault_path'] = target
LOG.debug(f"Reply message: {data}")
return data
def main(args=None):
"""Run ingest service."""
if not args:
args = sys.argv[1:]
CONF.setup(args) # re-conf
inbox_fs = getattr(storage, CONF.get_value('inbox', 'driver', default='FileStorage'))
fs = getattr(storage, CONF.get_value('vault', 'driver', default='FileStorage'))
broker = get_connection('broker')
do_work = partial(work, fs('vault', 'lega'), partial(inbox_fs, 'inbox'), broker.channel())
# upstream link configured in local broker
consume(do_work, broker, 'files', 'archived')
if __name__ == '__main__':
main()
| 31.279279
| 114
| 0.686636
|
0daae2ec7275e9b450ace9255cf5b71391e0df3a
| 642
|
py
|
Python
|
deepshift/kernels/cuda/setup.py
|
poppin-mice/ShiftAddNet
|
a17369a50da5bba6250fdeac7c065bd00f293f3c
|
[
"MIT"
] | 55
|
2020-10-04T17:17:46.000Z
|
2022-03-31T02:56:51.000Z
|
deepshift/kernels/cuda/setup.py
|
poppin-mice/ShiftAddNet
|
a17369a50da5bba6250fdeac7c065bd00f293f3c
|
[
"MIT"
] | 8
|
2020-12-07T03:37:48.000Z
|
2021-07-21T09:26:45.000Z
|
deepshift/kernels/cuda/setup.py
|
poppin-mice/ShiftAddNet
|
a17369a50da5bba6250fdeac7c065bd00f293f3c
|
[
"MIT"
] | 14
|
2020-10-29T16:51:41.000Z
|
2021-11-16T01:36:43.000Z
|
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
setup(
name='deepshift_cuda',
ext_modules=[
CUDAExtension('deepshift_cuda', [
'shift_cuda.cpp',
'shift.cu',
],extra_compile_args=['-O3'])
],
cmdclass={
'build_ext': BuildExtension
})
setup(
name='unoptimized_cuda_kernel',
ext_modules=[
CUDAExtension('unoptimized_cuda_kernel', [
'unoptimized_cuda.cpp',
'unoptimized_cuda_kernel.cu',
],extra_compile_args=['-O3'])
],
cmdclass={
'build_ext': BuildExtension
})
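# Build/install sketch: invoking the usual setuptools entry point runs both
# setup() calls above in sequence (an assumption about how the authors
# intended this file to be used):
#   python setup.py install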
| 24.692308
| 67
| 0.609034
|
0cde37c7f7da75f4fe6da67f0368d0044c30acb4
| 1,201
|
py
|
Python
|
xlsxwriter/test/comparison/test_hyperlink20.py
|
hugovk/XlsxWriter
|
e97cc66637d9895480ee32cfb5e561d652d3787b
|
[
"BSD-2-Clause"
] | null | null | null |
xlsxwriter/test/comparison/test_hyperlink20.py
|
hugovk/XlsxWriter
|
e97cc66637d9895480ee32cfb5e561d652d3787b
|
[
"BSD-2-Clause"
] | null | null | null |
xlsxwriter/test/comparison/test_hyperlink20.py
|
hugovk/XlsxWriter
|
e97cc66637d9895480ee32cfb5e561d652d3787b
|
[
"BSD-2-Clause"
] | null | null | null |
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('hyperlink20.xlsx')
def test_hyperlink_formatting_explicit(self):
"""Test the creation of a simple XlsxWriter file with hyperlinks. This example has link formatting."""
workbook = Workbook(self.got_filename)
# Simulate custom color for testing.
workbook.custom_colors = ['FF0000FF']
worksheet = workbook.add_worksheet()
format1 = workbook.add_format({'color': 'blue', 'underline': 1})
format2 = workbook.add_format({'color': 'red', 'underline': 1})
worksheet.write_url('A1', 'http://www.python.org/1', format1)
worksheet.write_url('A2', 'http://www.python.org/2', format2)
workbook.close()
self.assertExcelEqual()
| 29.292683
| 110
| 0.632806
|
ac07792a20a3659277d8a1854bb8209d426f2088
| 570
|
py
|
Python
|
blockkit/__init__.py
|
ddnomad/blockkit-slack
|
886598d73acc4e545bc5fca4e36902997b00168a
|
[
"MIT"
] | null | null | null |
blockkit/__init__.py
|
ddnomad/blockkit-slack
|
886598d73acc4e545bc5fca4e36902997b00168a
|
[
"MIT"
] | null | null | null |
blockkit/__init__.py
|
ddnomad/blockkit-slack
|
886598d73acc4e545bc5fca4e36902997b00168a
|
[
"MIT"
] | null | null | null |
from .objects import Confirm, MarkdownText, Option, OptionGroup, PlainText, Text, Filter
from .elements import (
Button,
DatePicker,
Image,
MultiExternalSelect,
MultiStaticSelect,
MultiUsersSelect,
MultiConversationsSelect,
MultiChannelsSelect,
StaticSelect,
ExternalSelect,
UsersSelect,
ConversationsSelect,
ChannelsSelect,
Overflow,
PlainTextInput,
RadioButtons,
Checkboxes,
)
from .blocks import Section, Divider, ImageBlock, Actions, Context, Input, File
from .surfaces import Message, Modal, Home
| 24.782609
| 88
| 0.731579
|
973c18b24c5da3b031d3ad895b09efe440ce5b04
| 179
|
py
|
Python
|
notebooks/01.Basics/solutions/intro/widgets-in-a-box.py
|
jupytercon/2020-mwcraig
|
0fd6cf2acc1498d148bcda1f274a0aa5cb0c9085
|
[
"BSD-3-Clause"
] | 342
|
2017-08-23T18:36:58.000Z
|
2022-03-11T18:47:31.000Z
|
notebooks/01.Basics/solutions/intro/widgets-in-a-box.py
|
jupytercon/2020-mwcraig
|
0fd6cf2acc1498d148bcda1f274a0aa5cb0c9085
|
[
"BSD-3-Clause"
] | 118
|
2017-08-23T01:42:45.000Z
|
2022-02-14T18:11:47.000Z
|
notebooks/01.Basics/solutions/intro/widgets-in-a-box.py
|
jupytercon/2020-mwcraig
|
0fd6cf2acc1498d148bcda1f274a0aa5cb0c9085
|
[
"BSD-3-Clause"
] | 152
|
2017-08-22T22:24:28.000Z
|
2022-03-31T12:45:37.000Z
|
import ipywidgets as widgets
a = widgets.Play(description='Value A', min=1, max=10, value=5)
b = widgets.IntText(description='Value B')
vbox = widgets.HBox(children=[a, b])
vbox
| 25.571429
| 63
| 0.726257
|