| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
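The columns follow the usual fill-in-the-middle (FIM) layout: each row stores one licensed source file split into a prefix, a held-out middle, and a suffix. A minimal consumption sketch, assuming each row is available as a dict keyed by the column names above (the sentinel token strings are illustrative placeholders, not part of this dataset):

def reassemble(row):
    """Concatenate the three text columns back into the original file."""
    return row["prefix"] + row["middle"] + row["suffix"]

def to_fim_prompt(row, pre="<fim_prefix>", suf="<fim_suffix>", mid="<fim_middle>"):
    """Render a row as a fill-in-the-middle prompt; a model is expected
    to generate row["middle"] after the final sentinel."""
    return pre + row["prefix"] + suf + row["suffix"] + mid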
| xmnlab/minilab | ia/ocr/alpr.py | Python | gpl-3.0 | 10,141 | 0.008776 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import pymeanshift as pms
from blobs.BlobResult import CBlobResult
from blobs.Blob import CBlob # Note: This must be imported in order to destroy blobs and use other methods
#############################################################################
# so, here is the main part of the program
if __name__ == '__main__':
import sys
import os
blob_overlay = True
file_name = "plates/license1.png"
if len(sys.argv) != 1:
file_name = sys.argv[1]
base_name = os.path.basename(file_name)
fname_prefix = ".".join(base_name.split(".")[:-1])
print fname_prefix
# Image load & conversion to cvmat
license_plate = cv2.imread(file_name, cv2.CV_LOAD_IMAGE_COLOR)
# Segment
segmented, labels, regions = pms.segment(license_plate, 3, 3, 50)
print "Segmentation results"
print "%s: %s" % ("labels", labels)
print "%s: %s" % ("regions", regions)
cv2.imwrite('%s_segmented.png' % fname_prefix, segmented)
license_plate = cv2.imread('%s_segmented.png' % fname_prefix, cv2.CV_LOAD_IMAGE_COLOR)
license_plate_size = (license_plate.shape[1], license_plate.shape[0])
license_plate_cvmat = cv2.cv.fromarray(license_plate)
license_plate_ipl = cv2.cv.CreateImage(license_plate_size, cv2.cv.IPL_DEPTH_8U, 3)
cv2.cv.SetData(
license_plate_ipl,
license_plate.tostring(),
license_plate.dtype.itemsize * 3 * license_plate.shape[1])
license_plate_white_ipl = cv2.cv.CreateImage(license_plate_size, cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.Set(license_plate_white_ipl, 255)
# Grayscale conversion
inverted_license_plate_grayscale_ipl = cv2.cv.CreateImage(
license_plate_size,
cv2.cv.IPL_DEPTH_8U, 1)
license_plate_grayscale_ipl = cv2.cv.CreateImage(
license_plate_size,
cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.CvtColor(
license_plate_cvmat,
license_plate_grayscale_ipl,
cv2.COLOR_RGB2GRAY);
license_plate_grayscale_np = np.asarray(license_plate_grayscale_ipl[:,:])
# We can also use cv.saveimage
# cv2.cv.SaveImage('license1_grayscale.png', license_plate_grayscale_ipl)
cv2.imwrite('%s_grayscale.png' % fname_prefix, license_plate_grayscale_np)
# Thresholding or binarization of images
(threshold_value, thresh_image) = cv2.threshold(
license_plate_grayscale_np,
128,
255,
cv2.THRESH_BINARY | cv2.THRESH_OTSU)
print "Thresholding complete. Partition value is %d" % threshold_value
cv2.imwrite('%s_threshold.png' % fname_prefix, thresh_image)
# Create a mask that will cover the entire image
mask = cv2.cv.CreateImage (license_plate_size, 8, 1)
cv2.cv.Set(mask, 1)
#if not blob_overlay:
# # Convert black-and-white version back into three-color representation
# cv2.cv.CvtColor(my_grayscale, frame_cvmat, cv2.COLOR_GRAY2RGB);
# Blob detection
thresh_image_ipl = cv2.cv.CreateImage(license_plate_size, cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.SetData(
thresh_image_ipl,
thresh_image.tostring(),
thresh_image.dtype.itemsize * 1 * thresh_image.shape[1])
cv2.cv.Not(thresh_image_ipl, inverted_license_plate_grayscale_ipl)
# Min blob size and Max blob size
min_blob_size = 100 # minimum blob area, in pixels
max_blob_size = 10000
threshold = 100
# Plate area as % of image area:
max_plate_to_image_ratio = 0.3
min_plate_to_image_ratio = 0.01
image_area = license_plate_size[0] * license_plate_size[1]
# Mask - Blob extracted where mask is set to 1
# Third parameter is threshold value to apply prior to blob detection
# Boolean indicating whether we find moments
myblobs = CBlobResult(thresh_image_ipl, mask, threshold, True)
myblobs.filter_blobs(min_blob_size, max_blob_size)
blob_count = myblobs.GetNumBlobs()
print "Found %d blob[s] betweeen size %d and %d using threshold %d" % (
blob_count, min_blob_size, max_blob_size, threshold)
for i in range(blob_count):
my_enumerated_blob = myblobs.GetBlob(i)
# print "%d: Area = %d" % (i, my_enumerated_blob.Area())
my_enumerated_blob.FillBlob(
license_plate_grayscale_ipl,
#license_plate_ipl,
#cv2.cv.Scalar(255, 0, 0),
cv2.cv.CV_RGB(255, 0, 0),
0, 0)
my_enumerated_blob.FillBlob(
license_plate_white_ipl,
#license_plate_ipl,
#cv2.cv.Scalar(255, 0, 0),
cv2.cv.CV_RGB(255, 255, 255),
0, 0)
# we can now save the image
#annotated_image = np.asarray(license_plate_ipl[:,:])
blob_image = np.asarray(license_plate_grayscale_ipl[:,:])
cv2.imwrite("%s_blobs.png" % fname_prefix, blob_image)
blob_white_image = np.asarray(license_plate_white_ipl[:,:])
cv2.imwrite("%s_white_blobs.png" %
|
fname_prefix, blob_white_image)
# Looking for a rectangle - Plates are rectangular
# Threshold the image, then find contours and run approxPolyDP
(threshold_value, blob_threshold_image) = cv2.threshold(
blob_white_image,
128,
255,
cv2.THRESH_BINARY | cv2.THRESH_OTSU)
print "Thresholding complete. Partition value is %d" % threshold_value
cv2.imwrite('%s_blob_threshold.png' % fname_prefix, blob_threshold_image)
# Blur to reduce noise?
#blurred_plate = cv2.GaussianBlur(blob_threshold_image, (5,5), 0)
#blob_threshold_image = blurred_plate
# Erode then dilate to reduce noise
blob_threshold_image_invert = cv2.bitwise_not(blob_threshold_image)
cv2.imwrite("%s_pre_dilated_and_eroded.png" % fname_prefix, blob_threshold_image_invert)
eroded_white_blobs = cv2.erode(blob_threshold_image_invert, None, iterations=4);
cv2.imwrite("%s_eroded_image.png" % fname_prefix, eroded_white_blobs)
dilated_white_blobs = cv2.dilate(eroded_white_blobs, None, iterations=4);
cv2.imwrite("%s_dilated.png" % fname_prefix, dilated_white_blobs)
blob_threshold_image = cv2.bitwise_not(blob_threshold_image_invert)
cv2.imwrite("%s_dilated_and_eroded.png" % fname_prefix, blob_threshold_image)
blob_threshold_image_invert = cv2.bitwise_not(blob_threshold_image)
contours, hierarchy = cv2.findContours(
blob_threshold_image,
cv2.RETR_LIST,
cv2.CHAIN_APPROX_SIMPLE)
#print "Contours: ", contours
# We now have contours. Approximate the polygon shapes
largest_rectangle_idx = 0
largest_rectangle_area = 0
rectangles = []
colours = ( (255,0,0), (0,255,0), (0,0,255), (255,255,0), (0,255,255))
for idx, contour in enumerate(contours):
print "Contour: %d" % idx
contour_area = cv2.contourArea(contour)
if float(contour_area / image_area) < min_plate_to_image_ratio:
print "Contour %d under threshold. Countour Area: %f" % (idx, contour_area)
continue
elif float(contour_area / image_area) > max_plate_to_image_ratio:
print "Contour %d over threshold. Countour Area: %f" % (idx, contour_area)
continue
approx = cv2.approxPolyDP(
contour,
0.02 * cv2.arcLength(contour, True),
True)
print "\n -"
print "%d. Countour Area: %f, Arclength: %f, Polygon %d colour:%s" % (idx,
contour_area,
cv2.arcLength(contour, True),
len(approx),
colours[idx%len(colours)])
minarea_rectangle = cv2.minAreaRect(contour)
minarea_box = cv2.cv.BoxPoints(minarea_rectangle)
print "> ", minarea_rectangle
print ">> ", minarea_box
centre, width_and_height, theta = minarea_rectangle
aspect_ratio = float(max(width_and_height) / min(width_and_height))
print " aspect ratio: %f for %s " % (aspect_ratio, width_and_height)
minarea_box = np.int0(minarea_box)
cv2.drawContours(license_plate, [minarea_box],
| Caoimhinmg/PmagPy | programs/gofish.py | Python | bsd-3-clause | 1,976 | 0.0167 |
#!/usr/bin/env python
from __future__ import print_function
from builtins import input
import sys
import pmagpy.pmag as pmag
def main():
"""
NAME
gofish.py
DESCRIPTION
calculates fisher parameters from dec inc data
INPUT FORMAT
takes dec/inc as first two columns in space delimited file
SYNTAX
gofish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file
-F FILE, specifies output file name
< filename for reading from standard input
OUTPUT
mean dec, mean inc, N, R, k, a95, csd
"""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-i' in sys.argv: # ask for filename
file=input("Enter file name with dec, inc data: ")
f=open(file,'r')
data=f.readlines()
elif '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data = sys.stdin.readlines() # read from standard input
ofile = ""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[ind+1]
out = open(ofile, 'w')
DIs= [] # set up list for dec inc data
for line in data: # read in the data from standard input
if '\t' in line:
rec=line.split('\t') # split each line on tab to get records
else:
rec=line.split() # split each line on space to get records
DIs.append((float(rec[0]),float(rec[1])))
#
fpars=pmag.fisher_mean(DIs)
outstring='%7.1f %7.1f %i %10.4f %8.1f %7.1f %7.1f'%(fpars['dec'],fpars['inc'],fpars['n'],fpars['r'],fpars['k'],fpars['alpha95'], fpars['csd'])
if ofile == "":
print(outstring)
else:
out.write(outstring+'\n')
#
if __name__ == "__main__":
main()
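# A hedged usage sketch (not part of gofish.py): the script above is a thin
# wrapper around pmag.fisher_mean, which takes a list of (dec, inc) pairs and
# returns the keys printed in outstring. The sample values are made up.
def _fisher_demo():
    DIs = [(350.0, 20.0), (10.0, 25.0), (5.0, 15.0)]
    fpars = pmag.fisher_mean(DIs)
    print(fpars['dec'], fpars['inc'], fpars['n'], fpars['r'],
          fpars['k'], fpars['alpha95'], fpars['csd'])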
| cschwede/django-userattributes | userattributes/runtests.py | Python | mit | 846 | 0.002364 |
""" This file mainly exists to allow python setup.py test to work. """
import os
import sys
from django.conf import settings
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'testdb',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'userattributes',),
USERATTRIBUTE_FILTER_COUNT=3
)
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from django.test.utils import get_runner
def runtests():
""" Runs test.py """
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['userattributes'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
| 20centaurifux/meat-a | controller.py | Python | agpl-3.0 | 29,635 | 0.02902 |
# -*- coding: utf-8 -*-
"""
project............: meat-a
description........: web application for sharing meta information
date...............: 04/2013
copyright..........: Sebastian Fedrau
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
##
# @file controller.py
# Controller classes.
## @package controller
# Controller classes.
import config, app, view, exception, util, template, factory, re, sys, inspect, os, logger, mimetypes
from base64 import b64decode, b64encode
## Converts an exception to a view.JSONView.
# @param e an exception
# @return a view.JSONView instance
def exception_to_json_view(e):
if not isinstance(e, exception.BaseException):
e = exception.InternalFailureException(str(e))
m = {}
m["message"] = e.message
if isinstance(e, exception.InvalidParameterException) or isinstance(e, exception.MissingParameterException):
m["field"] = e.parameter
v = view.JSONView(e.http_status)
v.bind(m)
if e.http_status == 401:
v.headers["WWW-Authenticate"] = "Basic realm=\"%s\"" % (b64encode(config.REALM))
return v
## Converts an exception to a view.HTMLTemplateView.
# @param e an exception
# @return a view.HTMLTemplateView instance
def exception_to_html_view(e):
if not isinstance(e, exception.BaseException):
e = exception.InternalFailureException(str(e))
v = view.HTMLTemplateView(e.http_status, template.MessagePage, config.DEFAULT_LANGUAGE)
v.bind({"title": "Exception", "message": e.message})
if e.http_status == 401:
v.headers["WWW-Authenticate"] = "Basic realm=\"%s\"" % (b64encode(config.REALM))
return v
## Controller base class.
class Controller:
def __init__(self, exception_handler=exception_to_json_view):
## An app.Application instance.
self.app = app.Application()
## Function to convert exceptions to a view.View instance.
self.__exception_handler = exception_handler
## A logger.
self.log = None
## Handles an HTTP request.
# @param request_id id for identifying the request
# @param method the HTTP method (post, get, put or delete)
# @param env a dictionary providing environment details
# @param kwargs received parameters
# @return a view.View instance with a bound model
def handle_request(self, request_id, method, env, **kwargs):
try:
self.log = logger.get_logger(request_id)
if method == "OPTIONS":
return self.__options__()
m = {"post": self.__post__, "get": self.__get__, "put": self.__put__, "delete": self.__delete__}
self.__start_process__(env, **kwargs)
self.__check_rate_limit__(env)
f = m[method.lower()]
# get function argument names:
spec = inspect.getargspec(f)
argnames = spec[0][2:]
# get argument values from kwargs:
values = util.select_values(kwargs, argnames)
# set default values:
defaults = spec[3]
if not defaults is None:
diff = len(values) - len(defaults)
for i in range(len(values)):
if values[i] is None and i >= diff:
values[i] = defaults[i - diff]
# test required parameters:
if hasattr(f, "__required__"):
for k, v in dict(zip(argnames, values)).items():
if k in f.__required__ and v is None:
raise exception.MissingParameterException(k)
# merge argument list:
args = [env] + values
# call method:
v = apply(f, args)
# default headers:
if not v.headers.has_key("Cache-Control"):
v.headers["Cache-Control"] = "no-cache"
v.headers["Access-Control-Allow-Origin"] = "*"
v.headers["Access-Control-Allow-Headers"] = "accept, authorization"
except:
self.log.error("Couldn't handle request: %s", sys.exc_info()[1])
v = self.__exception_handler(sys.exc_info()[1])
return v
def __start_process__(self, env, **kwargs):
pass
def __check_rate_limit__(self, env):
self.log.debug("Checking rate limit.")
if not config.LIMIT_REQUESTS_BY_IP:
return
address = env["REMOTE_ADDR"]
with factory.create_db_connection() as conn:
with conn.enter_scope() as scope:
db = factory.create_request_db()
count = db.count_requests_by_ip(scope, address, 3600)
self.log.debug("'%s' has made %d of %d allowed requests.", address, count, config.IP_REQUESTS_PER_HOUR)
if count > config.IP_REQUESTS_PER_HOUR:
raise exception.HTTPException(402, "IP request limit reached.")
db.add_request(scope, address)
scope.complete()
def __method_not_supported__(self):
return self.__exception_handler(exception.MethodNotSupportedException())
def __options__(self):
methods = ["OPTIONS"]
for m in ["__get__", "__post__", "__delete__", "__put__"]:
f = getattr(self, m).__func__
b = getattr(Controller, m).__func__
if not f is b:
methods.append(m[2:-2].upper())
v = view.View("text/plain", 200)
v.headers["Access-Control-Allow-Methods"] = ", ".join(methods)
v.headers["Access-Control-Allow-Origin"] = "*"
v.headers["Access-Control-Allow-Headers"] = "accept, authorization"
return v
def __post__(self, env, *args):
return self.__method_not_supported__()
def __get__(self, env, *args):
return self.__method_not_supported__()
def __put__(self, env, *args):
return self.__method_not_supported__()
def __delete__(self, env, *args):
return self.__method_not_supported__()
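# A hedged sketch (an assumption, not part of this file): a minimal concrete
# controller. handle_request() dispatches on the HTTP verb and passes env plus
# the keyword arguments selected by name, so a subclass only declares the
# parameters it needs.
class PingController(Controller):
    def __get__(self, env, name=None):
        v = view.JSONView(200)
        v.bind({"pong": name})
        return v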
## A controller with HTTP basic authentication support.
class AuthorizedController(Controller):
def __init__(self ):
Controller.__init__(self)
self.username = None
def __start_process__(self, env, **kwargs):
# get & decode Authorization header:
try:
self.log.debug( "Starting HTTP basic authentication.")
header = env["HTTP_AUTHORIZATION"]
self.log.debug("Found Authorization header: '%s'", header)
m = re.match("^Basic ([a-zA-Z0-9=/_\-]+)$", header)
auth = b64decode(m.group(1))
index = auth.find(":")
if index == -1:
raise exception.HTTPException(400, "Bad request. Authorization header is malformed.")
self.username, password = auth[:index], auth[index + 1:]
self.log.debug("Parsed Authorization header: '%s:%s'", self.username, password)
except KeyError:
raise exception.AuthenticationFailedException()
except:
raise exception.HTTPException(400, "Bad request: Authorization header is malformed.")
# validate password:
authenticated = False
try:
authenticated = self.app.validate_password(self.username, password)
except exception.UserNotFoundException:
pass
except exception.UserIsBlockedException:
pass
except:
raise sys.exc_info()[1]
if not authenticated:
raise exception.NotAuthorizedException()
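# A hedged client-side sketch (not part of this file): building the header
# that the parser above accepts, i.e. "Basic " + base64("username:password").
def make_basic_auth_header(username, password):
    return "Basic %s" % b64encode("%s:%s" % (username, password))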
def __check_rate_limit__(self, env):
self.log.debug("Checking rate limit.")
if not config.LIMIT_REQUESTS_BY_IP and not config.LIMIT_REQUESTS_BY_USER:
return
address = env["REMOTE_ADDR"]
with factory.create_db_connection() as conn:
with conn.enter_scope() as scope:
request_db = factory.create_request_db()
user_db = factory.create_user_db()
if config.LIMIT_REQUESTS_BY_IP:
count = request_db.count_requests_by_ip(scope, address, 3600)
self.log.debug("'%s' has made %d of %d allowed requests.", address, count, config.IP_REQUESTS_PER_HOUR)
if count > config.IP_REQUESTS_PER_HOUR:
raise exception.HTTPException(402, "IP reque
| oposs/check_mk_mirror | web/htdocs/guitester.py | Python | gpl-2.0 | 10,454 | 0.005452 |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import re
import defaults
from lib import *
class MKGuitestFailed(MKException):
def __init__(self, errors):
self.errors = errors
MKException.__init__(self, _("GUI Test failed"))
class GUITester:
def __init__(self):
self.guitest = None
self.replayed_guitest_step = None
self.guitest_repair_step = None
def init_guitests(self):
if self.myfile == "guitest":
self.replay_guitest()
elif self.guitest_recording_active():
self.begin_guitest_recording()
def begin_guitest_recording(self):
self.guitest = {
"variables" : self.vars.copy(),
"filename" : self.myfile,
"output" : {},
}
# Fix transaction ID: We are just interested in whether it is valid or not
if "_transid" in self.vars:
if self.transaction_valid():
self.guitest["variables"]["_transid"] = "valid"
else:
self.guitest["variables"]["_transid"] = "invalid"
self.add_status_icon("guitest", _("GUI test recording is active"))
def end_guitest_recording(self):
if self.guitest != None:
self.guitest["user"] = self.user
self.guitest["elapsed_time"] = time.time() - self.start_time
self.save_guitest_step(self.guitest)
def save_guitest_step(self, step):
path = defaults.var_dir + "/guitests/RECORD"
if not os.path.exists(path):
test_steps = []
else:
test_steps = eval(file(path).read())
if self.guitest_repair_step != None:
mod_step = test_steps[self.guitest_repair_step]
mod_step["output"] = step["output"]
mod_step["user"] = step["user"]
mod_step["elapsed_time"] = step["elapsed_time"]
else:
test_steps.append(step)
file(path, "w").write("%s\n" % pprint.pformat(test_steps))
def load_guitest(self, name):
path = defaults.var_dir + "/guitests/" + name + ".mk"
try:
return eval(file(path).read())
except IOError, e:
raise MKGeneralException(_("Cannot load GUI test file %s: %s") % (self.attrencode(path), e))
def replay_guitest(self):
test_name = self.var("test")
if not test_name:
raise MKGuitestFailed([_("Missing the name of the GUI test to run (URL variable 'test')")])
guitest = self.load_guitest(test_name)
step_nr_text = self.var("step")
try:
step_nr = int(step_nr_text)
except:
raise MKGuitestFailed([_("Invalid or missing test step number (URL variable 'step')")])
if step_nr >= len(guitest) or step_nr < 0:
raise MKGuitestFailed([_("Invalid test step number %d (only 0...%d)") % (step_nr, len(guitest)-1)])
repair = self.var("repair") == "1"
if repair:
self.guitest_repair_step = step_nr
self.begin_guitest_recording()
self.replayed_guitest_step = guitest[step_nr]
self.replayed_guitest_step["replay"] = {}
self.myfile = self.replayed_guitest_step["filename"]
self.guitest_fake_login(self.replayed_guitest_step["user"])
self.vars = self.replayed_guitest_step["variables"]
if "_transid" in self.vars and self.vars["_transid"] == "valid":
self.vars["_transid"] = self.get_transid()
self.store_new_transids()
def guitest_recording_active(self):
# Activated by symbolic link pointing to recording file
return os.path.lexists(defaults.var_dir + "/guitests/RECORD") and not \
self.myfile in self.guitest_ignored_pages()
def guitest_ignored_pages(self):
return [ "run_cron", "index", "side", "sidebar_snapin", "dashboard", "dashboard_dashlet", "login" ]
def guitest_record_output(self, key, value):
if self.guitest:
self.guitest["output"].setdefault(key, []).append(value)
elif self.replayed_guitest_step:
self.replayed_guitest_step["replay"].setdefault(key, []).append(value)
def finalize_guitests(self):
if self.guitest:
self.end_guitest_recording()
if self.replayed_guitest_step:
try:
self.end_guitest_replay()
except MKGuitestFailed, e:
self.write("\n[[[GUITEST FAILED]]]\n%s" % ("\n".join(e.errors)))
def end_guitest_replay(self):
if self.replayed_guitest_step and self.guitest_repair_step == None:
errors = []
for varname in self.replayed_guitest_step["output"].keys():
method = self.guitest_test_method(varname)
errors += [ "%s: %s" % (varname, error)
for error in method(
self.replayed_guitest_step["output"][varname],
self.replayed_guitest_step["replay"].get(varname, [])) ]
if errors:
raise MKGuitestFailed(errors)
def guitest_test_method(self, varname):
if varname == "data_tables":
return guitest_check_datatables
elif varname == "page_title":
return guitest_check_single_value
else:
return guitest_check_element_list
def guitest_check_single_value(reference, reality):
errors = []
if len(reference) > 1:
errors.append("More than one reference value: %s" % ", ".join(reference))
if len(reality) > 1:
errors.append("More than one value: %s" % ", ".join(reality))
diff_text = guitest_check_text(reference[0], reality[0])
if diff_text:
errors.append(diff_text)
return errors
def guitest_check_element_list(reference, reality):
errors = []
one_missing = False
for entry in reference:
if not guitest_entry_in_reference_list(entry, reality):
errors.append("missing entry %r" % (entry,))
one_missing = True
if one_missing:
for entry in reality:
if not guitest_entry_in_reference_list(entry, reference):
errors.append("exceeding entry %r" % (entry,))
return errors
def guitest_entry_in_reference_list(entry, ref_list):
for ref_entry in ref_list:
if guitest_entries_match(ref_entry, entry):
return True
return False
def guitest_entries_match(ref, real):
if type(ref) in (list, tuple):
return len(ref) == len(real) and \
map(guitest_drop_dynamic_ids, ref) == map(guitest_drop_dynamic_ids, real)
else:
return guitest_drop_dynamic_ids(ref) == guitest_drop_dynamic_ids(real)
def guitest_check_datatables(referenc
| elli0ttB/problems | sorting/test_are_anagrams.py | Python | mit | 556 | 0.008993 |
from strings_anagrams import are_anagrams
def test_funny_anagrams():
assert are_anagrams("the eyes", "they see")
assert are_anagrams("Allahu Akbar, Obama", "Aha bub, koala alarm")
assert are_anagrams("Donald Trump", "Damp Old Runt")
def test_same_is_anagram():
assert are_anagrams("foo", "Foo")
assert are_anagrams(" ", " ")
def test_wrong():
assert not are_anagrams("mary", "cow")
assert not are_anagrams("123", "12345")
def test_explosion():
assert not are_anagrams(None, "")
assert not are_anagrams(321, 123)
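# A hedged reference implementation consistent with the tests above (the real
# strings_anagrams module is not shown, so this is an assumption): compare
# letter/digit multisets case-insensitively and reject non-string input.
def _are_anagrams_sketch(a, b):
    if not isinstance(a, str) or not isinstance(b, str):
        return False
    normalize = lambda s: sorted(ch for ch in s.lower() if ch.isalnum())
    return normalize(a) == normalize(b)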
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QGraphicsLayout.py | Python | gpl-2.0 | 2,337 | 0.009414 |
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
from .QGraphicsLayoutItem import QGraphicsLayoutItem
class QGraphicsLayout(QGraphicsLayoutItem):
""" QGraphicsLayout(QGraphicsLayoutItem parent=None) """
def activate(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.activate() """
pass
def addChildLayoutItem(self, QGraphicsLayoutItem): # real signature unknown; restored from __doc__
""" QGraphicsLayout.addChildLayoutItem(QGraphicsLayoutItem) """
pass
def count(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.count() -> int """
return 0
def getContentsMargins(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.getContentsMargins() -> (float, float, float, float) """
pass
def invalidate(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.invalidate() """
pass
def isActivated(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.isActivated() -> bool """
return False
def itemAt(self, p_int): # real signature unknown; restored from __doc__
""" QGraphicsLayout.itemAt(int) -> QGraphicsLayoutItem """
return QGraphicsLayoutItem
def removeAt(self, p_int): # real signature unknown; restored from __doc__
""" QGraphicsLayout.removeAt(int) """
pass
def setContentsMargins(self, p_float, p_float_1, p_float_2, p_float_3): # real signature unknown; restored from __doc__
""" QGraphicsLayout.setContentsMargins(float, float, float, float) """
pass
def updateGeometry(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.updateGeometry() """
pass
def widgetEvent(self, QEvent): # real signature unknown; restored from __doc__
""" QGraphicsLayout.widgetEvent(QEvent) """
pass
def __init__(self, QGraphicsLayoutItem_parent=None): # real signature unknown; restored from __doc__
pass
def __len__(self, *args, **kwargs): # real signature unknown
""" Return len(self). """
pass
| flying-sheep/aiohttp | tests/test_client_session.py | Python | apache-2.0 | 13,926 | 0 |
# -*- coding: utf-8 -*-
"""Tests for aiohttp/client.py"""
import asyncio
import gc
import unittest
from unittest import mock
import aiohttp
from aiohttp.client import ClientSession
from aiohttp.multidict import MultiDict, CIMultiDict
from aiohttp.connector import BaseConnector, TCPConnector
class TestClientSession(unittest.TestCase):
maxDiff = None
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
self.run = self.loop.run_until_complete
def tearDown(self):
self.loop.close()
gc.collect()
def make_open_connector(self):
conn = BaseConnector(loop=self.loop)
transp = unittest.mock.Mock()
conn._conns['a'] = [(transp, 'proto', 123)]
return conn
def test_init_headers_simple_dict(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
self.assertEqual(
sorted(session._default_headers.items()),
([("H1", "header1"),
("H2", "header2")]))
session.close()
def test_init_headers_list_of_tuples(self):
session = ClientSession(
headers=[("h1", "header1"),
("h2", "header2"),
("h3", "header3")],
loop=self.loop)
self.assertEqual(
session._default_headers,
CIMultiDict([("h1", "header1"),
("h2", "header2"),
("h3", "header3")]))
session.close()
def test_init_headers_MultiDict(self):
session = ClientSession(
headers=MultiDict(
[("h1", "header1"),
("h2", "header2"),
("h3", "header3")]),
loop=self.loop)
self.assertEqual(
session._default_headers,
CIMultiDict([("H1", "header1"),
("H2", "header2"),
("H3", "header3")]))
session.close()
def test_init_headers_list_of_tuples_with_duplicates(self):
session = ClientSession(
headers=[("h1", "header11"),
("h2", "header21"),
("h1", "header12")],
loop=self.loop)
self.assertEqual(
session._default_headers,
CIMultiDict([("H1", "header11"),
("H2", "header21"),
("H1", "header12")]))
session.close()
def test_init_cookies_with_simple_dict(self):
session = ClientSession(
cookies={
"c1": "cookie1",
"c2": "cookie2"
}, loop=self.loop)
self.assertEqual(set(session.cookies), {'c1', 'c2'})
self.assertEqual(session.cookies['c1'].value, 'cookie1')
self.assertEqual(session.cookies['c2'].value, 'cookie2')
session.close()
def test_init_cookies_with_list_of_tuples(self):
session = ClientSession(
cookies=[("c1", "cookie1"),
("c2", "cookie2")],
loop=self.loop)
self.assertEqual(set(session.cookies), {'c1', 'c2'})
self.assertEqual(session.cookies['c1'].value, 'cookie1')
self.assertEqual(session.cookies['c2'].value, 'cookie2')
session.close()
def test_merge_headers(self):
# Check incoming simple dict
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers({
"h1": "h1"
})
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("h2", "header2"),
("h1", "h1")
]))
session.close()
def test_merge_headers_with_multi_dict(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
},
loop=self.loop)
headers = session._prepare_headers(MultiDict([("h1", "h1")]))
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("h2", "header2"),
("h1", "h1")
]))
session.close()
def test_merge_headers_with_list_of_tuples(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers([("h1", "h1")])
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("h2", "header2"),
("h1", "h1")
]))
session.close()
def test_merge_headers_with_list_of_tuples_duplicated_names(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers([("h1", "v1"),
("h1", "v2")])
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("H2", "header2"),
("H1", "v1"),
("H1", "v2"),
]))
session.close()
def _make_one(self, **kwargs):
session = ClientSession(loop=self.loop, **kwargs)
params = dict(
headers={"Authorization": "Basic ..."},
max_redirects=2,
encoding="latin1",
version=aiohttp.HttpVersion10,
compress="deflate",
chunked=True,
expect100=True,
read_until_eof=False)
return session, params
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_GET(self, patched):
session, params = self._make_one()
session.get(
"http://test.example.com",
params={"x": 1},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("GET", "http://test.example.com",),
dict(
params={"x": 1},
allow_redirects=True,
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_OPTIONS(self, patched):
session, params = self._make_one()
session.options(
"http://opt.example.com",
params={"x": 2},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("OPTIONS", "http://opt.example.com",),
dict(
params={"x": 2},
allow_redirects=True,
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_HEAD(self, patched):
session, params = self._make_one()
session.head(
"http://head.example.com",
params={"x": 2},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("HEAD", "http://head.example.com",),
dict(
params={"x": 2},
allow_redirects=False,
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_POST(self, patched):
session, params = self._make_one()
session.post(
"http://post.example.com",
params={"x": 2},
data="Some_data",
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("POST", "http://post.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)])
session.close()
@mock.patch("aiohttp.client.
| ArcheProject/arche_pas | arche_pas/tests/test_models.py | Python | gpl-2.0 | 17,498 | 0.000343 |
import unittest
from BTrees.OOBTree import OOBTree
from arche.interfaces import IObjectUpdatedEvent
from arche.interfaces import IWillLoginEvent
from arche.interfaces import IUser
from arche.testing import barebone_fixture
from pyramid import testing
from zope.interface.verify import verifyObject
from zope.interface.verify import verifyClass
from arche.api import User
from pyramid.request import apply_request_extensions
from pyramid.request import Request
from arche_pas.interfaces import IProviderData
from arche_pas.interfaces import IPASProvider
from arche_pas.exceptions import ProviderConfigError
class ProviderDataTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _cut(self):
from arche_pas.models import ProviderData
return ProviderData
def test_verify_object(self):
context = User()
self.failUnless(verifyObject(IProviderData, self._cut(context)))
def test_verify_class(self):
self.failUnless(verifyClass(IProviderData, self._cut))
def test_setitem(self):
context = User()
obj = self._cut(context)
obj['one'] = {'one': 1}
self.assertIsInstance(obj['one'], OOBTree)
class PASProviderTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _cut(self):
from arche_pas.models import PASProvider
return PASProvider
def _dummy_provider(self):
class DummyProvider(self._cut):
name = 'dummy'
title = 'Wakka'
settings = None
id_key = 'dummy_key'
default_settings = {'one': 1}
return DummyProvider
def test_verify_object(self):
request = testing.DummyRequest()
self.failUnless(verifyObject(IPASProvider, self._cut(request)))
def test_verify_class(self):
self.failUnless(verifyClass(IPASProvider, self._cut))
def test_settings(self):
factory = self._dummy_provider()
factory.update_settings({'two': 2}, three=3)
obj = factory(testing.DummyModel())
self.assertEqual(obj.settings, {'one': 1, 'two': 2, 'three': 3})
def test_settings_update_provider(self):
factory = self._dummy_provider()
factory.update_settings({'two': 2, 'provider': {'title': 'Hello'}})
obj = factory(testing.DummyModel())
self.assertEqual(obj.title, 'Hello')
def test_validate_settings_error(self):
factory = self._dummy_provider()
factory.update_settings(one=2)
self.assertRaises(ProviderConfigError, factory.validate_settings)
def test_validate_settings_default(self):
factory = self._dummy_provider()
factory.update_settings({
'client_id': 'client_id',
'auth_uri': 'auth_uri',
'token_uri': 'token_uri',
'client_secret': 'client_secret'
})
self.assertEqual(factory.validate_settings(), None)
def test_callback_url(self):
self.config.include('betahaus.viewcomponent')
self.config.include('arche_pas.views')
factory = self._dummy_provider()
request = Request.blank('/')
obj = factory(request)
self.assertEqual(obj.callback_url(), 'http://localhost/pas_callback/dummy')
def test_get_id(self):
self.config.include('arche_pas.models')
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
obj = self._dummy_provider()(testing.DummyModel())
self.assertEqual(obj.get_id(user), 'very_secret')
def test_get_user(self):
self.config.include('arche.testing')
self.config.include('arche.testing.catalog')
self.config.include('arche_pas.catalog')
self.config.include('arche_pas.models')
root = barebone_fixture(self.config)
request = testing.DummyRequest()
self.config.begin(request)
apply_request_extensions(request)
request.root = root
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
provider = self._dummy_provider()
self.config.registry.registerAdapter(provider, name=provider.name)
root['users']['jane'] = user
query = "pas_ident == ('dummy', 'very_secret')"
docids = root.catalog.query(query)[1]
self.assertEqual(tuple(request.resolve_docids(docids))[0], user)
obj = provider(request)
self.assertEqual(obj.get_user('very_secret'), user)
# def test_build_reg_case_params(self):
# request = testing.DummyRequest()
# factory = self._dummy_provider()
# obj = factory(request)
# data = {
#
# }
# obj.build_reg_case_params(data)
# def prepare_register(self, request, data):
#
# def login(self, user, request, first_login = False, came_from = None):
#
def test_login(self):
from arche.resources import User
request = testing.DummyRequest()
root = barebone_fixture(self.config)
root['users']['jane'] = user = User()
L = []
def subscriber(event):
L.append(event)
self.config.add_subscriber(subscriber, IWillLoginEvent)
factory = self._dummy_provider()
obj = factory(request)
obj.login(user)
self.assertEqual(L[0].user, user)
def test_store(self):
self.config.include('arche.testing')
self.config.include('arche.testing.catalog')
self.config.include('arche_pas.catalog')
self.config.include('arche_pas.models')
root = barebone_fixture(self.config)
request = testing.DummyRequest()
apply_request_extensions(request)
request.root = root
self.config.begin(request)
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
provider = self._dummy_provider()
self.config.registry.registerAdapter(provider, name=provider.name)
root['users']['jane'] = user
obj = provider(request)
L = []
def subsc(obj, event):
L.append(event)
self.config.add_subscriber(subsc, [IUser, IObjectUpdatedEvent])
obj.store(user, {'hello': 'world', 1: 2})
self.assertIn('pas_ident', L[0].changed)
def test_store_saves_new_keys(self):
self.config.include('arche.testing')
self.config.include('arche.testing.catalog')
self.config.include('arche_pas.models')
self.config.include('arche_pas.catalog')
root = barebone_fixture(self.config)
request = testing.DummyRequest()
apply_request_extensions(request)
request.root = root
self.config.begin(request)
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
provider = self._dummy_provider()
self.config.registry.registerAdapter(provider, name=provider.name)
root['users']['jane'] = user
obj = provider(request)
self.assertEqual(obj.store(user, {'hello': 'world', 1: 2}), set(['hello', 1]))
self.assertEqual(obj.store(user, {'hello': 'world', 1: 2}), set())
# hello removed
self.assertEqual(obj.store(user, {1: 2}), set())
self.assertNotIn('hello', provider_data['dummy'])
# 1 was updated
self.assertEqual(obj.store(user, {1: 3}), set([1]))
class AddPASTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _fut(self):
from arche_pas.models import add_pas
return add_pas
# FIXME: Proper tests for add_pas
class RegistrationCaseTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _cut(self):
from arche_pas.
| puddl3glum/gen_scraper | build/lib/gen_scraper/gen_master.py | Python | mit | 5,572 | 0.006999 |
import os
import pickle
import socket
import sys
import threading
import struct
import time
import yaml
# from gen_scraper import scraper
def handle_slave(slaveconn_info, config_info, work_info):
# print(v_chunk)
"""
handles a slave connection, sending config, work, or receiving more results.
params:
slavesocket: <socket> connection to slave
address: IP addr, port of slave
config: <dict> config
v_list: <list> of places to visit and partially scraped stuff.
"""
# recv type of communication
slaveconn = slaveconn_info['conn']
address = slaveconn_info['address']
# print(address)
config = config_info['config']
config_dump = config_info['config_dump']
v_list = work_info['v_list']
work_loc = work_info['work_loc']
r_lock = work_info['r_lock']
print('HANDLING SLAVE', address)
comm = slaveconn.recv(4)
comm = comm.decode()
print(comm)
# if asking for config, send initial files to client.
if comm == 'CNFG':
# send config
# convert config to bytes (will have to use pickle)
byte_len = len(config_dump)
# print(byte_len)
byte_len = struct.pack('!I', byte_len)
# print(byte_len)
slaveconn.send(byte_len)
slaveconn.send(config_dump)
# if asking for work:
elif comm == 'WORK':
# send queue chunk.
# in dict, track IP : (chunk, time)
# TODO: check work_loc for work which has been out for too long. Assign that instead of new chunk if so.
chunk_len = min([config['batch size'], len(v_list)]) # length is either batch size or size of queue
# TODO: must lock this.
r_lock.acquire()
v_chunk = [v_list.pop(0) for _ in range(chunk_len)] # get a chunk
work_loc[address] = (v_chunk, time.time()) # add chunk to work loc with time
r_lock.release()
chunk_pickle = pickle.dumps(v_chunk)
byte_len = len(chunk_pickle)
# print(byte_len)
byte_len = struct.pack('!I', byte_len)
slaveconn.send(byte_len)
slaveconn.send(chunk_pickle)
# if sending back results:
elif comm == 'RSLT':
# append should be thread safe
# append results to queue
# send new chunk
# update IP dict
res = slaveconn.recv(1024)
print(res.decode())
print('DONE WITH SLAVE', address)
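# A hedged slave-side sketch (an assumption, not part of this file): the
# master frames every payload as a 4-byte big-endian length followed by a
# pickled body, so the peer must read the length first, then exactly that
# many bytes before unpickling.
def recv_payload(sock):
    (n,) = struct.unpack('!I', sock.recv(4))  # mirrors struct.pack('!I', ...)
    buf = b''
    while len(buf) < n:
        buf += sock.recv(n - len(buf))  # recv may return short reads
    return pickle.loads(buf)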
def get_config(directory='', config_file_loc=''):
print('STARTING MASTER')
# print(directory)
# config file location
config_file_loc = os.path.join(directory, config_file_loc)
# master reads and parses config file, creates objects for scraping
with open(config_file_loc, 'rb') as config_file:
config = yaml.load(config_file)
# grab user's scraper class and put its methods into our scraper
scraper_loc = os.path.join(directory, config['scraper']['loc'])
'''
u_module = SourceFileLoader(config['scraper']['module'], scraper_loc).load_module()
u_scraper = getattr(u_module, config['scraper']['class'])
u_processor = getattr(u_module, config['processor']['class'])
print(dir(u_module))
print(dir(u_scraper))
print(dir(u_processor))
'''
with open(scraper_loc) as u_file:
scraper_script = u_file.read()
scraper_loc = os.path.join(directory, config['processor']['loc'])
with open(scraper_loc) as u_file:
processor_script = u_file.read()
config_dump = pickle.dumps([config, scraper_script, processor_script])
return config, config_dump
# send entire user-created file over socket.
# slave uses cmdline to exec file.
# u_scraper_attr = [x for x in dir(u_scraper) if '__' not in x]
# u_processor_attr = [x for x in dir(u_processor) if '__' not in x]
# u_scraper.test()
# print(config)
# master generates initial queue
def main(argv=sys.argv):
# main entry point of the master
# argv[1] is cwd, argv[2] is config file loc
config, config_dump = get_config(directory=argv[1], config_file_loc=argv[2])
run(config, config_dump)
def run(config, config_dump):
v_list = [] # list of pages to visit and their data of form -> function_name, args, data{}
work_loc = {} # dict of work location of form -> (address): (work, time)
r_lock = threading.Lock()
# populate v_list with 1 initial entry
# data is passed with dict w/ function_name, args, data{}
for x in config['start']:
init_bundle = {'function':x['function'], 'args':x['args'], 'data':{}}
v_list.append(init_bundle)
# print(v_list)
# receives connection
slave_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
slave_sock.bind(('', 666))
slave_sock.listen(5)
# enters main loop
while True:
slaveconn, address = slave_sock.accept()
# print('Got conn from', address)
# now do something with the clientsocket
# in this case, we'll pretend this is a threaded server
# construct v_chunk
slaveconn_info = {'conn':slaveconn, 'address':address}
config_info = {'config':config, 'config_dump': config_dump}
work_info = {'v_list':v_list, 'work_loc': work_loc, 'r_lock':r_lock}
print(v_list)
ct = threading.Thread(target=handle_slave, args=[slaveconn_info, config_info, work_info])
ct.start()
if __name__ == '__main__':
test = ['', 'C:\\Users\\setzer\\Documents\\GitHub\\gen_scraper\\example', 'example_config.yml']
main(test)
| magul/volontulo | backend/apps/volontulo/utils.py | Python | mit | 1,857 | 0 |
# -*- coding: utf-8 -*-
"""
.. module:: utils
"""
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.utils.text import slugify
from apps.volontulo.models import UserProfile
# Offers statuses dictionary with meaningful names.
# todo: remove dependency
OFFERS_STATUSES = {
'NEW': 'Nowa',
'ACTIVE': 'Aktywna',
'FINISHED': 'Zakończona',
'SUSPENDED': 'Zawieszona',
'CLOSED': 'Zamknięta',
}
def get_administrators_emails():
"""Get all administrators emails or superuser email
Format returned:
emails = {
1: 'admin1@example.com',
2: 'admin2@example.com',
}
"""
administrators = UserProfile.objects.filter(is_administrator=True)
emails = {}
for admin in administrators:
emails[str(admin.user.id)] = admin.user.email
if not emails:
administrators = User.objects.filter(is_superuser=True)
for admin in administrators:
emails[str(admin.id)] = admin.email
return emails
def correct_slug(model_class, view_name, slug_field):
"""Decorator
|
that is reposponsible for redirect to url with correct slug.
It is used by url for offers, organizations and users.
"""
def decorator(wrapped_func):
"""Decorator function for correcting slugs."""
def wrapping_func(request, slug, id_):
"""Wrapping function for correcting slugs."""
obj = get_object_or_404(model_class, id=id_)
if slug != slugify(getattr(obj, slug_field)):
return redirect(
view_name,
slug=slugify(getattr(obj, slug_field)),
id_=id_
)
return wrapped_func(request, slug, id_)
return wrapping_func
return decorator
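# A hedged usage sketch (the view name 'user_view' is illustrative and would
# have to exist in the URLconf): if the slug in the URL does not match
# slugify(user.username), the decorated view redirects to the canonical URL
# before running.
@correct_slug(User, 'user_view', 'username')
def user_view(request, slug, id_):
    return None  # a real view would return an HttpResponse here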
| natural/django-objectcounters | objectcounters/templatetags/counter_tags.py | Python | bsd-3-clause | 398 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.template import Library
from ..models import Counter
register = Library()
@register.assignment_tag
def counter_for_object(name, obj, default=0):
"""Returns the counter value for the given name and instance."""
try:
return Counter.objects.get_for_object(name, obj).value
except Exception:
return default
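# A hedged template usage sketch (assumes this tag library is loaded and a
# Counter named "views" exists; names are illustrative):
#
#   {% load counter_tags %}
#   {% counter_for_object "views" article as views_count %}
#   {{ views_count }}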
| pengli09/Paddle | python/paddle/v2/framework/optimizer.py | Python | apache-2.0 | 21,289 | 0.000141 |
from collections import defaultdict
import paddle.v2.framework.framework as framework
from paddle.v2.framework.backward import append_backward_ops
from paddle.v2.framework.regularizer import append_regularization_ops
__all__ = [
'SGDOptimizer', 'MomentumOptimizer', 'AdagradOptimizer', 'AdamOptimizer',
'AdamaxOptimizer'
]
class Optimizer(object):
"""Optimizer Base class.
Define the common interface of an optimizer.
User should not use this class directly,
but should use one of its implementations.
"""
def __init__(self, global_step=None):
self._global_step = global_step
# Dictionary of accumulators. Some optimizer subclasses need to
# allocate and manage extra variables associated with the parameters
# to train. These variables are called accumulators.
# {accum_name : { parameter_name : accumulator_for_parameter, ...}, ...}
self._accumulators = defaultdict(lambda: dict())
def _append_optimize_op(self, block, param_and_grad):
""" append optimize operator to block and return all the added optimize_op
"""
raise NotImplementedError()
def _initialize_tensors(self, block):
"""Create all necessary tensors, that will be shared for all parameter updates.
Tensors like learning rate should be initialized here.
Args:
block: the block in which the loss variable is present
"""
pass
def _create_accumulators(self, block, parameters):
"""Create all accumulators needed by the parameters
Args:
block: the block in which the loss variable is present
parameters: list of parameter variables for the optimizer
"""
pass
def _finish_update(self, block):
"""Finish any custom updates needed
before completing an optimization step
Args:
block: the block in which the loss variable is present
parameters: list of parameter variables for the optimizer
Returns:
list of finish ops or None
"""
pass
def _add_accumulator(self, block, name, param, dtype=None, fill_value=0.0):
"""Utility function to add an accumulator for a parameter
Args:
block: the block in which the loss variable is present
name: name of the accumulator
param: parameter variable for which accumulator is to be added
dtype: data type of the accumulator variable
fill_value: value to initialize the accumulator variable
"""
if (name in self._accumulators and
param.name in self._accumulators[name]):
raise Exception("Accumulator {} already exists for parmeter {}".
format(name, param.name))
global_block = block.program.global_block()
param_shape = list(param.shape)
param_acc = global_block.create_var(
dtype=dtype, shape=param_shape, lod_level=0)
# Initialize the accumulator with fill_value
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
global_block.append_op(
type="fill_constant",
outputs={"Out": param_acc},
attrs={"shape": param_shape,
"value": fill_value})
# Add to accumulators dict
self._accumulators[name][param.name] = param_acc
def _get_accumulator(self, name, param):
"""Utility function to fetch an accumulator for a parameter
Args:
name: name of the accumulator
param: parameter variable for which accumulator is to be fetched
Returns:
accumulator variable for the parameter
"""
if (name not in self._accumulators or
param.name not in self._accumulators[name]):
raise Exception("Accumulator {} does not exist for parameter {}".
format(name, param.name))
return self._accumulators[name][param.name]
def _increment_global_step(self, block):
"""Increment the global step by 1 after every iteration
Args:
block: the block in which the loss variable is present
Returns:
list with global_step increment op as its only element
"""
assert isinstance(block, framework.Block)
assert self._global_step is not None
# create the increment op
increment_op = block.append_op(
type="increment",
inputs={"X": self._global_step},
outputs={"Out": self._global_step},
attrs={"step": 1.0})
return increment_op
def create_optimization_pass(self, parameters_and_grads, loss):
"""Add optimization operators to update gradients to variables.
Args:
loss: the target that this optimization is for.
parameters_and_grads: a list of (variable, gradient) pair to update.
Returns:
return_op_list: a list of operators that will complete one step of
optimization. This will include parameter update ops, global step
update ops and any other custom ops required by subclasses to manage
their internal state.
"""
# This is a default implementation of create_optimization_pass that
# can be shared by most optimizers. This implementation assumes that
# the subclass will implement the _append_optimize_op method and the
# _initialize_tensors method. The subclass can extend the
# _create_accumulators method if it needs to create accumulators
# for parameters and extend _finish_update method to add custom ops.
# Create any accumulators
self._create_accumulators(loss.block,
[p[0] for p in parameters_and_grads])
# Create any necessary tensors
self._initialize_tensors(loss.block)
optimize_ops = []
for param_and_grad in parameters_and_grads:
if param_and_grad[1] is not None:
optimize_op = self._append_optimize_op(loss.block,
param_and_grad)
optimize_ops.append(optimize_op)
# Returned list of ops can include more ops in addition
# to optimization ops
return_ops = optimize_ops
# Get custom finish ops for subclasses
# FIXME: Need to fix this once we figure out how to handle dependencies
finish_ops = self._finish_update(loss.block)
if finish_ops is not None:
return_ops += finish_ops
if self._global_step is not None:
return_ops.append(self._increment_global_step(loss.block))
return return_ops
def minimize(self, loss, parameter_list=None, no_grad_set=None):
"""Add operations to minimize `loss` by updating `parameter_list`.
This method combines interface `append_backward_ops()` and
`create_optimization_pass()` into one.
"""
params_grads = append_backward_ops(loss, parameter_list, no_grad_set or
set())
# Add regularization if any
params_grads = append_regularization_ops(params_grads)
optimize_ops = self.create_optimization_pass(params_grads, loss)
return optimize_ops
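# A hedged illustration (an assumption, not part of this file): a minimal
# subclass wiring the accumulator helpers together, following the documented
# layout {accum_name: {parameter_name: accumulator_variable}}.
class VelocitySGDOptimizer(Optimizer):
    def __init__(self, learning_rate, global_step=None):
        assert learning_rate is not None
        super(VelocitySGDOptimizer, self).__init__(global_step)
        self.type = "velocity_sgd"
        self._learning_rate = learning_rate

    def _create_accumulators(self, block, parameters):
        # one zero-filled "velocity" accumulator per trainable parameter
        for p in parameters:
            self._add_accumulator(block, "velocity", p, dtype="float32")

    def _append_optimize_op(self, block, param_and_grad):
        velocity = self._get_accumulator("velocity", param_and_grad[0])
        # Emitting the actual update op depends on the operators available,
        # so it is left out of this sketch.
        raise NotImplementedError("sketch only")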
class SGDOptimizer(Optimizer):
""" Simple SGD optimizer without any state.
"""
def __init__(self, learning_rate, global_step=None):
assert learning_rate is not None
super(SGDOptimizer, self).__init__(global_step)
self.type = "sgd"
self._learning_rate = learning_rate
def _initialize_tensors(self, block):
assert isinstance(block, framework.Block)
lr_shape = [1]
# create a variable for learning_rate
self._lr = block.create_var(
dtype="float32", shape=lr_shape, lod_level=0)
# create an op to init the learning_rate
# FIXME: Fix when Initialization design has been implemented
# https://github.c
| suninsky/ReceiptOCR | Python/server/lib/python2.7/site-packages/pyvirtualdisplay/__init__.py | Python | mit | 216 | 0 |
import logging
from pyvirtualdisplay.display import Display
from pyvirtualdisplay.about import __version__
log = logging.getLogger(__name__)
log.debug('version=%s', __version__)
| lisael/pg-django | tests/regressiontests/admin_changelist/tests.py | Python | bsd-3-clause | 24,196 | 0.003017 |
from __future__ import with_statement, absolute_import
from django.contrib import admin
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.views.main import ChangeList, SEARCH_VAR, ALL_VAR
from django.contrib.auth.models import User
from django.template import Context, Template
from django.test import TestCase
from django.test.client import RequestFactory
from .admin import (ChildAdmin, QuartetAdmin, BandAdmin, ChordsBandAdmin,
GroupAdmin, ParentAdmin, DynamicListDisplayChildAdmin,
DynamicListDisplayLinksChildAdmin, CustomPaginationAdmin,
FilteredChildAdmin, CustomPaginator, site as custom_site,
SwallowAdmin)
from .models import (Child, Parent, Genre, Band, Musician, Group, Quartet,
Membership, ChordsMusician, ChordsBand, Invitation, Swallow,
UnorderedObject, OrderedObject)
class ChangeListTests(TestCase):
urls = "regressiontests.admin_changelist.urls"
def setUp(self):
self.factory = RequestFactory()
def _create_superuser(self, username):
return User.objects.create(username=username, is_superuser=True)
def _mocked_authenticated_request(self, url, user):
request = self.factory.get(url)
request.user = user
return request
def test_select_related_preserved(self):
"""
Regression test for #10348: ChangeList.get_query_set() shouldn't
overwrite a custom select_related provided by ModelAdmin.queryset().
"""
m = ChildAdmin(Child, admin.site)
request = self.factory.get('/child/')
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
self.assertEqual(cl.query_set.query.select_related, {'parent': {'name': {}}})
def test_result_list_empty_changelist_value(self):
"""
Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored
for relationship fields
"""
new_child = Child.objects.create(name='name', parent=None)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
cl = ChangeList(request, Child, list_display, list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.formset = None
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">(None)</td></tr></tbody>' % new_child.id
self.assertFalse(table_output.find(row_html) == -1,
'Failed to find expected row element: %s' % table_output)
def test_result_list_html(self):
"""
Verifies that inclusion tag result_list generates a table with
default ModelAdmin settings.
"""
new_parent = Parent.objects.create(name='parent')
new_child = Child.objects.create(name='name', parent=new_parent)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
        cl = ChangeList(request, Child, list_display, list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.formset = None
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">Parent object</td></tr></tbody>' % new_child.id
self.assertFalse(table_output.find(row_html) == -1,
'Failed to find expected row element: %s' % table_output)
def test_result_list_editable_html(self):
"""
Regression tests for #11791: Inclusion tag result_list generates a
table and this checks that the items are nested within the table
element tags.
Also a regression test for #13599, verifies that hidden fields
when list_editable is enabled are rendered in a div outside the
table.
"""
new_parent = Parent.objects.create(name='parent')
new_child = Child.objects.create(name='name', parent=new_parent)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
# Test with list_editable fields
m.list_display = ['id', 'name', 'parent']
m.list_display_links = ['id']
m.list_editable = ['name']
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
FormSet = m.get_changelist_formset(request)
cl.formset = FormSet(queryset=cl.result_list)
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
# make sure that hidden fields are in the correct place
hiddenfields_div = '<div class="hiddenfields"><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></div>' % new_child.id
self.assertFalse(table_output.find(hiddenfields_div) == -1,
'Failed to find hidden fields in: %s' % table_output)
# make sure that list editable fields are rendered in divs correctly
editable_name_field = '<input name="form-0-name" value="name" class="vTextField" maxlength="30" type="text" id="id_form-0-name" />'
        self.assertFalse(table_output.find('<td>%s</td>' % editable_name_field) == -1,
            'Failed to find "name" list_editable field in: %s' % table_output)
def test_result_list_editable(self):
"""
Regression test for #14312: list_editable with pagination
"""
new_parent = Parent.objects.create(name='parent')
for i in range(200):
new_child = Child.objects.create(name='name %s' % i, parent=new_parent)
request = self.factory.get('/child/', data={'p': -1}) # Anything outside range
m = ChildAdmin(Child, admin.site)
# Test with list_editable fields
m.list_display = ['id', 'name', 'parent']
m.list_display_links = ['id']
m.list_editable = ['name']
self.assertRaises(IncorrectLookupParameters, lambda: \
ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m))
def test_custom_paginator(self):
new_parent = Parent.objects.create(name='parent')
for i in range(200):
new_child = Child.objects.create(name='name %s' % i, parent=new_parent)
request = self.factory.get('/child/')
m = CustomPaginationAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
self.assertIsInstance(cl.paginator, CustomPaginator)
def test_distinct_for_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
        results shouldn't appear more than once. Basic ManyToMany.
"""
blues = Genre.objects.cre
|
florianpaquet/sublime-sync
|
upload.py
|
Python
|
mit
| 3,373
| 0.001779
|
# -*- coding:utf-8 -*-
import os
import sys
import sublime
import sublime_plugin
from .archiver import Archiver
from .settings import API_UPLOAD_URL
from .command import CommandWithStatus
sys.path.append(os.path.dirname(__file__))
import requests
class SublimeSyncUploadCommand(sublime_plugin.ApplicationCommand, CommandWithStatus):
def __init__(self, *args, **kwargs):
super(SublimeSyncUploadCommand, self).__init__(*args, **kwargs)
self.running = False
self.password = None
self.archive_filename = None
def post_send(self):
"""
Resets values
"""
self.unset_message()
self.running = False
self.password = None
self.archive_filename = None
def prompt_password(self):
"""
Shows an input panel for entering password
"""
sublime.active_window().show_input_panel(
"Enter archive password",
initial_text='',
on_done=self.pack_and_send_async,
on_cancel=self.pack_and_send_async,
on_change=None
)
def pack_and_send(self):
"""
Create archive and send it to the API
"""
self.set_message("Creating archive...")
archiver = Archiver()
self.archive_filename = archiver.pack_packages(password=self.password, exclude_from_package_control=self.exclude_from_package_control)
self.send_to_api()
def pack_and_send_async(self, password=None):
"""
        Starts async command
"""
self.password = password or None
sublime.set_timeout_async(self.pack_and_send, 0)
def send_to_api(self):
"""
Send archive file to API
"""
self.set_message("Sending archive...")
f = open(self.archive_filename, 'rb')
files = {
'package': f.read(),
'version': sublime.version()[:1],
'username': self.username,
'api_key': self.api_key,
}
# Send data and delete temporary file
        response = requests.post(url=API_UPLOAD_URL, files=files)
status_code = response.status_code
f.close()
os.unlink(self.archive_filename)
if status_code == 200:
self.set_message("Successfuly sent archive")
elif status_code == 403:
self.set_message("Error while sending archive: wrong credentials")
        elif status_code == 413:
            self.set_message("Error while sending archive: filesize too large (>10MB)")
else:
self.set_message("Unexpected error (HTTP STATUS: %s)" % response.status_code)
self.post_send()
def run(self, *args):
"""
Create an archive of all packages and settings
"""
if self.running:
self.set_quick_message("Already working on a backup...")
return
settings = sublime.load_settings('sublime-sync.sublime-settings')
self.running = True
self.username = settings.get('username', '')
self.api_key = settings.get('api_key', '')
self.exclude_from_package_control = settings.get('exclude_from_package_control', False)
self.encrypt = settings.get('encrypt', False)
if self.encrypt:
self.prompt_password()
else:
self.pack_and_send_async()
|
singingwolfboy/flask-dance
|
flask_dance/consumer/storage/session.py
|
Python
|
mit
| 1,085
| 0.001843
|
from flask_dance.consumer.storage import BaseStorage
import flask
class SessionStorage(BaseStorage):
    """
    The default storage backend. Stores and retrieves OAuth tokens using
the :ref:`Flask session <flask:sessions>`.
"""
def __init__(self, key="{bp.name}_oauth_token"):
"""
Args:
key (str): The name to use as a key for storing the OAuth token in the
Flask session. This string will have ``.format(bp=self.blueprint)``
                called on it before it is used, so you can refer to information
on the blueprint as part of the key. For example, ``{bp.name}``
will be replaced with the name of the blueprint.
"""
self.key = key
def get(self, blueprint):
key = self.key.format(bp=blueprint)
return flask.session.get(key)
def set(self, blueprint, token):
key = self.key.format(bp=blueprint)
flask.session[key] = token
def delete(self, blueprint):
key = self.key.format(bp=blueprint)
del flask.session[key]
|
NeCTAR-RC/designate
|
designate/utils.py
|
Python
|
apache-2.0
| 8,767
| 0.000114
|
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import functools
import inspect
import os
import pkg_resources
import uuid
from jinja2 import Template
from oslo.config import cfg
from designate import exceptions
from designate.openstack.common import log as logging
from designate.openstack.common import processutils
from designate.openstack.common import timeutils
LOG = logging.getLogger(__name__)
cfg.CONF.register_opts([
cfg.StrOpt('root-helper',
default='sudo designate-rootwrap /etc/designate/rootwrap.conf')
])
def find_config(config_path):
"""
Find a configuration file using the given hint.
Code nabbed from cinder.
:param config_path: Full or relative path to the config.
:returns: List of config paths
"""
possible_locations = [
config_path,
os.path.join(cfg.CONF.pybasedir, "etc", "designate", config_path),
os.path.join(cfg.CONF.pybasedir, "etc", config_path),
os.path.join(cfg.CONF.pybasedir, config_path),
"/etc/designate/%s" % config_path,
]
found_locations = []
for path in possible_locations:
LOG.debug('Searching for configuration at path: %s' % path)
if os.path.exists(path):
LOG.debug('Found configuration at path: %s' % path)
found_locations.append(os.path.abspath(path))
return found_locations
def read_config(prog, argv):
config_files = find_config('%s.conf' % prog)
cfg.CONF(argv[1:], project='designate', prog=prog,
default_config_files=config_files)
def resource_string(*args):
if len(args) == 0:
raise ValueError()
resource_path = os.path.join('resources', *args)
if not pkg_resources.resource_exists('designate', resource_path):
raise exceptions.ResourceNotFound('Could not find the requested '
'resource: %s' % resource_path)
return pkg_resources.resource_string('designate', resource_path)
def load_schema(version, name):
schema_string = resource_string('schemas', version, '%s.json' % name)
return json.loads(schema_string)
def load_template(template_name):
template_string = resource_string('templates', template_name)
return Template(template_string)
def render_template(template, **template_context):
if not isinstance(template, Template):
template = load_template(template)
return template.render(**template_context)
def render_template_to_file(template_name, output_path, makedirs=True,
**template_context):
output_folder = os.path.dirname(output_path)
# Create the output folder tree if necessary
if makedirs and not os.path.exists(output_folder):
os.makedirs(output_folder)
# Render the template
content = render_template(template_name, **template_context)
with open(output_path, 'w') as output_fh:
output_fh.write(content)
def execute(*cmd, **kw):
root_helper = kw.pop('root_helper', cfg.CONF.root_helper)
run_as_root = kw.pop('run_as_root', True)
return processutils.execute(*cmd, run_as_root=run_as_root,
root_helper=root_helper, **kw)
def get_item_properties(item, fields, mixed_case_fields=[], formatters={}):
"""Return a tuple containing the item properties.
:param item: a single item resource (e.g. Server, Tenant, etc)
:param fields: tuple of strings with the desired field names
:param mixed_case_fields: tuple of field names to preserve case
:param formatters: dictionary mapping field names to callables
to format the values
"""
row = []
for field in fields:
if field in formatters:
row.append(formatters[field](item))
else:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
else:
field_name = field.lower().replace(' ', '_')
if not hasattr(item, field_name) and \
(isinstance(item, dict) and field_name in item):
data = item[field_name]
else:
data = getattr(item, field_name, '')
if data is None:
data = ''
row.append(data)
return tuple(row)
def get_columns(data):
"""
    Some rows might have a variable count of columns; ensure that we have the
    same.
    :param data: Results in [{}, {}]
"""
columns = set()
def _seen(col):
columns.add(str(col))
map(lambda item: map(_seen, item.keys()), data)
return list(columns)
def increment_serial(serial=0):
new_date = int(timeutils.strtime(fmt="%Y%m%d"))
old_date = serial / 100
new_serial = new_date * 100
if new_date == old_date:
new_serial = serial + 1
return new_serial
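# Worked example (added for clarity; dates illustrative): serials are of the
# form YYYYMMDDnn, so with today = 20140310:
#   increment_serial(0)          -> 2014031000  (fresh serial for today)
#   increment_serial(2014031000) -> 2014031001  (same day: bump the counter)
#   increment_serial(2013120599) -> 2014031000  (older day: reset to today)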
def quote_string(string):
inparts = string.split(' ')
outparts = []
tmp = None
for part in inparts:
if part == '':
continue
elif part[0] == '"' and part[-1:] == '"' and part[-2:] != '\\"':
# Handle Quoted Words
outparts.append(part.strip('"'))
elif part[0] == '"':
            # Handle Start of Quoted Sentence
tmp = part[1:]
        elif tmp is not None and part[-1:] == '"' and part[-2:] != '\\"':
            # Handle End of Quoted Sentence
tmp += " " + part.strip('"')
outparts.append(tmp)
tmp = None
        elif tmp is not None:
            # Handle Middle of Quoted Sentence
tmp += " " + part
else:
# Handle Standalone words
outparts.append(part)
if tmp is not None:
# Handle unclosed quoted strings
outparts.append(tmp)
# This looks odd, but both calls are necessary to ensure the end results
# is always consistent.
outparts = [o.replace('\\"', '"') for o in outparts]
outparts = [o.replace('"', '\\"') for o in outparts]
return '"' + '" "'.join(outparts) + '"'
def deep_dict_merge(a, b):
if not isinstance(b, dict):
return b
result = copy.deepcopy(a)
for k, v in b.iteritems():
if k in result and isinstance(result[k], dict):
result[k] = deep_dict_merge(result[k], v)
else:
result[k] = copy.deepcopy(v)
return result
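# Example (added for clarity): nested dicts merge recursively; for any other
# value the one from ``b`` wins:
#   deep_dict_merge({'a': {'x': 1}, 'k': 1}, {'a': {'y': 2}, 'k': 2})
#   ->  {'a': {'x': 1, 'y': 2}, 'k': 2}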
def generate_uuid():
return str(uuid.uuid4())
def is_uuid_like(val):
"""Returns validation of a value as a UUID.
For our purposes, a UUID is a canonical form string:
aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
"""
try:
return str(uuid.UUID(val)) == val
except (TypeError, ValueError, AttributeError):
return False
def validate_uuid(*check):
"""
    A wrapper to ensure that API controller methods' arguments are valid UUIDs.
Usage:
@validate_uuid('zone_id')
def get_all(self, zone_id):
return {}
"""
def inner(f):
def wrapper(*args, **kwargs):
arg_spec = inspect.getargspec(f).args
# Ensure that we have the exact number of parameters that the
# function expects. This handles URLs like
# /v2/zones/<UUID - valid or invalid>/invalid
# get, patch and delete return a 404, but Pecan returns a 405
# for a POST at the same URL
if (len(arg_spec) != len(args)):
raise exceptions.NotFound()
# Ensure that we have non-empty parameters in the cases where we
# have sub controllers - i.e. controllers at the 2nd level
# Thi
|
airween/hamlib
|
bindings/pytest.py
|
Python
|
gpl-2.0
| 4,198
| 0.00143
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
## Uncomment to run this script from an in-tree build (or adjust to the
## build directory) without installing the bindings.
#sys.path.append ('.')
#sys.path.append ('.libs')
import Hamlib
def StartUp():
"""Simple script to test the Hamlib.py module with Python2."""
print "%s: Python %s; %s\n" \
% (sys.argv[0], sys.version.split()[0], Hamlib.cvar.hamlib_version)
Hamlib.rig_set_debug(Hamlib.RIG_DEBUG_NONE)
# Init RIG_MODEL_DUMMY
my_rig = Hamlib.Rig(Hamlib.RIG_MODEL_DUMMY)
my_rig.set_conf("rig_pathname", "/dev/Rig")
my_rig.set_conf("retry", "5")
my_rig.open()
# 1073741944 is token value for "itu_region"
# but using get_conf is much more convenient
region = my_rig.get_conf(1073741944)
rpath = my_rig.get_conf("rig_pathname")
retry = my_rig.get_conf("retry")
print "status(str):\t\t", Hamlib.rigerror(my_rig.error_status)
print "get_conf:\t\tpath = %s, retry = %s, ITU region = %s" \
% (rpath, retry, region)
my_rig.set_freq(Hamlib.RIG_VFO_B, 5700000000)
my_rig.set_vfo(Hamlib.RIG_VFO_B)
print "freq:\t\t\t", my_rig.get_freq()
my_rig.set_freq(Hamlib.RIG_VFO_A, 145550000)
(mode, width) = my_rig.get_mode()
print "mode:\t\t\t", Hamlib.rig_strrmode(mode), "\nbandwidth:\t\t", width
my_rig.set_mode(Hamlib.RIG_MODE_CW)
(mode, width) = my_rig.get_mode()
print "mode:\t\t\t", Hamlib.rig_strrmode(mode), "\nbandwidth:\t\t", width
print "ITU_region:\t\t", my_rig.state.itu_region
print "Backend copyright:\t", my_rig.caps.copyright
print "Model:\t\t\t", my_rig.caps.model_name
print "Manufacturer:\t\t", my_rig.caps.mfg_name
print "Backend version:\t", my_rig.caps.version
print "Backend status:\t\t", Hamlib.rig_strstatus(my_rig.caps.status)
print "Rig info:\t\t", my_rig.get_info()
my_rig.set_level("VOX", 1)
print "VOX level:\t\t", my_rig.get_level_i("VOX")
my_rig.set_level(Hamlib.RIG_LEVEL_VOX, 5)
print "VOX level:\t\t", my_rig.get_level_i(Hamlib.RIG_LEVEL_VOX)
af = 12.34
print "Setting AF to %0.2f...." % (af)
my_rig.set_level("AF", af)
print "status:\t\t\t%s - %s" % (my_rig.error_status,
Hamlib.rigerror(my_rig.error_status))
print "AF level:\t\t%0.2f" % my_rig.get_level_f(Hamlib.RIG_LEVEL_AF)
print "strength:\t\t", my_rig.get_level_i(Hamlib.RIG_LEVEL_STRENGTH)
print "status:\t\t\t", my_rig.error_status
print "status(str):\t\t", Hamlib.rigerror(my_rig.error_status)
chan = Hamlib.channel(Hamlib.RIG_VFO_B)
my_rig.get_channel(chan)
print "get_channel status:\t", my_rig.error_status
print "VFO:\t\t\t", Hamlib.rig_strvfo(chan.vfo), ", ", chan.freq
print "Attenuators:\t\t", my_rig.caps.attenuator
print "\nSending Morse, '73'"
my_rig.send_morse(Hamlib.RIG_VFO_A, "73")
my_rig.close ()
print "\nSome static functions:"
err, lon1, lat1 = Hamlib.locator2longlat("IN98XC")
err, lon2, lat2 = Hamlib.locator2longlat("DM33DX")
err, loc1 = Hamlib.longlat2locator(lon1, lat1, 3)
err, loc2 = Hamlib.longlat2locator(lon2, lat2, 3)
print "Loc1:\t\tIN98XC -> %9.4f, %9.4f -> %s" % (lon1, lat1, loc1)
print "Loc2:\t\tDM33DX -> %9.4f, %9.4f -> %s" % (lon2, lat2, loc2)
err, dist, az = Hamlib.qrb(lon1, lat1, lon2, lat2)
longpath = Hamlib.distance_long_path(dist)
print "Distance:\t%.3f km, azimuth %.2f, long path:\t%.3f km" \
% (dist, az, longpath)
# dec2dms expects values from 180 to -180
# sw is 1 when deg is negative (west or south) as 0 cannot be signed
err, deg1, mins1, sec1, sw1 = Hamlib.dec2dms(lon1)
err, deg2, mins2, sec2, sw2 = Hamlib.dec2dms(lat1)
lon3 = Hamlib.dms2dec(deg1, mins1, sec1, sw1)
lat3 = Hamlib.dms2dec(deg2, mins2, sec2, sw2)
print 'Longitude:\t%4.4f, %4d° %2d\' %2d" %1s\trecoded: %9.4f' \
% (lon1, deg1, mins1, sec1, ('W' if sw1 else 'E'), lon3)
print 'Latitude:\t%4.4f, %4d° %2d\' %2d" %1s\trecoded: %9.4f' \
% (lat1, deg2, mins2, sec2, ('S' if sw2 else 'N'), lat3)
if __name__ == '__main__':
StartUp()
|
mtagle/airflow
|
airflow/configuration.py
|
Python
|
apache-2.0
| 29,594
| 0.002298
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
import logging
import os
import pathlib
import shlex
import subprocess
import sys
import warnings
from base64 import b64encode
from collections import OrderedDict
# Ignored Mypy on configparser because it thinks the configparser module has no _UNSET attribute
from configparser import _UNSET, ConfigParser, NoOptionError, NoSectionError # type: ignore
from typing import Dict, Optional, Tuple, Union
import yaml
from cryptography.fernet import Fernet
from airflow.exceptions import AirflowConfigException
log = logging.getLogger(__name__)
# show Airflow's deprecation warnings
warnings.filterwarnings(
action='default', category=DeprecationWarning, module='airflow')
warnings.filterwarnings(
action='default', category=PendingDeprecationWarning, module='airflow')
def expand_env_var(env_var):
"""
Expands (potentially nested) env vars by repeatedly applying
`expandvars` and `expanduser` until interpolation stops having
any effect.
"""
if not env_var:
return env_var
while True:
interpolated = os.path.expanduser(os.path.expandvars(str(env_var)))
if interpolated == env_var:
return interpolated
else:
env_var = interpolated
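# Example (added for clarity; the path is an illustrative assumption): with
# AIRFLOW_HOME=/opt/airflow set in the environment, nested references collapse
# until the value stops changing:
#   expand_env_var("$AIRFLOW_HOME/dags")  ->  "/opt/airflow/dags"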
def run_command(command):
"""
Runs command and returns stdout
"""
process = subprocess.Popen(
shlex.split(command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
output, stderr = [stream.decode(sys.getdefaultencoding(), 'ignore')
for stream in process.communicate()]
if process.returncode != 0:
raise AirflowConfigException(
"Cannot execute {}. Error code is: {}. Output: {}, Stderr: {}"
.format(command, process.returncode, output, stderr)
)
return output
def _read_default_config_file(file_name: str) -> Tuple[str, str]:
templates_dir = os.path.join(os.path.dirname(__file__), 'config_templates')
file_path = os.path.join(templates_dir, file_name)
with open(file_path, encoding='utf-8') as config_file:
return config_file.read(), file_path
DEFAULT_CONFIG, DEFAULT_CONFIG_FILE_PATH = _read_default_config_file('default_airflow.cfg')
TEST_CONFIG, TEST_CONFIG_FILE_PATH = _read_default_config_file('default_test.cfg')
def default_config_yaml() -> dict:
"""
Read Airflow configs from YAML file
:return: Python dictionary containing configs & their info
"""
templates_dir = os.path.join(os.path.dirname(__file__), 'config_templates')
file_path = os.path.join(templates_dir, "config.yml")
with open(file_path) as config_file:
return yaml.safe_load(config_file)
class AirflowConfigParser(ConfigParser):
# These configuration elements can be fetched as the stdout of commands
# following the "{section}__{name}__cmd" pattern, the idea behind this
    # is to not store passwords on boxes in text files.
as_command_stdout = {
('core', 'sql_alchemy_conn'),
('core', 'fernet_key'),
('celery', 'broker_url'),
('celery', 'flower_basic_auth'),
('celery', 'result_backend'),
('atlas', 'password'),
('smtp', 'smtp_password'),
('ldap', 'bind_password'),
('kubernetes', 'git_password'),
}
# A mapping of (new option -> old option). where option is a tuple of section name and key.
# When reading new option, the old option will be checked to see if it exists. If it does a
# DeprecationWarning will be issued and the old option will be used instead
deprecated_options = {
('elasticsearch', 'host'): ('elasticsearch', 'elasticsearch_host'),
('elasticsearch', 'log_id_template'): ('elasticsearch', 'elasticsearch_log_id_template'),
('elasticsearch', 'end_of_log_mark'): ('elasticsearch', 'elasticsearch_end_of_log_mark'),
('elasticsearch', 'frontend'): ('elasticsearch', 'elasticsearch_frontend'),
('elasticsearch', 'write_stdout'): ('elasticsearch', 'elasticsearch_write_stdout'),
('elasticsearch', 'json_format'): ('elasticsearch', 'elasticsearch_json_format'),
('elasticsearch', 'json_fields'): ('elasticsearch', 'elasticsearch_json_fields'),
('logging', 'base_log_folder'): ('core', 'base_log_folder'),
('logging', 'remote_logging'): ('core', 'remote_logging'),
('logging', 'remote_log_conn_id'): ('core', 'remote_log_conn_id'),
('logging', 'remote_base_log_folder'): ('core', 'remote_base_log_folder'),
('logging', 'encrypt_s3_logs'): ('core', 'encrypt_s3_logs'),
('logging', 'logging_level'): ('core', 'logging_level'),
('logging', 'fab_logging_level'): ('core', 'fab_logging_level'),
('logging', 'logging_config_class'): ('core', 'logging_config_class'),
('logging', 'colored_console_log'): ('core', 'colored_console_log'),
('logging', 'colored_log_format'): ('core', 'colored_log_format'),
('logging', 'colored_formatter_class'): ('core', 'colored_formatter_class'),
('logging', 'log_format'): ('core', 'log_format'),
('logging', 'simple_log_format'): ('core', 'simple_log_format'),
('logging', 'task_log_prefix_template'): ('core', 'task_log_prefix_template'),
('logging', 'log_filename_template'): ('core', 'log_filename_template'),
('logging', 'log_processor_filename_template'): ('core', 'log_processor_filename_template'),
('logging', 'dag_processor_manager_log_location'): ('core', 'dag_processor_manager_log_location'),
('logging', 'task_log_reader'): ('core', 'task_log_reader'),
}
# A mapping of old default values that we want to change and warn the user
# about. Mapping of section -> setting -> { old, replace, by_version }
deprecated_values = {
'core': {
'task_runner': ('BashTaskRunner', 'StandardTaskRunner', '2.0'),
},
}
# This method transforms option names on every read, get, or set operation.
# This changes from the default behaviour of ConfigParser from lowercasing
# to instead be case-preserving
def optionxform(self, optionstr: str) -> str:
return optionstr
def __init__(self, default_config=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.airflow_defaults = ConfigParser(*args, **kwargs)
if default_config is not None:
self.airflow_defaults.read_string(default_config)
self.is_validated = False
def _validate(self):
if (
self.get("core", "executor") not in ('DebugExecutor', 'SequentialExecutor') and
"sqlite" in self.get('core', 'sql_alchemy_conn')):
raise AirflowConfigException(
"error: cannot use sqlite with the {}".format(
self.get('core', 'executor')))
for section, replacement in self.deprecated_values.items():
for name, info in replacement.items():
old, new, version = info
if self.get(section, name, fallback=None) == old:
# Make sure the env var option is removed, otherwise it
# would be read and used instead of the value we set
env_var = self._env_var_name(section, name)
os.environ.pop(env_var, None)
                    self.set(section, name, new)
|
CRLab/curvox
|
src/curvox/pc_vox_utils.py
|
Python
|
mit
| 14,287
| 0.00378
|
import numpy as np
import binvox_rw
import numba
import mcubes
@numba.jit(forceobj=True)
def get_voxel_resolution(pc, patch_size):
"""
This function takes in a pointcloud and returns the resolution
of a voxel given that there will be a fixed number of voxels.
    For example if patch_size is 40, then we are determining the
    side length of a single voxel in meters. So voxel_resolution
    may end up being something like 0.01 for a 1cm^3 voxel size.
:type pc: numpy.ndarray
:param pc: nx3 numpy array representing a pointcloud
:type patch_size: int
:param patch_size: int, how many voxels are there going to be.
:rtype voxel_resolution: float
"""
if not pc.shape[1] == 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(pc.shape))
min_x = pc[:, 0].min()
min_y = pc[:, 1].min()
min_z = pc[:, 2].min()
max_x = pc[:, 0].max()
max_y = pc[:, 1].max()
max_z = pc[:, 2].max()
max_dim = max((max_x - min_x),
(max_y - min_y),
(max_z - min_z))
voxel_resolution = (1.0 * max_dim) / patch_size
return voxel_resolution
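# Example (added for clarity): a cloud whose largest bbox edge is 0.4 m with
# patch_size=40 yields voxel_resolution = 0.4 / 40 = 0.01 (1 cm voxels).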
@numba.jit(forceobj=True)
def get_bbox_center(pc):
"""
This function takes an nx3 pointcloud and returns a tuple
(x,y,z) which is the center of the bbox that contains
the pointcloud
:type pc: numpy.ndarray
:param pc: a nx3 numpy array representing a pointcloud
:rtype numpy.ndarray
"""
if not pc.shape[1] == 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(pc.shape))
min_x = pc[:, 0].min()
min_y = pc[:, 1].min()
min_z = pc[:, 2].min()
max_x = pc[:, 0].max()
max_y = pc[:, 1].max()
max_z = pc[:, 2].max()
center = np.array([min_x + (max_x - min_x) / 2.0,
min_y + (max_y - min_y) / 2.0,
min_z + (max_z - min_z) / 2.0])
return center
@numba.jit(forceobj=True)
def voxelize_points(points, pc_bbox_center, voxel_resolution, num_voxels_per_dim, pc_center_in_voxel_grid):
"""
    This function takes a pointcloud and produces an occupancy map or voxel grid surrounding the points.
:type points: numpy.ndarray
:param points: an nx3 numpy array representing a pointcloud
:type pc_bbox_center: numpy.ndarray
:param pc_bbox_center: numpy.ndarray of shape (3,) representing the center of the bbox that contains points
:type voxel_resolution: float
:param voxel_resolution: float describing in meters the length of an individual voxel edge. i.e 0.01 would
mean each voxel is 1cm^3
:type num_voxels_per_dim: int
:param num_voxels_per_dim: how many voxels along a dimension. normally 40, for a 40x40x40 voxel grid
:type pc_center_in_voxel_grid: tuple
:param pc_center_in_voxel_grid: (x,y,z) in voxel coords of where to place the center of the points in the voxel grid
        if using 40x40x40 voxel grid, then pc_center_in_voxel_grid = (20,20,20). We often use something more
like (20,20,18) when doing shape completion so there is more room in the back of the grid for the
object to be completed.
"""
# this is the voxel grid we are going to return
voxel_grid = np.zeros((num_voxels_per_dim,
num_voxels_per_dim,
                           num_voxels_per_dim), dtype=bool)
# take the points and convert them from meters to voxel space coords
centered_scaled_points = np.floor(
(points - np.array(pc_bbox_center) + np.array(
pc_center_in_voxel_grid) * voxel_resolution) / voxel_resolution)
# remove any points that are beyond the area that falls in our voxel grid
mask = centered_scaled_points.max(axis=1) < num_voxels_per_dim
centered_scaled_points = centered_scaled_points[mask]
# if we don't have any more points that fall within our voxel grid
# return an empty grid
if centered_scaled_points.shape[0] == 0:
return voxel_grid
# remove any points that are outside of the region we are voxelizing
    # as they are too small.
mask = centered_scaled_points.min(axis=1) > 0
centered_scaled_points = centered_scaled_points[mask]
# if we don't have any more points that fall within our voxel grid,
# return an empty grid
if centered_scaled_points.shape[0] == 0:
return voxel_grid
# treat our remaining points as ints, since we are already in voxel coordinate space.
    # these points should be things like (5, 6, 7) which represent indices in the voxel grid.
csp_int = centered_scaled_points.astype(int)
# create a mask from our set of points.
mask = (csp_int[:, 0], csp_int[:, 1], csp_int[:, 2])
# apply the mask to our voxel grid setting voxel that had points in them to be occupied
voxel_grid[mask] = 1
return voxel_grid
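# A minimal sketch (added; values are illustrative assumptions): voxelize a
# small synthetic cloud into a 40^3 grid centered at voxel (20, 20, 20).
#   pts = np.random.rand(100, 3) * 0.1
#   grid = voxelize_points(pts, get_bbox_center(pts),
#                          get_voxel_resolution(pts, 40), 40, (20, 20, 20))
#   grid.sum() then gives the number of occupied voxels.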
def pc_to_binvox(points, **kwargs):
"""
This function creates a binvox object from a pointcloud. The voxel grid is slightly off center from the
pointcloud bbox center so that the back of the grid has more room for the completion.
:type points: numpy.ndarray
:param points: nx3 numpy array representing a pointcloud
:rtype: binvox_rw.Voxels
:param kwargs:
See below
:Keyword Arguments:
* *patch_size* (``int``) --
how many voxels along a single dimension of the voxel grid.
Ex: patch_size=40 gives us a 40^3 voxel grid
Defaults to 40
* *percent_patch_size* (``float``) --
how much of the voxel grid do we want our pointcloud to fill.
make this < 1 so that there is some padding on the edges
Defaults to 0.8
* *percent_offset* (``tuple``) --
Where should the center of the points be placed inside the voxel grid.
normally make PERCENT_Z < 0.5 so that the points are placed towards the front of the grid
this leaves more room for the shape completion to fill in the occluded back half of the occupancy grid.
"""
patch_size = kwargs.get("patch_size", 40)
percent_offset = kwargs.get("percent_offset", (0.5, 0.5, 0.45))
percent_patch_size = kwargs.get("percent_patch_size", 0.8)
if points.shape[1] != 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(points.shape))
if len(percent_offset) != 3:
raise Exception("Percent offset should be a tuple of size 3, instead got {}".format(percent_offset))
percent_x, percent_y, percent_z = percent_offset
# get the center of the pointcloud in meters. Ex: center = np.array([0.2, 0.1, 2.0])
voxel_center = get_bbox_center(points)
# get the size of an individual voxel. Ex: voxel_resolution=0.01 meaning 1cm^3 voxel
# PERCENT_PATCH_SIZE determines how much extra padding to leave on the sides
voxel_resolution = get_voxel_resolution(points, percent_patch_size * patch_size)
# this tuple is where we want to stick the center of the pointcloud in our voxel grid
# Ex: (20, 20, 18) leaving some extra room in the back half.
pc_center_in_voxel_grid = (patch_size*percent_x, patch_size*percent_y, patch_size*percent_z)
# create a voxel grid.
vox_np = voxelize_points(
points=points[:, 0:3],
pc_bbox_center=voxel_center,
voxel_resolution=voxel_resolution,
num_voxels_per_dim=patch_size,
pc_center_in_voxel_grid=pc_center_in_voxel_grid)
# location in meters of the bottom corner of the voxel grid in world space
offset = np.array(voxel_center) - np.array(pc_center_in_voxel_grid) * voxel_resolution
    # create a voxel grid object to contain the grid, shape, offset in the world, and grid resolution
    voxel_grid = binvox_rw.Voxels(vox_np, vox_np.shape, tuple(offset), voxel_resolution * patch_size, "xyz")
# Where am I putting my point cloud relative to the center of my voxel grid
# ex. (20, 20, 20) or (20, 20, 18)
center_point_in_voxel_grid = (patch_size * percent_x, patch_size * percent_y, patch_size * percent_z)
    return voxel_grid, voxel_center, voxel_resolution, center_point_in_voxel_grid
|
nikolas/splinter
|
tests/cookies.py
|
Python
|
bsd-3-clause
| 3,162
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class CookiesTest(object):
def test_create_and_access_a_cookie(self):
"should be able to create and access a cookie"
self.browser.cookies.add({'sha': 'zam'})
self.assertEqual(self.browser.cookies['sha'], 'zam')
def test_create_many_cookies_at_once_as_dict(self):
"should be able to create many cookies at once as dict"
cookies = {'sha': 'zam', 'foo': 'bar'}
self.browser.cookies.add(cookies)
self.assertEqual(self.browser.cookies['sha'], 'zam')
self.assertEqual(self.browser.cookies['foo'], 'bar')
    def test_create_many_cookies_at_once_as_list(self):
"should be able to create many cookies at once as list"
cookies = [{'sha': 'zam'}, {'foo': 'bar'}]
self.browser.cookies.add(cookies)
self.assertEqual(self.browser.cookies['sha'], 'zam')
self.assertEqual(self.browser.cookies['foo'], 'bar')
def test_create_some_cookies_and_delete_them_all(self):
"should be able to delete all cookies"
self.browser.cookies.add({'whatever': 'and ever'})
self.browser.cookies.add({'anothercookie': 'im bored'})
self.browser.cookies.delete()
self.assertEqual(self.browser.cookies, {})
def test_create_and_delete_a_cookie(self):
"should be able to create and destroy a cookie"
self.browser.cookies.delete()
self.browser.cookies.add({'cookie': 'with milk'})
self.browser.cookies.delete('cookie')
self.assertEqual(self.browser.cookies, {})
def test_create_and_delete_many_cookies(self):
"should be able to create and destroy many cookies"
self.browser.cookies.delete()
self.browser.cookies.add({'acookie': 'cooked'})
self.browser.cookies.add({'anothercookie': 'uncooked'})
self.browser.cookies.add({'notacookie': 'halfcooked'})
self.browser.cookies.delete('acookie', 'notacookie')
self.assertEqual('uncooked', self.browser.cookies['anothercookie'])
def test_try_to_destroy_an_absent_cookie_and_nothing_happens(self):
self.browser.cookies.delete()
self.browser.cookies.add({'foo': 'bar'})
self.browser.cookies.delete('mwahahahaha')
self.assertEqual(self.browser.cookies, {'foo': 'bar'})
def test_create_and_get_all_cookies(self):
"should be able to create some cookies and retrieve them all"
self.browser.cookies.delete()
self.browser.cookies.add({'taco': 'shrimp'})
self.browser.cookies.add({'lavar': 'burton'})
self.assertEqual(len(self.browser.cookies.all()), 2)
self.browser.cookies.delete()
self.assertEqual(self.browser.cookies.all(), {})
def test_create_and_use_contains(self):
"should be able to create many cookies at once as dict"
cookies = {'sha': 'zam'}
self.browser.cookies.add(cookies)
self.assertIn('sha', self.browser.cookies)
self.assertNotIn('foo', self.browser.cookies)
|
sorgerlab/belpy
|
indra/literature/pubmed_client.py
|
Python
|
mit
| 22,934
| 0.000392
|
"""
Search and get metadata for articles in Pubmed.
"""
import logging
import requests
from time import sleep
from typing import List
from functools import lru_cache
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
logger = logging.getLogger(__name__)
pubmed_search = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi'
pubmed_fetch = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
# Send request can't be cached by lru_cache because it takes a dict
# (a mutable/unhashable type) as an argument. We cache the callers instead.
def send_request(url, data):
try:
res = requests.get(url, params=data)
except requests.exceptions.Timeout as e:
logger.error('PubMed request timed out')
logger.error('url: %s, data: %s' % (url, data))
logger.error(e)
return None
except requests.exceptions.RequestException as e:
logger.error('PubMed request exception')
logger.error('url: %s, data: %s' % (url, data))
logger.error(e)
return None
if res.status_code == 429:
sleep(0.5)
res = requests.get(url, params=data)
if not res.status_code == 200:
logger.error('Got return code %d from pubmed client.'
% res.status_code)
return None
tree = ET.XML(res.content, parser=UTB())
return tree
@lru_cache(maxsize=100)
def get_ids(search_term, **kwargs):
"""Search Pubmed for paper IDs given a search term.
Search options can be passed as keyword arguments, some of which are
custom keywords identified by this function, while others are passed on
as parameters for the request to the PubMed web service
For details on parameters that can be used in PubMed searches, see
    https://www.ncbi.nlm.nih.gov/books/NBK25499/#chapter4.ESearch. Some useful
    parameters to pass are db='pmc' to search PMC instead of pubmed; reldate=2
    to search for papers within the last 2 days; mindate='2016/03/01',
    maxdate='2016/03/31' to search for papers in March 2016.
PubMed, by default, limits returned PMIDs to a small number, and this
number can be controlled by the "retmax" parameter. This function
uses a retmax value of 100,000 by default that can be changed via the
corresponding keyword argument.
Parameters
----------
search_term : str
A term for which the PubMed search should be performed.
    use_text_word : Optional[bool]
If True, the "[tw]" string is appended to the search term to constrain
the search to "text words", that is words that appear as whole
in relevant parts of the PubMed entry (excl. for instance the journal
name or publication date) like the title and abstract. Using this
option can eliminate spurious search results such as all articles
published in June for a search for the "JUN" gene, or journal names
that contain Acad for a search for the "ACAD" gene.
See also: https://www.nlm.nih.gov/bsd/disted/pubmedtutorial/020_760.html
Default : True
kwargs : kwargs
Additional keyword arguments to pass to the PubMed search as
parameters.
"""
use_text_word = kwargs.pop('use_text_word', True)
if use_text_word:
search_term += '[tw]'
params = {'term': search_term,
'retmax': 100000,
'retstart': 0,
'db': 'pubmed',
'sort': 'pub+date'}
params.update(kwargs)
tree = send_request(pubmed_search, params)
if tree is None:
return []
if tree.find('ERROR') is not None:
logger.error(tree.find('ERROR').text)
return []
if tree.find('ErrorList') is not None:
for err in tree.find('ErrorList'):
logger.error('Error - %s: %s' % (err.tag, err.text))
return []
count = int(tree.find('Count').text)
id_terms = tree.findall('IdList/Id')
if id_terms is None:
return []
ids = [idt.text for idt in id_terms]
if count != len(ids):
logger.warning('Not all ids were retrieved for search %s;\n'
'limited at %d.' % (search_term, params['retmax']))
return ids
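# Hedged usage sketch (network access assumed; results vary over time):
#   pmids = get_ids('BRAF', retmax=10, reldate=30)
# returns up to 10 PMIDs for papers matching "BRAF" from the last 30 days,
# since extra keyword arguments are passed through as ESearch parameters.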
def get_id_count(search_term):
"""Get the number of citations in Pubmed for a search query.
Parameters
----------
search_term : str
A term for which the PubMed search should be performed.
Returns
-------
int or None
The number of citations for the query, or None if the query fails.
"""
params = {'term': search_term,
'rettype': 'count',
'db': 'pubmed'}
tree = send_request(pubmed_search, params)
if tree is None:
return None
else:
count = list(tree)[0].text
return int(count)
@lru_cache(maxsize=100)
def get_ids_for_gene(hgnc_name, **kwargs):
"""Get the curated set of articles for a gene in the Entrez database.
Search parameters for the Gene database query can be passed in as
keyword arguments.
Parameters
----------
hgnc_name : str
The HGNC name of the gene. This is used to obtain the HGNC ID
(using the hgnc_client module) and in turn used to obtain the Entrez
ID associated with the gene. Entrez is then queried for that ID.
"""
from indra.databases import hgnc_client
# Get the HGNC ID for the HGNC name
hgnc_id = hgnc_client.get_hgnc_id(hgnc_name)
if hgnc_id is None:
raise ValueError('Invalid HGNC name.')
# Get the Entrez ID
entrez_id = hgnc_client.get_entrez_id(hgnc_id)
if entrez_id is None:
raise ValueError('Entrez ID not found in HGNC table.')
# Query the Entrez Gene database
params = {'db': 'gene',
'retmode': 'xml',
'id': entrez_id}
params.update(kwargs)
tree = send_request(pubmed_fetch, params)
if tree is None:
return []
if tree.find('ERROR') is not None:
logger.error(tree.find('ERROR').text)
return []
# Get all PMIDs from the XML tree
id_terms = tree.findall('.//PubMedId')
if id_terms is None:
return []
# Use a set to remove duplicate IDs
ids = list(set([idt.text for idt in id_terms]))
return ids
def get_ids_for_mesh(mesh_id, major_topic=False, **kwargs):
"""Return PMIDs that are annotated with a given MeSH ID.
Parameters
----------
mesh_id : str
The MeSH ID of a term to search for, e.g., D009101.
major_topic : bool
If True, only papers for which the given MeSH ID is annotated as
a major topic are returned. Otherwise all annotations are considered.
Default: False
**kwargs
        Any further PubMed search arguments that are passed to
get_ids.
"""
from indra.databases import mesh_client
mesh_name = mesh_client.get_mesh_name(mesh_id)
if not mesh_name:
logger.error('Could not get MeSH name for ID %s' % mesh_id)
return []
suffix = 'majr' if major_topic else 'mh'
search_term = '%s [%s]' % (mesh_name, suffix)
ids = get_ids(search_term, use_text_word=False, **kwargs)
if mesh_id.startswith('C') and not major_topic:
# Get pmids for supplementary concepts as well
search_term = '%s [nm]' % mesh_name
ids2 = get_ids(search_term, use_text_word=False, **kwargs)
ids = list(set(ids) | set(ids2))
return ids
def get_article_xml(pubmed_id):
"""Get the Article subtree a single article from the Pubmed database.
Parameters
----------
pubmed_id : str
A PubMed ID.
Returns
-------
xml.etree.ElementTree.Element
The XML ElementTree Element that represents the Article portion of the
PubMed entry.
"""
full_xml_tree = get_full_xml(pubmed_id)
if full_xml_tree is None:
return None
article = full_xml_tree.find('PubmedArticle/MedlineCitation/Article')
    return article  # May be None
@lru_cache(maxsize=100)
def get_full_xml(pubmed_id):
"""Get the full XML tree of a single article from the Pubmed database.
Parameters
----------
pubmed_id : str
A PubMed ID
|
victorianorton/SimpleRPGGame
|
src/game/Decorators.py
|
Python
|
mit
| 1,582
| 0.007585
|
__author__ = 'Victoria'
#Decorator Pattern in Characters
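# NOTE (added): Barbarian and Dragon are assumed to be defined elsewhere in
# the game package; their imports are not shown in this file.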
class BarDecorators(Barbarian):
pass
class ImageBarDecorator(BarDecorators):
def __init__(self, decorated, picFile):
self.decorated = decorated
self.picFile = picFile
super(ImageBarDecorator, self).__init__(self.decorated.canvas,
                                                self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
class FastBarMoveDecorator(BarDecorators):
def __init__(self, decorated):
        self.decorated = decorated
        super(FastBarMoveDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
#Decorator Pattern in Monsters
class DragDecorators(Dragon):
pass
class ImageDragDecorator(DragDecorators):
def __init__(self, decorated, picFile):
self.decorated = decorated
self.picFile = picFile
super(ImageDragDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
class FastDragMoveDecorator(DragDecorators):
def __init__(self, decorated):
self.decorated = decorated
super(FastDragMoveDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
|
level12/blazeweb
|
tests/apps/nlsupporting/components/news/__init__.py
|
Python
|
bsd-3-clause
| 26
| 0
|
def somefunc():
    pass
|
ajaybhatia/archlinux-dotfiles
|
home/.config/offlineimap/offlineimap-helpers.py
|
Python
|
mit
| 2,790
| 0.010394
|
import os
import sys
import subprocess
""" Use gpg to decrypt password.
"""
def mailpasswd(path):
cmd = "gpg --quiet --batch --use-agent --decrypt --output - " + os.path.expanduser(path)
try:
return subprocess.check_output(cmd, shell=True).strip()
except subprocess.CalledProcessError:
return ""
# get password either from gpg file (when run from shell) or from stdin (when run from imapfilter)
def get_passwd_check_ppid(path):
# get parent process cmdline
f = open("/proc/%s/cmdline" % os.getppid(), "r")
cmdline = f.read()
f.close()
# check if run from imapfilter
if "imapfilter" in cmdline:
return raw_input()
else:
return mailpasswd(path)
# mapping for nametrans
# dictionary of strings {<remote>: <local>, ...} shape, where <remote> is mapped to <local>
mapping_fjfi = {
'INBOX' : 'INBOX',
'Drafts' : 'drafts',
'Sent Items' : 'sent',
'Deleted Items' : 'trash',
    'Junk E-Mail'   : 'spam',
}
mapping_gmail = {
'INBOX' : 'INBOX',
'[Gmail]/Drafts' : 'drafts',
'[Gmail]/Sent Mail' : 'sent',
'[Gmail]/Bin' : 'trash',
'[Gmail]/Spam' : 'spam',
}
mapping_gmx = {
'INBOX' : 'INBOX',
'Drafts' : 'drafts',
'Sent' : 'sent',
'Spam' : 'spam',
    'Trash' : 'trash',
'arch' : 'arch',
'aur-general' : 'aur-general',
'arch-general' : 'arch-general',
'arch-wiki' : 'arch-wiki',
'mw' : 'mw',
}
# values from mapping_* dicts with high priority
prio_queue_fjfi = ['INBOX']
prio_queue_gmail = ['INBOX']
prio_queue_gmx = ['INBOX', 'arch', 'arch-wiki', 'arch-general', 'aur-general']
def nt_remote(mapping):
def inner(folder):
try:
return mapping[folder]
        except KeyError:
return folder
return inner
def nt_local(mapping):
r_mapping = dict(zip(mapping.values(), mapping.keys()))
def inner(folder):
try:
return r_mapping[folder]
        except KeyError:
return folder
return inner
# return False if folder not in mapping.keys()
def exclude(mapping):
def inner(folder):
if folder in mapping.keys():
return True
return False
return inner
# compare by position in queue (mapping_*.values())
def fd_priority(prio_queue):
def inner(x, y):
if x in prio_queue and y in prio_queue:
return cmp(prio_queue.index(x), prio_queue.index(y))
elif x in prio_queue:
return -1
elif y in prio_queue:
return 1
else:
return 0
return inner
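# Hedged usage sketch (assumed ~/.offlineimaprc wiring; section name is
# illustrative):
#   [Repository gmx-remote]
#   nametrans = nt_remote(mapping_gmx)
#   folderfilter = exclude(mapping_gmx)
#   foldersort = fd_priority(prio_queue_gmx)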
|
epequeno/ThinkPy-Solutions
|
ch08/8.06.py
|
Python
|
gpl-3.0
| 828
| 0.002415
|
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 7 18:08:41 2011
@author: steven
"""
# word = 'banana'
# count = 0
# for letter in word:
# if letter == 'a':
# count = count + 1
# print count
# Rewrite this function so that instead of traversing the string, it uses the
# three-parameter version of find from the previous section.
# Current Status: Complete
def find(letter, word, index):
while index < len(word):
if word[index] == letter:
return index
index += 1
return -1
def count(letter, word):
counter = 0
index = 0
while index < len(word):
result = find(letter, word, index)
if result == -1:
return counter
else:
counter += 1
index = result + 1
return counter
print count("n", "Think Python")
|
FedeRez/webldap
|
app/webldap/local_settings.docker.py
|
Python
|
mit
| 764
| 0
|
# Docker-specific local settings
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db',
}
}
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
TEMPLATE_DIRS = (
'/srv/webldap/templates',
)
EMAIL_FROM = 'root@localhost'
REQ_EXPIRE_HRS = 48
REQ_EXPIRE_STR = '48 heures'
LDAP_URI = 'ldap://{}:{}'.format(os.environ['LDAP_PORT_389_TCP_ADDR'],
os.environ['LDAP_PORT_389_TCP_PORT'])
LDAP_STARTTLS = False
LDAP_CACERT = ''
LDAP_BASE = 'dc=example,dc=net'
LDAP_WEBLDAP_USER = 'cn=webldap,ou=service-users,dc=example,dc=net'
LDAP_WEBLDAP_PASSWD = 'secret'
LDAP_DEFAULT_GROUPS = []
LDAP_DEFAULT_ROLES = ['member']
|
jpmfribeiro/PyCharts
|
pycharts/fields/plot_options/data_labels.py
|
Python
|
mit
| 493
| 0.004057
|
class DataLabels(object):
    def __init__(self, enabled=True):
self.enabled = enabled
def show_labels(self, enable):
if not type(enable) is bool:
raise TypeError('enable should be a boolean (True or False).')
self.enabled = enable
def to_javascript(self):
jsc = "dataLabels: {"
jsc += "enabled: "
if self.enabled:
jsc += "true"
else:
jsc += "false"
jsc += "}"
        return jsc
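# Hedged usage sketch: render the fragment for an enabled label set.
#   DataLabels(enabled=True).to_javascript()  ->  'dataLabels: {enabled: true}'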
|
tanglei528/glance
|
glance/tests/functional/v1/test_api.py
|
Python
|
apache-2.0
| 24,300
| 0
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Functional test case that utilizes httplib2 against the API server"""
import hashlib
import httplib2
from glance.openstack.common import jsonutils
from glance.openstack.common import units
from glance.tests import functional
from glance.tests.utils import minimal_headers
from glance.tests.utils import skip_if_disabled
FIVE_KB = 5 * units.Ki
FIVE_GB = 5 * units.Gi
class TestApi(functional.FunctionalTest):
"""Functional tests using httplib2 against the API server"""
@skip_if_disabled
def test_get_head_simple_post(self):
"""
We test the following sequential series of actions:
0. GET /images
- Verify no public images
1. GET /images/detail
- Verify no public images
2. POST /images with public image named Image1
and no custom properties
- Verify 201 returned
3. HEAD image
- Verify HTTP headers have correct information we just added
4. GET image
- Verify all information on image we just added is correct
5. GET /images
- Verify the image we just added is returned
6. GET /images/detail
- Verify the image we just added is returned
7. PUT image with custom properties of "distro" and "arch"
- Verify 200 returned
8. PUT image with too many custom properties
- Verify 413 returned
9. GET image
- Verify updated information about image was stored
10. PUT image
- Remove a previously existing property.
11. PUT image
- Add a previously deleted property.
12. PUT image/members/member1
- Add member1 to image
13. PUT image/members/member2
- Add member2 to image
14. GET image/members
- List image members
15. DELETE image/members/member1
- Delete image member1
16. PUT image/members
- Attempt to replace members with an overlimit amount
17. PUT image/members/member11
- Attempt to add a member while at limit
18. POST /images with another public image named Image2
- attribute and three custom properties, "distro", "arch" & "foo"
- Verify a 200 OK is returned
19. HEAD image2
- Verify image2 found now
20. GET /images
- Verify 2 public images
21. GET /images with filter on user-defined property "distro".
- Verify both images are returned
22. GET /images with filter on user-defined property 'distro' but
- with non-existent value. Verify no images are returned
23. GET /images with filter on non-existent user-defined property
- "boo". Verify no images are returned
24. GET /images with filter 'arch=i386'
- Verify only image2 is returned
25. GET /images with filter 'arch=x86_64'
- Verify only image1 is returned
26. GET /images with filter 'foo=bar'
- Verify only image2 is returned
27. DELETE image1
- Delete image
28. GET image/members
- List deleted image members
29. PUT image/members/member2
- Update existing member2 of deleted image
30. PUT image/members/member3
- Add member3 to deleted image
31. DELETE image/members/member2
- Delete member2 from deleted image
32. DELETE image2
- Delete image
33. GET /images
- Verify no images are listed
"""
self.cleanup()
        self.start_servers(**self.__dict__.copy())
# 0. GET /images
        # Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. GET /images/detail
# Verify no public images
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 2. POST /images with public image named Image1
# attribute and no custom properties. Verify a 200 OK is returned
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 201)
data = jsonutils.loads(content)
image_id = data['image']['id']
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 3. HEAD image
# Verify image found now
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
# 4. GET image
# Verify all information on image we just added is correct
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image_headers = {
'x-image-meta-id': image_id,
'x-image-meta-name': 'Image1',
'x-image-meta-is_public': 'True',
'x-image-meta-status': 'active',
'x-image-meta-disk_format': 'raw',
'x-image-meta-container_format': 'ovf',
'x-image-meta-size': str(FIVE_KB)}
expected_std_headers = {
'content-length': str(FIVE_KB),
'content-type': 'application/octet-stream'}
for expected_key, expected_value in expected_image_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
for expected_key, expected_value in expected_std_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
self.assertEqual(content, "*" * FIVE_KB)
self.assertEqual(hashlib.md5(content).hexdigest(),
hashlib.md5("*" * FIVE_KB).hexdigest())
# 5. GET /images
# Verify one public image
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_result = {"i
|
OCA/bank-payment
|
account_payment_partner/tests/test_account_payment_partner.py
|
Python
|
agpl-3.0
| 21,944
| 0.001276
|
# Copyright 2017 ForgeFlow S.L.
# Copyright 2021 Tecnativa - Víctor Martínez
# License AGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import _, fields
from odoo.exceptions import UserError, ValidationError
from odoo.fields import Date
from odoo.tests.common import Form, SavepointCase
class TestAccountPaymentPartner(SavepointCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.res_users_model = cls.env["res.users"]
cls.move_model = cls.env["account.move"]
cls.journal_model = cls.env["account.journal"]
cls.payment_mode_model = cls.env["account.payment.mode"]
cls.partner_bank_model = cls.env["res.partner.bank"]
# Refs
cls.company = cls.env.ref("base.main_company")
cls.acct_type_payable = cls.env.ref("account.data_account_type_payable")
cls.acct_type_receivable = cls.env.ref("account.data_account_type_receivable")
cls.acct_type_expenses = cls.env.ref("account.data_account_type_expenses")
cls.company_2 = cls.env["res.company"].create({"name": "Company 2"})
charts = cls.env["account.chart.template"].search([])
if charts:
cls.chart = charts[0]
else:
raise ValidationError(_("No Chart of Account Template has been defined !"))
old_company = cls.env.user.company_id
        cls.env.user.company_id = cls.company_2.id
cls.chart.try_loading()
cls.env.user.company_id = old_company.id
# refs
        cls.manual_out = cls.env.ref("account.account_payment_method_manual_out")
cls.manual_out.bank_account_required = True
cls.manual_in = cls.env.ref("account.account_payment_method_manual_in")
cls.journal_sale = cls.env["account.journal"].create(
{
"name": "Test Sales Journal",
"code": "tSAL",
"type": "sale",
"company_id": cls.company.id,
}
)
cls.journal_purchase = cls.env["account.journal"].create(
{
"name": "Test Purchases Journal",
"code": "tPUR",
"type": "purchase",
"company_id": cls.company.id,
}
)
cls.journal_c1 = cls.journal_model.create(
{
"name": "J1",
"code": "J1",
"type": "bank",
"company_id": cls.company.id,
"bank_acc_number": "123456",
}
)
cls.journal_c2 = cls.journal_model.create(
{
"name": "J2",
"code": "J2",
"type": "bank",
"company_id": cls.company_2.id,
"bank_acc_number": "552344",
}
)
cls.supplier_payment_mode = cls.payment_mode_model.create(
{
"name": "Suppliers Bank 1",
"bank_account_link": "variable",
"payment_method_id": cls.manual_out.id,
"show_bank_account_from_journal": True,
"company_id": cls.company.id,
"fixed_journal_id": cls.journal_c1.id,
"variable_journal_ids": [(6, 0, [cls.journal_c1.id])],
}
)
cls.supplier_payment_mode_c2 = cls.payment_mode_model.create(
{
"name": "Suppliers Bank 2",
"bank_account_link": "variable",
"payment_method_id": cls.manual_out.id,
"company_id": cls.company_2.id,
"fixed_journal_id": cls.journal_c2.id,
"variable_journal_ids": [(6, 0, [cls.journal_c2.id])],
}
)
cls.customer_payment_mode = cls.payment_mode_model.create(
{
"name": "Customers to Bank 1",
"bank_account_link": "fixed",
"payment_method_id": cls.manual_in.id,
"company_id": cls.company.id,
"fixed_journal_id": cls.journal_c1.id,
"refund_payment_mode_id": cls.supplier_payment_mode.id,
"variable_journal_ids": [(6, 0, [cls.journal_c1.id])],
}
)
cls.supplier_payment_mode.write(
{"refund_payment_mode_id": cls.customer_payment_mode.id}
)
cls.customer = (
cls.env["res.partner"]
.with_company(cls.company.id)
.create(
{
"name": "Test customer",
"customer_payment_mode_id": cls.customer_payment_mode,
}
)
)
cls.supplier = (
cls.env["res.partner"]
.with_company(cls.company.id)
.create(
{
"name": "Test supplier",
"supplier_payment_mode_id": cls.supplier_payment_mode,
}
)
)
cls.supplier_bank = cls.env["res.partner.bank"].create(
{
"acc_number": "5345345",
"partner_id": cls.supplier.id,
"company_id": cls.company.id,
}
)
cls.supplier_bank_2 = cls.env["res.partner.bank"].create(
{
"acc_number": "3452342",
"partner_id": cls.supplier.id,
"company_id": cls.company_2.id,
}
)
cls.supplier.with_company(
cls.company_2.id
).supplier_payment_mode_id = cls.supplier_payment_mode_c2
cls.invoice_account = cls.env["account.account"].search(
[
("user_type_id", "=", cls.acct_type_payable.id),
("company_id", "=", cls.company.id),
],
limit=1,
)
cls.invoice_line_account = cls.env["account.account"].search(
[
("user_type_id", "=", cls.acct_type_expenses.id),
("company_id", "=", cls.company.id),
],
limit=1,
)
cls.journal_bank = cls.env["res.partner.bank"].create(
{
"acc_number": "GB95LOYD87430237296288",
"partner_id": cls.env.user.company_id.partner_id.id,
}
)
cls.journal = cls.env["account.journal"].create(
{
"name": "BANK TEST",
"code": "TEST",
"type": "bank",
"bank_account_id": cls.journal_bank.id,
}
)
cls.supplier_invoice = cls.move_model.create(
{
"partner_id": cls.supplier.id,
"invoice_date": fields.Date.today(),
"move_type": "in_invoice",
"journal_id": cls.journal_purchase.id,
}
)
def _create_invoice(self, default_move_type, partner):
move_form = Form(
self.env["account.move"].with_context(default_move_type=default_move_type)
)
move_form.partner_id = partner
move_form.invoice_date = Date.today()
with move_form.invoice_line_ids.new() as line_form:
line_form.product_id = self.env.ref("product.product_product_4")
line_form.name = "product that cost 100"
line_form.quantity = 1.0
line_form.price_unit = 100.0
line_form.account_id = self.invoice_line_account
return move_form.save()
def test_create_partner(self):
customer = (
self.env["res.partner"]
.with_company(self.company.id)
.create(
{
"name": "Test customer",
"customer_payment_mode_id": self.customer_payment_mode,
}
)
)
self.assertEqual(
customer.with_company(self.company.id).customer_payment_mode_id,
self.customer_payment_mode,
)
self.assertEqual(
customer.with_company(self.company_2.id).customer_payment_mode_id,
self.payment_mode_model,
)
def test_partner_id_changes_compute_partner_bank(self):
# Test _compute_par
|
fluks/youtube-dl
|
youtube_dl/extractor/common.py
|
Python
|
unlicense
| 38,144
| 0.001022
|
from __future__ import unicode_literals
import base64
import datetime
import hashlib
import json
import netrc
import os
import re
import socket
import sys
import time
import xml.etree.ElementTree
from ..compat import (
compat_cookiejar,
compat_http_client,
compat_urllib_error,
compat_urllib_parse_urlparse,
compat_urlparse,
compat_str,
)
from ..utils import (
clean_html,
compiled_regex_type,
ExtractorError,
float_or_none,
int_or_none,
RegexNotFoundError,
sanitize_filename,
unescapeHTML,
)
_NO_DEFAULT = object()
class InfoExtractor(object):
"""Information Extractor class.
Information extractors are the classes that, given a URL, extract
information about the video (or videos) the URL refers to. This
information includes the real video URL, the video title, author and
others. The information is stored in a dictionary which is then
passed to the YoutubeDL. The YoutubeDL processes this
    information, possibly downloading the video to the file system, among
    other possible outcomes.
    The type field determines the type of the result.
By far the most common value (and the default if _type is missing) is
"video", which indicates a single video.
For a video, the dictionaries must include the following fields:
id: Video identifier.
title: Video title, unescaped.
Additionally, it must contain either a formats entry or a url one:
formats: A list of dictionaries for each format available, ordered
from worst to best quality.
Potential fields:
* url Mandatory. The URL of the video file
* ext Will be calculated from url if missing
* format A human-readable description of the format
("mp4 container with h264/opus").
                                 Calculated from the format_id, width, height,
                                 and format_note fields if missing.
* format_id A short description of the format
("mp4_h264_opus" or "19").
Technically optional, but strongly recommended.
* format_note Additional info about the format
("3D" or "DASH video")
* width Width of the video, if known
* height Height of the video, if known
* resolution Textual description of width and height
* tbr Average bitrate of audio and video in KBit/s
* abr Average audio bitrate in KBit/s
* acodec Name of the audio codec in use
* asr Audio sampling rate in Hertz
* vbr Average video bitrate in KBit/s
* fps Frame rate
* vcodec Name of the video codec in use
* container Name of the container format
* filesize The number of bytes, if known in advance
* filesize_approx An estimate for the number of bytes
* player_url SWF Player URL (used for rtmpdump).
* protocol The protocol that will be used for the actual
download, lower-case.
"http", "https", "rtsp", "rtmp", "m3u8" or so.
* preference Order number of this format. If this field is
present and not None, the formats get sorted
by this field, regardless of all other values.
-1 for default (order by other properties),
-2 or smaller for less than default.
* language_preference Is this in the correct requested
language?
10 if it's what the URL is about,
-1 for default (don't know),
-10 otherwise, other values reserved for now.
* quality Order number of the video quality of this
format, irrespective of the file format.
-1 for default (order by other properties),
-2 or smaller for less than default.
* source_preference Order number for this video source
(quality takes higher priority)
-1 for default (order by other properties),
-2 or smaller for less than default.
* http_referer HTTP Referer header value to set.
* http_method HTTP method to use for the download.
* http_headers A dictionary of additional HTTP headers
to add to the request.
* http_post_data Additional data to send with a POST
request.
url: Final video URL.
ext: Video filename extension.
format: The video format, defaults to ext (used for --get-format)
player_url: SWF Player URL (used for rtmpdump).
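    A minimal, purely illustrative result dictionary (every value below is
    made up for this example rather than taken from any real extractor)
    might look like:
        {
            'id': '42',
            'title': 'Example clip',
            'url': 'https://example.com/clip.mp4',
            'ext': 'mp4',
        }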
The following fields are optional:
alt_title: A secondary title of the video.
display_id An alternative identifier for the video, not necessarily
unique, but available before title. Typically, id is
something like "4234987", title "Dancing naked mole rats",
and display_id "dancing-naked-mole-rats"
thumbnails: A list of dictionaries, with the following entries:
* "url"
* "width" (optional, int)
* "height" (optional, int)
* "resolution" (optional, string "{width}x{height"},
deprecated)
thumbnail: Full URL to a video thumbnail image.
description: Full video description.
uploader: Full name of the video uploader.
timestamp: UNIX timestamp of the moment the video became available.
upload_date: Video upload date (YYYYMMDD).
If not explicitly set, calculated from timestamp.
uploader_id: Nickname or id of the video uploader.
location: Physical location where the video was filmed.
subtitles: The subtitle file contents as a dictionary in the format
{language: subtitles}.
duration: Length of the video in seconds, as an integer.
view_count: How many users have watched the video on the platform.
like_count: Number of positive ratings of the video
dislike_count: Number of negative ratings of the video
comment_count: Number of comments on the video
age_limit: Age restriction for the video, as an integer (years)
webpage_url: The url to the video webpage, if given to youtube-dl it
should allow to get the same result again. (It will be set
by YoutubeDL if it's missing)
categories: A list of categories that the video falls in, for example
["Sports", "Berlin"]
is_live: True, False, or None (=unknown). Whether this video is a
                    live stream that goes on instead of a fixed-length video.
Unless mentioned otherwise, the fields should be Unicode strings.
Unless mentioned otherwise, None is equivalent to absence of information.
_type "playlist" indicates multiple videos.
There must be a key "entries", which is a list, an iterable, or a PagedList
    object, each element of which is a valid dictionary by this specification.
Additionally, playlists can have "title" and "id" attributes with the same
semantics as videos (see above).
_type "multi_video" indicates that there are multiple videos that
f
|
sserrot/champion_relationships
|
venv/Lib/site-packages/win32com/test/testArrays.py
|
Python
|
mit
| 2,505
| 0.050699
|
# Originally contributed by Stefan Schukat as part of this arbitrary-sized
# arrays patch.
from win32com.client import gencache
from win32com.test import util
import unittest
ZeroD = 0
OneDEmpty = []
OneD = [1,2,3]
TwoD = [
[1,2,3],
[1,2,3],
[1,2,3]
]
TwoD1 = [
[
[1,2,3,5],
[1,2,3],
[1,2,3]
],
[
[1,2,3],
[1,2,3],
[1,2,3]
]
]
OneD1 = [
[
[1,2,3],
[1,2,3],
[1,2,3]
],
[
[1,2,3],
[1,2,3]
]
]
OneD2 = [
[1,2,3],
[1,2,3,4,5],
[
[1,2,3,4,5],
[1,2,3,4,5],
[1,2,3,4,5]
]
]
ThreeD = [
[
[1,2,3],
[1,2,3],
[1,2,3]
],
[
[1,2,3],
[1,2,3],
[1,2,3]
]
]
FourD = [
[
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]]
],
[
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]],
        [[1,2,3],[1,2,3],[1,2,3]]
]
]
LargeD = [
[ [list(range(10))] * 10],
] * 512
def _normalize_array(a):
if type(a) != type(()):
return a
ret = []
for i in a:
ret.append(_normalize_array(i))
return ret
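# Hedged illustration (not part of the original test module): COM hands back
# nested tuples, which _normalize_array rewrites as nested lists so they
# compare equal to the list literals defined above.
assert _normalize_array(((1, 2), (3,))) == [[1, 2], [3]]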
class ArrayTest(util.TestCase):
def setUp(self):
self.arr = gencache.EnsureDispatch("PyCOMTest.ArrayTest")
def tearDown(self):
self.arr = None
def _doTest(self, array):
self.arr.Array = array
self.failUnlessEqual(_normalize_array(self.arr.Array), array)
def testZeroD(self):
self._doTest(ZeroD)
def testOneDEmpty(self):
self._doTest(OneDEmpty)
def testOneD(self):
self._doTest(OneD)
def testTwoD(self):
self._doTest(TwoD)
def testThreeD(self):
self._doTest(ThreeD)
def testFourD(self):
self._doTest(FourD)
def testTwoD1(self):
self._doTest(TwoD1)
def testOneD1(self):
self._doTest(OneD1)
def testOneD2(self):
self._doTest(OneD2)
def testLargeD(self):
self._doTest(LargeD)
if __name__ == "__main__":
try:
util.testmain()
except SystemExit as rc:
if not rc:
raise
|
getconnect/connect-python
|
tests/test_api.py
|
Python
|
mit
| 9,933
| 0.010772
|
# -*- coding: utf-8 -*-
import json
from mock import patch
from collections import defaultdict
from requests import Session
from connect.api import ConnectApi
from connect.event import Event
from connect import responses
PROJECT_ID = "MY_PROJECT_ID"
API_PUSH_KEY = "MY_PUSH_API_KEY"
BASE_URL = "https://api.getconnect.io"
COLLECTION_NAME = 'my_collection'
MULTI_EVENT_DATA = [{
'type': 'cycling',
'distance': 21255,
'caloriesBurned': 455,
'duration': 67,
'user': {
'id': '638396',
'name': 'Bruce'
}
},
{
'type': 'swimming',
'distance': 21255,
'caloriesBurned': 455,
'duration': 67,
'user': {
'id': '638396',
'name': 'Bruce',
}
}
]
SINGLE_EVENT_DATA = {
'type': 'cycling',
'distance': 21255,
'caloriesBurned': 455,
'duration': 67,
'user': {
'id': '638396',
'first': 'Bruce'
}
}
@patch("connect.api.Session.post")
class TestConnectAPI():
def setup_method(self, method):
batched = defaultdict(list)
for e in MULTI_EVENT_DATA:
e = Event(COLLECTION_NAME, e)
batched[COLLECTION_NAME].append(e.body)
self.multi_events = batched
self.single_event = Event(COLLECTION_NAME,SINGLE_EVENT_DATA)
self.connect = ConnectApi(project_id=PROJECT_ID,
api_key=API_PUSH_KEY
)
def test_init(self, post):
assert PROJECT_ID == self.connect._project_id
assert API_PUSH_KEY == self.connect._api_key
assert "https://api.getconnect.io" == self.connect._base_url
assert 60 == self.connect._get_timeout
assert 60 == self.connect._post_timeout
assert isinstance(self.connect._session, Session)
connect = ConnectApi(project_id=PROJECT_ID,
api_key=API_PUSH_KEY,
base_url="myurl",
post_timeout=10,
get_timeout=5
)
assert connect._base_url == "myurl"
assert connect._post_timeout == 10
assert connect._get_timeout == 5
def test_post_event(self, post ):
#200 - empty response
mocked_response = mocked_connect_response(200,None)
post.return_value = mocked_response
result = self.connect.post_event(self.single_event)
assert isinstance(result,responses.PushResponse)
post.reset_mock()
with patch("connect.api.ConnectApi._build_response") as build_response:
self.connect.post_event(self.single_event)
url = "{0}/events/{1}".format(BASE_URL,COLLECTION_NAME)
data = json.dumps(self.single_event.body)
post.assert_called_once_with(url=url, data=data, timeout=60)
build_response.assert_called_once_with(response_body=None,
raw_event=self.single_event.body,
status_code=200)
build_response.reset_mock()
# Non-empty response (!= 200)
body = {"errorMessage": "Maximum event size of 64kb exceeded."}
mocked_response = mocked_connect_response(413,body)
post.return_value = mocked_response
self.connect.post_event(self.single_event)
build_response.assert_called_once_with(response_body=body,
raw_event=self.single_event.body,
status_code=413)
def test_post_events(self,post):
events = []
expected_events = defaultdict(list)
for e in MULTI_EVENT_DATA:
events.append(Event(COLLECTION_NAME, e))
expected_events[COLLECTION_NAME].append(e)
body = {
COLLECTION_NAME: [{
"event": events[0].body,
"success": True
}, {
"event": events[1].body,
"success": False,
"message": "An error occured inserting the event please try again."
}]
}
mocked_response = mocked_connect_response(200,body)
post.return_value = mocked_response
result = self.connect.post_events(self.multi_events)
url = "{0}/events".format(BASE_URL)
data = json.dumps(self.multi_events)
post.assert_called_with(url=url, data=data, timeout=60)
assert isinstance(result,responses.PushBatchResponse)
with patch("connect.api.ConnectApi._build_batch_response") as build_batch_response:
self.connect.post_events(self.multi_events)
build_batch_response.assert_called_once_with(response_body=body,
events_by_collection=self.multi_events,
status_code=200)
def test__build_response(self, post):
single_event = Event(COLLECTION_NAME,SINGLE_EVENT_DATA)
r = self.connect._build_response(None, single_event.body,200)
assert isinstance(r, responses.PushResponse)
assert r.error_message is None
assert r.http_status_code == 200
assert r.event == single_event.body
r = self.connect._build_response(None, single_event.body,401)
assert isinstance(r, responses.PushResponse)
assert r.error_message == "Unauthorised. Please check your Project Id and API Key"
assert r.event == single_event.body
assert r.http_status_code == 401
response_body = {
"errors": [{
"field": "fieldName",
"description": "There was an error with this field."
}]
}
r = self.connect._build_response(response_body, single_event.body, 422)
assert isinstance(r, responses.PushResponse)
assert r.error_message == [{
"field": "fieldName",
"description": "There was an error with this field."
}]
assert r.event == single_event.body
assert r.http_status_code == 422
response_body = {
"errorMessage": "Maximum event size of 64kb exceeded."
}
r = self.connect._build_response(response_body, single_event.body, 413)
assert isinstance(r, responses.PushResponse)
assert r.error_message == "Maximum event size of 64kb exceeded."
assert r.event == single_event.body
assert r.http_status_code == 413
def test__build_batch_response(self, post):
response_body = {
COLLECTION_NAME: [
{
"success": True
},
{
"success": False,
"message": "An error occurred inserting the event please try again."
}
],
"my_collection2": [
{
"success": True
}
]
}
events = defaultdict(list)
events["my_collection2"].append(SINGLE_EVENT_DATA)
events[COLLECTION_NAME] = [e for e in MULTI_EVENT_DATA]
r = self.connect._build_batch_response(response_body,
events,
200)
assert isinstance(r, responses.PushBatchResponse)
assert r.http_status_code == 200
assert r.error_message is None
#to do: assert event body
for collection in r.results:
collection_results = r.results[collection]
for i in range(0, len(collection_results)):
                assert isinstance(collection_results[i], responses.PushResponse)
|
erickpeirson/django-ncbi
|
ncbi/manage.py
|
Python
|
gpl-3.0
| 247
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__ma
|
in__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ncbi.settings")
    from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
MrYsLab/s2-pi
|
s2_pi/s2_pi.py
|
Python
|
agpl-3.0
| 5,512
| 0.000727
|
#!/usr/bin/env python3
"""
s2_pi.py
Copyright (c) 2016-2018 Alan Yorinks All rights reserved.
Python Banyan is free software; you can redistribute it and/or
modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
Version 3 as published by the Free Software Foundation; either
that version, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU AFFERO GENERAL PUBLIC LICENSE
along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import json
import os
import sys
import time
from subprocess import call
import pigpio
import psutil
from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
# This class inherits from WebSocket.
# It receives messages from Scratch and reports back any digital input
# changes.
class S2Pi(WebSocket):
def handleMessage(self):
# get command from Scratch2
payload = json.loads(self.data)
print(payload)
client_cmd = payload['command']
# When the user wishes to set a pin as a digital Input
if client_cmd == 'input':
pin = int(payload['pin'])
self.pi.set_glitch_filter(pin, 20000)
self.pi.set_mode(pin, pigpio.INPUT)
self.pi.callback(pin, pigpio.EITHER_EDGE, self.input_callback)
# when a user wishes to set the state of a digital output pin
elif client_cmd == 'digital_write':
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
state = payload['state']
if state == '0':
self.pi.write(pin, 0)
else:
self.pi.write(pin, 1)
        # when a user wishes to set a pwm level on a digital output pin
elif client_cmd == 'analog_write':
            pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
value = int(payload['value'])
self.pi.set_PWM_dutycycle(pin, value)
elif client_cmd == 'servo':
# HackEduca ---> When a user wishes to set a servo:
            # Using SG90 servo:
# 180° = 2500 Pulses; 0° = 690 Pulses
# Want Servo 0°-->180° instead of 180°-->0°:
# Invert pulse_max to pulse_min
# pulse_width = int((((pulse_max - pulse_min)/(degree_max - degree_min)) * value) + pulse_min)
# Where:
# Test the following python code to know your Pulse Range: Replace it in the formula
# >>>>----------------------->
# import RPi.GPIO as GPIO
# import pigpio
# Pulse = 690 # 0°
# Pulse = 2500 # 180°
# pi = pigpio.pi()
# pi.set_mode(23, pigpio.OUTPUT)
# pi.set_servo_pulse_width(23, Pulse)
# pi.stop()
# <------------------------<<<<<
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
value = int(payload['value'])
DegreeMin = 0
DegreeMax = 180
PulseMin = 2500
PulseMax = 690
Pulsewidth = int((((PulseMax - PulseMin) / (DegreeMax - DegreeMin)) * value) + PulseMin)
self.pi.set_servo_pulsewidth(pin, Pulsewidth)
time.sleep(0.01)
# when a user wishes to output a tone
elif client_cmd == 'tone':
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
frequency = int(payload['frequency'])
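            # convert the requested frequency in Hz into the per-level pulse delay in microseconds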
frequency = int((1000 / frequency) * 1000)
tone = [pigpio.pulse(1 << pin, 0, frequency),
pigpio.pulse(0, 1 << pin, frequency)]
self.pi.wave_add_generic(tone)
wid = self.pi.wave_create()
if wid >= 0:
self.pi.wave_send_repeat(wid)
time.sleep(1)
self.pi.wave_tx_stop()
self.pi.wave_delete(wid)
elif client_cmd == 'ready':
pass
else:
print("Unknown command received", client_cmd)
# call back from pigpio when a digital input value changed
# send info back up to scratch
def input_callback(self, pin, level, tick):
payload = {'report': 'digital_input_change', 'pin': str(pin), 'level': str(level)}
print('callback', payload)
msg = json.dumps(payload)
self.sendMessage(msg)
def handleConnected(self):
self.pi = pigpio.pi()
print(self.address, 'connected')
def handleClose(self):
print(self.address, 'closed')
def run_server():
    # check running processes:
    # if the pigpio daemon (pigpiod) is already running, just note that and move on.
found_pigpio = False
for pid in psutil.pids():
p = psutil.Process(pid)
if p.name() == "pigpiod":
found_pigpio = True
print("pigpiod is running")
else:
continue
if not found_pigpio:
call(['sudo', 'pigpiod'])
print('pigpiod has been started')
os.system('scratch2&')
server = SimpleWebSocketServer('', 9000, S2Pi)
server.serveforever()
if __name__ == "__main__":
try:
run_server()
except KeyboardInterrupt:
sys.exit(0)
|
sunchaoatmo/cplot
|
plotset.py
|
Python
|
gpl-3.0
| 28,891
| 0.077291
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import cm
import numpy as np# reshape
from cstoolkit import drange
from matplotlib.colors import LinearSegmentedColormap
"""
cmap_cs_precp = [ (242, 242, 242), (191, 239, 255), (178, 223, 238),
(154, 192, 205), ( 0, 235, 235), ( 0, 163, 247),
(153, 255, 51),( 0, 255, 0), ( 0, 199, 0), ( 0, 143, 0),
( 0, 63, 0), (255, 255, 0),(255, 204, 0) , (255, 143, 0),
(255, 0, 0), (215, 0, 0),
(255, 0, 255) ] #, (155, 87, 203)]
"""
cmap_cs_precp = [ (242, 242, 242), (178, 223, 238), (154, 192, 205), (68, 176, 213),
( 0, 163, 247), ( 0, 235, 235), (153, 255, 51 ), ( 0, 255, 0),
( 0, 199, 0), ( 0, 143, 0), ( 0, 63, 0), (255, 255, 0),
( 255, 204, 0), (255, 143, 0), (255, 0, 0), (215, 0, 0),
(255, 0, 255) ] #, (155, 87, 203)]
WBGYR=[#(255,255,255),
#(252,254,255),
#(250,253,255),
#(247,252,254),
#(244,251,254),
#(242,250,254),
#(239,249,254),
#(236,248,253),
#(234,247,253),
#(231,246,253),
#(229,245,253),
#(226,244,253),
#(223,243,252),
#(221,242,252),
#(218,241,252),
#(215,240,252),
#(213,239,252),
#(210,238,251),
#(207,237,251),
#(205,236,251),
#(202,235,251),
#(199,234,250),
#(197,233,250),
#(194,232,250),
#(191,231,250),
#(189,230,250),
#(186,229,249),
(183,228,249),
(181,227,249),
(178,226,249),
(176,225,249),
(173,224,248),
(170,223,248),
(168,222,248),
(165,221,248),
(162,220,247),
(157,218,247),
(155,216,246),
(152,214,245),
(150,212,243),
(148,210,242),
(146,208,241),
(143,206,240),
(141,204,238),
(139,202,237),
(136,200,236),
(134,197,235),
(132,195,234),
(129,193,232),
(127,191,231),
(125,189,230),
(123,187,229),
(120,185,228),
(118,183,226),
(116,181,225),
(113,179,224),
(111,177,223),
(109,175,221),
(106,173,220),
(104,171,219),
(102,169,218),
(100,167,217),
(97,165,215),
(95,163,214),
(93,160,213),
(90,158,212),
(88,156,211),
(86,154,209),
(83,152,208),
(81,150,207),
(79,148,206),
(77,146,204),
(72,142,202),
(72,143,198),
(72,144,195),
(72,145,191),
(72,146,188),
(72,147,184),
(72,148,181),
(72,149,177),
(72,150,173),
(72,151,170),
(72,153,166),
(72,154,163),
(72,155,159),
(72,156,156),
(72,157,152),
(72,158,148),
(72,159,145),
(72,160,141),
(72,161,138),
(73,162,134),
(73,163,131),
(73,164,127),
(73,165,124),
        (73,166,120),
(73,167,116),
        (73,168,113),
(73,169,109),
(73,170,106),
(73,172,102),
(73,173,99),
(73,174,95),
(73,175,91),
(73,176,88),
(73,177,84),
(73,178,81),
(73,179,77),
(73,181,70),
(78,182,71),
(83,184,71),
(87,185,72),
(92,187,72),
(97,188,73),
(102,189,74),
(106,191,74),
(111,192,75),
(116,193,75),
(121,195,76),
(126,196,77),
(130,198,77),
(135,199,78),
(140,200,78),
(145,202,79),
(150,203,80),
(154,204,80),
(159,206,81),
(164,207,81),
(169,209,82),
(173,210,82),
(178,211,83),
(183,213,84),
(188,214,84),
(193,215,85),
(197,217,85),
(202,218,86),
(207,220,87),
(212,221,87),
(217,222,88),
(221,224,88),
(226,225,89),
(231,226,90),
(236,228,90),
(240,229,91),
(245,231,91),
(250,232,92),
(250,229,91),
(250,225,89),
(250,222,88),
(249,218,86),
(249,215,85),
(249,212,84),
(249,208,82),
(249,205,81),
(249,201,80),
(249,198,78),
(249,195,77),
(248,191,75),
(248,188,74),
(248,184,73),
(248,181,71),
(248,178,70),
(248,174,69),
(248,171,67),
(247,167,66),
(247,164,64),
(247,160,63),
(247,157,62),
(247,154,60),
(247,150,59),
(247,147,58),
(246,143,56),
(246,140,55),
(246,137,53),
(246,133,52),
(246,130,51),
(246,126,49),
(246,123,48),
(246,120,47),
(245,116,45),
(245,113,44),
(245,106,41),
(244,104,41),
(243,102,41),
(242,100,41),
(241,98,41),
(240,96,41),
(239,94,41),
(239,92,41),
(238,90,41),
(237,88,41),
(236,86,41),
(235,84,41),
(234,82,41),
(233,80,41),
(232,78,41),
(231,76,41),
(230,74,41),
(229,72,41),
(228,70,41),
(228,67,40),
(227,65,40),
(226,63,40),
(225,61,40),
(224,59,40),
(223,57,40),
(222,55,40),
(221,53,40),
(220,51,40),
(219,49,40),
(218,47,40),
(217,45,40),
(217,43,40),
(216,41,40),
(215,39,40),
(214,37,40),
(213,35,40),
(211,31,40),
(209,31,40),
(207,30,39),
(206,30,39),
(204,30,38),
(202,30,38),
(200,29,38),
(199,29,37),
(197,29,37),
(195,29,36),
(193,28,36),
(192,28,36),
(190,28,35),
(188,27,35),
(186,27,34),
(185,27,34),
(183,27,34),
(181,26,33),
(179,26,33),
(178,26,32),
(176,26,32),
(174,25,31),
(172,25,31),
(171,25,31),
(169,25,30),
(167,24,30),
(165,24,29),
(164,24,29),
(162,23,29),
(160,23,28),
(158,23,28),
(157,23,27),
(155,22,27),
(153,22,27),
(151,22,26),
(150,22,26),
(146,21,25)]
hotcold18= [( 24 , 24 ,112),
( 16 , 78 ,139),
( 23 ,116 ,205),
( 72 ,118 ,255),
( 91 ,172 ,237),
( 173 ,215 ,230),
( 209 ,237 ,237),
( 229 ,239 ,249),
#( 242 ,255 ,255),
( 255 ,255 ,255),
#( 253 ,245 ,230),
( 255 ,228 ,180),
( 243 ,164 , 96),
( 237 ,118 , 0),
( 205 ,102 , 29),
( 224 , 49 , 15),
#( 255, 0 , 0),
( 255, 0 , 255),
(183,75,243),
(183,75,243)]
#(255,0,255)] #,
#(81,9,121)]
"""
( 237 , 0 , 0),
( 205 , 0 , 0),
( 139 , 0 , 0)]
"""
haxby= [ (37,57,175) ,
(37,68,187) ,
(38,79,199) ,
(38,90,211) ,
(39,101,223) ,
(39,113,235) ,
(40,124,247) ,
(41,134,251) ,
(43,144,252) ,
(44,154,253) ,
(46,164,253) ,
(47,174,254) ,
(49,184,255) ,
(54,193,255) ,
(62,200,255) ,
(71,207,255) ,
(80,214,255) ,
(89,221,255) ,
(98,229,255) ,
(107,235,254) ,
(112,235,241) ,
(117,235,228) ,
(122,235,215) ,
(127,236,202) ,
(132,236,189) ,
(137,236,177) ,
(147,238,172) ,
(157,241,171) ,
(168,244,169) ,
(178,247,167) ,
(189,250,165) ,
(200,253,163) ,
(208,253,159) ,
(213,250,152) ,
(219,247,146) ,
(224,244,139) ,
(230,241,133) ,
(236,238,126) ,
(240,235,120) ,
(243,227,115) ,
(245,220,109) ,
(248,212,104) ,
(250,205,98) ,
(252,197,93) ,
(255,190,88) ,
(255,185,84) ,
(255,181,81) ,
(255,176,78) ,
(255,172,75) ,
(255,167,72) ,
(255,163,69) ,
(255,163,74) ,
(255,167,85) ,
(255,171,95) ,
(255,175,105) ,
(255,179,115) ,
(255,183,126) ,
(255,189,139) ,
(255,200,158) ,
(255,211,178) ,
(255,222,197) ,
(255,233,216) ,
(255,244,236) ,
(255,255,255) ]
BWR=[ ( 36 , 0 , 216),
( 24 , 28 , 247),
( 40 , 87 , 255),
( 61 , 135 , 255),
( 86 , 176 , 255),
( 117 , 211 , 255),
( 153 , 234 , 255),
( 188 , 249 , 255),
( 234 , 255 , 255),
( 255 , 255 , 255),
( 255 , 241 , 188),
( 255 , 214 , 153),
( 255 , 172 , 117),
( 255 , 120 , 86),
( 255 , 61 , 61),
#( 247 , 39 , 53),
( 165 , 0 , 33)]
"""
( 216 , 21 , 47),
( 165 , 0 , 33)]
"""
BWR=[ #( 0 , 0 , 0),
( 16 , 78 , 139),
#( 23 , 116 , 205),
#( 61 , 135 , 255),
( 86 , 176 , 255),
( 117 , 211 , 255),
( 153 , 234 , 255),
( 188 , 249 , 255),
( 234 , 255 , 255),
( 255 , 255 , 255),
( 255 , 241 , 188),
( 255 , 214 , 153),
( 255 , 172 , 117),
( 255 , 120 , 86),
( 255 , 61 , 61),
( 165 , 0 , 33)]
#( 247 , 39 , 53)]
tableau20 = [ (127, 127, 127),(174, 199, 232), (31, 119, 180), (255, 187, 120),
(214, 39, 40),(152, 223, 138), (44, 160, 44), (255, 152, 150),
(148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
(227, 119, 194), (247, 182, 210), (255, 127, 14),(199, 199, 199),
(188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229),(65,68,81),(0,0,0)]
def buildcmp(cmaplist):
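    # normalize 8-bit RGB triplets to the 0-1 floats matplotlib expects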
for i in range(len(cmaplist)):
r, g, b = cmaplist[i]
cmaplist[i] = (r / 255., g / 255., b / 255.)
    return LinearSegmentedColormap.from_list("precip", cmaplist, N=len(cmaplist)), cmaplist
cmap_cs_precp,cs_precp_list=buildcmp(cmap_cs_precp)
cmap_haxby,haxby_list=buildcmp(haxby[::5])
cmap_BWR,BWR_list=buildcmp(BWR)
cmap_BWR.set_over('purple')
cmap_BWR.set_under('blue')
cmap_cs_precp.set_over('purple')
cmap_tableau20,tableau20=buildcmp(tableau20)
cmap_hotcold18,hotcold18=buildcmp(hotcold18)
cmap_hotcold18.set_over('blueviolet')
cmap_hotcold18.set_under('black')
cmap_WBGYR,WBGYR=buildcmp(WBGYR)
sim_nicename={"ERI"
|
dhuppenkothen/UTools
|
simpower.py
|
Python
|
bsd-2-clause
| 7,317
| 0.02132
|
import numpy as np
import scipy
import math
import argparse
import lightcurve
import powerspectrum
#### MULTIPLY LIGHT CURVES TOGETHER ##################
#
# Little function that multiplies light curves of different
# processes together.
# Base_lc can be any LightCurve object, even one of the three options given below
# Base_lc should be normalized to the desired mean, rednoise and qpo to 1 and the envelope
# to 1/mean(flux)
#
# base_lc [LightCurve] = base light curve to use --> should be the longest one
# env [list]= burst envelope, deterministic function
# rednoise [list] = red noise profile
# QPO [list] = quasi-periodic oscillation
#
# !!! IMPORTANT!!! MAKE SURE envelope, rednoise AND qpo ARE LISTS, NOT NUMPY ARRAYS!!!
#
#
#
#
def multiply_lightcurves(base_lc, envelope=None, rednoise=None, qpo=None):
if envelope:
base_lc.counts[:len(envelope)] = base_lc.counts[:len(envelope)]*envelope
if rednoise:
base_lc.counts = base_lc.counts*rednoise
if qpo:
base_lc.counts = base_lc.counts*qpo
return
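# Hedged usage sketch (illustrative, not part of the original module): build a
# flat light curve and apply an exponential burst envelope in place. It assumes
# lightcurve.Lightcurve(time, counts=...) behaves as used elsewhere in this file.
def _demo_multiply_lightcurves():
    time = np.arange(1000) * 0.01
    base = lightcurve.Lightcurve(time, counts=np.ones(len(time)))
    env = list(np.exp(-time / 2.0))           # deterministic envelope, as a list
    multiply_lightcurves(base, envelope=env)  # scales base.counts in place
    return base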
#############################################
#
# python implementation of Timmer+Koenig 1995
# simulations of red noise
#
#
#
class TimmerPS(powerspectrum.PowerSpectrum):
### fnyquist = number of points in light curve / 2.0
### dnu = frequency resolution, dnu = 1.0/length of time interval
### rms = fractional rms amplitude of the light curve
### nu0 = centroid frequency of Lorentzian (for QPO)
### gamma = width of Lorentzian (for QPO)
### pind = power law index
### pnorm = normalization of power law
### nphot = number of photons in light curve
### psshape = shape of power spectrum
### psmanual = put in array with chosen shape
def __init__(self, fnyquist=4096.0, dnu=1.0, rms=1.0, nu0=None, gamma=None, pind=None, pnorm = None, nphot=None, psshape='plaw', psmanual=None):
### make an empty PowerSpectrum object
powerspectrum.PowerSpectrum.__init__(self,lc=None, counts=None)
#### CREATE ARTIFICIAL POWER SPECTRUM
### number of elements in lc/ps
N = np.ceil(2.0*fnyquist/dnu)
#print "N: " + str(N)
### frequency array
self.n = N
self.freq = np.arange(math.floor(N/2.0))*dnu + dnu
self.fnyquist = fnyquist
self.dnu = dnu
self.nphot = nphot
### turn rms into a variance of the log-normal light curve
        lnvar = np.log(rms**2.0 + 1.0)
#print("Variance of log-normal light curve: " + str(lnvar))
### make a shape for the power spectrum, depending on
### psshape specified
if psshape.lower() in ['flat', 'constant', 'white', 'white noise']:
## assume white noise power spectrum, <P> = N*sigma_ln
s = np.array([self.n*lnvar for x in self.freq])
elif psshape.lower() in ['powerlaw', 'plaw']:
s = self.n*lnvar*(1.0/self.freq)**pind
### Don't do other shapes for now, until I need them
### CAREFUL: normalization of these is not right yet!
elif psshape.lower() in ['qpo', 'lorentzian', 'periodic']:
#print('I am here!')
alpha = (gamma/math.pi)*dnu*N/2.0
sold = alpha/((self.freq-nu0)**2.0 + gamma**2.0)
snew = sold/sum(sold)
#print('sum snew: ' + str(sum(snew)))
s = (sold/sum(sold))*lnvar*fnyquist*self.n/self.dnu
# elif psshape.lower() in ['w+p', 'combined plaw']:
# s = np.array([rms**2.0+pnorm*(1/x)**2.0 for x in self.freq])
# elif psshape.lower() in ['w+q', 'combined qpo']:
# alpha = (sigma**2.0)*(gamma/math.pi)*dnu*N/2.0
# s = 2.0 + nphot*alpha/((self.freq-nu0)**2.0 + gamma**2.0)
elif psshape.lower() in ['manual', 'psmanual']:
if not psmanual is None:
#print(sum(psmanual/sum(psmanual)))
### for now, assume variance normalization
#s = (psmanual/sum(psmanual))*lnvar*fnyquist*self.n**2.0/2.0
s = (psmanual/sum(psmanual))*lnvar*fnyquist*self.n**2.0/(self.dnu)
#s = (psmanual/sum(psmanual))*self.n*(self.n/2.0)*lnvar
else:
raise Exception("No shape given!")
#sfinal = np.insert(s, 0, 0)
#print "len(s) : " + str(len(s))
#print "type(s): " + str(type(s))
### first element is zero, that will be the number of photons
#sfinal = np.insert(s, 0, 0.0)
### s is the power spectrum
self.s = s #sfinal
def makeFourierCoeffs(self):
nphot = self.nphot
N = self.n
a = np.zeros(len(self.s))
x1 = np.random.normal(size=len(self.s))*(self.s/2.0)**0.5
y1 = np.random.normal(size=len(self.s))*(self.s/2.0)**0.5
### S(fnyquist) is real
y1[-1] = 0.0
self.x1 = x1
self.y1 = y1
### now make complex Fourier pair
Fpos = [complex(re,im) for re, im in zip(x1,y1)]
Fneg = [complex(re,-im) for re, im in zip(x1,y1)]
#print "Fpos: " + str(Fpos[:5])
#print "Fpos: " + str(Fpos[-5:])
#print "Fneg: " + str(Fneg[:5])
#print "Fneg: " + str(Fneg[-5:])
Fpos.insert(0, (0+0j))
Fneg.reverse()
#print "Fneg: " + str(Fneg[:5])
#print "Fneg: " + str(len(Fneg))
#print "Fneg: " + str(Fneg[-5:])
### remove duplicate nyquist frequency and the nu=0 term
#Fneg = Fneg[1:1+int(np.round((N-1)/2))]
#Fneg = Fneg[:-1]
#print "Fneg: " + str(len(Fneg))
#Fpos.extend(Fneg)
Fpos.extend(Fneg)
#print "Fpos: " + str(len(Fpos))
#print "Fpos: " + str(Fpos[:5])
#print "Fpos: " + str(Fpos[1168:1172])
#print "Fpos: " + str(Fpos[-5:])
return Fpos
def simulateLightcurve(self, fourier, expon=True, lcmean = None):
### length of time interval
tmax = 1.0/self.dnu
### sampling rate
dt = tmax/self.n
#print(self.n)
### make a time array
time = np.arange(len(fourier))*tmax/self.n
f = fourier
phi = np.fft.ifft(f)#/np.sqrt(self.n)#/(self.nphot**0.5)
phi = np.array([x.real for x in phi])
### if expon == True, transform into lognormally distributed
###light curve such that there are no values < 0:
if expon == False:
flux = phi
elif expon == True and not lcmean is None:
lncounts = np.exp(phi)
flux = lncounts*lcmean/np.mean(lncounts)
else:
raise Exception("You must either specify a mean flux or set expon=False !")
lc = lightcurve.Lightcurve(time, counts=flux)
return lc
def makePeriodogram(self, fourier, norm='variance'):
f = fourier
f2 = np.array(f).conjugate()
ff2 = np.real(f*f2)
s = ff2[0:self.n/2]#*2.0/(self.fnyquist*2*self.fnyquist)
if norm.lower() in ['variance', 'var']:
s = s*2.0/(self.fnyquist*2*self.fnyquist)
if norm.lower() in ['leahy']:
s = 2.0*s/self.nphot
if norm.lower() in ['rms']:
s = 2.0*s/(df*self.nphot**2.0)
ps = powerspectrum.PowerSpectrum()
ps.freq = self.freq
ps.ps = s
return ps
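# Hedged usage sketch (illustrative, not part of the original module): draw a
# Timmer & Koenig realization of a steep power-law spectrum and exponentiate it
# into a strictly positive light curve with the requested mean. Parameter
# values are arbitrary examples.
def _demo_timmer_koenig():
    sim = TimmerPS(fnyquist=256.0, dnu=1.0, rms=0.1, pind=2.0,
                   nphot=10000, psshape='plaw')
    coeffs = sim.makeFourierCoeffs()
    return sim.simulateLightcurve(coeffs, expon=True, lcmean=100.0)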
##########################################################
|
puffinrocks/puffin
|
puffin/core/network.py
|
Python
|
agpl-3.0
| 1,606
| 0.003113
|
import ipaddress
import docker.types
def init():
pass
def get_next_cidr(client):
networks = client.networks.list()
last_cidr = ipaddress.ip_network("10.0.0.0/24")
for network in networks:
if (network.attrs["IPAM"] and network.attrs["IPAM"]["Config"]
and len(network.attrs["IPAM"]["Config"]) > 0
and network.attrs["IPAM"]["Config"][0]["Subnet"]):
cidr = ipaddress.ip_network(network.attrs["IPAM"]["Config"][0]["Subnet"])
if cidr.network_address.packed[0] == 10:
if cidr.prefixlen != 24:
raise Exception(
"Invalid network prefix length {0} for network {1}"
.format(cidr.prefixlen, network.name))
if cidr > last_cidr:
last_cidr = cidr
next_cidr = ipaddress.ip_network((last_cidr.network_address + 256).exploded + "/24")
if next_cidr.network_address.packed[0] > 10:
raise Exception("No more networks available")
last_cidr = next_cidr
return next_cidr
def create_network(client, name):
cidr = get_next_cidr(client)
print("Creating network {0} with subnet {1}".format(name, cidr.exploded))
networks = client.networks.list(names=[name])
if len(networks) > 0:
for network in networks:
network.remove()
ipam_pool = docker.types.IPAMPool(subnet=cidr.exploded,
gateway=(cidr.network_address + 1).exploded)
    ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
client.networks.create(name, ipam=ipam_config)
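# Hedged usage sketch (illustrative, not part of the original module): with a
# client obtained from docker.from_env(), each call claims the next free
# 10.x.y.0/24 block; the network name here is made up.
def _demo_create_network():
    import docker
    client = docker.from_env()
    create_network(client, "puffin_demo")  # prints e.g. "... with subnet 10.0.1.0/24"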
|
m-weigand/Cole-Cole-fit
|
tests/2-term/test_cases.py
|
Python
|
gpl-3.0
| 1,602
| 0.003121
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Execute to run double Cole-Cole term characterization
"""
import sys
sys.path.append('..')
import test_helper as th
import os
import itertools
import numpy as np
import subprocess
testdir = 'data'
def _generate_cc_sets():
"""Generate multiple complex resistivity spectra by sampling a certain
CC-parameter space
"""
rho0_list = np.log(np.array((10, )))
# values the first (low-frequency) term is constructed from
m1_list = (0.05, 0.1)
tau1_list = np.log(np.array((0.4, 1.0)))
c1_list = (0.6, 0.8)
# values for the second term
m2_list = (0.1, )
tau2_list = np.log(np.array((0.0004, 0.00001)))
c2_list = (0.6, )
for cc_set in itertools.product(rho0_list,
m1_list,
tau1_list,
c1_list,
m2_list,
tau2_list,
c2_list
):
yield cc_set
def _get_frequencies():
return np.logspace(-3, 4, 20)
def _fit_spectra():
pwd = os.getcwd()
for directory in th._get_cc_dirs(testdir):
os.chdir(directory)
cmd = 'cc_fit.py -p -c 2 -m 2'
subprocess.call(cmd, shell=True)
os.chdir(pwd)
if __name__ == '__main__':
frequencies = _get_frequencies()
for x in _generate_cc_sets():
print x
th._generate_spectra(frequencies, testdir, _generate_cc_sets)
_fit_spectra()
th._evaluate_fits(testdir)
|
teriyakichild/photoboard
|
photoboard/tests/test.py
|
Python
|
apache-2.0
| 133
| 0.007519
|
from backend import photos, boards
p = photos()
#print p.new('asdf',1,1)
print p.get(1)
b = boards()
print p.all(1)
print b.get(1)
|
jtrobec/pants
|
contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen.py
|
Python
|
apache-2.0
| 4,867
| 0.005137
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from textwrap import dedent
from mock import MagicMock
from pants.backend.codegen.targets.java_thrift_library import JavaThriftLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.scala_library import ScalaLibrary
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.build_graph.address import Address
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.context import Context
from pants.util.dirutil import safe_rmtree
from pants_test.tasks.task_test_base import TaskTestBase
from twitter.common.collections import OrderedSet
from pants.contrib.scrooge.tasks.scrooge_gen import ScroogeGen
# TODO (tdesai) Issue-240: Use JvmToolTaskTestBase for ScroogeGenTest
class ScroogeGenTest(TaskTestBase):
@classmethod
def task_type(cls):
return ScroogeGen
@property
def alias_groups(self):
return BuildFileAliases(targets={'java_thrift_library': JavaThriftLibrary})
def setUp(self):
super(ScroogeGenTest, self).setUp()
self.task_outdir = os.path.join(self.build_root, 'scrooge', 'gen-java')
def tearDown(self):
super(ScroogeGenTest, self).tearDown()
safe_rmtree(self.task_outdir)
def test_validate_compiler_configs(self):
# Set synthetic defaults for the global scope.
self.set_options_for_scope('thrift-defaults',
compiler='unchecked',
language='uniform',
rpc_style='async')
self.add_to_build_file('test_validate', dedent('''
java_thrift_library(name='one',
sources=[],
dependencies=[],
)
'''))
self.add_to_build_file('test_validate', dedent('''
java_thrift_library(name='two',
sources=[],
dependencies=[':one'],
)
'''))
self.add_to_build_file('test_validate', dedent('''
java_thrift_library(name='three',
sources=[],
dependencies=[':one'],
rpc_style='finagle',
)
'''))
target = self.target('test_validate:one')
context = self.context(target_roots=[target])
task = self.create_task(context)
task._validate_compiler_configs([self.target('test_validate:one')])
task._validate_compiler_configs([self.target('test_validate:two')])
with self.assertRaises(TaskError):
      task._validate_compiler_configs([self.target('test_validate:three')])
def test_scala(self):
build_string = '''
java_thrift_library(name='a',
sources=['a.thrift'],
dependencies=[],
compiler='scrooge',
language='scala',
rpc_style='finagle'
)
'''
sources = [os.path.join(self.task_outdir, 'org/pantsbuild/example/Example.scala')]
self._test_help(build_string, ScalaLibrary, sources)
def test_android(self):
build_string = '''
java_thrift_library(name='a',
sources=['a.thrift'],
dependencies=[],
compiler='scrooge',
language='android',
rpc_style='finagle'
)
'''
sources = [os.path.join(self.task_outdir, 'org/pantsbuild/android_example/Example.java')]
self._test_help(build_string, JavaLibrary, sources)
def _test_help(self, build_string, library_type, sources):
contents = dedent('''#@namespace android org.pantsbuild.android_example
namespace java org.pantsbuild.example
struct Example {
1: optional i64 number
}
''')
self.create_file(relpath='test_smoke/a.thrift', contents=contents)
self.add_to_build_file('test_smoke', dedent(build_string))
target = self.target('test_smoke:a')
context = self.context(target_roots=[target])
task = self.create_task(context)
task._declares_service = lambda source: False
task._outdir = MagicMock()
task._outdir.return_value = self.task_outdir
task.gen = MagicMock()
task.gen.return_value = {'test_smoke/a.thrift': sources}
saved_add_new_target = Context.add_new_target
try:
mock = MagicMock()
Context.add_new_target = mock
task.execute()
self.assertEquals(1, mock.call_count)
_, call_kwargs = mock.call_args
self.assertEquals(call_kwargs['target_type'], library_type)
self.assertEquals(call_kwargs['dependencies'], OrderedSet())
self.assertEquals(call_kwargs['provides'], None)
self.assertEquals(call_kwargs['sources'], [])
self.assertEquals(call_kwargs['derived_from'], target)
finally:
Context.add_new_target = saved_add_new_target
|
looker-open-source/sdk-codegen
|
examples/python/simple_schedule_plan.py
|
Python
|
mit
| 1,296
| 0.022377
|
from looker_sdk import methods, models40
import looker_sdk
import exceptions
sdk = looker_sdk.init40("../looker.ini")
def create_simple_schedule(dashboard_id: int, user_id: int, schedule_title: str, format: str, email: str, type: str, message: str, crontab: str):
### For more information on the Params accepted https://github.com/looker-open-source/sdk-codegen/blob/master/python/looker_sdk/sdk/api31/methods.py#L2144
### And for schedule destination go: https://github.com/looker-open-source/sdk-codegen/blob/master/python/looker_sdk/sdk/api31/models.py#L4601
    ### Supported formats vary by destination, but include: "txt", "csv", "inline_json", "json", "json_detail", "xlsx", "html", "wysiwyg_pdf", "assembled_pdf", "wysiwyg_png"
### type: Type of the address ('email', 'webhook', 's3', or 'sftp')
schedule = sdk.create_scheduled_plan(
        body=models40.WriteScheduledPlan(name=schedule_title, dashboard_id=dashboard_id, user_id=user_id, run_as_recipient=True, crontab=crontab, scheduled_plan_destination=[models40.ScheduledPlanDestination(format=format, apply_formatting=True, apply_vis=True, address=email, type=type, message=message)]))
create_simple_schedule(1234,453,"This is an automated test", "assembled_pdf", "test@looker.com", "email", "Hi Looker User!", "0 1 * * *")
|
openstack/tempest
|
tempest/api/object_storage/test_container_acl_negative.py
|
Python
|
apache-2.0
| 10,958
| 0
|
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class ObjectACLsNegativeTest(base.BaseObjectTest):
"""Negative tests of object ACLs"""
credentials = [['operator', CONF.object_storage.operator_role],
['operator_alt', CONF.object_storage.operator_role]]
@classmethod
def setup_credentials(cls):
super(ObjectACLsNegativeTest, cls).setup_credentials()
cls.os_operator = cls.os_roles_operator_alt
@classmethod
def resource_setup(cls):
super(ObjectACLsNegativeTest, cls).resource_setup()
cls.test_auth_data = cls.os_operator.auth_provider.auth_data
def setUp(self):
super(ObjectACLsNegativeTest, self).setUp()
self.container_name = data_utils.rand_name(name='TestContainer')
self.container_client.update_container(self.container_name)
@classmethod
def resource_cleanup(cls):
cls.delete_containers()
super(ObjectACLsNegativeTest, cls).resource_cleanup()
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af587587-0c24-4e15-9822-8352ce711013')
def test_write_object_without_using_creds(self):
"""Test writing object without using credentials"""
# trying to create object with empty headers
# X-Auth-Token is not provided
object_name = data_utils.rand_name(name='Object')
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=None
)
self.assertRaises(lib_exc.Unauthorized,
self.object_client.create_object,
self.container_name, object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af85af0b-a025-4e72-a90e-121babf55720')
def test_delete_object_without_using_creds(self):
"""Test deleting object without using credentials"""
# create object
object_name = data_utils.rand_name(name='Object')
self.object_client.create_object(self.container_name, object_name,
'data')
# trying to delete object with empty headers
# X-Auth-Token is not provided
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=None
)
self.assertRaises(lib_exc.Unauthorized,
self.object_client.delete_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('63d84e37-55a6-42e2-9e5f-276e60e26a00')
def test_write_object_with_non_authorized_user(self):
"""Test writing object with non-authorized user"""
        # User-provided token is forbidden. ACLs are not set
object_name = data_utils.rand_name(name='Object')
# trying to create object with non-authorized user
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name, object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('abf63359-be52-4feb-87dd-447689fc77fd')
def test_read_object_with_non_authorized_user(self):
"""Test reading object with non-authorized user"""
        # User-provided token is forbidden. ACLs are not set
object_name = data_utils.rand_name(name='Object')
        resp, _ = self.object_client.create_object(
            self.container_name, object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# trying to get object with non authorized user token
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.get_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('7343ac3d-cfed-4198-9bb0-00149741a492')
def test_delete_object_with_non_authorized_user(self):
"""Test deleting object with non-authorized user"""
        # User-provided token is forbidden. ACLs are not set
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(
self.container_name, object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# trying to delete object with non-authorized user token
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.delete_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('9ed01334-01e9-41ea-87ea-e6f465582823')
def test_read_object_without_rights(self):
"""Test reading object without rights"""
# update X-Container-Read metadata ACL
cont_headers = {'X-Container-Read': 'badtenant:baduser'}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# create object
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(self.container_name,
object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# Trying to read the object without rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.get_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('a3a585a7-d8cf-4b65-a1a0-edc2b1204f85')
def test_write_object_without_rights(self):
"""Test writing object without rights"""
# update X-Container-Write metadata ACL
cont_headers = {'X-Container-Write': 'badtenant:baduser'}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# Trying to write the object without rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
object_name = data_utils.rand_name(name='Object')
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name,
object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.id
|
pattisdr/osf.io
|
osf/models/preprint.py
|
Python
|
apache-2.0
| 40,401
| 0.002401
|
# -*- coding: utf-8 -*-
import functools
import urlparse
import logging
import re
import pytz
from dirtyfields import DirtyFieldsMixin
from include import IncludeManager
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.contrib.contenttypes.fields import GenericRelation
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from guardian.shortcuts import get_objects_for_user
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from framework.auth import Auth
from framework.exceptions import PermissionsError
from framework.analytics import increment_user_activity_counters
from framework.auth import oauth_scopes
from osf.models import Subject, Tag, OSFUser, PreprintProvider
from osf.models.preprintlog import PreprintLog
from osf.models.contributor import PreprintContributor
from osf.models.mixins import ReviewableMixin, Taggable, Loggable, GuardianMixin
from osf.models.validators import validate_subject_hierarchy, validate_title, validate_doi
from osf.utils.fields import NonNaiveDateTimeField
from osf.utils.workflows import DefaultStates, ReviewStates
from osf.utils import sanitize
from osf.utils.requests import get_request_and_user_id, string_type_request_headers
from website.notifications.emails import get_user_subscriptions
from website.notifications import utils
from website.identifiers.clients import CrossRefClient, ECSArXivCrossRefClient
from website.project.licenses import set_license
from website.util import api_v2_url, api_url_for, web_url_for
from website.citations.utils import datetime_to_csl
from website import settings, mails
from website.preprints.tasks import update_or_enqueue_on_preprint_updated
from osf.models.base import BaseModel, GuidMixin, GuidMixinQuerySet
from osf.models.identifiers import IdentifierMixin, Identifier
from osf.models.mixins import TaxonomizableMixin, ContributorMixin, SpamOverrideMixin
from addons.osfstorage.models import OsfStorageFolder, Region, BaseFileNode, OsfStorageFile
from framework.sentry import log_exception
from osf.exceptions import (
PreprintStateError, InvalidTagError, TagNotFoundError
)
logger = logging.getLogger(__name__)
class PreprintManager(IncludeManager):
def get_queryset(self):
return GuidMixinQuerySet(self.model, using=self._db)
no_user_query = Q(
is_published=True,
is_public=True,
deleted__isnull=True,
primary_file__isnull=False,
primary_file__deleted_on__isnull=True) & ~Q(machine_state=DefaultStates.INITIAL.value) \
& (Q(date_withdrawn__isnull=True) | Q(ever_public=True))
def preprint_permissions_query(self, user=None, allow_contribs=True, public_only=False):
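        # Visibility is the union of: anonymously visible preprints
        # (no_user_query), preprints the user administers, preprints the user
        # can read as a contributor (when allow_contribs), and submissions to
        # providers the user moderates.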
include_non_public = user and not public_only
if include_non_public:
moderator_for = get_objects_for_user(user, 'view_submissions', PreprintProvider)
admin_user_query = Q(id__in=get_objects_for_user(user, 'admin_preprint', self.filter(Q(preprintcontributor__user_id=user.id))))
reviews_user_query = Q(is_public=True, provider__in=moderator_for)
if allow_contribs:
contrib_user_query = ~Q(machine_state=DefaultStates.INITIAL.value) & Q(id__in=get_objects_for_user(user, 'read_preprint', self.filter(Q(preprintcontributor__user_id=user.id))))
query = (self.no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
else:
query = (self.no_user_query | admin_user_query | reviews_user_query)
else:
moderator_for = PreprintProvider.objects.none()
query = self.no_user_query
if not moderator_for.exists():
query = query & Q(Q(date_withdrawn__isnull=True) | Q(ever_public=True))
return query
def can_view(self, base_queryset=None, user=None, allow_contribs=True, public_only=False):
if base_queryset is None:
base_queryset = self
include_non_public = user and not public_only
ret = base_queryset.filter(
self.preprint_permissions_query(
user=user,
allow_contribs=allow_contribs,
public_only=public_only,
) & Q(deleted__isnull=True) & ~Q(machine_state=DefaultStates.INITIAL.value)
)
# The auth subquery currently results in duplicates returned
# https://openscience.atlassian.net/browse/OSF-9058
# TODO: Remove need for .distinct using correct subqueries
return ret.distinct('id', 'created') if include_non_public else ret
class Preprint(DirtyFieldsMixin, GuidMixin, IdentifierMixin, ReviewableMixin, BaseModel,
Loggable, Taggable, GuardianMixin, SpamOverrideMixin, TaxonomizableMixin, ContributorMixin):
objects = PreprintManager()
# Preprint fields that trigger a check to the spam filter on save
SPAM_CHECK_FIELDS = {
'title',
'description',
}
# Node fields that trigger an update to elastic search on save
SEARCH_UPDATE_FIELDS = {
'title',
'description',
'is_published',
'license',
'is_public',
'deleted',
'subjects',
'primary_file',
'contributors',
'tags',
}
# Setting for ContributorMixin
DEFAULT_CONTRIBUTOR_PERMISSIONS = 'write'
provider = models.ForeignKey('osf.PreprintProvider',
on_delete=models.SET_NULL,
related_name='preprints',
null=True, blank=True, db_index=True)
node = models.ForeignKey('osf.AbstractNode', on_delete=models.SET_NULL,
related_name='preprints',
null=True, blank=True, db_index=True)
is_published = models.BooleanField(default=False, db_index=True)
date_published = NonNaiveDateTimeField(null=True, blank=True)
original_publication_date = NonNaiveDateTimeField(null=True, blank=True)
license = models.ForeignKey('osf.NodeLicenseRecord',
on_delete=models.SET_NULL, null=True, blank=True)
identifiers = GenericRelation(Identifier, related_query_name='preprints')
preprint_doi_created = NonNaiveDateTimeField(default=None, null=True, blank=True)
date_withdrawn = NonNaiveDateTimeField(default=None, null=True, blank=True)
withdrawal_justification = models.TextField(default='', blank=True)
ever_public = models.BooleanField(default=False, blank=True)
title = models.TextField(
validators=[validate_title]
) # this should be a charfield but data from mongo didn't fit in 255
description = models.TextField(blank=True, default='')
creator = models.ForeignKey(OSFUser,
db_index=True,
related_name='preprints_created',
on_delete=models.SET_NULL,
null=True, blank=True)
_contributors = models.ManyToManyField(OSFUser,
through=PreprintContributor,
related_name='preprints')
article_doi = models.CharField(max_length=128,
validators=[validate_doi],
null=True, blank=True)
files = GenericRelation('osf.OsfStorageFile', object_id_field='target_object_id', content_type_field='target_content_type')
primary_file = models.ForeignKey('osf.OsfStorageFile', null=True, blank=True, related_name='preprint')
# (for legacy preprints), pull off of node
is_public = models.BooleanField(default=True, db_index=True)
# Datetime when old node was deleted (for legacy preprints)
deleted = NonNaiveDateTimeField(null=True, blank=True)
# For legacy preprints
migrated = NonNaiveDateTimeField(null=True, blank=True)
region = models.ForeignKey(Region, null=True, blank=True, on_delete=models.CASCADE)
groups = {
'read': ('read_preprint',),
'write': ('read_preprint',
|
micfan/dinner
|
src/public/forms.py
|
Python
|
mit
| 2,640
| 0.001515
|
# coding=utf-8
__author__ = 'mic'
from django import forms
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth.forms import UserCreationForm
from public.models import User
class UserSignupForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'duplicate_username': _("A user with that username already exists."),
'password_mismatch': _("The two password fields didn't match."),
}
username = forms.RegexField(label=_("Username"), max_length=30,
regex=r'^[\w.@+-]+$',
help_text=_("Required. 30 characters or fewer. Letters, digits and "
"@/./+/-/_ only."),
error_messages={
'invalid': _("This value may contain only letters, numbers and "
"@/./+/-/_ characters.")})
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."))
invite_code = forms.RegexField(label=_("Invitation code"),
regex=r'^[^\s]{8}$',
help_text=_("Required. 8 characters: letters, digits or symbols"))
class Meta:
model = User
fields = ("username",)
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
User._default_manager.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(
self.error_messages['duplicate_username'],
code='duplicate_username',
)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
user = super(UserSignupForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
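# Usage sketch (illustrative only): in a signup view one might do
#
#     form = UserSignupForm(data=request.POST)
#     if form.is_valid():
#         user = form.save()  # hashes password1 via set_password()
#
# Note that only "username" is a Meta field; the password and invite_code
# fields are declared on the form itself, and the two passwords are
# cross-checked in clean_password2().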
|
amcat/amcat
|
amcat/scripts/__init__.py
|
Python
|
agpl-3.0
| 482
| 0.008299
|
"""
|
Main organisation:
- L{amcat.scripts.tools} contains helper classes that are
|
used by the scripts
- L{amcat.scripts.searchscripts} contains scripts that search the index or the database
- L{amcat.scripts.processors} contains scripts that process the input of a script
- L{amcat.scripts.output} contains scripts that output script results in various formats, such as csv and html.
- L{amcat.scripts.to_be_updated} contains legacy scripts that still have to be updated.
"""
|
2gis/Winium.StoreApps
|
Winium/TestApp.Test/py-functional/tests_silverlight/test_commands.py
|
Python
|
mpl-2.0
| 8,916
| 0.002916
|
# coding: utf-8
import base64
import pytest
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from tests_silverlight import SilverlightTestCase
By.XNAME = 'xname'
class TestGetCommands(SilverlightTestCase):
"""
Test GET commands that do not change anything in app, meaning they can all be run in one session.
"""
def test_get_current_window_handle(self):
"""
GET /session/:sessionId/window_handle Retrieve the current window handle.
"""
assert 'current' == self.driver.current_window_handle
def test_screenshot(self):
"""
GET /session/:sessionId/screenshot Take a screenshot of the current page.
"""
assert self.driver.get_screenshot_as_png(), 'Screenshot should not be empty'
def test_get_window_size(self):
"""
GET /session/:sessionId/window/:windowHandle/size Get the size of the specified window.
"""
size = self.driver.get_window_size()
assert {'height': 800, 'width': 480} == size
def test_get_page_source(self):
"""
GET /session/:sessionId/source Get the current page source (as xml).
"""
from xml.etree import ElementTree
source = self.driver.page_source
root = ElementTree.fromstring(source.encode('utf-8'))
visual_root = next(root.iterfind('*'))
assert 'System.Windows.Controls.Border' == visual_root.tag
@pytest.mark.parametrize(("by", "value"), [
(By.ID, 'MyTextBox'),
(By.NAME, 'NonUniqueName'),
(By.CLASS_NAME, 'System.Windows.Controls.TextBox'),
(By.TAG_NAME, 'System.Windows.Controls.TextBox'),
], ids=['by id', 'by name', 'by class name', 'by tag name'])
def test_find_element(self, by, value):
"""
POST /session/:sessionId/element Search for an element on the page, starting from the document root.
"""
try:
self.driver.find_element(by, value)
except NoSuchElementException as e:
pytest.fail(e)
@pytest.mark.parametrize(("by", "value", "expected_count"), [
(By.NAME, 'NonUniqueName', 2),
(By.TAG_NAME, 'System.Windows.Controls.TextBlock', 30),
], ids=['by name', 'by class name'])
def test_find_elements(self, by, value, expected_count):
"""
POST /session/:sessionId/elements Search for multiple elements on the page, starting from the document root.
"""
assert expected_count == len(self.driver.find_elements(by, value))
def test_find_child_element(self):
"""
POST /session/:sessionId/element/:id/element
Search for an element on the page, starting from the identified element.
"""
parent_element = self.driver.find_element_by_class_name('TestApp.MainPage')
try:
parent_element.find_element_by_id('MyTextBox')
except NoSuchElementException as e:
pytest.fail(e)
def test_find_child_elements(self):
"""
POST /session/:sessionId/element/:id/elements
Search for multiple elements on the page, starting from the identified element.
"""
parent_element = self.driver.find_element_by_id('MyListBox')
elements = parent_element.find_elements_by_class_name('System.Windows.Controls.TextBlock')
assert 25 == len(elements)
def test_get_element_text(self):
"""
GET /session/:sessionId/element/:id/text Returns the visible text for the element.
"""
text = self.driver.find_element_by_id('SetButton').text
assert "Set 'CARAMBA' text to TextBox" == text
@pytest.mark.parametrize(("attr_name", "expected_value"), [('Width', '400', )])
def test_get_element_attribute(self, attr_name, expected_value):
"""
GET /session/:sessionId/element/:id/attribute/:name Get the value of an element's attribute.
"""
element = self.driver.find_element_by_id('MyTextBox')
value = element.get_attribute(attr_name)
assert expected_value == value
def test_get_element_attribute_dot_syntax(self):
element = self.driver.find_element_by_id('SetButton')
value = element.get_attribute('Background.Color')
assert '#00FFFFFF' == value
@pytest.mark.parametrize(("automation_id", "expected_value"), [
('MyTextBox', True),
])
def test_is_element_displayed(self, automation_id, expected_value):
"""
GET /session/:sessionId/element/:id/displayed Determine if an element is currently displayed.
"""
is_displayed = self.driver.find_element_by_id(automation_id).is_displayed()
assert expected_value == is_displayed
def test_get_element_location(self):
"""
GET /session/:sessionId/element/:id/location Determine an element's location on the page.
"""
location = self.driver.find_element_by_id('MyTextBox').location
assert {'x': 240, 'y': 269} == location
def test_get_element_size(self):
size = self.driver.find_element_by_id('MyTextBox').size
assert {'height': 100, 'width': 400} == size
def test_get_element_rect(self):
rect = self.driver.find_element_by_id('MyTextBox').rect
assert {'x': 40, 'y': 219, 'height': 100, 'width': 400} == rect
def test_get_orientation(self):
"""
GET /session/:sessionId/orientation Get the current browser orientation.
Note: we lost orientation support in universal driver, atm it always returns portrait
"""
# TODO: rewrite and parametrize test to test different orientations
assert 'PORTRAIT' == self.driver.orientation
@pytest.mark.parametrize(("name", "expected_value"), [
('May', True),
('June', True),
('November', False),
])
def test_is_displayed(self, name, expected_value):
element = self.driver.find_element_by_name(name)
assert expected_value == element.is_displayed()
def test_file_ops(self):
encoding = 'utf-8'
with open(__file__, encoding=encoding) as f:
encoded = base64.b64encode(f.read().encode(encoding)).decode(encoding)
self.driver.push_file(r"test\sample.dat", encoded)
data = self.driver.pull_file(r"test\sample.dat")
assert encoded == data
def test_execute_script_invoke_method_echo_with_arg(self):
rv = self.driver.execute_script('mobile: invokeMethod', 'TestApp.AutomationApi', 'Echo', 'blah blah')
assert 'blah blah' == rv
def test_execute_script_invoke_method_complex_return_value_no_args(self):
expected = {u'Date': u'1985-10-21T01:20:00', u'Text': u'Flux', u'Value': 3}
rv = self.driver.execute_script('mobile: invokeMethod', 'TestApp.AutomationApi', 'ReturnStubState')
assert expected == rv
class TestExecuteScript(SilverlightTestCase):
__shared_session__ = False
@pytest.mark.parametrize("command_alias", ["automation: InvokePattern.Invoke"])
def test_automation_invoke(self, command_alias):
self.driver.find_element_by_id('MyTextBox').send_keys('')
element = self.driver.find_element_by_id('SetButton')
self.driver.execute_script(command_alias, element)
assert 'CARAMBA' == self.driver.find_element_by_id('MyTextBox').text
@pytest.mark.parametrize("command_alias", ["automation: ScrollPattern.Scroll"])
def test_automation_scroll(self, command_alias):
list_box = self.driver.find_element_by_id('MyListBox')
list_item = list_box.find_element_by_name('November')
start_location = list_item.location
scroll_info = {"v": "smallIncrement", "count": 10}
self.driver.execute_script(command_alias, list_box, scroll_info)
end_location = list_item.location
assert (end_location['y'] - start_location['y']) < 0
class TestBasicInput(SilverlightTestCase):
__shared_session__ = False
def test_send_keys_to_element(self):
"""
POST /session/:sessionId/element/:id/value Send a sequence of key strokes to an element.
TODO: test magic keys
"
|
SEC-i/ecoControl
|
server/urls.py
|
Python
|
mit
| 2,212
| 0.005425
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import hooks
import manager.hooks
import technician.hooks
urlpatterns = patterns('',
# general hooks
(r'^$', hooks.index),
(r'^api/$', hooks.api_index),
(r'^api/export/$', hooks.export_csv),
(r'^api/login/$', hooks.login_user),
(r'^api/logout/$', hooks.logout_user),
(r'^api/notifications/(start/(?P<start>[0-9]+)/)?(end/(?P<end>[0-9]+)/)?$', hooks.list_notifications),
(r'^api/sensors/$', hooks.list_sensors),
(r'^api/settings/$', hooks.list_settings),
(r'^api/status/$', hooks.status),
# technician hooks
(r'^api/configure/$', technician.hooks.configure),
(r'^api/data/monthly/$', technician.hooks.list_sensor_values),
(r'^api/data/yearly/$', technician.hooks.list_sensor_values, {'interval': 'year'}),
(r'^api/forecast/$', technician.hooks.forecast),
(r'^api/forward/$', technician.hooks.forward),
(r'^api/live/$', technician.hooks.live_data),
(r'^api/manage/thresholds/$', technician.hooks.handle_threshold),
(r'^api/settings/tunable/$', technician.hooks.get_tunable_device_configurations),
(r'^api/snippets/$', technician.hooks.handle_snippets),
(r'^api/code/$', technician.hooks.handle_code),
(r'^api/start/$', technician.hooks.start_device),
(r'^api/statistics/$', technician.hooks.get_statistics),
(r'^api/statistics/monthly/$', technician.hooks.get_monthly_statistics),
(r'^api/thresholds/$', technician.hooks.list_thresholds),
# manager hooks
(r'^api/avgs/(sensor/(?P<sensor_id>[0-9]+)/)?(year/(?P<year>[0-9]+)/)?$', manager.hooks.get_avgs),
(r'^api/balance/total/((?P<year>\d+)/)?((?P<month>\d+)/)?$', manager.hooks.get_total_balance),
(r'^api/balance/total/latest/$', manager.hooks.get_latest_total_balance),
(r'^api/history/$', manager.hooks.get_sensorvalue_history_list),
(r'^api/loads/$', manager.hooks.get_daily_loads),
(r'^api/sensor/((?P<sensor_id>\d+)/)?$', manager.hooks.get_detailed_sensor_values),
(r'^api/sums/(sensor/(?P<sensor_id>[0-9]+)/)?(year/(?P<year>[0-9]+)/)?$', manager.hooks.get_sums),
url(r'^admin/', include(admin.site.urls)),
)
|
aboyett/blockdiag
|
src/blockdiag/plugins/__init__.py
|
Python
|
apache-2.0
| 1,527
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2011 Takeshi KOMIYA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pkg_resources import iter_entry_points
node_handlers = []
def load(plugins, diagram, **kwargs):
for name in plugins:
for ep in iter_entry_points('blockdiag_plugins', name):
module = ep.load()
if hasattr(module, 'setup'):
module.setup(module, diagram, **kwargs)
break
else:
msg = "WARNING: unknown plugin: %s\n" % name
raise AttributeError(msg)
def install_node_handler(handler):
if handler not in node_handlers:
node_handlers.append(handler)
def fire_node_event(node, name, *args):
method = "on_" + name
for handler in node_handlers:
getattr(handler, method)(node, *args)
class NodeHandler(object):
def __init__(self, diagram, **kwargs):
self.diagram = diagram
def on_created(self, node):
pass
def on_attr_changed(self, node, attr):
pass
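# Example (a minimal sketch, not shipped with blockdiag): a plugin module
# exposes setup(), registers a NodeHandler, and then receives the events
# fired via fire_node_event(node, 'created') / fire_node_event(node,
# 'attr_changed', attr):
#
#     class LoggingHandler(NodeHandler):
#         def on_created(self, node):
#             print("created: %r" % node)
#
#     def setup(module, diagram, **kwargs):
#         install_node_handler(LoggingHandler(diagram, **kwargs))
#
# The entry point name registered under 'blockdiag_plugins' is what load()
# looks up.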
|
jxtech/wechatpy
|
tests/test_create_reply.py
|
Python
|
mit
| 4,741
| 0
|
# -*- coding: utf-8 -*-
import unittest
from wechatpy.replies import TextReply, create_reply
class CreateReplyTestCase(unittest.TestCase):
def test_create_reply_with_text_not_render(self):
text = "test"
reply = create_reply(text, render=False)
self.assertEqual("text", reply.type)
self.assertEqual(text, reply.content)
reply.render()
def test_create_reply_with_text_render(self):
text = "test"
reply = create_reply(text, render=True)
self.assertTrue(isinstance(reply, str))
def test_create_reply_with_message(self):
from wechatpy.messages import TextMessage
msg = TextMessage(
{
"FromUserName": "user1",
"ToUserName": "user2",
}
)
reply = create_reply("test", msg, render=False)
self.assertEqual("user1", reply.target)
self.assertEqual("user2", reply.source)
reply.render()
def test_create_reply_with_reply(self):
_reply = TextReply(content="test")
reply = create_reply(_reply, render=False)
self.assertEqual(_reply, reply)
reply.render()
def test_create_reply_with_articles(self):
articles = [
{
"title": "test 1",
"description": "test 1",
"image": "http://www.qq.com/1.png",
"url": "http://www.qq.com/1",
},
{
"title": "test 2",
"description": "test 2",
"image": "http://www.qq.com/2.png",
"url": "http://www.qq.com/2",
},
{
"title": "test 3",
"description": "test 3",
"image": "http://www.qq.com/3.png",
"url": "http://www.qq.com/3",
},
]
reply = create_reply(articles, render=False)
self.assertEqual("news", reply.type)
reply.render()
def test_create_reply_with_more_than_ten_articles(self):
articles = [
{
"title": "test 1",
"description": "test 1",
"image": "http://www.qq.com/1.png",
"url": "http://www.qq.com/1",
},
{
"title": "test 2",
"description": "test 2",
"image": "http://www.qq.com/2.png",
"url": "http://www.qq.com/2",
},
{
"title": "test 3",
"description": "test 3",
"image": "http://www.qq.com/3.png",
"url": "http://www.qq.com/3",
},
{
"title": "test 4",
"description": "test 4",
"image": "http://www.qq.com/4.
|
png",
"url": "http://www.qq.com/4",
},
{
"title": "test 5",
"description": "test 5",
"image": "http://www.qq.com/5.png",
"url": "http://www.qq.com/5",
},
{
"title": "test 6",
"descri
|
ption": "test 6",
"image": "http://www.qq.com/6.png",
"url": "http://www.qq.com/6",
},
{
"title": "test 7",
"description": "test 7",
"image": "http://www.qq.com/7.png",
"url": "http://www.qq.com/7",
},
{
"title": "test 8",
"description": "test 8",
"image": "http://www.qq.com/8.png",
"url": "http://www.qq.com/8",
},
{
"title": "test 9",
"description": "test 9",
"image": "http://www.qq.com/9.png",
"url": "http://www.qq.com/9",
},
{
"title": "test 10",
"description": "test 10",
"image": "http://www.qq.com/10.png",
"url": "http://www.qq.com/10",
},
{
"title": "test 11",
"description": "test 11",
"image": "http://www.qq.com/11.png",
"url": "http://www.qq.com/11",
},
]
self.assertRaises(AttributeError, create_reply, articles)
def test_create_empty_reply(self):
from wechatpy.replies import EmptyReply
reply = create_reply("")
self.assertTrue(isinstance(reply, EmptyReply))
reply = create_reply(None)
self.assertTrue(isinstance(reply, EmptyReply))
reply = create_reply(False)
self.assertTrue(isinstance(reply, EmptyReply))
|
mmilaprat/policycompass-services
|
apps/feedbackmanager/views.py
|
Python
|
agpl-3.0
| 891
| 0.001122
|
from rest_framework import generics
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.reverse import reverse
from .models import Feedback
from .serializers import *
class FeedbackListView(generics.ListCreateAPIView):
# permission_classes = IsAuthenticatedOrReadOnly,
queryset = Feedback.objects.all()
# paginate_by = 10
paginate_by_param = 'page_size'
serializer_class = FeedbackSerializer
class FeedbackCategoryListView(generics.ListCreateAPIView):
queryset = FeedbackCategory.objects.all()
serializer_class = FeedbackCategorySerializer
class Base(APIView):
def get(self, request):
result = {
"Feedbacks": reverse('feedback-list', request=request),
"Feedback Categories": reverse('feedbackcategory-list', request=request)
}
return Response(result)
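# Wiring sketch (URL names assumed, matching the reverse() calls above):
#
#     urlpatterns = [
#         url(r'^feedbacks/$', FeedbackListView.as_view(), name='feedback-list'),
#         url(r'^categories/$', FeedbackCategoryListView.as_view(),
#             name='feedbackcategory-list'),
#         url(r'^$', Base.as_view()),
#     ]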
|
ryfeus/lambda-packs
|
pytorch/source/caffe2/python/operator_test/adadelta_test.py
|
Python
|
mit
| 7,954
| 0.002137
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import functools
import hypothesis
from hypothesis import given, settings, HealthCheck
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
class TestAdadelta(serial.SerializedTestCase):
@staticmethod
def ref_adadelta(param_in,
mom_in,
mom_delta_in,
grad, lr,
epsilon,
decay,
using_fp16=False):
param_in_f32 = param_in
mom_in_f32 = mom_in
mom_delta_in_f32 = mom_delta_in
if using_fp16:
param_in_f32 = param_in.astype(np.float32)
mom_in_f32 = mom_in.astype(np.float32)
mom_delta_in_f32 = mom_delta_in.astype(np.float32)
mom_out = decay * mom_in_f32 + (1.0 - decay) * grad * grad
new_grad = (np.sqrt(mom_delta_in_f32 + epsilon) /
np.sqrt(mom_out + epsilon)) * grad
param_out = param_in_f32 + lr * new_grad
mom_delta_out = decay * mom_delta_in_f32 + (1.0 - decay
) * new_grad * new_grad
if using_fp16:
return (param_out.astype(np.float16), mom_out.astype(np.float16),
mom_delta_out.astype(np.float16))
else:
return (param_out.astype(np.float32), mom_out.astype(np.float32),
mom_delta_out.astype(np.float32))
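    # ref_adadelta mirrors the Adadelta update rule implemented by the operator:
    #   m_t = d * m_{t-1} + (1 - d) * g^2
    #   g'  = sqrt(s_{t-1} + eps) / sqrt(m_t + eps) * g
    #   p_t = p_{t-1} + lr * g'
    #   s_t = d * s_{t-1} + (1 - d) * g'^2
    # where m is the squared-gradient moment, s the squared-update moment,
    # d the decay, and eps the stabilizing epsilon.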
@serial.given(inputs=hu.tensors(n=4),
lr=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
epsilon=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
decay=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
**hu.gcs)
def test_adadelta(self, inputs, lr, epsilon, decay, gc, dc):
param, moment, moment_delta, grad = inputs
lr = np.array([lr], dtype=np.float32)
op = core.CreateOperator(
"Adadelta",
["param", "moment", "moment_delta", "grad", "lr"],
["param", "moment", "moment_delta"],
epsilon=epsilon,
decay=decay,
device_option=gc,
)
self.assertReferenceChecks(
gc, op,
[param, moment, moment_delta, grad, lr],
functools.partial(self.ref_adadelta, epsilon=epsilon, decay=decay))
# Suppress filter_too_much health check.
# Likely caused by `assume` call falling through too often.
@settings(suppress_health_check=[HealthCheck.filter_too_much])
@given(inputs=hu.tensors(n=4),
lr=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
epsilon=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
decay=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
**hu.gcs)
def test_sparse_adadelta(self, inputs, lr, epsilon, decay, gc, dc):
param, moment, moment_delta, grad = inputs
moment = np.abs(moment)
lr = np.array([lr], dtype=np.float32)
# Create an indexing array containing values that are lists of indices,
# which index into grad
indices = np.random.choice(np.arange(grad.shape[0]),
size=np.random.randint(grad.shape[0]), replace=False)
# Sparsify grad
grad = grad[indices]
op = core.CreateOperator(
"SparseAdadelta",
["param", "moment", "moment_delta", "indices", "grad", "lr"],
["param", "moment", "moment_delta"],
epsilon=epsilon,
decay=decay,
device_option=gc)
def ref_sparse(param, moment, moment_delta, indices, grad, lr, decay,
ref_using_fp16):
param_out = np.copy(param)
moment_out = np.copy(moment)
moment_delta_out = np.copy(moment_delta)
for i, index in enumerate(indices):
param_out[index], moment_out[index], moment_delta_out[
index] = self.ref_adadelta(param[index], moment[index],
moment_delta[index], grad[i], lr,
epsilon, decay, ref_using_fp16)
return (param_out, moment_out, moment_delta_out)
ref_using_fp16_values = [False]
if dc == hu.gpu_do:
ref_using_fp16_values.append(True)
for ref_using_fp16 in ref_using_fp16_values:
moment_i = None
moment_delta_i = None
param_i = None
if ref_using_fp16:
moment_i = moment.astype(np.float16)
moment_delta_i = moment_delta.astype(np.float16)
param_i = param.astype(np.float16)
else:
moment_i = moment.astype(np.float32)
moment_delta_i = moment_delta.astype(np.float32)
param_i = param.astype(np.float32)
self.assertReferenceChecks(gc, op, [
param_i, moment_i, moment_delta_i, indices, grad, lr, decay,
ref_using_fp16
], ref_sparse)
@serial.given(inputs=hu.tensors(n=3),
lr=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
epsilon=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
decay=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
data_strategy=st.data(),
**hu.gcs)
def test_sparse_adadelta_empty(self, inputs, lr, epsilon, decay,
data_strategy, gc, dc):
param, moment, moment_delta = inputs
moment = np.abs(moment)
lr = np.array([lr], dtype=np.float32)
grad = np.empty(shape=(0,) + param.shape[1:], dtype=np.float32)
indices = np.empty(shape=(0,), dtype=np.int64)
hypothesis.note('indices.shape: %s' % str(indices.shape))
op = core.CreateOperator(
"SparseAdadelta",
["param", "moment", "moment_delta", "indices", "grad", "lr"],
["param", "moment", "moment_delta"],
epsilon=epsilon,
decay=decay,
device_option=gc)
def ref_sparse_empty(param, moment, moment_delta, indices, grad, lr, decay):
param_out = np.copy(param)
moment_out = np.copy(moment)
moment_delta_out = np.copy(moment_delta)
return (param_out, moment_out, moment_delta_out)
ref_using_fp16_values = [False]
if dc == hu.gpu_do:
ref_using_fp16_values.append(True)
for ref_using_fp16 in ref_using_fp16_values:
moment_i = None
moment_delta_i = None
param_i = None
if ref_using_fp16:
moment_i = moment.astype(np.float16)
moment_delta_i = moment_delta.astype(np.float16)
param_i = param.astype(np.float16)
else:
moment_i = moment.astype(np.float32)
moment_delta_i = moment_delta.astype(np.float32)
param_i = param.astype(np.float32)
self.assertReferenceChecks(
gc,
op,
[param_i, moment_i, moment_delta_i, indices, grad, lr, decay],
ref_sparse_empty
)
|
BrummbQ/plantcontrol
|
plant/modelcontrol/views.py
|
Python
|
gpl-3.0
| 1,079
| 0.000927
|
# Create your views here.
from django.shortcuts import render, get_object_or_404
from django.views import generic
from modelcontrol.models import Plant
from xmlrpclib import ServerProxy, Error
class IndexView(generic.ListView):
template_name = 'modelcontrol/index.html'
context_object_name = 'plant_list'
def get_queryset(self):
return Plant.objects.all()
def update(request, plant_id):
p = get_object_or_404(Plant, pk=plant_id)
try:
motor = ServerProxy('http://127.0.0.1:1337', allow_none=True)
if 'position' in request.POST:
p.servo.position = request.POST['position']
p.servo.save()
if 'speed' in request.POST:
p.motor.speed = request.POST['speed']
p.motor.save()
motor.set_rate(0, 7)
motor.set_rate(int(p.motor.speed), 25)
# set device
except KeyError:
# error page
pass
plant_list = Plant.objects.all()
context = {'plant_list': plant_list}
return render(request, 'modelcontrol/index.html', context)
|
tBaxter/tango-happenings
|
happenings/signals.py
|
Python
|
mit
| 368
| 0.002717
|
def update_time(sender, **kwargs):
"""
When a Comment is added, updates the Update to set "last_updated" time
"""
comment = kwargs['instance']
if comment.content_type.app_label == "happenings" and comment.content_type.name == "Update":
from .models import Update
item = Update.objects.get(id=comment.object_pk)
item.save()
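# Wiring sketch (an assumption -- the connect call lives elsewhere in the
# app): with django_comments this handler would typically be attached via
#
#     from django_comments.signals import comment_was_posted
#     comment_was_posted.connect(update_time)
#
# so each new comment on an Update re-saves it and refreshes last_updated.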
|
ricomoss/learn-tech
|
python/track_2/lesson2/apples_to_apples/common/models.py
|
Python
|
gpl-3.0
| 490
| 0
|
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=25, default='Rico')
last_name = models.CharField(max_length=25, blank=True)
hair_color = models.CharField(max_length=10, blank=True)
eye_color = models.CharField(max_length=10)
age = models.IntegerField()
height = models.CharField(max_length=6)
favorite_animal = models.CharField(max_length=25, blank=True)
number_of_animals = models.IntegerField(null=True)
|
dyf102/Gomoku-online
|
client/controller/basecontroller.py
|
Python
|
apache-2.0
| 2,390
| 0.000418
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
import os
# import json as JSON
from network import Client
from PyQt4.QtCore import SIGNAL, QObject, QString
from PyQt4 import Qt, QtCore, QtGui
import threading
import socket
import Queue
import time
sys.path.append('../')
# from util.util import print_trace_exception
def singleton(cls, *args, **kw):
instances = {}
def _singleton():
if cls not in instances:
instances[cls] = cls(*args, **kw)
return instances[cls]
return _singleton
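# Quick check (illustrative): decorating a class makes every call return the
# same instance.
#
#     @singleton
#     class Config(object):
#         pass
#
#     assert Config() is Config()
#
# Note the decorated name becomes a factory function, so classmethods and
# isinstance checks against the original class name no longer apply directly.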
class BaseController(QObject):
_instance = None
def __init__(self, service_name):
QObject.__init__(self)
self.is_connecting = False
self.is_connected = False
self.service_name = service_name
self.connector = SafeConnector()
self.c = Client()
def connect_client(self, adr, port):
if not (self.is_connected or self.is_connecting):
self.is_connecting = True
self.c.connect(adr, port) # will not return any error code
# if ret == -1:
# self.is_connecting = False
# print_trace_exception()
# raise os.ConnectionError()
self.is_connected = True
self.is_connecting = False
def get_client(self):
return self.c
# Object of this class has to be shared between
# the two threads (Python and Qt one).
# Qt thread calls 'connect',
# Python thread calls 'emit'.
# The slot corresponding to the emitted signal
# will be called in Qt's thread.
@singleton
class SafeConnector:
def __init__(self):
self._rsock, self._wsock = socket.socketpair()
self._queue = Queue.Queue()
self._qt_object = QtCore.QObject()
self._notifier = QtCore.QSocketNotifier(self._rsock.fileno(),
QtCore.QSocketNotifier.Read)
self._notifier.activated.connect(self._recv)
def connect(self, signal, receiver):
QtCore.QObject.connect(self._qt_object, signal, receiver)
# should be called by Python thread
def emit(self, signal, *args):
self._queue.put((signal, args))
self._wsock.send('!')
# happens in Qt's main thread
def _recv(self):
self._rsock.recv(1)
signal, args = self._queue.get()
self._qt_object.emit(signal, *args)
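# Usage sketch (illustrative signal name): the Qt side subscribes once, any
# Python worker thread emits, and the slot runs on Qt's main thread because
# the socketpair write wakes the QSocketNotifier there.
#
#     connector = SafeConnector()
#     connector.connect(SIGNAL('board_updated'), on_board_updated)  # Qt thread
#     connector.emit(SIGNAL('board_updated'), move)                 # worker thread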
|
frerepoulet/ZeroNet
|
src/File/FileRequest.py
|
Python
|
gpl-2.0
| 19,416
| 0.003348
|
# Included modules
import os
import time
import json
import itertools
# Third party modules
import gevent
from Debug import Debug
from Config import config
from util import RateLimit
from util import StreamingMsgpack
from util import helper
from Plugin import PluginManager
FILE_BUFF = 1024 * 512
# Incoming requests
@PluginManager.acceptPlugins
class FileRequest(object):
__slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
def __init__(self, server, connection):
self.server = server
self.connection = connection
self.req_id = None
self.sites = self.server.sites
self.log = server.log
self.responded = False # Responded to the request
def send(self, msg, streaming=False):
if not self.connection.closed:
self.connection.send(msg, streaming)
def sendRawfile(self, file, read_bytes):
if not self.connection.closed:
self.connection.sendRawfile(file, read_bytes)
def response(self, msg, streaming=False):
if self.responded:
if config.verbose:
self.log.debug("Req id %s already responded" % self.req_id)
return
if not isinstance(msg, dict): # If msg not a dict create a {"body": msg}
msg = {"body": msg}
msg["cmd"] = "response"
msg["to"] = self.req_id
self.responded = True
self.send(msg, streaming=streaming)
# Route file requests
def route(self, cmd, req_id, params):
self.req_id = req_id
# Don't allow other sites than locked
if "site" in params and self.connection.target_onion:
valid_sites = self.connection.getValidSites()
if params["site"] not in valid_sites:
self.response({"error": "Invalid site"})
self.connection.log(
"%s site lock violation: %s not in %s, target onion: %s" %
(params["site"], valid_sites, self.connection.target_onion)
)
self.connection.badAction(5)
return False
if cmd == "update":
event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 seconds
time.sleep(5)
self.response({"ok": "File update queued"})
# If called more than once within 15 sec only keep the last update
RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params)
else:
func_name = "action" + cmd[0].upper() + cmd[1:]
func = getattr(self, func_name, None)
if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions
s = time.time()
if self.connection.cpu_time > 0.5:
self.log.debug(
"Delay %s %s, cpu_time used by connection: %.3fs" %
(self.connection.ip, cmd, self.connection.cpu_time)
)
time.sleep(self.connection.cpu_time)
if self.connection.cpu_time > 5:
self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time)
if func:
func(params)
else:
self.actionUnknown(cmd, params)
if cmd not in ["getFile", "streamFile"]:
taken = time.time() - s
self.connection.cpu_time += taken
# Update a site file request
def actionUpdate(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(1)
return False
if not params["inner_path"].endswith("content.json"):
self.response({"error": "Only content.json update allowed"})
self.connection.badAction(5)
return
try:
content = json.loads(params["body"])
except Exception, err:
self.log.debug("Update for %s is invalid JSON: %s" % (params["inner_path"], err))
self.response({"error": "File invalid JSON"})
self.connection.badAction(5)
return
file_uri = "%s/%s:%s" % (site.address, params["inner_path"], content["modified"])
if self.server.files_parsing.get(file_uri): # Check if we already working on it
valid = None # Same file
else:
valid = site.content_manager.verifyFile(params["inner_path"], content)
if valid is True: # Valid and changed
self.log.info("Update for %s/%s looks valid, saving..." % (params["site"], params["inner_path"]))
self.server.files_parsing[file_uri] = True
site.storage.write(params["inner_path"], params["body"])
del params["body"]
site.onFileDone(params["inner_path"]) # Trigger filedone
if params["inner_path"].endswith("content.json"): # Download every changed file from peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
# On complete publish to other peers
diffs = params.get("diffs", {})
site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"], diffs=diffs, limit=2), "publish_%s" % params["inner_path"])
# Load new content file and download changed files in new thread
def downloader():
site.downloadContent(params["inner_path"], peer=peer, diffs=params.get("diffs", {}))
del self.server.files_parsing[file_uri]
gevent.spawn(downloader)
else:
del self.server.files_parsing[file_uri]
self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
self.connection.goodAction()
elif valid is None: # Not changed
if params.get("peer"):
peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
else:
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
if peer:
if not peer.connection:
peer.connect(self.connection) # Assign current connection to peer
if params["inner_path"] in site.content_manager.contents:
peer.last_content_json_update = site.content_manager.contents[params["inner_path"]]["modified"]
if config.verbose:
self.log.debug(
"Same version, adding new peer for locked files: %s, tasks: %s" %
(peer.key, len(site.worker_manager.tasks))
)
for task in site.worker_manager.tasks: # New peer add to every ongoing task
if task["peers"] and not task["optional_hash_id"]:
# Download file from this peer too if its peer locked
site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
self.response({"ok": "File not changed"})
self.connection.badAction()
else: # Invalid sign or sha hash
self.log.debug("Update for %s is invalid" % params["inner_path"])
self.response({"error": "File invalid"})
self.connection.badAction(5)
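    # Summary of actionUpdate outcomes: a verified, changed content.json is
    # written and re-published to peers (goodAction); an unchanged file still
    # registers the sender as a peer for locked files (badAction with default
    # weight); a failed signature or hash check costs badAction(5).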
# Send file content request
def actionGetFile(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
try:
file_path = site.storage.getPath(params["inner_path"])
with StreamingMsgpack.FilePart(file_path, "rb") as file:
file.seek(params["location"])
file.read_bytes = FILE_BUFF
file_si
|
brain-tec/server-tools
|
users_ldap_groups/__manifest__.py
|
Python
|
agpl-3.0
| 704
| 0
|
# Copyright 2020 initOS GmbH
# Copyright 2012-2018 Therp BV <https://therp.nl>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "LDAP groups assignment",
"version": "11.0.1.0.0",
"depends": ["auth_ldap"],
"author": "init
|
OS GmbH, Ther
|
p BV, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/server-tools",
"license": "AGPL-3",
"summary": "Adds user accounts to groups based on rules defined "
"by the administrator.",
"category": "Authentication",
"data": [
'views/base_config_settings.xml',
'security/ir.model.access.csv',
],
"external_dependencies": {
'python': ['ldap'],
},
}
|
jonyroda97/redbot-amigosprovaveis
|
lib/matplotlib/quiver.py
|
Python
|
gpl-3.0
| 46,115
| 0.00039
|
"""
Support for plotting vector fields.
Presently this contains Quiver and Barb. Quiver plots an arrow in the
direction of the vector, with the size of the arrow related to the
magnitude of the vector.
Barbs are like quiver in that they point along a vector, but
the magnitude of the vector is given schematically by the presence of barbs
or flags on the barb.
This will also become a home for things such as standard
deviation ellipses, which can and will be derived very easily from
the Quiver code.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import weakref
import numpy as np
from numpy import ma
import matplotlib.collections as mcollections
import matplotlib.transforms as transforms
import matplotlib.text as mtext
import matplotlib.artist as martist
from matplotlib.artist import allow_rasterization
from matplotlib import docstring
import matplotlib.font_manager as font_manager
import matplotlib.cbook as cbook
from matplotlib.cbook import delete_masked_points
from matplotlib.patches import CirclePolygon
import math
_quiver_doc = """
Plot a 2-D field of arrows.
Call signatures::
quiver(U, V, **kw)
quiver(U, V, C, **kw)
quiver(X, Y, U, V, **kw)
quiver(X, Y, U, V, C, **kw)
*U* and *V* are the arrow data, *X* and *Y* set the location of the
arrows, and *C* sets the color of the arrows. These arguments may be 1-D or
2-D arrays or sequences.
If *X* and *Y* are absent, they will be generated as a uniform grid.
If *U* and *V* are 2-D arrays and *X* and *Y* are 1-D, and if ``len(X)`` and
``len(Y)`` match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
The default settings auto-scale the length of the arrows to a reasonable size.
To change this behavior see the *scale* and *scale_units* kwargs.
The defaults give a slightly swept-back arrow; to make the head a
triangle, make *headaxislength* the same as *headlength*. To make the
arrow more pointed, reduce *headwidth* or increase *headlength* and
*headaxislength*. To make the head smaller relative to the shaft,
scale down all the head parameters. You will probably do best to leave
minshaft alone.
*linewidths* and *edgecolors* can be used to customize the arrow
outlines.
Parameters
----------
X : 1D or 2D array, sequence, optional
The x coordinates of the arrow locations
Y : 1D or 2D array, sequence, optional
The y coordinates of the arrow locations
U : 1D or 2D array or masked array, sequence
The x components of the arrow vectors
V : 1D or 2D array or masked array, sequence
The y components of the arrow vectors
C : 1D or 2D array, sequence, optional
The arrow colors
units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ]
The arrow dimensions (except for *length*) are measured in multiples of
this unit.
'width' or 'height': the width or height of the axis
'dots' or 'inches': pixels or inches, based on the figure dpi
'x', 'y', or 'xy': respectively *X*, *Y*, or :math:`\\sqrt{X^2 + Y^2}`
in data units
The arrows scale differently depending on the units. For
'x' or 'y', the arrows get larger as one zooms in; for other
units, the arrow size is independent of the zoom state. For
'width' or 'height', the arrow size increases with the width and
height of the axes, respectively, when the window is resized;
for 'dots' or 'inches', resizing does not change the arrows.
angles : [ 'uv' | 'xy' ], array, optional
Method for determining the angle of the arrows. Default is 'uv'.
'uv': the arrow axis aspect ratio is 1 so that
if *U*==*V* the orientation of the arrow on the plot is 45 degrees
counter-clockwise from the horizontal axis (positive to the right).
'xy': arrows point from (x,y) to (x+u, y+v).
Use this for plotting a gradient field, for example.
Alternatively, arbitrary angles may be specified as an array
of values in degrees, counter-clockwise from the horizontal axis.
Note: inverting a data axis will correspondingly invert the
arrows only with ``angles='xy'``.
scale : None, float, optional
Number of data units per arrow length unit, e.g., m/s per plot width; a
smaller scale parameter makes the arrow longer. Default is *None*.
If *None*, a simple autoscaling algorithm is used, based on the average
vector length and the number of vectors. The arrow length unit is given by
the *scale_units* parameter
scale_units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ], \
None, optional
If the *scale* kwarg is *None*, the arrow length unit. Default is
*None*.
e.g. *scale_units* is 'inches', *scale* is 2.0, and
``(u,v) = (1,0)``, then the vector will be 0.5 inches long.
If *scale_units* is 'width'/'height', then the vector will be half the
width/height of the axes.
If *scale_units* is 'x' then the vector will be 0.5 x-axis
units. To plot vectors in the x-y plane, with u and v having
the same units as x and y, use
``angles='xy', scale_units='xy', scale=1``.
width : scalar, optional
Shaft width in arrow units; default depends on choice of units,
above, and number of vectors; a typical starting value is about
0.005 times the width of the plot.
headwidth : scalar, optional
Head width as multiple of shaft width, default is 3
headlength : scalar, optional
Head length as multiple of shaft width, default is 5
headaxislength : scalar, optional
Head length at shaft intersection, default is 4.5
minshaft : scalar, optional
Length below which arrow scales, in units of head length. Do not
set this to less than 1, or small arrows will look terrible!
Default is 1
minlength : scalar, optional
Minimum length as a multiple of shaft width; if an arrow length
is less than this, plot a dot (hexagon) of this diameter instead.
Default is 1.
pivot : [ 'tail' | 'mid' | 'middle' | 'tip' ], optional
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*.
color : [ color | color sequence ], optional
This is a synonym for the
:class:`~matplotlib.collections.PolyCollection` facecolor kwarg.
If *C* has been set, *color* has no effect.
Notes
-----
Additional :class:`~matplotlib.collections.PolyCollection`
keyword arguments:
%(PolyCollection)s
See Also
--------
quiverkey : Add a key to a quiver plot
""" % docstring.interpd.params
_quiverkey_doc = """
Add a key to a quiver plot.
Call signature::
quiverkey(Q, X, Y, U, label, **kw)
Arguments:
*Q*:
The Quiver instance returned by a call to quiver.
*X*, *Y*:
The location of the key; additional explanation follows.
*U*:
The length of the key
*label*:
A string with the length and units of the key
Keyword arguments:
*angle* = 0
The angle of the key arrow. Measured in degrees anti-clockwise from the
x-axis.
*coordinates* = [ 'axes' | 'figure' | 'data' | 'inches' ]
Coordinate system and units for *X*, *Y*: 'axes' and 'figure' are
normalized coordinate systems with 0,0 in the lower left and 1,1
in the upper right; 'data' are the axes data coordinates (used for
the locations of the vectors in the quiver plot itself); 'inches'
is position in the figure in inches, with 0,0 at the lower left
corner.
*color*:
overrides face and edge colors from *Q*.
*labelpos* = [ 'N' | 'S' | 'E' | 'W' ]
Position the label above, below, to the right, to the left of the
arrow, respectively.
*labelsep*:
Distance in inches between the arrow and the label. Default is
0.1
*labelcolor*:
defaults to default :class:`~matplotlib.text.Text` color.
*fontproperties*:
A dictionary with keyword arguments accepted by the
:class:`~matplotlib.font_manager.FontProperties` initializer:
*family*, *style*, *variant*, *size*, *weight*
Any additional keyword arguments are used to override vector
properties taken from *Q*.
The positioning of the key depends on *X*, *Y*, *coordinates*, and
*labelpos*. If *labelpos* is 'N' or 'S', *X*, *Y* give t
|
qma/pants
|
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_classpath_publisher.py
|
Python
|
apache-2.0
| 2,151
| 0.010693
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.core.tasks.task import Task
from pants.util.dirutil import safe_mkdir, safe_open, safe_rmtree
class RuntimeClasspathPublisher(Task):
"""Creates symlinks in pants_distdir to classpath entries per target."""
@classmethod
def prepare(cls, options, round_manager):
round_manager.require_data('runtime_classpath')
@property
def _output_folder(self):
return self.options_scope.replace('.', os.sep)
def _stable_output_folder(self, target):
"""
:type target: pants.build_graph.target.Target
"""
address = target.address
return os.path.join(
self.get_options().pants_distdir,
self._output_folder,
# target.address.spec is used in export goal to identify targets
address.spec.replace(':', os.sep) if address.spec_path else address.target_name,
)
def execute(self):
runtime_classpath = self.context.products.get_data('runtime_classpath')
for target in self.context.targets():
folder_for_symlinks = self._stable_output_folder(target)
safe_rmtree(folder_for_symlinks)
classpath_entries_for_target = runtime_classpath.get_internal_classpath_entries_for_targets(
[target])
if len(classpath_entries_for_target) > 0:
safe_mkdir(folder_for_symlinks)
classpath = []
for (index, (conf, entry)) in enumerate(classpath_entries_for_target):
classpath.append(entry.path)
file_name = os.path.basename(entry.path)
# Avoid name collisions
symlink_name = '{}-{}'.format(index, file_name)
os.symlink(entry.path, os.path.join(folder_for_symlinks, symlink_name))
with safe_open(os.path.join(folder_for_symlinks, 'classpath.txt'), 'w') as classpath_file:
classpath_file.write(os.pathsep.join(classpath))
classpath_file.write('\n')
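# Resulting layout sketch (paths assumed, derived from _stable_output_folder):
#
#   dist/<options-scope-as-dirs>/<spec-path>/<target-name>/
#       0-foo.jar -> <symlinked classpath entry>
#       1-bar.jar -> <symlinked classpath entry>
#       classpath.txt   # os.pathsep-joined real paths, newline-terminated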
|
50wu/gpdb
|
src/backend/gporca/scripts/cal_bitmap_test.py
|
Python
|
apache-2.0
| 70,529
| 0.003474
|
#!/usr/bin/env python3
# Optimizer calibration test for bitmap and brin indexes, also btree on AO tables
#
# This program runs a set of queries, varying several parameters:
#
# - Selectivity of predicates
# - Plan type (plan chosen by the optimizer, various forced plans)
# - Width of the selected columns
#
# The program then reports the result of explaining these queries and
# letting the optimizer choose plans vs. forcing plans. Execution time
# can be reported, computing a mean and standard deviation of several
# query executions.
#
# The printed results are useful to copy and paste into a Google Sheet
# (expand columns after pasting)
#
# Run this program with the -h or --help option to see argument syntax
#
# See comment "How to add a test" below in the program for how to
# extend this program.
import argparse
import time
import re
import math
import os
import sys
try:
from gppylib.db import dbconn
except ImportError as e:
sys.exit('ERROR: Cannot import modules. Please check that you have sourced greenplum_path.sh. Detail: ' + str(e))
# constants
# -----------------------------------------------------------------------------
_help = """
Run optimizer bitmap calibration tests. Optionally create the tables before running, and drop them afterwards.
This explains and runs a series of queries and reports the estimated and actual costs.
The results can be copied and pasted into a spreadsheet for further processing.
"""
TABLE_NAME_PATTERN = r"cal_txtest"
NDV_TABLE_NAME_PATTERN = r"cal_ndvtest"
BFV_TABLE_NAME_PATTERN = r"cal_bfvtest"
WIDE_TABLE_NAME_PATTERN = r"cal_widetest"
BRIN_TABLE_NAME_PATTERN = r"cal_brintest"
TABLE_SCAN = "table_scan"
TABLE_SCAN_PATTERN = r"Seq Scan"
TABLE_SCAN_PATTERN_V5 = r"Table Scan"
INDEX_SCAN = "index_scan"
INDEX_SCAN_PATTERN = r"> Index Scan"
INDEX_SCAN_PATTERN_V5 = r"> Index Scan"
INDEX_ONLY_SCAN = "indexonly_scan"
INDEX_ONLY_SCAN_PATTERN = r"> Index Only Scan"
INDEX_ONLY_SCAN_PATTERN_V5 = r"> Index Only Scan"
BITMAP_SCAN = "bitmap_scan"
BITMAP_SCAN_PATTERN = r"Bitmap Heap Scan"
BITMAP_SCAN_PATTERN_V5 = r"Bitmap Table Scan"
HASH_JOIN = "hash_join"
HASH_JOIN_PATTERN = r"Hash Join"
HASH_JOIN_PATTERN_V5 = r"Hash Join"
NL_JOIN = "nl_join"
NL_JOIN_PATTERN = r"Nested Loop"
NL_JOIN_PATTERN_V5 = r"Nested Loop"
FALLBACK_PLAN = "fallback"
FALLBACK_PATTERN = "Postgres query optimizer"
FALLBACK_PATTERN_V5 = "legacy query optimizer"
OPTIMIZER_DEFAULT_PLAN = "optimizer"
# global variables
# -----------------------------------------------------------------------------
# constants
# only consider optimizer errors beyond x * sigma (standard deviation) as significant
glob_sigma_diff = 3
glob_log_file = None
glob_exe_timeout = 40000
glob_gpdb_major_version = 7
glob_dim_table_rows = 10000
# global variables that may be modified
glob_verbose = False
glob_rowcount = -1
glob_appendonly = False
# SQL statements, DDL and DML
# -----------------------------------------------------------------------------
_drop_tables = """
DROP TABLE IF EXISTS cal_txtest, cal_temp_ids, cal_dim, cal_bfvtest, cal_bfv_dim, cal_ndvtest, cal_widetest;
"""
# create the table. Parameters:
# - WITH clause (optional), for append-only tables
_create_cal_table = """
CREATE TABLE cal_txtest(id int,
btreeunique int,
btree10 int,
btree100 int,
btree1000 int,
btree10000 int,
bitmap10 int,
bitmap100 int,
bitmap1000 int,
bitmap10000 int,
txt text)
%s
DISTRIBUTED BY (id);
"""
_create_bfv_table = """
CREATE TABLE cal_bfvtest (col1 integer,
wk_id int,
id integer)
%s
DISTRIBUTED BY (col1);
"""
_create_ndv_table = """
CREATE TABLE cal_ndvtest (id int, val int)
%s
DISTRIBUTED BY (id);
"""
_create_brin_table = """
CREATE TABLE cal_brintest(id int,
clust_10 int,
clust_100 int,
clust_1000 int,
clust_10000 int,
clust_uniq int,
rand_10 int,
rand_100 int,
rand_1000 int,
rand_10000 int,
rand_uniq int,
txt text)
%s
DISTRIBUTED BY (id);
"""
_with_appendonly = """
WITH (appendonly=true)
"""
_create_other_tables = ["""
CREATE TABLE cal_temp_ids(f_id int, f_rand double precision) DISTRIBUTED BY (f_id);
""",
"""
CREATE TABLE cal_dim(dim_id int,
dim_id2 int,
txt text)
DISTRIBUTED BY (dim_id);
""",
"""
CREATE TABLE cal_bfv_dim (id integer, col2 integer) DISTRIBUTED BY (id);
"""]
# insert into temp table. Parameters:
# - integer stop value (suggested value is 10,000,000)
_insert_into_temp = """
INSERT INTO cal_temp_ids SELECT x, random() FROM (SELECT * FROM generate_series(1,%d)) T(x);
"""
_insert_into_table = """
INSERT INTO cal_txtest
SELECT f_id,
f_id,
f_id%10 + 1,
f_id%100 + 1,
f_id%1000 + 1,
f_id%10000 + 1,
f_id%10 + 1,
f_id%100 + 1,
f_id%1000 + 1,
f_id%10000 + 1,
repeat('a', 960)
FROM cal_temp_ids
order by f_rand;
"""
# use a row_number() function to create column values that are strongly correlated
# to the physical order of the rows on disk
_insert_into_brin_table = """
INSERT INTO cal_brintest
SELECT ordered_id,
ceil(ordered_id*(10.0/{rows})),
ceil(ordered_id*(100.0/{rows})),
ceil(ordered_id*(1000.0/{rows})),
ceil(ordered_id*(10000.0/{rows})),
ordered_id,
f_id%10 + 1,
f_id%100 + 1,
f_id%1000 + 1,
f_id%10000 + 1,
f_id,
repeat('a', 956)
FROM (select row_number() over(order by f_rand) as ordered_id, f_id, f_rand from cal_temp_ids) src
order by f_rand;
"""
_insert_into_other_tables = """
INSERT INTO cal_dim SELECT x, x, repeat('d', 100) FROM (SELECT * FROM generate_series(%d,%d)) T(x);
"""
_create_index_arr = ["""
CREATE INDEX cal_txtest_i_bitmap_10 ON cal_txtest USING bitmap(bitmap10);
""",
"""
CREATE INDEX cal_txtest_i_bitmap_100 ON cal_txtest USING bitmap(bitmap100);
""",
"""
CREATE INDEX cal_txtest_i_bitmap_1000 ON cal_txtest USING bitmap(bitmap1000);
""",
"""
CREATE INDEX cal_txtest_i_bitmap_10000 ON cal_txtest USING bitmap(bitmap10000);
""",
]
_create_bfv_index_arr = ["""
CREATE INDEX idx_cal_bfvtest_bitmap ON cal_bfvtest USING bitmap(id);
""",
]
_create_ndv_index_arr = ["""
CREATE INDEX cal_ndvtest_bitmap ON cal_ndvtest USING bitmap(val);
""",
]
_create_btree_indexes_arr = ["""
CREATE INDEX cal_txtest_i_btree_unique ON cal_txtest USING btree(btreeunique);
""",
"""
CREATE INDEX cal_txtest_i_btree_10 ON cal_txtest USING btree(btree10);
""",
"""
CREATE INDEX cal_txtest_i_btree_100 ON cal_txtest USING btree(btree100);
""",
"""
CREATE INDEX cal_txtest_i_btree_1000 ON cal_txtest USING btree(btree1000);
""",
"""
CREATE INDEX cal_txtest_i_btree_10000 ON cal_txtest USING btree(btree10000);
""",
"""
CREATE INDEX idx_cal_bfvtest_btree ON cal_bfvtest USING btree(id);
""",
"""
CREATE INDEX cal_ndvtest_btree ON cal_ndvtest USING btree(val);
""",
]
_create_brin_index_arr = ["""
CREATE INDEX cal_brintest_brin ON cal_brintest USING brin(
id, clust_10, clust_100, clust_1000, clust_10000, clust_uniq, rand_10, rand_100, rand_1000, rand_10000, rand_uniq, txt)
WITH(pages_per_range=4);
""",
]
_analyze_table = """
ANALYZE cal_txtest;
ANALYZE cal_brintest;
"""
_allow_system_mods = """
SET allow_system_table_mods to on;
"""
_allow_system_mods_v5 = """
SET allow_system_tabl
|
cryptapus/electrum
|
electrum/gui/qt/console.py
|
Python
|
mit
| 11,672
| 0.002656
|
# source: http://stackoverflow.com/questions/2758159/how-to-embed-a-python-interpreter-in-a-pyqt-widget
import sys
import os
import re
import traceback
import platform
from PyQt5 import QtCore
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from electrum import util
from electrum.i18n import _
if platform.system() == 'Windows':
MONOSPACE_FONT = 'Lucida Console'
elif platform.system() == 'Darwin':
MONOSPACE_FONT = 'Monaco'
else:
MONOSPACE_FONT = 'monospace'
class OverlayLabel(QtWidgets.QLabel):
STYLESHEET = '''
QLabel, QLabel link {
color: rgb(0, 0, 0);
background-color: rgb(248, 240, 200);
border: 1px solid;
border-color: rgb(255, 114, 47);
padding: 2px;
}
'''
def __init__(self, text, parent):
super().__init__(text, parent)
self.setMinimumHeight(150)
self.setGeometry(0, 0, self.width(), self.height())
self.setStyleSheet(self.STYLESHEET)
self.setMargin(0)
parent.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.setWordWrap(True)
def mousePressEvent(self, e):
self.hide()
def on_resize(self, w):
padding = 2 # px, from the stylesheet above
self.setFixedWidth(w - padding)
class Console(QtWidgets.QPlainTextEdit):
def __init__(self, prompt='>> ', startup_message='', parent=None):
QtWidgets.QPlainTextEdit.__init__(self, parent)
self.prompt = prompt
self.history = []
self.namespace = {}
self.construct = []
self.setGeometry(50, 75, 600, 400)
self.setWordWrapMode(QtGui.QTextOption.WrapAnywhere)
self.setUndoRedoEnabled(False)
self.document().setDefaultFont(QtGui.QFont(MONOSPACE_FONT, 10, QtGui.QFont.Normal))
self.showMessage(startup_message)
self.updateNamespace({'run':self.run_script})
self.set_json(False)
warning_text = "<h1>{}</h1><br>{}<br><br>{}".format(
_("Warning!"),
_("Do not paste code here that you don't understand. Executing the wrong code could lead "
"to your coins being irreversibly lost."),
_("Click here to hide this message.")
)
self.messageOverlay = OverlayLabel(warning_text, self)
def resizeEvent(self, e):
super().resizeEvent(e)
vertical_scrollbar_width = self.verticalScrollBar().width() * self.verticalScrollBar().isVisible()
self.messageOverlay.on_resize(self.width() - vertical_scrollbar_width)
def set_json(self, b):
self.is_json = b
def run_script(self, filename):
with open(filename) as f:
script = f.read()
# eval is generally considered bad practice. use it wisely!
result = eval(script, self.namespace, self.namespace)
def updateNamespace(self, namespace):
self.namespace.update(namespace)
def showMessage(self, message):
self.appendPlainText(message)
self.newPrompt()
def clear(self):
self.setPlainText('')
self.newPrompt()
def newPrompt(self):
if self.construct:
prompt = '.' * len(self.prompt)
else:
prompt = self.prompt
self.completions_pos = self.textCursor().position()
self.completions_visible = False
self.appendPlainText(prompt)
self.moveCursor(QtGui.QTextCursor.End)
def getCommand(self):
doc = self.document()
curr_line = doc.findBlockByLineNumber(doc.lineCount() - 1).text()
curr_line = curr_line.rstrip()
curr_line = curr_line[len(self.prompt):]
return curr_line
def setCommand(self, command):
if self.getCommand() == command:
return
doc = self.document()
curr_line = doc.findBlockByLineNumber(doc.lineCount() - 1).text()
self.moveCursor(QtGui.QTextCursor.End)
for i in range(len(curr_line) - len(self.prompt)):
self.moveCursor(QtGui.QTextCursor.Left, QtGui.QTextCursor.KeepAnchor)
self.textCursor().removeSelectedText()
self.textCursor().insertText(command)
self.moveCursor(QtGui.QTextCursor.End)
def show_completions(self, completions):
if self.completions_visible:
self.hide_completions()
c = self.textCursor()
c.setPosition(self.completions_pos)
completions = map(lambda x: x.split('.')[-1], completions)
t = '\n' + ' '.join(completions)
if len(t) > 500:
t = t[:500] + '...'
c.insertText(t)
self.completions_end = c.position()
self.moveCursor(QtGui.QTextCursor.End)
self.completions_visible = True
def hide_completions(self):
if not self.completions_visible:
return
c = self.textCursor()
c.setPosition(self.completions_pos)
l = self.completions_end - self.completions_pos
for x in range(l): c.deleteChar()
self.moveCursor(QtGui.QTextCursor.End)
self.completions_visible = False
def getConstruct(self, command):
if self.construct:
prev_command = self.construct[-1]
self.construct.append(command)
if not prev_command and not command:
ret_val = '\n'.join(self.construct)
self.construct = []
return ret_val
else:
return ''
else:
if command and command[-1] == (':'):
self.construct.append(command)
return ''
else:
return command
def getHistory(self):
return self.history
    def setHistory(self, history):
self.history = history
def addToHistory(self, command):
if command[0:1] == ' ':
return
if command and (not self.history or self.history[-1] != command):
self.history.append(command)
self.history_index = len(self.history)
def getPrevHistoryEntry(self):
if self.history:
self.history_index = max(0, self.history_index - 1)
return self.history[self.history_index]
return ''
def getNextHistoryEntry(self):
if self.history:
hist_len = len(self.history)
self.history_index = min(hist_len, self.history_index + 1)
if self.history_index < hist_len:
return self.history[self.history_index]
return ''
def getCursorPosition(self):
c = self.textCursor()
return c.position() - c.block().position() - len(self.prompt)
def setCursorPosition(self, position):
self.moveCursor(QtGui.QTextCursor.StartOfLine)
for i in range(len(self.prompt) + position):
self.moveCursor(QtGui.QTextCursor.Right)
def register_command(self, c, func):
methods = { c: func}
self.updateNamespace(methods)
def runCommand(self):
command = self.getCommand()
self.addToHistory(command)
command = self.getConstruct(command)
if command:
tmp_stdout = sys.stdout
class stdoutProxy():
def __init__(self, write_func):
self.write_func = write_func
self.skip = False
def flush(self):
pass
def write(self, text):
if not self.skip:
stripped_text = text.rstrip('\n')
self.write_func(stripped_text)
QtCore.QCoreApplication.processEvents()
self.skip = not self.skip
if type(self.namespace.get(command)) == type(lambda:None):
self.appendPlainText("'{}' is a function. Type '{}()' to use it in the Python console."
.format(command, command))
self.newPrompt()
return
sys.stdout = stdoutProxy(self.appendPlainText)
try:
try:
# eval is generally considered bad practice. use it wisely!
                    result = eval(command, self.namespace, self.namespace)
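                    # --- Editor's sketch (not part of the original file) ---
                    # The dump truncates runCommand() here. A minimal embedding of the
                    # Console defined above, assuming only the definitions shown
                    # (names and values are illustrative):
                    #     app = QtWidgets.QApplication([])
                    #     console = Console(startup_message='Welcome')
                    #     console.updateNamespace({'answer': 42})
                    #     console.show()
                    #     app.exec_()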
|
PhonologicalCorpusTools/PolyglotDB
|
tests/test_io_ilg.py
|
Python
|
mit
| 1,972
| 0.000507
|
import pytest
import os
import sys
from polyglotdb.io import inspect_ilg
from polyglotdb.io.helper import guess_type
from polyglotdb.exceptions import DelimiterError, ILGWordMismatchError
from polyglotdb import CorpusContext
def test_inspect_ilg(ilg_test_dir):
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
assert (len(parser.annotation_tiers) == 2)
assert (parser.annotation_tiers[1].trans_delimiter == '.')
def test_inspect_ilg_directory(ilg_test_dir):
parser = inspect_ilg(ilg_test_dir)
assert (len(parser.annotation_tiers) == 2)
@pytest.mark.xfail
def test_export_ilg(graph_db, export_test_dir):
export_path = os.path.join(export_test_dir, 'export_ilg.txt')
with CorpusContext('untimed', **graph_db) as c:
export_discourse_ilg(c, 'test', export_path,
annotations=['label', 'transcription'], words_per_line=3)
expected_lines = ['cats are cute',
'k.ae.t.s aa.r k.uw.t',
'dogs are too',
'd.aa.g.z aa.r t.uw',
'i guess',
'ay g.eh.s']
with open(export_path, 'r') as f:
for i, line in enumerate(f):
assert (line.strip() == expected_lines[i])
def test_ilg_basic(graph_db, ilg_test_dir):
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
with CorpusContext('basic_ilg', **graph_db) as c:
c.reset()
c.load(parser, basic_path)
# assert(c.lexicon['a'].frequency == 2)
def test_ilg_mismatched(graph_db, ilg_test_dir):
mismatched_path = os.path.join(ilg_test_dir, 'mismatched.txt')
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
with CorpusContext('mismatch', **graph_db) as c:
c.reset()
with pytest.raises(ILGWordMismatchError):
c.load(parser, mismatched_path)
|
MartinThoma/pysec
|
pysec/utils.py
|
Python
|
mit
| 108
| 0.009259
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Utility functions that can be used in
|
multiple scripts."""
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/deed/guild_deed/shared_tatooine_guild_style_02_deed.py
|
Python
|
mit
| 473
| 0.046512
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/deed/guild_deed/shared_tatooine_guild_style_02_deed.iff"
    result.attribute_template_id = 2
result.stfName("deed","tatooine_guild_2_deed")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
shaded-enmity/docker-hica
|
injectors/libs.py
|
Python
|
mit
| 1,615
| 0.011765
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
import os, sys
from json import loads
from base.hica_base import *
library_path='/usr/lib64'
class LibraryInjector(HicaInjector):
def _get_libs(self):
return sorted(loads(self.labels.get_value('io.hica.libraries')))
def get_description(self):
return 'Bind mounts libraries {0} into the container'.format(', '.join(self._get_libs()))
def get_config_key(self):
return 'io.hica.libraries'
def get_injected_args(self):
return (('--libraries', HicaValueType.STRING, ''), ('--library-path', HicaValueType.PATH, '/usr/lib64'))
def inject_config(self, config, from_args):
"""
:param config:
:type config: list
    :param from_args:
:type from_args: dict
"""
load_libs = self._get_libs()
all_libs = {}
found_libs = []
for root, dirs, files in os.walk(library_path):
for f in files:
if not f.endswith('.so'):
          continue
full_path = os.path.join(root, f)
if '.' in f:
name, ext = f.split('.', 1)
else:
name = f
if name in all_libs:
all_libs[name].append(full_path)
else:
all_libs[name] = [full_path]
for lib in load_libs:
if 'lib' + lib in all_libs:
p = list(sorted(all_libs['lib' + lib], key=lambda x: len(x))).pop()
v = '--volume={0}:{1}'.format(os.path.realpath(p), p)
config.append(v)
else:
print('*** Unknown lib: {}'.format(lib))
sys.exit(1)
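  # --- Editor's sketch (not part of the original file; paths are illustrative) ---
  # The candidate-selection rule above keeps the longest matching path:
  #     all_libs = {'libz': ['/usr/lib64/libz.so', '/usr/lib64/tls/libz.so']}
  #     list(sorted(all_libs['libz'], key=lambda x: len(x))).pop()
  #     -> '/usr/lib64/tls/libz.so'
  # and each hit is emitted as a '--volume=<realpath>:<path>' entry in config.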
|
fstagni/DIRAC
|
FrameworkSystem/Client/SystemAdministratorClient.py
|
Python
|
gpl-3.0
| 683
| 0.005857
|
""" The SystemAdministratorClient is a class representing the client of the DIRAC
SystemAdministrator service. It has also methods to update the Configuration
Service with the DIRAC components options
"""
__RCSID__ = "$Id$"
from DIRAC.Core.Base.Client import Client, createClient
SYSADMIN_PORT = 9162
@createClient('Framework/SystemAdministrator')
class SystemAdministratorClient(Client):
  def __init__(self, host, port=None, **kwargs):
""" Constructor function. Takes a mandatory host parameter
"""
Client.__init__(self, **kwargs)
if not port:
port = SYSADMIN_PORT
self.setServer('dips://%s:%s/Framework/SystemAdministrator' % (host, port))
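    # --- Editor's usage sketch (not part of the original file; host is hypothetical) ---
    #     client = SystemAdministratorClient('host.example.org')             # port defaults to SYSADMIN_PORT (9162)
    #     client = SystemAdministratorClient('host.example.org', port=9163)  # explicit port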
|
matrix-org/synapse
|
synapse/rest/client/versions.py
|
Python
|
apache-2.0
| 4,936
| 0.003241
|
# Copyright 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
from typing import TYPE_CHECKING, Tuple
from twisted.web.server import Request
from synapse.api.constants import RoomCreationPreset
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.types import JsonDict
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class VersionsRestServlet(RestServlet):
PATTERNS = [re.compile("^/_matrix/client/versions$")]
def __init__(self, hs: "HomeServer"):
super().__init__()
self.config = hs.config
# Calculate these once since they shouldn't change after start-up.
self.e2ee_forced_public = (
RoomCreationPreset.PUBLIC_CHAT
in self.config.room.encryption_enabled_by_default_for_room_presets
)
self.e2ee_forced_private = (
RoomCreationPreset.PRIVATE_CHAT
in self.config.room.encryption_enabled_by_default_for_room_presets
)
self.e2ee_forced_trusted_private = (
RoomCreationPreset.TRUSTED_PRIVATE_CHAT
in self.config.room.encryption_enabled_by_default_for_room_presets
)
def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
return (
200,
{
"versions": [
# XXX: at some point we need to decide whether we need to include
# the previous version numbers, given we've defined r0.3.0 to be
# backwards compatible with r0.2.0. But need to check how
# conscientious we've been in compatibility, and decide whether the
# middle number is the major revision when at 0.X.Y (as opposed to
# X.Y.Z). And we need to decide whether it's fair to make clients
# parse the version string to figure out what's going on.
"r0.0.1",
"r0.1.0",
"r0.2.0",
"r0.3.0",
"r0.4.0",
"r0.5.0",
"r0.6.0",
"r0.6.1",
"v1.1",
"v1.2",
],
# as per MSC1497:
"unstable_features": {
# Implements support for label-based filtering as described in
# MSC2326.
"org.matrix.label_based_filtering": True,
                    # Implements support for cross signing as described in MSC1756
"org.matrix.e2e_cross_signing": True,
# Implements additional endpoints as described in MSC2432
"org.matrix.msc2432": True,
# Implements additional endpoints as described in MSC2666
"uk.half-shot.msc2666": True,
# Whether new rooms will be set to encrypted or not (based on presets).
"io.element.e2ee_forced.public": self.e2ee_forced_public,
"io.element.e2ee_forced.private": self.e2ee_forced_private,
"io.element.e2ee_forced.trusted_private": self.e2ee_forced_trusted_private,
# Supports the busy presence state described in MSC3026.
"org.matrix.msc3026.busy_presence": self.config.experimental.msc3026_enabled,
# Supports receiving hidden read receipts as per MSC2285
"org.matrix.msc2285": self.config.experimental.msc2285_enabled,
# Adds support for importing historical messages as per MSC2716
"org.matrix.msc2716": self.config.experimental.msc2716_enabled,
# Adds support for jump to date endpoints (/timestamp_to_event) as per MSC3030
"org.matrix.msc3030": self.config.experimental.msc3030_enabled,
# Adds support for thread relations, per MSC3440.
"org.matrix.msc3440": self.config.experimental.msc3440_enabled,
},
},
)
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
VersionsRestServlet(hs).register(http_server)
|
gppezzi/easybuild-framework
|
test/framework/suite.py
|
Python
|
gpl-2.0
| 4,872
| 0.001437
|
#!/usr/bin/python
# #
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
This script is a collection of all the testcases.
Usage: "python -m test.framework.suite" or "python test/framework/suite.py"
@author: Toon Willems (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
import glob
import os
import sys
import tempfile
import unittest
# initialize EasyBuild logging, so we disable it
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.options import set_tmpdir
import test.framework.asyncprocess as a
import test.framework.build_log as bl
import test.framework.config as c
import test.framework.containers as ct
import test.framework.easyblock as b
import test.framework.easyconfig as e
import test.framework.easyconfigparser as ep
import test.framework.easyconfigformat as ef
import test.framework.ebconfigobj as ebco
import test.framework.easyconfigversion as ev
import test.framework.environment as env
import test.framework.docs as d
import test.framework.filetools as f
import test.framework.format_convert as f_c
import test.framework.general as gen
import test.framework.github as g
import test.framework.hooks as h
import test.framework.include as i
import test.framework.lib as lib
import test.framework.license as lic
import test.framework.module_generator as mg
import test.framework.modules as m
import test.framework.modulestool as mt
import test.framework.options as o
import test.framework.parallelbuild as p
import test.framework.package as pkg
import test.framework.repository as r
import test.framework.robot as robot
import test.framework.run as run
import test.framework.style as st
import test.framework.systemtools as s
import test.framework.toolchain as tc
import test.framework.toolchainvariables as tcv
import test.framework.toy_build as t
import test.framework.type_checking as et
import test.framework.tweak as tw
import test.framework.variables as v
import test.framework.yeb as y
# set plain text key ring to be used,
# so a GitHub token stored in it can be obtained without having to provide a password
try:
# with recent versions of keyring, PlaintextKeyring comes from keyrings.alt
import keyring
from keyrings.alt.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
try:
# with old versions of keyring, PlaintextKeyring comes from keyring.backends
import keyring
from keyring.backends.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
pass
# disable all logging to significantly speed up tests
fancylogger.disableDefaultHandlers()
fancylogger.setLogLevelError()
# make sure temporary files can be created/used
try:
    set_tmpdir(raise_error=True)
except EasyBuildError as err:
sys.stderr.write("No execution rights on temporary files, specify another location via $TMPDIR: %s\n" % err)
sys.exit(1)
# initialize logger for all the unit tests
fd, log_fn = tempfile.mkstemp(prefix='easybuild-tests-', suffix='.log')
os.close(fd)
os.remove(log_fn)
fancylogger.logToFile(log_fn)
log = fancylogger.getLogger()
# call suite() for each module and then run them all
# note: make sure the options unit tests run first, to avoid running some of them with a readily initialized config
tests = [gen, bl, o, r, ef, ev, ebco, ep, e, mg, m, mt, f, run, a, robot, b, v, g, tcv, tc, t, c, s, lic, f_c,
tw, p, i, pkg, d, env, et, y, st, h, ct, lib]
SUITE = unittest.TestSuite([x.suite() for x in tests])
res = unittest.TextTestRunner().run(SUITE)
fancylogger.logToFile(log_fn, enable=False)
if not res.wasSuccessful():
sys.stderr.write("ERROR: Not all tests were successful.\n")
print("Log available at %s" % log_fn)
sys.exit(2)
else:
for fn in glob.glob('%s*' % log_fn):
os.remove(fn)
|
yuuki0xff/nvpy
|
setup.py
|
Python
|
bsd-3-clause
| 1,989
| 0.001508
|
#!/usr/bin/env python3
import os
from setuptools import setup
import nvpy
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="nvpy",
version=nvpy.VERSION,
author="Charl P. Botha",
author_email="cpbotha@vxlabs.com",
description="A cross-platform simplenote-syncing note-taking app inspired by Notational Velocity.",
license="BSD",
keywords="simplenote note-taking tkinter nvalt markdown",
url="https://github.com/cpbotha/nvpy",
packages=['nvpy'],
long_description=read('README.rst'),
install_requires=[
# These are in reality not hard requirements of nvpy. If these packages are not installed,
# the Markdown/reStructuredText rendering feature will not work. But basic functions should work.
'Markdown',
'docutils',
# This is hard requirements of nvpy.
'simplenote>=2.1.4',
],
extras_require={
# development and test requirements.
        'dev': ['mock', 'yapf', 'pdoc3', 'nose', 'nose-timer', 'mypy'],
},
entry_points={'gui_scripts': ['nvpy = nvpy.nvpy:main']},
# use MANIFEST.in file
# because package_data is ignored during sdist
include_package_data=True,
classifiers=[
# See https://pypi.org/classifiers/
"Development Status :: 5 - Production/Stable",
"Environment :: X11 Applications",
"Environment :: MacOS X",
"Environment :: Win32 (MS Windows)",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
oVirt/ovirt-hosted-engine-ha
|
ovirt_hosted_engine_ha/broker/submonitors/mem_free.py
|
Python
|
lgpl-2.1
| 1,774
| 0
|
#
# ovirt-hosted-engine-ha -- ovirt hosted engine high availability
# Copyright (C) 2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import logging
from ovirt_hosted_engine_ha.broker import submonitor_base
from ovirt_hosted_engine_ha.lib import log_filter
from ovirt_hosted_engine_ha.lib import util as util
from vdsm.client import ServerError
def register():
return "mem-free"
class Submonitor(submonitor_base.SubmonitorBase):
def setup(self, options):
self._log = logging.getLogger("%s.MemFree" % __name__)
self._log.addFilter(log_filter.get_intermittent_filter())
def action(self, options):
cli = util.connect_vdsm_json_rpc(
logger=self._log
)
try:
stats = cli.Host.getStats()
except ServerError as e:
self._log.error(e)
self.update_result(None)
return
mem_free = str(stats['memFree'])
self._log.info("memFree: %s", mem_free,
extra=log_filter.lf_args('status', 60))
self.update_result(mem_free)
|
googleapis/python-aiplatform
|
samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_metadata_service_query_context_lineage_subgraph_async.py
|
Python
|
apache-2.0
| 1,624
| 0.001847
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for QueryContextLineageSubgraph
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1beta1_MetadataService_QueryContextLineageSubgraph_async]
from google.cloud import aiplatform_v1beta1
async def sample_query_context_lineage_subgraph():
# Create a client
client = aiplatform_v1beta1.MetadataServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.QueryContextLineageSubgraphRequest(
context="context_value",
)
# Make the request
response = await client.query_context_lineage_subgraph(request=request)
# Handle the response
print(response)
# [END aiplatform_generated_aiplatform_v1beta1_MetadataService_QueryContextLineageSubgraph_async]
|
zeroincombenze/tools
|
wok_code/scripts/main.py
|
Python
|
agpl-3.0
| 6,236
| 0.00016
|
# -*- coding: utf-8 -*-
# template 18
"""
Various tools at your fingertips.
The available tools are:
* cvt_csv_2_rst.py: convert csv file into rst file
* cvt_csv_2_xml.py: convert csv file into xml file
* cvt_script: parse bash script and convert to meet company standard
* gen_readme.py: generate documentation files, mainly README.rst
* odoo_dependency.py: show odoo depencies and/or Odoo module tree
* odoo_translation.py: manage Odoo translation
* pep8: parse source .py file to meet pep8 and convert across Odoo versions
* please: developer shell
* wget_odoo_repositories.py: get repository names from github.com
"""
import os
import sys
import pkg_resources
import gzip
import shutil
__version__ = '1.0.7.1'
def fake_setup(**kwargs):
globals()['setup_args'] = kwargs
def read_setup():
setup_info = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'setup.info'))
if not os.path.isfile(setup_info):
setup_info = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'setup.py'))
setup_args = {}
if os.path.isfile(setup_info):
with open(setup_info, 'r') as fd:
exec(fd.read().replace('setup(', 'fake_setup('))
setup_args = globals()['setup_args']
else:
print('Not internal configuration file found!')
setup_args['setup'] = setup_info
try:
pkg = pkg_resources.get_distribution(__package__.split('.')[0])
setup_args['name'] = pkg.key
setup_args['version'] = pkg.version
except BaseException:
pass
return setup_args
def get_pypi_paths():
local_venv = '/devel/venv/'
pkgpath = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
bin_path = lib_path = ''
path = pkgpath
while not bin_path and path != '/' and path != os.environ['HOME']:
path = os.path.dirname(path)
if os.path.isdir(path) and os.path.basename(path) == 'lib':
bin_path = os.path.join(os.path.dirname(path), 'bin')
lib_path = path
if not bin_path and local_venv:
for path in sys.path:
if local_venv in path:
bin_path = os.path.join(
path[:path.find(local_venv)],
*[x for x in local_venv.split('/') if x][:-1])
break
return pkgpath, bin_path, lib_path
def copy_pkg_data(setup_args, verbose):
if setup_args.get('package_data'):
pkgpath, bin_path, lib_path = get_pypi_paths()
if bin_path:
# TODO> compatibility mode
bin2_path = os.path.join(os.environ['HOME'], 'devel')
if not os.path.isdir(bin2_path):
bin2_path = ''
man_path = os.path.join(bin_path, 'man', 'man8')
if not os.path.isdir(man_path):
man_path = ''
for pkg in setup_args['package_data'].keys():
for fn in setup_args['package_data'][pkg]:
base = os.path.basename(fn)
if base in ('setup.info', '*'):
continue
full_fn = os.path.abspath(os.path.join(pkgpath, fn))
if base.endswith('.man') and man_path:
with open(full_fn, 'r') as fd:
help_text = fd.read()
tgt_fn = os.path.join(man_path, '%s.8.gz' % base[:-4])
with gzip.open(tgt_fn, 'w') as fd:
if sys.version_info[0] == 3:
fd.write(help_text.encode('utf-8'))
else:
fd.write(help_text)
continue
if lib_path:
tgt_fn = os.path.join(lib_path, base)
if verbose:
print('$ cp %s %s' % (full_fn, tgt_fn))
shutil.copy(full_fn, tgt_fn)
# TODO> compatibility mode
tgt_fn = os.path.join(bin_path, base)
if os.path.isfile(tgt_fn):
os.unlink(tgt_fn)
if not os.path.exists(tgt_fn):
if verbose:
print('$ ln -s %s %s' % (full_fn, tgt_fn))
os.symlink(full_fn, tgt_fn)
if bin2_path:
tgt_fn = os.path.join(bin2_path, base)
if os.path.isfile(tgt_fn):
os.unlink(tgt_fn)
# if not os.path.exists(tgt_fn):
# if verbose:
# print('$ ln -s %s %s' % (full_fn, tgt_fn))
# os.symlink(full_fn, tgt_fn)
# TODO> compatibility mode to remove early
if lib_path and bin2_path:
for base in ('z0librc', 'odoorc', 'travisrc'):
full_fn = os.path.join(bin2_path, base)
tgt_fn = os.path.join(bin_path, base)
if os.path.exists(full_fn) and not os.path.exists(tgt_fn):
if verbose:
print('$ cp %s %s' % (full_fn, tgt_fn))
shutil.copy(full_fn, tgt_fn)
def main(cli_args=None):
if not cli_args:
cli_args = sys.argv[1:]
action = '-H'
verbose = False
for arg in cli_args:
if arg in ('-h', '-H', '--help', '-V', '--version', '--copy-pkg-data'):
action = arg
elif arg == '-v':
verbose = True
    setup_args = read_setup()
if action == '-h':
print('%s [-h][-H][--help][-V][--version][-C][--copy-pkg-data]' %
setup_args['name'])
elif action in ('-V', '--version'):
if setup_args['version'] == __version__:
print(setup_args['version'])
else:
print('Version mismatch %s/%s' % (setup_args['version'],
__version__))
elif action in ('-H', '--help'):
for text in __doc__.split('\n'):
print(text)
elif action in ('-C', '--copy-pkg-data'):
copy_pkg_data(setup_args, verbose)
return 0
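# --- Editor's sketch (not part of the original file) ---
# Typical calls, given the flag handling in main() above:
#     main(['-h'])                     # one-line usage string
#     main(['-V'])                     # print version, or report a mismatch
#     main(['-v', '--copy-pkg-data'])  # verbose copy of package data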
|
bzamecnik/sms-tools
|
smst/ui/transformations/sineTransformations_GUI_frame.py
|
Python
|
agpl-3.0
| 9,380
| 0.004158
|
# GUI frame for the sineTransformations_function.py
import os
from Tkinter import *
import tkFileDialog
import tkMessageBox
import numpy as np
from smst.utils import audio
from . import sineTransformations_function as sT
from smst.utils.files import strip_file
class SineTransformationsFrame:
def __init__(self, parent):
self.parent = parent
self.initUI()
def initUI(self):
choose_label = "inputFile:"
Label(self.parent, text=choose_label).grid(row=0, column=0, sticky=W, padx=5, pady=(10, 2))
# TEXTBOX TO PRINT PATH OF THE SOUND FILE
self.filelocation = Entry(self.parent)
self.filelocation.focus_set()
self.filelocation["width"] = 32
self.filelocation.grid(row=0, column=0, sticky=W, padx=(70, 5), pady=(10, 2))
self.filelocation.delete(0, END)
self.filelocation.insert(0, 'sounds/mridangam.wav')
# BUTTON TO BROWSE SOUND FILE
open_file = Button(self.parent, text="...", command=self.browse_file) # see: def browse_file(self)
open_file.grid(row=0, column=0, sticky=W, padx=(340, 6), pady=(10, 2)) # put it beside the filelocation textbox
# BUTTON TO PREVIEW SOUND FILE
preview = Button(self.parent, text=">", command=lambda: audio.play_wav(self.filelocation.get()), bg="gray30",
fg="white")
preview.grid(row=0, column=0, sticky=W, padx=(385, 6), pady=(10, 2))
## SINE TRANSFORMATIONS ANALYSIS
# ANALYSIS WINDOW TYPE
wtype_label = "window:"
Label(self.parent, text=wtype_label).grid(row=1, column=0, sticky=W, padx=5, pady=(10, 2))
self.w_type = StringVar()
self.w_type.set("hamming") # initial value
window_option = OptionMenu(self.parent, self.w_type, "rectangular", "hanning", "hamming", "blackman",
"blackmanharris")
window_option.grid(row=1, column=0, sticky=W, padx=(65, 5), pady=(10, 2))
# WINDOW SIZE
M_label = "M:"
Label(self.parent, text=M_label).grid(row=1, column=0, sticky=W, padx=(180, 5), pady=(10, 2))
self.M = Entry(self.parent, justify=CENTER)
self.M["width"] = 5
self.M.grid(row=1, column=0, sticky=W, padx=(200, 5), pady=(10, 2))
self.M.delete(0, END)
self.M.insert(0, "801")
# FFT SIZE
N_label = "N:"
Label(self.parent, text=N_label).grid(row=1, column=0, sticky=W, padx=(255, 5), pady=(10, 2))
self.N = Entry(self.parent, justify=CENTER)
self.N["width"] = 5
self.N.grid(row=1, column=0, sticky=W, padx=(275, 5), pady=(10, 2))
self.N.delete(0, END)
self.N.insert(0, "2048")
# THRESHOLD MAGNITUDE
t_label = "t:"
Label(self.parent, text=t_label).grid(row=1, column=0, sticky=W, padx=(330, 5), pady=(10, 2))
self.t = Entry(self.parent, justify=CENTER)
self.t["width"] = 5
self.t.grid(row=1, column=0, sticky=W, padx=(348, 5), pady=(10, 2))
self.t.delete(0, END)
self.t.insert(0, "-90")
# MIN DURATION SINUSOIDAL TRACKS
minSineDur_label = "minSineDur:"
Label(self.parent, text=minSineDur_label).grid(row=2, column=0, sticky=W, padx=(5, 5), pady=(10, 2))
self.minSineDur = Entry(self.parent, justify=CENTER)
self.minSineDur["width"] = 5
self.minSineDur.grid(row=2, column=0, sticky=W, padx=(87, 5), pady=(10, 2))
self.minSineDur.delete(0, END)
self.minSineDur.insert(0, "0.01")
# MAX NUMBER OF SINES
maxnSines_label = "maxnSines:"
Label(self.parent, text=maxnSines_label).grid(row=2, column=0, sticky=W, padx=(145, 5), pady=(10, 2))
self.maxnSines = Entry(self.parent, justify=CENTER)
self.maxnSines["width"] = 5
self.maxnSines.grid(row=2, column=0, sticky=W, padx=(220, 5), pady=(10, 2))
self.maxnSines.delete(0, END)
self.maxnSines.insert(0, "150")
# FREQUENCY DEVIATION ALLOWED
freqDevOffset_label = "freqDevOffset:"
Label(self.parent, text=freqDevOffset_label).grid(row=2, column=0, sticky=W, padx=(280, 5), pady=(10, 2))
self.freqDevOffset = Entry(self.parent, justify=CENTER)
self.freqDevOffset["width"] = 5
self.freqDevOffset.grid(row=2, column=0, sticky=W, padx=(372, 5), pady=(10, 2))
self.freqDevOffset.delete(0, END)
self.freqDevOffset.insert(0, "20")
# SLOPE OF THE FREQUENCY DEVIATION
freqDevSlope_label = "freqDevSlope:"
Label(self.parent, text=freqDevSlope_label).grid(row=3, column=0, sticky=W, padx=(5, 5), pady=(10, 2))
self.freqDevSlope = Entry(self.parent, justify=CENTER)
self.freqDevSlope["width"] = 5
self.freqDevSlope.grid(row=3, column=0, sticky=W, padx=(98, 5), pady=(10, 2))
self.freqDevSlope.delete(0, END)
self.freqDevSlope.insert(0, "0.02")
# BUTTON TO DO THE ANALYSIS OF THE SOUND
self.compute = Button(self.parent, text="Analysis/Synthesis", command=self.analysis, bg="dark red", fg="white")
self.compute.grid(row=4, column=0, padx=5, pady=(10, 5), sticky=W)
# BUTTON TO PLAY ANALYSIS/SYNTHESIS OUTPUT
self.output = Button(self.parent, text=">", command=lambda: audio.play_wav(
'output_sounds/' + strip_file(self.filelocation.get()) + '_sineModel.wav'), bg="gray30",
fg="white")
self.output.grid(row=4, column=0, padx=(145, 5), pady=(10, 5), sticky=W)
###
# SEPARATION LINE
Frame(self.parent, height=1, width=50, bg="black").grid(row=5, pady=5, sticky=W + E)
###
# FREQUENCY SCALING FACTORS
freqScaling_label = "Frequency scaling factors (time, value pairs):"
Label(self.parent, text=freqScaling_label).grid(row=6, column=0, sticky=W, padx=5, pady=(5, 2))
self.freqScaling = Entry(self.parent, justify=CENTER)
self.freqScaling["width"] = 35
self.freqScaling.grid(row=7, column=0, sticky=W + E, padx=5, pady=(0, 2))
self.freqScaling.delete(0, END)
self.freqScaling.insert(0, "[0, 2.0, 1, .3]")
# TIME SCALING FACTORS
timeScaling_label = "Time scaling factors (in time, value pairs):"
Label(self.parent, text=timeScaling_label).grid(row=8, column=0, sticky=W, padx=5, pady=(5, 2))
self.timeScaling = Entry(self.parent, justify=CENTER)
self.timeScaling["width"] = 35
        self.timeScaling.grid(row=9, column=0, sticky=W + E, padx=5, pady=(0, 2))
        self.timeScaling.delete(0, END)
self.timeScaling.insert(0, "[0, .0, .671, .671, 1.978, 1.978+1.0]")
# BUTTON TO DO THE SYNTHESIS
self.compute = Button(self.parent, text="Apply Transformation", command=self.transformation_synthesis,
bg="dark green", fg="white")
self.compute.grid(row=13, column=0, padx=5, pady=(10, 15), sticky=W)
# BUTTON TO PLAY TRANSFORMATION SYNTHESIS OUTPUT
self.transf_output = Button(self.parent, text=">", command=lambda: audio.play_wav(
'output_sounds/' + strip_file(self.filelocation.get()) + '_sineModelTransformation.wav'),
bg="gray30", fg="white")
self.transf_output.grid(row=13, column=0, padx=(165, 5), pady=(10, 15), sticky=W)
# define options for opening file
self.file_opt = options = {}
options['defaultextension'] = '.wav'
options['filetypes'] = [('All files', '.*'), ('Wav files', '.wav')]
options['initialdir'] = 'sounds/'
options['title'] = 'Open a mono audio file .wav with sample frequency 44100 Hz'
def browse_file(self):
self.filename = tkFileDialog.askopenfilename(**self.file_opt)
# set the text of the self.filelocation
self.filelocation.delete(0, END)
self.filelocation.insert(0, self.filename)
def analysis(self):
try:
inputFile = self.filelocation.get()
window = self.w_type.get()
M = int(self.M.get())
N = int(self.N.get())
t = int(self.t.get())
m
|
knuu/competitive-programming
|
atcoder/corp/codethanksfes2017_e.py
|
Python
|
mit
| 389
| 0
|
N = int(input())
ans = [0] * N
for i in range(0, N, 5):
q = [0] * N
for j in range(i, min(N, i + 5)):
q[j] = 10 ** (j - i)
    print('? {}'.format(' '.join(map(str, q))), flush=True)
    S = str(int(input().strip()) - sum(q) * 7)[::-1]
for j in range(i, min(N, i + 5)):
ans[j] = (int(S[j - i]) % 2) ^ 1
print('! {}'.format(' '.join(map(str, ans))), flush=True)
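# --- Editor's worked decode of one block (reply value is hypothetical; not part of the original file) ---
# With a full block, q assigns weights 1, 10, 100, 1000, 10000, so sum(q) = 11111.
# Suppose the judge replies 97867: 97867 - 11111 * 7 = 20090, reversed -> '09002',
# giving per-position digits 0, 9, 0, 0, 2 and ans[j] = (digit % 2) ^ 1 = 1, 0, 1, 1, 1.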
|
krukas/Mage2Gen
|
mage2gen/utils.py
|
Python
|
gpl-3.0
| 1,144
| 0.012238
|
# A Magento 2 module generator library
# Copyright (C) 2016 Maikel Martens
#
# This file is part of Mage2Gen.
#
# Mage2Gen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import string
class DefaultFormatter(string.Formatter):
def __init__(self, default=''):
self.default = default
def get_field(self, field_name, args, kwargs):
try:
return super().get_field(field_name, args, kwargs)
except (KeyError, AttributeError):
            return self.default, field_name  # string.Formatter expects an (obj, used_key) pair
def upperfirst(word):
    return word[0].upper() + word[1:]
def lowerfirst(word):
return word[0].lower() + word[1:]
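# --- Editor's usage sketch (not part of the original file) ---
# With the (obj, used_key) return fix above, missing fields fall back to the
# default instead of raising KeyError:
#     fmt = DefaultFormatter(default='?')
#     fmt.format('{name} ({missing})', name='Mage2Gen')  # -> 'Mage2Gen (?)'
#     upperfirst('module')                               # -> 'Module'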
|
openstack/cinder
|
cinder/zonemanager/drivers/cisco/cisco_fc_zone_client_cli.py
|
Python
|
apache-2.0
| 20,088
| 0
|
# (c) Copyright 2014 Cisco Systems Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Script to push the zone configuration to Cisco SAN switches.
"""
import random
import re
from eventlet import greenthread
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
import six
from cinder import exception
from cinder.i18n import _
from cinder import ssh_utils
from cinder import utils
from cinder.zonemanager.drivers.cisco import exception as c_exception
import cinder.zonemanager.drivers.cisco.fc_zone_constants as ZoneConstant
LOG = logging.getLogger(__name__)
class CiscoFCZoneClientCLI(object):
"""Cisco FC zone client cli implementation.
OpenStack Fibre Channel zone client cli connector
to manage FC zoning in Cisco SAN fabrics.
Version history:
1.0 - Initial Cisco FC zone client cli
"""
switch_ip = None
switch_port = '22'
switch_user = 'admin'
switch_pwd = 'none'
def __init__(self, ipaddress, username, password, port, vsan):
"""initializing the client."""
self.switch_ip = ipaddress
self.switch_port = port
self.switch_user = username
self.switch_pwd = password
self.fabric_vsan = vsan
self.sshpool = None
def get_active_zone_set(self):
"""Return the active zone configuration.
Return active zoneset from fabric. When none of the configurations
are active then it will return empty map.
:returns: Map -- active zone set map in the following format
.. code-block:: python
{
'zones':
{'openstack50060b0000c26604201900051ee8e329':
['50060b0000c26604', '201900051ee8e329']
},
'active_zone_config': 'OpenStack_Cfg'
}
"""
zone_set = {}
zone = {}
zone_member = None
zone_name = None
switch_data = None
zone_set_name = None
try:
switch_data = self._get_switch_info(
[ZoneConstant.GET_ACTIVE_ZONE_CFG, self.fabric_vsan,
' | no-more'])
except c_exception.CiscoZoningCliException:
with excutils.save_and_reraise_exception():
LOG.error("Failed getting active zone set "
"from fabric %s", self.switch_ip)
try:
for line in switch_data:
# Split on non-word characters,
line_split = re.split(r'[\s\[\]]+', line)
if ZoneConstant.CFG_ZONESET in line_split:
# zoneset name [name] vsan [vsan]
zone_set_name = \
line_split[line_split.index(ZoneConstant.CFG_ZONESET)
+ 2]
continue
if ZoneConstant.CFG_ZONE in line_split:
# zone name [name] vsan [vsan]
zone_name = \
line_split[line_split.index(ZoneConstant.CFG_ZONE) + 2]
zone[zone_name] = list()
continue
if ZoneConstant.CFG_ZONE_MEMBER in line_split:
# Examples:
# pwwn c0:50:76:05:15:9f:00:12
# * fcid 0x1e01c0 [pwwn 50:05:07:68:02:20:48:04] [V7K_N1P2]
zone_member = \
line_split[
line_split.index(ZoneConstant.CFG_ZONE_MEMBER) + 1]
zone_member_list = zone.get(zone_name)
zone_member_list.append(zone_member)
zone_set[ZoneConstant.CFG_ZONES] = zone
zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG] = zone_set_name
except Exception as ex:
# In case of parsing error here, it should be malformed cli output.
msg = _("Malformed zone configuration: (switch=%(switch)s "
"zone_config=%(zone_config)s)."
) % {'switch': self.switch_ip,
'zone_config': switch_data}
LOG.error(msg)
exc_msg = _("Exception: %s") % six.text_type(ex)
LOG.error(exc_msg)
raise exception.FCZoneDriverException(reason=msg)
return zone_set
def add_zones(self, zones, activate, fabric_vsan, active_zone_set,
zone_status):
"""Add zone configuration.
This method will add the zone configuration passed by user.
:param zones: Zone names mapped to members and VSANs
Zone members are colon separated but case-insensitive
.. code-block:: python
{ zonename1:[zonememeber1,zonemember2,...],
zonename2:[zonemember1, zonemember2,...]...}
e.g:
{
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29']
}
:param activate: True will activate the zone config.
:param fabric_vsan:
:param active_zone_set: Active zone set dict retrieved from
get_active_zone_set method
:param zone_status: Status of the zone
:raises CiscoZoningCliException:
"""
LOG.debug("Add Zones - Zones passed: %s", zones)
LOG.debug("Active zone set: %s", active_zone_set)
zone_list = active_zone_set[ZoneConstant.CFG_ZONES]
LOG.debug("zone list: %s", zone_list)
LOG.debug("zone status: %s", zone_status)
cfg_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG]
zone_cmds = [['conf'],
['zoneset', 'name', cfg_name, 'vsan', fabric_vsan]]
for zone in zones.keys():
zone_cmds.append(['zone', 'name', zone])
for member in zones[zone]:
zone_cmds.append(['member', 'pwwn', member])
zone_cmds.append(['end'])
try:
LOG.debug("Add zones: Config cmd to run: %s", zone_cmds)
self._ssh_execute(zone_cmds, True, 1)
if activate:
self.activate_zoneset(cfg_name, fabric_vsan, zone_status)
self._cfg_save()
except Exception as e:
msg = _("Creating and activating zone set failed: "
"(Zone set=%(zoneset)s error=%(err)s)."
) % {'zoneset': cfg_name, 'err': six.text_type(e)}
LOG.error(msg)
raise c_exception.CiscoZoningCliException(reason=msg)
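        # --- Editor's illustration (hypothetical values; not part of the original file) ---
        # For zones = {'z1': ['50:06:0b:00:00:c2:66:04']}, cfg_name = 'OpenStack_Cfg'
        # and fabric_vsan = 1, the zone_cmds list built above is:
        #     [['conf'],
        #      ['zoneset', 'name', 'OpenStack_Cfg', 'vsan', 1],
        #      ['zone', 'name', 'z1'],
        #      ['member', 'pwwn', '50:06:0b:00:00:c2:66:04'],
        #      ['end']]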
def update_zones(self, zones, activate, fabric_vsan, operation,
active_zone_set, zone_status):
"""Update the zone configuration.
This method will update the zone configuration passed by user.
:param zones: zone names mapped to members. Zone members
are colon separated but case-insensitive
.. code-block:: python
{ zonename1:[zonememeber1, zonemember2,...],
zonename2:[zonemember1, zonemember2,...]...}
e.g:
{
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04',
'20:19:00:05:1e:e8:e3:29']
}
:param activate: True will activate the zone config.
:param operation: zone add or zone remove
:param fabric_vsan: Virtual San #
:param active_zone_set: Active zone set dict retrieved from
get_active_zone_set method
        :param zone_status: Status of the zone
|
AlexandreDecan/Lexpage
|
app/profile/migrations/0002_auto_20171206_0943.py
|
Python
|
gpl-3.0
| 651
| 0.001541
|
# Generated by Django 2.0 on 2017-12-06 09:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('profile', '0001_squashed_0005_auto_20170408_1400'),
]
operations = [
migrations.AlterField(
            model_name='Profile',
name='theme',
            field=models.CharField(blank=True, choices=[('style', 'Lexpage'), ('style_nowel', 'Nowel'), ('style_st_patrick', 'Saint-Patrick'), ('style_halloween', 'Halloween')], help_text='Laissez vide pour adopter automatiquement le thème du moment.', max_length=16, null=True, verbose_name='Thème'),
),
]
|
UTSA-ICS/keystone-kerberos
|
keystone/exception.py
|
Python
|
apache-2.0
| 15,384
| 0
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from oslo_utils import encodeutils
import six
from keystone.i18n import _, _LW
CONF = cfg.CONF
LOG = log.getLogger(__name__)
# Tests use this to make exception message format errors fatal
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class Error(Exception):
"""Base error class.
Child classes should define an HTTP status code, title, and a
message_format.
"""
    code = None
title = None
message_format = None
def __init__(self, message=None, **kwargs):
try:
message = self._build_message(message, **kwargs)
except KeyError:
# if you see this warning in your logs, please raise a bug report
if _FATAL_EXCEPTION_FORMAT_ERRORS:
raise
else:
LOG.warning(_LW('missing exception kwargs (programmer error)'))
message = self.message_format
super(Error, self).__init__(message)
def _build_message(self, message, **kwargs):
"""Builds and returns an exception message.
:raises: KeyError given insufficient kwargs
"""
if not message:
try:
message = self.message_format % kwargs
except UnicodeDecodeError:
try:
kwargs = dict([(k, encodeutils.safe_decode(v)) for k, v in
six.iteritems(kwargs)])
except UnicodeDecodeError:
# NOTE(jamielennox): This is the complete failure case
# at least by showing the template we have some idea
# of where the error is coming from
message = self.message_format
else:
message = self.message_format % kwargs
return message
class ValidationError(Error):
message_format = _("Expecting to find %(attribute)s in %(target)s -"
" the server could not comply with the request"
" since it is either malformed or otherwise"
" incorrect. The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class SchemaValidationError(ValidationError):
# NOTE(lbragstad): For whole OpenStack message consistency, this error
# message has been written in a format consistent with WSME.
message_format = _("%(detail)s")
class ValidationTimeStampError(Error):
message_format = _("Timestamp not in expected format."
" The server could not comply with the request"
" since it is either malformed or otherwise"
" incorrect. The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class StringLengthExceeded(ValidationError):
message_format = _("String length exceeded.The length of"
" string '%(string)s' exceeded the limit"
" of column %(type)s(CHAR(%(length)d)).")
class ValidationSizeError(Error):
message_format = _("Request attribute %(attribute)s must be"
" less than or equal to %(size)i. The server"
" could not comply with the request because"
" the attribute size is invalid (too large)."
" The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class CircularRegionHierarchyError(Error):
message_format = _("The specified parent region %(parent_region_id)s "
"would create a circular region hierarchy.")
code = 400
title = 'Bad Request'
class PasswordVerificationError(Error):
message_format = _("The password length must be less than or equal "
"to %(size)i. The server could not comply with the "
"request because the password is invalid.")
code = 403
title = 'Forbidden'
class RegionDeletionError(Error):
message_format = _("Unable to delete region %(region_id)s because it or "
"its child regions have associated endpoints.")
code = 403
title = 'Forbidden'
class PKITokenExpected(Error):
message_format = _('The certificates you requested are not available. '
'It is likely that this server does not use PKI tokens '
'otherwise this is the result of misconfiguration.')
code = 403
title = 'Cannot retrieve certificates'
class SecurityError(Error):
"""Avoids exposing details of security failures, unless in debug mode."""
amendment = _('(Disable debug mode to suppress these details.)')
def _build_message(self, message, **kwargs):
"""Only returns detailed messages in debug mode."""
if CONF.debug:
return _('%(message)s %(amendment)s') % {
'message': message or self.message_format % kwargs,
'amendment': self.amendment}
else:
return self.message_format % kwargs
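    # --- Editor's note (not part of the original file) ---
    # With CONF.debug enabled, subclasses render the detailed message plus the
    # amendment; with debug disabled, only the generic message_format is shown,
    # so security failures never leak details to API clients.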
class Unauthorized(SecurityError):
message_format = _("The request you have made requires authentication.")
code = 401
title = 'Unauthorized'
class AuthPluginException(Unauthorized):
message_format = _("Authentication plugin error.")
def __init__(self, *args, **kwargs):
super(AuthPluginException, self).__init__(*args, **kwargs)
self.authentication = {}
class MissingGroups(Unauthorized):
message_format = _("Unable to find valid groups while using "
"mapping %(mapping_id)s")
class AuthMethodNotSupported(AuthPluginException):
message_format = _("Attempted to authenticate with an unsupported method.")
def __init__(self, *args, **kwargs):
super(AuthMethodNotSupported, self).__init__(*args, **kwargs)
self.authentication = {'methods': CONF.auth.methods}
class AdditionalAuthRequired(AuthPluginException):
message_format = _("Additional authentications steps required.")
def __init__(self, auth_response=None, **kwargs):
super(AdditionalAuthRequired, self).__init__(message=None, **kwargs)
self.authentication = auth_response
class Forbidden(SecurityError):
message_format = _("You are not authorized to perform the"
" requested action.")
code = 403
title = 'Forbidden'
class ForbiddenAction(Forbidden):
message_format = _("You are not authorized to perform the"
" requested action: %(action)s")
class ImmutableAttributeError(Forbidden):
message_format = _("Could not change immutable attribute(s) "
"'%(attributes)s' in target %(target)s")
class CrossBackendNotAllowed(Forbidden):
message_format = _("Group membership across backend boundaries is not "
"allowed, group in question is %(group_id)s, "
"user is %(user_id)s")
class InvalidPolicyAssociation(Forbidden):
message_format = _("Invalid mix of entities for policy association - "
"only Endpoint, Service or Region+Service allowed. "
"Request was - Endpoint: %(endpoint_id)s, "
"Service: %(service_id)s, Region: %(region_id)s")
class NotFound(Error):
message_format = _("Could not find: %(target)s")
code = 404
title = 'Not Found'
class EndpointNotFound(NotFound):
message_format = _("Could not find endpoint: %(endpoint_id)s")
class MetadataNotFound(NotFound):
"""(dolph): metadata is not a user-facing concept,
s
|
crmccreary/openerp_server
|
openerp/addons/event/wizard/event_confirm_registration.py
|
Python
|
agpl-3.0
| 2,865
| 0.002792
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
class event_confirm_registration(osv.osv_memory):
"""
Confirm Event Registration
"""
_name = "event.confirm.registration"
    _description = "Confirmation for Event Registration"
_columns = {
'msg': fields.text('Message', readonly=True),
}
_defaults = {
'msg': 'The event limit is reached. What do you want to do?'
}
def default_get(self, cr, uid, fields, context=None):
"""
This function gets default values
"""
if context is None:
context = {}
registration_pool = self.pool.get('event.registration')
registration_ids = context.get('registration_ids', [])
res = super(event_confirm_registration, self).default_get(cr, uid, fields, context=context)
msg = ""
overlimit_event_ids = []
for registration in registration_pool.browse(cr, uid, registration_ids, context=context):
register_max = registration.event_id.register_max
if registration.event_id.id not in overlimit_event_ids:
overlimit_event_ids.append(registration.event_id.id)
msg += _("Warning: The Event '%s' has reached its Maximum Limit (%s).") \
%(registration.event_id.name, register_max)
if 'msg' in fields:
res.update({'msg': msg})
return res
def confirm(self, cr, uid, ids, context=None):
if context is None:
context = {}
registration_pool = self.pool.get('event.registration')
registration_ids = context.get('registration_ids', [])
registration_pool.do_open(cr, uid, registration_ids, context=context)
return {'type': 'ir.actions.act_window_close'}
event_confirm_registration()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nir0s/logrotated
|
setup.py
|
Python
|
apache-2.0
| 879
| 0
|
from setuptools import setup, find_packages
import os
import codecs
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open
return codecs.open(os.path.join(here, *parts), 'r').read()
install_requires = [
"click==6.6",
"jinja2==2.8"
]
setup(
name='logrotated',
version="0.0.3",
url='https://github.com/nir0s/logrotated',
author='nir0s',
author_email='nir36g@gmail.com',
    license='LICENSE',
platforms='All',
description='A logrotate human friendly interface.',
long_description=read('README.rst'),
packages=find_packages(exclude=[]),
package_data={'logrotated': ['resources/logrotate']},
entry_points={
'console_scripts': [
'rotatethis = logrotated.logrotated:main',
]
},
install_requires=install_requires
)
|
T2DREAM/t2dream-portal
|
src/encoded/upgrade/publication.py
|
Python
|
mit
| 1,991
| 0
|
from snovault import upgrade_step
@upgrade_step('publication', '', '2')
def publication(value, system):
# http://redmine.encodedcc.org/issues/2591
value['identifiers'] = []
if 'references' in value:
for reference in value['references']:
value['identifiers'].append(reference)
del value['references']
# http://redmine.encodedcc.org/issues/2725
# /labs/encode-consortium/
value['lab'] = "cb0ef1f6-3bd3-4000-8636-1c5b9f7000dc"
# /awards/ENCODE/
value['award'] = "b5736134-3326-448b-a91a-894aafb77876"
if 'dbxrefs' in value:
unique_dbxrefs = set(value['dbxrefs'])
value['dbxrefs'] = list(unique_dbxrefs)
@upgrade_step('publication', '2', '3')
def publication_2_3(value, system):
# http://redmine.encodedcc.org/issues/3063
if 'identifiers' in value:
value['identifiers'] = list(set(value['identifiers']))
if 'datasets' in value:
        value['datasets'] = list(set(value['datasets']))
if 'categories' in value:
value['categories'] = list(set(value['categories']))
if 'published_by' in value:
value['published_by'] = list(set(value['published_by']))
# Upgrade 3 to 4 in item.py.
@upgrade_step('publication', '4', '5')
def publication_4_5(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-3646
if value['status'] == 'planned':
value['status'] = 'in preparation'
elif value['status'] == 'replaced':
value['status'] = 'deleted'
elif value['status'] in ['in press', 'in revision']:
value['status'] = 'submitted'
@upgrade_step('publication', '5', '6')
def publication_5_6(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-3708
if value['status'] == 'published':
value['status'] = 'released'
elif value['status'] == 'submitted':
value['status'] = 'in progress'
elif value['status'] == 'in preparation':
value['status'] = 'in progress'
else:
pass
|
coreymcdermott/artbot
|
artbot_website/views.py
|
Python
|
mit
| 793
| 0.022699
|
from django.shortcuts import render
from datetime import date, datetime, timedelta
from .models import Event, SponsoredContent
from pytz import timezone
def index(request):
now = datetime.now(timezone('Australia/Sydney')).date()
if now.isoweekday() in [5, 6, 7]:
weekend_start = now
else:
        weekend_start = now + timedelta((5 - now.isoweekday()) % 7)
    events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, status = Event.PUBLISHED_STATUS).order_by('-start')
sponsoredContent = SponsoredContent.objects.filter(start__lte = now, end__gte = now, status = SponsoredContent.PUBLISHED_STATUS).first()
return render(request, 'index.html', {'events': events, 'sponsoredContent': sponsoredContent},)
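# --- Editor's worked example (not part of the original file) ---
# isoweekday(): Mon=1 ... Sun=7. Fri/Sat/Sun count as the current weekend;
# any other day advances to the coming Friday via (5 - isoweekday) % 7:
#     date(2024, 1, 1)  # Mon -> weekend_start 2024-01-05 (Fri)
#     date(2024, 1, 7)  # Sun -> weekend_start 2024-01-07 (itself)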
|
nickcrafford/python-pygame-tetris
|
src/Tetromino.py
|
Python
|
gpl-3.0
| 8,712
| 0.015725
|
#!/usr/bin/env python
"""
Python Tetris is a clunky pygame Tetris clone. Feel free to make it better!!
Copyright (C) 2008 Nick Crafford <nickcrafford@earthlink.net>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import random
import pygame
from pygame.locals import *
class Tetromino(object):
def __init__(self,first_x,first_y,mask_color,color, t):
self.first_x = first_x
self.first_y = first_y
self.color = color
self.mask_color = mask_color
self.positions = []
self.max_x = 0
self.min_x = 0
self.max_y = 0
self.currentPosition = 0
self.oldPosition = 0
self.active = True
self.id = random.random()
self.volume = 1.0
self.mfile = '../sound/cluck.wav'
self.freq = 44100
self.bitsize = -32
self.channels = 1
self.buffer = 4096
#Tetromino Switch Statement
if t == 'I':
self.I()
elif t == 'O':
self.O()
elif t == 'T':
self.T()
elif t == 'S':
self.S()
elif t == 'Z':
self.Z()
elif t == 'L':
self.L()
elif t == 'J':
self.J()
#Initialize Sound
if pygame.mixer:
pygame.mixer.init(self.freq, self.bitsize, self.channels, self.buffer)
pygame.mixer.music.set_volume(self.volume)
self.cluck = pygame.mixer.Sound(self.mfile)
def move(self, grid, x_direction, y_direction):
self.max_x = 0
self.min_x = 0
self.max_y = 0
max_x_pos = 0
min_x_pos = 50
max_y_pos = 0
if self.active:
#Render Current Position in color
if grid.accept(self.id,self.positions[self.currentPosition],x_direction,y_direction):
#Set all to mask color
pos = self.positions[self.currentPosition]
for idx in range(len(pos)):
grid.set(self.mask_color,pos[idx][0],pos[idx][1],0)
for posIdx in range(len(self.positions)):
pos = self.positions[posIdx]
for idx in range(len(pos)):
pos[idx] = (pos[idx][0]+x_direction,pos[idx][1]+y_direction)
x = pos[idx][0]
y = pos[idx][1]
if posIdx == self.currentPosition:
grid.set(self.color,x,y,self.id)
if y > max_y_pos:
max_y_pos = y
if x > max_x_pos:
max_x_pos = x
if x < min_x_pos:
min_x_pos = x
self.max_x = max_x_pos*grid.cell_width + grid.cell_width
self.min_x = min_x_pos*grid.cell_width
self.max_y = max_y_pos*grid.cell_height + grid.cell_height
else:
self.cluck.play()
self.active = False
def rotate(self,grid):
self.max_y = 0
if self.active:
self.oldPosition = self.currentPosition
pos = self.positions[self.oldPosition]
for idx in range(len(pos)):
grid.set(self.mask_color,pos[idx][0],pos[idx][1],0)
if self.currentPosition < len(self.positions)-1:
self.currentPosition += 1
else:
self.currentPosition = 0
self.move(grid,0,0)
def I(self):
self.color = (49,199,239)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+2, self.first_y), (self.first_x+3, self.first_y)])
self.positions.append([(self.first_x+2, self.first_y-2), (self.first_x+2, self.first_y-1),
(self.first_x+2, self.first_y), (self.first_x+2, self.first_y+1)])
def O(self):
self.color = (247,211,8)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y-1),
(self.first_x+1, self.first_y), (self.first_x, self.first_y-1)])
def T(self):
self.color = (173,77,156)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+2, self.first_y), (self.first_x+1, self.first_y-1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+2, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y-1)])
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+2, self.first_y), (self.first_x+1, self.first_y+1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y-1)])
def S(self):
self.color = (66,182,66)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+2, self.first_y+1)])
self.positions.append([(self.first_x+2, self.first_y), (self.first_x+2, self.first_y+1),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y+2)])
def Z(self):
self.color = (239,32,41)
self.positions.append([(self.first_x, self.first_y+1), (self.first_x+1, self.first_y+1),
(self.first_x+1, self.first_y), (self.first_x+2, self.first_y)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+1, self.first_y+1),
(self.first_x+2, self.first_y+1), (self.first_x+2, self.first_y+2)])
def L(self):
self.color = (90,101,173)
self.positions.append([(self.first_x, self.first_y), (self.first_x, self.first_y+1),
(self.first_x+1, self.first_y+1), (self.first_x+2, self.first_y+1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+1, self.first_y+1),
(self.first_x, self.first_y+2), (self.first_x+1, self.first_y+2)])
self.positions.append([(self.first_x, self.first_y+1), (self.first_x+1, self.first_y+1),
(self.first_x+2, self.first_y+1), (self.first_x+2, self.first_y+2)])
self.positions.append([(self.first_x+2, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y+2)])
def J(self):
self.color = (239,121,33)
self.positions.append([(self.first_x, self.first_y+1), (self.first_x+1, self.first_y+1),
(self.first_x+2, self.first_y+1), (self.first_x+2, self.first_y)])
self.positions.append([(self.first_x, self.first_y), (self.fir
|
tuxfux-hlp-notes/python-batches
|
archieves/batch-56/logging/second.py
|
Python
|
gpl-3.0
| 1,002
| 0.005988
|
#!/usr/bin/python
# logging.basicConfig
# Messages on screen or file like object - StreamHandlers
# logging.Formatter
# man date/https://docs.python.org/2/library/time.html#time.strftime
import logging
logging.basicConfig(filename="disk.log",filemode='a',level=logging.DEBUG,format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',datefmt='%c')
# modes
# r - read mode - reading a file.
# w - write mode - write to a file. if file doesnot exist it should create it.
# if it exist truncates it to zero.
# a - append mode - appends contents to the file.
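# For instance, with filemode='a' in basicConfig above, repeated runs keep
# appending records to disk.log; filemode='w' would truncate the log on
# every start.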
disk_size = input("please enter the disk size:")
if disk_size < 40:
logging.info("Your disk looks healthy at {}".format(disk_size))
elif disk_size < 60:
    logging.warning("Your disk is getting filled up {}".format(disk_size))
elif disk_size < 90:
    logging.error("your disk is stomach full. It is going to burst out {}".format(disk_size))
elif disk_size < 100:
logging.critical("your application is sleeping {}".format(disk_size))
|
aurzenligl/prophy
|
prophyc/generators/prophy.py
|
Python
|
mit
| 5,118
| 0.00254
|
from collections import namedtuple
from prophyc.generators import base, word_wrap
INDENT_STR = u" "
MAX_LINE_WIDTH = 100
DocStr = namedtuple("DocStr", "block, inline")
def _form_doc(model_node, max_inl_docstring_len, indent_level):
block_doc, inline_doc = "", ""
if model_node.docstring:
if len(model_node.docstring) <= max_inl_docstring_len and "\n" not in model_node.docstring:
inline_doc = u" // {}".format(model_node.docstring)
elif model_node.docstring:
block_doc = u"\n" + "".join(
_gen_multi_line_doc(model_node.docstring, indent_level=indent_level, block_header=model_node.name))
return DocStr(block_doc, inline_doc)
schema_line_breaker = word_wrap.BreakLinesByWidth(MAX_LINE_WIDTH, " ", "/* ", " * ", " ", " */")
@schema_line_breaker
def _gen_multi_line_doc(block_comment_text, indent_level=0, block_header=""):
assert "\n" not in block_header, "Will not work with line breaks in header bar."
if block_header:
if len(block_comment_text) >= 250:
schema_line_breaker.make_a_bar("-" if indent_level else "=", block_header)
yield block_header
for paragraph in block_comment_text.split("\n"):
yield paragraph
def _columnizer(model_node, column_splitter, max_line_width=100):
members_table = [column_splitter(m) for m in model_node.members]
widths = [max(len(str(r)) for r in g) for g in zip(*members_table)]
max_inline_comment_width = max_line_width - sum(widths)
for member, columns in zip(model_node.members, members_table):
doc = _form_doc(member, max_inline_comment_width, indent_level=1)
if doc.block:
yield doc.block
yield u"\n" + INDENT_STR
for is_not_last, (cell_width, cell_str) in enumerate(zip(widths, columns), 1 - len(columns)):
yield cell_str
padding = u" " * (max(0, cell_width - len(cell_str)))
if is_not_last:
yield padding
elif doc.inline:
yield padding + doc.inline
if model_node.members:
yield "\n"
def generate_schema_container(model_node, designator, column_splitter):
if model_node.docstring:
block_docstring = u"".join(_gen_multi_line_doc(model_node.docstring, indent_level=0,
block_header=model_node.name))
if block_docstring:
block_docstring += u"\n"
else:
block_docstring = u""
members = u"".join(_columnizer(model_node, column_splitter, max_line_width=100))
return u"{}{} {} {{{}}};".format(block_docstring, designator, model_node.name, members)
class SchemaTranslator(base.TranslatorBase):
block_template = u'''{content}'''
    @staticmethod
def translate_include(include):
doc = _form_doc(include, 50, indent_level=0)
return u"{d
|
.block}#include \"{0.name}\"{d.inline}".format(include, d=doc)
@staticmethod
def translate_constant(constant):
doc = _form_doc(constant, max_inl_docstring_len=50, indent_level=0)
return u"{d.block}\n{0.name} = {0.value};{d.inline}".format(constant, d=doc)
@staticmethod
def translate_enum(enumerator):
def column_selector(member):
value = u" = {};".format(member.value)
return member.name, value
return generate_schema_container(enumerator, "enum", column_selector)
@staticmethod
def translate_struct(struct):
def column_selector(member):
type_ = member.value
if member.optional:
type_ += u"*"
if member.is_fixed:
name = u"{m.name}[{m.size}];"
elif member.is_limited:
name = u"{m.name}<{m.size}>;"
elif member.is_dynamic:
name = u"{m.name}<@{m.bound}>;"
elif member.greedy:
name = u"{m.name}<...>;"
else:
name = u"{m.name};"
return type_, u" ", name.format(m=member)
return generate_schema_container(struct, u"struct", column_selector)
@staticmethod
def translate_union(union):
def column_selector(member):
discriminator = u"{}: ".format(member.discriminator)
field_type = member.value
field_name = u" {};".format(member.name)
return discriminator, field_type, field_name
return generate_schema_container(union, u"union", column_selector)
@classmethod
def _make_lines_splitter(cls, previous_node_type, current_node_type):
if not previous_node_type:
return u""
if previous_node_type == "Include" and current_node_type != "Include":
return u"\n\n"
if previous_node_type in ("Struct", "Union") or current_node_type in ("Enum", "Struct", "Union"):
return u"\n\n\n"
if previous_node_type != current_node_type:
return u"\n\n"
return u"\n"
class SchemaGenerator(base.GeneratorBase):
top_level_translators = {
'.prophy': SchemaTranslator,
}
|
justincely/pyspecfit
|
setup.py
|
Python
|
bsd-3-clause
| 731
| 0.025992
|
from distutils.core import setup
import os
import glob
setup(
name = 'pyspecfit',
url = 'http://justincely.github.io',
version = '0.0.1',
description = 'interact with IRAF task specfit I/O products',
author = 'Justin Ely',
author_email = 'ely@stsci.edu',
keywords = ['astronomy'],
classifiers = ['Programming Language :: Python',
'Development Status :: 1 - Planning',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Astronomy',
                   'Topic :: Scientific/Engineering :: Physics',
'Topic :: Software Development :: Libraries :: Python Modules'],
packages = ['pyspecfit']
)
|
huangz1990/riacn-code
|
ch05_listing_source.py
|
Python
|
mit
| 29,256
| 0.006797
|
# coding: utf-8
import bisect
import contextlib
import csv
from datetime import datetime
import functools
import json
import logging
import random
import threading
import time
import unittest
import uuid
import redis
QUIT = False
SAMPLE_COUNT = 100
config_connection = None
# Listing 5-1
# <start id="recent_log"/>
# Map most log severity levels to a consistent string form.
SEVERITY = {
    logging.DEBUG: 'debug',
    logging.INFO: 'info',
    logging.WARNING: 'warning',
    logging.ERROR: 'error',
    logging.CRITICAL: 'critical',
}
SEVERITY.update((name, name) for name in SEVERITY.values())
def log_recent(conn, name, message, severity=logging.INFO, pipe=None):
    # Try to turn the log severity into a simple string.
    severity = str(SEVERITY.get(severity, severity)).lower()
    # Create the key the messages will be stored under.
    destination = 'recent:%s:%s'%(name, severity)
    # Prepend the current time so we know when the message was sent.
    message = time.asctime() + ' ' + message
    # Use a pipeline so only a single round trip is needed.
    pipe = pipe or conn.pipeline()
    # Add the message to the front of the log list.
    pipe.lpush(destination, message)
    # Trim the list so it only holds the 100 most recent messages.
    pipe.ltrim(destination, 0, 99)
    # Execute both commands.
    pipe.execute()
# <end id="recent_log"/>
# Listing 5-2
# <start id="common_log"/>
def log_common(conn, name, message, severity=logging.INFO, timeout=5):
    # Handle the log severity level.
    severity = str(SEVERITY.get(severity, severity)).lower()
    # Key that holds the most recent common log messages.
    destination = 'common:%s:%s'%(name, severity)
    # The logs rotate every hour, so a key records the hour we are in.
    start_key = destination + ':start'
    pipe = conn.pipeline()
    end = time.time() + timeout
    while time.time() < end:
        try:
            # WATCH the key recording the current hour so rotation
            # happens correctly.
            pipe.watch(start_key)
            # Get the current time...
            now = datetime.utcnow().timetuple()
            # ...and the start of the current hour.
            hour_start = datetime(*now[:4]).isoformat()
            existing = pipe.get(start_key)
            # Create a transaction.
            pipe.multi()
            # If the common log list is for a previous hour...
            if existing and existing < hour_start:
                # ...archive the old common log data.
                pipe.rename(destination, destination + ':last')
                pipe.rename(start_key, destination + ':pstart')
                # Record the hour we are now in.
                pipe.set(start_key, hour_start)
            # Increment the counter recording how often this message was seen.
            pipe.zincrby(destination, message)
            # log_recent() records the message and calls execute() for us.
            log_recent(pipe, name, message, severity, pipe)
            return
        except redis.exceptions.WatchError:
            # Another client raced us on the archive operation; retry.
            continue
# <end id="common_log"/>
# Listing 5-3
# <start id="update_counter"/>
# Counter precisions in seconds: 1 second, 5 seconds, 1 minute, 5 minutes,
# 1 hour, 5 hours, 1 day -- adjust these to fit your own needs.
PRECISION = [1, 5, 60, 300, 3600, 18000, 86400] #A
def update_counter(conn, name, count=1, now=None):
    # Get the current time so we know which time slice to increment.
    now = now or time.time()
    # Create a transactional pipeline so later cleanup can work correctly.
    pipe = conn.pipeline()
    # Add an entry for every precision we record.
    for prec in PRECISION:
        # Get the start of the current time slice.
        pnow = int(now / prec) * prec
        # Create the named hash where the count data will be stored.
        hash = '%s:%s'%(prec, name)
        # Record a reference to the counter in a sorted set, with a score
        # of 0 so the cleanup step can run later.
        pipe.zadd('known:', hash, 0)
        # Update the counter for the given name and precision.
        pipe.hincrby('count:' + hash, pnow, count)
    pipe.execute()
# <end id="update_counter"/>
# Listing 5-4
# <start id="get_counter"/>
def get_counter(conn, name, precision):
    # Name of the key where the counter data is stored.
    hash = '%s:%s'%(precision, name)
    # Fetch the counter data from Redis.
    data = conn.hgetall('count:' + hash)
    # Convert the counter data into the expected format.
    to_return = []
    for key, value in data.iteritems():
        to_return.append((int(key), int(value)))
    # Sort the data so that older samples come first.
    to_return.sort()
return to_return
# <end id="get_counter"/>
# <start id="clean_counters"/>
def clean_counters(conn):
pipe = conn.pipeline(True)
# 为了平等地处理更新频率各不相同的多个计数器,程序需要记录清理操作执行的次数。
passes = 0
# 持续地对计数器进行清理,直到退出为止。
while not QUIT:
# 记录清理操作开始执行的时间,用于计算清理操作执行的时长。
start = time.time()
# 渐进地遍历所有已知的计数器。
index = 0
while index < conn.zcard('known:'):
# 取得被检查计数器的数据。
hash = conn.zrange('known:', index, index)
index += 1
if not hash:
break
hash = hash[0]
# 取得计数器的精度。
prec = int(hash.p
|
artition(':')[0])
# 因为清理程序每60秒钟就会循环一次,
# 所以这里需要根据计数器的更新频率来判断是否真的有必要对计数器进行清理。
bprec = int(prec // 6
|
0) or 1
# 如果这个计数器在这次循环里不需要进行清理,
# 那么检查下一个计数器。
# (举个例子,如果清理程序只循环了三次,而计数器的更新频率为每5分钟一次,
# 那么程序暂时还不需要对这个计数器进行清理。)
if passes % bprec:
continue
hkey = 'count:' + hash
# 根据给定的精度以及需要保留的样本数量,
# 计算出我们需要保留什么时间之前的样本。
cutoff = time.time() - SAMPLE_COUNT * prec
# 获取样本的开始时间,并将其从字符串转换为整数。
samples = map(int, conn.hkeys(hkey))
# 计算出需要移除的样本数量。
samples.sort()
remove = bisect.bisect_right(samples, cutoff)
# 按需移除计数样本。
if remove:
conn.hdel(hkey, *samples[:remove])
# 这个散列可能已经被清空。
if remove == len(samples):
try:
# 在尝试修改计数器散列之前,对其进行监视。
pipe.watch(hkey)
# 验证计数器散列是否为空,如果是的话,
# 那么从记录已知计数器的有序集合里面移除它。
if not pipe.hlen(hkey):
pipe.multi()
pipe.zrem('known:', hash)
pipe.execute()
# 在删除了一个计数器的情况下,
# 下次循环可以使用与本次循环相同的索引。
index -= 1
else:
# 计数器散列并不为空,
# 继续让它留在记录已有计数器的有序集合里面。
pipe.unwatch()
# 有其他程序向这个计算器散列添加了新的数据,
# 它已经不再是空的了,继续让它留在记录已知计数器的有序集合里面。
except redis.exceptions.WatchError:
pass
# 为了让清理操作的执行频率与计数器更新的频率保持一致,
# 对记录循环次数的变量以及记录执行时长的变量进行更新。
passes += 1
duration = min(int(time.time() - start) + 1, 60)
# 如果这次循环未耗尽60秒钟,那么在余下的时间内进行休眠;
# 如果60秒钟已经耗尽,那么休眠一秒钟以便稍作休息。
time.sleep(max(60 - duration, 1))
# <end id="clean_counters"/>
# Listing 5-6
# <start id="update_stats"/>
def update_stats(conn, context, type, value, timeout=5):
    # Key the statistics are stored under.
    destination = 'stats:%s:%s'%(context, type)
    # Handle the current hour and the previous hour,
    # just like common_log() does.
    start_key = destination + ':start'
    pipe = conn.pipeline(True)
    end = time.time() + timeout
    while time.time() < end:
        try:
            pipe.watch(start_key)
            now = datetime.utcnow().timetuple()
            hour_start = datetime(*now[:4]).isoformat()
            existing = pipe.get(start_key)
            pipe.multi()
            if existing and existing < hour_start:
                pipe.rename(destination, destination + ':last')
                pipe.rename(start_key, destination + ':pstart')
                pipe.set(start_key, hour_start)
            tkey1 = str(uuid.uuid4())
            tkey2 = str(uuid.uuid4())
            # Add the value to the temporary keys.
            pipe.zadd(tkey1, 'min', value)
            pipe.zadd(tkey2, 'max', value)
            # Union the stats key with the two temporary keys, using the
            # appropriate MIN and MAX aggregates.
            pipe.zunionstore(destination,
                [destination, tkey1], aggregate='min')
            pipe.zunionstore(destination,
                [destination, tkey2], aggregate='max')
|
wuqize/FluentPython
|
chapter16/coro_exc_demo.py
|
Python
|
lgpl-3.0
| 1,499
| 0.00403
|
# -*- coding: utf-8 -*-
"""
Created on Sun May 14 22:13:58 2017
"""
#python3
"""
>>> exc_coro = demo_exc_handling()
>>> next(exc_coro)
-> coroutine started
>>> exc_coro.send(11)
-> coroutine received: 11
>>> exc_coro.send(22)
-> coroutine received: 22
>>> exc_coro.close()
>>> from inspect import getgeneratorstate
>>> getgeneratorstate(exc_coro)
'GEN_CLOSED'
>>> exc_coro = demo_exc_handling()
>>> next(exc_coro)
-> coroutine started
>>> exc_coro.send(11)
-> coroutine received: 11
>>> exc_coro.throw(DemoException)
*** DemoException handled. Continuing...
>>> getgeneratorstate(exc_coro)
'GEN_SUSPENDED'
>>> exc_coro = demo_exc_handling()
>>> next(exc_coro)
-> coroutine started
>>> exc_coro.send(11)
-> coroutine received: 11
>>> exc_coro.throw(ZeroDivisionError)
Traceback (most recent call last):
...
ZeroDivisionError
>>> getge
|
neratorstate(exc_coro)
'GEN_CLOSED'
"""
from inspect import getgeneratorstate
class DemoException(Exception):
"""异常类型。"""
def demo_exc_handling():
print('-> coroutine started')
while True:
try:
x = yield
except DemoException:
print('*** DemoException handled. Continuing...')
else:
print('-> coroutine received: {!r}'.format(x))
    raise RuntimeError('This line should never run.')
if __name__ == "__main__":
exc_coro = demo_exc_handling()
next(exc_coro)
exc_coro.send(11)
exc_coro.send(22)
exc_coro.close()
print(getgeneratorstate(exc_coro))
|
0xc0ffeec0de/tapioca-discourse
|
tapioca_discourse/resource_mapping/admin.py
|
Python
|
mit
| 3,741
| 0.001604
|
# -*- coding: utf-8 -*-
ADMIN_MAPPING = {
'admin_user_suspend': {
'resource': 'admin/users/{id}/suspend',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1suspend%2Fput'),
'methods': ['PUT'],
},
'admin_user_unsuspend': {
'resource': 'admin/users/{id}/unsuspend',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1unsuspend%2Fput'),
'methods': ['PUT'],
},
'admin_user_block': {
'resource': 'admin/users/{id}/block',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1block%2Fput'),
'methods': ['PUT'],
},
'admin_user_unblock': {
'resource': 'admin/users/{id}/unblock',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1unblock%2Fput'),
'methods': ['PUT'],
},
'admin_user_activate': {
'resource': 'admin/users/{id}/activate',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1activate%2Fput'),
'methods': ['PUT'],
},
'admin_user_anonymize': {
'resource': 'admin/users/{id}/anonymize',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1anonymize%2Fput'),
'methods': ['PUT'],
},
'admin_api_key_generate': {
'resource': 'admin/users/{id}/generate_api_key',
        'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1generate_api_key%2Fpost'),
'methods': ['POST'],
},
'admin_group_assign': {
'resource': 'admin/users/{id}/groups',
'docs': ('http://docs.discourse.org/#tag/'
                 'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1groups%2Fpost'),
'methods': ['POST'],
},
'admin_group_remove': {
'resource': 'admin/users/{id}/groups/{group_id}',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1groups~1%7Bgroup_id%7D%2Fdelete'),
'methods': ['DELETE'],
},
'admin_group_create': {
'resource': 'admin/groups',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1groups%2Fpost'),
'methods': ['POST'],
},
'admin_group_delete': {
'resource': 'admin/groups/{group_id}.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1groups~1%7Bgroup_id%7D.json%2Fdelete'),
'methods': ['DELETE'],
},
'admin_group_members_list': {
'resource': 'groups/{group_name}/members.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1groups~1%7Bgroup_name%7D~1members.json%2Fget'),
'methods': ['GET'],
},
'admin_group_members_add': {
'resource': 'groups/{group_id}/members.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1groups~1%7Bgroup_id%7D~1members.json%2Fput'),
'methods': ['PUT'],
},
'admin_group_members_delete': {
'resource': 'groups/{group_id}/members.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1groups~1%7Bgroup_id%7D~1members.json%2Fdelete'),
'methods': ['DELETE'],
},
'admin_site_settings_show': {
'resource': 'admin/site_settings.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1site_settings.json%2Fget'),
'methods': ['GET'],
},
}
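# Illustrative use through a tapioca client (hypothetical credentials and
# host; the exact client constructor depends on tapioca-discourse's wrapper):
#   from tapioca_discourse import Discourse
#   api = Discourse(api_key='...', api_username='system',
#                   api_url='https://forum.example.com')
#   api.admin_user_activate(id=42).put()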
|
HarryLoofah/gae-bead-calculator
|
main.py
|
Python
|
mit
| 8,656
| 0.000116
|
#!/usr/bin/env python
#
# The MIT License (MIT)
#
# Copyright (c) 2015 Greg Aitkenhead
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
bead-calculator
============
A simple Python script to help peyote stitch beadworkers start their projects.
BeadCalculator checks to make sure the number of beads used in a project will
work out mathematically and lets the beadworker know what design elements
will be possible for the number of starting beads entered.
##To Use:
1. Measure the object that you'll be beading by stringing beads and wrapping
thread around the object.
2. Enter the number of beads from the initial measurement around the object.
3. BeadCalculator tells you if that number of beads will work, and if it does
not, BeadCalculator suggests an alternative number or numbers to start with.
BeadCalculator also tells the beadworker how many beads to string, how many to
add (when the first two lines of beads are added to the project), and what long
and short side design elements will be available.
"""
import cgi
import logging
import webapp2
import jinja2
from jinja2 import Environment, FileSystemLoader
ENV = Environment(loader=FileSystemLoader('templates'))
class MainHandler(webapp2.RequestHandler):
"""Renders the root of the web-app using base.html as the template."""
def get(self):
"""Create main web page."""
template = ENV.get_template('base.html')
self.response.write(template.render())
class CalcBeadResults(webapp2.RequestHandler):
"""
Run all logic and create templates depending on value of beads_entered.
Value 'beads_entered/ comes from textarea value of main-form in base.html.
"""
def get(self):
"""Gets number of beads entered from base.html form input."""
bead_input = cgi.escape(self.request.get('beads_entered'))
def sanity_check(bead_input):
"""Before running full code, check to see that the nu
|
mber entered
(beads), is greater than 12 and that it is divisible by 6 or 9.
If 'beads' is less than 12, print error message.
"""
if int(bead_input) < 12:
beads_user_chose = str(bead_input)
more_beads_message = "Please re-try using more than 12 beads."
template = ENV.get_template('try-again.html')
self.response.write(
template.render(
beads_user_chose=beads_user_chose,
more_beads_message=more_beads_message)
)
# Run sanity_check.
sanity_check(bead_input)
def long_short_values(bead_input):
"""
Returns short and long side numbers of design elements depending on
whether the beads_entered is mod 6, 9, or 12. If number of beads
entered is not mod 6 or 9, long_short_values finds the higher and
lower values matching the above criteria and then suggests those
numbers to the user. Also shows the new list values so that the
user can see which option offers the most design choices.
"""
# Lists of possible results for design elements (values)
            # where each key is the modulo integer.
check_list = {
6: (3, 5),
9: (4, 7),
12: (5, 9)
}
pass_list = [v for k, v in check_list.iteritems()
if int(bead_input) % k == 0]
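            # For example, 18 % 6 == 0 and 18 % 9 == 0, so pass_list would be
            # [(3, 5), (4, 7)]; for 20 it stays empty, and the search further
            # down suggests 24 (next higher) and 18 (next lower) instead.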
if len(pass_list) != 0 and int(bead_input) >= 12:
# Suggest starting bead number and number of beads to add.
# These formulas are based on the specific 'three drop' peyote
# stitch pattern used (as opposed to the simpler 'two drop.')
suggested = int(bead_input)
beads_to_add = suggested/3
starting_number = beads_to_add*2
pass_list = ", ".join(repr(e) for e in sorted(pass_list))
starting_number = str(starting_number)
beads_to_add = str(beads_to_add)
beads_user_chose = str(bead_input)
# If pass_list contains values, print design elements and
# start/add numbers. See /templates/pass-test.html, which
# extends base.html.
template = ENV.get_template('pass-test.html')
self.response.write(
template.render(
beads_user_chose=beads_user_chose,
pass_list=pass_list,
starting_number=starting_number,
beads_to_add=beads_to_add
)
)
if len(pass_list) == 0:
# If list contains no values, find next usable number.
higher_list = pass_list
high_bead = int(bead_input)
while len(higher_list) == 0:
# Check that new number matches modulo criteria.
high_bead += 1
higher_list = [v for k, v in check_list.iteritems()
if int(high_bead) % k == 0]
if len(higher_list) != 0 and int(bead_input) >= 12:
# If pass_list does not contain values,
# suggest usable design element numbers
# for both next bead higher and next bead lower.
high_bead = str(high_bead)
higher_list = ", ".join(repr(e) for e in
sorted(higher_list))
# Also, find the next usable number lower than beads.
lower_list = pass_list
low_bead = int(bead_input)
# Make sure number of beads is >12 to avoid low numbers.
while len(lower_list) == 0 and low_bead > 12:
# Check to see if the new number matches modulo criteria.
low_bead -= 1
lower_list = [v for k, v in check_list.iteritems()
if int(low_bead) % k == 0]
if len(lower_list) != 0:
# Suggest design elements for lower bead options.
low_bead = str(low_bead)
lower_list = ", ".join(
repr(e) for e in sorted(lower_list)
)
beads_user_chose = str(bead_input)
template = ENV.get_template('no-pass.html')
self.response.write(
template.render(
beads_user_chose=beads_user_chose,
high_bead=high_bead,
higher_list=higher_list,
low_bead=low_bead,
lower_list=lower_list
)
)
# Run long_short_values.
long_short_values(bead_input)
APP = webapp2.WSGIApplication([
|
jalanb/jab
|
ipython/profile_jalanb/ipython_config.py
|
Python
|
mit
| 18,065
| 0.001439
|
# Configuration file for ipython.
# pylint: disable=E0602
c = get_config()
# -----------------------------------------------------------------------------
# InteractiveShellApp configuration
# -----------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# lines of code to run at IPython startup.
# c.InteractiveShellApp.exec_lines = []
# Enable GUI event loop integration ('qt', 'wx', 'gtk', 'glut', 'pyglet',
# 'osx').
# c.InteractiveShellApp.gui = None
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an 'import *' is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = None
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
#------------------------------------------------------------------------------
# TerminalIPythonApp configuration
#------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = u''
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = u'default'
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
# c.TerminalIPythonApp.force_interact = False
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an 'import *' is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# ~/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.TerminalIPythonApp.ipython_dir = u''
# Whether to display a banner upon starting IPython.
c.TerminalIPythonApp.display_banner = False
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Enable GUI event loop integration ('qt', 'wx', 'gtk', 'glut', 'pyglet',
# 'osx').
# c.TerminalIPythonApp.gui = None
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
#------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
#------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = True
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
c.TerminalInteractiveShell.colors = 'Linux'
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
#
# c.TerminalInteractiveShell.separate_out = ''
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
c.TerminalInteractiveShell.autocall = 1
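# With autocall set to 1 ('smart'), typing:   len 'abc'
# is rewritten and executed as:               len('abc')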
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
c.TerminalInteractiveShell.editor = '/usr/local/bin/vim'
# Shortcut style to use at the prompt
c.TerminalInteractiveShell.editing_mode = 'vi'
# The part of the banner to be printed before the profile
# c.TerminalInteractiveShell.banner1 = 'Python 2.7.1 (r271:86832, Aug 2 2012
|
atsushieno/cerbero
|
cerbero/bootstrap/linux.py
|
Python
|
lgpl-2.1
| 10,061
| 0.00497
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.bootstrap import BootstrapperBase
from cerbero.bootstrap.bootstrapper import register_bootstrapper
from cerbero.enums import Platform, Architecture, Distro, DistroVersion
from cerbero.errors import ConfigurationError
from cerbero.utils import shell
import subprocess
class UnixBootstrapper (BootstrapperBase):
tool = ''
command = ''
yes_arg = ''
checks = []
packages = []
distro_packages = {}
def __init__(self, config, offline, assume_yes):
BootstrapperBase.__init__(self, config, offline)
self.assume_yes = assume_yes
def start(self):
for c in self.checks:
c()
if self.config.distro_packages_install:
packages = self.packages
if self.config.distro_version in self.distro_packages:
packages += self.distro_packages[self.config.distro_version]
extra_packages = self.config.extra_bootstrap_packages.get(
self.config.platform, None)
if extra_packages:
self.packages += extra_packages.get(self.config.distro, [])
tool = self.tool
if self.assume_yes:
            tool += ' ' + self.yes_arg
        tool += ' ' + self.command
shell.call(tool % ' '.join(self.packages))
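# For example, DebianBootstrapper below with assume_yes=True ends up running
# roughly:
#   sudo apt-get -y install autotools-dev automake autoconf ...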
class DebianBootstrapper (UnixBootstrapper):
tool = 'sudo apt-get'
command = 'install %s'
yes_arg = '-y'
packages = ['autotools-dev', 'automake', 'autoconf', 'libtool', 'g++',
'autopoint', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkg-config', 'gtk-doc-tools', 'libxv-dev', 'libx11-dev',
'libpulse-dev', 'python3-dev', 'texinfo', 'gettext',
'build-essential', 'pkg-config', 'doxygen', 'curl',
'libxext-dev', 'libxi-dev', 'x11proto-record-dev',
'libxrender-dev', 'libgl1-mesa-dev', 'libxfixes-dev',
'libxdamage-dev', 'libxcomposite-dev', 'libasound2-dev',
'libxml-simple-perl', 'dpkg-dev', 'debhelper',
'build-essential', 'devscripts', 'fakeroot', 'transfig',
'gperf', 'libdbus-glib-1-dev', 'wget', 'glib-networking',
'libxtst-dev', 'libxrandr-dev', 'libglu1-mesa-dev',
'libegl1-mesa-dev', 'git', 'subversion', 'xutils-dev',
'intltool', 'ccache', 'python3-setuptools', 'libssl-dev']
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
if self.config.target_platform == Platform.WINDOWS:
if self.config.arch == Architecture.X86_64:
self.packages.append('libc6:i386')
self.checks.append(self.create_debian_arch_check('i386'))
if self.config.target_platform == Platform.LINUX:
self.packages.append('chrpath')
self.packages.append('libfuse-dev')
if self.config.distro_version in [DistroVersion.DEBIAN_SQUEEZE,
DistroVersion.UBUNTU_MAVERICK, DistroVersion.UBUNTU_LUCID]:
self.packages.remove('glib-networking')
if self.config.distro_version in [DistroVersion.UBUNTU_LUCID]:
self.packages.remove('autopoint')
def create_debian_arch_check(self, arch):
def check_arch():
native_arch = shell.check_call('dpkg --print-architecture')
if native_arch == arch:
return
foreign_archs = shell.check_call('dpkg --print-foreign-architectures')
if arch in foreign_archs.split():
return
raise ConfigurationError(('Architecture %s is missing from your setup. ' + \
'You can add it with: "dpkg --add-architecture %s",' + \
' then run "apt-get update."') \
% (arch, arch))
return check_arch
class RedHatBootstrapper (UnixBootstrapper):
tool = 'dnf'
command = 'install %s'
yes_arg = '-y'
packages = ['gcc', 'gcc-c++', 'automake', 'autoconf', 'libtool',
'gettext-devel', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkgconfig', 'gtk-doc', 'curl', 'doxygen', 'texinfo',
'texinfo-tex', 'texlive-dvips', 'docbook-style-xsl',
'transfig', 'intltool', 'rpm-build', 'redhat-rpm-config',
'python3-devel', 'libXrender-devel', 'pulseaudio-libs-devel',
'libXv-devel', 'mesa-libGL-devel', 'libXcomposite-devel',
'alsa-lib-devel', 'perl-ExtUtils-MakeMaker', 'libXi-devel',
'perl-XML-Simple', 'gperf', 'gdk-pixbuf2-devel', 'wget',
'docbook-utils-pdf', 'glib-networking', 'help2man',
'dbus-devel', 'glib2-devel', 'libXrandr-devel',
'libXtst-devel', 'git', 'subversion', 'xorg-x11-util-macros',
'mesa-libEGL-devel', 'ccache', 'openssl-devel']
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
if self.config.distro_version < DistroVersion.FEDORA_23:
self.tool = 'yum'
elif self.config.distro_version in [DistroVersion.REDHAT_6, DistroVersion.REDHAT_7]:
self.tool = 'yum'
if self.config.target_platform == Platform.WINDOWS:
if self.config.arch == Architecture.X86_64:
self.packages.append('glibc.i686')
if self.config.distro_version in [DistroVersion.FEDORA_24, DistroVersion.FEDORA_25]:
self.packages.append('libncurses-compat-libs.i686')
if self.config.target_platform == Platform.LINUX:
self.packages.append('chrpath')
self.packages.append('fuse-devel')
# Use sudo to gain root access on everything except RHEL
if self.config.distro_version == DistroVersion.REDHAT_6:
self.tool = 'su -c "' + self.tool + '"'
else:
self.tool = 'sudo ' + self.tool
class OpenSuseBootstrapper (UnixBootstrapper):
tool = 'sudo zypper'
command = 'install %s'
yes_arg = '-y'
packages = ['gcc', 'automake', 'autoconf', 'gcc-c++', 'libtool',
'gettext-tools', 'make', 'cmake', 'bison', 'flex', 'yasm',
'gtk-doc', 'curl', 'doxygen', 'texinfo',
'texlive', 'docbook-xsl-stylesheets',
'transfig', 'intltool', 'patterns-openSUSE-devel_rpm_build',
'python3-devel', 'xorg-x11-libXrender-devel', 'libpulse-devel',
'xorg-x11-libXv-devel', 'Mesa-libGL-devel', 'libXcomposite-devel',
'libX11-devel', 'alsa-devel', 'libXi-devel', 'Mesa-devel',
'Mesa-libGLESv3-devel',
'perl-XML-Simple', 'gperf', 'gdk-pixbuf-devel', 'wget',
'docbook-utils', 'glib-networking', 'git', 'subversion', 'ccache',
'openssl-devel']
class ArchBootstrapper (UnixBootstrapper):
tool = 'sudo pacman'
|
command = ' -S %s --needed'
yes_arg = ' --noconfirm'
packages = ['intltool', 'cmake', 'doxygen', 'gtk-doc',
                'libtool', 'bison', 'flex', 'automake', 'autoconf', 'make',
'curl', 'gettext', 'alsa-lib', 'yasm', 'gperf',
'docbook-xsl', 'transfig', 'libxrender',
'libxv', 'mesa', '
|
Comunitea/CMNT_004_15
|
project-addons/prepaid_order_discount/__manifest__.py
|
Python
|
agpl-3.0
| 697
| 0.002869
|
{
'name': 'Discount prepaid order',
'version': '1.0',
'category': 'Custom',
'description': """
Order Discount when it's prepaid and margin is between specific values
""",
'author': 'Nadia Ferreyra',
'website': '',
'depends': ['base',
'sale',
'product',
'sale_promotions_extend',
'commercial_rules',
'flask_middleware_connector',
'sale_custom'
],
'data': ['data/product_data.xml',
'data/parameters.xml',
'views/sale_order_view.xml',
'views/account_view.xml'
],
'installable': True
}
|
|
modoboa/modoboa-webmail
|
modoboa_webmail/tests/test_fetch_parser.py
|
Python
|
mit
| 3,016
| 0
|
# coding: utf-8
"""FETCH parser tests."""
from __future__ import print_function
import unittest
import six
from modoboa_webmail.lib.fetch_parser import FetchResponseParser
from . import data
def dump_bodystructure(fp, bs, depth=0):
"""Dump a parsed BODYSTRUCTURE."""
indentation = " " * (depth * 4)
for mp in bs:
if isinstance(mp, list):
if isinstance(mp[0], list):
print("{}m
|
ultipart/{}".format(indentation, mp[1]), file=fp)
dump_bodystructure(fp, mp, depth + 1)
else:
dump_bodystructure(fp, mp, depth)
elif isinstance(mp, dict):
if isinstance(mp["struct"][0], list):
print("{}multipart/{}".format(
indentation, mp["struct"][1]), file=fp)
dump_bodystructure(fp, mp["struct"][0], depth + 1)
else:
print("{}{}/{}".format(
indentation, *mp["struct"][:2]), file=fp)
fp.seek(0)
result = fp.read()
return result
class FetchParserTestCase(unittest.TestCase):
"""Test FETCH parser."""
def setUp(self):
"""Setup test env."""
self.parser = FetchResponseParser()
def _test_bodystructure_output(self, bs, expected):
"""."""
r = self.parser.parse(bs)
fp = six.StringIO()
output = dump_bodystructure(fp, r[list(r.keys())[0]]["BODYSTRUCTURE"])
fp.close()
self.assertEqual(output, expected)
return r
def test_parse_bodystructure(self):
"""Test the parsing of several responses containing BS."""
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_1, """multipart/alternative
text/plain
text/html
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_2, """multipart/mixed
text/plain
message/rfc822
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_3, """multipart/mixed
multipart/alternative
text/plain
text/html
application/pdf
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_4, """multipart/mixed
multipart/alternative
text/plain
text/html
application/octet-stream
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_5, """multipart/alternative
text/plain
text/html
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_6, """multipart/mixed
multipart/related
multipart/alternative
text/plain
text/html
image/png
image/jpeg
application/pdf
multipart/alternative
text/plain
text/html
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_7, """multipart/mixed
multipart/mixed
text/plain
application/octet-stream
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_8, "text/html\n")
|
alirizakeles/tendenci
|
tendenci/apps/forms_builder/forms/models.py
|
Python
|
gpl-3.0
| 20,327
| 0.002903
|
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericRelation
from django.shortcuts import get_object_or_404
from django_countries import countries as COUNTRIES
from localflavor.us.us_states import STATE_CHOICES
from localflavor.ca.ca_provinces import PROVINCE_CHOICES
from tendenci.apps.forms_builder.forms.settings import FIELD_MAX_LENGTH, LABEL_MAX_LENGTH
from tendenci.apps.forms_builder.forms.managers import FormManager
from tendenci.apps.perms.models import TendenciBaseModel
from tendenci.apps.perms.object_perms import ObjectPermission
from tendenci.apps.user_groups.models import Group, GroupMembership
from tendenci.apps.site_settings.utils import get_setting
from tendenci.apps.base.fields import EmailVerificationField
from tendenci.apps.base.utils import checklist_update
from tendenci.apps.redirects.models import Redirect
from tendenci.libs.abstracts.models import OrderingBaseModel
#STATUS_DRAFT = 1
#STATUS_PUBLISHED = 2
STATUS_CHOICES = (
('draft', _("Draft")),
('published', _("Published")),
)
FIELD_CHOICES = (
("CharField", _("Text")),
("CharField/django.forms.Textarea", _("Paragraph Text")),
("BooleanField", _("Checkbox")),
("ChoiceField/django.forms.RadioSelect", _("Single-select - Radio Button")),
("ChoiceField", _("Single-select - From a List")),
("MultipleChoiceField/django.forms.CheckboxSelectMultiple", _("Multi-select - Checkboxes")),
("MultipleChoiceField", _("Multi-select - From a List")),
("EmailVerificationField", _("Email")),
("CountryField", _("Countries")),
("StateProvinceField", _("States/Provinces")),
("FileField", _("File upload")),
("DateField/django.forms.extras.SelectDateWidget", _("Date - Select")),
("DateField/django.forms.DateInput", _("Date - Text Input")),
("DateTimeField", _("Date/time")),
("CharField/tendenci.apps.forms_builder.forms.widgets.Description", _("Description")),
("CharField/tendenci.apps.forms_builder.forms.widgets.Header", _("Section Heading")),
)
FIELD_FUNCTIONS = (
("GroupSubscription", _("Subscribe to Group")),
("GroupSubscriptionAuto", _("Subscribe to Group")),
("EmailFirstName", _("First Name")),
("EmailLastName", _("Last Name")),
("EmailFullName", _("Full Name")),
("EmailPhoneNumber", _("Phone Number")),
("Recipients", _("Email to Recipients")),
)
BILLING_PERIOD_CHOICES = (
('month', _('Month(s)')),
('year', _('Year(s)')),
('week', _('Week(s)')),
('day', _('Day(s)')),
)
DUE_SORE_CHOICES = (
('start', _('start')),
('end', _('end')),
)
class Form(TendenciBaseModel):
"""
A user-built form.
"""
FIRST = 1
MIDDLE = 2
LAST = 3
FIELD_POSITION_CHOICES = (
(FIRST, _("First")),
(MIDDLE, _("Middle")),
(LAST, _("Last")),
)
INTRO_DEFAULT_NAME = _("Intro")
FIELDS_DEFAULT_NAME = _("Fields")
PRICING_DEFAULT_NAME = _("Pricings")
title = models.CharField(_("Title"), max_length=100)
slug = models.SlugField(max_length=100, unique=True)
intro = models.TextField(_("Intro"), max_length=2000, blank=True)
response = models.TextField(_("Confirmation Text"), max_length=2000, blank=True)
email_text = models.TextField(_("Email Text to Submitter"), default='', blank=True,
help_text=_("If Send email is checked, this is the text that will be sent in an email to the person submitting the form."), max_length=2000)
subject_template = models.CharField(_("Template for email subject "),
help_text=_("""Options include [title] for form title, and
name of form fields inside brackets [ ]. E.x. [first name] or
[email address]"""),
default="[title] - [first name] [last name] - [phone]",
max_length=200,
blank=True, null=True)
send_email = models.BooleanField(_("Send email"), default=False,
help_text=_("If checked, the person submitting the form will be sent an email."))
email_from = models.EmailField(_("Reply-To address"), blank=True,
help_text=_("The address the replies to the email will be sent to"))
email_copies = models.CharField(_("Send copies to"), blank=True,
help_text=_("One or more email addresses, separated by commas"),
max_length=2000)
completion_url = models.CharField(_("Completion URL"), max_length=1000, blank=True, null=True,
help_text=_("Redirect to this page after form completion. Absolute URLS should begin with http. Relative URLs should begin with a forward slash (/)."))
template = models.CharField(_('Template'), max_length=50, blank=True)
# payments
custom_payment = models.BooleanField(_("Is Custom Payment"), default=False,
help_text=_("If checked, please add pricing options below. Leave the price blank if users can enter their own amount."))
recurring_payment = models.BooleanField(_("Is Recurring Payment"), default=False,
help_text=_("If checked, please add pricing options below. Leave the price blank if users can enter their own amount. Please also add an email field as a required field with type 'email'"))
payment_methods = models.ManyToManyField("payments.PaymentMethod", blank=True)
perms = GenericRelation(ObjectPermission,
object_id_field="object_id", content_type_field="content_type")
# positions for displaying the fields
intro_position = models.IntegerField(_("Intro Position"), choices=FIELD_POSITION_CHOICES, default=FIRST)
fields_position = models.IntegerField(_("Fields Position"), choices=FIELD_POSITION_CHOICES, default=MIDDLE)
pricing_position = models.IntegerField(_("Pricing Position"), choices=FIELD_POSITION_CHOICES, default=LAST)
# variable name of form main sections
intro_name = models.CharField(_("Intro Name"), max_length=50,
default=INTRO_DEFAULT_NAME, blank=True)
fields_name = models.CharField(_("Fields Name"), max_length=50,
default=FIELDS_DEFAULT_NAME, blank=True)
pricing_name = models.CharField(_("Pricing Name"), max_length=50,
default=PRICING_DEFAULT_NAME, blank=True)
objects = FormManager()
class Meta:
verbose_name = _("Form")
verbose_name_plural = _("Forms")
permissions = (("view_form", _("Can view form")),)
app_label = 'forms'
def __unicode__(self):
return self.title
def save(self, *args, **kwargs):
        # If this is the current contact form, update checklist
if str(self.pk) == get_setting('site', 'global', 'contact_form'):
checklist_update('update-contact')
super(Form, self).save(*args, **kwargs)
    @models.permalink
def get_absolute_url(self):
return ("form_detail", (), {"slug": self.slug})
def get_payment_type(self):
if self.recurring_payment and self.custom_payment:
return _("Custom Recurring Payment")
if self.recurring_payment:
return _("Recurring Payment")
if self.custom_payment:
return _("Custom Payment")
def admin_link_view(self):
url = self.get_absolute_url()
return "<a href='%s'>%s</a>" % (url, ugettext("View on site"))
admin_link_view.allow_tags = True
admin_link_view.short_description = ""
def admin_link_export(self):
url = reverse("admin:forms_form_export", args=(self.id,))
return "<a href='%s'>%s</a>" % (url, ugettext("Export entries"))
admin_link_export.allow_tags = True
admin_link_export.short_description = ""
def has_files(self):
for field in self.fields.all():
if field.field_type == 'FileField':
return True
return False
class FieldManager(models.Manager):
"""
Only show visible fields when displaying actual form..
"""
def visible(self):
return self.filter(visible=True)
"""
Get all Auto-fields. (As of writing, this is on
|
MilesDuronCIMAT/django_image
|
upload_image/models.py
|
Python
|
mit
| 145
| 0.027586
|
from django.db import models
# Create your models here.
class ImageModel(models.Model):
image = models.ImageField(upload_to = 'pic_folder/')
|
mpurzynski/MozDef
|
mq/plugins/nagioshostname.py
|
Python
|
mpl-2.0
| 902
| 0.002217
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
import hashlib
class message(object):
def __init__(self):
'''
takes an incoming nagios message and assigns a static ID
so we always update the same doc for current status.
'''
# this plugin
# sets a static document ID
# for a particular event to allow you to have an event that just updates
# current status
self.registration = ['nagios_hostname']
self.priority = 5
def onMessage(self, message, metadata):
docid = hashlib.md5('nagiosstatus' + message['details']['nagios_hostname']).hexdigest()
metadata['id'] = docid
return (message, metadata)
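# Sketch of the effect: a message whose details['nagios_hostname'] is 'web1'
# always hashes 'nagiosstatusweb1' to the same md5 docid, so each new status
# event overwrites the previous document instead of creating another one.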
|
alexa-infra/negine
|
thirdparty/boost-python/libs/python/test/staticmethod.py
|
Python
|
mit
| 826
| 0.007264
|
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
'''
>>> from staticmethod_ext import *
>>> class X1(X):
... pass
>>> x = X(16)
>>> x1 = X1(17)
>>> x1.count()
2
>
|
>> x.count()
2
>>> X1.count()
2
>>> X.count()
2
>>> x1.magic()
7654321
>>> x.magic()
7654321
>>> X1.magic()
7654321
>>> X.magic()
7654321
'''
def run(args = None):
import sys
import doctest
if args is not None:
sys.argv = args
return doctest.testmod(sys.modules.get(__name__))
if __name__ == '__main__':
print "running..."
import sys
status = run()[0]
if (status == 0): print "Done."
sys.exit(status)
|