| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| ioam/topographica | topo/learningfn/som.py | Python | bsd-3-clause | 5,130 | 0.009747 |
"""
SOM-based learning functions for CFProjections.
"""
from math import ceil
import param
from imagen import PatternGenerator, Gaussian
from holoviews import BoundingBox
from topo.base.arrayutil import L2norm, array_argmax
from topo.base.cf import CFPLearningFn
### JABHACKALERT: This class will be removed once the examples no
### longer rely upon it
class CFPLF_SOM(CFPLearningFn):
"""
An abstract base class of learning functions for Self-Organizing Maps.
This implementation is obsolete and will be removed soon.
Please see examples/cfsom_or.ty for current SOM support.
"""
__abstract = True
learning_radius = param.Number(default=0.0,doc=
"""
The radius of the neighborhood function to be used for
learning. Typically, this value will be set by the Sheet or
Projection owning this CFPLearningFn, but it can also be set
explicitly by the user.
""")
def __init__(self,**params):
super(CFPLF_SOM,self).__init__(**params)  # pass params through to the base learning function
self.warning("CFPLF_SOM is deprecated -- see the example in cfsom_or.ty for how to build a SOM")
def __call__(self, proj, input_activity, output_activity, learning_rate, **params):
raise NotImplementedError
### JABHACKALERT: This class will be removed once the examples no
### longer rely upon it
class CFPLF_HebbianSOM(CFPLF_SOM):
"""
Hebbian learning rule for CFProjections to Self-Organizing Maps.
This implementation is obsolete and will be removed soon.
Please see examples/cfsom_or.ty for current SOM support.
"""
learning_radius = param.Number(default=0.0)
crop_radius_multiplier = param.Number(default=3.0,doc=
"""
Factor by which the radius should be multiplied,
when deciding how far from the winner to keep updating the weights.
""")
neighborhood_kernel_generator = param.ClassSelector(PatternGenerator,
default=Gaussian(x=0.0,y=0.0,aspect_ratio=1.0),
doc="Neighborhood function")
def __call__(self, iterator, input_activity, output_activity, learning_rate, **params):
cfs = iterator.proj.cfs.tolist() # CEBALERT: convert to use flatcfs
rows,cols = output_activity.shape
# This learning function does not need to scale the learning
# rate like some do, so it does not use constant_sum_connection_rate()
single_connection_learning_rate = learning_rate
### JABALERT: The learning_radius is normally set by
### the learn() function of CFSOM, so it doesn't matter
### much that the value accepted here is in matrix and
### not sheet coordinates. It's confusing that anything
### would accept matrix coordinates, but the learning_fn
### doesn't have access to the sheet, so it can't easily
### convert from sheet coords.
radius = self.learning_radius
crop_radius = max(1.25,radius*self.crop_radius_multiplier)
# find out the matrix coordinates of the winner
#
# NOTE: when there are multiple projections, it would be
# slightly more efficient to calculate the winner coordinates
# within the Sheet, e.g. by moving winner_coords() to CFSOM
# and passing in the results here. However, finding the
# coordinates does not take much time, and requiring the
# winner to be passed in would make it harder to mix and match
# Projections and learning rules with different Sheets.
wr,wc = array_argmax(output_activity)
# Optimization: Calculate the bounding box around the winner
# in which weights will be changed, to avoid considering those
# units below.
cmin = int(max(wc-crop_radius,0))
cmax = int(min(wc+crop_radius+1,cols)) # at least 1 between cmin and cmax
rmin = int(max(wr-crop_radius,0))
rmax = int(min(wr+crop_radius+1,rows))
# generate the neighborhood kernel matrix so that the values
# can be read off easily using matrix coordinates.
nk_generator = self.neighborhood_kernel_generator
radius_int = int(ceil(crop_radius))
rbound = radius_int + 0.5
bb = BoundingBox(points=((-rbound,-rbound), (rbound,rbound)))
# Print parameters designed to match fm2d's output
#print "%d rad= %d std= %f alpha= %f" % (topo.sim._time, radius_int, radius, single_connection_learning_rate)
neighborhood_matrix = nk_generator(bounds=bb,xdensity=1,ydensity=1,
size=2*radius)
for r in range(rmin,rmax):
for c in range(cmin,cmax):
cwc = c - wc
rwr = r - wr
lattice_dist = L2norm((cwc,rwr))
if lattice_dist <= crop_radius:
cf = cfs[r][c]
rate = single_connection_learning_rate * neighborhood_matrix[rwr+radius_int,cwc+radius_int]
X = cf.get_input_matrix(input_activity)
cf.weights += rate * (X - cf.weights)
# CEBHACKALERT: see ConnectionField.__init__()
cf.weights *= cf.mask
| cliffe/SecGen | modules/utilities/unix/ctf/metactf/files/repository/src_angr/dist/scaffold10.py | Python | gpl-3.0 | 4,519 | 0.011286 |
# This challenge is similar to the previous one. It operates under the same
# premise that you will have to replace the check_equals_ function. In this
# case, however, check_equals_ is called so many times that it wouldn't make
# sense to hook where each one was called. Instead, use a SimProcedure to write
# your own check_equals_ implementation and then hook the check_equals_ symbol
# to replace all calls to check_equals_ with a call to your SimProcedure.
#
# You may be thinking:
# Why can't I just use hooks? The function is called many times, but if I hook
# the address of the function itself (rather than the addresses where it is
# called), I can replace its behavior everywhere. Furthermore, I can get the
# parameters by reading them off the stack (with memory.load(regs.esp + xx)),
# and return a value by simply setting eax! Since I know the length of the
# function in bytes, I can return from the hook just before the 'ret'
# instruction is called, which will allow the program to jump back to where it
# was before it called my hook.
# If you thought that, then congratulations! You have just invented the idea of
# SimProcedures! Instead of doing all of that by hand, you can let the already-
# implemented SimProcedures do the boring work for you so that you can focus on
# writing a replacement function in a Pythonic way.
# As a bonus, SimProcedures allow you to specify custom calling conventions, but
# unfortunately it is not covered in this CTF.
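# (A hedged sketch of that manual-hook idea, purely for illustration; the
#  address, length, and stack offset below are hypothetical and not taken from
#  the challenge binary:)
#
#   @project.hook(0x80486B3, length=5)                          # hypothetical address/length
#   def manual_check_equals(state):
#       to_check_ptr = state.memory.load(state.regs.esp + 4, 4)  # hypothetical offset
#       state.regs.eax = 1                                       # pretend the check passed
#
# A SimProcedure removes the need for this kind of bookkeeping.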
import angr
import claripy
import sys
def main(argv):
path_to_binary = argv[1]
project = angr.Project(path_to_binary)
initial_state = project.factory.entry_state()
# Define a class that inherits angr.SimProcedure in order to take advantage
# of Angr's SimProcedures.
class ReplacementCheckEquals(angr.SimProcedure):
# A SimProcedure replaces a function in the binary with a simulated one
# written in Python. Other than it being written in Python, the function
# acts largely the same as any function written in C. Any parameter after
# 'self' will be treated as a parameter to the function you are replacing.
# The parameters will be bitvectors. Additionally, the Python function can return in
# the usual Pythonic way. Angr will treat this in the same way it would
# treat a native function in the binary returning. An example:
#
# int add_if_positive(int a, int b) {
# if (a >= 0 && b >= 0) return a + b;
# else return 0;
# }
#
# could be simulated with...
#
# class ReplacementAddIfPositive(angr.SimProcedure):
# def run(self, a, b):
# if a >= 0 and b >=0:
# return a + b
# else:
# return 0
#
# Finish the parameters to the check_equals_ function. Reminder:
# int check_equals_AABBCCDDEEFFGGHH(char* to_check, int length) { ...
# (!)
def run(self, to_check, ...???):
# We can almost copy and paste the solution from the previous challenge.
# Hint: Don't look up the address! It's passed as a parameter.
# (!)
user_input_buffer_address = ???
user_input_buffer_length = ???
# Note the use of self.state to find the state of the system in a
# SimProcedure.
user_input_string = self.state.memory.load(
user_input_buffer_address,
user_input_buffer_length
)
check_against_string = ???
# Finally, instead of setting eax, we can use a Pythonic return statement
# to return the output of this function.
# Hint: Look at the previous solution.
return claripy.If(???, ???, ???)
# Hook the check_equals symbol. Angr automatically looks up the address
# associated with the symbol. Alternatively, you can use 'hook' instead
# of 'hook_symbol' and specify the address of the function. To find the
# correct symbol, disassemble the binary.
# (!)
check_equals_symbol = ??? # :string
project.hook_symbol(check_equals_symbol, ReplacementCheckEquals())
simulation = project.factory.simgr(initial_state)
def is_successful(state):
stdout_output = state.posix.dumps(sys.stdout.fileno())
return ???
def should_abort(state):
stdout_output = state.posix.dumps(sys.stdout.fileno())
return ???
simulation.explore(find=is_successful, avoid=should_abort)
if simulation.found:
solution_state = simulation.found[0]
solution = ???
print solution
else:
raise Exception('Could not find the solution')
if __name__ == '__main__':
main(sys.argv)
| woddx/privacyidea | privacyidea/lib/utils.py | Python | agpl-3.0 | 5,349 | 0.002243 |
from .log import log_with
import logging
log = logging.getLogger(__name__)
import binascii
from .crypto import geturandom
import qrcode
import StringIO
import urllib
from privacyidea.lib.crypto import urandom
import string
import re
def generate_otpkey(key_size=20):
"""
generates the HMAC key of keysize. Should be 20 or 32
The key is returned as a hexlified string
:param key_size: The size of the key to generate
:type key_size: int
:return: hexlified key
:rtype: string
"""
log.debug("generating key of size %s" % key_size)
return binascii.hexlify(geturandom(key_size))
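# For illustration: generate_otpkey(20) draws 20 random bytes and hexlifies them,
# so the returned string is 40 hex characters long (64 characters for key_size=32).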
def create_png(data, alt=None):
img = qrcode.make(data)
output = StringIO.StringIO()
img.save(output)
o_data = output.getvalue()
output.close()
return o_data
def create_img(data, width=0, alt=None):
"""
create the qr image data
:param data: input data that will be munched into the qrcode
:type data: string
:param width: image width in pixel
:type width: int
:return: image data to be used in an <img> tag
:rtype: string
"""
width_str = ''
alt_str = ''
o_data = create_png(data, alt=alt)
data_uri = o_data.encode("base64").replace("\n", "")
if width != 0:
width_str = " width=%d " % (int(width))
if alt is not None:
val = urllib.urlencode({'alt': alt})
alt_str = " alt=%r " % (val[len('alt='):])
ret_img = 'data:image/png;base64,%s' % data_uri
return ret_img
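# For illustration: create_img("some provisioning URL") returns a string of the
# form 'data:image/png;base64,...' that can be used directly as the src of an
# <img> tag, as the docstring above describes.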
def generate_password(size=6, characters=string.ascii_lowercase +
string.ascii_uppercase + string.digits):
"""
Generate a random password of the specified length from the given characters
:param size: The length of the password
:param characters: The characters the password may consist of
:return: password
:rtype: basestring
"""
return ''.join(urandom.choice(characters) for _x in range(size))
#
# Modhex calculations for Yubikey
#
hexHexChars = '0123456789abcdef'
modHexChars = 'cbdefghijklnrtuv'
hex2ModDict = dict(zip(hexHexChars, modHexChars))
mod2HexDict = dict(zip(modHexChars, hexHexChars))
def modhex_encode(s):
return ''.join(
[hex2ModDict[c] for c in s.encode('hex')]
)
# end def modhex_encode
def modhex_decode(m):
return ''.join(
[mod2HexDict[c] for c in m]
).decode('hex')
# end def modhex_decode
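# For illustration (assuming Python 2, as the code above does): modhex encoding
# maps each hex digit of the string's hex representation onto the Yubikey modhex
# alphabet 'cbdefghijklnrtuv', so modhex_decode(modhex_encode(s)) == s for any string s.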
def checksum(msg):
crc = 0xffff
for i in range(0, len(msg) / 2):
b = int(msg[i * 2] + msg[(i * 2) + 1], 16)
crc = crc ^ (b & 0xff)
for _j in range(0, 8):
n = crc & 1
crc = crc >> 1
if n != 0:
crc = crc ^ 0x8408
return crc
def sanity_name_check(name, name_exp="^[A-Za-z0-9_\-]+$"):
"""
This function can be used to check the sanity of a name like a resolver,
ca connector or realm.
:param name: The name of the resolver or ca connector
:return: True, otherwise raises an exception
"""
if re.match(name_exp, name) is None:
raise Exception("non conformant characters in the name"
": %r (not in %s)" % (name, name_exp))
return True
def get_data_from_params(params, exclude_params, config_description, module,
type):
"""
This is a helper function that parses the parameters when creating
resolvers or CA connectors.
It takes the parameters and checks, if the parameters correspond to the
Class definition.
:param params: The input parameters as passed from the REST API
:type params: dict
:param exclude_params: The parameters to be excluded like "resolver",
"type" or "caconnector"
:type exclude_params: list of strings
:param config_description: The description of the allowed configuration
:type config_description: dict
:param module: An identifier like "resolver", "CA connector". This is
only used for error output.
:type module: basestring
:param type: The type of the resolver or ca connector. Only used for
error output.
:type type: basestring
:return: tuple of (data, types, description)
"""
types = {}
desc = {}
data = {}
for k in params:
if k not in exclude_params:
if k.startswith('type.') is True:
key = k[len('type.'):]
types[key] = params.get(k)
elif k.startswith('desc.') is True:
key = k[len('desc.'):]
desc[key] = params.get(k)
else:
data[k] = params.get(k)
if k in config_description:
types[k] = config_description.get(k)
else:
log.warn("the passed key %r is not a "
"parameter for the %s %r" % (k, module, type))
# Check that there is no type or desc without the data itself.
# i.e. if there is a type.BindPW=password, then there must be a
# BindPW=....
_missing = False
for t in types:
if t not in data:
_missing = True
for t in desc:
if t not in data:
_missing = True
if _missing:
raise Exception("type or description without necessary data! %s" %
unicode(params))
return data, types, desc
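# A hedged illustration of the splitting above (keys are hypothetical): with
# params = {'BindDN': 'uid=admin', 'type.BindDN': 'string', 'desc.BindDN': 'the bind DN'}
# and an empty exclude_params list, the result is roughly
# data = {'BindDN': 'uid=admin'}, types = {'BindDN': 'string'}, desc = {'BindDN': 'the bind DN'}
# (plus a warning if 'BindDN' is not listed in config_description).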
| gsnyder206/synthetic-image-morph | congrid.py | Python | gpl-2.0 | 3,937 | 0.018542 |
import numpy as n
import scipy.interpolate
import scipy.ndimage
def congrid(a, newdims, method='linear', centre=False, minusone=False):
'''Arbitrary resampling of source array to new dimension sizes.
Currently only supports maintaining the same number of dimensions.
To use 1-D arrays, first promote them to shape (x,1).
Uses the same parameters and creates the same co-ordinate lookup points
as IDL's congrid routine, which apparently originally came from a VAX/VMS
routine of the same name.
method:
neighbour - closest value from original data
nearest and linear - uses n x 1-D interpolations using
scipy.interpolate.interp1d
(see Numerical Recipes for validity of use of n 1-D interpolations)
spline - uses ndimage.map_coordinates
centre:
True - interpolation points are at the centres of the bins
False - points are at the front edge of the bin
minusone:
For example- inarray.shape = (i,j) & new dimensions = (x,y)
False - inarray is resampled by factors of (i/x) * (j/y)
True - inarray is resampled by(i-1)/(x-1) * (j-1)/(y-1)
This prevents extrapolation one element beyond bounds of input array.
'''
if not a.dtype in [n.float64, n.float32]:
a = n.cast[float](a)
m1 = n.cast[int](minusone)
ofs = n.cast[int](centre) * 0.5
old = n.array( a.shape )
ndims = len( a.shape )
if len( newdims ) != ndims:
print("[congrid] dimensions error. ")# \
#"This routine currently only support " \
#"rebinning to the same number of dimensions."
return None
newdims = n.asarray( newdims, dtype=float )
dimlist = []
if method == 'neighbour':
for i in range( ndims ):
base = n.indices(newdims)[i]
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
cd = n.array( dimlist ).round().astype(int)
newa = a[list( cd )]
return newa
elif method in ['nearest','linear']:
# calculate new dims
for i in range( ndims ):
base = n.arange( newdims[i] )
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
# specify old dims
olddims = [n.arange(i, dtype = n.float) for i in list( a.shape )]
# first interpolation - for ndims = any
mint = scipy.interpolate.interp1d( olddims[-1], a, kind=method, bounds_error=False, fill_value=0.0 )
newa = mint( dimlist[-1] )
trorder = [ndims - 1] + list(range( ndims - 1 ))
for i in range( ndims - 2, -1, -1 ):
newa = newa.transpose( trorder )
mint = scipy.interpolate.interp1d( olddims[i], newa, kind=method , bounds_error=False, fill_value=0.0 )
newa = mint( dimlist[i] )
if ndims > 1:
# need one more transpose to return to original dimensions
newa = newa.transpose( trorder )
return newa
elif method in ['spline']:
oslices = [ slice(0,j) for j in old ]
oldcoords = n.ogrid[oslices]
nslices = [ slice(0,j) for j in list(newdims) ]
newcoords = n.mgrid[nslices]
newcoords_dims = range(n.rank(newcoords))
#make first index last
newcoords_dims.append(newcoords_dims.pop(0))
newcoords_tr = newcoords.transpose(newcoords_dims)
# makes a view that affects newcoords
newcoords_tr += ofs
deltas = (n.asarray(old) - m1) / (newdims - m1)
newcoords_tr *= deltas
newcoords_tr -= ofs
newa = scipy.ndimage.map_coordinates(a, newcoords)
return newa
else:
print("Congrid error: Unrecognized interpolation type.\n")#, \
#"Currently only \'neighbour\', \'nearest\',\'linear\',", \
#"and \'spline\' are supported."
return None
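# A minimal usage sketch, assuming the definitions above (not part of the
# original module): resample a small 2-D array with linear interpolation.
if __name__ == '__main__':
    demo = n.arange(16.).reshape(4, 4)
    resampled = congrid(demo, (8, 8), method='linear', centre=True)
    print(resampled.shape)  # expected: (8, 8)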
| davidthaler/Kaggle_Avito-2015 | val_run0.py | Python | mit | 1,259 | 0.01668 |
'''
This script gets log loss on the validation set from full_val_set.pkl,
(generated by the full_validation_set.py script) for some simple,
no-learning models like the HistCTR, all 0's, or mean-value benchmark.
author: David Thaler
date: July 2015
'''
import avito2_io
from datetime import datetime
from eval import logloss
maxlines_val = None
start = datetime.now()
val_ids = avito2_io.get_artifact('full_val_set.pkl')
print 'validation set ids read'
train_etl = {'ad' : lambda l : l['AdID'],
'pos' : lambda l : l['Position'],
'ctr' : lambda l : l['HistCTR']}
search_etl = {'cat' : lambda l : l['CategoryID']}
# validation run
input = avito2_io.rolling_join(True,
train_etl,
search_etl,
do_validation=True,
val_ids=val_ids)
loss = 0.0
for (k, (x, y)) in enumerate(input):
#loss += logloss(float(x['ctr']), y)
loss += logloss(0.006, y)
if k == maxlines_val:
break
if (k + 1) % 250000 == 0:
print 'processed %d lines on validation pass' % (k + 1)
print 'validation set log loss: %.5f' % (loss/(k + 1))
print 'elapsed time: %s' % (datetime.now() - start)
| rika/precip | precip/experiment.py | Python | apache-2.0 | 62,853 | 0.008146 |
#!/usr/bin/python2.7 -tt
"""
Copyright 2012 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import os
import random
import re
import socket
import subprocess
import time
import uuid
import threading
import paramiko
import boto
#from boto.ec2.connection import EC2Connection
from boto.ec2.regioninfo import RegionInfo
from boto.ec2.blockdevicemapping import EBSBlockDeviceType, BlockDeviceMapping
from boto.exception import EC2ResponseError
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
from azure_resource_manager import AzureResourceManager
__all__ = ["ExperimentException",
"EC2Experiment",
"NimbusExperiment",
"EucalyptusExperiment",
"OpenStackExperiment",
"GCloudExperiment",
"AzureExperiment"]
#logging.basicConfig(level=logging.WARN)
logger = logging.getLogger('precip')
# log to the console
console = logging.StreamHandler()
# default log level - make logger/console match
logger.setLevel(logging.DEBUG)
console.setLevel(logging.DEBUG)
# formatter
formatter = logging.Formatter("%(asctime)s %(levelname)7s: %(message)s", "%Y-%m-%d %H:%M:%S")
console.setFormatter(formatter)
logger.addHandler(console)
# make boto log less
boto_logger = logging.getLogger('boto')
boto_logger.setLevel(level=logging.FATAL)
class SSHConnection:
"""
Helper class for simple ssh functionality such as copying files and running commands.
The only authentication method supported is ssh pub/priv key authentication.
"""
def _new_connection(self, privkey, host, user):
"""
Internal method for setting up a ssh connection. As the instances come up with different
host keys all the time, the host key validation has been disabled.
:return: a handle to the ssh connection
"""
ssh = paramiko.SSHClient()
hkeys = ssh.get_host_keys()
hkeys.clear()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host, 22, username=user, key_filename=privkey, allow_agent=False, look_for_keys=False)
transport = ssh.get_transport()
transport.set_keepalive(30)
return ssh
def run(self, privkey, host, user, cmd):
"""
Runs a command on the remote machine.
:return: exit code, stdout and stderr from the command
"""
logger.debug("Running command on host %s as user %s: %s" % (host, user, cmd))
out = ""
err = ""
ssh = self._new_connection(privkey, host, user)
chan = ssh.get_transport().open_session()
chan.get_pty()
stdin = chan.makefile("wb", -1)
stdout = chan.makefile("rb", -1)
stderr = chan.makefile_stderr("rb", -1)
chan.exec_command(cmd)
stdin.flush()
exit_code = chan.recv_exit_status()
for line in stdout:
out += line
for line in stderr:
err += line
ssh.close()
return exit_code, out, err
def put(self, privkey, host, user, local_path, remote_path):
"""
Copies file from the local machine to the remote machine
"""
ssh = self._new_connection(privkey, host, user)
ftp = ssh.open_sftp()
ftp.put(local_path, remote_path)
ftp.close()
ssh.close()
def get(self, privkey, host, user, remote_path, local_path):
"""
Copies file from the remote machine to the local machine
"""
ssh = self._new_connection(privkey, host, user)
ftp = ssh.open_sftp()
ftp.get(remote_path, local_path)
ftp.close()
ssh.close()
class ExperimentException(Exception):
"""
Class for grouping the most common experiment failures
"""
pass
class Instance:
"""
Representation of an instance, and a few common attributes of that instance
"""
id = None
priv_addr = None
pub_addr = None
tags = []
ec2_instance = None
gce_boot_response = None
azure_boot_thread = None
is_fully_instanciated = False
not_instanciated_correctly = False
def __init__(self, instance_id):
"""
:param instance_id: a unique identifier for the instance, for example the amazon instance id
"""
self.id = instance_id
self.tags = []
def add_tag(self, tag):
"""
Tagging is implemented in our own instance as some infrastructures (OpenStack, ...) have not implemented
tagging in their API
"""
self.tags.append(tag)
def has_tags(self, tags):
"""
Checks if the instance have all the tags queried for
"""
try:
for t in tags:
# if the tag does not exists, we fail here with a ValueException
self.tags.index(t)
except ValueError:
return False
return True
def info(self):
i = {}
i["id"] = self.id
i["public_address"] = self.pub_addr
i["private_address"] = self.priv_addr
i["tags"] = self.tags
return i
class Experiment:
"""
Base class for all types of cloud implementations. This is what defines the experiment API.
"""
def __init__(self, name = None):
"""
Constructor for a new experiment - this will set up ~/.precip and ssh keys if they
do not already exist in a way that you can use precip from multiple machines or
accounts at the same time.
"""
if name is None:
self._name = str(int(time.time()))
else:
self._name = name
self._instances = []
self._conf_dir = os.path.join(os.environ["HOME"], ".precip")
# checking/creating conf directory
if not os.path.exists(self._conf_dir):
os.makedirs(self._conf_dir)
uid = self._get_account_id()
# ssh keys setup
self._ssh_pubkey = os.path.join(self._conf_dir, "precip_"+uid+".pub")
self._ssh_privkey = os.path.join(self._conf_dir, "precip_"+uid)
if not os.path.exists(self._ssh_privkey):
logger.info("Creating new ssh key in " + self._conf_dir)
logger.info("You don't need to enter a passphrase, just leave it blank and press enter!")
cmd = "ssh-keygen -q -t rsa -f " + self._ssh_privkey + " </dev/null"
p = subprocess.Popen(cmd, shell=True)
stdoutdata, stderrdata = p.communicate()
rc = p.returncode
if rc != 0:
raise ExperimentException("Command '%s' failed with error code %s" % (cmd, rc))
def __del__(self):
"""
Deprovision all instances
"""
self.deprovision([])
def _instance_subset(self, tags):
"""
Returns the subset of instances matching the tags
"""
subset = []
for i in self._instances:
if i.has_tags(tags):
subset.append(i)
return subset
def _is_valid_hostaddr(self, addr):
"""
Checks if a host address is "external". Note that addr can be either an ip address
or a fqdn.
| jitka/weblate | weblate/trans/admin_views.py | Python | gpl-3.0 | 7,747 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2016 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
import os.path
from django.contrib.sites.models import Site
from django.shortcuts import render
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import admin
from django.utils.translation import ugettext as _
from django.conf import settings
import django
import six
from weblate.trans.models import SubProject, IndexUpdate
from weblate import settings_example
from weblate import appsettings
from weblate.accounts.avatar import HAS_LIBRAVATAR
from weblate.trans.util import get_configuration_errors, HAS_PYUCA
from weblate.trans.ssh import (
generate_ssh_key, get_key_data, add_host_key,
get_host_keys, can_generate_key
)
import weblate
# List of default domain names on which warn user
DEFAULT_DOMAINS = ('example.net', 'example.com')
def admin_context(request):
"""Wrapper to get admin context"""
# Django has changed number of parameters
# pylint: disable=E1120
if django.VERSION < (1, 8, 0):
return admin.site.each_context()
return admin.site.each_context(request)
@staff_member_required
def report(request):
"""
Provides report about git status of all repos.
"""
context = admin_context(request)
context['subprojects'] = SubProject.objects.all()
return render(
request,
"admin/report.html",
context,
)
def get_first_loader():
"""Returns first loader from settings"""
if settings.TEMPLATES:
loaders = settings.TEMPLATES[0].get(
'OPTIONS', {}
).get(
'loaders', [['']]
)
else:
loaders = settings.TEMPLATE_LOADERS
if isinstance(loaders[0], six.string_types):
return loaders[0]
return loaders[0][0]
@staff_member_required
def performance(request):
"""
Shows performance tuning tips.
"""
checks = []
# Check for debug mode
checks.append((
_('Debug mode'),
not settings.DEBUG,
'production-debug',
settings.DEBUG,
))
# Check for domain configuration
checks.append((
_('Site domain'),
Site.objects.get_current().domain not in DEFAULT_DOMAINS,
'production-site',
Site.objects.get_current().domain,
))
# Check database being used
checks.append((
_('Database backend'),
"sqlite" not in settings.DATABASES['default']['ENGINE'],
'production-database',
settings.DATABASES['default']['ENGINE'],
))
# Check configured admins
checks.append((
_('Site administrator'),
len(settings.ADMINS) > 0 or
'noreply@weblate.org' in [x[1] for x in settings.ADMINS],
'production-admins',
', '.join([x[1] for x in settings.ADMINS]),
))
# Check offloading indexing
checks.append((
# Translators: Indexing is postponed to cron job
_('Indexing offloading'),
appsettings.OFFLOAD_INDEXING,
'production-indexing',
appsettings.OFFLOAD_INDEXING
))
if appsettings.OFFLOAD_INDEXING:
if IndexUpdate.objects.count() < 20:
index_updates = True
elif IndexUpdate.objects.count() < 200:
index_updates = None
else:
index_updates = False
checks.append((
# Translators: Indexing is postponed to cron job
_('Indexing offloading processing'),
index_updates,
'production-indexing',
IndexUpdate.objects.count(),
))
# Check for sane caching
caches = settings.CACHES['default']['BACKEND'].split('.')[-1]
if caches in ['MemcachedCache', 'PyLibMCCache', 'DatabaseCache']:
# We consider these good
caches = True
elif caches in ['DummyCache']:
# This one is definitely bad
caches = False
else:
# These might not be that bad
caches = None
checks.append((
_('Django caching'),
caches,
'production-cache',
settings.CACHES['default']['BACKEND'],
))
# Avatar caching
checks.append((
_('Avatar caching'),
'avatar' in settings.CACHES,
'production-cache-avatar',
settings.CACHES['avatar']['BACKEND']
if 'avatar' in settings.CACHES else '',
))
# Check email setup
default_mails = (
'root@localhost',
'webmaster@localhost',
'noreply@weblate.org'
)
checks.append((
_('Email addresses'),
(
settings.SERVER_EMAIL not in default_mails and
settings.DEFAULT_FROM_EMAIL not in default_mails
),
'production-email',
', '.join((settings.SERVER_EMAIL, settings.DEFAULT_FROM_EMAIL)),
))
# libravatar library
checks.append((
_('Federated avatar support'),
HAS_LIBRAVATAR,
'production-avatar',
HAS_LIBRAVATAR,
))
# pyuca library
checks.append((
_('pyuca library'),
HAS_PYUCA,
'production-pyuca',
HAS_PYUCA,
))
# Cookie signing key
checks.append((
_('Secret key'),
settings.SECRET_KEY != settings_example.SECRET_KEY,
'production-secret',
settings.SECRET_KEY,
))
# Allowed hosts
checks.append((
_('Allowed hosts'),
len(settings.ALLOWED_HOSTS) > 0,
'production-hosts',
', '.join(settings.ALLOWED_HOSTS),
))
loader = get_first_loader()
# Cached template loader
checks.append((
_('Cached template loader'),
'cached.Loader' in loader,
'production-templates',
loader,
))
# Check for serving static files
checks.append((
_('Admin static files'),
os.path.exists(
os.path.join(settings.STATIC_ROOT, 'admin', 'js', 'core.js')
),
'production-admin-files',
settings.STATIC_ROOT,
))
context = admin_context(request)
context['checks'] = checks
context['errors'] = get_configuration_errors()
return render(
request,
"admin/performance.html",
context,
)
@staff_member_required
def ssh(request):
"""
Show information and manipulate with SSH key.
"""
# Check whether we can generate SSH key
can_generate = can_generate_key()
# Grab action type
action = request.POST.get('action', None)
# Generate key if it does not exist yet
if can_generate and action == 'generate':
generate_ssh_key(request)
# Read key data if it exists
key = get_key_data()
# Add host key
if action == 'add-host':
add_host_key(request)
context = admin_context(request)
context['public_key'] = key
context['can_generate'] = can_generate
context['host_keys'] = get_host_keys()
context['ssh_docs'] = weblate.get_doc_url('admin/projects', 'private')
return render(
request,
"admin/ssh.html",
context,
)
| huertatipografica/huertatipografica-fl-scripts | AT_Outlines/AT-RoundCorners.py | Python | apache-2.0 | 2,230 | 0.045291 |
#FLM: AT ChrisCorner
"""Round selected corners:
RADIUS is a Point instance that represents the x and y radius of the corner
HANDLELENGTH is a number between 0. and 1. that determines how long the
bezier handles should be, affecting the steepness of the curve
"""
import math
def getContourRange(nid,g):
cID = g.FindContour(nid)
cStart = g.GetContourBegin(cID)
cEnd = cStart + g.GetContourLength(cID) - 1
return cStart,cEnd
def getNextNode(nid,g):
cStart,cEnd = getContourRange(nid,g)
if nid == cEnd:
return g[cStart]
else:
return g[nid + 1]
def getPrevNode(nid,g):
cStart,cEnd = getContourRange(nid,g)
if nid == cStart:
return g[cEnd]
else:
return g[nid - 1]
def normalizeVector(p):
m = getMagnitude(p);
if m != 0:
return p*(1/m)
else:
return Point(0,0)
def getMagnitude(p):
return math.sqrt(p.x*p.x + p.y*p.y)
def roundCorner(g,nid,_radius,handleLength = .7):
handleLength = 1 - handleLength
radius = Point()
if isinstance(_radius,int):
radius.x = _radius
radius.y = _radius
else:
radius = _radius
n = g[nid]
p = Point(n.x, n.y)
nn = getNextNode(nid, g)
pn = getPrevNode(nid, g)
nVect = normalizeVector(Point(-p.x + nn.x, -p.y + nn.y))
pVect = normalizeVector(Point(-p.x + pn.x, -p.y + pn.y))
pOffset = Point(round(pVect.x * radius.x), round(pVect.y * radius.y))
nOffset = Point(round(nVect.x * radius.x), round(nVect.y * radius.y))
print pOffset
print nOffset
n.x += int(pOffset.x)
n.y += int(pOffset.y)
g.Insert(Node(nCURVE,Point(p.x,p.y)), nid+1)
n1 = g[nid+1]
n1.x += int(nOffset.x)
n1.y += int(nOffset.y)
n1[1].x = p.x + round(pVect.x * radius.x * handleLength)
n1[1].y = p.y + round(pVect.y * radius.y * handleLength)
n1[2].x = p.x + round(nVect.x * radius.x * handleLength)
n1[2].y = p.y + round(nVect.y * radius.y * handleLength)
# RADIUS allows the x and the y radius to be adjusted separately
RADIUS = Point(30,30)
# HANDLELENGTH is a number between 0. and 1. that determines how long the
# bezier handles should be, affecting the steepness of the curve
HANDLELENGTH = .7
g = fl.glyph
fl.SetUndo()
i = 0
while i < len(g.nodes):
n = g[i]
if n.selected:
roundCorner(g,i,RADIUS,HANDLELENGTH)
i += 1
fl.UpdateGlyph()
| gregdek/ansible | lib/ansible/cli/adhoc.py | Python | gpl-3.0 | 7,375 | 0.002847 |
# Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible import context
from ansible.cli import CLI
from ansible.cli.arguments import optparse_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv
from ansible.playbook import Playbook
from ansible.playbook.play import Play
from ansible.plugins.loader import get_all_plugin_loaders
from ansible.utils.display import Display
display = Display()
class AdHocCLI(CLI):
''' is an extra-simple tool/framework/API for doing 'remote things'.
this command allows you to define and run a single task 'playbook' against a set of hosts
'''
def init_parser(self):
''' create an options parser for bin/ansible '''
super(AdHocCLI, self).init_parser(usage='%prog <host-pattern> [options]',
desc="Define and run a single task 'playbook' against"
" a set of hosts",
epilog="Some modules do not make sense in Ad-Hoc (include,"
" meta, etc)")
opt_help.add_runas_options(self.parser)
opt_help.add_inventory_options(self.parser)
opt_help.add_async_options(self.parser)
opt_help.add_output_options(self.parser)
opt_help.add_connect_options(self.parser)
opt_help.add_check_options(self.parser)
opt_help.add_runtask_options(self.parser)
opt_help.add_vault_options(self.parser)
opt_help.add_fork_options(self.parser)
opt_help.add_module_options(self.parser)
opt_help.add_basedir_options(self.parser)
# options unique to ansible ad-hoc
self.parser.add_option('-a', '--args', dest='module_args',
help="module arguments", default=C.DEFAULT_MODULE_ARGS)
self.parser.add_option('-m', '--module-name', dest='module_name',
help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
default=C.DEFAULT_MODULE_NAME)
def post_process_args(self, options, args):
'''Post process and validate options for bin/ansible '''
options, args = super(AdHocCLI, self).post_process_args(options, args)
if len(args) < 1:
raise AnsibleOptionsError("Missing target hosts")
elif len(args) > 1:
raise AnsibleOptionsError("Extraneous options or arguments")
display.verbosity = options.verbosity
self.validate_conflicts(options, runas_opts=True, vault_opts=True, fork_opts=True)
options = self.normalize_become_options(options)
return options, args
def _play_ds(self, pattern, async_val, poll):
check_raw = context.CLIARGS['module_name'] in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
mytask = {'action': {'module': context.CLIARGS['module_name'], 'args': parse_kv(context.CLIARGS['module_args'], check_raw=check_raw)}}
# avoid adding to tasks that don't support it, unless set, then give user an error
if context.CLIARGS['module_name'] not in ('include_role', 'include_tasks') or any(frozenset((async_val, poll))):
mytask['async_val'] = async_val
mytask['poll'] = poll
return dict(
name="Ansible Ad-Hoc",
hosts=pattern,
gather_facts='no',
tasks=[mytask])
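# For illustration, assuming a plain "ansible all -m ping" invocation, the dict
# built above comes out roughly as {'name': 'Ansible Ad-Hoc', 'hosts': 'all',
# 'gather_facts': 'no', 'tasks': [{'action': {'module': 'ping', 'args': {}},
# 'async_val': ..., 'poll': ...}]}, with async_val and poll taken from the CLI options.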
def run(self):
''' create and execute the single task playbook '''
super(AdHocCLI, self).run()
# only thing left should be host pattern
pattern = to_text(context.CLIARGS['args'][0], errors='surrogate_or_strict')
sshpass = None
becomepass = None
(sshpass, becomepass) = self.ask_passwords()
passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
# dynamically load any plugins
get_all_plugin_loaders()
loader, inventory, variable_manager = self._play_prereqs()
try:
hosts = self.get_host_list(inventory, context.CLIARGS['subset'], pattern)
except AnsibleError:
if context.CLIARGS['subset']:
raise
else:
hosts = []
display.warning("No hosts matched, nothing to do")
if context.CLIARGS['listhosts']:
display.display(' hosts (%d):' % len(hosts))
for host in hosts:
display.display(' %s' % host)
return 0
if context.CLIARGS['module_name'] in C.MODULE_REQUIRE_ARGS and not context.CLIARGS['module_args']:
err = "No argument passed to %s module" % context.CLIARGS['module_name']
if pattern.endswith(".yml"):
err = err + ' (did you mean to run ansible-playbook?)'
raise AnsibleOptionsError(err)
# Avoid modules that don't work with ad-hoc
if context.CLIARGS['module_name'] in ('import_playbook',):
raise AnsibleOptionsError("'%s' is not a valid action for ad-hoc commands"
% context.CLIARGS['module_name'])
play_ds = self._play_ds(pattern, context.CLIARGS['seconds'], context.CLIARGS['poll_interval'])
play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)
# used in start callback
playbook = Playbook(loader)
playbook._entries.append(play)
playbook._file_name = '__adhoc_playbook__'
if self.callback:
cb = self.callback
elif context.CLIARGS['one_line']:
cb = 'oneline'
# Respect custom 'stdout_callback' only with enabled 'bin_ansible_callbacks'
elif C.DEFAULT_LOAD_CALLBACK_PLUGINS and C.DEFAULT_STDOUT_CALLBACK != 'default':
cb = C.DEFAULT_STDOUT_CALLBACK
else:
cb = 'minimal'
run_tree = False
if context.CLIARGS['tree']:
C.DEFAULT_CALLBACK_WHITELIST.append('tree')
C.TREE_DIR = context.CLIARGS['tree']
run_tree = True
# now create a task queue manager to execute the play
self._tqm = None
try:
self._tqm = TaskQueueManager(
inventory=inventory,
variable_manager=variable_manager,
loader=loader,
passwords=passwords,
stdout_callback=cb,
run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
run_tree=run_tree,
forks=context.CLIARGS['forks'],
)
self._tqm.send_callback('v2_playbook_on_start', playbook)
result = self._tqm.run(play)
self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
finally:
if self._tqm:
self._tqm.cleanup()
if loader:
loader.cleanup_all_tmp_files()
return result
| rendermotion/RMPY | Tools/QT4/ui/FormRigDisplay.py | Python | lgpl-3.0 | 1,331 | 0.003005 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI_RigDisplay.ui'
#
# Created: Wed Mar 21 21:43:33 2018
# by: pyside-uic 0.2.14 running on PySide 1.2.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(236, 179)
self.verticalLayout = QtGui.QVBoxLayout(Form)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.ChangeJointdrawStyle = QtGui.QPushButton(Form)
self.ChangeJointdrawStyle.setMaximumSize(QtCore.QSize(100, 30))
self.ChangeJointdrawStyle.setObjectName("ChangeJointdrawStyle")
self.horizontalLayout.addWidget(self.ChangeJointdrawStyle)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.ChangeJointdrawStyle.setText(QtGui.QApplication.translate("Form", "Joint DrawStyle", None, QtGui.QApplication.UnicodeUTF8))
| InsulaCoworking/MusicCity | bands/migrations/0049_band_hidden_in_catalog.py | Python | gpl-2.0 | 567 | 0.001764 |
# Generated by Django 2.2.13 on 2021-09-28 11:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bands', '0048_band_profile_thumb'),
]
operations = [
migrations.AddField(
model_name='band',
name='hidden_in_catalog',
field=models.BooleanField(default=False, help_text='Ocultar el perfil del listado, para bandas que no son de Alcala pero se crea su perfil para ciclos y festivales', verbose_name='Oculto en el listado principal'),
),
]
| murven/malmo | Malmo/samples/Python_examples/tutorial_5_solved.py | Python | mit | 7,626 | 0.011671 |
# ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
# Tutorial sample #5: Observations
import MalmoPython
import os
import sys
import time
import json
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) # flush print output immediately
def Menger(xorg, yorg, zorg, size, blocktype, variant, holetype):
#draw solid chunk
genstring = GenCuboidWithVariant(xorg,yorg,zorg,xorg+size-1,yorg+size-1,zorg+size-1,blocktype,variant) + "\n"
#now remove holes
unit = size
while (unit >= 3):
w=unit/3
for i in xrange(0, size, unit):
for j in xrange(0, size, unit):
x=xorg+i
y=yorg+j
genstring += GenCuboid(x+w,y+w,zorg,(x+2*w)-1,(y+2*w)-1,zorg+size-1,holetype) + "\n"
y=yorg+i
z=zorg+j
genstring += GenCuboid(xorg,y+w,z+w,xorg+size-1, (y+2*w)-1,(z+2*w)-1,holetype) + "\n"
genstring += GenCuboid(x+w,yorg,z+w,(x+2*w)-1,yorg+size-1,(z+2*w)-1,holetype) + "\n"
unit/=3
return genstring
def GenCuboid(x1, y1, z1, x2, y2, z2, blocktype):
return '<DrawCuboid x1="' + str(x1) + '" y1="' + str(y1) + '" z1="' + str(z1) + '" x2="' + str(x2) + '" y2="' + str(y2) + '" z2="' + str(z2) + '" type="' + blocktype + '"/>'
def GenCuboidWithVariant(x1, y1, z1, x2, y2, z2, blocktype, variant):
return '<DrawCuboid x1="' + str(x1) + '" y1="' + str(y1) + '" z1="' + str(z1) + '" x2="' + str(x2) + '" y2="' + str(y2) + '" z2="' + str(z2) + '" type="' + blocktype + '" variant="' + variant + '"/>'
missionXML='''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Mission xmlns="http://ProjectMalmo.microsoft.com" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<About>
<Summary>Hello world!</Summary>
</About>
<ServerSection>
<ServerInitialConditions>
<Time>
<StartTime>1000</StartTime>
<AllowPassageOfTime>false</AllowPassageOfTime>
</Time>
<Weather>clear</Weather>
</ServerInitialConditions>
<ServerHandlers>
<FlatWorldGenerator generatorString="3;7,44*49,73,35:1,159:4,95:13,35:13,159:11,95:10,159:14,159:6,35:6,95:6;12;"/>
<DrawingDecorator>
<DrawSphere x="-27" y="70" z="0" radius="30" type="air"/>''' + Menger(-40, 40, -13, 27, "stone", "smooth_granite", "air") + '''
<DrawCuboid x1="-25" y1="39" z1="-2" x2="-29" y2="39" z2="2" type="lava"/>
<DrawCuboid x1="-26" y1="39" z1="-1" x2="-28" y2="39" z2="1" type="obsidian"/>
<DrawBlock x="-27" y="39" z="0" type="diamond_block"/>
</DrawingDecorator>
<ServerQuitFromTimeUp timeLimitMs="30000"/>
<ServerQuitWhenAnyAgentFinishes/>
</ServerHandlers>
</ServerSection>
<AgentSection mode="Survival">
<Name>MalmoTutorialBot</Name>
<AgentStart>
<Placement x="0.5" y="56.0" z="0.5" yaw="90"/>
<Inventory>
<InventoryItem slot="8" type="diamond_pickaxe"/>
</Inventory>
</AgentStart>
<AgentHandlers>
<ObservationFromFullStats/>
<ObservationFromGrid>
<Grid name="floor3x3">
<min x="-1" y="-1" z="-1"/>
<max x="1" y="-1" z="1"/>
</Grid>
</ObservationFromGrid>
<ContinuousMovementCommands turnSpeedDegs="180"/>
<InventoryCommands/>
<AgentQuitFromTouchingBlockType>
<Block type="diamond_block" />
</AgentQuitFromTouchingBlockType>
</AgentHandlers>
</AgentSection>
</Mission>'''
# Create default Malmo objects:
agent_host = MalmoPython.AgentHost()
try:
agent_host.parse( sys.argv )
except RuntimeError as e:
print 'ERROR:',e
print agent_host.getUsage()
exit(1)
if agent_host.receivedArgument("help"):
print agent_host.getUsage()
exit(0)
my_mission = MalmoPython.MissionSpec(missionXML, True)
my_mission_record = MalmoPython.MissionRecordSpec()
# Attempt to start a mission:
max_retries = 3
for retry in range(max_retries):
try:
agent_host.startMission( my_mission, my_mission_record )
break
except RuntimeError as e:
if retry == max_retries - 1:
print "Error starting mission:",e
exit(1)
else:
time.sleep(2)
# Loop until mission starts:
print "Waiting for the mission to start ",
world_state = agent_host.getWorldState()
while not world_state.is_mission_running:
sys.stdout.write(".")
time.sleep(0.1)
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
print
print "Mission running ",
agent_host.sendCommand("hotbar.9 1") #Press the hotbar key
agent_host.sendCommand("hotbar.9 0") #Release hotbar key - agent should now be holding diamond_pickaxe
agent_host.sendCommand("pitch 0.2") #Start looking downward slowly
time.sleep(1) #Wait a second until we are looking in roughly the right direction
agent_host.sendCommand("pitch 0") #Stop tilting the camera
agent_host.sendCommand("move 1") #And start running...
agent_host.sendCommand("attack 1") #Whilst flailing our pickaxe!
jumping = False
# Loop until mission ends:
while world_state.is_mission_running:
sys.stdout.write(".")
time.sleep(0.1)
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
if world_state.number_of_observations_since_last_state > 0:
msg = world_state.observations[-1].text
observations = json.loads(msg)
grid = observations.get(u'floor3x3', 0)
if jumping and grid[4]!=u'lava':
agent_host.sendCommand("jump 0")
jumping = False
if grid[3]==u'lava':
agent_host.sendCommand("jump 1")
jumping = True
print
print "Mission ended"
# Mission has ended.
| ArneBab/video-splitter | ffmpeg-split.py | Python | apache-2.0 | 2,871 | 0.019157 |
#!/usr/bin/env python
import subprocess
import re
import math
from optparse import OptionParser
length_regexp = 'Duration: (\d{2}):(\d{2}):(\d{2})\.\d+,'
re_length = re.compile(length_regexp)
def main():
(filename, split_length) = parse_options()
if split_length <= 0:
print "Split length can't be 0"
raise SystemExit
output = subprocess.Popen("ffmpeg -i '"+filename+"' 2>&1 | grep 'Duration'",
shell = True,
stdout = subprocess.PIPE
).stdout.read()
print output
matches = re_length.search(output)
if matches:
video_length = int(matches.group(1)) * 3600 + \
int(matches.group(2)) * 60 + \
int(matches.group(3))
print "Video length in seconds: "+str(video_length)
else:
print "Can't determine video length."
raise SystemExit
split_count = int(math.ceil(video_length/float(split_length)))
if(split_count == 1):
print "Video length is less than the target split length."
raise SystemExit
split_cmd = "ffmpeg -i '"+filename+"' -vcodec copy "
try:
filebase = ".".join(filename.split(".")[:-1])
fileext = filename.split(".")[-1]
except IndexError as e:
raise IndexError("No . in filename. Error: " + str(e))
for n in range(0, split_count):
split_str = ""
if n == 0:
split_start = 0
else:
split_start = split_length * n
split_str += " -ss "+str(split_start)+" -t "+str(split_length) + \
" '"+filebase + "-" + str(n) + "." + fileext + \
"'"
print "About to run: "+split_cmd+split_str
output = subprocess.Popen(split_cmd+split_str, shell = True, stdout =
subprocess.PIPE).stdout.read()
def parse_options():
parser = OptionParser()
parser.add_option("-f", "--file",
dest = "filename",
help = "file to split, for example sample.avi",
type = "string",
action = "store"
)
parser.add_option("-s", "--split-size",
dest = "split_size",
help = "split or chunk size in seconds, for example 10",
type = "int",
action = "store"
)
(options, args) = parser.parse_args()
if options.filename and options.split_size:
return (options.filename, options.split_size)
else:
parser.print_help()
raise SystemExit
if __name__ == '__main__':
try:
main()
except Exception, e:
print "Exception occured running main():"
print str(e)
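# A hedged usage sketch, based on the options defined in parse_options() above
# (the file name is hypothetical): splitting sample.avi into 10-second chunks
# would look like
#   python ffmpeg-split.py -f sample.avi -s 10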
| persandstrom/home-assistant | homeassistant/components/huawei_lte.py | Python | apache-2.0 | 3,730 | 0 |
"""
Support for Huawei LTE routers.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/huawei_lte/
"""
from datetime import timedelta
from functools import reduce
import logging
import operator
import voluptuous as vol
import attr
from homeassistant.const import (
CONF_URL, CONF_USERNAME, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['huawei-lte-api==1.0.12']
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
DOMAIN = 'huawei_lte'
DATA_KEY = 'huawei_lte'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.All(cv.ensure_list, [vol.Schema({
vol.Required(CONF_URL): cv.url,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
})])
}, extra=vol.ALLOW_EXTRA)
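# A hedged configuration sketch matching the schema above (host and credentials
# are hypothetical):
#   huawei_lte:
#     - url: http://192.168.8.1/
#       username: admin
#       password: secret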
@attr.s
class RouterData:
"""Class for router state."""
client = attr.ib()
device_information = attr.ib(init=False, factory=dict)
device_signal = attr.ib(init=False, factory=dict)
traffic_statistics = attr.ib(init=False, factory=dict)
wlan_host_list = attr.ib(init=False, factory=dict)
def __getitem__(self, path: str):
"""
Get value corresponding to a dotted path.
The first path component designates a member of this class
such as device_information, device_signal etc, and the remaining
path points to a value in the member's data structure.
"""
root, *rest = path.split(".")
try:
data = getattr(self, root)
except AttributeError as err:
raise KeyError from err
return reduce(operator.getitem, rest, data)
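# A hedged illustration of the dotted-path lookup above (the key name is
# hypothetical): data["device_information.DeviceName"] resolves to
# data.device_information["DeviceName"].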
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self) -> None:
"""Call API to update data."""
self.device_information = self.client.device.information()
_LOGGER.debug("device_information=%s", self.device_information)
self.device_signal = self.client.device.signal()
_LOGGER.debug("device_signal=%s", self.device_signal)
self.traffic_statistics = self.client.monitoring.traffic_statistics()
_LOGGER.debug("traffic_statistics=%s", self.traffic_statistics)
self.wlan_host_list = self.client.wlan.host_list()
_LOGGER.debug("wlan_host_list=%s", self.wlan_host_list)
@attr.s
class HuaweiLteData:
"""Shared state."""
data = attr.ib(init=False, factory=dict)
def get_data(self, config):
"""Get the requested or the only data value."""
if CONF_URL in config:
return self.data.get(config[CONF_URL])
if len(self.data) == 1:
return next(iter(self.data.values()))
return None
def setup(hass, config) -> bool:
"""Set up Huawei LTE component."""
if DATA_KEY not in hass.data:
hass.data[DATA_KEY] = HuaweiLteData()
for conf in config.get(DOMAIN, []):
_setup_lte(hass, conf)
return True
def _setup_lte(hass, lte_config) -> None:
"""Set up Huawei LTE router."""
from huawei_lte_api.AuthorizedConnection import AuthorizedConnection
from huawei_lte_api.Client import Client
url = lte_config[CONF_URL]
username = lte_config[CONF_USERNAME]
password = lte_config[CONF_PASSWORD]
connection = AuthorizedConnection(
url,
username=username,
password=password,
)
client = Client(connection)
data = RouterData(client)
data.update()
hass.data[DATA_KEY].data[url] = data
def cleanup(event):
"""Clean up resources."""
client.user.logout()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)
| PyCQA/pylint | tests/functional/u/use/use_implicit_booleaness_not_comparison.py | Python | gpl-2.0 | 5,576 | 0.009146 |
# pylint: disable=missing-docstring, missing-module-docstring, invalid-name
# pylint: disable=too-few-public-methods, line-too-long, dangerous-default-value
# pylint: disable=wrong-import-order
# https://github.com/PyCQA/pylint/issues/4774
def github_issue_4774():
# Test literals
# https://github.com/PyCQA/pylint/issues/4774
good_list = []
if not good_list:
pass
bad_list = []
if bad_list == []: # [use-implicit-booleaness-not-comparison]
pass
# Testing for empty literals
empty_tuple = ()
empty_list = []
empty_dict = {}
if empty_tuple == (): # [use-implicit-booleaness-not-comparison]
pass
if empty_list == []: # [use-implicit-booleaness-not-comparison]
pass
if empty_dict == {}: # [use-implicit-booleaness-not-comparison]
pass
if () == empty_tuple: # [use-implicit-booleaness-not-comparison]
pass
if [] == empty_list: # [use-implicit-booleaness-not-comparison]
pass
if {} == empty_dict: # [use-implicit-booleaness-not-comparison]
pass
def bad_tuple_return():
t = (1, )
return t == () # [use-implicit-booleaness-not-comparison]
def bad_list_return():
b = [1]
return b == [] # [use-implicit-booleaness-not-comparison]
def bad_dict_return():
c = {1: 1}
return c == {} # [use-implicit-booleaness-not-comparison]
assert () == empty_tuple # [use-implicit-booleaness-not-comparison]
assert [] == empty_list # [use-implicit-booleaness-not-comparison]
assert {} != empty_dict # [use-implicit-booleaness-not-comparison]
assert () < empty_tuple # [use-implicit-booleaness-not-comparison]
assert [] <= empty_list # [use-implicit-booleaness-not-comparison]
assert () > empty_tuple # [use-implicit-booleaness-not-comparison]
assert [] >= empty_list # [use-implicit-booleaness-not-comparison]
assert [] == []
assert {} != {}
assert () == ()
d = {}
if d in {}:
pass
class NoBool:
def __init__(self):
self.a = 2
class YesBool:
def __init__(self):
self.a = True
def __bool__(self):
return self.a
# Should be triggered
a = NoBool()
if [] == a: # [use-implicit-booleaness-not-comparison]
pass
a = YesBool()
if a == []:
pass
# compound test cases
e = []
f = {}
if e == [] and f == {}: # [use-implicit-booleaness-not-comparison, use-implicit-booleaness-not-comparison]
pass
named_fields = [0, "", "42", "forty two"]
empty = any(field == "" for field in named_fields)
something_else = NoBool()
empty_literals = [[], {}, ()]
is_empty = any(field == something_else for field in empty_literals)
h, i, j = 1, None, [1,2,3]
def test(k):
print(k == {})
def test_with_default(k={}):
print(k == {})
print(k == 1)
test(h)
test(i)
test(j)
test_with_default(h)
test_with_default(i)
test_with_default(j)
class A:
lst = []
@staticmethod
def test(b=1):
print(b)
return []
if A.lst == []: # [use-implicit-booleaness-not-comparison]
pass
if [] == A.lst: # [use-implicit-booleaness-not-comparison]
pass
if A.test("b") == []: # [use-implicit-booleaness-not-comparison]
pass
def test_function():
return []
if test_function() == []: # [use-implicit-booleaness-not-comparison]
pass
# pylint: disable=import-outside-toplevel, wrong-import-position, import-error
# Numpy has its own implementation of __bool__, but base class has list, that's why the comparison check is happening
import numpy
numpy_array = numpy.array([0])
if numpy_array == []: # [use-implicit-booleaness-not-comparison]
print('numpy_array')
if numpy_array != []: # [use-implicit-booleaness-not-comparison]
print('numpy_array')
if numpy_array >= (): # [use-implicit-booleaness-not-comparison]
print('b')
# pandas has its own implementations of __bool__ and is not subclass of list, dict, or tuple; that's why comparison check is not happening
import pandas as pd
pandas_df = pd.DataFrame()
if pandas_df == []:
pass
if pandas_df != ():
pass
if pandas_df <= []:
print("don't emit warning if variable can't safely be inferred")
from typing import Union
from random import random
var: Union[dict, bool, None] = {}
if random() > 0.5:
var = True
if var == {}:
pass
data = {}
if data == {}: # [use-implicit-booleaness-not-comparison]
print("This will be printed")
if data != {}: # [use-implicit-booleaness-not-comparison]
print("This will also be printed")
if data or not data:
print("This however won't be")
# literal string check
long_test = {}
if long_test == { }: # [use-implicit-booleaness-not-comparison]
pass
# Check for properties and uninferable class methods
# See https://github.com/PyCQA/pylint/issues/5646
from xyz import AnotherClassWithProperty
class ParentWithProperty:
@classmethod
@property
def parent_function(cls):
return {}
class MyClassWithProxy(ParentWithProperty):
attribute = True
@property
@classmethod
def my_property(cls):
return {}
@property
@classmethod
def my_difficult_property(cls):
if cls.attribute:
return {}
return MyClassWithProxy()
def test_func():
"""Some assertions against empty dicts."""
        my_class = MyClassWithProxy()
        assert my_class.parent_function == {} # [use-implicit-booleaness-not-comparison]
assert my_class.my_property == {} # [use-implicit-booleaness-not-comparison]
# If the return value is not always implicit boolean, don't raise
assert my_class.my_difficult_property == {}
# Uninferable does not raise
assert AnotherClassWithProperty().my_property == {}
|
mamchecker/mamchecker
|
mamchecker/conf.py
|
Python
|
gpl-3.0
| 1,452
| 0.004132
|
# -*- coding: utf-8 -*-
'''
Sphinx setting.
'''
import os.path
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
extensions = [
'mamchecker.inl',
'sphinx.ext.mathjax',
'sphinxcontrib.tikz',
'sphinxcontrib.texfigure']
# i.e. same as conf.py and with page.html containing only {{body}}
templates_path = ['.']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
source_encoding = 'utf-8'
default_role = 'math'
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
tikz_proc_suite = 'ImageMagick'
tikz_tikzlibraries = 'arrows,snakes,backgrounds,patterns,matrix,shapes,fit,calc,shadows,plotmarks'
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
    'preamble': '''\\usepackage{amsfonts}\\usepackage{amssymb}\\usepackage{amsmath}\\usepackage{siunitx}\\usepackage{tikz}'''
+ '''
\\usetikzlibrary{''' + tikz_tikzlibraries + '''}'''
}
# latex
# sphinx-build[2] -b latex -c . -D master_doc=<rst-file> -D project=<rst-file> <src-dir> <build-dir>
# sphinx-build2 -b latex -c . -D master_doc=vector -D project=vector r/b _build
# html
# sphinx-build[2] -b html -c . -D master_doc=<rst-file> -D project=<rst-file> <src-dir> <build-dir>
# sphinx-build2 -c . -D master_doc=vector -D project=vector r/b _build
|
kennyledet/Algorithm-Implementations
|
10_Harshad_Number/Python/wasi0013/HarshadNumber.py
|
Python
|
mit
| 856
| 0.026869
|
"""
Harshad Number implementation
See: http://en.wikipedia.org/wiki/Harshad_number
"""
def is_harshad(n):
    original = n
    digit_sum = 0
    while n:
        digit_sum += n % 10
        n //= 10
    return original % digit_sum == 0  # True when n is divisible by the sum of its digits
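# For example, 18 is a Harshad number because 1 + 8 = 9 and 18 % 9 == 0,
# while 19 is not because 1 + 9 = 10 and 19 % 10 == 9:
#     >>> is_harshad(18)
#     True
#     >>> is_harshad(19)
#     False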
def main():
# test contains a set of harshad numbers
test=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
12, 18, 20, 21, 24, 27, 30, 36,
40, 42, 45, 48, 50, 54, 60, 63,
70, 72, 80, 81, 84, 90, 100, 102,
108, 110, 111, 112, 114, 117, 120,
126, 132, 133, 135, 140, 144, 150,
152,153, 156, 162, 171, 180, 190,
          192, 195, 198, 200, 201]
flag=True
for i in test:
if not is_harshad(i):
flag=False
break
print("The test was", "Successful"if flag else "Unsuccessful!");
if __name__ == '__main__':
main()
|
cmjatai/cmj
|
cmj/core/migrations/0013_auto_20180516_1559.py
|
Python
|
gpl-3.0
| 1,070
| 0.000939
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-16 18:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
        ('core', '0012_auto_20180227_0858'),
]
operations = [
migrations.AlterModelOptions(
name='user',
options={'ordering': ('first_name', 'last_name'), 'permissions': (('menu_dados_auxiliares', 'Mostrar Menu Dados Auxiliares'), ('menu_tabelas_auxiliares', 'Mostrar Menu de Tabelas Auxiliares'), ('menu_contatos', 'Mostrar Menu de Cadastro de Contatos'), ('menu_grupocontatos', 'Mostrar Menu de Cadastro de Grupos de Contatos'), ('menu_processos', 'Mostrar Menu de Cadastro de Processos'), ('menu_area_trabalho', 'Mostrar Menu de Áreas de Trabalho'), ('menu_impresso_enderecamento', 'Mostrar Menu de Impressos de Endereçamento'), ('menu_relatorios', 'Mostrar Menu de Relatórios'), ('menu_administracao', 'Mostrar Menu de Administração'), ('menu_agenda', 'Mostrar Menu da Agenda de Eventos'))},
),
]
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/legends_of_runeterra/MatchApi.py
|
Python
|
mit
| 1,097
| 0.000912
|
from .. import BaseApi, NamedEndpoint
from .urls import MatchApiUrls
class MatchApi(NamedEndpoint):
"""
This class wraps the LoR-Match-V1 Api calls provided by the Riot API.
See https://developer.riotgames.com/apis#lor-match-v1 for more detailed
information
"""
def __init__(self, base_api: BaseApi):
"""
Initialize a new MatchApi which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
        super().__init__(base_api, self.__class__.__name__)
def by_puuid(self, region: str, puuid: str):
"""
Get a list of match ids by PUUID.
:returns: List[string]
"""
return self._request_endpoint(
self.by_puuid.__name__, region, MatchApiUrls.by_puuid, puuid=puuid
)
def by_id(self, region: str, match_id: str):
"""
Get match by id.
:returns: MatchDto
"""
return self._request_endpoint(
self.by_id.__name__, region, MatchApiUrls.by_id, match_id=match_id
)
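# A minimal usage sketch (the base_api construction and the "americas" routing
# value are assumptions, not defined in this module): once a configured BaseApi
# is available, matches can be listed by PUUID and then fetched by id.
#
#     match_api = MatchApi(base_api)
#     match_ids = match_api.by_puuid("americas", puuid)
#     match = match_api.by_id("americas", match_ids[0])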
|
jnadro/pybgfx
|
pybgfx/__init__.py
|
Python
|
bsd-2-clause
| 96
| 0
|
from .bgfx import *
from .bgfx_ex import *
from .bgfx_utils import *
from .bgfxdefines import *
|
mattjml/wood_cylinder_cut
|
cut.py
|
Python
|
apache-2.0
| 5,717
| 0.009271
|
import numpy as np
from math import pi, tan, cos, sin, sqrt
import sys
import argparse
render = True
try:
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
except:
render = False
parser = argparse.ArgumentParser(description=\
"Calculates cutting path around cylinder for certain angled cut.")
parser.add_argument("-r", "--radius", type=int, default=100,
help="Radius of the cylinder in mm")
parser.add_argument("-a", "--angle", type=int, default=45,
help="Angle of the cut in degrees from the cylinder axis")
parser.add_argument("-i", "--interval", type=float, default=.5,
help="Cylinder intersection interval in proportion of circumference (0.0-1.0)")
parser.add_argument('--display', dest='display', action='store_true',
help="Render cut")
parser.add_argument('--no-display', dest='display', action='store_false',
help="Do not render cut")
parser.set_defaults(display=True)
parser.add_argument("-f", "--file", type=str, default='cut.csv',
help="CSV file to write into cut mark positions (around cylinder and along)")
args = parser.parse_args()
radius = args.radius
assert radius > 15, "Radius must be greater than 15 mm."
angle = args.angle
assert 90 >= angle > 0, "Angle must be between 0 and 90 degrees."
angle = (angle * pi) / 180
interval = args.interval
assert 0.25 >= interval >= 0.005, "Interval must be <= 0.25 and >= 0.005"
render = render and args.display
filename = args.file
assert len(filename) > 0, "Filename must be at least one character long"
circumference = (int)(radius * 2 * pi)
interval = circumference * interval
cyl_length = 2 * radius / tan(angle)
cut_length = 2 * radius / sin(angle)
print("Calculating {0} degree cut of {1}mm radius cylinder. "
"Approximating at {2} mm arc intervals".format(args.angle, radius, interval))
def rotation_matrix(axis,theta):
    '''Create a rotation matrix for a theta radian
rotation around the axis given.'''
axis = axis/sqrt(np.dot(axis,axis))
a = cos(theta/2)
b,c,d = -axis*sin(theta/2)
return np.array([[a*a+b*b-c*c-d*d, 2*(b*c-a*d), 2*(b*d+a*c)],
[2*(b*c+a*d), a*a+c*c-b*b-d*d, 2*(c*d-a*b)],
[2*(b*d-a*c), 2*(c*d+a*b), a*a+d*d-b*b-c*c]])
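# Sanity check of the convention used here (the minus sign on the axis makes a
# positive theta a clockwise rotation when viewed from the +axis direction):
#     np.dot(rotation_matrix(np.array([0, 0, 1]), pi / 2), np.array([1, 0, 0]))
#     # -> approximately [0., -1., 0.]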
def vertical_plane_normal(p1,p2):
'''Compute a normal to the cutting plane'''
p3 = p1 + [0,0,1]
return np.cross(p2-p1,p3-p1)
# Approximate cylinder with parallel lines
lines_cylinder = []
for i in range(0,int(circumference/interval)):
''' Builds a cylinder intersection approximated as a set of parallel lines
each separated by an arc of length 'interval' '''
theta = (2 * pi) * (i / (circumference/interval))
rotmat = rotation_matrix(np.array([0, 1, 0]), -theta)
lines_cylinder.append(np.dot(rotmat, np.array([0, -cyl_length/2, radius])))
lines_cylinder.append(np.dot(rotmat, np.array([0, cyl_length/2, radius])))
# Create cutting plane (a line will do for now)
rotmat = rotation_matrix(np.array([0,0,1]),angle)
cutting_line_st = np.dot(rotmat, np.array([0, -cut_length/2, 0]))
cutting_line_end = np.dot(rotmat, np.array([0, cut_length/2, 0]))
# Calculate cutting plane/cylinder intersection points.
# Only computes the first 180 degrees as the other 180
# is just a mirror of it.
ixs = []
for i in range(0, len(lines_cylinder), 2):
N = np.array(vertical_plane_normal(lines_cylinder[i], lines_cylinder[i+1]))
ix = cutting_line_st + (np.dot(N, lines_cylinder[i] - cutting_line_st) /
np.dot(N, cutting_line_end - cutting_line_st)) * (cutting_line_end - cutting_line_st)
ix = [lines_cylinder[i][0], ix[1], lines_cylinder[i][2]];
ixs.append(ix)
# Flatten cylinder intersections to give cuts on a 2D plane.
# These can be applied to the real cylinder by wrapping
# this 2D plane around the cylinder. The best way to do this
# is either by printing (to correct scale) the markers and
# wrapping the 2D paper around the cylinder or drawing these
# marks on graph paper and wrapping this around the cylinder.
ixs_flat = []
for i in range(int(len(ixs)/2)):
point = [i * interval, ixs[i][1]]
ixs_flat.append(point)
for i in range(int(len(ixs)/2)):
point = [circumference/2 + (i * interval), - ixs[i][1]]
ixs_flat.append(point)
f4 = np.poly1d(np.polyfit([ix[0] for ix in ixs_flat] , [ix[1] for ix in ixs_flat], 8))
xp = np.linspace(0, circumference, 100)
if render:
# Render 3D cut
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
plt.axis('equal')
for i in range(0, len(lines_cylinder), 2):
l_st = lines_cylinder[i]
l_en = lines_cylinder[i+1]
ax.plot([l_st[0],l_en[0]], [l_st[1],l_en[1]],zs=[l_st[2],l_en[2]])
ax.plot([cutting_line_st[0], cutting_line_end[0]], [cutting_line_st[1], cutting_line_end[1]], [cutting_line_st[2],cutting_line_end[2]])
ax.scatter([ix[0] for ix in ixs], [ix[1] for ix in ixs], zs=[ix[2] for ix in ixs])
ax.set_ylabel('Cylinder Axis (mm)')
ax.set_xlabel('mm')
ax.set_zlabel('mm')
plt.show()
# Render cut marker positions
fig = plt.plot([ix[0] for ix in ixs_flat], [ix[1] for ix in ixs_flat], '.', xp, f4(xp), '-')
plt.ylim(min([ix[1] for ix in ixs_flat]), max([ix[1] for ix in ixs_flat]))
plt.xlabel('Around the Cylinder (mm)')
plt.ylabel('Along the Cylinder (mm)')
plt.title('Unwrapped cylinder cut marker positions (printed and wrapped around cylinder).')
plt.axis('equal')
plt.show()
# Write cut markers to file
print("Writing cut marker positions to {}".format(filename))
file = open(filename, 'w')
file.write("arc pos (mm), length pos (mm)")
for ix in ixs_flat:
file.write("{0[0]:.3f}, {0[1]:.3f}\n".format(ix))
file.close()
print("Finished writing to file")
|
KelSolaar/Foundations
|
foundations/globals/constants.py
|
Python
|
gpl-3.0
| 3,184
| 0.000942
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
**constants.py**
**Platform:**
Windows, Linux, Mac Os X.
**Description:**
Defines **Foundations** package default constants through the :class:`Constants` class.
**Others:**
"""
from __future__ import unicode_literals
import os
import platform
import foundations
__author__ = "Thomas Mansencal"
__copyright__ = "Copyright (C) 2008 - 2014 - Thomas Mansencal"
__license__ = "GPL V3.0 - http://www.gnu.org/licenses/"
__maintainer__ = "Thomas Mansencal"
__email__ = "thomas.mansencal@gmail.com"
__status__ = "Production"
__all__ = ["Constants"]
class Constants():
"""
Defines **Foundations** package default constants.
"""
application_name = "Foundations"
"""
:param application_name: Package Application name.
:type application_name: unicode
"""
major_version = "2"
"""
:param major_version: Package major version.
:type major_version: unicode
"""
minor_version = "1"
"""
:param minor_version: Package minor version.
:type minor_version: unicode
"""
change_version = "0"
"""
:param change_version: Package change version.
:type change_version: unicode
"""
version = ".".join((major_version, minor_version, change_version))
"""
:param version: Package version.
:type version: unicode
"""
logger = "Foundations_Logger"
"""
:param logger: Package logger name.
:type logger: unicode
"""
verbosity_level = 3
"""
:param verbosity_level: Default logging verbosity level.
:type verbosity_level: int
"""
verbosity_labels = ("Critical", "Error", "Warning", "Info", "Debug")
"""
:param verbosity_labels: Logging verbosity labels.
:type verbosity_labels: tuple
"""
logging_default_formatter = "Default"
"""
:param logging_default_formatter: Default logging formatter name.
:type logging_default_formatter: unicode
"""
logging_separators = "*" * 96
"""
:param logging_separators: Logging separators.
:type logging_separators: unicode
"""
default_codec = "utf-8"
"""
:param default_codec: Default codec.
:type default_codec: unicode
"""
codec_error = "ignore"
"""
:param codec_error: Default codec error behavior.
:type codec_error: unicode
"""
application_directory = os.sep.join(("Foundations", ".".join((major_version, minor_version))))
"""
:param application_directory: Package Application directory.
    :type application_directory: unicode
"""
if platform.system() == "Windows" or platform.system() == "Microsoft" or platform.system() == "Darwin":
provider_directory = "HDRLabs"
"""
:param provider_directory: Package provider directory.
:type provider_directory: unicode
"""
elif platform.system() == "Linux":
provider_directory = ".HDRLabs"
"""
        :param provider_directory: Package provider directory.
:type provider_directory: unicode
"""
null_object = "None"
"""
:param null_object: Default null object string.
:type null_object: unicode
"""
|
jpwbernardi/Computacao-Distribuida
|
Trabalho1/main.py
|
Python
|
gpl-3.0
| 816
| 0.011029
|
# -*- coding: utf-8 -*-
from bottle import run, get, post, view, request, redirect, route, static_file, template
import bottle
import json
import threading
import requests
import time
import sys
messages = set([])
@bottle.route('/static/<path:path>')
def server_static(path):
    return static_file(path, root='static')
@get('/chat')
@view('chat')
def chat():
name = request.query.name
return dict(msg=list(messages), name=name)
@route('/')
def index():
redirect('chat')
@post('/send')
def sendmsg():
name = request.forms.getunicode('name')
msg = request.forms.getunicode('msg')
global messages
    if name is not None and msg is not None:
messages.add((name, msg))
redirect('chat?name=' + name)
else:
redirect('chat')
run(host='localhost', port=int(sys.argv[1]))
|
8devices/IoTPy
|
IoTPy/sandbox/ledstrip.py
|
Python
|
mit
| 5,948
| 0.000841
|
from math import exp
from colorsys import hls_to_rgb
import random
import struct
import threading
from time import sleep
class Wire:
def __init__(self, board, pin):
self.board = board
self.pin = pin
def __enter__(self):
self.board.uper_io(0, self.board.encode_sfp(100, [1]))
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def trans(self, data):
self.board.uper_io(0, self.board.encode_sfp(101, [data]))
class LedStrip:
def __init__(self, wire, length):
self.wire = wire
self.n_leds = length
self.set_color(0)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
@staticmethod
def c2rgb(c):
return (c >> 16) & 0xFF, (c >> 8) & 0xFF, c & 0xFF
@staticmethod
def rgb2c(r, g, b):
return ((int(r) & 0xFF) << 16) | ((int(g) & 0xFF) << 8) | (int(b) & 0xFF)
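    # e.g. c2rgb(0xFF8000) -> (255, 128, 0) and rgb2c(255, 128, 0) -> 0xFF8000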
def set_colors(self, colors):
data = ""
for c in colors:
data += struct.pack("BBB", (c >> 16) & 0xFF, (c >> 8) & 0xFF, c & 0xFF)
self.wire.trans(data)
def set_color(self, color):
self.set_colors([color] * self.n_leds)
def set_random_nice_colors(self):
colors = [0, ] * self.n_leds
for i in xrange(31, self.n_leds):
r, g, b = hls_to_rgb(random.random(), 0.3, 1)
colors[i] = (int(r*255) << 16 | int(g*255) << 8 | int(b*255))
self.set_colors(colors)
def start_composition(self):
self.colors = [0]*self.n_leds
def finish_composition(self):
self.set_colors(self.colors)
def add_peak(self, color, pos, width):
self.colors[pos:pos+width] = [color]*width
def add_gauss(self, color, pos, sigma):
sigma = float(sigma)
r, g, b = self.c2rgb(color)
for x, col in enumerate(self.colors):
gauss = exp(-0.5*((x-pos)/sigma)**2)
r1, g1, b1 = self.c2rgb(col)
self.colors[x] = self.rgb2c(max(r1, gauss*r), max(g1, gauss*g), max(b1, gauss*b))
class Evolver(object):
def evolve(self, rate):
pass
def is_finished(self):
return True
class LinearNumberEvolver(Evolver):
def __init__(self, start, stop, duration):
self.value = start
self.d_value = float(stop-start)/duration
self.remaining_time = duration
def evolve(self, rate):
if self.is_finished():
return
self.value += self.d_value*rate
self.remaining_time -= rate
def is_finished(self):
if self.remaining_time <= 0:
return True
return False
def get_value(self):
return self.value
class SawNumberEvolver(Evolver):
def __init__(self, start, stop, period):
self.min = start # min doesn't need to be smaller than max
self.max = stop
self.period = period
self.time = 0
self.value = start
def evolve(self, rate):
self.time += rate
if self.time < 0:
self.time += self.period
if self.time > self.period:
self.time -= self.period
half_time = 2.0*self.time/self.period
delta = self.max-self.min
if self.time <= self.period*0.5:
self.value = self.min + delta*half_time
else:
self.value = self.max - delta*(half_time-1)
def is_finished(self):
return False
def get_value(self):
return self.value
class LinearHueColorEvolver(LinearNumberEvolver):
def get_value(self):
        r, g, b = hls_to_rgb(self.value, 0.3, 1.0)
return (int(r*255) << 16) | (int(g*255) << 8) | int(b*255)
class SawHueColorEvolver(SawNumberEvolver):
def get_value(self):
r, g, b = hls_to_rgb(self.value, 0.3, 1.0)
return (int(r*255) << 16) | (int(g*255) << 8) | int(b*255)
class LedEffect(object):
def apply(self, ledstrip):
pass
class GaussPeakLedEffect(LedEffect, Evolver):
def __init__(self, color, pos, sigma, duration=None):
self.color = color
self.pos = pos
self.sigma = sigma
self.remaining_time = duration or 0.0
def apply(self, ledstrip):
if self.is_finished():
return
color = self.color
pos = self.pos
sigma = self.sigma
if isinstance(self.color, Evolver):
color = self.color.get_value()
if isinstance(self.pos, Evolver):
pos = self.pos.get_value()
if isinstance(self.sigma, Evolver):
sigma = self.sigma.get_value()
ledstrip.add_gauss(color, pos, sigma)
def evolve(self, rate):
if self.is_finished():
return
if isinstance(self.color, Evolver):
self.color.evolve(rate)
if isinstance(self.pos, Evolver):
self.pos.evolve(rate)
if isinstance(self.sigma, Evolver):
self.sigma.evolve(rate)
self.remaining_time -= rate
def is_finished(self):
if self.remaining_time is not None and self.remaining_time <= 0:
return True
return False
class LedStripSynthesizer(object):
def __init__(self, ledstrip, fps):
self.led_strip = ledstrip
self.cycle = 1.0/fps
self.stop = False
self.effects = []
self._thread = threading.Thread(target=self.run)
self._thread.start()
def run(self):
while not self.stop:
self.led_strip.start_composition()
for effect in self.effects[:]:
effect.apply(self.led_strip)
effect.evolve(self.cycle)
if effect.is_finished():
self.effects.remove(effect)
self.led_strip.finish_composition()
sleep(self.cycle)
self.stop = False
def add_effect(self, effect):
self.effects.append(effect)
|
kyoren/https-github.com-h2oai-h2o-3
|
h2o-py/tests/testdir_algos/rf/pyunit_swpredsRF.py
|
Python
|
apache-2.0
| 1,272
| 0.015723
|
import sys
sys.path.insert(1, "../../../")
import h2o, tests
def swpredsRF():
# Training set has two predictor columns
# X1: 10 categorical levels, 100 observations per level; X2: Unif(0,1) noise
# Ratio of y = 1 per Level: cat01 = 1.0 (strong predictor), cat02 to cat10 = 0.5 (weak predictors)
#Log.info("Importing swpreds_1000x3.csv data...\n")
swpreds = h2o.import_file(path=tests.locate("smalldata/gbm_test/swpreds_1000x3.csv"))
    swpreds["y"] = swpreds["y"].asfactor()
#Log.info("Summary of swpreds_1000x3.csv from H2O:\n")
#swpreds.summary()
# Train H2O DRF without Noise Column
#Log.info("Distributed Random Forest with only Predictor Column")
model1 = h2o.random_forest(x=swpreds[["X1"]], y=swpreds["y"], ntrees=50, max_depth=20, nbins=500)
model1.show()
perf1 = model1.model_performance(swpreds)
print(perf1.auc())
# Train H2O DRF Model including Noise Column:
#Log.info("Distributed Random Forest including Noise Column")
model2 = h2o.random_forest(x=swpreds[["X1","X2"]], y=swpreds["y"], ntrees=50, max_depth=20, nbins=500)
model2.show()
perf2 = model2.model_performance(swpreds)
print(perf2.auc())
if __name__ == "__main__":
tests.run_test(sys.argv, swpredsRF)
|
N402/NoahsArk
|
ark/app.py
|
Python
|
mit
| 3,309
| 0.000302
|
import os
from flask import Flask
from ark.utils._time import friendly_time
from ark.master.views import master_app
from ark.account.views import account_app
from ark.goal.views import goal_app
from ark.oauth.views import oauth_app
from ark.dashboard.views import dashboard_app
from ark.goal.models import Goal
from ark.exts import (setup_babel, setup_bcrypt, setup_cache,
setup_collect,
setup_database, setup_login_manager, setup_oauth,
setup_csrf)
def create_app(name=None, config=None):
app = Flask(name or __name__)
app.config.from_object('ark.settings')
init_config(app)
if isinstance(config, dict):
app.config.update(config)
app.debug = bool(int(os.environ.get('ARK_DEBUG', False)))
if app.config.get('SENTRY_DSN'):
from raven.contrib.flask import Sentry
sentry = Sentry(app)
init_error_pages(app)
init_jinja(app)
setup_babel(app)
setup_bcrypt(app)
setup_cache(app)
setup_collect(app)
setup_database(app)
setup_login_manager(app)
setup_oauth(app)
setup_csrf(app)
app.register_blueprint(master_app)
app.register_blueprint(account_app)
app.register_blueprint(goal_app)
app.register_blueprint(oauth_app)
app.register_blueprint(dashboard_app)
return app
def init_error_pages(app):
@app.errorhandler(403)
def page_forbidden(error):
return 'Forbidden', 403
@app.errorhandler(404)
def page_not_found(error):
return 'Not Found', 404
@app.errorhandler(405)
def page_not_allow(error):
return 'Method not allow', 405
def init_jinja(app):
_jinja_filters = {
'friendly_time': friendly_time,
'goal_state': (lambda state: Goal.GOAL_STATES[state]),
}
_jinja_global = {
'site_title': 'iChaser',
'site_keyword': 'iChaser',
'site_description': 'iChaser',
}
def setup_filter(app):
for _fname, _ffunc in _jinja_filters.iteritems():
app.add_template_filter(_ffunc, _fname)
def setup_global(app):
for _fname, _var in _jinja_global.iteritems():
app.jinja_env.globals[_fname] = _var
setup_filter(app)
setup_global(app)
def init_config(app):
configs = {
'BABEL_DEFAULT_LOCALE': 'zh',
'BABEL_DEFAULT_TIMEZONE': 'Asia/Shanghai',
'SQLALCHEMY_DATABASE_URI': 'sqlite:////tmp/ark.sqlite',
'SECRET_KEY': None,
'COLLECT_STATIC_ROOT': None,
'COLLECT_STORAGE': 'flask.ext.collect.storage.file',
'SENTRY_DSN': '',
'CACHE_TYPE': '',
'CACHE_DEFAULT_TIMEOUT': '',
'CACHE_THRESHOLD': '',
'CACHE_KEY_PREFIX': 'ark_cache_',
'CACHE_MEMCACHED_SERVERS': '',
'CACHE_MEMCACHED_USERNAME': '',
'CACHE_MEMCACHED_PASSWORD': '',
'CACHE_REDIS_HOST': '',
'CACHE_REDIS_PORT': '',
'CACHE_REDIS_PASSWORD': '',
'CACHE_REDIS_DB': '',
}
load_config(app, configs)
def load_config(app, configs):
for name, default in configs.iteritems():
env = os.environ.get(name, default)
if env is None:
raise ConfigError('%s cannot be None' % name)
if not env == '':
app.config[name] = env
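# Note: every key in init_config() whose default is None (e.g. SECRET_KEY and
# COLLECT_STATIC_ROOT) must be provided via an environment variable, otherwise
# load_config() raises ConfigError; keys that resolve to an empty string are
# skipped and the value loaded from ark.settings is kept.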
class ConfigError(Exception):
pass
|
pepitogithub/PythonScripts
|
Dados.py
|
Python
|
gpl-2.0
| 1,284
| 0.043614
|
from Probabilidades import Probabilidad
from validador import *
a = Probabilidad()
a.cargarDatos("1","2","3","4","5","6")
uno = [" ------- ","| |","| # |","| |"," ------- "]
dos = [" ------- ","| # |","| |","| # |"," ------- "]
tres = [" ------- ","| # |","| # |","| # |"," ------- "]
cuatro = [" ------- ","| # # |","| |","| # # |"," ------- "]
cinco = [" ------- ","| # # |","| # |","| # # |"," ------- "]
seis = [" ------- ","| # # |","| # # |","| # # |"," ------- "]
diccio = {"1":uno,"2":dos,"3":tres,"4":cuatro,"5":cinco,"6"
|
:seis}
def dado(*repeticiones):
tiradas = 1
if (len(repeticiones) > 0):
tiradas = repeticiones[0]
else:
tiradas = 1
for i in range(0,tiradas):
        numero = a.generar()
resultado = diccio[numero]
for fila in resultado:
print fila
seguir = True
while (seguir):
print "indique la cantidad de tiradas:"
ingreso = validador.ingresar(int,validador.entre,0,20)
if(ingreso == 0):
print "KeepRollingDice4Life"
seguir = False
else:
dado(ingreso)
# print "otro?"
# seguir = validador.ingresarSINO()
|
stphivos/django-angular2-fullstack-devops
|
backend/api/migrations/0001_initial.py
|
Python
|
mit
| 955
| 0.002094
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-09 12:57
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
]
|
bishnucit/Python-Preludes
|
6.py
|
Python
|
mit
| 286
| 0.003497
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# clicking on the Check Out link.
driver = webdriver.Firefox()
driver.get("http://www.practiceselenium.com/")
driver.find_element_by_link_text("Check Out").click()
assert "Check Out" in driver.title
driver.close()
|
Ayrx/cryptography
|
tests/conftest.py
|
Python
|
bsd-3-clause
| 1,721
| 0
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import pytest
from cryptography.hazmat.backends import _available_backends
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from .utils import check_backend_support, select_backends, skip_if_empty
def pytest_report_header(config):
return "OpenSSL: {0}".format(openssl_backend.openssl_version_text())
def pytest_generate_tests(metafunc):
if "backend" in metafunc.fixturenames:
names = metafunc.config.getoption("--backend")
selected_backends = select_backends(names, _available_backends())
filtered_backends = []
required = metafunc.function.requires_backend_interface
required_interfaces = [
mark.kwargs["interface"] for mark in required
]
for backend in selected_backends:
if all(
isinstance(backend, iface) for iface in required_interfaces
):
filtered_backends.append(backend)
# If you pass an empty list to parametrize Bad Things(tm) happen
# as of pytest 2.6.4 when the test also has a parametrize decorator
skip_if_empty(filtered_backends, required_interfaces)
metafunc.parametrize("backend", filtered_backends)
@pytest.mark.trylast
def pytest_runtest_setup(item):
check_backend_support(item)
def pytest_addoption(parser):
parser.addoption(
"--backend", action="store", metavar="NAM
|
E",
help="Only run tests matching the backend NAME."
)
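# The option above is used as, for example (the backend name is an assumption
# about which backends are available in a given build):
#     pytest --backend openssl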
|
pjxiao/yum-s3-plugin
|
s3.py
|
Python
|
apache-2.0
| 9,268
| 0.006798
|
"""
Yum plugin for Amazon S3 access.
This plugin provides access to a protected Amazon S3 bucket using either boto
or Amazon's REST authentication scheme.
On CentOS this file goes into /usr/lib/yum-plugins/s3.py
You will also need two configuration files. See s3.conf and s3test.repo for
examples on how to deploy those.
"""
# Copyright 2011, Robert Mela
# Copyright 2011, Jens Braeuer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
import urllib
from yum.plugins import TYPE_CORE
from yum.yumRepo import YumRepository
from yum import config
from yum import logginglevels
import yum.Errors
def interactive_notify(msg):
if sys.stdout.isatty():
print msg
def createUrllibGrabber():
"""
Fetch files from AWS without boto. This code has not been tested on RHEL 6 as EPEL ships with boto 2.x.
"""
import os
import sys
import urllib2
import time, sha, hmac, base64
class UrllibGrabber:
@classmethod
def s3sign(cls,request, secret_key, key_id, date=None):
date=time.strftime("%a, %d %b %Y %H:%M:%S +0000", date or time.gmtime() )
host = request.get_host()
bucket = host.split('.')[0]
request.add_header('Date', date)
resource = "/%s%s" % ( bucket, request.get_selector() )
sigstring = """%(method)s\n\n\n%(date)s\n%(canon_amzn_resource)s""" % {
'method':request.get_method(),
#'content_md5':'',
#'content_type':'', # only for PUT
'date':request.headers.get('Date'),
#'canon_amzn_headers':'',
'canon_amzn_resource':resource }
digest = hmac.new(secret_key, sigstring, sha ).digest()
digest = base64.b64encode(digest)
request.add_header('Authorization', "AWS %s:%s" % ( key_id, digest ))
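        # For illustration (the bucket and key names are hypothetical): a GET of
        # http://mybucket.s3.amazonaws.com/repodata/repomd.xml is signed over the
        # string "GET\n\n\n<date>\n/mybucket/repodata/repomd.xml", i.e. the AWS
        # signature v2 string-to-sign built above.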
def __init__(self, awsAccessKey, awsSecretKey, baseurl ):
try: baseurl = baseurl[0]
except: pass
self.baseurl = baseurl
self.awsAccessKey = awsAccessKey
self.awsSecretKey = awsSecretKey
def _request(self,url):
req = urllib2.Request("%s%s" % (self.baseurl, url))
UrllibGrabber.s3sign(req, self.awsSecretKey, self.awsAccessKey )
return req
def urlgrab(self, url, filename=None, **kwargs):
"""urlgrab(url) copy the file to the local filesystem"""
self.verbose_logger.log(logginglevels.DEBUG_4, "UrlLibGrabber urlgrab url=%s filename=%s" % ( url, filename ))
req = self._request(url)
if not filename:
filename = req.get_selector()
if filename[0] == '/': filename = filename[1:]
out = open(filename, 'w+')
resp = urllib2.urlopen(req)
buff = resp.read(8192)
while buff:
out.write(buff)
buff = resp.read(8192)
return filename
# zzz - does this return a value or something?
def urlopen(self, url, **kwargs):
"""urlopen(url) open the remote file and return a file object"""
return urllib2.urlopen( self._request(url) )
def urlread(self, url, limit=None, **kwargs):
"""urlread(url) return the contents of the file as a string"""
return urllib2.urlopen( self._request(url) ).read()
return UrllibGrabber
def createBotoGrabber():
import boto
from urlparse import urlparse
import sys
import re
from urlgrabber.grabber import URLGrabber
class BotoGrabber(URLGrabber):
logger = logging.getLogger("yum.verbose.main")
def __init__(self, awsAccessKey, awsSecretKey, baseurl):
self.logger.debug("BotoGrabber init BASE_URL=%s" % baseurl)
URLGrabber.__init__(self)
self._handle_baseurl(baseurl)
self._handle_s3(awsAccessKey, awsSecretKey)
self._dump_attributes()
interactive_notify("%s - %s" % (self.bucket_name, self.key_prefix))
def _handle_baseurl(self, baseurl):
if type(baseurl) == list:
baseurl = baseurl[0]
            # self.baseurl[1] is self.baseurl.netloc; self.baseurl[2] is self.baseurl.path
# See http://docs.python.org/library/urlparse.html
self.baseurl = urlparse(baseurl)
self.bucket_name = re.match('(.*)\.s3.*\.amazonaws\.com', self.baseurl[1]).group(1)
self.key_prefix = self.baseurl[2][1:]
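        # For illustration (bucket and path are hypothetical): a baseurl of
        # http://mybucket.s3-us-west-2.amazonaws.com/centos/6/ yields
        # bucket_name == 'mybucket' and key_prefix == 'centos/6/'.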
def _handle_s3(self, awsAccessKey, awsSecretKey):
self.s3 = boto.connect_s3(awsAccessKey, awsSecretKey)
def _dump_attributes(self):
self.logger.debug("baseurl: %s" % str(self.baseurl))
self.logger.debug("bucket: %s" % self.bucket_name)
self.logger.debug("key_prefix: %s" % self.key_prefix)
def _key_name(self,url):
self.logger.debug("_key_name url=%s, key_prefix=%s" % (url, self.key_prefix))
if not url.startswith("http://"):
key = "%s/%s" % (self.key_prefix, url)
else:
key = urlparse(url)[2]
self.logger.debug("_key_name(%s) -> %s" % (url, key))
return key
def _key(self, key_name):
self.logger.debug("_key(%s)" % key_name)
bucket = self.s3.get_bucket(self.bucket_name, validate=False)
return bucket.get_key(key_name)
def urlgrab(self, url, filename=None, **kwargs):
"""urlgrab(url) copy the file to the local filesystem"""
self.logger.debug("urlgrab(url='%s',filename='%s')" % (url, filename))
key_name = self._key_name(url)
key = self._key(key_name)
if not key:
raise Exception("Can not get key for key=%s" % key_name )
if not filename:
filename = key.key
key.get_contents_to_filename(filename)
return filename
def urlopen(self, url, **kwargs):
"""urlopen(url) open the remote file and return a file object"""
self.logger.debug("urlopen(%s)" % url)
return self._key(url)
def urlread(self, url, limit=None, **kwargs):
"""urlread(url) return the contents of the file as a string"""
self.logger.debug("urlread(%s)" % url)
return self._key(url).read()
return BotoGrabber
def createGrabber():
logger = logging.getLogger("yum.verbose.main")
try:
try:
grabber = createBotoGrabber()
logger.debug("Using BotoGrabber")
except:
grabber = createUrllibGrabber()
logger.debug("Using UrllibGrabber")
finally:
return grabber
AmazonS3Grabber = createGrabber()
class AmazonS3Repo(YumRepository):
"""
Repository object for Amazon S3.
"""
def __init__(self, repoid):
YumRepository.__init__(self, repoid)
self.enable()
self.grabber = None
def setupGrab(self):
YumRepository.setupGrab(self)
self.grabber = AmazonS3Grabber(self.key_id, self.secret_key )
def _getgrabfunc(self): raise Exception("get grabfunc!")
def _getgrab(self):
if not self.grabber:
self.grabber = AmazonS3Grabber(self.key_id, self.secret_key, baseurl=self.baseurl )
return self.grabber
grabfunc = prop
|
persandstrom/home-assistant
|
tests/components/binary_sensor/test_threshold.py
|
Python
|
apache-2.0
| 13,704
| 0
|
"""The test for the threshold sensor platform."""
import unittest
from homeassistant.setup import setup_component
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS)
from tests.common import get_test_home_assistant
class TestThresholdSensor(unittest.TestCase):
"""Test the threshold sensor."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_sensor_upper(self):
"""Test if source is above threshold."""
config = {
'binary_sensor': {
'platform': 'threshold',
'upper': '15',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('above', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('upper', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 14)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 15)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
def test_sensor_lower(self):
"""Test if source is below threshold."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '15',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('lower', state.attributes.get('type'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 14)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
def test_sensor_hysteresis(self):
"""Test if source is above threshold using hysteresis."""
config = {
'binary_sensor': {
'platform': 'threshold',
'upper': '15',
'hysteresis': '2.5',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 20)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(2.5, state.attributes.get('hysteresis'))
self.assertEqual('upper', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 13)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 12)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 17)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 18)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
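    # With upper=15 and hysteresis=2.5 the sensor switches on above 17.5 and
    # off below 12.5, which is exactly what the 20/13/12/17/18 sequence above
    # exercises.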
def test_sensor_in_range_no_hysteresis(self):
"""Test if source is within the range."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '10',
'upper': '20',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('in_range', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('range', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 9)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('below', state.attributes.get('position'))
assert state.state == 'off'
        self.hass.states.set('sensor.test_monitored', 21)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
assert state.state == 'off'
def test_sensor_in_range_with_hysteresis(self):
"""Test if source is within the range."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '10',
'upper': '20',
'hysteresis': '2',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('in_range', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(float(config['binary_sensor']['hysteresis']),
state.attributes.get('hysteresis'))
self.assertEqual('range', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 8)
self.hass
|
NuclearTalent/NuclearStructure
|
doc/Programs/cython_examples/matvec/setup.py
|
Python
|
cc0-1.0
| 119
| 0
|
from distutils.core import setup
from Cython.Build import cythonize
setup(
ext_modules=cythonize("matvec.pyx"),
)
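# The extension is typically built in place with, e.g.:
#     python setup.py build_ext --inplace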
| |
Onager/plaso
|
plaso/parsers/interface.py
|
Python
|
apache-2.0
| 8,018
| 0.008855
|
# -*- coding: utf-8 -*-
"""The parsers and plugins interface classes."""
import abc
import os
from plaso.lib import errors
class BaseFileEntryFilter(object):
"""File entry filter interface."""
# pylint: disable=redundant-returns-doc
@abc.abstractmethod
def Match(self, file_entry):
"""Determines if a file entry matches the filter.
Args:
file_entry (dfvfs.FileEntry): a file entry.
Returns:
bool: True if the file entry matches the filter.
"""
class FileNameFileEntryFilter(BaseFileEntryFilter):
"""File name file entry filter."""
def __init__(self, filename):
"""Initializes a file entry filter.
Args:
filename (str): name of the file.
"""
super(FileNameFileEntryFilter, self).__init__()
self._filename = filename.lower()
def Match(self, file_entry):
"""Determines if a file entry matches the filter.
Args:
file_entry (dfvfs.FileEntry): a file entry.
Returns:
bool: True if the file entry matches the filter.
"""
if not file_entry:
return False
filename = file_entry.name.lower()
return filename == self._filename
class BaseParser(object):
"""The parser interface."""
# The name of the parser. This is the name that is used in the registration
  # and used for parser/plugin selection, so this needs to be concise and unique
# for all plugins/parsers, such as 'Chrome', 'Safari' or 'UserAssist'.
NAME = 'base_parser'
# Data format supported by the parser plugin. This information is used by
  # the parser manager to generate parser and plugin information.
DATA_FORMAT = ''
# List of filters that should match for the parser to be applied.
FILTERS = frozenset()
# Every derived parser class that implements plugins should define
# its own _plugin_classes dict:
# _plugin_classes = {}
# We deliberately don't define it here to make sure the plugins of
# different parser classes don't end up in the same dict.
_plugin_classes = None
def __init__(self):
"""Initializes a parser.
By default all plugins will be enabled. To only enable specific plugins
use the EnablePlugins method and pass it a list of strings containing
the names of the plugins to enable.
The default plugin, named "{self.NAME:s}_default", if it exists,
is always enabled and cannot be disabled.
"""
super(BaseParser, self).__init__()
self._default_plugin = None
self._plugins = None
self.EnablePlugins([])
@classmethod
def DeregisterPlugin(cls, plugin_class):
"""Deregisters a plugin class.
The plugin classes are identified based on their lower case name.
Args:
plugin_class (type): class of the plugin.
Raises:
KeyError: if plugin class is not set for the corresponding name.
"""
plugin_name = plugin_class.NAME.lower()
if plugin_name not in cls._plugin_classes:
raise KeyError(
'Plugin class not set for name: {0:s}.'.format(
plugin_class.NAME))
del cls._plugin_classes[plugin_name]
def EnablePlugins(self, plugin_includes):
"""Enables parser plugins.
Args:
plugin_includes (list[str]): names of the plugins to enable, where None
or an empty list represents all plugins. Note the default plugin, if
it exists, is always enabled and cannot be disabled.
"""
self._plugins = []
if not self._plugin_classes:
return
default_plugin_name = '{0:s}_default'.format(self.NAME)
for plugin_name, plugin_class in self._plugin_classes.items():
if plugin_name == default_plugin_name:
self._default_plugin = plugin_class()
continue
if plugin_includes and plugin_name not in plugin_includes:
continue
plugin_object = plugin_class()
self._plugins.append(plugin_object)
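  # A minimal usage sketch (the parser and plugin names are hypothetical): for
  # a parser whose NAME is 'myparser' and which defines _plugin_classes,
  #
  #     parser = MyParser()            # every registered plugin enabled
  #     parser.EnablePlugins(['foo'])  # only 'foo' plus 'myparser_default'
  #
  # since the '<NAME>_default' plugin, when present, is always kept enabled.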
# TODO: move this to a filter.
# pylint: disable=redundant-returns-doc
@classmethod
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
FormatSpecification: a format specification or None if not available.
"""
return
@classmethod
def GetPluginObjectByName(cls, plugin_name):
"""Retrieves a specific plugin object by its name.
Args:
plugin_name (str): name of the plugin.
Returns:
BasePlugin: a plugin object or None if not available.
"""
plugin_class = cls._plugin_classes.get(plugin_name, None)
if plugin_class:
return plugin_class()
return None
@classmethod
def GetPlugins(cls):
"""Retrieves the registered plugins.
Yields:
tuple[str, type]: name and class of the plugin.
"""
for plugin_name, plugin_class in cls._plugin_classes.items():
yield plugin_name, plugin_class
@classmethod
def RegisterPlugin(cls, plugin_class):
"""Registers a plugin class.
The plugin classes are identified based on their lower case name.
Args:
plugin_class (type): class of the plugin.
Raises:
KeyError: if plugin class is already set for the corresponding name.
"""
plugin_name = plugin_class.NAME.lower()
if plugin_name in cls._plugin_classes:
raise KeyError((
'Plugin class already set for name: {0:s}.').format(
plugin_class.NAME))
cls._plugin_classes[plugin_name] = plugin_class
@classmethod
def RegisterPlugins(cls, plugin_classes):
"""Registers plugin classes.
Args:
plugin_classes (list[type]): classes of plugins.
Raises:
KeyError: if plugin class is already set for the corresponding name.
"""
for plugin_class in plugin_classes:
cls.RegisterPlugin(plugin_class)
@classmethod
def SupportsPlugins(cls):
"""Determines if a parser supports plugins.
Returns:
bool: True if the parser supports plugins.
"""
return cls._plugin_classes is not None
class FileEntryParser(BaseParser):
"""The file entry parser interface."""
def Parse(self, parser_mediator):
"""Parsers the file entry and extracts event objects.
Args:
parser_mediator (ParserMediator): a parser mediator.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
file_entry = parser_mediator.GetFileEntry()
if not file_entry:
raise errors.UnableToParseFile('Invalid file entry')
parser_mediator.AppendToParserChain(self)
try:
self.ParseFileEntry(parser_mediator, file_entry)
finally:
parser_mediator.PopFromParserChain()
@abc.abstractmethod
def ParseFileEntry(self, parser_mediator, file_entry):
"""Parses a file entry.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_entry (dfvfs.FileEntry): a file entry to parse.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
class FileObjectParser(BaseParser):
"""The file-like object parser interface."""
# The initial file offset. Set this value to None if no initial
# file offset seek needs to be performed.
_INITIAL_FILE_OFFSET = 0
def Parse(self, parser_mediator, file_object):
"""Parses a single file-like object.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_object (dvfvs.FileIO): a file-like object to parse.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
if not file_object:
raise errors.UnableToParseFile('Invalid file object')
if self._INITIAL_FILE_OFFSET is not None:
file_object.seek(self._INITIAL_FILE_OFFSET, os.SEEK_SET)
parser_mediator.AppendToParserChain(self)
try:
self.ParseFileObject(parser_mediator, file_object)
finally:
parser_mediator.PopFromParserChain()
@abc.abstractmethod
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a file-like object.
Args:
parser_mediator (ParserMediator): a parser mediator.
file_object (dvfvs.FileIO): a file-like object to parse.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
|
mozilla-iam/cis
|
python-modules/cis_change_service/cis_change_service/__init__.py
|
Python
|
mpl-2.0
| 349
| 0
|
# -*- coding: utf-8 -*-
"""Flask application for publishing changes."""
__version__ = "0.0.1
|
"
from cis_change_service import api
from cis_change_service import common
from cis_change_service import exceptions
from cis_change_service import idp
from cis_change_service import profile
__all__ = ["api", "common", "exceptions", "idp", "profile", "__version__"]
|
indera/olass-client
|
olass/run.py
|
Python
|
mit
| 1,316
| 0
|
#!/usr/bin/env python
"""
Goal: Implement the application entry point.
@authors:
Andrei Sura <sura.andrei@gmail.com>
"""
import argparse
from olass.olass_client import OlassClient
from olass.version import __version__
DEFAULT_SETTINGS_FILE = 'config/settings.py'
def main():
""" Read args """
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version",
default=False,
action='store_true',
help="Show the version number")
parser.add_argument("-c", "--config",
default=DEFAULT_SETTINGS_FILE,
help="Application config file")
parser.add_argument('--interactive',
default=True,
help="When `true` ask for c
|
onfirmation")
parser.add_argument('--rows',
default=100,
help="Number of rows/batch sent to the server")
    args = parser.parse_args()
if args.version:
import sys
print("olass, version {}".format(__version__))
sys.exit()
app = OlassClient(config_file=args.config,
interactive=args.interactive,
rows_per_batch=args.rows)
app.run()
if __name__ == "__main__":
main()
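# A typical invocation (the settings path is whatever deployment-specific
# config file is in use; config/settings.py is the default above):
#     python run.py -c config/settings.py --rows 500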
|
ziposoft/godiva
|
src/zs/view_dt.py
|
Python
|
mit
| 582
| 0.015464
|
import django_tables2 as tables
from django_tables2 import RequestConfig
from django_tables2.utils import A # alias for Accessor
from django.shortcuts import render
import inspect
class DtTemplate(tables.Table):
#name_first = tables.Column(verbose_name="First Name")
#name_last = tables.LinkColumn('track:runner',args=[A('id')])
#gender = tables.Column(verbose_name="Gender")
    #result_count=tables.Column( accessor='result_count',orderable=False,verbose_name="Number of results")
class Meta:
#model = Runner
attrs = {"class": "paleblue"}
|
ernw/dizzy
|
dizzy/tests/test_field.py
|
Python
|
bsd-3-clause
| 3,596
| 0.003615
|
# test_field.py
#
# Copyright 2017 Daniel Mende <mail@c0decafe.de>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from unittest import TestCase, main
from dizzy.tests import first
from dizzy.objects.field import Field
from dizzy.value import Value
class TestField(TestCase):
def test_init(self):
f = Field("test", b"\x01\x23", slice(10, 12), "std")
self.assertEqual(f.name, "test")
self.assertEqual(f.size, slice(10, 12, 1))
self.assertEqual(f.default, Value(b"\x01\x23", 10))
self.assertEqual(f.fuzz, "std")
def test_add_aligned(self):
chars1 = b"This is a test "
chars2 = b"of adding adding aligned"
chars3 = b" values."
f1 = Field("test1", chars1)
f2 = Field("test2", chars2)
f3 = Field("test3", chars3)
v1 = first(f1) + first(f2) + first(f3)
self.assertTrue(isinstance(v1, Value))
self.assertEqual(v1.byte, chars1 + chars2 + chars3)
self.assertEqual(v1.size // 8, len(chars1) + len(chars2) + len(chars3))
def test_add_unaligned(self):
pass
def test_iter(self):
expected = [Value(b'\x01#', 10), Value(b'\x00\x00', 10), Value(b'\x00\x01', 10), Value(b'\x00\x02', 10),
Value(b'\x00\x03', 10), Value(b'\x00\x04', 10), Value(b'\x03\xfb', 10), Value(b'\x03\xfc', 10),
Value(b'\x03\xfd', 10), Value(b'\x03\xfe', 10), Value(b'\x03\xff', 10), Value(b'\x01\xfc', 10),
Value(b'\x01\xfd', 10), Value(b'\x01\xfe', 10), Value(b'\x01\xff', 10), Value(b'\x01\x00', 10),
Value(b'\x02\x00', 10), Value(b'\x03\x00', 10), Value(b'\x04\x00', 10)]
f = Field("test", b"\x01\x23", 10, "std")
self.assertEqual([i for i in f], expected)
def test_size(self):
f = Field("test", b"\x01\x23", 10, "std")
self.assertEqual(f.length(), 19)
self.assertEqual(len(list(f)), f.length())
if __name__ == '__main__':
main()
|
jfillmore/Omega-API-Engine
|
clients/python/omega/dbg.py
|
Python
|
mit
| 8,885
| 0.008216
|
#!/usr/bin/env python
# omega - python client
# https://github.com/jfillmore/Omega-API-Engine
#
# Copyright 2011, Jonathon Fillmore
# Licensed under the MIT license. See LICENSE file.
# http://www.opensource.org/licenses/mit-license.php
"""Uses python introspection to provide PHP-like "var_dump" functionality for debugging objects."""
import sys
import time
import types
import inspect
dark_colors = {
'str': '0;37',
'unicode': '0;37',
'bool': '1;36',
'int': '0;32',
'float': '1;32',
'NoneType': '0;36',
'object': '0;36',
'instance': '0;36',
'module': '0;36',
'classobj': '0;36',
'builtin_function_or_method': '0;36',
'ArgSpec': '0:36:40',
'list': ['1;37', '1;33', '0;33', '1;31', '0;31'],
'tuple': ['1;37', '1;33', '0;33', '1;31', '0;31'],
'dict': ['1;37', '1;33', '0;33', '1;31', '0;31'],
'bullet': '1;30',
'seperator': '1;30'
}
def get_obj_info(obj, include_private = False):
obj_info = {
'type': type(obj).__name__,
'callable': callable(obj),
'value': unicode(obj),
'repr': repr(obj),
'description': unicode(getattr(obj, '__doc__', '')).strip()
}
# take a look at what it contains and build up description of what we've got
if obj_info['type'] == 'function':
obj_info['arg_spec'] = inspect.getargspec(obj)
elif not obj_info['type'] in ('str', 'int', 'float', 'bool', 'NoneType', 'unicode', 'ArgSpec'):
for key in dir(obj):
if key.startswith('__') and not include_private:
continue
item = getattr(obj, key)
if inspect.ismethod(item):
if not 'methods' in obj_info:
obj_info['methods'] = {}
obj_info['methods'][key] = {
'description': unicode(item.__doc__)[0:64].strip(),
'arg_spec': inspect.getargspec(item)
}
elif inspect.ismodule(item):
if not 'modules' in obj_info:
obj_info['modules'] = {}
obj_info['modules'][key] = unicode(item.__doc__)[0:64].strip()
elif inspect.isclass(item):
if not 'classes' in obj_info:
obj_info['classes'] = {}
obj_info['classes'][key] = unicode(item.__doc__)[0:64].strip()
else:
if not 'properties' in obj_info:
obj_info['properties'] = {}
obj_info['properties'][key] = obj2str(item, short_form = True)
return obj_info
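# For a simple value the result looks roughly like (the description field holds
# the built-in docstring of the type):
#     get_obj_info(42)
#     # -> {'type': 'int', 'callable': False, 'value': u'42', 'repr': '42',
#     #     'description': ...}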
def print_tb():
import traceback
tb = traceback.extract_stack()
#tb.pop() # no need to show the last item, which is the line of code executing traceback.extract_stack()
print '\n'.join([
"\tTraceback (most recent call on bottom):",
'\n'.join(['\t\t%s:%i, method "%s"\n\t\t\tLine: %s' % t for t in tb])
])
def obj2str(obj, depth = 0, color = True, indent_char = ' ', indent_size = 4, inline = True, short_form = False):
"""Returns a formatted string, optionally with color coding"""
def shell_color(obj, obj_color):
if color:
return '\033[%sm%s\033[0;0m' % (obj_color, unicode(obj))
else:
return unicode(obj)
def rdump(obj, depth = 0, indent_size = 4, inline = False, short_form = False):
if short_form:
return unicode(obj)[0:80 - (depth * indent_size)]
obj_info = get_obj_info(obj)
# indent ourselves
dump = depth * (indent_size * indent_char)
# see what we've got and recurse as needed
if obj_info['type'] == 'list':
if not len(obj):
dump += shell_color(' (empty)', dark_colors['object']) + '\n'
else:
skip_next_indent = True
for i in range(0, len(obj)):
item = obj[i]
item_info = get_obj_info(item)
                    # handle any indentation we may need to do
if skip_next_indent:
skip_next_indent = False
else:
dump += depth * (indent_size * indent_char)
# add in the key, cycling through the available colors based on depth
dump += shell_color(i, dark_colors[obj_info['type']][(depth) % (len(dark_colors[obj_info['type']]))])
# format it depending on whether we've nested list with any empty items
if item_info['type'] in ('dict', 'tuple', 'list'):
if not len(item):
dump += rdump(item, 0, indent_size, True)
else:
dump += '\n' + rdump(item, depth + 1, indent_size, True)
else:
dump += rdump(item, 1, 1);
elif obj_info['type'] == 'dict':
if not len(obj):
dump += shell_color(' (empty)', dark_colors['object'])
else:
skip_next_indent = True
for key in obj:
item = obj[key]
item_info = get_obj_info(item)
                    # handle any indentation we may need to do
if skip_next_indent:
skip_next_indent = False
else:
dump += depth * (indent_size * indent_char)
# add in the key, cycling through the available colors based on depth
dump += shell_color(key, dark_colors[obj_info['type']][(depth) % (len(dark_colors[obj_info['type']]))])
# add in a bullet
dump += shell_color(':', dark_colors['bullet'])
# format it depending on whether we've nested list with any empty items
if item_info['type'] in ('dict', 'tuple', 'list'):
if not len(item):
dump += rdump(item, 0, indent_size, True)
else:
dump += '\n' + rdump(item, depth + 1, indent_size, True)
if item_info['type'] == 'tuple':
dump += '\n'
else:
dump += rdump(item, 1, 1);
elif obj_info['type'] == 'tuple':
if not len(obj):
dump += shell_color(' (empty)', dark_colors['object'])
else:
dump += shell_color('(', dark_colors['bullet'])
dump += ', '.join([unicode(item)[0:32] for item in obj if item != ()])
dump += shell_color(')', dark_colors['bullet'])
elif obj_info['type'] == 'str' or obj_info['type'] == 'unicode':
dump += shell_color(obj, dark_colors[obj_info['type']])
elif obj_info['type'] == 'bool':
dump += shell_color(obj, dark_colors[obj_info['type']])
elif obj_info['type'] == 'NoneType':
dump += shell_color('(none/null)', dark_colors[obj_info['type']])
elif obj_info['type'] == 'int':
dump += shell_color(obj, dark_colors[obj_info['type']])
elif obj_info['type'] == 'float':
dump += shell_color(obj, dark_colors[obj_info['type']])
elif obj_info['type'] == 'object':
dump += shell_color('(object)', dark_colors[obj_info['type']])
elif obj_info['type'] == 'instance':
dump += rdump(obj_info, depth)
elif obj_info['type'] == 'module':
dump += rdump(obj_info, depth)
elif obj_info['type'] == 'function':
dump += rdump(obj_info, depth)
elif obj_info['type'] == 'classobj':
dump += rdump(obj_info, depth)
elif obj_info['type'] == 'builtin_function_or_method':
dump += rdump(obj_info, depth)
elif obj_info['type'] == 'ArgSpec':
dump += '\n' + rdump({
'args': obj.args,
'varargs': obj.varargs,
'keywords': obj.keywords,
'defaults': obj.defaults,
}, depth + 1, inline = True)
else:
dum
|
CornellProjects/hlthpal
|
web/project/main/permissions.py
|
Python
|
apache-2.0
| 624
| 0.008013
|
from rest_framework import permissions
from rest_framework.permissions import BasePermission
class IsAuthenticatedOrCreate(permissions.IsAuthenticated):
    def has_permission(self, request, view):
if request.method == 'POST':
return True
return super(IsAuthenticatedOrCreate, self).has_permission(request, view)
class IsOwner(BasePermission):
message = "You must be the owner of this object."
def has_object_permission(self, request, view, obj):
my_safe_methods = []
if request.method in my_safe_methods:
return True
return obj.owner == request.user
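# --- Hedged usage sketch (not part of the original module) -------------------
# How these classes are typically wired into a DRF view; `Entry` and
# `EntrySerializer` are hypothetical names used only for illustration.
#
#   from rest_framework import generics
#
#   class EntryDetail(generics.RetrieveUpdateAPIView):
#       queryset = Entry.objects.all()
#       serializer_class = EntrySerializer
#       # IsOwner is consulted per object, after authentication, so the pair
#       # reads as "authenticated (or POST) AND owner of the object".
#       permission_classes = [IsAuthenticatedOrCreate, IsOwner]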
|
atilag/qiskit-sdk-py
|
qiskit/qasm/_node/_gatebody.py
|
Python
|
apache-2.0
| 1,570
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
Node for an OPENQASM custom gate body.
"""
from ._node import Node
class GateBody(Node):
    """Node for an OPENQASM custom gate body.
children is a list of gate operation nodes.
These are one of barrier, custom_unitary, U, or CX.
"""
def __init__(self, children):
"""Create the gatebody node."""
Node.__init__(self, 'gate_body', children, None)
def qasm(self, prec=15):
"""Return the corresponding OPENQASM string."""
string = ""
for children in self.children:
string += " " + children.qasm(prec) + "\n"
return string
def calls(self):
"""Return a list of custom gate names in this gate body."""
lst = []
for children in self.children:
if children.type == "custom_unitary":
lst.append(children.name)
return lst
|
zejn/prometapi
|
prometapi/sos112/management/commands/update_sos112.py
|
Python
|
agpl-3.0
| 707
| 0.008487
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Update SPIN SOS112 feed.'
def handle(self, *args, **options):
from prometapi.sos112.models import SOS112, fetch_sos112, parse_sos112
import simplejson
timestamp, data = fetch_sos112()
try:
json_data = parse_sos112(timestamp, data)
except Exception as e:
print(e)
json_data = ''
obj = SOS112(
timestamp=timestamp,
original_data=data,
json_data=simplejson.dumps(json_data))
obj.save()
|
gunan/tensorflow
|
tensorflow/python/framework/meta_graph_test.py
|
Python
|
apache-2.0
| 43,142
| 0.007
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for tensorflow.python.framework.meta_graph.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os.path
import random
import shutil
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import error_interpolation
from tensorflow.python.framework import function
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.training import queue_runner_impl
# pylint: disable=invalid-name
def _TestDir(test_name):
test_dir = os.path.join(test.get_temp_dir(), test_name)
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
gfile.MakeDirs(test_dir)
return test_dir
# pylint: enable=invalid-name
class SimpleMetaGraphTest(test.TestCase):
@test_util.run_deprecated_v1
def testNoVariables(self):
test_dir = _TestDir("no_variables")
filename = os.path.join(test_dir, "metafile")
input_feed_value = -10 # Arbitrary input value for feed_dict.
orig_graph = ops.Graph()
with self.session(graph=orig_graph) as sess:
      # Create a minimal graph with zero variables.
      input_tensor = array_ops.placeholder(
          dtypes.float32, shape=[], name="input")
offset = constant_op.constant(42, dtype=dtypes.float32, name="offset")
output_tensor = math_ops.add(input_tensor, offset, name="add_offset")
# Add input and output tensors to graph collections.
ops.add_to_collection("input_tensor", input_tensor)
ops.add_to_collection("output_tensor", output_tensor)
output_value = sess.run(output_tensor, {input_tensor: input_feed_value})
self.assertEqual(output_value, 32)
# Generates MetaGraphDef.
meta_graph_def, var_list = meta_graph.export_scoped_meta_graph(
filename=filename,
graph_def=ops.get_default_graph().as_graph_def(add_shapes=True),
collection_list=["input_tensor", "output_tensor"],
saver_def=None)
self.assertTrue(meta_graph_def.HasField("meta_info_def"))
self.assertNotEqual(meta_graph_def.meta_info_def.tensorflow_version, "")
self.assertNotEqual(meta_graph_def.meta_info_def.tensorflow_git_version,
"")
self.assertEqual({}, var_list)
# Create a clean graph and import the MetaGraphDef nodes.
new_graph = ops.Graph()
with self.session(graph=new_graph) as sess:
      # Import the previously exported meta graph.
meta_graph.import_scoped_meta_graph(filename)
# Re-exports the current graph state for comparison to the original.
new_meta_graph_def, _ = meta_graph.export_scoped_meta_graph(filename +
"_new")
test_util.assert_meta_graph_protos_equal(self, meta_graph_def,
new_meta_graph_def)
# Ensures that we can still get a reference to our graph collections.
new_input_tensor = ops.get_collection("input_tensor")[0]
new_output_tensor = ops.get_collection("output_tensor")[0]
# Verifies that the new graph computes the same result as the original.
new_output_value = sess.run(new_output_tensor,
{new_input_tensor: input_feed_value})
self.assertEqual(new_output_value, output_value)
@test_util.run_deprecated_v1
def testStrippedOpListNestedFunctions(self):
with self.cached_session():
# Square two levels deep
@function.Defun(dtypes.int32)
def f0(x):
return math_ops.square(x)
@function.Defun(dtypes.int32)
def f1(x):
return f0(x)
# At this point we've defined two functions but haven't called them, so
# there should be no used ops.
op_list = meta_graph.stripped_op_list_for_graph(ops.get_default_graph()
.as_graph_def())
self.assertEqual(len(op_list.op), 0)
# If we call the function on a constant, there should be two ops
_ = f1(constant_op.constant(7))
op_list = meta_graph.stripped_op_list_for_graph(ops.get_default_graph()
.as_graph_def())
self.assertEqual(["Const", "Square"], [op.name for op in op_list.op])
def testStrippedOpListRecursiveFunctions(self):
# The function module doesn't support recursive functions, so we build a
# recursive function situation by ourselves: A calls B calls A and Const.
graph = graph_pb2.GraphDef()
a = graph.library.function.add()
b = graph.library.function.add()
a.signature.name = "A"
b.signature.name = "B"
a.node_def.add().op = "B"
b.node_def.add().op = "Const"
b.node_def.add().op = "A"
# Use A in the graph
graph.node.add().op = "A"
# The stripped op list should contain just Const.
op_list = meta_graph.stripped_op_list_for_graph(graph)
self.assertEqual(["Const"], [op.name for op in op_list.op])
@test_util.run_deprecated_v1
def testDefaultAttrStripping(self):
"""Verifies that default attributes are stripped from a graph def."""
# Complex Op has 2 attributes with defaults:
# o "T" : float32.
# o "Tout" : complex64.
# When inputs to the Complex Op are float32 instances, "T" maps to float32
# and "Tout" maps to complex64. Since these attr values map to their
# defaults, they must be stripped unless stripping of default attrs is
# disabled.
with self.cached_session():
real_num = constant_op.constant(1.0, dtype=dtypes.float32, name="real")
imag_num = constant_op.constant(2.0, dtype=dtypes.float32, name="imag")
math_ops.complex(real_num, imag_num, name="complex")
# strip_default_attrs is enabled.
meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
graph_def=ops.get_default_graph().as_graph_def(),
strip_default_attrs=True)
node_def = test_util.get_node_def_from_graph("complex",
meta_graph_def.graph_def)
self.assertNotIn("T", node_def.attr)
self.assertNotIn("Tout", node_def.attr)
self.assertTrue(meta_graph_def.meta_info_def.stripped_default_attrs)
# strip_default_attrs is disabled.
meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
graph_def=ops.get_default_graph().as_graph_def(),
strip_default_attrs=False)
node_def = test_util.get_node_def_from_graph("complex",
meta_graph
|
aelaguiz/pyvotune
|
pyvotune/theano/rbm.py
|
Python
|
mit
| 13,636
| 0.00066
|
# -*- coding: utf-8 -*-
import numpy as np
class RBM(object):
"""Restricted Boltzmann Machine (RBM) """
def __init__(self, theano, T, input=None, n_visible=784, n_hidden=500,
W=None, hbias=None, vbias=None, np_rng=None,
theano_rng=None):
"""
RBM constructor. Defines the parameters of the model along with
basic operations for inferring hidden from visible (and vice-versa),
as well as for performing CD updates.
:param input: None for standalone RBMs or symbolic variable if RBM is
part of a larger graph.
:param n_visible: number of visible units
:param n_hidden: number of hidden units
:param W: None for standalone RBMs or symbolic variable pointing to a
shared weight matrix in case RBM is part of a DBN network; in a DBN,
the weights are shared between RBMs and layers of a MLP
:param hbias: None for standalone RBMs or symbolic variable pointing
to a shared hidden units bias vector in case RBM is part of a
different network
:param vbias: None for standalone RBMs or a symbolic variable
pointing to a shared visible units bias
"""
self.theano = theano
self.T = T
self.n_visible = n_visible
self.n_hidden = n_hidden
if np_rng is None:
# create a number generator
np_rng = np.random.RandomState(1234)
if W is None:
# W is initialized with `initial_W` which is uniformely
# sampled from -4*sqrt(6./(n_visible+n_hidden)) and
# 4*sqrt(6./(n_hidden+n_visible)) the output of uniform if
# converted using asarray to dtype self.theano.config.floatX so
# that the code is runable on GPU
initial_W = np.asarray(np_rng.uniform(
low=-4 * np.sqrt(
6. / (n_hidden + n_visible)),
high=4 * np.sqrt(
6. / (n_hidden + n_visible)),
size=(n_visible, n_hidden)),
dtype=self.theano.config.floatX)
# theano shared variables for weights and biases
W = self.theano.shared(value=initial_W, name='W', borrow=True)
if hbias is None:
# create shared variable for hidden units bias
hbias = self.theano.shared(value=np.zeros(n_hidden,
dtype=self.theano.config.floatX),
name='hbias', borrow=True)
if vbias is None:
# create shared variable for visible units bias
vbias = self.theano.shared(value=np.zeros(n_visible,
dtype=self.theano.config.floatX),
name='vbias', borrow=True)
# initialize input layer for standalone RBM or layer0 of DBN
self.input = input
if not input:
self.input = self.T.matrix('input')
self.W = W
self.hbias = hbias
self.vbias = vbias
self.theano_rng = theano_rng
# **** WARNING: It is not a good idea to put things in this list
# other than shared variables created in this function.
self.params = [self.W, self.hbias, self.vbias]
def free_energy(self, v_sample):
''' Function to compute the free energy '''
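        # (Added note) This is the standard RBM free energy,
        #     F(v) = -v.vbias - sum_j log(1 + exp((v.W + hbias)_j)),
        # with wx_b below holding the per-hidden-unit pre-activation (v.W + hbias).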
wx_b = self.T.dot(v_sample, self.W) + self.hbias
vbias_term = self.T.dot(v_sample, self.vbias)
hidden_term = self.T.sum(self.T.log(1 + self.T.exp(wx_b)), axis=1)
return -hidden_term - vbias_term
def propup(self, vis):
'''This function propagates the visible units activation upwards to
the hidden units
Note that we return also the pre-sigmoid activation of the
layer. As it will turn out later, due to how Theano deals with
optimizations, this symbolic variable will be needed to write
down a more stable computational graph (see details in the
reconstruction cost function)
'''
pre_sigmoid_activation = self.T.dot(vis, self.W) + self.hbias
return [pre_sigmoid_activation, self.T.nnet.sigmoid(pre_sigmoid_activation)]
def sample_h_given_v(self, v0_sample):
''' This function infers state of hidden units given visible units '''
# compute the activation of the hidden units given a sample of
# the visibles
pre_sigmoid_h1, h1_mean = self.propup(v0_sample)
# get a sample of the hiddens given their activation
# Note that theano_rng.binomial returns a symbolic sample of dtype
# int64 by default. If we want to keep our computations in floatX
# for the GPU we need to specify to return the dtype floatX
h1_sample = self.theano_rng.binomial(size=h1_mean.shape,
n=1, p=h1_mean,
dtype=self.theano.config.floatX)
return [pre_sigmoid_h1, h1_mean, h1_sample]
def propdown(self, hid):
'''This function propagates the hidden units activation downwards to
the visible units
Note that we return also the pre_sigmoid_activation of the
layer. As it will turn out later, due to how Theano deals with
optimizations, this symbolic variable will be needed to write
down a more stable computational graph (see details in the
reconstruction cost function)
'''
pre_sigmoid_activation = self.T.dot(hid, self.W.T) + self.vbias
return [pre_sigmoid_activation, self.T.nnet.sigmoid(pre_sigmoid_activation)]
    def sample_v_given_h(self, h0_sample):
        ''' This function infers state of visible units given hidden units '''
# compute the activation of the visible given the hidden sample
pre_sigmoid_v1, v1_mean = self.propdown(h0_sample)
# get a sample of the visible given their activation
# Note that theano_rng.binomial returns a symbolic sample of dtype
# int64 by default. If we want to keep our computations in floatX
# for the GPU we need to specify to return the dtype floatX
v1_sample = self.theano_rng.binomial(size=v1_mean.shape,
n=1, p=v1_mean,
dtype=self.theano.config.floatX)
return [pre_sigmoid_v1, v1_mean, v1_sample]
def gibbs_hvh(self, h0_sample):
''' This function implements one step of Gibbs sampling,
starting from the hidden state'''
pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v1_sample)
return [pre_sigmoid_v1, v1_mean, v1_sample,
pre_sigmoid_h1, h1_mean, h1_sample]
def gibbs_vhv(self, v0_sample):
''' This function implements one step of Gibbs sampling,
starting from the visible state'''
pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v0_sample)
pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h1_sample)
return [pre_sigmoid_h1, h1_mean, h1_sample,
pre_sigmoid_v1, v1_mean, v1_sample]
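    # --- Hedged usage sketch (not part of the original class) ---------------
    # Building an RBM needs a working Theano install; the modules are passed
    # in explicitly because this class takes them as constructor arguments.
    #
    #   import numpy, theano
    #   import theano.tensor as T
    #   from theano.tensor.shared_randomstreams import RandomStreams
    #
    #   rng = numpy.random.RandomState(123)
    #   rbm = RBM(theano, T, n_visible=784, n_hidden=64, np_rng=rng,
    #             theano_rng=RandomStreams(rng.randint(2 ** 30)))
    #   cost, updates = rbm.get_cost_updates(lr=0.1, persistent=None, k=1)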
def get_cost_updates(self, lr=0.1, persistent=None, k=1):
"""This functions implements one step of CD-k or PCD-k
:param lr: learning rate used to train the RBM
:param persistent: None for CD. For PCD, shared variable
containing old state of Gibbs chain. This must be a shared
variable of size (batch size, number of hidden units).
:param k: number of Gibbs steps to do in CD-k/PCD-k
Returns a proxy for the cost and the updates dictionary. The
dictionary contains the update rules for weights and biases but
also an update of the shared variable used to store the persistent
chain, if one is used.
"""
# compute positive phase
pre_sigmoid_ph, ph_mean, ph_sample = self.sample_h_given_v(self.input)
# decide how to initialize persistent chain:
|
gento/dionaea
|
modules/python/scripts/store.py
|
Python
|
gpl-2.0
| 2,119
| 0.032091
|
#********************************************************************************
#* Dionaea
#* - catches bugs -
#*
#*
#*
#* Copyright (C) 2009 Paul Baecher & Markus Koetter
#*
#* This program is free software; you can redistribute it and/or
#* modify it under the terms of the GNU General Public License
#* as published by the Free Software Foundation; either version 2
#* of the License, or (at your option) any later version.
#*
#* This program is distributed in the hope that it will be useful,
#* but WITHOUT ANY WARRANTY; without even the implied warranty of
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#* GNU General Public License for more details.
#*
#* You should have received a copy of the GNU General Public License
#* along with this program; if not, write to the Free Software
#* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#*
#*
#* contact nepenthesdev@gmail.com
#*
#*******************************************************************************/
from dionaea.core import ihandler, incident, g_dionaea
from dionaea.util import md5file
import os
import logging
logger = logging.getLogger('store')
logger.setLevel(logging.DEBUG)
class storehandler(ihandler):
def __init__(self, path):
logger.debug("%s ready!" % (self.__class__.__name__))
ihandler.__init__(self, path)
def handle_incident(self, icd):
logger.debug("storing file")
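        # (Added summary) Hash the download, always report a
        # "dionaea.download.complete.hash" incident, then hard-link the file
        # into the downloads directory only if this md5 is new, reporting
        # either "...complete.again" or "...complete.unique" accordingly.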
p = icd.path
md5 = md5file(p)
n = g_dionaea.config()['downloads']['dir'] + '/' + md5
i = incident("dionaea.download.complete.hash")
i.file = n
i.url = icd.url
if hasattr(icd, 'con'):
i.con = icd.con
i.md5hash = md5
i.report()
try:
f = os.stat(n)
i = incident("dionaea.download.complete.again")
logger.debug("file %s already existed" % md5)
except OSError:
logger.debug("saving new file %s to %s" % (md5, n))
os.link(p, n)
i = incident("dionaea.download.complete.unique")
i.file = n
if hasattr(icd, 'con'):
i.con = icd.con
i.url = icd.url
i.md5hash = md5
i.report()
|
xlqian/navitia
|
source/jormungandr/jormungandr/scenarios/helper_classes/streetnetwork_path.py
|
Python
|
agpl-3.0
| 8,235
| 0.002186
|
# Copyright (c) 2001-2017, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import
from jormungandr import utils, new_relic
from jormungandr.street_network.street_network import StreetNetworkPathType
import logging
from .helper_utils import timed_logger
class StreetNetworkPath:
"""
    A StreetNetworkPath is a journey from orig_obj to dest_obj purely in street network (without any pt)
"""
def __init__(
self,
future_manager,
instance,
streetnetwork_service,
orig_obj,
dest_obj,
mode,
fallback_extremity,
request,
streetnetwork_path_type,
request_id,
):
"""
:param future_manager: a module that manages the future pool properly
:param instance: instance of the coverage, all outside services callings pass through it(street network,
auto completion)
:param streetnetwork_service: service that will be used to compute the path
:param orig_obj: proto obj
:param dest_obj: proto obj
:param mode: street network mode, should be one of ['walking', 'bike', 'bss', 'car']
:param fallback_extremity: departure after datetime or arrival after datetime
:param request: original user request
:param streetnetwork_path_type: street network path's type
"""
self._future_manager = future_manager
self._instance = instance
self._streetnetwork_service = streetnetwork_service
self._orig_obj = orig_obj
self._dest_obj = dest_obj
self._mode = mode
self._fallback_extremity = fallback_extremity
self._request = request
self._path_type = streetnetwork_path_type
self._value = None
self._logger = logging.getLogger(__name__)
self._request_id = request_id
self._async_request()
@new_relic.distributedEvent("direct_path", "street_network")
def _direct_path_with_fp(self):
with timed_logger(self._logger, 'direct_path_calling_external_service', self._request_id):
try:
return self._streetnetwork_service.direct_path_with_fp(
self._instance,
self._mode,
self._orig_obj,
self._dest_obj,
self._fallback_extremity,
self._request,
self._path_type,
self._request_id,
)
except Exception as e:
                logging.getLogger(__name__).error("Exception':{}\n".format(str(e)))
return None
def _do_request(self):
self._logger.debug(
"requesting %s direct path from %s to %s by %s",
self._path_type,
self._orig_obj.uri,
self._dest_obj.uri,
self._mode,
)
dp = self._direct_path_with_fp(self._streetnetwork_service)
if getattr(dp, "journeys", None):
            dp.journeys[0].internal_id = str(utils.generate_id())
self._logger.debug(
"finish %s direct path from %s to %s by %s",
self._path_type,
self._orig_obj.uri,
self._dest_obj.uri,
self._mode,
)
return dp
def _async_request(self):
self._value = self._future_manager.create_future(self._do_request)
def wait_and_get(self):
if self._value:
return self._value.wait_and_get()
return None
class StreetNetworkPathPool:
"""
A direct path pool is a set of pure street network journeys which are computed by the given street network service.
According to its usage, a StreetNetworkPath can be direct, beginning_fallback and ending_fallback
"""
def __init__(self, future_manager, instance):
self._future_manager = future_manager
self._instance = instance
self._value = {}
self._direct_paths_future_by_mode = {}
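    # (Added note) Requests are deduplicated per street-network service: the
    # key combines mode, origin/destination URIs, path type and the period
    # extremity, so asking twice for the same path reuses a single future.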
def add_async_request(
self,
requested_orig_obj,
requested_dest_obj,
mode,
period_extremity,
request,
streetnetwork_path_type,
request_id,
):
streetnetwork_service = self._instance.get_street_network(mode, request)
key = (
streetnetwork_service.make_path_key(
mode, requested_orig_obj.uri, requested_dest_obj.uri, streetnetwork_path_type, period_extremity
)
if streetnetwork_service
else None
)
path = self._value.get(key)
if not path:
path = self._value[key] = StreetNetworkPath(
self._future_manager,
self._instance,
streetnetwork_service,
requested_orig_obj,
requested_dest_obj,
mode,
period_extremity,
request,
streetnetwork_path_type,
request_id,
)
if streetnetwork_path_type is StreetNetworkPathType.DIRECT:
self._direct_paths_future_by_mode[mode] = path
def get_all_direct_paths(self):
"""
:return: a dict of mode vs direct_path future
"""
return self._direct_paths_future_by_mode
def has_valid_direct_paths(self):
for k in self._value:
if k.streetnetwork_path_type is not StreetNetworkPathType.DIRECT:
continue
dp = self._value[k].wait_and_get()
if getattr(dp, "journeys", None):
return True
return False
def wait_and_get(
self, requested_orig_obj, requested_dest_obj, mode, period_extremity, streetnetwork_path_type, request
):
streetnetwork_service = self._instance.get_street_network(mode, request)
key = (
streetnetwork_service.make_path_key(
mode, requested_orig_obj.uri, requested_dest_obj.uri, streetnetwork_path_type, period_extremity
)
if streetnetwork_service
else None
)
dp_future = self._value.get(key)
return dp_future.wait_and_get() if dp_future else None
def add_feed_publishers(self, request, requested_direct_path_modes, responses):
def _feed_publisher_not_present(feed_publishers, id):
for fp in feed_publishers:
if id == fp.id:
return False
return True
for mode in requested_direct_path_modes:
streetnetwork_service = self._instance.get_street_network(mode, request)
fp = streetnetwork_service.feed_publisher()
if fp != None:
for resp in responses:
if _feed_publisher_not_present(resp.feed_publishers, fp.id):
feed = resp.feed_publishers.add()
feed.id = fp.id
feed.name = fp.name
feed.license
|
gkc1000/pyscf
|
pyscf/pbc/df/mdf_ao2mo.py
|
Python
|
apache-2.0
| 6,690
| 0.003737
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import numpy
from pyscf import lib
from pyscf.ao2mo import _ao2mo
from pyscf.ao2mo.incore import iden_coeffs, _conc_mos
from pyscf.pbc.df.fft_ao2mo import _format_kpts
from pyscf.pbc.df import df_ao2mo
from pyscf.pbc.df import aft_ao2mo
from pyscf.pbc.lib import kpts_helper
from pyscf.pbc.lib.kpts_helper import is_zero, gamma_point, member, unique
from pyscf import __config__
def get_eri(mydf, kpts=None,
compact=getattr(__config__, 'pbc_df_ao2mo_get_eri_compact', True)):
if mydf._cderi is None:
mydf.build()
kptijkl = _format_kpts(kpts)
eri = aft_ao2mo.get_eri(mydf, kptijkl, compact=compact)
eri += df_ao2mo.get_eri(mydf, kptijkl, compact=compact)
return eri
def general(mydf, mo_coeffs, kpts=None,
compact=getattr(__config__, 'pbc_df_ao2mo_general_compact', True)):
if mydf._cderi is None:
mydf.build()
kptijkl = _format_kpts(kpts)
if isinstance(mo_coeffs, numpy.ndarray) and mo_coeffs.ndim == 2:
mo_coeffs = (mo_coeffs,) * 4
eri_mo = aft_ao2mo.general(mydf, mo_coeffs, kptijkl, compact=compact)
eri_mo += df_ao2mo.general(mydf, mo_coeffs, kptijkl, compact=compact)
return eri_mo
def ao2mo_7d(mydf, mo_coeff_kpts, kpts=None, factor=1, out=None):
cell = mydf.cell
if kpts is None:
kpts = mydf.kpts
nkpts = len(kpts)
if isinstance(mo_coeff_kpts, numpy.ndarray) and mo_coeff_kpts.ndim == 3:
mo_coeff_kpts = [mo_coeff_kpts] * 4
else:
mo_coeff_kpts = list(mo_coeff_kpts)
# Shape of the orbitals can be different on different k-points. The
# orbital coefficients must be formatted (padded by zeros) so that the
# shape of the orbital coefficients are the same on all k-points. This can
# be achieved by calling pbc.mp.kmp2.padded_mo_coeff function
nmoi, nmoj, nmok, nmol = [x.shape[2] for x in mo_coeff_kpts]
eri_shape = (nkpts, nkpts, nkpts, nmoi, nmoj, nmok, nmol)
if gamma_point(kpts):
dtype = numpy.result_type(*mo_coeff_kpts)
else:
dtype = numpy.complex128
if out is None:
out = numpy.empty(eri_shape, dtype=dtype)
else:
assert(out.shape == eri_shape)
kptij_lst = numpy.array([(ki, kj) for ki in kpts for kj in kpts])
kptis_lst = kptij_lst[:,0]
kptjs_lst = kptij_lst[:,1]
kpt_ji = kptjs_lst - kptis_lst
uniq_kpts, uniq_index, uniq_inverse = unique(kpt_ji)
ngrids = numpy.prod(mydf.mesh)
nao = cell.nao_nr()
max_memory = max(2000, mydf.max_memory-lib.current_memory()[0]-nao**4*16/1e6) * .5
fswap = lib.H5TmpFile()
tao = []
ao_loc = None
kconserv = kpts_helper.get_kconserv(cell, kpts)
for uniq_id, kpt in enumerate(uniq_kpts):
q = uniq_kpts[uniq_id]
adapted_ji_idx = numpy.where(uniq_inverse == uniq_id)[0]
kptjs = kptjs_lst[adapted_ji_idx]
coulG = mydf.weighted_coulG(q, False, mydf.mesh)
coulG *= factor
moij_list = []
ijslice_list = []
for ji, ji_idx in enumerate(adapted_ji_idx):
ki = ji_idx // nkpts
kj = ji_idx % nkpts
moij, ijslice = _conc_mos(mo_coeff_kpts[0][ki], mo_coeff_kpts[1][kj])[2:]
moij_list.append(moij)
ijslice_list.append(ijslice)
fswap.create_dataset('zij/'+str(ji), (ngrids,nmoi*nmoj), 'D')
for aoaoks, p0, p1 in mydf.ft_loop(mydf.mesh, q, kptjs,
max_memory=max_memory):
for ji, aoao in enumerate(aoaoks):
ki = adapted_ji_idx[ji] // nkpts
kj = adapted_ji_idx[ji] % nkpts
buf = aoao.transpose(1,2,0).reshape(nao**2,p1-p0)
                zij = _ao2mo.r_e2(lib.transpose(buf), moij_list[ji],
                                  ijslice_list[ji], tao, ao_loc)
zij *= coulG[p0:p1,None]
fswap['zij/'+str(ji)][p0:p1] = zij
mokl_list = []
klslice_list = []
for kk in range(nkpts):
kl = kconserv[ki, kj, kk]
mokl, klslice = _conc_mos(mo_coeff_kpts[2][kk], mo_coeff_kpts[3][kl])[2:]
mokl_list.append(mokl)
klslice_list.append(klslice)
fswap.create_dataset('zkl/'+str(kk), (ngrids,nmok*nmol), 'D')
ki = adapted_ji_idx[0] // nkpts
kj = adapted_ji_idx[0] % nkpts
kptls = kpts[kconserv[ki, kj, :]]
for aoaoks, p0, p1 in mydf.ft_loop(mydf.mesh, q, -kptls,
max_memory=max_memory):
for kk, aoao in enumerate(aoaoks):
buf = aoao.conj().transpose(1,2,0).reshape(nao**2,p1-p0)
zkl = _ao2mo.r_e2(lib.transpose(buf), mokl_list[kk],
klslice_list[kk], tao, ao_loc)
fswap['zkl/'+str(kk)][p0:p1] = zkl
for ji, ji_idx in enumerate(adapted_ji_idx):
ki = ji_idx // nkpts
kj = ji_idx % nkpts
moij, ijslice = _conc_mos(mo_coeff_kpts[0][ki], mo_coeff_kpts[1][kj])[2:]
zij = []
for LpqR, LpqI, sign in mydf.sr_loop(kpts[[ki,kj]], max_memory, False, mydf.blockdim):
zij.append(_ao2mo.r_e2(LpqR+LpqI*1j, moij, ijslice, tao, ao_loc))
for kk in range(nkpts):
kl = kconserv[ki, kj, kk]
eri_mo = lib.dot(numpy.asarray(fswap['zij/'+str(ji)]).T,
numpy.asarray(fswap['zkl/'+str(kk)]))
for i, (LrsR, LrsI, sign) in \
enumerate(mydf.sr_loop(kpts[[kk,kl]], max_memory, False, mydf.blockdim)):
zkl = _ao2mo.r_e2(LrsR+LrsI*1j, mokl_list[kk],
klslice_list[kk], tao, ao_loc)
lib.dot(zij[i].T, zkl, sign*factor, eri_mo, 1)
if dtype == numpy.double:
eri_mo = eri_mo.real
out[ki,kj,kk] = eri_mo.reshape(eri_shape[3:])
del(fswap['zij'])
del(fswap['zkl'])
return out
|
catapult-project/catapult-csm
|
telemetry/telemetry/timeline/inspector_importer.py
|
Python
|
bsd-3-clause
| 2,689
| 0.008553
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Imports event data obtained from the inspector's timeline."""
from telemetry.timeline import importer
import telemetry.timeline.slice as tracing_slice
import telemetry.timeline.thread as timeline_thread
from tracing.trace_data import trace_data as trace_data_module
class InspectorTimelineImporter(importer.TimelineImporter):
def __init__(self, model, trace_data):
super(InspectorTimelineImporter, self).__init__(model,
trace_data,
import_order=1)
traces = trace_data.GetTracesFor(
trace_data_module.INSPECTOR_TRACE_PART)
assert len(traces) == 1
self._events = traces[0]
@staticmethod
def GetSupportedPart():
return trace_data_module.INSPECTOR_TRACE_PART
def ImportEvents(self):
render_process = self._model.GetOrCreateProcess(0)
for raw_event in self._events:
thread = render_process.GetOrCreateThread(raw_event.get('thread', 0))
InspectorTimelineImporter.AddRawEventToThreadRecursive(thread, raw_event)
def FinalizeImport(self):
pass
@staticmethod
def AddRawEventToThreadRecursive(thread, raw_inspector_event):
pending_slice = None
    if ('startTime' in raw_inspector_event and
        'type' in raw_inspector_event):
args = {}
for x in raw_inspector_event:
if x in ('startTime', 'endTime', 'children'):
continue
args[x] = raw_inspector_event[x]
if len(args) == 0:
args = None
start_time = raw_inspector_event['startTime']
end_time = raw_inspector_event.get('endTime', start_time)
pending_slice = tracing_slice.Slice(
thread, 'inspector',
raw_inspector_event['type'],
start_time,
thread_timestamp=None,
args=args)
for child in raw_inspector_event.get('children', []):
InspectorTimelineImporter.AddRawEventToThreadRecursive(
thread, child)
if pending_slice:
pending_slice.duration = end_time - pending_slice.start
thread.PushSlice(pending_slice)
@staticmethod
def RawEventToTimelineEvent(raw_inspector_event):
"""Converts raw_inspector_event to TimelineEvent."""
thread = timeline_thread.Thread(None, 0)
InspectorTimelineImporter.AddRawEventToThreadRecursive(
thread, raw_inspector_event)
thread.FinalizeImport()
assert len(thread.toplevel_slices) <= 1
if len(thread.toplevel_slices) == 0:
return None
return thread.toplevel_slices[0]
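# --- Hedged usage sketch (not part of the original module) -------------------
# Shape of a raw inspector event accepted by AddRawEventToThreadRecursive():
# 'startTime'/'endTime' are numeric timestamps and nesting is expressed via
# the 'children' list; the values below are purely illustrative.
def _example_raw_inspector_event():
    # Could be passed to InspectorTimelineImporter.RawEventToTimelineEvent().
    return {
        'type': 'FunctionCall',
        'startTime': 100.0,
        'endTime': 105.0,
        'children': [
            {'type': 'GCEvent', 'startTime': 101.0, 'endTime': 102.0},
        ],
    }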
|
Gailbear/dots-editor
|
tests/test_core.py
|
Python
|
mit
| 2,462
| 0.005686
|
from dots_editor import core, utf8_braille
import os, pygame, pytest
TEST_STRING = u'\u2801\u2803\u2809\u2819\u2811'
TEST_FILENAME = 'test.txt'
def test_setenv():
assert os.environ["SDL_VIDEODRIVER"] == "dummy"
def test_key_to_dot(game):
assert game.key_to_dot(pygame.K_f) == 1
assert game.key_to_dot(pygame.K_d) == 2
assert game.key_to_dot(pygame.K_s) == 3
assert game.key_to_dot(pygame.K_j) == 4
assert game.key_to_dot(pygame.K_k) == 5
assert game.key_to_dot(pygame.K_l) == 6
def test_sentence_to_lines_single_line(game):
lines = game.sentence_to_lines()
assert len(lines) == 1
assert lines[0] == TEST_STRING
def test_sentence_to_lines_double_line(game):
line1 = u'\u2840'*40
line2 = u'\u2840'*3
game.sentence = make_cells(line1 + line2)
lines = game.sentence_to_lines()
assert len(lines) == 2
assert lines[0] == line1
assert lines[1] == line2
def test_ascii_lines(game):
lines = game.sentence_to_lines()
a_lines = game.ascii_lines(lines)
assert len(a_lines) == 1
assert a_lines[0] == 'ABCDE'
def test_save_sentences_ascii(game, tmpdir):
assert game.savemode == 'ascii'
game.save_sentences()
f = tmpdir.join(TEST_FILENAME)
assert f.check()
assert f.read() == 'ABCDE'
def test_save_sentences_unicode(game, tmpdir):
game.savemode = 'unicode'
    game.save_sentences()
f = tmpdir.join(TEST_FILENAME)
assert f.check()
assert f.read_text('utf8') == TEST_STRING
def test_save_sentences_ascii_2_sentences(game_2lines, tmpdir):
assert game_2lines.savemode == 'ascii'
game_2lines.save_sentences()
f = tmpdir.join(TEST_FILENAME)
assert f.check()
assert f.read() == 'ABCDE\nABCDE'
def test_save_sentences_unicode_2_sentences(game_2lines, tmpdir):
game_2lines.savemode = 'unicode'
game_2lines.save_sentences()
f = tmpdir.join(TEST_FILENAME)
assert f.check()
assert f.read_text('utf8') == TEST_STRING + u'\n' + TEST_STRING
def test_draw_sentences(game):
# not that we can see the lines, just that it doesn't throw errors
game.draw_sentences()
assert True
def make_cells(chars):
return [utf8_braille.Cell(c) for c in chars]
@pytest.fixture
def game(tmpdir):
f = tmpdir.join('test.txt')
game = core.Game(str(f), 'ascii')
game.sentence = make_cells(TEST_STRING)
return game
@pytest.fixture
def game_2lines(game):
game.sentences.append(game.sentence)
return game
|
mick-d/nipype_source
|
nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py
|
Python
|
bsd-3-clause
| 1,781
| 0.03032
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.fsl.epi import EPIDeWarp
def test_EPIDeWarp_inputs():
input_map = dict(args=dict(argstr='%s',
),
cleanup=dict(argstr='--cleanup',
),
dph_file=dict(argstr='--dph %s',
mandatory=True,
),
environ=dict(nohash=True,
usedefault=True,
),
epi_file=dict(argstr='--epi %s',
),
epidw=dict(argstr='--epidw %s',
genfile=False,
),
esp=dict(argstr='--esp %s',
usedefault=True,
),
exf_file=dict(argstr='--exf %s',
),
exfdw=dict(argstr='--exfdw %s',
genfile=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
mag_file=dict(argstr='--mag %s',
mandatory=True,
position=0,
),
nocleanup=dict(argstr='--nocleanup',
usedefault=True,
),
output_type=dict(),
sigma=dict(argstr='--sigma %s',
usedefault=True,
),
tediff=dict(argstr='--tediff %s',
usedefault=True,
),
terminal_output=dict(mandatory=True,
nohash=True,
),
tmpdir=dict(argstr='--tmpdir %s',
genfile=True,
),
vsm=dict(argstr='--vsm %s',
genfile=True,
),
)
inputs = EPIDeWarp.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_EPIDeWarp_outputs():
output_map = dict(exf_mask=dict(),
exfdw=dict(),
unwarped_file=dict(),
vsm_file=dict(),
)
outputs = EPIDeWarp.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
jj0hns0n/geonode
|
geonode/upload/urls.py
|
Python
|
gpl-3.0
| 1,501
| 0.002665
|
# -*- coding: utf-8 -*-
########################################################################
#
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf.urls import patterns, url
from geonode.upload.views import UploadFileCreateView, UploadFileDeleteView
urlpatterns = patterns('geonode.upload.views',
url(r'^new/$', UploadFileCreateView.as_view(), name='data_upload_new'),
url(r'^progress$', 'data_upload_progress', name='data_upload_progress'),
url(r'^(?P<step>\w+)?$', 'view', name='data_upload'),
url(r'^delete/(?P<id>\d+)?$', 'delete', name='data_upload_delete'),
url(r'^remove/(?P<pk>\d+)$', UploadFileDeleteView.as_view(), name='data_upload_remove'),
)
|
peter1010/my_vim
|
vimfiles/py_scripts/snippet.py
|
Python
|
gpl-2.0
| 569
| 0.010545
|
import vim
def func_header_snippet(row):
cmt = "//!"
cb = vim.current.buffer
start = row
while start >= 0:
line = cb[start-1].strip()
if not line.startswith(cmt):
break
start -= 1
print("HDR")
def select_snippet(line):
line = line.strip()
if line.startswith("//!"):
return func_header_snippet
def main():
row, col = vim.current.window.cursor
row -= 1
cline = vim.current.buffer[row]
func = select_snippet(cline)
if func:
func(row)
#! @brief
#! @details
main()
|
popdynamics/popdynamics
|
basepop.py
|
Python
|
mit
| 40,337
| 0.001289
|
# -*- coding: utf-8 -*-
"""
Base Population Model to handle different types of models
"""
from __future__ import print_function
from __future__ import division
from builtins import range
from builtins import object
from past.utils import old_div
import os
import sys
import math
import random
import platform
import glob
import copy
import numpy
try:
from scipy.integrate import odeint
except Exception:
print("Unable to load scipy")
# General file-handling methods for use in examples
def ensure_out_dir(out_dir):
"""
Make sure the output directory exists and create if it doesn't.
"""
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
def open_pngs_in_dir(out_dir):
"""
Open the .png image files through the OS
"""
pngs = glob.glob(os.path.join(out_dir, '*png'))
operating_system = platform.system()
if 'Windows' in operating_system:
os.system("start " + " ".join(pngs))
elif 'Darwin' in operating_system:
os.system('open ' + " ".join(pngs))
# function used by BaseModel in data manipulation
def add_unique_tuple_to_list(a_list, a_tuple):
"""
    Adds a tuple to a list, or replaces an existing entry: tuples are compared
    on every item except the last, which is assumed to hold the value.
"""
for i, test_tuple in enumerate(a_list):
if test_tuple[:-1] == a_tuple[:-1]:
a_list[i] = a_tuple
break
else:
a_list.append(a_tuple)
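# Hedged usage sketch (not part of the original module): tuples are matched on
# everything except their final element, which is treated as the value.
def _demo_add_unique_tuple_to_list():
    flows = [('susceptible', 'infected', 0.1)]
    add_unique_tuple_to_list(flows, ('susceptible', 'infected', 0.2))  # replaces
    add_unique_tuple_to_list(flows, ('infected', 'recovered', 0.05))   # appends
    return flows  # [('susceptible', 'infected', 0.2), ('infected', 'recovered', 0.05)]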
def label_intersects_tags(label, tags):
"""
Determine whether a string is contained within a list of strings for use
in functions such as calculation of the force of infection, where we might
want a list of all the compartments that contain a particular string (such
as 'active' or 'infectious').
Args:
label: The string we're searching for
tags: List for comparison
"""
for tag in tags:
if tag in label:
return True
return False
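# Hedged example (added): label_intersects_tags('active_tb', ['active', 'latent'])
# returns True because the tag 'active' is a substring of the label.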
# Math functions to build scale-up functions
def make_sigmoidal_curve(
y_low=0,
y_high=1.,
x_start=0,
x_inflect=0.5,
multiplier=1.):
"""
Returns a sigmoidal curve function for smooth scaling of time-variant
parameter values
:param y_low: lowest y value
:param y_high: highest y value
:param x_inflect: inflection point of graph along the x-axis
:param multiplier: if 1, slope at x_inflect goes to (0, y_low), larger
values makes it steeper
    :return: function that increases sigmoidally from y_low to y_high
the halfway point is at x_inflect on the x-axis and the slope
at x_inflect goes to (0, y_low) if the multiplier is 1.
"""
amplitude = y_high - y_low
if amplitude == 0:
def curve(x):
return y_low
return curve
x_delta = x_inflect - x_start
slope_at_inflection = multiplier * 0.5 * amplitude / x_delta
b = 4. * slope_at_inflection / amplitude
def curve(x):
arg = b * (x_inflect - x)
# check for large values that will blow out exp
if arg > 10.:
return y_low
return old_div(amplitude, (1. + math.exp(arg))) + y_low
return curve
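# Hedged usage sketch (not part of the original module): a scale-up that rises
# from 0.1 to 0.9 centred on 2005; the values are chosen purely for illustration.
def _demo_sigmoidal_scaleup():
    curve = make_sigmoidal_curve(
        y_low=0.1, y_high=0.9, x_start=2000, x_inflect=2005, multiplier=1.)
    # At x_inflect the curve sits halfway between y_low and y_high (0.5 here).
    return [round(curve(x), 3) for x in (1995, 2005, 2015)]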
def make_constant_function(value):
def curve(x):
return value
return curve
def make_two_step_curve(y_low, y_med, y_high, x_start, x_med, x_end):
curve1 = make_sigmoidal_curve(
y_high=y_med,
y_low=y_low,
x_start=x_start,
x_inflect=(x_med - x_start) * 0.5 + x_start,
multiplier=4)
curve2 = make_sigmoidal_curve(
y_high=y_high,
y_low=y_med,
x_start=x_med,
x_inflect=(x_end - x_med) * 0.5 + x_med,
multiplier=4)
def curve(x):
if x < x_start:
return y_low
if x < x_med:
return curve1(x)
if x < x_end:
return curve2(x)
return y_high
return curve
def pick_event(event_intervals):
"""
Returns a randomly selected index of an event from a list of intervals
proportional to the events' probabilities
:param event_intervals: list of event intervals
"""
i_event = 0
cumul_intervals = []
cumul_interval = 0.0
for interval in event_intervals:
cumul_interval += interval
cumul_intervals.append(cumul_interval)
i = random.random() * cumul_interval
i_last_event = len(event_intervals) - 1
while i > cumul_intervals[i_event] and i_event < i_last_event:
i_event += 1
return i_event
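# Hedged usage sketch (not part of the original module): intervals are treated
# as unnormalised weights, so the middle event below wins about half the time.
def _demo_pick_event(n_draws=1000):
    counts = [0, 0, 0]
    for _ in range(n_draws):
        counts[pick_event([1.0, 2.0, 1.0])] += 1
    return counts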
# The Base model
class BaseModel(object):
"""
BaseModel is a compartmental model that implements a system of
differential equations that connects the populations of the different
compartments.
Most connections between compartments are of the double-entry book-keeping
    type where losses in one compartment are taken up by another
    compartment.
In order to handle all the connections without putting too much of a
burden on the programmer the differential equations are built up from the
individual connections rather than being specified straight up.
Basic concepts:
self.target_times: time steps where model values are stored
    self.dt: the time step used in simulation, usually much smaller than
        the intervals between values in self.target_times
self.time: current time of simulation
self.compartments: dictionary that holds current compartment populations
self.init_compartments: dictionary of initial compartment values
self.flows: dictionary that holds current compartment flows
self.params: hard-coded numerical values
self.vars: values that are calculated at every time step
self.scaleup_fns: dictionary of functions that are used to calculate
certain self.vars values
self.fixed_transfer_rate_flows: connections between compartments with
fixed multipliers
self.var_transfer_rate_flows: connections between compartments with
variable multipliers
self.infection_death_rate_flows: extinctions from certain compartments
(for demography)
self.var_entry_rate_flow: dynamic growth of certain compartments
self.background_death_rate: generalized extinction across all compartments
The execution loop is:
1) calculate self.vars - depends only on self.params and self.compartments
2) Assign transfers between compartments, depends on self.vars,
self.scaleup_fns and self.params
3) Determine self.flows for compartments from transfers
4) Update self.compartments from self.flows
5) Save to self.soln_arrray
"""
def __init__(self):
"""
:param params: dictionary of params to add to self.params
"""
# list of labels for all compartments
self.labels = []
# stores the initial value for all compartments
self.init_compartments = {}
# stores the values of all parameters there should be no hard-coded
# values except as contained in self.structure
self.params = {}
# stored list of time points
self.target_times = []
self.time = 0
self.times = []
self.start_time = 0
# scale-up functions, generally used for time-variant parameters,
# whose values change in a way that is predictable before the model
# has been run
self.scaleup_fns = {}
# stores the population compartments in the model at a given time-
# point
self.compartments = {}
        # stores any auxiliary variables used to calculate dynamic effects
# (such as transmission) at each time-step
self.vars = {}
# total flow of each compartment
self.flows = {}
# variable entry (birth) rate flow(s)
# list of 2-tuple (label, var_label)
# - label: name of compartment
# - var_label: name of var that holds the entry rate
self.var_entry_rate_flow = []
# fixed transfer rates (often for progression
|
thomashuang/Fukei
|
setup.py
|
Python
|
mit
| 865
| 0.020809
|
from setuptools import setup
with open('README.rst') as f:
    long_description = f.read()
setup(
name = "fukei",
version = "0.1",
license = 'MIT',
description = "A Python Tornado port of shadowsocks and socks proxy",
author = 'Thomas Huang',
url = 'https://github.com/thomashuang/Fukei',
packages = ['fukei', 'fukei.connection', 'fukei.upstream'],
package_data={
'fukei': ['README.rst', 'LICENSE', 'config/config.json']
},
install_requires = ['setuptools',
],
scripts=['bin/ss-local', 'bin/ss-server', 'bin/ss-default'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: Proxy Servers',
],
long_description=long_description,
)
|
anyweez/regis
|
face/util/exceptions.py
|
Python
|
gpl-2.0
| 2,294
| 0.012642
|
'''
Thrown when a user doesn't have an available question. There may be questions
that haven't been answered, but none of them are in the 'ready' state, meaning
that they've been parsed but their answer hasn't been computed.
'''
class NoQuestionReadyException(Exception):
def __init__(self, user):
self.user = user
def __str__(self):
return 'No question ready for user %s' % self.user.username
'''
[Deprecated]
This exception is thrown when a user enters a pair of passwords that do not match
during registration.
'''
class DifferingPasswordException(Exception):
def __init__(self):
pass
def __str__(self):
return 'Provided passwords do not match.'
'''
[Deprecated]
This exception is thrown when a user requests a username that already exists.
'''
class DuplicateNameException(Exception):
def __init__(self, uname):
self.uname = uname
def __str__(self):
return 'The username %s already exists.' % self.uname
'''
[Deprecated]
This exception is thrown when a user tries to hack the URL or POST parameters to
view data that isn't available to them, such as the status of another user's guess.
'''
class UnauthorizedAttemptException(Exception):
def __init__(self, user, qid):
self.user = user
self.qid = qid
def __str__(self):
        return '%s made an unauthorized guess attempt on question ID #%d' % (self.user.username, self.qid)
'''
This exception is thrown when a new user tries to create an account but no QuestionSet
is available to pair them to. New QuestionSets are supposed to be generated by a
periodic cron job.
'''
class NoQuestionSetReadyException(Exception):
def __init__(self, user):
self.user = user
def __str__(self):
return 'No question set available for user #%d (%s)' % (self.user.id, self.user.username)
'''
This exception is typically thrown when the processing is SUPPOSED to terminate,
i.e. in the handler for /bomb. This exception shouldn't ever be caught and
is primarily just an exception for testing.
'''
class KamikazeException(Exception):
def __init__(self, user):
self.user = user
def __str__(self):
return '%s just dropped a KamikazeException' % self.user.username
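# --- Hedged usage sketch (not part of the original module) -------------------
# The exceptions carry the user object so handlers can log who hit the
# condition; the fake user below exists only for illustration.
def _demo_no_question_ready():
    class _FakeUser(object):
        username = 'alice'
    try:
        raise NoQuestionReadyException(_FakeUser())
    except NoQuestionReadyException as err:
        return str(err)  # 'No question ready for user alice'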
|
won0089/oppia
|
core/tests/test_utils.py
|
Python
|
apache-2.0
| 26,631
| 0.000263
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities for test classes."""
import contextlib
import copy
import datetime
import os
import re
import unittest
import webtest
from core.controllers import reader
from core.domain import collection_domain
from core.domain import collection_services
from core.domain import config_domain
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import rule_domain
from core.domain import rights_manager
from core.platform import models
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
current_user_services = models.Registry.import_current_user_services()
import feconf
import jinja_utils
import main
import utils
import inspect
import json
CSRF_REGEX = (
r'csrf_token: JSON\.parse\(\'\\\"([A-Za-z0-9/=_-]+)\\\"\'\)')
# Prefix to append to all lines printed by tests to the console.
LOG_LINE_PREFIX = 'LOG_INFO_TEST: '
def empty_environ():
os.environ['AUTH_DOMAIN'] = 'example.com'
os.environ['SERVER_NAME'] = 'localhost'
os.environ['HTTP_HOST'] = 'localhost'
os.environ['SERVER_PORT'] = '8080'
os.environ['USER_EMAIL'] = ''
os.environ['USER_ID'] = ''
os.environ['USER_IS_ADMIN'] = '0'
os.environ['DEFAULT_VERSION_HOSTNAME'] = '%s:%s' % (
os.environ['HTTP_HOST'], os.environ['SERVER_PORT'])
class TestBase(unittest.TestCase):
"""Base class for all tests."""
maxDiff = 2500
# This is the value that gets returned by default when
# app_identity.get_application_id() is called during tests.
EXPECTED_TEST_APP_ID = 'testbed-test'
# A test unicode string.
UNICODE_TEST_STRING = u'unicode ¡马!'
# Dummy strings representing user attributes. Note that it is up to the
# individual test to actually register these users as editors, admins, etc.
ADMIN_EMAIL = 'admin@example.com'
# Usernames containing the string 'admin' are reserved, so we use 'adm'
# instead.
ADMIN_USERNAME = 'adm'
MODERATOR_EMAIL = 'moderator@example.com'
MODERATOR_USERNAME = 'moderator'
OWNER_EMAIL = 'owner@example.com'
OWNER_USERNAME = 'owner'
EDITOR_EMAIL = 'editor@example.com'
EDITOR_USERNAME = 'editor'
VIEWER_EMAIL = 'viewer@example.com'
VIEWER_USERNAME = 'viewer'
NEW_USER_EMAIL = 'new.user@example.com'
NEW_USER_USERNAME = 'newuser'
DEFAULT_END_STATE_NAME = 'End'
VERSION_0_STATES_DICT = {
feconf.DEFAULT_INIT_STATE_NAME: {
'content': [{'type': 'text', 'value': ''}],
'param_changes': [],
'interaction': {
'customization_args': {},
'id': 'Continue',
'handlers': [{
'name': 'submit',
'rule_specs': [{
'dest': 'END',
'feedback': [],
'param_changes': [],
'definition': {'rule_type': 'default'}
}]
}]
}
}
}
def _get_unicode_test_string(self, suffix):
return '%s%s' % (self.UNICODE_TEST_STRING, suffix)
def setUp(self):
raise NotImplementedError
def tearDown(self):
raise NotImplementedError
def assertFuzzyTrue(self, value):
self.assertEqual(value, rule_domain.CERTAIN_TRUE_VALUE)
self.assertTrue(isinstance(value, float))
def assertFuzzyFalse(self, value):
self.assertEqual(value, rule_domain.CERTAIN_FALSE_VALUE)
self.assertTrue(isinstance(value, float))
def signup_superadmin_user(self):
"""Signs up a superadmin user. Should be called at the end of setUp().
"""
self.signup('tmpsuperadmin@example.com', 'tmpsuperadm1n')
def log_line(self, line):
"""Print the line with a prefix that can be identified by the
script that calls the test.
"""
print '%s%s' % (LOG_LINE_PREFIX, line)
def _delete_all_models(self):
raise NotImplementedError
def _stash_current_user_env(self):
"""Stashes the current user-specific env variables for later retrieval.
Developers: please don't use this method outside this class -- it makes
the individual tests harder to follow.
"""
self.stashed_user_env = {
'USER_EMAIL': os.environ['USER_EMAIL'],
'USER_ID': os.environ['USER_ID'],
'USER_IS_ADMIN': os.environ['USER_IS_ADMIN']
}
def _restore_stashed_user_env(self):
"""Restores a stashed set of use
|
r-specific env variables.
Developers: please don't use this method outside this class -- it makes
the individual tests harder to follow.
"""
if not self.stashed_user_env:
raise Exception('No stashed user env to restore.')
for key in self.stashed_user_env:
os.environ[key] = self.stashed_user_env[key]
self.stashed_user_env = None
def login(self, email, is_super_admin=False):
os.environ['USER_EMAIL'] = email
os.environ['USER_ID'] = self.get_user_id_from_email(email)
os.environ['USER_IS_ADMIN'] = '1' if is_super_admin else '0'
def logout(self):
os.environ['USER_EMAIL'] = ''
os.environ['USER_ID'] = ''
os.environ['USER_IS_ADMIN'] = '0'
def shortDescription(self):
"""Additional information logged during unit test invocation."""
# Suppress default logging of docstrings.
return None
def get_expected_login_url(self, slug):
"""Returns the expected login URL."""
return current_user_services.create_login_url(slug)
def get_expected_logout_url(self, slug):
"""Returns the expected logout URL."""
return current_user_services.create_logout_url(slug)
def _parse_json_response(self, json_response, expect_errors=False):
"""Convert a JSON server response to an object (such as a dict)."""
if not expect_errors:
self.assertEqual(json_response.status_int, 200)
self.assertEqual(
json_response.content_type, 'application/javascript')
self.assertTrue(json_response.body.startswith(feconf.XSSI_PREFIX))
return json.loads(json_response.body[len(feconf.XSSI_PREFIX):])
def get_json(self, url):
"""Get a JSON response, transformed to a Python object."""
json_response = self.testapp.get(url)
self.assertEqual(json_response.status_int, 200)
return self._parse_json_response(json_response, expect_errors=False)
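    # Illustrative usage inside a handler test (the URL, payload and csrf_token
    # below are hypothetical placeholders, not actual Oppia routes or values):
    #
    #     response_dict = self.get_json('/some/json/handler')
    #     self.post_json('/some/json/handler', {'key': 'value'},
    #                    csrf_token=csrf_token)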
def post_json(self, url, payload, csrf_token=None, expect_errors=False,
expected_status_int=200, upload_files=None):
"""Post an object to the server by JSON; return the received object."""
data = {'payload': json.dumps(payload)}
if csrf_token:
data['csrf_token'] = csrf_token
json_response = self.testapp.post(
str(url), data, expect_errors=expect_errors,
upload_files=upload_files)
self.assertEqual(json_response.status_int, expected_status_int)
return self._parse_json_response(
json_response, expect_errors=expect_errors)
def put_json(self, url, payload, csrf_token=None, expect_errors=False,
expected_status_int=200):
"""Put an object to the server by JSON; return the received object."""
data = {'payload': json.dumps(payload)}
if csrf_token:
data['csrf_token'] = csrf_token
json_response = self.testapp.put(
str(url), dat
|
LowResourceLanguages/hltdi-l3
|
l3xdg/morphology/internals.py
|
Python
|
gpl-3.0
| 16,052
| 0.002803
|
"""
This file is part of L3Morpho.
Author: Michael Gasser <gasser@cs.indiana.edu>
-----------------------------------------------------------------
internals.py is part of
Natural Language Toolkit: Internal utility functions
Copyright (C) 2001-2008 University of Pennsylvania
Author: Steven Bird <sb@csse.unimelb.edu.au>
Edward Loper <edloper@gradient.cis.upenn.edu>
URL: <http://www.nltk.org/>
License: <http://creativecommons.org/licenses/by-nc-nd/3.0/us/>
"""
import subprocess, os.path, re, warnings, textwrap
import types
######################################################################
# Regular Expression Processing
######################################################################
def convert_regexp_to_nongrouping(pattern):
"""
    Convert all grouping parentheses in the given regexp pattern to
    non-grouping parentheses, and return the result. E.g.:
>>> convert_regexp_to_nongrouping('ab(c(x+)(z*))?d')
'ab(?:c(?:x+)(?:z*))?d'
@type pattern: C{str}
@rtype: C{str}
"""
# Sanity check: back-references are not allowed!
for s in re.findall(r'\\.|\(\?P=', pattern):
if s[1] in '0123456789' or s == '(?P=':
raise ValueError('Regular expressions with back-references '
'are not supported: %r' % pattern)
    # This regexp substitution function replaces grouping parens '(' (and
    # named-group opens like '(?P<name>') with '(?:'; any other match is
    # returned unchanged.
def subfunc(m):
return re.sub('^\((\?P<[^>]*>)?$', '(?:', m.group())
# Scan through the regular expression. If we see any backslashed
# characters, ignore them. If we see a named group, then
# replace it with "(?:". If we see any open parens that are part
# of an extension group, ignore those too. But if we see
    # any other open paren, replace it with "(?:".
return re.sub(r'''(?x)
\\. | # Backslashed character
\(\?P<[^>]*> | # Named group
\(\? | # Extension group
        \(               # Grouping parenthesis''', subfunc, pattern)
##########################################################################
# Java Via Command-Line
##########################################################################
_java_bin = None
_java_options = []
def config_java(bin=None, options=None):
"""
Configure nltk's java interface, by letting nltk know where it can
find the C{java} binary, and what extra options (if any) should be
    passed to java when it is run.
@param bin: The full path to the C{java} binary. If not specified,
then nltk will search the system for a C{java} binary; and if
one is not found, it will raise a C{LookupError} exception.
@type bin: C{string}
@param options: A list of options that should be passed to the
C{java} binary when it is called. A common value is
C{['-Xmx512m']}, which tells the C{java} binary to increase
the maximum heap size to 512 megabytes. If no options are
specified, then do not modify the options list.
@type options: C{list} of C{string}
"""
global _java_bin, _java_options
if bin is not None:
if not os.path.exists(bin):
raise ValueError('Could not find java binary at %r' % bin)
_java_bin = bin
if options is not None:
if isinstance(options, basestring):
options = options.split()
_java_options = list(options)
# Check the JAVAHOME environment variable.
for env_var in ['JAVAHOME', 'JAVA_HOME']:
if _java_bin is None and env_var in os.environ:
paths = [os.path.join(os.environ[env_var], 'java'),
os.path.join(os.environ[env_var], 'bin', 'java')]
for path in paths:
if os.path.exists(path):
_java_bin = path
print('[Found java: %s]' % path)
# If we're on a POSIX system, try using the 'which' command to
# find a java binary.
if _java_bin is None and os.name == 'posix':
try:
p = subprocess.Popen(['which', 'java'], stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
path = stdout.strip()
if path.endswith('java') and os.path.exists(path):
_java_bin = path
print('[Found java: %s]' % path)
except:
pass
if _java_bin is None:
raise LookupError('Unable to find java! Use config_java() '
'or set the JAVAHOME environment variable.')
def java(cmd, classpath=None, stdin=None, stdout=None, stderr=None):
"""
Execute the given java command, by opening a subprocess that calls
C{java}. If java has not yet been configured, it will be configured
by calling L{config_java()} with no arguments.
@param cmd: The java command that should be called, formatted as
a list of strings. Typically, the first string will be the name
of the java class; and the remaining strings will be arguments
for that java class.
@type cmd: C{list} of C{string}
@param classpath: A C{':'} separated list of directories, JAR
archives, and ZIP archives to search for class files.
@type classpath: C{string}
@param stdin, stdout, stderr: Specify the executed programs'
standard input, standard output and standard error file
handles, respectively. Valid values are C{subprocess.PIPE},
an existing file descriptor (a positive integer), an existing
file object, and C{None}. C{subprocess.PIPE} indicates that a
new pipe to the child should be created. With C{None}, no
redirection will occur; the child's file handles will be
inherited from the parent. Additionally, stderr can be
C{subprocess.STDOUT}, which indicates that the stderr data
from the applications should be captured into the same file
handle as for stdout.
@return: A tuple C{(stdout, stderr)}, containing the stdout and
stderr outputs generated by the java command if the C{stdout}
and C{stderr} parameters were set to C{subprocess.PIPE}; or
C{None} otherwise.
@raise OSError: If the java command returns a nonzero return code.
"""
if isinstance(cmd, basestring):
raise TypeError('cmd should be a list of strings')
# Make sure we know where a java binary is.
if _java_bin is None:
config_java()
# Construct the full command string.
cmd = list(cmd)
if classpath is not None:
cmd = ['-cp', classpath] + cmd
cmd = [_java_bin] + _java_options + cmd
# Call java via a subprocess
p = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr)
(stdout, stderr) = p.communicate()
# Check the return code.
if p.returncode != 0:
print(stderr)
raise OSError('Java command failed!')
return (stdout, stderr)
if 0:
#config_java(options='-Xmx512m')
# Write:
#java('weka.classifiers.bayes.NaiveBayes',
# ['-d', '/tmp/names.model', '-t', '/tmp/train.arff'],
# classpath='/Users/edloper/Desktop/weka/weka.jar')
# Read:
(a,b) = java(['weka.classifiers.bayes.NaiveBayes',
'-l', '/tmp/names.model', '-T', '/tmp/test.arff',
'-p', '0'],#, '-distribution'],
classpath='/Users/edloper/Desktop/weka/weka.jar')
######################################################################
# Parsing
######################################################################
class ParseError(ValueError):
"""
Exception raised by parse_* functions when they fail.
    @param position: The index in the input string where an error occurred.
    @param expected: What was expected when an error occurred.
"""
def __init__(self, expected, position):
ValueError.__init__(self, expected, position)
self.expected = expected
self.position = position
def __str__(self):
return 'Expected %s at %s' % (self.expected, self.position)
_STRING_START_RE = re.compile(r"[uU]?[rR]?(\"\"\"|\'\'\'|\"|\')")
def parse_str
|
thenenadx/forseti-security
|
google/cloud/security/common/gcp_api/iam.py
|
Python
|
apache-2.0
| 1,047
| 0
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper for IAM API client."""
from google.cloud.security.common.gcp_api import _base_client
# TODO: The next editor must remove this disable and correct issues.
# pylint: disable=missing-type-doc
# pylint: disable=missing-param-doc
class IamClient(_base_client.BaseClient):
"""IAM Client."""
API_NAME = 'iam'
def __init__(self, credentials=None):
super(IamClient, self).__init__(
credentials=credentials, api_name=self.API_NAME)
|
nagyistoce/netzob
|
test/src/test_netzob/test_Common/test_Type/test_Endianess.py
|
Python
|
gpl-3.0
| 2,233
| 0.010767
|
# -*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details.                             |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
from netzob.Common.Type.Endianess import Endianess
from common.NetzobTestCase import NetzobTestCase
class test_Endianess(NetzobTestCase):
def test_BIG(self):
self.assertEqual(Endianess.BIG, "big-endian")
def test_LITTLE(self):
self.assertEqual(Endianess.LITTLE, "little-endian")
|
kimegitee/python-koans
|
python3/koans/about_scoring_project.py
|
Python
|
mit
| 2,731
| 0.014647
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
# Greed is a dice game where you roll up to five dice to accumulate
# points. The following "score" function will be used calculate the
# score of a single roll of the dice.
#
# A greed roll is scored as follows:
#
# * A set of three ones is 1000 points
#
# * A set of three numbers (other than ones) is worth 100 times the
# number. (e.g. three fives is 500 points).
#
# * A one (that is not part of a set of three) is worth 100 points.
#
# * A five (that is not part of a set of three) is worth 50 points.
#
# * Everything else is worth 0 points.
#
#
# Examples:
#
# score([1,1,1,5,1]) => 1150 points
# score([2,3,4,6,2]) => 0 points
# score([3,4,5,3,3]) => 350 points
# score([1,5,1,2,4]) => 250 points
#
# More scoring examples are given in the tests below:
#
# Your goal is to write the score method.
from collections import Counter
def score(dice):
'''
    Calculate the score for a roll of up to five dice
'''
return sum((score_of_three(k) * (v//3) + score_of_one(k) * (v%3) for k, v in Counter(dice).items()))
def score_of_three(num):
'''
Calculate score for set of three
'''
if num == 1:
return 1000
else:
return num*100
def score_of_one(num):
'''
Calculate score for a roll not in a set of three
'''
if num == 1:
return 100
elif num == 5:
return 50
else:
return 0
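# Worked example using one of the sample rolls listed above:
#   score([3,4,5,3,3]) -> 350
#   the three 3s contribute 3 * 100 = 300, the lone 5 adds 50, and the 4 adds 0.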
class AboutScoringProject(Koan):
def test_score_of_an_empty_list_is_zero(self):
self.assertEqual(0, score([]))
def test_score_of_a_single_roll_of_5_is_50(self):
self.assertEqual(50, score([5]))
def test_score_of_a_single_roll_of_1_is_100(self):
self.assertEqual(100, score([1]))
def test_score_of_multiple_1s_and_5s_is_the_sum_of_individual_scores(self):
self.assertEqual(300, score([1,5,5,1]))
def test_score_of_single_2s_3s_4s_and_6s_are_zero(self):
self.assertEqual(0, score([2,3,4,6]))
def test_score_of_a_triple_1_is_1000(self):
self.assertEqual(1000, score([1,1,1]))
def test_score_of_other_triples_is_100x(self):
self.assertEqual(200, score([2,2,2]))
self.assertEqual(300, score([3,3,3]))
self.assertEqual(400, score([4,4,4]))
self.assertEqual(500, score([5,5,5]))
self.assertEqual(600, score([6,6,6]))
def test_score_of_mixed_is_sum(self):
self.assertEqual(250, score([2,5,2,2,3]))
self.assertEqual(550, score([5,5,5,5]))
self.assertEqual(1150, score([1,1,1,5,1]))
def test_ones_not_left_out(self):
self.assertEqual(300, score([1,2,2,2]))
self.assertEqual(350, score([1,5,2,2,2]))
|
carthach/essentia
|
test/src/unittests/all_tests.py
|
Python
|
agpl-3.0
| 8,953
| 0.006143
|
#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from __future__ import absolute_import # For Python 2 compatibility
from os.path import join, sep
import os
import sys
import unittest
import glob
import essentia
import essentia.streaming
try:
from importlib import reload # Python3
except:
pass
# we don't want to get too chatty when running all the tests
essentia.log.info = False
#essentia.log.debug += essentia.EAll
#essentia.log.debug -= essentia.EConnectors
tests_dir = os.path.dirname(__file__)
if tests_dir:
# Add sys path to make python recognize tests/src/unittests as a module
parent_dir = os.path.abspath(os.path.dirname(tests_dir))
sys.path.insert(0, parent_dir)
# Chdir into the tests dir so that the paths work out right
os.chdir(tests_dir)
# import the test from the subdirectories which filename match the pattern 'test_*.py'
listAllTests = [ filename.split(sep+'test_') for filename in glob.glob(join('*', 'test_*.py')) ]
for testfile in listAllTests:
testfile[1] = testfile[1][:-3]
def importTest(fullname, strategy = 'import'):
'''Imports or reloads test given its fullname.'''
folder, name = fullname
if strategy == 'import':
cmd = 'import unittests.%s.test_%s; setattr(sys.modules[__name__], \'%s\', unittests.%s.test_%s.suite)' % (folder, name, name, folder, name)
elif strategy == 'reload':
cmd1 = 'reload(sys.modules[\'unittests.%s.test_%s\']); ' % (folder, name)
cmd2 = 'setattr(sys.modules[__name__], \'%s\', sys.modules[\'unittests.%s.test_%s\'].suite)' % (name, folder, name)
cmd = cmd1 + cmd2
else:
raise ValueError('When importing a test, the only strategies allowed are \'import\' and \'reload\'')
exec(cmd)
def getTests(names=None, exclude=None, strategy='import'):
allNames = [ name for _, name in listAllTests ]
    names = names or allNames
    exclude = exclude or []
    tests = [ (folder, name) for folder, name in listAllTests
              if name in names and name not in exclude ]
for name in names:
if name not in allNames:
print('WARNING: did not find test %s' % name)
for name in (exclude or []):
if name not in allNames:
print('WARNING: did not find test to exclude %s' % name)
print('Running tests:')
print(sorted(name for _, name in tests))
if not tests:
raise RuntimeError('No test to execute!')
for test in tests:
importTest(test, strategy)
testObjectsList = [ getattr(sys.modules[__name__], testName) for folder, testName in tests ]
return unittest.TestSuite(testObjectsList)
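# Illustrative call (the excluded test name is hypothetical; real names come
# from the test_*.py files discovered above):
#     suite = getTests(exclude=['some_slow_test'])
#     runTests(suite)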
def traceCompute(algo, *args, **kwargs):
print('computing algo %s' % algo.name())
return algo.normalCompute(*args, **kwargs)
def computeResetCompute(algo, *args, **kwargs):
# do skip certain algos, otherwise we'd enter in an infinite loop!!!
audioLoaders = [ 'MonoLoader', 'EqloudLoader', 'EasyLoader', 'AudioLoader' ]
filters = [ 'IIR', 'DCRemoval', 'HighPass', 'LowPass', 'BandPass', 'AllPass',
'BandReject', 'EqualLoudness', 'MovingAverage' ]
special = [ 'FrameCutter', 'OverlapAdd', 'TempoScaleBands', 'TempoTap', 'TempoTapTicks',
'Panning','OnsetDetection', 'MonoWriter', 'Flux', 'StartStopSilence',
'LogSpectrum', 'ClickDetector', 'SNR', 'SaturationDetector' ]
if algo.name() in audioLoaders + filters + special:
return algo.normalCompute(*args, **kwargs)
else:
algo.normalCompute(*args, **kwargs)
algo.reset()
return algo.normalCompute(*args, **kwargs)
def computeDecorator(newCompute):
def algodecorator(algo):
algo.normalCompute = algo.compute
algo.compute = newCompute
algo.__call__ = newCompute
algo.hasDoubleCompute = True
return algo
return algodecorator
# recursive helper function that finds outputs connected to pools and calls func
def mapPools(algo, func):
# make a copy first, because func might modify the connections in the for
# loops
connections = dict(algo.connections)
for output, inputs in connections.items():
ins = list(inputs)
for input in ins:
# TODO: assuming input is a tuple of pool and descriptor name
if isinstance(input, tuple):
func(algo, output, input)
elif isinstance(input, essentia.streaming._StreamConnector):
mapPools(input.input_algo, func)
#else ignore nowhere connections
# For this to work for networks that are connected to a pool, we need to conduct
# the first run of the network with all pools replaced by dummy pools. The
# second run will run with the network connected to the original pools. This
# method is required to avoid doubling of the data in the pools due to the fact
# that we run the network twice.
def runResetRun(gen, *args, **kwargs):
    # 0. Find networks which contain algorithms that do not play nice with our
# little trick. In particular, we have a test for multiplexer that runs
# multiple generators...
def isValid(algo):
if isinstance(algo, essentia.streaming.VectorInput) and not list(algo.connections.values())[0]:
# non-connected VectorInput, we don't want to get too fancy here...
return False
if algo.name() == 'Multiplexer':
return False
for output, inputs in algo.connections.items():
for inp in inputs:
if isinstance(inp, essentia.streaming._StreamConnector) and not isValid(inp.input_algo):
return False
return True
if not isValid(gen):
print('Network is not capable of doing the run/reset/run trick, doing it the normal way...')
essentia.run(gen)
return
# 1. Find all the outputs in the network that are connected to pools--aka
# pool feeders and for each pool feeder, disconnect the given pool,
# store it, and connect a dummy pool in its place
def useDummy(algo, output, input):
if not hasattr(output, 'originalPools'):
output.originalPools = []
output.dummyPools = []
# disconnect original
output.originalPools.append(input)
output.disconnect(input)
# connect dummy
dummy = essentia.Pool()
output.dummyPools.append((dummy, input[1]))
output >> output.dummyPools[-1]
mapPools(gen, useDummy)
# 2. Run the network
essentia.run(gen)
# 3. Reset the network
essentia.reset(gen)
# 4. For each pool feeder, disconnect the dummy pool and reconnect the
# original pool
def useOriginal(algo, output, input):
# disconnect dummy
output.disconnect(input)
# the dummy pools and the original pools should have the same index
idx = output.dummyPools.index(input)
output.dummyPools.remove(input)
# connect original
output >> output.originalPools[idx]
# don't need these anymore
if len(output.dummyPools) == 0:
del output.dummyPools
del output.originalPools
mapPools(gen, useOriginal)
# 5. Run the network for the second and final time
return essentia.run(gen)
def runTests(tests):
result = unittest.TextTestRunner(verbosity=2).run(tests)
# return the number of failures and errors
return len(result.errors) + len(result.f
|
abelboldu/nagpy-pushover
|
nagpy/util/pushover.py
|
Python
|
epl-1.0
| 690
| 0.007246
|
#!/usr/bin/env python
import urllib
import urllib2
import urlparse
import json
import os
PUSHOVER_API = "https://api.pushover.net/1/"
class PushoverError(Exception): pass
def pushover(**kwargs):
assert 'message' in kwargs
if not 'token' in kwargs:
kwargs['token'] = os.environ['PUSHOVER_TOKEN']
    if not 'user' in kwargs:
kwargs['user'] = os.environ['PUSHOVER_USER']
    url = urlparse.urljoin(PUSHOVER_API, "messages.json")
data = urllib.urlencode(kwargs)
req = urllib2.Request(url, data)
response = urllib2.urlopen(req)
output = response.read()
data = json.loads(output)
if data['status'] != 1:
raise PushoverError(output)
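# Illustrative usage (the token and user values are placeholders, not real
# credentials; if omitted they are read from the PUSHOVER_TOKEN and
# PUSHOVER_USER environment variables, as handled above):
#     pushover(message='backup finished',
#              token='<app-token>', user='<user-key>')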
|
willybh11/python
|
projectEuler/problems/e7.py
|
Python
|
gpl-3.0
| 287
| 0.020906
|
print '''
By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13.
What is the 10,001st prime number?
'''
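# The loop below calls isprime() without defining it; a minimal trial-division
# implementation is assumed here so the snippet is self-contained.
def isprime(n):
    if n < 2:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += 1
    return True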
def problem():
x,limit = 1,0
while limit != 10001:
x += 1
        if isprime(x): limit += 1
print x
problem()
|
DavidNorman/tensorflow
|
tensorflow/python/keras/engine/training_dataset_test.py
|
Python
|
apache-2.0
| 22,565
| 0.004254
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import sys
import numpy as np
import six
from tensorflow.python import keras
from tensorflow.python.data.experimental.ops import cardinality
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import callbacks
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import metrics as metrics_module
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
class BatchCounterCallback(callbacks.Callback):
def __init__(self):
self.batch_begin_count = 0
self.batch_end_count = 0
def on_batch_begin(self, *args, **kwargs):
self.batch_begin_count += 1
def on_batch_end(self, *args, **kwargs):
self.batch_end_count += 1
class TestTrainingWithDataset(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_calling_model_on_same_dataset(self):
if ((not testing_utils.should_run_eagerly()) and
testing_utils.get_model_type() == 'subclass' and
context.executing_eagerly() and
(not testing_utils.should_run_tf_function())):
self.skipTest('b/120673224')
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae']
model.compile(
optimizer,
loss,
metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
# Call fit with validation data
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_training_and_eval_methods_on_dataset(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae', metrics_module.CategoricalAccuracy()]
model.compile(
optimizer,
loss,
metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat() # Infinite dataset.
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset, steps=2, verbose=1)
model.predict(dataset, steps=2)
# Test with validation data
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
# Test with validation split
with self.assertRaisesRegexp(
ValueError, '`validation_split` argument is not supported when '):
model.fit(dataset,
epochs=1, steps_per_epoch=2, verbose=0,
validation_split=0.5, validation_steps=2)
# Test with sample weight.
sample_weight = np.random.random((10,))
with self.assertRaisesRegexp(
ValueError, '`sample_weight` argument is not supported '
'when input `x` is a dataset or a dataset iterator'):
model.fit(
dataset,
epochs=1,
steps_per_epoch=2,
verbose=0,
sample_weight=sample_weight)
# Test invalid usage
with self.assertRaisesRegexp(
ValueError, 'The `batch_size` argument must not be specified'):
model.fit(dataset, batch_size=10, epochs=1, steps_per_epoch=2,
verbose=0)
with self.assertRaisesRegexp(
ValueError, 'The `batch_size` argument must not be specified'):
model.predict(dataset, batch_size=10, steps=2, verbose=0)
with self.assertRaisesRegexp(
ValueError, 'The `batch_size` argument must not be specified'):
model.evaluate(dataset, batch_size=10, steps=2, verbose=0)
with self.assertRaisesRegexp(ValueError,
'you should not specify a target'):
model.fit(dataset, dataset,
epochs=1, steps_per_epoch=2, verbose=0)
# With an infinite dataset, `steps_per_epoch`/`steps` argument is required.
with self.assertRaisesRegexp(
ValueError, 'the `steps_per_epoch` argument'):
model.fit(dataset, epochs=1, verbose=0)
with self.assertRaisesRegexp(ValueError,
'the `steps` argument'):
model.evaluate(dataset, verbose=0)
with self.assertRaisesRegexp(ValueError,
'the `steps` argument'):
model.predict(dataset, verbose=0)
@keras_parameterized.run_with_all_model_types(exclude_models='sequential')
@keras_parameterized.run_all_keras_modes
def test_training_and_eval_methods_on_multi_input_output_dataset(self):
input_a = keras.layers.Input(shape=(3,), name='input_1')
input_b = keras.layers.Input(shape=(3,), name='input_2')
dense = keras.layers.Dense(4, name='dense')
dropout = keras.layers.Dropout(0.5, name='dropout')
branch_a = [input_a, dense]
branch_b = [input_b, dense, dropout]
model = testing_utils.get_multi_io_model(branch_a, branch_b)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
input_a_np = np.random.random((10, 3)).astype(dtype=np.float32)
input_b_np = np.random.random((10, 3)).astype(dtype=np.float32)
    output_d_np = np.random.random((10, 4)).astype(dtype=np.float32)
output_e_np = np.random.random((10, 4)).astype(dtype=np.float32)
    # Test with tuples
dataset_tuple = dataset_ops.Dataset.from_tensor_slices((
(input_a_np, input_b_np), (output_d_np, output_e_np)))
dataset_tuple = dataset_tuple.repeat(100)
dataset_tuple = dataset_tuple.batch(10)
model.fit(dataset_tuple, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset_tuple, steps=2, verbose=1)
predict_dataset_tuple = dataset_ops.Dataset.from_tensor_slices(
(input_a_np, input_b_np))
# TODO(b/123360757): Remove below assertion once predict() supports
    # multi-input datasets.
with self.assertRaisesRegexp(ValueError,
'Error when checking model input'):
model.predict(predict_dataset_tuple, steps=1)
# Test with dict
input_dict = {'input_1': input_a_np, 'input_2': input_b_np}
if testing_utils.get_model_type() == 'subclass':
output_dict = {'output_1': output_d_np, 'output_2': output_e_np}
else:
output_dict = {'dense': output_d_np, 'dropout': output_e_np}
dataset_dict = dataset_ops.D
|
socialplanning/opencore
|
opencore/project/browser/base.py
|
Python
|
gpl-3.0
| 3,338
| 0.000899
|
from Acquisition import aq_inner
from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile
from opencore.browser.base import BaseView, view
from opencore.project import LATEST_ACTIVITY
from opencore.project import PROJ_HOME
from opencore.project.utils import get_featurelets
from plone.memoize.instance import memoizedproperty
from topp.featurelets.interfaces import IFeatureletSupporter, IFeaturelet
from topp.utils import text
from zope.component import queryAdapter
class ProjectBaseView(BaseView):
# XXX to move to project
@memoizedproperty
def has_mailing_lists(self):
return self._has_featurelet('listen')
@memoizedproperty
def has_task_tracker(self):
return self._has_featurelet('tasks')
@memoizedproperty
def has_blog(self):
return self._has_featurelet('blog')
def _get_featurelet(self, flet_id):
flets = get_featurelets(self.context)
for flet in flets:
if flet['name'] == flet_id:
return flet
return None
def _has_featurelet(self, flet_id):
flet_adapter = queryAdapter(
IFeatureletSupporter(self.context),
IFeaturelet,
name=flet_id)
if flet_adapter is None:
return False
return flet_adapter.installed
#@@ wiki should just be another featurelet
@staticmethod
def intrinsic_homepages():
"""return data for homepages intrinsic to opencore
(not featurelet-dependent)
"""
# XXX maybe this should just be a list?
# @@ maybe this should just be an ini?
return [
dict(id='latest-activity',
title='Summary',
url=LATEST_ACTIVITY,
checked=True,
hidden=False,
),
dict(id='wiki',
title='Pages',
url=PROJ_HOME,
checked=False,
hidden=False,
),
]
@view.mcproperty
def project_info(self):
"""
Returns a dict containing information about the
currently-viewed project for easy template access.
calculated once
"""
from opencore.interfaces.workflow import IReadWorkflowPolicySupport
proj_info = {}
if self.piv.inProject:
proj = aq_inner(self.piv.project)
security = IReadWorkflowPolicySupport(proj).getCurrentPolicyId()
proj_info.update(navname=proj.Title(),
fullname=proj.getFull_name(),
title=proj.Title(),
security=security,
url=proj.absolute_url(),
description=proj.Description(),
featurelets=self.piv.featurelets,
location=proj.getLocation(),
obj=proj)
return proj_info
def authenticator(self):
return self.get_tool('browser_id_manager').getBrowserId(create=True)
def authenticator_input(self):
return '<input type="hidden" name="authenticator" value="%s" />' % self.authenticator()
|
svanschalkwyk/datafari
|
windows/python/Lib/test/test_pdb.py
|
Python
|
apache-2.0
| 11,281
| 0.002748
|
# A test suite for pdb; at the moment, this only validates skipping of
# specified test modules (RFE #5142).
import imp
import sys
import os
import unittest
import subprocess
import textwrap
from test import test_support
# This little helper class is essential for testing pdb under doctest.
from test_doctest import _FakeInput
class PdbTestCase(unittest.TestCase):
def run_pdb(self, script, commands):
"""Run 'script' lines with pdb and the pdb 'commands'."""
filename = 'main.py'
with open(filename, 'w') as f:
f.write(textwrap.dedent(script))
self.addCleanup(test_support.unlink, filename)
cmd = [sys.executable, '-m', 'pdb', filename]
stdout = stderr = None
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
stdout, stderr = proc.communicate(commands)
proc.stdout.close()
proc.stdin.close()
return stdout, stderr
def test_issue13183(self):
script = """
from bar import bar
def foo():
bar()
def nope():
pass
def foobar():
foo()
nope()
foobar()
"""
commands = """
from bar import bar
break bar
continue
step
step
quit
"""
bar = """
def bar():
pass
"""
with open('bar.py', 'w') as f:
f.write(textwrap.dedent(bar))
self.addCleanup(test_support.unlink, 'bar.py')
self.addCleanup(test_support.unlink, 'bar.pyc')
stdout, stderr = self.run_pdb(script, commands)
self.assertTrue(
any('main.py(5)foo()->None' in l for l in stdout.splitlines()),
'Fail to step into the caller after a return')
class PdbTestInput(object):
"""Context manager that makes testing Pdb in doctests easier."""
def __init__(self, input):
self.input = input
def __enter__(self):
self.real_stdin = sys.stdin
sys.stdin = _FakeInput(self.input)
def __exit__(self, *exc):
sys.stdin = self.real_stdin
def write(x):
print x
def test_pdb_displayhook():
"""This tests the custom displayhook for pdb.
>>> def test_function(foo, bar):
... import pdb; pdb.Pdb().set_trace()
... pass
>>> with PdbTestInput([
... 'foo',
... 'bar',
... 'for i in range(5): write(i)',
... 'continue',
... ]):
... test_function(1, None)
> <doctest test.test_pdb.test_pdb_displayhook[0]>(3)test_function()
-> pass
(Pdb) foo
1
(Pdb) bar
(Pdb) for i in range(5): write(i)
0
1
2
3
4
(Pdb) continue
"""
def test_pdb_breakpoint_commands():
"""Test basic commands related to breakpoints.
>>> def test_function():
... import pdb; pdb.Pdb().set_trace()
... print(1)
... print(2)
... print(3)
... print(4)
First, need to clear bdb state that might be left over from previous tests.
Otherwise, the new breakpoints might get assigned different numbers.
>>> from bdb import Breakpoint
>>> Breakpoint.next = 1
>>> Breakpoint.bplist = {}
>>> Breakpoint.bpbynumber = [None]
Now test the breakpoint commands. NORMALIZE_WHITESPACE is needed because
the breakpoint list outputs a tab for the "stop only" and "ignore next"
lines, which we don't want to put in here.
>>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE
... 'break 3',
... 'disable 1',
... 'ignore 1 10',
... 'condition 1 1 < 2',
... 'break 4',
... 'break 4',
... 'break',
... 'clear 3',
... 'break',
... 'condition 1',
... 'enable 1',
... 'clear 1',
... 'commands 2',
... 'print 42',
... 'end',
... 'continue', # will stop at breakpoint 2 (line 4)
... 'clear', # clear all!
... 'y',
... 'tbreak 5',
... 'continue', # will stop at temporary breakpoint
... 'break', # make sure breakpoint is gone
... 'continue',
... ]):
    ...     test_function()
> <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>(3)test_function()
-> print(1)
(Pdb) break 3
Breakpoint 1 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:3
(Pdb) disable 1
(Pdb) ignore 1 10
    Will ignore next 10 crossings of breakpoint 1.
(Pdb) condition 1 1 < 2
(Pdb) break 4
Breakpoint 2 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:4
(Pdb) break 4
Breakpoint 3 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:4
(Pdb) break
Num Type Disp Enb Where
1 breakpoint keep no at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:3
stop only if 1 < 2
ignore next 10 hits
2 breakpoint keep yes at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:4
3 breakpoint keep yes at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:4
(Pdb) clear 3
Deleted breakpoint 3
(Pdb) break
Num Type Disp Enb Where
1 breakpoint keep no at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:3
stop only if 1 < 2
ignore next 10 hits
2 breakpoint keep yes at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:4
(Pdb) condition 1
Breakpoint 1 is now unconditional.
(Pdb) enable 1
(Pdb) clear 1
Deleted breakpoint 1
(Pdb) commands 2
(com) print 42
(com) end
(Pdb) continue
1
42
> <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>(4)test_function()
-> print(2)
(Pdb) clear
Clear all breaks? y
(Pdb) tbreak 5
Breakpoint 4 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:5
(Pdb) continue
2
Deleted breakpoint 4
> <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>(5)test_function()
-> print(3)
(Pdb) break
(Pdb) continue
3
4
"""
def test_pdb_skip_modules():
"""This illustrates the simple case of module skipping.
>>> def skip_module():
... import string
... import pdb; pdb.Pdb(skip=['string*']).set_trace()
... string.lower('FOO')
>>> with PdbTestInput([
... 'step',
... 'continue',
... ]):
... skip_module()
> <doctest test.test_pdb.test_pdb_skip_modules[0]>(4)skip_module()
-> string.lower('FOO')
(Pdb) step
--Return--
> <doctest test.test_pdb.test_pdb_skip_modules[0]>(4)skip_module()->None
-> string.lower('FOO')
(Pdb) continue
"""
# Module for testing skipping of module that makes a callback
mod = imp.new_module('module_to_skip')
exec 'def foo_pony(callback): x = 1; callback(); return None' in mod.__dict__
def test_pdb_skip_modules_with_callback():
"""This illustrates skipping of modules that call into other code.
>>> def skip_module():
... def callback():
... return None
... import pdb; pdb.Pdb(skip=['module_to_skip*']).set_trace()
... mod.foo_pony(callback)
>>> with PdbTestInput([
... 'step',
... 'step',
... 'step',
... 'step',
... 'step',
... 'continue',
... ]):
... skip_module()
... pass # provides something to "step" to
> <doctest test.test_pdb.test_pdb_skip_modules_with_callback[0]>(5)skip_module()
-> mod.foo_pony(callback)
(Pdb) step
--Call--
> <doctest test.test_pdb.test_pdb_skip_modules_with_callback[0]>(2)callback()
-> def callback():
(Pdb) step
> <doctest test.test_pdb.test_pdb_skip_modules_with_callback[0]>(3)callback()
-> return None
(Pdb) step
--Return--
> <doctest test.test_pdb.test_pdb_skip_modules_with_callback[0]>(3)callback()->None
-> return None
(Pdb) step
--Ret
|
pri22296/beautifultable
|
docs/conf.py
|
Python
|
mit
| 5,447
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# BeautifulTable documentation build configuration file, created by
# sphinx-quickstart on Sun Dec 18 15:59:32 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath("../"))
import beautifultable # noqa E402
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.a
|
utodoc",
"sphinx.ext.intersphinx",
"sphinx.e
|
xt.todo",
"sphinx.ext.coverage",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
"sphinx.ext.napoleon",
]
napoleon_google_docstring = False
napoleon_include_special_with_doc = False
# napoleon_use_param = False
# napoleon_use_ivar = True
autodoc_member_order = "bysource"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "BeautifulTable"
copyright = "2021, Priyam Singh"
author = beautifultable.__author__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = beautifultable.__version__
# The full version, including alpha/beta/rc tags.
release = beautifultable.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "BeautifulTabledoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"BeautifulTable.tex",
"BeautifulTable Documentation",
"Priyam Singh",
"manual",
),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, "beautifultable", "BeautifulTable Documentation", [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"BeautifulTable",
"BeautifulTable Documentation",
author,
"BeautifulTable",
"One line description of project.",
"Miscellaneous",
),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"https://docs.python.org/": None}
|
mattdm/dnf
|
dnf/base.py
|
Python
|
gpl-2.0
| 113,956
| 0.001667
|
# Copyright 2005 Duke University
# Copyright (C) 2012-2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Supplies the Base class.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import dnf
import libdnf.transaction
from copy import deepcopy
from dnf.comps import CompsQuery
from dnf.i18n import _, P_, ucd
from dnf.util import _parse_specs
from dnf.db.history import SwdbInterface
from dnf.yum import misc
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
import datetime
import dnf.callback
import dnf.comps
import dnf.conf
import dnf.conf.read
import dnf.crypto
import dnf.dnssec
import dnf.drpm
import dnf.exceptions
import dnf.goal
import dnf.history
import dnf.lock
import dnf.logging
# WITH_MODULES is used by ansible (lib/ansible/modules/packaging/os/dnf.py)
try:
import dnf.module.module_base
WITH_MODULES = True
except ImportError:
WITH_MODULES = False
import dnf.persistor
import dnf.plugin
import dnf.query
import dnf.repo
import dnf.repodict
import dnf.rpm.connection
import dnf.rpm.miscutils
import dnf.rpm.transaction
import dnf.sack
import dnf.selector
import dnf.subject
import dnf.transaction
import dnf.util
import dnf.yum.rpmtrans
import functools
import hawkey
import itertools
import logging
import math
import os
import operator
import re
import rpm
import time
import shutil
logger = logging.getLogger("dnf")
class Base(object):
def __init__(self, conf=None):
# :api
self._closed = False
self._conf = conf or self._setup_default_conf()
self._goal = None
self._repo_persistor = None
self._sack = None
self._transaction = None
self._priv_ts = None
self._comps = None
self._comps_trans = dnf.comps.TransactionBunch()
self._history = None
self._tempfiles = set()
self._trans_tempfiles = set()
self._ds_callback = dnf.callback.Depsolve()
self._logging = dnf.logging.Logging()
self._repos = dnf.repodict.RepoDict()
self._rpm_probfilter = set([rpm.RPMPROB_FILTER_OLDPACKAGE])
self._plugins = dnf.plugin.Plugins()
self._trans_success = False
self._trans_install_set = False
self._tempfile_persistor = None
# self._update_security_filters is used by ansible
self._update_security_filters = []
self._update_security_options = {}
self._allow_erasing = False
self._repo_set_imported_gpg_keys = set()
self.output = None
def __enter__(self):
return self
def __exit__(self, *exc_args):
self.close()
def __del__(self):
self.close()
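    # Illustrative high-level use of this class (a minimal sketch; repository
    # configuration, error handling and transaction details are omitted):
    #     with dnf.Base() as base:
    #         base.read_all_repos()
    #         base.fill_sack()
    #         # ...query/mark packages, then base.resolve() and base.do_transaction()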
def _add_tempfiles(self, files):
if self._transaction:
self._trans_tempfiles.update(files)
elif self.conf.destdir:
pass
else:
self._tempfiles.update(files)
def _add_repo_to_sack(self, repo):
repo.load()
mdload_flags = dict(load_filelists=True,
load_presto=repo.deltarpm,
load_updateinfo=True)
if repo.load_metadata_other:
mdload_flags["
|
load_other"] = True
try:
            self._sack.load_repo(repo._repo, build_cache=True, **mdload_flags)
except hawkey.Exception as e:
logger.debug(_("loading repo '{}' failure: {}").format(repo.id, e))
raise dnf.exceptions.RepoError(
_("Loading repository '{}' has failed").format(repo.id))
@staticmethod
def _setup_default_conf():
conf = dnf.conf.Conf()
subst = conf.substitutions
if 'releasever' not in subst:
subst['releasever'] = \
dnf.rpm.detect_releasever(conf.installroot)
return conf
def _setup_modular_excludes(self):
hot_fix_repos = [i.id for i in self.repos.iter_enabled() if i.module_hotfixes]
try:
solver_errors = self.sack.filter_modules(
self._moduleContainer, hot_fix_repos, self.conf.installroot,
self.conf.module_platform_id, update_only=False, debugsolver=self.conf.debug_solver,
module_obsoletes=self.conf.module_obsoletes)
except hawkey.Exception as e:
raise dnf.exceptions.Error(ucd(e))
if solver_errors:
logger.warning(
dnf.module.module_base.format_modular_solver_errors(solver_errors[0]))
def _setup_excludes_includes(self, only_main=False):
disabled = set(self.conf.disable_excludes)
if 'all' in disabled and WITH_MODULES:
self._setup_modular_excludes()
return
repo_includes = []
repo_excludes = []
# first evaluate repo specific includes/excludes
if not only_main:
for r in self.repos.iter_enabled():
if r.id in disabled:
continue
if len(r.includepkgs) > 0:
incl_query = self.sack.query().filterm(empty=True)
for incl in set(r.includepkgs):
subj = dnf.subject.Subject(incl)
incl_query = incl_query.union(subj.get_best_query(
self.sack, with_nevra=True, with_provides=False, with_filenames=False))
incl_query.filterm(reponame=r.id)
repo_includes.append((incl_query.apply(), r.id))
excl_query = self.sack.query().filterm(empty=True)
for excl in set(r.excludepkgs):
subj = dnf.subject.Subject(excl)
excl_query = excl_query.union(subj.get_best_query(
self.sack, with_nevra=True, with_provides=False, with_filenames=False))
excl_query.filterm(reponame=r.id)
if excl_query:
repo_excludes.append((excl_query, r.id))
# then main (global) includes/excludes because they can mask
# repo specific settings
if 'main' not in disabled:
include_query = self.sack.query().filterm(empty=True)
if len(self.conf.includepkgs) > 0:
for incl in set(self.conf.includepkgs):
subj = dnf.subject.Subject(incl)
include_query = include_query.union(subj.get_best_query(
self.sack, with_nevra=True, with_provides=False, with_filenames=False))
exclude_query = self.sack.query().filterm(empty=True)
for excl in set(self.conf.excludepkgs):
subj = dnf.subject.Subject(excl)
exclude_query = exclude_query.union(subj.get_best_query(
self.sack, with_nevra=True, with_provides=False, with_filenames=False))
if len(self.conf.includepkgs) > 0:
self.sack.add_includes(include_query)
self.sack.set_use_includes(True)
if exclude_query:
self.sack.add_excludes(exclude_query)
if repo_includes:
for query, repoid in repo_includes:
self.sack.add_includes(query)
self.sack.set_use_includes(True, repoid)
if repo_excludes:
for query, repoid in repo_excludes:
self.sack.add_excludes(query)
if not only_main and WITH_MODULES:
self.
|
vinni-au/vega-strike
|
data/bases/frigid_mud.py
|
Python
|
gpl-2.0
| 4,436
| 0.018034
|
import Base
import VS
import dynamic_mission
import vsrandom
import fixers
shipsize = VS.getPlayer().rSize()/35
#print "Ship Size: " + str(VS.getPlayer().rSize()) #debug
dynamic_mission.CreateMissions()
time_of_day='_day'
# ROOMS
landing = Base.Room ('Landing Pad')
if (VS.getPlayer().rSize()<=100):
Base.Texture (landing, 'tex', 'bases/frigid_mud/AridLandingSmall2.sprite', 0, 0)
else:
Base.Texture (landing, 'tex', 'bases/frigid_mud/AridLandingCapship.sprite', 0, 0)
fuelbay = Base.Room ('Fuel Bay')
Base.Texture (fuelbay, 'tex', 'bases/frigid_mud/AridFuelBay.sprite', 0, 0)
panorama = Base.Room ('Panorama')
Base.Texture (panorama, 'tex', 'bases/frigid_mud/AridPanorama.sprite', 0, 0)
entrance = Base.Room ('Concourse')
Base.Texture (entrance, 'tex', 'bases/frigid_mud/AridConcourse2.sprite', 0, 0)
exit = Base.Room ('Concourse')
Base.Texture (exit, 'tex', 'bases/frigid_mud/AridConcourse.sprite', 0, 0)
bar1 = Base.Room ('Bar')
Base.Texture (bar1, 'tex', 'bases/frigid_mud/AridBar2.sprite', 0, 0)
bar2 = Base.Room ('Bar')
Base.Texture (bar2, 'tex', 'bases/frigid_mud/AridBar1.sprite', 0, 0)
balcony = Base.Room ('Balcony')
Base.Texture (balcony, 'tex', 'bases/frigid_mud/AridBalcony.sprite', 0, 0)
# INTERLINKS
if (VS.getPlayer().rSize()<=100):
Base.Link (landing, 'l_f', -0.70, 0.30, 0.30, 0.22, 'Fuel Bay', fuelbay)
Base.Link (landing, 'l_e', 0.40, -0.10, 0.30, 0.22, 'Concourse', entrance)
Base.Link (landing, 'l_p', -0.97, -0.20, 0.30, 0.22, 'Panorama', panorama)
else:
Base.Link (landing, 'l_f', 0.00, -0.30, 0.30, 0.22, 'Fuel Bay', fuelbay)
Base.Link (landing, 'l_e', 0.80, -0.40, 0.30, 0.22, 'Concourse', entrance)
Base.Link (landing, 'l_p', -0.97, -0.20, 0.30, 0.22, 'Panorama', panorama)
Base.Link (entrance, 'e_b', -0.10, 0.00, 0.30, 0.22, 'Bar', bar1)
Base.Link (entrance, 'e_e', 0.00, -0.90, 0.30, 0.22, 'To Landing Pad', exit)
Base.Link (bar1, 'b1_b2', -0.20, -0.10, 0.30, 0.22, 'Bar', bar2)
Base.Link (bar1, 'b1_e', 0.00, -0.90, 0.30, 0.22, 'Concourse', exit)
Base.Link (exit, 'e_l', -0.50, -0.30, 0.30, 0.22, 'Landing Pad', landing)
Base.Link (exit, 'e_e', 0.00, -0.90, 0.30, 0.22, 'To Bar', entrance)
Base.Link (bar2, 'b2_b', -0.10, 0.00, 0.30, 0.22, 'Balcony', balcony)
Base.Link (bar2, 'b2_b1', 0.00, -0.90, 0.30, 0.22, 'Bar', bar1)
Base.Link (balcony, 'b_b2', -0.50, -0.30, 0.30, 0.22, 'Bar', bar2)
Base.Link (balcony, 'b_l', 0.20, -0.10, 0.30, 0.22, 'Landing Pad', landing)
Base.Link (balcony, 'b_p', 0.50, 0.20, 0.30, 0.22, 'Panorama', panorama)
Base.Link (fuelbay, 'f_l', 0.00, -0.95, 0.30, 0.22, 'Landing Pad', landing)
Base.Link (fuelbay, 'f_p', -0.70, -0.10, 0.30, 0.22, 'Panorama', panorama)
Base.Link (panorama, 'p_l', 0.10, -0.60, 0.30, 0.22, 'Landing Pad', landing)
Base.Link (panorama, 'p_b', 0.00, -0.30, 0.30, 0.22, 'Balcony', balcony)
Base.Link (panorama, 'p_c', 0.25, -0.40, 0.30, 0.22, 'Concourse', entrance)
Base.Link (panorama, 'p_f', -0.35, -0.30, 0.30, 0.22, 'Fuel Bay', fuelbay)
# SHIP PLACEMENT (size is inverse proportional!)
if (VS.getPlayer().rSize()<=100):
Base.Ship (landing, 'ship_l', (-0.3, -0.5, 2/shipsize), (0.00, 0.90, -0.20), (-0.7, 0, -0.7))
else:
Base.Ship (landing, 'ship_l', (-0.4, -0.5, shipsize/20), (0.00, 1.0, 0.00), (0.7, 0, -0.4))
Base.LaunchPython (landing, 'launch','bases/launch_music.py', -0.5, -0.5, 0.5, 0.3, 'Launch Your Ship')
# COMPUTER TERMINALS (News Missions Info Cargo Upgrades ShipDealer)
if (VS.getPlayer().rSize()<=100):
Base.Comp (landing, 'my_comp_id', 0.17, -0.15, 0.30, 0.22, 'Computer', 'News Missions Info')
else:
Base.Comp (landing, 'my_comp_id', 0.17, -0.45, 0.30, 0.22, 'Computer', 'News Missions Info')
Base.Comp (fuelbay, 'my_comp_id', 0.20, -0.10, 0.30, 0.22, 'Computer', 'Upgrades Info')
Base.Comp (entrance, 'my_comp_id', -0.90, -0.30, 0.30, 0.22, 'Computer', 'News Missions Upgrades Info Cargo ShipDealer')
Base.Comp (exit, 'my_comp_id', 0.20, -0.30, 0.30, 0.22, 'Computer', 'News Missions Upgrades Info Cargo ShipDealer')
# FIXERS
bartender = vsrandom.randrange(0,19)
Base.Texture (bar1,'bartender','bases/generic/bartender%d.spr' % (bartender), -0.47, 0.15)
Base.Python (bar1, 'talk', -0.67, -0.12, 0.4, 0.4, 'Talk to the Bartender', 'bases/bartender_frigidmud.py',0)
Base.Texture (bar2,'bartender','bases/generic/bartender%d.spr' % (bartender), -0.47, 0.15)
Base.Python (bar2, 'talk', -0.67, -0.12, 0.4, 0.4, 'Talk to the Bartender', 'bases/bartender_frigidmud.py',0)
|
Sabayon/anaconda
|
pyanaconda/installclasses/awesome.py
|
Python
|
gpl-2.0
| 1,507
| 0.000664
|
#
# awesome.py
#
# Copyright (C) 2014 Fabio Erculiani
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pyanaconda.installclass import BaseInstallClass
from pyanaconda.i18n import N_
from pyanaconda.sabayon import Entropy
class InstallClass(BaseInstallClass):
id = "sabayon_awesome"
name = N_("Sabayon Awesome")
sortPriority = 10000
_l10n_domain = "anaconda"
efi_dir = "sabayon"
dmrc = "awesome"
if Entropy().is_sabayon_steambox():
dmrc = "steambox"
if not Entropy().is_installed("x11-wm/awesome"):
hidden = 1
def configure(self, anaconda):
BaseInstallClass.configure(self, anaconda)
BaseInstallClass.setDefaultPartitioning(self, anaconda.storage)
def getBackend(self):
from pyanaconda.sabayon.livecd import LiveCDCopyBackend
return LiveCDCopyBackend
def __init__(self):
BaseInstallClass.__init__(self)
|
amol9/wallp
|
wallp/desktop/desktop_factory.py
|
Python
|
mit
| 1,362
| 0.022761
|
import os
from redlib.api.system import sys_command, CronDBus, CronDBusError, is_linux, is_windows
from ..util.logger import log
from . import Desktop, DesktopError
from . import gnome_desktop
from . import feh_desktop
if is_windows():
from .windows_desktop import WindowsDesktop
def load_optional_module(module, package=None, err_msg=None):
import importlib
try:
		importlib.import_module(module, package=package)
except ImportError as e:
print(e)
if err_msg is not None:
print(err_msg)
load_optional_module('.kde_plasma_desktop', package='wallp.desktop', err_msg='KDE Plasma will not be supported.')
def get_desktop():
if is_linux():
		crondbus = CronDBus(vars=['GDMSESSION', 'DISPLAY', 'XDG_CURRENT_DESKTOP'])
crondbus.setup()
gdmsession = os.environ.get('GDMSESSION', None)
xdg_current_desktop = os.environ.get('XDG_CURRENT_DESKTOP', None)
if gdmsession is None and xdg_current_desktop is None:
log.error('could not read environment variables: GDMSESSION or XDG_CURRENT_DESKTOP')
raise DesktopError()
for desktop_class in Desktop.__subclasses__():
if desktop_class.supports(gdmsession, xdg_current_desktop):
return desktop_class()
log.error('unsupported window manager: %s, %s'%(gdmsession, xdg_current_desktop))
elif is_windows():
return WindowsDesktop()
else:
log.error('unsupported OS')
return None
|
UCSC-MedBook/MedBook_
|
tools/old-external-tools/shazam/abiFG.py
|
Python
|
bsd-3-clause
| 14,508
| 0.010408
|
#!/usr/bin/python2.6
import sys, string, os, time, fnmatch, imgFG, markup, re
from markup import oneliner as o
from numpy import *
import pdb
abi = ["DTB-004", "DTB-009", "DTB-024Pro", "DTB-030", "DTB-034", "DTB-036", "DTB-046", "DTB-049", "DTB-053", "DTB-064", "DTB-073"]
naive = ["DTB-003", "DTB-005", "DTB-011", "DTB-018", "DTB-022", "DTB-023", "DTB-038", "DTB-040", "DTB-060", "DTB-063", "DTB-071", "DTB-080"]
rootDir = ""
pngDir = ""
pngBase = 'png/'
pathwayNameDict = {}
entityDict = {}
entityFile = {}
imgFG.printPDF = True
def getPathwayName(pid):
pid = pid.split('_')
if len(pid) != 2:
return "N/A"
pid = pid[1]
pid = re.sub("\.","", pid)
try:
name = pathwayNameDict[pid]
except:
name = "N/A"
return name
def initEntityDict(file_name):
inFile = open(file_name)
lineCount = 0
for line in inFile:
lineCount+=1
data = line[:-1].split('\t')
if len(data) == 2:
type = data[0]
name = data[1]
if name in entityDict:
if entityDict[name] != type and file_name == entityFile[name]:
print "on line ", lineCount, name, "cannot be assigned ",type, "when it is", entityDict[name] , "in", file_name , entityFile[name]
assert(entityDict[name] == type)
elif entityDict[name] != type:
if type != 'protein' and entityFile[name] == 'protein':
print "WARNING", lineCount, name, "has multiple types ",type, "and", entityDict[name] , "in", file_name , entityFile[name]
type = 'protein'
entityDict[name] = type
entityFile[name] = file_name
inFile.close()
def initPathwayNameDict(path_file="pathway_pids.tab"):
inFile = open(path_file)
for line in inFile:
data = line[:-1].split('\t')
pid = data[0]
name = data[1]
pathwayNameDict[pid] = name
inFile.close()
def getFilesMatching(baseDir, patterns):
list = []
for root, dirs, files in os.walk(baseDir):
for file in files:
ptr = os.path.join(root, file)
for pattern in patterns:
if fnmatch.fnmatch(ptr, pattern):
list.append(ptr)
return list
def writePageToFile(page, fname):
outFile = open(fname, 'w')
outFile.write(str(page))
outFile.close()
def initializePage(t, h, sort_list = "[[9,1]]"):
currentTime = time.localtime()
dateTime = str(currentTime[1]) + '/' + str(currentTime[2]) + '/' + str(currentTime[0]) + " "
dateTime += str(currentTime[3]) + ":" + str(currentTime[4]) + ":" + str(currentTime[5])
csses = "style.css"
tsStr = '\n$(document).ready(function()\n'
tsStr += ' {\n'
tsStr += ' $("table").tablesorter({\n'
tsStr += ' // sort on the tenth column , order desc \n'
tsStr += ' sortList: '+sort_list+' \n'
tsStr += ' }); \n'
tsStr += ' }\n'
tsStr += ');\n'
scripts = [('js/jquery-latest.js',['javascript','']),
('js/jquery.tablesorter.min.js',['javascript','']),
('js/jquery.metadata.js',['javascript','']),
('',['javascript',tsStr])]
page = markup.page()
pathway_name = re.sub(" ","_",re.sub("/","_",t))
summary_tsv = open(rootDir + pathway_name+'.tsv', 'wb')
page.init(title = t,
header = h,
script=scripts,
css = (csses, 'print, projection, screen'),
footer = "Last modified on " + dateTime)
return page, summary_tsv
def putSummaryTable(p, b, data, id, tsv):
labels = data["sample"]["labels"]
p.table(border=b, id=id, class_='tablesorter')
p.thead()
p.tr()
p.th("Entity - Gene or Complex or Molecule")
p.th(labels, class_="{sorter:'digit'}")
p.tr.close()
p.thead.close()
p.tbody()
for d in data["sample"]:
if d == "la
|
bels":
continue
vals = data["sample"][d]
p.tr()
#name of gene
geneUrl = 'http://www.genecards.org/cgi-bin/carddisp.pl?gene='+d
tsv.write('<a href=%s target="_blank">%s</a>\t' % (geneUrl, d))
p.td(o.a(d, href=geneUrl, target="_blank"))
tmp = [round(v, 3) for v in vals]
p.td(tmp)
tsv.write('%s\n' % tmp)
p.tr.close()
p.tbody.close()
tsv.close()
p.table.close()
def getPathwayByFilename(f):
|
i = f.find("pid")
if i == -1:
print "string 'pid' not found in file name", f
sys.exit(0)
tmp = f[i:-3].split('_')
pid = tmp[0] + '_' + tmp[1]
pid = re.sub("\.","", pid)
print "pid:",pid
return pid, getPathwayName(pid)
def summarizePathway(samples, data, entitySummary):
sampleIndex = []
nwIndex = []
naIndex = []
for i in range(len(samples)):
s = samples[i]
if s.startswith("nw_"):
nwIndex.append(i)
elif s.startswith("na_"):
naIndex.append(i)
else:
sampleIndex.append(i)
totalOutliers = 0
totalActivity = 0
count = 0
geneCount = 0
for d in entitySummary["sample"]:
if d == "labels":
continue
vals = entitySummary["sample"][d]
totalOutliers += vals[6]
try:
totalActivity += vals[7]
except:
print "error: no activity for ",d
sys.exit(2)
totalActivity += 0
try:
if entityDict[d] == 'protein':
geneCount += 1
except:
pass
count += 1
if geneCount > 0:
avgOutliers = totalOutliers / geneCount;
else:
avgOutliers = 0
print "entities", count, "genes", geneCount
minMean = 1000
maxMean = -1000
minMeanNw = 1000
maxMeanNw = -1000
minMeanNa = 1000
maxMeanNa = -1000
for d in data:
vals = data[d]
tmp = [vals[i] for i in sampleIndex]
m = mean(tmp)
if m < minMean:
minMean = m
elif m > maxMean:
maxMean = m
tmp = [vals[i] for i in nwIndex]
m = mean(tmp)
if m < minMeanNw:
minMeanNw = m
elif m > maxMeanNw:
maxMeanNw = m
tmp = [vals[i] for i in naIndex]
m = mean(tmp)
if m < minMeanNa:
minMeanNa = m
elif m > maxMeanNa:
maxMeanNa = m
if geneCount < 10:
return None
summary = {}
summary["Avg Num Alterations"] = avgOutliers
summary["Total Alterations"] = totalOutliers
summary["Num Genes"] = geneCount
summary["Min Mean Truth"] = minMean
summary["Max Mean Truth"] = maxMean
summary["Min Mean Any"] = minMeanNa
summary["Max Mean Any"] = maxMeanNa
if geneCount > 0:
summary["Normalized Activity"] = 100 * totalActivity / geneCount
print "summary Normalized Activity", 100 * totalActivity / geneCount
else:
print "#warning geneCount = 0"
summary["order"] = ("Avg Num Alterations", "Total Alterations",
"Num Genes",
"Min Mean Truth", "Max Mean Truth",
"Min Mean Any", "Max Mean Any", "Normalized Activity")
return summary
def fileData(fname):
inFile = open(fname)
line = inFile.readline()
header = line[:-1].split('\t')
sample_names = header[1:]
#pdb.set_trace()
for i in range(len(sample_names)):
if sample_names[i].split(' ')[0] in naive:
sample_names[i] = "na_"+sample_names[i]
print "#naive", sample_names[i]
elif sample_names[i].split(' ')[0] in abi:
print "#abi"
else:
sample_names[i] = "nw_"+sample_names[i]
print "#neither"
fData = {}
for line in inFile:
data = line[:-1].split('\t')
name = data[0]
data = data[1:]
if len(name.split("__")) > 1:
continue
try:
vals = [float(d) for d in data]
fData[name]
|
streeter/autoliker
|
main.py
|
Python
|
mit
| 617
| 0
|
#!/usr/bin/env python
from autoliker.services.instagram import InstagramUserPhotoService
from autoliker.services.twitter import TwitterUserMentionService
if __name__ == '__main__':
services = [InstagramUserPhotoService, TwitterUserMentionService]
for service_cls in services:
service = service_cls()
print("Fetching the latest {} posts...".format(service.SERVICE_NAME))
posts = service.latest_posts()
print("Liking {} posts...".format(len(posts)))
        liked, skipped = service.like_posts(posts)
print("Liked {} posts, skipped {} posts".format(liked, skipped))
|
mretegan/crispy
|
crispy/utils.py
|
Python
|
mit
| 2,446
| 0
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2022 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""Utility functions/mixins"""
import logging
import sys
from PyQt5.QtGui import QFontDatabase
from PyQt5.QtWidgets import QCheckBox, QComboBox, QDataWidgetMapper
from crispy.views import Delegate
logger = logging.getLogger(__name__)
def setMappings(mappings):
"""Set the mappings between the model and widgets.
TODO:
- Should this be extended to accept other columns?
- Check if it has a model already.
"""
column = 1
mappers = []
for widget, obj in mappings:
mapper = QDataWidgetMapper(widget)
# logger.debug(obj.model())
mapper.setModel(obj.model())
mapper.addMapping(widget, column)
delegate = Delegate(widget)
mapper.setItemDelegate(delegate)
mapper.setRootIndex(obj.parent().index())
mapper.setCurrentModelIndex(obj.index())
# QDataWidgetMapper needs a focus event to notice a change in the data.
# To make sure the model is informed about the change, I connected the
# stateChanged signal of the QCheckBox to the submit slot of the
# QDataWidgetMapper. The same idea goes for the QComboBox.
# https://bugreports.qt.io/browse/QTBUG-1818
if isinstance(widget, QCheckBox):
signal = widget.stateChanged
try:
signal.disconnect()
except TypeError:
pass
signal.connect(mapper.submit)
elif isinstance(widget, QComboBox):
signal = widget.currentTextChanged
try:
signal.disconnect()
except TypeError:
pass
signal.connect(mapper.submit)
mappers.append(mapper)
return mappers
def fixedFont():
font = QFontDatabase.systemFont(QFontDatabase.FixedFont)
|
if sys.platform == "darwin":
font.setPointSize(font.pointSize() + 2)
return font
|
|
sukeesh/Jarvis
|
jarviscli/plugins/evaluator.py
|
Python
|
mit
| 9,968
| 0.000401
|
# -*- coding: utf-8 -*-
import re
import sympy
from colorama import Fore
from plugin import alias, plugin
@alias('calc', 'evaluate')
@plugin('calculate')
def calculate(jarvis, s):
"""
Jarvis will get your calculations done!
-- Example:
calculate 3 + 5
"""
tempt = s.replace(" ", "")
if len(tempt) > 1:
calc(jarvis, tempt, formatter=lambda x: x)
else:
jarvis.say("Error: Not in correct format", Fore.RED)
@plugin('solve')
def solve(jarvis, s):
"""
Prints where expression equals zero
-- Example:
solve x**2 + 5*x + 3
solve x + 3 = 5
"""
x = sympy.Symbol('x')
def _format(solutions):
if solutions == 0:
return "No solution!"
ret = ''
for count, point in enumerate(solutions):
if x not in point:
return "Please use 'x' in expression."
x_value = point[x]
ret += "{}. x: {}\n".format(count, x_value)
return ret
def _calc(expr):
return sympy.solve(expr, x, dict=True)
s = remove_equals(jarvis, s)
calc(jarvis, s, calculator=_calc, formatter=_format, do_evalf=False)
@plugin('equations')
def equations(jarvis, term):
"""
Solves linear equations system
Use variables: a, b, c, ..., x, y,z
Example:
~> Hi, what can I do for you?
equations
1. Equation: x**2 + 2y - z = 6
2. Equation: (x-1)(y-1) = 0
3. Equation: y**2 - x -10 = y**2 -y
4. Equation:
[{x: -9, y: 1, z: 77}, {x: 1, y: 11, z: 17}]
"""
a, b, c, d, e, f, g, h, i, j, k, l, m = sympy.symbols(
'a,b,c,d,e,f,g,h,i,j,k,l,m')
n, o, p, q, r, s, t, u, v, w, x, y, z = sympy.symbols(
'n,o,p,q,r,s,t,u,v,w,x,y,z')
equations = []
count = 1
user_input = jarvis.input('{}. Equation: '.format(count))
while user_input != '':
count += 1
user_input = format_expression(user_input)
user_input = remove_equals(jarvis, user_input)
equations.append(user_input)
user_input = jarvis.input('{}. Equation: '.format(count))
calc(
jarvis,
term,
calculator=lambda expr: sympy.solve(
expr,
equations,
dict=True))
@plugin('factor')
def factor(jarvis, s):
"""
    Jarvis will factorize the expression
-- Example:
factor x**2-y**2
"""
tempt = s.replace(" ", "")
if len(tempt) > 1:
calc(jarvis, tempt, formatter=sympy.factor)
else:
jarvis.say("Error: Not in correct format", Fore.RED)
@alias("curve plot")
@plugin('plot')
def plot(jarvis, s):
"""
Plot graph
-- Example:
plot x**2
plot y=x(x+1)(x-1)
"""
def _plot(expr):
sympy.plotting.plot(expr)
return ""
if len(s) == 0:
jarvis.say("Missing parameter: function (e.g. call 'plot x**2')")
return
s = remove_equals(jarvis, s)
try:
calc(jarvis, s, calculator=solve_y, formatter=_plot, do_evalf=False)
except ValueError:
jarvis.say("Cannot plot...", Fore.RED)
except OverflowError:
jarvis.say("Cannot plot - values probably too big...")
@plugin('limit')
def limit(jarvis, s):
"""
Prints limit to +/- infinity or to number +-. Use 'x' as variable.
-- Examples:
limit 1/x
limit @1 1/(1-x)
limit @1 @2 1/((1-x)(2-x))
"""
def try_limit(term, x, to, directory=''):
try:
return sympy.Limit(term, x, to, directory).doit()
except sympy.SympifyError:
return 'Error'
except NotImplementedError:
return "Sorry, cannot solve..."
if s == '':
jarvis.say("Usage: limit TERM")
return
s_split = s.split()
limit_to = []
term = ""
for token in s_split:
if token[0] == '@':
if token[1:].isnumeric():
limit_to.append(int(token[1:]))
else:
jarvis.say("Error: {} Not a number".format(
token[1:]), Fore.RED)
else:
term += token
term = remove_equals(jarvis, term)
term = format_expression(term)
try:
term = solve_y(term)
except (sympy.SympifyError, TypeError):
jarvis.say('Error, not a valid term')
return
x = sympy.Symbol('x')
# infinity:
jarvis.say("lim -> ∞\t= {}".format(try_limit(term,
x, +sympy.S.Infinity)), Fore.BLUE)
jarvis.say("lim -> -∞\t= {}".format(try_limit(term,
x, -sympy.S.Infinity)), Fore.BLUE)
for limit in limit_to:
limit_plus = try_limit(term, x, limit, directory="+")
limit_minus = try_limit(term, x, limit, directory="-")
jarvis.say("lim -> {}(+)\t= {}".format(limit, limit_plus), Fore.BLUE)
jarvis.say("lim -> {}(-)\t= {}".format(limit, limit_minus), Fore.BLUE)
def remove_equals(jarvis, equation):
"""
User should be able to input equations like x + y = 1.
SymPy only accepts equations like: x + y - 1 = 0.
=> This method Finds '=' and move everything beyond to left side
"""
    split = equation.split('=')
if len(split) == 1:
return equation
if len(split) != 2:
        jarvis.say("Warning! More than one = detected!", Fore.RED)
return equation
return "{} - ({})".format(split[0], split[1])
def format_expression(s):
s = str.lower(s)
s = s.replace("power", "**")
s = s.replace("plus", "+")
s = s.replace("minus", "-")
s = s.replace("dividedby", "/")
s = s.replace("by", "/")
s = s.replace("^", "**")
# Insert missing * commonly omitted
# 2x -> 2*x
p = re.compile('(\\d+)([abcxyz])')
s = p.sub(r'\1*\2', s)
# x(... -> x*(...
p = re.compile('([abcxyz])\\(')
s = p.sub(r'\1*(', s)
# (x-1)(x+1) -> (x-1)*(x+1)
# x(... -> x*(...
s = s.replace(")(", ")*(")
return s
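# Illustrative note (added, not part of the original plugin): typical results
# of format_expression for spoken-style input:
#   format_expression("x^2 plus 2x")  ->  "x**2 + 2*x"
#   format_expression("(x-1)(x+1)")   ->  "(x-1)*(x+1)"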
def solve_y(s):
if 'y' in s:
y = sympy.Symbol('y')
try:
results = sympy.solve(s, y)
except NotImplementedError:
return 'unknown'
if len(results) == 0:
return '0'
else:
return results[0]
else:
return solve_y("({}) -y".format(s))
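# Illustrative note (added, not part of the original plugin; assumes SymPy
# sympifies the string argument): solve_y isolates y, or rewrites a plain term
# as "(term) - y" first:
#   solve_y("2*y - x")  ->  x/2
#   solve_y("x**2")     ->  x**2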
def calc(jarvis, s, calculator=sympy.sympify, formatter=None, do_evalf=True):
s = format_expression(s)
try:
result = calculator(s)
except sympy.SympifyError:
jarvis.say("Error: Something is wrong with your expression", Fore.RED)
return
except NotImplementedError:
jarvis.say("Sorry, cannot solve", Fore.RED)
return
if formatter is not None:
result = formatter(result)
if do_evalf:
result = result.evalf()
jarvis.say(str(result), Fore.BLUE)
@alias("curve sketch")
@plugin('curvesketch')
def curvesketch(jarvis, s):
"""
Prints useful information about a graph of a function.
* Limit
* Intersection x/y axis
* Derivative and Integral
* Minima / Maxima / Turning point
-- Example:
curve sketch y=x**2+10x-5
curve sketch y=sqrt((x+1)(x-1))
curve sketch y=1/3x**3-2x**2+3x
"""
if len(s) == 0:
jarvis.say(
"Missing parameter: function (e.g. call 'curve sketch y=x**2+10x-5')")
return
def section(jarvis, headline):
jarvis.say("\n{:#^50}".format(" {} ".format(headline)), Fore.MAGENTA)
term = remove_equals(jarvis, s)
term = format_expression(term)
term = solve_y(term)
def get_y(x_val, func=term):
x = sympy.Symbol('x')
return func.evalf(subs={x: x_val})
section(jarvis, s)
section(jarvis, "Graph")
jarvis.eval('plot {}'.format(s))
section(jarvis, "Limit")
jarvis.eval('limit {}'.format(term))
section(jarvis, "Intersection x-axis")
jarvis.eval('solve {}'.format(term))
section(jarvis, "Intersection y-axis")
jarvis.say(str(get_y(0).round(9)), Fore.BLUE)
section(jarvis, "Factor")
jarvis.eval('factor {}'.format(term))
section(jarvis, "Derivative")
x = sympy.Symbol('x')
derivative_1 = sympy.D
|
xZise/pywikibot-core
|
scripts/interwiki.py
|
Python
|
mit
| 111,420
| 0.000701
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Script to check language links for general pages.
Uses existing translations of a page, plus hints from the command line, to
download the equivalent pages from other languages. All of such pages are
downloaded as well and checked for interwiki links recursively until there are
no more links that are encountered. A rationalization process then selects the
right interwiki links, and if this is unambiguous, the interwiki links in the
original page will be automatically updated and the modified page uploaded.
These command-line arguments can be used to specify which pages to work on:
&pagegenerators_help;
-days: Like -years, but runs through all date pages. Stops at
Dec 31. If the argument is given in the form -days:X,
it will start at month no. X through Dec 31. If the
argument is simply given as -days, it will run from
Jan 1 through Dec 31. E.g. for -days:9 it will run
from Sep 1 through Dec 31.
-years: run on all year pages in numerical order. Stop at year 2050.
If the argument is given in the form -years:XYZ, it
will run from [[XYZ]] through [[2050]]. If XYZ is a
negative value, it is interpreted as a year BC. If the
argument is simply given as -years, it will run from 1
through 2050.
This implies -noredirect.
-new: Work on the 100 newest pages. If given as -new:x, will work
on the x newest pages.
When multiple -namespace parameters are given, x pages are
inspected, and only the ones in the selected name spaces are
processed. Use -namespace:all for all namespaces. Without
-namespace, only article pages are processed.
This implies -noredirect.
-restore: restore a set of "dumped" pages the bot was working on
when it terminated. The dump file will be subsequently
removed.
-restore:all restore a set of "dumped" pages of all dumpfiles to a given
family remaining in the "interwiki-dumps" directory. All
                   these dump files will be subsequently removed. If restoring
process interrupts again, it saves all unprocessed pages in
one new dump file of the given site.
-continue: like restore, but after having gone through the dumped pages,
continue alphabetically starting at the last of the dumped
                   pages. The dump file will be subsequently removed.
-warnfile: used as -warnfile:filename, reads all warnings from the
given file that apply to the home wiki language,
and read the rest of the warning as a hint. Then
treats all the mentioned pages. A quicker way to
implement warnfile suggestions without verifying them
against the live wiki is using the warnfile.py
script.
Additionally, these arguments can be used to restrict the bot to certain pages:
-namespace:n Number or name of namespace to process. The parameter can be
used multiple times. It works in combination with all other
parameters, except for the -start parameter. If you e.g.
want to iterate over all categories starting at M, use
-start:Category:M.
-number: used as -number:#, specifies that the bot should process
that amount of pages and then stop. This is only useful in
combination with -start. The default is not to stop.
-until: used as -until:title, specifies that the bot should
process pages in wiki default sort order up to, and
including, "title" and then stop. This is only useful in
combination with -start. The default is not to stop.
Note: do not specify a namespace, even if -start has one.
-bracket only work on pages that have (in the home language)
parenthesis in their title. All other pages are skipped.
(note: without ending colon)
-skipfile: used as -skipfile:filename, skip all links mentioned in
the given file. This does not work with -number!
-skipauto use to skip all pages that can be translated automatically,
like dates, centuries, months, etc.
(note: without ending colon)
-lack: used as -lack:xx with xx a language code: only work on pages
without links to language xx. You can also add a number nn
like -lack:xx:nn, so that the bot only works on pages with
at least nn interwiki links (the default value for nn is 1).
These arguments control miscellaneous bot behaviour:
-quiet Use this option to get less output
(note: without ending colon)
-async Put page on queue to be saved to wiki asynchronously. This
               enables loading pages while saving is throttled and gives
               better performance.
               NOTE: For post-processing it always assumes that saving
               the pages was successful.
(note: without ending colon)
-summary: Set an additional action summary message for the edit. This
               could be used for further explanation of the bot action.
This will only be used in non-autonomous mode.
-hintsonly The bot does not ask for a page to work on, even if none of
the above page sources was specified. This will make the
first existing page of -hint or -hinfile slip in as the start
page, determining properties like namespace, disambiguation
state, and so on. When no existing page is found in the
hints, the bot does nothing.
Hitting return without input on the "Which page to check:"
prompt has the same effect as using -hintsonly.
Options like -back, -same or -wiktionary are in effect only
after a page has been found to work on.
(note: without ending colon)
These arguments are useful to provide hints to the bot:
-hint: used as -hint:de:Anweisung to give the bot a hint
where to start looking for translations. If no text
is given after the second ':', the name of the page
itself is used as the title for the hint, unless the
-hintnobracket command line option (see there) is also
selected.
There are some special hints, trying a number of languages
at once:
* all: All languages with at least ca. 100 articles.
* 10: The 10 largest languages (sites with most
articles). Analogous for any other natural
number.
* arab: All languages using the Arabic alphabet.
* cyril: All languages that use the Cyrillic alphabet.
* chinese: All Chinese dialects.
* latin: All languages using the Latin script.
* scand: All Scandinavian languages.
Names of families that forward their interlanguage links
to the wiki family being worked upon can be used (with
-family=wikipedia only), they are:
* commons: Interlanguage links of Mediawiki Commons.
* incubator: Links in pages on the Mediawiki Incubator.
* meta: Interlanguage li
|
nttks/edx-platform
|
openedx/core/djangoapps/ga_task/migrations/0001_initial.py
|
Python
|
agpl-3.0
| 1,256
| 0.000796
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('task_type', models.CharField(max_length=50, db_index=True)),
                ('task_key', models.CharField(max_length=255, db_index=True)),
('task_input', models.CharField(max_length=255)),
('task_id', models.CharField(max_length=255, db_index=True)),
('task_state', models.CharField(max_length=50, db_index=True)),
('task_output', models.CharField(max_length=1024, null=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('subtasks', models.TextField(blank=True)),
('requester', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
]
|
splotz90/urh
|
misc/IQGenerator.py
|
Python
|
gpl-3.0
| 6,115
| 0.003598
|
import numpy as np
from misc.Plotter import Plotter
class IQGenerator(object):
def __init__(self, f_baseband=10, f_s=1000, bits=[True, False, True, True, False, False]):
self.f_baseband = f_baseband
self.f_s = f_s
self.t_s = 1 / f_s
self.bits = bits
self.samples_per_cycle = int(f_s / f_baseband) + 1
self.bit_len = 3 * self.samples_per_cycle
self.modulation = "PSK"
self.carrier_samples = []
self.nsamples = self.bit_len * len(bits)
def modulate_iq(self, add_noise=False):
pos = 0
result = np.empty(self.nsamples, dtype=np.complex64)
self.carrier_samples = np.empty(self.nsamples, dtype=np.complex64)
for bit in self.bits:
if self.modulation == "FSK":
freq = self.f_baseband if bit else int(0.5 * self.f_baseband)
else:
freq = self.f_baseband
if self.modulation == "ASK":
a = 1 if bit else 0
else:
a = 1
if self.modulation == "PSK":
phi = 0 if bit else np.pi
else:
phi = 0
result.real[pos:pos + self.bit_len] = a * np.cos(
2 * np.pi * freq * self.t_s * np.arange(pos, pos + self.bit_len) + phi)
result.imag[pos:pos + self.bit_len] = a * np.sin(
2 * np.pi * freq * self.t_s * np.arange(pos, pos + self.bit_len) + phi)
self.carrier_samples.real[pos:pos + self.bit_len] = np.cos(
2 * np.pi * self.f_baseband * self.t_s * np.arange(pos, pos + self.bit_len))
self.carrier_samples.imag[pos:pos + self.bit_len] = np.sin(
2 * np.pi * self.f_baseband * self.t_s * np.arange(pos, pos + self.bit_len))
pos += self.bit_len
if add_noise:
noise = np.random.normal(0, 0.1, self.nsamples)
result.real = np.add(result.real, noise)
result.imag = np.add(result.imag, noise)
self.carrier_samples.real = np.add(self.carrier_samples.real, noise)
self.carrier_samples.imag = np.add(self.carrier_samples.imag, noise)
return result
def gen_iq_from_passband(self):
        # Lyons, p. 457, for documentation purposes only
f_carrier = 433e6
signal = np.sin(2 * np.pi * f_carrier * self.t_s * np.arange(0, self.nsamples))
result = np.empty(self.nsamples, dtype=np.complex64)
result.real = signal * np.cos(2 * np.pi * self.f_baseband * self.t_s * np.arange(0, self.nsamples))
result.imag = signal * np.sin(2 * np.pi * self.f_baseband * self.t_s * np.arange(0, self.nsamples))
result = self.lowpass(result,
                              self.f_baseband)  # another frequency component appears at 2*f_c, which we cut off here
return result
def lowpass(self, data, cutoff):
freq = np.fft.fftfreq(len(data), self.t_s)
fft = np.fft.fft(data)
fft_filtered = [fft[i] if abs(freq[i]) < cutoff else 0.0 for i in range(len(freq))]
inverse = np.fft.ifft(fft_filtered)
return inverse
def get_spectrum(self, signal):
w = np.abs(np.fft.fft(signal))
freqs = np.fft.fftfreq(len(w), self.t_s)
idx = np.argsort(freqs)
return freqs[idx], w[idx]
def get_max_freq(self, signal):
w = np.abs(np.fft.fft(signal))
freqs = np.fft.fftfreq(len(w))
idx = np.argsort(w)
return freqs[idx]
def get_norm_angle(self, c):
return np.arctan2(c.imag, c.real) + np.pi
def costa_alpha_beta(self, bw, damp=(1 / np.sqrt(2))):
# BW in range((2pi/200), (2pi/100))
alpha = (4 * damp * bw) / (1 + 2 * damp * bw + bw * bw)
beta = (4 * bw * bw) / (1 + 2 * damp * bw + bw * bw)
return alpha, beta
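    # Added orientation note (rounded values, not in the original source): for
    # the bandwidth used below, bw = 2*pi/100, and the default damping 1/sqrt(2),
    # this yields roughly alpha ~= 0.163 and beta ~= 0.014.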
def psk_demod(self, iq_data):
result = []
nco_out = 0
nco_times_sample = 0
phase_error = 0
modulus = 2 * np.pi
costa_phase = 0
costa_freq = 0
lowpass_filtered = costa_freq
# bw = (2 * np.pi / 100 + 2 * np.pi / 200) / 2
bw = 2 * np.pi / 100
alpha, beta = self.costa_alpha_beta(bw)
freqs = []
F = (np.arctan2(iq_data[1].imag, iq_data[1].real)) - (np.arctan2(iq_data[0].imag, iq_data[0].real))
prod = 0
avg_prod = -100
N = 15
lowpass_values = []
for i in range(1, len(iq_data)):
sample = iq_data[i]
tmp = iq_data[i - 1].conjugate() * sample
F = np.arctan2(tmp.imag, tmp.real)
# # NCO Output
nco_out = np.exp(-costa_phase * 1j)
# # Sample * nco_out
nco_times_sample = nco_out * sample
# # LPF
lowpass_filtered = nco_times_sample
#
            #phase_error = np.arctan2(lowpass_filtered.imag, lowpass_filtered.real) # arctan2 does not work here: due to floating-point errors it sometimes returns -pi instead of 0 as the phase error, which breaks everything.
# #phase_error = np.arctan2(nco_times_sample.imag, nco_times_sample.real)
phase_error = lowpass_filtered.imag * lowpass_filtered.real
costa_freq += beta * phase_error
costa_phase += costa_freq + alpha * phase_error
result.append(nco_times_sample.real)
return result
def get_angle(self, summed_angle):
while summed_angle > np.pi:
summed_angle -= np.pi
return summed_angle
if __name__ == "__main__":
iqg = IQGenerator()
iq_data = iqg.modulate_iq(add_noise=True)
# print("\n".join(map(str, iqg.psk_demod(iq_data))))
demod = iqg.psk_demod(iq_data)
|
    # Plotter.generic_plot(np.arange(0, len(iq_data.real)), iq_data.real, iqg.modulation)
carrier_plot = np.arange(0, len(iqg.carrier_samples)), iqg.carrier_samples.real, "Carrier"
demod_plot = np.arange(0, len(demod)), demod, "Demod"
# plot = carrier_plot + demod_plot
plot = demod_plot
Plotter.generic_plot(*plot)
iq_data.tofile("../tests/data/psk_gen_noisy.complex")
|
twisted/quotient
|
xquotient/test/historic/test_composer4to5.py
|
Python
|
mit
| 598
| 0.001672
|
from axiom.test.historic.stubloader import StubbedTest
from xquotient.compose import Composer, Drafts
class ComposerUpgradeTestCase(StubbedTest):
"""
Test that the Composer no longer has a 'drafts' attribute, that no Drafts
items have been created and that the other attributes have been copied.
"""
def test_upgrade(self):
composer = self.store.findUnique(Composer)
self.failIf(hasattr(composer, 'drafts'), "Still has 'drafts' attribute")
self.assertNotEqual(composer.privateApplication, None)
        self.assertEqual(self.store.count(Drafts), 0)
|
unicefuganda/edtrac
|
edtrac_project/rapidsms_uganda_common/setup.py
|
Python
|
bsd-3-clause
| 845
| 0.002367
|
from setuptools import setup
setup(
name='uganda_common',
version='0.1',
license="BSD",
install_requires = ["rapidsms"],
description='A suite of utility functions for Uganda RSMS deployments.',
long_description='',
author='UNICEF Uganda T4D',
author_email='mossplix@gmail.com',
url='http://github.com/mossplix/uganda_common',
download_url='http://github.com/mossplix/uganda_common/downloads',
include_package_data=True,
packages=['uganda_common'],
zip_safe=False,
classifiers=[
        'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
|
'Programming Language :: Python',
'Framework :: Django',
]
)
|
ckan/ckanext-archiver
|
ckanext/archiver/plugin.py
|
Python
|
mit
| 8,644
| 0.000231
|
import logging
from ckan import model
from ckan import plugins as p
from ckanext.report.interfaces import IReport
from ckanext.archiver.interfaces import IPipe
from ckanext.archiver.logic import action, auth
from ckanext.archiver import helpers
from ckanext.archiver import lib
from ckanext.archiver.model import Archival, aggregate_archivals_for_a_dataset
from ckanext.archiver import cli
log = logging.getLogger(__name__)
class ArchiverPlugin(p.SingletonPlugin, p.toolkit.DefaultDatasetForm):
"""
Registers to be notified whenever CKAN resources are created or their URLs
change, and will create a new ckanext.archiver celery task to archive the
resource.
"""
p.implements(p.IDomainObjectModification, inherit=True)
p.implements(IReport)
p.implements(p.IConfigurer, inherit=True)
p.implements(p.IActions)
p.implements(p.IAuthFunctions)
p.implements(p.ITemplateHelpers)
p.implements(p.IPackageController, inherit=True)
if p.toolkit.check_ckan_version(min_version='2.9.0'):
p.implements(p.IClick)
# IDomainObjectModification
def notify(self, entity, operation=None):
if not isinstance(entity, model.Package):
return
log.debug('Notified of package event: %s %s', entity.name, operation)
run_archiver = \
self._is_it_sufficient_change_to_run_archiver(entity, operation)
if not run_archiver:
return
log.debug('Creating archiver task: %s', entity.name)
lib.create_archiver_package_task(entity, 'priority')
def _is_it_sufficient_change_to_run_archiver(self, package, operation):
''' Returns True if in this revision any of these happened:
* it is a new dataset
* dataset licence changed (affects qa)
* there are resources that have been added or deleted
* resources have changed their URL or format (affects qa)
'''
if operation == 'new':
log.debug('New package - will archive')
# even if it has no resources, QA needs to show 0 stars against it
return True
elif operation == 'deleted':
log.debug('Deleted package - won\'t archive')
return False
# therefore operation=changed
# 2.9 does not have revisions so archive anyway
if p.toolkit.check_ckan_version(min_version='2.9.0'):
return True
# check to see if resources are added, deleted or URL changed
# look for the latest revision
rev_list = package.all_related_revisions
if not rev_list:
log.debug('No sign of previous revisions - will archive')
return True
# I am not confident we can rely on the info about the current
# revision, because we are still in the 'before_commit' stage. So
# simply ignore that if it's returned.
if rev_list[0][0].id == model.Session.revision.id:
rev_list = rev_list[1:]
if not rev_list:
log.warn('No sign of previous revisions - will archive')
return True
previous_revision = rev_list[0][0]
log.debug('Comparing with revision: %s %s',
previous_revision.timestamp, previous_revision.id)
# get the package as it was at that previous revision
context = {'model': model, 'session': model.Session,
# 'user': c.user or c.author,
'ignore_auth': True,
'revision_id': previous_revision.id}
data_dict = {'id': package.id}
try:
old_pkg_dict = p.toolkit.get_action('package_show')(
context, data_dict)
except p.toolkit.NotFound:
log.warn('No sign of previous package - will archive anyway')
return True
# has the licence changed?
old_licence = (old_pkg_dict['license_id'],
lib.get_extra_from_pkg_dict(old_pkg_dict, 'licence')
or None)
new_licence = (package.license_id,
package.extras.get('licence') or None)
if old_licence != new_licence:
log.debug('Licence has changed - will archive: %r->%r',
old_licence, new_licence)
return True
# have any resources been added or deleted?
old_resources = dict((res['id'], res)
for res in old_pkg_dict['resources'])
old_res_ids = set(old_resources.keys())
new_res_ids = set((res.id for res in package.resources))
deleted_res_ids = old_res_ids - new_res_ids
if deleted_res_ids:
log.debug('Deleted resources - will archive. res_ids=%r',
deleted_res_ids)
return True
added_res_ids = new_res_ids - old_res_ids
if added_res_ids:
log.debug('Added resources - will archive. res_ids=%r',
added_res_ids)
return True
# have any resources' url/format changed?
for res in package.resources:
for key in ('url', 'format'):
old_res_value = old_resources[res.id][key]
new_res_value = getattr(res, key)
if old_res_value != new_res_value:
log.debug('Resource %s changed - will archive. '
'id=%s pos=%s url="%s"->"%s"',
key, res.id[:4], res.position,
old_res_value, new_res_value)
return True
was_in_progress = old_resources[res.id].get('upload_in_progress', None)
is_in_progress = res.extras.get('upload_in_progress', None)
if was_in_progress != is_in_progress:
log.debug('Resource %s upload finished - will archive. ', 'upload_finished')
return True
log.debug('Resource unchanged. pos=%s id=%s',
res.position, res.id[:4])
log.debug('No new, deleted or changed resources - won\'t archive')
return False
# IReport
def register_reports(self):
"""Register details of an extension's reports"""
from ckanext.archiver import reports
return [reports.broken_links_report_info,
]
# IConfigurer
def update_config(self, config):
p.toolkit.add_template_directory(config, 'templates')
# IActions
def get_actions(self):
return {
'archiver_resource_show': action.archiver_resource_show,
'archiver_dataset_show': action.archiver_dataset_show,
}
|
# IAuthFunctions
def get_auth_functions(self):
return {
'archiver_resource_show': auth.archiver_resource_show,
'archiver_dataset_show': auth.archiver_dataset_show,
}
    # ITemplateHelpers
def get_helpers(self):
return dict((name, function) for name, function
in list(helpers.__dict__.items())
if callable(function) and name[0] != '_')
# IPackageController
def after_show(self, context, pkg_dict):
# Insert the archival info into the package_dict so that it is
# available on the API.
# When you edit the dataset, these values will not show in the form,
        # but they will be saved in the resources (not the dataset). I can't see
        # an easy way to stop this, but I think it is harmless. It will get
# overwritten here when output again.
archivals = Archival.get_for_package(pkg_dict['id'])
if not archivals:
return
# dataset
dataset_archival = aggregate_archivals_for_a_dataset(archivals)
pkg_dict['archiver'] = dataset_archival
# resources
archivals_by_res_id = dict((a.resource_id, a) for a in archivals)
for res in pkg_dict['resources']:
archival = archivals_by_res_id.get(res['id'])
if archival:
archival_dict = archival.as_dict()
del archival_dict['id']
del archival_dict['package_id']
del archival_di
|
ericlink/adms-server
|
playframework-dist/play-1.1/framework/pym/play/commands/javadoc.py
|
Python
|
mit
| 1,459
| 0.004798
|
import os, os.path
import shutil
import subprocess
from play.utils import *
COMMANDS = ['javadoc', 'jd']
HELP = {
'javadoc': 'Generate your application Javadoc'
}
def execute(**kargs):
command = kargs.get("command")
app = kargs.get("app")
args = kargs.get("args")
play_env = kargs.get("env")
app.check()
modules = app.modules()
if not os.environ.has_key('JAVA_HOME'):
javadoc_path = "javadoc"
else:
javadoc_path = os.path.normpath("%s/bin/javadoc" % os.environ['JAVA_HOME'])
fileList = []
def add_java_files(app_path):
for root, subFolders, files in os.walk(os.path.join(app_path, 'app')):
for file in files:
if file.endswith(".java"):
                    fileList.append(os.path.join(root, file))
add_java_files(app.path)
    for module in modules:
add_java_files(os.path.normpath(module))
outdir = os.path.join(app.path, 'javadoc')
sout = open(os.path.join(app.log_path(), 'javadoc.log'), 'w')
serr = open(os.path.join(app.log_path(), 'javadoc.err'), 'w')
if (os.path.isdir(outdir)):
shutil.rmtree(outdir)
javadoc_cmd = [javadoc_path, '-classpath', app.cp_args(), '-d', outdir] + fileList
print "Generating Javadoc in " + outdir + "..."
subprocess.call(javadoc_cmd, env=os.environ, stdout=sout, stderr=serr)
print "Done! You can open " + os.path.join(outdir, 'overview-tree.html') + " in your browser."
|
NNBlocks/NNBlocks
|
nnb/activation.py
|
Python
|
gpl-3.0
| 1,375
| 0.007273
|
# NNBlocks is a Deep Learning framework for computational linguistics.
#
# Copyright (C) 2015 Frederico Tommasi Caroli
#
# NNBlocks is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# NNBlocks is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# NNBlocks. If not, see http://www.gnu.org/licenses/.
import theano.tensor as T
def sigmoid(a):
return T.nnet.sigmoid(a)
def tanh(a):
return T.tanh(a)
def linear(a):
return a
def threshold(t, yes=1., no=0.):
def r(a):
return T.switch(T.ge(a, t), yes, no)
return r
|
#The ReLU functions are a copy of theano's recommended way to implement ReLU.
#theano.tensor.nnet.relu is not used here because it is only available in
#version 0.7.2 of theano
def ReLU(a):
return 0.5 * (a + abs(a))
def leaky_ReLU(alpha):
def r(a):
        f1 = 0.5 * (1 + alpha)
        f2 = 0.5 * (1 - alpha)
return f1 * a + f2 * abs(a)
return r
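# Added note (not in the original module): with f1 = 0.5*(1 + alpha) and
# f2 = 0.5*(1 - alpha), f1*a + f2*abs(a) equals a for a >= 0 and alpha*a for
# a < 0, i.e. the usual leaky ReLU max(a, alpha*a) for 0 < alpha < 1.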
|
petervo/cockpit
|
pkg/lib/inotify.py
|
Python
|
lgpl-2.1
| 2,629
| 0.004184
|
#
# This file is part of Cockpit.
#
# Copyright (C) 2017 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import ctypes
import struct
import subprocess
import traceback
IN_CLOSE_WRITE = 0x00000008
IN_MOVED_FROM = 0x00000040
IN_MOVED_TO = 0x00000080
IN_CREATE = 0x00000100
IN_DELETE = 0x00000200
IN_DELETE_SELF = 0x00000400
IN_MOVE_SELF = 0x00000800
IN_IGNORED = 0x00008000
class Inotify:
def __init__(self):
self._libc = ctypes.CDLL(None, use_errno=True)
        self._get_errno_func = ctypes.get_errno
|
self._libc.inotify_init.argtypes = []
self._libc.inotify_init.restype = ctypes.c_int
self._libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p,
ctypes.c_uint32]
self._libc.inotify_add_watch.restype = ctypes.c_int
self._libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
self._libc.inotify_rm_watch.restype = ctypes.c_int
self.fd = self._libc.inotify_init()
def add_watch(self, path, mask):
path = ctypes.create_string_buffer(path.encode(sys.getfilesystemencoding()))
wd = self._libc.inotify_add_watch(self.fd, path, mask)
if wd < 0:
sys.stderr.write("can't add watch for %s: %s\n" % (path, os.strerror(self._get_errno_func())))
return wd
def rem_watch(self, wd):
if self._libc.inotify_rm_watch(self.fd, wd) < 0:
sys.stderr.write("can't remove watch: %s\n" % (os.strerror(self._get_errno_func())))
def process(self, callback):
buf = os.read(self.fd, 4096)
pos = 0
while pos < len(buf):
(wd, mask, cookie, name_len) = struct.unpack('iIII', buf[pos:pos+16])
pos += 16
(name,) = struct.unpack('%ds' % name_len, buf[pos:pos + name_len])
pos += name_len
callback(wd, mask, name.decode().rstrip('\0'))
def run(self, callback):
while True:
self.process(callback)
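# Hypothetical usage sketch (added note, not part of the original file),
# watching a directory for newly created or rewritten files:
#
#   notifier = Inotify()
#   notifier.add_watch("/tmp", IN_CREATE | IN_CLOSE_WRITE)
#   notifier.run(lambda wd, mask, name: sys.stdout.write(name + "\n"))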
|
mF2C/COMPSs
|
compss/programming_model/bindings/python/src/exaqute/ExaquteTaskPyCOMPSs.py
|
Python
|
apache-2.0
| 1,613
| 0
|
#!/usr/bin/python
#
# Copyright 2002-2019 Barcelona Supercomputing Center (www.bsc.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
|
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from exaqute.ExaquteTask import *
from pycompss.api.task import task
from pycompss.api.api import compss_wait_on
from pycompss.api.api import compss_barrier
from pycompss.api.api import compss_delete_object
from pycompss.api.api import compss_delete_file
from pycompss.api.parameter import *
from pycompss.api.implement import implement
from pycompss.api.constraint import *
class ExaquteTask(object):
def __init__(self, *args, **kwargs):
global scheduler
scheduler = "Current scheduler is PyCOMPSs"
self.task_instance = task(*args, **kwargs)
def __call__(self, f):
return self.task_instance.__call__(f)
def barrier(): # Wait
compss_barrier()
def get_value_from_remote(obj): # Gather
obj = compss_wait_on(obj)
return obj
def delete_object(obj): # Release
compss_delete_object(obj)
def delete_file(file_path):
compss_delete_file(file_path)
def compute(obj): # Submit task
return obj
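# Hypothetical usage sketch (added note, not in the original file; assumes the
# standard PyCOMPSs task keyword 'returns'):
#
#   @ExaquteTask(returns=1)
#   def square(x):
#       return x * x
#
#   y = square(4)
#   barrier()
#   print(get_value_from_remote(y))   # -> 16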
|
tripleee/gmail-oauth2-tools
|
python/oauth2.py
|
Python
|
apache-2.0
| 12,198
| 0.00705
|
#!/usr/bin/python
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Performs client tasks for testing IMAP OAuth2 authentication.
To use this script, you'll need to have registered with Google as an OAuth
application and obtained an OAuth client ID and client secret.
See https://developers.google.com/identity/protocols/OAuth2 for instructions on
registering and for documentation of the APIs invoked by this code.
This script has 3 modes of operation.
1. The first mode is used to generate and authorize an OAuth2 token, the
first step in logging in via OAuth2.
oauth2 --user=xxx@gmail.com \
--client_id=1038[...].apps.googleusercontent.com \
--client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
--generate_oauth2_token
The script will converse with Google and generate an oauth request
token, then present you with a URL you should visit in your browser to
authorize the token. Once you get the verification code from the Google
website, enter it into the script to get your OAuth access token. The output
from this command will contain the access token, a refresh token, and some
metadata about the tokens. The access token can be used until it expires, and
the refresh token lasts indefinitely, so you should record these values for
reuse.
2. The script will generate new access tokens using a refresh token.
oauth2 --user=xxx@gmail.com \
--client_id=1038[...].apps.googleusercontent.com \
--client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
--refresh_token=1/Yzm6MRy4q1xi7Dx2DuWXNgT6s37OrP_DW_IoyTum4YA
3. The script will generate an OAuth2 string that can be fed
directly to IMAP or SMTP. This is triggered with the --generate_oauth2_string
option.
oauth2 --generate_oauth2_string --user=xxx@gmail.com \
--access_token=ya29.AGy[...]ezLg
The output of this mode will be a base64-encoded string. To use it, connect to an
IMAPFE and pass it as the second argument to the AUTHENTICATE command.
a AUTHENTICATE XOAUTH2 a9sha9sfs[...]9dfja929dk==
"""
import base64
import imaplib
import json
from optparse import OptionParser
import smtplib
import sys
import urllib
def SetupOptionParser():
# Usage message is the module's docstring.
parser = OptionParser(usage=__doc__)
parser.add_option('--generate_oauth2_token',
action='store_true',
dest='generate_oauth2_token',
help='generates an OAuth2 token for testing')
parser.add_option('--generate_oauth2_string',
action='store_true',
dest='generate_oauth2_string',
help='generates an initial client response string for '
'OAuth2')
parser.add_option('--client_id',
default=None,
help='Client ID of the application that is authenticating. '
'See OAuth2 documentation for details.')
parser.add_option('--client_secret',
default=None,
help='Client secret of the application that is '
'authenticating. See OAuth2 documentation for '
'details.')
parser.add_option('--access_token',
default=None,
help='OAuth2 access token')
parser.add_option('--refresh_token',
default=None,
help='OAuth2 refresh token')
parser.add_option('--scope',
default='https://mail.google.com/',
help='scope for the access token. Multiple scopes can be '
'listed separated by spaces with the whole argument '
'quoted.')
parser.add_option('--test_imap_authentication',
action='store_true',
dest='test_imap_authentication',
help='attempts to authenticate to IMAP')
parser.add_option('--test_smtp_authentication',
action='store_true',
dest='test_smtp_authentication',
help='attempts to authenticate to SMTP')
parser.add_option('--user',
default=None,
help='email address of user whose account is being '
'accessed')
return parser
# The URL root for accessing Google Accounts.
GOOGLE_ACCOUNTS_BASE_URL = 'https://accounts.google.com'
# Hardcoded dummy redirect URI for non-web apps.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
def AccountsUrl(command):
"""Generates the Google Accounts URL.
Args:
command: The command to execute.
Returns:
A URL for the given command.
"""
return '%s/%s' % (GOOGLE_ACCOUNTS_BASE_URL, command)
def UrlEscape(text):
# See OAUTH 5.1 for a definition of which characters need to be escaped.
return urllib.quote(text, safe='~-._')
def UrlUnescape(text):
# See OAUTH 5.1 for a definition of which characters need to be escaped.
return urllib.unquote(text)
def FormatUrlParams(params):
"""Formats parameters into a URL query string.
Args:
params: A key-value map.
Returns:
A URL query string version of the given parameters.
"""
param_fragments = []
  for param in sorted(params.iteritems(), key=lambda x: x[0]):
param_fragments.append('%s=%s' % (param[0], UrlEscape(param[1])))
return '&'.join(param_fragments)
def GeneratePermissionUrl(client_id, scope='https://mail.google.com/'):
"""Generates the URL fo
|
r authorizing access.
This uses the "OAuth2 for Installed Applications" flow described at
https://developers.google.com/accounts/docs/OAuth2InstalledApp
Args:
client_id: Client ID obtained by registering your app.
scope: scope for access token, e.g. 'https://mail.google.com'
Returns:
A URL that the user should visit in their browser.
"""
params = {}
params['client_id'] = client_id
params['redirect_uri'] = REDIRECT_URI
params['scope'] = scope
params['response_type'] = 'code'
return '%s?%s' % (AccountsUrl('o/oauth2/auth'),
FormatUrlParams(params))
def AuthorizeTokens(client_id, client_secret, authorization_code):
"""Obtains OAuth access token and refresh token.
This uses the application portion of the "OAuth2 for Installed Applications"
flow at https://developers.google.com/accounts/docs/OAuth2InstalledApp#handlingtheresponse
Args:
client_id: Client ID obtained by registering your app.
client_secret: Client secret obtained by registering your app.
authorization_code: code generated by Google Accounts after user grants
permission.
Returns:
The decoded response from the Google Accounts server, as a dict. Expected
fields include 'access_token', 'expires_in', and 'refresh_token'.
"""
params = {}
params['client_id'] = client_id
params['client_secret'] = client_secret
params['code'] = authorization_code
params['redirect_uri'] = REDIRECT_URI
params['grant_type'] = 'authorization_code'
request_url = AccountsUrl('o/oauth2/token')
response = urllib.urlopen(request_url, urllib.urlencode(params)).read()
return json.loads(response)
def RefreshToken(client_id, client_secret, refresh_token):
"""Obtains a new token given a refresh token.
See https://developers.google.com/accounts/docs/OAuth2InstalledApp#refresh
Args:
client_id: Client ID obtained by registering your app.
client_secret: Client secret obtained by registering your app.
refresh_token: A previously-obtained refresh token.
Returns:
The decoded response from the Google Accounts server, as a dict. Expected
fields include 'access_token', 'expires_in', and 'refresh_token'.
"""
params = {}
params
|
jkonecny12/anaconda
|
pyanaconda/ui/tui/spokes/user.py
|
Python
|
gpl-2.0
| 11,266
| 0.00142
|
# User creation text spoke
#
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from pyanaconda.core.configuration.anaconda import conf
from pyanaconda.core.constants import FIRSTBOOT_ENVIRON, PASSWORD_SET, PASSWORD_POLICY_USER
from pyanaconda.flags import flags
from pyanaconda.core.i18n import N_, _
from pyanaconda.core.regexes import GECOS_VALID
from pyanaconda.modules.common.constants.services import USERS
from pyanaconda.modules.common.util import is_module_available
from pyanaconda.ui.categories.user_settings import UserSettingsCategory
from pyanaconda.ui.common import FirstbootSpokeMixIn
from pyanaconda.ui.tui.spokes import NormalTUISpoke
from pyanaconda.ui.tui.tuiobject import Dialog, PasswordDialog, report_if_failed, report_check_func
from pyanaconda.ui.lib.users import get_user_list, set_user_list
from pyanaconda.core.users import guess_username, check_username, check_grouplist
from simpleline.render.screen import InputState
from simpleline.render.containers import ListColumnContainer
from simpleline.render.widgets import CheckboxWidget, EntryWidget
__all__ = ["UserSpoke"]
FULLNAME_ERROR_MSG = N_("Full name can't contain the ':' character")
class UserSpoke(FirstbootSpokeMixIn, NormalTUISpoke):
"""
.. inheritance-diagram:: UserSpoke
:parts: 3
"""
category = UserSettingsCategory
@staticmethod
def get_screen_id():
"""Return a unique id of this UI screen."""
return "user-configuration"
@classmethod
def should_run(cls, environment, data):
"""Should the spoke run?"""
if not is_module_available(USERS):
return False
if FirstbootSpokeMixIn.should_run(environment, data):
return True
        # the user spoke should always run in anaconda, and in firstboot only
        # when doing reconfig or if no user has been created during the installation
users_module = USERS.get_proxy()
user_list = get_user_list(users_module)
if environment == FIRSTBOOT_ENVIRON and data and not user_list:
return True
return False
def __init__(self, data, storage, payload):
FirstbootSpokeMixIn.__init__(self)
NormalTUISpoke.__init__(self, data, storage, payload)
self.initialize_start()
# connect to the Users DBus module
self._users_module = USERS.get_proxy()
self.title = N_("User creation")
self._container = None
# was user creation requested by the Users DBus module
# - at the moment this basically means user creation was
# requested via kickstart
# - note that this does not currently update when user
# list is changed via DBus
self._user_requested = False
self._user_cleared = False
# should a user be created ?
self._create_user = False
self._user_list = get_user_list(self._users_module, add_default=True)
# if user has a name, it's an actual user that has been requested,
# rather than a default user added by us
if self.user.name:
self._user_requested = True
self._create_user = True
self._use_password = self.user.is_crypted or self.user.password
self._groups = ""
self._is_admin = False
self.errors = []
self._users_module = USERS.get_proxy()
self.initialize_done()
@property
def user(self):
"""The user that is manipulated by the User spoke.
This user is always the first one in the user list.
:return: a UserData instance
"""
return self._user_list[0]
def refresh(self, args=None):
NormalTUISpoke.refresh(self, args)
# refresh the user list
self._user_list = get_user_list(self._users_module, add_default=True, add_if_not_empty=self._user_cleared)
self._is_admin = self.user.has_admin_priviledges()
self._groups = ", ".join(self.user.groups)
self._container = ListColumnContainer(1)
w = CheckboxWidget(title=_("Create user"), completed=self._create_user)
self._container.add(w, self._set_create_user)
if self._create_user:
dialog = Dialog(title=_("Full name"), conditions=[self._check_fullname])
self._container.add(EntryWidget(dialog.title, self.user.gecos), self._set_fullname, dialog)
dialog = Dialog(title=_("User name"), conditions=[self._check_username])
self._container.add(EntryWidget(dialog.title, self.user.name), self._set_username, dialog)
w = CheckboxWidget(title=_("Use password"), completed=self._use_password)
self._container.add(w, self._set_use_password)
if self._use_password:
password_dialog = PasswordDialog(
title=_("Password"),
policy_name=PASSWORD_POLICY_USER
)
if self.user.password:
entry = EntryWidget(password_dialog.title, _(PASSWORD_SET))
else:
entry = EntryWidget(password_dialog.title)
self._container.add(entry, self._set_password, password_dialog)
msg = _("Administrator")
w = CheckboxWidget(title=msg, completed=self._is_admin)
self._container.add(w, self._set_administrator)
dialog = Dialog(title=_("Groups"), conditions=[self._check_groups])
self._container.add(EntryWidget(dialog.title, self._groups), self._set_groups, dialog)
self.window.add_with_separator(self._container)
@report_if_failed(message=FULLNAME_ERROR_MSG)
def _check_fullname(self, user_input, report_func):
return GECOS_VALID.match(user_input) is not None
@report_check_func()
def _check_username(self, user_input, report_func):
return check_username(user_input)
@report_check_func()
def _check_groups(self, user_input, report_func):
        return check_grouplist(user_input)
def _set_create_user(self, args):
self._create_user = not self._create_user
def _set_fullname(self, dialog):
self.user.gecos = dialog.run()
def _set_username(self, dialog):
        self.user.name = dialog.run()
def _set_use_password(self, args):
self._use_password = not self._use_password
def _set_password(self, password_dialog):
password = password_dialog.run()
while password is None:
password = password_dialog.run()
self.user.password = password
def _set_administrator(self, args):
self._is_admin = not self._is_admin
def _set_groups(self, dialog):
self._groups = dialog.run()
def show_all(self):
NormalTUISpoke.show_all(self)
# if we have any errors, display them
while self.errors:
print(self.errors.pop())
@property
def completed(self):
""" Verify a user is created; verify pw is set if option checked. """
user_list = get_user_list(self._users_module)
if user_list:
if self._use_password and not bool(self.user.password or self.user.is_crypted):
return False
else:
return True
else:
ret
|
matokeotz/matokeo-api
|
app/app/urls.py
|
Python
|
mit
| 1,219
| 0
|
"""app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
    2. Add a URL to urlpatterns:  url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework.documentation import include_docs_urls
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^docs/', include_docs_urls(title='Matokeo API')),
url(r'^api/', include('api.urls.student_urls')),
url(r'^api/', include('api.urls.subject_urls')),
url(r'^api/', include('api.urls.grade_urls')),
url(r'^api/', include('api.urls.location_urls')),
url(r'^api/', include('api.urls.school_urls')),
]
|
anybox/anybox.recipe.openerp
|
anybox/recipe/openerp/tests/oerp70/setup.py
|
Python
|
agpl-3.0
| 3,751
| 0.018128
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# flake8: noqa
#
# setup.py from openobject-server 7.0, included as is, except for the
# dependency list
#
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import glob, os, re, setuptools, sys
from os.path import join, isfile
# List all data files
def data():
files = []
for root, dirnames, filenames in os.walk('openerp'):
for filename in filenames:
if not re.match(r'.*(\.pyc|\.pyo|\~)$',filename):
                files.append(os.path.join(root, filename))
d = {}
for v in files:
k=os.path.dirname(v)
if k in d:
d[k].append(v)
else:
d[k]=[v]
r = d.items()
if os.name == 'nt':
r.append(("Microsoft.VC90.CRT", glob.glob('C:\Microsoft.VC90.CRT\*.*')))
import babel
r.append(("localedata",
glob.glob(os.path.join(os.path.dirname(babel.__file__), "localedata" , '*'))))
return r
def gen_manifest():
file_list="\n".join(data())
open('MANIFEST','w').write(file_list)
if os.name == 'nt':
sys.path.append("C:\Microsoft.VC90.CRT")
def py2exe_options():
if os.name == 'nt':
import py2exe
return {
"console" : [ { "script": "openerp-server", "icon_resources": [(1, join("install","openerp-icon.ico"))], }],
'options' : {
"py2exe": {
"skip_archive": 1,
"optimize": 2,
"dist_dir": 'dist',
"packages": [ "DAV", "HTMLParser", "PIL", "asynchat", "asyncore", "commands", "dateutil", "decimal", "email", "encodings", "imaplib", "lxml", "lxml._elementpath", "lxml.builder", "lxml.etree", "lxml.objectify", "mako", "openerp", "poplib", "pychart", "pydot", "pyparsing", "reportlab", "select", "simplejson", "smtplib", "uuid", "vatnumber", "vobject", "xml", "xml.dom", "yaml", ],
"excludes" : ["Tkconstants","Tkinter","tcl"],
}
}
}
else:
return {}
execfile(join(os.path.dirname(__file__), 'openerp', 'release.py'))
setuptools.setup(
name = 'openerp',
version = version,
description = description,
long_description = long_desc,
url = url,
author = author,
author_email = author_email,
classifiers = filter(None, classifiers.split("\n")),
license = license,
scripts = ['openerp-server'],
data_files = data(),
packages = setuptools.find_packages(),
dependency_links = ['http://download.gna.org/pychart/'],
install_requires = ['PIL',
],
#include_package_data = True,
**py2exe_options()
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kohr-h/odl
|
odl/contrib/solvers/spdhg/examples/get_started.py
|
Python
|
mpl-2.0
| 2,277
| 0
|
# Copyright 2014-2018 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""A simple example to get started with SPDHG [CERS2017]. The example at hand
solves the ROF denoising problem.
Reference
---------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
"""
from __future__ import division, print_function
import odl
import odl.contrib.solvers.spdhg as spdhg
import odl.contrib.datasets.images as images
import numpy as np
# set ground truth and data
image_gray = images.building(gray=True)
X = odl.uniform_discr([0, 0], image_gray.shape, image_gray.shape)
groundtruth = X.element(image_gray)
data = odl.phantom.white_noise(X, mean=groundtruth, stddev=0.1, seed=1807)
# set parameter
alpha = .12 # regularisation parameter
nepoch = 100
# set functionals and operator
A = odl.BroadcastOperator(*[odl.PartialDerivative(X, d, pad_mode='symmetric')
for d in [0, 1]])
f = odl.solvers.SeparableSum(*[odl.solvers.L1Norm(Yi) for Yi in A.range])
g = 1 / (2 * alpha) * odl.solvers.L2NormSquared(X).translated(data)
# set sampling
n = 2 # number of subsets
prob = [1 / n] * n  # probability that a subset gets selected
S = [[0], [1]] # all possible subsets to select from
def fun_select(k): # subset selection function
return S[int(np.random.choice(n, 1, p=prob))]
# set parameters for algorithm
Ai_norm = [2, 2]
gamma = 0.99
sigma = [gamma / a for a in Ai_norm]
tau = gamma / (n * max(Ai_norm))
# callback for output during the iterations
cb = (odl.solvers.CallbackPrintIteration(fmt='iter:{:4d}', step=n, end=', ') &
odl.solvers.CallbackPrintTiming(fmt='time: {:5.2f} s', cumulative=True,
step=n))
# initialise variable and run algorithm
x = X.zero()
niter = 2 * nepoch
spdhg.spdhg(x, f, g, A, tau, sigma, niter, prob=prob, fun_select=fun_select,
callback=cb)
# show data and output
data.show()
x.show()
|
quantumlib/ReCirq
|
recirq/hfvqe/analysis_test.py
|
Python
|
apache-2.0
| 6,819
| 0
|
# Copyright 2020 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import product
import numpy as np
import scipy as sp
from recirq.hfvqe.circuits import rhf_params_to_matrix
from recirq.hfvqe.analysis import (trace_distance, kdelta, energy_from_opdm,
fidelity_witness, fidelity,
mcweeny_purification)
from recirq.hfvqe.molecular_example import make_h6_1_3, make_h3_2_5
from recirq.hfvqe.gradient_hf import rhf_func_generator
def test_kdelta():
assert np.isclose(kdelta(1, 1), 1.)
assert np.isclose(kdelta(0, 1), 0.)
def test_trace_distance():
rho = np.arange(16).reshape((4, 4))
sigma = np.arange(16, 32).reshape((4, 4))
assert np.isclose(trace_distance(rho, rho), 0.)
assert np.isclose(trace_distance(rho, sigma), 32.0)
def test_energy_from_opdm():
"""Build test assuming sampling functions work"""
rhf_objective, molecule, parameters, obi, tbi = make_h6_1_3()
unitary, energy, _ = rhf_func_generator(rhf_objective)
    parameters = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
initial_opdm = np.diag([1] * 3 + [0] * 3)
final_opdm = unitary(parameters) @ initial_opdm @ unitary(
parameters).conj().T
test_energy = energy_from_opdm(final_opdm,
constant=molecule.nuclear_repulsion,
one_body_tensor=obi,
two_body_tensor=tbi)
true_energy = energy(parameters)
assert np.allclose(test_energy, true_energy)
def test_energy_from_opdm_odd_qubit():
""
|
"Build test assuming sampling functions work"""
rhf_objective, molecule, parameters, obi, tbi = make_h3_2_5()
unitary, energy, _ = rhf_func_generator(rhf_objective)
parameters = np.array([0.1, 0.2])
initial_opdm = np.diag([1] * 1 + [0] * 2)
print(initial_opdm)
final_opdm = unitary(parameters) @ initial_opdm @ unitary(
parameters).conj().T
test_energy = energy_from_opdm(final_opdm,
constant=molecule.nuclear_repulsion,
one_body_tensor=obi,
two_body_tensor=tbi)
true_energy = energy(parameters)
assert np.allclose(test_energy, true_energy)
def test_mcweeny():
np.random.seed(82)
opdm = np.array([[
0.766034130, -0.27166330, -0.30936072, -0.08471057, -0.04878244,
-0.01285432
],
[
-0.27166330, 0.67657015, -0.37519640, -0.02101843,
-0.03568214, -0.05034585
],
[
-0.30936072, -0.37519640, 0.55896791, 0.04267370,
-0.02258184, -0.08783738
],
[
-0.08471057, -0.02101843, 0.04267370, 0.05450848,
0.11291253, 0.17131658
],
[
-0.04878244, -0.03568214, -0.02258184, 0.11291253,
0.26821219, 0.42351185
],
[
-0.01285432, -0.05034585, -0.08783738, 0.17131658,
0.42351185, 0.67570713
]])
for i, j in product(range(6), repeat=2):
opdm[i, j] += np.random.randn() * 1.0E-3
opdm = 0.5 * (opdm + opdm.T)
pure_opdm = mcweeny_purification(opdm)
w, _ = np.linalg.eigh(pure_opdm)
assert len(np.where(w < -1.0E-9)[0]) == 0
def test_fidelity():
parameters = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
u = sp.linalg.expm(rhf_params_to_matrix(parameters, 6))
opdm = np.array([[
0.766034130, -0.27166330, -0.30936072, -0.08471057, -0.04878244,
-0.01285432
],
[
-0.27166330, 0.67657015, -0.37519640, -0.02101843,
-0.03568214, -0.05034585
],
[
-0.30936072, -0.37519640, 0.55896791, 0.04267370,
-0.02258184, -0.08783738
],
[
-0.08471057, -0.02101843, 0.04267370, 0.05450848,
0.11291253, 0.17131658
],
[
-0.04878244, -0.03568214, -0.02258184, 0.11291253,
0.26821219, 0.42351185
],
[
-0.01285432, -0.05034585, -0.08783738, 0.17131658,
0.42351185, 0.67570713
]])
assert np.isclose(fidelity(u, opdm), 1.0)
opdm += 0.1
opdm = 0.5 * (opdm + opdm.T)
assert np.isclose(fidelity(u, opdm), 0.3532702370138279)
def test_fidelity_witness():
parameters = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
u = sp.linalg.expm(rhf_params_to_matrix(parameters, 6))
omega = [1] * 3 + [0] * 3
opdm = np.array([[
0.766034130, -0.27166330, -0.30936072, -0.08471057, -0.04878244,
-0.01285432
],
[
-0.27166330, 0.67657015, -0.37519640, -0.02101843,
-0.03568214, -0.05034585
],
[
-0.30936072, -0.37519640, 0.55896791, 0.04267370,
-0.02258184, -0.08783738
],
[
-0.08471057, -0.02101843, 0.04267370, 0.05450848,
0.11291253, 0.17131658
],
[
-0.04878244, -0.03568214, -0.02258184, 0.11291253,
0.26821219, 0.42351185
],
[
-0.01285432, -0.05034585, -0.08783738, 0.17131658,
0.42351185, 0.67570713
]])
assert np.isclose(fidelity_witness(u, omega, opdm), 1.0)
opdm += 0.1
opdm = 0.5 * (opdm + opdm.T)
# higher than fidelity because of particle number breaking
assert np.isclose(fidelity_witness(u, omega, opdm), 0.7721525013371697)
|
nanaze/pystitch
|
pystitch/dmc_colors.py
|
Python
|
apache-2.0
| 1,790
| 0.027374
|
import csv
import os
import color
def _GetDataDirPath():
return os.path.join(os.path.dirname(__file__), 'data')
def _GetCsvPath():
return os.path.join(_GetDataDirPath(), 'dmccolors.csv')
def _GetCsvString():
with open(_GetCsvPath()) as f:
return f.read().strip()
def _CreateDmcColorFromRow(row):
number = int(row[0])
name = row[1]
hex_color = row[5]
rgb_color = color.RGBColorFromHexString(hex_color)
return DMCColor(number, name, rgb_color)
# DMC Colors singleton
_dmc_colors = None
def _CreateDMCColors():
global _dmc_colors
csv_data = _GetCsvString()
  lines = csv_data.splitlines()
# Skip first line
lines = lines[1:]
reader = csv.reader(lines, delimiter='\t')
dmc_colors = set()
for row in reader:
    dmc_colors.add(_CreateDmcColorFromRow(row))
return dmc_colors
def GetDMCColors():
global _dmc_colors
if not _dmc_colors:
_dmc_colors = frozenset(_CreateDMCColors())
return _dmc_colors
def GetClosestDMCColorsPairs(rgb_color):
pairs = list()
for dcolor in GetDMCColors():
pairs.append((dcolor, color.RGBColor.distance(rgb_color, dcolor.color)))
return sorted(pairs, key=lambda pair: pair[1])
def GetClosestDMCColors(rgb_color):
return [pair[0] for pair in GetClosestDMCColorsPairs(rgb_color)]
class DMCColor(object):
def __init__(self, number, name, color):
self.number = number
self.name = name
self.color = color
def __str__(self):
return super(DMCColor, self).__str__() + str((self.number, self.name, self.color))
def GetStringForDMCColor(dmc_color):
return "%s %s %s" % (dmc_color.number, dmc_color.name, dmc_color.color)
# Simple executable functionality for debugging.
def main():
for color in GetDMCColors():
print color
if __name__ == '__main__':
main()
|
Azure/azure-sdk-for-python
|
sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_07_01/models/__init__.py
|
Python
|
mit
| 10,249
| 0.000195
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import ARMBaseModel
|
from ._models_py3 import Address
from ._models_py3 import Alert
from ._models_py3 import AlertErrorDetails
from ._models_py3 import AlertList
from ._models_py3 import AsymmetricEncryptedSecret
from ._models_py3 import Authentication
from ._models_py3 import AzureContainerInfo
from ._models_py3 import BandwidthSchedule
from ._models_py3 import BandwidthSchedulesList
from ._models_py3 import ClientAccessRight
from ._models_py3 import CloudErrorBody
from ._models_py3 import ContactDetails
from ._models_py3 import DataBoxEdgeDevice
from ._models_py3 import DataBoxEdgeDeviceExtendedInfo
from ._models_py3 import DataBoxEdgeDeviceList
from ._models_py3 import DataBoxEdgeDevicePatch
from ._models_py3 import FileEventTrigger
from ._models_py3 import FileSourceInfo
from ._models_py3 import IoTDeviceInfo
from ._models_py3 import IoTRole
from ._models_py3 import Ipv4Config
from ._models_py3 import Ipv6Config
from ._models_py3 import Job
from ._models_py3 import JobErrorDetails
from ._models_py3 import JobErrorItem
from ._models_py3 import MetricDimensionV1
from ._models_py3 import MetricSpecificationV1
from ._models_py3 import MountPointMap
from ._models_py3 import NetworkAdapter
from ._models_py3 import NetworkAdapterPosition
from ._models_py3 import NetworkSettings
from ._models_py3 import Node
from ._models_py3 import NodeList
from ._models_py3 import Operation
from ._models_py3 import OperationDisplay
from ._models_py3 import OperationsList
from ._models_py3 import Order
from ._models_py3 import OrderList
from ._models_py3 import OrderStatus
from ._models_py3 import PeriodicTimerEventTrigger
from ._models_py3 import PeriodicTimerSourceInfo
from ._models_py3 import RefreshDetails
from ._models_py3 import Role
from ._models_py3 import RoleList
from ._models_py3 import RoleSinkInfo
from ._models_py3 import SecuritySettings
from ._models_py3 import ServiceSpecification
from ._models_py3 import Share
from ._models_py3 import ShareAccessRight
from ._models_py3 import ShareList
from ._models_py3 import Sku
from ._models_py3 import StorageAccountCredential
from ._models_py3 import StorageAccountCredentialList
from ._models_py3 import SymmetricKey
from ._models_py3 import TrackingInfo
from ._models_py3 import Trigger
from ._models_py3 import TriggerList
from ._models_py3 import UpdateDownloadProgress
from ._models_py3 import UpdateInstallProgress
from ._models_py3 import UpdateSummary
from ._models_py3 import UploadCertificateRequest
from ._models_py3 import UploadCertificateResponse
from ._models_py3 import User
from ._models_py3 import UserAccessRight
from ._models_py3 import UserList
except (SyntaxError, ImportError):
from ._models import ARMBaseModel # type: ignore
from ._models import Address # type: ignore
from ._models import Alert # type: ignore
from ._models import AlertErrorDetails # type: ignore
from ._models import AlertList # type: ignore
from ._models import AsymmetricEncryptedSecret # type: ignore
from ._models import Authentication # type: ignore
from ._models import AzureContainerInfo # type: ignore
from ._models import BandwidthSchedule # type: ignore
from ._models import BandwidthSchedulesList # type: ignore
from ._models import ClientAccessRight # type: ignore
from ._models import CloudErrorBody # type: ignore
from ._models import ContactDetails # type: ignore
from ._models import DataBoxEdgeDevice # type: ignore
from ._models import DataBoxEdgeDeviceExtendedInfo # type: ignore
from ._models import DataBoxEdgeDeviceList # type: ignore
from ._models import DataBoxEdgeDevicePatch # type: ignore
from ._models import FileEventTrigger # type: ignore
from ._models import FileSourceInfo # type: ignore
from ._models import IoTDeviceInfo # type: ignore
from ._models import IoTRole # type: ignore
from ._models import Ipv4Config # type: ignore
from ._models import Ipv6Config # type: ignore
from ._models import Job # type: ignore
from ._models import JobErrorDetails # type: ignore
from ._models import JobErrorItem # type: ignore
from ._models import MetricDimensionV1 # type: ignore
from ._models import MetricSpecificationV1 # type: ignore
from ._models import MountPointMap # type: ignore
from ._models import NetworkAdapter # type: ignore
from ._models import NetworkAdapterPosition # type: ignore
from ._models import NetworkSettings # type: ignore
from ._models import Node # type: ignore
from ._models import NodeList # type: ignore
from ._models import Operation # type: ignore
from ._models import OperationDisplay # type: ignore
from ._models import OperationsList # type: ignore
from ._models import Order # type: ignore
from ._models import OrderList # type: ignore
from ._models import OrderStatus # type: ignore
from ._models import PeriodicTimerEventTrigger # type: ignore
from ._models import PeriodicTimerSourceInfo # type: ignore
from ._models import RefreshDetails # type: ignore
from ._models import Role # type: ignore
from ._models import RoleList # type: ignore
from ._models import RoleSinkInfo # type: ignore
from ._models import SecuritySettings # type: ignore
from ._models import ServiceSpecification # type: ignore
from ._models import Share # type: ignore
from ._models import ShareAccessRight # type: ignore
from ._models import ShareList # type: ignore
from ._models import Sku # type: ignore
from ._models import StorageAccountCredential # type: ignore
from ._models import StorageAccountCredentialList # type: ignore
from ._models import SymmetricKey # type: ignore
from ._models import TrackingInfo # type: ignore
from ._models import Trigger # type: ignore
from ._models import TriggerList # type: ignore
from ._models import UpdateDownloadProgress # type: ignore
from ._models import UpdateInstallProgress # type: ignore
from ._models import UpdateSummary # type: ignore
from ._models import UploadCertificateRequest # type: ignore
from ._models import UploadCertificateResponse # type: ignore
from ._models import User # type: ignore
from ._models import UserAccessRight # type: ignore
from ._models import UserList # type: ignore
from ._data_box_edge_management_client_enums import (
AccountType,
AlertSeverity,
AuthenticationType,
AzureContainerDataFormat,
ClientPermissionType,
DataBoxEdgeDeviceStatus,
DataPolicy,
DayOfWeek,
DeviceType,
DownloadPhase,
EncryptionAlgorithm,
InstallRebootBehavior,
JobStatus,
JobType,
MetricAggregationType,
MetricCategory,
MetricUnit,
MonitoringStatus,
NetworkAdapterDHCPStatus,
NetworkAdapterRDMAStatus,
NetworkAdapterStatus,
NetworkGroup,
NodeStatus,
OrderState,
PlatformType,
RoleStatus,
RoleTypes,
SSLStatus,
ShareAccessProtocol,
ShareAccessType,
ShareStatus,
SkuName,
SkuTier,
TimeGrain,
TriggerEventType,
UpdateOperation,
UpdateOperationStage,
)
__all__ = [
'ARMBaseModel',
'Address',
'Alert',
'AlertErrorDetails',
'AlertList',
'AsymmetricEncryptedSecret',
'Authentication',
'AzureContainerInfo',
'BandwidthSchedule',
'BandwidthSchedulesList
|
grilo/pyaccurev
|
tests/test_client.py
|
Python
|
gpl-3.0
| 19,204
| 0.00125
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import os
import collections
import mock
import accurev.client
import accurev.depot
class TestAccuRevClient(unittest.TestCase):
def setUp(self):
self.client = accurev.client.Client()
def test_cmd(self):
self.client.chdir('somedirectory')
expected = "accurev somecommand"
with mock.patch.object(accurev.utils, "cmd") as mocked:
self.client.cmd('somecommand')
mocked.assert_called_once_with('accurev somecommand', 'somedirectory')
def test_xml_cmd(self):
with mock.patch.object(self.client, "tempfile_cmd") as mocked:
self.client.xml_cmd('somestring')
mocked.assert_called_once_with('xml', 'somestring')
def test_info(self):
string = """Shell: /bin/bash
Principal: automaticTasks
Host: madprdci2
Domain: (none)
Server name: 169.0.0.1
Port: 5050
DB Encoding: Unicode
ACCUREV_BIN: /opt/accurev-5.5/bin
Client time: 2017/05/14 04:29:59 CEST (1494728999)
Server time: 2017/05/14 04:30:00 CEST (1494729000)"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = string, ''
self.assertTrue(isinstance(self.client.info, dict))
expected = [
'Shell',
'Principal',
'Host',
'Domain',
'Server name',
'Port',
'DB Encoding',
'ACCUREV_BIN',
'Client time',
'Server time',
]
self.assertEqual(len(self.client.info.keys()), len(expected))
def test_depot_count(self):
string = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="show depots"
TaskId="12492">
<Element
Number="1"
Name="OFFICE"
Slice="1"
exclusiveLocking="false"
case="insensitive"
locWidth="128"/>
<Element
Number="2"
Name="PROVIDER"
Slice="2"
exclusiveLocking="false"
case="insensitive"
locWidth="128"/>
</AcResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = string, ''
depots = self.client.depots
self.assertEqual(len(depots.keys()), 2)
for d in depots.values():
self.assertTrue(isinstance(d, accurev.depot.Depot))
def test_login_permanent(self):
with mock.patch.object(self.client, "cmd") as mocked:
self.client.login('user', 'pass', permanent=True)
mocked.assert_called_once_with('login -n user pass')
def test_users(self):
xml = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="show users"
TaskId="647018">
<Element
Number="1"
Name="Administrator"
Kind="full"/>
<Element
Number="2"
Name="SomeoneElse"
Kind="full"/>
</AcResponse>"""
with mock.patch.object(self.client, "user_show") as mocked:
mocked.return_value = xml
users = list(self.client.users)
self.assertTrue(len(users), 2)
def test_tempfile_cmd(self):
with mock.patch.object(accurev.client.tempfile, "NamedTemporaryFile") as mocktmp:
mocktmp.return_value = open('notrandomfile', 'w')
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = 'stdout', 'stderr'
self.client.tempfile_cmd('xml', 'world')
mocked.assert_called_once_with('xml -l notrandomfile')
if os.path.isfile('notrandomfile'):
os.unlink('notrandomfile')
def test_group_show_no_user(self):
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = '', ''
self.client.group_show()
mocked.assert_called_once_with('show -fx groups')
    def test_group_show_with_user(self):
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = '', ''
self.client.group_show('user')
mocked.assert_called_once_with('show -fx -u user groups')
    def test_member_show(self):
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = '', ''
self.client.member_show('group')
mocked.assert_called_once_with('show -fx -g group members')
def test_cpkdescribe(self):
query = "<AcRequest>\n"
query += "\t<cpkdescribe>\n"
query += "\t\t<depot>mycompany</depot>\n"
query += "\t\t<stream1>some_stream</stream1>\n"
query += "\t\t<issues>\n"
query += "\t\t\t<issueNum>1010</issueNum>\n"
query += "\t\t</issues>\n"
query += "\t</cpkdescribe>\n"
query += "</AcRequest>"
response = """<?xml version="1.0" encoding="utf-8"?>
<acResponse>
<issues>
<issue ancestry="direct">
<issueNum fid="1">1010</issueNum>
</issue>
</issues>
</acResponse>"""
with mock.patch.object(self.client, "xml_cmd") as mocked:
mocked.return_value = response, ''
issues = self.client.cpkdescribe(['1010'], 'mycompany', 'some_stream')
mocked.assert_called_once_with(query)
def test_schema(self):
response = """<?xml version="1.0" encoding="UTF-8"?>
<template name="default">
<lookupField fid="5"/>
<field name="issueNum" type="internal" label="Issue" reportWidth="10" fid="1"></field>
<field name="transNum" type="internal" label="Transaction" reportWidth="10" fid="2"> </field>
<field name="shortDescription" type="Text" label="Short Description" reportWidth="150" width="60" fid="3"></field>
<field name="state" type="Choose" label="State" reportWidth="10" fid="4">
<value>Open</value>
<value>Cancelled</value>
<value>Closed</value>
</field>
<field name="JIRA" type="Text" label="Jira Issue" reportWidth="10" width="15" fid="5"></field>
</template>"""
with mock.patch.object(self.client, "getconfig") as mocked:
mocked.return_value = response, ''
schema = self.client.schema('mycompany')
mocked.assert_called_once_with('mycompany', 'schema.xml')
def test_element_promote(self):
response = "<elements>\n"
response += """\t<e eid="10" v="1/1"/>\n"""
response += """\t<e eid="11" v="2/2"/>\n"""
response += "</elements>"
class Element:
pass
element_one = Element()
element_one.eid = "10"
element_one.real_version = "1/1"
element_two = Element()
element_two.eid = "11"
element_two.real_version ="2/2"
element_list = [
element_one,
element_two
]
with mock.patch.object(self.client, "tempfile_cmd") as mocked:
self.client.element_promote(element_list, 'hello', 'world')
mocked.assert_called_once_with('promote -s hello -S world -Fx', response)
def test_issue_query(self):
expected = """<queryIssue issueDB="mycompany" useAltQuery="false">\n"""
expected += "\t<OR>\n"
expected += "\t\t<condition>1 == 10</condition>\n"
expected += "\t\t<condition>1 == 20</condition>\n"
expected += "\t</OR>\n"
expected += "</queryIssue>"
response = """<?something>\n"""
response += """<issueOne/>"""
response += """<issueTwo/>"
|
bravelittlescientist/kdd-particle-physics-ml-fall13
|
src/adaboost.py
|
Python
|
gpl-2.0
| 1,625
| 0.002462
|
#!/usr/bin/python2
# This is an Adaboost classifier
import sys
from util import get_split_training_dataset
from metrics import suite
import feature_selection_trees as fclassify
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
def train(Xtrain, Ytrain):
""" Use entirety of provided X, Y to predict
Default Arguments
Xtrain -- Training data
Ytrain -- Training prediction
Named Arguments
C -- regularization parameter
Returns
classifier -- a tree fitted to Xtrain and Ytrain
"""
# Initialize classifier parameters for adaboost
# For adaboost, this means the number of estimators for now
ada = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1))
parameters = {'n_estimators': [150]}
# Classify over grid of parameters
classifier = GridSearchCV(ada, parameters)
classifier.fit(Xtrain, Ytrain)
    return classifier
if __name__ == "__main__":
# Let's take our training data and train a decision tree
# on a subset. Scikit-learn provides a good module for cross-
# validation.
Xt, Xv, Yt, Yv = get_split_training_dataset()
Classifier = train(Xt, Yt)
    print "Adaboost Classifier"
suite(Yv, Classifier.predict(Xv))
# smaller feature set
Xtimp, features = fclassify.get_important_data_features(Xt, Yt, max_features=25)
Xvimp = fclassify.compress_data_to_important_features(Xv, features)
ClassifierImp = train(Xtimp,Yt)
print "Adaboosts Classiifer, 25 important features"
suite(Yv, ClassifierImp.predict(Xvimp))
|
mateusz-blaszkowski/PerfKitBenchmarker
|
perfkitbenchmarker/providers/azure/provider_info.py
|
Python
|
apache-2.0
| 872
| 0.002294
|
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Provider info for Azure
"""
from perfkitbenchmarker import provider_info
from perfkitbenchmarker import benchmark_spec
class AzureProviderInfo(provider_info.BaseProviderInfo):
UNSUPPORTED_BENCHMARKS = ['mysql_service']
CLOUD = benchmark_spec.AZURE
|
gmt/kernel-ng-util
|
kernelng/config.py
|
Python
|
gpl-2.0
| 46,364
| 0.005133
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# vim:ai:sta:et:ts=4:sw=4:sts=4
"""kernelng 0.x
Tool for maintaining customized overlays of kernel-ng.eclass-based ebuilds
Copyright 2005-2014 Gentoo Foundation
Copyright (C) 2014 Gregory M. Turner <gmt@be-evil.net>
Distributed under the terms of the GNU General Public License v2
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import os
import sys
import re
from collections import OrderedDict
from itertools import chain, islice, count, repeat
import click
from click._compat import iteritems
from .output import has_verbose_level, echov, sechov, trace, suppress_tracing
import portage
try:
portage.proxy.lazyimport.lazyimport(globals(),
'portage.data:portage_uid,portage_gid')
except ImportError:
portage_uid = 250
portage_gid = 250
# eprefixifiable dummy value
EPREFIX = "@GENTOO_PORTAGE_EPREFIX@"
# non-eprefixified fallback behavior: ask portage or assume empty
if EPREFIX == "@GENTOO_%s_EPREFIX@" % "PORTAGE":
try:
from portage.const import EPREFIX as _EPREFIX
except ImportError:
_EPREFIX = ''
EPREFIX = _EPREFIX
PROGNAME = sys.argv[0].split(os.path.sep)[-1] if len(sys.argv) >= 1 else 'kernelng'
PROGDESC = 'kernel-ng-util'
FRAMEWORK = 'kernel-ng'
PORTAGE_CONF_DIR = '/etc/portage'
REPOS_CONF = 'repos.conf'
REPOS_CONF_FILE = ''.join((
EPREFIX,
PORTAGE_CONF_DIR,
os.path.sep,
REPOS_CONF
))
KERNELNG_CONF = '%s.conf' % FRAMEWORK
KERNELNG_CONF_DIR = '/etc/%s' % FRAMEWORK
EKERNELNG_CONF_DIR = '%s%s' % (EPREFIX, KERNELNG_CONF_DIR)
KERNELNG_CONF_FILE = ''.join((
EKERNELNG_CONF_DIR,
os.path.sep,
KERNELNG_CONF,
))
CONST_RE = re.compile('%\([^)]*\)[^\W\d_]', re.UNICODE)
SUBCONSTS = {
'prog': PROGNAME,
'progdesc': PROGDESC,
'framework': FRAMEWORK,
'kngconf': KERNELNG_CONF,
'kngconffile': KERNELNG_CONF_FILE,
'eprefix': EPREFIX,
'lc': '%s%s' % (
click.style('LOADCONFIG', fg='blue', bold=True),
click.style(':', fg='white', bold=True)
)
}
CONFIG_COMMENT_RE = re.compile('\s*#|\s*$', re.UNICODE)
CONFIG_SECTION_RE = re.compile('\s*\[\s*([^][]*[^][\s]+)\s*\]\s*$', re.UNICODE)
CONFIG_SETTING_RE = re.compile('\s*([^\d\W][\w-]*)\s*=\s*($|.*\S+)\s*$', re.UNICODE)
def subconsts(text, subconsts=SUBCONSTS):
"""Utility function to make substitutions from a dictionary of constants."""
try:
return text % subconsts if re.search(CONST_RE, text) else text
except ValueError as e:
echov('subconsts: error substituting in "%s": %s.' % (text, str(e)), err=True)
raise
# convenience alias
_sc = subconsts
class KNGConfigItemUnknownReason(Exception):
def __init__(self, key, value, reason):
super(KNGConfigItemUnknownReason, self).__init__(
'Unknown KNGConfigItem reason "%s", assigning "%s" to "%s"' % (
reason, value, key))
VALID_KNGCONFIGITEMREASONS=['stored', 'default', 'override']
def ValidateKNGConfigItemReason(key, value, reason):
if reason not in VALID_KNGCONFIGITEMREASONS:
raise KNGConfigItemUnknownReason(key, value, reason)
# KNGConfigItem, KNGConfigItems, KNGConfig, and fetal-ness/daddy
# ==============================================================
# The interface here is tolerable but the plumbing is ugly and inelegant
# due to code evolution by incremental hacking. The whole thing should probably be
# scrapped and re-coded from scratch, truth be told, now that I've figured out
# what it is I'm trying to accomplish.
#
# The basic data-structure we are building could be thought of as a dict of {<str>: <list>}
# items; the lists could be thought of as containing (<str>, <str>) tuples. In fact,
# that's an oversimplification. The dict is actually a KNGConfig, which is an OrderedDict
# subclass that logically represents the entire contents of a kernel-ng.conf file, with
# each dictionary key representing a section. The list is actually a KNGConfigItems instance
# and the list-items are KNGConfigItem instances (the analogue of the (<str>, <str>) tuples).
# Each KNGConfigItem either represents a configuration-file comment or a
|
standard configuration-file
# line-item (i.e.: key=value).
#
# We use the OrderedDict so that we can round-trip the
|
configuration file without re-ordering
# the sections. Initially this will be fairly broken, but the enhancements to achieve full
# .conf => OO => .conf round-trip capabilities simply amount to saving off some formatting metadata
# at the KNGConfigItem level during "deserialization" -- aka parsing, what-have-you. First,
# .conf-file deserialization of /any/ sort will need to be implemented :S.
#
# The motivation for much of the craziness below is that I wanted consumers to be able to say:
# "kngconfig['foo']['bar'] = 'baz'", and have the bar setting in the foo section receive a value of
# 'baz'. Even so, thereafter, kngconfig['foo']['bar'] would not be 'baz', but a KNGConfigItem
# with value 'baz' and key 'bar', but that's fine, kngconfig['foo']['bar'].value would be our 'baz'.
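#
# A minimal, hypothetical usage sketch of that access pattern (doctest-style and purely
# illustrative of the behaviour these comments describe; not a verbatim excerpt of the API):
#
#     cfg = KNGConfig()
#     cfg['foo']['bar'] = 'baz'   # __missing__ conjures a (fetal) 'foo' section on demand
#     cfg['foo']['bar'].value     # -> 'baz'; the mapping itself holds a KNGConfigItem
#     cfg['quux']                 # merely reading yields a fetal KNGConfigItems, which must
#                                 # not "count" as a real section until a value is born into it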
#
# To achieve this, I used the __missing__ feature at the top dict level, added hybrid-dict features
# to KNGConfigItems (so that KNGConfigItems.__getattr__ will search the KNGConfigItem instances
# it contains for the provided index, or otherwise call a "_missing" API which works just like
# "__missing__" but, obviously is not a built-in magic name thingy so-preferably-not-to-speak.
# BUT, crap, I thought, this would mean that as soon as the API consumer simply looks at
# kngconfig['foo'], the 'foo' section must come into being. Which wouldn't be a problem except
# that a 'kernelng_foo' package would fail to be generated during "kernelng overlay update" due
# to (amazingly!) there being no */foo package in all of portage. Clearly this would not be what
# most API consumers meant by kngconfig['foo'].
#
# To solve this dilemma, I created the concept of "fetal" KNGConfigItem and KNGConfigItems
# instances. In this scheme, two new properties are created: "daddy" and "fetal". Daddy maps back
# to the container that contains the instance (nb: implications wrt. i.e., deepclone() are not
# dealt with yet); meanwhile, fetal tells us:
#
# KNGConfigItem: if the instance has never had a non-None "value" property set
# KNGConfigItems: if the instance has ever had any non-fetal KNGConfigItem instances in it.
#
# Once these are "born", there is back-propagation through the "daddy"s so that the KNGConfigItems
# get born themselves, the instant they become grandparents, if necessary.
#
# The purpose of all these acrobatics is to censor the fetuses during deserialization, ensuring
# that no gross side effects occur due to the objects generated by __missing__ and _missing.
#
# Yes, I know this is all kinds of ugly but the interface is almost reasonable (eliminating the
# requirement to pass a "daddy" keyword argument to constructors would be nice and will eventually
# get done; the ability for multiple containers to be pregnant with the same fetus is not
# needed but my implementation also sort-of breaks the ability for multiple containers to contain
# the same non-fetal containee, which clearly sucks and should also be fixed).
#
# Each KNGConfigItem has a "reason" property which explains its semantic purpose. Three "reasons"
# are supported: "stored" is the standard reason and simply means the KNGConfigItem represents
# a setting which should persist when the KNGConfig containing it is deserialized. The "default"
# reason signifies that the key=>value mapping is not stored in the configuration
|
deepmind/dm_robotics
|
cpp/setup.py
|
Python
|
apache-2.0
| 4,478
| 0.00335
|
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build script for the python controller bindings."""
import os
import subprocess
import sys
from setuptools import Extension
from setuptools import setup
from setuptools.command.build_ext import build_ext
class CMakeExtension(Extension):
"""Extension to record the directory to run cmake on."""
def __init__(self, name, sourcedir, cmake):
Extension.__init__(self, name, sources=[])
self.sourcedir = os.path.abspath(sourcedir)
self.cmake = cmake
class CMakeBuild(build_ext):
"""Runs cmake."""
def build_extension(self, ext):
output_directory = os.path.abspath(
os.path.dirname(self.get_ext_fullpath(ext.name)))
# required for auto-detection of auxiliary "native" libs
if not output_directory.endswith(os.path.sep):
output_directory += os.path.sep
build_type = "Debug" if self.debug else "Release"
cmake_args = [
"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={}".format(output_directory),
"-DPYTHON_EXECUTABLE={}".format(sys.executable),
"-DDMR_PYTHON_VERSION={}.{}".format(sys.version_info.major,
sys.version_info.minor),
"-DCMAKE_BUILD_TYPE={
|
}".format(build_type),
"-DDM_ROBOTICS_BUILD_TESTS=OFF",
"-DDM_ROBOTICS_BUILD_WHEEL=True",
"--log-level=VERBOSE",
]
version_script = os.environ.get("DM_ROBOTICS_VERSION_SCRIPT", None)
if version_script:
      cmake_args.append(f"-DDM_ROBOTICS_VERSION_SCRIPT={version_script}",)
build_args = []
if "CMAKE_BUILD_PARALLEL_LEVEL" not in os.environ:
build_args += ["-j4"]
if not os.path.exists(self.build_temp):
os.makedirs(self.build_temp)
# Generate build files:
subprocess.check_call(
[ext.cmake] + cmake_args + ["-S", ext.sourcedir], cwd=self.build_temp)
# Build.
subprocess.check_call(
[ext.cmake, "--build", "."] + build_args, cwd=self.build_temp)
def _get_requirements(requirements_file): # pylint: disable=g-doc-args
"""Returns a list of dependencies for setup() from requirements.txt.
Currently a requirements.txt is being used to specify dependencies. In order
to avoid specifying it in two places, we're going to use that file as the
source of truth.
Lines starting with -r will be ignored. If the requirements are split across
multiple files, call this function multiple times instead and sum the results.
"""
def line_should_be_included(line):
return line and not line.startswith("-r")
with open(requirements_file) as f:
return [_parse_line(line) for line in f if line_should_be_included(line)]
def _parse_line(s):
"""Parses a line of a requirements.txt file."""
requirement, *_ = s.split("#")
return requirement.strip()
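# Hypothetical usage sketch for the helpers above (the file names here are illustrative
# only, not taken from this repository): when requirements are split across several
# files, call the helper once per file and sum the resulting lists, as the docstring
# suggests, e.g.
#
#   install_requires = (_get_requirements("requirements.txt")
#                       + _get_requirements("requirements_external.txt"))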
setup(
name="dm_robotics-controllers",
package_dir={"dm_robotics.controllers": ""},
packages=["dm_robotics.controllers"],
version="0.3.0",
license="Apache 2.0",
author="DeepMind",
description="Python bindings for dm_robotics/cpp/controllers",
long_description=open("controllers_py/README.md").read(),
long_description_content_type="text/markdown",
url="https://github.com/deepmind/dm_robotics/tree/main/cpp/controllers_py",
python_requires=">=3.7, <3.10",
setup_requires=["wheel >= 0.31.0"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Scientific/Engineering",
],
ext_modules=[
CMakeExtension(
"dm_robotics.controllers.cartesian_6d_to_joint_velocity_mapper",
sourcedir="",
cmake=os.environ.get("CMAKE_EXE", "cmake"))
],
cmdclass={"build_ext": CMakeBuild},
zip_safe=False,
)
|
endlos99/xdt99
|
test/as-checkobj.py
|
Python
|
gpl-3.0
| 4,385
| 0.00114
|
#!/usr/bin/env python3
import os
from config import Dirs, Disks, Files, XAS99_CONFIG
from utils import (xas, xdm, sinc, error, clear_env, delfile, check_obj_code_eq, check_image_set_eq,
                   check_image_files_eq, read_stderr, get_source_markers, check_errors)
# Main test
def runtest():
"""check cross-generated output against native reference files"""
clear_env(XAS99_CONFIG)
# object code
for inp_file, opts, ref_file, compr_file in [
('asdirs.asm', [], 'ASDIRS-O', 'ASDIRS-C'),
('asorgs.asm', [], 'ASORGS-O', 'ASORGS-C'),
('asopcs.asm', [], 'ASOPCS-O', 'ASOPCS-C'),
('asexprs.asm', [], 'ASEXPRS-O', None),
('asbss.asm', [], 'ASBSS-O', 'ASBSS-C'),
('asregs.asm', ['-R'], 'ASREGS-O', 'ASREGS-C'),
('ashellon.asm', ['-R'], 'ASHELLO-O', 'ASHELLO-C'),
('ascopy.asm', [], 'ASCOPY-O', None),
('ascopyn.asm', [], 'ASCOPYN-O', None),
('assize1.asm', [], 'ASSIZE1-O', 'ASSIZE1-C'),
('assize2.asm', [], 'ASSIZE2-O', None),
('assize3.asm', [], 'ASSIZE3-O', None),
('assize4.asm', [], 'ASSIZE4-O', None),
('asextsym.asm', [], 'ASEXTSYM-O', None),
('asdorg.asm', [], 'ASDORG-O', None),
('asrorg.asm', [], 'ASRORG-O', None),
('asimg1.asm', [], 'ASIMG1-O', 'ASIMG1-C'),
('asimg2.asm', [], 'ASIMG2-O', None),
('asimg3.asm', [], 'ASIMG3-OX', None),
('asreloc.asm', [], 'ASRELOC-O', None),
('asxorg.asm', [], 'ASXORG-O', None),
('ascart.asm', ['-R'], 'ASCART-O', 'ASCART-C')
]:
source = os.path.join(Dirs.sources, inp_file)
xdm(Disks.asmsrcs, '-e', ref_file, '-o', Files.reference)
xas(*[source] + opts + ['-q', '-o', Files.output])
check_obj_code_eq(Files.output, Files.reference)
xas(*[source] + opts + ['--strict', '-q', '-o', Files.output])
check_obj_code_eq(Files.output, Files.reference)
if compr_file:
# compressed object code
xas(*[source] + opts + ['-C', '-q', '-o', Files.output])
xdm(Disks.asmsrcs, '-e', compr_file, '-o', Files.reference)
check_obj_code_eq(Files.output, Files.reference, compressed=True)
# image files
for inp_file, ref_file in [
('asimg1.asm', 'ASIMG1-I'),
('asimg2.asm', 'ASIMG2-I'),
('asimg3.asm', 'ASIMG3-I')
]:
source = os.path.join(Dirs.sources, inp_file)
xas(source, '-i', '-o', Files.output)
xdm(Disks.asmsrcs, '-e', ref_file, '-o', Files.reference)
check_image_files_eq(Files.output, Files.reference)
for inp_file, reffiles in [
('aslimg.asm', ['ASLIMG-I', 'ASLIMG-J', 'ASLIMG-K']),
('assimg.asm', ['ASSIMG-I', 'ASSIMG-J', 'ASSIMG-K', 'ASSIMG-L']),
('asreloc.asm', ['ASRELOC-I'])
]:
source = os.path.join(Dirs.sources, inp_file)
xas(source, '-R', '-i', '-q', '-o', Files.output)
gendata = []
refdata = []
for i, ref_file in enumerate(reffiles):
xdm(Disks.asmimgs, '-e', ref_file, '-o', Files.reference)
with open(Files.outputff[i], 'rb') as fgen, open(Files.reference, 'rb') as fref:
gendata.append(fgen.read())
refdata.append(fref.read())
check_image_set_eq(gendata, refdata)
# JMP instruction
source = os.path.join(Dirs.sources, 'asjmp.asm')
with open(Files.error, 'w') as ferr:
xas(source, '-o', Files.output, stderr=ferr, rc=1)
xaserrors = read_stderr(Files.error)
referrors = get_source_markers(source, r';ERROR(:....)?')
check_errors(referrors, xaserrors)
# xas99-defined symbols
source = os.path.join(Dirs.sources, 'asxassym.asm')
xas(source, '-b', '-o', Files.output)
with open(Files.output, 'rb') as f:
data = f.read()
for i in range(0, len(data), 2):
if data[i:i + 2] == b'\x00\x00':
error('symbols', 'Undefined xas99 symbol')
# DORG special cases
source = os.path.join(Dirs.sources, 'asdorg.asm')
xas(source, '-a', '>2000', '-o', Files.output)
ref = os.path.join(Dirs.sources, 'asdorg-ti.asm')
xas(ref, '-a', '>2000', '-o', Files.reference)
check_obj_code_eq(Files.output, Files.reference)
# cleanup
delfile(Dirs.tmp)
if __name__ == '__main__':
runtest()
print('OK')
|
JDrosdeck/xml-builder-0.9
|
xmlbuilder/tests/__init__.py
|
Python
|
mit
| 3,828
| 0.025078
|
#!/usr/bin/env python
from __future__ import with_statement
#-------------------------------------------------------------------------------
import unittest
from xml.etree.ElementTree import fromstring
#-------------------------------------------------------------------------------
from xmlbuilder import XMLBuilder
#-------------------------------------------------------------------------------
def xmlStructureEqual(xml1,xml2):
tree1 = fromstring(xml1)
tree2 = fromstring(xml2)
return _xmlStructureEqual(tree1,tree2)
#-------------------------------------------------------------------------------
def _xmlStructureEqual(tree1,tree2):
if tree1.tag != tree2.tag:
return False
attr1 = list(tree1.attrib.items())
attr1.sort()
attr2 = list(tree2.attrib.items())
attr2.sort()
if attr1 != attr2:
return False
return tree1.getchildren() == tree2.getchildren()
#-------------------------------------------------------------------------------
result1 = \
"""
<root>
<array />
<array len="10">
<el val="0" />
<el val="1">xyz</el>
<el val="2">abc</el>
<el val="3" />
<el val="4" />
<el val="5" />
<sup-el val="23">test </sup-el>
</array>
</root>
""".strip()
#-------------------------------------------------------------------------------
class TestXMLBuilder(unittest.TestCase):
def testShift(self):
xml = (XMLBuilder() << ('root',))
self.assertEqual(str(xml),"<root />")
xml = XMLBuilder()
xml << ('root',"some text")
self.assertEqual(str(xml),"<root>some text</root>")
xml = XMLBuilder()
xml << ('root',{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>some text</root>"))
xml = XMLBuilder()
xml << ('root',{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'></root>"))
xml = XMLBuilder()
xml << ('root',{'x':1,'y':'2'})
self.assert_(not xmlStructureEqual(str(xml),"<root x='2' y='2'></root>"))
xml = XMLBuilder()
xml << ('root',"gonduras.ua",{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>gonduras.ua</root>"))
xml = XMLBuilder()
xml << ('root',"gonduras.ua",{'x':1,'y':'2'})
self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>gonduras.com</root>"))
#---------------------------------------------------------------------------
def testWith(self):
xml = XMLBuilder()
with xml.root(lenght = 12):
pass
self.assertEqual(str(xml),'<root lenght="12" />')
xml = XMLBuilder()
        with xml.root():
xml << "text1" << "text2" << ('some_node',)
self.assertEqual(str(xml),"<root>text1text2<some_node /></root>")
    #---------------------------------------------------------------------------
def testFormat(self):
x = XMLBuilder('utf-8',format = True)
with x.root():
x << ('array',)
with x.array(len = 10):
with x.el(val = 0):
pass
with x.el('xyz',val = 1):
pass
x << ("el","abc",{'val':2}) << ('el',dict(val=3))
x << ('el',dict(val=4)) << ('el',dict(val='5'))
with x('sup-el',val = 23):
x << "test "
self.assertEqual(str(x),result1)
#-------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
#-------------------------------------------------------------------------------
|