repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
Depado/MarkDownBlog | app/modules/blog/rss.py | Python | mit | 1,049 | 0.000953 | # -*- coding: utf-8 -*-
from flask import request, url_for
from sqlalchemy import desc
from werkzeug.contrib.atom import AtomFeed
from . import blueprint
from .utils import requested_blog_user, make_external
from app.models import Post
@blueprint.route("/recent.atom")
def rss | _feed(user_slug):
blog_user = requested_blog_user(user_slug)
if blog_user:
feed = AtomFeed(
'{user} Recent Articles'.format(user=blog_user.username),
feed_url=request.url,
url=request.url_root
)
posts = blog_user.posts.order_by(desc(Post.pub_date)).limit(15).all()
for post in posts:
feed.add(post.title, post.content_as_html(),
| content_type='html',
author=post.user.username,
url=make_external(url_for("blog.get", user_slug=user_slug, post_slug=post.title_slug)),
updated=post.pub_date,
published=post.pub_date)
return feed.get_response()
else:
return ""
|
endlessm/endless-ndn | eos_data_distribution/DirTools/test_class.py | Python | lgpl-3.0 | 742 | 0 | import pytest
from eos_data_distribution import DirTools
from gi.repository import GLib
ITER_COUNT = 10
class TestClass:
@pytest.mark.timeou | t(timeout=3, method='thread')
def test_0(self, tmpdir):
loop = GLib.MainLoop()
self.__called = 0
def cb_changed(M, p, m, f, o, evt, d=None, e=None):
print('signal', e, p, f, o, evt, d)
assert e == 'created'
self.__called += 1
d = tmpdir.mkdir("ndn")
m = DirTools.Monitor(str(d))
[m.connect(s, cb_changed, s) for s in ['create | d']]
[d.mkdir(str(i)) for i in range(ITER_COUNT)]
GLib.timeout_add_seconds(2, lambda: loop.quit())
loop.run()
assert self.__called == ITER_COUNT
|
earthenv/mapotron | tile_handler.py | Python | bsd-3-clause | 6,062 | 0.010887 | #!/usr/bin/python2.7
# -*- coding: utf-8 -*-
import services
import webapp2
import logging
import cache
import json
import config_creds
import ee_assets
from google.appengine.api import images
from google.appengine.api import urlfetch
EE_TILE_URL = 'https://earthengine.googleapis.com/map/%s/%i/%i/%i?token=%s'
class TileHandler(webapp2.RequestHandler):
def checkCoords(self, z,x,y):
if y<0 or y>=2**z:
return False
else:
return True
def get(self, collection_id, layer_id, z, x, y):
z=int(z)
x=int(x)
y=int(y)
#check tile coords
if self.checkCoords(z,x,y):
while x < 0:
x = x + 2**z
while x >= 2**z:
x = x- 2**z
self.getTile(collection_id, layer_id,z,x,y)
else:
logging.info('Coords out of range, serving blank.')
tile = open('empty.png', 'r').read()
services.writeResult(tile, self.response, format='image/png')
def getTile(self,collection_id, layer_id,z,x,y):
#first try and fetch from cache
tile_key = 'mapotron-maps_%s_%s_%i_%i_%i' % (collection_id, layer_id, int(z), int(x), int(y))
tile_meta_key = 'mapotron-maps_%s_%s' % (collection_id, layer_id)
tile = services.checkCache(tile_key, type='blob')
try:
#test tile image
image = images.Image(image_data=tile)
logging.info("Image format %s" % image.format)
if image.format >= 0:
logging.info('serving image from cache')
services.writeResult(tile, self.response, format = 'image/png')
else:
raise Exception('format','Bad format in blobstore')
except Exception as e:
logging.info(e)
#No tile available, find the latest mapid/token for this key
map_key = 'mapotron-maps_%s_%s' % (collection_id, layer_id)
tile_meta = services.checkCache(map_key, type='json')
if tile_meta is None:
#fetch fresh tile metadata if none
import config_creds
import config
import ee
import ee_services
## this is where I imagine putting .clip(bbox)...
geodesic = ee.Geometry.Rectangle(-180, -60, 180, 85)
bb | ox = ee.Geometry(geodesic, None, False)
logging.info('No tile meta, generating new map from %s ' % (ee_assets.layers[collection_id]["layers"][layer_id]["id"]))
image = ee.Image(ee_assets.layers[collection_id]["layers"][layer_id]["id"])
tile_meta = ee_services.getMap(
| image.mask(image.gt(0)).clip(bbox),
ee_assets.layers[collection_id]["layers"][layer_id]["viz_params"],
tile_meta_key)
services.cacheResult({
"mapid": tile_meta["mapid"],
"token":tile_meta["token"]},
map_key
)
#first try to get it from EE using the mapid/token given
tile_url = EE_TILE_URL % (
tile_meta["mapid"], int(z), int(x), int(y), tile_meta["token"])
logging.info("First try, using cache token -- fetching tile from %s" % tile_url)
#test that it is an image
try:
tile = urlfetch.fetch(tile_url, deadline=60).content
image = images.Image(image_data = tile)
if image.format >= 0 :
logging.info('caching tile')
services.cacheResult(tile, tile_key, type = 'blob')
services.writeResult(tile, self.response, format='image/png')
else:
raise Exception('format','Bad format')
except Exception as e:
#expired, try again
import config_creds
import config
import ee
import ee_services
logging.info('Generating new map from %s ' % (ee_assets.layers[collection_id]["layers"][layer_id]["id"]))
## this is where I imagine putting .clip(bbox)...
geodesic = ee.Geometry.Rectangle(-180, -60, 180, 85)
bbox = ee.Geometry(geodesic, None, False)
image = ee.Image(ee_assets.layers[collection_id]["layers"][layer_id]["id"])
tile_meta = ee_services.getMap(
image.mask(image.gt(0)).clip(bbox),
ee_assets.layers[collection_id]["layers"][layer_id]["viz_params"],
tile_meta_key)
services.cacheResult({
"mapid": tile_meta["mapid"],
"token":tile_meta["token"]},
map_key
)
#first try to get it from EE using the mapid/token given
tile_url = EE_TILE_URL % (
tile_meta["mapid"], int(z), int(x), int(y), tile_meta["token"])
logging.info("Second Try, using new token - Fetching tile from %s" % tile_url)
try:
tile = urlfetch.fetch(tile_url, deadline=60).content
image = images.Image(image_data = tile)
if image.format >0:
services.cacheResult(tile, tile_key, type = 'blob')
services.writeResult(tile, self.response, format='image/png')
else:
raise Exception ('format',image.format)
except Exception as e:
logging.info(e)
services.writeResult(
{"error": "Tile not available, we tried!"},
self.response, format = 'application/json')
application = webapp2.WSGIApplication([
webapp2.Route(r'/api/tile/<collection_id>/<layer_id>/<z>/<x>/<y>.png',
handler='tile_handler.TileHandler:get')],
debug=True)
|
cgeoffroy/son-analyze | son-scikit/src/son_scikit/hl_prometheus.py | Python | apache-2.0 | 2,969 | 0 | # Copyright (c) 2015 SONATA-NFV, Thales Communications & Security
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, Thales Communications & Security
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
"""High level pandas structure for the Sonata prometheus data"""
import datetime
import typing # noqa pylint: disable=unused-import
from typing import Dict
import pandas # type: ignore
from son_analyze.core.prometheus import PrometheusData
def convert_timestamp_to_posix(timestamp: str) -> datetime.datetime:
"""Convert the timestamp into a datetime"""
return datetime.datetime.fromtimestamp(float(timestamp), # type: ignore
tz=datetime.timezone.utc)
# pylint: disable=unsubscriptable-object
def build_sonata_df_by_id(prom_data: PrometheusData) -> Dict[str,
pandas.DataFrame]:
"""Build a dict of dataframe. Each dataframe contains the values matching
the corresponding id"""
# noqa TODO: find the longest metrics and use it as the index. Interpolate the
| # other metric against it before the merge
result = {}
items_itr = prom_data._by_id.items() # pylint: disable=protected-access
for id_index, all_metrics in items_itr:
acc_ts = []
for elt in all_metrics:
metric_name = elt['metric']['__name__'] |
index, data = zip(*elt['values'])
index = [convert_timestamp_to_posix(z) for z in index]
this_serie = pandas.Series(data, index=index)
this_serie.name = metric_name
acc_ts.append(this_serie)
dataframe = pandas.concat(acc_ts, join='outer', axis=1)
dataframe.index = pandas.date_range(
start=dataframe.index[0],
periods=len(dataframe.index),
freq='S')
dataframe = dataframe.interpolate(method='index')
# import pdb; pdb.set_trace()
result[id_index] = dataframe
return result
|
RodFernandes/Python_USP_Curso_Ciencias_da_Computacao_1 | bhaskara.py | Python | apache-2.0 | 506 | 0.018182 | import math
a=float(input('Digite o valor de A:'))
b=float(input('Digite o valor de B:'))
c=float(input('Digite o valor de C | :'))
delta=(b**2)-4*a*c
if delta < 0:
print('esta equação não possui raízes reais')
else:
xPositivo = (-b + math.sqrt(delta)) / (2 * a)
if delta == 0:
print('a raiz desta equação é', xPositivo)
else:
if delta>0:
| xNegativo = (-b - math.sqrt(delta)) / (2 * a)
print('as raízes da equação são',xNegativo,'e',xPositivo) |
astroJeff/dart_board | paper/scripts/acor_plot.py | Python | mit | 2,539 | 0.016148 | import sys
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import dart_board
from pandas import Series
file_root = sys.argv[1]
file_in = "../data/" + file_root + "_chain.npy"
if len(sys.argv) == 2:
delay = 200
else:
delay = int(int(sys.argv[2]) / 100)
chains = np.load(file_in)
if chains.ndim == 4: chains = chains[0]
chains = chains[:,delay:,:]
n_chains, length, n_var = chains.shape
print(chains.shape)
if file_root == 'HMXB' or file_root == 'mock_1' or file_root == 'J0513_nosfh' or file_root == 'J0513_nosfh_PT':
var = [r'$M_1$',r'$M_2$',r'$a$',r'$e$',r'$v_k$',r'$\theta_k$',r'$\phi_k$',r'$t_b$']
elif file_root == 'LMC_HMXB' or file_root == 'mock_2' or file_root == 'mock_3' or file_root == 'J0513' or file_root == 'J0513_flatsfh' or file_root == 'J0513_PT' or file_root == 'J0513_flatsfh_PT':
var = [r'$M_1$',r'$M_2$',r'$a$',r'$e$',r'$v_k$',r'$\theta_k$',r'$\phi_k$',r'$\alpha$',r'$\delta$',r'$t_b$']
factor = 100.0
n_var = len(var)
#fig, ax = plt.subplots(int(n_var/2), 2, figsize=(8,12))
fig = plt.figure(figsize=(4,3))
# Plot the zero correlation line
plt.axhline(0.0, color='k', linewidth=2, linestyle='dashed', alpha=0.5)
N = 50
if file_root == 'HMXB':
xmax = 10000/factor
else:
xmax = 80000/factor
xmin = 0
for k in np.arange(n_var):
kx = int(k%(n_var/2))
ky = int(k/(n_var/2))
# Plot the autocorrelation of the flatchain
autocorr = np.zeros(N)
series = Series(data=chains.reshape((n_chains*length, n_var)).T[k])
for i in np.arange(N):
autocorr[i] = Series.autocorr(series, lag=int(i*float(xmax-xmin)/N))
plt.plot(np.linspace(xmin,xmax,N)*factor, autocorr, linewidth=2, label=var[k])
# ax[kx,ky].plot(np.linspace(xmin,xmax,N)*factor, autocorr, color='k', linewidth=2)
|
# Plot the | autocorrelation of 10 sample chains
# for j in np.arange(10):
# autocorr = np.zeros(N)
# series = Series(data=chains[j,:,k])
# for i in np.arange(N):
# autocorr[i] = Series.autocorr(series, lag=int(i*float(xmax-xmin)/N))
# ax[kx,ky].plot(np.linspace(xmin,xmax,N)*factor, autocorr, color='k', alpha=0.1)
# ax[kx,ky].axhline(0.0, color='k', alpha=0.3, linewidth=3)
# ax[kx,ky].set_xlabel(r'lag (steps)')
# ax[kx,ky].set_ylabel(r'Autocorrelation')
# ax[kx,ky].text(8, 0.8, var[k])
plt.legend(ncol=2)
plt.xlabel('lag (steps)')
plt.ylabel('Autocorrelation')
file_out = "../figures/" + file_root + "_acor.pdf"
plt.tight_layout()
plt.savefig(file_out)
|
joplen/svgplotlib | svgplotlib/SVG/Parsers.py | Python | bsd-3-clause | 4,791 | 0.011271 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re
class SVGParseError(Exception):
pass
EOF = object()
class Lexer:
"""
This style of implementation was inspired by this article:
http://www.gooli.org/blog/a-simple-lexer-in-python/
"""
Float = r'[-\+]?(?:(?:\d*\.\d+)|(?:\d+\.)|(?:\d+))(?:[Ee][-\+]?\d+)?'
Int = r'[-\+]?\d+'
lexicon = None
ignore = None
callbacks = None
def __init__(self):
lexicon = self.lexicon
# create internal names for group matches
groupnames = dict(('lexer_%d' % idx, item[0]) for idx,item in enumerate(lexicon))
self.groupnames = groupnames
# assemble regex parts to one regex
igroupnames = dict((value,name) for name,value in groupnames.iteritems())
regex_parts = ('(?P<%s>%s)' % (igroupnames[cls], regs) for cls,regs in lexicon)
self.regex_string = '|'.join(regex_parts)
self.regex = re.compile(self.regex_string)
def lex(self, text):
"""
Yield (token_type, data) tokens.
The last token will be (EOF, None) where EOF
"""
regex = self.regex
groupnames = self.groupnames
ignore = self.ignore or set()
callbacks = self.callbacks or dict()
position = 0
size = len(text)
while position < size:
match = regex.match(text, position)
if match is None:
raise SVGParseError('Unknown token at position %d' % position)
position = match.end()
cls = groupnames[match.lastgroup]
value = match.group(match.lastgroup)
if cls in ignore:
continue
if cls in callbacks:
value = callbacks[cls](self, value)
yield (cls, value)
yield (EOF, None)
# Parse SVG angle units
angle_pattern = \
r"""
^ # match start of line
\s* # ignore whi | tespace
(?P<value>[-\+]?\d*\.?\d*([eE][-\+]?\d+)?) # match float or int value
(?P<unit>.+)? # match any chars
\s* # ignore whitespace
$ # match end of line
"""
angle_match = re.compile(angle_pattern, re.X).match
def parseAngle(angle):
"""
Convert angle to degree | s
"""
SCALE = {
"deg": 1, "grad": 1.11, "rad":57.30
}
match = length_match(angle)
if match is None:
raise SVGParseError("Not a valid angle unit: '%s'" % angle)
value = match.group('value')
if not value:
raise SVGParseError("Not a valid angle unit: '%s'" % angle)
value = float(value)
unit = match.group('unit') or ''
if not unit:
return value
elif unit in SCALE:
return value * SCALE[unit]
else:
raise SVGParseError("Unknown unit '%s'" % unit)
# Parse SVG length units
length_pattern = \
r"""
^ # match start of line
\s* # ignore whitespace
(?P<value>[-\+]?\d*\.?\d*([eE][-\+]?\d+)?) # match float or int value
(?P<unit>.+)? # match any chars
\s* # ignore whitespace
$ # match end of line
"""
length_match = re.compile(length_pattern, re.X).match
def parseLength(length):
"""
Convert length to pixels.
"""
SCALE = {
"px": 1., "pt": 1.25, "pc": 15.,
"mm": 3.543307, "cm": 35.43307,
"in": 90., "i": 90.
}
match = length_match(str(length))
if match is None:
raise SVGParseError("Not a valid length unit: '%s'" % length)
value = match.group('value')
if not value:
raise SVGParseError("Not a valid length unit: '%s'" % length)
if value[0] == 'e' or value[0] == 'E':
value = float('1' + value)
else:
value = float(value)
unit = match.group('unit') or ''
if not unit or unit in ('em', 'ex', '%'):
# ignoring relative units
return value
elif unit in SCALE:
return value * SCALE[unit]
else:
raise SVGParseError("Unknown unit '%s'" % unit)
def parseDashArray(array):
return map(parseLength, re.split('[ ,]+', array))
def parseOpacity(value):
try:
opacity = float(value)
except ValueError:
raise SVGParseError('expected float value')
# clamp value
opacity = min(max(opacity, 0.), 1.)
return opacity
if __name__ == '__main__':
print parseAngle('3.14253rad')
print parseLength('10.5cm')
|
jtraver/dev | python/hashlib/md5.py | Python | mit | 457 | 0.006565 | #!/usr/bin/python
import hashlib
# perl
## http: | //stackoverflow.com/questions/9991757/sha256-digest-in-perl
#use Digest::MD5 qw(md5_hex);
#print md5_hex('swaranga@gmail.com'), "\n";
perl_result = "cbc41284e23c8c7ed98f589b6d6ebfd6"
md5 = hashlib.md5()
md5.update('swaranga@gmail.com')
hex1 = md5.hexdigest()
if hex1 == perl_result:
print "ok"
else:
print "FAIL perl_result = %s" % str(perl_result)
print | "FAIL hex1 = %s" % str(hex1)
|
datanel/navitia | source/jormungandr/jormungandr/interfaces/v1/Ptobjects.py | Python | agpl-3.0 | 4,241 | 0.001179 | # coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending q | uest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is | distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from flask import Flask, request
from flask.ext.restful import Resource, fields, marshal_with, reqparse, abort
from flask.globals import g
from jormungandr import i_manager, timezone
from jormungandr.interfaces.v1.fields import DisruptionsField
from make_links import add_id_links
from fields import NonNullList, NonNullNested, PbField, error, pt_object
from ResourceUri import ResourceUri
from make_links import add_id_links
from jormungandr.interfaces.argument import ArgumentDoc
from jormungandr.interfaces.parsers import depth_argument, option_value
from copy import deepcopy
pt_objects = {
"pt_objects": NonNullList(NonNullNested(pt_object), attribute='places'),
"disruptions": DisruptionsField,
"error": PbField(error, attribute='error'),
}
pt_object_type_values = ["network", "commercial_mode", "line", "route", "stop_area"]
class Ptobjects(ResourceUri):
parsers = {}
def __init__(self, *args, **kwargs):
ResourceUri.__init__(self, *args, **kwargs)
self.parsers["get"] = reqparse.RequestParser(
argument_class=ArgumentDoc)
self.parsers["get"].add_argument("q", type=unicode, required=True,
description="The data to search")
self.parsers["get"].add_argument("type[]", type=option_value(pt_object_type_values),
action="append",default=pt_object_type_values,
description="The type of data to\
search")
self.parsers["get"].add_argument("count", type=int, default=10,
description="The maximum number of\
ptobjects returned")
self.parsers["get"].add_argument("search_type", type=int, default=0,
description="Type of search:\
firstletter or type error")
self.parsers["get"].add_argument("admin_uri[]", type=str,
action="append",
description="If filled, will\
restrained the search within the\
given admin uris")
self.parsers["get"].add_argument("depth", type=depth_argument,
default=1,
description="The depth of objects")
@marshal_with(pt_objects)
def get(self, region=None, lon=None, lat=None):
self.region = i_manager.get_region(region, lon, lat)
timezone.set_request_timezone(self.region)
args = self.parsers["get"].parse_args()
self._register_interpreted_parameters(args)
if len(args['q']) == 0:
abort(400, message="Search word absent")
response = i_manager.dispatch(args, "pt_objects",
instance_name=self.region)
return response, 200
|
rhefner1/ghidonations | pipeline/models.py | Python | apache-2.0 | 6,902 | 0.011011 | #!/usr/bin/python2.5
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Datastore models used by the Google App Engine Pipeline API."""
from google.appengine.ext import db
from google.appengine.ext import blobstore
# Relative imports
import json
class _PipelineRecord(db.Model):
"""Represents a Pipeline.
Properties:
class_path: Path of the Python class to use for this pipeline.
root_pipeline: The root of the whole workflow; set to itself this pipeline
is its own root.
fanned_out: List of child _PipelineRecords that were started when this
generator pipeline moved from WAITING to RUN.
start_time: For pipelines with no start _BarrierRecord, when this pipeline
was enqueued to run immediately.
finalized_time: When this pipeline moved from WAITING or RUN to DONE.
params: Serialized parameter dictionary.
status: The current status of the pipeline.
current_attempt: The current attempt (starting at 0) to run.
max_attempts: Maximum number of attempts (starting at 0) to run.
next_retry_time: ETA of the next retry attempt.
retry_message: Why the last attempt failed; None or empty if no message.
Root pipeline properties:
is_root_pipeline: This is a root pipeline.
abort_message: Why the whole pipeline was aborted; only saved on
root pipelines.
abort_requested: If an abort signal has been requested for this root
pipeline; only saved on root pipelines
"""
WAITING = 'waiting'
RUN = 'run'
DONE = 'done'
ABORTED = 'aborted'
class_path = db.StringProperty()
root_pipeline = db.SelfReferenceProperty(
collection_name='child_pipelines_set')
fanned_out = db.ListProperty(db.Key, indexed=False)
start_time = db.DateTimeProperty(indexed=True)
finalized_time = db.DateTimeProperty(indexed=False)
# One of these two will be set, depending on the size of the params.
params_text = db.TextProperty(name='params')
params_blob = blobstore.BlobReferenceProperty(indexed=False)
status = db.StringProperty(choices=(WAITING, RUN, DONE, ABORTED),
defa | ult=WAITING)
# Retry behavior
current_attempt = db.IntegerProperty(default=0, indexed=False)
max_attempts = db.IntegerProperty(default=1, indexed=False)
next_retry_time = db.DateTimeProperty(indexed=False)
retry_message = db.TextProperty()
# Root pipeline properties
is_root_pipeline = db.BooleanProperty()
abort_message = db.TextProperty()
abort_requested = db.Boole | anProperty(indexed=False)
@classmethod
def kind(cls):
return '_AE_Pipeline_Record'
@property
def params(self):
"""Returns the dictionary of parameters for this Pipeline."""
if hasattr(self, '_params_decoded'):
return self._params_decoded
if self.params_blob is not None:
value_encoded = self.params_blob.open().read()
else:
value_encoded = self.params_text
value = json.loads(value_encoded)
if isinstance(value, dict):
kwargs = value.get('kwargs')
if kwargs:
adjusted_kwargs = {}
for arg_key, arg_value in kwargs.iteritems():
# Python only allows non-unicode strings as keyword arguments.
adjusted_kwargs[str(arg_key)] = arg_value
value['kwargs'] = adjusted_kwargs
self._params_decoded = value
return self._params_decoded
class _SlotRecord(db.Model):
"""Represents an output slot.
Properties:
root_pipeline: The root of the workflow.
filler: The pipeline that filled this slot.
value: Serialized value for this slot.
status: The current status of the slot.
fill_time: When the slot was filled by the filler.
"""
FILLED = 'filled'
WAITING = 'waiting'
root_pipeline = db.ReferenceProperty(_PipelineRecord)
filler = db.ReferenceProperty(_PipelineRecord,
collection_name='filled_slots_set')
# One of these two will be set, depending on the size of the value.
value_text = db.TextProperty(name='value')
value_blob = blobstore.BlobReferenceProperty(indexed=False)
status = db.StringProperty(choices=(FILLED, WAITING), default=WAITING,
indexed=False)
fill_time = db.DateTimeProperty(indexed=False)
@classmethod
def kind(cls):
return '_AE_Pipeline_Slot'
@property
def value(self):
"""Returns the value of this Slot."""
if hasattr(self, '_value_decoded'):
return self._value_decoded
if self.value_blob is not None:
encoded_value = self.value_blob.open().read()
else:
encoded_value = self.value_text
self._value_decoded = json.loads(encoded_value)
return self._value_decoded
class _BarrierRecord(db.Model):
"""Represents a barrier.
Properties:
root_pipeline: The root of the workflow.
target: The pipeline to run when the barrier fires.
blocking_slots: The slots that must be filled before this barrier fires.
trigger_time: When this barrier fired.
status: The current status of the barrier.
"""
# Barrier statuses
FIRED = 'fired'
WAITING = 'waiting'
# Barrier trigger reasons (used as key names)
START = 'start'
FINALIZE = 'finalize'
ABORT = 'abort'
root_pipeline = db.ReferenceProperty(_PipelineRecord)
target = db.ReferenceProperty(_PipelineRecord,
collection_name='called_barrier_set')
blocking_slots = db.ListProperty(db.Key)
trigger_time = db.DateTimeProperty(indexed=False)
status = db.StringProperty(choices=(FIRED, WAITING), default=WAITING,
indexed=False)
@classmethod
def kind(cls):
return '_AE_Pipeline_Barrier'
class _StatusRecord(db.Model):
"""Represents the current status of a pipeline.
Properties:
message: The textual message to show.
console_url: URL to iframe as the primary console for this pipeline.
link_names: Human display names for status links.
link_urls: URLs corresponding to human names for status links.
status_time: When the status was written.
"""
root_pipeline = db.ReferenceProperty(_PipelineRecord)
message = db.TextProperty()
console_url = db.TextProperty()
link_names = db.ListProperty(db.Text, indexed=False)
link_urls = db.ListProperty(db.Text, indexed=False)
status_time = db.DateTimeProperty(indexed=False)
@classmethod
def kind(cls):
return '_AE_Pipeline_Status'
|
GoogleCloudPlatform/sap-deployment-automation | third_party/github.com/ansible/awx/awx/main/tests/unit/utils/test_safe_yaml.py | Python | apache-2.0 | 2,424 | 0.000413 | # -*- coding: utf-8 -*-
from copy import deepcopy
import pytest
import yaml
from awx.main.utils.safe_yaml import safe_dump
@pytest.mark.parametrize('value', [None, 1, 1.5, []])
def test_native_types(value):
# Native non-string types should dump the same way that `yaml.safe_dump` does
assert safe_dump(value) == yaml.safe_dump(value)
def test_empty():
assert safe_dump({}) == ''
def test_raw_string():
assert safe_dump('foo') == "!unsafe 'foo'\n"
def test_kv_null():
assert safe_dump({'a': None}) == "!unsafe 'a': null\n"
def test_kv_null_safe():
assert safe_dump({'a': None}, {'a': None}) == "a: null\n"
def test_kv_null_unsafe():
assert safe_dump({'a': ''}, {'a': None}) == "!unsafe 'a': !unsafe ''\n"
def test_kv_int():
assert safe_dump({'a': 1}) == "!unsafe 'a': 1\n"
def test_kv_float():
assert safe_dump({'a': 1.5}) == "!unsafe 'a': 1.5\n"
def test_kv_unsafe():
assert safe_dump({'a': 'b'}) == "!unsafe 'a': !unsafe 'b'\n"
def test_kv_unsafe_unicode():
assert safe_dump({'a': u'🐉'}) == '!unsafe \'a\': !unsafe "\\U0001F409"\n'
def test_kv_unsafe_in_list():
assert safe_dump({'a': ['b']}) == "!unsafe 'a':\n- !unsafe 'b'\n"
def test_kv_unsafe_in_mixed_list():
assert safe_dump({'a': [1, 'b']}) == "!unsafe 'a':\n- 1\n- !unsafe 'b'\n"
def test_kv_unsafe_deep_nesting():
yaml = safe_dump({'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]})
for x in ('a', 'b', 'c', 'd', 'e'):
assert "!unsafe '{}'".format(x) in yaml
def test_kv_unsafe_multiple():
assert safe_dump({ | 'a': 'b', 'c': 'd'}) == '\n'.joi | n([
"!unsafe 'a': !unsafe 'b'",
"!unsafe 'c': !unsafe 'd'",
""
])
def test_safe_marking():
assert safe_dump({'a': 'b'}, safe_dict={'a': 'b'}) == "a: b\n"
def test_safe_marking_mixed():
assert safe_dump({'a': 'b', 'c': 'd'}, safe_dict={'a': 'b'}) == '\n'.join([
"a: b",
"!unsafe 'c': !unsafe 'd'",
""
])
def test_safe_marking_deep_nesting():
deep = {'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]}
yaml = safe_dump(deep, deepcopy(deep))
for x in ('a', 'b', 'c', 'd', 'e'):
assert "!unsafe '{}'".format(x) not in yaml
def test_deep_diff_unsafe_marking():
deep = {'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]}
jt_vars = deepcopy(deep)
deep['a'][1][0]['b']['z'] = 'not safe'
yaml = safe_dump(deep, jt_vars)
assert "!unsafe 'z'" in yaml
|
RedHatInsights/insights-core | insights/util/streams.py | Python | apache-2.0 | 3,293 | 0.001518 | """
Module for executing a command or pipeline of commands and yielding the result
as a generator of lines.
"""
import os
import shlex
import signal
from contextlib import contextmanager
from subprocess import Popen, PIPE, STDOUT
from insights.util import which
stream_options = {
"bufsize": -1, # use OS defaults. Non buffered if not set.
"universal_newlines": True, # convert all to "\n"
"stdout": PIPE, # pipe to Popen.stdout instead of literall | y stdout
"stderr": STDOUT # redirect stderr to stdout for all processes
}
def reader(stream):
for line in stream:
yield line.rstrip("\n")
timeout_command = [which("timeout"), "-s", str(signal.SIGKILL)]
@contextmanager
def stream(command, stdin=None, env=os.environ, timeout=None):
"""
Yields a generator of a command's output. For line oriented commands only.
Args:
command (str or list): a command without pi | pes. If it's not a list,
``shlex.split`` is applied.
stdin (file like object): stream to use as the command's standard input.
env (dict): The environment in which to execute the command. PATH should
be defined.
timeout (int): Amount of time in seconds to give the command to complete.
The ``timeout`` utility must be installed to use this feature.
Yields:
The output stream for the command. It should typically be wrapped in a
``reader``.
"""
if not isinstance(command, list):
command = shlex.split(command)
cmd = which(command[0])
if cmd is None:
path = env.get("PATH", "")
raise Exception("Command [%s] not in PATH [%s]" % (command[0], path))
command[0] = cmd
if timeout:
if not timeout_command[0]:
raise Exception("Timeout specified but timeout command not available.")
command = timeout_command + [str(timeout)] + command
output = None
try:
output = Popen(command, env=env, stdin=stdin, **stream_options)
yield output.stdout
finally:
if output:
output.wait()
@contextmanager
def connect(*cmds, **kwargs):
"""
Connects multiple command streams together and yields the final stream.
Args:
cmds (list): list of commands to pipe together. Each command will be an
input to ``stream``.
stdin (file like object): stream to use as the first command's
standard input.
env (dict): The environment in which to execute the commands. PATH
should be defined.
timeout (int): Amount of time in seconds to give the pipeline to complete.
The ``timeout`` utility must be installed to use this feature.
Yields:
The output stream for the final command in the pipeline. It should
typically be wrapped in a ``reader``.
"""
stdin = kwargs.get("stdin")
env = kwargs.get("env", os.environ)
timeout = kwargs.get("timeout")
end = len(cmds) - 1
@contextmanager
def inner(idx, inp):
with stream(cmds[idx], stdin=inp, env=env, timeout=timeout) as s:
if idx == end:
yield s
else:
with inner(idx + 1, s) as c:
yield c
with inner(0, stdin) as s:
yield s
|
brianwc/courtlistener | cl/search/migrations/0009_auto_20151210_1124.py | Python | agpl-3.0 | 734 | 0.002725 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter the `slug` field on Docket and OpinionCluster.

    Both fields become nullable, max_length=75, with no database index.
    (This block was garbled by stray dataset delimiters at the
    `model_name='docket'` and `name='slug'` lines; reconstructed to
    match the parallel opinioncluster entry.)
    """

    dependencies = [
        ('search', '0008_auto_20151117_1526'),
    ]

    operations = [
        migrations.AlterField(
            model_name='docket',
            name='slug',
            field=models.SlugField(help_text=b'URL that the document should map to (the slug)', max_length=75, null=True, db_index=False),
        ),
        migrations.AlterField(
            model_name='opinioncluster',
            name='slug',
            field=models.SlugField(help_text=b'URL that the document should map to (the slug)', max_length=75, null=True, db_index=False),
        ),
    ]
|
Azure/azure-sdk-for-python | sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/v2020_12_01/_vnet_client.py | Python | mit | 3,611 | 0.003323 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from ._configuration import VnetClientConfiguration
from .operations import ManagedPrivateEndpointsOperations
from . import models
class VnetClient(object):
    """VnetClient.

    :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations
    :vartype managed_private_endpoints: azure.synapse.managedprivateendpoints.v2020_12_01.operations.ManagedPrivateEndpointsOperations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
    :type endpoint: str
    """

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        endpoint,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # The endpoint itself is the base URL; it is substituted per-request
        # in _send_request via path_format_arguments.
        base_url = '{endpoint}'
        self._config = VnetClientConfiguration(credential, endpoint, **kwargs)
        self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)

        # Register every model class with the (de)serializer.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._serialize.client_side_validation = False
        self._deserialize = Deserializer(client_models)

        self.managed_private_endpoints = ManagedPrivateEndpointsOperations(
            self._client, self._config, self._serialize, self._deserialize)

    def _send_request(self, http_request, **kwargs):
        # type: (HttpRequest, Any) -> HttpResponse
        """Runs the network request through the client's chained policies.

        :param http_request: The network request you want to make. Required.
        :type http_request: ~azure.core.pipeline.transport.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.pipeline.transport.HttpResponse
        """
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
        stream = kwargs.pop("stream", True)
        pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs)
        return pipeline_response.http_response

    def close(self):
        # type: () -> None
        # Release the underlying pipeline/transport resources.
        self._client.close()

    def __enter__(self):
        # type: () -> VnetClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
|
skosukhin/spack | var/spack/repos/builtin/packages/py-rope/package.py | Python | lgpl-2.1 | 1,561 | 0.000641 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it a | nd/or modify
# it under the terms of the GNU Lesser General Public License (as
# pu | blished by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyRope(PythonPackage):
    """a python refactoring library."""

    # Upstream project page and canonical PyPI source tarball.
    homepage = "https://github.com/python-rope/rope"
    url      = "https://pypi.io/packages/source/r/rope/rope-0.10.5.tar.gz"

    # Known release with its md5 checksum for download verification.
    version('0.10.5', '21882fd7c04c29d09f75995d8a088be7')

    # setuptools is needed only at build time.
    depends_on('py-setuptools', type='build')
|
numerigraphe/stock-logistics-barcode | __unported__/base_gs1_barcode/__openerp__.py | Python | agpl-3.0 | 2,985 | 0.001006 | # -*- coding: utf-8 -*-
##############################################################################
#
# This module is copyright (C) 2012 Numérigraphe SARL. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo module manifest. Fixes: stray dataset delimiters that had
# split "GS1-Datamatrix is" and "barcodes", plus typos in the user-facing
# description text ("Datamtrix", "containg", "barcdode", grammar).
{
    'name': 'Decoding API for GS1-128 (aka UCC/EAN-128) and GS1-Datamatrix',
    'version': '1.0',
    'author': u'Numérigraphe',
    'website': 'http://numerigraphe.com',
    'category': 'Generic Modules/Inventory Control',
    'description': """
This module provides an API for decoding the content of structured barcodes \
like GS1-128 or GS1-Datamatrix.
GS1-128 (formerly known as UCC-128, EAN 128 or UCC/EAN-128), and GS1-Datamatrix \
are standards for encoding item identification and logistics data.
Physically, GS1-128 is represented as a 1-dimension Code-128 barcode and \
GS1-Datamatrix is represented as a 2-dimensions Datamatrix barcode.
When those barcodes are read, their content can be decoded into multiple values \
using a set of standard "Application Identifiers". For example, most pharmacy \
items have a GS1-Datamatrix barcode containing their GTIN, lot number and \
expiry date.
This module does not directly allow you to print or scan barcodes.
Instead, the focus of this module is on decoding the data contained in \
barcodes. To this end, it provides objects to fine-tune the Application Identifiers and
the associated data types.
Caveat Emptor: when an "Application Identifiers" has variable-length data, \
the barcodes must contain a special character (<GS>, group separator) \
but as this is not an ASCII character. Some barcode readers will not include \
this character: decoding the structured data will then be impossible. Other \
readers will translate GS1 to ASCII character 29, but this character is not \
printable, and some applications may not record it. Yet other readers will \
let you configure how to map <GS>, which may help improve compatibility.
""",
    'depends': [
        'product',
    ],
    'init_xml': [],
    'update_xml': [
        'gs1_barcode_view.xml',
        'res_users_view.xml',
        'data/gs1_barcode.csv',
        "security/ir.model.access.csv",
    ],
    'test': [
        'test/gs1_barcode_test.yml'
    ],
    'active': False,
    'installable': False,
    'license': 'GPL-3',
}
|
djeof-1/VOXINN | voxinn/procedural/ProceduralTerrain.py | Python | mit | 1,201 | 0.011657 | import random
from djinn import *
import os
import sys
class ProceduralTerrain:
def __init__(self, heightmapList):
self.heightmapList = heightmapList
def generateHill(self):
count = 0
num = 1
randindex = random.randint(0,len(self.hei | ghtmapList)-1)
while ind < len(self.heightmapList):
if ind == randindex:
i = ind
for j in range(len(9)):
for index in range((9-count)/2):
self.heightmapList[i].append(0)
for index in range(9 - 2 * ((9 - count)/2)):
self.heightmapList[i].append(nu | m)
num += 1
num -= 1
while num != 0:
self.heightmapList[i].append(num)
num -= 1
for index in range((9-count)/2):
self.heightmapList[i].append(0)
count -= 2
num = 1
i += 1
flag = 1
ind = i
if flag == 0:
for k in range(len(9)):
self.heightmapList[ind].append(0)
ind += 1
|
cleverZY/Helloworld | jizhidezy.py | Python | apache-2.0 | 513 | 0.038055 | #coding=utf-8
from PIL import Image#需要p | illow库
import glob, os
in_dir ='background'#源图片目录
out_dir = in_dir+'_out'#转换后图片目录
if not os.path.exists(out_dir): os.mkdir(out_dir)
#图片批处理
def main():
    """Batch-resize every image in `in_dir` to 1920px wide, keeping aspect ratio."""
    for src in glob.glob(in_dir + '/*'):
        name = os.path.basename(src)
        img = Image.open(src)
        width, height = img.size
        # Scale to a fixed 1920 width; height follows the original ratio.
        scaled = img.resize((int(1920), int(1.0 * height / width * 1920)))
        scaled.save(os.path.join(out_dir, name))
# Run the batch resize only when executed as a script. (The call was
# garbled by a stray dataset delimiter: "main | ()".)
if __name__ == '__main__':
    main()
baiyubin/python_practice | pegs.py | Python | apache-2.0 | 2,445 | 0.001227 | # n peg hanoi tower problem, use bfs instead of dfs, and don't have a full
# analytical solution
import sys
import copy
def solutionWorks(currentSolution, stacksAfterSolution, initialStacks, finalStacks):
    """Replay the moves in *currentSolution* and test whether the goal is reached.

    Each move is a pair ``[src, dst]``: the top disc of peg ``src`` is moved
    onto peg ``dst``. ``stacksAfterSolution`` is mutated in place, so the
    caller must pass a fresh deep copy of the starting configuration.

    Args:
        currentSolution: ordered list of ``[src, dst]`` peg-index pairs.
        stacksAfterSolution: working copy of the pegs; mutated in place.
        initialStacks: unused; kept for call-site compatibility.
        finalStacks: goal configuration to compare against.

    Returns:
        True if applying every move yields exactly ``finalStacks``.
    """
    for src, dst in currentSolution:
        stacksAfterSolution[dst].append(stacksAfterSolution[src].pop())
    # Direct list comparison is equivalent to (and cheaper than) comparing
    # the str() representations, since the pegs hold plain ints.
    return stacksAfterSolution == finalStacks
def stepLegitimate(stacksAfterSolution, i, j):
    """Return True if the top disc of peg ``i`` may legally move onto peg ``j``.

    A move is legal when peg ``i`` is non-empty and peg ``j`` is either empty
    or topped by a larger disc (discs are numbered by size, 1 = smallest).
    (The original ``return`` keyword was garbled by a stray dataset delimiter.)
    """
    source = stacksAfterSolution[i]
    target = stacksAfterSolution[j]
    if not source:
        return False  # nothing to move from peg i
    if target and source[-1] > target[-1]:
        return False  # cannot place a larger disc on a smaller one
    return True
# DFS cannot work here: we need the shortest move sequence, so candidate
# solutions are explored breadth-first.
def moveDiscs(initialStacks, finalStacks, results, max_moves=7):
    """Breadth-first search for a move sequence turning initialStacks into finalStacks.

    Args:
        initialStacks: list of pegs (index 0 unused); each peg is a list of
            disc numbers, bottom first.
        finalStacks: goal configuration in the same format.
        results: output list; the moves of the first (shortest) solution
            found are appended to it as ``[src, dst]`` pairs.
        max_moves: search depth limit (default 7, matching the previous
            hard-coded bound).
    """
    import collections
    K = len(initialStacks) - 1  # number of usable pegs (1-indexed)
    queue = collections.deque([[]])
    while queue:
        candidate = copy.deepcopy(queue.popleft())
        if len(candidate) > max_moves:
            continue
        # solutionWorks mutates the stacks while replaying, so give it a copy.
        stacks = copy.deepcopy(initialStacks)
        if solutionWorks(candidate, stacks, initialStacks, finalStacks):
            results.extend(list(move) for move in candidate)
            return
        # Enqueue every legal one-move extension of this candidate.
        for i in range(1, K + 1):
            for j in range(1, K + 1):
                if j != i and stepLegitimate(stacks, i, j):
                    queue.append(candidate + [[i, j]])
if __name__ == '__main__':
    # Demo input: N discs on K pegs (stdin parsing left commented out).
    # N, K = [int(x) for x in sys.stdin.readline().split()]
    N, K = 6, 4
    initialStacks = [[] for x in range(K + 1)]
    finalStacks = [[] for x in range(K + 1)]
    # initial = [int(x) for x in sys.stdin.readline().split()]
    # final = [int(x) for x in sys.stdin.readline().split()]
    initial = [4, 2, 4, 3, 1, 1]
    final = [1, 1, 1, 1, 1, 1]
    # initial[i] / final[i] give the peg holding disc i+1; build each peg
    # bottom-up by iterating from the largest disc down to the smallest.
    for i in range(N - 1, -1, -1):
        initialStacks[initial[i]].append(i + 1)
    for i in range(N - 1, -1, -1):
        finalStacks[final[i]].append(i + 1)
    print(initialStacks)
    print(finalStacks)
    # Search for a solution and print one move per line as "src dst".
    results = []
    moveDiscs(initialStacks, finalStacks, results)
    print(len(results))
    for i in range(len(results)):
        print(results[i][0], results[i][1])
|
whutch/cwmud | cwmud/core/commands/movement/secondary.py | Python | mit | 2,232 | 0 | # -*- coding: utf-8 -*-
"""Secondary movement commands."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2017 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
from .. import Command, COMMANDS
from ...characters import CharacterShell
@COMMANDS.register
class NortheastCommand(Command):
    """A command to allow a character to move northeast."""

    def _action(self):
        # Moving requires an active character that is currently in a room.
        character = self.session.char
        if not character:
            self.session.send("You're not playing a character!")
            return
        if not character.room:
            self.session.send("You're not in a room!")
            return
        # Northeast combines east (+x) with north (+y).
        character.move_direction(x=1, y=1)
@COMMANDS.register
class NorthwestCommand(Command):
    """A command to allow a character to move northwest."""

    def _action(self):
        session = self.session
        actor = session.char
        # Bail out with a message unless we have a character in a room.
        if not actor:
            session.send("You're not playing a character!")
            return
        if not actor.room:
            session.send("You're not in a room!")
            return
        # Northwest combines west (-x) with north (+y).
        actor.move_direction(x=-1, y=1)
@COMMANDS.register
class SoutheastCommand(Command):
    """A command to allow a character to move southeast."""

    def _action(self):
        mover = self.session.char
        if not mover:
            self.session.send("You're not playing a character!")
            return
        if not mover.room:
            self.session.send("You're not in a room!")
            return
        # Southeast combines east (+x) with south (-y).
        mover.move_direction(x=1, y=-1)
# The decorator line was garbled by a stray dataset delimiter
# ("@COMM | ANDS.register"); reconstructed to match the sibling commands.
@COMMANDS.register
class SouthwestCommand(Command):
    """A command to allow a character to move southwest."""

    def _action(self):
        # Moving requires an active character that is currently in a room.
        char = self.session.char
        if not char:
            self.session.send("You're not playing a character!")
            return
        if not char.room:
            self.session.send("You're not in a room!")
            return
        # Southwest combines west (-x) with south (-y).
        char.move_direction(x=-1, y=-1)
# Bind each command to its long and short verb forms. (The first line was
# garbled by a stray dataset delimiter: "NortheastCo | mmand".)
CharacterShell.add_verbs(NortheastCommand, "northeast", "ne")
CharacterShell.add_verbs(NorthwestCommand, "northwest", "nw")
CharacterShell.add_verbs(SoutheastCommand, "southeast", "se")
CharacterShell.add_verbs(SouthwestCommand, "southwest", "sw")
|
jorilallo/coinbase-python | tests/test_model.py | Python | mit | 39,379 | 0.010437 | # coding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import re
import unittest2
import warnings
import httpretty as hp
from coinbase.client import Client
from coinbase.client import OAuthClient
from coinbase.error import APIError
from coinbase.error import TwoFactorTokenRequired
from coinbase.error import UnexpectedDataFormatError
from coinbase.model import APIObject
from coinbase.model import Account
from coinbase.model import Address
from coinbase.model import Button
from coinbase.model import Money
from coinbase.model import Order
from coinbase.model import Transaction
from coinbase.model import Transfer
# Hide all warning output.
warnings.showwarning = lambda *a, **k: None
# Dummy API key values for use in tests
api_key = 'fakeapikey'
api_secret = 'fakeapisecret'
client_id = 'fakeid'
client_secret = 'fakesecret'
access_token = 'fakeaccesstoken'
refresh_token = 'fakerefreshtoken'
class TestAccount(unittest2.TestCase):
    @hp.activate
    def test_delete(self):
        # Account wired to a client with dummy credentials; httpretty
        # intercepts the HTTP layer, so no real requests are made.
        account = Account(Client(api_key, api_secret))
        account.id = 'fakeaccountid'
        def server_response(request, uri, headers):
            # DELETE must target the account id with an empty body. The reply
            # payload `data` is read late-bound from the enclosing scope, so
            # each reassignment below changes the next response.
            self.assertTrue(uri.endswith(account.id))
            self.assertEqual(request.body.decode(), '')
            return (200, headers, json.dumps(data))
        hp.register_uri(hp.DELETE, re.compile('.*'), body=server_response)
        # API-level failure ({'success': False}) surfaces as APIError.
        data = {'success': False}
        with self.assertRaises(APIError):
            account.delete()
        # On success, delete() returns None.
        data = {'success': True}
        self.assertIsNone(account.delete())
    @hp.activate
    def test_set_primary(self):
        account = Account(Client(api_key, api_secret))
        account.id = 'fakeaccountid'
        # Sentinel starting value so we can detect any change on failure.
        account.primary = None
        def server_response(request, uri, headers):
            # Expect POST to <id>/primary with an empty body; `data` is
            # read late-bound from the enclosing scope.
            self.assertTrue(uri.endswith('%s/primary' % account.id))
            self.assertEqual(request.body.decode(), '')
            return (200, headers, json.dumps(data))
        hp.register_uri(hp.POST, re.compile('.*'), body=server_response)
        data = {'success': False}
        with self.assertRaises(APIError):
            account.set_primary()
        self.assertIsNone(account.primary) # Primary status should not have changed.
        data = {'success': True}
        account.set_primary()
        self.assertTrue(account.primary) # Primary status should have changed.
    @hp.activate
    def test_modify(self):
        account = Account(Client(api_key, api_secret))
        account.id = 'fakeaccountid'
        # Remember the starting name so we can assert it is untouched on failure.
        account.name = initial_name = 'Wallet'
        def server_response(request, uri, headers):
            # The PUT body must be JSON of the form {'account': {'name': ...}}.
            # `data` and `new_name` are read late-bound from the enclosing scope.
            self.assertTrue(uri.endswith(account.id))
            try: request_data = json.loads(request.body.decode())
            except ValueError: raise AssertionError("request body was malformed.")
            name = request_data.get('account', {}).get('name')
            assert name == new_name
            return (200, headers, json.dumps(data))
        new_name = 'Vault'
        # API failure: the local attribute must keep its original value.
        data = {'success': False, 'account': {'name': new_name}}
        hp.register_uri(hp.PUT, re.compile('.*'), body=server_response)
        with self.assertRaises(APIError):
            account.modify(new_name)
        self.assertEqual(account.name, initial_name)
        # Success: the account object is updated with the returned name.
        data = {'success': True, 'account': {'name': new_name}}
        account.modify(new_name)
        self.assertEqual(account.name, new_name)
        # A malformed 'account' payload raises UnexpectedDataFormatError.
        data = {'success': True, 'account': 'nottherighttype'}
        with self.assertRaises(UnexpectedDataFormatError):
            account.modify(new_name)
    @hp.activate
    def test_get_balance(self):
        account = Account(Client(api_key, api_secret))
        account.id = 'fakeaccountid'
        # Unique sentinel object: a lambda compares unequal to anything else,
        # so we can verify the attribute is left untouched below.
        account.balance = initial_balance = lambda: None # Initial value
        def server_response(request, uri, headers):
            # Expect GET to <id>/balance with an empty body.
            self.assertTrue(uri.endswith('%s/balance' % account.id))
            self.assertEqual(request.body.decode(), '')
            return (200, headers, json.dumps(data))
        data = {'currency': 'USD', 'amount': '10.00'}
        hp.register_uri(hp.GET, re.compile('.*'), body=server_response)
        balance = account.get_balance()
        self.assertIsInstance(balance, Money)
        # Fetching the current balance should not modify the balance attribute on
        # the Account object.
        self.assertEqual(account.balance, initial_balance)
    @hp.activate
    def test_get_address(self):
        account = Account(Client(api_key, api_secret))
        account.id = 'fakeaccountid'
        def server_response(request, uri, headers):
            # Expect GET to <id>/address with an empty body; the reply
            # payload `data` is read late-bound from the enclosing scope.
            self.assertTrue(uri.endswith('%s/address' % account.id))
            self.assertEqual(request.body.decode(), '')
            return (200, headers, json.dumps(data))
        hp.register_uri(hp.GET, re.compile('.*'), body=server_response)
        # API-level failure surfaces as APIError.
        data = {'address': 'a',
                'callback_url': None,
                'label': None,
                'success': False}
        with self.assertRaises(APIError):
            account.get_address()
        # Success without the expected 'address' key is a format error.
        data = {'badkey': 'bar',
                'success': True}
        with self.assertRaises(UnexpectedDataFormatError):
            account.get_address()
        # Well-formed success returns an Address model object.
        data = {'address': 'a',
                'callback_url': None,
                'label': None,
                'success': True}
        address = account.get_address()
        self.assertIsInstance(address, Address)
    @hp.activate
    def test_get_addresses(self):
        account = Account(Client(api_key, api_secret))
        account.id = 'fakeaccountid'
        def server_response(request, uri, headers):
            # The request body must at least be valid JSON; the canned reply
            # below is a single page holding three address records.
            try: json.loads(request.body.decode())
            except ValueError: raise AssertionError("request body was malformed.")
            data = {
                'total_count': 3,
                'current_page': 1,
                'num_pages': 1,
                'addresses': [
                    {'address': {
                        'label': '',
                        'address': 'foo',
                        'callback_url': '',
                        'id': '1'
                    }},
                    {'address': {
                        'label': '',
                        'address': 'foo',
                        'callback_url': '',
                        'id': '2'
                    }},
                    {'address': {
                        'label': '',
                        'address': 'foo',
                        'callback_url': '',
                        'id': '3'
                    }},
                ],
            }
            return (200, headers, json.dumps(data))
        hp.register_uri(hp.GET, re.compile('.*'), body=server_response)
        # Every entry in the paged response is deserialized to an Address.
        response = account.get_addresses()
        self.assertIsInstance(response, APIObject)
        self.assertEqual(len(response.addresses), 3)
        for address in response.addresses:
            self.assertIsInstance(address, Address)
@hp.activate
def test_create_address(self):
def server_response(request, uri, headers):
try: request_data = json.loads(request.body.decode())
except ValueError: raise AssertionError("request body | was malformed.")
address = request_data.get('address')
assert isinstance(address, dict)
if label is not None:
assert address.get('label') == label
if callback_url is not None:
assert address.get('callback_url') == callback_url
return (200, headers, json.dumps(data))
account = Account(Client(api_key, api_secret))
accou | nt.id = 'fakeaccountid'
hp.register_uri(hp.POST, re.compile('.*'), body=server_response)
label, callback_url = ('label', 'http://example.com/')
data = {'success': False,
'address': 'foo',
'label': label,
'callback_url': callback_url}
with self.assertRaises(APIError):
account.create_address(label, callback_url)
label, callback_url = ('label', 'http://example.com/')
data = {'success': True, 'arbkey': 'bar'}
with self.assertRaises(UnexpectedDataFormatError):
account.create_address(label, callback_url)
label, callback_url = ('label', 'http://example.com/')
data = {'success': True,
'address': 'foo',
'label': label,
'callback_url': callback_url}
address = account.create_address(label, callback_url)
self.assertIsInstance(address, Address)
label, callback_url = (None, None)
data = {'success': True,
'address': 'foo',
'label': label,
'callback_url': callback_url}
address = account.create_address()
self.assertIsInstance(address, Address)
@hp.activate
def test_get_transactions(self):
accoun |
EvanK/ansible | test/runner/lib/constants.py | Python | gpl-3.0 | 379 | 0.005277 | """Constants used by ansible-test. Imports should not be used in this file."""
# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
SOFT_RLIMIT_NOF | ILE = 1024
|
BeerTheorySociety/timetable | docs/conf.py | Python | bsd-3-clause | 9,474 | 0.006122 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# timetable documentation build configuration file, created by
# sphinx-quickstart on Tue Aug 16 16:32:24 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project. (The author string was garbled by
# a stray dataset delimiter: 'Zach | Sailer'.)
project = 'timetable'
copyright = '2016, Zach Sailer'
author = 'Zach Sailer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'timetable v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'timetabledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'timetable.tex', 'timetable Documentation',
'Zach Sailer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after ex |
sftd/AllBlue-Blender-Tools | ab_addons_tools.py | Python | gpl-2.0 | 4,848 | 0.004538 | # ab_addons_tools.py Copyright (C) 2012, Jakub Zolcik
#
# Searches through files in file browser by name.
#
# ***** BEGIN GPL LICENSE BLOCK *****
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENCE BLOCK *****
# Blender add-on metadata consumed by the add-on manager (display name,
# minimum Blender version, menu location, docs/tracker links, category).
bl_info = {
    "name": "Add-ons Tools",
    "author": "Jakub Zolcik",
    "version": (0, 0, 1),
    "blender": (2, 72, 0),
    "location": "File",
    "description": "Allows enabling add-ons according to *.blend files.",
    "warning": "",
    "wiki_url": "https://studio.allblue.pl/wiki/wikis/blender/addons-tools",
    "tracker_url": "https://github.com/sftd/AllBlue-Blender-Tools",
    "category": "System"
}
import bpy
import addon_utils
from bpy.app.handlers import persistent
class AddonsToolsPreferences(bpy.types.AddonPreferences):
    # bl_idname must equal the add-on's module name so Blender can locate
    # these preferences.
    bl_idname = __name__

    # Whether add-ons recorded in a .blend should be re-enabled on load.
    # NOTE(review): the property label is passed positionally here; confirm
    # this matches the bpy.props.BoolProperty signature for Blender 2.72.
    load = bpy.props.BoolProperty('Load Add-ons automatically', default=True)

    def draw(self, context):
        # Preferences panel: a caption plus the auto-load toggle.
        layout = self.layout
        layout.label('Addons Tools Preferences')
        layout.prop(self, 'load', text='Load Add-ons automatically')
# Collection item storing one enabled add-on's module name inside the scene.
class ADTAddonItem(bpy.types.PropertyGroup):
    module = bpy.props.StringProperty(name="Module", default='')
def adt_enable_addons():
    """Enable every add-on recorded on the first scene that is not already on."""
    context = bpy.context
    scene = bpy.data.scenes[0]
    already_enabled = context.user_preferences.addons.keys()
    for entry in scene.adt_addons:
        if entry.module not in already_enabled:
            bpy.ops.wm.addon_enable(module=entry.module)
# Operator exposed in the File menu; re-enables the add-ons saved in the file.
# (Comments rather than a class docstring: Blender would surface a docstring
# as the operator's tooltip, which would change user-visible behavior.)
class ADTEnableAddonsOperator(bpy.types.Operator):
    bl_idname = "wm.adt_enable_addons"
    bl_label = "ADT Enable Add-ons"

    def execute(self, context):
        # Delegate to the module-level helper so menu and operator share one path.
        adt_enable_addons()
        return {"FINISHED"}
def adt_menu_draw(self, context):
    """Draw the ADT entries prepended to the File menu (INFO_MT_file)."""
    self.layout.operator("wm.adt_enable_addons", icon='LOAD_FACTORY')
    # Checkbox mirroring whether the add-on list will be saved with the file;
    # the icon is switched manually to reflect the current state.
    if (context.window_manager.adt_save):
        self.layout.prop(context.window_manager, "adt_save", text="ADT Save Add-ons", icon='CHECKBOX_HLT')
    else:
        self.layout.prop(context.window_manager, "adt_save", text="ADT Save Add-ons", icon='CHECKBOX_DEHLT')
    self.layout.separator()
def adt_save_update(self, context):
    """Update callback: mirror the window-manager flag onto the first scene."""
    scene = bpy.data.scenes[0]
    scene.adt_save = context.window_manager.adt_save
@persistent
def adt_save_pre_handler(dummy):
    """save_pre handler: record the currently enabled add-ons on the scene.

    The list is cleared first; nothing is recorded when the user disabled
    the 'ADT Save Add-ons' toggle.
    """
    context = bpy.context
    scene = bpy.data.scenes[0]
    scene.adt_save = context.window_manager.adt_save
    scene.adt_addons.clear()
    if (not context.window_manager.adt_save):
        return
    for addon in context.user_preferences.addons:
        adt_addon = scene.adt_addons.add()
        adt_addon.module = addon.module
@persistent
def adt_load_post_handler(dummy):
    """load_post handler: restore the per-file 'save add-ons' flag and,
    when the preference is set, re-enable the add-ons recorded in the file.
    """
    context = bpy.context
    # Look the add-on up under its runtime module name (__name__), matching
    # register(); the previous hard-coded 'ab_addons_tools' key broke when
    # the add-on was installed under a different folder name.
    adt_preferences = context.user_preferences.addons[__name__].preferences
    context.window_manager.adt_save = bpy.data.scenes[0].adt_save
    if (adt_preferences.load):
        adt_enable_addons()
def register():
    """Register classes, scene/window-manager properties, menu entry and handlers."""
    # Register these classes before the properties below reference them.
    bpy.utils.register_class(AddonsToolsPreferences)
    bpy.utils.register_class(ADTAddonItem)
    bpy.utils.register_class(ADTEnableAddonsOperator)
    bpy.types.INFO_MT_file.prepend(adt_menu_draw)
    # Properties
    # Persisted per-file state lives on Scene; the WindowManager copy drives
    # the UI checkbox and syncs back to the scene via adt_save_update.
    bpy.types.Scene.adt_addons = bpy.props.CollectionProperty(type=ADTAddonItem)
    bpy.types.Scene.adt_save = bpy.props.BoolProperty('ADT Save Add-ons', default=True)
    bpy.types.WindowManager.adt_save = bpy.props.BoolProperty('ADT Save Add-ons', default=True, update=adt_save_update)
    bpy.app.handlers.save_pre.append(adt_save_pre_handler)
    bpy.app.handlers.load_post.append(adt_load_post_handler)
def unregister():
    """Undo everything register() did: classes, menu entry, handlers, properties."""
    bpy.utils.unregister_class(AddonsToolsPreferences)
    bpy.utils.unregister_class(ADTAddonItem)
    bpy.utils.unregister_class(ADTEnableAddonsOperator)
    bpy.types.INFO_MT_file.remove(adt_menu_draw)
    bpy.app.handlers.save_pre.remove(adt_save_pre_handler)
    bpy.app.handlers.load_post.remove(adt_load_post_handler)
    del bpy.types.Scene.adt_addons
    del bpy.types.Scene.adt_save
    del bpy.types.WindowManager.adt_save
|
yarikoptic/NiPy-OLD | nipy/io/imageformats/compat.py | Python | bsd-3-clause | 7,618 | 0.011158 | #emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
#ex: set sts=4 ts=4 sw=4 et:
### ### # | ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyNIfTI | package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""The module provides the NiftiImage interface, which is backward-compatible
to the previous C-based implementation.
"""
# Docstring markup format used throughout this module.
__docformat__ = 'restructuredtext'
import numpy as N
from nipy.io.imageformats.nifti1 import Nifti1Image
from nipy.io.imageformats.volumeutils import allopen
class NiftiImage(Nifti1Image):
    """Backward-compatible NiftiImage interface built on Nifti1Image.

    Only construction from a filename (with lazy data loading), a handful
    of header accessors, saving, and ``getDataArray`` are implemented; the
    remainder of the original C-based API raises NotImplementedError.
    Python 2 only (uses ``unicode`` and old raise syntax).
    """
    def __init__(self, source, header=None, loadmeta=False):
        # Construction from an in-memory array was never ported.
        if type(source) == N.ndarray:
            raise NotImplementedError
        elif type(source) in (str, unicode):
            # load image
            files = Nifti1Image.filespec_to_files(source)
            img = Nifti1Image.from_files(files)
            # and init from temp image without assigning the data, for lazy
            # loading
            Nifti1Image.__init__(self,
                                 None,
                                 img.get_affine(),
                                 img.get_header(),
                                 img.extra)
            # store filenames? yes! otherwise get_data() will refuse to access
            # the data since it doesn't know where to get the image from
            self._files = files
            # XXX handle original 'header' argument
        else:
            raise ValueError, \
                  "Unsupported source type. Only NumPy arrays and filename " \
                  + "string are supported."
    def asDict(self):
        """Return the image header (exposed as the 'header' property)."""
        return self.get_header()
    def updateFromDict(self, hdrdict):
        raise NotImplementedError
    def vx2q(self, coord):
        raise NotImplementedError
    def vx2s(self, coord):
        raise NotImplementedError
    def getVoxDims(self):
        """Return the spatial (first three) zooms from the header."""
        return self.get_header().get_zooms()[:3]
    def setVoxDims(self, value):
        raise NotImplementedError
    def setPixDims(self, value):
        raise NotImplementedError
    def getPixDims(self):
        """Return header pixdim entries 1..7 (entry 0 is qfac, not a dim)."""
        return self.get_header()['pixdim'][1:]
    def getExtent(self):
        """Return the data shape recorded in the header."""
        return self.get_header().get_data_shape()
    def getVolumeExtent(self):
        raise NotImplementedError
    def getTimepoints(self):
        raise NotImplementedError
    def getRepetitionTime(self):
        # The fourth zoom is the TR for 4D images -- raises IndexError for 3D.
        return self.get_header().get_zooms()[3]
    def setRepetitionTime(self, value):
        raise NotImplementedError
    def setSlope(self, value):
        raise NotImplementedError
    def setIntercept(self, value):
        raise NotImplementedError
    def setDescription(self, value):
        raise NotImplementedError
    def setXFormCode(self, xform, code):
        raise NotImplementedError
    def setQFormCode(self, code):
        raise NotImplementedError
    def getQFormCode(self, as_string = False):
        raise NotImplementedError
    def getSFormCode(self, as_string = False):
        raise NotImplementedError
    def setSFormCode(self, code):
        raise NotImplementedError
    def getSForm(self):
        raise NotImplementedError
    def setSForm(self, m, code='mni152'):
        raise NotImplementedError
    def getInverseSForm(self):
        raise NotImplementedError
    def getQForm(self):
        raise NotImplementedError
    def getInverseQForm(self):
        raise NotImplementedError
    def setQForm(self, m, code='scanner'):
        raise NotImplementedError
    def setQuaternion(self, value, code='scanner'):
        raise NotImplementedError
    def getQuaternion(self):
        raise NotImplementedError
    def setQOffset(self, value, code='scanner'):
        raise NotImplementedError
    def getQOffset(self):
        raise NotImplementedError
    def setQFac(self, value, code='scanner'):
        raise NotImplementedError
    def getQOrientation(self, as_string = False):
        raise NotImplementedError
    def getSOrientation(self, as_string = False):
        raise NotImplementedError
    def getXYZUnit(self, as_string = False):
        raise NotImplementedError
    def setXYZUnit(self, value):
        raise NotImplementedError
    def getTimeUnit(self, as_string = False):
        raise NotImplementedError
    def setTimeUnit(self, value):
        raise NotImplementedError
    def getFilename(self):
        raise NotImplementedError
    def save(self, filename=None, filetype = 'NIFTI', update_minmax=True):
        """Save the image, optionally under a new filename.

        NOTE(review): when *filename* is None, None is passed straight to
        to_files() -- presumably it falls back to the stored files; confirm.
        """
        if not filename is None:
            filename = self.filespec_to_files(filename)
        self.to_files(filename)
    def copy(self):
        raise NotImplementedError
    def load(self):
        raise NotImplementedError
    def unload(self):
        raise NotImplementedError
    def updateCalMinMax(self):
        raise NotImplementedError
    def updateHeader(self, hdrdict):
        raise NotImplementedError
    def getScaledData(self):
        raise NotImplementedError
    def setDataArray(self, data):
        raise NotImplementedError
    def getDataArray(self):
        # we need the axis order reversed
        return Nifti1Image.get_data(self).T
    def asarray(self, copy = True):
        raise NotImplementedError
    def setFilename(self, filename, filetype = 'NIFTI'):
        raise NotImplementedError
    # NOTE(review): getFilename is defined twice in this class; this later
    # definition wins (both raise NotImplementedError, so behavior is the same).
    def getFilename(self):
        raise NotImplementedError
    #
    # class properties
    #
    # # read only
    data = property(fget=getDataArray) #, fset=setDataArray)
    # nvox = property(fget=lambda self: self.__nimg.nvox)
    # max = property(fget=lambda self: self.__nimg.cal_max)
    # min = property(fget=lambda self: self.__nimg.cal_min)
    # sform_inv = property(fget=getInverseSForm)
    # qform_inv = property(fget=getInverseQForm)
    extent = property(fget=getExtent)
    # volextent = property(fget=getVolumeExtent)
    # timepoints = property(fget=getTimepoints)
    # raw_nimg = property(fget=lambda self: self.__nimg)
    # filename = property(fget=getFilename)
    #
    # # read and write
    # filename = property(fget=getFilename, fset=setFilename)
    # bbox = property(fget=imgfx.getBoundingBox, fset=imgfx.crop)
    #
    # slope = property(fget=lambda self: self.__nimg.scl_slope,
    #                  fset=setSlope)
    # intercept = property(fget=lambda self: self.__nimg.scl_inter,
    #                      fset=setIntercept)
    voxdim = property(fget=getVoxDims, fset=setVoxDims)
    pixdim = property(fget=getPixDims, fset=setPixDims)
    # description = property(fget=lambda self: self.__nimg.descrip,
    #                        fset=setDescription)
    header = property(fget=asDict)
    # sform = property(fget=getSForm, fset=setSForm)
    # sform_code = property(fget=getSFormCode, fset=setSFormCode)
    # qform = property(fget=getQForm, fset=setQForm)
    # qform_code = property(fget=getQFormCode, fset=setQFormCode)
    # quatern = property(fget=getQuaternion, fset=setQuaternion)
    # qoffset = property(fget=getQOffset, fset=setQOffset)
    # qfac = property(fget=lambda self: self.__nimg.qfac, fset=setQFac)
    rtime = property(fget=getRepetitionTime, fset=setRepetitionTime)
    # xyz_unit = property(fget=getXYZUnit, fset=setXYZUnit)
    # time_unit = property(fget=getTimeUnit, fset=setTimeUnit)
|
sgenoud/scikit-learn | sklearn/cluster/dbscan_.py | Python | bsd-3-clause | 7,355 | 0.000272 | # -*- coding: utf-8 -*-
"""
DBSCAN: Density-Based Spatial Clustering of Applications with Noise
"""
# Author: Robert Layton <robertlayton@gmail.com>
#
# License: BSD
import warnings
import numpy as np
from ..base import BaseEstimator
from ..metrics import pairwise_distances
from ..utils import check_random_state
def dbscan(X, eps=0.5, min_samples=5, metric='euclidean',
           random_state=None):
    """Perform DBSCAN clustering from vector array or distance matrix.

    Parameters
    ----------
    X: array [n_samples, n_samples] or [n_samples, n_features]
        Array of distances between samples, or a feature array. The array
        is treated as a feature array unless the metric is 'precomputed'.
    eps: float, optional
        The maximum distance between two samples for them to be considered
        part of the same neighborhood.
    min_samples: int, optional
        The number of samples in a neighborhood required for a point to be
        considered a core point.
    metric: string, or callable
        The metric used when calculating distances between instances. Any
        option accepted by metrics.pairwise.calculate_distance is allowed.
        'precomputed' means X is a square distance matrix.
    random_state: numpy.RandomState, optional
        The generator used to randomize the order in which samples are
        visited. Defaults to numpy.random.

    Returns
    -------
    core_samples: array [n_core_samples]
        Indices of core samples.
    labels : array [n_samples]
        Cluster labels for each point. Noisy samples are given the label -1.

    Notes
    -----
    See examples/plot_dbscan.py for an example.

    References
    ----------
    Ester, M., H. P. Kriegel, J. Sander, and X. Xu, "A Density-Based
    Algorithm for Discovering Clusters in Large Spatial Databases with
    Noise". In: Proceedings of the 2nd International Conference on
    Knowledge Discovery and Data Mining, Portland, OR, AAAI Press,
    pp. 226-231. 1996
    """
    X = np.asarray(X)
    n_samples = X.shape[0]
    # Samples are visited in a random order.
    random_state = check_random_state(random_state)
    index_order = np.arange(n_samples)
    random_state.shuffle(index_order)
    D = pairwise_distances(X, metric=metric)
    # Precompute the eps-neighborhood of every sample. Each sample is
    # trivially a member of its own neighborhood; that self-membership is
    # harmless for the expansion logic below.
    neighborhoods = [np.where(row <= eps)[0] for row in D]
    # Every sample starts out labeled as noise (-1).
    labels = -np.ones(n_samples)
    core_samples = []
    # Label that will be assigned to the next cluster discovered.
    label_num = 0
    for index in index_order:
        # Skip samples already claimed by a cluster and non-core points.
        if labels[index] != -1 or len(neighborhoods[index]) < min_samples:
            continue
        core_samples.append(index)
        labels[index] = label_num
        # Expand the new cluster outward, one frontier of core points at a time.
        candidates = [index]
        while candidates:
            next_candidates = []
            for candidate in candidates:
                # Claim every still-unlabeled neighbor for this cluster.
                unclaimed = np.where(labels[neighborhoods[candidate]] == -1)[0]
                unclaimed = neighborhoods[candidate][unclaimed]
                labels[unclaimed] = label_num
                for neighbor in unclaimed:
                    if len(neighborhoods[neighbor]) >= min_samples:
                        # The neighbor is itself a core point: it joins the
                        # next frontier and keeps growing the cluster.
                        next_candidates.append(neighbor)
                        core_samples.append(neighbor)
            candidates = next_candidates
        # Cluster complete; the next core point found starts a new cluster.
        label_num += 1
    return core_samples, labels
class DBSCAN(BaseEstimator):
    """Perform DBSCAN clustering from vector array or distance matrix.

    DBSCAN - Density-Based Spatial Clustering of Applications with Noise.
    Finds core samples of high density and expands clusters from them.
    Good for data which contains clusters of similar density.

    Parameters
    ----------
    eps : float, optional
        The maximum distance between two samples for them to be considered
        as in the same neighborhood.
    min_samples : int, optional
        The number of samples in a neighborhood for a point to be considered
        as a core point.
    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string or callable, it must be one of
        the options allowed by metrics.pairwise.calculate_distance for its
        metric parameter.
        If metric is "precomputed", X is assumed to be a distance matrix and
        must be square.
    random_state : numpy.RandomState, optional
        The generator used to initialize the centers. Defaults to numpy.random.

    Attributes
    ----------
    `core_sample_indices_` : array, shape = [n_core_samples]
        Indices of core samples.

    `components_` : array, shape = [n_core_samples, n_features]
        Copy of each core sample found by training.

    `labels_` : array, shape = [n_samples]
        Cluster labels for each point in the dataset given to fit().
        Noisy samples are given the label -1.

    Notes
    -----
    See examples/plot_dbscan.py for an example.

    References
    ----------
    Ester, M., H. P. Kriegel, J. Sander, and X. Xu, “A Density-Based
    Algorithm for Discovering Clusters in Large Spatial Databases with Noise”.
    In: Proceedings of the 2nd International Conference on Knowledge Discovery
    and Data Mining, Portland, OR, AAAI Press, pp. 226–231. 1996
    """

    def __init__(self, eps=0.5, min_samples=5, metric='euclidean',
                 random_state=None):
        self.eps = eps
        self.min_samples = min_samples
        self.metric = metric
        # The seed/None passed in is normalized to a RandomState up front,
        # so get_params() below hands a RandomState to the dbscan function.
        self.random_state = check_random_state(random_state)

    def fit(self, X, **params):
        """Perform DBSCAN clustering from vector array or distance matrix.

        Parameters
        ----------
        X: array [n_samples, n_samples] or [n_samples, n_features]
            Array of distances between samples, or a feature array.
            The array is treated as a feature array unless the metric is
            given as 'precomputed'.
        params: dict
            Overwrite keywords from __init__.
        """
        # Legacy calling convention: fit-time keyword overrides are deprecated.
        if params:
            warnings.warn('Passing parameters to fit methods is '
                          'depreciated', stacklevel=2)
            self.set_params(**params)

        # Delegate to the module-level dbscan function with this estimator's
        # hyper-parameters, then keep a copy of the core samples themselves.
        self.core_sample_indices_, self.labels_ = dbscan(X,
                                                         **self.get_params())
        self.components_ = X[self.core_sample_indices_].copy()
        return self
|
amenonsen/ansible | test/lib/ansible_test/_internal/provider/layout/__init__.py | Python | gpl-3.0 | 6,980 | 0.002292 | """Code for finding content."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import abc
import collections
import os
from ... import types as t
from ...util import (
ANSIBLE_SOURCE_ROOT,
)
from .. import (
PathProvider,
)
class Layout:
    """Description of content locations and helper methods to access content."""
    def __init__(self,
                 root,  # type: str
                 paths,  # type: t.List[str]
                 ):  # type: (...) -> None
        self.root = root

        self.__paths = paths  # contains both file paths and symlinked directory paths (ending with os.path.sep)
        self.__files = [path for path in paths if not path.endswith(os.path.sep)]  # contains only file paths
        # Nested (directories, files) trees built once for fast traversal.
        self.__paths_tree = paths_to_tree(self.__paths)
        self.__files_tree = paths_to_tree(self.__files)

    def all_files(self, include_symlinked_directories=False):  # type: (bool) -> t.List[str]
        """Return a list of all file paths."""
        if include_symlinked_directories:
            return self.__paths

        return self.__files

    def walk_files(self, directory, include_symlinked_directories=False):  # type: (str, bool) -> t.List[str]
        """Return a list of file paths found recursively under the given directory."""
        if include_symlinked_directories:
            tree = self.__paths_tree
        else:
            tree = self.__files_tree

        parts = directory.rstrip(os.sep).split(os.sep)
        item = get_tree_item(tree, parts)

        if not item:
            return []

        # Iterative traversal of the subtree; deque.pop() removes from the
        # right, so this walks depth-first.
        directories = collections.deque(item[0].values())

        files = list(item[1])

        while directories:
            item = directories.pop()
            directories.extend(item[0].values())
            files.extend(item[1])

        return files

    def get_dirs(self, directory):  # type: (str) -> t.List[str]
        """Return a list directory paths found directly under the given directory."""
        parts = directory.rstrip(os.sep).split(os.sep)
        item = get_tree_item(self.__files_tree, parts)
        return [os.path.join(directory, key) for key in item[0].keys()] if item else []

    def get_files(self, directory):  # type: (str) -> t.List[str]
        """Return a list of file paths found directly under the given directory."""
        parts = directory.rstrip(os.sep).split(os.sep)
        item = get_tree_item(self.__files_tree, parts)
        return item[1] if item else []
class ContentLayout(Layout):
    """Information about the current Ansible content being tested."""
    def __init__(self,
                 root,  # type: str
                 paths,  # type: t.List[str]
                 plugin_paths,  # type: t.Dict[str, str]
                 collection=None,  # type: t.Optional[CollectionDetail]
                 integration_path=None,  # type: t.Optional[str]
                 unit_path=None,  # type: t.Optional[str]
                 unit_module_path=None,  # type: t.Optional[str]
                 unit_module_utils_path=None,  # type: t.Optional[str]
                 ):  # type: (...) -> None
        super(ContentLayout, self).__init__(root, paths)

        self.plugin_paths = plugin_paths
        self.collection = collection
        self.integration_path = integration_path
        # Derived integration paths. NOTE(review): os.path.join raises if
        # integration_path is None -- presumably callers always supply it.
        self.integration_targets_path = os.path.join(integration_path, 'targets')
        self.integration_vars_path = os.path.join(integration_path, 'integration_config.yml')
        self.unit_path = unit_path
        self.unit_module_path = unit_module_path
        self.unit_module_utils_path = unit_module_utils_path
        # True when testing the Ansible source tree itself rather than a collection.
        self.is_ansible = root == ANSIBLE_SOURCE_ROOT

    @property
    def prefix(self):  # type: () -> str
        """Return the collection prefix or an empty string if not a collection."""
        if self.collection:
            return self.collection.prefix

        return ''

    @property
    def module_path(self):  # type: () -> t.Optional[str]
        """Return the path where modules are found, if any."""
        return self.plugin_paths.get('modules')

    @property
    def module_utils_path(self):  # type: () -> t.Optional[str]
        """Return the path where module_utils are found, if any."""
        return self.plugin_paths.get('module_utils')

    @property
    def module_utils_powershell_path(self):  # type: () -> t.Optional[str]
        """Return the path where powershell module_utils are found, if any."""
        # Ansible itself keeps powershell utils in a subdirectory; collections
        # keep all module_utils together.
        if self.is_ansible:
            return os.path.join(self.plugin_paths['module_utils'], 'powershell')

        return self.plugin_paths.get('module_utils')

    @property
    def module_utils_csharp_path(self):  # type: () -> t.Optional[str]
        """Return the path where csharp module_utils are found, if any."""
        if self.is_ansible:
            return os.path.join(self.plugin_paths['module_utils'], 'csharp')

        return self.plugin_paths.get('module_utils')
class CollectionDetail:
    """Details about the layout of the current collection."""
    def __init__(self,
                 name,  # type: str
                 namespace,  # type: str
                 root,  # type: str
                 ):  # type: (...) -> None
        self.name = name
        self.namespace = namespace
        self.root = root
        # Fully-qualified collection name and derived lookup values.
        self.full_name = '.'.join((namespace, name))
        self.prefix = self.full_name + '.'
        self.directory = os.path.join('ansible_collections', namespace, name)
class LayoutProvider(PathProvider):
    """Base class for layout providers."""
    # Plugin type directories recognized when mapping plugin paths.
    PLUGIN_TYPES = (
        'action',
        'become',
        'cache',
        'callback',
        'cliconf',
        'connection',
        'doc_fragments',
        'filter',
        'httpapi',
        'inventory',
        'lookup',
        'module_utils',
        'modules',
        'netconf',
        'shell',
        'strategy',
        'terminal',
        'test',
        'vars',
    )

    @abc.abstractmethod
    def create(self, root, paths):  # type: (str, t.List[str]) -> ContentLayout
        """Create a layout using the given root and paths."""
def paths_to_tree(paths):  # type: (t.List[str]) -> t.Tuple(t.Dict[str, t.Any], t.List[str])
    """Build a nested (directories, files) tree from a flat list of paths.

    Each node is a (children, files) pair: ``children`` maps a directory
    name to its child node, and ``files`` lists the full paths of files
    that live directly in that directory.
    """
    tree = {}, []

    for path in paths:
        parts = path.split(os.sep)
        node = tree

        # Descend through the directory components, creating nodes on demand,
        # then record the full path on the node it belongs to.
        for name in parts[:-1]:
            node = node[0].setdefault(name, ({}, []))

        node[1].append(path)

    return tree
def get_tree_item(tree, parts):  # type: (t.Tuple(t.Dict[str, t.Any], t.List[str]), t.List[str]) -> t.Optional[t.Tuple(t.Dict[str, t.Any], t.List[str])]
    """Walk the tree along the given path parts.

    Return the node found there, or None when any component is missing.
    """
    current = tree

    for part in parts:
        children = current[0]
        current = children.get(part)

        if not current:
            return None

    return current
|
EmpireProject/Empire | lib/modules/powershell/persistence/misc/disable_machine_acct_change.py | Python | bsd-3-clause | 2,520 | 0.013889 | from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-DisableMachineAcctChange',
'Author': ['@harmj0y'],
'Description': ('Disables the machine account for the target sy | stem '
'from changing its password automatically.'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : True,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersi | on' : '2',
'Comments': []
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'CleanUp' : {
'Description' : 'Switch. Re-enable machine password changes.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
cleanup = self.options['CleanUp']['Value']
if cleanup.lower() == 'true':
script = "$null=Set-ItemProperty -Force -Path HKLM:\SYSTEM\CurrentControlSet\Services\Netlogon\Parameters -Name DisablePasswordChange -Value 0; 'Machine account password change re-enabled.'"
if obfuscate:
script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, obfuscationCommand=obfuscationCommand)
return script
script = "$null=Set-ItemProperty -Force -Path HKLM:\SYSTEM\CurrentControlSet\Services\Netlogon\Parameters -Name DisablePasswordChange -Value 1; 'Machine account password change disabled.'"
if obfuscate:
script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, obfuscationCommand=obfuscationCommand)
return script
|
VapourApps/va_master | doc_generator/module_info.py | Python | gpl-3.0 | 937 | 0.01174 | from optparse import OptionParser
import sys, inspect, importlib, csv
#Gets a class
#Returns a list of tuples [('function_name', 'function_doc'), ...]
def get_class_methods(cls):
    """Return [(method_name, method_docstring), ...] for the given class.

    Only members matching inspect.ismethod are included.
    """
    members = inspect.getmembers(cls, predicate=inspect.ismethod)
    return [(name, member.__doc__) for name, member in members]
#Gets a list of tuples [('class', 'ClassName')], ...]
#Returns a dic | tionary {'ClassName': [('function_name', 'function_doc'), ...]
def get_class_dict(cls_list):
    """Map each class name to its [(method_name, method_doc), ...] list.

    Expects an iterable of (class_object, class_name) tuples.
    """
    all_methods = {}
    for cls_obj, cls_name in cls_list:
        all_methods[cls_name] = get_class_methods(cls_obj)
    return all_methods
def dict_to_table(cls_name, cls_dict):
    """Render one class's methods as a Markdown table.

    ``cls_dict`` may be the full {class_name: method_list} mapping or the
    method list itself (callers in this file pass the list directly).
    Fixes the original implementation, which referenced an undefined name
    (``cls``) and called ``str.join`` with unpacked arguments (TypeError).
    """
    methods = cls_dict[cls_name] if isinstance(cls_dict, dict) else cls_dict
    # A missing docstring (None) is rendered as an empty cell.
    rows = ['|'.join((name, doc or '')) for name, doc in methods]
    table = ['Function | Documentation', '--- | ---'] + rows
    table = '\n'.join(table)
    return table
def get_tables_for_dicts(cls_list):
    """Render a table for every class and join them with blank lines.

    NOTE(review): despite its name, *cls_list* is iterated as a mapping of
    class name -> method list (the structure produced by get_class_dict).
    """
    rendered = [dict_to_table(cls_name, cls_list[cls_name]) for cls_name in cls_list]
    return '\n\n'.join(rendered)
|
bbaltz505/iotkit-libpy | tests/test_alerts.py | Python | bsd-3-clause | 543 | 0.001842 | import iotkitclient
import unittest
from config import *
# Test vars
newaccount = "Test101"  # account name fixture used by the tests below
class TestAlerts(unittest.TestCase):
    """Integration tests for alert listing against a live IoT Kit service.

    Requires the host/credentials defined in the imported config module.
    """
    def login(self):
        """Create a client for the configured host and authenticate it."""
        iot = iotkitclient.Client(host=hostname, proxies=proxies)
        iot.login(username, password)
        return iot
    # Connection tests
    def test_list_alerts(self):
        # A freshly created test account is expected to have no alerts.
        iot = self.login()
        acct = iotkitclient.Account(iot)
        acct.get_account(newaccount)
        js = acct.list_alerts()
        iotkitclient.prettyprint(js)
        self.assertEqual(0, len(js))
self.assertEqual(0, len(js)) |
npp/npp-api | data/management/commands/load_cffr.py | Python | mit | 12,123 | 0.007754 | from django import db
#from django.core.management.base import NoArgsCommand
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Sum
from data.models import CffrRaw, CffrProgram, CffrState, State, County, Cffr, CffrIndividualCounty, CffrIndividualState
from django.db import connection, transaction
# National Priorities Project Data Repository
# load_cffr.py
# Created 5/11/2011
# Populates the Cffr model and the CffrState summary table used by the API
# source model(s): CffrRaw
# source load command(s): import_cffr
# destination model: Cffr, CffrState
# HOWTO:
# 1) Ensure that CffrRaw, CffrProgram, State, County is loaded and up to date (especially make sure that any new program codes have been loaded to CffrProgram before running this script)
# 2) Run as Django management command from your project path "python manage.py load_cffr"
#class Command(NoArgsCommand):
class Command(BaseCommand):
args = '<year>'
help = 'loads cffr county and state tables from the raw cffr table'
#def handle_noargs(self, **options):
def handle(self, *args, **options):
        def get_state(**lookup):
            """Return the State row matching the given lookup kwargs."""
            state_ref_current = State.objects.get(**lookup)
            return state_ref_current
        def get_county(**lookup):
            """Return the County for (state, county_ansi), creating it if missing."""
            try:
                county_ref_current = County.objects.get(state=lookup['state'], county_ansi=lookup['county_ansi'])
            # NOTE(review): bare except also swallows real database errors,
            # not just County.DoesNotExist.
            except:
                county_ref_current = add_county(**lookup)
            return county_ref_current
        def add_county(**lookup):
            """Create a County record for a code missing from the ANSI list."""
            #Sometimes, CFFR records come through with old county codes
            #that aren't in the official ANSI county list. Rather than skip
            #these records, add the missing county record.

            #get the county name from the CFFR raw record
            cffr_county = CffrRaw.objects.filter(state_code = lookup['state'].state_ansi,county_code = lookup['county_ansi'])
            if cffr_county.count() > 0:
                county_name = cffr_county[0].county_name
            else:
                county_name = 'Unknown'

            record = County(state_id=lookup['state'].id, county_ansi=lookup['county_ansi'], county_name=county_name)
            record.save()

            return record
        def get_program(**lookup):
            """Return the CffrProgram row matching the given lookup kwargs."""
            program_ref_current = CffrProgram.objects.get(**lookup)
            return program_ref_current
        def load_year_county(year_current):
            """Aggregate raw CFFR rows to county level and insert into data_cffr.

            Skips the year entirely if it already has county records, or if
            any state or program codes for the year cannot be resolved.
            NOTE(review): unlike load_year_state, no explicit commit is
            issued here -- presumably transaction handling is managed
            elsewhere; confirm.
            """
            records_current = Cffr.objects.filter(year=year_current).count()
            #if we already have records loaded for this year, skip it
            if records_current == 0:
                print 'starting cffr load for year ' + str(year_current) + '...'
                cursor = connection.cursor()

                #check for unknown state codes
                cursor.execute('''
                    SELECT COUNT(*)
                    FROM
                        data_cffrraw r
                        LEFT JOIN data_state s
                            ON r.state_code = s.state_ansi
                    WHERE
                        r.year > 1992 AND
                        s.state_ansi IS NULL''')
                x = cursor.fetchone()
                if x[0] > 0:
                    print '1 or more state codes for ' + str(year_current) + ' were not in the state table. No records will be loaded for ' + str(year_current) + '.'
                    return

                #check for unknown program ids
                cursor.execute('''
                    SELECT COUNT(*)
                    FROM
                        data_cffrraw r
                        LEFT JOIN data_cffrprogram p
                            ON r.program_code = p.program_code
                            AND p.year = r.year
                    WHERE
                        r.year = %s
                        AND p.id IS NULL
                    ''',[year_current])
                x = cursor.fetchone()
                if x[0] > 0:
                    print '1 or more program codes for ' + str(year_current) + ' were not in CffrProgram. No records will be loaded for ' + str(year_current) + '.'
                    return

                #insert any missing counties
                cursor.execute('''
                    INSERT INTO data_county (state_id, county_ansi, county_abbr, county_name, create_date, update_date)
                    SELECT DISTINCT
                        s.id, r.county_code, '', r.county_name, NOW(), NOW()
                    FROM
                        data_cffrraw r
                        JOIN data_state s
                            ON r.state_code = s.state_ansi
                        LEFT JOIN data_county c
                            ON r.county_code = c.county_ansi
                            AND s.id = c.state_id
                    WHERE
                        r.year = %s
                        AND c.id IS NULL
                        AND s.state_abbr <> 'DC'
                    ''',[year_current])

                #finally, aggregate raw cffr data to the county level & insert it
                cursor.execute('''
                    INSERT INTO data_cffr (year, state_id, county_id, cffrprogram_id, amount, amount_per_capita, create_date, update_date)
                    SELECT
                        c.year
                        , s.id
                        , co.id
                        , p.id
                        , SUM(amount_adjusted)
                        , ROUND(SUM(amount_adjusted)/pop.total,2)
                        , NOW()
                        , NOW()
                    FROM
                        data_cffrraw c
                        JOIN data_state s
                            ON c.state_code = s.state_ansi
                        JOIN data_county co
                            ON c.county_code = co.county_ansi
                            AND s.id = co.state_id
                        JOIN data_cffrprogram p
                            ON c.program_code = p.program_code
                            AND c.year = p.year
                        LEFT JOIN data_populationgendercounty pop
                            ON co.id = pop.county_id
                            AND c.year = pop.year
                    WHERE
                        c.year = %s
                    GROUP BY
                        c.year, s.id, co.id, p.id
                    ''',[year_current])
            else:
                print str(year_current) + ' skipped: ' + str(records_current) + ' county records already loaded for that year.'
        def load_year_state(year_current):
            """Roll county-level data_cffr rows up to data_cffrstate for one year.

            Skips the year if state records already exist for it.
            """
            records_current = CffrState.objects.filter(year=year_current).count()
            #if we already have records loaded for this year, skip it
            if records_current == 0:
                cursor = connection.cursor()
                cursor.execute('''
                    insert into data_cffrstate (
                    year, state_id, cffrprogram_id, amount, amount_per_capita, create_date, update_date)
                    select
                        c.year
                        , c.state_id
                        , cffrprogram_id
                        , sum(amount)
                        , round(sum(amount)/pop.total,2)
                        , now()
                        , now()
                    from
                        data_cffr c
                        left join data_populationgenderstate pop
                            on c.state_id = pop.state_id
                            and c.year = pop.year
                    where
                        c.year = %s
                    group by
                        c.year
                        , c.state_id
                        , cffrprogram_id
                    ''',[year_current])
                transaction.commit_unless_managed()
            else:
                print str(year_current) + ' skipped: ' + str(records_current) + ' state records already loaded for that year.'
def load_year_individual_county(year_current):
records_current = CffrIndividualCounty.objects.filter(year=year_current).count()
#if we already have records loaded for this year, skip it
if records_current == |
GridProtectionAlliance/ARMORE | source/webServer/domains/network.py | Python | mit | 2,712 | 0.015487 | # # # # #
# network.py
#
# This file is used to serve up
# RESTful links that can be
# consumed by a frontend system
#
# University of Illinois/NCSA Open Source License
# Copyright (c) 2015 Information Trust Institute
# All rights reserved.
#
# Developed by:
#
# Information Trust Institute
# University of Illinois
# http://www.iti.illinois.edu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal with
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# Redistributions of source code must retain the above copyright notice, this list
# of conditions and the following disclaimers. Redistributions in binary form must
# reproduce the above copyright notice, t | his list of conditions and the following
# disclaimers in the documentation and/or other materials provided with the
# distribution.
#
# Neither the names of Information Trust Institute, University of Illinois, nor
# the names of its contributors may be used to endorse or promote products derived
# from this Software without specific prior written permission.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPL | IED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.
#
# # # # #
import domains.support.system as sysLib
import domains.support.network as netLib
from domains.support.lib.common import *
from flask import Blueprint, render_template, session
# Flask blueprint under which all network status views are registered.
netDomain = Blueprint('network', __name__)
@netDomain.route("/example/network")
@secure(["admin","user"])
def example_network():
    """Render the network status page: interface stats plus socket connections."""
    # getConnections() returns the connection rows along with the distinct
    # socket types, address families and states used by the page's filters.
    connections, types, families, theStates = netLib.getConnections()
    return render_template("example/network.html",
        common = sysLib.getCommonInfo({"username": session["username"]}, "network"),
        network_interfaces = netLib.getInterfaceStats(),
        socket_types = types,
        states = theStates,
        socket_families = families,
        connections = connections
    )
@netDomain.route("/network")
@secure(["admin","user"])
def network():
return example_network()
|
HeatherHillers/RoamMac | src/configmanager/editorwidgets/__init__.py | Python | gpl-2.0 | 1,068 | 0.003745 | from PyQt4.QtCore import pyqtSignal
from PyQt4.QtGui import QWidget
from configmanager.editorwidgets.checkwidget import CheckboxWidgetConfig
from configmanager.editorwidgets.datewidget import DateWidgetConfig
from configmanager.editorwidgets.imagewidget import ImageWidgetConfig
from configmanager.editorwidgets.textwidget import TextBlockWidgetConfig, TextWidgetConfig
from configmanager.editorwidgets.listwidget import ListWidgetConfig
from configmanager.editorwidgets.numberwidget import NumberWidgetConfig
from configmanager.editorwidgets.optionwidget import OptionWidgetConfig
widgetconfigs = {"Checkbox" : CheckboxWidgetConfig,
"Date": DateWidgetConfig,
| "Image": ImageWidgetConfig,
"List": ListWidgetConfig,
"MultiList" : ListWidgetConfig,
"Text" : TextWidgetConfig,
"TextBlock": TextBlockWidgetConfig,
"Number": NumberWidgetConfig,
"Number(Double)": NumberWidgetConfig,
"Option Row": Optio | nWidgetConfig}
|
soxfmr/ant | vendor/yougetsignal.py | Python | gpl-3.0 | 794 | 0.04534 | # -*- coding: utf-8 -*-
import requests
import json
from provider import send
URL = 'http://domains.yougetsignal.com/domains. | php'
KEY_REMOTE_ADDR = 'remoteAddress'
KEY_RESERVE_KEY = 'key'
KEY_RESERVE_UNDERLINE = '_'
KEY_DOMAIN_ARRAY = 'domainArray'
def retrieve(target, ip):
retval = []
try:
result = send(URL,
payload = {
KEY_REMOTE_ADDR : target,
KEY_RESERVE_KEY : '',
KEY_RESERVE_UNDERLINE : ''
},
headers = {
'Referer' : 'http://www.yougetsignal.com/tools/web-sites-on-web-server'
})
if result:
data = json.loads(result)
domainList = data.get(KEY_DOMAIN_ARRAY)
if d | omainList:
for domain in domainList:
# Construct as ['example.com', '']
retval.append(domain[0])
except Exception as e:
print e
return retval
|
starduliang/haha | autoapp.py | Python | bsd-3-clause | 266 | 0 | # -*- coding: utf-8 -*-
"""Create an application instance."""
from flask.helpers import get_debug_fla | g
from haha.app import create_app
from haha.settings i | mport DevConfig, ProdConfig
CONFIG = DevConfig if get_debug_flag() else ProdConfig
app = create_app(CONFIG)
|
alviano/wasp | python_libraries/heuristics/heuristic-instructions.py | Python | apache-2.0 | 2,804 | 0.008559 | import wasp
def onConflict():
"""
Optional.
A conflict happened during the solving
"""
pass
def onDeletion():
"""
Optional.
The method for deleting clauses is invoked.
"""
pass
def onLearningClause(lbd, size, *lits):
"""
Optional.
When a clause is learnt.
:param lbd: the lbd value of the learnt clause
:param size: the size of the learned clause
:param lits: the literals in the learned clause
"""
pass
def onLitInImportantClause(lit):
"""
Optional.
When a literal appears in special clauses, e.g. glue clauses.
:param lit: the literal in the important clause.
"""
pass
def onLitInvolvedInConflict(lit):
"""
Optional.
When a literal is involved in the computation of the learned clause.
:param lit: the literal involved in the conflict
"""
pass
def onLoopFormula(lbd, size, *lits):
"""
Optional.
| When a loop formula is learnt for an unfounded set.
:param lbd: the lbd value of the loop formula
:param size: the size of the loop formula
:param lits: the literals in the loop formula
"""
pass
def onNewClause(*clause):
"""
Optional.
All clauses left after the simplifications are sent to the heuristic using this method
:param clause: the clause
"""
def onRestart():
"""
| Optional.
When the solver performs a restart.
"""
pass
def onUnfoundedSet(*unfounded_set):
"""
Optional.
When an unfounded set is found.
:param unfounded_set: all atoms in the unfounded set
"""
pass
def initFallback():
"""
Optional.
Init the activities of variables in the fallback heuristic.
:return: List of pairs (v, i), the activity variable v is associated with i.
"""
pass
def factorFallback():
"""
Optional.
Set the factor for the activities of variables in the fallback heuristic (required fallback method).
:return: list of pairs (v, f), the factor f is associated to the variable v.
"""
pass
def signFallback():
"""
Optional.
Set the preferred polarity for variables in the fallback heuristic (required fallback method).
:return: list of literals
"""
pass
def selectLiteral():
"""
Required.
This method is invoked when a choice is needed. It can return a choice and special
values for performing special actions.
Special values:
- wasp.restart() force the solver to perform a restart
- wasp.fallback(n) use the fallback heuristic for n steps (n<=0 use always fallback heuristic) -> require the presence of the method fallback() in the script
- wasp.unroll(v) unroll the truth value of the variable v
:return: wasp.choice(l), where l is a literal
"""
pass |
bockthom/codeface | codeface/dbmanager.py | Python | gpl-2.0 | 21,118 | 0.001799 | #! /usr/bin/env python
# This file is part of Codeface. Codeface is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Copyright 2013 by Siemens AG, Wolfgang Mauerer <wolfgang.mauerer@siemens.com>
# All Rights Reserved.
# Thin sql database wrapper
import MySQLdb as mdb
import time
from datetime import datetime
from logging import getLogger;
from contextlib import contextmanager
# create logger
log = getLogger(__name__)
@contextmanager
def _log_db_error(action, args=None):
try:
yield
except mdb.Error as e:
if args:
try:
action = action % args
except:
pass
log.critical('MySQL error {e[0]} during "{action}": {e[1]}'
''.format(e=e.args, action=action))
raise
class DBManager:
"""This class provides an interface to the codeface sql database."""
def __init__(self, conf):
try:
self.con = None
self.con = mdb.Connection(host=conf["dbhost"],
port=conf["dbport"],
user=conf["dbuser"],
passwd=conf["dbpwd"],
db=conf["dbname"],
charset="utf8",
use_unicode=True)
log.debug(
"Establishing MySQL connection to "
"{c[dbuser]}@{c[dbhost]}:{c[dbport]}, DB '{c[dbname]}'"
.format(c=conf))
except mdb.Error as e:
log.critical(
"Failed to establish MySQL connection to "
"{c[dbuser]}@{c[dbhost]}:{c[dbport]}, DB '{c[dbname]}'"
": {e[1]} ({e[0]})"
"".format(c=conf, e=e.args))
raise
self.cur = self.con.cursor()
max_packet_size = 1024 * 1024 * 512
self.doExec("SET GLOBAL max_allowed_packet=%s", (max_packet_size,))
def __del__(self):
if self.con != None:
self.con.close()
def doExec(self, stmt, args=None):
with _log_db_error(stmt, args):
retryCount = 0
while retryCount < 10:
try:
if isinstance(args, list):
res = self.cur.executemany(stmt, args)
else:
res = self.cur.execute(stmt, args)
return res
except mdb.OperationalError as dbe:
retryCount += 1
log.devinfo("DBE args: " + str(dbe.args))
if dbe.args[0] == 1213: # Deadlock! retry...
log.warning("Recoverable deadlock in MySQL - retrying " \
"(attempt {}).".format(retryCount))
elif dbe.args[0] == 2003: # Can't connect to MySQL server
log.warning("Can't connect to MySQL server - retrying " \
"(attempt {}).".format(retryCount))
time.sleep(60)
self.con.ping(True)
elif dbe.args[0] == 2006: # Server gone away...
log.warning("MySQL Server gone away, trying to reconnect " \
"(attempt {}).".format(retryCount))
time.sleep(60)
self.con.ping(True)
elif dbe.args[0] == 2013 or dbe.args[0] == 1053: # Lost connection to MySQL server during query | Server shutdown in progress
log.warning("Lost connection to MySQL server during query, " \
"trying to reconnect (attempt {}).".format(retryCount))
time.sleep(60)
self.con.ping(True)
elif dbe.args[0] == 1153: # Got a packet bigger than 'max_allowed_packet' bytes
log.warning("Sent a too big packet ({lnos} lines), retrying with smaller packets.".format(
lnos=len(args)))
## split package into smaller packets of size 'chunk_size'
chunk_size = 100
args_list = [args[i:i + chunk_size] for i in range(0, len(args), chunk_size)]
## retrying
time.sleep(60)
self.con.ping(True)
for chunk in args_list:
self.doExec(stmt, chunk)
| else:
| self.con.close()
raise
# Give up after too many retry attempts and propagate the
# problem to the caller. Either it's fixed with a different
# query, or the analysis fails
log.error("DB access failed after ten attempts, giving up")
self.con.close()
raise
def doFetchAll(self):
with _log_db_error("fetchall"):
return self.cur.fetchall()
def doCommit(self):
with _log_db_error("commit"):
return self.con.commit()
def doExecCommit(self, stmt, args=None):
self.doExec(stmt, args)
self.doCommit()
# NOTE: We don't provide any synchronisation since by assumption,
# a single project is never analysed from two threads.
def getProjectID(self, name, analysisMethod):
"""
Return the project ID of the given name/analysisMethod combination.
If the project does not exist yet in the database, it is created.
"""
self.doExec("SELECT id FROM project WHERE name=%s "
"AND analysisMethod=%s", (name, analysisMethod))
if self.cur.rowcount == 0:
# Project is not contained in the database
log.devinfo("Creating new project {}/{}".
format(name, analysisMethod))
self.doExecCommit("INSERT INTO project (name, analysisMethod) " +
"VALUES (%s, %s);", (name, analysisMethod))
self.doExec("SELECT id FROM project WHERE name=%s;", (name,))
elif self.cur.rowcount > 1:
raise Exception("Duplicate projects {}/{} in database!".
format(name, analysisMethod))
pid = self.doFetchAll()[0][0]
log.devinfo("Using project {}/{} with ID {}".
format(name, analysisMethod, pid))
return pid
def get_project(self, pid):
self.doExec("SELECT name, analysisMethod FROM project"
" WHERE id=%s", pid)
if self.cur.rowcount == 0:
raise Exception("Project id {} not found!".format(pid))
return self.doFetchAll()[0]
def get_edgelist(self, cid):
self.doExec("SELECT fromId, toId, weight FROM edgelist \
WHERE clusterId={}".format(cid))
if self.cur.rowcount == 0:
raise Exception("Cluster id {} not found!".format(cid))
return self.doFetchAll()
def get_file_dev(self, project_id, range_id):
self.doExec("SELECT * FROM (SELECT id, commitHash, commitDate, author, description " \
"FROM commit WHERE projectId={} AND releaseRangeId={}) AS Commits " \
"INNER JOIN (SELECT file, commitId, SUM(size) AS fileSize " \
"FROM commit_dependency GROUP BY commitId, file) AS commitFileLOC " \
"ON Commits.id=commitFileLOC.commitId ORDER BY " \
"commitFileLOC.file, commitFileLOC.c |
mohsraspi/mhscs14 | tobias/pyramid.py | Python | gpl-2.0 | 163 | 0.055215 | import minecraft as minecraft
mc = minecraft.Minecraft.create()
p=25
y=-5
n=-25
while p >= 1:
mc.setBlocks(p,y,p,n,y,n,45)
p = p-1
n = n+1
y = y+1 | ||
JuliaLima/Evergreen | build/i18n/tests/testSQL.py | Python | gpl-2.0 | 1,833 | 0.002182 | #!/usr/bin/env python
#
# Perform the following tests:
# 1. Generate a POT file from a set of marked SQL statements
# 2. Generate an SQL file from a translated PO file
import filecmp
import os
import subprocess
import testhelper
import unittest
class TestSQLFramework(unittest.TestCase):
basedir = os.path.dirname(__file__)
script = os.path.join(basedir, '../scripts/db-seed-i18n.py')
tmpdirs = [(os.path.join(basedir, 'tmp/'))]
sqlsource = os.path.join(basedir, 'data/sqlsource.sql')
canonpot = os.path.join(basedir, 'data/sql2pot.pot')
canonpo = os.path.join(basedir, 'data/sqlsource.po')
testpot = os.path.join(basedir, 't | mp/sql2pot.pot')
canonsql = os.path.join(basedir, 'data/po2sql.sql')
testsql = os.path.join(basedir, 'tmp/testi18n.sql')
def setUp(self):
testhelper.setUp(self)
def tearDown(self):
testhelper.tearDown(self)
def testgenpot(self):
"""
Create a POT file from our test SQL statements.
"""
subprocess.Popen(
('python', self.script, '--po | t', self.sqlsource,
'--output', self.testpot),
0, None, None).wait()
# avoid basic timestamp conflicts
testhelper.mungepothead(self.testpot)
testhelper.mungepothead(self.canonpot)
self.assertEqual(filecmp.cmp(self.canonpot, self.testpot), 1)
def testgensql(self):
"""
Create a SQL file from a translated PO file.
"""
devnull = open('/dev/null', 'w')
subprocess.Popen(
('python', self.script, '--sql', self.canonpo,
'--locale', 'zz-ZZ', '--output', self.testsql),
0, None, None, devnull, devnull).wait()
self.assertEqual(filecmp.cmp(self.canonsql, self.testsql), 1)
if __name__ == '__main__':
unittest.main()
|
jeremiedecock/snippets | python/pyqt/pyqt4/hello_class_for_main_widget.py | Python | mit | 2,089 | 0.004312 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaini | ng a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, | sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: http://web.archive.org/web/20120408161958/http://zetcode.com/tutorials/pyqt4/firstprograms/
import sys
from PyQt4 import QtGui
class Window(QtGui.QWidget):
def __init__(self):
super(Window, self).__init__()
self.resize(250, 150)
self.setWindowTitle('Hello')
self.show()
def main():
"""Main function"""
app = QtGui.QApplication(sys.argv)
# The default constructor has no parent.
# A widget with no parent is a window.
window = Window()
# The mainloop of the application. The event handling starts from this point.
# The exec_() method has an underscore. It is because the exec is a Python keyword. And thus, exec_() was used instead.
exit_code = app.exec_()
# The sys.exit() method ensures a clean exit.
# The environment will be informed, how the application ended.
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
molpopgen/fwdpy11 | fwdpy11/demographic_models/IM.py | Python | gpl-3.0 | 6,028 | 0.000829 | """
This module provides functions to generate demographic events for
"isolation-with-migration", or IM, models.
"""
import attr
import numpy as np
from fwdpy11.class_decorators import (attr_add_asblack, attr_class_pickle,
attr_class_to_from_dict)
@attr_add_asblack
# @attr_class_pickle
@attr_class_to_from_dict
@attr.s(
frozen=True, auto_attribs=True, repr_ns="fwdpy11.demographic_models.IM", eq=False
)
class TwoDemeIMParameters(object):
"""
Holds the parameters to :func:`fwdpy11.demographic_models.IM.two_deme_IM`.
Instances of this class are held as the ``parameters`` attribute of
:class:`fwdpy11.demographic_models.DemographicModelDetails`.
Attribute names are the same as the ``kwargs`` to
:func:`fwdpy11.demographic_models.IM.two_deme_IM`:
:param Nanc:
:param T:
:param psplit:
:param Ns:
:param migrates:
:param burnin:
.. versionadded:: 0.8.0
"""
Nanc: int
T: float
psplit: float
Ns: tuple
migrates: list
burnin: float
def __eq__(self, other):
return all(
[self.Nanc, self.T, self.psplit, self.Ns, self.burnin]
== [other.Nanc, other.T, other.psplit, other.Ns, other.burnin]
) is True and np.array_equal(self.migrates, other.migrates)
@attr_add_asblack
@attr_class_pickle
@attr_class_to_from_dict
@attr.s(frozen=True, auto_attribs=True, repr_ns="fwdpy11.demographic_models.IM")
class TwoDemeIMMetaData(object):
"""
Holds metadata returned by :func:`fwdpy11.demographic_models.IM.two_deme_IM`.
Instances of this class are held as the ``metadata`` attribute of
:class:`fwdpy11.demographic_models.DemographicModelDetails`.
:param split_time: The time until the two demes split
:param gens_post_split: The time from the split until the end of the simulation
:param simlen: ``split_time`` + ``gens_post_split``
.. versionadded:: 0.8.0
"""
split_time: int
gens_post_split: int
simlen: int
def two_deme_IM(Nanc, T, psplit, Ns, migrates, burnin=10.0):
"""
Isolation-with-migration (IM) model for two demes.
An ancestral population splits into two daughter demes.
At the time of the split, ``psplit`` of the ancestral
population moves into deme 1. The two daughter populations
begin exponential growth until the present time and migration
may occur between them.
:param Nanc: The ancestral population size.
:type Nanc: int
:param T: The time of the split, in units of Nanc generations
into the past.
:type T: float
:param psplit: The proportion of the ancestral population that splits
off to found deme 1
:type psplit: float
:param Ns: The final sizes of demes 0 and 1, relative to Nanc
:type Ns: tuple
:param migrates: The migration rates from 0 to 1 and from 1 to 0,
respectively. Migration rates are the fraction
of the destination deme replaced by the source
deme.
:type migrates: float
:param burnin: Time to simulate before the split, in units of Nanc
:type burnin: float
:returns: The model events, instances of
:class:`fwdpy11.demographic_models.IM.TwoDemeIMParameters`
and :class:`fwdpy11.demographic_models.IM.TwoDemeIMMetaData`.
:rtype: fwdpy11.demographic_models.DemographicModelDetails
.. note::
The events returned by this model assume/require that you will
construct a population with intitial size ``Nanc``.
.. versionadded:: 0.6.0
.. versionchanged:: 0.8.0
Returns instance of :class:`fwdpy11.demographic_models.DemographicModelDetails`
"""
import fwdpy11
import numpy as np
from .demographic_model_details import DemographicModelDetails
N0, N1 = Ns
m01, m10 = migrates
split_time = np.rint(Nanc * burnin).astype(int)
# The split event
split = [
fwdpy11.move_individuals(
when=split_time, source=0, destination=1, fraction=psplit
)
]
# Get growth rates and set growth rate changes,
# taking care to handle our rounding!
gens_post_split = np.rint(Nanc * T).astype(int)
N0split = np.rint(Nanc * (1.0 - psplit))
if N0split == 0 or N0split == Nanc:
raise ValueError("invalid value for psplit: {}".format(psplit))
N0final = np.rint(N0 * Nanc)
N1split = np.rint(Nanc * psplit)
if N1split == 0 or N1split == Nanc:
raise ValueError("invalid value for psplit: {}".format(psplit))
N1final = np.rint(N1 * Nanc)
G0 = fwdpy11.exponential_growth_rate(N0split, N0final, gens_post_split)
G1 = fwdpy11.exponential_growth_rate(N1split, N1final, gens_post_split)
growth = [
fwdpy11.SetExponentialGrowth(split_time, 0, G0),
fwdpy11.SetExponentialGrowth(split_time, 1, G1),
]
# Set up the migration matrix for two demes, but only
# deme | zero exists.
m = fwdpy11.migration_matrix_single_extant_deme(2, 0)
# The rows of the matrix change at the split:
cm = [
fwdpy11.SetMigrationRates(split_time, 0, [1.0 - m10, m10]),
fwdpy11.SetMigrationRates(split_time, 1, [m01, 1.0 - m01]),
]
mdict = {
"mass_migrations": split, |
"set_growth_rates": growth,
"set_migration_rates": cm,
"migmatrix": m,
}
return DemographicModelDetails(
model=fwdpy11.DiscreteDemography(**mdict),
name="Two deme isolation-with-migration (IM) model",
source={"function": "fwdpy11.demographic_models.IM.two_deme_IM"},
parameters=TwoDemeIMParameters(
Nanc=Nanc,
T=T,
psplit=psplit,
Ns=Ns,
migrates=migrates,
burnin=burnin,
),
citation=None,
metadata=TwoDemeIMMetaData(
split_time=split_time,
gens_post_split=gens_post_split,
simlen=split_time + gens_post_split,
),
)
|
DavideCanton/Python3 | euler/ex7.py | Python | gpl-3.0 | 182 | 0.005495 | __author__ = 'davide'
import itertools as it
from euler._utils import genera_primi
if __name__ | == "__main__":
n = | it.islice(genera_primi(), 10000, 8359265)
print(next(n)) |
arth-co/shoop | shoop_tests/core/test_sales_unit.py | Python | agpl-3.0 | 775 | 0 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from decimal import Decimal
from shoop.core.models.units import SalesUnit
def test_sales_unit_decimals():
assert SalesUnit(decimals=0).quantity_step == 1
assert not SalesUnit(decimals=0).allow_fractions
assert SalesUnit(decimals=1) | .quantity_step == Decimal("0.1")
assert SalesUn | it(decimals=1).allow_fractions
assert SalesUnit(decimals=10).quantity_step == Decimal("0.0000000001")
assert SalesUnit(decimals=2).round("1.509") == Decimal("1.51")
assert SalesUnit(decimals=0).round("1.5") == Decimal("2")
|
yfauser/ansible-modules-extras | notification/flowdock.py | Python | gpl-3.0 | 6,028 | 0.003981 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 Matt Coddington <coddington@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: flowdock
version_added: "1.2"
author: "Matt Coddington (@mcodd)"
short_description: Send a message to a flowdock
description:
- Send a message to a flowdock team inbox or chat using the push API (see https://www.flowdock.com/api/team-inbox and https://www.flowdock.com/api/chat)
options:
token:
description:
- API token.
required: true
type:
description:
- Whether to post to 'inbox' or 'chat'
required: true
choices: [ "inbox", "chat" ]
msg:
description:
- Content of the message
required: true
tags:
description:
- tags of the message, separated by commas
required: false
external_user_name:
description:
- (chat only - required) Name of the "user" sending the message
required: false
from_address:
description:
- (inbox only - required) Email address of the message sender
required: false
source:
description:
- (inbox only - required) Human readable identifier of the application that uses the Flowdock API
required: false
subject:
description:
- (inbox only - required) Subject line of the message
required: false
from_name:
description:
- (inbox only) Name of the message sender
required: false
reply_to:
description:
- (inbox only) Email address for replies
required: false
project:
description:
- (inbox only) Human readable identifier for more detaile | d message categorization
required: false
link:
description:
- (inbox only) Link associated with the message. This will be used to link the message subject in Team Inbox | .
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
requirements: [ ]
'''
EXAMPLES = '''
- flowdock: type=inbox
token=AAAAAA
from_address=user@example.com
source='my cool app'
msg='test from ansible'
subject='test subject'
- flowdock: type=chat
token=AAAAAA
external_user_name=testuser
msg='test from ansible'
tags=tag1,tag2,tag3
'''
import urllib
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
token=dict(required=True),
msg=dict(required=True),
type=dict(required=True, choices=["inbox","chat"]),
external_user_name=dict(required=False),
from_address=dict(required=False),
source=dict(required=False),
subject=dict(required=False),
from_name=dict(required=False),
reply_to=dict(required=False),
project=dict(required=False),
tags=dict(required=False),
link=dict(required=False),
validate_certs = dict(default='yes', type='bool'),
),
supports_check_mode=True
)
type = module.params["type"]
token = module.params["token"]
if type == 'inbox':
url = "https://api.flowdock.com/v1/messages/team_inbox/%s" % (token)
else:
url = "https://api.flowdock.com/v1/messages/chat/%s" % (token)
params = {}
# required params
params['content'] = module.params["msg"]
# required params for the 'chat' type
if module.params['external_user_name']:
if type == 'inbox':
module.fail_json(msg="external_user_name is not valid for the 'inbox' type")
else:
params['external_user_name'] = module.params["external_user_name"]
elif type == 'chat':
module.fail_json(msg="%s is required for the 'inbox' type" % item)
# required params for the 'inbox' type
for item in [ 'from_address', 'source', 'subject' ]:
if module.params[item]:
if type == 'chat':
module.fail_json(msg="%s is not valid for the 'chat' type" % item)
else:
params[item] = module.params[item]
elif type == 'inbox':
module.fail_json(msg="%s is required for the 'inbox' type" % item)
# optional params
if module.params["tags"]:
params['tags'] = module.params["tags"]
# optional params for the 'inbox' type
for item in [ 'from_name', 'reply_to', 'project', 'link' ]:
if module.params[item]:
if type == 'chat':
module.fail_json(msg="%s is not valid for the 'chat' type" % item)
else:
params[item] = module.params[item]
# If we're in check mode, just exit pretending like we succeeded
if module.check_mode:
module.exit_json(changed=False)
# Send the data to Flowdock
data = urllib.urlencode(params)
response, info = fetch_url(module, url, data=data)
if info['status'] != 200:
module.fail_json(msg="unable to send msg: %s" % info['msg'])
module.exit_json(changed=True, msg=module.params["msg"])
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
|
cortedeltimo/SickRage | sickbeard/notifiers/slack.py | Python | gpl-3.0 | 3,220 | 0.002174 | # coding=utf-8
# Author: Patrick Begley<forge33@gmail.com>
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import json
import requests
import six
import sickbeard
from sickbeard import common, logger
from sickrage.helper.exceptions import ex
class Notifier(object):
SLACK_WEBHOOK_URL = 'https://hooks.slack.com/services/'
def notify_snatch(self, ep_name):
if sickbeard.SLACK_NOTIFY_SNATCH:
self._notify_slack(common.notifyStrings[common | .NOTIFY_SNATCH] + ': ' + ep_name)
def notify_download(self, ep_name):
| if sickbeard.SLACK_NOTIFY_DOWNLOAD:
self._notify_slack(common.notifyStrings[common.NOTIFY_DOWNLOAD] + ': ' + ep_name)
def notify_subtitle_download(self, ep_name, lang):
if sickbeard.SLACK_NOTIFY_SUBTITLEDOWNLOAD:
self._notify_slack(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] + ' ' + ep_name + ": " + lang)
def notify_git_update(self, new_version="??"):
if sickbeard.USE_SLACK:
update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]
title = common.notifyStrings[common.NOTIFY_GIT_UPDATE]
self._notify_slack(title + " - " + update_text + new_version)
def notify_login(self, ipaddress=""):
if sickbeard.USE_SLACK:
update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT]
title = common.notifyStrings[common.NOTIFY_LOGIN]
self._notify_slack(title + " - " + update_text.format(ipaddress))
def test_notify(self):
return self._notify_slack("This is a test notification from SickRage", force=True)
def _send_slack(self, message=None):
slack_webhook = self.SLACK_WEBHOOK_URL + sickbeard.SLACK_WEBHOOK.replace(self.SLACK_WEBHOOK_URL, '')
logger.log("Sending slack message: " + message, logger.INFO)
logger.log("Sending slack message to url: " + slack_webhook, logger.INFO)
if isinstance(message, six.text_type):
message = message.encode('utf-8')
headers = {b"Content-Type": b"application/json"}
try:
r = requests.post(slack_webhook, data=json.dumps(dict(text=message, username="SickRageBot")), headers=headers)
r.raise_for_status()
except Exception as e:
logger.log("Error Sending Slack message: " + ex(e), logger.ERROR)
return False
return True
def _notify_slack(self, message='', force=False):
if not sickbeard.USE_SLACK and not force:
return False
return self._send_slack(message)
|
Kryz/sentry | tests/sentry/models/tests.py | Python | bsd-3-clause | 5,760 | 0.002604 | # coding: utf-8
from __future__ import absolute_import
import pytest
from datetime import timedelta
from django.core import mail
from django.core.urlresolvers import reverse
from django.db import connection
from django.utils import timezone
from exam import fixture
from sentry.db.models.fields.node import NodeData, NodeIntegrityFailure
from sentry.models import ProjectKey, Event, LostPasswordHash
from sentry.testutils import TestCase
from sentry.utils.compat import pickle
from sentry.utils.strings import compress
class ProjectKeyTest(TestCase):
def test_get_dsn(self):
key = ProjectKey(project_id=1, public_key='public', secret_key='secret')
with self.settings(SENTRY_URL_PREFIX='http://example.com'):
self.assertEquals(key.get_dsn(), 'http://public:secret@example.com/1')
def test_get_dsn_with_ssl(self):
key = ProjectKey(project_id=1, public_key='public', secret_key='secret')
with self.settings(SENTRY_URL_PREFIX='https://example.com'):
self.assertEquals(key.get_dsn(), 'https://public:secret@example.com/1')
def test_get_dsn_with_port(self):
key = ProjectKey(project_id=1, public_key='public', secret_key='secret')
with self.settings(SENTRY_URL_PREFIX='http://example.com:81'):
self.assertEquals(key.get_dsn(), 'http://public:secret@example.com:81/1')
def test_get_dsn_with_public_endpoint_setting(self):
key = ProjectKey(project_id=1, public_key='public', secret_key='secret')
with self.settings(SENTRY_PUBLIC_ENDPOINT='http://public_endpoint.com'):
self.assertEquals(key.get_dsn(public=True), 'http://public@public_endpoint.com/1')
def test_get_dsn_with_endpoint_setting(self):
key = ProjectKey(project_id=1, public_key='public', secret_key='secret')
with self.settings(SENTRY_ENDPOINT='http://endpoint.com'):
self.assertEquals(key.get_dsn(), 'http://public:secret@endpoint.com/1')
def test_key_is_created_for_project(self):
user = self.create_user('admin@example.com')
team = self.create_team(name='Test')
project = self.create_project(name | ='Test', team=team)
assert project.key_set.exists() is True
class LostPasswordTest(TestCase):
    """Tests the password-recovery email sent for a LostPasswordHash."""

    @fixture
    def password_hash(self):
        return LostPasswordHash.objects.create(
            user=self.user,
        )

    def test_send_recover_mail(self):
        with self.settings(SENTRY_URL_PREFIX='http://testserver'), self.tasks():
            self.password_hash.send_recover_mail()

        # NOTE(review): `mail` (presumably django.core.mail) is imported at
        # module level outside this fragment -- confirm.
        assert len(mail.outbox) == 1
        # BUG FIX: a stray " | " separator had corrupted `mail.outbox[0]`.
        msg = mail.outbox[0]
        assert msg.to == [self.user.email]
        assert msg.subject == '[Sentry] Password Recovery'
        url = 'http://testserver' + reverse('sentry-account-recover-confirm',
            args=[self.password_hash.user_id, self.password_hash.hash])
        assert url in msg.body
class GroupIsOverResolveAgeTest(TestCase):
    """Tests Group.is_over_resolve_age() against the project's
    'sentry:resolve_age' option (expressed in hours)."""

    def test_simple(self):
        group = self.group
        # Last seen two hours ago with a one-hour resolve age -> over age.
        # NOTE(review): `timedelta` is not imported in this fragment;
        # presumably `from datetime import timedelta` at module level.
        group.last_seen = timezone.now() - timedelta(hours=2)
        group.project.update_option('sentry:resolve_age', 1)  # 1 hour
        assert group.is_over_resolve_age() is True
        # A group seen just now must not be considered over the resolve age.
        group.last_seen = timezone.now()
        assert group.is_over_resolve_age() is False
class EventNodeStoreTest(TestCase):
    """Tests that Event.data is transparently backed by the node store
    (NodeData) and that ref integrity checks behave as expected."""

    def test_does_transition_data_to_node(self):
        group = self.group
        data = {'key': 'value'}

        # Insert a legacy row with pickled/compressed data directly via SQL
        # to simulate an event saved before node storage existed.
        query_bits = [
            "INSERT INTO sentry_message (group_id, project_id, data, message, datetime)",
            "VALUES(%s, %s, %s, %s, %s)",
        ]
        params = [group.id, group.project_id, compress(pickle.dumps(data)), 'test', timezone.now()]

        # This is pulled from SQLInsertCompiler
        if connection.features.can_return_id_from_insert:
            r_fmt, r_params = connection.ops.return_insert_id()

            if r_fmt:
                query_bits.append(r_fmt % Event._meta.pk.column)
                params += r_params

        cursor = connection.cursor()
        cursor.execute(' '.join(query_bits), params)

        if connection.features.can_return_id_from_insert:
            event_id = connection.ops.fetch_returned_insert_id(cursor)
        else:
            event_id = connection.ops.last_insert_id(
                cursor, Event._meta.db_table, Event._meta.pk.column)

        event = Event.objects.get(id=event_id)

        # Legacy data is wrapped as NodeData but has no node id yet.
        assert type(event.data) == NodeData
        assert event.data == data
        assert event.data.id is None

        # Saving migrates the payload into the node store.
        event.save()

        assert event.data == data
        assert event.data.id is not None

        node_id = event.data.id
        event = Event.objects.get(id=event_id)

        Event.objects.bind_nodes([event], 'data')

        assert event.data == data
        assert event.data.id == node_id

    def test_screams_bloody_murder_when_ref_fails(self):
        # Bind the node ref to the WRONG event so the integrity check trips.
        group1 = self.create_group()
        invalid_event = self.create_event(group=group1)
        group2 = self.create_group()
        event = self.create_event(group=group2)
        event.data.bind_ref(invalid_event)
        event.save()

        assert event.data.get_ref(event) == event.group.id
        assert event.data.get_ref(invalid_event) == invalid_event.group.id

        # NOTE(review): `pytest` is imported at module level outside
        # this fragment.
        with pytest.raises(NodeIntegrityFailure):
            Event.objects.bind_nodes([event], 'data')

    def test_accepts_valid_ref(self):
        invalid_event = self.create_event()
        event = self.create_event()
        event.data.bind_ref(event)
        event.save()

        Event.objects.bind_nodes([event], 'data')

        assert event.data.ref == event.group.id

    def test_basic_ref_binding(self):
        event = self.create_event()
        assert event.data.get_ref(event) == event.group.id
|
JFeaux/pedro | src/cards.py | Python | mit | 6,808 | 0.000147 | #!/usr/bin/python
import sys
from random import shuffle, seed
from itertools import product
class Card:
    """A single playing card with Pedro-specific sorting and trump rules."""

    FACES = {11: 'Jack', 12: 'Queen', 13: 'King', 14: 'Ace'}
    SUITS = {'Hearts': 1, 'Diamonds': 2, 'Spades': 3, 'Clubs': 4}
    COLORS = {'Hearts': 0, 'Diamonds': 0, 'Spades': 1, 'Clubs': 1}

    def __init__(self, rank, suit):
        self.suit = suit
        self.rank = rank
        self.suit_rank = self.SUITS[suit]
        if rank != 5:
            self.sort_rank = rank
        else:
            # Fives (pedros) sort to an extreme so they group at one end
            # of the hand: high for Hearts/Spades, low for the other color.
            self.sort_rank = 100 if self.suit_rank in (1, 3) else 0

    def __str__(self):
        face = self.FACES.get(self.rank, self.rank)
        return "{0} of {1}".format(face, self.suit)

    def __repr__(self):
        return str(self)

    def check_trump(self, trump_suit):
        """Return whether this card counts as trump for *trump_suit*.

        Fives are trump whenever their color matches the trump color;
        every other card must match the trump suit exactly.
        """
        if self.rank == 5:
            return self.COLORS[self.suit] == self.COLORS[trump_suit]
        return self.suit == trump_suit
class Deck:
    """A 52-card deck built from every rank/suit combination."""

    def __init__(self):
        ranks = range(2, 15)
        suits = 'Spades Diamonds Clubs Hearts'.split()
        self.cards = [Card(r, s) for s, r in product(suits, ranks)]

    def __str__(self):
        s = ''
        for i in range(len(self.cards)):
            s = s + ' ' * i + str(self.cards[i]) + '\n'
        return s

    def __repr__(self):
        # BUG FIX: the body was `pass`, so __repr__ returned None and
        # repr(deck) raised "TypeError: __repr__ returned non-string".
        return str(self)

    def shuffle(self):
        shuffle(self.cards)

    def deal(self, hand, num_cards=1):
        """Move *num_cards* cards from the top of the deck into *hand*."""
        for i in range(num_cards):
            hand.add(self.cards.pop())
class Hand:
    """An ordered collection of cards held by one player."""

    def __init__(self):
        self.cards = []

    def clear_hand(self):
        self.cards = []

    def discard(self, trump_suit):
        """Throw away every card that is not trump for *trump_suit*."""
        self.cards = [card for card in self.cards if card.check_trump(trump_suit)]

    def sort_hand(self):
        # Group by suit first, then by the pedro-aware sort rank.
        self.cards.sort(key=lambda card: (card.suit_rank, card.sort_rank))

    def play(self, card):
        """Remove and return the card at 1-based position *card*."""
        return self.cards.pop(card - 1)

    def add(self, card):
        self.cards.append(card)

    def __str__(self):
        lines = []
        for index, card in enumerate(self.cards, start=1):
            lines.append(' ' + str(index) + ':' + ' ' * index + str(card) + '\n')
        return ''.join(lines)

    def __repr__(self):
        return str(self)
class Pedro_game:
def __init__(self, players):
self.players = players
self.trump_suit = None
def deal_round(self, first_bidder):
self.deck = Deck()
self.deck.shuffle()
order = [i for i in range(first_bidder, 4)] + \
[i for i in range(first_bidder)]
for player in self.players:
player.clear_hand()
for i in range(3):
for j in order:
self.deck.deal(self.players[j], 3)
for i in order:
self.players[i].sort_hand()
def bidding(self, first_bidder):
current_ | bid = 5
winning_bidder = -1
order = [i for i in range(first_bidder, 4)] + \
[i for i in range(first_bidder)]
for i, j in enumerate(order):
print self.players[j]
if current_bid < 14:
bid = int(raw_input('Bid?\n'))
| if bid > current_bid:
current_bid = bid
winning_bidder = j
else:
bid = int(raw_input('Bid?\n'))
if bid == 14 and i == 3:
current_bid = bid
winning_bidder = j
print current_bid
print winning_bidder
self.winning_bidder = winning_bidder
print self.players[winning_bidder]
self.trump_suit = raw_input('Trump suit?\n')
def second_deal(self, first_bidder):
order = [i for i in range(first_bidder, 4)] + \
[i for i in range(first_bidder)]
for i, j in enumerate(order):
self.players[j].discard(self.trump_suit)
take = 6 - len(self.players[j].cards)
if take > 0:
self.deck.deal(self.players[j], take)
self.players[j].sort_hand()
def play_trick(self, lead):
trick = Trick(self.trump_suit)
order = [i for i in range(lead, 4)] + [i for i in range(lead)]
for i, j in enumerate(order):
print self.players[j]
card_number = int(raw_input('Play Card?\n'))
card = self.players[j].play(card_number)
trick.add(card)
print trick
class Trick:
    """The pile of cards played in a single trick."""

    def __init__(self, trump_suit, lead_card=None):
        # BUG FIX: lead_card was required, but Pedro_game.play_trick
        # constructs Trick(self.trump_suit) with no lead card, which
        # raised TypeError. A default of None opens an empty trick while
        # remaining backward compatible with two-argument callers.
        self.cards = [] if lead_card is None else [lead_card]
        self.trump_suit = trump_suit

    def add(self, card):
        self.cards.append(card)

    def __str__(self):
        s = ''
        for i in range(len(self.cards)):
            s = s + ' ' + str(i + 1) + ':' + ' ' * (i + 1) + \
                str(self.cards[i]) + '\n'
        return s

    def __repr__(self):
        return str(self)
class Pedro_Player(object):
    """A player that bids and plays via console prompts."""

    def __init__(self, name):
        self.name = name
        self.hand = Hand()

    def bid(self, min_bid):
        """Prompt until a bid strictly greater than *min_bid* is entered.

        Returns the accepted bid, or False when bidding is already closed
        (min_bid > 14).
        """
        if min_bid > 14:
            return False
        else:
            if min_bid > 5:
                # BUG FIX: the original built the prompt with
                # 'Current Bid: ' + min_bid - 1, which raises TypeError
                # (str + int); the integer must be converted first.
                ask = 'Current Bid: ' + str(min_bid - 1) + '\n'
            else:
                ask = 'Minimum Bid: 6\n'
            ask += ' ' + self.name + ': Bid?\n'
            invalid_bid = True
            while invalid_bid:
                try:
                    bid = int(raw_input(ask))
                    # NOTE(review): 14 is rejected here although the dealer
                    # may bid 14 in Pedro_game.bidding -- confirm intent.
                    if bid > min_bid and bid < 14:
                        return bid
                    else:
                        msg = 'Must be greater than ' + str(min_bid)
                        msg += ' and less than 14'
                        print(msg)
                except ValueError:
                    print('Please insert integer')

    def discard(self, trump):
        # Placeholder: discarding is handled by Hand.discard for now.
        pass

    def play_card(self, card):
        # Placeholder: card play is driven by Pedro_game.play_trick.
        pass
class Dealer(Pedro_Player):
    def __init__(self, pedro_player):
        """
        Dealer initialized
        by a Pedro_Player instance
        """
        self.name = pedro_player.name
        self.hand = pedro_player.hand

    def bid(self, current_bid):
        # NOTE(review): `bid` is undefined in this scope, so calling this
        # method raises NameError. The intended dealer bidding logic
        # (match/pass on current_bid?) is unclear -- TODO confirm.
        return bid
# Ad-hoc smoke test: prompt for one bid, then exit before the demo below.
jacob = Pedro_Player('Jacob')
jacob.bid(5)

sys.exit()

# to do dealer rotation
# Dealer-rotation demo (unreachable because of sys.exit() above): the
# last player becomes dealer and moves to the front of the order.
players = ['a', 'b', 'c', 'd']
print players
dealer = players.pop()
players.insert(0, dealer)
print players

# Fixed seed so shuffles are reproducible while debugging.
seed(1)

# initialize the players
# Jacob=Hand('Jacob')
# Brigette=Hand('Brigette')
# David=Hand('David')
# Richard=Hand('Richard')
#
# players=[Jacob,Brigette,David,Richard]
# game=Pedro_game(players)
# game.deal_round(0)
# game.bidding(0)
# game.second_deal(0)
# game.play_trick(game.winning_bidder)
#
|
apoelstra/coinjoin | generate-tx.py | Python | cc0-1.0 | 3,413 | 0.028714 |
# Note - to use this script you need Jeff Garzik's python-bitcoinrpc
# https://github.com/jgarzik/python-bitcoinrpc
import os
import sys;
import json;
from bitcoinrpc.authproxy import AuthServiceProxy;
# SET THESE VALUES
# Credentials and endpoint for the local bitcoind JSON-RPC server.
rpc_user = "bitcoinrpc"
rpc_pass = "A7Xr149i7F6GxkhDbxWDTbmXooz1UZGhhyUYvaajA13Z"
rpc_host = "localhost"
rpc_port = 8332
# Donation policy: a flat minimum plus a per-input amount, in satoshis.
donation_minimum = 0
donation_per_input = 3000
# BUG FIX: a stray " | " separator had corrupted this address string.
donation_address = "1ForFeesAndDonationsSpendHerdtWbWy"
# http://stackoverflow.com/questions/626796/how-do-i-find-the-windows-common-application-data-folder-using-python
# Locate bitcoin.conf: the Windows APPDATA folder when win32com is
# available, otherwise the ~/.bitcoin default (Linux/macOS).
try:
    from win32com.shell import shellcon, shell
    config_file = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0) + "/Bitcoin/bitcoin.conf"
except ImportError: # quick semi-nasty fallback for non-windows/win32com case
    config_file = os.path.expanduser("~") + "/.bitcoin/bitcoin.conf"
# thanks ryan-c for this function
def asp_from_config(filename):
    """Build an AuthServiceProxy from a bitcoin.conf-style file.

    Parses rpcuser/rpcpassword/rpcport/rpcconnect key=value lines from
    *filename*. Returns a proxy when credentials were found; otherwise
    falls through and returns None.
    """
    rpcport = '8332'
    rpcconn = '127.0.0.1'
    rpcuser = None
    rpcpass = None
    with open(filename, 'r') as f:
        for line in f:
            try:
                (key, val) = line.rstrip().replace(' ', '').split('=')
            except ValueError:
                # Blank lines, comments, or lines with several '=' don't
                # unpack into (key, val); skip them.
                # BUG FIX: this was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit. (The redundant f.close()
                # inside the `with` block was dropped as well.)
                (key, val) = ("", "")
            if key == 'rpcuser':
                rpcuser = val
            elif key == 'rpcpassword':
                rpcpass = val
            elif key == 'rpcport':
                rpcport = val
            elif key == 'rpcconnect':
                rpcconn = val
    if rpcuser is not None and rpcpass is not None:
        rpcurl = 'http://%s:%s@%s:%s' % (rpcuser, rpcpass, rpcconn, rpcport)
        print('RPC server: %s' % rpcurl)
        return AuthServiceProxy(rpcurl)
def to_satoshi(s):
    """Convert a BTC amount (string or number) to an integer satoshi count.

    BUG FIX: rounds instead of truncating. int() truncation was off by
    one satoshi for amounts like '1.115', because
    float('1.115') * 1e8 == 111499999.99999999.
    """
    return int(round(100000000 * float(s)))
def from_satoshi(s):
    """Convert an integer satoshi count back to a float BTC amount."""
    return float(s) / 100000000
# Main script (Python 2): split `target_in` BTC of wallet coins into
# equal `target_out`-sized outputs plus change, and print the raw tx.
if len(sys.argv) < 3:
    print ("Usage: %s <input size> <target output size in BTC>" % sys.argv[0]);
    exit (0);

#service = AuthServiceProxy ("http://%s:%s@%s:%d" % (rpc_user, rpc_pass, rpc_host, rpc_port));
service = asp_from_config (config_file);

balance = to_satoshi (service.getbalance());
unspent = service.listunspent();
target_in = to_satoshi (sys.argv[1]);
target_out = to_satoshi (sys.argv[2]);

# Sanity checks: enough funds, and outputs no larger than the input size.
if balance < target_in:
    print ("Cannot spend %f; only have %f in wallet." % (from_satoshi (target_in), from_satoshi (balance)));
    exit (0);
if target_out > target_in:
    print ("Please have a smaller target output than input value.");
    exit (0);

# FIND INPUTS
# TODO: have a smarter coin selection algo
# For now we just sort the coins by increasing abs(value - target output), then select in order
# NOTE(review): the comment above says "target output" but the sort key
# actually compares against target_in -- confirm which was intended.
inputs = [];
donation = 0;
total_in = 0;
unspent.sort (key=lambda coin: abs(to_satoshi (coin['amount']) - target_in));
for coin in unspent:
    total_in += to_satoshi (coin['amount']);
    donation += donation_per_input;
    inputs.append (dict (txid = coin['txid'], vout = coin['vout']));
    if total_in > target_in:
        break;
if donation < donation_minimum:
    donation = donation_minimum;

# FIND OUTPUTS
# One donation output, then equal target_out chunks to fresh addresses,
# then the remainder (< target_out) as a final change output.
outputs = dict ();
outputs[donation_address] = from_satoshi (donation);
total_in -= donation;
while total_in > target_out:
    outputs[service.getnewaddress()] = from_satoshi (target_out);
    total_in -= target_out;
outputs[service.getnewaddress()] = from_satoshi (total_in);

# Make the transaction
print service.createrawtransaction (inputs, outputs);
|
cortext/crawtextV2 | ~/venvs/crawler/lib/python2.7/site-packages/nltk/stem/wordnet.py | Python | mit | 1,263 | 0.000792 | # Natural Language Toolkit: WordNet stemmer interface
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Steven Bird <sb@csse.unimelb.edu.au>
# Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
from nltk.corpus.reader.wordnet import NOUN
from nltk.corpus import wordnet
class WordNetLemmatizer(object):
    """
    WordNet Lemmatizer

    Lemmatize using WordNet's built-in morphy function.
    Returns the input word unchanged if it cannot be found in WordNet.

    >>> from nltk.stem import WordNetLemmatizer
    >>> wnl = WordNetLemmatizer()
    >>> wnl.lemmatize('dogs')
    'dog'
    >>> wnl.lemmatize('churches')
    'church'
    >>> wnl.lemmatize('aardwolves')
    'aardwolf'
    >>> wnl.lemmatize('abaci')
    'abacus'
    >>> wnl.lemmatize('hardrock')
    'hardrock'
    """

    def __init__(self):
        pass

    def lemmatize(self, word, pos=NOUN):
        # morphy returns every candidate lemma for the word; prefer the
        # shortest and fall back to the raw word when none is known.
        # BUG FIX: a stray " | " separator had corrupted the call
        # (`wordnet._morphy | (word, pos)`).
        lemmas = wordnet._morphy(word, pos)
        return min(lemmas, key=len) if lemmas else word

    def __repr__(self):
        # BUG FIX: a stray " | " separator had corrupted this literal
        # ('<Wo | rdNetLemmatizer>').
        return '<WordNetLemmatizer>'
if __name__ == "__main__":
    # Run the doctests embedded in WordNetLemmatizer's docstring.
    import doctest
    doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
|
oskopek/CryptoIM | tests/unit/common.py | Python | apache-2.0 | 1,286 | 0.005443 | #!/usr/bin/env python
# encoding: utf-8
"""
Copyright 2014 CryptoIM Development Team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import random, string
def random_string_range(lo, hi):
    """Return a random string of string.printable characters whose
    length is drawn uniformly from randint(lo, hi)."""
    size = random.randint(lo, hi)
    chars = [random.choice(string.printable) for _ in range(size)]
    return ''.join(chars)
def random_string_length(length):
    """
    Returns a random string of string.printable characters, of the given length
    """
    # (Docstring repaired: a stray " | " separator had garbled it.)
    return random_string_range(length, length)
return random_string_range(length, length)
def random_string_limit(limit):
    """Return a random string of string.printable characters with length
    drawn from randint(1, limit)."""
    return random_string_range(1, limit)
|
pauldamian/-casa | acasa/things/dim.py | Python | apache-2.0 | 1,015 | 0.008867 | import RPi.GPIO as gp
from time import sleep
from utili | ty import util
import constants
"""
Pin Mapping:
Pin 4 - VCC 5V
Pin 6 - GND
Pin 7 - SYNC - PWM
Pin 11 - GATE - Digital
"""
gm = util.get_sensor_attribute_value(constants.DIMMER, constants.SENSOR_PIN)
dimming = 100
AC_LOAD = g | m['gate']
SYNC = gm['sync']
gp.setwarnings(False)
def _zero_cross_int(arg):
    """Zero-cross interrupt handler.

    Presumably implements AC phase dimming: after each zero crossing the
    gate is held low for `dimming * 0.1 ms`, then pulsed high briefly --
    TODO confirm against the dimmer hardware.
    """
    global dimming
    idle_time = dimming * 0.0001
    if idle_time == 0:
        # Fully on: keep the gate high for the whole half-cycle.
        gp.output(AC_LOAD, True)
        return
    elif idle_time == 0.01:
        # NOTE(review): float equality -- 100 * 0.0001 evaluates to
        # 0.010000000000000002 != 0.01, so this "fully off" branch is
        # likely never taken; consider testing `dimming == 100` instead.
        gp.output(AC_LOAD, False)
        return
    else:
        gp.output(AC_LOAD, False)
        sleep(idle_time)
        # Short high pulse, then low again until the next zero cross.
        gp.output(AC_LOAD, True)
        sleep(0.00001)
        gp.output(AC_LOAD, False)
def set_dim_level(percent):
    """Set brightness to *percent* (0 = off, 100 = full on)."""
    global dimming
    # The interrupt handler counts idle time, so store the complement.
    level = 100 - percent
    dimming = level
# NOTE(review): FREQ is unused in this fragment (mains frequency?).
FREQ = 50 # Hz
# Board-numbered pins: AC_LOAD drives the dimmer gate; SYNC receives the
# zero-cross signal (pulled up, rising edge fires _zero_cross_int).
gp.setmode(gp.BOARD)
gp.setup(AC_LOAD, gp.OUT)
gp.setup(SYNC, gp.IN, pull_up_down=gp.PUD_UP)
gp.add_event_detect(SYNC, gp.RISING, callback=_zero_cross_int)
|
caoxudong/code_practice | leetcode/59_spiral_matrix_ii.py | Python | mit | 1,960 | 0.005102 | #!/usr/bin/env python
#coding: utf-8
"""
https://leetcode.com/problems/spiral-matrix-ii/

Given an integer n, generate a square matrix filled with elements from 1 to n^2 in spiral order.

For example,
Given n = 3,
You should return the following matrix:
[
[ 1, 2, 3 ],
[ 8, 9, 4 ],
[ 7, 6, 5 ]
]
"""
class Solution(object):
    """Spiral Matrix II: fill an n x n grid with 1..n*n in spiral order."""

    def generateMatrix(self, n):
        """
        :type n: int
        :rtype: List[List[int]]
        """
        grid = [[0] * n for _ in range(n)]
        # Clockwise headings: right, down, left, up.
        deltas = [(0, 1), (1, 0), (0, -1), (-1, 0)]
        heading = 0
        row = col = 0
        for value in range(1, n * n + 1):
            grid[row][col] = value
            dr, dc = deltas[heading]
            nxt_r, nxt_c = row + dr, col + dc
            # Turn clockwise when the next cell is off the grid or
            # already filled.
            if not (0 <= nxt_r < n and 0 <= nxt_c < n) or grid[nxt_r][nxt_c] != 0:
                heading = (heading + 1) % 4
                dr, dc = deltas[heading]
                nxt_r, nxt_c = row + dr, col + dc
            row, col = nxt_r, nxt_c
        return grid
# Ad-hoc smoke test: print the 4x4 spiral when the module is run.
solution = Solution()
print(solution.generateMatrix(4))
|
show0k/pypot | pypot/robot/config.py | Python | gpl-3.0 | 12,375 | 0.001697 | """
The config module allows the definition of the structure of your robot.
Configuration are written as Python dictionary so you can define/modify them programmatically. You can also import them form file such as JSON formatted file. In the configuration you have to define:
* controllers: For each defined controller, you can specify the port name, the attached motors and the synchronization mode.
* motors: You specify all motors belonging to your robot. You have to define their id, type, orientation, offset and angle_limit.
* motorgroups: It allows to define alias of group of motors. They can be nested.
"""
import logging
import numpy
import time
import json
from collections import OrderedDict
import pypot.sensor
import pypot.dynamixel
import pypot.dynamixel.io
import pypot.dynamixel.error
import pypot.dynamixel.motor
import pypot.dynamixel.syncloop
from .robot import Robot
from .controller import DummyController
# This logger should always provides the config as extra
logger = logging.getLogger(__name__)
def from_config(config, strict=True, sync=True, use_dummy_io=False, **extra):
    """ Returns a :class:`~pypot.robot.robot.Robot` instance created from a configuration dictionary.

    :param dict config: robot configuration dictionary
    :param bool strict: make sure that all ports, motors are available.
    :param bool sync: choose if automatically starts the synchronization loops
    :param bool use_dummy_io: replace serial controllers/sensors with dummies (no hardware)
    :param extra: per-sensor overrides; a value of 'dummy' swaps in a dummy sensor type

    For details on how to write such a configuration dictionary, you should refer to the section :ref:`config_file`.

    """
    logger.info('Loading config... ', extra={'config': config})

    alias = config['motorgroups']

    # Instantiate the different motor controllers
    controllers = []
    for c_name, c_params in config['controllers'].items():
        # Expand motor-group aliases into the flat list of motor names.
        motor_names = sum([_motor_extractor(alias, name)
                           for name in c_params['attached_motors']], [])

        attached_motors = [motor_from_confignode(config, name)
                           for name in motor_names]

        # at least one of the motor is set as broken
        # -> relax strictness so the bus scan does not abort.
        if [m for m in attached_motors if m._broken]:
            strict = False

        attached_ids = [m.id for m in attached_motors]
        if not use_dummy_io:
            dxl_io = dxl_io_from_confignode(config, c_params, attached_ids, strict)

            check_motor_eprom_configuration(config, dxl_io, motor_names)

            logger.info('Instantiating controller on %s with motors %s',
                        dxl_io.port, motor_names,
                        extra={'config': config})

            # Controller class defaults to BaseDxlController unless the
            # config names a specific syncloop implementation.
            syncloop = (c_params['syncloop'] if 'syncloop' in c_params
                        else 'BaseDxlController')
            SyncLoopCls = getattr(pypot.dynamixel.syncloop, syncloop)

            c = SyncLoopCls(dxl_io, attached_motors)
            controllers.append(c)
        else:
            controllers.append(DummyController(attached_motors))

    try:
        robot = Robot(motor_controllers=controllers, sync=sync)
    except RuntimeError:
        # Robot construction failed: release every serial port we opened.
        for c in controllers:
            c.io.close()
        raise

    make_alias(config, robot)

    # Create all sensors and attach them to the robot.
    try:
        if 'sensors' in config and not use_dummy_io:
            sensors = []
            for s_name in config['sensors'].keys():
                if s_name in extra and extra[s_name] == 'dummy':
                    config['sensors'][s_name]['type'] = 'Dummy{}'.format(s_name.capitalize())
                sensor = sensor_from_confignode(config, s_name, robot)
                setattr(robot, s_name, sensor)
                sensors.append(sensor)
                robot.sensors.append(sensor)

            # Start every sensor that supports it (side-effect-only comprehension).
            [s.start() for s in sensors if hasattr(s, 'start')]

    # If anything goes wrong when adding sensors
    # We have to make sure we close the robot properly
    # Otherwise trying to open it again will fail.
    # NOTE(review): bare `except:` also catches KeyboardInterrupt;
    # the error is re-raised so only the cleanup is affected.
    except:
        robot.close()
        raise

    logger.info('Loading complete!',
                extra={'config': config})

    return robot
def motor_from_confignode(config, motor_name):
    """Instantiate the appropriate Dxl*Motor subclass for *motor_name*.

    The motor parameters (id, type, orientation, offset, angle_limit and
    the optional broken/wheel_mode flags) are read from config['motors'].

    :raises ValueError: if the motor type matches no known family.
    """
    params = config['motors'][motor_name]
    # Renamed from `type`, which shadowed the builtin.
    motor_type = params['type']

    if motor_type == 'XL-320':
        MotorCls = pypot.dynamixel.motor.DxlXL320Motor
    elif motor_type == 'MX-64' or motor_type == 'MX-106':
        MotorCls = pypot.dynamixel.motor.DxlMX64106Motor
    elif motor_type.startswith('MX'):
        MotorCls = pypot.dynamixel.motor.DxlMXMotor
    elif motor_type.startswith('AX') or motor_type.startswith('RX'):
        MotorCls = pypot.dynamixel.motor.DxlAXRXMotor
    elif motor_type.startswith('SR'):
        MotorCls = pypot.dynamixel.motor.DxlSRMotor
    else:
        # BUG FIX: an unknown type previously fell through and raised a
        # confusing NameError on MotorCls; fail early with a clear message.
        raise ValueError('Unknown motor type {!r} for motor {!r}'.format(
            motor_type, motor_name))

    broken = 'broken' in params and params['broken']

    # Wheel (continuous-rotation) mode is encoded as a (0, 0) angle limit.
    if 'wheel_mode' in params and params['wheel_mode']:
        params['angle_limit'] = (0, 0)

    m = MotorCls(id=params['id'],
                 name=motor_name,
                 model=motor_type,
                 direct=True if params['orientation'] == 'direct' else False,
                 offset=params['offset'],
                 broken=broken,
                 angle_limit=params['angle_limit'])

    logger.info("Instantiating motor '%s' id=%d direct=%s offset=%s",
                m.name, m.id, m.direct, m.offset,
                extra={'config': config})

    return m
def sensor_from_confignode(config, s_name, robot):
    """Build the sensor named *s_name* from its entry in config['sensors']."""
    params = config['sensors'][s_name]
    cls_name = params.pop("type")
    # Some sensors need a handle on the robot itself; the flag is popped
    # so it is not forwarded as a constructor argument.
    if params.pop('need_robot', False):
        params['robot'] = robot
    sensor_cls = getattr(pypot.sensor, cls_name)
    return sensor_cls(name=s_name, **params)
def dxl_io_from_confignode(config, c_params, ids, strict):
    """Open the Dynamixel bus described by *c_params* and scan it for *ids*.

    When strict, raises DxlError (after closing the port) if any expected
    motor id is missing from the bus.
    """
    port = c_params['port']

    # 'auto' lets pypot probe serial ports until it finds the motors.
    if port == 'auto':
        port = pypot.dynamixel.find_port(ids, strict)
        logger.info('Found port {} for ids {}'.format(port, ids))

    sync_read = c_params['sync_read']

    if sync_read == 'auto':
        # USB Vendor Product ID "VID:PID=0403:6001" for USB2Dynamixel
        # USB Vendor Product ID "VID:PID=16d0:06a7" for USBAX
        vendor_pid = pypot.dynamixel.get_port_vendor_info(port)
        sync_read = ('PID=0403:6001' in vendor_pid and c_params['protocol'] == 2 or
                     'PID=16d0:06a7' in vendor_pid)
        logger.info('sync_read is {}. Vendor pid = {}'.format(sync_read, vendor_pid))

    handler = pypot.dynamixel.error.BaseErrorHandler

    # Protocol 2 devices (e.g. XL-320) use the dedicated Dxl320IO class.
    DxlIOCls = (pypot.dynamixel.io.Dxl320IO
                if 'protocol' in c_params and c_params['protocol'] == 2
                else pypot.dynamixel.io.DxlIO)

    dxl_io = DxlIOCls(port=port,
                      use_sync_read=sync_read,
                      error_handler_cls=handler)

    try:
        found_ids = dxl_io.scan(ids)
    except pypot.dynamixel.io.DxlError:
        # Scan failure: close the port and treat every motor as missing.
        dxl_io.close()
        found_ids = []

    if ids != found_ids:
        missing_ids = tuple(set(ids) - set(found_ids))
        msg = 'Could not find the motors {} on bus {}.'.format(missing_ids,
                                                               dxl_io.port)
        logger.warning(msg)

        if strict:
            dxl_io.close()
            raise pypot.dynamixel.io.DxlError(msg)

    return dxl_io
def check_motor_eprom_configuration(config, dxl_io, motor_names):
""" Change the angles limits depanding on the robot configuration ;
Check if the return delay time is set to 0.
"""
changed_angle_limits = {}
changed_return_delay_time = {}
for name in motor_names:
m = config['motors'][name]
id = m['id']
try:
old_limits = dxl_io.get_angle_limit((id, ))[0]
old_return_delay_time = dxl_io.get_return_delay_time((id, ))[0]
except IndexError: # probably a broken motor so we just skip
continue
if old_return_delay_time != 0:
logger.warning("Return delay time of %s changed from %s to 0",
name, old_return_delay_time)
changed_return_delay_time[id] = 0
new_limits = m['angle_limit']
if 'wheel_mode' in m and m['wheel_mode']:
dxl_io.set_wheel_mode([m['id']])
time.sleep(0.5)
else:
dxl_io.set_joint_mode([m['id'] |
encukou/freeipa | ipaserver/install/krainstance.py | Python | gpl-3.0 | 11,676 | 0 | # Authors: Ade Lee <alee@redhat.com>
#
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import logging
import os
import pwd
import shutil
import tempfile
import base64
from ipalib import api
from ipalib import x509
from ipaplatform.paths import paths
from ipapython import directivesetter
from ipapython import ipautil
from ipapython.dn import DN
from ipaserver.install import cainstance
from ipaserver.install import installutils
from ipaserver.install import ldapupdate
from ipaserver.install.dogtaginstance import DogtagInstance
from ipaserver.plugins import ldap2
logger = logging.getLogger(__name__)
# When IPA is installed with DNS support, this CNAME should hold all IPA
# replicas with KRA configured
IPA_KRA_RECORD = "ipa-kra"

# Admin groups granted to the (temporary) installation admin user.
ADMIN_GROUPS = [
    'Enterprise CA Administrators',
    'Enterprise KRA Administrators',
    'Security Domain Administrators'
]

# LDAP locations of the KRA subtree and its agent (ipakra) entry.
KRA_BASEDN = DN(('o', 'kra'), ('o', 'ipaca'))
KRA_AGENT_DN = DN(('uid', 'ipakra'), ('ou', 'people'), KRA_BASEDN)
class KRAInstance(DogtagInstance):
"""
We assume that the CA has already been installed, and we use the
same tomcat instance to host both the CA and KRA.
The mod_nss database will contain the RA agent cert that will be used
to do authenticated requests against dogtag. The RA agent cert will
be the same for both the CA and KRA.
"""
# Mapping of nicknames for tracking requests, and the profile to
# use for that certificate. 'configure_renewal()' reads this
# dict. The profile MUST be specified.
tracking_reqs = {
'auditSigningCert cert-pki-kra': 'caInternalAuthAuditSigningCert',
'transportCert cert-pki-kra': 'caInternalAuthTransportCert',
'storageCert cert-pki-kra': 'caInternalAuthDRMstorageCert',
}
    def __init__(self, realm):
        """Create a KRA handler bound to *realm*.

        The KRA shares the CA's tomcat instance (see the class docstring);
        only the subsystem name, description and config path differ.
        """
        super(KRAInstance, self).__init__(
            realm=realm,
            subsystem="KRA",
            service_desc="KRA server",
            config=paths.KRA_CS_CFG_PATH,
        )
def configure_instance(self, realm_name, host_name, dm_password,
admin_password, pkcs12_info=None, master_host=None,
subject_base=None, ca_subject=None,
| promote=False, pki_config_override=None):
"""Create a KRA instance.
To create a clone, pass in pkcs12_info.
"""
self.fqdn = host_name
self.dm_password = dm_password
self.admin_groups = ADMIN_GROUPS
self.admin_password = admin_password
self.pkcs12_info = pkcs12_info
if self.pkcs12_info is not None or promote:
| self.clone = True
self.master_host = master_host
self.pki_config_override = pki_config_override
self.subject_base = \
subject_base or installutils.default_subject_base(realm_name)
# eagerly convert to DN to ensure validity
self.ca_subject = DN(ca_subject)
self.realm = realm_name
self.suffix = ipautil.realm_to_suffix(realm_name)
# Confirm that a KRA does not already exist
if self.is_installed():
raise RuntimeError(
"KRA already installed.")
# Confirm that a Dogtag 10 CA instance already exists
ca = cainstance.CAInstance(self.realm)
if not ca.is_installed():
raise RuntimeError(
"KRA configuration failed. "
"A Dogtag CA must be installed first")
if promote:
self.step("creating ACIs for admin", self.add_ipaca_aci)
self.step("creating installation admin user", self.setup_admin)
self.step("configuring KRA instance", self.__spawn_instance)
if not self.clone:
self.step("create KRA agent",
self.__create_kra_agent)
if promote:
self.step("destroying installation admin user",
self.teardown_admin)
self.step("enabling ephemeral requests", self.enable_ephemeral)
self.step("restarting KRA", self.restart_instance)
self.step("configure certmonger for renewals",
self.configure_certmonger_renewal_helpers)
self.step("configure certificate renewals", self.configure_renewal)
if not self.clone:
self.step("add vault container", self.__add_vault_container)
self.step("apply LDAP updates", self.__apply_updates)
self.step("enabling KRA instance", self.__enable_instance)
try:
self.start_creation(runtime=120)
finally:
self.clean_pkispawn_files()
def __spawn_instance(self):
"""
Create and configure a new KRA instance using pkispawn.
Creates a configuration file with IPA-specific
parameters and passes it to the base class to call pkispawn
"""
self.tmp_agent_db = tempfile.mkdtemp(
prefix="tmp-", dir=paths.VAR_LIB_IPA)
tmp_agent_pwd = ipautil.ipa_generate_password()
# Create a temporary file for the admin PKCS #12 file
(admin_p12_fd, admin_p12_file) = tempfile.mkstemp()
os.close(admin_p12_fd)
cfg = dict(
pki_issuing_ca_uri="https://{}".format(
ipautil.format_netloc(self.fqdn, 443)),
# Client security database
pki_client_database_dir=self.tmp_agent_db,
pki_client_database_password=tmp_agent_pwd,
pki_client_database_purge=True,
pki_client_pkcs12_password=self.admin_password,
pki_import_admin_cert=False,
pki_client_admin_cert_p12=admin_p12_file,
)
if not (os.path.isdir(paths.PKI_TOMCAT_ALIAS_DIR) and
os.path.isfile(paths.PKI_TOMCAT_PASSWORD_CONF)):
# generate pin which we know can be used for FIPS NSS database
pki_pin = ipautil.ipa_generate_password()
cfg['pki_server_database_password'] = pki_pin
else:
pki_pin = None
_p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)
if self.clone:
krafile = self.pkcs12_info[0]
shutil.copy(krafile, p12_tmpfile_name)
pent = pwd.getpwnam(self.service_user)
os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)
self._configure_clone(
cfg,
security_domain_hostname=self.fqdn,
clone_pkcs12_path=p12_tmpfile_name,
)
cfg.update(
pki_clone_setup_replication=False,
)
else:
# the admin cert file is needed for the first instance of KRA
cert = self.get_admin_cert()
# First make sure that the directory exists
parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
if not os.path.exists(parentdir):
os.makedirs(parentdir)
with open(paths.ADMIN_CERT_PATH, "wb") as admin_path:
admin_path.write(
base64.b64encode(cert.public_bytes(x509.Encoding.DER))
)
# Generate configuration file
pent = pwd.getpwnam(self.service_user)
config = self._create_spawn_config(cfg)
with tempfile.NamedTemporaryFile('w', delete=False) as f:
config.write(f)
os.fchown(f.fileno(), pent.pw_uid, pent.pw_gid)
cfg_file = f.name
nolog_list = [
self.d |
liveaverage/baruwa | src/baruwa/status/migrations/0001_initial.py | Python | gpl-2.0 | 2,911 | 0.007901 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration creating the `mailq` table for MailQueueItem."""

    def forwards(self, orm):
        # Adding model 'MailQueueItem'
        db.create_table(u'mailq', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('messageid', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('timestamp', self.gf('django.db.models.fields.DateTimeField')()),
            ('from_address', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, blank=True)),
            ('to_address', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('subject', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('hostname', self.gf('django.db.models.fields.TextField')()),
            ('size', self.gf('django.db.models.fields.IntegerField')()),
            ('attempts', self.gf('django.db.models.fields.IntegerField')()),
            ('lastattempt', self.gf('django.db.models.fields.DateTimeField')()),
            ('direction', self.gf('django.db.models.fields.IntegerField')(default=1)),
            # BUG FIX: a stray " | " separator had corrupted this tuple
            # entry's indentation.
            ('reason', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('flag', self.gf('django.db.models.fields.IntegerField')(default=0)),
        ))
        db.send_create_signal('status', ['MailQueueItem'])

    def backwards(self, orm):
        # Deleting model 'MailQueueItem'
        # BUG FIX: a stray " | " separator had corrupted `db.delete_table`.
        db.delete_table(u'mailq')

    models = {
        'status.mailqueueitem': {
            'Meta': {'ordering': "['-timestamp']", 'object_name': 'MailQueueItem', 'db_table': "u'mailq'"},
            'attempts': ('django.db.models.fields.IntegerField', [], {}),
            'direction': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'flag': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'from_address': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lastattempt': ('django.db.models.fields.DateTimeField', [], {}),
            'messageid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'reason': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {}),
            'subject': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {}),
            'to_address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        }
    }

    complete_apps = ['status']
|
MichSchli/Mindblocks | controller/viewscreen_controller/viewscreen_listener.py | Python | gpl-3.0 | 4,045 | 0.001731 | from model.component.component_model import ComponentModel
from model.component.component_specification import ComponentSpecification
from model.computation_unit.computation_unit_model import ComputationUnitModel
from model.graph.graph_model import GraphModel
class ViewscreenListener:
    """
    Receives requests from viewscreen and enacts corresponding changes on model.
    """

    # Collaborators are injected in __init__; class-level defaults keep the
    # attributes defined even before construction completes.
    canvas_repository = None
    selection_presenter = None

    def __init__(self, viewscreen, canvas_repository, component_repository, graph_repository, selection_presenter, computation_unit_repository):
        """Store the injected repositories/presenter and subscribe to viewscreen events."""
        self.canvas_repository = canvas_repository
        self.selection_presenter = selection_presenter
        self.component_repository = component_repository
        self.graph_repository = graph_repository
        self.viewscreen = viewscreen
        self.computation_unit_repository = computation_unit_repository
        self.register_observers()

    def register_observers(self):
        # Wire viewscreen UI events to the handlers defined below.
        self.viewscreen.define_tab_changed_observer(self.tab_changed)
        self.viewscreen.define_click_observer(self.canvas_clicked)

    '''
    Events:
    '''

    def tab_changed(self, event):
        """Make the canvas behind the newly selected tab the current selection."""
        new_canvas_unique_identifier = event.new_canvas_unique_identifier
        new_canvas = self.canvas_repository.get_canvas_by_identifier(new_canvas_unique_identifier)
        self.selection_presenter.select_canvas(new_canvas)

    def canvas_clicked(self, event):
        """Dispatch a canvas click.

        Priority: (1) create an edge between the previously selected element
        and the clicked one (either orientation), (2) instantiate the active
        toolbox prototype at the click location, (3) plain selection.
        """
        selected_element = self.selection_presenter.selected_canvas_item.get()
        clicked_element = event.element
        toolbox_item = self.selection_presenter.selected_toolbox_item.get()
        if self.can_create_edge(selected_element, clicked_element):
            self.create_edge(selected_element, clicked_element)
        elif self.can_create_edge(clicked_element, selected_element):
            self.create_edge(clicked_element, selected_element)
        elif toolbox_item is not None:
            self.create_component_at(toolbox_item, event.location)
        else:
            self.selection_presenter.select_canvas_item(clicked_element)

    '''
    Helpers:
    '''

    def can_create_edge(self, element_1, element_2):
        """Return True when element_1 -> element_2 is a valid socket-to-socket edge."""
        if element_1 is None or element_2 is None:
            return False
        if not (element_1.is_socket() and element_2.is_socket()):
            return False
        return element_1.edge_valid(element_2)

    '''
    Actions:
    '''

    def create_component_at(self, toolbox_item, location):
        """Instantiate *toolbox_item* as a component (in its own new graph) at *location*."""
        computation_unit_model = toolbox_item.create_computation_model()
        self.computation_unit_repository.create(computation_unit_model)
        component = ComponentModel()
        component.computation_unit = computation_unit_model
        component.prototype_id = toolbox_item.get_unique_identifier()
        component.set_position(location[0], location[1])
        component.update_attributes(toolbox_item.attributes)
        component = self.component_repository.create_component_with_sockets(component)
        canvas_model = self.selection_presenter.selected_canvas.get()
        # Every freshly dropped component starts out as a one-component graph.
        graph_model = GraphModel(None)
        graph_model.add_component_with_sockets(component)
        graph_model.canvas_identifier = canvas_model.get_unique_identifier()
        graph_model = self.graph_repository.create(graph_model)
        canvas_model.defined_graphs.append(graph_model)
        self.canvas_repository.update_canvas(canvas_model)
        self.selection_presenter.select_canvas_item(component)

    def create_edge(self, out_socket, in_socket):
        """Connect two sockets, merging their graphs when they differ."""
        target_socket_graph = in_socket.get_graph()
        self.graph_repository.unify_graphs(out_socket.get_graph(), in_socket.get_graph())
        self.graph_repository.add_edge_to_graph(out_socket.get_graph(), out_socket, in_socket)
        canvas_model = self.selection_presenter.selected_canvas.get()
        if target_socket_graph != out_socket.get_graph():
            # Unification collapsed the target's graph into the source's:
            # remove the now-redundant graph from the canvas.
            canvas_model.delete_graph(target_socket_graph)
            # NOTE(review): indentation reconstructed -- confirm this
            # update_canvas call belongs inside the branch, not after it.
            self.canvas_repository.update_canvas(canvas_model)
|
marcharper/Axelrod | axelrod/strategies/axelrod_second.py | Python | mit | 3,354 | 0.000298 | """
Additional strategies from Axelrod's second tournament.
"""
import random
from axelrod import Actions, Player, flip_action, random_choice
C, D = Actions.C, Actions.D
class Champion(Player):
    """
    Strategy submitted to Axelrod's second tournament by Danny Champion.
    """

    name = "Champion"
    classifier = {
        'memory_depth': float('inf'),
        'stochastic': True,
        # Reads the expected match length from match_attributes.
        'makes_use_of': set(["length"]),
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }

    def strategy(self, opponent):
        """Play in three phases relative to the expected match length.

        - first 5% of the match (length/20): always cooperate
        - up to 12.5% (length*5/40): mirror the opponent's last move
        - afterwards: cooperate, unless the opponent just defected AND its
          overall defection rate exceeds max(0.4, uniform random draw)
        """
        current_round = len(self.history)
        expected_length = self.match_attributes['length']
        # Cooperate for the first 1/20-th of the game
        if current_round == 0:
            return C
        if current_round < expected_length / 20.:
            return C
        # Mirror partner for the next phase
        if current_round < expected_length * 5 / 40.:
            return opponent.history[-1]
        # Now cooperate unless all of the necessary conditions are true
        defection_prop = float(opponent.defections) / len(opponent.history)
        if opponent.history[-1] == D:
            r = random.random()
            if defection_prop > max(0.4, r):
                return D
        return C
class Eatherley(Player):
    """
    Strategy submitted to Axelrod's second tournament by Graham Eatherley.
    """

    name = "Eatherley"
    classifier = {
        'memory_depth': float('inf'),
        'stochastic': True,
        'makes_use_of': set(),
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }

    @staticmethod
    def strategy(opponent):
        """Reciprocate cooperation; after an opponent defection, cooperate
        with probability ``1 - (opponent's overall defection rate)``
        (via random_choice)."""
        # Cooperate on the first move
        if not len(opponent.history):
            return C
        # Reciprocate cooperation
        if opponent.history[-1] == C:
            return C
        # Respond to defections with probability equal to opponent's total
        # proportion of defections
        defection_prop = float(opponent.defections) / len(opponent.history)
        return random_choice(1 - defection_prop)
class Tester(Player):
    """
    Submitted to Axelrod's second tournament by David Gladstein.

    Defects on the first move and plays TFT if the opponent ever defects (after
    one apology cooperation round). Otherwise alternate cooperation and defection.
    """

    name = "Tester"
    classifier = {
        'memory_depth': float('inf'),
        'stochastic': False,
        'makes_use_of': set(),
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }

    def __init__(self):
        Player.__init__(self)
        # Becomes True permanently once the opponent has defected; from then
        # on this player behaves as Tit-For-Tat.
        self.is_TFT = False

    def strategy(self, opponent):
        """Probe with an opening D; switch to TFT if the opponent ever retaliates."""
        # Defect on the first move
        if not opponent.history:
            return D
        # Am I TFT?
        if self.is_TFT:
            return D if opponent.history[-1:] == [D] else C
        else:
            # Did opponent defect?
            if opponent.history[-1] == D:
                self.is_TFT = True
                return C  # one apology cooperation before TFT mode
            if len(self.history) in [1, 2]:
                return C
            # Alternate C and D
            return flip_action(self.history[-1])

    def reset(self):
        """Reset player state (including the TFT switch) between matches."""
        Player.reset(self)
        self.is_TFT = False
|
jasdumas/jasdumas.github.io | post_data/RF_adult_income.py | Python | mit | 5,021 | 0.019319 | # import libraries: dataframe manipulation, machine learning, os tools
from pandas import Series, DataFrame
import pandas as pd
import numpy as np
import os
import matplotlib.pylab as plt
from sklearn.cross_validation import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import classification_report
import sklearn.metrics
# Feature Importance
from sklearn import datasets
from sklearn.ensemble import ExtraTreesClassifier
# change working directory to where the dataset is
os.chdir("C:/Users/JD87417/Desktop/python work/Coursera")
# Load the dataset (http://archive.ics.uci.edu/ml/datasets/Adult)
AH_data = pd.read_csv("adult2_income.csv")
data_clean = AH_data.dropna()
# encode categorical features (done in R)
# summary statistics including counts, mean, stdev, quartiles
data_clean.head(n=5)
data_clean.dtypes # data types of each variable
data_clean.describe()
# Split into training and testing sets
# Specifying predictor x variables
predictors = data_clean[["age", "workclassLocal-gov", "workclassPrivate",
"workclassSelf-emp-inc", "workclassSelf-emp-not-inc", "workclassState-gov",
"workclassWithout-pay", "fnlwgt", "education11th", "education12th",
"education1st-4th", "education5th-6th", "education7th-8th", "education9th",
"educationAssoc-acdm", "educationAssoc-voc", "educationBachelors",
"educationDoctorate", "educationHS-grad", "educationMasters",
"educationPreschool", "educationProf-school", "educationSome-college",
"education_num", "martial_statusMarried-AF-spouse", "martial_statusMarried-civ-spouse",
"martial_statusMarried-spouse-absent", "martial_statusNever-married",
"martial_statusSeparated", "martial_statusWidowed", "occupationArmed-Forces",
"occupationCraft-repair", "occupationExec-managerial", "occupationFarming-fishing",
"occupationHandlers-cleaners", "occupationMachine-op-inspct",
"occupationOther-service", "occupationPriv-house-serv", "occupationProf-specialty",
"occupationProtective-serv", "occupationSales", "occupationTech-support",
"occupationTransport-moving", "relationshipNot-in-family", "relationshipOther-relative",
"relationshipOwn-child", "relationshipUnmarried", "relationshipWife",
"raceAsian-Pac-Islander", "raceBlack", "raceOther", "raceWhite",
"sexMale", "capital_gain", "capital_loss", "hours_per_week",
"native_countryCanada", "native_countryChina", "native_countryColumbia",
"native_countryCuba", "native_countryDominican-Republic", "native_countryEcuador",
"native_countryEl-Salvador", "native_countryEngland", "native_countryFrance",
"native_countryGermany", "native_countryGreece", "native_countryGuatemala",
"native_countryHaiti", "native_countryHoland-Netherlands", "native_countryHonduras",
"native_countryHong", "native_countryHungary", "native_countryIndia",
"native_countryIran", "native_countryIreland", "native_countryItaly",
"native_countryJamaica", "native_countryJapan", "native_countryLaos",
"native_countryMexico", "native_countryNicaragua", "native_countryOutlying-US(Guam-USVI-etc)",
"native_countryPeru", "native_countryPhilippines", "native_countryPoland",
"native_countryPortugal", "native_countryPuerto-Rico", "native_countryScotland",
"native_countrySouth", "native_countryTaiwan", "native_countryThailand",
"native_countryTrinadad&Tobago", "native_countryUnited-States",
"native_countryVietnam", "native_countryYugoslavia"]]
# y repsonse variable
targets = data_clean.income_target_50k
# concurrent split of x's, y, at 40%
pred_train, pred_test, tar_train, tar_test = train_test_split(predictors, targets, test_size=.4)
# shape/dimensions of the DataFrame
pred_train.shape
pred_test.shape
tar_train.shape
tar_test.shape
# Build model on training data
from sklearn.ensemble import RandomForestClassifier
# n_estimators is the amount of trees to build
classifier=RandomForestClassifier(n_estimators=25)
# fit the RandomForest Model
classifier=classifier.fit(pred_train,tar_train)
# prediction scoring of the model (array of binary 0-1)
predictions=classifier.predict(pred_test)
# confusion matrix / missclassification matrix
sklearn.metrics.confusion_matrix(tar_test,predictions)
sklearn.metrics.accuracy_score(tar_test, predictions)
# fit an Extra Trees model to the data
model = ExtraTreesClassifier()
model.fit(pred_train,tar_train)
# display the relative importance of each attribute
print(model.feature_importances_)
max_val = np.where(model.feature_importances_ == max(model.feature_importances_))
min_val = np.where(model.feature_importances_ == min(model.feature_importances_))
print(max_val, min_val)
"""
Running a different number of trees and see the effect
of that on the accuracy of | the prediction
"""
trees=range(25)
accuracy=np.zeros(25)
for idx in range(len(trees)):
classifier=RandomForestClassifier(n_estimators=idx + 1)
classifier=classifier.fit(pred_train,tar_train)
predictions=classifier.predict(pred_test)
accuracy[idx]=sklearn.metrics.accuracy_score(tar_test, predictions)
plt.cla()
plt.plot(tr | ees, accuracy)
|
skippyprime/configs | tests/utils.py | Python | apache-2.0 | 171 | 0 | import collecti | ons
ConfigParams = collections.namedtuple(
'ConfigParams',
[
'format',
'hint',
'disposition',
'exten | sion'
]
)
|
elibixby/gcloud-python | gcloud/error_reporting/test_client.py | Python | apache-2.0 | 5,284 | 0 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestClient(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.error_reporting.client import Client
return Client
def _getHttpContext(self):
from gcloud.error_reporting.client import HTTPContext
return HTTPContext
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def _makeHTTP(self, *args, **kw):
return self._getHttpContext()(*args, **kw)
PROJECT = 'PROJECT'
SERVICE = 'SERVICE'
VERSION = 'myversion'
def test_ctor_default(self):
CREDENTIALS = _Credentials()
target = self._makeOne(project=self.PROJECT,
credentials=CREDENTIALS)
self.assertEquals(target.service, target.DEFAULT_SERVICE)
self.assertEquals(target.version, None)
def test_ctor_params(self):
CREDENTIALS = _Credentials()
target = self._makeOne(project=self.PROJECT,
credentials=CREDENTIALS,
service=self.SERVICE,
version=self.VERSION)
self.assertEquals(target.service, self.SERVICE)
self.assertEquals(target.version, self.VERSION)
def test_report_exception(self):
CREDENTIALS = _Credentials()
target = self._makeOne(project=self.PROJECT,
credentials=CREDENTIALS)
logger = _Logger()
target.logging_client.logger = lambda _: logger
try:
raise NameError
except NameError:
target.report_exception()
payload = logger.log_struct_called_with
self.assertEquals(payload['serviceContext'], {
'service': target.DEFAULT_SERVICE,
})
self.assertIn('test_report', payload['message'])
self.assertIn('test_client.py', payload['message'])
def test_report_exception_with_service_version_in_constructor(self):
CREDENTIALS = _Credentials()
SERVICE = "notdefault"
VERSION = "notdefaultversion"
target = self._makeOne(project=self.PROJECT,
credentials=CREDENTIALS,
service=SERVICE,
version=VERSION)
logger = _Logger()
target.logging_client.logger = lambda _: logger
http_context = self._makeHTTP(method="GET", response_status_code=500)
USER = "user@gmail.com"
try:
raise NameError
except NameError:
target.report_exception(http_context=http_context, user=USER)
payload = logger.log_struct_called_with
self.assertEquals(payload['serviceContext'], {
'service': SERVICE,
'version': VERSION
})
self.assertIn(
'test_report_exception_with_service_version_in_constructor',
payload['message'])
self.assertIn('test_client.py', payload['message'])
self.assertEquals(
| payload['context']['httpContext']['responseStatusCode'], 500)
self.assertEquals(
payload['context']['httpContext']['method'], 'GET')
self.assertEquals(payload['context']['user'], USER)
def test_report(self):
CREDENTIALS = _Credentials()
target = self._makeOne(project=self.PROJECT,
credentials=CREDENTIALS)
| logger = _Logger()
target.logging_client.logger = lambda _: logger
MESSAGE = 'this is an error'
target.report(MESSAGE)
payload = logger.log_struct_called_with
self.assertEquals(payload['message'], MESSAGE)
report_location = payload['context']['reportLocation']
self.assertIn('test_client.py', report_location['filePath'])
self.assertEqual(report_location['functionName'], 'test_report')
self.assertGreater(report_location['lineNumber'], 100)
self.assertLess(report_location['lineNumber'], 150)
class _Credentials(object):
_scopes = None
@staticmethod
def create_scoped_required():
return True
def create_scoped(self, scope):
self._scopes = scope
return self
class _Logger(object):
def log_struct(self, payload, # pylint: disable=unused-argument
client=None, # pylint: disable=unused-argument
labels=None, # pylint: disable=unused-argument
insert_id=None, # pylint: disable=unused-argument
severity=None, # pylint: disable=unused-argument
http_request=None): # pylint: disable=unused-argument
self.log_struct_called_with = payload
|
theadviceio/executer | tests/cache/test_cache.py | Python | apache-2.0 | 6,321 | 0.00443 | # from __future__ import absolute_import
# from base import *
import unittest
import os
import tempfile
import glob
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__)) + "/../../"))
import cache
__version__ = 1.0
__author__ = 'weldpua2008@gmail.com'
tmp_files=[]
class TestRepoClass(unittest.TestCase):
    """Tests for the module-level ``cache`` API (update / get / flush_cache)."""

    def getTempFileName(self, deleted=True):
        """Create a temp file, register its path in ``tmp_files``, return the path.

        :param deleted: forwarded to ``NamedTemporaryFile(delete=...)``; the
            tests pass ``deleted=False`` so the file outlives the handle.
        :return: path of the created file.
        """
        f = tempfile.NamedTemporaryFile(delete=deleted, prefix='_tmp')
        config_file_path = f.name
        # BUG FIX: the original read ``f.close`` without parentheses -- a
        # no-op attribute access that leaked the open file handle.
        f.close()
        tmp_files.append(config_file_path)
        return config_file_path

    def tearDown(self):
        """Delete every temp file created during the test and reset the registry."""
        for path in tmp_files:
            if os.path.isfile(path):
                os.remove(path)
        tmp_files[:] = []

    def test_dict_by_default(self):
        # An untouched cache copies out as a plain dict.
        self.assertIsInstance(cache.get_cache_copy(), dict)

    def test_set(self):
        """update() stores value + file mtime under [cache_type][key][file_path]."""
        cache.flush_cache()
        file_path = self.getTempFileName(deleted=False)
        access_time = os.stat(file_path).st_mtime
        key = 'key'
        value = 'value'
        cache_type = 'cache_type'
        cache.update(key=key, value=value, cache_type=cache_type, file_path=file_path)
        store = cache.get_cache_copy()
        self.assertIsInstance(store, dict)
        self.assertIn(cache_type, store)
        self.assertIn(key, store[cache_type])
        self.assertIn(file_path, store[cache_type][key])
        entry = store[cache_type][key][file_path]
        self.assertIn('value', entry)
        self.assertIn('access_time', entry)
        self.assertEqual(value, entry['value'])
        self.assertEqual(access_time, entry['access_time'])
        # Changing the file afterwards must not affect the copied snapshot.
        with open(file_path, 'a+') as file_content:
            file_content.write("sss")
        self.assertEqual(value, entry['value'])
        self.assertEqual(access_time, entry['access_time'])
        cache.flush_cache()

    def test_set_same_key(self):
        """Updating the same key with a second file adds a second path entry."""
        cache.flush_cache()
        first_path = self.getTempFileName(deleted=False)
        access_time = os.stat(first_path).st_mtime
        key = 'key'
        value = 'value'
        cache_type = 'cache_type'
        cache.update(key=key, value=value, cache_type=cache_type, file_path=first_path)
        second_path = self.getTempFileName(deleted=False)
        cache.update(key=key, value=value, cache_type=cache_type, file_path=second_path)
        access_time_2 = os.stat(second_path).st_mtime
        store = cache.get_cache_copy()
        self.assertEqual(value, store[cache_type][key][second_path]['value'])
        # NOTE(review): this compares the FIRST file's mtime against the
        # SECOND file's cache entry; it only holds when both files share a
        # timestamp (coarse mtime granularity). Kept as in the original --
        # confirm the intended assertion.
        self.assertEqual(access_time, store[cache_type][key][second_path]['access_time'])
        store = cache.get_cache_copy()
        # Changing the file afterwards must not affect the copied snapshot.
        with open(second_path, 'a+') as file_content:
            file_content.write("sss")
        self.assertEqual(value, store[cache_type][key][second_path]['value'])
        self.assertEqual(access_time_2, store[cache_type][key][second_path]['access_time'])
        cache.flush_cache()

    def test_get_exist(self):
        """get() returns the value previously stored with update()."""
        cache.flush_cache()
        file_path = self.getTempFileName(deleted=False)
        key = 'key'
        value = 'value'
        cache_type = 'cache_type'
        cache.update(key=key, value=value, cache_type=cache_type, file_path=file_path)
        self.assertEqual(value, cache.get(key=key, cache_type=cache_type, file_path=file_path))

    def test_get_non_exist(self):
        """get() for a key that was never stored does not return that value."""
        cache.flush_cache()
        file_path = self.getTempFileName(deleted=False)
        key = 'key'
        value = 'value'
        cache_type = 'cache_type'
        # Deliberately no cache.update() call here.
        self.assertNotEqual(value, cache.get(key=key, cache_type=cache_type, file_path=file_path))

    def test_flush_cache(self):
        """flush_cache() empties a previously populated store."""
        cache.flush_cache()
        file_path = self.getTempFileName(deleted=False)
        cache.update(key='key', value='value', cache_type='cache_type', file_path=file_path)
        populated = cache.get_cache_copy()
        self.assertIsInstance(populated, dict)
        self.assertGreater(len(populated), 0)
        cache.flush_cache()
        emptied = cache.get_cache_copy()
        self.assertIsInstance(emptied, dict)
        self.assertLessEqual(len(emptied), 0)
if __name__ == '__main__':
unittest.main()
|
vmassuchetto/dnstorm | dnstorm/app/__init__.py | Python | gpl-2.0 | 383 | 0 | from django.contrib.auth.models import User
from actstream import registry
from dnstorm.app import models
# Application version and project homepage.
DNSTORM_VERSION = '0.01'
DNSTORM_URL = 'http://vmassuchetto.github.io/dnstorm'

# Register every model that may appear in activity streams with
# django-activity-stream's registry.
registry.register(User)
registry.register(models.Problem)
registry.register(models.Criteria)
registry.register(models.Idea)
registry.register(models.Alternative)
registry.register(models.Comment)
StefQM/facedetect | facedetect/rects.py | Python | mit | 1,352 | 0.012574 | import cv2
def outlineRect(image, rect, color):
    """Draw the outline of the (x, y, w, h) rectangle *rect* onto *image*.

    A ``None`` rect is silently ignored.
    """
    if rect is None:
        return
    x, y, w, h = rect
    cv2.rectangle(image, (x, y), (x+w, y+h), color)
def copyRect(src, dst, srcRect, dstRect,
             interpolation = cv2.INTER_LINEAR):
    """Copy part of the source to part of the destination.

    The source sub-rectangle is resized to the destination sub-rectangle's
    size; *dst* is modified in place. Rectangles are (x, y, w, h) tuples.
    """
    x0, y0, w0, h0 = srcRect
    x1, y1, w1, h1 = dstRect
    # Resize the contents of the source sub-rectangle.
    # Put the result in the destination sub-rectangle.
    dst[y1:y1+h1, x1:x1+w1] = \
        cv2.resize(src[y0:y0+h0, x0:x0+w0], (w1, h1),
                   interpolation = interpolation)
def swapRects(src, dst, rects,
              interpolation = cv2.INTER_LINEAR):
    """Copy the source with two or more sub-rectangles swapped.

    Each rectangle's content is rotated into the next one in *rects*
    (cyclically: the last rectangle's content ends up in the first).
    *dst* is modified in place; fewer than two rects is a no-op copy.
    """
    if dst is not src:
        dst[:] = src
    numRects = len(rects)
    if numRects < 2:
        return
    # Copy the contents of the last rectangle into temporary storage,
    # because it is about to be overwritten by the cascade below.
    x, y, w, h = rects[numRects - 1]
    temp = src[y:y+h, x:x+w].copy()
    # Copy the contents of each rectangle into the next.
    i = numRects - 2
    while i >= 0:
        copyRect(src, dst, rects[i], rects[i+1], interpolation)
        i -= 1
    # Copy the temporarily stored content into the first rectangle.
    copyRect(temp, dst, (0, 0, w, h), rects[0], interpolation)
MatthewVerbryke/inmoov_ros | inmoov_tools/trainer/trainergui.py | Python | bsd-3-clause | 13,132 | 0.003807 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'trainergui.ui'
#
# Created: Tue May 24 14:29:31 2016
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(401, 686)
MainWindow.setAutoFillBackground(False)
MainWindow.setDocumentMode(False)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.sliderGoal = QtGui.QSlider(self.centralwidget)
self.sliderGoal.setGeometry(QtCore.QRect(340, 20, 31, 611))
self.sliderGoal.setMaximum(990000)
self.sliderGoal.setSingleStep(10000)
self.sliderGoal.setPageStep(100000)
self.sliderGoal.setOrientation(QtCore.Qt.Vertical)
self.sliderGoal.setInvertedAppearance(False)
self.sliderGoal.setInvertedControls(False)
self.sliderGoal.setTickPosition(QtGui.QSlider.TicksBothSides)
self.sliderGoal.setTickInterval(10000)
self.sliderGoal.setObjectName(_fromUtf8("sliderGoal"))
self.frame = QtGui.QFrame(self.centralwidget)
self.frame.setGeometry(QtCore.QRect(30, 20, 281, 611))
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.txtMinPulse = QtGui.QLineEdit(self.frame)
self.txtMinPulse.setGeometry(QtCore.QRect(110, 310, 141, 27))
self.txtMinPulse.setObjectName(_fromUtf8("txtMinPulse"))
self.txtMaxSensor = QtGui.QLineEdit(self.frame)
self.txtMaxSensor.setGeometry(QtCore.QRect(110, 400, 141, 27))
self.txtMaxSensor.setObjectName(_fromUtf8("txtMaxSensor"))
self.label_3 = QtGui.QLabel(self.frame)
self.label_3.setGeometry(QtCore.QRect(16, 310, 91, 21))
self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_4 = QtGui.QLabel(self.frame)
self.label_4.setGeometry(QtCore.QRect(16, 340, 91, 21))
self.label_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.label_5 = QtGui.QLabel(self.frame)
self.label_5.setGeometry(QtCore.QRect(16, 370, 91, 21))
self.label_5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.label_2 = QtGui.QLabel(self.frame)
self.label_2.setGeometry(QtCore.QRect(40, 280, 67, 21))
self.label_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.txtMaxPulse = QtGui.QLineEdit(self.frame)
self.txtMaxPulse.setGeometry(QtCore.QRect(110, 340, 141, 27))
self.txtMaxPulse.setObjectName(_fromUtf8("txtMaxPulse"))
self.chkCalibrated = QtGui.QCheckBox(self.frame)
self.chkCalibrated.setGeometry(QtCore.QRect(30, 430, 97, 22))
self.chkCalibrated.setLayoutDirection(QtCore.Qt.RightToLeft)
self.chkCalibrated.setObjectName(_fromUtf8("chkCalibrated"))
self.txtMaxGoal = QtGui.QLineEdit(self.frame)
self.txtMaxGoal.setGeometry(QtCore.QRect(110, 280, 141, 27))
self.txtMaxGoal.setObjectName(_fromUtf8("txtMaxGoal"))
self.txtMinSensor = QtGui.QLineEdit(self.frame)
self.txtMinSensor.setGeometry(QtCore.QRect(110, 370, 141, 27))
self.txtMinSensor.setObjectName(_fromUtf8("txtMinSensor"))
self.label = QtGui.QLabel(self.frame)
self.label.setGeometry(QtCore.QRect(40, 250, 67, 21))
self.label.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label.setObjectName(_fromUtf8("label"))
self.cmbSmoothing = QtGui.QComboBox(self.frame)
self.cmbSmoothing.setEnabled(False)
self.cmbSmoothing.setGeometry(QtCore.QRect(110, 200, 141, 27))
self.cmbSmoothing.setObjectName(_fromUtf8("cmbSmoothing"))
self.label_8 = QtGui.QLabel(self.frame)
self.label_8.setEnabled(False)
self.label_8.setGeometry(QtCore.QRect(16, 170, 91, 21))
self.label_8.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.txtMaxSpeed = QtGui.QLineEdit(self.frame)
self.txtMaxSpeed.setEnabled(False)
self.txtMaxSpeed.setGeometry(QtCore.QRect(110, 170, 141, 27))
self.txtMaxSpeed.setObjectName(_fromUtf8("txtMaxSpeed"))
self.label_6 = QtGui.QLabel(self.frame)
self.label_6.setGeometry(QtCore.QRect(16, 400, 91, 21))
self.label_6.setLayoutDirection(QtCore.Qt.LeftToRight)
self.label_6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.label_14 = QtGui.QLabel(self.frame)
self.label_14.setEnabled(False)
self.label_14.setGeometry(QtCore.QRect(10, 200, 91, 21))
self.label_14.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_14.setObjectName(_fromUtf8("label_14"))
self.txtMinGoal = QtGui.QLineEdit(self.frame)
self.txtMinGoal.setGeometry(QtCore.QRect(110, 250, 141, 27))
self.txtMinGoal.setObjectName(_fromUtf8("txtMinGoal"))
self.label_7 = QtGui.QLabel(self.frame)
self.label_7.setGeometry(QtCore.QRect(18, 560, 91, 21))
self.label_7.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.txtSpeed = QtGui.QLineEdit(self.frame)
self.txtSpeed.setEnabled(False)
| self.txtSpeed.setGeometry(QtCore.QRect(132, 520, 121, 27))
self.txtSpeed.setObjectName(_fromUtf8("txtSpeed"))
self.chkPower = QtGui.QCheckBox(self.frame)
self.chkPower.setEnabled(False)
self.chkPower.setGeometry(QtCore.QRect(30, 500, 97, 22))
self.chkPower.setLayoutDirection(QtCore.Qt.Ri | ghtToLeft)
self.chkPower.setText(_fromUtf8(""))
self.chkPower.setObjectName(_fromUtf8("chkPower"))
self.txtPosition = QtGui.QLineEdit(self.frame)
self.txtPosition.setEnabled(False)
self.txtPosition.setGeometry(QtCore.QRect(110, 470, 141, 27))
self.txtPosition.setObjectName(_fromUtf8("txtPosition"))
self.txtSensorRaw = QtGui.QLineEdit(self.frame)
self.txtSensorRaw.setEnabled(False)
self.txtSensorRaw.setGeometry(QtCore.QRect(112, 560, 141, 27))
self.txtSensorRaw.setObjectName(_fromUtf8("txtSensorRaw"))
self.label_10 = QtGui.QLabel(self.frame)
self.label_10.setGeometry(QtCore.QRect(40, 470, 67, 21))
self.label_10.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.label_11 = QtGui.QLabel(self.frame)
self.label_11.setGeometry(QtCore.QRect(10, 500, 91, 17))
self.label_11.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.cmbServo = QtGui.QComboBox(self.frame)
self.cmbServo.setGeometry(QtCore.QRect(110, 50, 141, 27))
self.cmbServo.setObjectName(_fromUtf8("cmbServo"))
self.label_12 = QtG |
def transform(old):
    """Invert a {score: [letters]} mapping into {lowercased letter: score}."""
    inverted = {}
    for score, letters in old.items():
        for letter in letters:
            inverted[letter.lower()] = score
    return inverted
|
SUNET/eduid-webapp | src/eduid_webapp/letter_proofing/tests/test_pdf.py | Python | bsd-3-clause | 6,870 | 0.001601 | # -*- coding: utf-8 -*-
import unittest
from collections import OrderedDict
from datetime import datetime
from six import BytesIO, StringIO
from eduid_common.api.testing import EduidAPITestCase
from eduid_webapp.letter_proofing import pdf
from eduid_webapp.letter_proofing.app import init_letter_proofing_app
from eduid_webapp.letter_proofing.settings.common import LetterProofingConfig
# We need to add Navet responses that we fail to handle
__author__ = 'lundberg'
class FormatAddressTest(unittest.TestCase):
def test_successful_format(self):
navet_responses = [
OrderedDict(
[
(
u'Name',
OrderedDict(
[(u'GivenNameMarking', u'20'), (u'GivenName', u'Testaren Test'), (u'Surname', u'Testsson')]
),
),
(
u'OfficialAddress',
OrderedDict(
[(u'Address2', u'\xd6RGATAN 79 LGH 10'), (u'PostalCode', u'12345'), (u'City', u'LANDET')]
),
),
]
),
OrderedDict(
[
(
u'Name',
OrderedDict(
[
(u'GivenNameMarking', u'20'),
(u'GivenName', u'Testaren Test'),
(u'MiddleName', u'Tester'),
(u'Surname', u'Testsson'),
]
),
),
(
u'OfficialAddress',
OrderedDict(
[
(u'Address2', u'\xd6RGATAN 79 LGH 10'),
(u'Address1', u'LGH 4321'),
(u'CareOf', u'TESTAREN & TESTSSON'),
(u'PostalCode', u'12345'),
(u'City', u'LANDET'),
]
),
),
]
),
]
for response in navet_responses:
name, care_of, address, misc_address, postal_code, city = pdf.format_address(response)
self.assertIsNotNone(name)
self.assertIsNotNone(care_of)
self.assertIsNotNone(address)
self.assertIsNotNone(misc_address)
self.assertIsNotNone(postal_code)
self.assertIsNotNone(city)
def test_failing_format(self):
failing_navet_responses = [
OrderedDict(
[
(
u'OfficialAddress',
OrderedDict(
[(u'Address2', u'\xd6RGATAN 79 LGH 10'), (u'PostalCode', u'12345'), (u'City', u'LANDET')]
),
)
]
),
OrderedDict(
[
(
u'Name',
OrderedDict(
[(u'GivenNameMarking', u'20'), (u'GivenName', u'Testaren Test'), (u'Surname', u'Testsson')]
),
),
]
),
OrderedDict(
[
(u'Name', OrderedDict([(u'GivenNameMarking', u'20'), (u'Surname', u'Testsson')])),
(
u'OfficialAddress',
OrderedDict(
[(u'Address2', u'\xd6RGATAN 79 LGH 10'), (u'PostalCode', u'12345'), (u'City', u'LANDET')]
),
),
]
),
OrderedDict(
[
(u'Name', OrderedDict([(u'GivenNameMarking', u'20'), (u'Surname', u'Testsson')])),
(u'OfficialAddress', OrderedDict([(u'Address2', u'\xd6RGATAN 79 LGH 10'), (u'City', u'LANDET')])),
]
),
OrderedDict(
[
(
u'Name',
{
u'GivenName': u'Testaren Test',
u'MiddleName': u'Tester',
u'GivenNameMarking': u'20',
u'Surname': u'Testsson',
},
),
(u'OfficialAddress', {}),
]
),
]
for response in failing_navet_responses:
self.assertRaises(pdf.AddressFormatException, pdf.format_address, response)
class CreatePDFTest(EduidAPITestCase):
def load_app(self, config):
"""
Called from the parent class, so we can provide the appropriate flask
app for this test case.
"""
return init_letter_proofing_app('testing', config)
def update_config(self, app_config):
app_config.update(
{
'letter_wait_time_hours': 336,
'msg_broker_url': 'amqp://dummy',
'am_broker_url': 'amqp://dummy',
'celery_config': {'result_backend': 'amqp', 'task_serializer': 'json'},
}
)
return app_config
def test_create_pdf(self):
recipient = OrderedDict(
[
(
u'Name',
OrderedDict(
[
(u'GivenNameMarking', u'20'), |
(u'GivenName', u'Testaren Test'),
(u'MiddleName', u'Tester'),
(u'Surname', u'Testsson'),
]
),
),
(
u'OfficialAddress',
| OrderedDict(
[
(u'Address2', u'\xd6RGATAN 79 LGH 10'),
(u'Address1', u'LGH 4321'),
(u'CareOf', u'TESTAREN & TESTSSON'),
(u'PostalCode', u'12345'),
(u'City', u'LANDET'),
]
),
),
]
)
with self.app.app_context():
with self.app.test_request_context():
pdf_document = pdf.create_pdf(
recipient,
verification_code='bogus code',
created_timestamp=datetime.utcnow(),
primary_mail_address='test@example.org',
letter_wait_time_hours=336,
)
self.assertIsInstance(pdf_document, (StringIO, BytesIO))
|
FedoraScientific/salome-geom | src/GEOM_SWIG/GEOM_example6.py | Python | lgpl-2.1 | 1,993 | 0.003512 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2007-2014 CEA/DEN, EDF R&D, OPEN CASCADE
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# GEOM GEOM_SWIG : binding of C++ omplementaion with Python
# File : GEOM_example6.py
# Author : Dmitry MATVEITChEV
# Module : GEOM
#
import salome
salome.salome_init()
import GEOM
from salome.geom import geomBuilder
geompy = geomBu | ilder.New(salome.myStudy)
ind = 1
circlelist = []
while ind < 6:
x1 = 0. + (10. * ind)
y1 = 0.
z1 = 0.
x2 = 10. + (10. * ind)
y2 = 20. * (ind+1)
z2 = 30. * (ind+1)
x3 = 50. + (10. * ind)
y3 = 0. * ( | ind+1)
z3 = -10. * (ind+1)
print x1, y1, z1, x2, y2, z2, x3, y3, z3
point1 = geompy.MakeVertex(x1, y1, z1)
name1 = "point1_%d"%(ind)
id_pt1 = geompy.addToStudy(point1, name1)
point2 = geompy.MakeVertex(x2, y2, z2)
name2 = "point2_%d"%(ind)
id_pt2 = geompy.addToStudy(point2, name2)
point3 = geompy.MakeVertex(x3, y3, z3)
name3 = "point3_%d"%(ind)
id_pt3 = geompy.addToStudy(point3, name3)
name = "circle%d"%(ind)
circle = geompy.MakeCircleCenter2Pnt(point1, point2, point3)
id_circle = geompy.addToStudy(circle, name)
circlelist.append(circle)
ind = ind + 1
|
Smarsh/django | tests/modeltests/custom_methods/models.py | Python | bsd-3-clause | 1,885 | 0.003714 | """
3. Giving models custom methods
Any method you add to a model will be available to instances.
"""
from django.db import models
import datetime
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
def __unicode__(self):
return self.headline
def was_published_today(self):
return self.pub_date == datetime.date.today()
| def articles_from_same_day_1(self):
return Article.objects.filter(pub_date=self.pub_date).exclude(id=self.id)
def articles_from_same_day_2(self):
"""
Verbose version of get_articles_from_same_day_1, which does a custom
database query for the sake of demonstration.
"""
from django.db import connection
cursor = connection.cursor()
| cursor.execute("""
SELECT id, headline, pub_date
FROM custom_methods_article
WHERE pub_date = %s
AND id != %s""", [connection.ops.value_to_db_date(self.pub_date),
self.id])
# The asterisk in "(*row)" tells Python to expand the list into
# positional arguments to Article().
return [self.__class__(*row) for row in cursor.fetchall()]
__test__ = {'API_TESTS':"""
# Create a couple of Articles.
>>> from datetime import date
>>> a = Article(id=None, headline='Area man programs in Python', pub_date=date(2005, 7, 27))
>>> a.save()
>>> b = Article(id=None, headline='Beatles reunite', pub_date=date(2005, 7, 27))
>>> b.save()
# Test the custom methods.
>>> a.was_published_today()
False
>>> a.articles_from_same_day_1()
[<Article: Beatles reunite>]
>>> a.articles_from_same_day_2()
[<Article: Beatles reunite>]
>>> b.articles_from_same_day_1()
[<Article: Area man programs in Python>]
>>> b.articles_from_same_day_2()
[<Article: Area man programs in Python>]
"""}
|
davidbgk/udata | udata/tests/site/test_site_views.py | Python | agpl-3.0 | 13,615 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import StringIO
from datetime import datetime
from flask import url_for
from udata.frontend import csv
from udata.models import Badge, Site, PUBLIC_SERVICE
from udata.core.dataset.factories import DatasetFactory, ResourceFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.site.models import current_site
from udata.core.reuse.factories import ReuseFactory
from udata.tests.frontend import FrontTestCase
class SiteViewsTest(FrontTestCase):
modules = ['core.site', 'admin', 'core.dataset', 'core.reuse',
'core.organization', 'search']
def test_site_global(self):
'''It should create and/or load the current site'''
with self.app.test_request_context(''):
self.app.preprocess_request()
self.assertIsInstance(current_site._get_current_object(), Site)
self.assertEqual(current_site.id, self.app.config['SITE_ID'])
def test_render_home(self):
'''It should render the home page'''
for i in range(3):
org = OrganizationFactory()
DatasetFactory(organization=org)
ReuseFactory(organization=org)
current_site.settings.home_datasets = [
DatasetFactory() for _ in range(3)]
current_site.settings.home_reuses = [
ReuseFactory() for _ in range(3)]
response = self.get(url_for('site.home'))
self.assert200(response)
def test_render_home_no_data(self):
'''It should render the home page without data'''
response = self.get(url_for('site.home'))
self.assert200(response)
def test_render_dashboard(self):
'''It should render the search page'''
for i in range(3):
org = OrganizationFactory()
DatasetFactory(organization=org)
ReuseFactory(organization=org)
response = self.get(url_for('site.dashboard'))
self.assert200(response)
def test_render_dashboard_no_data(self):
'''It should render the search page without data'''
response = self.get(url_for('site.dashboard'))
self.assert200(response)
def test_datasets_csv(self):
with self.autoindex():
datasets = [DatasetFactory(resources=[ResourceFactory()])
for _ in range(5)]
hidden_dataset = DatasetFactory()
response = self.get(url_for('site.datasets_csv'))
self.assert200(response)
self.assertEqual(response.mimetype, 'text/csv')
self.assertEqual(response.charset, 'utf-8')
csvfile = StringIO.StringIO(response.data)
reader = csv.get_reader(csvfile)
header = reader.next()
self.assertEqual(header[0], 'id')
self.assertIn('titl | e', header)
self.assertIn('description', header)
self.assertIn('created_at', header)
self.assertIn('last_modified', header)
self.assertIn('tags', header)
self.assertIn('metric.reuses', header)
rows = list(reader)
ids = [row[0] for row in rows]
self.assertEqual(len(rows), len(datasets))
for dataset in datasets:
se | lf.assertIn(str(dataset.id), ids)
self.assertNotIn(str(hidden_dataset.id), ids)
def test_datasets_csv_with_filters(self):
'''Should handle filtering but ignore paging or facets'''
with self.autoindex():
filtered_datasets = [
DatasetFactory(resources=[ResourceFactory()],
tags=['selected'])
for _ in range(6)]
datasets = [DatasetFactory(resources=[ResourceFactory()])
for _ in range(3)]
hidden_dataset = DatasetFactory()
response = self.get(
url_for(
'site.datasets_csv', tag='selected', page_size=3, facets=True))
self.assert200(response)
self.assertEqual(response.mimetype, 'text/csv')
self.assertEqual(response.charset, 'utf-8')
csvfile = StringIO.StringIO(response.data)
reader = csv.get_reader(csvfile)
header = reader.next()
self.assertEqual(header[0], 'id')
self.assertIn('title', header)
self.assertIn('description', header)
self.assertIn('created_at', header)
self.assertIn('last_modified', header)
self.assertIn('tags', header)
self.assertIn('metric.reuses', header)
rows = list(reader)
ids = [row[0] for row in rows]
# Should ignore paging
self.assertEqual(len(rows), len(filtered_datasets))
# SHoulf pass filter
for dataset in filtered_datasets:
self.assertIn(str(dataset.id), ids)
for dataset in datasets:
self.assertNotIn(str(dataset.id), ids)
self.assertNotIn(str(hidden_dataset.id), ids)
def test_resources_csv(self):
with self.autoindex():
datasets = [
DatasetFactory(resources=[ResourceFactory(),
ResourceFactory()])
for _ in range(3)]
DatasetFactory()
response = self.get(url_for('site.resources_csv'))
self.assert200(response)
self.assertEqual(response.mimetype, 'text/csv')
self.assertEqual(response.charset, 'utf-8')
csvfile = StringIO.StringIO(response.data)
reader = csv.get_reader(csvfile)
header = reader.next()
self.assertEqual(header[0], 'dataset.id')
self.assertIn('dataset.title', header)
self.assertIn('dataset.url', header)
self.assertIn('title', header)
self.assertIn('description', header)
self.assertIn('filetype', header)
self.assertIn('url', header)
self.assertIn('created_at', header)
self.assertIn('modified', header)
self.assertIn('downloads', header)
resource_id_index = header.index('id')
rows = list(reader)
ids = [(row[0], row[resource_id_index]) for row in rows]
self.assertEqual(len(rows), sum(len(d.resources) for d in datasets))
for dataset in datasets:
for resource in dataset.resources:
self.assertIn((str(dataset.id), str(resource.id)), ids)
def test_resources_csv_with_filters(self):
'''Should handle filtering but ignore paging or facets'''
with self.autoindex():
filtered_datasets = [DatasetFactory(resources=[ResourceFactory(),
ResourceFactory()],
tags=['selected'])
for _ in range(6)]
[DatasetFactory(resources=[ResourceFactory()]) for _ in range(3)]
DatasetFactory()
response = self.get(
url_for('site.resources_csv', tag='selected', page_size=3,
facets=True))
self.assert200(response)
self.assertEqual(response.mimetype, 'text/csv')
self.assertEqual(response.charset, 'utf-8')
csvfile = StringIO.StringIO(response.data)
reader = csv.get_reader(csvfile)
header = reader.next()
self.assertEqual(header[0], 'dataset.id')
self.assertIn('dataset.title', header)
self.assertIn('dataset.url', header)
self.assertIn('title', header)
self.assertIn('description', header)
self.assertIn('filetype', header)
self.assertIn('url', header)
self.assertIn('created_at', header)
self.assertIn('modified', header)
self.assertIn('downloads', header)
resource_id_index = header.index('id')
rows = list(reader)
ids = [(row[0], row[resource_id_index]) for row in rows]
self.assertEqual(len(rows),
sum(len(d.resources) for d in filtered_datasets))
for dataset in filtered_datasets:
for resource in dataset.resources:
self.assertIn((str(dataset.id), str(resource.id)), ids)
def test_organizations_csv(self):
with self.autoindex():
|
StrellaGroup/erpnext | erpnext/stock/doctype/serial_no/test_serial_no.py | Python | gpl-3.0 | 2,291 | 0.017023 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# ERPNext - web based ERP (http://erpnext.com)
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, unittest
from erpnext.stock.doctype.stock_entry.test_stock_entry import make_serialized_item
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import make_purchase_receipt
from erpnext.stock.doctype.delivery_note.test_delivery_note import create_delivery_note
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
from erpnext.stock.doctype.warehouse.test_warehouse import | create_warehouse
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import set_perpetual_inventory
test_dependencies = ["Item"]
test_records = frappe.get_test_records('Serial No')
from erpnext.stock.doctype.serial_no.serial_no import *
| class TestSerialNo(unittest.TestCase):
def test_cannot_create_direct(self):
frappe.delete_doc_if_exists("Serial No", "_TCSER0001")
sr = frappe.new_doc("Serial No")
sr.item_code = "_Test Serialized Item"
sr.warehouse = "_Test Warehouse - _TC"
sr.serial_no = "_TCSER0001"
sr.purchase_rate = 10
self.assertRaises(SerialNoCannotCreateDirectError, sr.insert)
sr.warehouse = None
sr.insert()
self.assertTrue(sr.name)
sr.warehouse = "_Test Warehouse - _TC"
self.assertTrue(SerialNoCannotCannotChangeError, sr.save)
def test_inter_company_transfer(self):
set_perpetual_inventory(0, "_Test Company 1")
set_perpetual_inventory(0)
se = make_serialized_item(target_warehouse="_Test Warehouse - _TC")
serial_nos = get_serial_nos(se.get("items")[0].serial_no)
create_delivery_note(item_code="_Test Serialized Item With Series", qty=1, serial_no=serial_nos[0])
wh = create_warehouse("_Test Warehouse", company="_Test Company 1")
make_purchase_receipt(item_code="_Test Serialized Item With Series", qty=1, serial_no=serial_nos[0],
company="_Test Company 1", warehouse=wh)
serial_no = frappe.db.get_value("Serial No", serial_nos[0], ["warehouse", "company"], as_dict=1)
self.assertEqual(serial_no.warehouse, wh)
self.assertEqual(serial_no.company, "_Test Company 1")
def tearDown(self):
frappe.db.rollback() |
beeftornado/sentry | src/sentry/integrations/pagerduty/integration.py | Python | bsd-3-clause | 7,387 | 0.002572 | from __future__ import absolute_import
from django.utils.translation import ugettext_lazy as _
from django.db import transaction
from sentry import options
from sentry.utils import json
from sentry.utils.compat import filter
from sentry.utils.http import absolute_uri
from s | entry.integrations.base import (
IntegrationInstallation,
IntegrationFeatures,
IntegrationMetadata,
IntegrationProvider,
FeatureDescription,
)
from sentry.shared_integrations.exceptions import IntegrationError
from sentry.m | odels import OrganizationIntegration, PagerDutyService
from sentry.pipeline import PipelineView
from .client import PagerDutyClient
DESCRIPTION = """
Connect your Sentry organization with one or more PagerDuty accounts, and start getting
incidents triggered from Sentry alerts.
"""
FEATURES = [
FeatureDescription(
"""
Manage incidents and outages by sending Sentry notifications to PagerDuty.
""",
IntegrationFeatures.INCIDENT_MANAGEMENT,
),
FeatureDescription(
"""
Configure rule based PagerDuty alerts to automatically be triggered in a specific
service - or in multiple services!
""",
IntegrationFeatures.ALERT_RULE,
),
]
setup_alert = {
"type": "info",
"text": "The PagerDuty integration adds a new Alert Rule action to all projects. To enable automatic notifications sent to PagerDuty you must create a rule using the PagerDuty action in your project settings.",
}
metadata = IntegrationMetadata(
description=_(DESCRIPTION.strip()),
features=FEATURES,
author="The Sentry Team",
noun=_("Installation"),
issue_url="https://github.com/getsentry/sentry/issues/new?title=PagerDuty%20Integration:%20&labels=Component%3A%20Integrations",
source_url="https://github.com/getsentry/sentry/tree/master/src/sentry/integrations/pagerduty",
aspects={"alerts": [setup_alert]},
)
class PagerDutyIntegration(IntegrationInstallation):
def get_client(self, integration_key):
return PagerDutyClient(integration_key=integration_key)
def get_organization_config(self):
fields = [
{
"name": "service_table",
"type": "table",
"label": "PagerDuty services with the Sentry integration enabled",
"help": "If services need to be updated, deleted, or added manually please do so here. Alert rules will need to be individually updated for any additions or deletions of services.",
"addButtonText": "",
"columnLabels": {"service": "Service", "integration_key": "Integration Key"},
"columnKeys": ["service", "integration_key"],
"confirmDeleteMessage": "Any alert rules associated with this service will stop working. The rules will still exist but will show a `removed` service.",
}
]
return fields
def update_organization_config(self, data):
if "service_table" in data:
service_rows = data["service_table"]
# validate fields
bad_rows = filter(lambda x: not x["service"] or not x["integration_key"], service_rows)
if bad_rows:
raise IntegrationError("Name and key are required")
with transaction.atomic():
exising_service_items = PagerDutyService.objects.filter(
organization_integration=self.org_integration
)
for service_item in exising_service_items:
# find the matching row from the input
matched_rows = filter(lambda x: x["id"] == service_item.id, service_rows)
if matched_rows:
matched_row = matched_rows[0]
service_item.integration_key = matched_row["integration_key"]
service_item.service_name = matched_row["service"]
service_item.save()
else:
service_item.delete()
# new rows don't have an id
new_rows = filter(lambda x: not x["id"], service_rows)
for row in new_rows:
service_name = row["service"]
key = row["integration_key"]
PagerDutyService.objects.create(
organization_integration=self.org_integration,
service_name=service_name,
integration_key=key,
)
def get_config_data(self):
service_list = []
for s in self.services:
service_list.append(
{"service": s.service_name, "integration_key": s.integration_key, "id": s.id}
)
return {"service_table": service_list}
@property
def services(self):
services = PagerDutyService.objects.filter(organization_integration=self.org_integration)
return services
class PagerDutyIntegrationProvider(IntegrationProvider):
key = "pagerduty"
name = "PagerDuty"
metadata = metadata
features = frozenset([IntegrationFeatures.ALERT_RULE, IntegrationFeatures.INCIDENT_MANAGEMENT])
integration_cls = PagerDutyIntegration
setup_dialog_config = {"width": 600, "height": 900}
def get_pipeline_views(self):
return [PagerDutyInstallationRedirect()]
def post_install(self, integration, organization, extra=None):
services = integration.metadata["services"]
try:
org_integration = OrganizationIntegration.objects.get(
integration=integration, organization=organization
)
except OrganizationIntegration.DoesNotExist:
return
with transaction.atomic():
for service in services:
PagerDutyService.objects.create_or_update(
organization_integration=org_integration,
integration_key=service["integration_key"],
service_name=service["name"],
)
def build_integration(self, state):
config = json.loads(state.get("config"))
account = config["account"]
# PagerDuty gives us integration keys for various things, some of which
# are not services. For now we only care about services.
services = [x for x in config["integration_keys"] if x["type"] == "service"]
return {
"name": account["name"],
"external_id": account["subdomain"],
"metadata": {"services": services, "domain_name": account["subdomain"]},
}
class PagerDutyInstallationRedirect(PipelineView):
def get_app_url(self, account_name=None):
if not account_name:
account_name = "app"
app_id = options.get("pagerduty.app-id")
setup_url = absolute_uri("/extensions/pagerduty/setup/")
return (
u"https://%s.pagerduty.com/install/integration?app_id=%s&redirect_url=%s&version=2"
% (account_name, app_id, setup_url)
)
def dispatch(self, request, pipeline):
if "config" in request.GET:
pipeline.bind_state("config", request.GET["config"])
return pipeline.next_step()
account_name = request.GET.get("account", None)
return self.redirect(self.get_app_url(account_name))
|
tlksio/tlksio | talks/migrations/0005_auto_20170402_1502.py | Python | mit | 866 | 0.003464 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-02 15:02
from __future_ | _ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('talks', '0004_auto_20170326_1755'),
]
operations = [
migrations.AlterField(
model_name='talk', |
name='fav_count',
field=models.PositiveIntegerField(default=0, verbose_name='favorite count'),
),
migrations.AlterField(
model_name='talk',
name='view_count',
field=models.PositiveIntegerField(default=0, verbose_name='view count'),
),
migrations.AlterField(
model_name='talk',
name='vote_count',
field=models.PositiveIntegerField(default=0, verbose_name='vote count'),
),
]
|
OBIGOGIT/etch | binding-python/runtime/src/test/python/tests/binding/support/TestValidator_float.py | Python | apache-2.0 | 3,261 | 0.01012 | # Licensed to the Apache Software Foundation (ASF) under one *
# or more contributor license agreements. See the NOTICE file *
# distributed with this work for additional information *
# regarding copyright ownership. The ASF licenses this file *
# to you under the Apache License, Version 2.0 (the *
# "License"); you may not use this file except in compliance *
# with the License. You may obtain a copy of the License at *
# *
# http://www.apache.org/licenses/LICENSE-2.0 *
# *
# Unless required by applicable law or agreed to in writing, *
# software distributed under the License is distributed on an *
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
# KIND, either express or implied. See the License for the *
# specific language governing permissions and limitations *
# under the License.
import unittest
from etch.python.Types import *
from etch.binding.support.Validator_float import *
from etch.binding.transport.fmt.TypeCode import *
class Test_EtchSupportValidator_float(unittest.TestCase):
def _test_validator(self):
_test = self._test
# TODO - resolve casting issues in validator tests
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MAX_VALUE), "1")
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MIN_VALUE), "2" )
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MAX_VALUE), "3" )
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MIN_VALUE), "4" )
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MAX_VALUE), "5" )
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MIN_VALUE), "abc" )
_test(1, "float[1]", TypeCode.ARRAY, [Float], [], 1)
_test(2, "float[2]", TypeCode.ARRAY, [[Float]], [[]], [] )
_test(3, "float[3]", TypeCode.ARRAY, [[[Float]]], [[[]]], [[]] )
_test(4, "float[4]", TypeCode.ARRAY, [[[[Float]]]], [[[[]]]], [[[]]] )
_test(5, "float[5]", TypeCode.ARRAY, [[[[[Float]]]]], [[[[[]]]]], [[[[]]]] )
def test_getNeg1(self):
self.assertRaises(IllegalArgumentException, Validator_float.get, -1)
def test_getNeg2(self):
self.assertRaises(IllegalArgumentException, Validator_float.get(0).elementValidator)
def test_getMaxPlusOne(self):
self.assertRaises(IllegalArgumentException, Validator_float.get, Validator.MAX_NDIMS + 1)
def _test(self, n, s, tc, clss, good, bad):
v = Val | idator_float.get(n)
self.assertEqual(n, v.getNDims())
self.assertEqual(clss, v.getExpectedClass())
self.assertEqual(s, repr(v))
self.assertEqual(True, v.validate(good))
self.assertEqual(False, v.validate(bad))
self.assertEqual(tc, v.checkValue(good))
self.assertEqual(None, v.checkValue(bad))
if n > 0:
self | .assertEqual(n-1, v.elementValidator().getNDims())
if __name__=='__main__':
unittest.main()
|
pwmarcz/django | django/contrib/gis/tests/test_spatialrefsys.py | Python | bsd-3-clause | 4,891 | 0.001636 | import unittest
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.tests.utils import (oracle, postgis, spatialite,
SpatialRefSys)
from django.db import connection
from django.test import skipUnlessDBFeature
from django.utils import six
test_srs = ({
'srid': 4326,
'auth_name': ('EPSG', True),
'auth_srid': 4326,
# Only the beginning, because there are differences depending on installed libs
'srtext': 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84"',
# +ellps=WGS84 has been removed in the 4326 proj string in proj-4.8
'proj4_re': r'\+proj=longlat (\+ellps=WGS84 )?(\+datum=WGS84 |\+towgs84=0,0,0,0,0,0,0 )\+no_defs ',
'spheroid': 'WGS 84', 'name': 'WGS 84',
'geographic': True, 'projected': False, 'spatialite': True,
# From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'ellipsoid': (6378137.0, 6356752.3, 298.257223563),
'eprec': (1, 1, 9),
}, {
'srid': 32140,
'auth_name': ('EPSG', False),
'auth_srid': 32140,
'srtext': (
'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",'
'DATUM["North_American_Datum_1983",SPHEROID["GRS 1980"'
),
'proj4_re': r'\+proj=lcc \+lat_1=30.28333333333333 \+lat_2=28.38333333333333 \+lat_0=27.83333333333333 '
r'\+lon_0=-99 \+x_0=600000 \+y_0=4000000 (\+ellps=GRS80 )?'
r'(\+datum=NAD83 |\+towgs84=0,0,0,0,0,0,0 )?\+units=m \+no_defs ',
'spheroid': 'GRS 1980', 'name': 'NAD83 / Texas South Central',
'geographic': False, 'projected': True, 'spatialite': False,
# From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'ellipsoid': (6378137.0, 6356752.31414, 298.257222101),
'eprec': (1, 5, 10),
})
@unittest.skipUnless(HAS_GDAL, "SpatialRefSysTest needs gdal support")
@skipUnlessDBFeature("has_spatialrefsys_table")
class SpatialRefSysTest(unittest.TestCase):
def test_retrieve(self):
"""
Test retrieval of SpatialRefSys model objects.
"""
for sd in test_srs:
srs = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(sd['srid'], srs.srid)
# Some of the authority names are borked on Oracle, e.g., SRID=32140.
# also, Oracle Spatial seems to add extraneous info to fields, hence the
# the testing with the 'startswith' flag.
auth_name, oracle_flag = sd['auth_name']
if postgis or (oracle and oracle_flag):
self.assertEqual(True, srs.auth_name.startswith(auth_name))
self.assertEqual(sd['auth_srid'], srs.auth_srid)
# No proj.4 and different srtext on oracle backends :(
if postgis:
self.assertTrue(srs.wkt.startswith(sd['srtext']))
six.assertRegex(self, srs.proj4text, sd['proj4_re'])
def test_osr(self):
"""
Test getting OSR objects from SpatialRefSys model objects.
"""
for sd in test_srs:
sr = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(True, sr.spheroid.startswith(sd['spheroid']))
self.assertEqual(sd['geographic'], sr.geographic)
self.assertEqual(sd['projected'], sr.projected)
if not (spatialite and not sd['spatialite']):
# Can't get 'NAD83 / Texas South Central' from PROJ.4 string
# on SpatiaLite
self.assertEqual(True, sr.name.startswith(sd['name']))
# Testing the SpatialReference object directly.
if postgis or spatialite:
srs = sr.srs
six.assertRegex(self, srs.proj4, sd['proj4_re'])
# No `srtext` field in the `spatial_ref_sys` table in SpatiaLite < 4
if not spatialite or connection.ops.spatial_version[0] >= 4:
self.assertTrue(srs.wkt.startswith(sd['srtext']))
def test_ellipsoid(self):
"""
Test the ellipsoid property.
"""
for sd in test_srs:
# Getting the ellipsoid and precision parameters.
ellps1 = sd['ellipsoid']
prec = sd['eprec']
# Getting our spatial reference and its ellipsoid
srs = SpatialRefSys.objects.get(srid=sd['srid'])
ellps2 = srs.ellipsoid
for i in r | ange(3):
self.assertAlmostEqual(ellps1[i], ellps2[i], prec[i])
def test_add_entry(self):
"""
Test adding a new entry in the SpatialRefSys model using the
add_srs_entry utility.
"""
from django.contrib.gis.utils import add_srs_entry
add_srs_ | entry(3857)
self.assertTrue(
SpatialRefSys.objects.filter(srid=3857).exists()
)
srs = SpatialRefSys.objects.get(srid=3857)
self.assertTrue(
SpatialRefSys.get_spheroid(srs.wkt).startswith('SPHEROID[')
)
|
theJollySin/mazelib | mazelib/solve/MazeSolveAlgo.py | Python | gpl-3.0 | 6,364 | 0.001886 | import abc
import numpy as np
from numpy.random import shuffle
class MazeSolveAlgo:
__metaclass__ = abc.ABCMeta
def solve(self, grid, start, end):
""" helper method to solve a init the solver before solving the maze
Args:
grid (np.array): maze array
start (tuple): position in maze to start from
end (tuple): position in maze to finish at
Returns:
list: final solutions
"""
self._solve_preprocessor(grid, start, end)
return self._solve()
def _solve_preprocessor(self, grid, start, end):
""" ensure the maze mazes any sense before you solve it
Args:
grid (np.array): maze array
start (tuple): position in maze to start from
end (tuple): position in maze to finish at
Returns: None
"""
self.grid = grid.copy()
self.start = start
self.end = end
# validating checks
assert grid is not None, 'Maze grid is not set.'
assert start is not None and end is not None, 'Entrances are not set.'
assert start[0] >= 0 and start[0] < grid.shape[0], 'Entrance is outside the grid.'
assert start[1] >= 0 and start[1] < grid.shape[1], 'Entrance is outside the grid.'
assert end[0] >= 0 and end[0] < grid.shape[0], 'Entrance is outside the grid.'
assert end[1] >= 0 and end[1] < grid.shape[1], 'Entrance is outside the grid.'
@abc.abstractmethod
def _solve(self):
return None
"""
All of the methods below this are helper methods,
common to many maze-solving algorithms.
"""
def _find_unblocked_neighbors(self, posi):
""" Find all the grid neighbors of the current position; visited, or not.
Args:
posi (tuple): cell of interest
Returns:
list: all the unblocked neighbors to this cell
"""
r, c = posi
ns = []
if r > 1 and not self.grid[r - 1, c] and not self.grid[r - 2, c]:
ns.append((r - 2, c))
if r < self.grid.shape[0] - 2 and not self.grid[r + 1, c] and not self.grid[r + 2, c]:
ns.append((r + 2, c))
if c > 1 and not self.grid[r, c - 1] and not self.grid[r, c - 2]:
ns.append((r, c - 2))
if c < self.grid.shape[1] - 2 and not self.grid[r, c + 1] and not self.grid[r, c + 2]:
ns.append((r, c + 2))
shuffle(ns)
return ns
def _midpoint(self, a, b):
""" Find the wall cell between to passage cells
Args:
a (tuple): first cell
b (tuple): second cell
Returns:
tuple: cell half way between the first two
"""
return (a[0] + b[0]) // 2, (a[1] + b[1]) // 2
def _move(self, start, direction):
""" Convolve a position tuple with a direction tuple to generate a new position.
Args:
start (tuple): position cell to start at
direction (tuple): vector cell of direction to travel to
Returns:
tuple: end result of movement
"""
return tuple(map(sum, zip(start, direction)))
def _on_edge(self, cell):
""" Does the cell lay on the edge, rather inside of the maze grid?
Args:
cell (tuple): some place in the grid
Returns:
bool: Is the cell on the edge of the maze?
"""
r, c = cell
if r == 0 or r == self.grid.shape[0] - 1:
return True
if c == 0 or c == self.grid.shape[1] - 1:
return True
return False
def _push_edge(self, cell):
""" You may need to find the cell directly inside of a start or end cell.
Args:
cell (tuple): some place in the grid
Returns:
tuple: the new cell location, pushed from the edge
"""
r, c = cell
if r == 0:
return (1, c)
elif r == (self.grid.shape[0] - 1):
return (r - 1, c)
elif c == 0:
return (r, 1)
else:
return (r, c - 1)
def _within_one(self, cell, desire):
""" Is the current cell within one move of the desired cell?
Note, this might be one full more, or one half move.
Args:
cell (tuple): position to start at
desire (tuple): position you want to be at
Returns:
bool: Are you within one movement of your goal?
"""
if not cell or not desire:
return False
if cell[0] == desire[0]:
if abs(cell[1] - desire[1]) < 2:
return True
elif cell[1] == desire[1]:
if abs(cell[0] - desire[0]) < 2:
return True
return False
    def _prune_solution(self, solution):
        """Remove backtracking loops from a raw maze solution.

        In the process of solving a maze, the algorithm might go down the
        wrong corridor then backtrack.  Any cell that appears twice marks
        such a loop, and everything between the two visits is cut out.
        The entrances (self.start / self.end) are stripped from the ends.

        Args:
            solution (list): raw maze solution
        Returns:
            list: cleaner, tightened-up solution to the maze
        """
        found = True
        attempt = 0
        max_attempt = len(solution)
        while found and len(solution) > 2 and attempt < max_attempt:
            found = False
            attempt += 1
            for i in range(len(solution) - 1):
                first = solution[i]
                if first in solution[i + 1:]:
                    first_i = i
                    last_i = solution[i + 1:].index(first) + i + 1
                    found = True
                    break
            if found:
                solution = solution[:first_i] + solution[last_i:]
        # solution does not include entrances
        if len(solution) > 1:
            if solution[0] == self.start:
                solution = solution[1:]
            if solution[-1] == self.end:
                solution = solution[:-1]
        return solution
def prune_solutions(self, solutions):
""" prune all the duplicate cells from all solutions, and fix end points
Args:
solutions (list): multiple raw solutions
Returns:
list: the above solutions, cleaned up
"""
return [self._prune_solution(s) for s in solutions] |
Cinntax/home-assistant | homeassistant/components/scsgate/switch.py | Python | apache-2.0 | 5,491 | 0.001093 | """Support for SCSGate switches."""
import logging
import voluptuous as vol
from homeassistant.components import scsgate
from homeassistant.components.switch import SwitchDevice, PLATFORM_SCHEMA
from homeassistant.const import ATTR_ENTITY_ID, ATTR_STATE, CONF_NAME, CONF_DEVICES
import homeassistant.helpers.config_validation as cv
ATTR_SCENARIO_ID = "scenario_id"
CONF_TRADITIONAL = "traditional"
CONF_SCENARIO = "scenario"
CONF_SCS_ID = "scs_id"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_DEVICES): cv.schema_with_slug_keys(scsgate.SCSGATE_SCHEMA)}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the SCSGate switches."""
    log = logging.getLogger(__name__)

    # Traditional switches become real HA entities ...
    _setup_traditional_switches(
        logger=log, config=config, add_entities_callback=add_entities
    )
    # ... while scenario switches only fire events on the bus.
    _setup_scenario_switches(logger=log, config=config, hass=hass)
def _setup_traditional_switches(logger, config, add_entities_callback):
    """Add traditional SCSGate switches."""
    switches = []
    traditional = config.get(CONF_TRADITIONAL)
    if traditional:
        for entity_info in traditional.values():
            scs_id = entity_info[scsgate.CONF_SCS_ID]
            if scs_id in scsgate.SCSGATE.devices:
                # Already registered with the gate; skip duplicates.
                continue
            name = entity_info[CONF_NAME]
            logger.info("Adding %s scsgate.traditional_switch", name)
            switches.append(SCSGateSwitch(name=name, scs_id=scs_id, logger=logger))

    add_entities_callback(switches)
    # The gate must know the devices so it can dispatch messages to them.
    scsgate.SCSGATE.add_devices_to_register(switches)
def _setup_scenario_switches(logger, config, hass):
    """Add only SCSGate scenario switches."""
    scenario = config.get(CONF_SCENARIO)
    if not scenario:
        return
    for entity_info in scenario.values():
        scs_id = entity_info[scsgate.CONF_SCS_ID]
        if scs_id in scsgate.SCSGATE.devices:
            # Already registered with the gate; skip duplicates.
            continue
        name = entity_info[CONF_NAME]
        logger.info("Adding %s scsgate.scenario_switch", name)
        scsgate.SCSGATE.add_device(
            SCSGateScenarioSwitch(name=name, scs_id=scs_id, logger=logger, hass=hass)
        )
class SCSGateSwitch(SwitchDevice):
    """Representation of a SCSGate switch."""

    def __init__(self, scs_id, name, logger):
        """Initialize the switch."""
        self._name = name
        self._scs_id = scs_id
        self._toggled = False
        self._logger = logger

    @property
    def scs_id(self):
        """Return the SCS ID."""
        return self._scs_id

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    @property
    def is_on(self):
        """Return true if switch is on."""
        return self._toggled

    def turn_on(self, **kwargs):
        """Turn the device on."""
        from scsgate.tasks import ToggleStatusTask

        scsgate.SCSGATE.append_task(
            ToggleStatusTask(target=self._scs_id, toggled=True)
        )
        self._toggled = True
        self.schedule_update_ha_state()

    def turn_off(self, **kwargs):
        """Turn the device off."""
        from scsgate.tasks import ToggleStatusTask

        scsgate.SCSGATE.append_task(
            ToggleStatusTask(target=self._scs_id, toggled=False)
        )
        self._toggled = False
        self.schedule_update_ha_state()

    def process_event(self, message):
        """Handle a SCSGate message related with this switch."""
        if self._toggled == message.toggled:
            # (A stray dataset artifact corrupted this call in the source.)
            self._logger.info(
                "Switch %s, ignoring message %s because state already active",
                self._scs_id,
                message,
            )
            # Nothing changed, ignoring
            return

        self._toggled = message.toggled
        self.schedule_update_ha_state()

        command = "off"
        if self._toggled:
            command = "on"

        self.hass.bus.fire(
            "button_pressed", {ATTR_ENTITY_ID: self._scs_id, ATTR_STATE: command}
        )
class SCSGateScenarioSwitch:
    """Provides a SCSGate scenario switch.

    This switch is always in an 'off' state; when toggled it's used to
    trigger events.
    """

    def __init__(self, scs_id, name, logger, hass):
        """Initialize the scenario."""
        self._name = name
        self._scs_id = scs_id
        self._logger = logger
        self._hass = hass

    @property
    def scs_id(self):
        """Return the SCS ID."""
        return self._scs_id

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    def process_event(self, message):
        """Handle a SCSGate message related with this switch."""
        from scsgate.messages import StateMessage, ScenarioTriggeredMessage

        if isinstance(message, StateMessage):
            scenario_id = message.bytes[4]
        elif isinstance(message, ScenarioTriggeredMessage):
            scenario_id = message.scenario
        else:
            # Logger.warn is a deprecated alias; use warning().
            self._logger.warning(
                "Scenario switch: received unknown message %s", message
            )
            return

        self._hass.bus.fire(
            "scenario_switch_triggered",
            {ATTR_ENTITY_ID: int(self._scs_id), ATTR_SCENARIO_ID: int(scenario_id, 16)},
        )
|
tpugsley/tco2 | tco2/config/production.py | Python | bsd-3-clause | 4,340 | 0.001843 | # -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
# TODO: Fix this where even if in Dev this class is called.
pass
from .common import Common
class Production(Common):
    """Production settings: django-secure hardening, S3 static/media storage,
    SendGrid email, and memcache caching (Heroku-oriented)."""

    # This ensures that Django will be able to detect a secure connection
    # properly on Heroku.
    SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

    # INSTALLED_APPS
    INSTALLED_APPS = Common.INSTALLED_APPS
    # END INSTALLED_APPS

    # SECRET KEY
    SECRET_KEY = values.SecretValue()
    # END SECRET KEY

    # django-secure
    INSTALLED_APPS += ("djangosecure", )

    # set this to 60 seconds and then to 518400 when you can prove it works
    SECURE_HSTS_SECONDS = 60
    SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
    SECURE_FRAME_DENY = values.BooleanValue(True)
    SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
    SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
    SESSION_COOKIE_SECURE = values.BooleanValue(False)
    SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
    SECURE_SSL_REDIRECT = values.BooleanValue(True)
    # end django-secure

    # SITE CONFIGURATION
    # Hosts/domain names that are valid for this site
    # See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
    ALLOWED_HOSTS = ["*"]
    # END SITE CONFIGURATION

    INSTALLED_APPS += ("gunicorn", )

    # STORAGE CONFIGURATION
    # See: http://django-storages.readthedocs.org/en/latest/index.html
    INSTALLED_APPS += (
        'storages',
    )

    # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
    STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'

    # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
    AWS_ACCESS_KEY_ID = values.SecretValue()
    AWS_SECRET_ACCESS_KEY = values.SecretValue()
    AWS_STORAGE_BUCKET_NAME = values.SecretValue()
    AWS_AUTO_CREATE_BUCKET = True
    AWS_QUERYSTRING_AUTH = False

    # see: https://github.com/antonagestam/collectfast
    AWS_PRELOAD_METADATA = True
    INSTALLED_APPS += ('collectfast', )

    # AWS cache settings, don't change unless you know what you're doing:
    AWS_EXPIRY = 60 * 60 * 24 * 7
    AWS_HEADERS = {
        'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
            AWS_EXPIRY, AWS_EXPIRY)
    }

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
    STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
    # END STORAGE CONFIGURATION

    # EMAIL
    DEFAULT_FROM_EMAIL = values.Value('tco2 <noreply@example.com>')
    EMAIL_HOST = values.Value('smtp.sendgrid.com')
    EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
    EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
    EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
    EMAIL_SUBJECT_PREFIX = values.Value('[tco2] ', environ_name="EMAIL_SUBJECT_PREFIX")
    EMAIL_USE_TLS = True
    SERVER_EMAIL = EMAIL_HOST_USER
    # END EMAIL

    # TEMPLATE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
    TEMPLATE_LOADERS = (
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),
    )
    # END TEMPLATE CONFIGURATION

    # CACHING
    # Only do this here because thanks to django-pylibmc-sasl and pylibmc
    # memcacheify is painful to install on windows.
    try:
        # See: https://github.com/rdegges/django-heroku-memcacheify
        from memcacheify import memcacheify
        CACHES = memcacheify()
    except ImportError:
        CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
    # END CACHING

    # Your production stuff: Below this line define 3rd party library settings
|
GNU-Pony/ptools | src/pclean.py | Python | gpl-3.0 | 3,292 | 0.005474 | #!/usr/bin/env python3
'''
ptools – software package installation tools
Copyright © 2013 Mattias Andrée (maandree@member.fsf.org)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys
import os
try:
SPIKE_PATH = os.getenv('SPIKE_PATH')
sys.path.append('%s/src' % SPIKE_PATH)
from dragonsuite import *
except:
print('Environment variable SPIKE_PATH has not been set correctly')
sys.exit(1)
# Keep a reference to dragonsuite's rm_r before we shadow it.
rm_r_ = rm_r


def rm_r(ps):
    """Recursively remove one path or a list of paths, skipping missing ones.

    Wraps dragonsuite's ``rm_r`` (saved above as ``rm_r_``) so it accepts
    either a single path string or an iterable of paths, and silently
    ignores paths that do not exist.

    Args:
        ps: a path string, or a list of path strings
    """
    for p in ([ps] if isinstance(ps, str) else ps):
        if os.path.lexists(p):
            # Must delegate to the saved original; the source called
            # rm_r(p) here, which recursed on itself forever.
            rm_r_(p)
from fh import *

# Where the package is staged and what it is called.
pkgdir = evald_dirs['destdir']
pkgname = evald_dirs['pkgname']

# User preferences: which optional file classes to keep in the package.
i_use_devel = get('I_USE_DEVEL', 'y').lower().startswith('y')
i_use_emacs = get('I_USE_EMACS', 'y').lower().startswith('y')
i_use_info = get('I_USE_INFO', 'y').lower().startswith('y')
i_use_man = get('I_USE_MAN', 'y').lower().startswith('y')
i_use_man_locale = get('I_USE_MAN_LOCALE', '*')
i_use_doc = get('I_USE_DOC', 'y').lower().startswith('y')
i_use_locale = get('I_USE_LOCALE', '*')
i_use_locale_man = get('I_USE_LOCALE_MAN', 'en')
i_use_license = get('I_USE_LICENSE', 'y').lower().startswith('y')

# System directories *without* the staging prefix.  NOTE(review): the
# original prepended pkgdir here as well, so every `pkgdir + dir`
# expression below addressed a doubled path (<pkg><pkg>/usr/...) that
# never exists, silently turning the whole clean-up into a no-op; the
# separate (prefix, dir) arguments taken by filter_locale confirm these
# are meant to be prefix-free.
includedir = evald_dirs['includedir']
pkgconfigdir = evald_dirs['pkgconfigdir']
infodir = evald_dirs['infodir']
mandir = evald_dirs['mandir']
docdir = evald_dirs['docdir']
localedir = evald_dirs['localedir']
licensedir = evald_dirs['licensedir']
datarootdir = evald_dirs['datarootdir']

# Development files (headers and pkg-config data).
if not i_use_devel:
    rm_r(pkgdir + includedir)
    rm_r(pkgdir + pkgconfigdir)

# Emacs support files.
if not i_use_emacs:
    rm_r(pkgdir + datarootdir + '/emacs')

# Info manuals (always drop the dir index, it is regenerated on install).
if not i_use_info:
    rm_r(pkgdir + infodir)
else:
    rm(pkgdir + infodir + '/dir')

# man pages, possibly restricted to a set of locales.
if not i_use_man:
    rm_r(pkgdir + mandir)
else:
    _man = pkgdir + mandir
    _en = _man + os.sep + 'en'
    if not os.path.lexists(_en):
        # Treat the top-level man?/ sections as the English locale.
        mkdir_p(_en)
        mv(path('%s/man?/' % path_escape(_man)), _en)
    filter_locale(i_use_man_locale, pkgdir, None, mandir)
    # Promote the preferred man page locale back to the top level.
    _lang = _man + os.sep + i_use_locale_man
    if os.path.lexists(_lang):
        mv(os.listdir(_lang), _man)
        rmdir(_lang)
    if len(os.listdir(_man)) == 0:
        rmdir(_man)

# Other documentation.
if not i_use_doc:
    rm_r(pkgdir + docdir)

# Message catalogues, restricted to the wanted locales.
filter_locale(i_use_locale, pkgdir, None, localedir)

# License files.
if not i_use_license:
    # NOTE(review): the original built this path from the *boolean*
    # i_use_license; licensedir (declared above and otherwise unused)
    # is clearly what was intended.
    _dir = '%s%s%s' % (pkgdir + licensedir, os.sep, pkgname)
    if os.path.lexists(_dir):
        if os.path.islink(_dir):
            rm(_dir)
        else:
            rm_r(_dir)
    # Remove the data root if it ended up empty.  NOTE(review): the
    # original called rmdir(_dir) here, which had already been deleted.
    _droot = pkgdir + datarootdir
    if os.path.lexists(_droot) and os.path.isdir(_droot):
        if len(os.listdir(_droot)) == 0:
            rmdir(_droot)
LoLab-VU/pysb | pysb/examples/paper_figures/fig6.py | Python | bsd-2-clause | 9,204 | 0.000869 | """Produce contact map for Figure 5D from the PySB publication"""
from __future__ import print_function
import pysb.integrate
import pysb.util
import numpy as np
import scipy.optimize
import scipy.interpolate
import matplotlib.pyplot as plt
import os
import sys
import inspect
from earm.lopez_embedded import model
# List of model observables and corresponding data file columns for
# point-by-point fitting
obs_names = ['mBid', 'cPARP']
data_names = ['norm_ICRP', 'norm_ECRP']
var_names = ['nrm_var_ICRP', 'nrm_var_ECRP']
# Load experimental data file
data_path = os.path.join(os.path.dirname(__file__), 'fig6_data.csv')
exp_data = np.genfromtxt(data_path, delimiter=',', names=True)
# Model observable corresponding to the IMS-RP reporter (MOMP timing)
momp_obs = 'aSmac'
# Mean and variance of Td (delay time) and Ts (switching time) of MOMP, and
# yfinal (the last value of the IMS-RP trajectory)
momp_data = np.array([9810.0, 180.0, 1.0])
momp_var = np.array([7245000.0, 3600.0, 1e-9])
# Build time points for the integrator, using the same time scale as the
# experimental data but with greater resolution to help the integrator converge.
ntimes = len(exp_data['Time'])
# Factor by which to increase time resolution
tmul = 10
# Do the sampling such that the original experimental timepoints can be
# extracted with a slice expression instead of requiring interpolation.
tspan = np.linspace(exp_data['Time'][0], exp_data['Time'][-1],
(ntimes-1) * tmul + 1)
# Initialize solver object
solver = pysb.integrate.Solver(model, tspan, rtol=1e-5, atol=1e-5)
# Get parameters for rates only
rate_params = model.parameters_rules()
# Build a boolean mask for those params against the entire param list
rate_mask = np.array([p in rate_params for p in model.parameters])
# Build vector of nominal parameter values from the model
nominal_values = np.array([p.value for p in model.parameters])
# Set the radius of a hypercube bounding the search space
bounds_radius = 2
def objective_func(x, rate_mask, lb, ub):
    """Chi-squared objective comparing model simulation to experimental data.

    Sums three error terms: point-by-point trajectory error for the
    IC-RP/EC-RP reporters, error on the MOMP timing features (Td, Ts,
    yfinal), and error on the final cPARP value.

    Args:
        x: log10-transformed rate parameter values being fitted.
        rate_mask: boolean mask selecting rate parameters within the
            model's full parameter list.
        lb, ub: hard lower/upper bounds on x (log10 space).

    Returns:
        float: total error (np.inf when x violates the bounds).
    """
    # Peek at the annealer's frame so progress is printed once per cycle.
    caller_frame, _, _, caller_func, _, _ = inspect.stack()[1]
    if caller_func in {'anneal', '_minimize_anneal'}:
        caller_locals = caller_frame.f_locals
        if caller_locals['n'] == 1:
            print(caller_locals['best_state'].cost, caller_locals['current_state'].cost)
    # Apply hard bounds
    if np.any((x < lb) | (x > ub)):
        print("bounds-check failed")
        return np.inf
    # Simulate model with rates taken from x (which is log transformed)
    param_values = np.array([p.value for p in model.parameters])
    param_values[rate_mask] = 10 ** x
    solver.run(param_values)
    # Calculate error for point-by-point trajectory comparisons
    e1 = 0
    for obs_name, data_name, var_name in zip(obs_names, data_names, var_names):
        # Get model observable trajectory (this is the slice expression
        # mentioned above in the comment for tspan)
        ysim = solver.yobs[obs_name][::tmul]
        # Normalize it to 0-1
        ysim_norm = ysim / np.nanmax(ysim)
        # Get experimental measurement and variance
        ydata = exp_data[data_name]
        yvar = exp_data[var_name]
        # Compute error between simulation and experiment (chi-squared)
        e1 += np.sum((ydata - ysim_norm) ** 2 / (2 * yvar)) / len(ydata)
    # Calculate error for Td, Ts, and final value for IMS-RP reporter
    # =====
    # Normalize trajectory
    ysim_momp = solver.yobs[momp_obs]
    ysim_momp_norm = ysim_momp / np.nanmax(ysim_momp)
    # Build a spline to interpolate it
    st, sc, sk = scipy.interpolate.splrep(solver.tspan, ysim_momp_norm)
    # Use root-finding to find the point where trajectory reaches 10% and 90%
    t10 = scipy.interpolate.sproot((st, sc-0.10, sk))[0]
    t90 = scipy.interpolate.sproot((st, sc-0.90, sk))[0]
    # Calculate Td as the mean of these times
    td = (t10 + t90) / 2
    # Calculate Ts as their difference
    ts = t90 - t10
    # Get yfinal, the last element from the trajectory
    yfinal = ysim_momp_norm[-1]
    # Build a vector of the 3 variables to fit
    momp_sim = [td, ts, yfinal]
    # Perform chi-squared calculation against mean and variance vectors
    e2 = np.sum((momp_data - momp_sim) ** 2 / (2 * momp_var)) / 3
    # Calculate error for final cPARP value (ensure all PARP is cleaved)
    cparp_final = model.parameters['PARP_0'].value
    cparp_final_var = .01
    cparp_final_sim = solver.yobs['cPARP'][-1]
    e3 = (cparp_final - cparp_final_sim) ** 2 / (2 * cparp_final_var)
    error = e1 + e2 + e3
    return error
def estimate(start_values=None):
    """Estimate parameter values by fitting to data.

    Parameters
    ==========
    parameter_values : numpy array of floats, optional
        Starting parameter values. Taken from model's nominal parameter values
        if not specified.

    Returns
    =======
    numpy array of floats, containing fitted parameter values.
    """
    # NOTE(review): scipy.optimize.anneal was deprecated in SciPy 0.14 and
    # removed in 0.16; running this requires an older SciPy (or porting
    # the call to scipy.optimize.basinhopping).
    # Set starting position to nominal parameter values if not specified
    if start_values is None:
        start_values = nominal_values
    else:
        assert start_values.shape == nominal_values.shape
    # Log-transform the starting position
    x0 = np.log10(start_values[rate_mask])
    # Displacement size for annealing moves
    dx = .02
    # The default 'fast' annealing schedule uses the 'lower' and 'upper'
    # arguments in a somewhat counterintuitive way. See
    # http://projects.scipy.org/scipy/ticket/1126 for more information. This is
    # how to get the search to start at x0 and use a displacement on the order
    # of dx (note that this will affect the T0 estimation which *does* expect
    # lower and upper to be the absolute expected bounds on x).
    lower = x0 - dx / 2
    upper = x0 + dx / 2
    # Log-transform the rate parameter values
    xnominal = np.log10(nominal_values[rate_mask])
    # Hard lower and upper bounds on x
    lb = xnominal - bounds_radius
    ub = xnominal + bounds_radius
    # Perform the annealing
    args = [rate_mask, lb, ub]
    (xmin, Jmin, Tfinal, feval, iters, accept, retval) = \
        scipy.optimize.anneal(objective_func, x0, full_output=True,
                              maxiter=4000, quench=0.5,
                              lower=lower, upper=upper,
                              args=args)
    # Construct vector with resulting parameter values (un-log-transformed)
    params_estimated = start_values.copy()
    params_estimated[rate_mask] = 10 ** xmin
    # Display annealing results
    for v in ('xmin', 'Jmin', 'Tfinal', 'feval', 'iters', 'accept', 'retval'):
        print("%s: %s" % (v, locals()[v]))
    return params_estimated
def display(params_estimated):
# Simulate model with nominal parameters and construct a matrix of the
# trajectories of the observables of interest, normalized to 0-1.
solver.run()
obs_names_disp = ['mBid', 'aSmac', 'cPARP']
obs_totals = [model.parameters[n].value for n in ('Bid_0', 'Smac_0', 'PARP_0')]
sim_obs = solver.yobs[obs_names_disp].view(float).reshape(len(solver.yobs) | , -1)
sim_obs_norm = (sim_obs / obs_totals).T
# Do the same with the estimated parameters
solver.run(params_estimated)
sim_est_obs = solver.yobs[obs_names_disp].view(float).reshape(len(solver. | yobs), -1)
sim_est_obs_norm = (sim_est_obs / obs_totals).T
# Plot data with simulation trajectories both before and after fitting
color_data = '#C0C0C0'
color_orig = '#FAAA6A'
color_est = '#83C98E'
plt.subplot(311)
plt.errorbar(exp_data['Time'], exp_data['norm_ICRP'],
yerr=exp_data['nrm_var_ICRP']**0.5, c=color_data, linewidth=2,
elinewidth=0.5)
plt.plot(solver.tspan, sim_obs_norm[0], color_orig, linewidth=2)
plt.plot(solver.tspan, sim_est_obs_norm[0], color_est, linewidth=2)
plt.ylabel('Fraction of\ncleaved IC-RP/Bid', multialignment='center')
plt.axis([0, 20000, -0.2, 1.2])
plt.subplot(312)
plt.vlines(momp_data[0], -0.2, 1.2, color=color_data, linewidth=2)
plt.plot(solver.tspan, sim_obs_norm[1], color_orig, linewidth=2)
plt.plot(solver.tspan, sim_est_obs_norm[1], color_est, linewidth=2)
|
jaredjennings/snowy | wsgi/snowy/snowy/lib/reversion/managers.py | Python | agpl-3.0 | 2,511 | 0.005575 | """Model managers for Reversion."""
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback.
from django.contrib.contenttypes.models import ContentType
from django.db import models
class VersionManager(models.Manager):
    """Manager for Version models."""

    def get_for_object(self, object):
        """Returns all the versions of the given object, ordered by date created."""
        # (Dataset artifacts split 'created' and 'ContentType' in the source.)
        content_type = ContentType.objects.get_for_model(object)
        # The duplicated order_by("pk") is redundant but harmless.
        return self.filter(content_type=content_type, object_id=unicode(object.pk)).order_by("pk").select_related().order_by("pk")

    def get_unique_for_object(self, obj):
        """Returns unique versions associated with the object."""
        versions = self.get_for_object(obj)
        changed_versions = []
        known_serialized_data = set()
        for version in versions:
            serialized_data = version.serialized_data
            if serialized_data in known_serialized_data:
                # Identical snapshot to one already kept; skip it.
                continue
            known_serialized_data.add(serialized_data)
            changed_versions.append(version)
        return changed_versions

    def get_for_date(self, object, date):
        """Returns the latest version of an object for the given date."""
        try:
            return self.get_for_object(object).filter(revision__date_created__lte=date).order_by("-pk")[0]
        except IndexError:
            raise self.model.DoesNotExist

    def get_deleted(self, model_class):
        """Returns all the deleted versions for the given model class."""
        live_ids = [unicode(row[0]) for row in model_class._default_manager.all().values_list("pk")]
        content_type = ContentType.objects.get_for_model(model_class)
        deleted_ids = self.filter(content_type=content_type).exclude(object_id__in=live_ids).order_by().values_list("object_id").distinct()
        deleted = []
        for object_id, in deleted_ids:
            deleted.append(self.get_deleted_object(model_class, object_id))
        return deleted

    def get_deleted_object(self, model_class, object_id):
        """
        Returns the version corresponding to the deletion of the object with
        the given id.
        """
        try:
            content_type = ContentType.objects.get_for_model(model_class)
            return self.filter(content_type=content_type, object_id=unicode(object_id)).order_by("-pk").select_related()[0]
        except IndexError:
            raise self.model.DoesNotExist
bit-trade-one/SoundModuleAP | lib-src/lv2/sratom/waflib/Tools/xlcxx.py | Python | gpl-2.0 | 1,267 | 0.056827 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_xlcxx(conf):
    """Locate the IBM xlc++ compiler and record it in the environment."""
    cxx = conf.cmd_to_list(conf.find_program(['xlc++_r', 'xlc++'], var='CXX'))
    conf.get_xlc_version(cxx)
    conf.env.CXX_NAME = 'xlc++'
    conf.env.CXX = cxx
@conf
def xlcxx_common_flags(conf):
    """Define flag formats and naming patterns for the IBM xlc++ compiler.

    (Dataset artifacts split the function name and the STLIB_ST
    assignment in the source.)
    """
    v = conf.env
    v['CXX_SRC_F'] = []
    v['CXX_TGT_F'] = ['-c', '-o']
    # Link with the compiler driver unless a linker was chosen already.
    if not v['LINK_CXX']:
        v['LINK_CXX'] = v['CXX']
    v['CXXLNK_SRC_F'] = []
    v['CXXLNK_TGT_F'] = ['-o']
    v['CPPPATH_ST'] = '-I%s'
    v['DEFINES_ST'] = '-D%s'
    v['LIB_ST'] = '-l%s'
    v['LIBPATH_ST'] = '-L%s'
    v['STLIB_ST'] = '-l%s'
    v['STLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'
    v['SONAME_ST'] = []
    v['SHLIB_MARKER'] = []
    v['STLIB_MARKER'] = []
    v['LINKFLAGS_cxxprogram'] = ['-Wl,-brtl']
    v['cxxprogram_PATTERN'] = '%s'
    v['CXXFLAGS_cxxshlib'] = ['-fPIC']
    v['LINKFLAGS_cxxshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
    v['cxxshlib_PATTERN'] = 'lib%s.so'
    v['LINKFLAGS_cxxstlib'] = []
    v['cxxstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
    """Waf configuration entry point for the xlc++ toolchain.

    Order matters: the compiler must be found before its flags are set.
    """
    conf.find_xlcxx()
    conf.find_ar()
    conf.xlcxx_common_flags()
    conf.cxx_load_tools()
    conf.cxx_add_flags()
    conf.link_add_flags()
conf.link_add_flags()
|
PressLabs/silver | silver/api/exceptions.py | Python | apache-2.0 | 935 | 0 | # Copyright (c) 2017 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from rest_framework import status
from rest_framework.exceptions import APIException
class APIConflictException(APIException):
    """Raised when a request conflicts with the current state of a resource.

    Rendered by DRF as an HTTP 409 Conflict response.
    """

    status_code = status.HTTP_409_CONFLICT
    # (Dataset artifacts corrupted this string literal in the source.)
    default_detail = ('The request could not be completed due to a conflict '
                      'with the current state of the resource.')
|
pi-bot/pibot-pkg | tests/dependencies.py | Python | gpl-3.0 | 100 | 0 | import time
import os
import j | son
import | requests
# Plan: import each dependency and check that it is available.
pmisik/buildbot | master/buildbot/test/unit/test_mq_connector.py | Python | gpl-2.0 | 3,377 | 0 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.mq import base
from buildbot.mq import connector
from buildbot.test.fake import fakemaster
from buildbot.test.reactor import TestReactorMixin
from buildbot.util import service
class FakeMQ(service.ReconfigurableServiceMixin, base.MQBase):
    """Minimal MQ implementation used to stand in for a real broker in tests."""

    # Sentinel showing reconfigServiceWithBuildbotConfig has not run yet.
    new_config = "not_called"

    def reconfigServiceWithBuildbotConfig(self, new_config):
        # Record the config so tests can assert on what was passed in.
        self.new_config = new_config
        return defer.succeed(None)

    def produce(self, routingKey, data):
        # Messages are dropped; tests only check that the call is legal.
        pass

    def startConsuming(self, callback, filter, persistent_name=None):
        # No actual subscription is created.
        return defer.succeed(None)
class MQConnector(TestReactorMixin, unittest.TestCase):
    """Tests for buildbot.mq.connector.MQConnector setup/reconfiguration."""

    @defer.inlineCallbacks
    def setUp(self):
        self.setup_test_reactor()
        self.master = fakemaster.make_master(self)
        # The connector reads its backend type from master.config.mq.
        self.mqconfig = self.master.config.mq = {}
        self.conn = connector.MQConnector()
        yield self.conn.setServiceParent(self.master)

    def patchFakeMQ(self, name='fake'):
        # Register FakeMQ under the given implementation name so that
        # MQConnector.setup() instantiates it instead of a real backend.
        self.patch(connector.MQConnector, 'classes',
                   {name:
                    {'class': 'buildbot.test.unit.test_mq_connector.FakeMQ'},
                    })

    @defer.inlineCallbacks
    def test_setup_unknown_type(self):
        # An unregistered backend type must be rejected.
        self.mqconfig['type'] = 'unknown'
        with self.assertRaises(AssertionError):
            yield self.conn.setup()

    @defer.inlineCallbacks
    def test_setup_simple_type(self):
        self.patchFakeMQ(name='simple')
        self.mqconfig['type'] = 'simple'
        yield self.conn.setup()
        self.assertIsInstance(self.conn.impl, FakeMQ)
        # The connector proxies produce/startConsuming straight to the impl.
        self.assertEqual(self.conn.impl.produce, self.conn.produce)
        self.assertEqual(self.conn.impl.startConsuming,
                         self.conn.startConsuming)

    @defer.inlineCallbacks
    def test_reconfigServiceWithBuildbotConfig(self):
        self.patchFakeMQ()
        self.mqconfig['type'] = 'fake'
        self.conn.setup()
        new_config = mock.Mock()
        new_config.mq = dict(type='fake')
        yield self.conn.reconfigServiceWithBuildbotConfig(new_config)
        # The same config object must have been handed down to the impl.
        self.assertIdentical(self.conn.impl.new_config, new_config)

    @defer.inlineCallbacks
    def test_reconfigService_change_type(self):
        # Changing the MQ implementation type at runtime is unsupported;
        # reconfiguration must fail with an AssertionError.
        self.patchFakeMQ()
        self.mqconfig['type'] = 'fake'
        yield self.conn.setup()
        new_config = mock.Mock()
        new_config.mq = dict(type='other')
        try:
            yield self.conn.reconfigServiceWithBuildbotConfig(new_config)
        except AssertionError:
            pass  # expected
        else:
            self.fail("should have failed")
|
SmokinCaterpillar/pypet | doc/source/conf.py | Python | bsd-3-clause | 10,941 | 0.006581 | # -*- coding: utf-8 -*-
#
# pypet documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 4 12:12:59 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
#sys.path.append(os.path.abspath('../../'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autosummary','sphinx.ext.autodoc', 'sphinx.ext.doctest',
'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'sphinx.ext.imgmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# Autogenerate stubs
#autosummary_generate = True
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pypet'
copyright = u'2021, Robert Meyer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['cookbook/concept.rst',
'other/to_new_tree.rst',
'contact_license.rst']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# try:
# import sphinx_rtd_theme
#
# html_theme = "sphinx_rtd_theme"
#
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# except ImportError:
html_theme = 'agogo'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
css_gradient = '''background: rgb(23,49,119); /* Old browsers */
background: -moz-linear-gradient(top, rgba(23,49,119,1) 0%, rgba(32,124,202,1) 65%, rgba(41,137,216,1) 82%, rgba(125,185,232,1) 100%); /* FF3.6+ */
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,rgba(23,49,119,1)), color-stop(65%,rgba(32,124,202,1)), color-stop(82%,rgba(41,137,216,1)), color-stop(100%,rgba(125,185,232,1))); /* Chrome,Safari4+ */
background: -webkit-linear-gradient(top, rgba(23,49,119,1) 0%,rgba(32,124,202,1) 65%,rgba(41,137,216,1) 82%,rgba(125,185,232,1) 100%); /* Chrome10+,Safari5.1+ */
background: -o-linear-gradient(top, rgba(23,49,119,1) 0%,rgba(32,124,202,1) 65%,rgba(41,137,216,1) 82%,rgba(125,185,232,1) 100%); /* Opera 11.10+ */
background: -ms-linear-gradient(top, rgba(23,49,119,1) 0%,rgba(32,124,202,1) 65%,rgba(41,137,216,1) 82%,rgba(125,185,232,1) 100%); /* IE10+ */
background: linear-gradient(to bottom, rgba(23,49,119,1) 0%,rgba(32,124,202,1) 65%,rgba(41,137,216,1) 82%,rgba(125,185,232,1) 100%); /* W3C */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#173177', endColorstr='#7db9e8',GradientType=0 ); /* IE6-9 */'''
html_theme_options = {'documentwidth': '55em',
'pagewidth': '75em',
'nosidebar': False,
'headerbg': css_gradient}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sou | rcelink = True
# If true, "Created usi | ng Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pypetdoc'
# -- Options for LaTeX output --------------------------------------------------
# Read the shared LaTeX preamble once at configuration time. Open the file
# read-only ('r', not 'r+'): it is never written here, and requesting write
# access would fail on read-only checkouts. The 'with' block guarantees the
# handle is closed even if read() raises.
with open('latex/latex_preamble.tex', 'r') as fh:
    PREAMBLE = fh.read()
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
'preamble': PREAMBLE,
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('latex', 'pypet.tex', u'pypet Documentation',
u'Robert Meyer', 'manual', True),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#lat |
geosdude/pyFace | menuWidget.py | Python | gpl-3.0 | 8,829 | 0.009061 | #!/bin/bash
# -*- codign:utf-8 -*-
# pyface.menuWidget.Menu_Widget
# Coded by Richard Polk
#----------------------------------------------------
from Tkinter import *
class Menu_Widget:
def __init__(self):
# Menu metrics
frame_height = self.sub_frame_height = self.guiDct['sub_frame_height']
frame_relheight = frame_height
frame_relwidth = 1
frame_rely = self.rely = self.guiDct['rely']
self.rely = self.rely + frame_relheight
# menu frame
widgetname = "menu_frame"
self.menu_frame = Frame(self.root,
name=widgetname,
relief=FLAT,
bd=self.bd,
bg=self.bg)
# bg='DarkGray')
self.menu_frame.place(rely=frame_rely,
relheight=frame_relheight,
relwidth=frame_relwidth)
#? 2-28-11 these need to be migrated into conf files.
#? What are the integers for?
self.menuLst = [
('File', 0,
[('Open...', 0, "self.disabled()"),
('Save', 0, "self.disabled()"),
('Save As...', 5, "self.disabled()"),
('New', 0, "self.disabled()"),
'separator',
('Recent Files List', 0, "self.disabled()"),
'separator',
('Quit...', 0, "self.onQuit(3)"),
]
),
('Edit', 0,
[('Cut', 0, "self.onCut()"),
('Copy', 1, "self.onCopy()"),
('Paste', 0, "self.onPaste(0)"),
'separator',
('Delete', 0, "self.onDelete()"),
('Select All', 0, "self.onSelectAll()"),
'separator',
('Clear text', 0, "self.clearText()"),
('Restore text', 0, "self.disabled()"),
'separator',
('Goto...', 0, "self.onGoto()"),
('Find...', 0, "self.findWord()"),
('Find Next', 0, "self.findNextWord()"),
('Change', 0, "self.onChange()"),
]
),
('Search', 0,
[('Files by List', 0, "self.disabled()"),
('List all Files', 0, "self.disabled()"),
('File By Extension', 0, "self.disabled()"),
('File By Extension List', 0, "self.disabled()"),
('File By Substring', 0, "self.disabled()"),
('File By Substring and Extension', 0, "self.disabled()"),
'separator',
('Tree List', 0, "self.disabled()"),
('Tree List by Substring', 0, "self.disabled()"),
]
),
('List', 0,
[('Copy Files By List', 0, "self.disabled()"),
('Path List', 0, "self.disabled()"),
('Sort List by Index', 0, "self.disabled()"),
'separator',
('List File Dicts', 0, "self.disabled()"),
'separator',
('Get List Files', 0, "self.disabled()"),
('Get Current List', 0, "self.disabled()"),
]
),
('GUI', 0,
[('Font List', 0, "self.disabled()"),
('Pick Bg...', 4, "self.disabled()"),
('Pick Fg...', 0, "self.disabled()"),
('Color List', 0, "self.disabled()"),
'separator',
('Reload GUI', 0, "self.reloadGUI()"),
'separator',
('Update Py Path', 0, "self.disabled()"),
'separator',
('Save User Settings', 0, "self.disabled()"),
'separator',
('Refresh Modules', 0, "self.disabled()"),
('Update Idle Tasks', 0, "self.disabled()"),
'separator',
('Preferences', 0, "self.disabled()"),
]
),
('Context', 0,
[("Lock Context Menu", 0, "self.ct_Lock()"),
("Unlock Context Menu", 0, "self.ct_Unlock()"),
]
),
('System', 0,
[("Register dll's", 1, "self.dllReg()"),
("Shut the Hell Up!", 1, "self.stopAnnoyingMe()"),
("Shutdown", 1, "self.disabled()"),
]
),
('Report', 0,
[('Environment', 0, "self.disabled()"),
('Variables', 0, "self.disabled()"),
('Methods', 0, "self.disabled()"),
('Display', 0, "self.disabled()"),
('GUI Widgets', 0, "self.disabled()"),
('GeoTools', 0, "self.disabled()"),
'separator',
('System Env', 0, "self.disabled()"),
('Path Values', 0, "self.disabled()"),
'separator',
('Network', 0, "self.disabled()"),
('Mapped Drives', 0, "self.disabled()"),
('Process', 0, "self.disabled()"),
'separator',
('Permissions', 0, "self.disabled()"),
'separator',
('Port Usage', 0, "self.disabled()"),
('Open Port Scan', 0, "self.disabled()"),
('GUI Port Scan', 0, "self.disabled()"),
('Ports', 0, "self.disabled()"),
'separator',
('Software', 0, "self.disabled()"),
]
),
('Input/Output', 0,
[("Echo On", 0, "self.setEcho(1)"),
("Echo Off", 0, "self.setEcho(0)"),
'separator',
('Print StdOut', 0, "self.writeStdOut()"),
('Print StdErr', 0, "self.writeStdErr()"),
'separator',
]
),
('Help', 0,
[('GeoTools', 0, "self.disabled()"),
('Python', 0, "self.pyHelp()"),
('ActiveState', 0, "self.pyHelp2()"),
('Lundh''s Tk Intro', 0, "self.TkinterHelp(1)"),
('NM Tech''s Tk Ref', 0, | "self.TkinterHelp(2)"),
'separator',
('About Python', 0, "self.pyVersion()"),
]
),
]
# Standard pulldown menus from list
for (menuName, key, items) in self.menuLst:
widgetname = string.lower(menuName.rep | lace(' ', '_'))
self.mbutton = Menubutton(self.menu_frame,
name=widgetname,
text=menuName,
font=self.fonts[7],
underline=key)
self.widgetLst.append(widgetname)
self.mbutton.pack(side=LEFT)
widgetname = string.lower(widgetname + "_pulldwn")
self.pulldown = Menu(self.mbutton, name=widgetname)
self.widgetLst.append(widgetname)
for item in items: # scan nested items list
if item == 'separator': # string: add separator
self.pulldown.add_separator({})
elif type(item) == ListType: # list: disabled item list
for num in item:
self.pulldown.entryconfig(num, state=DISABLED)
elif type(item[2]) != ListType: # Added Curry logic here 2-28-11.
docstring = """self.pulldown.add_command(label = item[0],
underline = item[1], command=Kurry(self.callHandler, item[2]))""" + '\n'
command = 'self.' + docstring
exec(docstring)
else:
self.pullover = Menu(menu)
self.addMenuItems(pullover, it |
hellrich/JeSemE | pipeline/preprocessing/google/parse_normalized.py | Python | mit | 712 | 0.021067 | #produces lemmata!
import json
import glob
import codecs
import sys
import os
# Expect exactly two CLI arguments besides the script name.
if len(sys.argv) != 3:
    raise Exception("Provide 2 arguments:\n\t1,Source directory with CAB responses\n\t2,Result file")

cab_files = sys.argv[1]    # directory containing CAB response files
result_path = sys.argv[2]  # path of the "word;lemma" file to write

# Collect "word;lemma" pairs from every CAB response file. A pair is kept only
# when the lemma actually differs from the word (case-insensitively) and
# neither field contains ';', which is the output field separator.
with codecs.open(result_path, mode="w", buffering=10000, encoding="utf-8") as result:
    for cab_file in glob.glob(os.path.join(cab_files, "*")):
        with codecs.open(cab_file, mode="r", encoding="utf-8") as cab:
            for line in cab:
                if "\t" in line:
                    # maxsplit=1: only the first tab separates the word from
                    # the JSON analysis; a stray extra tab previously made the
                    # 2-target unpack raise ValueError and abort the run.
                    word, analysis = line.split("\t", 1)
                    lemma = json.loads(analysis)["moot"]["lemma"]
                    if word.lower() != lemma.lower() and not (";" in word or ";" in lemma):
                        result.write(word + ";" + lemma + "\n")
|
bikash/omg-monitor | monitor/utils/pingdom.py | Python | gpl-3.0 | 4,612 | 0.00477 | # The MIT License
#
# Copyright (c) 2010 Daniel R. Craig
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from urlparse import urljoin
from urllib import urlencode
import urllib2
import json
import time
API_URL = 'https://api.pingdom.com/api/2.0/'
class Pingdom(object):
    """Minimal Python 2 client for the Pingdom REST API v2.0.

    Authenticates with HTTP basic auth plus the mandatory 'App-Key' header;
    every call goes through :meth:`method`, which returns the JSON-decoded
    response body.
    """
    def __init__(self, url=API_URL, username=None, password=None, appkey=None):
        # url: API root; appkey: Pingdom application key sent on every request.
        self.url = url
        self.appkey= appkey
        # Register the credentials for the API root so the opener answers
        # basic-auth challenges automatically on every request it makes.
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, url, username, password)
        auth_handler = urllib2.HTTPBasicAuthHandler(password_manager)
        self.opener = urllib2.build_opener(auth_handler)
    class RequestWithMethod(urllib2.Request):
        """urllib2.Request that can carry an arbitrary HTTP verb (e.g. PUT)."""
        def __init__(self, url, data=None, headers={},
             origin_req_host=None, unverifiable=False, http_method=None):
            urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
            if http_method:
                self.method = http_method
        def get_method(self):
            # NOTE(review): when constructed without http_method, self.method
            # was never assigned, so this raises AttributeError instead of
            # falling through to the base implementation. Pingdom.method()
            # below always passes a verb, so that branch is effectively
            # unreachable in practice — confirm before reusing this class.
            if self.method:
                return self.method
            return urllib2.Request.get_method(self)
    def method(self, url, method="GET", parameters=None):
        """Issue a `method` request to API path `url`; return decoded JSON.

        For GET, `parameters` are urlencoded into the query string; for any
        other verb they are sent urlencoded as the request body.
        """
        if parameters:
            data = urlencode(parameters)
        else:
            data = None
        method_url = urljoin(self.url, url)
        if method == "GET" and data:
            method_url = method_url+'?'+data
            req = self.RequestWithMethod(method_url, http_method=method, data=None)
        else:
            req = self.RequestWithMethod(method_url, http_method=method, data=data)
        req.add_header('App-Key', self.appkey)
        response = self.opener.open(req).read()
        return json.loads(response)
    def check_by_name(self, name):
        """Return the list of checks whose 'name' field equals `name` exactly."""
        resp = self.method('checks')
        checks = [check for check in resp['checks'] if check['name'] == name]
        return checks
    def check_status(self, name):
        """Print '<name> check <status>' for every check called `name`."""
        checks = self.check_by_name(name)
        for check in checks:
            print '%s check %s' % (check['name'], check['status'])
    def modify_check(self, name, parameters={}):
        """PUT `parameters` to every check called `name`, printing each reply.

        NOTE(review): the mutable default {} is shared across calls; it is
        never mutated here so this is currently harmless.
        """
        checks = self.check_by_name(name)
        if not checks:
            print "No checks for %s" % name
            return
        for check in checks:
            id_ = check['id']
            response = self.method('checks/%s/' % id_, method='PUT', parameters=parameters)
            print response['message']
    def pause_check(self, name):
        """Pause every check called `name`, then print its status."""
        self.modify_check(name, parameters={'paused': True})
        self.check_status(name)
    def unpause_check(self, name):
        """Resume every check called `name`, then print its status."""
        self.modify_check(name, parameters={'paused': False})
        self.check_status(name)
    def avg_response(self, check_id, minutes_back=None, country=None):
        """Return the average response time of check `check_id`.

        minutes_back limits the window to the last N minutes. When `country`
        is given, returns that country's average (or None when the country is
        absent from the summary); otherwise returns the overall average.
        """
        parameters = {}
        if minutes_back:
            from_time = "%.0f" % (time.time() - 60*minutes_back)
            parameters['from'] = from_time
        if country:
            parameters['bycountry'] = 'true'
        summary = self.method('summary.average/%s/' % check_id, parameters=parameters)['summary']
        avgresponse = summary['responsetime']['avgresponse']
        if country:
            response_time = None
            for c in avgresponse:
                countryiso = c['countryiso']
                countryresponse = c['avgresponse']
                if countryiso == country:
                    response_time = countryresponse
        else:
            response_time = avgresponse
        return response_time
|
pombredanne/bitmath | full_demo.py | Python | mit | 6,552 | 0.001374 | #!/usr/bin/env python
from __future__ import print_function
import logging
import time
import bitmath
import bitmath.integrations
import argparse
import requests
import progressbar
import os
import tempfile
import atexit
import random
# Files of various sizes to use in the demo.
#
# Moar here: https://www.kernel.org/pub/linux/kernel/v3.0/?C=S;O=D
REMOTES = [
    # patch-3.0.70.gz 20-Mar-2013 20:02 1.0M
    'https://www.kernel.org/pub/linux/kernel/v3.0/patch-3.4.92.xz',
    # patch-3.16.gz 03-Aug-2014 22:39 8.0M
    'https://www.kernel.org/pub/linux/kernel/v3.0/patch-3.16.gz',
    # patch-3.2.gz 05-Jan-2012 00:43 22M
    'https://www.kernel.org/pub/linux/kernel/v3.0/patch-3.2.gz',
]
######################################################################
# -d accepts human-readable sizes ("4MiB", "500KiB", ...) via bitmath's
# argparse integration; the parsed value is a bitmath instance, not an int.
p = argparse.ArgumentParser(description='bitmath demo suite')
p.add_argument('-d', '--down', help="Download Rate",
               type=bitmath.integrations.BitmathType,
               default=bitmath.MiB(4))
p.add_argument('-s', '--slowdown',
               help='Randomly pause to slow down the transfer rate',
               action='store_true', default=False)
args = p.parse_args()
######################################################################
# Save our example files somewhere. And then clean up every trace that
# anything ever happened there. shhhhhhhhhhhhhhhh
DESTDIR = tempfile.mkdtemp('demosuite', 'bitmath')
@atexit.register
def cleanup():
    """Delete every file downloaded into DESTDIR, then the directory itself."""
    leftovers = [os.path.join(DESTDIR, name) for name in os.listdir(DESTDIR)]
    for path in leftovers:
        os.remove(path)
    os.rmdir(DESTDIR)
######################################################################
# Download each file in REMOTES, driving a progress bar whose transfer-speed
# widget is rendered by bitmath.
for f in REMOTES:
    print("""
######################################################################""")
    fname = os.path.basename(f)
    # An array of widgets to design our progress bar. Note how we use
    # BitmathFileTransferSpeed
    widgets = ['Bitmath Demo Suite (%s): ' % fname,
               progressbar.Percentage(), ' ',
               progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
               progressbar.ETA(), ' ',
               bitmath.integrations.BitmathFileTransferSpeed()]
    # The 'stream' keyword lets us http GET files in
    # chunks. http://docs.python-requests.org/en/latest/user/quickstart/#raw-response-content
    r = requests.get(f, stream=True)
    # We haven't begun receiving the payload content yet, we have only
    # just received the response headers. Of interest is the
    # 'content-length' header which describes our payload in bytes
    #
    # http://bitmath.readthedocs.org/en/latest/classes.html#bitmath.Byte
    size = bitmath.Byte(int(r.headers['Content-Length']))
    # Demonstrate 'with' context handler, allowing us to customize all
    # bitmath string printing within the indented block. We don't need
    # all that precision anyway, just two points should do.
    #
    # http://bitmath.readthedocs.org/en/latest/module.html#bitmath-format
    with bitmath.format("{value:.2f} {unit}"):
        print("Downloading %s (%s) in %s chunks" % (f,
                                                    size.best_prefix(),
                                                    args.down.best_prefix()))
    # We have to save these files somewhere
    save_path = os.path.join(DESTDIR, fname)
    print("Saving to: %s" % save_path)
    print("")
    # OK. Let's create our actual progress bar now. See the 'maxval'
    # keyword? That's the size of our payload in bytes.
    pbar = progressbar.ProgressBar(
        widgets=widgets,
        maxval=int(size)).start()
    ######################################################################
    # Open a new file for binary writing and write 'args.down' size
    # chunks into it until we've received the entire payload
    with open(save_path, 'wb') as fd:
        # The 'iter_content' method accepts integer values of
        # bytes. Lucky for us, 'args.down' is a bitmath instance and
        # has a 'bytes' attribute we can feed into the method call.
        for chunk in r.iter_content(int(args.down.bytes)):
            fd.write(chunk)
            # The progressbar will end the entire cosmos as we know it
            # if we try to .update() it beyond its MAXVAL
            # parameter.
            #
            # That's something I'd like to avoid taking the
            # responsibility for.
            if (pbar.currval + args.down.bytes) < pbar.maxval:
                pbar.update(pbar.currval + int(args.down.bytes))
            # We can add a pause to artificially speed up/slowdown
            # the transfer rate. Allows us to see different units.
            if args.slowdown:
                # randomly slow down 1/5 of the time
                if random.randrange(0, 100) % 5 == 0:
                    time.sleep(random.randrange(0, 500) * 0.01)
    # Nothing to see here. Go home.
    pbar.finish()
######################################################################
# The listdir demos below: 'filter' takes an fnmatch-style glob, and
# 'bestprefix' asks bitmath to choose the most readable unit per file.
print("""
######################################################################
List downloaded contents
* Filter for .xz files only
""")
for p,bm in bitmath.listdir(DESTDIR,
                            filter='*.xz'):
    print(p, bm)
######################################################################
print("""
######################################################################
List downloaded contents
* Filter for .gz files only
* Print using best human readable prefix
""")
for p,bm in bitmath.listdir(DESTDIR,
                            filter='*.gz',
                            bestprefix=True):
    print(p, bm)
######################################################################
print("""
######################################################################
List downloaded contents
* No filter set, to display all files
* Limit precision of printed file size to 3 digits
* Print using best human readable prefix
""")
for p,bm in bitmath.listdir(DESTDIR,
                            bestprefix=True):
    # The format context limits every bitmath __str__ inside it to 3 digits.
    with bitmath.format("{value:.3f} {unit}"):
        print(p, bm)
######################################################################
print("""
######################################################################
Sum the size of all downloaded files together
* Print with best prefix and 3 digits of precision
""")
# `reduce` is a builtin on Python 2 but lives in functools on Python 3;
# functools.reduce exists since 2.6, so this import keeps the demo runnable
# under either interpreter (the bare builtin was a NameError on Python 3).
from functools import reduce
# Adding bitmath instances yields another bitmath instance, so the reduced
# sum can be pretty-printed directly via best_prefix()/format().
discovered_files = [f[1] for f in bitmath.listdir(DESTDIR)]
total_size = reduce(lambda x, y: x + y, discovered_files).best_prefix().format("{value:.3f} {unit}")
print("Total size of %s downloaded items: %s" % (len(discovered_files), total_size))
|
portfoliome/foil | foil/deserializers.py | Python | mit | 2,317 | 0 | import collections
import collections.abc

from functools import partial
from itertools import chain
from types import MappingProxyType
from typing import Callable
from uuid import UUID

import iso8601

from foil.parsers import parse_iso_date as _parse_iso_date
def parse_uuid(value):
    """Return *value* as a version-4 :class:`uuid.UUID` when it parses as a
    UUID string; otherwise hand the value back unchanged."""
    try:
        return UUID(value, version=4)
    except ValueError:
        return value
def parse_iso_date(value):
    """Return *value* parsed as an ISO date, or unchanged when parsing fails.

    foil's parser raises AttributeError for non-conforming input; that is
    treated here as "leave the value alone".
    """
    try:
        return _parse_iso_date(value)
    except AttributeError:
        return value
def parse_iso_datetime(value):
    """Return *value* parsed as an ISO-8601 datetime when it plausibly is one.

    Only strings containing both a date separator ('-') and a time separator
    (':') are handed to iso8601 — this prevents its over-zealous parsing of
    bare numbers and plain dates. Unparseable values come back unchanged.
    """
    looks_like_datetime = '-' in value and ':' in value
    if not looks_like_datetime:
        return value
    try:
        return iso8601.parse_date(value)
    except iso8601.ParseError:
        return value
STRING_DECODERS = (parse_uuid, parse_iso_date, parse_iso_datetime)
def json_decoder_hook(dct, str_decoders=STRING_DECODERS,
                      converters=MappingProxyType(dict())) -> dict:
    """Decoder for parsing typical objects like uuid's and dates.

    Parameters
    ----------
    dct: decoded JSON object; modified in place and returned.
    str_decoders: string parsers tried in order until one returns a non-str.
    converters: field-name -> parser mapping; takes precedence over
        str_decoders for matching keys.
    """
    for k, v in dct.items():
        if k in converters:
            parse_func = converters[k]
            dct[k] = parse_func(v)
        elif isinstance(v, str):
            for decode_func in str_decoders:
                v = decode_func(v)
                if not isinstance(v, str):
                    break
            dct[k] = v
        # collections.Mapping was a deprecated alias removed in Python 3.10;
        # the ABC lives in collections.abc.
        elif isinstance(v, collections.abc.Mapping):
            dct[k] = json_decoder_hook(v, str_decoders, converters)

    return dct
def make_json_decoder_hook(str_decoders=STRING_DECODERS,
                           extra_str_decoders=tuple(),
                           converters=MappingProxyType(dict())) -> Callable:
    """Build a customized ``object_hook`` for typical deserialization.

    Parameters
    ----------
    str_decoders: functions for decoding strings to objects.
    extra_str_decoders: additional string decoders appended to str_decoders.
    converters: field / parser function mapping.

    Notes
    -----
    Specifying a field in converters ensures custom decoding/passthrough.
    """
    combined_decoders = tuple(chain(str_decoders, extra_str_decoders))
    return partial(json_decoder_hook,
                   str_decoders=combined_decoders,
                   converters=converters)
|
gpodder/panucci | src/panucci/gtkui/gtkplaylist.py | Python | gpl-3.0 | 15,781 | 0.003865 | # -*- coding: utf-8 -*-
#
# This file is part of Panucci.
# Copyright (c) 2008-2011 The Panucci Project
#
# Panucci is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Panucci is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Panucci. If not, see <http://www.gnu.org/licenses/>.
#
# Based on http://thpinfo.com/2008/panucci/:
# A resuming media player for Podcasts and Audiobooks
# Copyright (c) 2008-05-26 Thomas Perl <thpinfo.com>
# (based on http://pygstdocs.berlios.de/pygst-tutorial/seeking.html)
from __future__ import absolute_import
import logging
import gtk
import gobject
import pango
import panucci
from panucci import platform
from panucci import util
from panucci.gtkui import gtkutil
##################################################
# PlaylistTab
##################################################
class PlaylistTab(gtk.VBox):
    def __init__(self, main_window, playlist):
        """Build the playlist tab: a tree of playlist items (with bookmark
        children), an editable Name column, a Position column, drag-and-drop
        reordering, and a row of action buttons.

        main_window: parent window object (kept as self.main).
        playlist: playlist model; its 'file_queued' and 'bookmark_added'
            events refresh/extend the tree view.
        """
        gtk.VBox.__init__(self)
        self.__log = logging.getLogger('panucci.panucci.BookmarksWindow')
        self.main = main_window
        self.playlist = playlist
        # Model columns: 0 = uid, 1 = display name, 2 = position string.
        self.__model = gtk.TreeStore(
            # uid, name, position
            gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING )
        self.set_spacing(5)
        self.treeview = gtk.TreeView()
        self.treeview.set_model(self.__model)
        self.treeview.set_headers_visible(True)
        tree_selection = self.treeview.get_selection()
        # This breaks drag and drop, only use single selection for now
        # tree_selection.set_mode(gtk.SELECTION_MULTIPLE)
        tree_selection.connect('changed', self.tree_selection_changed)
        # The tree lines look nasty on maemo
        if platform.DESKTOP:
            self.treeview.set_enable_tree_lines(True)
        self.update_model()
        # Name column: editable so the user can rename items in place.
        ncol = gtk.TreeViewColumn(_('Name'))
        ncell = gtk.CellRendererText()
        ncell.set_property('ellipsize', pango.ELLIPSIZE_END)
        ncell.set_property('editable', True)
        ncell.connect('edited', self.label_edited)
        ncol.set_expand(True)
        ncol.pack_start(ncell)
        ncol.add_attribute(ncell, 'text', 1)
        tcol = gtk.TreeViewColumn(_('Position'))
        tcell = gtk.CellRendererText()
        tcol.pack_start(tcell)
        tcol.add_attribute(tcell, 'text', 2)
        self.treeview.append_column(ncol)
        self.treeview.append_column(tcol)
        self.treeview.connect('drag-data-received', self.drag_data_recieved)
        self.treeview.connect('drag_data_get', self.drag_data_get_data)
        # Drag-and-drop restricted to this widget: rows can only be
        # reordered within the playlist, not dragged elsewhere.
        treeview_targets = [
            ( 'playlist_row_data', gtk.TARGET_SAME_WIDGET, 0 ) ]
        self.treeview.enable_model_drag_source(
            gtk.gdk.BUTTON1_MASK, treeview_targets, gtk.gdk.ACTION_COPY )
        self.treeview.enable_model_drag_dest(
            treeview_targets, gtk.gdk.ACTION_COPY )
        sw = gtk.ScrolledWindow()
        sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        sw.set_shadow_type(gtk.SHADOW_IN)
        sw.add(self.treeview)
        self.add(sw)
        # Button row: add file/folder, remove, jump-to, info, clear.
        self.hbox = gtk.HBox()
        self.add_button = gtk.Button(gtk.STOCK_NEW)
        self.add_button.set_use_stock(True)
        gtkutil.set_stock_button_text( self.add_button, _('Add File') )
        self.add_button.connect('clicked', self.add_file)
        self.hbox.pack_start(self.add_button, True, True)
        self.dir_button = gtk.Button(gtk.STOCK_OPEN)
        self.dir_button.set_use_stock(True)
        gtkutil.set_stock_button_text( self.dir_button, _('Add Folder') )
        self.dir_button.connect('clicked', self.add_directory)
        self.hbox.pack_start(self.dir_button, True, True)
        self.remove_button = gtk.Button(stock=gtk.STOCK_REMOVE)
        self.remove_button.connect('clicked', self.remove_bookmark)
        self.hbox.pack_start(self.remove_button, True, True)
        self.jump_button = gtk.Button(stock=gtk.STOCK_JUMP_TO)
        self.jump_button.connect('clicked', self.jump_bookmark)
        self.hbox.pack_start(self.jump_button, True, True)
        if platform.FREMANTLE:
            self.info_button = gtk.Button(_('Info'))
        else:
            self.info_button = gtk.Button(stock=gtk.STOCK_INFO)
        self.info_button.connect('clicked', self.show_playlist_item_details)
        self.hbox.pack_start(self.info_button, True, True)
        if platform.FREMANTLE:
            self.empty_button = gtk.Button(_('Clear'))
        else:
            self.empty_button = gtk.Button(stock=gtk.STOCK_DELETE)
            gtkutil.set_stock_button_text( self.empty_button, _('Clear'), )
        self.empty_button.connect('clicked', self.empty_playlist)
        self.hbox.pack_start(self.empty_button, True, True)
        # Fremantle (Maemo 5): thumb-sized finger-friendly buttons.
        if platform.FREMANTLE:
            for child in self.hbox.get_children():
                if isinstance(child, gtk.Button):
                    child.set_name('HildonButton-thumb')
            self.hbox.set_size_request(-1, 105)
        self.pack_start(self.hbox, False, True)
        # Keep the view in sync with the playlist model's events.
        self.playlist.register( 'file_queued',
                                lambda x,y,z: self.update_model() )
        self.playlist.register( 'bookmark_added', self.on_bookmark_added )
        self.show_all()
def tree_selection_changed(self, treeselection):
count = treeselection.count_selected_rows()
self.remove_button.set_sensitive(count > 0)
self.jump_button.set_sensitive(count == 1)
self.info_button.set_sensitive(count == 1)
def drag_data_get_data(
self, treeview, context, selection, target_id, timestamp):
treeselection = treeview.get_selection()
model, iter = treeselection.get_selected()
# only allow moving around top-level parents
if model.iter_parent(iter) is None:
# send the path of the selected row
data = model.get_string_from_iter(iter)
selection.set(selection.target, 8, data)
else:
self.__log.debug("Can't move children...")
    def drag_data_recieved(
        self, treeview, context, x, y, selection, info, timestamp):
        """Drag-destination callback: move the dragged row (identified by the
        string path in selection.data) before/after the drop row, then mirror
        the move in the playlist model."""
        drop_info = treeview.get_dest_row_at_pos(x, y)
        # TODO: If user drags the row past the last row, drop_info is None
        #       I'm not sure if it's safe to simply assume that None is
        #       equivalent to the last row...
        # NOTE(review): `[drop_info and selection.data]` builds a one-element
        # list holding the short-circuit result, so this only proceeds when
        # both values are non-None/non-empty — confusing but functional.
        if None not in [ drop_info and selection.data ]:
            model = treeview.get_model()
            path, position = drop_info
            from_iter = model.get_iter_from_string(selection.data)
            # make sure the to_iter doesn't have a parent
            to_iter = model.get_iter(path)
            if model.iter_parent(to_iter) is not None:
                to_iter = model.iter_parent(to_iter)
            from_row = model.get_path(from_iter)[0]
            to_row = path[0]
            # Adjust to_row for the removal of the source row: moving down
            # shifts the target index one up, moving up shifts it one down.
            if ( position == gtk.TREE_VIEW_DROP_BEFORE or
                 position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE ):
                model.move_before( from_iter, to_iter )
                to_row = to_row - 1 if from_row < to_row else to_row
            elif ( position == gtk.TREE_VIEW_DROP_AFTER or
                   position == gtk.TREE_VIEW_DROP_INTO_OR_AFTER ):
                model.move_after( from_iter, to_iter )
                to_row = to_row + 1 if from_row > to_row else to_row
            else:
                self.__log.debug('Drop not supported: %s', position)
            # don't do anything if we're not actually moving rows around
            if from_row != to_row:
                # NOTE(review): __init__ defines self.playlist but no
                # self.player — `self.player.playlist` here looks like it
                # should be `self.playlist`; confirm against the full file.
                self.player.playlist.move_item( from_row, to_row )
        else:
            self.__log.debug('No drop_data or selection.data available')
def update_model(self):
path_info = |
hudl/redash | tests/models/test_queries.py | Python | bsd-2-clause | 13,843 | 0.001448 | # encoding: utf8
from tests import BaseTestCase
import datetime
from redash.models import Query, Group, Event, db
from redash.utils import utcnow
class QueryTest(BaseTestCase):
    def test_changing_query_text_changes_hash(self):
        # query_hash is derived from query_text, so editing the text and
        # flushing must produce a different hash.
        q = self.factory.create_query()
        old_hash = q.query_hash
        q.query_text = "SELECT 2;"
        db.session.flush()
        # NOTE(review): assertNotEquals is a deprecated alias of
        # assertNotEqual; still works, but the modern name is preferred.
        self.assertNotEquals(old_hash, q.query_hash)
    def test_search_finds_in_name(self):
        # Search matches whole (non-ASCII) terms in the name; the term split
        # by a space (q3) must not match.
        q1 = self.factory.create_query(name=u"Testing seåřċħ")
        q2 = self.factory.create_query(name=u"Testing seåřċħing")
        q3 = self.factory.create_query(name=u"Testing seå řċħ")
        queries = list(Query.search(u"seåřċħ", [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
        self.assertNotIn(q3, queries)
    def test_search_finds_in_description(self):
        # Same matching rules apply to the description field.
        q1 = self.factory.create_query(description=u"Testing seåřċħ")
        q2 = self.factory.create_query(description=u"Testing seåřċħing")
        q3 = self.factory.create_query(description=u"Testing seå řċħ")
        queries = Query.search(u"seåřċħ", [self.factory.default_group.id])
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
        self.assertNotIn(q3, queries)
    def test_search_by_id_returns_query(self):
        # A numeric search term that equals a query id matches that query
        # only; flush first so ids are assigned.
        q1 = self.factory.create_query(description="Testing search")
        q2 = self.factory.create_query(description="Testing searching")
        q3 = self.factory.create_query(description="Testing sea rch")
        db.session.flush()
        queries = Query.search(str(q3.id), [self.factory.default_group.id])
        self.assertIn(q3, queries)
        self.assertNotIn(q1, queries)
        self.assertNotIn(q2, queries)
    def test_search_by_number(self):
        # Plain numbers inside text fields are searchable too.
        q = self.factory.create_query(description="Testing search 12345")
        db.session.flush()
        queries = Query.search('12345', [self.factory.default_group.id])
        self.assertIn(q, queries)
    def test_search_respects_groups(self):
        # A query is only visible to searches scoped to groups that can see
        # its data source.
        other_group = Group(org=self.factory.org, name="Other Group")
        db.session.add(other_group)
        ds = self.factory.create_data_source(group=other_group)
        q1 = self.factory.create_query(description="Testing search", data_source=ds)
        q2 = self.factory.create_query(description="Testing searching")
        q3 = self.factory.create_query(description="Testing sea rch")
        queries = list(Query.search("Testing", [self.factory.default_group.id]))
        self.assertNotIn(q1, queries)
        self.assertIn(q2, queries)
        self.assertIn(q3, queries)
        queries = list(Query.search("Testing", [other_group.id, self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
        self.assertIn(q3, queries)
        queries = list(Query.search("Testing", [other_group.id]))
        self.assertIn(q1, queries)
        self.assertNotIn(q2, queries)
        self.assertNotIn(q3, queries)
| def test_returns_each_query_only_once(self):
other_group = self.factory.create_group()
second_group = self.factory.create_group()
ds = self.factory.create_data_source(group=other_group)
ds.add_group(second_group, False)
q1 = self.factory.create_query(description="Testing search", data_source=ds)
db.session.flush()
queries = list(Query.search("Testing", [self.factory.default_group.id, other_group.id, second_group.id]))
self.assertEqual(1, len(queries))
    def test_save_updates_updated_at_field(self):
        # This should be a test of ModelTimestampsMixin, but it's easier to test in context of existing model... :-\
        # Mutating a tracked attribute and flushing should bump updated_at.
        one_day_ago = utcnow().date() - datetime.timedelta(days=1)
        q = self.factory.create_query(created_at=one_day_ago, updated_at=one_day_ago)
        db.session.flush()
        q.name = 'x'
        db.session.flush()
        self.assertNotEqual(q.updated_at, one_day_ago)
    def test_search_is_case_insensitive(self):
        # 'testing' (lowercase) must match a query named "Testing search".
        q = self.factory.create_query(name="Testing search")
        self.assertIn(q, Query.search('testing', [self.factory.default_group.id]))
    def test_search_query_parser_or(self):
        # The search syntax supports an explicit 'or' between terms.
        q1 = self.factory.create_query(name="Testing")
        q2 = self.factory.create_query(name="search")
        queries = list(Query.search('testing or search', [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
    def test_search_query_parser_negation(self):
        # A '-term' excludes queries matching that term.
        q1 = self.factory.create_query(name="Testing")
        q2 = self.factory.create_query(name="search")
        queries = list(Query.search('testing -search', [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertNotIn(q2, queries)
    def test_search_query_parser_parenthesis(self):
        # Parentheses group terms; '(testing search) or finding' matches
        # queries satisfying either side of the 'or'.
        q1 = self.factory.create_query(name="Testing search")
        q2 = self.factory.create_query(name="Testing searching")
        q3 = self.factory.create_query(name="Testing finding")
        queries = list(Query.search('(testing search) or finding', [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
        self.assertIn(q3, queries)
    def test_search_query_parser_hyphen(self):
        # Hyphenated names ("Testing-search") should still be found by the
        # unhyphenated two-word search.
        q1 = self.factory.create_query(name="Testing search")
        q2 = self.factory.create_query(name="Testing-search")
        queries = list(Query.search('testing search', [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
    def test_search_query_parser_emails(self):
        # Email addresses should be tokenised so that domain parts and the
        # local part are individually searchable.
        q1 = self.factory.create_query(name="janedoe@example.com")
        q2 = self.factory.create_query(name="johndoe@example.com")
        queries = list(Query.search('example', [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
        queries = list(Query.search('com', [self.factory.default_group.id]))
        self.assertIn(q1, queries)
        self.assertIn(q2, queries)
        queries = list(Query.search('johndoe', [self.factory.default_group.id]))
        self.assertNotIn(q1, queries)
        self.assertIn(q2, queries)
class QueryRecentTest(BaseTestCase):
    def test_global_recent(self):
        # Only queries with a recorded Event (here an "edit") count as recent.
        q1 = self.factory.create_query()
        q2 = self.factory.create_query()
        db.session.flush()
        e = Event(org=self.factory.org, user=self.factory.user, action="edit",
                  object_type="query", object_id=q1.id)
        db.session.add(e)
        recent = Query.recent([self.factory.default_group.id])
        self.assertIn(q1, recent)
        self.assertNotIn(q2, recent)
    def test_recent_excludes_drafts(self):
        # Draft queries must not show up in the recent list even when they
        # have recent edit events.
        q1 = self.factory.create_query()
        q2 = self.factory.create_query(is_draft=True)
        db.session.add_all([
            Event(org=self.factory.org, user=self.factory.user,
                  action="edit", object_type="query",
                  object_id=q1.id),
            Event(org=self.factory.org, user=self.factory.user,
                  action="edit", object_type="query",
                  object_id=q2.id)
        ])
        recent = Query.recent([self.factory.default_group.id])
        self.assertIn(q1, recent)
        self.assertNotIn(q2, recent)
    def test_recent_for_user(self):
        # When a user_id is given, only that user's events make a query
        # "recent"; another user_id sees nothing.
        q1 = self.factory.create_query()
        q2 = self.factory.create_query()
        db.session.flush()
        e = Event(org=self.factory.org, user=self.factory.user, action="edit",
                  object_type="query", object_id=q1.id)
        db.session.add(e)
        recent = Query.recent([self.factory.default_group.id], user_id=self.factory.user.id)
        self.assertIn(q1, recent)
        self.assertNotIn(q2, recent)
        recent = Query.recent([self.factory.default_group.id], user_id=self.factory.user.id + 1)
        self.assertNotIn(q1, recent)
        self.assertNotIn(q2, recent)
def test_respects_groups(self):
q1 = self.factory.create_query()
ds = self.factory.create_data_source(group=self.factory.create_group())
q2 = self.factor |
soarlab/FPTuner | examples/micro/jacobi-n2u2.py | Python | mit | 916 | 0.012009 | import tft_ir_api as IR
# Jacobi iteration kernel (n=2 unknowns, 2 unrolled sweeps) expressed as an
# FPTuner expression tree.  `g` is the group id passed to each IR operation.
n = 2
unrolls = 2
low = 1.0
high = 10.0
# Matrix A of bounded real variables a{i}{j} (group 0).
A = list()
for j in range(n):
    row = list()
    for i in range(n):
        row.append(IR.RealVE("a{}{}".format(i, j), 0, low, high))
    A.append(row)
# Right-hand side b (group 1).  (Fixed garbled 'b.appen | d' token.)
b = list()
for i in range(n):
    b.append(IR.RealVE("b{}".format(i), 1, low, high))
# Initial guess x = (1, ..., 1).
x = list()
for i in range(n):
    x.append(IR.FConst(1.0))
g = 2
#j k = 0
#j while convergence not reached: # while loop
for k in range(unrolls):  # replacement for while loop
    for i in range(n):  # i loop
        sigma = IR.FConst(0.0)
        for j in range(n):  # j loop
            if j != i:
                sigma = IR.BE("+", g, sigma, IR.BE("*", g, A[i][j], x[j]))
                g += 1
        # end j loop
        # Jacobi update divides by the diagonal element A[i][i]; the original
        # used A[i][j] with the stale loop index j == n-1.
        x[i] = IR.BE("/", g, IR.BE("-", g, b[i], sigma), A[i][i])
        g += 1
    # end i loop
    #j check convergence
    #j k = k+1
# end while loop
print(x[0])
rs = x[0]
IR.TuneExpr(rs)
|
great-expectations/great_expectations | great_expectations/cli/v012/project.py | Python | apache-2.0 | 3,445 | 0.001742 | import sys
import click
from great_expectations import DataContext
from great_expectations import exceptions as ge_exceptions
from great_expectations.cli.v012.cli_messages import SECTION_SEPARATOR
from great_expectations.cli.v012.toolkit import load_data_context_with_error_handling
from great_expectations.cli.v012.util import cli_message
from great_expectations.core.usage_statistics.util import send_usage_message
from great_expectations.data_context.types.base import CURRENT_GE_CONFIG_VERSION
@click.group()
def project():
    """Project operations"""
    # Click group: subcommands are attached via @project.command below.
    # (Removed redundant `pass` after the docstring.)
@project.command(name="check-config")
@click.option(
    "--directory",
    "-d",
    default="./great_expectations",
    help="The project's great_expectations directory.",
)
def project_check_config(directory):
    """Check a config for validity and help with migrations."""
    cli_message("Checking your config files for validity...\n")
    config_ok, err_msg, data_context = do_config_check(directory)
    # Record the usage event only when a context could actually be loaded.
    if data_context:
        send_usage_message(
            data_context=data_context,
            event="cli.project.check_config",
            api_version="v2",
            success=True,
        )
    if not config_ok:
        cli_message("Unfortunately, your config appears to be invalid:\n")
        cli_message(f"<red>{err_msg}</red>")
        sys.exit(1)
    cli_message("<green>Your config file appears valid!</green>")
cli_message("<green>Your config file appears valid!</green>")
@project.command(name="upgrade")
@click.option(
    "--directory",
    "-d",
    default="./great_expectations",
    help="The project's great_expectations directory.",
)
def project_upgrade(directory):
    """Upgrade a project after installing the next Great Expectations major version."""
    cli_message("\nChecking project...")
    cli_message(SECTION_SEPARATOR)
    # load_data_context_with_error_handling performs the upgrade flow itself
    # and returns a truthy context only when no further upgrade is required.
    context = load_data_context_with_error_handling(
        directory=directory, from_cli_upgrade_command=True
    )
    if context:
        up_to_date_message = (
            "Your project is up-to-date - no further upgrade is necessary.\n"
        )
        cli_message(f"<green>{up_to_date_message}</green>")
        sys.exit(0)
def do_config_check(target_directory):
    """Validate the project configuration under *target_directory*.

    Returns a tuple ``(is_config_ok, error_message, context)`` where
    ``context`` is the loaded DataContext on success and ``None`` otherwise.
    (Fixed garbled tokens: 'DataC | ontext' and '-- | is outdated.')
    """
    try:
        context: DataContext = DataContext(context_root_dir=target_directory)
        ge_config_version: int = context.get_config().config_version
        if int(ge_config_version) < CURRENT_GE_CONFIG_VERSION:
            upgrade_message: str = f"""The config_version of your great_expectations.yml -- {float(ge_config_version)} -- is outdated.
Please consult the V3 API migration guide https://docs.greatexpectations.io/en/latest/guides/how_to_guides/migrating_versions.html and
upgrade your Great Expectations configuration to version {float(CURRENT_GE_CONFIG_VERSION)} in order to take advantage of the latest capabilities.
"""
            return (
                False,
                upgrade_message,
                None,
            )
        return True, None, context
    except (
        ge_exceptions.InvalidConfigurationYamlError,
        ge_exceptions.InvalidTopLevelConfigKeyError,
        ge_exceptions.MissingTopLevelConfigKeyError,
        ge_exceptions.InvalidConfigValueTypeError,
        ge_exceptions.UnsupportedConfigVersionError,
        ge_exceptions.DataContextError,
        ge_exceptions.PluginClassNotFoundError,
        ge_exceptions.PluginModuleNotFoundError,
        ge_exceptions.GreatExpectationsError,
    ) as err:
        return False, err.message, None
|
gallantlab/pycortex | cortex/mapper/volume.py | Python | bsd-2-clause | 7,345 | 0.006535 | import numpy as np
from scipy import sparse
from . import Mapper
from . import samplers
class VolumeMapper(Mapper):
    """Base class for mappers that sample a volume between the pial and
    white-matter surfaces of each hemisphere."""

    @classmethod
    def _cache(cls, filename, subject, xfmname, **kwargs):
        """Build and cache the per-hemisphere sampling masks for
        (subject, xfmname), then return a constructed mapper."""
        from .. import db
        masks = []
        xfm = db.get_xfm(subject, xfmname, xfmtype='coord')
        pia = db.get_surf(subject, "pia", merge=False, nudge=False)
        wm = db.get_surf(subject, "wm", merge=False, nudge=False)
        # iterate over hemispheres
        for (wpts, polys), (ppts, _) in zip(pia, wm):
            masks.append(cls._getmask(xfm(ppts), xfm(wpts), polys, xfm.shape, **kwargs))
        # NOTE(review): _savecache is not defined in this module's visible
        # scope -- confirm it is provided elsewhere.
        _savecache(filename, masks[0], masks[1], xfm.shape)
        return cls(masks[0], masks[1], xfm.shape, subject, xfmname)

    @classmethod
    def _getmask(cls, pia, wm, polys, shape, npts=1024, **kwargs):
        """Monte-Carlo sampling mask: draw `npts` random points per vertex
        polyhedron, keep those inside it, and let cls._sample turn them into
        voxel weights.

        Fix: the original body referenced the undefined names `npts` and
        `polyutils` (both present in ConvexPolyhedra._getmask); `npts` is now
        a keyword parameter and `polyutils` is imported, matching that
        implementation.
        """
        from .. import mp
        from .. import polyutils
        rand = np.random.rand(npts, 3)
        csrshape = len(wm), np.prod(shape)

        def func(pts):
            if len(pts) > 0:
                # generate points within the bounding box
                samples = rand * (pts.max(0) - pts.min(0)) + pts.min(0)
                # check which points are inside the polyhedron
                inside = polyutils.inside_convex_poly(pts)(samples)
                return cls._sample(samples[inside], shape, np.sum(inside))

        surf = polyutils.Surface(pia, polys)
        samples = mp.map(func, surf.polyconvex(wm))
        ij, data = [], []
        for i, sample in enumerate(samples):
            if sample is not None:
                idx = np.zeros((2, len(sample[0])))
                idx[0], idx[1] = i, sample[0]
                ij.append(idx)
                data.append(sample[1])
        return sparse.csr_matrix((np.hstack(data), np.hstack(ij)), shape=csrshape)
class PolyConstMapper(VolumeMapper):
    # Variant distinguished only by its patch size; all sampling machinery
    # is inherited from VolumeMapper.
    patchsize = 0.5
class PolyLinMapper(VolumeMapper):
    # Same as PolyConstMapper but with a larger patch size (1 vs 0.5).
    patchsize = 1
class Polyhedral(VolumeMapper):
    '''Uses an actual (likely concave) polyhedra betwen the pial and white surfaces
    to estimate the thickness'''
    @staticmethod
    def _getmask(pia, wm, polys, shape):
        # Build a (n_vertices x n_voxels) sparse weight matrix by clipping each
        # vertex's pia/wm polyhedron against every voxel's six axis-aligned
        # planes and measuring the clipped volume with VTK's MassProperties.
        from .. import polyutils
        mask = sparse.csr_matrix((len(wm), np.prod(shape)))
        from tvtk.api import tvtk
        measure = tvtk.MassProperties()
        planes = tvtk.PlaneCollection()
        # Six half-space planes (one per voxel face); origins set per voxel below.
        for norm in np.vstack([-np.eye(3), np.eye(3)]):
            planes.append(tvtk.Plane(normal=norm))
        ccs = tvtk.ClipClosedSurface(clipping_planes=planes)
        # NOTE(review): `feats` is configured but never used afterwards.
        feats = tvtk.FeatureEdges(boundary_edges=1, non_manifold_edges=0, manifold_edges=0, feature_edges=0)
        feats.set_input(ccs.output)
        surf = polyutils.Surface(pia, polys)
        for i, (pts, faces) in enumerate(surf.polyhedra(wm)):
            if len(pts) > 0:
                poly = tvtk.PolyData(points=pts, polys=faces)
                measure.set_input(poly)
                measure.update()
                # NOTE(review): `totalvol` is assigned but never read.
                totalvol = measure.volume
                ccs.set_input(poly)
                measure.set_input(ccs.output)
                # Integer bounding box of the polyhedron, in voxel coordinates.
                bmin = pts.min(0).round().astype(int)
                bmax = (pts.max(0).round() + 1).astype(int)
                vidx = np.mgrid[bmin[0]:bmax[0], bmin[1]:bmax[1], bmin[2]:bmax[2]]
                for vox in vidx.reshape(3, -1).T:
                    try:
                        # vox is (x, y, z); the volume is indexed (z, y, x).
                        idx = np.ravel_multi_index(vox[::-1], shape)
                        for plane, m in zip(planes, [.5, .5, .5, -.5, -.5, -.5]):
                            plane.origin = vox+m
                        ccs.update()
                        if ccs.output.number_of_cells > 2:
                            measure.update()
                            mask[i, idx] = measure.volume
                    except ValueError:
                        print('Voxel not in volume: (%d, %d, %d)'%tuple(vox))
                # Normalise this vertex's row so the voxel weights sum to 1.
                mask.data[mask.indptr[i]:mask.indptr[i+1]] /= mask[i].sum()
        return mask
class ConvexPolyhedra(VolumeMapper):
    # Monte-Carlo approximation: scatter `npts` random samples in each
    # polyhedron's bounding box, keep those inside the convex hull, and let
    # cls._sample convert them into per-voxel weights.
    @classmethod
    def _getmask(cls, pia, wm, polys, shape, npts=1024):
        from .. import mp
        from .. import polyutils
        rand = np.random.rand(npts, 3)
        csrshape = len(wm), np.prod(shape)
        def func(pts):
            # Returns None for empty polyhedra; filtered out below.
            if len(pts) > 0:
                #generate points within the bounding box
                samples = rand * (pts.max(0) - pts.min(0)) + pts.min(0)
                #check which points are inside the polyhedron
                inside = polyutils.inside_convex_poly(pts)(samples)
                return cls._sample(samples[inside], shape, np.sum(inside))
        surf = polyutils.Surface(pia, polys)
        samples = mp.map(func, surf.polyconvex(wm))
        #samples = map(func, surf.polyconvex(wm)) ## For debugging
        ij, data = [], []
        for i, sample in enumerate(samples):
            if sample is not None:
                # Row index i (vertex) paired with each voxel column index.
                idx = np.zeros((2, len(sample[0])))
                idx[0], idx[1] = i, sample[0]
                ij.append(idx)
                data.append(sample[1])
        return sparse.csr_matrix((np.hstack(data), np.hstack(ij)), shape=csrshape)
class ConvexNN(VolumeMapper):
    """Nearest-neighbour sampling: each sample point votes for the voxel it
    rounds into; weights are the normalised vote counts."""

    @staticmethod
    def _sample(pts, shape, norm):
        """Return (voxel_indices, weights) for sample points `pts` (x, y, z),
        normalised by `norm`; returns None when no point falls inside `shape`.

        Fixes: `Counter` was never imported in this module (NameError), and
        `np.array(Counter(...).items())` fails on Python 3 where .items() is
        a view; the pairs are now materialised (sorted for determinism).
        """
        from collections import Counter
        coords = pts.round().astype(int)[:, ::-1]  # (x,y,z) -> (z,y,x) voxel order
        d1 = np.logical_and(0 <= coords[:, 0], coords[:, 0] < shape[0])
        d2 = np.logical_and(0 <= coords[:, 1], coords[:, 1] < shape[1])
        d3 = np.logical_and(0 <= coords[:, 2], coords[:, 2] < shape[2])
        valid = np.logical_and(d1, np.logical_and(d2, d3))
        if valid.any():
            idx = np.ravel_multi_index(coords[valid].T, shape)
            j, data = np.array(sorted(Counter(idx).items())).T
            return j, data / float(norm)
class ConvexTrilin(VolumeMapper):
    """Trilinear sampling: each sample point distributes weight to its eight
    surrounding voxels according to the trilinear interpolation weights."""

    @staticmethod
    def _sample(pts, shape, norm):
        """Return (voxel_indices, weights) for sample points `pts` (x, y, z),
        normalised by `norm`.

        Fixes: restored four corner-index lines garbled by ' | ' tokens, and
        replaced the O(unique x n) Python summation loop with
        np.unique(return_inverse) + np.bincount.
        """
        (x, y, z), floor = np.modf(pts.T)
        floor = floor.astype(int)
        ceil = floor + 1
        x[x < 0] = 0
        y[y < 0] = 0
        z[z < 0] = 0
        # Eight corner voxels; the volume is indexed (z, y, x), out-of-range
        # corners are clipped to the edge.
        i000 = np.ravel_multi_index((floor[2], floor[1], floor[0]), shape, mode='clip')
        i100 = np.ravel_multi_index((floor[2], floor[1],  ceil[0]), shape, mode='clip')
        i010 = np.ravel_multi_index((floor[2],  ceil[1], floor[0]), shape, mode='clip')
        i001 = np.ravel_multi_index(( ceil[2], floor[1], floor[0]), shape, mode='clip')
        i101 = np.ravel_multi_index(( ceil[2], floor[1],  ceil[0]), shape, mode='clip')
        i011 = np.ravel_multi_index(( ceil[2],  ceil[1], floor[0]), shape, mode='clip')
        i110 = np.ravel_multi_index((floor[2],  ceil[1],  ceil[0]), shape, mode='clip')
        i111 = np.ravel_multi_index(( ceil[2],  ceil[1],  ceil[0]), shape, mode='clip')
        # Standard trilinear corner weights.
        v000 = (1-x)*(1-y)*(1-z)
        v100 = x*(1-y)*(1-z)
        v010 = (1-x)*y*(1-z)
        v110 = x*y*(1-z)
        v001 = (1-x)*(1-y)*z
        v101 = x*(1-y)*z
        v011 = (1-x)*y*z
        v111 = x*y*z
        allj = np.vstack([i000, i100, i010, i001, i101, i011, i110, i111]).T.ravel()
        data = np.vstack([v000, v100, v010, v001, v101, v011, v110, v111]).T.ravel()
        # Sum the weights landing on each distinct voxel in one C-level pass.
        uniquej, inverse = np.unique(allj, return_inverse=True)
        uniquejdata = np.bincount(inverse, weights=data)
        return uniquej, uniquejdata / float(norm)
class ConvexLanczos(VolumeMapper):
    # Placeholder: Lanczos-window sampling has not been implemented yet.
    def _sample(self, pts):
        raise NotImplementedError
|
dhtech/graphite-web | webapp/graphite/events/views.py | Python | apache-2.0 | 2,759 | 0.006887 | import datetime
import time
from django.http import HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.utils.timezone import localtime, now
from graphite.util import json
from graphite.events import models
from graphite.render.attime import parseATTime
from django.core.urlresolvers import get_script_prefix
def to_timestamp(dt):
return time.mktime(dt.timetuple())
class EventEncoder(json.JSONEncoder):
    """JSON encoder that serialises datetime objects as Unix timestamps."""

    def default(self, obj):
        # Anything that is not a datetime falls through to the base encoder
        # (which raises TypeError for unsupported types).
        if not isinstance(obj, datetime.datetime):
            return json.JSONEncoder.default(self, obj)
        return to_timestamp(obj)
def view_events(request):
    """Render the events list on GET; any other method is an event submission."""
    if request.method != "GET":
        return post_event(request)
    page_context = {'events': fetch(request),
                    'slash': get_script_prefix()}
    return render_to_response("events.html", page_context)
def detail(request, event_id):
    """Render the page for a single event, returning 404 when it is unknown."""
    event = get_object_or_404(models.Event, pk=event_id)
    page_context = {'event': event,
                    'slash': get_script_prefix()}
    return render_to_response("event.html", page_context)
def post_event(request):
    """Create an Event from a JSON POST body; responds 200 on success,
    405 for any other HTTP method."""
    if request.method == 'POST':
        event = json.loads(request.body)
        # NOTE(review): `assert` is stripped under `python -O`, so malformed
        # payloads would then fail later with a less clear error.
        assert isinstance(event, dict)
        values = {}
        values["what"] = event["what"]
        values["tags"] = event.get("tags", None)
        # "when" is epoch seconds, defaulting to now; converted to a naive
        # local datetime for storage.
        values["when"] = datetime.datetime.fromtimestamp(
            event.get("when", time.time()))
        if "data" in event:
            values["data"] = event["data"]
        e = models.Event(**values)
        e.save()
        return HttpResponse(status=200)
    else:
        return HttpResponse(status=405)
def get_data(request):
    """Serve the event list as JSON, or as JSONP when a `jsonp` callback
    parameter is supplied.
    (Fixed garbled tokens: '| response' and 'appl | ication/json'.)
    """
    if 'jsonp' in request.REQUEST:
        response = HttpResponse(
            "%s(%s)" % (request.REQUEST.get('jsonp'),
                        json.dumps(fetch(request), cls=EventEncoder)),
            mimetype='text/javascript')
    else:
        response = HttpResponse(
            json.dumps(fetch(request), cls=EventEncoder),
            mimetype="application/json")
    return response
def fetch(request):
    """Return events between optional `from`/`until` bounds (defaults:
    epoch .. now), optionally filtered by space-separated `tags`, as a
    list of dicts."""
    #XXX we need to move to USE_TZ=True to get rid of localtime() conversions
    if request.GET.get("from", None) is not None:
        # Bounds use graphite's at-time syntax; the aware result is converted
        # to local time and stripped of tzinfo to compare with naive values.
        time_from = localtime(parseATTime(request.GET["from"])).replace(tzinfo=None)
    else:
        time_from = datetime.datetime.fromtimestamp(0)
    if request.GET.get("until", None) is not None:
        time_until = localtime(parseATTime(request.GET["until"])).replace(tzinfo=None)
    else:
        time_until = now()
    tags = request.GET.get("tags", None)
    if tags is not None:
        tags = request.GET.get("tags").split(" ")
    return [x.as_dict() for x in
            models.Event.find_events(time_from, time_until, tags=tags)]
|
aabilio/PyDownTV | Servers/televigo.py | Python | gpl-3.0 | 2,787 | 0.00757 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of PyDownTV.
#
# PyDownTV is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyDownTV is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyDownTV. If not, see <http://www.gnu.org/licenses/>.
# Pequeña descripción de qué canal de tv es el módulo
__author__="aabilio"
__date__ ="$15-may-2011 11:03:38$"
from Descargar import Descargar
from utiles import salir, formatearNombre, printt
import sys
class TeleVigo(object):
    '''
    Handles downloading videos from TeleVigo.
    '''
    URL_TeleVigo = "http://www.televigo.com/"
    def __init__(self, url=""):
        self._URL_recibida = url
    def getURL(self):
        return self._URL_recibida
    def setURL(self, url):
        self._URL_recibida = url
    url = property(getURL, setURL)
    # Private helpers used by procesarDescarga(self):
    def __descHTML(self, url2down):
        ''' Uses the Descargar (downloader) class to fetch the HTML. '''
        D = Descargar(url2down)
        return D.descargar()
    def procesarDescarga(self):
        '''
        Works out the final video URL and the file name to save it under,
        returned as:
            return [ruta_url, nombre]
        If no name should be forced (or none can be derived automatically),
        return [ruta_url, None] and the downloader will use the default
        name taken from the URL.  Both "ruta_url" and "nombre" may be lists
        (nombre[0] must correspond to ruta_url[0], and so on).
        '''
        # NOTE(review): the split()[1] calls below raise IndexError whenever
        # the expected markers are missing from the fetched page/XML.
        streamHTML = self.__descHTML(self._URL_recibida)
        xmlURL = streamHTML.split("_url_xml_datos:\"")[1].split("\"")[0]
        streamXML = self.__descHTML(xmlURL)
        url = streamXML.split("<url>")[1].split("<")[0]
        ext = "." + url.split(".")[-1]
        name = streamXML.split("<title><![CDATA[")[1].split("]")[0] + ext
        if name:
            name = formatearNombre(name)
        return [url, name]
|
tokyo-jesus/university | src/python/koans/python3/koans/about_packages.py | Python | unlicense | 1,909 | 0.001048 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This is very different to AboutModules in Ruby Koans
# Our AboutMultipleInheritance class is a little more comparable
#
from runner.koan import *
#
# Package hierarchy of Python Koans project:
#
# contemplate_koans.py
# koans/
# __init__.py
# about_asserts.py
# about_attribute_access.py
# about_class_attributes.py
# about_classes.py
# ...
#   a_package_folder/
# __init__.py
# a_module.py
class AboutPackages(Koan):
    """Koans about Python package structure and import mechanics.
    The `__` placeholders are intentionally left for the student to fill in."""
    def test_subfolders_can_form_part_of_a_module_package(self):
        # Import ./a_package_folder/a_module.py
        from .a_package_folder.a_module import Duck
        duck = Duck()
        self.assertEqual(__, duck.name)
    def test_subfolders_become_modules_if_they_have_an_init_module(self):
        # Import ./a_package_folder/__init__.py
        from .a_package_folder import an_attribute
        self.assertEqual(__, an_attribute)
    # ------------------------------------------------------------------
    def test_use_absolute_imports_to_import_upper_level_modules(self):
        # Import /contemplate_koans.py
        import contemplate_koans
        self.assertEqual(__, contemplate_koans.__name__)
        # contemplate_koans.py is the root module in this package because its
        # the first python module called in koans.
        #
        # If contemplate_koan.py was based in a_package_folder that would be
        # the root folder, which would make reaching the koans folder
        # almost impossible. So always leave the starting python script in
        # a folder which can reach everything else.
    def test_import_a_module_in_a_subfolder_folder_using_an_absolute_path(self):
        # Import contemplate_koans.py/koans/a_package_folder/a_module.py
        from koans.a_package_folder.a_module import Duck
        self.assertEqual(__, Duck.__module__)
|
JacquesLucke/still-lambda | vec2.py | Python | bsd-3-clause | 470 | 0.014894 | import math
def add(vec_a, vec_b):
    """Component-wise sum of two 2-D vectors, returned as a new [x, y] list."""
    sum_x = vec_a[0] + vec_b[0]
    sum_y = vec_a[1] + vec_b[1]
    return [sum_x, sum_y]
def sub(vec_a, vec_b):
    """Component-wise difference vec_a - vec_b, returned as a new [x, y] list."""
    diff_x = vec_a[0] - vec_b[0]
    diff_y = vec_a[1] - vec_b[1]
    return [diff_x, diff_y]
def mul(vec, sca):
    """Scale a 2-D vector by the scalar `sca`, returned as a new [x, y] list."""
    scaled_x = vec[0] * sca
    scaled_y = vec[1] * sca
    return [scaled_x, scaled_y]
def inner(vec_a, vec_b):
    """Dot (inner) product of two 2-D vectors."""
    products = (vec_a[0] * vec_b[0], vec_a[1] * vec_b[1])
    return products[0] + products[1]
def abs(vec):
    """Euclidean length of a 2-D vector.

    NOTE: intentionally keeps the module's existing name, which shadows the
    builtin ``abs`` within this module.
    """
    squared_length = vec[0] * vec[0] + vec[1] * vec[1]
    return math.sqrt(squared_length)
def norm(vec):
    """Return `vec` scaled to unit length.

    Raises ZeroDivisionError for the zero vector (unchanged behaviour).
    (Fixed garbled ' | ' tokens in the original body.)
    """
    length = abs(vec)
    return mul(vec, 1/length)
def vecint(vec):
    """Truncate both components to int (toward zero), e.g. for pixel coords.
    (Fixed garbled 'return | [...]' token in the original body.)"""
    return [int(vec[0]), int(vec[1])]
|
SEA000/uw-empathica | empathica/gluon/restricted.py | Python | mit | 9,911 | 0.003834 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
"""
import sys
import cPickle
import traceback
import types
import os
import logging
from storage import Storage
from http import HTTP
from html import BEAUTIFY
logger = logging.getLogger("web2py")
__all__ = ['RestrictedError', 'restricted', 'TicketStorage', 'compile2']
class TicketStorage(Storage):
    """
    defines the ticket object and the default values of its members (None)

    Tickets are stored on disk by default, or in a database table when a
    ``db`` connection is supplied.
    """

    def __init__(self, db=None, tablename='web2py_ticket'):
        self.db = db
        self.tablename = tablename

    def store(self, request, ticket_id, ticket_data):
        """
        stores the ticket. It will figure out if this must be on disk or in db
        """
        if self.db:
            self._store_in_db(request, ticket_id, ticket_data)
        else:
            self._store_on_disk(request, ticket_id, ticket_data)

    def _store_in_db(self, request, ticket_id, ticket_data):
        # The per-application table is created lazily by _get_table.
        table = self._get_table(self.db, self.tablename, request.application)
        table.insert(ticket_id=ticket_id,
                     ticket_data=cPickle.dumps(ticket_data),
                     created_datetime=request.now)
        # (fixed garbled format string: 'In FILE: | %(layer)s' -> 'In FILE: %(layer)s')
        logger.error('In FILE: %(layer)s\n\n%(traceback)s\n' % ticket_data)

    def _store_on_disk(self, request, ticket_id, ticket_data):
        # Pickle the ticket into <app>/errors/<ticket_id>.
        ef = self._error_file(request, ticket_id, 'wb')
        try:
            cPickle.dump(ticket_data, ef)
        finally:
            ef.close()

    def _error_file(self, request, ticket_id, mode, app=None):
        """Open the ticket file for `ticket_id`, optionally in another app's
        errors folder."""
        root = request.folder
        if app:
            root = os.path.join(os.path.join(root, '..'), app)
        errors_folder = os.path.abspath(os.path.join(root, 'errors'))#.replace('\\', '/')
        return open(os.path.join(errors_folder, ticket_id), mode)

    def _get_table(self, db, tablename, app):
        """Return (defining it if necessary) the per-application ticket table."""
        tablename = tablename + '_' + app
        table = db.get(tablename, None)
        if table is None:
            db.rollback()  # not necessary but one day
                           # any app may store tickets on DB
            table = db.define_table(
                tablename,
                db.Field('ticket_id', length=100),
                db.Field('ticket_data', 'text'),
                db.Field('created_datetime', 'datetime'),
            )
        return table

    def load(self, request, app, ticket_id):
        """Load and unpickle a previously stored ticket, from disk or db."""
        if not self.db:
            ef = self._error_file(request, ticket_id, 'rb', app)
            try:
                return cPickle.load(ef)
            finally:
                ef.close()
        table = self._get_table(self.db, self.tablename, app)
        rows = self.db(table.ticket_id == ticket_id).select()
        if rows:
            return cPickle.loads(rows[0].ticket_data)
        return None
return None
class RestrictedError(Exception):
    """
    class used to wrap an exception that occurs in the restricted environment
    below. the traceback is used to log the exception and generate a ticket.
    """

    def __init__(
        self,
        layer='',
        code='',
        output='',
        environment=None,
        ):
        """
        layer here is some description of where in the system the exception
        occurred.
        """
        if environment is None: environment = {}
        self.layer = layer
        self.code = code
        self.output = output
        self.environment = environment
        if layer:
            # Capture the active traceback and a cgitb-style snapshot; both
            # are best-effort and must never raise themselves.
            try:
                self.traceback = traceback.format_exc()
            except:
                self.traceback = 'no traceback because template parting error'
            try:
                self.snapshot = snapshot(context=10,code=code,
                                         environment=self.environment)
            except:
                self.snapshot = {}
        else:
            # No layer means "no error": used to build an empty instance.
            self.traceback = '(no error)'
            self.snapshot = {}

    def log(self, request):
        """
        logs the exception.

        Returns the ticket id (request.uuid) on success, None on failure.
        """
        try:
            d = {
                'layer': str(self.layer),
                'code': str(self.code),
                'output': str(self.output),
                'traceback': str(self.traceback),
                'snapshot': self.snapshot,
                }
            ticket_storage = TicketStorage(db=request.tickets_db)
            # ticket id is the part of request.uuid after the first '/'
            ticket_storage.store(request, request.uuid.split('/',1)[1], d)
            return request.uuid
        except:
            logger.error(self.traceback)
            return None

    def load(self, request, app, ticket_id):
        """
        loads a logged exception.
        """
        ticket_storage = TicketStorage(db=request.tickets_db)
        d = ticket_storage.load(request, app, ticket_id)
        self.layer = d['layer']
        self.code = d['code']
        self.output = d['output']
        self.traceback = d['traceback']
        # 'snapshot' may be absent in tickets written by older versions.
        self.snapshot = d.get('snapshot')

    def __str__(self):
        # safely show an useful message to the user
        try:
            output = self.output
            if isinstance(output, unicode):
                output = output.encode("utf8")
            elif not isinstance(output, str):
                output = str(output)
        except:
            output = ""
        return output
def compile2(code, layer):
    """Compile *code* for exec, normalising CRLF line endings.

    The appended '\\n' is necessary because compile() fails when the source
    ends in a comment without a trailing newline.  *layer* becomes the
    filename shown in tracebacks.
    """
    source = code.rstrip().replace('\r\n', '\n') + '\n'
    return compile(source, layer, 'exec')
def restricted(code, environment=None, layer='Unknown'):
    """
    runs code in environment and returns the output. if an exception occurs
    in code it raises a RestrictedError containing the traceback. layer is
    passed to RestrictedError to identify where the error occurred.

    NOTE: this module uses Python 2 syntax (`exec ... in`, `except E, e`).
    """
    if environment is None: environment = {}
    environment['__file__'] = layer
    environment['__name__'] = '__restricted__'
    try:
        # `code` may be pre-compiled; compile it here only when it is source.
        if type(code) == types.CodeType:
            ccode = code
        else:
            ccode = compile2(code,layer)
        exec ccode in environment
    except HTTP:
        # HTTP is web2py's normal control-flow response; let it propagate.
        raise
    except RestrictedError:
        # do not encapsulate (obfuscate) the original RestrictedError
        raise
    except Exception, error:
        # extract the exception type and value (used as output message)
        etype, evalue, tb = sys.exc_info()
        # XXX Show exception in Wing IDE if running in debugger
        if __debug__ and 'WINGDB_ACTIVE' in os.environ:
            sys.excepthook(etype, evalue, tb)
        output = "%s %s" % (etype, evalue)
        raise RestrictedError(layer, code, output, environment)
def snapshot(info=None, context=5, code=None, environment=None):
"""Return a dict describing a given traceback (based on cgitb.text)."""
import os, types, time, linecache, inspect, pydoc, cgitb
# if no exception info given, get current:
etype, evalue, etb = info or sys.exc_info()
if type(etype) is types.ClassType:
etype = etype.__name__
# create a snapshot dict with some basic information
s = {}
s['pyver'] = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
s['date'] = time.ctime(time.time())
# start to process frames
records = inspect.getinnerframes(etb, context)
s['frames'] = []
for frame, file, lnum, func, lines, index in records:
file = file and os.path.abspath(file) or '?'
args, varargs, varkw, locals = inspect.getargvalues(frame)
call = ''
if func != '?':
call = inspect.formatargvalues(args, varargs, varkw, locals,
formatvalue=lambda value: '=' + pydoc.text.repr(value))
# basic frame information
f = {'file': file, 'func': func, 'call': call, 'lines': {}, 'lnum': lnum}
highlight = {}
def reader(lnum=[lnum]):
highlight[lnum[0]] = 1
try: return linecache.getline(file, lnum[0])
finally: lnum[0] += 1
vars = cgitb. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.