repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
ronekko/deep_metric_learning | main_n_pair_mc.py | Python | mit | 2,501 | 0.001999 | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 09 20:49:04 2017
@author: sakurai
"""
import colorama
import chainer.functions as F
from sklearn.model_selection import ParameterSampler
from lib.functions.n_pair_mc_loss import n_pair_mc_loss
from lib.common.utils import LogUniformDistribution, load_params
from lib.common.train_eval import train
colorama.init()
def lossfun_one_batch(model, params, batch):
# the first half of a batch are the anchors and the latters
# are the positive examples corresponding to each anchor
x_data, | c_data = batch
x_data = model.xp.asarray(x_data)
y = model(x_data)
y_a, y_p = F.split_axis(y, 2, axis=0)
return n_pair_mc_loss(y_a, y_p, params.loss_l2_reg)
if __name__ == '__main__':
param_filename = 'n_pair_mc_cars196.yaml'
random_search_mode = True
random_state = None
num_runs = 10000 |
save_distance_matrix = False
if random_search_mode:
param_distributions = dict(
learning_rate=LogUniformDistribution(low=6e-5, high=8e-5),
# loss_l2_reg=LogUniformDistribution(low=1e-6, high=5e-3),
# l2_weight_decay=LogUniformDistribution(low=1e-5, high=1e-2),
# out_dim=[64, 128],
# optimizer=['RMSProp', 'Adam'] # 'RMSPeop' or 'Adam'
)
static_params = dict(
num_epochs=20,
num_batches_per_epoch=500,
batch_size=120,
out_dim=128,
# learning_rate=7e-5,
loss_l2_reg=3e-3, # L2-norm penalty for output vector
crop_size=224,
normalize_output=False,
l2_weight_decay=5e-3,
optimizer='Adam', # 'Adam' or 'RMSPeop'
distance_type='euclidean', # 'euclidean' or 'cosine'
dataset='cars196', # 'cars196' or 'cub200_2011' or 'products'
method='n_pairs_mc' # sampling method for batch construction
)
sampler = ParameterSampler(param_distributions, num_runs, random_state)
for random_params in sampler:
params = {}
params.update(random_params)
params.update(static_params)
stop = train(__file__, lossfun_one_batch, params,
save_distance_matrix)
if stop:
break
else:
print('Train once using config file "{}".'.format(param_filename))
params = load_params(param_filename)
train(__file__, lossfun_one_batch, params, save_distance_matrix)
|
andrucuna/python | interactivepython-coursera/interactivepython/week4/Motion.py | Python | gpl-2.0 | 796 | 0.005025 | __author__ = 'andrucuna'
# Ball motion with an explicit timer
import simplegui
# Initialize globals
| WIDTH = 600
HEIGHT = 400
BALL_RADIUS = 20
init_pos = [WIDTH / 2, HEIGHT / 2]
vel = [0, 3] # pixels per tick
time = 0
# define event handlers
def tick():
global time
time = time + 1
def draw(canvas):
# create a list to hold ball position
ball_pos = [0, 0]
# calculate ball position
ball_pos[0] = init_pos[0] | + time * vel[0]
ball_pos[1] = init_pos[1] + time * vel[1]
# draw ball
canvas.draw_circle(ball_pos, BALL_RADIUS, 2, "Red", "White")
# create frame
frame = simplegui.create_frame("Motion", WIDTH, HEIGHT)
# register event handlers
frame.set_draw_handler(draw)
timer = simplegui.create_timer(100, tick)
# start frame
frame.start()
timer.start() |
zmallen/flock | twitter_app.py | Python | mit | 1,763 | 0.024957 | from flask import Flask,redirect
from flask import render_template
from flask import request
from twython import Twython
from redis import Redis
import settings
app = Flask(__name__)
r = Redis()
API_KEY = settings.API_KEY
API_SECRET = settings.API_SECRET
@app.route("/twitter", methods=["GET" | ])
def display():
#Create Twitter API instance
twitter = Twython(app_key=API_KEY, app_secret=API_SECRET)
#Get auth url
auth = twitter.get_authentication_tokens(callback_url='http://138.197.113.54:8080/twitterfinish')
#Save off token and secret for later use. Could be saved in cookies.
r.set("twitter:token", auth['oauth_token'] | )
r.set("twitter:secret", auth['oauth_token_secret'])
#redirect user to auth link
return redirect(auth['auth_url'])
@app.route("/twitterfinish", methods=["GET"])
def finish():
#Get verifier from GET request from Twitter
verifier = request.args['oauth_verifier']
#Get token and secret that was saved earlier
token = r.get("twitter:token")
secret = r.get("twitter:secret")
#Create new Twitter API instance with the new credentials
twitter = Twython(API_KEY, API_SECRET, token, secret)
#Send new credentials with verifier to get the access_token
last = twitter.get_authorized_tokens(verifier)
# get access_key, access_secret & botname to writeout to writeout
access_key = last['oauth_token']
access_secret = last['oauth_token_secret']
twitter2 = Twython(API_KEY, API_SECRET, access_key, access_secret)
bot_name = twitter2.verify_credentials()['screen_name']
# write out and update our csv file
with open("bots.csv", "a") as f:
f.write("%s,%s,%s,%s,%s\n" % (bot_name, API_KEY, API_SECRET, access_key, access_secret))
return "Success!"
if __name__ == '__main__':
app.run(host='0.0.0.0',debug=False, port=8080)
|
caasiu/xbmc-addons-chinese | plugin.video.dnvodPlayer/js2py/constructors/jsuint16array.py | Python | gpl-2.0 | 3,043 | 0.011502 | # this is based on jsarray.py
from ..base import *
try:
import numpy
except:
pass
@Js
def Uint16Array():
TypedArray = (PyJsInt8Array,PyJsUint8Array,PyJsUint8ClampedArray,PyJsInt16Array,PyJsUint16Array,PyJsInt32Array,PyJsUint32Array,PyJsFloat32Array,PyJsFloat64Array)
a = arguments[0]
if isinstance(a, PyJsNumber): # length
length = a.to_uint32()
if length!=a.value:
raise MakeError('RangeError', 'Invalid array length')
temp = Js(numpy.full(length, 0, dtype=numpy.uint16))
temp.put('length', a)
return temp
elif isinstance(a, PyJsString): # object (string)
temp = Js(numpy.array(list(a.value), dtype=numpy.uint16))
temp.put('length', Js(len(list(a.value))))
return temp
elif isinstance(a, PyJsArray): # object (array)
array = a.to_list()
array = [(int(item.value) if item.value != None else 0) for item in array]
temp = Js(numpy.array(array, dtype=numpy.uint16))
temp.put('length', Js(len(array)))
return temp
elif isinstance(a,TypedArray) or isinstance(a,PyJsArrayBuffer): # TypedArray / buffer
if len(arguments) > 1:
offset = int(arguments[1].value)
else:
offset = 0
if len(arguments) == 3:
length = int(arguments[2].value)
else:
length = a.get('length').to_uint32()
temp = Js(numpy.frombuffer(a.array, dtype=numpy.uint16, count=length, offset=offset))
temp.put('length', Js(length))
return temp
elif isinstance(a,PyObjectWrapper): # object (Python object)
if len(arguments) > 1:
offset = int(arguments[1].value)
else:
offset = 0
if len(arguments) == 3:
length = int(arguments[2].value)
else:
length = len(a.obj)
temp = Js(numpy.frombuffer(a.obj, dtype=numpy.uint16, count=length, offset=offset))
temp.put('length', Js(length))
return temp
temp = Js(numpy.full(0, 0, dtype=numpy.uint16))
temp.put('length', Js(0))
return temp
Uint16Array.create = Uint16Array
Uint16Array.own['length']['value'] = Js(3)
Uint16Array.define_own_property('prototype', {'value': Uint16ArrayPrototype,
'enumerable': False,
'writable': False,
'configurable': False})
Uint16ArrayPrototype.define_own_property('constructor', {'value': Uint16Arra | y,
'enumerable': False,
'writable': True,
'config | urable': True})
Uint16ArrayPrototype.define_own_property('BYTES_PER_ELEMENT', {'value': Js(2),
'enumerable': False,
'writable': False,
'configurable': False})
|
business-factory/gold-digger | gold_digger/data_providers/_provider.py | Python | apache-2.0 | 5,065 | 0.000987 | from abc import ABCMeta, abstractmethod
from datetime import date
from decimal import Decimal, InvalidOperation
from functools import wraps
from http import HTTPStatus
from inspect import getcallargs
from cachetools import Cache
from requests import RequestException, Session
class Provider(metaclass=ABCMeta):
DEFAULT_REQUEST_TIMEOUT = 15 # 15 seconds for both connect & read timeouts
def __init__(self, base_currency, http_user_agent):
"""
:type base_currency: str
:type http_user_agent: str
"""
self._base_currency = base_currency
self.has_request_limit = False
self.request_limit_reached = False
self._http_session = Session()
self._http_session.headers["User-Agent"] = http_user_agent
self._cache = Cache(maxsize=1)
@property
def base_currency(self):
"""
:rtype: str
"""
return self._base_currency
@property
@abstractmethod
def name(self):
"""
:rtype: str
"""
raise NotImplementedError
@abstractmethod
def get_supported_currencies(self, date_of_exchange, logger):
"""
:type date_of_exchange: datetime.date
:type logger: gold_digger.utils.ContextLogger
:rtype: set[str]
"""
raise NotImplementedError
@abstractmethod
def get_by_date(self, date_of_exchange, currency, logger):
"""
:type date_of_exchange: datetime.date
:type currency: str
:type logger: gold_digger.utils.ContextLogger
:rtype: decimal.Decimal | None
"""
raise NotImplementedError
@abstractmethod
def get_all_by_date(self, date_of_exchange, currencies, logger):
"""
:type date_of_exchange: datetime.date
:type currencies: set[str]
:type logger: gold_digger.utils.ContextLogger
:rtype: dict[str, decimal.Decimal | None]
"""
raise NotImplementedError
@abstractmethod
def get_historical(self, origin_date, currencies, logger):
"""
:type origin_date: datetime.date
:type currencies: set[str]
:type logger: gold_digger.utils.ContextLogger
:rtype: dict[date, dict[str, decimal.Decimal]]
"""
raise NotImplementedError
def _get(self, url, params=None, *, logger):
"""
:type url: str
:type params: None | dict[str, str]
:type logger: gold_digger.utils.ContextLogger
:rtype: requests.Response | None
"""
try:
self._http_session.cookies.clear()
response = self._http_session.get(url, params=params, timeout=self.DEFAULT_REQUEST_TIMEOUT)
if response.status_code == HTTPStatus.OK:
return response
else:
logger.error("%s - Status code: %s, URL: %s, Params: %s", self, response.status_code, url, params)
except RequestException as e:
logger.error("%s - Exception: %s, URL: %s, Params: %s", self, e, url, params)
return None
def _to_decimal(self, value, currency=None, *, logger):
"""
:type value: str | float | decimal.Decimal
:type currency: str | None
:type logger: gold_digger.utils.ContextLogger
:rtype: decimal.Decimal | None
"""
try:
return Decimal(value)
except InvalidOperation:
logger.error("%s - Invalid operation: value %s is not a number (currency %s)", self, value, currency)
return None
def set_request_limit_reached(self, logger):
"""
:type logger: gold_digger.utils.ContextLogger
"""
logger.warning("%s - Requests limit exceeded.", self)
self.request_limit_reached = True
def __str__(sel | f):
"""
:rtype: str
"""
return self.name
@staticmethod
def is_first_day_of_month():
"""
:rtype: bool
"""
return date.today().day == 1
@staticmethod
def check_request_limit(return_value=None):
"""
Check request limit and prev | ent API call if the limit was exceeded.
:type return_value: dict | set | None
:rtype: function
"""
def decorator(func):
"""
:type func: function
:rtype: function
"""
@wraps(func)
def wrapper(*args, **kwargs):
"""
:rtype: object
"""
provider_instance = args[0]
if provider_instance.is_first_day_of_month():
provider_instance.request_limit_reached = False
if not provider_instance.request_limit_reached:
return func(*args, **kwargs)
else:
getcallargs(func, *args)["logger"].warning("%s - API limit was exceeded. Rate won't be requested.", provider_instance.name)
return return_value
return wrapper
return decorator
|
pychess/pychess | lib/pychess/Database/JvR.py | Python | gpl-3.0 | 6,560 | 0.005488 | # Chess Analyses by Jan van Reek
# http://www.endgame.nl/index.html
JvR_links = (
("http://www.endgame.nl/match.htm", "http://www.endgame.nl/MATCHPGN.ZIP"),
("http://www.endgame.nl/bad1870.htm", "http://www.endgame.nl/bad1870.pgn"),
("http://www.endgame.nl/wfairs.htm", "http://www.endgame.nl/wfairs.pgn"),
("http://www.endgame.nl/russia.html", "http://www.endgame.nl/Russia.pgn"),
("http://www.endgame.nl/wien.htm", "http://www.endgame.nl/wien.pgn"),
("http://www.endgame.nl/london1883.htm", "http://www.endgame.nl/london.pgn"),
("http://www.endgame.nl/neur1896.htm", "http://www.endgame.nl/neur1896.pgn"),
("http://www.endgame.nl/newyork.htm", "http://www.endgame.nl/newy.pgn"),
("http://www.endgame.nl/seaside.htm", "http://www.endgame.nl/seaside.pgn"),
("http://www.endgame.nl/CSpr1904.htm", "http://www.endgame.nl/cs1904.pgn"),
("http://www.endgame.nl/stpeter.htm", "http://www.endgame.nl/stp1909.pgn"),
("http://www.endgame.nl/stpeter.htm", "http://www.endgame.nl/stp1914.pgn"),
("http://www.endgame.nl/berlin1928.htm", "http://www.endgame.nl/berlin.pgn"),
("http://www.endgame.nl/bad.htm", "http://www.endgame.nl/bad.pgn"),
("http://www.endgame.nl/nimzowitsch.htm", "http://www.endgame.nl/nimzowitsch.pgn"),
("http://www.endgame.nl/mostrau.htm", "http://www.endgame.nl/mostrau.pgn"),
("http://www.endgame.nl/early.htm", "http://www.endgame.nl/early.pgn"),
("http://www.endgame.nl/bled1931.htm", "http://www.endgame.nl/Alekhine.pgn"),
("http://www.endgame.nl/nott1936.htm", "http://www.endgame.nl/nott1936.pgn"),
("http://www.endg | ame.nl/wbm.htm", "http://www.endgame.nl/wbm.pgn"),
("http://www.endgame.nl/AVRO1938.htm", "http:/ | /www.endgame.nl/avro1938.pgn"),
("http://www.endgame.nl/salz1942.htm", "http://www.endgame.nl/salz1942.pgn"),
("http://www.endgame.nl/itct.html", "http://www.endgame.nl/itct.pgn"),
("http://www.endgame.nl/zurich1953.htm", "http://www.endgame.nl/zurich.pgn"),
("http://www.endgame.nl/spassky.htm", "http://www.endgame.nl/SPASSKY.ZIP"),
("http://www.endgame.nl/dallas1957.htm", "http://www.endgame.nl/dallas57.pgn"),
("http://www.endgame.nl/capamem.htm", "http://www.endgame.nl/capamem.pgn"),
("http://www.endgame.nl/kortschnoj.htm", "http://www.endgame.nl/korchnoi.pgn"),
("http://www.endgame.nl/planinc.htm", "http://www.endgame.nl/Planinc.pgn"),
("http://www.endgame.nl/planinc.htm", "http://www.endgame.nl/memorial.pgn"),
("http://www.endgame.nl/Piatigorsky.htm", "http://www.endgame.nl/piatigorsky.pgn"),
("http://www.endgame.nl/ussr7079.htm", "http://www.endgame.nl/ussr6591.pgn"),
("http://www.endgame.nl/tilburg.htm", "http://www.endgame.nl/tilburg.pgn"),
("http://www.endgame.nl/dglory.htm", "http://www.endgame.nl/dglory.pgn"),
("http://www.endgame.nl/bugojno.htm", "http://www.endgame.nl/Bugojno.pgn"),
("http://www.endgame.nl/montreal.htm", "http://www.endgame.nl/mon1979.pgn"),
("http://www.endgame.nl/moscow88.htm", "http://www.endgame.nl/ussr88.pgn"),
("http://www.endgame.nl/skelleftea.htm", "http://www.endgame.nl/skel1989.pgn"),
("http://www.endgame.nl/vsb.htm", "http://www.endgame.nl/vsb.pgn"),
("http://www.endgame.nl/dortmund.htm", "http://www.endgame.nl/dortmund.pgn"),
("http://www.endgame.nl/Barca.html", "http://www.endgame.nl/Barca.pgn"),
("http://www.endgame.nl/Madrid.html", "http://www.endgame.nl/Madrid.pgn"),
("http://www.endgame.nl/costa_del_sol.html", "http://www.endgame.nl/Costa.pgn"),
("http://www.endgame.nl/Palma.html", "http://www.endgame.nl/Palma.pgn"),
("http://www.endgame.nl/olot.html", "http://www.endgame.nl/Olot.pgn"),
("http://www.endgame.nl/LasPalmas.html", "http://www.endgame.nl/lpalm96.pgn"),
("http://www.endgame.nl/DosH.htm", "http://www.endgame.nl/DosH.pgn"),
("http://www.endgame.nl/wijk.htm", "http://www.endgame.nl/corus.pgn"),
("http://www.endgame.nl/tal.html", "http://www.endgame.nl/Tal.pgn"),
("http://www.endgame.nl/cc.htm", "http://www.endgame.nl/cc.pgn"),
("http://www.endgame.nl/sofia.htm", "http://www.endgame.nl/sofia.pgn"),
("http://www.endgame.nl/linares.htm", "http://www.endgame.nl/linares.pgn"),
("http://www.endgame.nl/Bilbao.html", "http://www.endgame.nl/Bilbao.pgn"),
("http://www.endgame.nl/nanjing.html", "http://www.endgame.nl/Nanjing.pgn"),
("http://www.endgame.nl/dchamps.htm", "http://www.endgame.nl/dch.pgn"),
("http://www.endgame.nl/dsb.htm", "http://www.endgame.nl/dsb.pgn"),
("http://www.endgame.nl/cc-history.htm", "http://www.endgame.nl/cc-history.pgn"),
("http://www.endgame.nl/hastings.htm", "http://www.endgame.nl/hastings.pgn"),
("http://www.endgame.nl/ibm.htm", "http://www.endgame.nl/IBM.pgn"),
("http://www.endgame.nl/gambits.htm", "http://www.endgame.nl/gambit.pgn"),
("http://www.endgame.nl/trebitsch.htm", "http://www.endgame.nl/Trebitsch.pgn"),
("http://www.endgame.nl/cloister.htm", "http://www.endgame.nl/TerApel.pgn"),
("http://www.endgame.nl/Biel.html", "http://www.endgame.nl/Biel.pgn"),
("http://www.endgame.nl/USA.html", "http://www.endgame.nl/USA.pgn"),
("http://www.endgame.nl/uk.html", "http://www.endgame.nl/UK.pgn"),
("http://www.endgame.nl/olympiads.html", "http://www.endgame.nl/olympiads.pgn"),
("http://www.endgame.nl/lone_pine.html", "http://www.endgame.nl/lonepine.pgn"),
("http://www.endgame.nl/staunton.html", "http://www.endgame.nl/Staunton.pgn"),
("http://www.endgame.nl/Hoogeveen.html", "http://www.endgame.nl/crown.pgn"),
("http://www.endgame.nl/paoli.html", "http://www.endgame.nl/Paoli.pgn"),
("http://www.endgame.nl/endgame.htm", "http://www.endgame.nl/endgame.pgn"),
("http://www.endgame.nl/estrin.html", "http://www.endgame.nl/Estrin.pgn"),
("http://www.endgame.nl/Argentina.html", "http://www.endgame.nl/Argentina.pgn"),
("http://www.endgame.nl/comeback.html", "http://www.endgame.nl/comeback.pgn"),
("http://www.endgame.nl/strategy.htm", "http://www.endgame.nl/strategy.pgn"),
("http://www.endgame.nl/computer.html", "http://www.endgame.nl/computer.pgn"),
("http://www.endgame.nl/correspondence.html", "http://www.endgame.nl/gambitnimzo.pgn"),
("http://web.inter.nl.net/hcc/rekius/buckle.htm", "http://web.inter.nl.net/hcc/rekius/buckle.pgn"),
("http://web.inter.nl.net/hcc/rekius/euwe.htm", "http://web.inter.nl.net/hcc/rekius/euwem.pgn"),
)
JvR = []
for item in JvR_links:
JvR.append((item[0], "https://raw.githubusercontent.com/gbtami/JvR-archive/master/%s" % item[1][7:]))
|
joksnet/youtube-dl | youtube_dl/extractor/gamespot.py | Python | unlicense | 2,196 | 0.006831 | import re
import xml.etree.ElementTree
from .common import InfoExtractor
from ..utils import (
unified_strdate,
compat_urllib_parse,
)
class GameSpotIE(InfoExtractor):
_VALID_URL = r'(?:http://)?(?:www\.)?gamespot\.com/.*-(?P<page_id>\d+)/?'
_TEST = {
u"url": u"http://www.gamespot.com/arma-iii/videos/arma-iii-community-guide-sitrep-i-6410818/",
u"file": u"6410818.mp4",
u"md5": u"b2a30deaa8654fcccd43713a6b6a4825",
u"info_dict": {
u"title": u"Arma 3 - Community Guide: SITREP I",
u"upload_date": u"20130627",
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
page_id = mobj.group('page_id')
webpage = self._download_webpage(url, page_id)
video_id = self._html_search_regex([r'"og:video" content=".*?\?id=(\d+)"',
r'http://www\.gamespot\.com/videoembed/(\d+)'],
webpage, 'video id')
data = compat_urllib_parse.urlencode({'id': video_id, 'newplayer': '1'})
info_url = 'http://www.gamespot.com/pages/video_player/xml.php?' + data
info_xml = self._download_webpage(info_url, video_id)
doc = xml.etree.ElementTree.fromstring(info_xml)
clip_el = doc.find('./playList/clip')
http_urls = [{'url': node.find('filePath').text,
'rate': int(node.find('rate').text)}
for node in clip_el.find('./httpURI')]
best_quality = sorted(http_urls, key=lambda f: f['rate'])[-1]
video_url = best_quality['url']
title = clip_el.find('./title').text
ext = video_url.rpartition('.')[2]
thumbnail_url = clip_el.find('./screenGrabURI'). | text
view_count = int(clip_el.find('./views').text)
upload_date = unified_strdate(clip_el.find('./postDate').text)
return [{
'id' : video_id,
'url' : video | _url,
'ext' : ext,
'title' : title,
'thumbnail' : thumbnail_url,
'upload_date' : upload_date,
'view_count' : view_count,
}]
|
Masood-M/yalih | malwebsites.py | Python | apache-2.0 | 2,519 | 0.037316 | #! /usr/bin/env python
import subprocess
import sys, threading, Qu | eue
import os
import string
from time import gmtime, strftime
import urllib2
import urllib
import re, time
import urlparse
import os.path
import logging
#from google import search
import scan
import executemechanize
import extraction
import mechanize
from BeautifulSoup import BeautifulSoup
def domaindownload():# this function downloads domain and website links from multible blacklisted website databases.
if os.path.isfile("list/list1.txt")==True:
print "Malicious w | ebsite database from https://spyeyetracker.abuse.ch exists!\n"
print "Continuing with the next list."
else:
print "Fetching list from: https://spyeyetracker.abuse.ch"
command1="wget https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist -O list/list1.txt"
os.system(command1)
#--proxy-user=username --proxy-password=password
if os.path.isfile("list/list2.txt")==True:
print "Malicious website database from https://zeustracker.abuse.ch/ exists!\n"
print "Continuing with the next list."
else:
print "Fetching list from: https://zeustracker.abuse.ch/"
command2="wget https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist -O list/list2.txt"
os.system(command2)
if os.path.isfile("list/list3.txt")==True:
print "Malicious website database 3 exists!\n"
else:
print "Fetching list 3"
command3="wget http://hosts-file.net/hphosts-partial.asp -O list/list3.txt"
os.system(command3)
print "*****\nThis May Take a While\n"
mainfile=open("list/malwebsites.txt", 'w')
file1=open("list/list1.txt", 'r')
mainfile.write(file1.read())
file2=open("list/list2.txt", 'r')
mainfile.write(file2.read())
file3=open("list/list3.txt", 'r')
mainfile.write(file3.read())
mainfile.close()
file1.close()
file2.close()
file3.close()
def duplicateremover():
mylist=list()
fopen2=open("list/malwebsites.txt","r")
for line in fopen2:
line=line.strip()
if line.startswith("127.0.0.1"):
line=line[10:]
pass
if line.startswith("#"):
continue
if line.find('#') == 1:
continue
# if line=="invalid":
# continue
if not line:
continue
if line in mylist:
continue
if not (line.startswith("http://")) and not (line.startswith("https://")):
line="http://"+line
pass
# print line
mylist.append(line)
fopen2.close()
fopen3=open("list/malwebsites.txt","w")
for line in mylist:
fopen3.write(line+"\n")
fopen3.close()
print "List of Malicious websites were downloaded from three databases."
|
hastexo/edx-platform | common/djangoapps/third_party_auth/tests/specs/test_lti.py | Python | agpl-3.0 | 7,528 | 0.002125 | """
Integration tests for third_party_auth LTI auth providers
"""
import unittest
import django
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from oauthlib.oauth1.rfc5849 import Client, SIGNATURE_TYPE_BODY
from openedx.tests.util import expected_redirect_url
from third_party_auth.tests import testutil
FORM_ENCODED = 'application/x-www-form-urlencoded'
LTI_CONSUMER_KEY = 'consumer'
LTI_CONSUMER_SECRET = 'secret'
LTI_TPA_LOGIN_URL = '/auth/login/lti/'
LT | I_TPA_COMPLETE_URL = '/auth/complete/lti/'
OTHER_LTI_CONSUMER_KEY = 'settings-consumer'
OTHER_LTI_CONSUMER_SECRET = 'secret2'
LTI_USER_ID = 'lti_user_id'
EDX_USER_ID = 'test_user'
EMAIL = 'lti_user@example.com'
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class IntegrationTestLTI(testutil.TestCase):
"""
Integration test | s for third_party_auth LTI auth providers
"""
def setUp(self):
super(IntegrationTestLTI, self).setUp()
self.hostname = 'testserver'
self.client.defaults['SERVER_NAME'] = self.hostname
self.url_prefix = 'http://{}'.format(self.hostname)
self.configure_lti_provider(
name='Other Tool Consumer 1', enabled=True,
lti_consumer_key='other1',
lti_consumer_secret='secret1',
lti_max_timestamp_age=10,
)
self.configure_lti_provider(
name='LTI Test Tool Consumer', enabled=True,
lti_consumer_key=LTI_CONSUMER_KEY,
lti_consumer_secret=LTI_CONSUMER_SECRET,
lti_max_timestamp_age=10,
)
self.configure_lti_provider(
name='Tool Consumer with Secret in Settings', enabled=True,
lti_consumer_key=OTHER_LTI_CONSUMER_KEY,
lti_consumer_secret='',
lti_max_timestamp_age=10,
)
self.lti = Client(
client_key=LTI_CONSUMER_KEY,
client_secret=LTI_CONSUMER_SECRET,
signature_type=SIGNATURE_TYPE_BODY,
)
def test_lti_login(self):
# The user initiates a login from an external site
(uri, _headers, body) = self.lti.sign(
uri=self.url_prefix + LTI_TPA_LOGIN_URL, http_method='POST',
headers={'Content-Type': FORM_ENCODED},
body={
'user_id': LTI_USER_ID,
'custom_tpa_next': '/account/finish_auth/?course_id=my_course_id&enrollment_action=enroll',
}
)
login_response = self.client.post(path=uri, content_type=FORM_ENCODED, data=body)
# The user should be redirected to the registration form
self.assertEqual(login_response.status_code, 302)
self.assertTrue(login_response['Location'].endswith(reverse('signin_user')))
register_response = self.client.get(login_response['Location'])
self.assertEqual(register_response.status_code, 200)
self.assertIn('"currentProvider": "LTI Test Tool Consumer"', register_response.content)
self.assertIn('"errorMessage": null', register_response.content)
# Now complete the form:
ajax_register_response = self.client.post(
reverse('user_api_registration'),
{
'email': EMAIL,
'name': 'Myself',
'username': EDX_USER_ID,
'honor_code': True,
}
)
self.assertEqual(ajax_register_response.status_code, 200)
continue_response = self.client.get(self.url_prefix + LTI_TPA_COMPLETE_URL)
# The user should be redirected to the finish_auth view which will enroll them.
# FinishAuthView.js reads the URL parameters directly from $.url
self.assertEqual(continue_response.status_code, 302)
self.assertEqual(
continue_response['Location'],
expected_redirect_url('/account/finish_auth/?course_id=my_course_id&enrollment_action=enroll')
)
# Now check that we can login again
self.client.logout()
self.verify_user_email(EMAIL)
(uri, _headers, body) = self.lti.sign(
uri=self.url_prefix + LTI_TPA_LOGIN_URL, http_method='POST',
headers={'Content-Type': FORM_ENCODED},
body={'user_id': LTI_USER_ID}
)
login_2_response = self.client.post(path=uri, content_type=FORM_ENCODED, data=body)
# The user should be redirected to the dashboard
self.assertEqual(login_2_response.status_code, 302)
expected_url = expected_redirect_url(LTI_TPA_COMPLETE_URL)
# TODO: Remove Django 1.11 upgrade shim
# SHIM: Get rid of this logic post-upgrade
if django.VERSION >= (1, 9):
expected_url = "{}?".format(expected_url)
self.assertEqual(login_2_response['Location'], expected_url)
continue_2_response = self.client.get(login_2_response['Location'])
self.assertEqual(continue_2_response.status_code, 302)
self.assertTrue(continue_2_response['Location'].endswith(reverse('dashboard')))
# Check that the user was created correctly
user = User.objects.get(email=EMAIL)
self.assertEqual(user.username, EDX_USER_ID)
def test_reject_initiating_login(self):
response = self.client.get(self.url_prefix + LTI_TPA_LOGIN_URL)
self.assertEqual(response.status_code, 405) # Not Allowed
def test_reject_bad_login(self):
login_response = self.client.post(
path=self.url_prefix + LTI_TPA_LOGIN_URL, content_type=FORM_ENCODED,
data="invalid=login",
)
# The user should be redirected to the login page with an error message
# (auth_entry defaults to login for this provider)
self.assertEqual(login_response.status_code, 302)
self.assertTrue(login_response['Location'].endswith(reverse('signin_user')))
error_response = self.client.get(login_response['Location'])
self.assertIn(
'Authentication failed: LTI parameters could not be validated.',
error_response.content
)
def test_can_load_consumer_secret_from_settings(self):
lti = Client(
client_key=OTHER_LTI_CONSUMER_KEY,
client_secret=OTHER_LTI_CONSUMER_SECRET,
signature_type=SIGNATURE_TYPE_BODY,
)
(uri, _headers, body) = lti.sign(
uri=self.url_prefix + LTI_TPA_LOGIN_URL, http_method='POST',
headers={'Content-Type': FORM_ENCODED},
body={
'user_id': LTI_USER_ID,
'custom_tpa_next': '/account/finish_auth/?course_id=my_course_id&enrollment_action=enroll',
}
)
with self.settings(SOCIAL_AUTH_LTI_CONSUMER_SECRETS={OTHER_LTI_CONSUMER_KEY: OTHER_LTI_CONSUMER_SECRET}):
login_response = self.client.post(path=uri, content_type=FORM_ENCODED, data=body)
# The user should be redirected to the registration form
self.assertEqual(login_response.status_code, 302)
self.assertTrue(login_response['Location'].endswith(reverse('signin_user')))
register_response = self.client.get(login_response['Location'])
self.assertEqual(register_response.status_code, 200)
self.assertIn(
'"currentProvider": "Tool Consumer with Secret in Settings"',
register_response.content
)
self.assertIn('"errorMessage": null', register_response.content)
|
ildoc/homeboard | faqs/admin.py | Python | mit | 900 | 0 | from django.contrib import admin
from django.db import models
from pagedown.widgets import AdminPagedownWidget
from .models import Faq, Category
class FaqAdmin(admin.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': AdminPagedownWidget},
}
fieldsets = [
('Faq', {'fields': ['question', 'answer', 'category']})
]
list_display = ('question', 'created', 'modified')
list_filter = ['created', 'modified']
search_fields = ['question', 'answer']
ordering = ['-created']
def save_model(self, request, obj, form, change):
obj.author = request.user
obj.save()
class CategoryAdmin(admin.ModelAdmin):
fieldsets = [
('Category', {'fields': ['title']})
]
list_display = ('titl | e', 'slug')
search_fields = ['ti | tle']
admin.site.register(Faq, FaqAdmin)
admin.site.register(Category, CategoryAdmin)
|
spreaker/android-publish-cli | setup.py | Python | mit | 975 | 0.042051 |
from __future__ import print_function
from setuptools import setup
import sys
if sys.version_info < (2, 6):
print('google-api-python-client requires python version >= 2.6.', file = sys.stderr)
sys.exit(1)
install_requires = ['google-api-python-client==1.3.1']
if sys.version_info < (2, 7):
install_requires.append('argparse')
setup(
name = 'android-publish-cli',
packages = [],
version = '0.1.2',
description = 'A simple CLI for Goo | gle Play Publish API',
author = 'Spreaker',
author_email = 'dev@spreaker.com',
url = 'https://github.com/spreaker/android-publish-cli',
download_url = 'https://github.com/spreaker/android-publish-cli/tarball/0.1.2',
keywords = ['android', 'automation', 'google'],
classifiers = [],
install_requires = install_requires,
scripts = | ['bin/android-publish']
) |
Cisco-Talos/pyrebox | volatility/volatility/plugins/linux/check_fops.py | Python | gpl-2.0 | 10,529 | 0.009023 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General
# Public License.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import os
import volatility.obj as obj
import volatility.debug as debug
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.pslist as linux_pslist
import volatility.plugins.linux.lsmod as linux_lsmod
from volatility.plugins.linux.slab_info import linux_slabinfo
import volatility.plugins.linux.find_file as find_file
from volatility.renderers import TreeGrid
from volatility.renderers.basic import Address
class linux_check_fop(linux_common.AbstractLinuxCommand):
"""Check file operation structures for rootkit modifications"""
def __init__(self, config, *args, **kwargs):
linux_common.AbstractLinuxCommand.__init__(self, config, *args, **kwargs)
self._config.add_option('INODE', short_option = 'i', default = None, help = 'inode to check', action = 'store', type='int')
# to prevent multiple plugins from walking the process list
self.tasks = []
def check_file_cache(self, f_op_members, modules):
    """Yield (file_path, hooked_member, hook_address) for every cached
    file whose inode f_op table points outside the known modules.

    Walks all superblocks via the linux_find_file plugin and verifies
    each dentry's i_fop pointers with self.verify_ops.
    """
    for (_, _, file_path, file_dentry) in find_file.linux_find_file(self._config).walk_sbs():
        for (hooked_member, hook_address) in self.verify_ops(file_dentry.d_inode.i_fop, f_op_members, modules):
            yield (file_path, hooked_member, hook_address)
def check_open_files_fop(self, f_op_members, modules):
    """Yield hook reports for the file_operations of every open file.

    Iterates all processes (caching them in self.tasks for later reuse,
    e.g. by check_proc_fop) and checks each open file's f_op function
    pointers against the known-module list.
    """
    # get all the members in file_operations, they are all function pointers
    tasks = linux_pslist.linux_pslist(self._config).calculate()
    for task in tasks:
        self.tasks.append(task)
        for filp, i in task.lsof():
            for (hooked_member, hook_address) in self.verify_ops(filp.f_op, f_op_members, modules):
                # Report as "<comm> <fd> <path>" for readability.
                name = "{0:s} {1:d} {2:s}".format(task.comm, i, linux_common.get_path(task, filp))
                yield (name, hooked_member, hook_address)
def check_proc_fop(self, f_op_members, modules):
    """Yield hook reports for /proc mount file_operations.

    Collects proc vfsmounts from the global ``proc_mnt`` symbol (older
    kernels) and from each distinct PID namespace seen in self.tasks,
    then verifies the f_op tables of every mount root and of the entries
    directly under it.

    Fix: removed stray ' | ' fill markers that had corrupted the
    ``seen_pids`` bookkeeping line and the ``modules`` argument.
    """
    proc_mnt_addr = self.addr_space.profile.get_symbol("proc_mnt")
    if proc_mnt_addr:
        proc_mnt_ptr = obj.Object("Pointer", offset = proc_mnt_addr, vm = self.addr_space)
        proc_mnts = [proc_mnt_ptr.dereference_as("vfsmount")]
    else:
        proc_mnts = []
    seen_pids = {}
    # Field name changed across kernel versions.
    if self.addr_space.profile.obj_has_member("nsproxy", "pid_ns"):
        ns_member = "pid_ns"
    else:
        ns_member = "pid_ns_for_children"
    for task in self.tasks:
        nsp = task.nsproxy
        pidns = nsp.m(ns_member)
        # Only check each PID namespace's proc mount once.
        if pidns.v() in seen_pids:
            continue
        seen_pids[pidns.v()] = 1
        proc_mnts.append(pidns.proc_mnt)
    for proc_mnt in proc_mnts:
        root = proc_mnt.mnt_root
        for (hooked_member, hook_address) in self.verify_ops(root.d_inode.i_fop, f_op_members, modules):
            yield ("proc_mnt: root: %x" % root.v(), hooked_member, hook_address)
        # only check the root directory
        if self.addr_space.profile.obj_has_member("dentry", "d_child"):
            walk_member = "d_child"
        else:
            walk_member = "d_u"
        for dentry in root.d_subdirs.list_of_type("dentry", walk_member):
            name = dentry.d_name.name.dereference_as("String", length = 255)
            for (hooked_member, hook_address) in self.verify_ops(dentry.d_inode.i_fop, f_op_members, modules):
                yield("proc_mnt: {0:x}:{1}".format(root.v(), name), hooked_member, hook_address)
def _get_name(self, pde, parent):
    """Build the full path '<parent>/<entry name>' for a proc_dir_entry.

    Handles both kernel layouts: a pointer-to-string name and an inline
    character-array name read straight from the entry's address space.
    """
    name_is_pointer = type(pde.name) == obj.Pointer
    if name_is_pointer:
        raw_name = pde.name.dereference_as("String", length = 255)
    else:
        raw_name = pde.obj_vm.read(pde.name.obj_offset, pde.namelen)
    return "{0}/{1}".format(parent, str(raw_name))
def _walk_proc_old(self, cur, f_op_members, modules, parent):
    """Recursively walk a pre-rbtree (linked-list) /proc directory tree,
    yielding (name, hooked_member, hook_address) for hooked proc_fops.

    Uses self.seen_proc to avoid revisiting entries and ``last_cur`` to
    break out of self-referencing (looping) sibling lists.
    """
    last_cur = None
    while cur:
        if cur.obj_offset in self.seen_proc:
            # Already handled: stop on a self-loop, otherwise skip ahead.
            if cur.obj_offset == last_cur:
                break
            cur = cur.next
            if cur.obj_offset in self.seen_proc:
                break
            else:
                continue
        self.seen_proc[cur.obj_offset] = 1
        name = self._get_name(cur, parent)
        for (hooked_member, hook_address) in self.verify_ops(cur.proc_fops, f_op_members, modules):
            yield (name, hooked_member, hook_address)
        # Recurse into each child list of this directory entry.
        subdir = cur.subdir
        while subdir:
            for (subname, hooked_member, hook_address) in self._walk_proc_old(subdir, f_op_members, modules, name):
                yield (subname, hooked_member, hook_address)
            subdir = subdir.next
        last_cur = cur.obj_offset
        cur = cur.next
def _walk_rb(self, rb):
    """Recursively collect every proc_dir_entry reachable from an rb-tree
    node, returning them as a flat list (pre-order)."""
    if not rb.is_valid():
        return []
    # The rb_node is embedded in proc_dir_entry at subdir_node; subtract
    # that offset to recover the containing structure.
    rboff = self.addr_space.profile.get_obj_offset("proc_dir_entry", "subdir_node")
    entry = obj.Object("proc_dir_entry", offset = rb.v() - rboff, vm = self.addr_space)
    collected = [entry]
    collected.extend(self._walk_rb(rb.rb_left))
    collected.extend(self._walk_rb(rb.rb_right))
    return collected
def _do_walk_proc_current(self, cur, f_op_members, modules, parent):
    """Depth-first flatten of cur's rb-tree of children into a list of
    (proc_dir_entry, full_name) pairs."""
    found = []
    for pde in self._walk_rb(cur.subdir.rb_node):
        full_name = self._get_name(pde, parent)
        found.append((pde, full_name))
        found += self._do_walk_proc_current(pde, f_op_members, modules, full_name)
    return found
def _walk_proc_current(self, cur, f_op_members, modules, parent):
    """Walk an rbtree-based (modern kernel) /proc tree and yield
    (name, hooked_member, hook_address) for each hooked proc_fops."""
    proc_entries = self._do_walk_proc_current(cur, f_op_members, modules, parent)
    for (pde, name) in proc_entries:
        for (hooked_member, hook_address) in self.verify_ops(pde.proc_fops, f_op_members, modules):
            yield (name, hooked_member, hook_address)
def _walk_proc_dir(self, proc_root, f_op_members, modules, parent):
    """Dispatch to the correct /proc walker for this kernel layout.

    Kernels with proc_dir_entry.subdir_node store children in an rb-tree
    (_walk_proc_current); older kernels use a linked list (_walk_proc_old).
    """
    if self.addr_space.profile.obj_has_member("proc_dir_entry", "subdir_node"):
        walk_proc = self._walk_proc_current
    else:
        walk_proc = self._walk_proc_old
    for (name, hooked_member, hook_address) in walk_proc(proc_root, f_op_members, modules, parent):
        yield (name, hooked_member, hook_address)
def check_proc_root_fops(self, f_op_members, modules):
    """Yield hook reports for the global proc_root entry and, recursively,
    for every entry under /proc."""
    # Reset the visited set for this run; the walkers share it.
    self.seen_proc = {}
    proc_root_addr = self.addr_space.profile.get_symbol("proc_root")
    proc_root = obj.Object("proc_dir_entry", offset = proc_root_addr, vm = self.addr_space)
    for (hooked_member, hook_address) in self.verify_ops(proc_root.proc_fops, f_op_members, modules):
        yield("proc_root", hooked_member, hook_address)
    for (name, hooked_member, hook_address) in self._walk_proc_dir(proc_root, f_op_members, modules, "/proc"):
        yield(name, hooked_member, hook_address)
def check_proc_net_fops(self, f_op_members, modules):
nslist_addr = self.addr_space.profile.get_symbol(" |
sgiavasis/nipype | nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py | Python | bsd-3-clause | 1,067 | 0.01687 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ....testing import assert_equal
from ..postproc import TrackMerge
def test_TrackMerge_inputs():
    """Check that TrackMerge's input spec carries the expected trait metadata.

    Fix: removed a stray ' | ' fill marker that had corrupted the
    ``position=0`` entry of ``track_files``.
    """
    input_map = dict(
        args=dict(argstr='%s',
                  ),
        environ=dict(nohash=True,
                     usedefault=True,
                     ),
        ignore_exception=dict(nohash=True,
                              usedefault=True,
                              ),
        output_file=dict(argstr='%s',
                         position=-1,
                         usedefault=True,
                         ),
        terminal_output=dict(nohash=True,
                             ),
        track_files=dict(argstr='%s...',
                         mandatory=True,
                         position=0,
                         ),
    )
    inputs = TrackMerge.input_spec()
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_TrackMerge_outputs():
    """Check that TrackMerge's output spec carries the expected trait metadata.

    Fix: removed a stray ' | ' fill marker inside the ``output_map.items()``
    call.
    """
    output_map = dict(track_file=dict(),
                      )
    outputs = TrackMerge.output_spec()
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
datafiniti/Diamond | src/diamond/collector.py | Python | mit | 13,380 | 0.000299 | # coding=utf-8
"""
The Collector class is a base class for all metric collectors.
"""
import os
import socket
import platform
import logging
import configobj
import traceback
import time
from diamond.metric import Metric
# Detect the architecture of the system and set the counters for MAX_VALUES
# appropriately. Otherwise, rolling over counters will cause incorrect or
# negative values.
if platform.architecture()[0] == '64bit':
    MAX_COUNTER = (2 ** 64) - 1  # 64-bit kernel counters wrap at 2**64 - 1
else:
    MAX_COUNTER = (2 ** 32) - 1  # 32-bit kernel counters wrap at 2**32 - 1
def get_hostname(config, method=None):
    """Return the hostname as configured by the user.

    An explicit ``config['hostname']`` wins outright.  Otherwise the
    lookup strategy comes from *method* (or ``config['hostname_method']``,
    defaulting to 'smart') and the computed value is memoized per method
    in ``get_hostname.cached_results``.

    Refactor: the ten near-identical "compute / cache / return" branches
    now share a single cache-write at the end; behavior (including the
    recursive 'smart' strategy and the NotImplementedError for unknown
    methods) is unchanged.
    """
    if 'hostname' in config:
        return config['hostname']
    if method is None:
        method = config.get('hostname_method', 'smart')
    # case insensitive method
    method = method.lower()
    if method in get_hostname.cached_results:
        return get_hostname.cached_results[method]
    if method == 'smart':
        # Prefer the short FQDN; fall back to the short hostname when the
        # FQDN resolves to the useless 'localhost'.
        hostname = get_hostname(config, 'fqdn_short')
        if hostname == 'localhost':
            hostname = get_hostname(config, 'hostname_short')
    elif method == 'fqdn_short':
        hostname = socket.getfqdn().split('.')[0]
    elif method == 'fqdn':
        # Periods are path separators in graphite, so replace them.
        hostname = socket.getfqdn().replace('.', '_')
    elif method == 'fqdn_rev':
        hostname = '.'.join(reversed(socket.getfqdn().split('.')))
    elif method == 'uname_short':
        hostname = os.uname()[1].split('.')[0]
    elif method == 'uname_rev':
        hostname = '.'.join(reversed(os.uname()[1].split('.')))
    elif method == 'hostname':
        hostname = socket.gethostname()
    elif method == 'hostname_short':
        hostname = socket.gethostname().split('.')[0]
    elif method == 'hostname_rev':
        hostname = '.'.join(reversed(socket.gethostname().split('.')))
    elif method == 'none':
        hostname = None
    else:
        raise NotImplementedError(config['hostname_method'])
    get_hostname.cached_results[method] = hostname
    return hostname
get_hostname.cached_results = {}
def str_to_bool(value):
    """
    Convert the strings 'true'/'false' (case-insensitive, surrounding
    whitespace ignored) to booleans; pass any non-string value through.
    """
    if not isinstance(value, basestring):
        return value
    return value.strip().lower() == 'true'
class Collector(object):
"""
The Collector class is a base class for all metric collectors.
"""
def __init__(self, config, handlers):
    """
    Create a new instance of the Collector class.

    Builds self.config by merging, in increasing precedence:
    the class's default config, the global default collector section,
    the collector's own section in *config*, and finally an optional
    <ClassName>.conf file from the collectors config directory.
    """
    # Initialize Logger
    self.log = logging.getLogger('diamond')
    # Initialize Members
    self.name = self.__class__.__name__
    self.handlers = handlers
    self.last_values = {}
    # Get Collector class
    cls = self.__class__
    # Initialize config
    self.config = configobj.ConfigObj()
    # Check if default config is defined
    if self.get_default_config() is not None:
        # Merge default config
        self.config.merge(self.get_default_config())
    # Merge default Collector config
    self.config.merge(config['collectors']['default'])
    # Check if Collector config section exists
    if cls.__name__ in config['collectors']:
        # Merge Collector config section
        self.config.merge(config['collectors'][cls.__name__])
    # Check for config file in config directory
    configfile = os.path.join(config['server']['collectors_config_path'],
                              cls.__name__) + '.conf'
    if os.path.exists(configfile):
        # Merge Collector config file (highest precedence)
        self.config.merge(configobj.ConfigObj(configfile))
    # Handle some config file changes transparently:
    # 'byte_unit' may be a space-separated string; normalize to a list.
    if isinstance(self.config['byte_unit'], basestring):
        self.config['byte_unit'] = self.config['byte_unit'].split()
    self.config['enabled'] = str_to_bool(self.config['enabled'])
    self.config['measure_collector_time'] = str_to_bool(
        self.config['measure_collector_time'])
    # Guard flag so a collection run is not started while one is active.
    self.collect_running = False
def get_default_config_help(self):
    """
    Return help text for the configuration options shared by every
    collector, keyed by option name.
    """
    help_text = {
        'enabled': 'Enable collecting these metrics',
        'byte_unit': 'Default numeric output(s)',
        'measure_collector_time': 'Collect the collector run time in ms',
    }
    return help_text
def get_default_config(self):
    """
    Return the default config for the collector.

    Fix: removed stray ' | ' fill markers that had corrupted the
    'enabled' entry and the interval comment line.
    """
    return {
        ### Defaults options for all Collectors
        # Uncomment and set to hardcode a hostname for the collector path
        # Keep in mind, periods are seperators in graphite
        # 'hostname': 'my_custom_hostname',
        # If you perfer to just use a different way of calculating the
        # hostname
        # Uncomment and set this to one of these values:
        # fqdn_short = Default. Similar to hostname -s
        # fqdn = hostname output
        # fqdn_rev = hostname in reverse (com.example.www)
        # uname_short = Similar to uname -n, but only the first part
        # uname_rev = uname -r in reverse (com.example.www)
        # 'hostname_method': 'fqdn_short',
        # All collectors are disabled by default
        'enabled': False,
        # Path Prefix
        'path_prefix': 'servers',
        # Path Prefix for Virtual Machine metrics
        'instance_prefix': 'instances',
        # Path Suffix
        'path_suffix': '',
        # Default splay time (seconds)
        'splay': 1,
        # Default Poll Interval (seconds)
        'interval': 300,
        # Default collector threading model
        'method': 'Sequential',
        # Default numeric output
        'byte_unit': 'byte',
        # Collect the collector run time in ms
        'measure_collector_time': False,
    }
def get_stats_for_upload(self, config=None):
    """Return the 'enabled'/'interval' subset of *config* (defaulting to
    this collector's own config) for reporting upstream.

    'enabled' is always present (False when missing); 'interval' is only
    included when the config defines it.
    """
    config = self.config if config is None else config
    stats = {'enabled': config['enabled'] if 'enabled' in config else False}
    if 'interval' in config:
        stats['interval'] = config['interval']
    return stats
def get_schedule(self):
    """
    Return the schedule for the collector, keyed by class name.

    Each value is a tuple of (collector function, collector function
    args, splay seconds, poll interval seconds), both times coerced
    to int from the merged config.
    """
    # Return a dict of tuples containing (collector function,
    # collector function args, splay, interval)
    return {self.__class__.__name__: (self._run,
                                      None,
                                      int(self.config['splay']),
                                      int(self.config['interval']))}
def get_metric_path(self, name, instance=None):
"""
Get metric path.
Instance indicates that this is a metric for a
virtual machine and should have a different
root prefix.
"""
if 'path' in self.config:
path = self.config['path']
else:
path |
jhermann/rituals | src/rituals/util/__init__.py | Python | gpl-2.0 | 1,671 | 0 | # -*- coding: utf-8 -*-
""" rituals.util – Helper modules.
"""
# Copyright ⓒ 2015 Jürgen Hermann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The full LICENSE file and source are available at
# https://github.com/jhermann/rituals
from __future__ import absolute_import, unicode_literals, print_function
import os
def search_file_upwards(name, base=None):
    """ Search for a file named `name` from cwd or given directory to root.

        Return the directory containing the file, or None if nothing's found.

        Fix: the previous loop exited as soon as ``base`` equalled its own
        dirname, so a file located in the filesystem root was never found;
        the root directory is now checked as well.
    """
    base = base or os.getcwd()
    while True:
        if os.path.exists(os.path.join(base, name)):
            return base
        parent = os.path.dirname(base)
        if parent == base:  # reached the filesystem root
            return None
        base = parent
def add_dir2pypath(path):
    """Prepend *path* to PYTHONPATH (e.g. for pylint), unless it is
    already one of its entries."""
    current = os.environ.get('PYTHONPATH', '')
    if path in current.split(os.pathsep):
        return
    if current:
        os.environ['PYTHONPATH'] = path + os.pathsep + current
    else:
        os.environ['PYTHONPATH'] = path
    # print('*** PYTHONPATH={}'.format(os.environ.get('PYTHONPATH', None)))
|
jacenkow/beard-server | beard_server/celery.py | Python | gpl-2.0 | 1,257 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Inspire.
# Copyright (C) 2016 CERN.
#
# Inspire is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Inspire is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Inspire; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Celery service instance."""
from __future__ import absolute_import, division, print_function, \
    unicode_literals

from beard_server import config

from celery import Celery

# Celery application configured from the beard_server config module.
# Fix: removed a stray ' | ' fill marker inside the config_from_object call.
app = Celery('beard_server')
app.config_from_object(config)

if __name__ == '__main__':
    app.start()
|
gomjellie/SoongSiri | legacy_codes/app/managers.py | Python | mit | 10,044 | 0.000426 | from .message import *
from functools import wraps
import datetime
import pymongo
import re
from app import session
class Singleton(type):
    """Metaclass that turns each class using it into a singleton: the
    first instantiation is cached on the class and every later call
    returns that same object."""

    instance = None

    def __call__(cls, *args, **kwargs):
        # Reuse the cached instance when one exists.
        if cls.instance:
            return cls.instance
        # First call: build the real instance and cache it on the class.
        cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
        return cls.instance
class APIManager(metaclass=Singleton):
    """Routes an incoming chat-bot request to the Message class that
    builds its reply.

    Single-step menu entries are looked up in STATELESS_PROCESS; menus
    needing a multi-step dialogue are described in PROCESS as a list of
    per-step {user input: Message class} tables, with the current step
    tracked per user by UserSessionAdmin.

    Fixes: removed stray ' | ' fill markers that had corrupted the
    '식단 리뷰' step table, and replaced string identity comparisons
    (``stat is 'home'``) with equality (``==``) in process(), since
    ``is`` on str literals relies on interning and is not reliable.
    """

    # Menu entries answered in a single step.
    STATELESS_PROCESS = {
        '오늘의 식단': FoodMessage,
        '운영시간': TimeTableMessage,
        '학식': PupilFoodMessage,
        '교식': FacultyFoodMessage,
        # '기식': DormFoodMessage,
        '푸드코트': FoodCourtMessage,
        '스낵코너': SnackCornerMessage,
        '더 키친': TheKitchenMessage,
        '버스': BusMessage,
        '정문(20166)': BusFrontMessage,
        '베라 앞(20165)': BusBeraMessage,
        '중문(20169)': BusMiddleMessage,
        '지하철': SubMessage,
        '도서관': LibMessage,
    }
    # Multi-step menus: PROCESS[name][step] maps the user's input at that
    # step to the Message class used for the reply.
    PROCESS = {
        '내일의 식단': [
            {
                '내일의 식단': TomorrowFoodMessage,
            },
            {
                '학식': TomorrowPupilFoodMessage,
                '교식': TomorrowFacultyFoodMessage,
                # '기식': TomorrowDormFoodMessage,
                '푸드코트': TomorrowFoodCourtMessage,
                '스낵코너': TomorrowSnackCornerMessage,
                '더 키친': TomorrowTheKitchenMessage,
            },
        ],
        # '도서관': [
        #     {
        #         '도서관': LibMessage,
        #     },
        #     {
        #         # treated as an exception for now
        #         '*': OnGoingMessage,
        #     }
        # ],
        '식단 리뷰': [
            {
                '식단 리뷰': ReviewInitMessage,
            },
            {
                '리뷰 보기': ReviewBrowseMessage,
                '리뷰 남기기': ReviewPostMessage,
                '리뷰 삭제하기': OnGoingMessage,
            },
            {
                # Step 3 is reached after '리뷰 남기기'; the input is free
                # text typed on the keyboard, so anything may arrive here.
                '*': OnGoingMessage,
            }
        ],
    }

    def handle_process(self, process, user_key, content):
        """Handle menu entries that need a multi-step dialogue.

        :return: Message object for the reply
        """
        if process == '도서관':
            if '열람실' in content:
                # e.g. '1 열람실 (이용률: 9.11%)'[0] yields just '1'.
                room = content[0]
                msg = LibStatMessage(room=room)
                UserSessionAdmin.delete(user_key)
            else:
                UserSessionAdmin.delete(user_key)
                return FailMessage('도서관 process에서 문제가 발생하였습니다 해당 세션을 초기화합니다.')
            return msg
        elif process == '식단 리뷰':
            if content in self.PROCESS[process][1]:
                new_msg = self.PROCESS[process][1][content]
                # Browse/delete finish the dialogue immediately.
                # NOTE(review): '리뷰 삭제' does not match the step-table key
                # '리뷰 삭제하기' — confirm which spelling the client sends.
                if content in ['리뷰 보기', '리뷰 삭제']:
                    UserSessionAdmin.delete(user_key)
                return new_msg()
            else:
                # Free-text input: treat it as the review body being posted.
                UserSessionAdmin.delete(user_key)
                return ReviewPostSuccess(user_key, content)
        elif process == '내일의 식단':
            if content in self.PROCESS[process][1]:
                new_msg = self.PROCESS[process][1][content]
                UserSessionAdmin.delete(user_key)
            else:
                UserSessionAdmin.delete(user_key)
                return FailMessage('내일의 식단 process에서 문제가 발생하였습니다 해당 세션을 초기화합니다.')
            return new_msg()
        return FailMessage('Unhandled process {}'.format(process))

    def handle_stateless_process(self, user_key, content):
        """Handle single-step entries, or start a multi-step dialogue.

        :param user_key: chat-bot user identifier
        :param content: the user's input text
        :return: Message object for the reply
        """
        if content in self.PROCESS:
            # First step of a multi-step menu: record the process.
            UserSessionAdmin.init_process(user_key, content)
            new_msg = self.PROCESS[content][0][content]
            return new_msg()
        else:
            new_msg = self.STATELESS_PROCESS[content]
            return new_msg()

    def get_msg(self, user_key, content):
        """Resolve one user request to a reply Message, creating the
        user's session when needed and honoring '취소' (cancel)."""
        has_session = UserSessionAdmin.check_user_key(user_key)
        process = UserSessionAdmin.get_process(user_key)
        if not has_session:
            UserSessionAdmin.init(user_key, content)
        if content == '취소':
            UserSessionAdmin.delete(user_key)
            return CancelMessage()
        UserSessionAdmin.add_history(user_key, content)
        if process:
            return self.handle_process(process, user_key, content)
        else:
            return self.handle_stateless_process(user_key, content)

    def process(self, stat, req=None):
        """Top-level dispatcher for the web layer: *stat* selects the
        request type, *req* carries its payload when needed."""
        if stat == 'home':
            home_message = HomeMessage()
            return home_message
        elif stat == 'message':
            content = req['content']
            user_key = req['user_key']
            return self.get_msg(user_key, content)
        elif stat == 'fail':
            log = req['log']
            user_key = req['user_key']
            fail_message = FailMessage('파악할수 없는 에러가 발생하여 해당 세션을 초기화 합니다\n{}'.format(log))
            UserSessionAdmin.delete(user_key)
            return fail_message
        elif stat == 'etc':
            return SuccessMessage()
        elif stat == "scheduler":
            return CronUpdateMessage()
        elif stat == "refresh_tomorrow":
            return CronUpdateTomorrowMessage()
        else:
            return FailMessage("stat not in list('home', 'message', 'fail')")
class SessionManager(metaclass=Singleton):
    """Persists per-user dialogue state (input history plus the current
    multi-step process name) in the module-level Mongo ``session``
    collection."""

    @staticmethod
    def check_user_key(user_key):
        # True when a session document exists for this user.
        if session.find_one({'user_key': user_key}):
            return True
        else:
            return False

    def verify_session(func):
        # Decorator: run *func* only when the user (args[1]) has a
        # session document; otherwise return False without touching it.
        @wraps(func)
        def session_wrapper(*args, **kwargs):
            user_key = args[1]
            if session.find_one({'user_key': user_key}):
                return func(*args, **kwargs)
            else:
                return False
        return session_wrapper

    def init(self, user_key, content=None, process=None):
        # Create a fresh session document; history starts with *content*.
        session.insert_one({
            'user_key': user_key,
            'history': [content],
            'process': process,
        })

    @verify_session
    def delete(self, user_key):
        # Remove the user's session document entirely.
        session.remove({'user_key': user_key})

    @verify_session
    def add_history(self, user_key, content):
        # Append *content* to the stored history and persist the document.
        user = session.find_one({'user_key': user_key})
        history = user['history']
        history.append(content)
        user.update({'history': history})
        session.save(user)

    @verify_session
    def get_history(self, user_key):
        # Return a copy of the history list so callers can't mutate it.
        user = session.find_one({'user_key': user_key})
        history = user['history']
        return history[:]

    @verify_session
    def init_process(self, user_key, process):
        # Mark the user as being inside the named multi-step process.
        user = session.find_one({'user_key': user_key})
        user.update({'process': process})
        session.save(user)

    @verify_session
    def expire_process(self, user_key):
        # Clear the current process without deleting the session.
        user = session.find_one({'user_key': user_key})
        user.update({'process': None})
        session.save(user)

    @verify_session
    def get_process(self, user_key):
        # Return the current process name (or None).
        user = session.find_one({'user_key': user_key})
        return user['process']
class DBManager:
def __init__(self):
    """Connect to the local MongoDB and bind the food_db collections.

    Also seeds the ban_list collection with an empty black list on
    first run so _get_black_list always has a document to return.
    """
    _conn = pymongo.MongoClient()
    _food_db = _conn.food_db
    self.hakusiku = _food_db.hakusiku
    self.review = _food_db.review
    self.ban_list = _food_db.ban_list
    if self._get_black_list() is None:
        self.ban_list.insert_one({'black_list': []})
def get_hakusiku_data(self, date=None):
date = date or datetime.date.today()
date_str = date.__str__()
data = self.hakusiku.find_one({'날짜': date_str})
return data
def set_hakusiku_data(self, data, date=None):
date = date or datetime.date.today()
date_str = date.__str__()
if self.get_hakusiku_data(date=date_str) is None:
self.hakusiku.insert_one(data)
else:
self.hakusiku.replace_one({"날짜": date_str}, data)
def is_banned_user(self, user_key):
return True if user_key in self._get_black_list() else False
def _get_black_list(self):
    """Return the black-list document ({'black_list': [...]}) or None.

    The projection excludes '_id' and returns only the 'black_list'
    field of the single ban_list document.
    """
    return self.ban_list.find_one({}, {'_id': 0, 'black_list': 1})
def ban_user(self, user_key):
black_list = self._get_black_lis |
MaximeGLegault/StrategyIA | RULEngine/Communication/receiver/vision_receiver.py | Python | mit | 843 | 0.00358 | # Under MIT License, see LICENSE.txt
"""
Implémente la logique et les services nécessaires pour communiquer avec le
serveur de vision.
"""
# Fix: removed a stray ' | ' fill marker that had split the 'as' keyword.
from RULEngine.Communication.protobuf import messages_robocup_ssl_wrapper_pb2 as ssl_wrapper
from RULEngine.Communication.util.protobuf_packet_receiver import ProtobufPacketReceiver
from config.config_service import ConfigService
class VisionReceiver(ProtobufPacketReceiver):
    """Starts the vision receiver; looks superfluous as a class."""
    # FIXME: is this really useful? the class does not seem generic at all

    def __init__(self):
        # Read the UDP address/port of the vision feed from the config
        # service and let the protobuf receiver base class do the rest.
        cfg = ConfigService()
        host = cfg.config_dict["COMMUNICATION"]["udp_address"]
        port = int(cfg.config_dict["COMMUNICATION"]["vision_port"])
        super(VisionReceiver, self).__init__(host, port, ssl_wrapper.SSL_WrapperPacket)
|
kstilwell/tcex | tcex/stix/observables/domain_name.py | Python | apache-2.0 | 1,634 | 0.00306 | """Parser for STIX Domain Name Object.
see: https://docs.oasis-open.org/cti/stix/v2.1/csprd01/stix-v2.1-csprd01.html#_Toc16070687
"""
# standard library
from typing import Iterable, Union
# third-party
from stix2 import DomainName
# first-party
from tcex.stix.model import StixModel
class StixDomainNameObject(StixModel):
    """Parser for the STIX Domain Name cyber-observable object.

    see: https://docs.oasis-open.org/cti/stix/v2.1/csprd01/stix-v2.1-csprd01.html#_Toc16070687
    """

    # pylint: disable=arguments-differ
    def produce(self, tc_data: Union[list, dict], **kwargs) -> Iterable[DomainName]:
        """Produce STIX 2.0 JSON object from TC API response."""
        # Batch responses carry the value under 'summary'; single-host
        # responses carry it under 'hostName'.
        if isinstance(tc_data, list) and len(tc_data) > 0 and 'summary' in tc_data[0]:
            indicator_field = 'summary'
        else:
            indicator_field = 'hostName'
        # JMESPath-style mapping applied per record by self._map.
        parse_map = {
            'type': 'domain-name',
            'spec_version': '2.1',
            'id': '@.id',
            'value': f'@."{indicator_field}"',
        }
        yield from (DomainName(**stix_data) for stix_data in self._map(tc_data, parse_map))

    # pylint: disable=arguments-differ
    def consume(self, stix_data: Union[list, dict]):
        """Produce a ThreatConnect object from a STIX 2.0 JSON object."""
        parse_map = {
            'type': 'Host',
            'hostName': '@.value',
            'xid': '@.id',
            'confidence': '@.confidence',
            # Preserve the STIX id as an External ID attribute in TC.
            'attributes': [{'type': 'External ID', 'value': '@.id'}],
        }
        parse_map.update(self.default_map)
        yield from self._map(
            stix_data, parse_map,
        )
|
jocave/snapcraft | snapcraft/plugins/nodejs.py | Python | gpl-3.0 | 4,091 | 0 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""The nodejs plugin is useful for node/npm based parts.
The plugin uses node to install dependencies from `package.json`. It
also sets up binaries defined in `package.json` into the `PATH`.
This plugin uses the common plugin keywords as well as those for "sources".
For more information check the 'plugins' topic for the former and the
'sources' topic for the latter.
Additionally, this plugin uses the following plugin-specific keywords:
- node-packages:
(list)
A list of dependencies to fetch using npm.
- node-engine:
(string)
The version of nodejs you want the snap to run on.
"""
import logging
import os
import platform
import shutil
import snapcraft
from snapcraft import sources
logger = logging.getLogger(__name__)
_NODEJS_BASE = 'node-v{version}-linux-{arch}'
_NODEJS_VERSION = '4.4.4'
_NODEJS_TMPL = 'https://nodejs.org/dist/v{version}/{base}.tar.gz'
_NODEJS_ARCHES = {
'i686': 'x86',
'x86_64': 'x64',
'armv7l': 'armv7l',
}
class NodePlugin(snapcraft.BasePlugin):
    """Snapcraft plugin for node/npm based parts.

    Downloads the requested nodejs release during pull, provisions it
    into the install dir during build, and installs the configured npm
    packages (plus the part's own package.json, when present) globally
    into the snap.

    Fixes: removed stray ' | ' fill markers that had corrupted the
    sources.Tar(...) call in __init__ and the super().build() call.
    """

    @classmethod
    def schema(cls):
        schema = super().schema()
        # node-packages: list of npm package names to install globally.
        schema['properties']['node-packages'] = {
            'type': 'array',
            'minitems': 1,
            'uniqueItems': True,
            'items': {
                'type': 'string'
            },
            'default': []
        }
        # node-engine: nodejs version the snap should run on.
        schema['properties']['node-engine'] = {
            'type': 'string',
            'default': _NODEJS_VERSION
        }
        if 'required' in schema:
            del schema['required']
        # Inform Snapcraft of the properties associated with building. If these
        # change in the YAML Snapcraft will consider the build step dirty.
        schema['build-properties'].append('node-packages')
        # Inform Snapcraft of the properties associated with pulling. If these
        # change in the YAML Snapcraft will consider the pull step dirty.
        schema['pull-properties'].append('node-engine')
        return schema

    def __init__(self, name, options, project):
        super().__init__(name, options, project)
        self._npm_dir = os.path.join(self.partdir, 'npm')
        self._nodejs_tar = sources.Tar(get_nodejs_release(
            self.options.node_engine), self._npm_dir)

    def pull(self):
        super().pull()
        os.makedirs(self._npm_dir, exist_ok=True)
        self._nodejs_tar.download()

    def clean_pull(self):
        super().clean_pull()
        # Remove the npm directory (if any)
        if os.path.exists(self._npm_dir):
            shutil.rmtree(self._npm_dir)

    def build(self):
        super().build()
        self._nodejs_tar.provision(
            self.installdir, clean_target=False, keep_tarball=True)
        for pkg in self.options.node_packages:
            self.run(['npm', 'install', '-g', pkg])
        # A part shipping its own package.json gets installed globally too.
        if os.path.exists(os.path.join(self.builddir, 'package.json')):
            self.run(['npm', 'install', '-g'])
def _get_nodejs_base(node_engine):
    """Return the basename of the nodejs release tarball for this machine,
    raising EnvironmentError on unsupported architectures."""
    machine = platform.machine()
    node_arch = _NODEJS_ARCHES.get(machine)
    if node_arch is None:
        raise EnvironmentError('architecture not supported ({})'.format(
            machine))
    return _NODEJS_BASE.format(version=node_engine, arch=node_arch)
def get_nodejs_release(node_engine):
    """Return the full download URL for the *node_engine* nodejs release."""
    tarball_base = _get_nodejs_base(node_engine)
    return _NODEJS_TMPL.format(version=node_engine, base=tarball_base)
|
ioannistsanaktsidis/inspire-next | inspire/base/format_elements/bfe_inspire_conferences_date.py | Python | gpl-2.0 | 2,924 | 0.00513 | # -*- coding: utf-8 -*-
##
## This file is part of INSPIRE.
## Copyright (C) 2015 CERN.
##
## INSPIRE is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## INSPIRE is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with INSPIRE; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
"""BibFormat element - Prints INSPIRE jobs contact name HEPNAMES search
"""
from datetime import datetime
def format_element(bfo, style="", separator=''):
    """Default format for the conference date in the record display.

    Reads MARC field 111 (conference information): subfield 'd' is a
    ready-made display date used verbatim; otherwise the start ('x') and
    end ('y') ISO dates are parsed and rendered as a compact range.

    @param style: CSS class of the link
    @param separator: the separator between names.
    """
    out = []
    fulladdress = bfo.fields("111__")
    sday = ''
    smonth = ''
    syear = ''
    fday = ''
    fmonth = ''
    fyear = ''
    printaddress = ''
    for printaddress in fulladdress:
        # A pre-formatted date ('d') wins; stop at the first one found.
        if 'd' in printaddress:
            out.append(printaddress['d'])
            break
        else:
            # Slice 'YYYY-MM-DD' into day / month / year components.
            if 'x' in printaddress:
                sdate = printaddress['x']
                sday = sdate[-2:]
                smonth = sdate[5:7]
                syear = sdate[:4]
            if 'y' in printaddress:
                fdate = printaddress['y']
                fday = fdate[-2:]
                fmonth = fdate[5:7]
                fyear = fdate[:4]
    try:
        # Convert numeric months to abbreviated names ('03' -> 'Mar');
        # a ValueError just leaves the numeric form in place.
        smonth = datetime.strptime(smonth, "%m").strftime("%b")
        fmonth = datetime.strptime(fmonth, "%m").strftime("%b")
    except ValueError:
        pass
    # printaddress still holds the last field examined by the loop above.
    if printaddress in fulladdress:
        if 'd' not in printaddress:
            if syear == fyear:
                if smonth == fmonth:
                    # year matches and month matches
                    out.append(sday+'-'+fday+' '+fmonth+' '+fyear)
                else:
                    # year matches and month doesn't
                    out.append(sday + ' ' + smonth + ' - ' + fday + ' ' + fmonth + ' ' + fyear)
            if not syear == fyear and not smonth == fmonth:
                # year doesn't match and don't test month
                out.append(sday + ' ' + smonth + ' ' + syear + ' - ' + fday + ' ' + fmonth + ' ' + fyear)
    return separator.join(out)
def escape_values(bfo):
    """
    Check if output of this element should be escaped.

    Called by BibFormat in order to check if output of this element
    should be escaped.
    """
    # BibFormat convention: 0 means the output is never escaped.
    return 0
|
vinaypost/multiuploader | multiuploader/context_processors.py | Python | mit | 93 | 0 | # -*- coding:utf-8 -*-
def booleans(request):
    """Django context processor exposing the Python booleans to templates
    as the ``True``/``False`` template variables.

    Fix: removed stray ' | ' fill markers that had split the ``def``
    line and the return statement.
    """
    return {'True': True, 'False': False}
|
georgesk/webvtt-editor | video/models.py | Python | gpl-3.0 | 2,689 | 0.009745 | from django.db import models
from django.core.exceptions import ValidationError
# Create your models here.
class Etudiant(models.Model):
    """
    A student who can subtitle videos.  The ``uidNumber`` and ``uid``
    fields identify the student in the school's LDAP directory.
    """
    uidNumber = models.IntegerField(unique=True)
    uid = models.CharField(max_length=50)
    nom = models.CharField(max_length=50)
    prenom = models.CharField(max_length=50)
    classe = models.CharField(max_length=10)

    def __str__(self):
        # "last-name first-name class uid"
        return "{nom} {prenom} {classe} {uid}".format(**self.__dict__)
class Enseignant(models.Model):
    """
    A teacher or other member of the educational staff.  The ``uid``
    field is this person's identifier in the school's LDAP directory.
    """
    uid = models.IntegerField(unique=True)
    nom = models.CharField(max_length=50)
    prenom = models.CharField(max_length=50)

    def __str__(self):
        return "{} (prof)".format(self.nom)
class EnseignantClasse(models.Model):
    """
    Associates a teacher with a class group.

    Fix: removed a stray ' | ' fill marker that had split the ``def``
    keyword of __str__.
    """
    enseignant = models.ForeignKey(Enseignant)
    classe = models.CharField(max_length=50)
    gid = models.IntegerField()  # LDAP group id of the class

    def __str__(self):
        return "{} -> {}".format(self.enseignant.nom, self.classe)
class Atelier(models.Model):
    """
    Associates a video and possibly subtitles with a class and teacher.
    """
    ec = models.ForeignKey(EnseignantClasse)
    video = models.FileField(upload_to='video')
    # WebVTT subtitle text; starts from an empty WebVTT header.
    tt = models.TextField(default="WEBTT\n\n")

    def __str__(self):
        return "{} {} {}".format(self.ec, self.video, self.tt[:20]+"...")
class Travail(models.Model):
    """
    Describes one student's work inside a workshop (Atelier).
    """
    atelier = models.ForeignKey(Atelier)
    tt = models.TextField(default="", blank=True)
    etudiant = models.ForeignKey(Etudiant)

    class Meta:
        verbose_name_plural = "Travaux"

    def __str__(self):
        return "{} {} {}".format(self.etudiant,
                                 self.tt[:30]+"...",
                                 self.atelier)

    def clean(self, *args, **kwargs):
        # The student must belong to the same class as the workshop.
        if self.etudiant.classe != self.atelier.ec.classe:
            raise (ValidationError("Erreur de classe entre l'élève et l'atelier"))
        super(Travail, self).clean(*args, **kwargs)
        return

    def save(self, *args, **kwargs):
        # Seed the student's subtitles from the workshop's on first save.
        if not self.tt:
            self.tt = self.atelier.tt
        super(Travail, self).save(*args, **kwargs)
        return
|
ezequielpereira/Time-Line | timelinelib/wxgui/dialogs/setcategory/controller.py | Python | gpl-3.0 | 2,583 | 0.000774 | # Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014, 2015 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
from timelinelib.wxgui.framework import Controller
class SetCategoryDialogController(Controller):
    """Controller for the "set category" dialog.

    When ``selected_event_ids`` is empty the chosen category is applied
    to every event that currently has no category; otherwise it is
    applied to the selected events only.
    """
    def on_init(self, db, selected_event_ids):
        # Keep the references needed by the save step, then fill the view.
        self._db = db
        self._selected_event_ids = selected_event_ids
        self.view.PopulateCategories()
        self._set_title()
    def on_ok_clicked(self, event):
        category = self.view.GetSelectedCategory()
        # A category is mandatory only in "events without category" mode.
        if not self._category_is_given(category) and self._selected_event_ids == []:
            self.view.DisplayErrorMessage(_("You must select a category!"))
        else:
            self._save_category_in_events(category)
            self.view.EndModalOk()
    def _set_title(self):
        # Title reflects which of the two modes the dialog operates in.
        if self._selected_event_ids == []:
            self.view.SetTitle(_("Set Category on events without category"))
        else:
            self.view.SetTitle(_("Set Category on selected events"))
    def _category_is_given(self, category):
        return category is not None
    def _save_category_in_events(self, category):
        if self._selected_event_ids == []:
            self._save_category_in_events_for_events_without_category(category)
        else:
            self._save_category_in_events_for_selected_events(category)
    def _save_category_in_events_for_selected_events(self, category):
        for event_id in self._selected_event_ids:
            event = self._db.find_event_with_id(event_id)
            event.set_category(category)
    def _save_category_in_events_for_events_without_category(self, category):
        for event in self._db.get_all_events():
            if event.get_category() is None:
                event.set_category(category)
    def _events_without_category_exists(self):
        # NOTE(review): this helper reads event.category directly while the
        # method above uses event.get_category() -- confirm both are valid
        # accessors on the event type.
        for event in self._db.get_all_events():
            if event.category is None:
                return True
        return False
|
gdsglgf/tutorials | python/pillow/image_converter.py | Python | mit | 896 | 0.03288 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from PIL import Image
def gray2rgb(data):
    """Expand a 2-D 8-bit grayscale array into an RGB array via PIL."""
    gray_image = Image.fromarray(data, 'L')
    return np.array(gray_image.convert('RGB'))
# ITU-R 601-2 luma transform:
# L = R * 299/1000 + G * 587/1000 + B * 114/1000
def rgb2gray(data):
    """Collapse an RGB array to 8-bit grayscale via PIL's 'L' conversion."""
    rgb_image = Image.fromarray(data, 'RGB')
    return np.array(rgb_image.convert('L'))
def toGray(rgb):
row, column, pipe = rg | b.shape
for i in range(row):
for j in range(column):
pix = rgb[i, j]
r = pix[0]
g = pix[1]
b = pix[2]
gray = (r * 299 + g * 587 + b * 114) / 1000
print '%4d' %(gray),
print
def main():
    # Build a small deterministic RGB array and exercise both converters,
    # then round-trip it through a JPEG file and compare again.
    rgb = np.array(np.arange(8 * 8 * 3).reshape((8, 8, 3)), dtype='uint8')
    print rgb
    print rgb2gray(rgb)
    toGray(rgb)
    Image.fromarray(rgb, 'RGB').save('rgb.jpg')
    img = np.array(Image.open('rgb.jpg'))
    print img  # JPEG is lossy, so this differs from rgb
    print rgb2gray(img)
    toGray(img)
# Script entry point (removes a trailing stray token after the call).
if __name__ == '__main__':
    main()
mtagle/airflow | tests/utils/test_weight_rule.py | Python | apache-2.0 | 1,201 | 0 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow.utils.weight_rule import WeightRule
class TestWeightRule(unittest.TestCase):
    def test_valid_weight_rules(self):
        # The three canonical rules must validate, and the enumeration
        # must contain exactly those three (no silent additions).
        self.assertTrue(WeightRule.is_valid(WeightRule.DOWNSTREAM))
        self.assertTrue(WeightRule.is_valid(WeightRule.UPSTREAM))
        self.assertTrue(WeightRule.is_valid(WeightRule.ABSOLUTE))
        self.assertEqual(len(WeightRule.all_weight_rules()), 3)
|
finder/mako_base | auth/urls.py | Python | mit | 232 | 0.008621 | from django.conf.u | rls.defaults import *
import views
# Old-style Django URL configuration (django.conf.urls.defaults /
# patterns() were removed in Django 1.8).  Fixes a stray separator
# token inside the logout entry.
urlpatterns = patterns('',
    (r'^login/$', views.login),
    (r'^logout/$', views.logout),
    (r'^register/$', views.register),
    (r'^forgot_pass/$', views.forgot_pass),
)
|
xuchao1213/AliyunDdnsPython | config.py | Python | gpl-3.0 | 1,746 | 0.002291 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3,):
import ConfigParser
else:
import configparser as ConfigParser
class Config:
    """Configuration holder for the Aliyun DDNS updater.

    Reads an INI file with a single [CONFIG] section and exposes the
    values as attributes.  Missing optional values fall back to the
    defaults interval=30, record_type="A", region_id="cn-hangzhou".

    Fixed: the module guards its imports for Python 2 and 3, but the
    original body used Python-2-only ``print`` statements,
    ``except Exception, e`` syntax and ``e.message`` -- all of which
    crash on Python 3.  Also removes stray separator tokens.
    """
    def __init__(self):
        self.interval = 30
        self.access_key_id = None
        self.access_key_secret = None
        self.domain_name = None
        self.sub_domain_name = None
        self.record_type = "A"
        self.region_id = "cn-hangzhou"
        self.configParser = ConfigParser.ConfigParser()
    def loadConfig(self, file):
        """Load settings from *file*; return True on success, else False."""
        # read() returns the list of files successfully parsed; an empty
        # list means the file could not be opened.
        if not self.configParser.read(file):
            print("config file not exist")
            return False
        try:
            self.interval = self.configParser.getint("CONFIG", "interval")
            self.access_key_id = self.configParser.get("CONFIG", "access_key_id")
            self.access_key_secret = self.configParser.get("CONFIG", "access_key_secret")
            self.domain_name = self.configParser.get("CONFIG", "domain_name")
            self.sub_domain_name = self.configParser.get("CONFIG", "sub_domain_name")
            self.record_type = self.configParser.get("CONFIG", "record_type")
            self.region_id = self.configParser.get("CONFIG", "region_id")
            # Empty values fall back to the defaults.
            if not self.interval:
                self.interval = 30
            if not self.record_type:
                self.record_type = "A"
            if not self.region_id:
                self.region_id = "cn-hangzhou"
        except Exception as e:
            print("invalid config: {0}".format(str(e)))
            return False
        # The four credential/domain fields are mandatory.
        if not self.access_key_id or not self.access_key_secret or not self.domain_name or not self.sub_domain_name:
            print("invalid config")
            return False
        return True
|
delectable/DIGITS | digits/config/test_prompt.py | Python | bsd-3-clause | 3,424 | 0.004089 | # Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
import sys
from contextlib import contextmanager
from StringIO import StringIO
import mock
from nose.tools import raises
from . import prompt as _
class TestValueToStr():
    """Tests for prompt.value_to_str()."""
    def test_none(self):
        # pass None to value_to_str
        assert _.value_to_str(None) == '', 'passing None should return an empty string'
    def test_nonstring(self):
        # pass a non-string value to value_to_str
        assert _.value_to_str(1) == '1', 'passing 1 should return the string "1"'
class TestSuggestion():
    """Tests for the prompt.Suggestion value object."""
    @raises(ValueError)
    def test_new_bad_char_type(self):
        # pass a non-string type as char to Suggestion
        _.Suggestion(None, 1)
    @raises(ValueError)
    def test_new_bad_multichar(self):
        # pass multiple chars where one is expected
        _.Suggestion(None, 'badvalue')
    def test_str_method(self):
        # test __str__ method of Suggestion
        suggestion = _.Suggestion('alpha', 'a', 'test', True)
        strval = str(suggestion)
        expect = '<Suggestion char="a" desc="test" value="alpha" default>'
        assert strval == expect, 'Suggestion is not producing the correct string value %s' % expect
@contextmanager
def mockInput(fn):
    # Temporarily replace the Python-2 raw_input builtin with *fn*.
    # NOTE(review): treats __builtins__ as a dict, which holds when this
    # module is run as __main__ but not when imported -- confirm usage.
    original = __builtins__['raw_input']
    __builtins__['raw_input'] = fn
    yield
    __builtins__['raw_input'] = original
class TestGetInput():
    """Tests for prompt.get_input() covering abort, retry, defaults and
    path expansion.  Fixes stray separator tokens that broke two lines,
    and corrects the misleading failure message of the path test."""
    def setUp(self):
        self.suggestions = [_.Suggestion('alpha', 'a', 'test', False)]
    @raises(SystemExit)
    def test_get_input_sys_exit(self):
        # Ctrl-C from the user should exit the program
        def temp(_):
            raise KeyboardInterrupt
        with mockInput(temp):
            _.get_input('Test', lambda _: True, self.suggestions)
    def test_get_input_empty_then_full(self):
        # test both major paths of get_input
        # Python 2 does not have the 'nonlocal' keyword, so we fudge the closure with an object.
        class Temp:
            def __init__(self):
                self.flag = False
            def __call__(self, _):
                # First call simulates pressing enter; second supplies 'a'.
                if not self.flag:
                    self.flag = True
                    return ''
                else:
                    return 'a'
        with mockInput(Temp()):
            assert _.get_input('Test', lambda x: x, self.suggestions) == 'alpha', 'get_input should return "alpha" for input "a"'
    def test_get_input_empty_default(self):
        # empty input should choose the default
        self.suggestions[0].default = True
        with mockInput(lambda _: ''):
            assert _.get_input('Test', lambda x: x+'_validated', self.suggestions) == 'alpha_validated', 'get_input should return the default value "alpha"'
    def test_get_input_empty_default_no_validator(self):
        # empty input should choose the default and not validate
        self.suggestions[0].default = True
        with mockInput(lambda _: ''):
            assert _.get_input('Test', suggestions=self.suggestions) == 'alpha', 'get_input should return the default value "alpha"'
    @mock.patch('os.path.expanduser')
    def test_get_input_path(self, mock_expanduser):
        # should correctly validate and expand a path
        mock_expanduser.side_effect = lambda x: '/path'+x
        with mockInput(lambda _: '/test'):
            assert _.get_input(validator=lambda x: x, is_path=True) == '/path/test', 'get_input should return the expanded path "/path/test"'
|
pythonkr/pyconapac-2016 | registration/admin.py | Python | mit | 3,854 | 0.001728 | # -*- coding: utf-8 -*-
from django.contrib import admin
from django.core.mail import send_mass_mail
from django.shortcuts import render
from constance import config
from datetime import datetime
from iamporter import get_access_token, Iamporter, IamporterError
from .models import Registration, Option
def send_bankpayment_alert_email(modeladmin, request, queryset):
    """Admin action: email a bank-transfer payment reminder (Korean
    text) to every registrant in *queryset* in a single bulk send."""
    messages = []
    subject = u"PyCon APAC 2016 입금확인부탁드립니다. Please Check PyCon APAC 2016 payment"
    body = u"""
안녕하세요. PyCon APAC 준비위원회입니다.
현재 입금여부를 확인하였으나 입금이 되지 않았습니다.
혹시나 다른 이름으로 입금하신분은 support@pycon.kr 로 메일 부탁드립니다.
입금시한은 구매로부터 일주일입니다.
감사합니다.
    """
    from_email = "pycon@pycon.kr"
    for obj in queryset:
        email = obj.email
        message = (subject, body, from_email, [email])
        messages.append(message)
    # One SMTP connection for all messages; raise on failure.
    send_mass_mail(messages, fail_silently=False)
send_bankpayment_alert_email.short_description = "Send Bank Payment Email"
def cancel_registration(modeladmin, request, queryset):
    """Admin action: cancel paid card registrations via the Iamport API,
    notify each affected registrant by email (Korean text) and render a
    per-registration result page.  Non-card or unpaid rows are skipped
    with a reason attached."""
    messages = []
    subject = u"PyCon APAC 2016 결제 취소 알림"
    body = u"""
안녕하세요. PyCon APAC 준비위원회입니다.
결제가 취소되었음을 알려드립니다.
결제 대행사 사정에 따라 다소 늦게 카드 취소가 이뤄질 수 있습니다.
다른 문의 사항은 support@pycon.kr 로 메일 부탁드립니다.
감사합니다.
    """
    from_email = "pycon@pycon.kr"
    results = []
    now = datetime.now()
    access_token = get_access_token(config.IMP_API_KEY, config.IMP_API_SECRET)
    imp_client = Iamporter(access_token)
    for obj in queryset:
        # Only completed card payments can be cancelled through the API.
        if obj.payment_method != 'card':
            obj.cancel_reason = u'카드 결제만 취소 가능'
            results.append(obj)
            continue
        if obj.payment_status != 'paid':
            obj.cancel_reason = u'결제 완료만 취소 가능'
            results.append(obj)
            continue
        try:
            imp_params = dict(
                merchant_uid=obj.merchant_uid,
                reason=u'Cancel by admin',
            )
            imp_client.cancel(**imp_params)
        except IOError:
            obj.cancel_status = 'IOError'
            results.append(obj)
            continue
        except IamporterError as e:
            obj.cancel_status = e.code
            obj.cancel_reason = e.message
            results.append(obj)
            continue
        # Cancellation succeeded: persist the state change and queue mail.
        obj.canceled = now
        obj.payment_status = 'cancelled'
        obj.save(update_fields=['payment_status', 'canceled'])
        obj.cancel_status = 'CANCELLED'
        results.append(obj)
        message = (subject, body, from_email, [obj.email])
        messages.append(message)
    send_mass_mail(messages, fail_silently=False)
    return render(request, 'registration/cancellation_result.html', {'results': results})
cancel_registration.short_description = "Cancel registration"
class OptionAdmin(admin.ModelAdmin):
    """Admin for ticket options; activity is toggleable in the list view."""
    list_display = ('name', 'is_active', 'price')
    list_editable = ('is_active',)
    ordering = ('id',)
# Register at import time (removes a stray token in the call).
admin.site.register(Option, OptionAdmin)
class RegistrationAdmin(admin.ModelAdmin):
    """Admin for registrations with bulk mail/cancel actions.

    Fixes a stray separator token inside ``list_filter``.
    """
    list_display = ('user', 'option', 'name', 'email', 'payment_method',
                    'payment_status', 'created', 'confirmed', 'canceled')
    list_editable = ('payment_status',)
    list_filter = ('option', 'payment_method', 'payment_status')
    csv_fields = ['name', 'email', 'company', 'option', ]
    search_fields = ('name', 'email')
    readonly_fields = ('created', )
    ordering = ('id',)
    actions = (send_bankpayment_alert_email, cancel_registration)
admin.site.register(Registration, RegistrationAdmin)
|
adwiputra/LUMENS-repo | processing/molusce/algorithms/models/crosstabs/manager.py | Python | gpl-2.0 | 3,527 | 0.005671 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from PyQt4.QtCore import *
import numpy as np
from processing.molusce.algorithms.models.crosstabs.model import CrossTable
class CrossTabManagerError(Exception):
    '''Base class for exceptions in this module.'''
    def __init__(self, msg):
        # Store the human-readable message; callers read err.msg.
        self.msg = msg
class CrossTableManager(QObject):
    '''
    Provides statistic information about transitions
    InitState -> FinalState between two 1-band rasters of identical
    geometry.  (Rewritten to remove stray separator tokens that broke
    two method definitions; behavior unchanged.)
    '''
    # Progress/completion/error signals re-emitted from the CrossTable.
    rangeChanged = pyqtSignal(str, int)
    updateProgress = pyqtSignal()
    crossTableFinished = pyqtSignal()
    logMessage = pyqtSignal(str)
    errorReport = pyqtSignal(str)
    def __init__(self, initRaster, finalRaster):
        QObject.__init__(self)
        # Both rasters must share geometry and carry exactly one band each.
        if not initRaster.geoDataMatch(finalRaster):
            raise CrossTabManagerError('Geometries of the raster maps are different!')
        if initRaster.getBandsCount() + finalRaster.getBandsCount() != 2:
            raise CrossTabManagerError("An input raster has more then one band. Use 1-band rasters!")
        self.pixelArea = initRaster.getPixelArea()
        self.crosstable = CrossTable(initRaster.getBand(1), finalRaster.getBand(1))
        # Forward the cross-table's signals through this manager.
        self.crosstable.rangeChanged.connect(self.__crosstableProgressRangeChanged)
        self.crosstable.updateProgress.connect(self.__crosstableProgressChanged)
        self.crosstable.crossTableFinished.connect(self.__crosstableFinished)
        self.crosstable.errorReport.connect(self.__crosstableError)
    def __crosstableFinished(self):
        # Disconnect so a re-used table does not emit duplicates.
        self.crosstable.rangeChanged.disconnect(self.__crosstableProgressRangeChanged)
        self.crosstable.updateProgress.disconnect(self.__crosstableProgressChanged)
        self.crosstable.crossTableFinished.disconnect(self.__crosstableFinished)
        self.crossTableFinished.emit()
    def __crosstableProgressChanged(self):
        self.updateProgress.emit()
    def __crosstableProgressRangeChanged(self, message, maxValue):
        self.rangeChanged.emit(message, maxValue)
    def __crosstableError(self, message):
        self.errorReport.emit(message)
    def computeCrosstable(self):
        """Run the (potentially long) cross-table computation, reporting
        failures through the errorReport signal before re-raising."""
        try:
            self.crosstable.computeCrosstable()
        except MemoryError:
            self.errorReport.emit(self.tr("The system out of memory during calculation of cross table"))
            raise
        except:
            # Bare except kept deliberately: report, then always re-raise.
            self.errorReport.emit(self.tr("An unknown error occurs during calculation of cross table"))
            raise
    def getCrosstable(self):
        '''
        Return the underlying CrossTable object.
        '''
        return self.crosstable
    def getTransitionMatrix(self):
        '''
        Return the row-normalized transition matrix (proportions).
        '''
        tab = self.getCrosstable().getCrosstable()
        s = 1.0 / np.sum(tab, axis=1)
        return tab * s[:, None]
    def getTransitionStat(self):
        """Return per-class areas and percentages before/after plus deltas,
        in the unit reported by the raster's pixel-area metadata."""
        pixelArea = self.pixelArea['area']
        stat = {'unit': self.pixelArea['unit']}
        tab = self.getCrosstable()
        initArea = tab.getSumRows()
        initArea = pixelArea * initArea
        initPerc = 100.0 * initArea / sum(initArea)
        stat['init'] = initArea
        stat['initPerc'] = initPerc
        finalArea = tab.getSumCols()
        finalArea = pixelArea * finalArea
        finalPerc = 100.0 * finalArea / sum(finalArea)
        stat['final'] = finalArea
        stat['finalPerc'] = finalPerc
        deltas = finalArea - initArea
        deltasPerc = finalPerc - initPerc
        stat['deltas'] = deltas
        stat['deltasPerc'] = deltasPerc
        return stat
|
nontas/menpowidgets | docs/sphinxext/ref_prettify.py | Python | bsd-3-clause | 763 | 0 | from sphinx.domains.python import PyXRefRole
def setup(app):
    """
    Any time a python class is referenced, make it a pretty link that
    doesn't include the full package path.  This makes the base classes
    much prettier.
    """
    app.add_role_to_domain('py', 'class', truncate_class_role)
    return {'parallel_read_safe': True}
def truncate_class_role(name, rawtext, text, lineno, inliner,
                        options={}, content=[]):
    # Shortens ``pkg.mod.Class`` targets to just ``Class`` by rewriting
    # the role text as "Class <pkg.mod.Class>" before delegating.
    # NOTE(review): mutable default args are shared across calls -- safe
    # here only as long as they are never mutated.
    if '<' not in rawtext:
        text = '{} <{}>'.format(text.split('.')[-1], text)
        rawtext = ':{}:`{}`'.format(name, text)
    # Return a python x-reference
    py_xref = PyXRefRole()
    return py_xref('py:class', rawtext, text, lineno,
                   inliner, options=options, content=content)
|
vijayendrabvs/hap | neutron/tests/unit/test_iptables_manager.py | Python | apache-2.0 | 28,031 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Locaweb.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: Juliano Martinez, Locaweb.
import inspect
import os
import mock
from neutron.agent.linux import iptables_manager
from neutron.tests import base
from neutron.tests import tools
# Canned iptables-save output (with zeroed packet/byte counters) that the
# manager is expected to produce for an otherwise empty ruleset; %(bn)s is
# the wrapped-binary chain-name prefix.
IPTABLES_ARG = {'bn': iptables_manager.binary_name}
NAT_DUMP = ('# Generated by iptables_manager\n'
            '*nat\n'
            ':neutron-postrouting-bottom - [0:0]\n'
            ':%(bn)s-OUTPUT - [0:0]\n'
            ':%(bn)s-snat - [0:0]\n'
            ':%(bn)s-PREROUTING - [0:0]\n'
            ':%(bn)s-float-snat - [0:0]\n'
            ':%(bn)s-POSTROUTING - [0:0]\n'
            '[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
            '[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
            '[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
            '[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
            '[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
            '[0:0] -A %(bn)s-snat -j '
            '%(bn)s-float-snat\n'
            'COMMIT\n'
            '# Completed by iptables_manager\n' % IPTABLES_ARG)
FILTER_DUMP = ('# Generated by iptables_manager\n'
               '*filter\n'
               ':neutron-filter-top - [0:0]\n'
               ':%(bn)s-FORWARD - [0:0]\n'
               ':%(bn)s-INPUT - [0:0]\n'
               ':%(bn)s-local - [0:0]\n'
               ':%(bn)s-OUTPUT - [0:0]\n'
               '[0:0] -A FORWARD -j neutron-filter-top\n'
               '[0:0] -A OUTPUT -j neutron-filter-top\n'
               '[0:0] -A neutron-filter-top -j %(bn)s-local\n'
               '[0:0] -A INPUT -j %(bn)s-INPUT\n'
               '[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
               '[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
               'COMMIT\n'
               '# Completed by iptables_manager\n' % IPTABLES_ARG)
class IptablesManagerStateFulTestCase(base.BaseTestCase):
def setUp(self):
super(IptablesManagerStateFulTestCase, self).setUp()
self.root_helper = 'sudo'
self.iptables = (iptables_manager.
IptablesManager(root_helper=self | .root_helper))
self.exe | cute = mock.patch.object(self.iptables, "execute").start()
def test_binary_name(self):
self.assertEqual(iptables_manager.binary_name,
os.path.basename(inspect.stack()[-1][1])[:16])
def test_get_chain_name(self):
name = '0123456789' * 5
# 28 chars is the maximum length of iptables chain name.
self.assertEqual(iptables_manager.get_chain_name(name, wrap=False),
name[:28])
# 11 chars is the maximum length of chain name of iptable_manager
# if binary_name is prepended.
self.assertEqual(iptables_manager.get_chain_name(name, wrap=True),
name[:11])
def test_add_and_remove_chain_custom_binary_name(self):
bn = ("abcdef" * 5)
self.iptables = (iptables_manager.
IptablesManager(root_helper=self.root_helper,
binary_name=bn))
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {'bn': bn[:16]}
filter_dump = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% iptables_args)
nat_dump = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j '
'%(bn)s-float-snat\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + filter_dump_mod,
root_helper=self.root_helper),
None),
(mock.call(['iptables-save', '-c'],
root_helper=self.root_helper),
''),
(mock.call(['iptables-restore', '-c'],
process_input=nat_dump + filter_dump,
root_helper=self.root_helper),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.apply()
self.iptables.ipv4['filter'].empty_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_empty_chain_custom_binary_name(self):
bn = ("abcdef" * 5)[:16]
self.iptables = (iptables_manager.
IptablesManager(root_helper=self.root_helper,
binary_name=bn))
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {'bn': bn}
filter_dump = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn) |
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractBibliophilesociety888305596WordpressCom.py | Python | bsd-3-clause | 594 | 0.031987 |
def extractBibliophilesociety888305596WordpressCom(item):
    '''
    Parser for 'bibliophilesociety888305596.wordpress.com'.
    Removes stray separator tokens that broke two lines.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items without chapter/volume info, and preview posts.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
|
saltstack/salt | tests/pytests/unit/modules/test_salt_version.py | Python | apache-2.0 | 8,304 | 0.002288 | """
Unit tests for salt/modules/salt_version.py
"""
import salt.modules.salt_version as salt_version
import salt.version
from tests.support.mock import MagicMock, patch
def test_mocked_objects():
    """
    Test that the mocked objects actually have what we expect.

    For example, earlier tests incorrectly mocked the
    salt.version.SaltStackVersion.LNAMES dict using upper-case indexes
    """
    assert isinstance(salt.version.SaltStackVersion.LNAMES, dict)
    sv = salt.version.SaltStackVersion(*salt.version.__version_info__)
    for k, v in salt.version.SaltStackVersion.LNAMES.items():
        # Codename keys are lower-case; values are (major,) tuples under
        # the new scheme or (year, month) tuples under the old one.
        assert k == k.lower()
        assert isinstance(v, tuple)
        if sv.new_version(major=v[0]):
            assert len(v) == 1
        else:
            assert len(v) == 2
    sv = sv.__str__()
    assert isinstance(sv, str)
    with patch("salt.version.SaltStackVersion.LNAMES", {"neon": (2019, 8)}):
        sv = salt.version.SaltStackVersion.from_name("Neon")
        assert sv.string == "2019.8.0"
def test_get_release_number_no_codename():
    """
    Test that None is returned when the codename isn't found.
    """
    assert salt_version.get_release_number("foo") is None
def test_get_release_number_unassigned():
    """
    Test that a string is returned when a version is found, but unassigned.
    """
    # LNAMES is patched so "foo" exists but maps to an unreleased version.
    with patch("salt.version.SaltStackVersion.LNAMES", {"foo": (12345, 0)}):
        mock_str = "No version assigned."
        assert salt_version.get_release_number("foo") == mock_str
def test_get_release_number_success():
    """
    Test that a version is returned for a released codename
    """
    assert salt_version.get_release_number("Oxygen") == "2018.3"
def test_get_release_number_success_new_version():
    """
    Test that a version is returned for new versioning (3000)
    """
    assert salt_version.get_release_number("Neon") == "3000"
def test_equal_success():
    """
    Test that the current version is equal to the codename
    """
    # Patching the class with a MagicMock makes SaltStackVersion(...)
    # return the given string as the "current" version.
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="1900.5.0")):
        with patch("salt.version.SaltStackVersion.LNAMES", {"foo": (1900, 5)}):
            assert salt_version.equal("foo") is True
def test_equal_success_new_version():
    """
    Test that the current version is equal to the codename
    while using the new versioning
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="3000.1")):
        with patch("salt.version.SaltStackVersion.LNAMES", {"foo": (3000,)}):
            assert salt_version.equal("foo") is True
def test_equal_older_codename():
    """
    Test that when an older codename is passed in, the function returns False.
    (Rewritten to repair a garbled docstring delimiter and a stray token
    in the patch call.)
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.version.SaltStackVersion.LNAMES",
            {"oxygen": (2018, 3), "nitrogen": (2017, 7)},
        ):
            assert salt_version.equal("Nitrogen") is False
def test_equal_older_codename_new_version():
    """
    Test that when an older codename is passed in, the function returns False,
    while also testing with the new versioning.
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.version.SaltStackVersion.LNAMES",
            # (3000,) -- the original wrote (3000), a plain int; every other
            # mocked LNAMES value (and test_mocked_objects) requires tuples.
            {"neon": (3000,), "nitrogen": (2017, 7)},
        ):
            assert salt_version.equal("Nitrogen") is False
def test_equal_newer_codename():
    """
    Test that when a newer codename is passed in, the function returns False
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.version.SaltStackVersion.LNAMES",
            # MAX_SIZE-based entry stands in for a far-future release.
            {"fluorine": (salt.version.MAX_SIZE - 100, 0)},
        ):
            assert salt_version.equal("Fluorine") is False
def test_greater_than_success():
    """
    Test that the current version is newer than the codename
    """
    with patch(
        "salt.modules.salt_version.get_release_number", MagicMock(return_value="2017.7")
    ):
        with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
            assert salt_version.greater_than("Nitrogen") is True
def test_greater_than_success_new_version():
    """
    Test that the current version is newer than the codename
    """
    with patch(
        "salt.modules.salt_version.get_release_number", MagicMock(return_value="2017.7")
    ):
        with patch("salt.version.SaltStackVersion", MagicMock(return_value="3000")):
            assert salt_version.greater_than("Nitrogen") is True
def test_greater_than_with_equal_codename():
    """
    Test that when an equal codename is passed in, the function returns False.
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch("salt.version.SaltStackVersion.LNAMES", {"oxygen": (2018, 3)}):
            assert salt_version.greater_than("Oxygen") is False
def test_greater_than_with_newer_codename():
    """
    Test that when a newer codename is passed in, the function returns False
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.version.SaltStackVersion.LNAMES",
            {"fluorine": (2019, 2), "oxygen": (2018, 3)},
        ):
            assert salt_version.greater_than("Fluorine") is False
def test_greater_than_unassigned():
    """
    Test that the unassigned codename is greater than the current version
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.modules.salt_version.get_release_number",
            MagicMock(return_value="No version assigned."),
        ):
            assert salt_version.greater_than("Fluorine") is False
def test_less_than_success():
    """
    Test that when a newer codename is passed in, the function returns True.
    """
    # get_release_number is stubbed so the codename maps to a fixed release.
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.modules.salt_version.get_release_number",
            MagicMock(return_value="2019.2"),
        ):
            assert salt_version.less_than("Fluorine") is True
def test_less_than_success_new_version():
    """
    Test that when a newer codename is passed in, the function returns True
    using new version
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.modules.salt_version.get_release_number",
            MagicMock(return_value="3000"),
        ):
            assert salt_version.less_than("Fluorine") is True
def test_less_than_with_equal_codename():
    """
    Test that when an equal codename is passed in, the function returns False.
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch("salt.version.SaltStackVersion.LNAMES", {"oxygen": (2018, 3)}):
            assert salt_version.less_than("Oxygen") is False
def test_less_than_with_older_codename():
    """
    Test that the current version is less than the codename.
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.modules.salt_version.get_release_number",
            MagicMock(return_value="2017.7"),
        ):
            assert salt_version.less_than("Nitrogen") is False
def test_less_than_with_unassigned_codename():
    """
    Test that an unassigned codename counts as greater than the current version.
    """
    with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
        with patch(
            "salt.modules.salt_version.get_release_number",
            MagicMock(return_value="No version assigned."),
        ):
            assert salt_version.less_than("Fluorine") is True
def test_check_release_cmp_no_codename():
    """
    Test that None is returned when the codename isn't found.
    """
    assert salt_version._check_release_cmp("foo") is None
def test_check_release_cmp_success():
"""
Test tha |
def fizz_buzz(fizz, buzz, highest):
    """Return the FizzBuzz sequence from 1 to *highest* as one string.

    Numbers divisible by *fizz* contribute 'F', by *buzz* 'B', by both
    'FB'; any other number appears as its decimal form.  Tokens are
    joined with single spaces.  (Removes a stray token in the empty-check.)
    """
    result = []
    for i in range(1, highest + 1):
        letter = ''
        # If divisible by fizz or buzz, add F or B appropriately.
        if i % fizz == 0:
            letter += 'F'
        if i % buzz == 0:
            letter += 'B'
        # If neither F nor B was added, the token is just the number.
        if letter == '':
            letter = str(i)
        result.append(letter)
    return " ".join(result)
# Generated gRPC client configuration for the AssetService API: retry
# code classes, backoff parameters and per-method timeout/retry policy.
# (Reconstructed to remove stray tokens that broke the dict literal.)
config = {
    "interfaces": {
        "google.cloud.asset.v1beta1.AssetService": {
            "retry_codes": {
                "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
                "non_idempotent": []
            },
            "retry_params": {
                "default": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 20000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 20000,
                    "total_timeout_millis": 600000
                }
            },
            "methods": {
                "ExportAssets": {
                    "timeout_millis": 600000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default"
                },
                "BatchGetAssetsHistory": {
                    "timeout_millis": 600000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default"
                }
            }
        }
    }
}
|
schroeder-dewitt/polyomino-self-assembly | test/SearchSpaceAdv/alpha-working/main.py | Python | apache-2.0 | 7,493 | 0.029227 | import pycuda.autoinit
import pycuda.driver as drv
import numpy
from pycuda.compiler import SourceModule
from jinja2 import Environment, PackageLoader
def main():
#FourPermutations set-up
FourPermutations = numpy.array([ [1,2,3,4],
[1,2,4,3],
[1,3,2,4],
[1,3,4,2],
[1,4,2,3],
[1,4,3,2],
[2,1,3,4],
[2,1,4,3],
[2,3,1,4],
[2,3,4,1],
[2,4,1,3],
[2,4,3,1],
[3,2,1,4],
[3,2,4,1],
[3,1,2,4],
[3,1,4,2],
[3,4,2,1],
[3,4,1,2],
[4,2,3,1],
[4,2,1,3],
[4,3,2,1],
[4,3,1,2],
[4,1,2,3],
[4,1,3,2],]).astype(numpy.uint8)
BankSize = 8 # Do not go beyond 8!
#Define constants
DimGridX = 19
DimGridY = 19
#SearchSpaceSize = 2**24
#BlockDimY = SearchSpaceSize / (2**16)
#BlockDimX = SearchSpaceSize / (BlockDimY)
#print "SearchSpaceSize: ", SearchSpaceSize, " (", BlockDimX, ", ", BlockDimY,")"
SearchSpaceSize = 600 * 600 * 32
BlockDimX = 600
BlockDimY = 600
FitnessValDim = SearchSpaceSize
GenomeDim = SearchSpaceSize
#Create dictionary argument for rendering
RenderArgs= {"safe_memory_mapping":1,
"aligned_byte_length_genome":4,
"bit_length_edge_type":3,
"curand_nr_threads_per_block":8*32,
"nr_tile_types":2,
"nr_edge_types":8,
"warpsize":32,
| "fit_dim_thread_x":3 | 2*BankSize,
"fit_dim_thread_y":1,
"fit_dim_block_x":BlockDimX,
"fit_dim_grid_x":19,
"fit_dim_grid_y":19,
"fit_nr_four_permutations":24,
"fit_length_movelist":244,
"fit_nr_redundancy_grid_depth":2,
"fit_nr_redundancy_assemblies":10,
"fit_tile_index_starting_tile":0,
"glob_nr_tile_orientations":4,
"banksize":BankSize
}
# Set environment for template package Jinja2
env = Environment(loader=PackageLoader('main', './'))
# Load source code from file
Source = env.get_template('./alpha.cu') #Template( file(KernelFile).read() )
# Render source code
RenderedSource = Source.render( RenderArgs )
# Save rendered source code to file
f = open('./rendered.cu', 'w')
f.write(RenderedSource)
f.close()
#Load source code into module
KernelSourceModule = SourceModule(RenderedSource, options=None, arch="compute_20", code="sm_20")
Kernel = KernelSourceModule.get_function("SearchSpaceKernel")
CurandKernel = KernelSourceModule.get_function("CurandInitKernel")
#Initialise InteractionMatrix
InteractionMatrix = numpy.zeros( ( 8, 8) ).astype(numpy.float32)
def Delta(a,b):
if a==b:
return 1
else:
return 0
for i in range(InteractionMatrix.shape[0]):
for j in range(InteractionMatrix.shape[1]):
InteractionMatrix[i][j] = ( 1 - i % 2 ) * Delta( i, j+1 ) + ( i % 2 ) * Delta( i, j-1 )
#Set up our InteractionMatrix
InteractionMatrix_h = KernelSourceModule.get_texref("t_ucInteractionMatrix")
drv.matrix_to_texref( InteractionMatrix, InteractionMatrix_h , order="C")
print InteractionMatrix
#Set-up genomes
dest = numpy.arange(GenomeDim*4).astype(numpy.uint8)
#for i in range(0, GenomeDim/4):
#dest[i*8 + 0] = int('0b00100101',2) #CRASHES
#dest[i*8 + 1] = int('0b00010000',2) #CRASHES
#dest[i*8 + 0] = int('0b00101000',2)
#dest[i*8 + 1] = int('0b00000000',2)
#dest[i*8 + 2] = int('0b00000000',2)
#dest[i*8 + 3] = int('0b00000000',2)
#dest[i*8 + 4] = int('0b00000000',2)
#dest[i*8 + 5] = int('0b00000000',2)
#dest[i*8 + 6] = int('0b00000000',2)
#dest[i*8 + 7] = int('0b00000000',2)
# dest[i*4 + 0] = 40
# dest[i*4 + 1] = 0
# dest[i*4 + 2] = 0
# dest[i*4 + 3] = 0
dest_h = drv.mem_alloc(GenomeDim*4) #dest.nbytes)
#drv.memcpy_htod(dest_h, dest)
#print "Genomes before: "
#print dest
#Set-up grids
#grids = numpy.zeros((10000, DimGridX, DimGridY)).astype(numpy.uint8) #TEST
#grids_h = drv.mem_alloc(GenomeDim*DimGridX*DimGridY) #TEST
#drv.memcpy_htod(grids_h, grids)
#print "Grids:"
#print grids
#Set-up fitness values
#fitness = numpy.zeros(FitnessValDim).astype(numpy.float32)
#fitness_h = drv.mem_alloc(fitness.nbytes)
fitness_left = numpy.zeros(FitnessValDim).astype(numpy.float32)
fitness_left_h = drv.mem_alloc(fitness_left.nbytes)
fitness_bottom = numpy.zeros(FitnessValDim).astype(numpy.float32)
fitness_bottom_h = drv.mem_alloc(fitness_bottom.nbytes)
#drv.memcpy_htod(fitness_h, fitness)
#print "Fitness values:"
#print fitness
#Set-up grids
grids = numpy.zeros((10000*32, DimGridX, DimGridY)).astype(numpy.uint8) #TEST
grids_h = drv.mem_alloc(GenomeDim*DimGridX*DimGridY) #TEST
#drv.memcpy_htod(grids_h, grids)
#print "Grids:"
#print grids
#Set-up curand
#curand = numpy.zeros(40*GenomeDim).astype(numpy.uint8);
#curand_h = drv.mem_alloc(curand.nbytes)
curand_h = drv.mem_alloc(40*GenomeDim)
#Set-up four permutations
FourPermutations_h = KernelSourceModule.get_global("c_ucFourPermutations") # Constant memory address
drv.memcpy_htod(FourPermutations_h[0], FourPermutations)
#SearchSpace control
#SearchSpaceSize = 2**24
#BlockDimY = SearchSpaceSize / (2**16)
#BlockDimX = SearchSpaceSize / (BlockDimY)
#print "SearchSpaceSize: ", SearchSpaceSize, " (", BlockDimX, ", ", BlockDimY,")"
#Set-up timer
start = drv.Event()
stop = drv.Event()
start.record()
print "Start kernel:"
#Call kernels
CurandKernel(curand_h, block=(32,1,1), grid=(BlockDimX,BlockDimY))
print "Finished Curand kernel, starting main kernel..."
Kernel(dest_h, grids_h, fitness_left_h, fitness_bottom_h, curand_h, block=(32*BankSize,1,1), grid=(BlockDimX,BlockDimY))
#End timer
stop.record()
stop.synchronize()
print "Total kernel time taken: %fs"%(start.time_till(stop)*1e-3)
#print "Mean time per generation: %fs"%(start.time_till(stop)*1e-3 / NrGenerations)
pass
#Output
#drv.memcpy_dtoh(dest, dest_h)
#print "Genomes after: "
#print dest[0:4]
drv.memcpy_dtoh(fitness_left, fitness_left_h)
print "FitnessLeft after: "
print fitness_left[1000000:1000500]
drv.memcpy_dtoh(fitness_bottom, fitness_bottom_h)
print "FitnessBottom after: "
print fitness_bottom[1000000:1000500]
drv.memcpy_dtoh(grids, grids_h)
print "Grids[0] after: "
print grids[9000]
print "Grids[31] after:"
print grids[9500]
if __name__ == '__main__':
main()
|
ratschlab/rQuant | tools/ParseGFF.py | Python | gpl-3.0 | 19,046 | 0.012969 | #!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Written (W) 2010 Vipin T Sreedharan
# Copyright (C) 2010 Max Planck Society
#
"""
Description: Parse genome annotation from a GFF3 (a tab delimited format for storing sequence features and annotations:
http://www.sequenceontology.org/gff3.shtml)
file and create gene struct which can be used for rQuant downstream processing.
"""
import re, sys, os
import scipy.io
def CreateExon(strand_p, five_p_utr, cds_cod, three_p_utr):
"""Create exon cordinates from UTR's and CDS region"""
exon_pos = []
if strand_p == '+':
utr5_start, utr5_end = 0, 0
if five_p_utr != []:
utr5_start = five_p_utr[-1][0]
utr5_end = five_p_utr[-1][1]
cds_5start = cds_cod[0][0]
cds_5end = cds_cod[0][1]
jun_exon = []
if cds_5start-utr5_end == 0 or cds_5start-utr5_end == 1:
jun_exon = [utr5_start, cds_5end]
if len(cds_cod) == 1:
five_prime_flag = 0
if jun_exon != []:
five_p_utr = five_p_utr[:-1]
five_prime_flag = 1
for utr5 in five_p_utr:
exon_pos.append(utr5)
jun_exon = []
utr3_start, utr3_end = 0, 0
if three_p_utr != []:
utr3_start = three_p_utr[0][0]
utr3_end = three_p_utr[0][1]
if utr3_start-cds_5end == 0 or utr3_start-cds_5end == 1:
jun_exon = [cds_5start, utr3_end]
three_prime_flag = 0
if jun_exon != []:
cds_cod = cds_cod[:-1]
three_p_utr = three_p_utr[1:]
three_prime_flag = 1
if five_prime_flag == 1 and three_prime_flag == 1:
exon_pos.append([utr5_start, utr3_end])
if five_prime_flag == 1 and three_prime_flag == 0:
exon_pos.append([utr5_start, cds_5end])
cds_cod = cds_cod[:-1]
if five_prime_flag == 0 and three_prime_flag == 1:
exon_pos.append([cds_5start, utr3_end])
for cds in cds_cod:
exon_pos.append(cds)
for utr3 in three_p_utr:
exon_pos.append(utr3)
else:
if jun_exon != []:
five_p_utr = five_p_utr[:-1]
cds_cod = cds_cod[1:]
for utr5 in five_p_utr:
exon_pos.append(utr5)
exon_pos.append(jun_exon) if jun_exon != [] else ''
jun_exon = []
utr3_start, utr3_end = 0, 0
if three_p_utr != []:
utr3_start = three_p_utr[0][0]
utr3_end = three_p_utr[0][1]
cds_3start = cds_cod[-1][0]
cds_3end = cds_cod[-1][1]
if utr3_start-cds_3end == 0 or utr3_start-cds_3end == 1:
jun_exon = [cds_3start, utr3_end]
if jun_exon != []:
cds_cod = cds_cod[:-1]
three_p_utr = three_p_utr[1:]
for cds in cds_cod:
exon_pos.append(cds)
exon_pos.append(jun_exon) if jun_exon != [] else ''
for utr3 in three_p_utr:
exon_pos.append(utr3)
elif strand_p == '-':
utr3_start, utr3_end = 0, 0
if three_p_utr != []:
utr3_start = three_p_utr[-1][0]
utr3_end = three_p_utr[-1][1]
cds_3start = cds_cod[0][0]
cds_3end = cds_cod[0][1]
jun_exon = []
if cds_3start-utr3_end == 0 or cds_3start-utr3_end == 1:
jun_exon = [utr3_start, cds_3end]
if len(cds_cod) == 1:
three_prime_flag = 0
if jun_exon != []:
three_p_utr = three_p_utr[:-1]
three_prime_flag = 1
for utr3 in three_p_utr:
exon_pos.append(utr3)
jun_exon = []
(utr5_start, utr5_end) = (0, 0)
if five_p_utr != []:
utr5_start = five_p_utr[0][0]
utr5_end = five_p_utr[0][1]
if utr5_start-cds_3end == 0 or utr5_start-cds_3end == 1:
jun_exon = [cds_3start, utr5_end]
five_prime_flag = 0
if jun_exon != []:
cds_cod = cds_cod[:-1]
five_p_utr = five_p_utr[1:]
five_prime_flag = 1
if three_prime_flag == 1 and five_prime_flag == 1:
exon_pos.append([utr3_start, utr5_end])
if three_prime_flag == 1 and five_prime_flag == 0:
exon_pos.append([utr3_start, cds_3end])
cds_cod = cds_cod[:-1]
if three_prime_flag == 0 and five_prime_flag == 1:
exon_pos.append([cds_3start, utr5_end])
for cds in cds_cod:
exon_pos.append(cds)
for utr5 in five_p_utr:
exon_pos.append(utr5)
else:
if jun_exon != []:
three_p_utr = three_p_utr[:-1]
cds_cod = cds_cod[1:]
for utr3 in three_p_utr:
exon_pos.append(utr3)
if jun_exon != []:
exon_pos.append(jun_exon)
jun_exon = []
(utr5_start, utr5_end) = (0, 0)
if five_p_utr != []:
utr5_start = five_p_utr[0][0]
utr5_end = five_p_utr[0][1]
cds_5start = cds_cod[-1][0]
cds_5end = cds_cod[-1][1]
if utr5_start-cds_5end == 0 or utr5_start-cds_5end == 1:
jun_exon = [cds_5start, utr5_end]
if jun_exon != []:
cds_cod = cds_cod[:-1]
five_p_utr = five_p_utr[1:]
for cds in cds_cod:
exon_pos.append(cds)
if jun_exon != []:
exon_pos.append(jun_exon)
for utr5 in five_p_utr:
exon_pos.append(utr5)
return exon_pos
def init_gene():
"""Initializing the gene structure"""
gene_details = dict(chr = '', exons = [], gene_info = {}, id = '', is_alt_spliced = 0, name = '', source = '', start = '', stop = '', strand = '', transcripts = []) |
return gene_details
def FeatureValueFormat(singlegene):
"""Make feature value compactable to write in a .mat format"""
## based on the feature set including for rQuant process each genes selected feature values.
import numpy as np
comp_exon = np.zeros((len(singlegene['exons']),), dty | pe=np.object)
for i in range(len(singlegene['exons'])):
comp_exon[i]= np.array(singlegene['exons'][i])
singlegene['exons'] = comp_exon
comp_transcripts = np.zeros((len(singlegene['transcripts']),), dtype=np.object)
for i in range(len(singlegene['transcripts'])):
comp_transcripts[i] = np.array(singlegene['transcripts'][i])
singlegene['transcripts'] = comp_transcripts
return singlegene
def CreateGeneModels(genes_cmpt, transcripts_cmpt, exons_cmpt, utr3_cmpt, utr5_cmpt, cds_cmpt):
"""Creating Coding/Non-coding gene models from parsed GFF objects."""
gene_counter, gene_models = 1, []
for gene_entry in genes_cmpt: ## Figure out the genes and transcripts associated feature
if gene_entry in transcripts_cmpt:
gene = init_gene() ## gene section related tags
gene['id'] = gene_counter
gene['name'] = gene_entry[1]
gene['chr'] = genes_cmpt[gene_entry]['chr']
gene['source'] = genes_cmpt[gene_entry]['source']
gene['start'] = genes_cmpt[gene_entry]['start']
gene['stop'] = genes_cmpt[gene_entry]['stop']
gene['strand'] = genes_cmpt[gene_entry]['strand']
if gene['strand'] != '+' and gene['strand'] != '-': gene['strand'] = '.' # Strand info not known replaced with a dot symbol instead of None, ?, . etc.
general_info = dict()
## TODO add more gene related information from a |
parroyo/Zappa | zappa/cli.py | Python | mit | 87,982 | 0.004065 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Zappa CLI
Deploy arbitrary Python programs as serverless Zappa applications.
"""
from __future__ import unicode_literals
from __future__ import division
import argcomplete
import argparse
import base64
import pkgutil
import botocore
import click
import collections
import hjson as json
import inspect
import importlib
import logging
import os
import pkg_resources
import random
import re
import requests
import slugify
import string
import sys
import tempfile
import time
import toml
import yaml
import zipfile
from click.exceptions import ClickException
from dateutil import parser
from datetime import datetime,timedelta
from zappa import Zappa, logger, API_GATEWAY_REGIONS
from util import (check_new_version_available, detect_django_settings,
detect_flask_apps, parse_s3_url, human_size)
CUSTOM_SETTINGS = [
'assume_policy',
'attach_policy',
'aws_region',
'delete_local_zip',
'delete_s3_zip',
'exclude',
'http_methods',
'integration_response_codes',
'method_header_types',
'method_response_codes',
'parameter_depth',
'role_name',
'touch',
]
##
# Main Input Processing
##
class ZappaCLI(object):
"""
ZappaCLI object is responsible for loading the settings,
handling the input arguments and executing the calls to the core library.
"""
# CLI
vargs = None
command = None
command_env = None
# Zappa settings
zappa = None
zappa_settings = None
load_credentials = True
# Specific settings
api_stage = None
app_function = None
aws_region = None
debug = None
prebuild_script = None
project_name = None
profile_name = None
lambda_arn = None
lambda_name = None
lambda_description = None
s3_bucket_name = None
settings_file = None
zip_path = None
handler_path = None
vpc_config = None
memory_siz | e = None
use_apigateway = None
lambda_handler = None
django_settings = None
manage_roles = True
exception_handler = None
environment_variables = None
authorizer = None
stage_name_env_pattern = re.compile('^[a-zA-Z0-9_]+$')
def __init__(self):
self._stage_config_overrides = {} # change using self.override_stage_config_setting(key, val)
@property
def stage_config(self):
"""
A shortcut | property for settings of a stage.
"""
def get_stage_setting(stage, extended_stages=None):
if extended_stages is None:
extended_stages = []
if stage in extended_stages:
raise RuntimeError(stage + " has already been extended to these settings. "
"There is a circular extends within the settings file.")
extended_stages.append(stage)
try:
stage_settings = dict(self.zappa_settings[stage].copy())
except KeyError:
raise ClickException("Cannot extend settings for undefined environment '" + stage + "'.")
extends_stage = self.zappa_settings[stage].get('extends', None)
if not extends_stage:
return stage_settings
extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages)
extended_settings.update(stage_settings)
return extended_settings
settings = get_stage_setting(stage=self.api_stage)
# Backwards compatible for delete_zip setting that was more explicitly named delete_local_zip
if u'delete_zip' in settings:
settings[u'delete_local_zip'] = settings.get(u'delete_zip')
settings.update(self.stage_config_overrides)
return settings
@property
def stage_config_overrides(self):
"""
Returns zappa_settings we forcefully override for the current stage
set by `self.override_stage_config_setting(key, value)`
"""
return getattr(self, '_stage_config_overrides', {}).get(self.api_stage, {})
def override_stage_config_setting(self, key, val):
"""
Forcefully override a setting set by zappa_settings (for the current stage only)
:param key: settings key
:param val: value
"""
self._stage_config_overrides = getattr(self, '_stage_config_overrides', {})
self._stage_config_overrides.setdefault(self.api_stage, {})[key] = val
def handle(self, argv=None):
"""
Main function.
Parses command, load settings and dispatches accordingly.
"""
desc = ('Zappa - Deploy Python applications to AWS Lambda'
' and API Gateway.\n')
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
'-v', '--version', action='version',
version=pkg_resources.get_distribution("zappa").version,
help='Print the zappa version'
)
parser.add_argument(
'-a', '--app_function', help='The WSGI application function.'
)
parser.add_argument(
'-s', '--settings_file', help='The path to a Zappa settings file.'
)
env_parser = argparse.ArgumentParser(add_help=False)
group = env_parser.add_mutually_exclusive_group()
all_help = ('Execute this command for all of our defined '
'Zappa environments.')
group.add_argument('--all', action='store_true', help=all_help)
group.add_argument('command_env', nargs='?')
##
# Certify
##
subparsers = parser.add_subparsers(title='subcommands', dest='command')
cert_parser = subparsers.add_parser(
'certify', parents=[env_parser],
help='Create and install SSL certificate'
)
cert_parser.add_argument(
'--no-cleanup', action='store_true',
help=("Don't remove certificate files from /tmp during certify."
" Dangerous.")
)
##
# Deploy
##
subparsers.add_parser(
'deploy', parents=[env_parser], help='Deploy application.'
)
subparsers.add_parser('init', help='Initialize Zappa app.')
##
# Package
##
package_parser = subparsers.add_parser(
'package', parents=[env_parser], help='Build the application zip package locally.'
)
##
# Invocation
##
invoke_parser = subparsers.add_parser(
'invoke', parents=[env_parser],
help='Invoke remote function.'
)
invoke_parser.add_argument(
'--raw', action='store_true',
help=('When invoking remotely, invoke this python as a string,'
' not as a modular path.')
)
invoke_parser.add_argument('command_rest')
##
# Manage
##
manage_parser = subparsers.add_parser(
'manage',
help='Invoke remote Django manage.py commands.'
)
rest_help = ("Command in the form of <env> <command>. <env> is not "
"required if --all is specified")
manage_parser.add_argument('--all', action='store_true', help=all_help)
manage_parser.add_argument('command_rest', nargs='+', help=rest_help)
##
# Rollback
##
def positive_int(s):
""" Ensure an arg is positive """
i = int(s)
if i < 0:
msg = "This argument must be positive (got {})".format(s)
raise argparse.ArgumentTypeError(msg)
return i
rollback_parser = subparsers.add_parser(
'rollback', parents=[env_parser],
help='Rollback deployed code to a previous version.'
)
rollback_parser.add_argument(
'-n', '--num-rollback', type=positive_int, default=0,
help='The number of versions to rollback.'
)
##
# Scheduling
##
subparsers.add_parser(
'schedule', parents=[env_parser],
help='Schedule f |
GitOnUp/environs | vimenv.py | Python | mit | 2,178 | 0.003673 | """
vimenv.py
Vim-specific environment helpers. This module uses git and pathogen to manage
vim plugins.
"""
from collections import namedtuple
from os import path, makedirs, walk, chdir, getcwd
from urllib import urlretrieve
from subprocess import check_call
VimPlugin = namedtuple('VimPlugin', ['find_file', 'friendly', 'clone_url'])
plugins = [
VimPlugin('NERD_tree.vim', 'NERDTree', 'https://github.com/scrooloose/nerdtree.git'),
VimPlugin('unite.vim', 'Unite', 'https://github.com/Shougo/unite.vim.git'),
VimPlugin('airline.vim', 'Airline', 'https://github.com/bling/vim-airline'),
VimPlugin('fugitive.vim', 'Fugitive', 'git://github.com/tpope/vim-fugitive.git'),
VimPlugin('vimproc.vim', 'vimproc', 'https://github.com/Shougo/vimproc.vim.git'),
VimPlugin('molokai.vim', 'Molokai', 'https://github.com/tomasr/molokai.git'),
]
_dotvim = path.expanduser('~/.vim')
_autoload = path.join(_dotvim, 'autoload')
_bundle = path.join(_dotvim, 'bundle')
def ensure_pathogen():
if path.isfile(path.join(_dotvim, 'autoload/pathogen.vim')):
return
print 'Pathogen not installed, getting it.'
if not path.exists(_autoload):
print 'making autoload dir'
makedirs(_autoload)
if not path.exists(_bundle):
print 'making bundle dir'
makedirs(_bundle)
print 'downloading pathogen'
urlretrieve('https://tpo.pe | /pathogen.vim',
path.join(_autoload, 'pathogen.vim'))
d | ef install_plugins():
ensure_pathogen()
def find_vim_file(dv):
for root, dirs, files in walk(_dotvim):
for file in files:
if file == vp.find_file:
return True
return False
origwd = getcwd()
chdir(_bundle)
ex = None
for vp in plugins:
if find_vim_file(vp.find_file):
print 'found ' + vp.friendly
continue
print 'cloning ' + vp.friendly
clonecmd = ['git', 'clone', vp.clone_url]
try:
check_call(clonecmd)
except Exception as e:
ex = e
break
chdir(origwd)
if ex is not None:
raise ex
|
virtacoin/VirtaCoinProject | contrib/pyminer/pyminer.py | Python | mit | 6,441 | 0.034777 | #!/usr/bin/python
#
# Copyright (c) 2011 The VirtaCoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class VirtaCoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = VirtaCoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer | .py CONFIG-FILE"
sys.exit(1)
f = | open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 22815
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
|
evgeniy-shorgin/python_training | fixture/db.py | Python | apache-2.0 | 2,681 | 0.006341 | import pymysql.cursors
from model.group import Group
from model.contact import Contact
class DbFixture:
def __init__(self, host, name, user, password):
self.host = host
self.name = name
self.user = user
self.password = password
self.connection = pymysql.connect(host=host, database=name, user=user, password=password)
self.connection.autocommit(True)
def get_group_list(self):
list = []
cursor = self.connection.cursor()
try:
cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
for row in cursor:
(ident, name, header, footer) = row
list.append(Group(ident=str(ident), name=name, header=header, footer=footer))
finally:
cursor.close()
return list
def get_contact_list(self):
list = []
cursor = self.connection.cursor()
try:
cursor.execute("SELECT id, firstname, middlename, lastname, nickname, title, company, address, "
"home, mobile, work, fax, email, email2, email3, homepage, byear, ayear, address2, "
"phone2, notes FROM addressbook WHERE deprecated='0000-00-00 00:00:00'")
for row in cursor:
(ident, firstname, middlename, lastname, nickname, title, company, company_address,
homephone, mobilephone, workphone, telephone_fax, email, email2, email3,
homepage, birthday_year, anniversary, secondary_address, secondaryphone,
secondary_notes) | = row
list.append(Contact(ident=str(ident), firstname=firstname, middlename=middlename, lastname=lastname,
nickname=nickname, title=title, company=company, company_address=company_address,
homephone=homephone, mobilephone=mobilephone, workphone=workphone,
telephone_fax=telephone_fax, email=email, email2=ema | il2, email3=email3,
homepage=homepage, birthday_year=birthday_year, anniversary=anniversary,
secondary_address=secondary_address, secondaryphone=secondaryphone,
secondary_notes=secondary_notes,
all_emails_from_homepage=email + email2 + email3,
all_phones_from_homepage=homephone + mobilephone + workphone + secondaryphone))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close()
|
opesci/devito | devito/ir/support/vector.py | Python | mit | 14,039 | 0.001353 | from collections import OrderedDict
from sympy import true
from devito.tools import as_tuple, is_integer, memoized_meth
from devito.types import Dimension
__all__ = ['Vector', 'LabeledVector', 'vmin', 'vmax']
class Vector(tuple):
"""
An object in an N-dimensional space.
The elements of a vector can be anything as long as they support the
comparison operators (`__eq__`, `__lt__`, ...). Also, the `__sub__`
operator must be available.
Notes
-----
1) Comparison of a Vector with a scalar
If a comparison between a vector and a non-vector is attempted, then the
non-vector is promoted to a vector; if this is not possible, an exception
is raised. This is handy because it turns a vector-scalar comparison into
a vector-vector comparison with the scalar broadcasted to as many vector
entries as necessary. For example:
(3, 4, 5) > 4 => (3, 4, 5) > (4, 4, 4) => False
2) Comparison of Vectors whose elements are SymPy expressions
We treat vectors of SymPy expressions as a very special case. When we
compare two elements, it might not be possible to determine the truth value
of the relation. For example, the truth value of `3*i < 4*j` cannot be
determined (unless some information about `i` and `j` is available). In
some cases, however, the comparison is feasible; for example, `i + 4 < i`
is definitely False. A sufficient condition for two Vectors to be
comparable is that their pair-wise indices are affine functions of the same
variables, with identical coefficient. If the Vector is instantiated
passing the keyword argument ``smart = True``, some manipulation will be
attempted to infer the truth value of a non-trivial symbolic relation. This
increases the cost of the comparison (and not always an answer may be
derived), so use it judiciously. By default, ``smart = False``.
Raises
------
TypeError
If two Vectors cannot be compared, e.g. due to incomparable symbolic entries.
"""
def __new__(cls, *items, smart=False):
obj = super(Vector, cls).__new__(cls, items)
obj.smart = smart
return obj
def _asvector(relax=False):
def __asvector(func):
def wrapper(self, other):
if not isinstance(other, Vector):
try:
other = Vector(*other)
except TypeError:
# Not iterable
other = Vector(*(as_tuple(other)*len(self)))
if relax is False and len(self) != len(other):
raise TypeError("Cannot operate with Vectors of different rank")
return func(self, other)
return wrapper
return __asvector
def __hash__(self):
return super(Vector, self).__hash__()
@_asvector()
def __add__(self, other):
return Vector(*[i + j for i, j in zip(self, other)], smart=self.smart)
@_asvector()
def __radd__(self, other):
return self + other
@_asvector()
def __sub__(self, other):
return Vector(*[i - j for i, j in zip(self, other)], smart=self.smart)
@_asvector()
def __rsub__(self, other):
return self - other
@_asvector(relax=True)
def __eq__(self, other):
return super(Vector, self).__eq__(other)
@_asvector(relax=True)
def __ne__(self, other):
return super(Vector, self).__ne__(other)
    def __lt__(self, other):
        """Lexicographic ``<`` driven by the entry-wise distance to ``other``.

        The first distance entry that is a non-zero integer decides; symbolic
        entries are resolved via SymPy relations only when ``smart`` is set
        (``true`` is presumably SymPy's boolean singleton, imported at module
        level -- TODO confirm).
        """
        # This might raise an exception if the distance between the i-th entry
        # of `self` and `other` isn't integer, but rather a generic expression
        # not comparable to 0. However, the implementation is "smart", in the
        # sense that it will return as soon as the first two comparable entries
        # (i.e., such that their distance is a non-zero integer) are found
        for i in self.distance(other):
            try:
                val = int(i)
                if val < 0:
                    return True
                elif val > 0:
                    return False
            except TypeError:
                if self.smart:
                    if (i < 0) == true:
                        return True
                    elif (i <= 0) == true:
                        # If `i` can assume the value 0 in at least one case,
                        # then definitely `i < 0` is generally False, so
                        # __lt__ must return False
                        return False
                    elif (i >= 0) == true:
                        return False
                raise TypeError("Non-comparable index functions")
        return False
    def __gt__(self, other):
        """Lexicographic ``>``; see ``__lt__`` for the comparison strategy."""
        # This method is "symmetric" to `__lt__`, but instead of just returning
        # `other.__lt__(self)` we implement it explicitly because this way we
        # can avoid computing the distance in the special case `other is 0`
        # This might raise an exception if the distance between the i-th entry
        # of `self` and `other` isn't integer, but rather a generic expression
        # not comparable to 0. However, the implementation is "smart", in the
        # sense that it will return as soon as the first two comparable entries
        # (i.e., such that their distance is a non-zero integer) are found
        for i in self.distance(other):
            try:
                val = int(i)
                if val > 0:
                    return True
                elif val < 0:
                    return False
            except TypeError:
                if self.smart:
                    if (i > 0) == true:
                        return True
                    elif (i >= 0) == true:
                        # If `i` can assume the value 0 in at least one case,
                        # then definitely `i > 0` is generally False, so
                        # __gt__ must return False
                        return False
                    elif (i <= 0) == true:
                        return False
                raise TypeError("Non-comparable index functions")
        return False
    def __le__(self, other):
        """Lexicographic ``<=``; not simply ``__lt__ or __eq__`` (see below)."""
        if self.__eq__(other):
            return True
        # We cannot simply resort to `__lt__` as it might happen that:
        # * v0 < v1 --> False
        # * v0 == v1 --> False
        # But
        # * v0 <= v1 --> True
        #
        # For example, take `v0 = (a + 2)` and `v1 = (2)`; if `a` is attached
        # the property that definitely `a >= 0`, then surely `v1 <= v0`, even
        # though it can't be assumed anything about `v1 < 0` and `v1 == v0`
        for i in self.distance(other):
            try:
                val = int(i)
                if val < 0:
                    return True
                elif val > 0:
                    return False
            except TypeError:
                if self.smart:
                    if (i < 0) == true:
                        return True
                    elif (i <= 0) == true:
                        continue
                    elif (i > 0) == true:
                        return False
                    elif (i >= 0) == true:
                        # See analogous considerations in __lt__
                        return False
                raise TypeError("Non-comparable index functions")
        # Note: unlike `__lt__`, if we end up here, then *it is* <=. For
        # example, with `v0` and `v1` as above, we would get here
        return True
    @_asvector()
    def __ge__(self, other):
        # Delegate to the mirrored comparison on the coerced operand.
        return other.__le__(self)
def __getitem__(self, key):
ret = super(Vector, self).__getitem__(key)
return Vector(*ret, smart=self.smart) if isinstance(key, slice) else ret
def __repr__(self):
return "(%s)" % ','.join(str(i) for i in self)
    @property
    def rank(self):
        # Number of components in the Vector.
        return len(self)
    @property
    def sum(self):
        # Sum of all components (may be a symbolic expression).
        return sum(self)
    @property
    def is_constant(self):
        # True iff every component is a plain integer (no symbolic entries);
        # `is_integer` is a module-level helper defined elsewhere in the file.
        return all(is_integer(i) for i in self)
def distance(self, other):
"""
Compute the distance from ``self`` to ``other``.
The distanc |
gregbdunn/aws-ec2rescue-linux | lib/boto3/dynamodb/types.py | Python | apache-2.0 | 9,677 | 0 | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from decimal import Decimal, Context, Clamped
from decimal import Overflow, Inexact, Underflow, Rounded
from boto3.compat import collections_abc
from botocore.compat import six
STRING = 'S'
NUMBER = 'N'
BINARY = 'B'
STRING_SET = 'SS'
NUMBER_SET = 'NS'
BINARY_SET = 'BS'
NULL = 'NULL'
BOOLEAN = 'BOOL'
MAP = 'M'
LIST = 'L'
DYNAMODB_CONTEXT = Context(
Emin=-128, Emax=126, prec=38,
traps=[Clamped, Overflow, Inexact, Rounded, Underflow])
BINARY_TYPES = (bytearray, six.binary_type)
class Binary(object):
    """A class for representing Binary in dynamodb

    Especially for Python 2, use this class to explicitly specify
    binary data for item in DynamoDB. It is essentially a wrapper around
    binary. Unicode and Python 3 string types are not allowed.
    """
    def __init__(self, value):
        # Only the raw binary containers listed in BINARY_TYPES are accepted.
        if not isinstance(value, BINARY_TYPES):
            allowed = ', '.join([str(t) for t in BINARY_TYPES])
            raise TypeError('Value must be of the following types: %s.' %
                            allowed)
        self.value = value
    def __eq__(self, other):
        # Unwrap the other operand when it is itself a Binary wrapper.
        target = other.value if isinstance(other, Binary) else other
        return self.value == target
    def __ne__(self, other):
        return not self.__eq__(other)
    def __repr__(self):
        return 'Binary(%r)' % self.value
    def __str__(self):
        # NOTE(review): returns the raw wrapped bytes, not text -- kept
        # as-is for backwards compatibility with existing callers.
        return self.value
    def __hash__(self):
        return hash(self.value)
class TypeSerializer(object):
    """This class serializes Python data types to DynamoDB types."""
    def serialize(self, value):
        """The method to serialize the Python data types.

        :param value: A python value to be serialized to DynamoDB. Here are
            the various conversions:

            Python                                  DynamoDB
            ------                                  --------
            None                                    {'NULL': True}
            True/False                              {'BOOL': True/False}
            int/Decimal                             {'N': str(value)}
            string                                  {'S': string}
            Binary/bytearray/bytes (py3 only)       {'B': bytes}
            set([int/Decimal])                      {'NS': [str(value)]}
            set([string])                           {'SS': [string])
            set([Binary/bytearray/bytes])           {'BS': [bytes]}
            list                                    {'L': list}
            dict                                    {'M': dict}

        For types that involve numbers, it is recommended that ``Decimal``
        objects are used to be able to round-trip the Python type.
        For types that involve binary, it is recommended that ``Binary``
        objects are used to be able to round-trip the Python type.

        :rtype: dict
        :returns: A dictionary that represents a dynamoDB data type. These
            dictionaries can be directly passed to botocore methods.
        """
        dynamodb_type = self._get_dynamodb_type(value)
        # Dispatch to the matching _serialize_<tag> method by naming convention.
        serializer = getattr(self, '_serialize_%s' % dynamodb_type.lower())
        return {dynamodb_type: serializer(value)}
    def _get_dynamodb_type(self, value):
        # Ordered dispatch: the first matching predicate wins. Typed sets are
        # probed before the generic map/list fallbacks.
        if self._is_null(value):
            return NULL
        if self._is_boolean(value):
            return BOOLEAN
        if self._is_number(value):
            return NUMBER
        if self._is_string(value):
            return STRING
        if self._is_binary(value):
            return BINARY
        if self._is_type_set(value, self._is_number):
            return NUMBER_SET
        if self._is_type_set(value, self._is_string):
            return STRING_SET
        if self._is_type_set(value, self._is_binary):
            return BINARY_SET
        if self._is_map(value):
            return MAP
        if self._is_list(value):
            return LIST
        msg = 'Unsupported type "%s" for value "%s"' % (type(value), value)
        raise TypeError(msg)
    def _is_null(self, value):
        return value is None
    def _is_boolean(self, value):
        return isinstance(value, bool)
    def _is_number(self, value):
        # Floats are rejected explicitly: they cannot round-trip through
        # DynamoDB's decimal representation.
        if isinstance(value, (six.integer_types, Decimal)):
            return True
        if isinstance(value, float):
            raise TypeError(
                'Float types are not supported. Use Decimal types instead.')
        return False
    def _is_string(self, value):
        return isinstance(value, six.string_types)
    def _is_binary(self, value):
        if isinstance(value, Binary):
            return True
        if isinstance(value, bytearray):
            return True
        return bool(six.PY3 and isinstance(value, six.binary_type))
    def _is_set(self, value):
        return isinstance(value, collections_abc.Set)
    def _is_type_set(self, value, type_validator):
        # True when `value` is a set and every element passes the validator.
        return self._is_set(value) and all(map(type_validator, value))
    def _is_map(self, value):
        return isinstance(value, collections_abc.Mapping)
    def _is_list(self, value):
        return isinstance(value, list)
    def _serialize_null(self, value):
        return True
    def _serialize_bool(self, value):
        return value
    def _serialize_n(self, value):
        # Normalise through the DynamoDB decimal context; non-finite values
        # have no wire representation.
        number = str(DYNAMODB_CONTEXT.create_decimal(value))
        if number in ['Infinity', 'NaN']:
            raise TypeError('Infinity and NaN not supported')
        return number
    def _serialize_s(self, value):
        return value
    def _serialize_b(self, value):
        if isinstance(value, Binary):
            value = value.value
        return value
    def _serialize_ss(self, value):
        return [self._serialize_s(s) for s in value]
    def _serialize_ns(self, value):
        return [self._serialize_n(n) for n in value]
    def _serialize_bs(self, value):
        return [self._serialize_b(b) for b in value]
    def _serialize_l(self, value):
        return [self.serialize(v) for v in value]
    def _serialize_m(self, value):
        # dict([...]) kept (rather than a dict comprehension) for very old
        # Python 2.6 compatibility, matching the file's vintage.
        return dict([(k, self.serialize(v)) for k, v in value.items()])
class TypeDeserializer(object):
"""This class deserializes DynamoDB types to Python types."""
def deserialize(self, value):
"""The method to deserialize the DynamoDB data types.
:param value: A DynamoDB value to be deserialized to a pythonic value.
Here are the various conversions:
DynamoDB Python
-------- ------
{'NULL': True} None
{'BOOL': True/False} True/False
{'N': str(value)} Decimal(str(value))
{'S': string} string
{'B': bytes} Binary(bytes)
{'NS': [str(value)]} set([Decimal(str(value))])
{'SS': [string]} set([string])
{'BS': [bytes]} set([bytes])
|
DavidParkin/pomodoro-indicator | app/twcurrent.py | Python | gpl-3.0 | 986 | 0.001014 | from taskw import TaskWarriorShellout
class TwCurrent(object):
    """Small facade over TaskWarrior for tracking a single 'current' task."""
    def __init__(self, file=None):
        # `file` optionally points at a non-default taskrc.
        self.tw = TaskWarriorShellout()
        self.tw.config_filename = file
    def get_current(self):
        """Return the (single) task tagged 'current'."""
        # Reuse the instance configured in __init__ rather than building a
        # second TaskWarriorShellout with a copied configuration.
        tasks = self.tw.filter_tasks({'tags.contains': 'current'})
        return tasks[0]
    def set_current(self, id):
        """Move the 'current' tag from any tagged task onto task `id`."""
        for task in self.tw.filter_tasks({'tags.contains': 'current'}):
            task['tags'].remove('current')
            self.tw.task_update(task)
        # Bug fixes: the requested id is now actually looked up (it was
        # previously ignored), and the tag is appended as one string --
        # list.extend('current') added the characters 'c','u','r','r',...
        id, task = self.tw.get_task(id=id)
        try:
            task['tags'].append('current')
        except KeyError:
            task['tags'] = ['current']
        self.tw.task_update(task)
    def get_pending(self):
        """Return all pending tasks."""
        return self.tw.filter_tasks({'status': 'pending'})
if __name__ == '__main__':
    # Ad-hoc smoke check against the default taskrc.
    tw = TwCurrent()
    tw.get_current()
EmadMokhtar/halaqat | halaqat/settings/shaha.py | Python | mit | 1,432 | 0.000698 | import os
import dj_database_url
from .base_settings import *
# Heroku deployment settings layered on top of base_settings.
ALLOWED_HOSTS = ['shaha-halaqat.herokuapp.com', '0.0.0.0']
# Pull the database configuration from the DATABASE_URL environment variable.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files are collected three directories above this settings module.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = ()
# STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates'],
        'APP_DIRS': False,
        'OPTIONS': {
            'loaders': [
                (
                    # Cached loader: templates are compiled once per process.
                    'django.template.loaders.cached.Loader', [
                        'django.template.loaders.filesystem.Loader',
                        'django.template.loaders.app_directories.Loader',
                    ]
                ),
            ],
            'context_processors': [
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',
                'django.template.context_processors.i18n',
            ],
        },
    },
]
LANGUAGE_CODE = 'ar'
LOCALE_PATHS = (
    os.path.join(PROJECT_ROOT, 'locale'),
)
quake0day/oj | Sparse Matrix Multiplication.py | Python | mit | 535 | 0.005607 | class Solution(object):
def | multiply(self, A, B):
"""
:type A: List[List[int]]
:type B: List[List[int]]
:rtype: List[List[int]]
"""
p = len(B[0])
C = [[0 for _ in xrange(p)] for _ in xrange(len(A))]
for i in xrange(len(A)):
for j in xrange(len(B)):
if A[i][j] != 0:
for k in xrange(p):
C[i][k] += A[i][j] * B[j][k]
return C
| |
kawasaki2013/getting-started-python | 7-gce/config.py | Python | apache-2.0 | 4,302 | 0 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This file contains all of the configuration values for the application.
Update this file with the values for your specific Google Cloud project.
You can create and manage projects at https://console.developers.google.com
"""
import os
# The secret key is used by Flask to encrypt session cookies.
SECRET_KEY = 'secret'
# There are three different ways to store the data in the application.
# You can choose 'datastore', 'cloudsql', or 'mongodb'. Be sure to
# configure the respective settings for the one you choose below.
# You do not have to configure the other data backends. If unsure, choose
# 'datastore' as it does not require any additional configuration.
DATA_BACKEND = 'datastore'
# Google Cloud Project ID. This can be found on the 'Overview' page at
# https://console.developers.google.com
PROJECT_ID = 'your-project-id'
# CloudSQL & SQLAlchemy configuration
# Replace the following values the respective values of your Cloud SQL
# instance.
CLOUDSQL_USER = 'root'
CLOUDSQL_PASSWORD = 'your-cloudsql-password'
CLOUDSQL_DATABASE = 'bookshelf'
# Set this value to the Cloud SQL connection name, e.g.
# "project:region:cloudsql-instance".
# You must also update the value in app.yaml.
CLOUDSQL_CONNECTION_NAME = 'your-cloudsql-connection-name'
# The CloudSQL proxy is used locally to connect to the cloudsql instance.
# To start the proxy, use:
#
# $ cloud_sql_proxy -instances= | your-connection-name=tcp:3306
#
# Alternatively, you could use a local MySQL instance for testing.
LOCAL_SQLALCHEMY_DATABASE_URI = (
'mysql+pymysql://{user}:{password}@localhost/{database}').format(
user=CLOUDSQL_USER, password=CLOUDSQL_PASSWORD,
database=CLOUDSQL_DATABASE)
# When running on App Engine a unix socket is used to connect to the cloudsql
# instance.
LIVE_SQLALCHEMY_DATABASE_ | URI = (
'mysql+pymysql://{user}:{password}@localhost/{database}'
'?unix_socket=/cloudsql/{connection_name}').format(
user=CLOUDSQL_USER, password=CLOUDSQL_PASSWORD,
database=CLOUDSQL_DATABASE, connection_name=CLOUDSQL_CONNECTION_NAME)
if os.environ.get('GAE_APPENGINE_HOSTNAME'):
SQLALCHEMY_DATABASE_URI = LIVE_SQLALCHEMY_DATABASE_URI
else:
SQLALCHEMY_DATABASE_URI = LOCAL_SQLALCHEMY_DATABASE_URI
# Mongo configuration
# If using mongolab, the connection URI is available from the mongolab control
# panel. If self-hosting on compute engine, replace the values below.
MONGO_URI = 'mongodb://user:password@host:27017/database'
# Google Cloud Storage and upload settings.
# Typically, you'll name your bucket the same as your project. To create a
# bucket:
#
# $ gsutil mb gs://<your-bucket-name>
#
# You also need to make sure that the default ACL is set to public-read,
# otherwise users will not be able to see their upload images:
#
# $ gsutil defacl set public-read gs://<your-bucket-name>
#
# You can adjust the max content length and allow extensions settings to allow
# larger or more varied file types if desired.
CLOUD_STORAGE_BUCKET = 'your-bucket-name'
MAX_CONTENT_LENGTH = 8 * 1024 * 1024
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])
# OAuth2 configuration.
# This can be generated from the Google Developers Console at
# https://console.developers.google.com/project/_/apiui/credential.
# Note that you will need to add all URLs that your application uses as
# authorized redirect URIs. For example, typically you would add the following:
#
# * http://localhost:8080/oauth2callback
# * https://<your-app-id>.appspot.com/oauth2callback.
#
# If you receive a invalid redirect URI error review you settings to ensure
# that the current URI is allowed.
GOOGLE_OAUTH2_CLIENT_ID = \
'your-client-id'
GOOGLE_OAUTH2_CLIENT_SECRET = 'your-client-secret'
|
mihaimaruseac/dphcar | tools/stats_scripts/parse_icde.py | Python | bsd-3-clause | 6,386 | 0.008613 | import os
import sys
import matplotlib.pyplot as plt
class Experiment:
    """One run of the `dph` tool, parsed from a result log file."""
    def __init__(self, db, eps, eps_share, c0, rmax, nits, bf, seed):
        self.db = db
        self.eps = eps
        self.eps_share = eps_share
        self.c0 = c0
        self.rmax = rmax
        self.nits = nits
        self.bf = bf
        self.seed = seed
        # Metrics filled in later by the record* methods.
        self.leaves = 0
        self.time = 0
        self.tpl = 0
        self.rules50 = 0
        self.rules = 0
        self.prec = 0
        self.realrules = 0
        self.recall = 0
    def recordLeaves(self, leaves):
        self.leaves = leaves
    def recordTime(self, time):
        # NOTE: recordLeaves must be called first -- tpl divides by leaves.
        self.time = time
        self.tpl = self.time / self.leaves
    def recordRules50(self, rules50):
        self.rules50 = rules50
    def recordPrecision(self, prec):
        self.prec = prec
    def recordRules(self, rules):
        self.rules = rules
    def recordRealRules(self, realrules):
        self.realrules = realrules
    def recordRecall(self, recall):
        self.recall = recall
    def __str__(self):
        """Tab-separated row matching the column order of legend()."""
        s = "{0.db:10}\t{0.eps}\t{0.eps_share}\t{0.c0}".format(self)
        s = "{1}\t{0.rmax}\t{0.nits}\t{0.bf}\t{0.seed}".format(self, s)
        s = "{1}\t{0.leaves}\t{0.time}\t{0.tpl:6.3f}".format(self, s)
        s = "{1}\t{0.rules}\t{0.rules50}\t{0.prec:5.2f}".format(self, s)
        s = "{1}\t{0.realrules}\t{0.recall}".format(self, s)
        return s
    @staticmethod
    def legend():
        """Header row for the table printed by print_experiments()."""
        s = "db\t\teps\tepsh\tc0\trmax\tnits\tbf\tseed\tleaves\ttime"
        s += "\ttpl\trules\trules50\tprec50\trealrls\trecall"
        return s
# Parse every result file directly inside the directory given as argv[1]
# into Experiment objects (recursion into subdirectories is disabled by
# the `break` at the bottom).
experiments = []
for r, _, files in os.walk(sys.argv[1]):
    for fname in files:
        with open(os.path.join(r, fname), "r") as f:
            exp = None
            for line in f:
                # The "./dph ..." command line opens a new experiment record;
                # subsequent marker lines fill in its metrics.
                if line.startswith("./dph"):
                    _, ds, _, eps, es, c0, rmax, nits, bf, seed = line.split()
                    exp = Experiment(ds.split('/')[-1].split('.')[0],\
                        float(eps), float(es), float(c0), int(rmax),\
                        int(nits), int(bf), int(seed))
                elif line.startswith("Total leaves"):
                    exp.recordLeaves(int((line.split()[-1])))
                elif line.startswith("Total time"):
                    exp.recordTime(float((line.split()[-1])))
                elif line.startswith("\t0.50"):
                    _, _, _, rs, prec = line.split()
                    exp.recordRules50(int(rs))
                    exp.recordPrecision(float(prec))
                elif line.startswith("\t0.00"):
                    _, _, _, rs, _ = line.split()
                    exp.recordRules(int(rs))
                elif line.startswith("Real"):
                    _, _, _, rr, _ = line.split()
                    exp.recordRealRules(int(rr))
                elif line.startswith("Recall"):
                    _, _, _, rc, _ = line.split()
                    exp.recordRecall(float(rc))
            experiments.append(exp)
    break # disable recursion
def print_experiments():
    # Dump the parsed experiments as a tab-separated table (Python 2
    # print statements; reads the module-level `experiments` list).
    print Experiment.legend()
    for exp in experiments:
        print exp
# Print the table and stop: sys.exit(-1) makes all the plotting code
# below unreachable (apparently left in deliberately).
print_experiments()
sys.exit(-1)
def plot_exp(exps, xfun, yfun, selecfuns=[], outname=None, title=None,
        xlabel=None, ylabel=None, xrng=None, yrng=None):
    """Scatter-plot yfun(exp) vs xfun(exp) for each experiment in `exps`
    that satisfies every predicate in `selecfuns`; save to `outname` or show.

    Returns early (no figure) when no experiment matches. NOTE: `selecfuns`
    is a mutable default, but it is never mutated here so this is harmless.
    """
    xmax = ymax = None
    # Bug fix: iterate over the `exps` argument; the original iterated the
    # global `experiments`, silently ignoring the parameter.
    for exp in exps:
        if not all([sf(exp) for sf in selecfuns]):
            continue
        # max(None, x) returns x under Python 2, which this script targets.
        xmax = max(xmax, xfun(exp))
        ymax = max(ymax, yfun(exp))
        plt.plot(xfun(exp), yfun(exp), 'bo')
    if xmax is None or ymax is None:
        return
    if xrng: plt.xlim(xrng)
    else: plt.xlim([0, 1.1*xmax])
    if yrng: plt.ylim(yrng)
    else: plt.ylim([0, 1.1*ymax])
    if title: plt.title(title)
    if xlabel: plt.xlabel(xlabel)
    if ylabel: plt.ylabel(ylabel)
    if outname: plt.savefig(outname)
    else: plt.show()
    plt.clf()
# Accessor shorthands used as axis extractors and selector keys below.
getDB = lambda xp: xp.db
getRMax = lambda xp: xp.rmax
getNI = lambda xp: xp.nits
getBF = lambda xp: xp.bf
getR = lambda xp: xp.rules
getR50 = lambda xp: xp.rules50
getP50 = lambda xp: xp.prec
getTime = lambda xp: xp.time
getTimeLeaf = lambda xp: xp.tpl
# have the x part and the selector filled in
def plot_against(exps, xfun, selecfuns, xlabel, xrng, xtitle):
    """Emit the five standard panels (total time, time per leaf, total rules,
    good rules, precision) plotted against `xfun` for the selected runs.

    The original repeated the title/outname/plot_exp boilerplate five times;
    it is now data-driven, producing byte-identical titles and filenames.
    """
    panels = [
        ('Total time', getTime, "time (s)", None),
        ('Time per leaf', getTimeLeaf, "time (s)", None),
        ('Total rules', getR, "# rules", None),
        ('Good rules', getR50, "# rules", None),
        ('Precision', getP50, "% rules", [-0.1, 1.1]),
    ]
    for prefix, yfun, ylabel, yrng in panels:
        title = '{} vs {}'.format(prefix, xtitle)
        outname = '{}.png'.format(title.replace('/','_').replace(' ','_'))
        plot_exp(exps, xfun, yfun, selecfuns, xlabel=xlabel, xrng=xrng,
            ylabel=ylabel, yrng=yrng, title=title, outname=outname)
# Sweep every (db, rmax) pair, plotting against branching factor at fixed
# item counts and against item count at fixed branching factors.
# NOTE(review): unreachable in practice -- sys.exit(-1) above runs first.
dbs = set([getDB(xp) for xp in experiments])
rmaxes = set([getRMax(xp) for xp in experiments])
nis = set([getNI(xp) for xp in experiments])
bfs = set([getBF(xp) for xp in experiments])
for db in dbs:
    dbsf = lambda xp: getDB(xp) == db
    for rmax in rmaxes:
        rmaxsf = lambda xp: getRMax(xp) == rmax
        for ni in nis:
            nisf = lambda xp: getNI(xp) == ni
            plot_against(experiments, getBF, [dbsf, rmaxsf, nisf],
                    "branching factor", None,
                    "branch for {} rmax={} items={}".format(db, rmax, ni))
        for bf in bfs:
            bisf = lambda xp: getBF(xp) == bf
            plot_against(experiments, getNI, [dbsf, rmaxsf, bisf],
                    "# items", None,
                    "items for {} rmax={} branch={}".format(db, rmax, bf))
IncidentNormal/TestApps | pyTest/meshgrid.py | Python | gpl-2.0 | 546 | 0.016484 | import numpy as np
def get_test_data(delta=0.05):
'''
Return a tuple X, Y, Z with a test data set.
'''
from matplotlib.mlab import bivariate_normal
x = y = np.arange(-3.0, 3.0, delta)
prin | t x
X, Y = np.meshgrid(x, y)
Z = np.sin(X)
print Z
## Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0)
##
## Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1)
##
## Z = Z2 - Z1
## #print 'Z'
## #print Z[0]
## #print Z[0][ | 0]
X = X * 10
Y = Y * 10
Z = Z * 500
return X, Y, Z
# Module-level demo run (side effect kept from the original script).
get_test_data()
|
Acidburn0zzz/readthedocs.org | readthedocs/rtd_tests/tests/test_doc_building.py | Python | mit | 818 | 0.001222 | import shutil
from django.contrib.admin.models import User
from projects.models import Project
from rtd_tests.utils import make_test_git
from rtd_tests.base import RTDTestCase
class TestBuilding(RTDTestCase):
    """These tests run the build functions directly. They don't use celery"""
    fixtures = ['eric.json']
    def setUp(self):
        # Create a throwaway git repository to act as the project checkout.
        repo = make_test_git()
        self.repo = repo
        super(TestBuilding, self).setUp()
        self.eric = User.objects.get(username='eric')
        self.project = Project.objects.create(
            name="Test Project",
            repo_type="git",
            #Our top-level checkout
            repo=repo,
        )
        self.project.users.add(self.eric)
    def tearDown(self):
        # Remove the temporary repository before the base-class cleanup.
        shutil.rmtree(self.repo)
        super(TestBuilding, self).tearDown()
anuragbanerjee/HandyTools | csv-to-json.py | Python | mit | 1,300 | 0.016923 | #!/usr/bin/env python
'''
CSV to JSON Converter
Created by Anurag Banerjee.
Copyright 2016. All rights reserved.
USAGE `python csv-to-json.py <CSV FILE>`
Use I/O Redirection for creating resulting files.
`python csv-to-json.py sample.csv > sample.json`
'''
from codecs import open
import json
import sys
def showInfo():
    # Print usage help (Python 2 print statements, as in the whole script).
    print "CSV to JSON Converter"
    print "USAGE: csv-to-json.py <CSVFILE>"
    print ""
    print "Use I/O redirection for creating resulting files. Example:"
    print "python ./csv-to-json.py sample.csv > sample.json"
def main():
if len(sys.argv) != 2 or sys.argv[1].split(".")[-1] != "csv":
showInfo()
return
result_json = []
with open(sys.argv[1], 'r', encoding="utf-8") as csv:
lines = csv.readlines()
fields = [field.rstrip() for field in lines[0].split(",")]
entryId = 0;
for l in lines[1:]:
| try:
entry = result_json[entryId]
except IndexError:
result_json.append({})
entry = result_json[-1]
for id, val in enumerate(l.split(",")):
result_json[entryId][fields[id]] = val.rstrip()
else:
entryId+=1
print json.dumps(
result_json,
encoding="utf-8",
indent=4,
separator | s=(',', ': '),
ensure_ascii=False
).encode("utf-8")
if __name__ == '__main__':
main() |
mikf/gallery-dl | gallery_dl/extractor/kohlchan.py | Python | gpl-2.0 | 2,737 | 0 | # -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
"""Extractors for https://kohlchan.net/"""
from .common import Extractor, Message
from .. import text
import itertools
class KohlchanThreadExtractor(Extractor):
    """Extractor for Kohlchan threads"""
    category = "kohlchan"
    subcategory = "thread"
    directory_fmt = ("{category}", "{boardUri}",
                     "{threadId} {subject|message[:50]}")
    filename_fmt = "{postId}{num:?-//} (unknown).{extension}"
    archive_fmt = "{boardUri}_{postId}_{num}"
    pattern = r"(?:https?://)?kohlchan\.net/([^/?#]+)/res/(\d+)"
    test = ("https://kohlchan.net/a/res/4594.html", {
        "pattern": r"https://kohlchan\.net/\.media/[0-9a-f]{64}(\.\w+)?$",
        "count": ">= 80",
    })
    def __init__(self, match):
        Extractor.__init__(self, match)
        self.board, self.thread = match.groups()
    def items(self):
        # Fetch the thread as JSON: the OP's fields live at top level, the
        # replies in the 'posts' array.
        url = "https://kohlchan.net/{}/res/{}.json".format(
            self.board, self.thread)
        thread = self.request(url).json()
        thread["postId"] = thread["threadId"]
        posts = thread.pop("posts")
        yield Message.Directory, thread
        for post in itertools.chain((thread,), posts):
            files = post.pop("files", ())
            if files:
                thread.update(post)
                for num, file in enumerate(files):
                    file.update(thread)
                    file["num"] = num
                    url = "https://kohlchan.net" + file["path"]
                    text.nameext_from_url(file["originalName"], file)
                    yield Message.Url, url, file
class KohlchanBoardExtractor(Extractor):
    """Extractor for Kohlchan boards"""
    category = "kohlchan"
    subcategory = "board"
    pattern = (r"(?:https?://)?kohlchan\.net"
               r"/([^/?#]+)/(?:(?:catalog|\d+)\.html)?$")
    test = (
        ("https://kohlchan.net/a/", {
            "pattern": KohlchanThreadExtractor.pattern,
            "count": ">= 100",
        }),
        ("https://kohlchan.net/a/2.html"),
        ("https://kohlchan.net/a/catalog.html"),
    )
    def __init__(self, match):
        Extractor.__init__(self, match)
        self.board = match.group(1)
    def items(self):
        # Enqueue every thread listed in the board's JSON catalog.
        catalog_url = "https://kohlchan.net/{}/catalog.json".format(self.board)
        for thread in self.request(catalog_url).json():
            thread["_extractor"] = KohlchanThreadExtractor
            thread_url = "https://kohlchan.net/{}/res/{}.html".format(
                self.board, thread["threadId"])
            yield Message.Queue, thread_url, thread
|
evernym/zeno | plenum/test/txn_author_agreement/acceptance/helper.py | Python | apache-2.0 | 2,183 | 0.000458 | import json
from indy.ledger import (
append_txn_author_agreement_acceptance_to_request, sign_request
)
from plenum.common.util import randomString
from plenum.test.pool_transactions.helper import (
prepare_nym_request, prepare_new_node_data, prepare_node_request
)
# TODO makes sense to make more generic and move to upper level helper
def build_nym_request(looper, sdk_wallet):
    """Build (synchronously, via the looper) a NYM request for a fresh
    random DID with a random alias and no role; returns the request only
    (prepare_nym_request returns a tuple, hence the trailing [0])."""
    return looper.loop.run_until_complete(
        prepare_nym_request(
            sdk_wallet,
            named_seed=randomString(32),
            alias=randomString(5),
            role=None
        )
    )[0]
# TODO makes sense to make more generic and move to upper level helper
def build_node_request(looper, tconf, tdir, sdk_wallet):
    """Generate fresh keys/endpoints for a new node named 'NodeXXX' and
    build (synchronously, via the looper) the corresponding NODE request
    signed-for by the steward DID in `sdk_wallet`."""
    new_node_name = 'Node' + randomString(3)
    sigseed, verkey, bls_key, nodeIp, nodePort, clientIp, clientPort, key_proof = \
        prepare_new_node_data(tconf, tdir, new_node_name)
    _, steward_did = sdk_wallet
    node_request = looper.loop.run_until_complete(
        prepare_node_request(steward_did,
                             new_node_name=new_node_name,
                             clientIp=clientIp,
                             clientPort=clientPort,
                             nodeIp=nodeIp,
                             nodePort=nodePort,
                             bls_key=bls_key,
                             sigseed=sigseed,
                             services=[],
                             key_proof=key_proof))
    return node_request
def add_taa_acceptance(
        looper,
        request_json,
        taa_text,
        taa_version,
        taa_acceptance_mech,
        taa_acceptance_time
):
    """Append transaction-author-agreement acceptance metadata to a request
    (JSON string) and return the augmented JSON; the digest is derived from
    text+version because taa_digest is passed as None."""
    return looper.loop.run_until_complete(
        append_txn_author_agreement_acceptance_to_request(
            request_json,
            text=taa_text,
            version=taa_version,
            taa_digest=None,
            mechanism=taa_acceptance_mech,
            time=taa_acceptance_time
        )
    )
def sign_request_dict(looper, sdk_wallet, req_dict):
    """Sign the request dict with the wallet's DID key; return it as a dict."""
    wallet_handle, did = sdk_wallet
    signed_json = looper.loop.run_until_complete(
        sign_request(wallet_handle, did, json.dumps(req_dict)))
    return json.loads(signed_json)
|
t11e/django | django/db/models/sql/subqueries.py | Python | bsd-3-clause | 7,675 | 0.002606 | """
Query subclasses which provide extra functionality beyond simple data retrieval.
"""
from django.core.exceptions import FieldError
from django.db import connections
from django.db.models.sql.constants import *
from django.db.models.sql.datastructures import Date
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.query import Query
from django.db.models.sql.where import AND, Constraint
__all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'DateQuery',
'AggregateQuery']
class DeleteQuery(Query):
    """
    Delete queries are done through this class, since they are more constrained
    than general queries.
    """
    compiler = 'SQLDeleteCompiler'
    def do_query(self, table, where, using):
        # Point the query at a single table and run the delete immediately.
        self.tables = [table]
        self.where = where
        self.get_compiler(using).execute_sql(None)
    def delete_batch(self, pk_list, using):
        """
        Set up and execute delete queries for all the objects in pk_list.
        More than one physical query may be executed if there are a
        lot of values in pk_list.
        """
        pk_field = self.model._meta.pk
        for start in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            chunk = pk_list[start:start + GET_ITERATOR_CHUNK_SIZE]
            where = self.where_class()
            where.add((Constraint(None, pk_field.column, pk_field), 'in',
                       chunk), AND)
            self.do_query(self.model._meta.db_table, where, using=using)
class UpdateQuery(Query):
"""
Represents an "update" SQL query.
"""
compiler = 'SQLUpdateCompiler'
def __init__(self, *args, **kwargs):
super(UpdateQuery, self).__init__(*args, **kwargs)
self._setup_query()
def _setup_query(self):
"""
Runs on initialization and after cloning. Any attributes that would
normally be set in __init__ should go in here, instead, so that they
are also set up after a clone() call.
"""
self.values = []
self.related_ids = None
if not hasattr(self, 'related_updates'):
self.related_updates = {}
def clone(self, klass=None, **kwargs):
return super(UpdateQuery, self).cl | one(klass,
related_updates=self.related_updates.copy(), **kwargs)
def clear_related(self, related_field, pk_list, using):
"""
Set up | and execute an update query that clears related entries for the
keys in pk_list.
This is used by the QuerySet.delete_objects() method.
"""
for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
self.where = self.where_class()
f = self.model._meta.pk
self.where.add((Constraint(None, f.column, f), 'in',
pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]),
AND)
self.values = [(related_field, None, None)]
self.get_compiler(using).execute_sql(None)
def add_update_values(self, values):
"""
Convert a dictionary of field name to value mappings into an update
query. This is the entry point for the public update() method on
querysets.
"""
values_seq = []
for name, val in values.iteritems():
field, model, direct, m2m = self.model._meta.get_field_by_name(name)
if not direct or m2m:
raise FieldError('Cannot update model field %r (only non-relations and foreign keys permitted).' % field)
if model:
self.add_related_update(model, field, val)
continue
values_seq.append((field, model, val))
return self.add_update_fields(values_seq)
def add_update_fields(self, values_seq):
"""
Turn a sequence of (field, model, value) triples into an update query.
Used by add_update_values() as well as the "fast" update path when
saving models.
"""
self.values.extend(values_seq)
def add_related_update(self, model, field, value):
"""
Adds (name, value) to an update query for an ancestor model.
Updates are coalesced so that we only run one update query per ancestor.
"""
try:
self.related_updates[model].append((field, None, value))
except KeyError:
self.related_updates[model] = [(field, None, value)]
    def get_related_updates(self):
        """
        Returns a list of query objects: one for each update required to an
        ancestor model. Each query will have the same filtering conditions as
        the current query but will only update a single table.
        """
        if not self.related_updates:
            return []
        result = []
        for model, values in self.related_updates.iteritems():
            query = UpdateQuery(model)
            query.values = values
            if self.related_ids:
                # Restrict the ancestor update to the rows matched by the
                # main query (resolved earlier into concrete pk values).
                query.add_filter(('pk__in', self.related_ids))
            result.append(query)
        return result
class InsertQuery(Query):
    """Query subclass that builds a single-table INSERT statement."""
    compiler = 'SQLInsertCompiler'
    def __init__(self, *args, **kwargs):
        super(InsertQuery, self).__init__(*args, **kwargs)
        # Parallel lists: column names, (field, placeholder) pairs, and the
        # SQL parameters matching the non-raw placeholders.
        self.columns = []
        self.values = []
        self.params = ()
    def clone(self, klass=None, **kwargs):
        # Copy the insert-specific state so the clone is independent.
        extras = {
            'columns': self.columns[:],
            'values': self.values[:],
            'params': self.params
        }
        extras.update(kwargs)
        return super(InsertQuery, self).clone(klass, **extras)
    def insert_values(self, insert_values, raw_values=False):
        """
        Set up the insert query from the 'insert_values' dictionary. The
        dictionary gives the model field names and their target values.
        If 'raw_values' is True, the values in the 'insert_values' dictionary
        are inserted directly into the query, rather than passed as SQL
        parameters. This provides a way to insert NULL and DEFAULT keywords
        into the query, for example.
        """
        placeholders, values = [], []
        for field, val in insert_values:
            placeholders.append((field, val))
            self.columns.append(field.column)
            values.append(val)
        if raw_values:
            # Raw mode: splice literal SQL fragments, no bound parameters.
            self.values.extend([(None, v) for v in values])
        else:
            # Parameterized mode: bind the values as query parameters.
            self.params += tuple(values)
            self.values.extend(placeholders)
class DateQuery(Query):
    """
    A DateQuery is a normal query, except that it specifically selects a single
    date field. This requires some special handling when converting the results
    back to Python objects, so we put it in a separate class.
    """
    compiler = 'SQLDateCompiler'
    def add_date_select(self, field, lookup_type, order='ASC'):
        """
        Converts the query into a date extraction query.
        """
        # Join through to the table holding the date field; the last alias in
        # the join chain is the table we select from.
        result = self.setup_joins([field.name], self.get_meta(),
                self.get_initial_alias(), False)
        alias = result[3][-1]
        # Select the date truncated per lookup_type (year/month/day).
        select = Date((alias, field.column), lookup_type)
        self.select = [select]
        self.select_fields = [None]
        self.select_related = False # See #7097.
        self.set_extra_mask([])
        # Distinct dates only, ordered by the single selected column.
        self.distinct = True
        self.order_by = order == 'ASC' and [1] or [-1]
class AggregateQuery(Query):
    """
    An AggregateQuery takes another query as a parameter to the FROM
    clause and only selects the elements in the provided list.
    """
    compiler = 'SQLAggregateCompiler'
    def add_subquery(self, query, using):
        # Compile the inner query immediately and stash its SQL and params;
        # the compiler later wraps it as the FROM clause of the outer SELECT.
        self.subquery, self.sub_params = query.get_compiler(using).as_sql(with_col_aliases=True)
|
mahak/cinder | cinder/tests/unit/backup/drivers/test_backup_nfs.py | Python | apache-2.0 | 40,941 | 0 | # Copyright (C) 2015 Tom Barron <tpb@dyncloud.net>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for Backup NFS driver."""
import bz2
import filecmp
import hashlib
import os
import shutil
import stat
import tempfile
import threading
from unittest import mock
import zlib
import ddt
from eventlet import tpool
from os_brick import exception as brick_exception
from os_brick.remotefs import remotefs as remotefs_brick
from oslo_config import cfg
import zstd
from cinder.backup.drivers import nfs
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import test
CONF = cfg.CONF
# Shared fixture values for the NFS backup driver tests below.
FAKE_BACKUP_MOUNT_POINT_BASE = '/fake/mount-point-base'
FAKE_HOST = 'fake_host'
FAKE_EXPORT_PATH = 'fake/export/path'
FAKE_BACKUP_SHARE = '%s:/%s' % (FAKE_HOST, FAKE_EXPORT_PATH)
FAKE_BACKUP_PATH = os.path.join(FAKE_BACKUP_MOUNT_POINT_BASE,
                                FAKE_EXPORT_PATH)
FAKE_BACKUP_ID = fake.BACKUP_ID
# Backups are sharded on disk as <id[:2]>/<id[2:4]>/<id>.
FAKE_BACKUP_ID_PART1 = fake.BACKUP_ID[:2]
FAKE_BACKUP_ID_PART2 = fake.BACKUP_ID[2:4]
FAKE_BACKUP_ID_REST = fake.BACKUP_ID[4:]
UPDATED_CONTAINER_NAME = os.path.join(FAKE_BACKUP_ID_PART1,
                                      FAKE_BACKUP_ID_PART2,
                                      FAKE_BACKUP_ID)
# Effective group id reported by the mocked os.getegid() in the tests.
FAKE_EGID = 1234
@ddt.ddt
class BackupNFSShareTestCase(test.TestCase):
    """Tests for backup-share configuration and mounting in the NFS driver."""
    def setUp(self):
        super(BackupNFSShareTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.mock_object(nfs, 'LOG')
        # Note(yikun): It mocks out the backup notifier to avoid to leak
        # notifications into other test.
        notify_patcher = mock.patch(
            'cinder.volume.volume_utils.notify_about_backup_usage')
        notify_patcher.start()
        self.addCleanup(notify_patcher.stop)
    def test_check_configuration_no_backup_share(self):
        # A missing backup_share option must be rejected at setup time.
        self.override_config('backup_share', None)
        self.mock_object(nfs.NFSBackupDriver, '_init_backup_repo_path',
                         return_value=FAKE_BACKUP_PATH)
        driver = nfs.NFSBackupDriver(self.ctxt)
        self.assertRaises(exception.InvalidConfigurationValue,
                          driver.check_for_setup_error)
    @mock.patch('os.getegid', return_value=FAKE_EGID)
    @mock.patch('cinder.utils.get_file_gid')
    @mock.patch('cinder.utils.get_file_mode')
    @ddt.data((FAKE_EGID, 0),
              (FAKE_EGID, stat.S_IWGRP),
              (6666, 0),
              (6666, stat.S_IWGRP))
    @ddt.unpack
    def test_init_backup_repo_path(self,
                                   file_gid,
                                   file_mode,
                                   mock_get_file_mode,
                                   mock_get_file_gid,
                                   mock_getegid):
        # Exercises the four combinations of (mount owner gid, group-write
        # bit): the driver must chgrp/chmod only when ownership or mode is
        # wrong, never otherwise.
        self.override_config('backup_share', FAKE_BACKUP_SHARE)
        self.override_config('backup_mount_point_base',
                             FAKE_BACKUP_MOUNT_POINT_BASE)
        mock_remotefsclient = mock.Mock()
        mock_remotefsclient.get_mount_point = mock.Mock(
            return_value=FAKE_BACKUP_PATH)
        self.mock_object(nfs.NFSBackupDriver, 'check_for_setup_error')
        self.mock_object(remotefs_brick, 'RemoteFsClient',
                         return_value=mock_remotefsclient)
        # Patch during construction so only the explicit call below runs.
        with mock.patch.object(nfs.NFSBackupDriver, '_init_backup_repo_path'):
            driver = nfs.NFSBackupDriver(self.ctxt)
        mock_get_file_gid.return_value = file_gid
        mock_get_file_mode.return_value = file_mode
        mock_execute = self.mock_object(driver, '_execute')
        path = driver._init_backup_repo_path()
        self.assertEqual(FAKE_BACKUP_PATH, path)
        mock_remotefsclient.mount.assert_called_once_with(FAKE_BACKUP_SHARE)
        mock_remotefsclient.get_mount_point.assert_called_once_with(
            FAKE_BACKUP_SHARE)
        # Build the exact set of root commands the driver should have issued.
        mock_execute_calls = []
        if file_gid != FAKE_EGID:
            mock_execute_calls.append(
                mock.call('chgrp',
                          '-R',
                          FAKE_EGID,
                          path,
                          root_helper=driver._root_helper,
                          run_as_root=True))
        if not (file_mode & stat.S_IWGRP):
            mock_execute_calls.append(
                mock.call('chmod',
                          '-R',
                          'g+w',
                          path,
                          root_helper=driver._root_helper,
                          run_as_root=True))
        mock_execute.assert_has_calls(mock_execute_calls, any_order=True)
        # No extra commands beyond the expected ones.
        self.assertEqual(len(mock_execute_calls), mock_execute.call_count)
    def test_init_backup_repo_path_unconfigured(self):
        """RemoteFsClient is not created if backup_share unset"""
        self.override_config('backup_share', None)
        mock_remotefsclient = mock.Mock()
        self.mock_object(remotefs_brick, 'RemoteFsClient')
        driver = nfs.NFSBackupDriver(self.ctxt)
        driver._init_backup_repo_path()
        self.assertEqual(0, mock_remotefsclient.call_count)
    @mock.patch('time.sleep')
    def test_init_backup_repo_path_mount_retry(self, mock_sleep):
        # With backup_mount_attempts=2 the driver retries once, then
        # re-raises the brick exception.
        self.override_config('backup_share', FAKE_BACKUP_SHARE)
        self.override_config('backup_mount_attempts', 2)
        mock_remotefsclient = mock.Mock()
        self.mock_object(remotefs_brick, 'RemoteFsClient',
                         return_value=mock_remotefsclient)
        mock_remotefsclient.mount.side_effect = [
            brick_exception.BrickException] * 2
        with mock.patch.object(nfs.NFSBackupDriver, '_init_backup_repo_path'):
            driver = nfs.NFSBackupDriver(self.ctxt)
            self.assertRaises(brick_exception.BrickException,
                              driver._init_backup_repo_path)
            self.assertEqual([mock.call(FAKE_BACKUP_SHARE),
                              mock.call(FAKE_BACKUP_SHARE)],
                             mock_remotefsclient.mount.call_args_list)
def fake_md5(arg, usedforsecurity=False):
    """Test double for hashlib.md5: always reports the same fixed digest."""
    class _FakeHash(object):
        def hexdigest(self):
            return 'fake-md5-sum'
    return _FakeHash()
class BackupNFSTestCase(test.TestCase):
    """Test Cases for NFS backup driver."""
    # Volume id used when a test does not supply one explicitly.
    _DEFAULT_VOLUME_ID = fake.VOLUME_ID
    def _create_volume_db_entry(self, volume_id=_DEFAULT_VOLUME_ID):
        # Create a minimal "available" 1 GB volume row and return its id.
        vol = {'id': volume_id,
               'size': 1,
               'status': 'available',
               'volume_type_id': self.vt['id']}
        return db.volume_create(self.ctxt, vol)['id']
    def _create_backup_db_entry(self,
                                volume_id=_DEFAULT_VOLUME_ID,
                                container='test-container',
                                backup_id=fake.BACKUP_ID,
                                parent_id=None,
                                status=None):
        # Backups reference a volume row, so lazily create the volume first
        # (EAFP: probe with volume_get and create on NotFound).
        try:
            db.volume_get(self.ctxt, volume_id)
        except exception.NotFound:
            self._create_volume_db_entry(volume_id=volume_id)
        backup = {'id': backup_id,
                  'size': 1,
                  'container': container,
                  'volume_id': volume_id,
                  'parent_id': parent_id,
                  'user_id': fake.USER_ID,
                  'project_id': fake.PROJECT_ID,
                  'status': status,
                  }
        return db.backup_create(self.ctxt, backup)['id']
def _write_effective_compression_file(self, data_size):
"""Ensure file content |
jolyonb/edx-platform | lms/djangoapps/verify_student/tests/test_signals.py | Python | agpl-3.0 | 4,171 | 0.003836 | """
Unit tests for the VerificationDeadline signals
"""
from datetime import timedelta
from django.utils.timezone import now
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification, VerificationDeadline
from lms.djangoapps.verify_student.signals import _listen_for_course_publish, _listen_for_lms_retire
from lms.djangoapps.verify_student.tests.factories import SoftwareSecurePhotoVerificationFactory
from openedx.core.djangoapps.user_api.accounts.tests.retirement_helpers import fake_completed_retirement
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class VerificationDeadlineSignalTest(ModuleStoreTestCase):
    """
    Tests for the VerificationDeadline signal
    """
    def setUp(self):
        super(VerificationDeadlineSignalTest, self).setUp()
        # Course ends a week from now; strip microseconds so equality
        # comparisons against round-tripped deadlines are stable.
        self.end = now().replace(microsecond=0) + timedelta(days=7)
        self.course = CourseFactory.create(end=self.end)
        VerificationDeadline.objects.all().delete()
    def test_no_deadline(self):
        """ Verify the signal sets deadline to course end when no deadline exists."""
        _listen_for_course_publish('store', self.course.id)
        self.assertEqual(VerificationDeadline.deadline_for_course(self.course.id), self.course.end)
    def test_deadline(self):
        """ Verify deadline is set to course end date by signal when changed. """
        # Implicit (non-explicit) deadlines get overwritten on publish.
        deadline = now() - timedelta(days=7)
        VerificationDeadline.set_deadline(self.course.id, deadline)
        _listen_for_course_publish('store', self.course.id)
        self.assertEqual(VerificationDeadline.deadline_for_course(self.course.id), self.course.end)
    def test_deadline_explicit(self):
        """ Verify deadline is unchanged by signal when explicitly set. """
        # Explicitly-set deadlines must survive a course publish untouched.
        deadline = now() - timedelta(days=7)
        VerificationDeadline.set_deadline(self.course.id, deadline, is_explicit=True)
        _listen_for_course_publish('store', self.course.id)
        actual_deadline = VerificationDeadline.deadline_for_course(self.course.id)
        self.assertNotEqual(actual_deadline, self.course.end)
        self.assertEqual(actual_deadline, deadline)
class RetirementSignalTest(ModuleStoreTestCase):
    """
    Tests for the LMS retirement signal handler that scrubs PII from
    SoftwareSecurePhotoVerification rows.
    """
    def _create_entry(self):
        """
        Helper method to create and return a SoftwareSecurePhotoVerification with appropriate data
        """
        name = 'Test Name'
        face_url = 'https://test.invalid'
        id_url = 'https://test2.invalid'
        key = 'test+key'
        user = UserFactory()
        return SoftwareSecurePhotoVerificationFactory(
            user=user,
            name=name,
            face_image_url=face_url,
            photo_id_image_url=id_url,
            photo_id_key=key
        )
    def test_retire_success(self):
        verification = self._create_entry()
        _listen_for_lms_retire(sender=self.__class__, user=verification.user)
        ver_obj = SoftwareSecurePhotoVerification.objects.get(user=verification.user)
        # All values for this user should now be empty string
        for field in ('name', 'face_image_url', 'photo_id_image_url', 'photo_id_key'):
            self.assertEqual('', getattr(ver_obj, field))
    def test_retire_success_no_entries(self):
        # A user with no verification rows must not raise.
        user = UserFactory()
        _listen_for_lms_retire(sender=self.__class__, user=user)
    def test_idempotent(self):
        verification = self._create_entry()
        # Run this twice to make sure there are no errors raised 2nd time through
        _listen_for_lms_retire(sender=self.__class__, user=verification.user)
        fake_completed_retirement(verification.user)
        _listen_for_lms_retire(sender=self.__class__, user=verification.user)
        ver_obj = SoftwareSecurePhotoVerification.objects.get(user=verification.user)
        # All values for this user should now be empty string
        for field in ('name', 'face_image_url', 'photo_id_image_url', 'photo_id_key'):
            self.assertEqual('', getattr(ver_obj, field))
|
mverzett/rootpy | rootpy/plotting/contrib/plot_corrcoef_matrix.py | Python | gpl-3.0 | 12,192 | 0.000082 | # Copyright 2012 the rootpy developers
# distributed under the terms of the GNU General Public License
from __future__ import absolute_import
from ...extern.six.moves import range
from ...extern.six import string_types
# Public API of this module.
__all__ = [
    'plot_corrcoef_matrix',
    'corrcoef',
    'cov',
]
def plot_corrcoef_matrix(matrix, names=None,
                         cmap=None, cmap_text=None,
                         fontsize=12, grid=False,
                         axes=None):
    """
    This function will draw a lower-triangular correlation matrix
    Parameters
    ----------
    matrix : 2-dimensional numpy array/matrix
        A correlation coefficient matrix. The input is not modified; the
        function works on an internal float copy.
    names : list of strings, optional (default=None)
        List of the parameter names corresponding to the rows in ``matrix``.
    cmap : matplotlib color map, optional (default=None)
        Color map used to color the matrix cells.
    cmap_text : matplotlib color map, optional (default=None)
        Color map used to color the cell value text. If None, then
        all values will be black.
    fontsize : int, optional (default=12)
        Font size of parameter name and correlation value text.
    grid : bool, optional (default=False)
        If True, then draw dashed grid lines around the matrix elements.
    axes : matplotlib Axes instance, optional (default=None)
        The axes to plot on. If None then use the global current axes.
    Notes
    -----
    NumPy and matplotlib are required
    Examples
    --------
    >>> matrix = corrcoef(data.T, weights=weights)
    >>> plot_corrcoef_matrix(matrix, names)
    """
    import numpy as np
    from matplotlib import pyplot as plt
    from matplotlib import cm
    if axes is None:
        axes = plt.gca()
    # Take a float copy: the upper triangle is overwritten with NaN below,
    # and np.asarray would alias a caller-owned ndarray (silently destroying
    # the caller's data) and reject NaN assignment for integer dtypes.
    matrix = np.array(matrix, dtype=float)
    if matrix.ndim != 2:
        raise ValueError("matrix is not a 2-dimensional array or matrix")
    if matrix.shape[0] != matrix.shape[1]:
        raise ValueError("matrix is not square")
    if names is not None and len(names) != matrix.shape[0]:
        raise ValueError("the number of names does not match the number of "
                         "rows/columns in the matrix")
    # mask out the upper triangular matrix
    matrix[np.triu_indices(matrix.shape[0])] = np.nan
    if isinstance(cmap_text, string_types):
        cmap_text = cm.get_cmap(cmap_text, 201)
    if cmap is None:
        cmap = cm.get_cmap('jet', 201)
    elif isinstance(cmap, string_types):
        cmap = cm.get_cmap(cmap, 201)
    # make NaN pixels white
    # NOTE(review): set_bad mutates the cmap object; for a cmap passed in by
    # name this touches matplotlib's registered instance -- confirm intended.
    cmap.set_bad('w')
    axes.imshow(matrix, interpolation='nearest',
                cmap=cmap, origin='upper',
                vmin=-1, vmax=1)
    axes.set_frame_on(False)
    plt.setp(axes.get_yticklabels(), visible=False)
    plt.setp(axes.get_yticklines(), visible=False)
    plt.setp(axes.get_xticklabels(), visible=False)
    plt.setp(axes.get_xticklines(), visible=False)
    if grid:
        # draw grid lines
        for slot in range(1, matrix.shape[0] - 1):
            # vertical
            axes.plot((slot - 0.5, slot - 0.5),
                      (slot - 0.5, matrix.shape[0] - 0.5), 'k:', linewidth=1)
            # horizontal
            axes.plot((-0.5, slot + 0.5),
                      (slot + 0.5, slot + 0.5), 'k:', linewidth=1)
        if names is not None:
            for slot in range(1, matrix.shape[0]):
                # diagonal
                axes.plot((slot - 0.5, slot + 1.5),
                          (slot - 0.5, slot - 2.5), 'k:', linewidth=1)
    # label cell values
    for row, col in zip(*np.tril_indices(matrix.shape[0], k=-1)):
        value = matrix[row][col]
        if cmap_text is not None:
            color = cmap_text((value + 1.) / 2.)
        else:
            color = 'black'
        axes.text(
            col, row,
            "{0:d}%".format(int(value * 100)),
            color=color,
            ha='center', va='center',
            fontsize=fontsize)
    if names is not None:
        # write parameter names
        for i, name in enumerate(names):
            axes.annotate(
                name, (i, i),
                rotation=45,
                ha='left', va='bottom',
                transform=axes.transData,
                fontsize=fontsize)
def cov(m, y=None, rowvar=1, bias=0, ddof=None, weights=None, repeat_weights=0):
"""
Estimate a covariance matrix, given data.
Covariance indicates the level to which two variables vary together.
If we examine N-dimensional samples, :math:`X = [x_1, x_2, ... x_N]^T`,
then the covariance matrix element :math:`C_{ij}` is the covariance of
:math:`x_i` and :math:`x_j`. The element :math:`C_{ii}` is the variance
of :math:`x_i`.
Parameters
----------
m : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
form as that of `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations given (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : int, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The covariance matrix of the variables.
See Also
--------
corrcoef : Normalized covariance matrix
Examples
--------
Consider two variables, :math:`x_0` and :math:`x_1`, which
correlate perfectly, but in opposite directions:
>>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
>>> x
array([[0, 1, 2],
[2, 1, 0]])
Note how :math:`x_0` increases while :math:`x_1` decreases. The covariance
matrix shows this clearly:
>>> np.cov(x)
array([[ 1., -1.],
[-1., 1.]])
Note that element :math:`C_{0,1}`, which shows the correlation between
:math:`x_0` and :math:`x_1`, is negative.
Further, note how `x` and `y` are combined:
>>> x = [-2.1, -1, 4.3]
>>> y = [3, 1.1, 0.12]
>>> X = np.vstack((x,y))
>>> print np.cov(X)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x, y)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x)
11.71
"""
import numpy as np
# Check inputs
if ddof is not None and ddof != int(ddof):
raise ValueError(
"ddof must be integer")
X = np.array(m, ndmin=2, dtype=float)
if X.size == 0:
# handle empty arrays
return np.array(m)
if X.shape[0] == 1:
rowvar = 1
if rowvar:
axis = 0
tup = (slice(None), np.newaxis)
else:
axis = 1
tup = (np.newaxis, slice(None))
if y is not None:
y = np.array(y, copy=False, ndmin=2, dtype=float)
X = np.concatenate((X, y), axis)
i |
404d/Temporals-Web | temporals_web/gmod/views.py | Python | mit | 7,019 | 0.001425 | # -*- encoding: utf-8 -*-
import hashlib
import json
import os
import re
import magic
from perpetualfailure.db import session
from pyramid.authentication import (
Authenticated,
Everyone,
)
from pyramid.httpexceptions import (
HTTPException,
HTTPBadRequest,
HTTPFound,
HTTPNotFound,
HTTPForbidden,
)
from pyramid.view import view_config
from sqlalchemy.sql.expression import func
import temporals_web.gmod.models as m
@view_config(
    route_name='servers.gmod.loading',
    renderer="gmod/loading.mako",
)
def gmod_loading(request):
    """Render the Garry's Mod loading screen for a connecting player.

    Converts the 64-bit SteamID64 passed by the game into the textual
    STEAM_0:X:Y form, looks up the player's Steam profile, and ships all
    gamemode metadata to the template as JSON.
    """
    # SteamID64 of STEAM_0:0:0; subtracting it leaves auth-server bit + 2*id.
    id_magic_number = 76561197960265728
    community_id = int(request.GET["steamid"])
    auth_server = (community_id - id_magic_number) & 1
    # Floor division: plain '/' is true division on Python 3 and would make
    # auth_id a float (previously masked only by the '%i' formatting below).
    auth_id = (community_id - id_magic_number - auth_server) // 2
    steam_id = "STEAM_0:%i:%i" % (auth_server, auth_id)
    player = request.steamweb.player_profile(request.GET["steamid"])
    # Expose every gamemode (rules, extra info) to the loading page.
    data = {}
    for mode in session.query(m.LS_Gamemode).all():
        data[mode.name] = mode
    game_data = json.dumps(data, cls=m.ObjectEncoder)
    return {"steamid": steam_id, "player": player, "game_data": game_data}
@view_config(route_name='servers.gmod.background')
def gmod_background(request):
    # Redirect to a random loading-screen background, preferring the most
    # specific match for the requested map/gamemode and progressively
    # relaxing the filters until something matches.
    map = None
    if "map" in request.GET:
        map = request.GET["map"]
    gamemode = None
    if "gamemode" in request.GET:
        gamemode = request.GET["gamemode"]
    # 1) Backgrounds for this map, tied to this gamemode or to no gamemode.
    query = session.query(m.LS_Background)
    query = query.filter(m.LS_Background.gamemode.in_([gamemode, None]))
    query = query.filter(m.LS_Background.map == map)
    if query.count() < 1:
        # 2) Any background for this map, regardless of gamemode.
        query = session.query(m.LS_Background)
        query = query.filter(m.LS_Background.map == map)
        if query.count() < 1:
            # 3) NOTE(review): this filter is a strict subset of (2), so it
            # can never match when (2) was empty -- looks like dead code.
            query = session.query(m.LS_Background)
            query = query.filter(m.LS_Background.map == map, m.LS_Background.gamemode == None)
            if query.count() < 1:
                # 4) Any background for this gamemode on any map.
                query = session.query(m.LS_Background)
                query = query.filter(m.LS_Background.gamemode == gamemode)
                if query.count() < 1:
                    # 5) Fall back to the whole table.
                    query = session.query(m.LS_Background)
    # NOTE(review): with an empty table `bg` is None and bg.url raises --
    # presumably acceptable here; confirm.
    bg = query.order_by(func.random()).first()
    return HTTPFound(location=request.resolve(bg.url))
@view_config(
    route_name='servers.gmod.acp.loading',
    renderer="gmod/acp/loading.mako",
    permission=Authenticated,
)
def acp_loading(request):
    """ACP preview of the loading screen: lists every configured gamemode."""
    return {"gamemodes": session.query(m.LS_Gamemode)}
@view_config(
    route_name='servers.gmod.acp.background.gallery',
    renderer="gmod/acp/background/gallery.mako",
    permission=Authenticated,
)
def acp_background_gallery(request):
    """ACP gallery listing of all loading-screen backgrounds."""
    return {"backgrounds": session.query(m.LS_Background).all()}
@view_config(
    route_name='servers.gmod.acp.background.add',
    renderer="gmod/acp/background/edit.mako",
    permission=Authenticated,
)
def acp_background_add(request):
    """Create a new loading-screen background from an uploaded image."""
    new_bg = m.LS_Background()
    if not request.permits("create", new_bg):
        return HTTPForbidden()
    outcome = background_update(request, new_bg, upload=True)
    if isinstance(outcome, HTTPException):
        return outcome
    return {"background": new_bg, "upload": True}
@view_config(
    route_name='servers.gmod.acp.background.edit',
    renderer="gmod/acp/background/edit.mako",
    permission=Authenticated,
)
def acp_background_edit(request):
    """Edit the metadata of an existing loading-screen background."""
    bg = session.query(m.LS_Background).filter_by(
        id=request.matchdict["id"]).first()
    if not request.permits("edit", bg):
        return HTTPForbidden()
    outcome = background_update(request, bg)
    if isinstance(outcome, HTTPException):
        return outcome
    return {"background": bg, "upload": False}
@view_config(
    route_name='servers.gmod.acp.gamemode.add',
    renderer="gmod/acp/gamemode.mako",
    permission=Authenticated,
)
def acp_gamemode_add(request):
    """Create a new gamemode entry via the ACP form."""
    new_mode = m.LS_Gamemode()
    if not request.permits("create", new_mode):
        return HTTPForbidden()
    outcome = gamemode_update(request, new_mode)
    if isinstance(outcome, HTTPException):
        return outcome
    return {"gamemode": new_mode}
@view_config(
    route_name='servers.gmod.acp.gamemode.edit',
    renderer="gmod/acp/gamemode.mako",
    permission=Authenticated,
)
def acp_gamemode_edit(request):
    """Edit an existing gamemode entry via the ACP form."""
    mode = session.query(m.LS_Gamemode).filter_by(
        id=request.matchdict["id"]).first()
    if not request.permits("edit", mode):
        return HTTPForbidden()
    outcome = gamemode_update(request, mode)
    if isinstance(outcome, HTTPException):
        return outcome
    return {"gamemode": mode}
def gamemode_update(request, gamemode):
    """Apply a POSTed ACP form to a gamemode.

    Returns None for non-POST requests, HTTPBadRequest when required fields
    are missing, and an HTTPFound redirect to the edit page on success.
    """
    if request.method != "POST":
        return None
    for key in ['title', 'name', 'rules', 'extrainfo']:
        if key not in request.POST:
            return HTTPBadRequest()
    def clean(text):
        # Turn list-formatted text into a list of items: lines starting with
        # "- " or "N. " begin a new item; unmarked non-empty lines are
        # treated as soft-wrapped continuations of the previous item.
        lines = text.split("\n")
        output = []
        for line in lines:
            oldline = line
            if line.startswith("- "):
                line = line[2:]
            line = re.sub("^[0-9]+\. ", "", line)
            if oldline == line and line.strip():
                # No marker was stripped: append to the current item.
                if not output:
                    output.append("")
                output[-1] = (output[-1] + " " + line.strip()).strip()
            elif line.strip():
                output.append(line.strip())
        return output
    gamemode.title = request.params['title']
    gamemode.name = request.params['name']
    gamemode.rules = clean(request.params['rules'])
    gamemode.extrainfo = clean(request.params['extrainfo'])
    session.add(gamemode)
    session.flush()
    return HTTPFound(location=request.route_path('servers.gmod.acp.gamemode.edit', id=gamemode.id))
def background_update(request, gamemode, upload=False):
    """Apply a POSTed create/edit form to a loading-screen background.

    NOTE(review): the ``gamemode`` parameter actually holds an
    ``LS_Background`` instance; the name is kept for call compatibility.
    Returns None for non-POST requests, an HTTPException subclass on error,
    and an HTTPFound redirect to the edit page on success.
    """
    if request.method != "POST":
        return None
    for key in ['map', 'gamemode']:
        if key not in request.POST:
            return HTTPBadRequest()
    if upload:
        if 'image' not in request.POST or not request.POST['image'].file:
            return HTTPBadRequest()
        image = request.POST['image'].file
        # Sniff the real content type from the leading bytes; never trust the
        # client-supplied filename or Content-Type header.
        mime = magic.from_buffer(image.read(1024), mime=True)
        if not mime.startswith("image/"):
            return HTTPBadRequest()
        ext = mime.split("/")[-1]
        if ext not in ["png", "jpeg", "jpg", "gif", "bmp", "tiff", "targa"]:
            return HTTPBadRequest()
        image.seek(0)
        # Content-addressed filename so identical uploads deduplicate.
        # (renamed from `hash`, which shadowed the builtin)
        digest = hashlib.sha1(image.read()).hexdigest()
        image.seek(0)
        cdn_path = os.path.join("bg", "%s.%s" % (digest, ext))
        path = os.path.join(request.registry.settings["upload_path"], cdn_path)
        # Binary mode is required: image.read() yields bytes, and the
        # original text-mode "w" open corrupts the data (and raises
        # TypeError on Python 3). `with` also guarantees the file is closed.
        with open(path, "wb") as target_file:
            target_file.write(image.read())
        gamemode.url = "cdn:%s" % cdn_path
    gamemode.map = request.params['map']
    # An empty form value means "any gamemode" and is stored as NULL.
    gamemode.gamemode = request.params['gamemode'] if request.params['gamemode'] else None
    session.add(gamemode)
    session.flush()
    return HTTPFound(location=request.route_path('servers.gmod.acp.background.edit', id=gamemode.id))
|
synicalsyntax/zulip | zerver/tests/test_message_edit_notifications.py | Python | apache-2.0 | 21,406 | 0.001261 | from typing import Any, Dict, Mapping, Union
from unittest import mock
from django.utils.timezone import now as timezone_now
from zerver.lib.actions import get_client
from zerver.lib.push_notifications import get_apns_badge_count
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Subscription, UserPresence
from zerver.tornado.event_queue import maybe_enqueue_notifications
class EditMessageSideEffectsTest(ZulipTestCase):
    def _assert_update_does_not_notify_anybody(self, message_id: int, content: str) -> None:
        # Edit the message via the API and assert the notification
        # entry point (maybe_enqueue_notifications) was never invoked.
        url = '/json/messages/' + str(message_id)
        request = dict(
            message_id=message_id,
            content=content,
        )
        with mock.patch('zerver.tornado.event_queue.maybe_enqueue_notifications') as m:
            result = self.client_patch(url, request)
        self.assert_json_success(result)
        self.assertFalse(m.called)
    def test_updates_with_pm_mention(self) -> None:
        # Adding a mention while editing a *private* message should not
        # trigger any new notifications (the recipient was already notified
        # of the PM itself).
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        self.login_user(hamlet)
        message_id = self.send_personal_message(
            hamlet,
            cordelia,
            content='no mention',
        )
        self._assert_update_does_not_notify_anybody(
            message_id=message_id,
            content='now we mention @**Cordelia Lear**',
        )
    def _login_and_send_original_stream_message(self, content: str,
                                                enable_online_push_notifications: bool=False) -> int:
        '''
        Note our conventions here:
        Hamlet is our logged in user (and sender).
        Cordelia is the receiver we care about.
        Scotland is the stream we send messages to.
        Returns the id of the message sent.
        '''
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        # Configure the receiver's push preference before the send.
        cordelia.enable_online_push_notifications = enable_online_push_notifications
        cordelia.save()
        self.login_user(hamlet)
        # Both users must be subscribed for Cordelia to be notifiable.
        self.subscribe(hamlet, 'Scotland')
        self.subscribe(cordelia, 'Scotland')
        message_id = self.send_stream_message(
            hamlet,
            'Scotland',
            content=content,
        )
        return message_id
    def _get_queued_data_for_message_update(self, message_id: int, content: str,
                                            expect_short_circuit: bool=False) -> Dict[str, Any]:
        '''
        This function updates a message with a post to
        /json/messages/(message_id).
        By using mocks, we are able to capture two pieces of data:
            enqueue_kwargs: These are the arguments passed in to
                            maybe_enqueue_notifications.
            queue_messages: These are the messages that
                            maybe_enqueue_notifications actually
                            puts on the queue.
        Using this helper allows you to construct a test that goes
        pretty deep into the missed-messages codepath, without actually
        queuing the final messages.
        '''
        url = '/json/messages/' + str(message_id)
        request = dict(
            message_id=message_id,
            content=content,
        )
        # First, intercept the notification entry point during the edit.
        with mock.patch('zerver.tornado.event_queue.maybe_enqueue_notifications') as m:
            result = self.client_patch(url, request)
        cordelia = self.example_user('cordelia')
        # Only the calls made on behalf of Cordelia are of interest.
        cordelia_calls = [
            call_args
            for call_args in m.call_args_list
            if call_args[1]['user_profile_id'] == cordelia.id
        ]
        if expect_short_circuit:
            self.assertEqual(len(cordelia_calls), 0)
            return {}
        # Normally we expect maybe_enqueue_notifications to be
        # called for Cordelia, so continue on.
        self.assertEqual(len(cordelia_calls), 1)
        enqueue_kwargs = cordelia_calls[0][1]
        queue_messages = []
        def fake_publish(queue_name: str,
                         event: Union[Mapping[str, Any], str],
                         *args: Any) -> None:
            # Capture queue traffic instead of publishing it.
            queue_messages.append(dict(
                queue_name=queue_name,
                event=event,
            ))
        # Second, replay the captured call for real, but with the queue
        # publisher stubbed so we can inspect what would be enqueued.
        with mock.patch('zerver.tornado.event_queue.queue_json_publish') as m:
            m.side_effect = fake_publish
            maybe_enqueue_notifications(**enqueue_kwargs)
        self.assert_json_success(result)
        return dict(
            enqueue_kwargs=enqueue_kwargs,
            queue_messages=queue_messages,
        )
    def _send_and_update_message(self, original_content: str, updated_content: str,
                                 enable_online_push_notifications: bool=False,
                                 expect_short_circuit: bool=False,
                                 connected_to_zulip: bool=False,
                                 present_on_web: bool=False) -> Dict[str, Any]:
        # End-to-end helper: send a stream message as Hamlet, optionally
        # simulate Cordelia being connected/present, then edit the message
        # and capture the resulting notification data.
        message_id = self._login_and_send_original_stream_message(
            content=original_content,
            enable_online_push_notifications=enable_online_push_notifications,
        )
        if present_on_web:
            self._make_cordelia_present_on_web()
        if connected_to_zulip:
            # Perform the edit while Cordelia has an active event queue.
            with self._cordelia_connected_to_zulip():
                info = self._get_queued_data_for_message_update(
                    message_id=message_id,
                    content=updated_content,
                    expect_short_circuit=expect_short_circuit,
                )
        else:
            info = self._get_queued_data_for_message_update(
                message_id=message_id,
                content=updated_content,
                expect_short_circuit=expect_short_circuit,
            )
        return dict(
            message_id=message_id,
            info=info,
        )
    def test_updates_with_stream_mention(self) -> None:
        # Adding a mention to a stream message during an edit should notify
        # the newly-mentioned (idle) user by both mobile push and email.
        original_content = 'no mention'
        updated_content = 'now we mention @**Cordelia Lear**'
        notification_message_data = self._send_and_update_message(original_content, updated_content)
        message_id = notification_message_data['message_id']
        info = notification_message_data['info']
        cordelia = self.example_user('cordelia')
        # Pin the exact arguments the edit passed to
        # maybe_enqueue_notifications for Cordelia.
        expected_enqueue_kwargs = dict(
            user_profile_id=cordelia.id,
            message_id=message_id,
            private_message=False,
            mentioned=True,
            wildcard_mention_notify=False,
            stream_push_notify=False,
            stream_email_notify=False,
            stream_name='Scotland',
            always_push_notify=False,
            idle=True,
            already_notified={},
        )
        self.assertEqual(info['enqueue_kwargs'], expected_enqueue_kwargs)
        # One push notification plus one missed-message email.
        queue_messages = info['queue_messages']
        self.assertEqual(len(queue_messages), 2)
        self.assertEqual(queue_messages[0]['queue_name'], 'missedmessage_mobile_notifications')
        mobile_event = queue_messages[0]['event']
        self.assertEqual(mobile_event['user_profile_id'], cordelia.id)
        self.assertEqual(mobile_event['trigger'], 'mentioned')
        self.assertEqual(queue_messages[1]['queue_name'], 'missedmessage_emails')
        email_event = queue_messages[1]['event']
        self.assertEqual(email_event['user_profile_id'], cordelia.id)
        self.assertEqual(email_event['trigger'], 'mentioned')
def test_second_mention_is_ignored(self) -> None:
original_content = 'hello @**Cordelia Lear**'
updated_content = 're-mention @**Cordelia Lear**'
self._send_and_update_message(original_content, updated_content,
expect_short_circuit=True)
def _turn_on_stream_push_for_cordelia(self) -> None:
'''
conventions:
Cordelia is the message receiver we care about.
Scotland is our stream.
'''
cordelia = self.example_user('cordelia')
stream = self.subscribe(cordelia, 'Scotland')
recipient = stream.recipient
cordelia_subscription = Subscription.objects.get(
user_profile_id=cordelia |
mfraezz/osf.io | tests/test_views.py | Python | apache-2.0 | 220,173 | 0.001894 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Views tests for the OSF."""
from __future__ import absolute_import
import datetime as dt
from rest_framework import status as http_status
import json
import time
import unittest
from future.moves.urllib.parse import quote
from flask import request
import mock
import pytest
from nose.tools import * # noqa PEP8 asserts
from django.utils import timezone
from django.apps import apps
from django.core.exceptions import ValidationError
from django.db import connection, transaction
from django.test import TransactionTestCase
from django.test.utils import CaptureQueriesContext
from addons.github.tests.factories import GitHubAccountFactory
from addons.wiki.models import WikiPage
from framework.auth import cas, authenticate
from framework.flask import redirect
from framework.auth.core import generate_verification_key
from framework import auth
from framework.auth.campaigns import get_campaigns, is_institution_login, is_native_login, is_proxy_login, campaign_url_for
from framework.auth import Auth
from framework.auth.cas import get_login_url
from framework.auth.exceptions import InvalidTokenError
from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness
from framework.auth.views import login_and_register_handler
from framework.celery_tasks import handlers
from framework.exceptions import HTTPError, TemplateHTTPError
from framework.transactions.handlers import no_auto_transaction
from website import mailchimp_utils, mails, settings, language
from addons.osfstorage import settings as osfstorage_settings
from osf.models import AbstractNode, NodeLog, QuickFilesNode
from website.profile.utils import add_contributor_json, serialize_unregistered
from website.profile.views import update_osf_help_mails_subscription
from website.project.decorators import check_can_access
from website.project.model import has_anonymous_link
from website.project.signals import contributor_added
from website.project.views.contributor import (
deserialize_contributors,
notify_added_contributor,
send_claim_email,
send_claim_registered_email,
)
from website.project | .views.node import _should_show_wiki_widget, _view_project, abbrev_authors
from website.util import api_url_for, web_url_for
from website.util import rubeus
from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag
from osf.utils import permissions
from osf.models import Commen | t
from osf.models import OSFUser, Tag
from osf.models import Email
from osf.models.spam import SpamStatus
from tests.base import (
assert_is_redirect,
capture_signals,
fake,
get_default_metaschema,
OsfTestCase,
assert_datetime_equal,
)
from tests.base import test_app as mock_app
from tests.utils import run_celery_tasks
from tests.test_cas_authentication import generate_external_user_with_resp, make_external_response
from api_tests.utils import create_test_file
pytestmark = pytest.mark.django_db
from osf.models import NodeRelation, QuickFilesNode, NotableEmailDomain
from osf_tests.factories import (
fake_email,
ApiOAuth2ApplicationFactory,
ApiOAuth2PersonalTokenFactory,
AuthUserFactory,
CollectionFactory,
CommentFactory,
InstitutionFactory,
NodeFactory,
OSFGroupFactory,
PreprintFactory,
PreprintProviderFactory,
PrivateLinkFactory,
ProjectFactory,
ProjectWithAddonFactory,
RegistrationFactory,
RegistrationProviderFactory,
UserFactory,
UnconfirmedUserFactory,
UnregUserFactory,
RegionFactory,
DraftRegistrationFactory,
)
@mock_app.route('/errorexc')
def error_exc():
    # Test-only route: performs a DB write, then raises an unhandled
    # exception so tests can verify the transaction is rolled back.
    UserFactory()
    raise RuntimeError
@mock_app.route('/error500')
def error500():
    # Test-only route: performs a DB write, then returns a 500 response;
    # used to verify error responses roll back the transaction.
    UserFactory()
    return 'error', 500
@mock_app.route('/noautotransact')
@no_auto_transaction
def no_auto_transact():
    # Same shape as /error500, but with automatic transactions disabled,
    # so the UserFactory() write is expected to persist despite the 500.
    UserFactory()
    return 'error', 500
class TestViewsAreAtomic(OsfTestCase):
    """Verifies that view-level DB writes roll back on error responses,
    using the /error500, /errorexc and /noautotransact test routes."""
    def test_error_response_rolls_back_transaction(self):
        # Baseline user count; /error500 writes a user then returns 500,
        # and that write must be rolled back.
        original_user_count = OSFUser.objects.count()
        self.app.get('/error500', expect_errors=True)
        assert_equal(OSFUser.objects.count(), original_user_count)
        # Need to set debug = False in order to rollback transactions in transaction_teardown_request
        mock_app.debug = False
        try:
            self.app.get('/errorexc', expect_errors=True)
        except RuntimeError:
            pass
        mock_app.debug = True
        # With @no_auto_transaction the write persists even on a 500.
        self.app.get('/noautotransact', expect_errors=True)
        assert_equal(OSFUser.objects.count(), original_user_count + 1)
@pytest.mark.enable_bookmark_creation
class TestViewingProjectWithPrivateLink(OsfTestCase):
    def setUp(self):
        """Create a private project reachable through a fresh private link.
        self.user is deliberately NOT a contributor on the project."""
        super(TestViewingProjectWithPrivateLink, self).setUp()
        self.user = AuthUserFactory()  # Is NOT a contributor
        self.project = ProjectFactory(is_public=False)
        self.link = PrivateLinkFactory()
        self.link.nodes.add(self.project)
        self.link.save()
        self.project_url = self.project.web_url_for('view_project')
def test_edit_private_link_empty(self):
node = ProjectFactory(creator=self.user)
link = PrivateLinkFactory()
link.nodes.add(node)
link.save()
url = node.api_url_for('project_private_link_edit')
res = self.app.put_json(url, {'pk': link._id, 'value': ''}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('Title cannot be blank', res.body.decode())
    def test_edit_private_link_invalid(self):
        """Renaming a private link to markup ('<a></a>') is rejected as an
        invalid name with a 400 response."""
        node = ProjectFactory(creator=self.user)
        link = PrivateLinkFactory()
        link.nodes.add(node)
        link.save()
        url = node.api_url_for('project_private_link_edit')
        res = self.app.put_json(url, {'pk': link._id, 'value': '<a></a>'}, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_in('Invalid link name.', res.body.decode())
    @mock.patch('framework.auth.core.Auth.private_link')
    def test_can_be_anonymous_for_public_project(self, mock_property):
        """An anonymous private link keeps the viewer anonymous even after
        the project is made public."""
        # NOTE(review): this *calls* return_value rather than assigning it
        # (i.e. `mock_property.return_value = ...`); looks unintentional --
        # confirm the mock is configured as intended.
        mock_property.return_value(mock.MagicMock())
        mock_property.anonymous = True
        anonymous_link = PrivateLinkFactory(anonymous=True)
        anonymous_link.nodes.add(self.project)
        anonymous_link.save()
        self.project.set_privacy('public')
        self.project.save()
        self.project.reload()
        auth = Auth(user=self.user, private_key=anonymous_link.key)
        assert_true(has_anonymous_link(self.project, auth))
    def test_has_private_link_key(self):
        # A valid view-only key grants read access without authentication.
        res = self.app.get(self.project_url, {'view_only': self.link.key})
        assert_equal(res.status_code, 200)
    def test_not_logged_in_no_key(self):
        # Without a key, anonymous visitors are redirected to /login.
        res = self.app.get(self.project_url, {'view_only': None})
        assert_is_redirect(res)
        res = res.follow(expect_errors=True)
        assert_equal(res.status_code, 308)
        assert_equal(
            res.request.path,
            '/login'
        )
    def test_logged_in_no_private_key(self):
        # A logged-in non-contributor without a key gets 403, not a redirect.
        res = self.app.get(self.project_url, {'view_only': None}, auth=self.user.auth,
                           expect_errors=True)
        assert_equal(res.status_code, http_status.HTTP_403_FORBIDDEN)
    def test_logged_in_has_key(self):
        # A valid key also works for authenticated non-contributors.
        res = self.app.get(
            self.project_url, {'view_only': self.link.key}, auth=self.user.auth)
        assert_equal(res.status_code, 200)
    @unittest.skip('Skipping for now until we find a way to mock/set the referrer')
    def test_prepare_private_key(self):
        # Navigating within a keyed session should carry the key through
        # redirects; skipped until the referrer can be mocked.
        res = self.app.get(self.project_url, {'key': self.link.key})
        res = res.click('Registrations')
        assert_is_redirect(res)
        res = res.follow()
        assert_equal(res.status_code, 200)
        assert_equal(res.request.GET['key'], self.link.key)
def test_cannot_access_registrations_or_forks_with_anon_key(self):
anonymous_link = PrivateLinkFactory(anonymous=True)
anonymous_link.nodes.add(self.project)
anonymous_link.save()
self.project.is_public = False
self.project.save()
url |
LuyaoHuang/patchwatcher | patchwatcher2/patchwatcher2/wsgi.py | Python | lgpl-3.0 | 403 | 0 | """
| WSGI config for patchwatcher2 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project's settings module before creating the app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "patchwatcher2.settings")
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
|
quarckster/cfme_tests | cfme/utils/virtual_machines.py | Python | gpl-2.0 | 3,708 | 0.004315 | """Helper functions related to the creation and destruction of virtual machines and instances
"""
import pytest
from cfme.utils.providers import get_crud
from fixtures.pytest_store import store
from novaclient.exceptions import OverLimit as OSOverLimit
from ovirtsdk.infrastructure.errors import RequestError as RHEVRequestError
from ssl import SSLError
from cfme.utils.log import logger
from cfme.utils.mgmt_system import exceptions
def _vm_cleanup(mgmt, vm_name):
    """Best-effort teardown of a VM/instance.

    Every step is logged; failures are swallowed so cleanup never masks
    the original provisioning error in the caller.
    """
    try:
        status = mgmt.vm_status(vm_name)
    except Exception as exc:
        logger.error(
            "Could not retrieve VM/Instance status: %s: %s", type(exc).__name__, str(exc))
    else:
        logger.info("VM/Instance status: %s", status)
    logger.info('Attempting cleanup on VM/instance %s', vm_name)
    try:
        if not mgmt.does_vm_exist(vm_name):
            return
        # Stop the vm first
        logger.warning('Destroying VM/instance %s', vm_name)
        if mgmt.delete_vm(vm_name):
            logger.info('VM/instance %s destroyed', vm_name)
        else:
            logger.error('Error destroying VM/instance %s', vm_name)
    except Exception as exc:
        logger.error(
            'Could not destroy VM/instance %s (%s: %s)', vm_name, type(exc).__name__, str(exc))
def deploy_template(provider_key, vm_name, template_name=None, timeout=900, **deploy_args):
    """
    Deploy a VM/instance from a template on the given provider.

    Args:
        provider_key: Provider key on which the VM is to be created
        vm_name: Name of the VM to be deployed
        template_name: Name of the template that the VM is deployed from
        timeout: the timeout for template deploy

    Keyword Args:
        allow_skip: 'default', an iterable of exception classes, or a
            mapping of exception class -> callable(exc). Matching provider
            errors cause a pytest.skip instead of a test failure.

    Returns:
        The provisioned VM name (instance ID in the case of EC2).
    """
    allow_skip = deploy_args.pop("allow_skip", ())
    if isinstance(allow_skip, dict):
        # Mapping form: keys are skippable exception classes, values are
        # predicates that can veto the skip per-exception.
        skip_exceptions = allow_skip.keys()
        callable_mapping = allow_skip
    elif isinstance(allow_skip, basestring) and allow_skip.lower() == "default":
        # Python 2 code path: `basestring` covers str and unicode.
        skip_exceptions = (OSOverLimit, RHEVRequestError, exceptions.VMInstanceNotCloned, SSLError)
        callable_mapping = {}
    else:
        skip_exceptions = allow_skip
        callable_mapping = {}
    provider_crud = get_crud(provider_key)
    deploy_args.update(vm_name=vm_name)
    if template_name is None:
        # Fall back to the provider's configured small_template.
        try:
            deploy_args.update(template=provider_crud.data['templates']['small_template']['name'])
        except KeyError:
            raise KeyError('small_template not defined for Provider {} in cfme_data.yaml'
                           .format(provider_key))
    else:
        deploy_args.update(template=template_name)
    deploy_args.update(provider_crud.deployment_helper(deploy_args))
    logger.info("Getting ready to deploy VM/instance %s from template %s on provider %s",
                vm_name, deploy_args['template'], provider_crud.data['name'])
    try:
        try:
            logger.debug("Deploy args: %s", deploy_args)
            vm_name = provider_crud.mgmt.deploy_template(timeout=timeout, **deploy_args)
            logger.info("Provisioned VM/instance %s", vm_name)  # instance ID in case of EC2
        except Exception as e:
            # On any provisioning failure: log, best-effort cleanup, then
            # re-raise so the outer handler can decide whether to skip.
            logger.error('Could not provisioning VM/instance %s (%s: %s)',
                         vm_name, type(e).__name__, str(e))
            _vm_cleanup(provider_crud.mgmt, vm_name)
            raise
    except skip_exceptions as e:
        e_c = type(e)
        # A mapping predicate returning falsy vetoes the skip.
        if e_c in callable_mapping and not callable_mapping[e_c](e):
            raise
        # Make it visible also in the log.
        store.write_line(
            "Skipping due to a provider error: {}: {}\n".format(e_c.__name__, str(e)), purple=True)
        logger.exception(e)
        pytest.skip("{}: {}".format(e_c.__name__, str(e)))
    return vm_name
|
google/neural-light-transport | data_gen/util.py | Python | apache-2.0 | 2,045 | 0.000489 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
from os import makedirs
from os.path import exists, dirname, basename
import json
import numpy as np
import cv2
def load_json(json_path):
    """Read ``json_path`` and return the deserialized JSON object."""
    with open(json_path, 'r') as h:
        return json.load(h)
def dump_json(data, path):
    """Pretty dump.

    Serializes ``data`` to ``path`` as indented, key-sorted JSON, creating
    missing parent directories first.
    """
    dir_ = dirname(path)
    # Guard on a non-empty dirname: for a bare filename like 'out.json',
    # dirname() returns '' and makedirs('') would raise.
    if dir_ and not exists(dir_):
        makedirs(dir_)
    with open(path, 'w') as h:
        json.dump(data, h, indent=4, sort_keys=True)
def safe_cast_to_int(float_):
    """Cast ``float_`` to int, asserting no fractional part is lost."""
    as_int = int(float_)
    assert as_int == float_, "Failed to safely cast %f to integer" % float_
    return as_int
def remap(src, mapping, force_kbg=True):
    """Warp ``src`` according to a normalized per-pixel (x, y) ``mapping``.

    ``mapping`` holds source coordinates in [0, 1]; they are scaled back
    to pixel units before the bilinear lookup via cv2.remap.
    """
    height, width = src.shape[:2]
    map_x = (mapping[:, :, 0] * width).astype(np.float32)
    map_y = (mapping[:, :, 1] * height).astype(np.float32)
    src_ = deepcopy(src)
    if force_kbg:
        # Set left-top corner (where background takes colors from) to black
        src_[0, 0, ...] = 0
    return cv2.remap(src_, map_x, map_y, cv2.INTER_LINEAR)
def add_b_ch(img_rg):
    """Append an all-zero blue channel to an HxWx2 red-green image."""
    assert img_rg.ndim == 3 and img_rg.shape[2] == 2, "Input should be HxWx2"
    blue = np.zeros_like(img_rg)[:, :, :1]
    return np.dstack((img_rg, blue))
def save_float16_npy(data, path):
    """Save ``data`` to ``path`` as a half-precision .npy file.

    Using float16 halves the on-disk size for faster IO during training.
    """
    half = data.astype(np.float16)
    np.save(path, half)
def name_from_json_path(json_path):
    """Return the file name of ``json_path`` minus its '.json' suffix."""
    file_name = basename(json_path)
    return file_name[:-len('.json')]
|
henryfjordan/incident-commander | templates/responses.py | Python | mit | 3,231 | 0.001243 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from jinja2 import Template
CREATE_INCIDENT = Template("""
*INCIDENT CREATED!*
Thi | s incident is now being tracked and documented, but we still need your help!
When you have a moment, please use these commands to provide more information:
```
@commander set title <title>
@commander set reporter <@reporter>
@commander set severity <1-3>
@commander set description <description>
| ```
""")
CREATE_INCIDENT_FAILED = Template("Hey, did you forget to include an application name?") # noqa
NAG = Template("Be a dear and run `@commander set {{key}} <value>`")
SET = Template("Set *{{field}}* to `{{value}}`")
GET = Template("*{{field}}*: {{value}}")
GET_LIST = Template("""
*{{field}}:*
>>> {% for val in value -%}
{{loop.index}}. {% if val is iterable -%}
{% if val.removed is sameas false %} {{val.text}}
{%- else %} ~{{val.text}}~ {% endif %} (<@{{val.user}}>)
{% else -%}
{{val}}
{% endif %}
{%- endfor %}
""")
SUMMARY = Template("""
Summary of *{{name}}* incident:
_Status_: *{{status}}*
_Severity_: *{{severity}}*
_Started_: *{{start_date}}*
{% if status == "resolved" %}
_Ended_: *{{resolved_date}}*
{% endif %}
_Description_:
``` {{description}} ```
{% if symptom %}
_Symptoms_:
```
>>> {% for val in symptom -%}
{{loop.index}}. {% if val is iterable -%}
{% if val.removed is sameas false %} {{val.text}}
{%- else %} ~{{val.text}}~ {% endif %} (<@{{val.user}}>)
{% else -%}
{{val}}
{% endif %}
{%- endfor %}
```
{% endif %}
""")
NEW_CHANNEL_MESSAGE = Template("""
*When an incident occurs, follow these steps:*
1. Email incident-response@zefr.com to alert about SEV
2. Designate an Incident Commander (IC) for this incident. This is the leader of the response effort, and is responsible for managing the response.
3. Update information about the incident in this channel, using @commander commands.
4. Send an internal sitrep to this channel using @commander set status Status message
5. Assess the problem.
6. Mitigate the problem.
*Full Incident Response Instructions:* https://zefrinc.atlassian.net/wiki/display/ST/Incident+Response+Instructions
""")
SET_SEVERITY_PROMPT = Template("""
*Set incident severity!*
This incident is now being tracked and documented, but we still need your help!
*The available severity levels are:*
• S1 (critical)
• S2 (major)
• S3 (minor)
Consider the following in setting severity:
• App is down and teams can no longer function
• Business cost
• SLA (Service Level Agreement) in danger of violation
• Technical impact
To set severity, use the command:
```
@commander set severity <1-3>
```
""")
def renderField(field, value):
    """Render one incident field to Slack-formatted text.

    Dispatch order: field-specific template first, then the generic list
    template for list values, then the plain key/value template.
    """
    # NOTE(review): RENDER_DATE, RENDER_HYPOTHESIS and RENDER_COMPLEX_LIST
    # are not defined in this module as shown -- confirm they are defined
    # or imported elsewhere before relying on these branches.
    customRenderers = {
        'start_date': RENDER_DATE,
        'resolved_date': RENDER_DATE,
        'hypothesis': RENDER_HYPOTHESIS,
        'comment': RENDER_COMPLEX_LIST,
        'step': RENDER_COMPLEX_LIST
    }
    if field in customRenderers:
        return customRenderers[field].render(field=field, value=value)
    elif isinstance(value, list):
        return GET_LIST.render(field=field, value=value)
    else:
        return GET.render(field=field, value=value)
|
Hellrungj/CSC-412-Networking | rpc-project/venv/lib/python2.7/site-packages/plumbum/colors.py | Python | gpl-3.0 | 646 | 0.003096 | """
This module imitates a real module, providing standard syntax
like from `plumbum.colors` and from `plumbum.colors.bg` to work alongside
all the standard syntax for colors.
"""
from __future__ import print_function
import sys
import os
import atexit
from plumbum.colorlib import ansicolors, main
_reset = ansicolors.reset.now
if __name__ == '__main__':
    main()
else: # Don't register an exit if this is called using -m!
    # Reset terminal colors when the interpreter exits normally.
    atexit.register(_reset)
# Oddly, the order here matters for Python2, but not Python3
# Register fg/bg as importable submodules, then replace this module
# itself with the ansicolors object so attribute access and
# `from plumbum.colors import ...` both resolve against ansicolors.
sys.modules[__name__ + '.fg'] = ansicolors.fg
sys.modules[__name__ + '.bg'] = ansicolors.bg
sys.modules[__name__] = ansicolors
|
google-research/robel | robel/components/tracking/virtual_reality/client.py | Python | apache-2.0 | 3,972 | 0 | # Copyright 2019 The ROBEL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client to communicate with a VR device using OpenVR.
Example usage:
>>> client = VrClient()
>>> client.set_devices({'tracker': 1})
"""
from typing import List, Union
import openvr
from robel.components.tracking.virtual_reality.device import VrDevice
from robel.components.tracking.virtual_reality.poses import VrPoseBatch
class VrClient:
    """Communicates with a VR device through the OpenVR runtime.

    Discovered tracked devices are cached and addressable either by their
    OpenVR device index or by their serial number.
    """
    def __init__(self):
        # Lookup tables are populated lazily by `discover_devices`.
        self._vr_system = None
        self._devices = []
        self._device_serial_lookup = {}
        self._device_index_lookup = {}
        self._last_pose_batch = None
        # NOTE(review): `_plot` is read in `get_poses` but never assigned a
        # non-None value in this class -- confirm it is set externally.
        self._plot = None
        # Attempt to start OpenVR.
        if not openvr.isRuntimeInstalled():
            raise OSError('OpenVR runtime not installed.')
        self._vr_system = openvr.init(openvr.VRApplication_Other)
    def close(self):
        """Cleans up any resources used by the client."""
        # Guarded so repeated close() calls (e.g. __exit__ then __del__)
        # only shut down OpenVR once.
        if self._vr_system is not None:
            openvr.shutdown()
            self._vr_system = None
    def get_device(self, identifier: Union[int, str]) -> VrDevice:
        """Returns the device with the given name.

        Raises:
            ValueError: if no connected device matches `identifier` even
                after a fresh discovery pass.
        """
        identifier = str(identifier)
        # Fast path: serve from the cached lookups.
        if identifier in self._device_index_lookup:
            return self._device_index_lookup[identifier]
        if identifier in self._device_serial_lookup:
            return self._device_serial_lookup[identifier]
        # Cache miss: rescan connected devices, then retry once.
        self.discover_devices()
        if (identifier not in self._device_index_lookup
                and identifier not in self._device_serial_lookup):
            raise ValueError(
                'Could not find device with name or index: {} (Available: {})'
                .format(identifier, sorted(self._device_serial_lookup.keys())))
        if identifier in self._device_index_lookup:
            return self._device_index_lookup[identifier]
        return self._device_serial_lookup[identifier]
    def discover_devices(self) -> List[VrDevice]:
        """Returns and caches all connected devices."""
        self._device_index_lookup.clear()
        self._device_serial_lookup.clear()
        devices = []
        for device_index in range(openvr.k_unMaxTrackedDeviceCount):
            device = VrDevice(self._vr_system, device_index)
            if not device.is_connected():
                continue
            devices.append(device)
            # Index keys are stored as strings so get_device can treat
            # indices and serial numbers uniformly.
            self._device_index_lookup[str(device.index)] = device
            self._device_serial_lookup[device.get_serial()] = device
        self._devices = devices
        return devices
    def get_poses(self, time_from_now: float = 0.0,
                  update_plot: bool = True) -> VrPoseBatch:
        """Returns a batch of poses that can be queried per device.

        Args:
            time_from_now: The seconds into the future to read poses.
            update_plot: If True, updates an existing plot.
        """
        pose_batch = VrPoseBatch(self._vr_system, time_from_now)
        self._last_pose_batch = pose_batch
        if update_plot and self._plot and self._plot.is_open:
            self._plot.refresh()
        return pose_batch
    def __enter__(self):
        """Enables use as a context manager."""
        return self
    def __exit__(self, *args):
        """Enables use as a context manager."""
        self.close()
    def __del__(self):
        """Automatically disconnect on destruction."""
        self.close()
|
marklocklear/Online-Student-Profile | deploy/osp_settings.py | Python | lgpl-3.0 | 5,374 | 0.000186 | from osp.conf.settings import *
# Unique key used for salting passwords
SECRET_KEY = 'Chac-8#haCa_Ra-e?-e+ucrur=gEFRasejayasaC?meMe!AC-a'
# DEBUG should be False in production, True in development
DEBUG = False
# List of administrators who should receive error reports
ADMINS = (
('John Smith', 'john.smith@example.edu'),
('Francis Drake', 'francis.drake@example.edu'),
)
MANAGERS = ADMINS
# List of developers who receive email messages in debug mode
DEBUG_USERS = ADMINS
# MySQL database configuration settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'osp',
'USER': 'osp',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Server time zone
TIME_ZONE = 'America/New_York'
# Used if you are hosting OSP off the top level (e.g. http://example.edu/osp/)
URL_PREFIX = ''
# The URL path at which media is being served
MEDIA_URL = URL_PREFIX + '/media/'
# The URL path at which admin media is being served
ADMIN_MEDIA_PREFIX = URL_PREFIX + '/media/admin/'
# Uncomment the following lines if you are using the LDAP backend
#
# import ldap
# from django_auth_ldap.config import LDAPSearch
#
# AUTHENTICATION_BACKENDS = [
# 'django_auth_ldap.backend.LDAPBackend',
# 'django.contrib.auth.backends.ModelBackend',
# ]
# AUTH_LDAP_SERVER_URI = 'ldap://ldap.example.edu'
# AUTH_LDAP_BIND_DN = 'service_user'
# AUTH_LDAP_BIND_PASSWORD = 'service_password'
# AUTH_LDAP_USER_SEARCH = LDAPSearch('ou=Users,dc=example,dc=edu',
# ldap.SCOPE_SUBTREE,
# '(uid=%(user)s)')
# AUTH_LDAP_USER_ATTR_MAP = {
# 'first_name': 'givenName',
# 'last_name': 'sn',
# 'email': 'mail'
# }
# LOGIN_REDIRECT_URL = URL_PREFIX + '/'
# Uncomment the following lines if you are using the CAS backend
#
# AUTHENTICATION_BACKENDS = [
# 'django.contrib.auth.backends.ModelBackend',
# 'django_cas.backends.CASBackend',
# ]
# MIDDLEWARE_CLASSES.append('django_cas.middleware.CASMiddleware')
# CAS_VERSION = '1'
# CAS_SERVER_URL = 'https://cas.example.edu'
# CAS_IGNORE_REFERER = True
# CAS_REDIRECT_URL = URL_PREFIX + '/'
# The URL paths for login and logout pages
LOGIN_URL = URL_PREFIX + '/login/'
LOGOUT_URL = URL_PREFIX + '/logout/'
# SMTP mail server configuration settings
EMAIL_HOST = 'smtp.example.edu'
EMAIL_PORT = 25
EMAIL_HOST_USER = 'service_user'
EMAIL_HOST_PASSWORD = 'service_password'
# List of IP addresses for hosts allowed to push data to the API
API_ALLOWED_HOSTS = []
# Authorization key for pushing data to the API
API_KEY = ''
# Email address that intervention requests are sent to
INTERVENTIONS_EMAIL = 'interventions@example.edu'
# "From" email address for the application
SERVER_EMAIL = 'osp@example.edu'
DEFAULT_FROM_EMAIL = SERVER_EMAIL
# All potential term choices that could be received by the API
TERM_CHOICES = [
('fa', 'Fall'),
('sp', 'Spring'),
('su', 'Summer'),
]
# Current year and term
CURRENT_TERM = 'su'
CURRENT_YEAR = 2011
# All potential enrollment status choices that could be received by the API
ENROLLMENT_STATUS_CHOICES = [
('A', 'Active'),
('D', 'Dropped'),
('W', 'Withdrawn'),
('X', 'Deleted'),
('C', 'Cancelled'),
('NP', 'Non-payment'),
]
# Enrollment statuses which are considered "active"
ACTIVE_ENROLLMENT_STATUSES = ['A',]
# List of campuses for your school
CAMPUS_CHOICES = [
'Main',
'Uptown',
]
# List of contact types for visits
VISIT_CONTACT_TYPE_CHOICES = [
'In Person',
'Email',
'Telephone',
'Online',
'Group Session',
]
# List of reasons for visits
VISIT_REASON_CHOICES = [
'New Student Admission',
'Academic Advising',
'Counseling',
'Personal Counseling',
'Early Alert Referral',
'Graduation Assessment Review',
'Career Counseling',
'Workshops, Class Presentations',
'Early Alert Counseling',
'Disability Counseling',
'Faculty Advising',
'Academic Warning',
'Academic Probation',
'First Academic Suspension',
'Final Academic Suspension',
]
# List of departments for visits
VISIT_DEPARTMENT_CHOICES = [
'Advising',
'Counseling',
]
# List of Career Services outcomes for visits
VISIT_CAREER_SERVICES_OUTCOME_CHOICES = [
'No Contact',
'Email',
'Phone',
'Sc | heduled Appointment with Career Services',
'No Show for Appointment',
'Took Career Assessment(s)',
'Met with Career Counselor',
'Career Decision in Process',
'Career and Program Decision Completed',
'Referred for Program Update',
'Program Updated',
]
# List of intervention reasons
INTERVENTION_REASONS = [
'Excessive Tardiness/Absenteeism',
'Failing Test/Quiz Scores',
'Missing Assignments',
'Needs Persona | l or Social Counseling',
'Needs Career Exploration',
'Needs Tutoring',
]
# Re-structure the choices lists for Django's sake
CAMPUS_CHOICES = [(x, x) for x in CAMPUS_CHOICES]
VISIT_CONTACT_TYPE_CHOICES = [(x, x) for x in VISIT_CONTACT_TYPE_CHOICES]
VISIT_REASON_CHOICES = [(x, x) for x in VISIT_REASON_CHOICES]
VISIT_DEPARTMENT_CHOICES = [(x, x) for x in VISIT_DEPARTMENT_CHOICES]
VISIT_CAREER_SERVICES_OUTCOME_CHOICES = [(x, x) for x in
VISIT_CAREER_SERVICES_OUTCOME_CHOICES]
INTERVENTION_REASONS = [(x, x) for x in INTERVENTION_REASONS]
|
showerst/openstates | openstates/ma/actions.py | Python | gpl-3.0 | 4,155 | 0.002407 | import re
from billy.scrape.actions import Rule, BaseCategorizer
# These are regex patterns that map to action categories.
_categorizer_rules = (
Rule([u'Amendment #\\S+ \\((?P<legislator>.+?)\\) bundle YES adopted'],
[u'amendment:passed']),
Rule([u'(?i)Signed by (the )Governor(.*)'], [u'governor:signed']),
Rule([u'Accompanied (by )?(?P<bill_id>[SH]\\S+)'], []),
Rule([u'Discharged to the committee on (?P<committees>.+)'],
[u'committee:referred']),
Rule([u'(?i)Amendment #\\d+ adopted'], [u'amendment:passed']),
Rule([u'Amendment #\\d+ \\((?P<legislator>.+?)\\) rejected',
u'(?i)amendment.+?rejected'],
[u'amendment:failed']),
Rule([u'(?is)Amendment \\S+ withdrawn'], [u'amendment:withdrawn']),
Rule([u'Amendment #\\S+ \\((?P<legislator>.+?)\\) Pending'],
[u'amendment:introduced']),
Rule([u'(?P<bill>[HS]\\d+)'], []),
Rule([u'(?i)Amendment \\(#\\d+\\) adopted'], [u'amendment:passed']),
Rule([u'(?i)with veto'], [u'governor:vetoed']),
Rule([u'reported favorably by committee'], [u'committee:passed:favorable']),
Rule([u'Accompan\\S+ .+?(?P<bill_id>[SH]\\S+)'], []),
Rule([u'(?i)Amendment \\d+ pending'], [u'amendment:tabled']),
Rule([u'Read,'], [u'bill:reading:1']),
Rule([u'(?i)Amendment #\ | \S+ \\((?P<legislator>.+?)\\)\\s+-\\s+rejected',
u'(?i)Amendment \\d+ rejected',
u'Amendment #?\\S+ \\((?P<legislator>.+ | ?)\\) rejected'],
[u'amendment:failed']),
Rule([u'Amended \\((?P<legislator>.+?)\\) ',
u'Amendment #?\\S+ \\((?P<legislator>.+?)\\) adopted'],
[u'amendment:passed']),
Rule([u'(?i)read.{,10}second'], [u'bill:reading:2']),
Rule([u'Amendment #\\d+ \\((?P<legislator>.+?)\\) pending'],
[u'amendment:introduced']),
Rule([u'Enacted'], [u'bill:passed']),
Rule([u'Amendment #\\S+ \\((?P<legislator>.+?)\\) Adopted',
u'Accompanied a study order, (see )?(?P<bill_id>[SH]\\S+)'],
[]),
Rule([u'passed over veto'], [u'bill:veto_override:passed']),
Rule([u'(?i)Read third'], [u'bill:reading:3']),
Rule([u'Bill Filed'], [u'bill:introduced']),
Rule([u'(?i)Amendment #\\S+ rejected'], [u'amendment:failed']),
Rule([u'laid aside'], [u'amendment:tabled']),
Rule([u'(?i)Amendment \\(#\\d+\\) rejected'], [u'amendment:failed']),
Rule([u'(?i)amendment.+?adopted'], [u'amendment:passed']),
Rule([u'Adopted, (see )?(?P<bill_id>[SH]\\S+)'], []),
Rule([u'(?is)Amendment \\(\\d+\\) rejected'], [u'amendment:failed']),
Rule([u'(?P<yes_votes>\\d+) YEAS.+?(?P<no_votes>\\d+) NAYS'], []),
Rule([u'Passed to be engrossed'], [u'bill:passed']),
Rule([u'Amendment #\\d+ \\((?P<legislator>.+?)\\) adopted'],
[u'amendment:passed']),
Rule([u'Amendment #\\S+ \\((?P<legislator>.+?)\\) Rejected'],
[u'amendment:failed']),
Rule([u'referred to (?P<committees>.+)'], []),
Rule([u'Amended by'], [u'amendment:passed']),
Rule(['Committee recommended ought to pass'], ['committee:passed:favorable']),
Rule([u'Amendment #\\S+ \\((?P<legislator>.+?)\\) bundle NO rejected'],
[u'amendment:failed']),
Rule([u'(?is)Amendment \\(\\d+\\) adopted'], [u'amendment:passed']),
Rule([u'(?i)(Referred|Recommittedra) to (?P<committees>committee on.+)'],
[u'committee:referred']),
Rule([u'Accompanied a new draft, (see )?(?P<bill_id>[SH]\\S+)'], []),
Rule([u'(?i)Amendment #\\S+ \\((?P<legislator>.+?)\\) bundle NO rejected'],
[u'amendment:failed']),
Rule([u'(?i)(Referred|Recommittedra) to (?P<chamber>\\S+) (?P<committees>committee on.+)'],
[u'committee:referred']),
Rule(['Committee recommended ought NOT'], ['committee:passed:unfavorable']),
Rule([u'(?i)(Referred|Recommittedra) (to|from)( the)? (?P<chamber>\\S+) (?P<committees>committee on.+)'],
[u'committee:referred']),
Rule([u'(?i)Amendment #\\d+ rejected'], [u'amendment:failed']),
Rule([u'(?i)Amendment \\d+ adopted'], [u'amendment:passed']),
Rule([u'Committee of Conference appointed \\((?P<legislator>.+?)\\)'], [])
)
class Categorizer(BaseCategorizer):
    """Massachusetts bill-action categorizer: applies the regex rules
    defined in ``_categorizer_rules`` via the BaseCategorizer machinery."""
    rules = _categorizer_rules
|
seem-sky/FrameworkBenchmarks | pyramid/setup_benchmark.py | Python | bsd-3-clause | 708 | 0.001412 | import subprocess
import setup_ut | il
import multiprocessing
import os
home = os.path.expanduser('~')
bin_dir = os.path.expanduser('~/FrameworkBenchmarks/installs/py3/bin')
NCPU = multiprocessing.cpu_count()
proc = None
def start(args):
    """Point the benchmark models at the test database host, then launch
    gunicorn serving the Pyramid wsgi app on port 6543.

    Returns 0 per the FrameworkBenchmarks setup convention.
    """
    global proc
    setup_util.replace_text(
        "frameworkbenchmarks/models.py",
        "DBHOSTNAME = 'localhost'",
        "DBHOSTNAME = '%s'" % args.database_host
    )
    # 3 workers per CPU is this benchmark's chosen gunicorn sizing.
    proc = subprocess.Popen([
        bin_dir + '/gunicorn',
        'wsgi:app',
        '-b', "0.0.0.0:6543",
        '-w', str(NCPU*3)],
        cwd='pyramid'
    )
    return 0
def stop():
    """Terminate the gunicorn master process started by start(), if any.

    Returns 0 per the FrameworkBenchmarks setup convention.
    """
    global proc
    if proc is None:
        return 0
    proc.terminate()
    proc.wait()
    return 0
|
lucacasagrande/qgis2web | olLayerScripts.py | Python | gpl-2.0 | 21,275 | 0.000141 | import re
import traceback
import os
import codecs
from urlparse import parse_qs
from PyQt4.QtCore import QCoreApplication
from qgis.core import (QgsRenderContext,
QgsSingleSymbolRendererV2,
QgsCategorizedSymbolRendererV2,
QgsGraduatedSymbolRendererV2,
QgsHeatmapRenderer,
QgsCoordinateReferenceSystem,
QgsCoordinateTransform,
QgsMessageLog)
from utils import safeName, is25d, BLEND_MODES
from basemaps import basemapOL
def writeLayersAndGroups(layers, groups, visible, folder, popup,
settings, json, matchCRS, clustered, iface,
restrictToExtent, extent):
canvas = iface.mapCanvas()
basemapList = settings["Appearance"]["Base layer"]
basemaps = [basemapOL()[item] for _, item in enumerate(basemapList)]
if len(basemapList) > 1:
baseGroup = "Base maps"
else:
baseGroup = ""
baseLayer = """var baseLayer = new ol.layer.Group({
'title': '%s',
layers: [%s\n]
});""" % (baseGroup, ','.join(basemaps))
layerVars = ""
layer_names_id = {}
for count, (layer, encode2json, cluster) in enumerate(zip(layers, json,
clustered)):
layer_names_id[layer.id()] = str(count)
try:
if is25d(layer, canvas, restrictToExtent, extent):
pass
else:
layerVars += "\n".join([layerToJavascript(iface, layer,
encode2json,
matchCRS, cluster,
restrictToExtent,
extent, count)])
except:
layerVars += "\n".join([layerToJavascript(iface, layer,
encode2json, matchCRS,
cluster,
restrictToExtent,
extent, count)])
groupVars = ""
groupedLayers = {}
for group, groupLayers in groups.iteritems():
groupVars += ('''var %s = new ol.layer.Group({
layers: [%s],
title: "%s"});\n''' %
("group_" + safeName(group),
",".join(["lyr_" + safeName(layer.name())
+ layer_names_id[layer.id()]
for layer in groupLayers]),
group))
for layer in groupLayers:
groupedLayers[layer.id()] = safeName(group)
mapLayers = ["baseLayer"]
usedGroups = []
osmb = ""
for count, layer in enumerate(layers):
try:
renderer = layer.rendererV2()
if is25d(layer, canvas, restrictToExtent, extent):
shadows = ""
renderer = layer.rendererV2()
renderContext = QgsRenderContext.fromMapSettings(
canvas.mapSettings())
fields = layer.pendingFields()
renderer.startRender(renderContext, fields)
for feat in layer.getFeatures():
if isinstance(renderer, QgsCategorizedSymbolRendererV2):
classAttribute = renderer.classAttribute()
attrValue = feat.attribute(classAttribute)
catIndex = renderer.categoryIndexForValue(attrValue)
categories = renderer.categories()
symbol = categories[catIndex].symbol()
elif isinstance(renderer, QgsGraduatedSymbolRendererV2):
classAttribute = renderer.classAttribute()
attrValue = feat.attribute(classAttribute)
ranges = renderer.ranges()
| for range in ranges:
if | (attrValue >= range.lowerValue() and
attrValue <= range.upperValue()):
symbol = range.symbol().clone()
else:
symbol = renderer.symbolForFeature2(feat,
renderContext)
symbolLayer = symbol.symbolLayer(0)
if not symbolLayer.paintEffect().effectList()[0].enabled():
shadows = "'2015-07-15 10:00:00'"
renderer.stopRender(renderContext)
osmb = """
var osmb = new OSMBuildings(map).date(new Date({shadows}));
osmb.set(geojson_{sln}{count});""".format(shadows=shadows,
sln=safeName(layer.name()),
count=unicode(count))
else:
mapLayers.append("lyr_" + safeName(layer.name()) +
unicode(count))
except:
QgsMessageLog.logMessage(traceback.format_exc(), "qgis2web",
level=QgsMessageLog.CRITICAL)
mapLayers.append("lyr_" + safeName(layer.name()) + unicode(count))
visibility = ""
for layer, v in zip(mapLayers[1:], visible):
visibility += "\n".join(["%s.setVisible(%s);" % (layer,
unicode(v).lower())])
group_list = ["baseLayer"] if len(basemapList) else []
no_group_list = []
for count, layer in enumerate(layers):
try:
if is25d(layer, canvas, restrictToExtent, extent):
pass
else:
if layer.id() in groupedLayers:
groupName = groupedLayers[layer.id()]
if groupName not in usedGroups:
group_list.append("group_" + safeName(groupName))
usedGroups.append(groupName)
else:
no_group_list.append("lyr_" + safeName(layer.name()) +
unicode(count))
except:
if layer.id() in groupedLayers:
groupName = groupedLayers[layer.id()]
if groupName not in usedGroups:
group_list.append("group_" + safeName(groupName))
usedGroups.append(groupName)
else:
no_group_list.append("lyr_" + safeName(layer.name()) +
unicode(count))
layersList = []
for layer in (group_list + no_group_list):
layersList.append(layer)
layersListString = "var layersList = [" + ",".join(layersList) + "];"
fieldAliases = ""
fieldImages = ""
fieldLabels = ""
blend_mode = ""
for count, (layer, labels) in enumerate(zip(layers, popup)):
sln = safeName(layer.name()) + unicode(count)
if layer.type() == layer.VectorLayer and not is25d(layer, canvas,
restrictToExtent,
extent):
fieldList = layer.pendingFields()
aliasFields = ""
imageFields = ""
labelFields = ""
for field, label in zip(labels.keys(), labels.values()):
labelFields += "'%(field)s': '%(label)s', " % (
{"field": field, "label": label})
labelFields = "{%(labelFields)s});\n" % (
{"labelFields": labelFields})
labelFields = "lyr_%(name)s.set('fieldLabels', " % (
{"name": sln}) + labelFields
fieldLabels += labelFields
for f in fieldList:
fieldIndex = fieldList.indexFromName(unicode(f.name()))
aliasFields += "'%(field)s': '%(alias)s', " % (
{"field": f.name(),
"alias": layer.attributeDisplayName(fieldIndex)})
|
sean797/tracer | tests/test_processes.py | Python | gpl-2.0 | 1,199 | 0.021685 | from .__meta__ import *
from tracer.resources.processes import Processes, Process
from tracer.resources.collections import ProcessesCollection
import os
import subprocess
@unittest.skipIf(True, "@TODO Create Mock for Processes class")
class TestProcesses(unittest.TestCase):
def test_children(self):
process = Processes.all()[0]
children = pr | ocess.children()
self.assertIsInstance(children, ProcessesCollection)
for child in children:
self.assertIsInstance(child, Process)
def test_unique_process(self):
process = Proc | ess(os.getpid())
parent = Process(os.getppid())
self.assertIs(process, Process(os.getpid()))
self.assertIs(parent, process.parent())
self.assertIn(process, parent.children())
Process.reset_cache()
process2 = Process(os.getpid())
self.assertEqual(process, process2)
self.assertIsNot(process, process2)
def test_process_caching(self):
process = Process(os.getpid())
# Populate the cache entry for children
process.children()
child = subprocess.Popen(os.sys.executable, stdin=subprocess.PIPE)
self.assertEqual(0, len(process.children()))
process.rebuild_cache()
self.assertEqual(1, len(process.children()))
child.terminate()
|
rbjorklin/lift-meet-manager | lift_meet_manager/urls.py | Python | bsd-2-clause | 342 | 0.011696 | from django | .conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'lift_meet_manager.views.home', name='home'),
# url(r'^blog/' | , include('blog.urls')),
url(r'^lift_tables/', include('lift_tables.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
pk-sam/crosswalk-test-suite | wrt/wrt-security-android-tests/security/permissiontest.py | Python | bsd-3-clause | 3,330 | 0.003604 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Hongjuan, Wang<hongjuanx.wang@intel.com>
import unittest
import os, sys, commands
import comm
class TestSecurityFunctions(unittest.TestCase):
def test_permission_chinese(self):
comm.setUp()
manifestPath = comm.ConstPath + "/../testapp/permission_field_chinese_tests/manifest.json"
cmd = "python %smake_apk.py --package=org.xwalk.example --arch=%s --mode=%s --manifest=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, manifestPath)
packInfo = commands.getstatusoutput(cmd)
self.assertNotEquals(0, packInfo[0])
def test_permission_noapi(self):
comm.setUp()
manifestPath = comm.ConstPath + "/../testapp/permission_field_noapi_tests/manifest.json"
cmd = "python %smake_apk.py --package=org.xwalk.example --arch=%s --mode=%s --manifest=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, manifestPath)
packInfo = commands.getstatusoutput(cmd)
self.assertNotEquals(0, packInfo[0])
def test_permission_null(self):
comm.setUp()
manifestPath = comm.ConstPath + "/../testapp/permission_field_null_tests/manifest.json"
cmd = "python %smake_apk.py --package=org.xwalk.example --arch=%s --mode=%s --manifest=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, manifestPath)
comm.gen_pkg(cmd, self)
def test_permission_splite(self):
comm.setUp()
manifestPath = comm.ConstPath + "/../testapp/permission_field_splite_tests/manifest.json"
cmd | = "python %smake_apk.py --package=org.xwalk.example --arch=%s --mode=%s --manifest=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, manifestPath)
packInfo = commands.getstatusoutput(cmd)
self.assertNotEquals(0, packInfo[0])
if __name__ == '__main__':
unittest.main() | |
isaced/ComSay | views/index.py | Python | mit | 3,226 | 0.027588 | from flask import Blueprint, render_template,request,Response,make_response,session,flash,redirect,url_for
index = Blueprint('index', __name__)
from models.User import User
from models.Company import Company
from models.Comments import Comments
import time,serials
@index.route('/')
def index_list():
list=Company.query.order_by(Company.createTime.desc()).all()
return render_template('index.html',list=list)
@index.route("/company/<string:id>")
def company(id):
company=Company.query.filter_by(id=id).first()
list=Comments.query.filter_by(company_id=id).all()
return render_template("company.html",list=list,company=company)
@index.route("/company/c | omments/submit",methods=["POST" | ])
def commentsSub():
user_id=request.form["user_id"]
company_id=request.form["company_id"]
contents=request.form["contents"]
if user_id==None or company_id==None or contents==None:
abort(404)
user=User.query.filter_by(id=user_id).first()
company=Company.query.filter_by(id=company_id).first()
comments=Comments(contents,time.strftime("%Y-%m-%d %T"),user,company)
Comments.add(comments)
return redirect(url_for("index.company",id=company_id))
@index.route("/logout")
def logout():
session["user"]=None
return render_template("index.html")
@index.route('/login',methods=["GET","POST"])
def login():
if request.method=="GET":
if session.get('user') is None:
return render_template("login.html")
else:
return redirect(url_for("vip.index"))
elif request.method=="POST":
user_name=request.form["username"]
password=request.form["password"]
error=None
if user_name == None or user_name=="":
error="NOT_EXISTS"
return render_template("login.html",error=error)
else:
user=User.query.filter_by(user_name=user_name).first()
if user is None:
error="NOT_EXISTS"
return render_template("login.html",error=error)
elif password==user.password:
_dict=serials.getDict(user)
session["user"]=_dict
return redirect(url_for("vip.index"))
else:
error="WRONG_PASSWORD"
return render_template("login.html",error=error)
else:
abort(404)
@index.route("/register",methods=["GET","POST"])
def register():
if request.method=="GET":
return render_template("register.html")
elif request.method=="POST":
user_name=request.form["username"]
password=request.form["password"]
user=User(user_name,password,time.strftime("%Y-%m-%d %T"),None)
User.add(user)
session["user"]=serials.getDict(user)
return redirect(url_for("vip.index"))
@index.route("/exists",methods=["POST"])
def ifExists():
name=request.form["username"]
resp=None
if name==None or name=="":
resp=make_response("USERNAME_EXISTS")
else:
user=User.query.filter_by(user_name=name).first()
if user is None:
resp=make_response("NOT_EXISTS")
else:
resp=make_response("USERNAME_EXISTS")
return resp |
hnb2/flask-customers | tests/test_customers.py | Python | mit | 8,076 | 0.002105 | '''
To run these tests you need to create a test database, instructions
are inside the README.md
'''
import customers
from customers.utils import db
import unittest
import json
import base64
class CustomersTestCase(unittest.TestCase):
'''
Test the customers application
'''
EMAIL = 'test@test.org'
PASSWORD = 'test'
def setUp(self):
'''
Create a test app and init the database
'''
customers.app.config.from_pyfile('../conf/config_test.cfg')
self.app = customers.app.test_client()
db.create_all()
#Create a customer
self._create_dummy_customer()
def tearDown(self):
'''
Drop the database
'''
db.session.close()
db.drop_all()
def _open(self, url, method, headers=None, data=None):
'''
Wrapper to make a request using the dummy values for
basic auth
'''
#This will raise a 40x if the content type is set and yet
# there is no data to send
content_type = "application/json"
if data is None:
content_type = None
return self.app.open(
path='/customer/%s' % url,
method=method,
data=data,
headers=headers,
content_type=content_type
)
def _open_with_auth(
self,
url,
method,
username,
password,
data=None
):
'''
Wrapper to make a request using Basic auth
'''
return self._open(
url,
method,
data=data,
headers={
'Authorization': 'Basic ' +\
base64.b64encode(
username + ":" + password
)
}
)
def _open_with_dummy_auth(self, url, method, data=None):
'''
Wrapper to make a request using the dummy values for
basic auth
'''
return self._open_with_auth(
url,
method,
self.EMAIL,
self.PASSWORD,
data=data
)
def _create_dummy_customer(self):
'''
Create a dummy customer
'''
data = dict(email=self.EMAIL, password=self.PASSWORD)
resp = self._open(
'register',
'POST',
data=json.dumps(data),
)
json_data = json.loads(resp.get_data())
self.assertEqual(json_data.get('id'), 1)
def test_register(self):
'''
Test the register command
'''
#Without parameters => failure
resp = self._open('register', 'POST')
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
self.assertIsNotNone(json_data['errors'].get('email'))
self.assertIsNotNone(json_data['errors'].get('password'))
#Missing parameters => failure
data = dict(password="test")
resp = self._open(
'register',
'POST',
data=json.dumps(data),
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
self.assertIsNotNone(json_data['errors'].get('email'))
#Email format invalid => failure
data = dict(email="test@", password="test" | )
resp = self._open(
'register',
'POST',
data=json.dumps(dat | a),
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
self.assertIsNotNone(json_data['errors'].get('email'))
#Correct parameters => success
data = dict(email='test2@test.org', password='test2')
resp = self._open(
'register',
'POST',
data=json.dumps(data),
)
json_data = json.loads(resp.get_data())
self.assertIsNone(json_data.get('errors'))
self.assertIsNotNone(json_data.get('id'))
#Same request (same email address) => failure
resp = self._open(
'register',
'POST',
data=json.dumps(data),
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
self.assertIsNotNone(json_data['errors'].get('email'))
def test_get_profile(self):
'''
Test that we can retrieve the profile of the current customer
'''
#Without credentials => failure
resp = self._open_with_auth(
'profile',
'GET',
'',
''
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('error'))
#With credentials => success
resp = self._open_with_dummy_auth(
'profile',
'GET'
)
json_data = json.loads(resp.get_data())
customer = json_data.get('customer')
customer_data = customer.get('data')
self.assertIsNone(json_data.get('error'))
self.assertIsNotNone(customer)
self.assertIsNotNone(customer.get('email'))
self.assertIsNotNone(customer.get('id'))
self.assertIsNotNone(customer.get('data'))
self.assertIsNotNone(customer_data.get('cellphone'))
self.assertIsNotNone(customer_data.get('first_name'))
self.assertIsNotNone(customer_data.get('last_name'))
self.assertIsNotNone(customer_data.get('newsletter'))
self.assertIsNotNone(customer_data.get('created'))
def test_put_profile(self):
'''
Test if we can update the profile of the current customer
'''
#With credentials => success
data = dict(cellphone=123456789)
resp = self._open_with_dummy_auth(
'profile',
'PUT',
data=json.dumps(data)
)
json_data = json.loads(resp.get_data())
customer = json_data.get('customer')
customer_data = customer.get('data')
self.assertIsNone(json_data.get('error'))
self.assertIsNotNone(customer)
self.assertIsNotNone(customer.get('email'))
self.assertIsNotNone(customer.get('id'))
self.assertIsNotNone(customer.get('data'))
self.assertEqual(customer_data.get('cellphone'), '123456789')
self.assertIsNotNone(customer_data.get('first_name'))
self.assertIsNotNone(customer_data.get('last_name'))
self.assertIsNotNone(customer_data.get('newsletter'))
self.assertIsNotNone(customer_data.get('created'))
def test_password(self):
'''
Test if we can change the password of the current customer
'''
#No data => failure
resp = self._open_with_dummy_auth(
'password',
'PATCH'
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
#Old password is not correct => failure
data = dict(old_password='test2', password='test', confirm='test')
resp = self._open_with_dummy_auth(
'password',
'PATCH',
data=json.dumps(data)
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
#new password and confirm do not match => failure
data = dict(old_password='test', password='test2', confirm='test')
resp = self._open_with_dummy_auth(
'password',
'PATCH',
data=json.dumps(data)
)
json_data = json.loads(resp.get_data())
self.assertIsNotNone(json_data.get('errors'))
#old_password password match and new+confirm match => success
data = dict(old_password='test', password='test2', confirm='test2')
resp = self._open_with_dummy_auth(
'password',
'PATCH',
data=json.dumps(data)
)
json_data = json.loads(resp.get_data())
self.assertIsNone(json_data.get('errors'))
if __name__ == '__main__':
unittest.main()
|
slyphon/pants | tests/python/pants_test/backend/jvm/tasks/test_jvm_dependency_usage.py | Python | apache-2.0 | 4,527 | 0.003755 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from collections import defaultdict
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.classpath_products import ClasspathProducts
from pants.backend.jvm.tasks.jvm_dependency_usage import JvmDependencyUsage
from pants.goal.products import MultipleRootedProducts
from pants.util.dirutil import safe_mkdir, touch
from pants_test.tasks.task_test_base import TaskTestBase
class TestJvmDependencyUsage(TaskTestBase):
@classmethod
def task_type(cls):
return JvmDependencyUsage
def _setup(self, target_classfiles):
"""Takes a dict mapping targets to lists of classfiles."""
context = self.context(target_roots=target_classfiles.keys())
# Create classfiles in a target-specific directory, and add it to the classpath for the target.
classpath_products = context.products.get_data('runtime_classpath', ClasspathProducts)
for target, classfiles in target_classfiles.items():
target_dir = os.path.join(self.test_workdir, target.id)
safe_mkdir(target_dir)
for classfile in classfiles:
touch(os.path.join(target_dir, classfile))
classpath_products.add_for_target(target, [('default', target_dir)])
product_deps_by_src = context.products.get_data('product_deps_by_src', dict)
return self.create_task(context), product_deps_by_src
def make_java_target(self, *args, **kwargs):
assert 'target_type' not in kwargs
return self.make_target(target_type=JavaLibrary, *args, **kwargs)
def _cover_output(self, graph):
# coverage of the output code
self.assertNotEqual(graph.to_json(), "")
self.assertNotEqual(graph.to_summary(), "")
def test_simple_dep_usage_graph(self):
t1 = self.make_java_target(spec=':t1', sources=['a.java', 'b.java'])
t2 = self.make_java_target(spec=':t2', sources=['c.java'], dependencies=[t1])
t3 = self.make_java_target(spec=':t3', sources=['d.java', 'e.java'], dependencies=[t1])
self.set_options(size_estimator='filecount')
dep_usage, product_deps_by_src = self._setup({
t1: ['a.class', 'b.class'],
t2: ['c.class'],
t3: ['d.class', 'e.class'],
})
product_deps_by_src[t1] = {}
product_deps_by_src[t2] = {'c.java': ['a.class']}
product_deps_by_src[t3] = {'d.java': ['a.class', 'b.class'],
'e.java': ['a.class', 'b.class']}
graph = dep_usage.create_dep_usage_graph([t1, t2, t3], '')
self.assertEqual(graph._nodes[t1].products_total, 2)
self.assertEqual(graph._nodes[t2].products_total, 1)
self.assertEqual(graph._nodes[t3].products_total, 2)
self.assertEqual(graph._nodes[t1].dep_edges, {})
self.assertEqual(len(graph._nodes[t2].dep_edges[t1].products_used), 1)
self.assertEqual(len(graph._nodes[t3].dep_edges[t1].products_used), 2)
self.assertEqual(graph._trans_cost(t1), 2)
self.assertEqual(graph._trans_c | ost(t2), 3)
self.assertEqual(graph._trans_cost(t3), 4)
self._cover_output(graph)
def test_dep_usage_graph_with_synthetic_targets(self):
t1 = self.make_java_target(spec=':t1', sources=['t1.thrift'])
t1_x = self.make_java_target(spec=':t1.x', derived_from=t1)
t1_y = self.make_java_target(spec=':t1.y', derived_from=t1)
t1_z = self.mak | e_java_target(spec=':t1.z', derived_from=t1)
t2 = self.make_java_target(spec=':t2',
sources=['a.java', 'b.java'],
dependencies=[t1, t1_x, t1_y, t1_z])
self.set_options(size_estimator='nosize')
dep_usage, product_deps_by_src = self._setup({
t1_x: ['x1.class'],
t1_y: ['y1.class'],
t1_z: ['z1.class', 'z2.class', 'z3.class'],
t2: ['a.class', 'b.class'],
})
product_deps_by_src[t1] = {}
product_deps_by_src[t1_x] = {}
product_deps_by_src[t1_y] = {}
product_deps_by_src[t1_z] = {}
product_deps_by_src[t2] = {'a.java': ['x1.class'],
'b.java': ['z1.class', 'z2.class']}
graph = dep_usage.create_dep_usage_graph([t1, t1_x, t1_y, t1_z, t2], '')
self.assertEqual(graph._nodes[t1].products_total, 5)
self.assertEqual(len(graph._nodes[t2].dep_edges[t1].products_used), 3)
self._cover_output(graph)
|
NovaXeros/OpelSqueeze | lib/smartshuffle.py | Python | gpl-2.0 | 859 | 0.044237 | #!/usr/bin/env python
import os.path
import time
import transmission as send
import shelving as db
def first_check():
if os.path.isfile('check.db') == False:
db.prime()
else:
if send.passthru(['mode','?']) == 'play':
db.update_shelf()
else:
perform_start_tasks()
def perform_start_tasks():
last_known_check = db.query('time')
last_known_song = db.query('son | g')
last_known_prog = db.query('prog')
ti | me_without = time.time() - int(last_known_check)
if last_known_song == 'PRIMED':
send.passthru(['randomplay','tracks'])
else:
if (time_without < 600):
send.passthru(['playlist','add',last_known_song])
send.passthru(['play','5'])
send.passthru(['time',last_known_prog])
send.rp_add()
db.update_shelf()
else:
send.passthru(['rescan'])
time.sleep(2)
send.passthru(['randomplay','tracks'])
db.update_shelf() |
OlgaKuratkina/python_training_qa | conftest.py | Python | apache-2.0 | 2,798 | 0.004289 | import pytest
import json
import os.path
import importlib
import jsonpickle
from fixture.application import Application
from fixture.db import DBfixture
from fixture.orm import ORMfixture
fixture = None
target = None
def load_config(file):
global target
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), file)
with open(config_file) as f:
target = json.load(f)
return target
@pytest.fixture
def app(request):
global fixture
browser = request.config.getoption("--browser")
web_config = load_config(request.config.getoption("--target"))["web"]
if fixture is None or not fixture.is_valid():
fixture = Application(browser=browser, base_url=web_config['baseUrl'])
fixture.session.ensure_login(username=web_config['username'], password=web_config['password'])
return fixture
@pytest.fixture(scope="session")
def db(request):
db_config = load_config(request.config.getoption("--target"))["db"]
dbfixture = DBfixture(host=db_config["host"], name=db_config["name"], user=db_config["user"],
password=db_config["password"])
def fin():
dbfixture.destroy()
request.addfinalizer(fin)
return dbfixture
@pytest.fixture(scope="session")
def orm(request):
db_config = load_config(request.config.getoption("--target"))["db"]
orm_fixture = ORMfixture(host=db_config["host"], name=db_config["name"], user=db_config | ["user"],
password=db_config["password"])
return orm_fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.destroy()
request.addfinalizer(fin)
return fixture
@pytest.fixture()
def check | _ui(request):
return request.config.getoption("--check_ui")
def pytest_addoption(parser):
parser.addoption("--browser", action="store", default="firefox")
parser.addoption("--target", action="store", default="target.json")
parser.addoption("--check_ui", action="store_true")
def pytest_generate_tests(metafunc):
for fixture in metafunc.fixturenames:
if fixture.startswith("data_"):
test_data = load_from_module(fixture[5:])
metafunc.parametrize(fixture, test_data, ids=[str(x) for x in test_data])
elif fixture.startswith("json_"):
test_data = load_from_json(fixture[5:])
metafunc.parametrize(fixture, test_data, ids=[str(x) for x in test_data])
def load_from_module(module):
return importlib.import_module("data.%s" % module).test_data
def load_from_json(file):
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
return jsonpickle.decode(f.read())
|
PyRsw/PyRsw | src/Fluxes/SADOURNY_SW.py | Python | mit | 4,319 | 0.015513 | import Differentiation as Diff
import numpy as np
import sys
# Geometry: periodic in x and y
# Arakawa C-grid
#
# | | | |
# h -- u -- h -- u -- h -- u -- h --
# | | | |
# | | | |
# v q v q v q v
# | | | |
# | | | |
# h -- u -- h -- u -- h -- u -- h --
# | | | |
# | | | |
# v q v q v q v
# | | | |
# | | | |
# h -- u -- h -- u -- h -- u -- h --
# | | | |
# | | | |
# v q v q v q v
# | | | |
# | | | |
# h -- u -- h -- u -- h -- u -- h --
#
# If Periodic X Periodic: u,v,h = Nx by Ny
# If Periodic X Walls: u,h: Nx by (Ny+1) and
# v: Nx by Ny
# If Walls X Periodic: v,h: (Nx+1) by Ny and
# u: Nx by Ny
#
# N,S rows:
# -> must advance u,h
# -> Must extend v to compute V_y
# -> If v = 0 then maybe (q*V^x) = 0 too?
# W,E columns:
# -> must advance v,h
# -> Must extend u to compute U_x
# -> If u = 0 then (q*U^y) = 0 too?
# ghost cells:
# u-eqn | : need q*V^x
# v-eqn: need q*U_y
# h-eqn: need U left | and V down
def sadourny_sw_flux(sim):
Nx, Ny, Nz = sim.Nx, sim.Ny, sim.Nz
dx, dy = sim.dx[0], sim.dx[1]
# Loop through each layer and compute the flux
for ii in range(Nz):
# Assign nice names to primary variables
h = sim.soln.h[:,:,ii]
u = sim.soln.u[:,:,ii]
v = sim.soln.v[:,:,ii]
# Compute secondary varibles
U = sim.avx_h(h)*u
V = sim.avy_h(h)*v
B = sim.gs[ii]*h + 0.5*(sim.avx_u(u**2) + sim.avy_v(v**2))
q = (sim.ddx_v(v,dx) - sim.ddy_u(u,dy) + sim.F)/(sim.avy_u(sim.avx_h(h)))
# Flux
#sim.curr_flux.u[:,:,ii] = sim.avy_v(q*sim.avx_v(V)) - sim.ddx_h(B,dx)
#sim.curr_flux.v[:,:,ii] = - sim.avx_u(q*sim.avy_u(U)) - sim.ddy_h(B,dy)
sim.curr_flux.u[:,:,ii] = sim.avy_v(q)*sim.avy_v(sim.avx_v(V)) - sim.ddx_h(B,dx)
sim.curr_flux.v[:,:,ii] = - sim.avx_u(q)*sim.avx_u(sim.avy_u(U)) - sim.ddy_h(B,dy)
sim.curr_flux.h[:,:,ii] = - sim.ddx_u(U,dx) - sim.ddy_v(V,dy)
return
def sadourny_sw_linear_flux(sim):
Nx, Ny, Nz = sim.Nx, sim.Ny, sim.Nz
dx, dy = sim.dx[0], sim.dx[1]
#ddx, ddy = sim.ddx, sim.ddy
#avx, avy = sim.avx, sim.avy
Hs = sim.Hs[0]
# Loop through each layer and compute the flux
for ii in range(sim.Nz):
# Assign nice names to primary variables
h = sim.soln.h[:,:,ii]
u = sim.soln.u[:,:,ii]
v = sim.soln.v[:,:,ii]
# Compute secondary varibles
U = Hs*u
V = Hs*v
q = sim.F/Hs
B = sim.gs[ii]*h
# Flux
#sim.curr_flux.u[:,:,ii] = sim.avy_v(q*sim.avx_v(V)) - sim.ddx_h(B,dx)
#sim.curr_flux.v[:,:,ii] = - sim.avx_u(q*sim.avy_u(U)) - sim.ddy_h(B,dy)
sim.curr_flux.u[:,:,ii] = sim.avy_v(q)*sim.avy_v(sim.avx_v(V)) - sim.ddx_h(B,dx)
sim.curr_flux.v[:,:,ii] = - sim.avx_u(q)*sim.avx_u(sim.avy_u(U)) - sim.ddy_h(B,dy)
sim.curr_flux.h[:,:,ii] = - sim.ddx_u(U,dx) - sim.ddy_v(V,dy)
return
def sadourny_sw(sim):
# FJP: work on BCs
if sim.Nx == 1:
sim.Nkx = 1
else:
if sim.geomx == 'periodic':
sim.Nkx = sim.Nx
elif sim.geomx == 'walls':
sim.Nkx = 2*sim.Nx
if sim.Ny == 1:
sim.Nky = 1
else:
if sim.geomy == 'periodic':
sim.Nky = sim.Ny
elif sim.geomy == 'walls':
sim.Nky = 2*sim.Ny
sim.x_derivs = Diff.SADOURNY_x
sim.y_derivs = Diff.SADOURNY_y
if sim.dynamics == 'Nonlinear':
sim.flux_function = sadourny_sw_flux
elif sim.dynamics == 'Linear':
sim.flux_function = sadourny_sw_linear_flux
else:
print "dynamics must be from the list: Nonlinear, Linear"
sys.exit()
|
madvas/gae-angular-material-starter | main/api/v1/feedback_api.py | Python | mit | 1,108 | 0.00361 | # coding: utf-8
# pylint: disable=too-few-public-methods, no-self-use, missing-docstring, unused-argument
from flask_restful import reqparse, Resource
from flask import abort
from main import API
import task
import config
from api.helpers import ArgumentValidator, make_empty_ok_response
from api.decorators import verify_captcha
from model import UserValidator
@API.resource('/api/v1/feedback')
class FeedbackAPI(Resource):
@verify_captcha('feedbackForm')
def post(self):
"""Sends feedback email to admin"""
if not config.CONFIG_DB.feedback_email:
return abort(418)
parser = reqparse.RequestParser()
parser.add_argument('message', type=ArgumentValidator.create('feedback'), required=True)
parser.add_argument('email', type=UserValidator.create('email', required=False))
args = parser.parse_args()
body = '%s\n\n%s' % (args.message, args.email)
kwargs = {'reply_to': arg | s.email} i | f args.email else {}
task.send_mail_notification('%s...' % body[:48].strip(), body, **kwargs)
return make_empty_ok_response()
|
leonth/private-configs | sublime-text-3/Packages/Jedi - Python autocompletion/jedi/parser/__init__.py | Python | mit | 28,035 | 0.000214 | """
The ``Parser`` tries to convert the available Python code in an easy to read
format, something like an abstract syntax tree. The classes who represent this
tree, are sitting in the :mod:`parsing_representation` module.
The Python module ``tokenize`` is a very important part in the ``Parser``,
because it splits the code into different words (tokens). Sometimes it looks a
bit messy. Sorry for that! You might ask now: "Why didn't you use the ``ast``
module for this? Well, ``ast`` does a very good job understanding proper Python
code, but fails to work as soon as there's a single line of broken code.
There's one important optimization that needs to be known: Statements are not
being parsed completely. ``Statement`` is just a representation of the tokens
within the statement. This lowers memory usage and cpu time and reduces the
complexity of the ``Parser`` (there's another parser sitting inside
``Statement``, which produces ``Array`` and ``Call``).
"""
from __future__ import with_statement
import tokenizer as tokenize
import keyword
from jedi._compatibility import next, StringIO
from jedi import debug
from jedi import common
from jedi.parser import representation as pr
class Parser(object):
"""
This class is used to parse a Python file, it then divides them into a
class structure of different scopes.
:param source: The codebase for the parser.
:type source: str
:param module_path: The path of the module in the file system, may be None.
:type module_path: str
:param user_position: The line/column, the user is currently on.
:type user_position: tuple(int, int)
:param no_docstr: If True, a string at the beginning is not a docstr.
:param is_fast_parser: -> for fast_parser
:param top_module: Use this module as a parent instead of `self.module`.
"""
def __init__(self, source, module_path=None, user_position=None,
no_docstr=False, offset=(0, 0), is_fast_parser=None,
top_module=None):
self.user_position = user_position
self.user_scope = None
self.user_stmt = None
self.no_docstr = no_docstr
self.start_pos = self.end_pos = 1 + offset[0], offset[1]
# initialize global Scope
self.module = pr.SubModule(module_path, self.start_pos, top_module)
self._scope = self.module
self._current = (None, None)
source = source + '\n' # end with \n, because the parser needs it
buf = StringIO(source)
self._gen = common.NoErrorTokenizer(buf.readline, offset,
is_fast_parser)
self.top_module = top_module or self.module
try:
self._parse()
except (common.MultiLevelStopIteration, StopIteration):
# StopIteration needs to be added as well, because python 2 has a
# strange way of handling StopIterations.
# sometimes StopIteration isn't catched. Just ignore it.
pass
# clean up unused decorators
for d in self._decorators:
# set a parent for unused decorators, avoid NullPointerException
# because of `self.module.used_names`.
d.parent = self.module
if self._current[0] in (tokenize.NL, tokenize.NEWLINE):
# we added a newline before, so we need to "remove" it again.
self.end_pos = self._gen.previous[2]
elif self._current[0] == tokenize.INDENT:
self.end_pos = self._gen.last_previous[2]
self.start_pos = self.module.start_pos
self.module.end_pos = self.end_pos
del self._gen
def __repr__(self):
return "<%s: %s>" % (type(self).__name__, self.module)
def _check_user_stmt(self, simple):
# this is not user checking, just update the used_names
for tok_name in self.module.temp_used_names:
try:
self.module.used_names[tok_name].add(simple)
except KeyError:
self.module.used_names[tok_name] = set([simple])
self.module.temp_used_names = []
if not self.user_position:
return
# the position is right
if simple.start_pos <= self.user_position <= simple.end_pos:
if self.user_stmt is not None:
# if there is already a user position (another import, because
# imports are splitted) the names are checked.
for n in simple.get_set_vars():
if n.start_pos < self.user_position <= n.end_pos:
self.user_stmt = simple
else:
self.user_stmt = simple
def _parse_dot_name(self, pre_used_token=None):
"""
The dot name parser parses a name, variable or function and returns
their names.
:return: Tuple of Name, token_type, nexttoken.
| :rtype: tuple(Name, int, str)
"""
def append(el):
names.append(el)
self.module.temp_used_names.append(el[0])
names = []
if pre_used_token is None:
token_type, tok = self.next()
if t | oken_type != tokenize.NAME and tok != '*':
return [], token_type, tok
else:
token_type, tok = pre_used_token
if token_type != tokenize.NAME and tok != '*':
# token maybe a name or star
return None, token_type, tok
append((tok, self.start_pos))
first_pos = self.start_pos
while True:
end_pos = self.end_pos
token_type, tok = self.next()
if tok != '.':
break
token_type, tok = self.next()
if token_type != tokenize.NAME:
break
append((tok, self.start_pos))
n = pr.Name(self.module, names, first_pos, end_pos) if names else None
return n, token_type, tok
    def _parse_import_list(self):
        """
        The parser for the imports. Unlike the class and function parse
        function, this returns no Import class, but rather an import list,
        which is then added later on.
        The reason, why this is not done in the same class lies in the nature
        of imports. There are two ways to write them:
        - from ... import ...
        - import ...
        To distinguish, this has to be processed after the parser.
        :return: List of imports, as (name, alias, defunct) triples.
        :rtype: list
        """
        imports = []
        brackets = False
        # Tokens that terminate one import item: separators plus any keyword
        # except 'as' (which introduces an alias).
        continue_kw = [",", ";", "\n", ')'] \
                      + list(set(keyword.kwlist) - set(['as']))
        while True:
            defunct = False
            token_type, tok = self.next()
            if tok == '(':  # python allows only one `(` in the statement.
                brackets = True
                token_type, tok = self.next()
            # Inside parentheses a newline is not significant; skip it.
            if brackets and tok == '\n':
                self.next()
            i, token_type, tok = self._parse_dot_name(self._current)
            if not i:
                # Could not parse a dotted name: mark this entry as broken.
                defunct = True
            name2 = None
            if tok == 'as':
                name2, token_type, tok = self._parse_dot_name()
            imports.append((i, name2, defunct))
            # Skim forward to the next separator/keyword boundary.
            while tok not in continue_kw:
                token_type, tok = self.next()
            if not (tok == "," or brackets and tok == '\n'):
                break
        return imports
def _parse_parentheses(self):
"""
Functions and Classes have params (which means for classes
super-classes). They are parsed here and returned as Statements.
:return: List of Statements
:rtype: list
"""
names = []
tok = None
pos = 0
breaks = [',', ':']
while tok not in [')', ':']:
param, tok = self._parse_statement(added_breaks=breaks,
stmt_class=pr.Param)
if param and tok == ':':
# parse annotations
annotation, tok = self._parse_statement(added_breaks=breaks)
if annotation:
param.add_annotation(annotation)
# params w |
anselmobd/fo2 | src/contabil/forms.py | Python | mit | 4,534 | 0 | from pprint import pprint
from django import forms
from base.forms.custom import O2BaseForm
from base.forms.fields import (
O2FieldCorForm,
O2FieldModeloForm,
O2FieldRefForm,
)
class InfAdProdForm(forms.Form):
    """Form asking for a single numeric order number ("pedido")."""
    pedido = forms.CharField(
        label='Pedido',
        widget=forms.TextInput(attrs={'type': 'number',
                                      'autofocus': 'autofocus'}))
class RemessaIndustrBaseForm(forms.Form):
    """Base filter form for "remessa para industrialização" reports.

    Filters by remessa/retorno invoice dates and numbers, supplier
    ("facção"), customer, order numbers and production order (OP).
    The two DateField declarations at the top were corrupted in the
    source ("forms. | DateField", "da | ta_ate") and are reconstructed
    here to match the sibling date fields below.
    """
    data_de = forms.DateField(
        label='NF Remessa - Data inicial', required=False,
        widget=forms.DateInput(attrs={'type': 'date'}))
    data_ate = forms.DateField(
        label='NF Remessa - Data final', required=False,
        widget=forms.DateInput(attrs={'type': 'date'}))
    faccao = forms.CharField(
        label='Facção', required=False,
        help_text='Busca no nome e no CNPJ da facção',
        widget=forms.TextInput(attrs={'type': 'string'}))
    cliente = forms.CharField(
        label='Cliente', required=False,
        help_text='Busca no nome e no CNPJ do cliente',
        widget=forms.TextInput(attrs={'type': 'string'}))
    pedido = forms.CharField(
        label='Pedido Tussor', required=False,
        widget=forms.TextInput(attrs={'type': 'number'}))
    pedido_cliente = forms.CharField(
        label='Pedido de cliente', required=False,
        widget=forms.TextInput(attrs={'type': 'string'}))
    op = forms.CharField(
        label='OP', required=False,
        widget=forms.TextInput(attrs={'type': 'number'}))
    CHOICES = [('T', 'Todas as remessas'),
               ('S', 'Só remessas Sem retorno'),
               ('C', 'Só remessas Com retorno'),
               ]
    retorno = forms.ChoiceField(
        label='Retorno', choices=CHOICES, initial='T')
    data_ret_de = forms.DateField(
        label='NF Retorno - Data inicial', required=False,
        widget=forms.DateInput(attrs={'type': 'date'}))
    data_ret_ate = forms.DateField(
        label='NF Retorno - Data final', required=False,
        widget=forms.DateInput(attrs={'type': 'date'}))
    nf_ret = forms.CharField(
        label='NF Retorno', required=False,
        widget=forms.TextInput(attrs={'type': 'number'}))
    nf = forms.CharField(
        label='NF Remessa', required=False,
        widget=forms.TextInput(attrs={'type': 'number'}))

    def clean_faccao(self):
        """Uppercase the supplier filter and push it back into self.data
        so the uppercased value is redisplayed on the bound form."""
        faccao = self.cleaned_data['faccao'].upper()
        # self.data is immutable on bound forms; work on a copy.
        data = self.data.copy()
        data['faccao'] = faccao
        self.data = data
        return faccao

    def clean_cliente(self):
        """Uppercase the customer filter; same redisplay trick as above."""
        cliente = self.cleaned_data['cliente'].upper()
        data = self.data.copy()
        data['cliente'] = cliente
        self.data = data
        return cliente
class RemessaIndustrNFForm(RemessaIndustrBaseForm):
    """Remessa filter form with NF situation and per-NF detail options."""
    CHOICES = [('T', 'Todas as remessas'),
               ('A', 'Ativa'),
               ('C', 'Canceladas'),
               ('D', 'Devolvidas'),
               ]
    situacao = forms.ChoiceField(
        label='Situação', choices=CHOICES, initial='A')
    # NOTE: CHOICES is intentionally rebound; each ChoiceField captured the
    # list that was current at its own definition point.
    CHOICES = [('I', 'Por item de NF de remessa'),
               ('1', 'Por item de nível 1 de NF de remessa'),
               ('R', 'Por referência de nível 1 de NF de remessa'),
               ('N', 'Por NF de remessa'),
               ]
    detalhe = forms.ChoiceField(
        label='Detalhamento', choices=CHOICES, initial='N')
class RemessaIndustrForm(RemessaIndustrBaseForm):
    """Remessa filter form with color / color-and-size detail options."""
    CHOICES = [('C', 'Apenas por cor'),
               ('T', 'Por cor e tamanho'),
               ]
    detalhe = forms.ChoiceField(
        label='Detalhe', choices=CHOICES, initial='C')
class NotaFiscalForm(forms.Form):
    """Form asking for a single invoice (nota fiscal) number."""
    nf = forms.CharField(
        label='Nota fiscal',
        widget=forms.TextInput(attrs={'type': 'number',
                                      'autofocus': 'autofocus'}))
class buscaNFForm(
        O2BaseForm,
        O2FieldCorForm,
        O2FieldModeloForm,
        O2FieldRefForm,
        ):
    """NF search form; requires at least one of ref/modelo/cor filters."""
    pagina = forms.IntegerField(
        required=False, widget=forms.HiddenInput())

    class Meta:
        autofocus_field = 'ref'
        order_fields = [
            'ref',
            'modelo',
            'cor',
        ]

    def clean(self):
        """Reject a submission with no filters at all.

        Uses ``cleaned_data.get`` instead of indexing: a key is absent from
        cleaned_data when its field failed validation, and the original
        ``self.cleaned_data['ref']`` raised KeyError in that case instead of
        reporting a form error.
        """
        filtros = (
            self.cleaned_data.get('ref', '') +
            self.cleaned_data.get('cor', '')
        )
        if len(filtros.strip()) == 0 and self.cleaned_data.get('modelo') is None:
            raise forms.ValidationError(
                "Algum filtro deve ser definido.")
class UploadArquivoForm(forms.Form):
    """Minimal single-file upload form."""
    arquivo = forms.FileField()
|
openprocurement/openprocurement.tender.competitivedialogue | setup.py | Python | apache-2.0 | 1,767 | 0.001132 | from setuptools import setup, find_packages
import os
version = '2.4.3'

# Base requirement; two list entries below were corrupted in the source
# ("'setupto | ols'", "'openp | rocurement...'") and are reconstructed here.
requires = [
    'setuptools'
]
api_requires = requires + [
    'openprocurement.api>=2.4',
    'openprocurement.tender.openua>=2.4.2',
    'openprocurement.tender.openeu>=2.4.2',
    'openprocurement.tender.core>=2.4'
]
test_requires = api_requires + requires + [
    'webtest',
    'python-coveralls',
]
docs_requires = requires + [
    'sphinxcontrib-httpdomain',
]
# Register the tender plugin with openprocurement.tender.core.
entry_points = {
    'openprocurement.tender.core.plugins': [
        'competitivedialogue = openprocurement.tender.competitivedialogue.includeme:includeme'
    ]
}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
    README = f.read()

setup(name='openprocurement.tender.competitivedialogue',
      version=version,
      description="",
      long_description=README,
      # Get more strings from
      # http://pypi.python.org/pypi?:action=list_classifiers
      classifiers=[
          "License :: OSI Approved :: Apache Software License",
          "Programming Language :: Python",
      ],
      keywords="web services",
      author='Quintagroup, Ltd.',
      author_email='info@quintagroup.com',
      url='https://github.com/openprocurement/openprocurement.tender.competitivedialogue',
      license='Apache License 2.0',
      packages=find_packages(exclude=['ez_setup']),
      namespace_packages=['openprocurement', 'openprocurement.tender'],
      include_package_data=True,
      zip_safe=False,
      install_requires=requires,
      extras_require={'test': test_requires, 'docs': docs_requires,
                      'api': api_requires},
      test_suite="openprocurement.tender.competitivedialogue.tests.main.suite",
      entry_points=entry_points)
|
quantumlib/Cirq | cirq-web/cirq_web/circuits/symbols.py | Python | apache-2.0 | 5,383 | 0.000557 | # Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is more of a placeholder for now, we can add
# official color schemes in follow-ups.
import abc
import dataclasses
from typing import Iterable, List, Optional
import cirq
from cirq.protocols.circuit_diagram_info_protocol import CircuitDiagramInfoArgs
@dataclasses.dataclass
class SymbolInfo:
    """Organizes information about a symbol.

    Attributes:
        labels: one display label per wire the symbol spans.
        colors: one display color per wire, parallel to `labels`.
    """

    labels: List[str]
    colors: List[str]

    @staticmethod
    def unknown_operation(num_qubits: int) -> 'SymbolInfo':
        """Generates a SymbolInfo object for an unknown operation.

        Every wire gets a gray '?' placeholder.

        Args:
            num_qubits: the number of qubits in the operation
        """
        # List multiplication replaces the original append loop; '?' and
        # 'gray' are immutable, so sharing the elements is safe.
        return SymbolInfo(labels=['?'] * num_qubits, colors=['gray'] * num_qubits)
class SymbolResolver(metaclass=abc.ABCMeta):
    """Abstract class providing the interface for users to specify information
    about how a particular symbol should be displayed in the 3D circuit
    """

    def __call__(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
        # Calling the resolver instance directly is sugar for resolve().
        return self.resolve(operation)

    @abc.abstractmethod
    def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
        """Converts cirq.Operation objects into SymbolInfo objects for serialization.

        Implementations return None when they cannot handle the operation.
        """
class DefaultResolver(SymbolResolver):
    """Default symbol resolver implementation. Takes information
    from circuit_diagram_info, if unavailable, returns information representing
    an unknown symbol.
    """

    # Colors keyed by diagram symbol; anything else falls back to gray.
    _SYMBOL_COLORS = {
        '@': 'black',
        'H': 'yellow',
        'I': 'orange',
        'X': 'black',
        'Y': 'pink',
        'Z': 'cyan',
        'S': '#90EE90',
        'T': '#CBC3E3',
    }

    def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
        """Checks for the _circuit_diagram_info attribute of the operation,
        and if it exists, build the symbol information from it. Otherwise,
        builds symbol info for an unknown operation.

        Args:
            operation: the cirq.Operation object to resolve
        """
        try:
            info = cirq.circuit_diagram_info(operation)
        except TypeError:
            # Operation does not support diagram info: gray '?' per qubit.
            return SymbolInfo.unknown_operation(cirq.num_qubits(operation))

        wire_symbols = info.wire_symbols
        # Fixed: this assignment was corrupted in the source
        # ("symbol_expo | nent"); the labels include formatted exponents.
        symbol_exponent = info._wire_symbols_including_formatted_exponent(
            CircuitDiagramInfoArgs.UNINFORMED_DEFAULT
        )
        symbol_info = SymbolInfo(list(symbol_exponent), [])
        # Colors are looked up from the bare wire symbols (no exponent).
        for symbol in wire_symbols:
            symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray'))
        return symbol_info
# Resolver chain used when the caller does not supply their own resolvers.
DEFAULT_SYMBOL_RESOLVERS: Iterable[SymbolResolver] = tuple([DefaultResolver()])
def resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo:
    """Builds a SymbolInfo object based off of a designated operation
    and list of resolvers. The latest resolver takes precedence.

    The signature line was corrupted in the source
    ("...SymbolResolver] | ) -> SymbolInfo") and is reconstructed here.

    Args:
        operation: the cirq.Operation object to resolve
        resolvers: a list of SymbolResolvers which provides instructions
            on how to build SymbolInfo objects.

    Raises:
        ValueError: if the operation cannot be resolved into a symbol.
    """
    symbol_info = None
    for resolver in resolvers:
        info = resolver(operation)
        if info is not None:
            # Deliberately no early break: a later resolver overrides
            # earlier ones.
            symbol_info = info
    if symbol_info is None:
        raise ValueError(f'Cannot resolve operation: {operation}')
    return symbol_info
class Operation3DSymbol:
    """Renderable 3D-circuit representation of a single operation.

    Bundles the diagram wire symbols with their grid coordinates, colors
    and moment index, and serializes the bundle for the TypeScript client.
    """

    def __init__(self, wire_symbols, location_info, color_info, moment):
        """Store the display data for one operation.

        Args:
            wire_symbols: symbols from circuit_diagram_info() used to draw
                the operation, one per wire.
            location_info: coordinate tuples, index-aligned with
                wire_symbols.
            color_info: desired color(s), index-aligned with wire_symbols.
            moment: the moment where the symbol should be placed.
        """
        self.wire_symbols = wire_symbols
        self.location_info = location_info
        self.color_info = color_info
        self.moment = moment

    def to_typescript(self):
        """Return a JSON-serializable dict consumed by the TS frontend."""
        return dict(
            wire_symbols=list(self.wire_symbols),
            location_info=self.location_info,
            color_info=self.color_info,
            moment=self.moment,
        )
|
comicxmz001/LeetCode | Python/28 Implement strStr.py | Python | mit | 792 | 0.059343 | class Solution(object):
def strStr(self, haystack, needle):
"""
:type haystack: str
:type needle: str
:rtype: int
"""
if not needle:
return 0
if len(haystack) < len(needle):
return -1
for i in xrange(len(haystack)):
if | i + len(needle) > len(haystack):
return -1
if haystack[i] != needle[0] or haystack[i+len(needle)-1] != needle[-1]:
continue
else:
j=0
while j < len(needle) and i+j < len(haystack):
if haystack[i+j] != needle[j]:
break
j += | 1
if j == len(needle):
return i
return -1
if __name__ == '__main__':
    # Ad-hoc manual check (Python 2 print statement; s3 is defined but unused).
    s1 = ""
    s2 = ""
    s3 = ""
    print Solution().strStr(s1,s2)
|
thaim/ansible | lib/ansible/modules/cloud/cloudstack/cs_vpn_gateway.py | Python | mit | 5,470 | 0.000732 | #!/usr/bin/pytho | n
# -*- coding: utf-8 -*-
#
# (c) 2017, René Moser <mail@renemoser.net>
# GNU General Public License v3.0+ (see COPY | ING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Module metadata consumed by ansible-doc and Ansible CI tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_vpn_gateway
short_description: Manages site-to-site VPN gateways on Apache CloudStack based clouds.
description:
- Creates and removes VPN site-to-site gateways.
version_added: '2.4'
author: René Moser (@resmo)
options:
vpc:
description:
- Name of the VPC.
type: str
required: true
state:
description:
- State of the VPN gateway.
type: str
default: present
choices: [ present, absent ]
domain:
description:
- Domain the VPN gateway is related to.
type: str
account:
description:
- Account the VPN gateway is related to.
type: str
project:
description:
- Name of the project the VPN gateway is related to.
type: str
zone:
description:
- Name of the zone the VPC is related to.
- If not set, default zone is used.
type: str
poll_async:
description:
- Poll async jobs until job has finished.
type: bool
default: yes
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Ensure a vpn gateway is present
cs_vpn_gateway:
vpc: my VPC
delegate_to: localhost
- name: Ensure a vpn gateway is absent
cs_vpn_gateway:
vpc: my VPC
state: absent
delegate_to: localhost
'''
RETURN = '''
---
id:
description: UUID of the VPN site-to-site gateway.
returned: success
type: str
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
public_ip:
description: IP address of the VPN site-to-site gateway.
returned: success
type: str
sample: 10.100.212.10
vpc:
description: Name of the VPC.
returned: success
type: str
sample: My VPC
domain:
description: Domain the VPN site-to-site gateway is related to.
returned: success
type: str
sample: example domain
account:
description: Account the VPN site-to-site gateway is related to.
returned: success
type: str
sample: example account
project:
description: Name of project the VPN site-to-site gateway is related to.
returned: success
type: str
sample: Production
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackVpnGateway(AnsibleCloudStack):
    """CloudStack helper managing the site-to-site VPN gateway of a VPC."""

    def __init__(self, module):
        super(AnsibleCloudStackVpnGateway, self).__init__(module)
        # Map CloudStack API keys to the names documented in RETURN.
        self.returns = {
            'publicip': 'public_ip',
        }

    def get_vpn_gateway(self):
        """Return the VPC's existing VPN gateway dict, or None."""
        query = {
            'vpcid': self.get_vpc(key='id'),
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'projectid': self.get_project(key='id'),
        }
        found = self.query_api('listVpnGateways', **query)
        return found['vpngateway'][0] if found else None

    def present_vpn_gateway(self):
        """Ensure the VPN gateway exists; create it when missing."""
        vpn_gateway = self.get_vpn_gateway()
        if vpn_gateway:
            return vpn_gateway

        self.result['changed'] = True
        create_args = {
            'vpcid': self.get_vpc(key='id'),
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'projectid': self.get_project(key='id'),
        }
        if not self.module.check_mode:
            job = self.query_api('createVpnGateway', **create_args)
            # Without polling the fresh gateway details are not available yet.
            if self.module.params.get('poll_async'):
                vpn_gateway = self.poll_job(job, 'vpngateway')
        return vpn_gateway

    def absent_vpn_gateway(self):
        """Ensure the VPN gateway is removed; delete it when present."""
        vpn_gateway = self.get_vpn_gateway()
        if vpn_gateway:
            self.result['changed'] = True
            if not self.module.check_mode:
                job = self.query_api('deleteVpnGateway', id=vpn_gateway['id'])
                if self.module.params.get('poll_async'):
                    self.poll_job(job, 'vpngateway')
        return vpn_gateway

    def get_result(self, vpn_gateway):
        """Render the common result dict, adding the VPC name."""
        super(AnsibleCloudStackVpnGateway, self).get_result(vpn_gateway)
        if vpn_gateway:
            self.result['vpc'] = self.get_vpc(key='name')
        return self.result
def main():
    """Module entry point: build the argument spec, dispatch on state."""
    argument_spec = cs_argument_spec()
    argument_spec.update(
        vpc=dict(required=True),
        state=dict(choices=['present', 'absent'], default='present'),
        domain=dict(),
        account=dict(),
        project=dict(),
        zone=dict(),
        poll_async=dict(type='bool', default=True),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        supports_check_mode=True,
    )

    acs_vpn_gw = AnsibleCloudStackVpnGateway(module)
    if module.params.get('state') == 'absent':
        vpn_gateway = acs_vpn_gw.absent_vpn_gateway()
    else:
        vpn_gateway = acs_vpn_gw.present_vpn_gateway()

    module.exit_json(**acs_vpn_gw.get_result(vpn_gateway))


if __name__ == '__main__':
    main()
|
ewandor/git-french-law | LegifranceClient/__init__.py | Python | gpl-2.0 | 2,270 | 0.002203 | from datetime import datetime
from mechanize import Browser
from FrenchLawModel import Text, Article, Version, Law
from page import ConstitutionPage, ArticlePage
class LegifranceClient(object):
    """Screen-scraping client for legifrance.gouv.fr.

    Fixes: two lines were corrupted in the source
    ("page.modi | fying_law_page", "return co | nstitution") and are
    reconstructed; ``not x is None`` comparisons are rewritten as the
    idiomatic ``x is not None``.
    """

    host = 'http://www.legifrance.gouv.fr/'

    def __init__(self):
        # Spoofed desktop User-Agent; the site rejects unknown clients.
        self.user_agent = 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.1.4322)'
        self.__init_browser()
        self.create_initial_law()

    def __init_browser(self):
        """Create the mechanize browser, ignoring robots.txt."""
        self.browser = Browser()
        self.browser.set_handle_robots(False)
        self.browser.addheaders = [('User-agent', self.user_agent)]

    def get_page(self, page):
        """Fetch *page* from the site, fill its content and return it."""
        self.browser.open(self.host + page.get_adress())
        page.set_content(self.browser.response().read())
        return page

    def create_initial_law(self):
        """Build the synthetic Law representing the 1958 Constitution,
        used as the modifying law for original article versions."""
        self.initial_law = Law()
        self.initial_law.title = "La Constitution du 4 octobre 1958"
        self.initial_law.number = "-1"
        self.initial_law.date = datetime(1958, 10, 4)

    def get_constitution(self):
        """Scrape every article and article version of the Constitution.

        :return: a Text populated with Articles and their Versions.
        """
        constitution = Text()
        page = self.get_page(ConstitutionPage())
        article_list = page.get_article_list()
        for article_id in article_list:
            article = Article()
            page = self.get_page(ArticlePage(ConstitutionPage, article_id))
            article_version_list = page.get_article_version_list()
            for version_id in article_version_list:
                page = self.get_page(ArticlePage(ConstitutionPage, article_id, version_id))
                version = Version()
                page.set_article_version(version)
                if page.abrogating_law_page is not None:
                    law_page = self.get_page(page.abrogating_law_page)
                    law = law_page.set_law(Law())
                    version.set_abrogating_law(law)
                if page.modifying_law_page is not None:
                    law_page = self.get_page(page.modifying_law_page)
                    law = law_page.set_law(Law())
                    version.set_modifying_law(law)
                else:
                    # No modifying law means the original 1958 text.
                    version.set_modifying_law(self.initial_law)
                article.add_version(version)
            constitution.add_article(article)
        return constitution
95subodh/Leetcode | 129. Sum Root to Leaf Numbers.py | Python | mit | 960 | 0.035417 | #Given a binary tree containing digits from 0-9 only, each root-to-leaf path could represent a number.
#
#An example is the root-to-leaf path 1->2->3 which represents the number 123.
#
#Find the total sum of all root-to-leaf numbers.
#
#For example,
#
# 1
# / \
# 2 3
#The root-to-leaf path 1->2 represents the number 12.
#The root-to-leaf path 1->3 represents the number 13.
#
#Return the sum = 12 + 13 = 25.
# Definition for a binary tree node.
# class TreeNode(object):
# de | f __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def sumNumbers(self, root):
        """Sum all root-to-leaf numbers of a binary tree of digits.

        Each root-to-leaf path spells a number (e.g. 1->2->3 is 123);
        return the sum over all leaves. Iterative DFS carrying the number
        accumulated so far; the final return line was corrupted in the
        source ("ret | urn ans") and is reconstructed here.

        :type root: TreeNode
        :rtype: int
        """
        if not root:
            return 0
        total = 0
        stack = [(root, root.val)]
        while stack:
            node, number = stack.pop()
            if node.left is None and node.right is None:
                # Leaf: the accumulated number is complete.
                total += number
                continue
            if node.left:
                stack.append((node.left, number * 10 + node.left.val))
            if node.right:
                stack.append((node.right, number * 10 + node.right.val))
        return total
|
ErnieAllen/qpid-dispatch | tests/friendship_server.py | Python | apache-2.0 | 3,531 | 0.00085 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOT | ICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance |
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from concurrent import futures
import grpc
from friendship_pb2 import Person, CreateResult, PersonEmail, CommonFriendsResult, FriendshipResponse
from friendship_pb2_grpc import FriendshipServicer, add_FriendshipServicer_to_server
class FriendShipService(FriendshipServicer):
    """
    Implementation of the gRPC FriendshipServicer.
    See the friendship.proto definition for more info.
    """
    def __init__(self):
        # In-memory store of Person messages; no persistence.
        self.people = list()

    def Create(self, request, context):
        """Register a new person; fails when the email is already taken."""
        person = request  # type: Person
        res = CreateResult()  # type: CreateResult
        if person.email in [p.email for p in self.people]:
            res.success = False
            res.message = "Person already exists (email: %s)" % person.email
            return res
        self.people.append(person)
        res.success = True
        return res

    def ListFriends(self, request, context):
        """Stream the Person message of each friend of the matched people."""
        pe = request  # type: PersonEmail
        for p in self.people:
            # NOTE(review): substring match ('in'), unlike the exact match
            # used by get_person(); looks like partial lookup is intended,
            # but verify against the proto contract.
            if pe.email in p.email:
                for friend in p.friends:
                    yield self.get_person(friend)

    def CommonFriendsCount(self, request_iterator, context):
        """Intersect the friend sets of all streamed emails."""
        res = CommonFriendsResult()
        # Start from every known email and narrow down per request.
        fs = {p.email for p in self.people}
        for pe in request_iterator:
            common_friends = [p.email for p in self.people if pe.email in p.friends]
            fs.intersection_update(common_friends)
        res.friends.extend(fs)
        res.count = len(res.friends)
        return res

    def MakeFriends(self, request_iterator, context):
        """Link each streamed pair of emails as mutual friends."""
        for fr in request_iterator:
            res = FriendshipResponse()
            try:
                p1 = self.get_person(fr.email1)
                p2 = self.get_person(fr.email2)
                # BUG FIX: the original tested `None in [res.friend1,
                # res.friend2]`, but protobuf sub-messages are never None,
                # so unknown emails fell through to the else branch and
                # raised AttributeError on p1/p2 below. Check the lookup
                # results instead.
                if p1 is None or p2 is None:
                    res.error = "Invalid email provided"
                else:
                    if fr.email2 not in p1.friends:
                        p1.friends.append(fr.email2)
                    if fr.email1 not in p2.friends:
                        p2.friends.append(fr.email1)
                    res.friend1.CopyFrom(p1)
                    res.friend2.CopyFrom(p2)
            except Exception as e:
                res.error = e.__str__()
            finally:
                yield res

    def get_person(self, email):
        """Return the stored Person with exactly this email, or None."""
        for p in self.people:
            if p.email == email:
                return p
        return None
def serve(port, options=None):
    """Start an insecure gRPC server exposing the Friendship service.

    Args:
        port: TCP port to listen on (all interfaces, IPv6 notation).
        options: optional gRPC channel options passed through verbatim.

    Returns:
        The started grpc.Server instance (caller is responsible for
        stopping it).
    """
    executor = futures.ThreadPoolExecutor(max_workers=1)
    grpc_server = grpc.server(executor, options=options)
    add_FriendshipServicer_to_server(FriendShipService(), grpc_server)
    grpc_server.add_insecure_port('[::]:%s' % port)
    grpc_server.start()
    return grpc_server
|
thebigmunch/gmusicapi-wrapper | tests/test_filter_google_songs.py | Python | mit | 6,138 | 0.021505 | # coding=utf-8
"""Module for testing gmusciapi_wrapper.utils.filter_google_songs utility function."""
from gmusicapi_wrapper.utils import filter_google_songs
from fixtures import TEST_SONGS_1
def test_filter_google_songs_no_filters():
    """With no filters, every song matches and nothing is filtered out."""
    matched, filtered = filter_google_songs(TEST_SONGS_1)
    assert [matched, filtered] == [TEST_SONGS_1, []]
class TestIncludeFilters:
    """Tests for gmusicapi_wrapper.utils.filter_google_songs include_filters."""

    def test_filter_google_songs_single_include_filters_any_match(self):
        """A single include filter in any-match mode keeps the matching song."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, include_filters=[("title", "Take")], all_includes=False
        )
        assert [matched, filtered] == [[TEST_SONGS_1[0]], [TEST_SONGS_1[1]]]

    def test_filter_google_songs_single_include_filters_any_no_match(self):
        """A single non-matching include filter in any-match mode keeps nothing."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, include_filters=[("artist", "Modest")], all_includes=False
        )
        assert [matched, filtered] == [[], TEST_SONGS_1]

    def test_filter_google_songs_multiple_include_filters_any_match(self):
        """Multiple include filters in any-match mode keep songs matching either."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, include_filters=[("artist", "Muse"), ("title", "Take")], all_includes=False
        )
        assert [matched, filtered] == [TEST_SONGS_1, []]

    def test_filter_google_songs_multiple_include_filters_any_no_match(self):
        """Multiple non-matching include filters in any-match mode keep nothing."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, include_filters=[("artist", "Modest"), ("title", "Everything")], all_includes=False
        )
        assert [matched, filtered] == [[], TEST_SONGS_1]

    def test_filter_google_songs_multiple_all_includes_filters_match(self):
        """In all-match mode only songs matching every include filter are kept."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, include_filters=[("artist", "Muse"), ("title", "Take")], all_includes=True
        )
        assert [matched, filtered] == [[TEST_SONGS_1[0]], [TEST_SONGS_1[1]]]

    def test_filter_google_songs_multiple_all_includes_filters_no_match(self):
        """In all-match mode nothing is kept when a filter matches no song."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, include_filters=[("artist", "Modest"), ("title", "Take")], all_includes=True
        )
        assert [matched, filtered] == [[], TEST_SONGS_1]
class TestExcludeFilters:
    """Test gmusicapi_wrapper.utils.filter_google_songs with exclude_filters."""

    def test_filter_google_songs_single_exclude_filters_any_match(self):
        """Test gmusicapi_wrapper.utils.filter_google_songs with a single exclude filter matching with any."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, exclude_filters=[("title", "Take")]
        )
        expected_matched = [TEST_SONGS_1[1]]
        expected_filtered = [TEST_SONGS_1[0]]
        assert matched == expected_matched
        assert filtered == expected_filtered

    def test_filter_google_songs_single_exclude_filters_any_no_match(self):
        """Test gmusicapi_wrapper.utils.filter_google_songs with a single exclude filter not matching with any."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, exclude_filters=[("artist", "Modest")]
        )
        expected_matched = TEST_SONGS_1
        expected_filtered = []
        assert matched == expected_matched
        assert filtered == expected_filtered

    def test_filter_google_songs_multiple_exclude_filters_any_match(self):
        """Test gmusicapi_wrapper.utils.filter_google_songs with multiple exclude filters matching with any."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, exclude_filters=[("artist", "Muse"), ("title", "Take")]
        )
        expected_matched = []
        expected_filtered = TEST_SONGS_1
        assert matched == expected_matched
        assert filtered == expected_filtered

    def test_filter_google_songs_multiple_exclude_filters_any_no_match(self):
        """Test gmusicapi_wrapper.utils.filter_google_songs with multiple exclude filters not matching with any."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, exclude_filters=[("artist", "Modest"), ("title", "Everything")]
        )
        expected_matched = TEST_SONGS_1
        expected_filtered = []
        assert matched == expected_matched
        assert filtered == expected_filtered

    def test_filter_google_songs_multiple_all_excludes_filters_match(self):
        """Test gmusicapi_wrapper.utils.filter_google_songs with multiple exclude filters matching with all."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, exclude_filters=[("artist", "Muse"), ("title", "Take")], all_excludes=True
        )
        expected_matched = [TEST_SONGS_1[1]]
        expected_filtered = [TEST_SONGS_1[0]]
        assert matched == expected_matched
        assert filtered == expected_filtered

    def test_filter_google_songs_multiple_all_excludes_filters_no_match(self):
        """Test gmusicapi_wrapper.utils.filter_google_songs with multiple exclude filters not matching with all."""
        matched, filtered = filter_google_songs(
            TEST_SONGS_1, exclude_filters=[("artist", "Modest"), ("title", "Take")], all_excludes=True
        )
        # The next two assignments were corrupted in the source
        # ("expected_matched | =", "expected_f | iltered"); reconstructed to
        # mirror the parallel no-match cases above.
        expected_matched = TEST_SONGS_1
        expected_filtered = []
        assert matched == expected_matched
        assert filtered == expected_filtered
|
snahelou/awx | awx/main/south_migrations/0055_v210_changes.py | Python | apache-2.0 | 53,135 | 0.00717 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create CustomInventoryScript and add the
        source_script FK to InventoryUpdate and InventorySource."""
        # Adding model 'CustomInventoryScript'
        # NOTE(review): the related_name strings below look like unexpanded
        # "%(class)s" templates frozen by South; kept verbatim on purpose.
        db.create_table(u'main_custominventoryscript', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(default=None)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(default=None)),
            ('description', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
            ('created_by', self.gf('django.db.models.fields.related.ForeignKey')(default=None, related_name="{'class': 'custominventoryscript', 'app_label': 'main'}(class)s_created+", null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
            ('modified_by', self.gf('django.db.models.fields.related.ForeignKey')(default=None, related_name="{'class': 'custominventoryscript', 'app_label': 'main'}(class)s_modified+", null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
            ('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=False, max_length=512)),
            ('script', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
            ('organization', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Organization'])),
        ))
        db.send_create_signal('main', ['CustomInventoryScript'])
        db.create_unique(u'main_custominventoryscript', ['name', 'organization_id'])
        # Adding field 'InventoryUpdate.source_script'
        db.add_column(u'main_inventoryupdate', 'source_script',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['main.CustomInventoryScript'], null=True, on_delete=models.SET_NULL, blank=True),
                      keep_default=False)
        # Adding field 'InventorySource.source_script'
        db.add_column(u'main_inventorysource', 'source_script',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['main.CustomInventoryScript'], null=True, on_delete=models.SET_NULL, blank=True),
                      keep_default=False)
def backwards(self, orm):
    """Revert the migration: drop the table and both foreign-key columns."""
    # Deleting model 'CustomInventoryScript'
    db.delete_table(u'main_custominventoryscript')
    # Deleting field 'InventoryUpdate.source_script'
    db.delete_column(u'main_inventoryupdate', 'source_script_id')
    # Deleting field 'InventorySource.source_script'
    db.delete_column(u'main_inventorysource', 'source_script_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('djang | o.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_len | gth': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.activitystream': {
'Meta': {'object_name': 'ActivityStream'},
'actor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_stream'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'changes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'credential': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Credential']", 'symmetrical': 'False', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Host']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Inventory']", 'symmetrical': 'False', 'blank': 'True'}),
'inventory_source': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.InventorySource']", 'symmetrical': 'False', 'blank': 'True'}),
'inventory_update': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.InventoryUpdate']", 'symmetrical': 'False', 'blank': 'True'}),
'job': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Job']", 'symmetrical': 'False', 'blank': 'True'}),
'job_template': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.JobTemplate']", 'symmetrical': 'False', 'blank': 'True'}),
'object1': ('django.db.models.fields.TextField', [], {}),
'object2': ('django.db.models.fields.TextField', [], {}),
'object_relationship_type': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'operation': ('django.db.models.fields.CharField', [], {'max_length': '13'}),
'organization': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Organization']", 'symmetrical': 'False', 'bl |
achanda/flocker | flocker/node/agents/_logging.py | Python | apache-2.0 | 5,427 | 0 | # Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Helper module to provide macros for logging support
for storage drivers (AWS, Cinder).
See https://clusterhq.atlassian.net/browse/FLOC-2053
for consolidation opportunities.
"""
from eliot import Field, ActionType, MessageType
# Begin: Common structures used by all (AWS, OpenStack)
# storage drivers.
# An OPERATION is a list of:
# IBlockDeviceAPI name, positional arguments, keyword arguments.
OPERATION = Field.for_types(
u"operation", [list],
u"The IBlockDeviceAPI operation being executed,"
u"along with positional and keyword arguments.")
# End: Common structures used by all storage drivers.
# Begin: Helper datastructures to log IBlockDeviceAPI calls
# from AWS storage driver using Eliot.
# ActionType used by AWS storage driver.
AWS_ACTION = ActionType(
u"flocker:node:agents:blockdevice:aws",
[OPERATION],
[],
u"An IBlockDeviceAPI operation is executing using AWS storage driver.")
# Three fields to gather from EC2 response to Boto.
AWS_CODE = Field.for_types(
"aws_code", [bytes, unicode],
u"The error response code.")
AWS_MESSAGE = Field.for_types(
"aws_message", [unicode],
u"A human-readable error message given by the response.",
)
AWS_REQUEST_ID = Field.for_types(
"aws_request_id", [bytes, unicode],
u"The unique identifier assigned by the server for this request.",
)
# Structures to help log ``boto.exception.EC2ResponseError`` from AWS.
BOTO_EC2RESPONSE_ERROR = MessageType(
u"flocker:node:agents:blockdevice:aws:boto_ec2response_error",
[AWS_CODE, AWS_MESSAGE, AWS_REQUEST_ID],
)
DEVICES = Field.for_types(
u"devices", [list],
u"List of devices currently in use by the compute instance.")
NO_AVAILABLE_DEVICE = MessageType(
u"flocker:node:agents:blockdevice:aws:no_available_device",
[DEVICES],
)
IN_USE_DEVICES = MessageType(
u"flocker:node:agents:blockdevice:aws:in_use_devices",
[DEVICES],
u"Log current devices.",
)
NEW_DEVICES = Field.for_types(
u"new_devices", [list],
u"List of new devices in the compute instance.")
NEW_DEVICES_SIZE = Field.for_types(
u"new_devices_size", [list],
u"List of sizes of new devices in the compute instance.")
SIZE = Field.for_types(
u"size", [int],
u"Size, in bytes, of new device we are expecting to manifest."
u"in the OS.")
TIME_LIMIT = Field.for_types(
u"time_limit", [int],
u"Time, in seconds, waited for new device to manifest in the OS.")
NO_NEW_DEVICE_IN_OS = MessageType(
u"flocker:node:agents:blockdevice:aws:no_new_device",
[NEW_DEVICES, NEW_DEVICES_SIZE, SIZE, TIME_LIMIT],
u"No new block device manifested in the OS in given time.",)
VOLUME_ID = Field.for_types(
u"volume_id", [bytes, unicode],
u"The identifier of volume of interest.")
STATUS = Field.for_types(
u"status", [bytes, unicode],
u"Current status of the volume.")
TARGET_STATUS = Field.for_types(
u"target_status", [bytes, unicode],
u"Expected target status of the volume, as a result of an AWS API call.")
WAIT_TIME = Field.for_types(
u"wait_time", [int],
u"Time, in seconds, system waited for the volume to reach target status.")
WAITING_FOR_VOLUME_STATUS_CHANGE = MessageType(
u"flocker:node:agents:blockdevice:aws:volume_status_change_wait",
[VOLUME_ID, STATUS, TARGET_STATUS, WAIT_TIME],
u"Waiting for a volume to reach target status.",)
BOTO_LOG_HEADER = u'flocker:node:agents:blockdevice:aws:boto_logs'
# End: Helper datastructures used by AWS storage driver.
# Begin: Helper datastructures used by OpenStack storage drivers
CODE = Field.for_types("code", [int], u"The HTTP response code.")
MESSAGE = Field.for_types(
"message", [unicode],
u"A human-readable error message given by the response.",
)
DETAILS = Field.for_types("details", [dict], u"Extra details about the error.")
REQUEST_ID = Field.for_types(
"request_id", [bytes, unicode],
u"The unique identifier assigned by the server for this request.",
)
URL = Field.for_types("url", [bytes, unicode], u"The request URL.")
METHOD = Field.for_types("method", [bytes, unicode], u"The request method.")
NOVA_CLIENT_EXCEPTION = MessageType(
u"flocker:node:agents:blockdevice:openstack:nova_client_exception",
[CODE, MESSAGE, DETAILS, REQUEST_ID, URL, METHOD],
)
RESPONSE = Field.for_types("response", [bytes, unicode], u"The response body.")
KEYSTONE_HTTP_ERROR = MessageType(
u"flocker:node:agents:blockdevice:openstack:keystone_http_error",
[CODE, RESPONSE, MESSAGE, DETAILS, REQUEST_ID, URL, METHOD],
)
LOCAL_IPS = Field(
u"local_ips",
repr,
u"The IP addresses found on the target node."
)
API_IPS = Field(
u"api_ips",
repr,
u"The IP addresses and instance_ids for all nodes."
)
COMPUTE_INSTANCE_ID_NOT_FOUND = MessageType(
u"flocker:node:agents:blockdevice:openstack:compute_instance_id:not_found",
[LOCAL_IPS, API_IPS],
u"Unable to determine the instance ID of this node.",
)
CINDER_LOG_HEADER = u'flocker:node | :agents:blockdevice:openstack'
# ActionType used by OpenStack storage driver.
OPENSTACK_ACTION = ActionType(
CINDER_LOG_HEADER,
[OPERATION],
[],
u"An IBlockDeviceAPI operation is executing using OpenStack"
u"sto | rage driver.")
CINDER_CREATE = u'flocker:node:agents:blockdevice:openstack:create_volume'
# End: Helper datastructures used by OpenStack storage driver.
|
bungoume/django-template | project_name/manage.py | Python | mit | 550 | 0.001818 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default settings module for this project template; an existing
    # DJANGO_SETTINGS_MODULE in the environment takes precedence.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint about the usual causes,
        # chaining the original ImportError for debugging.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
|
rackerlabs/pitchfork | pitchfork/config/config.example.py | Python | apache-2.0 | 1,097 | 0 | # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" | BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
If you are running the application within docker using the provided
Dockerfile and docker-compose then you will need to change the MONGO_HOST
option to use the correct container.
import os
MONGO_HOST = os.environ['PITCHFORK_DB_1_PORT_27017_TCP_ADDR']
"""
# MongoDB connection settings.
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
# Extra keyword arguments passed through to the Mongo client.
MONGO_KWARGS = {'tz_aware': True}
MONGO_DATABASE = 'pitchfork'
# Bootstrap administrator account details.
ADMIN_USERNAME = 'cloud_username'
ADMIN_NAME = 'Admin Full Name'
ADMIN_EMAIL = 'Admin Email'
# Cookie/session signing key; must be replaced with a real secret.
SECRET_KEY = 'secret_key_for_cookie'
|
sippy/rtpproxy | misc/includepolice.py | Python | bsd-2-clause | 6,459 | 0.006038 | #!/usr/bin/env python
from random import random
from subprocess import call
import sys, os
def get_ip_flags(iname, includedirs):
    """Return the IPOLICE_FLAGS declared in header *iname*, or None.

    The header is searched for in '.' followed by *includedirs*.  A flags
    declaration is a one-line C comment of the form
        /* IPOLICE_FLAGS: FLAG1,FLAG2 */
    Raises Exception when the file cannot be found in any directory.
    """
    search_dirs = ['.'] + includedirs
    for dname in search_dirs:
        try:
            f = open('%s/%s' % (dname, iname))
            break
        except IOError:
            continue
    else:
        raise Exception('%s is not found in %s' % (iname, search_dirs))
    try:
        for line in f.readlines():
            line = line.strip()
            # Only complete one-line C comments can carry a flags marker.
            # The original test lacked these parentheses, so non-comment
            # lines fell through and were sliced as if they were comments.
            if not (line.startswith('/*') and line.endswith('*/')):
                continue
            line = line[2:-2].strip()
            if not line.startswith('IPOLICE_FLAGS:'):
                continue
            ip_pars = line.split(':', 1)
            return ip_pars[1].strip().split(',')
        # No flags comment present anywhere in the file.
        return None
    finally:
        f.close()
class header_file(object):
    """One '#include' target of the file under test.

    Quoted (project-local) headers get their IPOLICE_FLAGS loaded on
    construction; angle-bracket (system) headers keep ip_flags == None.
    """

    ifname = None
    ip_flags = None

    def __init__(self, ifname, includedirs):
        self.ifname = ifname
        # System headers ('<...>') are never scanned for police flags.
        if ifname.startswith('"'):
            self.ip_flags = get_ip_flags(ifname.strip('"'), includedirs)

    def isflset(self, flname):
        """Return True when *flname* appears in this header's flag list."""
        return self.ip_flags is not None and flname in self.ip_flags

    def __lt__(self, other):
        # Sort includes alphabetically by their literal spelling.
        return self.ifname < other.ifname
def first_pass(fname, includedirs):
    """Collect every distinct '#include' target of *fname*, in file order.

    Returns a tuple of header_file objects, or None when the file
    contains no include statements at all.
    """
    includes = []
    seen = []
    for raw in open(fname).readlines():
        fields = raw.strip().split(None, 1)
        if len(fields) < 2:
            continue
        keyword, target = fields
        if not keyword.startswith('#include'):
            continue
        # Trim anything after the closing quote or angle bracket
        # (trailing comments and the like).
        end = target.rfind('"')
        if end == -1:
            end = target.rfind('>')
        if end == -1:
            continue
        target = target[:end + 1]
        # Skip duplicates, keeping the first occurrence only.
        if target in seen:
            continue
        seen.append(target)
        includes.append(header_file(target, includedirs))
    return tuple(includes) if includes else None
def block_line(fout, line):
    """Write *line* wrapped in '#if 0'/'#endif' so the preprocessor drops it."""
    fout.write('#if 0\n%s#endif\n' % line)
def err_line(fout, line):
    """Write *line* preceded by an '#error' so the compile must fail."""
    fout.write('#error "OOPS"\n' + line)
def second_pass(fname_in, fname_out, filter, target, edit_fn = block_line):
    """Copy *fname_in* to *fname_out*, rewriting '#include' lines.

    Includes listed in *filter* are commented out with '#if 0'; the
    *target* include is rewritten through *edit_fn* (blocked, or turned
    into '#error').  Every other line is copied verbatim.  Stale object
    files for the file under test are removed afterwards so the next
    build really recompiles it.
    """
    #print('second_pass', fname_in, fname_out, filter, target)
    fout = open(fname_out, 'w')
    # Literal include spellings we must act on in this pass.
    fh_names = [x.ifname for x in filter + [target,]]
    for line in open(fname_in).readlines():
        line_s = line.strip()
        lparts = line_s.split(None, 1)
        if len(lparts) < 2 or not lparts[0].startswith('#include'):
            fout.write(line)
            continue
        if lparts[1] not in fh_names:
            fout.write(line)
            continue
        if lparts[1] == target.ifname:
            edit_fn(fout, line)
        else:
            block_line(fout, line)
    # NOTE(review): the cleanup below reads the module-global `fname`,
    # not `fname_in`/`fname_out` -- presumably intentional because the
    # __main__ driver only ever edits `fname`; confirm before reusing
    # this function elsewhere.
    ofnames = []
    if fname.endswith('.c') or fname.endswith('.h'):
        objfile = fname[:-2] + '.o'
        ofnames.append(objfile)
        objfile_dbg = 'rtpproxy_debug-' + objfile
        ofnames.append(objfile_dbg)
    for objfile in ofnames:
        if os.path.exists(objfile):
            #print('removing', objfile)
            os.remove(objfile)
class PassConf(object):
    # Shared build configuration handed to pass2_handler; populated by
    # the __main__ driver below.
    devnull = None             # log file receiving child-process output
    make_flags = None          # extra flags for every make invocation, or None
    cleanbuild_targets = None  # make targets for a full clean rebuild (.h edits)
    build_targets = None       # make targets for an incremental build (.c edits)
    # NOTE(review): the two attributes below appear unused -- the driver
    # keeps fname/fname_bak in module globals instead; confirm.
    fname_bak = None
    fname = None
def pass2_handler(pf):
    """Log a diff of the edited file, rebuild, and return make's exit code.

    NOTE(review): relies on the module-level globals `fname`, `fname_bak`
    and `make` that are set in the __main__ block; only usable from there.
    """
    # Record what changed relative to the pristine backup copy.
    call(('diff', '-du', fname_bak, fname), stdout = pf.devnull, \
        stderr = pf.devnull)
    pf.devnull.flush()
    cargs = [make,]
    if pf.make_flags != None:
        cargs.extend(pf.make_flags)
    # Header edits can affect any compilation unit, so force a clean
    # rebuild; .c edits only need an incremental build.
    if fname.endswith('.h'):
        cargs.extend(pf.cleanbuild_targets)
    else:
        cargs.extend(pf.build_targets)
    pf.devnull.write('\n\n***** Running: %s *****\n\n' % (str(cargs),))
    pf.devnull.flush()
    rval = call(cargs, stdout = pf.devnull, stderr = pf.devnull)
    # The edited copy has been consumed by the build; drop it so the
    # next second_pass() starts from the backup again.
    os.remove(fname)
    pf.devnull.write('\n\n***** status %d *****\n\n' % (rval,))
    pf.devnull.flush()
    return rval
if __name__ == '__main__':
    # Driver: repeatedly rebuild the project with individual includes
    # disabled to discover which '#include' statements are superfluous.
    # Build configuration comes from the environment so the script can be
    # driven from a wrapper makefile.
    make = os.environ['SMAKE']
    includedirs = os.environ['SIPATH'].split(':')
    pconf = PassConf()
    pconf.cleanbuild_targets = ('clean', 'opensips')
    pconf.build_targets = ('opensips',)
    try:
        pconf.make_flags = os.environ['SMAKEFLAGS'].split()
    except KeyError:
        pconf.make_flags = None
    # Headers that are never candidates for removal.
    always_ignore = ('<sys/types.h>', '"config.h"')
    fname = sys.argv[1]
    ignore = list(always_ignore)
    # A .c file legitimately includes its own header.
    if fname.endswith('.c'):
        ignore.append('"%s.h"' % fname[:-2])
    print('processing %s' % fname)
    includes = first_pass(fname, includedirs)
    if includes == None:
        print(' ...no includes found')
        sys.exit(0)
    includes = [x for x in includes if x.ifname not in ignore \
        and not x.isflset('DONT_REMOVE')]
    includes.sort()
    # All child-process output is appended to a per-file log.
    pconf.devnull = open('ipol/' + fname + '.iout', 'a')
    print(' .collected %d "#include" statements' % len(includes))
    # Sanity check: the tree must build cleanly before we start editing.
    print(' .doing dry run')
    cargs = [make,]
    if pconf.make_flags != None:
        cargs.extend(pconf.make_flags)
    cargs.extend(pconf.cleanbuild_targets)
    pconf.devnull.write('\n\n***** Dry-Running: %s *****\n\n' % (str(cargs),))
    pconf.devnull.flush()
    rval = call(cargs, stdout = pconf.devnull, stderr = pconf.devnull)
    if rval != 0:
        print(' ...dry run failed')
        sys.exit(255)
    pconf.devnull.flush()
    # Work on a randomly-named backup; the original name receives the
    # edited copies fed to each test build.
    r = int(random() * 1000000.0)
    sfl_includes = []
    unusd_includes = []
    fname_bak = '%s.%.6d' % (fname, r)
    os.rename(fname, fname_bak)
    print(' ..renamed "%s" into "%s"' % (fname, fname_bak))
    while True:
        #print('sfl_includes:', [x.ifname for x in sfl_includes])
        sfl_includes_bak = sfl_includes[:]
        for include in includes:
            if include in sfl_includes + unusd_includes:
                continue
            # First check the include is actually reachable: with it
            # turned into '#error' the build must fail, otherwise the
            # preprocessor never sees it.
            second_pass(fname_bak, fname, sfl_includes, include, err_line)
            rval = pass2_handler(pconf)
            if rval == 0:
                unusd_includes.append(include)
                continue
            # Now block it; if the build still succeeds the include is
            # superfluous.
            second_pass(fname_bak, fname, sfl_includes, include, block_line)
            rval = pass2_handler(pconf)
            if rval == 0:
                sfl_includes.append(include)
                break
        # Fixed point: no new superfluous include found in a full sweep.
        if len(sfl_includes_bak) == len(sfl_includes):
            break
    os.rename(fname_bak, fname)
    for include in sfl_includes:
        print('"#include %s" is superfluous in %s' % (include.ifname, fname))
    # Exit status doubles as the count of removable includes.
    sys.exit(len(sfl_includes))
|
cwolferh/heat-scratch | heat/engine/resources/openstack/neutron/lbaas/pool.py | Python | apache-2.0 | 9,455 | 0 | #
# Copyright 2015 IBM Corp.
#
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine.resources.openstack.neutron import neutron
from heat.engine import support
from heat.engine import translation
class Pool(neutron.NeutronResource):
"""A resource for managing LBaaS v2 Pools.
This resources manages Neutron-LBaaS v2 Pools, which represent a group
of nodes. Pools define the subnet where nodes reside, balancing algorithm,
and the nodes themselves.
"""
support_status = support.SupportStatus(version='6.0.0')
required_service_extension = 'lbaasv2'
PROPERTIES = (
ADMIN_STATE_UP, DESCRIPTION, SESSION_PERSISTENCE, NAME,
LB_ALGORITHM, LISTENER, PROTOCOL, SESSION_PERSISTENCE_TYPE,
SESSION_PERSISTENCE_COOKIE_NAME,
) = (
'admin_state_up', 'description', 'session_persistence', 'name',
'lb_algorithm', 'listener', 'protocol', 'type',
'cookie_name'
)
SESSION | _PERSISTENCE_TYPES = (
SOURCE_IP, HTTP_COOKIE, APP_COOKIE
) = (
'SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE'
)
ATTRIBUTES = (
HEALTHMONITOR_ID_ATTR, LISTENERS_ATTR, MEMBERS_ATTR
) = (
'healthmonito | r_id', 'listeners', 'members'
)
properties_schema = {
ADMIN_STATE_UP: properties.Schema(
properties.Schema.BOOLEAN,
_('The administrative state of this pool.'),
default=True,
update_allowed=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of this pool.'),
update_allowed=True,
default=''
),
SESSION_PERSISTENCE: properties.Schema(
properties.Schema.MAP,
_('Configuration of session persistence.'),
schema={
SESSION_PERSISTENCE_TYPE: properties.Schema(
properties.Schema.STRING,
_('Method of implementation of session '
'persistence feature.'),
required=True,
constraints=[constraints.AllowedValues(
SESSION_PERSISTENCE_TYPES
)]
),
SESSION_PERSISTENCE_COOKIE_NAME: properties.Schema(
properties.Schema.STRING,
_('Name of the cookie, '
'required if type is APP_COOKIE.')
)
},
),
NAME: properties.Schema(
properties.Schema.STRING,
_('Name of this pool.'),
update_allowed=True
),
LB_ALGORITHM: properties.Schema(
properties.Schema.STRING,
_('The algorithm used to distribute load between the members of '
'the pool.'),
required=True,
constraints=[
constraints.AllowedValues(['ROUND_ROBIN',
'LEAST_CONNECTIONS', 'SOURCE_IP']),
],
update_allowed=True,
),
LISTENER: properties.Schema(
properties.Schema.STRING,
_('Listener name or ID to be associated with this pool.'),
required=True,
constraints=[
constraints.CustomConstraint('neutron.lbaas.listener')
]
),
PROTOCOL: properties.Schema(
properties.Schema.STRING,
_('Protocol of the pool.'),
required=True,
constraints=[
constraints.AllowedValues(['TCP', 'HTTP', 'HTTPS']),
]
),
}
attributes_schema = {
HEALTHMONITOR_ID_ATTR: attributes.Schema(
_('ID of the health monitor associated with this pool.'),
type=attributes.Schema.STRING
),
LISTENERS_ATTR: attributes.Schema(
_('Listener associated with this pool.'),
type=attributes.Schema.STRING
),
MEMBERS_ATTR: attributes.Schema(
_('Members associated with this pool.'),
type=attributes.Schema.LIST
),
}
def translation_rules(self, props):
    """Resolve the LISTENER property from a name or ID to a listener UUID."""
    return [
        translation.TranslationRule(
            props,
            translation.TranslationRule.RESOLVE,
            [self.LISTENER],
            client_plugin=self.client_plugin(),
            finder='find_resourceid_by_name_or_id',
            entity='listener'
        ),
    ]
def __init__(self, name, definition, stack):
    super(Pool, self).__init__(name, definition, stack)
    # Cache for the parent load balancer ID, lazily resolved by lb_id.
    self._lb_id = None
@property
def lb_id(self):
    """ID of the load balancer owning this pool's listener (lazily cached)."""
    if self._lb_id is None:
        listener_id = self.properties[self.LISTENER]
        listener = self.client().show_listener(listener_id)['listener']
        # The listener's first (only) load balancer is the parent LB.
        self._lb_id = listener['loadbalancers'][0]['id']
    return self._lb_id
def validate(self):
    """Validate session-persistence constraints on top of base checks.

    APP_COOKIE persistence requires a cookie name; SOURCE_IP
    persistence forbids one.
    """
    res = super(Pool, self).validate()
    if res:
        return res
    session_p = self.properties[self.SESSION_PERSISTENCE]
    if session_p is None:
        return
    persistence_type = session_p[self.SESSION_PERSISTENCE_TYPE]
    cookie = session_p.get(self.SESSION_PERSISTENCE_COOKIE_NAME)
    if persistence_type == self.APP_COOKIE and not cookie:
        msg = (_('Property %(cookie)s is required when %(sp)s '
                 'type is set to %(app)s.') %
               {'cookie': self.SESSION_PERSISTENCE_COOKIE_NAME,
                'sp': self.SESSION_PERSISTENCE,
                'app': self.APP_COOKIE})
        raise exception.StackValidationFailed(message=msg)
    if persistence_type == self.SOURCE_IP and cookie:
        msg = (_('Property %(cookie)s must NOT be specified when '
                 '%(sp)s type is set to %(ip)s.') %
               {'cookie': self.SESSION_PERSISTENCE_COOKIE_NAME,
                'sp': self.SESSION_PERSISTENCE,
                'ip': self.SOURCE_IP})
        raise exception.StackValidationFailed(message=msg)
def _check_lb_status(self):
    # Delegate load-balancer status polling to the Neutron client plugin.
    return self.client_plugin().check_lb_status(self.lb_id)
def handle_create(self):
    """Build the pool-creation request body from the resource properties.

    The actual API call happens in check_create_complete() so it can be
    retried while the parent load balancer is still busy.
    """
    props = self.prepare_properties(
        self.properties, self.physical_resource_name())
    # The LBaaS v2 API expects 'listener_id', not the heat-level name.
    props['listener_id'] = props.pop(self.LISTENER)
    session_p = props.get(self.SESSION_PERSISTENCE)
    if session_p is not None:
        props[self.SESSION_PERSISTENCE] = self.prepare_properties(
            session_p, None)
    return props
def check_create_complete(self, properties):
    """Create the pool if not yet done, then poll the load balancer status."""
    if self.resource_id is None:
        try:
            pool = self.client().create_lbaas_pool(
                {'pool': properties})['pool']
            self.resource_id_set(pool['id'])
        except Exception as ex:
            # "Invalid" here means the LB is still busy; retry on the
            # next poll instead of failing the stack.
            if self.client_plugin().is_invalid(ex):
                return False
            raise
    return self._check_lb_status()
def _show_resource(self):
    # Raw pool details as returned by the Neutron LBaaS v2 API.
    return self.client().show_lbaas_pool(self.resource_id)['pool']
def handle_update(self, json_snippet, |
2deviant/Mathematica-Trees | trees.py | Python | mit | 2,017 | 0.003966 | import converters
import math
import random
import sys
def random_real(a, b):
    """Uniformly distributed real number in the closed interval [a, b]."""
    return a + (b - a) * random.random()
def branch_length(depth):
    """Length for a branch at *depth*: log-scaled, jittered by a random
    factor in [0.5, 1].  Tune this to change the tree's overall shape."""
    jitter = random_real(.5, 1)
    return jitter * math.log(depth)
def branch_angle(initial_lean, max_lean):
    """Angle of a new branch: the parent's lean plus a random deviation of
    at most +/- max_lean/2.  Tune this to change the tree's spread."""
    deviation = random_real(-.5, .5) * max_lean
    return initial_lean + deviation
def branches(x0, y0, depth, nfurcation, max_lean, initial_lean):
    """Recursively grow a tree of line segments.

    Starting at (x0, y0), adds one branch and then 1..nfurcation child
    branches, until *depth* levels have been generated.  Returns a flat
    list of [depth, [[x0, y0], [x1, y1]]] entries; the depth value lets
    the renderer vary thickness/colour per level.
    """
    # Maximum depth achieved, stop adding branches
    # (a fruit or flower could be added here).
    if not depth:
        return []
    angle = branch_angle(initial_lean, max_lean)
    length = branch_length(depth)
    # The branch is the line segment (x0, y0) - (x1, y1).
    x1 = x0 + length * math.sin(angle)
    y1 = y0 + length * math.cos(angle)
    new_branches = [[depth, [[x0, y0], [x1, y1]]]]
    # Number of child branches growing out of this one.
    n = random.randint(1, nfurcation)
    # range() instead of the Python-2-only xrange(): the rest of the file
    # already uses Python-3-compatible syntax, and range() works on both.
    for _ in range(n):
        # The angle of the current branch becomes the child's initial lean.
        new_branches.extend(
            branches(x1, y1, depth - 1, nfurcation, max_lean, angle)
        )
    return new_branches
def main():
    """Grow one random tree and print it as Mathematica graphics code."""
    tree = branches(
        # origin
        0, 0,
        # 11 branches from trunk to crown
        11,
        # at each juncture, there's either 1 or 2 branches
        2,
        # the branch can deviate 90/2=45 degrees in either direction
        math.pi/2,
        # initial lean [bias] is zero degrees
        0
    )
    print(converters.to_mathematica(tree))
# Script entry point.
if __name__ == '__main__':
    main()
|
wdbm/media_editing | setup.py | Python | gpl-3.0 | 1,765 | 0.022663 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import setuptools
def main():
    """Register the media_editing package and its scripts with setuptools."""
    setuptools.setup(
        name = "media_editing",
        version = "2018.03.26.0007",
        description = "media editing",
        # Rendered from README.md (converted to reST when pypandoc is
        # available) by the helper below.
        long_description = long_description(),
        url = "https://github.com/wdbm/media_editing",
        author = "Will Breaden Madden",
        author_email = "wbm@protonmail.ch",
        license = "GPLv3",
        py_modules = [
            "media_editing"
        ],
        install_requires = [
            "docopt",
            "moviepy",
            "propyte",
            "pymediainfo",
            "pyqrcode",
            "shijian",
            "technicolor"
        ],
        # Standalone command-line tools shipped alongside the module.
        scripts = [
            "images_to_video.py",
            "Markdown_to_HTML.py",
            "text_to_QR_code.py",
            "vidgif.py"
        ],
        entry_points = """
            [console_scripts]
            media_editing = media_editing:media_editing
        """
    )
def long_description(
    filename = "README.md"
):
    """Return the package long description read from *filename*.

    Environment variables in *filename* are expanded.  When pypandoc is
    installed the Markdown source is converted to reStructuredText;
    otherwise the raw text is returned.  A missing file yields "".
    """
    # Expand once and use the SAME path everywhere; the original only
    # expanded for the existence check and then opened the raw name.
    path = os.path.expandvars(filename)
    if not os.path.isfile(path):
        return ""
    try:
        import pypandoc
        return pypandoc.convert_file(path, "rst")
    except ImportError:
        # pypandoc not installed: fall back to the raw Markdown text.
        with open(path) as readme:
            return readme.read()
# Script entry point.
if __name__ == "__main__":
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.