| code (string, 2–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
from tfs import *
from pylab import *
from numpy import *
import glob, os
import numpy
import tensorflow as tf  # numpy/tf are referenced below; the star imports above may not bind these module names
import nibabel as nib
matplotlib.interactive(True)
session = tf.InteractiveSession()
dataPath = './corpusCallosum/'
# Helpers to pad images so their spatial dimensions divide evenly by 2**depth
def computePad(dims,depth):
y1=y2=x1=x2=0;
y,x = [numpy.ceil(dims[i]/float(2**depth)) * (2**depth) for i in range(-2,0)]
x = float(x); y = float(y);
y1 = int(numpy.floor((y - dims[-2])/2)); y2 = int(numpy.ceil((y - dims[-2])/2))
x1 = int(numpy.floor((x - dims[-1])/2)); x2 = int(numpy.ceil((x - dims[-1])/2))
return y1,y2,x1,x2
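# For illustration: computePad((90, 100), 4) returns (3, 3, 6, 6) -- a 90x100
# image is padded to 96x112, the nearest multiples of 2**4.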
def padImage(img,depth):
"""Pads (or crops) an image so it is evenly divisible by 2**depth."""
y1,y2,x1,x2 = computePad(img.shape,depth)
dims = [(0,0) for i in img.shape]
dims[-2] = (y1,y2); dims[-1] = (x1,x2)
return numpy.pad(img,dims,'constant')
# Class to serve up segmented images
class CCData(object):
def __init__(self,paths,padding=None):
self.paths = paths
self.padding = padding
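# getSlices loads one (image, truth) NIfTI pair, locates the axis and index of
# the single slice containing labelled voxels, and returns that 2D slice pair.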
def getSlices(self,paths):
image,truth = paths
image = nib.load(image).get_data(); truth = nib.load(truth).get_data()
slicesWithValues = [unique(s) for s in where(truth>0)]
sliceAxis = argmin([len(s) for s in slicesWithValues])
slicesWithValues = slicesWithValues[sliceAxis]
slc = repeat(-1,3); slc[sliceAxis] = slicesWithValues[0]
if self.padding is not None:
image, truth = [padImage(im,self.padding) for im in (image[slc][0],truth[slc][0])]
else:
image, truth = (image[slc][0],truth[slc][0])
return (image,truth)
def next_batch(self,miniBatch=None):
if miniBatch is None or miniBatch==len(self.paths):
batch = arange(0,len(self.paths))
else:
batch = random.choice(arange(0,len(self.paths)),miniBatch)
images = [self.getSlices(self.paths[i]) for i in batch]
return list(zip(*images))
class Container(object):
def __init__(self,dataPath,reserve=2,**args):
self.dataPath = dataPath
images = glob.glob(os.path.join(dataPath,'?????.nii.gz'))
images = [(i,i.replace('.nii.gz','_cc.nii.gz')) for i in images]
self.train = CCData(images[0:-reserve],**args)
self.test = CCData(images[-reserve:],**args)  # hold out the last 'reserve' images for testing
data = Container(dataPath,reserve=2)
batch = data.train.next_batch(2)
trainingIterations = 1000
x = tf.placeholder('float',shape=[None,None,None],name='input')
y_ = tf.placeholder('float', shape=[None,None,None],name='truth')
y_OneHot = tf.one_hot(indices=tf.cast(y_,tf.int32),depth=2,name='truthOneHot')
xInput = tf.expand_dims(x,axis=3,name='xInput')
#Standard conv net from Session 3 using new TensorFlow layers
net = LD1 = tf.layers.conv2d(
inputs=xInput,
filters=2,
kernel_size=[5,5],
strides = 1,
padding = 'same',
activation=tf.nn.relu,
name='convD1'
)
logits = LD1
y = tf.nn.softmax(logits,-1)
loss = tf.losses.softmax_cross_entropy(onehot_labels=y_OneHot, logits=logits)
trainDict = {}
testDict = {}
logName = None #logName = 'logs/Conv'
# Training and evaluation
trainStep = tf.train.AdamOptimizer(1e-3).minimize(loss)
# Accuracy
correctPrediction = tf.equal(tf.argmax(y,axis=-1), tf.argmax(y_OneHot,axis=-1))
accuracy = tf.reduce_mean(tf.cast(correctPrediction,'float'))
# Jaccard
output = tf.cast(tf.argmax(y,axis=-1), dtype=tf.float32)
truth = tf.cast(tf.argmax(y_OneHot,axis=-1), dtype=tf.float32)
intersection = tf.reduce_sum(tf.reduce_sum(tf.multiply(output, truth), axis=-1),axis=-1)
union = tf.reduce_sum(tf.reduce_sum(tf.cast(tf.add(output, truth)>= 1, dtype=tf.float32), axis=-1),axis=-1)
jaccard = tf.reduce_mean(intersection / union)
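# Jaccard index = |prediction AND truth| / |prediction OR truth| per image,
# averaged over the batch; e.g. masks overlapping on 1 of 2 labelled pixels score 0.5.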
train(session=session,trainingData=data.train,testingData=data.test,truth=y_,input=x,cost=loss,trainingStep=trainStep,accuracy=accuracy,iterations=trainingIterations,miniBatch=2,trainDict=trainDict,testDict=testDict,logName=logName)
# Make a figure
# Get a couple of examples
batch = data.test.next_batch(2)
ex = array(batch[0])
segmentation = y.eval({x:ex})
# Display each example
figure('Example 1'); clf()
imshow(batch[0][0].transpose(),cmap=cm.gray,origin='lower');
#contour(batch[1][0].transpose(),alpha=0.5,colors='g');
contour(segmentation[0,:,:,1].transpose(),alpha=0.5,colors='b')
figure('Example 2'); clf()
imshow(batch[0][1].transpose(),cmap=cm.gray,origin='lower');
#contour(batch[1][1].transpose(),alpha=0.5,colors='g');
contour(segmentation[1,:,:,1].transpose(),alpha=0.5,colors='b')
plotOutput(LD1,{x:ex[0:1]},figOffset='Layer 1 Output')
|
robb-brown/IntroToDeepLearning
|
6_MRISegmentation/ccseg.py
|
Python
|
mit
| 4,404
|
import _plotly_utils.basevalidators
class TicklenValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="ticklen", parent_name="treemap.marker.colorbar", **kwargs
):
super(TicklenValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 0),
role=kwargs.pop("role", "style"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/treemap/marker/colorbar/_ticklen.py
|
Python
|
mit
| 515
|
from django.core import serializers
from django.http import HttpResponse, JsonResponse
from Assessment.models import *
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST, require_GET
import json
@csrf_exempt
@require_GET
def getAssignmentByCode(request):
response_data = {}
try:
C = Assignment.objects.getAssignmentByCode(request.GET)
except Exception as e:
response_data["success"] = 0
response_data['exception'] = str(e)
else:
response_data["success"] = 1
data = serializers.serialize('json', [C, ])
response_data["assignment"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_GET
def getAssignmentsByCourse(request):
print (request)
response_data = {}
try:
C = Assignment.objects.getAssignmentsByCourse(request.GET)
except Exception as e:
response_data["success"] = 0
response_data['exception'] = str(e)
else:
response_data["success"] = 1
data = serializers.serialize('json', [C, ])
response_data["assignment"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveAssignmentByBranch(request):
response_data = {}
try:
C = Assignment.objects.filter(assignmentCode__contains="SE")
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
global data
try:
data = serializers.serialize('json', C)
except Exception as e:
data = serializers.serialize('json', [C, ])
response_data["assignment"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveAssignmentResponses(request):
response_data = {}
try:
C = AssignmentResponse.objects.retrieveAssignmentResponsesByStudent(request.GET)
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
global data
try:
data = serializers.serialize('json', C)
except Exception as e:
data = serializers.serialize('json', [C, ])
response_data["assignment"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveAssignments(request):
response_data = {}
try:
C = Assignment.objects.retrieveAssignments(request.GET)
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
global data
try:
data = serializers.serialize('json', C)
except Exception as e:
data = serializers.serialize('json', [C, ])
response_data["assignment"] = json.loads(data)
return JsonResponse(response_data)
|
IEEEDTU/CMS
|
Assessment/views/Assignment.py
|
Python
|
mit
| 2,991
|
instr = [x.strip().split(' ') for x in open("input/dec25").readlines()]
skip = {}
modified = {}
#instr[1] = ['add', 'a', '2572']
#skip[2] = skip[3] = skip[4] = skip[5] = skip[6] = skip[7] = skip[8] = skip[9] = True
#instr[6] = ['add', 'a', 'c'] # adds c to d, sets c to 0
#skip[7] = True
#skip[8] = True
#modified[6] = modified[7] = modified[8] = True
#instr[9] = ['mul', 'a', 'd'] # multiplies a with d
#skip[10] = True
#modified[9] = modified[10] = True
"""instr[10] = ['add', 'a', 'b'] # adds b to a, sets b to 0
skip[11] = True
skip[12] = True"""
#instr[14] = ['mul', 'a', 'd'] # multiplies a with d
#skip[15] = True
def print_program(inss):
i = 0
for inst in inss:
prefix = ' # ' if i in skip else ' '
print(prefix, i, inst)
i += 1
print_program(instr)
# evaluated a couple of inputs and found the program emits the binary representation of a
# number: the answer is the first value at or above 2572 (instructions 1-9 add 2572 to the
# input) whose bits repeat cleanly (end with 0 and alternate like ...101010), minus 2572
for x in [158]:
pc = 0
reg = {'a': x, 'b': 0, 'c': 0, 'd': 0}
output = ''
while pc < len(instr):
if pc in skip:
pc += 1
continue
inst = instr[pc]
if inst[0] == 'add':
v = reg[inst[2]] if inst[2] in reg else int(inst[2])
reg[inst[1]] += v
reg[inst[2]] = 0
pc += 1
elif inst[0] == 'mul':
reg[inst[1]] *= reg[inst[2]]
reg[inst[2]] = 0
pc += 1
elif inst[0] == 'cpy':
if inst[2] in reg:
if inst[1] in reg:
reg[inst[2]] = reg[inst[1]]
else:
reg[inst[2]] = int(inst[1])
pc += 1
elif inst[0] == 'inc':
reg[inst[1]] += 1
pc += 1
elif inst[0] == 'dec':
reg[inst[1]] -= 1
pc += 1
elif inst[0] == 'jnz':
if (inst[1] in reg and reg[inst[1]] != 0) or (inst[1] not in reg and int(inst[1]) != 0):
if inst[2] in reg:
pc += reg[inst[2]]
else:
pc += int(inst[2])
else:
pc += 1
elif inst[0] == 'tgl':
if inst[1] in reg:
d = pc + reg[inst[1]]
# valid
if d < len(instr) and d >= 0:
if d in modified:
print("modified instruction tggled")
if len(instr[d]) == 2:
if instr[d][0] == 'inc':
instr[d][0] = 'dec'
else:
instr[d][0] = 'inc'
elif len(instr[d]) == 3:
if instr[d][0] == 'jnz':
instr[d][0] = 'cpy'
else:
instr[d][0] = 'jnz'
else:
print(" invalid register", inst[1])
pc += 1
elif inst[0] == 'out':
v = reg[inst[1]] if inst[1] in reg else inst[1]
output += str(v)
print(output)
#if len(output) > 1 and output != '01':
# break
#elif len(output) > 1:
# print("THIS IS IT", x)
pc += 1
else:
print("INVALID INSTRUCTION", inst)
if pc == 8:
print(reg)
if pc == 28:
print('loop', reg)
if pc == 29:
print(x, bin(x), bin(x+2572), output)
break
print(reg['a'])
|
matslindh/codingchallenges
|
adventofcode2016/25.py
|
Python
|
mit
| 3,632
|
import enum
from typing import Dict, Optional, Set
@enum.unique
class MediaTag(enum.IntEnum):
# ndb keys are based on these! Don't change!
CHAIRMANS_VIDEO = 0
CHAIRMANS_PRESENTATION = 1
CHAIRMANS_ESSAY = 2
MEDIA_TAGS: Set[MediaTag] = {t for t in MediaTag}
TAG_NAMES: Dict[MediaTag, str] = {
MediaTag.CHAIRMANS_VIDEO: "Chairman's Video",
MediaTag.CHAIRMANS_PRESENTATION: "Chairman's Presentation",
MediaTag.CHAIRMANS_ESSAY: "Chairman's Essay",
}
TAG_URL_NAMES: Dict[MediaTag, str] = {
MediaTag.CHAIRMANS_VIDEO: "chairmans_video",
MediaTag.CHAIRMANS_PRESENTATION: "chairmans_presentation",
MediaTag.CHAIRMANS_ESSAY: "chairmans_essay",
}
CHAIRMANS_TAGS: Set[MediaTag] = {
MediaTag.CHAIRMANS_VIDEO,
MediaTag.CHAIRMANS_PRESENTATION,
MediaTag.CHAIRMANS_ESSAY,
}
def get_enum_from_url(url_name: str) -> Optional[MediaTag]:
inversed = {v: k for k, v in TAG_URL_NAMES.items()}
if url_name in inversed:
return inversed[url_name]
else:
return None
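# For illustration: get_enum_from_url("chairmans_video") returns
# MediaTag.CHAIRMANS_VIDEO; unknown names return None.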
|
the-blue-alliance/the-blue-alliance
|
src/backend/common/consts/media_tag.py
|
Python
|
mit
| 1,026
|
from django.template import Library
register = Library()
@register.simple_tag(takes_context=True)
def assign(context, **kwargs):
"""
Usage:
{% assign hello="Hello Django" %}
"""
for key, value in kwargs.items():
context[key] = value
return ''
@register.filter
def get(content, key):
"""
Usage:
{{ object|get:key|get:key }}
"""
if isinstance(content, dict):
return content.get(key, '')
if isinstance(content, object):  # always true, so this acts as a fallback for non-dict values
return getattr(content, key, '')
return ''
@register.simple_tag()
def call(fn, *args, **kwargs):
"""
Usage:
{% call object.method *args **kwargs %}
Callable function should be decorated with
redisca.template.decorators.template_func.
"""
if callable(fn):
return fn(*args, **kwargs)
return fn
|
redisca/django-redisca
|
redisca/template/templatetags/builtin.py
|
Python
|
mit
| 833
|
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import start_nodes
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_NOP2, OP_DROP
from binascii import unhexlify
import cStringIO
'''
This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY).
Connect to a single node.
Mine a coinbase block, and then ...
Mine 1 version 4 block.
Check that the CLTV rules are enforced.
TODO: factor out common code from {bipdersig-p2p,bip65-cltv-p2p}.py.
'''
class BIP65Test(ComparisonTestFramework):
def __init__(self):
self.num_nodes = 1
def setup_network(self):
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']],
binary=[self.options.testbinary])
self.is_network_split = False
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
f = cStringIO.StringIO(unhexlify(signresult['hex']))
tx.deserialize(f)
return tx
def invalidate_transaction(self, tx):
'''
Modify the scriptSig of vin 0 so the tx fails CLTV by
prepending -1 CLTV DROP to the scriptSig itself.
'''
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP2, OP_DROP] +
list(CScript(tx.vin[0].scriptSig)))
def get_tests(self):
self.coinbase_blocks = self.nodes[0].generate(1)
self.nodes[0].generate(100)
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
'''Check that the rules are enforced.'''
for valid in (True, False):
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0],
self.nodeaddress, 1.0)
if not valid:
self.invalidate_transaction(spendtx)
spendtx.rehash()
gbt = self.nodes[0].getblocktemplate()
self.block_time = gbt["mintime"] + 1
self.block_bits = int("0x" + gbt["bits"], 0)
block = create_block(self.tip, create_coinbase(101),
self.block_time, self.block_bits)
block.nVersion = 4
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.block_time += 1
self.tip = block.sha256
yield TestInstance([[block, valid]])
if __name__ == '__main__':
BIP65Test().main()
|
litecoinz-project/litecoinz
|
qa/rpc-tests/bip65-cltv-p2p.py
|
Python
|
mit
| 3,533
|
# output formatting
# reprlib provides a version of repr()
import reprlib
print(reprlib.repr(set('supercalifragilisticexpialidocious')))
# pprint - more sophisticated control over printing objects in a way readable by the interpreter
import pprint
t = [[[['black','cyan'],'white',['green','red']],[['magenta','yellow'],'blue']]]
pprint.pprint(t, width=30)
print()
# the textwrap module formats paragraphs of text to fit a given screen width
import textwrap
doc = """The wrap() method is just like fill() except that it returns a list of strings with newlines to separate the wrapped lines."""
print(textwrap.fill(doc, width=40))
# the locale module accesses a database of culture specific data formats
import locale
locale.setlocale(locale.LC_ALL,'en_US.utf8')
conv = locale.localeconv() # get a mapping of conventions
x = 1234567.8
print(locale.format('%d', x, grouping=True))
print(locale.format_string('%s%.*f', (conv['currency_symbol'], conv['frac_digits'], x), grouping=True))
# templating
# the string module includes a versatile Template class with a simplified syntax
from string import Template
t = Template('${village}folk send $$10 to $cause.')
print(t.substitute(village='Nottingham', cause='the ditch fund'))
# the substitute method raises KeyError when a placeholder is not supplied in a dictionary or a keyword argument
# safe_substitute() - will leave placeholders unchanged if data is missing
t2 = Template('Return the $item to $owner.')
d = dict(item='unladen swallow')
try:
t2.substitute(d)
except KeyError as keinst:
print('KeyError occurred.')
print(type(keinst), '-', keinst)
print(t2.safe_substitute(d))
# template subclasses can specify a custom delimiter
import time, os.path
photofiles = ['img_1074.jpg', 'img_1076.jpg', 'img_1077.jpg']
class BatchRename(Template):
delimiter = '%'
# fmt = input('Enter rename style (%d-date %n-seqnum %f-format): ')
fmt = 'Cajetan_%n%f'
t = BatchRename(fmt)
date = time.strftime('%d%b%y')
for i, filename in enumerate(photofiles):
base, ext = os.path.splitext(filename)
newname = t.substitute(d=date, n=i, f=ext)
print('{0} -> {1}'.format(filename, newname))
# another application for templating is separating program logic from the details of multiple output formats
# the struct module provides pack() and unpack() functions for working with variable length binary record formats
# following example shows how to loop through header information in a ZIP file without using the zipfile module
# pack codes H and I represent two and four byte unsigned numbers respectively. the '<' indicates that they are std size and in little-endian byte order
"""
import struct
with open('myfile.zip','rb') as f:
data = f.read()
start = 0
for i in range(3):
start += 14
fields = struct.unpack('<IIIHH', data[start:start+6])
crc32, comp_size, uncomp_size, filenamesize, extra_size = fields
start += 16
filename = data[start:start+filenamesize]
start += filenamesize
extra = data[start:start+extra_size]
print(filename, hex(crc32), comp_size, uncomp_size)
start += extra_size + comp_size # skip to the next header
"""
# multi-threading
# how the high level threading module can run tasks in background while the main program continues to run
print()
import threading, zipfile
class AsyncZip(threading.Thread):
def __init__(self, infile, outfile):
threading.Thread.__init__(self)
self.infile = infile
self.outfile = outfile
def run(self):
f = zipfile.ZipFile(self.outfile,'w',zipfile.ZIP_DEFLATED)
f.write(self.infile)
f.close()
print('Finished background zip of:', self.infile)
background = AsyncZip('mydata.txt', 'myarchive.zip')
print('Task in another thread is starting...', '\n')
background.start()
print('The main program continues to run in foreground.')
print('Something happens in here...', '\n')
background.join() # wait for the background task to finish
print('Main program waited until background was done.')
# the principal challenge of multi-threaded applications is coordinating threads that share data or other resources
# it is good to use queue objects with multi-threading
print()
# logging
# the logging module offers a full featured and flexible logging system
import logging
logging.debug('Debugging information')
logging.info('Informational message')
logging.warning('Warning:config file %s not found', 'server.conf')
logging.error('Error occurred')
logging.critical('Critical error - shutting down')
# the logging system can be configured directly from python or loaded from a user-editable config file for customized logging without altering the application
# weak references
import weakref, gc
class A:
def __init__(self,value):
self.value = value
def __repr__(self):
return str(self.value)
a = A(10) # creating a reference
d = weakref.WeakValueDictionary()
d['primary'] = a # does not create a reference
print(d['primary']) # fetch the object if it is still alive
del a # removes the one reference
gc.collect() # run garbage collector right away
# d['primary'] # entry was automatically removed so it will cause an error
# tools for working with lists
|
CajetanP/code-learning
|
Python/Learning/Language/stdlib_2.py
|
Python
|
mit
| 5,228
|
"""
Wind Turbine Company - 2013
Author: Stephan Rayner
Email: stephan.rayner@gmail.com
"""
import time
from test.Base_Test import Base_Test
class Maintenance_153Validation(Base_Test):
def setUp(self):
self.WindSpeedValue = "4.5"
self.interface.reset()
self.interface.write("Yaw_Generation", "2")
self.interface.expect("Emergency_Stop", "off")
self.interface.expect("Maintenance_Mode", "off")
def test_MaintenanceSD46(self):
'''
Moving into Maintenance Mode while the turbine is running (State 2 or
higher) causes SD_46 to fire before any other shutdowns. In other
words, SD_46 should fire, and only SD_46.
'''
self._State2Setup()
self.interface.expect("Maintenance_Mode", "on")
self.TEST_CONDITION = self.interface.Shutdown.read(self.interface.Shutdown_List, return_onlyHigh = True)
print self.TEST_CONDITION
self.assertTrue("@GV.SD_46" in self.TEST_CONDITION,"Shutdown 46 did not fire")
self.assertEqual(self.TEST_CONDITION.keys()[0], "@GV.SD_46","Shutdown did not fire first")
self.assertEqual(len(self.TEST_CONDITION), 1,"More than one shutdown is present.")
self.TEST_CONDITION = self.interface.read("Turbine_State")
self.assertEqual(self.TEST_CONDITION,"0")
def test_MaintenanceHardwareControl(self):
'''
DO_BypLineProtRelMaintMode and DO_BypassRotorOverSpeed should be 0;
when Maintenance Mode is activated SD_46 goes high, and 1 minute later
DO_BypLineProtRelMaintMode and DO_BypassRotorOverSpeed should be 1.
//I am using a running counter with a read to check time, not a wait and read.
//This maintains that the values don't flip early.
'''
self._State2Setup()
read_Vars = ["@GV.DO_BypLineProtRelMaintMode","@GV.DO_BypassRotorOverSpeed"]
#
self.assertEqual(self._readpxUtils(read_Vars),["0","0"])
self.interface.expect("Maintenance_Mode","on")
elapseTime = 0.0
initialTime = time.time()
self.TEST_CONDITION = self.interface.Shutdown.read(self.interface.Shutdown_List, return_onlyHigh = True)
#
self.assertTrue("@GV.SD_46" in self.TEST_CONDITION,"Shutdown 46 did not fire")
print "\nPlease Wait One Minute\n"
while((self._readpxUtils(read_Vars) == ["0","0"]) and (elapseTime < 120)):
elapseTime = time.time() - initialTime
expectedRunningTime = 60
tolerance = 10
self.TEST_CONDITION = self._readpxUtils(read_Vars)
#
self.assertEqual(self.TEST_CONDITION,["1","1"])
#
self.assertLessEqual(abs(expectedRunningTime-elapseTime),tolerance,"The hardware does not retain control over the UPR and the Smartplug until the brakes apply as expected:\nElapse Time: %s\n%s : %s\n%s : %s\n" % (str(elapseTime), read_Vars[0], self.TEST_CONDITION[0], read_Vars[1], self.TEST_CONDITION[1]))
#Helper Functions
def _State2Setup(self):
self.interface.write("Wind_Speed",self.WindSpeedValue)
self.interface.write("Yaw_Generation", "2")
print ("Waiting for 2 minutes")
time.sleep(70)# must hold this here for the Minute averages to hold
self.interface.Shutdown.bypass([24, 31])
self.interface.Shutdown.reset()
self.interface.start()
def _readpxUtils(self,List):
a = self.interface.mcc.read(List)
tmp=[]
for x in List:
tmp.append(a[x])
return tmp
|
stephan-rayner/HIL-TestStation
|
Tests/e3120/Maintenance/TransitionIntoState0.py
|
Python
|
mit
| 3,533
|
# -*- encoding: utf-8 -*-
import ast
import inspect
class NameLower(ast.NodeVisitor):
def __init__(self, lowered_names):
self.lowered_names = lowered_names
def visit_FunctionDef(self, node):
code = '__globals = globals()\n'
code += '\n'.join("{0} = __globals['{0}']".format(name) for name in self.lowered_names)
code_ast = ast.parse(code, mode='exec')
node.body[:0] = code_ast.body
self.func = node
def lower_names(*namelist):
def lower(func):
srclines = inspect.getsource(func).splitlines()
for n, line in enumerate(srclines):
if '@lower_names' in line:
break
src = '\n'.join(srclines[n + 1:])
if src.startswith((' ', '\t')):
src = 'if 1:\n' + src
top = ast.parse(src, mode='exec')
cl = NameLower(namelist)
cl.visit(top)
temp = {}
exec(compile(top, '', 'exec'), temp, temp)
func.__code__ = temp[func.__name__].__code__
return func
return lower
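# Hypothetical usage sketch (the decorated function here is illustrative, not from the source):
#
#   @lower_names('len')
#   def count_items(items):
#       return len(items)
#
# The rewritten body first copies the named globals into locals, so each later
# lookup of `len` is a fast local access instead of a global one.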
|
xu6148152/Binea_Python_Project
|
PythonCookbook/meta/newlower.py
|
Python
|
mit
| 1,116
|
from django.utils.encoding import python_2_unicode_compatible
from allauth.socialaccount import app_settings
from allauth.account.models import EmailAddress
from ..models import SocialApp, SocialAccount, SocialLogin
from ..adapter import get_adapter
class AuthProcess(object):
LOGIN = 'login'
CONNECT = 'connect'
REDIRECT = 'redirect'
class AuthAction(object):
AUTHENTICATE = 'authenticate'
REAUTHENTICATE = 'reauthenticate'
class AuthError(object):
UNKNOWN = 'unknown'
CANCELLED = 'cancelled' # Cancelled on request of user
DENIED = 'denied' # Denied by server
class Provider(object):
def get_login_url(self, request, next=None, **kwargs):
"""
Builds the URL to redirect to when initiating a login for this
provider.
"""
raise NotImplementedError("get_login_url() for " + self.name)
def get_app(self, request):
return SocialApp.objects.get_current(self.id, request)
def media_js(self, request):
"""
Some providers may require extra scripts (e.g. a Facebook connect)
"""
return ''
def wrap_account(self, social_account):
return self.account_class(social_account)
def get_settings(self):
return app_settings.PROVIDERS.get(self.id, {})
def sociallogin_from_response(self, request, response):
"""
Instantiates and populates a `SocialLogin` model based on the data
retrieved in `response`. The method does NOT save the model to the
DB.
Data for `SocialLogin` will be extracted from `response` with the
help of the `.extract_uid()`, `.extract_extra_data()`,
`.extract_common_fields()`, and `.extract_email_addresses()`
methods.
:param request: a Django `HttpRequest` object.
:param response: object retrieved via the callback response of the
social auth provider.
:return: A populated instance of the `SocialLogin` model (unsaved).
"""
adapter = get_adapter()
uid = self.extract_uid(response)
extra_data = self.extract_extra_data(response)
common_fields = self.extract_common_fields(response)
socialaccount = SocialAccount(extra_data=extra_data,
uid=uid,
provider=self.id)
email_addresses = self.extract_email_addresses(response)
self.cleanup_email_addresses(common_fields.get('email'),
email_addresses)
sociallogin = SocialLogin(account=socialaccount,
email_addresses=email_addresses)
user = sociallogin.user = adapter.new_user(request, sociallogin)
user.set_unusable_password()
adapter.populate_user(request, sociallogin, common_fields)
return sociallogin
def extract_uid(self, data):
"""
Extracts the unique user ID from `data`
"""
raise NotImplementedError(
'The provider must implement the `extract_uid()` method'
)
def extract_extra_data(self, data):
"""
Extracts fields from `data` that will be stored in
`SocialAccount`'s `extra_data` JSONField.
:return: any JSON-serializable Python structure.
"""
return data
def extract_common_fields(self, data):
"""
Extracts fields from `data` that will be used to populate the
`User` model in the `SOCIALACCOUNT_ADAPTER`'s `populate_user()`
method.
For example:
{'first_name': 'John'}
:return: dictionary of key-value pairs.
"""
return {}
def cleanup_email_addresses(self, email, addresses):
# Move user.email over to EmailAddress
if (email and email.lower() not in [
a.email.lower() for a in addresses]):
addresses.append(EmailAddress(email=email,
verified=False,
primary=True))
# Force verified emails
settings = self.get_settings()
verified_email = settings.get('VERIFIED_EMAIL', False)
if verified_email:
for address in addresses:
address.verified = True
def extract_email_addresses(self, data):
"""
For example:
[EmailAddress(email='john@doe.org',
verified=True,
primary=True)]
"""
return []
@python_2_unicode_compatible
class ProviderAccount(object):
def __init__(self, social_account):
self.account = social_account
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
def get_brand(self):
"""
Returns a dict containing an id and name identifying the
brand. Useful when displaying logos next to accounts in
templates.
For most providers, these are identical to the provider. For
OpenID however, the brand can be derived from the OpenID identity
url.
"""
provider = self.account.get_provider()
return dict(id=provider.id,
name=provider.name)
def __str__(self):
return self.to_str()
def to_str(self):
"""
Due to the way python_2_unicode_compatible works, this does not work:
@python_2_unicode_compatible
class GoogleAccount(ProviderAccount):
def __str__(self):
dflt = super(GoogleAccount, self).__str__()
return self.account.extra_data.get('name', dflt)
It will result in an infinite recursion loop. That's why we
add a method `to_str` that can be overridden in a conventional
fashion, without having to worry about @python_2_unicode_compatible
"""
return self.get_brand()['name']
|
sih4sing5hong5/django-allauth
|
allauth/socialaccount/providers/base.py
|
Python
|
mit
| 5,951
|
|
dnaextrim/django_adminlte_x
|
adminlte/static/plugins/datatables/extensions/ColReorder/examples/predefined.html.py
|
Python
|
mit
| 16,794
|
from ..baseapi import BaseApi
class Template(BaseApi):
def __init__(self, *args, **kwargs):
super(Template, self).__init__(*args, **kwargs)
self.endpoint = 'templates'
self.list_id = None
def all(self):
"""
returns a list of available templates.
"""
return self._mc_client._get(url=self.endpoint)
def get(self, template_id):
"""
returns a specific template.
"""
return self._mc_client._get(url=self._build_path(template_id))
def update(self, template_id, data):
"""
updates a specific template
"""
return self._mc_client._patch(url=self._build_path(template_id), data=data)
def delete(self, template_id):
"""
removes a specific template.
"""
return self._mc_client._delete(url=self._build_path(template_id))
|
s0x90/python-mailchimp
|
mailchimp3/entities/template.py
|
Python
|
mit
| 887
|
import datetime
import os
import sys
from contextlib import contextmanager
import freezegun
import pretend
import pytest
from pip._vendor import lockfile
from pip._internal.index import InstallationCandidate
from pip._internal.utils import outdated
class MockPackageFinder(object):
BASE_URL = 'https://pypi.python.org/simple/pip-{0}.tar.gz'
PIP_PROJECT_NAME = 'pip'
INSTALLATION_CANDIDATES = [
InstallationCandidate(PIP_PROJECT_NAME, '6.9.0',
BASE_URL.format('6.9.0')),
InstallationCandidate(PIP_PROJECT_NAME, '3.3.1',
BASE_URL.format('3.3.1')),
InstallationCandidate(PIP_PROJECT_NAME, '1.0',
BASE_URL.format('1.0')),
]
def __init__(self, *args, **kwargs):
pass
def find_all_candidates(self, project_name):
return self.INSTALLATION_CANDIDATES
def _options():
''' Some default options that we pass to outdated.pip_version_check '''
return pretend.stub(
find_links=False, extra_index_urls=[], index_url='default_url',
pre=False, trusted_hosts=False, process_dependency_links=False,
)
@pytest.mark.parametrize(
[
'stored_time',
'installed_ver',
'new_ver',
'check_if_upgrade_required',
'check_warn_logs',
],
[
# Test we return None when installed version is None
('1970-01-01T10:00:00Z', None, '1.0', False, False),
# Need an upgrade - upgrade warning should print
('1970-01-01T10:00:00Z', '1.0', '6.9.0', True, True),
# No upgrade - upgrade warning should not print
('1970-01-9T10:00:00Z', '6.9.0', '6.9.0', False, False),
]
)
def test_pip_version_check(monkeypatch, stored_time, installed_ver, new_ver,
check_if_upgrade_required, check_warn_logs):
monkeypatch.setattr(outdated, 'get_installed_version',
lambda name: installed_ver)
monkeypatch.setattr(outdated, 'PackageFinder', MockPackageFinder)
monkeypatch.setattr(outdated.logger, 'warning',
pretend.call_recorder(lambda *a, **kw: None))
monkeypatch.setattr(outdated.logger, 'debug',
pretend.call_recorder(lambda s, exc_info=None: None))
fake_state = pretend.stub(
state={"last_check": stored_time, 'pypi_version': installed_ver},
save=pretend.call_recorder(lambda v, t: None),
)
monkeypatch.setattr(
outdated, 'load_selfcheck_statefile', lambda: fake_state
)
with freezegun.freeze_time(
"1970-01-09 10:00:00",
ignore=[
"six.moves",
"pip._vendor.six.moves",
"pip._vendor.requests.packages.urllib3.packages.six.moves",
]):
latest_pypi_version = outdated.pip_version_check(None, _options())
# See we return None if not installed_version
if not installed_ver:
assert not latest_pypi_version
# See that we saved the correct version
elif check_if_upgrade_required:
assert fake_state.save.calls == [
pretend.call(new_ver, datetime.datetime(1970, 1, 9, 10, 00, 00)),
]
else:
# Make sure no Exceptions
assert not outdated.logger.debug.calls
# See that save was not called
assert fake_state.save.calls == []
# Ensure we warn the user or not
if check_warn_logs:
assert len(outdated.logger.warning.calls) == 1
else:
assert len(outdated.logger.warning.calls) == 0
def test_virtualenv_state(monkeypatch):
CONTENT = '{"last_check": "1970-01-02T11:00:00Z", "pypi_version": "1.0"}'
fake_file = pretend.stub(
read=pretend.call_recorder(lambda: CONTENT),
write=pretend.call_recorder(lambda s: None),
)
@pretend.call_recorder
@contextmanager
def fake_open(filename, mode='r'):
yield fake_file
monkeypatch.setattr(outdated, 'open', fake_open, raising=False)
monkeypatch.setattr(outdated, 'running_under_virtualenv',
pretend.call_recorder(lambda: True))
monkeypatch.setattr(sys, 'prefix', 'virtually_env')
state = outdated.load_selfcheck_statefile()
state.save('2.0', datetime.datetime.utcnow())
assert len(outdated.running_under_virtualenv.calls) == 1
expected_path = os.path.join('virtually_env', 'pip-selfcheck.json')
assert fake_open.calls == [
pretend.call(expected_path),
pretend.call(expected_path, 'w'),
]
# json.dumps will call this a number of times
assert len(fake_file.write.calls)
def test_global_state(monkeypatch, tmpdir):
CONTENT = '''{"pip_prefix": {"last_check": "1970-01-02T11:00:00Z",
"pypi_version": "1.0"}}'''
fake_file = pretend.stub(
read=pretend.call_recorder(lambda: CONTENT),
write=pretend.call_recorder(lambda s: None),
)
@pretend.call_recorder
@contextmanager
def fake_open(filename, mode='r'):
yield fake_file
monkeypatch.setattr(outdated, 'open', fake_open, raising=False)
@pretend.call_recorder
@contextmanager
def fake_lock(filename):
yield
monkeypatch.setattr(outdated, "check_path_owner", lambda p: True)
monkeypatch.setattr(lockfile, 'LockFile', fake_lock)
monkeypatch.setattr(os.path, "exists", lambda p: True)
monkeypatch.setattr(outdated, 'running_under_virtualenv',
pretend.call_recorder(lambda: False))
cache_dir = tmpdir / 'cache_dir'
monkeypatch.setattr(outdated, 'USER_CACHE_DIR', cache_dir)
monkeypatch.setattr(sys, 'prefix', tmpdir / 'pip_prefix')
state = outdated.load_selfcheck_statefile()
state.save('2.0', datetime.datetime.utcnow())
assert len(outdated.running_under_virtualenv.calls) == 1
expected_path = cache_dir / 'selfcheck.json'
assert fake_lock.calls == [pretend.call(expected_path)]
assert fake_open.calls == [
pretend.call(expected_path),
pretend.call(expected_path),
pretend.call(expected_path, 'w'),
]
# json.dumps will call this a number of times
assert len(fake_file.write.calls)
|
zvezdan/pip
|
tests/unit/test_unit_outdated.py
|
Python
|
mit
| 6,190
|
import sys, math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
class Tree(object):
def __repr__(self):
return str(self.val)  # val may be None for the root node
def __init__(self, val=None):
self.val = val
self.childs = []
def add_number(self, number):
if not number:
return
for child in self.childs:
if number[0] == child.val:
del number[0]
child.add_number(number)
return
new_child = Tree(number[0])
self.childs.append(new_child)
del number[0]
new_child.add_number(number)
def calculate(self):
plus = 1 if self.val else 0
return plus + sum([child.calculate() for child in self.childs])
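# calculate() counts every node that carries a digit, i.e. the number of
# elements needed to store all inserted numbers in this trie.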
def show(self, order=''):
print order + str(self.val)
order += ' '
for child in self.childs:
child.show(order)
# N = int(raw_input())
# for i in xrange(N):
# telephone = raw_input()
# # Write an action using print
# # To debug: print >> sys.stderr, "Debug messages..."
# print "number" # The number of elements (referencing a number) stored in the structure.
if __name__ == '__main__':
t = Tree()
t.add_number(list('0123456789'))
t.add_number(list('0123'))
print t.calculate()
t.show()
|
hibou107/algocpp
|
telephone.py
|
Python
|
mit
| 1,349
|
# coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
class Provider(InternetProvider):
safe_email_tlds = ('com', 'net', 'fr', 'fr')
free_email_domains = (
'voila.fr', 'gmail.com', 'hotmail.fr', 'yahoo.fr', 'laposte.net', 'free.fr', 'sfr.fr', 'orange.fr', 'bouygtel.fr',
'club-internet.fr', 'dbmail.com', 'live.com', 'ifrance.com', 'noos.fr', 'tele2.fr', 'tiscali.fr', 'wanadoo.fr')
tlds = ('com', 'com', 'com', 'net', 'org', 'fr', 'fr', 'fr')
@staticmethod
def _to_ascii(string):
replacements = (
('à', 'a'), ('À', 'A'), ('ç', 'c'), ('Ç', 'c'), ('é', 'e'), ('É', 'E'), ('è', 'e'),
('È', 'E'), ('ë', 'e'), ('Ë', 'E'), ('ï', 'i'), ('Ï', 'I'), ('î', 'i'), ('Î', 'I'),
('ô', 'o'), ('Ô', 'O'), ('ù', 'u'), ('Ù', 'U'),
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(self.bothify(self.generator.parse(pattern))).lower()
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = company_elements[0]
company = company.replace(" ", "")
return self._to_ascii(company).lower()
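# Illustrative only (company names are generated; this one is hypothetical):
# a company "Société Dubois" yields the domain word "societe".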
|
ShaguptaS/faker
|
faker/providers/fr_FR/internet.py
|
Python
|
mit
| 1,414
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-21 04:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0011_auto_20170621_1224'),
]
operations = [
migrations.AddField(
model_name='category',
name='slug',
field=models.SlugField(default=''),
),
migrations.AddField(
model_name='tag',
name='slug',
field=models.SlugField(default=''),
),
]
|
r26zhao/django_blog
|
blog/migrations/0012_auto_20170621_1250.py
|
Python
|
mit
| 586
|
import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
self, plotly_name="tickvals", parent_name="parcoords.dimension", **kwargs
):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "data"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/parcoords/dimension/_tickvals.py
|
Python
|
mit
| 473
|
from django.forms import ModelForm, ModelChoiceField
from django.utils.translation import ugettext_lazy as _
from apps.task.models import Task
class FormChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.name
class TaskForm(ModelForm):
"""
Task form used to add or update a task in the Chronos platform.
TODO: Develop this form
"""
parenttask = FormChoiceField(
queryset=Task.objects.all().order_by('name'),
empty_label=_('Please select an option'),
required=False,
)
class Meta:
model = Task
fields = ['name', 'description', 'comments', 'price', 'parenttask', 'is_visible']
|
hgpestana/chronos
|
apps/task/forms.py
|
Python
|
mit
| 630
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import redis
import json
class Redis(object):
def __init__(self, host="localhost", port=6379):
self._host = host
self._port = port
self._redis_cursor = None
def conn(self):
if self._redis_cursor is None:
pool = redis.ConnectionPool(host=self._host, port=self._port, db=0)
self._redis_cursor = redis.Redis(connection_pool=pool)
def enqueue(self, qname, data):
self.conn()
self._redis_cursor.rpush(qname, json.dumps(data))
def dequeue(self, qname):
self.conn()
r = self._redis_cursor.blpop(qname)
return json.loads(r[1])
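# Usage sketch (the queue name "tasks" is illustrative):
#   q = Redis()
#   q.enqueue("tasks", {"id": 1})
#   q.dequeue("tasks")  # blocks until an item arrives, then returns {"id": 1}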
if __name__ == "__main__":
pass
|
huhuchen/asyncqueue
|
asyncqueue/_redis.py
|
Python
|
mit
| 720
|
__author__ = 'thorwhalen'
import requests
from serialize.khan_logger import KhanLogger
import logging
class SimpleRequest(object):
def __init__(self, log_file_name=None, log_level=logging.INFO):
full_log_path_and_name = KhanLogger.default_log_path_with_unique_name(log_file_name)
self.logger = KhanLogger(file_path_and_name=full_log_path_and_name, level=log_level)
def slurp(self, url):
r = requests.get(url, timeout=30.0)
if not r.ok:
self.logger.log(level=logging.WARN, simple_request="HTTP Error: {} for url {}".format(r.status_code, url))
else:
self.logger.log(level=logging.INFO, simple_request="Slurped url {}".format(url))
return r.text
if __name__ == '__main__':
sr = SimpleRequest()
|
thorwhalen/ut
|
slurp/simple_request.py
|
Python
|
mit
| 787
|
import os
import glob
#####################################################
######Init the files##################################
#####################################################
os.remove("a0.txt")
os.remove("a1.txt")
os.remove("a2.txt")
os.remove("a3.txt")
os.remove("a4.txt")
os.remove("a5.txt")
os.remove("a6.txt")
os.remove("a7.txt")
os.remove("a8.txt")
os.remove("a9.txt")
os.remove("n0.txt")
os.remove("n1.txt")
os.remove("n2.txt")
os.remove("n3.txt")
os.remove("n4.txt")
os.remove("n5.txt")
os.remove("n6.txt")
os.remove("n7.txt")
os.remove("n8.txt")
os.remove("n9.txt")
os.remove("v0.txt")
os.remove("v1.txt")
os.remove("v2.txt")
os.remove("v3.txt")
os.remove("v4.txt")
os.remove("v5.txt")
os.remove("v6.txt")
os.remove("v7.txt")
os.remove("v8.txt")
os.remove("v9.txt")
file_a0 = open("a0.txt", "a")
file_a1 = open("a1.txt", "a")
file_a2 = open("a2.txt", "a")
file_a3 = open("a3.txt", "a")
file_a4 = open("a4.txt", "a")
file_a5 = open("a5.txt", "a")
file_a6 = open("a6.txt", "a")
file_a7 = open("a7.txt", "a")
file_a8 = open("a8.txt", "a")
file_a9 = open("a9.txt", "a")
format_a = [file_a0,file_a1,file_a2,file_a3,file_a4,file_a5,file_a6,file_a7,file_a8,file_a9]
file_n0 = open("n0.txt", "a")
file_n1 = open("n1.txt", "a")
file_n2 = open("n2.txt", "a")
file_n3 = open("n3.txt", "a")
file_n4 = open("n4.txt", "a")
file_n5 = open("n5.txt", "a")
file_n6 = open("n6.txt", "a")
file_n7 = open("n7.txt", "a")
file_n8 = open("n8.txt", "a")
file_n9 = open("n9.txt", "a")
format_n = [file_n0,file_n1,file_n2,file_n3,file_n4,file_n5,file_n6,file_n7,file_n8,file_n9]
file_v0 = open("v0.txt", "a")
file_v1 = open("v1.txt", "a")
file_v2 = open("v2.txt", "a")
file_v3 = open("v3.txt", "a")
file_v4 = open("v4.txt", "a")
file_v5 = open("v5.txt", "a")
file_v6 = open("v6.txt", "a")
file_v7 = open("v7.txt", "a")
file_v8 = open("v8.txt", "a")
file_v9 = open("v9.txt", "a")
format_v = [file_v0,file_v1,file_v2,file_v3,file_v4,file_v5,file_v6,file_v7,file_v8,file_v9]
the_attack_files = glob.glob("../Basic_Attack/*.txt")
the_normal_files = glob.glob("../Normal_Data/*.txt")
the_vali_files = glob.glob("../Vali_Data/*.txt")
#####################################################
########Format the files##############################
#####################################################
attack_words = []
normal_words = []
vali_words = []
#####################################################
########Read in the sequences########################
#########separate them into 2D arrays################
#####################################################
for f in the_attack_files:
e = open(f,"r+")
attack_words.extend([e.read().split()])
e.close()
for f in the_normal_files:
e = open(f,"r+")
normal_words.extend([e.read().split()])
e.close()
for f in the_vali_files:
e = open(f,"r+")
vali_words.extend([e.read().split()])
e.close()
files_a = len(attack_words) // 10
files_n = len(normal_words) // 10
files_v = len(vali_words) // 10
print("Normal sequences: " + str(len(normal_words)))
print("Normal sequences per formatted file: " + str(files_n))
print("Attack sequences: " + str(len(attack_words)))
print("Attack sequences per formatted file: " + str(files_a))
print("Validation sequences: " + str(len(vali_words)))
print("Validation sequences per formatted file: " + str(files_v))
input_n = raw_input("Please input a value for n: ")
print("Performing formatting with " + str(input_n) + " grams...")
n = int(input_n)
def write_ngrams(sequences, out_files, label, per_file, block_name):
    """Slide an n-token window over each sequence, writing one line per
    window ("tok1 ... tokn <label>") and a "new" marker per sequence.
    Output rotates across the ten files, roughly per_file sequences each."""
    y = 0
    index = 0
    to_write = out_files[index]
    for seq in sequences:
        for x in range(0, len(seq) - (n - 1)):
            for form in range(0, n):
                if form < n - 1:
                    to_write.write(str(seq[x + form]) + " ")
                else:
                    to_write.write(str(seq[x + form]) + " " + label + "\n")
        to_write.write("new\n")
        y += 1
        # Guard against per_file == 0 (fewer than ten input files), which
        # previously raised ZeroDivisionError.
        if per_file > 0 and y % per_file == 0 and index < 9:
            print(str(y) + " instances in " + block_name + "...")
            to_write.close()
            index = index + 1
            to_write = out_files[index]

write_ngrams(normal_words, format_n, "0", files_n, "norm_block")
write_ngrams(attack_words, format_a, "1", files_a, "att_block")
write_ngrams(vali_words, format_v, "0", files_v, "vali_block")
#####################################################
########Generate the n-gram##########################
#########and write that to the file##################
#####################################################
#n = 3
#for norm in normal_words:
# for x in range(0,len(norm)-(n-1)):
# file__.write(str(norm[x]) + " " + str(norm[x+1]) + " " + str(norm[x+2]) + " 0\n")
#for att in attack_words:
# for x in range(0,len(att)-(n-1)):
# file_.write(str(att[x]) + " " + str(att[x+1]) + " " + str(att[x+2]) + " 1\n")
#for vali in vali_words:
# for x in range(0,len(vali)-(n-1)):
# file_v.write(str(vali[x]) + " " + str(vali[x+1]) + " " + str(vali[x+2]) + " 0\n")
# file_v.write("new\n")
print("Data Formatted...")
|
doylew/detectionsc
|
format_py/ngram_nskip.py
|
Python
|
mit
| 6,042
|
#
# Utility stackables
#
from __future__ import print_function, absolute_import, unicode_literals, division
from stackable.stackable import Stackable, StackableError
import json, pickle
from time import sleep
from threading import Thread, Event
from datetime import datetime, timedelta
class StackablePickler(Stackable):
'Pickle codec'
def process_input(self, data):
return pickle.loads(data)
def process_output(self, data):
return pickle.dumps(data, protocol=2)
class StackableJSON(Stackable):
'JSON codec'
def process_input(self, data):
try:
return json.loads(data)
except ValueError:
return None
def process_output(self, data):
return json.dumps(data)
class StackableWriter(Stackable):
'Reads and writes from/to a file'
def __init__(self, filename):
super(StackableWriter, self).__init__()
self.fd = open(filename, "w")
def process_input(self, data):
self.fd.write(data)
self.fd.flush()
def process_output(self, data):
return data
# def poll(self):
# return self.fd.read()
class StackablePrinter(Stackable):
'''Prints all input and output, and returns it unmodified.
Useful for quick debugging of Stackables.'''
def __init__(self, printer=print):
'Takes a printing function as argument - defaults to print'
self.printer = printer
super(StackablePrinter, self).__init__()
def process_input(self, data):
self.printer(data)
return data
def process_output(self, data):
self.printer(data)
return data
import sys
class StackableStdout(Stackable):
def process_input(self, data):
sys.stdout.write(data)
return data
def process_output(self, data):
return data
from collections import deque
class StackableInjector(Stackable):
def __init__(self):
super(StackableInjector, self).__init__()
self.in_buf = deque()
self.out_buf = deque()
def push(self, data):
self.in_buf.append(data)
def poll(self):
if len(self.in_buf):
return self.in_buf.popleft()
return None
def process_output(self, data):
self.out_buf.append(data)
return data
class StackablePoker(Stackable):
def __init__(self, interval=20, send=True, ping_string='__stack_ping', pong_string='__stack_pong'):
super(StackablePoker, self).__init__()
self.ping_string = ping_string.encode('utf-8')
self.pong_string = pong_string.encode('utf-8')
self.w = Event()
self.interval = interval
self.send = send
if self.send:
self.reset()
def _detach(self):
super(StackablePoker, self)._detach()
self.w.set()
def reset(self):
self.timestamp = datetime.now()
def ping():
self.w.wait(self.interval)
try:
self._feed(self.ping_string)
except:
pass
x = Thread(target=ping)
x.daemon = True
x.start()
def process_output(self, data):
if self.send and (datetime.now() - self.timestamp) > timedelta(seconds=30):
raise StackableError('Pong not received')
return data
def process_input(self, data):
if data == self.pong_string:
self.reset()
return None
elif data == self.ping_string:
self._feed(self.pong_string)
return None
elif self.send and (datetime.now() - self.timestamp) > timedelta(seconds=30):
raise StackableError('Pong not received')
return data
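# Round-trip sketch for the codecs above (a Stackable's process_* methods
# can be exercised directly, outside of any stack):
#
#   codec = StackableJSON()
#   wire = codec.process_output({'hello': 'world'})  # -> '{"hello": "world"}'
#   data = codec.process_input(wire)                 # -> {'hello': 'world'}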
|
joushou/stackable
|
utils.py
|
Python
|
mit
| 3,175
|
from flask_pymongo import PyMongo
from flask_cors import CORS
mongo = PyMongo()
cors = CORS()
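# Both extensions are created unbound on purpose; a typical application
# factory (a sketch; the real wiring lives elsewhere in this project):
#
#   def create_app():
#       app = Flask(__name__)
#       mongo.init_app(app)
#       cors.init_app(app)
#       return app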
|
TornikeNatsvlishvili/skivri.ge
|
backend/backend/extensions.py
|
Python
|
mit
| 94
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
import uchicagohvz.overwrite_fs
from django.conf import settings
import django.utils.timezone
import uchicagohvz.game.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Award',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('points', models.FloatField(help_text=b'Can be negative, e.g. to penalize players')),
('code', models.CharField(help_text=b'leave blank for automatic (re-)generation', max_length=255, blank=True)),
('redeem_limit', models.IntegerField(help_text=b'Maximum number of players that can redeem award via code entry (set to 0 for awards to be added by moderators only)')),
('redeem_type', models.CharField(max_length=1, choices=[(b'H', b'Humans only'), (b'Z', b'Zombies only'), (b'A', b'All players')])),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('registration_date', models.DateTimeField()),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('rules', models.FileField(storage=uchicagohvz.overwrite_fs.OverwriteFileSystemStorage(), upload_to=uchicagohvz.game.models.gen_rules_filename)),
('picture', models.FileField(storage=uchicagohvz.overwrite_fs.OverwriteFileSystemStorage(), null=True, upload_to=uchicagohvz.game.models.gen_pics_filename, blank=True)),
('color', models.CharField(default=b'#FFFFFF', max_length=64)),
('flavor', models.TextField(default=b'', max_length=6000)),
],
options={
'ordering': ['-start_date'],
},
),
migrations.CreateModel(
name='HighValueDorm',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('dorm', models.CharField(max_length=4, choices=[(b'BJ', b'Burton-Judson Courts'), (b'IH', b'International House'), (b'MAX', b'Max Palevsky'), (b'NC', b'North Campus'), (b'SH', b'Snell-Hitchcock'), (b'SC', b'South Campus'), (b'ST', b'Stony Island'), (b'OFF', b'Off campus')])),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('points', models.IntegerField(default=3)),
('game', models.ForeignKey(to='game.Game')),
],
),
migrations.CreateModel(
name='HighValueTarget',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('kill_points', models.IntegerField(default=3, help_text=b'# of points zombies receive for killing this HVT')),
('award_points', models.IntegerField(default=0, help_text=b'# of points the HVT earns if he/she survives for the entire duration')),
],
),
migrations.CreateModel(
name='Kill',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(default=django.utils.timezone.now)),
('points', models.IntegerField(default=1)),
('notes', models.TextField(blank=True)),
('lat', models.FloatField(null=True, verbose_name=b'latitude', blank=True)),
('lng', models.FloatField(null=True, verbose_name=b'longitude', blank=True)),
('lft', models.PositiveIntegerField(editable=False, db_index=True)),
('rght', models.PositiveIntegerField(editable=False, db_index=True)),
('tree_id', models.PositiveIntegerField(editable=False, db_index=True)),
('level', models.PositiveIntegerField(editable=False, db_index=True)),
('hvd', models.ForeignKey(related_name='kills', on_delete=django.db.models.deletion.SET_NULL, verbose_name=b'High-value Dorm', blank=True, to='game.HighValueDorm', null=True)),
('hvt', models.OneToOneField(related_name='kill', null=True, on_delete=django.db.models.deletion.SET_NULL, blank=True, to='game.HighValueTarget', verbose_name=b'High-value target')),
],
options={
'ordering': ['-date'],
},
),
migrations.CreateModel(
name='Mission',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=63)),
('description', models.CharField(max_length=255)),
('summary', models.TextField(default=b'', max_length=6000)),
('zombies_win', models.BooleanField(default=False)),
('awards', models.ManyToManyField(help_text=b'Awards associated with this mission.', related_name='missions', to='game.Award', blank=True)),
('game', models.ForeignKey(related_name='missions', to='game.Game')),
],
),
migrations.CreateModel(
name='MissionPicture',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('picture', models.FileField(storage=uchicagohvz.overwrite_fs.OverwriteFileSystemStorage(), upload_to=uchicagohvz.game.models.gen_pics_filename)),
('lat', models.FloatField(null=True, verbose_name=b'latitude', blank=True)),
('lng', models.FloatField(null=True, verbose_name=b'longitude', blank=True)),
('game', models.ForeignKey(related_name='pictures', to='game.Game')),
],
),
migrations.CreateModel(
name='New_Squad',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
('game', models.ForeignKey(related_name='new_squads', to='game.Game')),
],
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('active', models.BooleanField(default=False)),
('bite_code', models.CharField(help_text=b'leave blank for automatic (re-)generation', max_length=255, blank=True)),
('dorm', models.CharField(max_length=4, choices=[(b'BJ', b'Burton-Judson Courts'), (b'IH', b'International House'), (b'MAX', b'Max Palevsky'), (b'NC', b'North Campus'), (b'SH', b'Snell-Hitchcock'), (b'SC', b'South Campus'), (b'ST', b'Stony Island'), (b'OFF', b'Off campus')])),
('major', models.CharField(help_text=b'autopopulates from LDAP', max_length=255, blank=True)),
('human', models.BooleanField(default=True)),
('opt_out_hvt', models.BooleanField(default=False)),
('gun_requested', models.BooleanField(default=False)),
('renting_gun', models.BooleanField(default=False)),
('gun_returned', models.BooleanField(default=False)),
('last_words', models.CharField(max_length=255, blank=True)),
('lead_zombie', models.BooleanField(default=False)),
('delinquent_gun', models.BooleanField(default=False)),
('game', models.ForeignKey(related_name='players', to='game.Game')),
('new_squad', models.ForeignKey(related_name='players', blank=True, to='game.New_Squad', null=True)),
],
options={
'ordering': ['-game__start_date', 'user__username', 'user__last_name', 'user__first_name'],
},
),
migrations.CreateModel(
name='Squad',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
('game', models.ForeignKey(related_name='squads', to='game.Game')),
],
),
migrations.AddField(
model_name='player',
name='squad',
field=models.ForeignKey(related_name='players', blank=True, to='game.Squad', null=True),
),
migrations.AddField(
model_name='player',
name='user',
field=models.ForeignKey(related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='missionpicture',
name='players',
field=models.ManyToManyField(help_text=b'Players in this picture.', related_name='pictures', to='game.Player', blank=True),
),
migrations.AddField(
model_name='kill',
name='killer',
field=models.ForeignKey(related_name='+', to='game.Player'),
),
migrations.AddField(
model_name='kill',
name='parent',
field=mptt.fields.TreeForeignKey(related_name='children', blank=True, editable=False, to='game.Kill', null=True),
),
migrations.AddField(
model_name='kill',
name='victim',
field=models.ForeignKey(related_name='+', to='game.Player'),
),
migrations.AddField(
model_name='highvaluetarget',
name='player',
field=models.OneToOneField(related_name='hvt', to='game.Player'),
),
migrations.AddField(
model_name='award',
name='game',
field=models.ForeignKey(related_name='+', to='game.Game'),
),
migrations.AddField(
model_name='award',
name='players',
field=models.ManyToManyField(help_text=b'Players that should receive this award.', related_name='awards', to='game.Player', blank=True),
),
migrations.AlterUniqueTogether(
name='squad',
unique_together=set([('game', 'name')]),
),
migrations.AlterUniqueTogether(
name='player',
unique_together=set([('game', 'bite_code'), ('user', 'game')]),
),
migrations.AlterUniqueTogether(
name='new_squad',
unique_together=set([('game', 'name')]),
),
migrations.AlterUniqueTogether(
name='mission',
unique_together=set([('game', 'name')]),
),
migrations.AlterUniqueTogether(
name='kill',
unique_together=set([('parent', 'killer', 'victim')]),
),
migrations.AlterUniqueTogether(
name='highvaluedorm',
unique_together=set([('game', 'dorm')]),
),
migrations.AlterUniqueTogether(
name='award',
unique_together=set([('game', 'name'), ('game', 'code')]),
),
]
|
kz26/uchicago-hvz
|
uchicagohvz/game/dorm_migrations/0001_initial.py
|
Python
|
mit
| 11,763
|
import torch
import sys
import types
class VFModule(types.ModuleType):
def __init__(self, name):
super(VFModule, self).__init__(name)
self.vf = torch._C._VariableFunctions
def __getattr__(self, attr):
return getattr(self.vf, attr)
sys.modules[__name__] = VFModule(__name__)
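# After the swap above, attribute lookups on this module fall through
# VFModule.__getattr__ to torch._C._VariableFunctions. A sketch (assuming a
# torch build that exposes relu there, as mainline builds do):
#
#   from torch.nn import _VF
#   out = _VF.relu(torch.ones(2))  # dispatched to the native implementation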
|
ryfeus/lambda-packs
|
pytorch/source/torch/nn/_VF.py
|
Python
|
mit
| 310
|
"""
Constructs a planner that is good for being kinda like a car-boat thing!
"""
from __future__ import division
import numpy as np
import numpy.linalg as npl
from params import *
import lqrrt
################################################# DYNAMICS
magic_rudder = 6000
def dynamics(x, u, dt):
"""
Returns next state given last state x, wrench u, and timestep dt.
"""
# Rotation matrix (orientation, converts body to world)
R = np.array([
[np.cos(x[2]), -np.sin(x[2]), 0],
[np.sin(x[2]), np.cos(x[2]), 0],
[ 0, 0, 1]
])
# Construct drag coefficients based on our motion signs
D = np.copy(D_neg)
for i, v in enumerate(x[3:]):
if v >= 0:
D[i] = D_pos[i]
# Heading controller trying to keep us car-like
vw = R[:2, :2].dot(x[3:5])
ang = np.arctan2(vw[1], vw[0])
c = np.cos(x[2])
s = np.sin(x[2])
cg = np.cos(ang)
sg = np.sin(ang)
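    # By the angle-difference identities, sg*c - cg*s = sin(ang - x[2]) and
    # cg*c + sg*s = cos(ang - x[2]), so the arctan2 below yields the heading
    # error wrapped to [-pi, pi].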
u[2] = magic_rudder*np.arctan2(sg*c - cg*s, cg*c + sg*s)
# Actuator saturation
u = B.dot(np.clip(invB.dot(u), -thrust_max, thrust_max))
# M*vdot + D*v = u and pdot = R*v
xdot = np.concatenate((R.dot(x[3:]), invM*(u - D*x[3:])))
# First-order integrate
xnext = x + xdot*dt
# Impose not driving backwards
if xnext[3] < 0:
xnext[3] = abs(x[3])
# # Impose not turning in place
# xnext[5] = np.clip(np.abs(xnext[3]/velmax_pos[0]), 0, 1) * xnext[5]
return xnext
################################################# POLICY
kp = np.diag([150, 150, 0])
kd = np.diag([150, 5, 0])
S = np.diag([1, 1, 1, 0, 0, 0])
def lqr(x, u):
"""
Returns cost-to-go matrix S and policy matrix K given local state x and effort u.
"""
R = np.array([
[np.cos(x[2]), -np.sin(x[2]), 0],
[np.sin(x[2]), np.cos(x[2]), 0],
[ 0, 0, 1]
])
K = np.hstack((kp.dot(R.T), kd))
return (S, K)
################################################# HEURISTICS
goal_buffer = [0.5*free_radius, 0.5*free_radius, np.inf, np.inf, np.inf, np.inf]
error_tol = np.copy(goal_buffer)/10
def gen_ss(seed, goal, buff=[ss_start]*4):
"""
Returns a sample space given a seed state, goal state, and buffer.
"""
return [(min([seed[0], goal[0]]) - buff[0], max([seed[0], goal[0]]) + buff[1]),
(min([seed[1], goal[1]]) - buff[2], max([seed[1], goal[1]]) + buff[3]),
(-np.pi, np.pi),
(0.9*velmax_pos[0], velmax_pos[0]),
(-abs(velmax_neg[1]), velmax_pos[1]),
(-abs(velmax_neg[2]), velmax_pos[2])]
################################################# MAIN ATTRIBUTES
constraints = lqrrt.Constraints(nstates=nstates, ncontrols=ncontrols,
goal_buffer=goal_buffer, is_feasible=unset)
planner = lqrrt.Planner(dynamics, lqr, constraints,
horizon=horizon, dt=dt, FPR=FPR,
error_tol=error_tol, erf=unset,
min_time=basic_duration, max_time=basic_duration, max_nodes=max_nodes,
sys_time=unset, printing=False)
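# Note: the `unset` placeholders (is_feasible, erf, sys_time) come from
# params and are presumably rebound by the calling behavior before planning
# begins; this module only assembles the planner.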
|
jnez71/lqRRT
|
demos/lqrrt_ros/behaviors/car.py
|
Python
|
mit
| 3,231
|
from django.test import TestCase
from builds.models import Version
from projects.models import Project
class RedirectTests(TestCase):
fixtures = ["eric", "test_data"]
def setUp(self):
self.client.login(username='eric', password='test')
r = self.client.post(
'/dashboard/import/',
{'repo_type': 'git', 'name': 'Pip',
'tags': 'big, fucking, monkey', 'default_branch': '',
'project_url': 'http://pip.rtfd.org',
'repo': 'https://github.com/fail/sauce',
'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be',
'default_version': 'latest',
'privacy_level': 'public',
'version_privacy_level': 'public',
'description': 'wat',
'documentation_type': 'sphinx'})
pip = Project.objects.get(slug='pip')
pip_latest = Version.objects.create(project=pip, identifier='latest', verbose_name='latest', slug='latest', active=True)
def test_proper_url_no_slash(self):
r = self.client.get('/docs/pip')
        # This is triggered by Django, so it's a 301; basically just APPEND_SLASH
self.assertEqual(r.status_code, 301)
self.assertEqual(r._headers['location'], ('Location', 'http://testserver/docs/pip/'))
r = self.client.get(r._headers['location'][1])
self.assertEqual(r.status_code, 302)
r = self.client.get(r._headers['location'][1])
self.assertEqual(r.status_code, 200)
def test_proper_url(self):
r = self.client.get('/docs/pip/')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://testserver/docs/pip/en/latest/'))
r = self.client.get(r._headers['location'][1])
self.assertEqual(r.status_code, 200)
def test_inproper_url(self):
r = self.client.get('/docs/pip/en/')
self.assertEqual(r.status_code, 404)
def test_proper_url_full(self):
r = self.client.get('/docs/pip/en/latest/')
self.assertEqual(r.status_code, 200)
# Subdomains
def test_proper_subdomain(self):
r = self.client.get('/', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://pip.readthedocs.org/en/latest/'))
# Keep this around for now, until we come up with a nicer interface
"""
def test_inproper_subdomain(self):
r = self.client.get('/en/', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 404)
"""
def test_proper_subdomain_and_url(self):
r = self.client.get('/en/latest/', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 200)
# Specific Page Redirects
def test_proper_page_on_subdomain(self):
r = self.client.get('/page/test.html', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://pip.readthedocs.org/en/latest/test.html'))
# Specific Page Redirects
def test_proper_page_on_main_site(self):
r = self.client.get('/docs/pip/page/test.html')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://testserver/docs/pip/en/latest/test.html'))
|
ojii/readthedocs.org
|
readthedocs/rtd_tests/tests/test_redirects.py
|
Python
|
mit
| 3,374
|
import hexchat
import re
import sys
import twitch.hook, twitch.jtvmsghandler, twitch.user, twitch.channel
import twitch.normalize, twitch.commands, twitch.exceptions, twitch.topic
import twitch.logger, twitch.settings
from twitch import irc
log = twitch.logger.get()
# regex for extracting time from ban message
ban_msg_regex = re.compile(r"for (\d+) more seconds")
# Identify ourselves as Twitch IRC client to get user info
def endofmotd_cb(word, word_eol, userdata):
hexchat.command('CAP REQ :twitch.tv/tags twitch.tv/commands')
# Ignore various "unknown command" errors
unknowncommands = ('WHO', 'WHOIS')
def servererr_cb(word, word_eol, userdata):
if word[3] in unknowncommands:
return hexchat.EAT_ALL
return hexchat.EAT_NONE
# PRIVMSG hook to handle various notification messages from Twitch.
def privmsg_cb(word, word_eol, msgtype):
try:
nick = twitch.normalize.nick((word[0][1:].split('!')[0]))
chan = word[2]
text = word_eol[3]
if chan == '#jtv' and nick == 'jtv':
hexchat.emit_print('Server Text', text[1:])
return hexchat.EAT_ALL
elif nick == 'jtv':
if chan[0] != '#':
irc.emit_print(None, 'Server Text', text[1:])
return hexchat.EAT_ALL
elif "You are banned" in text:
chan = twitch.channel.get(chan)
if not chan.areWeBanned:
chan.areWeBanned = True
                    match = ban_msg_regex.search(text)
                    if match:  # the ban notice may not include a duration
                        time = int(match.group(1))
                        def clear_ban(userdata):
                            chan.areWeBanned = False
                            chan.emit_print('Server Text',
                                "You are (hopefully) no longer banned")
                        hexchat.hook_timer(time * 1000, clear_ban)
else:
action = word[3][1:]
param = word[4:]
if action[0] != '_' and hasattr(twitch.jtvmsghandler, action):
return getattr(twitch.jtvmsghandler, action)(chan, param)
else:
#log.warning("Unhandled JTV message: %s" % str(word))
ctxt = twitch.channel.get(chan).getContext()
twitch.channel.get(chan).emit_print('Server Text', text[1:])
return hexchat.EAT_ALL
elif nick == 'twitchnotify':
twitch.channel.get(chan).emit_print('Server Text', text[1:])
return hexchat.EAT_ALL
else:
twitch.user.get(nick).joinChannel(chan)
return hexchat.EAT_NONE
except:
log.exception("Unhandled exception in twitch.privmsg_cb")
return hexchat.EAT_NONE
# handle Twitch WHISPER message
def whisper_cb(word, word_eol, msgtype):
try:
nick = twitch.normalize.nick((word[0][1:].split('!')[0]))
dest = word[2]
msg = word_eol[3][1:]
log.debug("Got WHISPER: %s", word)
hexchat.emit_print('Notice', nick, msg)
except:
log.exception("Unhandled exception in twitch.whisper_cb")
finally:
return hexchat.EAT_ALL
# handle Twitch USERSTATE and GLOBALUSERSTATE messages
def userstate_cb(word, word_eol, msgtype):
try:
# log.debug("Got %s msg: %s", msgtype, word)
# Nothing to do here (except eat the message) until Hexchat adds a
# way to read the message's IRCv3 tags.
pass
except:
log.exception("Unhandled exception in twitch.userstate_cb")
finally:
return hexchat.EAT_ALL
# handle Twitch HOSTTARGET messages
# :tmi.twitch.tv HOSTTARGET #renakunisaki :cloakedyoshi -
def hosttarget_cb(word, word_eol, msgtype):
try:
log.debug("%s %s", msgtype, word)
chan = word[2]
param = word[3:]
return twitch.jtvmsghandler.HOSTTARGET(chan, param)
except:
log.exception("Unhandled exception in twitch.hosttarget_cb")
finally:
return hexchat.EAT_ALL
# handle Twitch CLEARCHAT messages
# :tmi.twitch.tv CLEARCHAT #darkspinessonic :ishmon
def clearchat_cb(word, word_eol, msgtype):
try:
log.debug("%s %s", msgtype, word)
if len(word) >= 4: param = [word[3][1:]]
else: param = []
chan = word[2]
# log.debug("Chan = %s, whom = %s", chan, param)
return twitch.jtvmsghandler.CLEARCHAT(chan, param)
except:
log.exception("Unhandled exception in twitch.clearchat_cb")
finally:
return hexchat.EAT_ALL
#def rawmsg_cb(word, word_eol, msgtype, attributes):
# try:
# log.debug("Got raw msg: %s", word)
# except:
# log.exception("Unhandled exception in twitch.rawmsg_cb")
# finally:
# return hexchat.EAT_NONE
# message hook to format user messages nicely.
message_cb_recurse = False
def message_cb(word, word_eol, msgtype):
# avoid infinite loop
global message_cb_recurse
if message_cb_recurse:
return
message_cb_recurse = True
try:
#log.debug("message_cb word=%s" % str(word))
#log.debug("message_cb word_eol=%s" % str(word_eol))
if len(word) < 1:
return hexchat.EAT_NONE
nick = twitch.normalize.nick(word[0])
try:
text = word[1]
except IndexError:
text = ''
user = twitch.user.get(nick)
chan = twitch.channel.get(hexchat.get_context())
if chan is not None:
user.joinChannel(chan)
user.printMessage(chan, text, msgtype)
else:
log.error("Got user message for invalid channel: <%s> %s" %
(nick, text))
return hexchat.EAT_ALL
except:
log.exception("Unhandled exception in twitch.message_cb")
return hexchat.EAT_NONE
finally:
message_cb_recurse = False
# MODE hook to track mods
def mode_cb(word, word_eol, msgtype):
try:
chan = word[2]
mode = word[3]
whom = word[4]
user = twitch.user.get(whom)
what = '+'
for char in mode:
if char == '+' or char == '-':
what = char
elif what == '+':
user.setChannelMode(chan, char, True)
elif what == '-':
user.setChannelMode(chan, char, False)
except:
log.exception("Unhandled exception in twitch.mode_cb")
finally:
return hexchat.EAT_NONE
# When we join a channel, set up the user info and get stream status
def youjoin_cb(word, word_eol, msgtype):
try:
chan = twitch.channel.get(word[1])
chan.join()
hexchat.command("CAP REQ :twitch.tv/membership")
# automatically set up some users
jtv = twitch.user.get('jtv')
jtv.joinChannel(chan)
jtv.setAttrs({'admin':True,'bot':True})
twitchnotify = twitch.user.get('twitchnotify')
twitchnotify.joinChannel(chan)
twitchnotify.setAttrs({'admin':True,'bot':True})
broadcaster = twitch.user.get(chan.name)
broadcaster.joinChannel(chan)
broadcaster.setChanAttr(chan, 'broadcaster', True)
except:
log.exception("Unhandled exception in twitch.youjoin_cb")
finally:
return hexchat.EAT_NONE
# When we leave a channel, stop updating it
def youpart_cb(word, word_eol, msgtype):
try:
if msgtype == 'You Kicked':
chan = word[1]
else:
chan = word[2]
twitch.channel.get(chan).leave()
except:
log.exception("Unhandled exception in twitch.youpart_cb")
def isCommand(name, obj):
return (callable(obj) and (not name.startswith('_'))
and hasattr(obj, 'command'))
# handler for /twitch command
def twitchcmd_cb(word, word_eol, userdata):
    cmd = None  # bound up front so the except handlers below can log it
    try:
        log.debug("/twitch command: %s" % word)
if len(word) < 2:
print("Available commands:")
for name, obj in twitch.commands.__dict__.items():
if isCommand(name, obj):
print("%s - %s" % (name, obj.command['desc']))
return hexchat.EAT_ALL
cmd = word[1]
if not hasattr(twitch.commands, cmd):
raise twitch.exceptions.UnknownCommandError(cmd)
f = getattr(twitch.commands, cmd)
if not hasattr(f, 'command'):
raise twitch.exceptions.UnknownCommandError(cmd)
f(word[2:], word_eol[2:])
except twitch.exceptions.BadParameterError as ex:
print("%s: %s" % (cmd, ex))
except twitch.exceptions.UnknownCommandError as ex:
print("%s: Unknown command" % ex)
except:
log.exception("Unhandled exception in twitch.twitchcmd_cb(%s)" % cmd)
finally:
return hexchat.EAT_ALL
# ignore repeated JOIN events that can happen because we simulate them
# (since Twitch doesn't always send them reliably)
def join_cb(word, word_eol, msgtype):
try:
nick = twitch.normalize.nick((word[0][1:].split('!')[0]))
user = twitch.user.get(nick)
chan = twitch.channel.get(word[2])
if chan.hasUser(user):
return hexchat.EAT_ALL
else:
user.joinChannel(chan)
if ".twitch.hexchat.please.stop.being.butts" not in word[0]:
# eat JOINs that actually come from Twitch
return hexchat.EAT_ALL
else:
return hexchat.EAT_NONE
except:
log.exception("Unhandled exception in twitch.join_cb(%s)" % str(word))
return hexchat.EAT_NONE
# suppress "gives/removes channel operator status" messages
def chanop_cb(word, word_eol, msgtype):
if twitch.settings.get('mute.chanop'):
return hexchat.EAT_ALL
else:
return hexchat.EAT_NONE
# suppress join/part messages
def joinpart_cb(word, word_eol, msgtype):
if twitch.settings.get('mute.joinpart'):
log.debug("Muted a join/part message: %s" % str(word))
return hexchat.EAT_ALL
else:
return hexchat.EAT_NONE
# suppress "capabilities acknowledged" messages
def capack_cb(word, word_eol, msgtype):
return hexchat.EAT_ALL
# suppress "invalid CAP command" caused by Hexchat doing "CAP LS" at startup
def cmd410_cb(word, word_eol, msgtype):
return hexchat.EAT_ALL
# lowercase channel name before joining, or else we won't get any messages
def joincmd_cb(word, word_eol, userdata):
try:
chan = word[1]
orig = chan
chan = chan.lower()
# also handle URLs
unslashed = re.search('([^/]+)$', chan)
if unslashed: chan = unslashed.group(1)
# also handle bare username
if chan[0] != '#': chan = '#' + chan
log.debug("JOIN(%s) => (%s)", orig, chan)
if orig == chan:
return hexchat.EAT_NONE
else:
hexchat.command("JOIN " + chan)
return hexchat.EAT_ALL
except:
log.exception("Unhandled exception in twitch.joincmd_cb(%s)" % cmd)
return hexchat.EAT_NONE
# handle /w command (whisper)
def whispercmd_cb(word, word_eol, userdata):
try:
log.debug("Got /w: %s", word_eol)
hexchat.command("PRIVMSG #jtv :/w %s" % word_eol[1])
hexchat.emit_print('Message Send', word[1], word_eol[2])
return hexchat.EAT_ALL
except:
log.exception("Unhandled exception in twitch.whispercmd_cb")
return hexchat.EAT_ALL
# Install the hooks
def install():
twitch.hook.server ('376', endofmotd_cb)
twitch.hook.server ('410', cmd410_cb)
twitch.hook.server ('421', servererr_cb)
twitch.hook.server ('PRIVMSG', privmsg_cb)
twitch.hook.server ('USERSTATE', userstate_cb)
twitch.hook.server ('GLOBALUSERSTATE', userstate_cb)
twitch.hook.server ('HOSTTARGET', hosttarget_cb)
twitch.hook.server ('CLEARCHAT', clearchat_cb)
twitch.hook.server ('WHISPER', whisper_cb)
#twitch.hook.server_attrs('RAW LINE', rawmsg_cb)
twitch.hook.prnt ('Channel Action', message_cb)
twitch.hook.prnt ('Channel Action Hilight', message_cb)
twitch.hook.prnt ('Channel Message', message_cb)
twitch.hook.prnt ('Channel Msg Hilight', message_cb)
twitch.hook.prnt ('Your Action', message_cb)
twitch.hook.prnt ('Your Message', message_cb)
twitch.hook.server ('MODE', mode_cb)
twitch.hook.server ('JOIN', join_cb)
twitch.hook.prnt ('You Join', youjoin_cb)
twitch.hook.prnt ('You Part', youpart_cb)
twitch.hook.prnt ('You Part with Reason', youpart_cb)
twitch.hook.prnt ('You Kicked', youpart_cb)
twitch.hook.command('twitch', twitchcmd_cb)
twitch.hook.prnt ('Channel Operator', chanop_cb)
twitch.hook.prnt ('Channel DeOp', chanop_cb)
twitch.hook.prnt ('Join', joinpart_cb)
twitch.hook.prnt ('Part', joinpart_cb)
twitch.hook.command('join', joincmd_cb)
    twitch.hook.prnt    ('Capability Acknowledgement', capack_cb)
twitch.hook.command('w', whispercmd_cb)
|
RenaKunisaki/hexchat-twitch
|
twitch/hooks.py
|
Python
|
mit
| 11,648
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, division
from ._version import __version__, __version_info__ # noqa
from .decorators import route, resource, asynchronous
from .helpers import use
from .relationship import Relationship
__all__ = [
'route',
'resource',
'asynchronous',
'use',
'Relationship'
]
|
armet/python-armet
|
armet/__init__.py
|
Python
|
mit
| 363
|
#!/usr/bin/env python
#
# License: MIT
#
from __future__ import absolute_import, division, print_function
##############################################################################
# Imports
##############################################################################
import os
import sys
import argparse
import ros1_pytemplate
import logging.config
logging.config.dictConfig(
{
'version': 1,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(name)s:%(message)s'
},
},
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'simple',
'stream': 'ext://sys.stdout',
},
'logfile': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': 'ros1_pytemplate.log',
'maxBytes': 1024,
'backupCount': 3,
'formatter': 'verbose'
},
},
'loggers': {
'ros1_template': {
'handlers': ['logfile'],
'level': 'DEBUG',
'propagate': True,
},
'question': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
}
}
)
def show_description():
return "ros template test script"
def show_usage(cmd=None):
cmd = os.path.relpath(sys.argv[0], os.getcwd()) if cmd is None else cmd
return "{0} [-h|--help] [--version]".format(cmd)
def show_epilog():
return "never enough testing"
##############################################################################
# Main
##############################################################################
if __name__ == '__main__':
# Ref : https://docs.python.org/2/library/argparse
parser = argparse.ArgumentParser(description=show_description(),
usage=show_usage(),
epilog=show_epilog(),
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--version", action='store_true', help="display the version number and exits.")
parsed_known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
if parsed_known_args.version:
print("ROS1 pytemplate version " + ros1_pytemplate.__version__ +
"\n from " + ros1_pytemplate.__file__)
sys.exit(0)
logger = logging.getLogger("question")
answer = ros1_pytemplate.Answer(6)
logger.info(answer.retrieve())
|
pyros-dev/ros1_template
|
ros1_pytemplate/scripts/question_cli.py
|
Python
|
mit
| 2,859
|
from flask_login import UserMixin, AnonymousUserMixin
from codeta import app, auth, logger
from codeta.models.course import Course
class User(UserMixin):
    def __init__(self, user_id, username, password, email, fname, lname, active=True, courses=None):
        self.user_id = user_id
        self.username = username
        self.password = password
        self.email = email
        self.fname = fname
        self.lname = lname
        self.active = active
        # Avoid a shared mutable default; fall back to a DB lookup when no
        # course list is supplied.
        if courses is None:
            self.update_courses()
        else:
            self.courses = courses
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return unicode(self.user_id)
def __repr__(self):
return '<User %r>' % (self.username)
def get_courses(self):
return self.courses
def get_course_titles(self):
"""
Gets a list of course titles the user is enrolled in
"""
        return [c.title for c in self.courses]
def add_course(self, course):
"""
Adds a course to the list of courses
"""
self.courses.append(course)
def update_courses(self):
""" Get a new list of courses from the database """
self.courses = Course.get_courses(self.username)
def create(self):
"""
Register a user in the database
"""
pw_hash = auth.hash_password(self.password)
sql = ("""
insert into Users
(username, password, email, first_name, last_name)
values
(%s, %s, %s, %s, %s)
returning
user_id
""")
data = (
self.username,
pw_hash,
self.email,
self.fname,
self.lname,
)
user_id = app.db.exec_query(sql, data, 'commit', 'returning')
if user_id:
self.user_id = user_id
self.password = pw_hash
logger.debug("Created new user_id: %s | username: %s" % (user_id, self.username))
else:
logger.debug("Failed to create username: %s" % (username))
return user_id
def read(self):
"""
Update the User member variables with fresh data from the database
"""
sql = ("""
select
*
from
Users
where
user_id = (%s)
""")
data = (
int(self.user_id),
)
user = app.db.exec_query(sql, data, 'fetchall', 'return_dict')
if user:
user = user[0]
self.user_id = int(user['user_id'])
self.username = user['username']
self.password = user['password']
self.email = user['email']
self.fname = user['first_name']
self.lname = user['last_name']
return user
def update(self):
"""
Update the user's data in the database from member variables
"""
sql = ("""
update Users set
password = (%s),
email = (%s),
first_name = (%s),
last_name = (%s)
where
user_id = (%s)
""")
data = (
self.password,
self.email,
self.fname,
self.lname,
int(self.user_id),
)
commit = app.db.exec_query(sql, data, 'commit')
if commit:
logger.debug("Successfully updated user: %s" % (self.username))
else:
logger.debug("Failed to update user: %s" % (self.username))
return commit
@staticmethod
def auth_user(username, password):
"""
Authenticates a user and returns a User object
if the correct credentials were provided
otherwise, return None
"""
logger.debug("User: %s - Pass: %s - auth attempt. " % (username, password))
sql = ("""
select
*
from
Users
where
username = (%s)
""")
data = (
username,
)
user = app.db.exec_query(sql, data, 'fetchall', 'return_dict')
if user:
user = user[0]
if(auth.check_password(password, user['password'])):
user = User(
int(user['user_id']),
user['username'],
user['password'],
user['email'],
user['first_name'],
user['last_name'])
logger.debug("User: %s - auth success." % (username))
else:
user = None
logger.debug("User: %s - auth failure." % (username))
return user
@staticmethod
def get_user(user_id):
"""
Creates a new User object from the database
returns a User object if found, otherwise None
"""
sql = ("""
select
*
from
Users
where
user_id = (%s)
""")
data = (
int(user_id),
)
user = app.db.exec_query(sql, data, 'fetchall', 'return_dict')
if user:
user = user[0]
user = User(
int(user['user_id']),
user['username'],
user['password'],
user['email'],
user['first_name'],
user['last_name'])
return user
@staticmethod
def check_username(username):
"""
Checks to see if a username already exists in the db.
returns username if username is found, otherwise None
"""
sql = ("""
select
username
from
Users
where
username = (%s)
""")
data = (
username,
)
username = app.db.exec_query(sql, data, 'fetchall', 'return_dict')
if username:
return username[0].get('username')
else:
return None
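# Usage sketch (hypothetical credentials; auth_user returns a User on
# success and None on failure; login_user is flask_login's helper):
#
#   user = User.auth_user('alice', 's3cret')
#   if user:
#       login_user(user)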
|
CapstoneGrader/codeta
|
codeta/models/user.py
|
Python
|
mit
| 6,342
|
# Copyright (c) 2020, Manfred Moitzi
# License: MIT License
import sys
import time
from datetime import datetime
from pathlib import Path
from ezdxf.acc import USE_C_EXT
from ezdxf.render.forms import ellipse
if USE_C_EXT is False:
print("C-extension disabled or not available.")
sys.exit(1)
from ezdxf.math._construct import (
has_clockwise_orientation as py_has_clockwise_orientation,
)
from ezdxf.acc.construct import (
has_clockwise_orientation as cy_has_clockwise_orientation,
)
from ezdxf.math._construct import (
intersection_line_line_2d as py_intersection_line_line_2d,
)
from ezdxf.acc.construct import (
intersection_line_line_2d as cy_intersection_line_line_2d,
)
from ezdxf.version import __version__
from ezdxf.acc.vector import Vec2
def open_log(name: str):
parent = Path(__file__).parent
p = parent / "logs" / Path(name + ".csv")
if not p.exists():
with open(p, mode="wt") as fp:
fp.write(
'"timestamp"; "pytime"; "cytime"; '
'"python_version"; "ezdxf_version"\n'
)
log_file = open(p, mode="at")
return log_file
def log(name: str, pytime: float, cytime: float):
log_file = open_log(name)
timestamp = datetime.now().isoformat()
log_file.write(
f'{timestamp}; {pytime}; {cytime}; "{sys.version}"; "{__version__}"\n'
)
log_file.close()
def profile1(func, *args) -> float:
t0 = time.perf_counter()
func(*args)
t1 = time.perf_counter()
return t1 - t0
def profile(text, log_name, pyfunc, cyfunc, *args):
pytime = profile1(pyfunc, *args)
cytime = profile1(cyfunc, *args)
ratio = pytime / cytime
print(f"Python - {text} {pytime:.3f}s")
print(f"Cython - {text} {cytime:.3f}s")
print(f"Ratio {ratio:.1f}x")
log(log_name, pytime, cytime)
def profile_py_has_clockwise_orientation(vertices, count):
for _ in range(count):
py_has_clockwise_orientation(vertices)
def profile_cy_has_clockwise_orientation(vertices, count):
for _ in range(count):
cy_has_clockwise_orientation(vertices)
def profile_py_intersection_line_line_2d(count):
line1 = [Vec2(0, 0), Vec2(2, 0)]
line2 = [Vec2(1, -1), Vec2(1, 1)]
for _ in range(count):
py_intersection_line_line_2d(line1, line2)
def profile_cy_intersection_line_line_2d(count):
line1 = [Vec2(0, 0), Vec2(2, 0)]
line2 = [Vec2(1, -1), Vec2(1, 1)]
for _ in range(count):
cy_intersection_line_line_2d(line1, line2)
def profile_py_no_intersection_line_line_2d(count):
line1 = [Vec2(0, 0), Vec2(2, 0)]
line2 = [Vec2(0, 1), Vec2(2, 1)]
for _ in range(count):
py_intersection_line_line_2d(line1, line2)
def profile_cy_no_intersection_line_line_2d(count):
line1 = [Vec2(0, 0), Vec2(2, 0)]
line2 = [Vec2(0, 1), Vec2(2, 1)]
for _ in range(count):
cy_intersection_line_line_2d(line1, line2)
RUNS = 100_000
ellipse_vertices = list(ellipse(count=100, rx=10, ry=5))
print(f"Profiling 2D construction tools as Python and Cython implementations:")
profile(
f"detect {RUNS}x clockwise orientation of {len(ellipse_vertices)} vertices:",
"c2d_has_clockwise_orientation",
profile_py_has_clockwise_orientation,
profile_cy_has_clockwise_orientation,
ellipse_vertices,
RUNS,
)
profile(
f"detect {RUNS}x real 2D line intersections:",
"c2d_intersection_line_line_2d",
profile_py_intersection_line_line_2d,
profile_cy_intersection_line_line_2d,
RUNS,
)
profile(
f"detect {RUNS}x no 2D line intersections:",
"c2d_no_intersection_line_line_2d",
profile_py_no_intersection_line_line_2d,
profile_cy_no_intersection_line_line_2d,
RUNS,
)
|
mozman/ezdxf
|
profiling/construct.py
|
Python
|
mit
| 3,719
|
# A program that has a list of six colors and chooses one at random. The user then has three chances to guess the right color. After the third attempt the program outputs "Nope. The color I was thinking of was..."
import random
# this is the function that will execute the program
def program():
# These are the constants declaring what the colors are.
RED = 'red'
BLUE = 'blue'
GREEN = 'green'
ORANGE = 'orange'
PURPLE = 'purple'
PINK = 'pink'
class Color:
pass
c1 = Color()
c2 = Color()
c3 = Color()
guesses_made = 0
# This input causes the program to refer to you as your name.
c1.name = input('Hello! What is your name?\n')
c2.color = [BLUE, GREEN, RED, ORANGE, PURPLE, PINK]
# This randomizes what color is chosen
c2.color = random.choice(c2.color)
print ('Well, {0}, I am thinking of a color between blue, green, red, orange, purple and pink.'.format(c1.name))
while guesses_made < 3:
c3.guess = input('Take a guess: ')
guesses_made += 1
if c3.guess != c2.color:
print ('Your guess is wrong.')
if c3.guess == c2.color:
break
if c3.guess == c2.color:
print ('Good job, {0}! You guessed my color in {1} guesses!'.format(c1.name, guesses_made))
else:
print ('Nope. The color I was thinking of was {0}'.format(c2.color))
if __name__ == "__main__":
program()
|
starnes/Python
|
guessnameclass.py
|
Python
|
mit
| 1,452
|
import os
from pi3bar.plugins.base import Plugin
from pi3bar.utils import humanize_size_bytes
class Disk(Plugin):
"""
    :class:`pi3bar.app.Pi3Bar` plugin to show disk usage.
Available format replacements (``*_p`` = percentage):
* ``%(size)s`` E.g. '100GB'
* ``%(free)s`` E.g. '70GB'
* ``%(free_p)f`` E.g. 70.0
* ``%(available)s`` E.g. '65GB'
* ``%(available_p)f`` E.g. 65.0
* ``%(usage)s`` E.g. '30GB'
* ``%(usage_p)f`` E.g. 30.0
:param full_format: :class:`str` - Format string (default: '%(usage_p).2f%% (%(size)s)')
:param short_format: :class:`str` - Short format string (default: '%(usage_p).2f%%')
:param warning_usage: :class:`int` - Warning breakpoint (default: 90)
:param warning_color: :class:`str` - Warning color (default: '#ffff00')
:param warning_background: :class:`str` - Warning background color (default: None)
:param critical_usage: :class:`int` - Critical breakpoint (default: 95)
:param critical_color: :class:`str` - Critical color (default: None)
:param critical_background: :class:`str` - Critical background color (default: '#ff0000')
Examples:
.. code-block:: python
# root
Disk('/')
# custom format (escape '%' with '%')
Disk('/', full_format='%(usage)s / %(size)s', short_format='%(free_p)f%%')
# adjust warning/critical switches
Disk('/mnt', warning_usage=80, critical_usage=90)
"""
def __init__(self, mount_path, **kwargs):
self.instance = mount_path
self.mount_path = mount_path
self.full_format = kwargs.pop('full_format', '%(usage_p).2f%% (%(size)s)')
self.short_format = kwargs.pop('short_format', '%(usage_p).2f%%')
self.warning_usage = kwargs.pop('warning_usage', 90)
self.warning_color = kwargs.pop('warning_color', '#ffff00')
self.warning_background = kwargs.pop('warning_background', None)
self.critical_usage = kwargs.pop('critical_usage', 95)
self.critical_color = kwargs.pop('critical_color', None)
self.critical_background = kwargs.pop('critical_background', '#ff0000')
super(Disk, self).__init__(**kwargs)
def get_stats(self):
statvfs = os.statvfs(self.mount_path)
size_bytes = statvfs.f_frsize * statvfs.f_blocks
free_bytes = statvfs.f_frsize * statvfs.f_bfree # with reserved space
free_percent = 100.0 / size_bytes * free_bytes
available_bytes = statvfs.f_frsize * statvfs.f_bavail # without reserved space
available_percent = 100.0 / size_bytes * available_bytes
usage_bytes = size_bytes - free_bytes
usage_percent = 100.0 / size_bytes * usage_bytes
return {
'size': humanize_size_bytes(size_bytes), # 100GB
'free': humanize_size_bytes(free_bytes), # 70GB
'available': humanize_size_bytes(available_bytes), # 65GB
'usage': humanize_size_bytes(usage_bytes), # 30GB
'free_p': free_percent, # 70.0
'available_p': available_percent, # 65.0
'usage_p': usage_percent, # 30.0
}
def cycle(self):
stats = self.get_stats()
prefix = '%s ' % self.mount_path
self.full_text = prefix + self.full_format % stats
self.short_text = prefix + self.short_format % stats
if float(stats['usage_p']) > self.critical_usage:
self.color = self.critical_color
self.background = self.critical_background
elif float(stats['usage_p']) > self.warning_usage:
self.color = self.warning_color
self.background = self.warning_background
else:
self.color = None
self.background = None
|
knoppo/pi3bar
|
pi3bar/plugins/disk.py
|
Python
|
mit
| 3,744
|
"""Process `site.json` and bower package tools."""
import os
import json
import subprocess
from functools import partial
import importlib
import sys
from flask import Flask, render_template, g, redirect, current_app
from gitloader import git_show
from import_code import import_code
try:
from app import app
except ImportError:
from deckmaster.app import app
sys.path.append('.')
component_dir = 'static/components'
bower_str = 'bower install --config.directory="%s" %s > /dev/null'
def get_pkg_dir(package):
"""Join the component and package directory."""
return os.path.join(component_dir, package)
def get_pkg_main(package):
"""Check `package.json` then `bower.json` for the main included file."""
pkg = json.load(
open(os.path.join(get_pkg_dir(package), 'bower.json'))
)
if isinstance(pkg['main'],list):
return [os.path.join(get_pkg_dir(package), p) for p in pkg['main']]
else:
return os.path.join(get_pkg_dir(package), pkg['main'])
def check_pkg(package):
"""CHeck if the package exists, if not use bower to install."""
if not os.path.exists(os.path.join(component_dir, package)):
subprocess.call(
bower_str % (component_dir, package),
shell = True
)
return True
def script_or_style(path):
if path.endswith('js'):
return 'script'
elif path.endswith('css'):
return 'style'
else:
print "Script or style? " + path
def process_bower(deps):
retval = {'styles':[], 'scripts':[]}
try:
for pkg in deps['bower']:
check_pkg(pkg)
main = get_pkg_main(pkg)
            if isinstance(main, list):
                # Collect every asset; the old dict assignment kept only the
                # last script/style per package. Unknown extensions are skipped.
                pkgassets = {'scripts': [], 'styles': []}
                for path in reversed(main):
                    kind = script_or_style(path)
                    if kind:
                        pkgassets[kind + 's'].append(path)
                retval['scripts'] += pkgassets['scripts']
                retval['styles'] += pkgassets['styles']
            else:
                retval[script_or_style(main) + 's'].append(main)
except KeyError:
pass
return retval
def process_local(deps):
retval = {'styles':[], 'scripts':[]}
try:
for path in deps['local']:
retval[script_or_style(path)+'s'].append(path)
except KeyError:
pass
return retval
def process_deps(deps):
"""Process script element in the config for local vs bower components."""
local, bower = process_local(deps), process_bower(deps)
retval = {}
for tag in local:
retval[tag] = local[tag] + bower[tag]
return retval
def process_route(route):
if not route.get('view'):
def route_handler(revid = None, path = None):
g.revid = revid
try:
return render_template(
'html/base.html', **process_deps(route['deps'])
)
except AttributeError:
return 'Not Found', 404
return route_handler
mname, fname = route['view'].rsplit('.', 1)
module = importlib.import_module(mname)
viewfunc = getattr(module, fname)
def route_handler(revid = None, path = None):
if revid is not None:
codestr = git_show('./views.py', revid)
mod = import_code(codestr, mname)
return getattr(mod,fname)()
return viewfunc()
return route_handler
def lazy_router(revid, path = None):
g.revid = revid
if path is None:
path = ''
if not path.startswith('/'):
path = '/' + path
cfgstr = git_show('./site.json', revid)
try:
return process_route(json.loads(cfgstr)[path])(revid, path)
except KeyError:
print cfgstr
def process_site(site = None, revid = None):
"""Process `site.json` based on the config and CLI options."""
if site is None:
try:
site = json.load(open('site.json'))
except IOError:
return []
if 'deps' in site:
return [
('/', 'index', process_route(site)),
('/<revid>/', 'index_revid', process_route(site)),
]
retval = [
('/favicon.ico', 'favicon', lambda: ''),
('/<revid>/', 'revid_lazy_index', lazy_router),
('/<revid>/<path:path>', 'revid_lazy', lazy_router),
]
for rt in site:
retval.append((rt, 'index' if rt=='/' else rt, process_route(site[rt])))
return retval
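# Shape of site.json as consumed above (reconstructed from this code; keys
# and values are illustrative):
#
#   {
#       "/": {
#           "deps": {"bower": ["jquery"], "local": ["static/app.js"]},
#           "view": "views.index"
#       }
#   }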
|
cacahootie/deckmaster
|
deckmaster/app/process_site.py
|
Python
|
mit
| 4,475
|
#!env/bin/python
from app import app
import sys
port = 5000
debug = True
if len(sys.argv) == 3:
debug = sys.argv[1] == 'debug'
port = int(sys.argv[2])
app.run(debug = debug, port = port)
|
BamX/dota2-matches-statistic
|
run.py
|
Python
|
mit
| 191
|
#!/usr/bin/env python3
"""
Find characters deep in the expanded string, for fun.
"""
import sys
from collections import Counter
def real_step(s, rules):
out = ""
for i in range(len(s)):
out += s[i]
k = s[i:i+2]
if k in rules:
out += rules[k]
return out
def step(cnt, rules):
ncnt = Counter()
for k, v in cnt.items():
if k in rules:
c = rules[k]
ncnt[k[0] + c] += v
ncnt[c + k[1]] += v
else:
ncnt[k] += v
return ncnt
def size(s, n, rules):
cnt = Counter(s[i:i+2] for i in range(len(s)-1))
for _ in range(n):
cnt = step(cnt, rules)
lcnt = Counter(s[0])
for k, v in cnt.items():
lcnt[k[1]] += v
return sum(lcnt.values())
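# e.g. size("NNCB", 0, rules) == 4: the pair counts {NN, NC, CB} plus the
# first character count every character exactly once.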
def get_char(s, idx, iters, rules):
for i in range(iters):
h = len(s) // 2
first = s[:h+1]
sz = size(first, iters - i, rules)
if idx < sz:
s = real_step(first, rules)
else:
s = real_step(s[h:], rules)
idx -= sz - 1
return s[idx]
def main(args):
data = [s.strip() for s in sys.stdin]
s = data[0]
rules = dict(x.split(" -> ") for x in data[2:])
# Make sure it works
t = s
for i in range(4):
t = real_step(t, rules)
for idx in range(len(t)):
c = get_char(s, idx, 4, rules)
assert t[idx] == c
# find some random characters deep into it
print(size(s, 40, rules))
start = 7311752324710
out = ""
for i in range(10):
out += get_char(s, start + i, 40, rules)
print(out)
if __name__ == '__main__':
main(sys.argv)
|
msullivan/advent-of-code
|
2021/14balt.py
|
Python
|
mit
| 1,674
|
from raptiformica.utils import list_all_files_in_directory
from tests.testcase import TestCase
class TestListAllFilesInDirectory(TestCase):
def setUp(self):
self.walk = self.set_up_patch('raptiformica.utils.walk')
self.walk.return_value = [
('/tmp/a/directory', ['dir'], ['file.txt', 'file2.txt']),
('/tmp/a/directory/dir', ['dir2'], ['file3.txt']),
('/tmp/a/directory/dir/dir2', [], ['file5.txt', 'file4.txt'])
]
def test_list_all_files_in_directory_lists_all_files_in_directory_walks_path(self):
list_all_files_in_directory('/tmp/a/directory')
self.walk.assert_called_once_with('/tmp/a/directory')
def test_list_all_files_in_directory_returns_all_files(self):
ret = list_all_files_in_directory('/tmp/a/directory')
expected_list = [
'/tmp/a/directory/file.txt',
'/tmp/a/directory/file2.txt',
'/tmp/a/directory/dir/file3.txt',
'/tmp/a/directory/dir/dir2/file4.txt',
'/tmp/a/directory/dir/dir2/file5.txt'
]
self.assertCountEqual(ret, expected_list)
|
vdloo/raptiformica
|
tests/unit/raptiformica/utils/test_list_all_files_in_directory.py
|
Python
|
mit
| 1,134
|
import threading, time
from sqlalchemy import pool, interfaces, select, event
import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing.util import gc_collect, lazy_gc
from sqlalchemy.testing import eq_, assert_raises
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing import fixtures
mcid = 1
class MockDBAPI(object):
throw_error = False
def connect(self, *args, **kwargs):
if self.throw_error:
raise Exception("couldnt connect !")
delay = kwargs.pop('delay', 0)
if delay:
time.sleep(delay)
return MockConnection()
class MockConnection(object):
closed = False
def __init__(self):
global mcid
self.id = mcid
mcid += 1
def close(self):
self.closed = True
def rollback(self):
pass
def cursor(self):
return MockCursor()
class MockCursor(object):
def execute(self, *args, **kw):
pass
def close(self):
pass
class PoolTestBase(fixtures.TestBase):
def setup(self):
pool.clear_managers()
@classmethod
def teardown_class(cls):
pool.clear_managers()
def _queuepool_fixture(self, **kw):
dbapi, pool = self._queuepool_dbapi_fixture(**kw)
return pool
def _queuepool_dbapi_fixture(self, **kw):
dbapi = MockDBAPI()
return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
**kw)
class PoolTest(PoolTestBase):
def test_manager(self):
manager = pool.manage(MockDBAPI(), use_threadlocal=True)
c1 = manager.connect('foo.db')
c2 = manager.connect('foo.db')
c3 = manager.connect('bar.db')
c4 = manager.connect("foo.db", bar="bat")
c5 = manager.connect("foo.db", bar="hoho")
c6 = manager.connect("foo.db", bar="bat")
assert c1.cursor() is not None
assert c1 is c2
assert c1 is not c3
assert c4 is c6
assert c4 is not c5
def test_manager_with_key(self):
class NoKws(object):
def connect(self, arg):
return MockConnection()
manager = pool.manage(NoKws(), use_threadlocal=True)
c1 = manager.connect('foo.db', sa_pool_key="a")
c2 = manager.connect('foo.db', sa_pool_key="b")
c3 = manager.connect('bar.db', sa_pool_key="a")
assert c1.cursor() is not None
assert c1 is not c2
assert c1 is c3
def test_bad_args(self):
manager = pool.manage(MockDBAPI())
connection = manager.connect(None)
def test_non_thread_local_manager(self):
manager = pool.manage(MockDBAPI(), use_threadlocal = False)
connection = manager.connect('foo.db')
connection2 = manager.connect('foo.db')
self.assert_(connection.cursor() is not None)
self.assert_(connection is not connection2)
@testing.fails_on('+pyodbc',
"pyodbc cursor doesn't implement tuple __eq__")
def test_cursor_iterable(self):
conn = testing.db.raw_connection()
cursor = conn.cursor()
cursor.execute(str(select([1], bind=testing.db)))
expected = [(1, )]
for row in cursor:
eq_(row, expected.pop(0))
def test_no_connect_on_recreate(self):
def creator():
raise Exception("no creates allowed")
for cls in (pool.SingletonThreadPool, pool.StaticPool,
pool.QueuePool, pool.NullPool, pool.AssertionPool):
p = cls(creator=creator)
p.dispose()
p2 = p.recreate()
assert p2.__class__ is cls
mock_dbapi = MockDBAPI()
p = cls(creator=mock_dbapi.connect)
conn = p.connect()
conn.close()
mock_dbapi.throw_error = True
p.dispose()
p.recreate()
def testthreadlocal_del(self):
self._do_testthreadlocal(useclose=False)
def testthreadlocal_close(self):
self._do_testthreadlocal(useclose=True)
def _do_testthreadlocal(self, useclose=False):
dbapi = MockDBAPI()
for p in pool.QueuePool(creator=dbapi.connect,
pool_size=3, max_overflow=-1,
use_threadlocal=True), \
pool.SingletonThreadPool(creator=dbapi.connect,
use_threadlocal=True):
c1 = p.connect()
c2 = p.connect()
self.assert_(c1 is c2)
c3 = p.unique_connection()
self.assert_(c3 is not c1)
if useclose:
c2.close()
else:
c2 = None
c2 = p.connect()
self.assert_(c1 is c2)
self.assert_(c3 is not c1)
if useclose:
c2.close()
else:
c2 = None
lazy_gc()
if useclose:
c1 = p.connect()
c2 = p.connect()
c3 = p.connect()
c3.close()
c2.close()
self.assert_(c1.connection is not None)
c1.close()
c1 = c2 = c3 = None
# extra tests with QueuePool to ensure connections get
# __del__()ed when dereferenced
if isinstance(p, pool.QueuePool):
lazy_gc()
self.assert_(p.checkedout() == 0)
c1 = p.connect()
c2 = p.connect()
if useclose:
c2.close()
c1.close()
else:
c2 = None
c1 = None
lazy_gc()
self.assert_(p.checkedout() == 0)
def test_info(self):
p = self._queuepool_fixture(pool_size=1, max_overflow=0)
c = p.connect()
self.assert_(not c.info)
self.assert_(c.info is c._connection_record.info)
c.info['foo'] = 'bar'
c.close()
del c
c = p.connect()
self.assert_('foo' in c.info)
c.invalidate()
c = p.connect()
self.assert_('foo' not in c.info)
c.info['foo2'] = 'bar2'
c.detach()
self.assert_('foo2' in c.info)
c2 = p.connect()
self.assert_(c.connection is not c2.connection)
self.assert_(not c2.info)
self.assert_('foo2' in c.info)
class PoolDialectTest(PoolTestBase):
def _dialect(self):
canary = []
class PoolDialect(object):
def do_rollback(self, dbapi_connection):
canary.append('R')
dbapi_connection.rollback()
def do_commit(self, dbapi_connection):
canary.append('C')
dbapi_connection.commit()
def do_close(self, dbapi_connection):
canary.append('CL')
dbapi_connection.close()
return PoolDialect(), canary
def _do_test(self, pool_cls, assertion):
mock_dbapi = MockDBAPI()
dialect, canary = self._dialect()
p = pool_cls(creator=mock_dbapi.connect)
p._dialect = dialect
conn = p.connect()
conn.close()
p.dispose()
p.recreate()
conn = p.connect()
conn.close()
eq_(canary, assertion)
def test_queue_pool(self):
self._do_test(pool.QueuePool, ['R', 'CL', 'R'])
def test_assertion_pool(self):
self._do_test(pool.AssertionPool, ['R', 'CL', 'R'])
def test_singleton_pool(self):
self._do_test(pool.SingletonThreadPool, ['R', 'CL', 'R'])
def test_null_pool(self):
self._do_test(pool.NullPool, ['R', 'CL', 'R', 'CL'])
def test_static_pool(self):
self._do_test(pool.StaticPool, ['R', 'R'])
class PoolEventsTest(PoolTestBase):
def _first_connect_event_fixture(self):
p = self._queuepool_fixture()
canary = []
def first_connect(*arg, **kw):
canary.append('first_connect')
event.listen(p, 'first_connect', first_connect)
return p, canary
def _connect_event_fixture(self):
p = self._queuepool_fixture()
canary = []
def connect(*arg, **kw):
canary.append('connect')
event.listen(p, 'connect', connect)
return p, canary
def _checkout_event_fixture(self):
p = self._queuepool_fixture()
canary = []
def checkout(*arg, **kw):
canary.append('checkout')
event.listen(p, 'checkout', checkout)
return p, canary
def _checkin_event_fixture(self):
p = self._queuepool_fixture()
canary = []
def checkin(*arg, **kw):
canary.append('checkin')
event.listen(p, 'checkin', checkin)
return p, canary
def _reset_event_fixture(self):
p = self._queuepool_fixture()
canary = []
def reset(*arg, **kw):
canary.append('reset')
event.listen(p, 'reset', reset)
return p, canary
def test_first_connect_event(self):
p, canary = self._first_connect_event_fixture()
c1 = p.connect()
eq_(canary, ['first_connect'])
def test_first_connect_event_fires_once(self):
p, canary = self._first_connect_event_fixture()
c1 = p.connect()
c2 = p.connect()
eq_(canary, ['first_connect'])
def test_first_connect_on_previously_recreated(self):
p, canary = self._first_connect_event_fixture()
p2 = p.recreate()
c1 = p.connect()
c2 = p2.connect()
eq_(canary, ['first_connect', 'first_connect'])
def test_first_connect_on_subsequently_recreated(self):
p, canary = self._first_connect_event_fixture()
c1 = p.connect()
p2 = p.recreate()
c2 = p2.connect()
eq_(canary, ['first_connect', 'first_connect'])
def test_connect_event(self):
p, canary = self._connect_event_fixture()
c1 = p.connect()
eq_(canary, ['connect'])
def test_connect_event_fires_subsequent(self):
p, canary = self._connect_event_fixture()
c1 = p.connect()
c2 = p.connect()
eq_(canary, ['connect', 'connect'])
def test_connect_on_previously_recreated(self):
p, canary = self._connect_event_fixture()
p2 = p.recreate()
c1 = p.connect()
c2 = p2.connect()
eq_(canary, ['connect', 'connect'])
def test_connect_on_subsequently_recreated(self):
p, canary = self._connect_event_fixture()
c1 = p.connect()
p2 = p.recreate()
c2 = p2.connect()
eq_(canary, ['connect', 'connect'])
def test_checkout_event(self):
p, canary = self._checkout_event_fixture()
c1 = p.connect()
eq_(canary, ['checkout'])
def test_checkout_event_fires_subsequent(self):
p, canary = self._checkout_event_fixture()
c1 = p.connect()
c2 = p.connect()
eq_(canary, ['checkout', 'checkout'])
def test_checkout_event_on_subsequently_recreated(self):
p, canary = self._checkout_event_fixture()
c1 = p.connect()
p2 = p.recreate()
c2 = p2.connect()
eq_(canary, ['checkout', 'checkout'])
def test_checkin_event(self):
p, canary = self._checkin_event_fixture()
c1 = p.connect()
eq_(canary, [])
c1.close()
eq_(canary, ['checkin'])
def test_reset_event(self):
p, canary = self._reset_event_fixture()
c1 = p.connect()
eq_(canary, [])
c1.close()
eq_(canary, ['reset'])
def test_checkin_event_gc(self):
p, canary = self._checkin_event_fixture()
c1 = p.connect()
eq_(canary, [])
del c1
lazy_gc()
eq_(canary, ['checkin'])
def test_checkin_event_on_subsequently_recreated(self):
p, canary = self._checkin_event_fixture()
c1 = p.connect()
p2 = p.recreate()
c2 = p2.connect()
eq_(canary, [])
c1.close()
eq_(canary, ['checkin'])
c2.close()
eq_(canary, ['checkin', 'checkin'])
def test_listen_targets_scope(self):
canary = []
def listen_one(*args):
canary.append("listen_one")
def listen_two(*args):
canary.append("listen_two")
def listen_three(*args):
canary.append("listen_three")
def listen_four(*args):
canary.append("listen_four")
engine = testing_engine(testing.db.url)
event.listen(pool.Pool, 'connect', listen_one)
event.listen(engine.pool, 'connect', listen_two)
event.listen(engine, 'connect', listen_three)
event.listen(engine.__class__, 'connect', listen_four)
engine.execute(select([1])).close()
eq_(
canary, ["listen_one","listen_four", "listen_two","listen_three"]
)
def test_listen_targets_per_subclass(self):
"""test that listen() called on a subclass remains specific to that subclass."""
canary = []
def listen_one(*args):
canary.append("listen_one")
def listen_two(*args):
canary.append("listen_two")
def listen_three(*args):
canary.append("listen_three")
event.listen(pool.Pool, 'connect', listen_one)
event.listen(pool.QueuePool, 'connect', listen_two)
event.listen(pool.SingletonThreadPool, 'connect', listen_three)
p1 = pool.QueuePool(creator=MockDBAPI().connect)
p2 = pool.SingletonThreadPool(creator=MockDBAPI().connect)
assert listen_one in p1.dispatch.connect
assert listen_two in p1.dispatch.connect
assert listen_three not in p1.dispatch.connect
assert listen_one in p2.dispatch.connect
assert listen_two not in p2.dispatch.connect
assert listen_three in p2.dispatch.connect
p1.connect()
eq_(canary, ["listen_one", "listen_two"])
p2.connect()
eq_(canary, ["listen_one", "listen_two", "listen_one", "listen_three"])
def teardown(self):
# TODO: need to get remove() functionality
# going
pool.Pool.dispatch._clear()
class DeprecatedPoolListenerTest(PoolTestBase):
@testing.requires.predictable_gc
@testing.uses_deprecated(r".*Use event.listen")
def test_listeners(self):
class InstrumentingListener(object):
def __init__(self):
if hasattr(self, 'connect'):
self.connect = self.inst_connect
if hasattr(self, 'first_connect'):
self.first_connect = self.inst_first_connect
if hasattr(self, 'checkout'):
self.checkout = self.inst_checkout
if hasattr(self, 'checkin'):
self.checkin = self.inst_checkin
self.clear()
def clear(self):
self.connected = []
self.first_connected = []
self.checked_out = []
self.checked_in = []
def assert_total(innerself, conn, fconn, cout, cin):
eq_(len(innerself.connected), conn)
eq_(len(innerself.first_connected), fconn)
eq_(len(innerself.checked_out), cout)
eq_(len(innerself.checked_in), cin)
def assert_in(innerself, item, in_conn, in_fconn,
in_cout, in_cin):
self.assert_((item in innerself.connected) == in_conn)
self.assert_((item in innerself.first_connected) == in_fconn)
self.assert_((item in innerself.checked_out) == in_cout)
self.assert_((item in innerself.checked_in) == in_cin)
def inst_connect(self, con, record):
print "connect(%s, %s)" % (con, record)
assert con is not None
assert record is not None
self.connected.append(con)
def inst_first_connect(self, con, record):
print "first_connect(%s, %s)" % (con, record)
assert con is not None
assert record is not None
self.first_connected.append(con)
def inst_checkout(self, con, record, proxy):
print "checkout(%s, %s, %s)" % (con, record, proxy)
assert con is not None
assert record is not None
assert proxy is not None
self.checked_out.append(con)
def inst_checkin(self, con, record):
print "checkin(%s, %s)" % (con, record)
# con can be None if invalidated
assert record is not None
self.checked_in.append(con)
class ListenAll(tsa.interfaces.PoolListener, InstrumentingListener):
pass
class ListenConnect(InstrumentingListener):
def connect(self, con, record):
pass
class ListenFirstConnect(InstrumentingListener):
def first_connect(self, con, record):
pass
class ListenCheckOut(InstrumentingListener):
def checkout(self, con, record, proxy, num):
pass
class ListenCheckIn(InstrumentingListener):
def checkin(self, con, record):
pass
def assert_listeners(p, total, conn, fconn, cout, cin):
for instance in (p, p.recreate()):
self.assert_(len(instance.dispatch.connect) == conn)
self.assert_(len(instance.dispatch.first_connect) == fconn)
self.assert_(len(instance.dispatch.checkout) == cout)
self.assert_(len(instance.dispatch.checkin) == cin)
p = self._queuepool_fixture()
assert_listeners(p, 0, 0, 0, 0, 0)
p.add_listener(ListenAll())
assert_listeners(p, 1, 1, 1, 1, 1)
p.add_listener(ListenConnect())
assert_listeners(p, 2, 2, 1, 1, 1)
p.add_listener(ListenFirstConnect())
assert_listeners(p, 3, 2, 2, 1, 1)
p.add_listener(ListenCheckOut())
assert_listeners(p, 4, 2, 2, 2, 1)
p.add_listener(ListenCheckIn())
assert_listeners(p, 5, 2, 2, 2, 2)
del p
snoop = ListenAll()
p = self._queuepool_fixture(listeners=[snoop])
assert_listeners(p, 1, 1, 1, 1, 1)
c = p.connect()
snoop.assert_total(1, 1, 1, 0)
cc = c.connection
snoop.assert_in(cc, True, True, True, False)
c.close()
snoop.assert_in(cc, True, True, True, True)
del c, cc
snoop.clear()
# this one depends on immediate gc
c = p.connect()
cc = c.connection
snoop.assert_in(cc, False, False, True, False)
snoop.assert_total(0, 0, 1, 0)
del c, cc
lazy_gc()
snoop.assert_total(0, 0, 1, 1)
p.dispose()
snoop.clear()
c = p.connect()
c.close()
c = p.connect()
snoop.assert_total(1, 0, 2, 1)
c.close()
snoop.assert_total(1, 0, 2, 2)
# invalidation
p.dispose()
snoop.clear()
c = p.connect()
snoop.assert_total(1, 0, 1, 0)
c.invalidate()
snoop.assert_total(1, 0, 1, 1)
c.close()
snoop.assert_total(1, 0, 1, 1)
del c
lazy_gc()
snoop.assert_total(1, 0, 1, 1)
c = p.connect()
snoop.assert_total(2, 0, 2, 1)
c.close()
del c
lazy_gc()
snoop.assert_total(2, 0, 2, 2)
# detached
p.dispose()
snoop.clear()
c = p.connect()
snoop.assert_total(1, 0, 1, 0)
c.detach()
snoop.assert_total(1, 0, 1, 0)
c.close()
del c
snoop.assert_total(1, 0, 1, 0)
c = p.connect()
snoop.assert_total(2, 0, 2, 0)
c.close()
del c
snoop.assert_total(2, 0, 2, 1)
# recreated
p = p.recreate()
snoop.clear()
c = p.connect()
snoop.assert_total(1, 1, 1, 0)
c.close()
snoop.assert_total(1, 1, 1, 1)
c = p.connect()
snoop.assert_total(1, 1, 2, 1)
c.close()
snoop.assert_total(1, 1, 2, 2)
@testing.uses_deprecated(r".*Use event.listen")
def test_listeners_callables(self):
def connect(dbapi_con, con_record):
counts[0] += 1
def checkout(dbapi_con, con_record, con_proxy):
counts[1] += 1
def checkin(dbapi_con, con_record):
counts[2] += 1
i_all = dict(connect=connect, checkout=checkout, checkin=checkin)
i_connect = dict(connect=connect)
i_checkout = dict(checkout=checkout)
i_checkin = dict(checkin=checkin)
for cls in (pool.QueuePool, pool.StaticPool):
counts = [0, 0, 0]
def assert_listeners(p, total, conn, cout, cin):
for instance in (p, p.recreate()):
eq_(len(instance.dispatch.connect), conn)
eq_(len(instance.dispatch.checkout), cout)
eq_(len(instance.dispatch.checkin), cin)
p = self._queuepool_fixture()
assert_listeners(p, 0, 0, 0, 0)
p.add_listener(i_all)
assert_listeners(p, 1, 1, 1, 1)
p.add_listener(i_connect)
assert_listeners(p, 2, 1, 1, 1)
p.add_listener(i_checkout)
assert_listeners(p, 3, 1, 1, 1)
p.add_listener(i_checkin)
assert_listeners(p, 4, 1, 1, 1)
del p
p = self._queuepool_fixture(listeners=[i_all])
assert_listeners(p, 1, 1, 1, 1)
c = p.connect()
assert counts == [1, 1, 0]
c.close()
assert counts == [1, 1, 1]
c = p.connect()
assert counts == [1, 2, 1]
p.add_listener(i_checkin)
c.close()
assert counts == [1, 2, 2]
class QueuePoolTest(PoolTestBase):
def testqueuepool_del(self):
self._do_testqueuepool(useclose=False)
def testqueuepool_close(self):
self._do_testqueuepool(useclose=True)
def _do_testqueuepool(self, useclose=False):
p = self._queuepool_fixture(pool_size=3,
max_overflow=-1)
def status(pool):
tup = pool.size(), pool.checkedin(), pool.overflow(), \
pool.checkedout()
            print('Pool size: %d Connections in pool: %d Current '
                  'Overflow: %d Current Checked out connections: %d' % tup)
return tup
c1 = p.connect()
self.assert_(status(p) == (3, 0, -2, 1))
c2 = p.connect()
self.assert_(status(p) == (3, 0, -1, 2))
c3 = p.connect()
self.assert_(status(p) == (3, 0, 0, 3))
c4 = p.connect()
self.assert_(status(p) == (3, 0, 1, 4))
c5 = p.connect()
self.assert_(status(p) == (3, 0, 2, 5))
c6 = p.connect()
self.assert_(status(p) == (3, 0, 3, 6))
if useclose:
c4.close()
c3.close()
c2.close()
else:
c4 = c3 = c2 = None
lazy_gc()
self.assert_(status(p) == (3, 3, 3, 3))
if useclose:
c1.close()
c5.close()
c6.close()
else:
c1 = c5 = c6 = None
lazy_gc()
self.assert_(status(p) == (3, 3, 0, 0))
c1 = p.connect()
c2 = p.connect()
self.assert_(status(p) == (3, 1, 0, 2), status(p))
if useclose:
c2.close()
else:
c2 = None
lazy_gc()
self.assert_(status(p) == (3, 2, 0, 1))
c1.close()
lazy_gc()
assert not pool._refs
def test_timeout(self):
p = self._queuepool_fixture(pool_size=3,
max_overflow=0,
timeout=2)
c1 = p.connect()
c2 = p.connect()
c3 = p.connect()
now = time.time()
try:
c4 = p.connect()
assert False
        except tsa.exc.TimeoutError:
            assert int(time.time() - now) == 2
def test_timeout_race(self):
        # Test a race condition where the initial connecting threads all race
        # to queue.Empty, then block on the mutex; each thread consumes a
        # connection as it goes in. When the limit is reached, the remaining
        # threads go in and get TimeoutError, even though they never got to
        # wait for the timeout on queue.get(). The fix involves re-checking
        # the timeout while holding the mutex and, if it has not actually
        # elapsed, unlocking and sending the thread back to the start of
        # do_get().
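        # Editor's illustration (hedged pseudocode, not SQLAlchemy's actual
        # implementation) of that re-check:
        #
        #   except Empty:
        #       with mutex:
        #           if time.time() - start < timeout:
        #               continue     # never really waited; retry do_get()
        #           raise TimeoutError()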
dbapi = MockDBAPI()
p = pool.QueuePool(
creator = lambda: dbapi.connect(delay=.05),
pool_size = 2,
max_overflow = 1, use_threadlocal = False, timeout=3)
timeouts = []
def checkout():
            for x in range(1):
now = time.time()
try:
c1 = p.connect()
            except tsa.exc.TimeoutError:
                timeouts.append(time.time() - now)
continue
time.sleep(4)
c1.close()
threads = []
        for i in range(10):
th = threading.Thread(target=checkout)
th.start()
threads.append(th)
for th in threads:
th.join()
assert len(timeouts) > 0
for t in timeouts:
assert t >= 3, "Not all timeouts were >= 3 seconds %r" % timeouts
        # normally, the timeout should be under 4 seconds,
# but on a loaded down buildbot it can go up.
assert t < 10, "Not all timeouts were < 10 seconds %r" % timeouts
def _test_overflow(self, thread_count, max_overflow):
gc_collect()
dbapi = MockDBAPI()
def creator():
time.sleep(.05)
return dbapi.connect()
p = pool.QueuePool(creator=creator,
pool_size=3, timeout=2,
max_overflow=max_overflow)
peaks = []
def whammy():
for i in range(10):
try:
con = p.connect()
time.sleep(.005)
peaks.append(p.overflow())
con.close()
del con
except tsa.exc.TimeoutError:
pass
threads = []
        for i in range(thread_count):
th = threading.Thread(target=whammy)
th.start()
threads.append(th)
for th in threads:
th.join()
self.assert_(max(peaks) <= max_overflow)
lazy_gc()
assert not pool._refs
def test_waiters_handled(self):
"""test that threads waiting for connections are
handled when the pool is replaced.
"""
dbapi = MockDBAPI()
def creator():
return dbapi.connect()
success = []
for timeout in (None, 30):
for max_overflow in (0, -1, 3):
p = pool.QueuePool(creator=creator,
pool_size=2, timeout=timeout,
max_overflow=max_overflow)
def waiter(p):
conn = p.connect()
time.sleep(.5)
success.append(True)
conn.close()
time.sleep(.2)
c1 = p.connect()
c2 = p.connect()
for i in range(2):
t = threading.Thread(target=waiter, args=(p, ))
                t.daemon = True  # so the tests don't hang if this fails
t.start()
c1.invalidate()
c2.invalidate()
p2 = p._replace()
time.sleep(2)
eq_(len(success), 12)
@testing.requires.python26
def test_notify_waiters(self):
dbapi = MockDBAPI()
canary = []
def creator1():
canary.append(1)
return dbapi.connect()
def creator2():
canary.append(2)
return dbapi.connect()
p1 = pool.QueuePool(creator=creator1,
pool_size=1, timeout=None,
max_overflow=0)
p2 = pool.QueuePool(creator=creator2,
pool_size=1, timeout=None,
max_overflow=-1)
def waiter(p):
conn = p.connect()
time.sleep(.5)
conn.close()
c1 = p1.connect()
for i in range(5):
t = threading.Thread(target=waiter, args=(p1, ))
            t.daemon = True
t.start()
time.sleep(.5)
eq_(canary, [1])
p1._pool.abort(p2)
time.sleep(1)
eq_(canary, [1, 2, 2, 2, 2, 2])
def test_dispose_closes_pooled(self):
dbapi = MockDBAPI()
def creator():
return dbapi.connect()
p = pool.QueuePool(creator=creator,
pool_size=2, timeout=None,
max_overflow=0)
c1 = p.connect()
c2 = p.connect()
conns = [c1.connection, c2.connection]
c1.close()
eq_([c.closed for c in conns], [False, False])
p.dispose()
eq_([c.closed for c in conns], [True, False])
# currently, if a ConnectionFairy is closed
# after the pool has been disposed, there's no
# flag that states it should be invalidated
# immediately - it just gets returned to the
# pool normally...
c2.close()
eq_([c.closed for c in conns], [True, False])
# ...and that's the one we'll get back next.
c3 = p.connect()
assert c3.connection is conns[1]
def test_no_overflow(self):
self._test_overflow(40, 0)
def test_max_overflow(self):
self._test_overflow(40, 5)
def test_mixed_close(self):
p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
c1 = p.connect()
c2 = p.connect()
assert c1 is c2
c1.close()
c2 = None
assert p.checkedout() == 1
c1 = None
lazy_gc()
assert p.checkedout() == 0
lazy_gc()
assert not pool._refs
def test_overflow_no_gc_tlocal(self):
self._test_overflow_no_gc(True)
def test_overflow_no_gc(self):
self._test_overflow_no_gc(False)
def _test_overflow_no_gc(self, threadlocal):
p = self._queuepool_fixture(pool_size=2,
max_overflow=2)
# disable weakref collection of the
# underlying connections
strong_refs = set()
def _conn():
c = p.connect()
strong_refs.add(c.connection)
return c
        for j in range(5):
            conns = [_conn() for i in range(4)]
for c in conns:
c.close()
still_opened = len([c for c in strong_refs if not c.closed])
eq_(still_opened, 2)
@testing.requires.predictable_gc
def test_weakref_kaboom(self):
p = self._queuepool_fixture(pool_size=3,
max_overflow=-1, use_threadlocal=True)
c1 = p.connect()
c2 = p.connect()
c1.close()
c2 = None
del c1
del c2
gc_collect()
assert p.checkedout() == 0
c3 = p.connect()
assert c3 is not None
def test_trick_the_counter(self):
"""this is a "flaw" in the connection pool; since threadlocal
uses a single ConnectionFairy per thread with an open/close
counter, you can fool the counter into giving you a
        ConnectionFairy with an ambiguous counter, i.e. it's not true
reference counting."""
p = self._queuepool_fixture(pool_size=3,
max_overflow=-1, use_threadlocal=True)
c1 = p.connect()
c2 = p.connect()
assert c1 is c2
c1.close()
c2 = p.connect()
c2.close()
self.assert_(p.checkedout() != 0)
c2.close()
self.assert_(p.checkedout() == 0)
def test_recycle(self):
p = self._queuepool_fixture(pool_size=1,
max_overflow=0,
recycle=3)
c1 = p.connect()
c_id = id(c1.connection)
c1.close()
c2 = p.connect()
assert id(c2.connection) == c_id
c2.close()
time.sleep(4)
c3 = p.connect()
assert id(c3.connection) != c_id
def test_invalidate(self):
p = self._queuepool_fixture(pool_size=1, max_overflow=0)
c1 = p.connect()
c_id = c1.connection.id
c1.close()
c1 = None
c1 = p.connect()
assert c1.connection.id == c_id
c1.invalidate()
c1 = None
c1 = p.connect()
assert c1.connection.id != c_id
def test_recreate(self):
p = self._queuepool_fixture(pool_size=1, max_overflow=0)
p2 = p.recreate()
assert p2.size() == 1
assert p2._use_threadlocal is False
assert p2._max_overflow == 0
def test_reconnect(self):
"""tests reconnect operations at the pool level. SA's
engine/dialect includes another layer of reconnect support for
'database was lost' errors."""
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=0)
c1 = p.connect()
c_id = c1.connection.id
c1.close()
c1 = None
c1 = p.connect()
assert c1.connection.id == c_id
        dbapi.raise_error = True  # note: MockDBAPI only checks 'throw_error', so this is a no-op for the mock
c1.invalidate()
c1 = None
c1 = p.connect()
assert c1.connection.id != c_id
def test_detach(self):
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=0)
c1 = p.connect()
c1.detach()
c_id = c1.connection.id
c2 = p.connect()
assert c2.connection.id != c1.connection.id
dbapi.raise_error = True
c2.invalidate()
c2 = None
c2 = p.connect()
assert c2.connection.id != c1.connection.id
con = c1.connection
assert not con.closed
c1.close()
assert con.closed
def test_threadfairy(self):
p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
c1 = p.connect()
c1.close()
c2 = p.connect()
assert c2.connection is not None
class SingletonThreadPoolTest(PoolTestBase):
def test_cleanup(self):
self._test_cleanup(False)
def test_cleanup_no_gc(self):
self._test_cleanup(True)
def _test_cleanup(self, strong_refs):
"""test that the pool's connections are OK after cleanup() has
been called."""
dbapi = MockDBAPI()
p = pool.SingletonThreadPool(creator=dbapi.connect,
pool_size=3)
if strong_refs:
sr = set()
def _conn():
c = p.connect()
sr.add(c.connection)
return c
else:
def _conn():
return p.connect()
def checkout():
            for x in range(10):
c = _conn()
assert c
c.cursor()
c.close()
time.sleep(.1)
threads = []
        for i in range(10):
th = threading.Thread(target=checkout)
th.start()
threads.append(th)
for th in threads:
th.join()
assert len(p._all_conns) == 3
if strong_refs:
still_opened = len([c for c in sr if not c.closed])
eq_(still_opened, 3)
class AssertionPoolTest(PoolTestBase):
def test_connect_error(self):
dbapi = MockDBAPI()
p = pool.AssertionPool(creator = lambda: dbapi.connect('foo.db'))
c1 = p.connect()
assert_raises(AssertionError, p.connect)
def test_connect_multiple(self):
dbapi = MockDBAPI()
p = pool.AssertionPool(creator = lambda: dbapi.connect('foo.db'))
c1 = p.connect()
c1.close()
c2 = p.connect()
c2.close()
c3 = p.connect()
assert_raises(AssertionError, p.connect)
class NullPoolTest(PoolTestBase):
def test_reconnect(self):
dbapi = MockDBAPI()
p = pool.NullPool(creator = lambda: dbapi.connect('foo.db'))
c1 = p.connect()
c_id = c1.connection.id
        c1.close()
        c1 = None
c1 = p.connect()
dbapi.raise_error = True
c1.invalidate()
c1 = None
c1 = p.connect()
assert c1.connection.id != c_id
class StaticPoolTest(PoolTestBase):
def test_recreate(self):
dbapi = MockDBAPI()
creator = lambda: dbapi.connect('foo.db')
p = pool.StaticPool(creator)
p2 = p.recreate()
assert p._creator is p2._creator
|
rclmenezes/sqlalchemy
|
test/engine/test_pool.py
|
Python
|
mit
| 37,265
|
from .models import EmailUser
class EmailOrPhoneModelBackend:
    def authenticate(self, username=None, password=None):
        # Guard against None credentials so "'@' in username" cannot raise TypeError.
        if username is None or password is None:
            return None
        if '@' in username:
            kwargs = {'email__iexact': username}
        else:
            kwargs = {'phone': username}
        try:
            user = EmailUser.objects.get(**kwargs)
            if user.check_password(password):
                return user
        except EmailUser.DoesNotExist:
            return None
def get_user(self, user_id):
try:
return EmailUser.objects.get(pk=user_id)
except EmailUser.DoesNotExist:
return None
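# Editor's note (hedged): to activate this backend, the project would list it
# under AUTHENTICATION_BACKENDS in settings.py. The dotted path below is
# inferred from this file's location and may differ in the real project:
#
#   AUTHENTICATION_BACKENDS = [
#       'apps.users.login_backend.EmailOrPhoneModelBackend',
#       'django.contrib.auth.backends.ModelBackend',
#   ]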
|
pannkotsky/groupmate
|
backend/apps/users/login_backend.py
|
Python
|
mit
| 618
|
"""
Production settings
Debug OFF
Djeroku Defaults:
Mandrill Email -- Requires Mandrill addon
dj_database_url and django-postgrespool for heroku postgres configuration
memcachify for heroku memcache configuration
Commented out by default - redisify for heroku redis cache configuration
What you need to set in your heroku environment (heroku config:set key=value):
ALLOWED_HOSTS - You MUST add your site urls here if they don't match
the included defaults. If you have trouble, try prepending your url with
a . - eg: '.yourproject.herokuapp.com'.
Optional - Update your production environment SECRET_KEY (created and set
automatically during project creation by the djeroku setup)
Email:
Defaults to Mandrill, which is already set up when the addon is added to your app
There is also a commented version that uses your gmail address.
For more control, you can set any of the following keys in your
environment:
EMAIL_HOST, EMAIL_HOST_PASSWORD, EMAIL_HOST_USER, EMAIL_PORT
"""
from os import environ
import dj_database_url
# automagically sets up whatever memcache heroku addon you have as the cache
# https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
# use redisify instead of memcacheify if you prefer
# https://github.com/dirn/django-heroku-redisify
# from redisify import redisify
from project.settings.common import * # NOQA
# ALLOWED HOSTS
# https://docs.djangoproject.com/en/1.8/ref/settings/#allowed-hosts
ALLOWED_HOSTS = [
'.{{ project_name }}.herokuapp.com',
    '.{{ project_name }}-staging.herokuapp.com'
]
# you MUST add your domain names here, check the link for details
# END ALLOWED HOSTS
# EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls
EMAIL_HOST = environ.get('EMAIL_HOST', 'smtp.mandrillapp.com')
EMAIL_HOST_PASSWORD = environ.get('MANDRILL_APIKEY', '')
EMAIL_HOST_USER = environ.get('MANDRILL_USERNAME', '')
EMAIL_PORT = environ.get('EMAIL_PORT', 587)
EMAIL_USE_TLS = True
# use this to channel your emails through a gmail powered account instead
# EMAIL_HOST = environ.get('EMAIL_HOST', 'smtp.gmail.com')
# EMAIL_HOST_USER = environ.get('EMAIL_HOST_USER', 'your_email@gmail.com')
# EMAIL_HOST_PASSWORD = environ.get('EMAIL_HOST_PASSWORD', '')
# EMAIL_PORT = environ.get('EMAIL_PORT', 587)
# EMAIL_USE_TLS = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME
# See: https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL CONFIGURATION
# DATABASE CONFIGURATION
DATABASES['default'] = dj_database_url.config()
DATABASES['default']['ENGINE'] = 'django_postgrespool'
# END DATABASE CONFIGURATION
# CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = memcacheify()
# CACHES = redisify()
# END CACHE CONFIGURATION
# CELERY CONFIGURATION
# Simplest redis-based config possible
# *very* easy to overload free redis/MQ connection limits
# You MUST update REDIS_SERVER_URL or use djeroku_redis to set it automatically
BROKER_POOL_LIMIT = 0
BROKER_URL = environ.get('REDIS_SERVER_URL')
CELERY_IGNORE_RESULT = True
CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True
# END CELERY CONFIGURATION
# SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = environ.get('SECRET_KEY', SECRET_KEY)
# END SECRET CONFIGURATION
# ADDITIONAL MIDDLEWARE
MIDDLEWARE_CLASSES += ()
# END ADDITIONAL MIDDLEWARE
|
collingreen/djeroku
|
project/settings/prod.py
|
Python
|
mit
| 4,008
|
import json
import logging
from flask import abort, redirect, render_template, request, send_file
import ctrl.snip
from . import handlers
@handlers.route('/snip/<slug>.png')
def snip_view(slug):
filename = ctrl.snip.getSnipPath(slug)
    if filename is None:
logging.warning("invalid slug: " + slug)
abort(400)
return send_file(filename, attachment_filename=slug + '.png', mimetype='image/png')
@handlers.route('/snip/<slug>')
def snip_page(slug):
snip = ctrl.snip.getSnip(slug)
return render_template('snip.html', snip=snip)
@handlers.route('/snip/upload', methods=['POST'])
def snip_upload():
if 'file' not in request.files:
abort(400)
file = request.files['file']
snip = ctrl.snip.saveSnip(file)
if request.args.get('redirect') == '0':
return json.dumps({'slug': snip.slug})
else:
return redirect('/snip/' + snip.slug)
@handlers.route('/snip/new')
def snip_new():
return render_template('snip-new.html')
|
codeka/website
|
handlers/snip.py
|
Python
|
mit
| 955
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ClusterUpgradeDescriptionObject(Model):
"""Represents a ServiceFabric cluster upgrade.
:param config_version: The cluster configuration version (specified in the
cluster manifest).
:type config_version: str
:param code_version: The ServiceFabric code version of the cluster.
:type code_version: str
:param upgrade_kind: The kind of upgrade out of the following possible
values. Possible values include: 'Invalid', 'Rolling'. Default value:
"Rolling" .
:type upgrade_kind: str or ~azure.servicefabric.models.UpgradeKind
:param rolling_upgrade_mode: The mode used to monitor health during a
rolling upgrade. Possible values include: 'Invalid', 'UnmonitoredAuto',
'UnmonitoredManual', 'Monitored'. Default value: "UnmonitoredAuto" .
:type rolling_upgrade_mode: str or ~azure.servicefabric.models.UpgradeMode
:param upgrade_replica_set_check_timeout_in_seconds: The maximum amount of
time to block processing of an upgrade domain and prevent loss of
availability when there are unexpected issues. When this timeout expires,
processing of the upgrade domain will proceed regardless of availability
loss issues. The timeout is reset at the start of each upgrade domain.
    Valid values are between 0 and 4294967295 inclusive (unsigned 32-bit
integer).
:type upgrade_replica_set_check_timeout_in_seconds: long
:param force_restart: If true, then processes are forcefully restarted
during upgrade even when the code version has not changed (the upgrade
only changes configuration or data).
:type force_restart: bool
:param enable_delta_health_evaluation: When true, enables delta health
evaluation rather than absolute health evaluation after completion of each
upgrade domain.
:type enable_delta_health_evaluation: bool
:param monitoring_policy: Describes the parameters for monitoring an
upgrade in Monitored mode.
:type monitoring_policy:
~azure.servicefabric.models.MonitoringPolicyDescription
:param cluster_health_policy: Defines a health policy used to evaluate the
health of the cluster or of a cluster node.
:type cluster_health_policy:
~azure.servicefabric.models.ClusterHealthPolicy
:param cluster_upgrade_health_policy: Defines a health policy used to
evaluate the health of the cluster during a cluster upgrade.
:type cluster_upgrade_health_policy:
~azure.servicefabric.models.ClusterUpgradeHealthPolicyObject
:param application_health_policy_map: Defines a map that contains specific
application health policies for different applications.
Each entry specifies as key the application name and as value an
ApplicationHealthPolicy used to evaluate the application health.
If an application is not specified in the map, the application health
evaluation uses the ApplicationHealthPolicy found in its application
manifest or the default application health policy (if no health policy is
defined in the manifest).
The map is empty by default.
:type application_health_policy_map:
list[~azure.servicefabric.models.ApplicationHealthPolicyMapItem]
"""
_attribute_map = {
'config_version': {'key': 'ConfigVersion', 'type': 'str'},
'code_version': {'key': 'CodeVersion', 'type': 'str'},
'upgrade_kind': {'key': 'UpgradeKind', 'type': 'str'},
'rolling_upgrade_mode': {'key': 'RollingUpgradeMode', 'type': 'str'},
'upgrade_replica_set_check_timeout_in_seconds': {'key': 'UpgradeReplicaSetCheckTimeoutInSeconds', 'type': 'long'},
'force_restart': {'key': 'ForceRestart', 'type': 'bool'},
'enable_delta_health_evaluation': {'key': 'EnableDeltaHealthEvaluation', 'type': 'bool'},
'monitoring_policy': {'key': 'MonitoringPolicy', 'type': 'MonitoringPolicyDescription'},
'cluster_health_policy': {'key': 'ClusterHealthPolicy', 'type': 'ClusterHealthPolicy'},
'cluster_upgrade_health_policy': {'key': 'ClusterUpgradeHealthPolicy', 'type': 'ClusterUpgradeHealthPolicyObject'},
'application_health_policy_map': {'key': 'ApplicationHealthPolicyMap', 'type': '[ApplicationHealthPolicyMapItem]'},
}
def __init__(self, config_version=None, code_version=None, upgrade_kind="Rolling", rolling_upgrade_mode="UnmonitoredAuto", upgrade_replica_set_check_timeout_in_seconds=None, force_restart=None, enable_delta_health_evaluation=None, monitoring_policy=None, cluster_health_policy=None, cluster_upgrade_health_policy=None, application_health_policy_map=None):
super(ClusterUpgradeDescriptionObject, self).__init__()
self.config_version = config_version
self.code_version = code_version
self.upgrade_kind = upgrade_kind
self.rolling_upgrade_mode = rolling_upgrade_mode
self.upgrade_replica_set_check_timeout_in_seconds = upgrade_replica_set_check_timeout_in_seconds
self.force_restart = force_restart
self.enable_delta_health_evaluation = enable_delta_health_evaluation
self.monitoring_policy = monitoring_policy
self.cluster_health_policy = cluster_health_policy
self.cluster_upgrade_health_policy = cluster_upgrade_health_policy
self.application_health_policy_map = application_health_policy_map
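# Editor's addition (hedged usage sketch, not part of the generated SDK):
# constructing the model with a few of the documented fields; serialization
# to the REST wire format is handled by msrest via _attribute_map above.
# All values are illustrative only.
def _example_upgrade_description():
    return ClusterUpgradeDescriptionObject(
        config_version='1',
        code_version='6.0.219.1',
        upgrade_kind='Rolling',
        rolling_upgrade_mode='Monitored',
        upgrade_replica_set_check_timeout_in_seconds=600,
        force_restart=False,
    )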
|
lmazuel/azure-sdk-for-python
|
azure-servicefabric/azure/servicefabric/models/cluster_upgrade_description_object.py
|
Python
|
mit
| 5,840
|
#!/usr/bin/env python
"""Distutils installer for extras."""
from setuptools import setup
import os.path
import extras
testtools_cmd = extras.try_import('testtools.TestCommand')
def get_version():
"""Return the version of extras that we are building."""
version = '.'.join(
str(component) for component in extras.__version__[0:3])
return version
def get_long_description():
readme_path = os.path.join(
os.path.dirname(__file__), 'README.rst')
return open(readme_path).read()
cmdclass = {}
if testtools_cmd is not None:
cmdclass['test'] = testtools_cmd
setup(name='extras',
author='Testing cabal',
author_email='testtools-dev@lists.launchpad.net',
url='https://github.com/testing-cabal/extras',
      description=('Useful extra bits for Python - things that should be '
'in the standard library'),
long_description=get_long_description(),
version=get_version(),
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
packages=[
'extras',
'extras.tests',
],
cmdclass=cmdclass)
|
testing-cabal/extras
|
setup.py
|
Python
|
mit
| 1,687
|
import tensorflow as tf
import numpy as np
import deep_architect.modules as mo
import deep_architect.hyperparameters as hp
from deep_architect.contrib.misc.search_spaces.tensorflow.common import siso_tfm
D = hp.Discrete # Discrete Hyperparameter
def dense(h_units):
def compile_fn(di, dh): # compile function
Dense = tf.keras.layers.Dense(dh['units'])
def fn(di): # forward function
return {'out': Dense(di['in'])}
return fn
return siso_tfm('Dense', compile_fn, {'units': h_units})
def flatten():
def compile_fn(di, dh):
Flatten = tf.keras.layers.Flatten()
def fn(di):
return {'out': Flatten(di['in'])}
return fn
return siso_tfm('Flatten', compile_fn, {})
def nonlinearity(h_nonlin_name):
def compile_fn(di, dh):
def fn(di):
nonlin_name = dh['nonlin_name']
if nonlin_name == 'relu':
Out = tf.keras.layers.Activation('relu')(di['in'])
elif nonlin_name == 'tanh':
Out = tf.keras.layers.Activation('tanh')(di['in'])
elif nonlin_name == 'elu':
Out = tf.keras.layers.Activation('elu')(di['in'])
else:
raise ValueError
return {"out": Out}
return fn
return siso_tfm('Nonlinearity', compile_fn, {'nonlin_name': h_nonlin_name})
def dropout(h_keep_prob):
def compile_fn(di, dh):
Dropout = tf.keras.layers.Dropout(dh['keep_prob'])
def fn(di):
return {'out': Dropout(di['in'])}
return fn
return siso_tfm('Dropout', compile_fn, {'keep_prob': h_keep_prob})
def batch_normalization():
def compile_fn(di, dh):
bn = tf.keras.layers.BatchNormalization()
def fn(di):
return {'out': bn(di['in'])}
return fn
return siso_tfm('BatchNormalization', compile_fn, {})
def dnn_net_simple(num_classes):
# defining hyperparameter
h_num_hidden = D([64, 128, 256, 512,
1024]) # number of hidden units for dense module
h_nonlin_name = D(['relu', 'tanh',
'elu']) # nonlinearity function names to choose from
h_opt_drop = D(
[0, 1]) # dropout optional hyperparameter; 0 is exclude, 1 is include
h_drop_keep_prob = D([0.25, 0.5,
0.75]) # dropout probability to choose from
h_opt_bn = D([0, 1]) # batch_norm optional hyperparameter
h_perm = D([0, 1]) # order of swapping for permutation
h_num_repeats = D([1, 2]) # 1 is appearing once, 2 is appearing twice
# defining search space topology
model = mo.siso_sequential([
flatten(),
mo.siso_repeat(
lambda: mo.siso_sequential([
dense(h_num_hidden),
nonlinearity(h_nonlin_name),
mo.siso_permutation([
lambda: mo.siso_optional(lambda: dropout(h_drop_keep_prob),
h_opt_drop),
lambda: mo.siso_optional(batch_normalization, h_opt_bn),
], h_perm)
]), h_num_repeats),
dense(D([num_classes]))
])
return model
def dnn_cell(h_num_hidden, h_nonlin_name, h_swap, h_opt_drop, h_opt_bn,
h_drop_keep_prob):
return mo.siso_sequential([
dense(h_num_hidden),
nonlinearity(h_nonlin_name),
mo.siso_permutation([
lambda: mo.siso_optional(lambda: dropout(h_drop_keep_prob),
h_opt_drop),
lambda: mo.siso_optional(batch_normalization, h_opt_bn),
], h_swap)
])
def dnn_net(num_classes):
h_nonlin_name = D(['relu', 'tanh', 'elu'])
h_swap = D([0, 1])
h_opt_drop = D([0, 1])
h_opt_bn = D([0, 1])
return mo.siso_sequential([
flatten(),
mo.siso_repeat(
lambda: dnn_cell(D([64, 128, 256, 512, 1024]),
h_nonlin_name, h_swap, h_opt_drop, h_opt_bn,
D([0.25, 0.5, 0.75])), D([1, 2])),
dense(D([num_classes]))
])
import deep_architect.searchers.random as se
import deep_architect.core as co
def get_search_space(num_classes):
def fn():
co.Scope.reset_default_scope()
inputs, outputs = dnn_net(num_classes)
return inputs, outputs, {}
return fn
class SimpleClassifierEvaluator:
def __init__(self,
train_dataset,
num_classes,
max_num_training_epochs=20,
batch_size=256,
learning_rate=1e-3):
self.train_dataset = train_dataset
self.num_classes = num_classes
self.max_num_training_epochs = max_num_training_epochs
self.learning_rate = learning_rate
self.batch_size = batch_size
self.val_split = 0.1 # 10% of dataset for validation
def evaluate(self, inputs, outputs):
tf.keras.backend.clear_session()
tf.reset_default_graph()
(x_train, y_train) = self.train_dataset
X = tf.keras.layers.Input(x_train[0].shape)
co.forward({inputs['in']: X})
logits = outputs['out'].val
probs = tf.keras.layers.Softmax()(logits)
model = tf.keras.models.Model(inputs=[inputs['in'].val],
outputs=[probs])
optimizer = tf.keras.optimizers.Adam(lr=self.learning_rate)
model.compile(optimizer=optimizer,
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.summary()
history = model.fit(x_train,
y_train,
batch_size=self.batch_size,
epochs=self.max_num_training_epochs,
validation_split=self.val_split)
results = {'val_acc': history.history['val_acc'][-1]}
return results
def main():
num_classes = 10
num_samples = 3 # number of architecture to sample
best_val_acc, best_architecture = 0., -1
# load and normalize data
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
# defining evaluator and searcher
evaluator = SimpleClassifierEvaluator((x_train, y_train),
num_classes,
max_num_training_epochs=5)
searcher = se.RandomSearcher(get_search_space(num_classes))
    for i in range(num_samples):
print("Sampling architecture %d" % i)
inputs, outputs, _, searcher_eval_token = searcher.sample()
val_acc = evaluator.evaluate(
inputs,
outputs)['val_acc'] # evaluate and return validation accuracy
print("Finished evaluating architecture %d, validation accuracy is %f" %
(i, val_acc))
if val_acc > best_val_acc:
best_val_acc = val_acc
best_architecture = i
searcher.update(val_acc, searcher_eval_token)
print("Best validation accuracy is %f with architecture %d" %
(best_val_acc, best_architecture))
if __name__ == "__main__":
main()
|
negrinho/deep_architect
|
dev/tutorials/mnist/tensorflow/mnist_tf_keras.py
|
Python
|
mit
| 7,276
|
from cuescience_shop.models import Client, Address, Order
from natspec_utils.decorators import TextSyntax
from cart.cart import Cart
from django.test.client import Client as TestClient
class ClientTestSupport(object):
def __init__(self, test_case):
self.test_case = test_case
self.client = TestClient()
@TextSyntax("Create address #1 #2 #3 #4", types=["str", "str", "str", "str"], return_type="Address")
def create_address(self, street, number, postcode, city):
address = Address(street=street, number=number, postcode=postcode, city=city)
address.save()
return address
@TextSyntax("Create client #1 #2", types=["str", "str", "Address"], return_type="Client")
def create_client(self, first_name, last_name, address):
client = Client(first_name=first_name, last_name=last_name, shipping_address=address, billing_address=address)
client.save()
return client
@TextSyntax("Create order", types=["Client"], return_type="Order")
def create_order(self, client):
cart = Cart(self.client)
cart.create_cart()
cart = cart.cart
order = Order(client=client, cart=cart)
order.save()
return order
@TextSyntax("Assert client number is #1", types=["str", "Client"])
def assert_client_number(self, client_number, client):
self.test_case.assertEqual(client_number, client.client_number)
@TextSyntax("Assert order number is #1", types=["str", "Order"])
def assert_order_number(self, order_number, order):
self.test_case.assertEqual(order_number, order.order_number)
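    # Editor's note (hedged, illustrative): with NatSpec, a plain-text
    # scenario drives the @TextSyntax methods above; non-str parameters such
    # as Address, Client, and Order are resolved from earlier return values.
    # A scenario might read (exact syntax depends on the NatSpec version):
    #
    #   Create address Mainstreet 1 12345 Springfield
    #   Create client John Doe
    #   Create order
    #   Assert client number is 1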
|
cuescience/cuescience-shop
|
shop/tests/support/model_support.py
|
Python
|
mit
| 1,689
|
# type command prints file contents
from lib.utils import *
def _help():
usage = '''
Usage: type (file)
Print content of (file)
Use '%' in front of global
vars to use value as file
name.
'''
print(usage)
def main(argv):
if len(argv) < 1 or '-h' in argv:
_help()
return
    # The shell doesn't send the
# command name in the arg list
# so the next line is not needed
# anymore
# argv.pop(0)
# The shell does the work of replacing
# vars already. Code segment below
# is not required anymore.
# argv=replace_vars(argv)
argv = make_s(argv)
path = get_path() + argv
if os.path.isfile(path):
with open(path) as f:
data = f.readlines()
print('_________________<START>_________________\n')
print(make_s2(data))
print('__________________<END>__________________\n')
return
elif os.path.isdir(path):
err(3, add=argv + ' is a directory')
else:
err(2, path)
|
nayas360/pyterm
|
bin/type.py
|
Python
|
mit
| 1,002
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.OperationalLimits.OperationalLimit import OperationalLimit
class ActivePowerLimit(OperationalLimit):
"""Limit on active power flow.
"""
def __init__(self, value=0.0, *args, **kw_args):
"""Initialises a new 'ActivePowerLimit' instance.
@param value: Value of active power limit.
"""
#: Value of active power limit.
self.value = value
super(ActivePowerLimit, self).__init__(*args, **kw_args)
_attrs = ["value"]
_attr_types = {"value": float}
_defaults = {"value": 0.0}
_enums = {}
_refs = []
_many_refs = []
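# Editor's addition (hedged usage sketch, not part of PyCIM): constructing
# the CIM object above; assumes OperationalLimit's initialiser needs no
# positional arguments, as the *args/**kw_args forwarding suggests.
def _example_active_power_limit():
    limit = ActivePowerLimit(value=150.0)  # units per the enclosing CIM profile
    assert limit.value == 150.0
    return limit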
|
rwl/PyCIM
|
CIM14/IEC61970/OperationalLimits/ActivePowerLimit.py
|
Python
|
mit
| 1,717
|
# Python - 2.7.6
to_freud = lambda sentence: ' '.join(['sex'] * len(sentence.split(' ')))
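# Editor's addition (illustrative): every whitespace-separated token maps to
# 'sex', so for example:
#   to_freud('hello old chap')  # -> 'sex sex sex'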
|
RevansChen/online-judge
|
Codewars/8kyu/freudian-translator/Python/solution1.py
|
Python
|
mit
| 91
|
def filename_directive(filename):
return "\t.file\t\"{0}\"\n".format(filename)
def compiler_ident_directive():
return "\t.ident\t\"{0}\"\n".format("HEROCOMP - Tomas Mikula 2017")
def text_directive():
return "\t.text\n"
def data_directive():
return "\t.data\n"
def quad_directive(arg):
return "\t.quad\t{}\n".format(arg)
def global_array(identifier, size):
return "\t.comm {0},{1},32\n".format(identifier, size * 8)
def global_directive(arg):
return "\t.global\t{0}\n".format(arg)
def label(name):
return "{0}:\n".format(name)
def instruction(name, *args):
code = "\t{0}\t".format(name)
for i in range(len(args)):
if i == len(args) - 1:
code += "{0}".format(args[i])
else:
code += "{0}, ".format(args[i])
code += "\n"
return code
def number_constant(number):
return "${0}".format(number)
|
wilima/herocomp
|
herocomp/asm/Asm.py
|
Python
|
mit
| 901
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('',
url(r'^$', 'tests.views.index'),
)
|
codeinthehole/django-async-messages
|
tests/urls.py
|
Python
|
mit
| 119
|
from flask import Flask, redirect, url_for, session, request, render_template_string, abort
import requests
import os
import ast
import base64
from nocache import nocache
#App config
ALLOWED_EXTENSIONS = set(['mp4'])
app = Flask(__name__)
app.secret_key = os.urandom(24)
@app.errorhandler(404)
@nocache
def error_404(e):
"""
- Displays the 404 error page.
"""
error_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('404.html'))).content).decode("utf-8") # Done
return render_template_string(error_page)
@app.errorhandler(403)
@nocache
def error_403(e):
"""
    - Displays the 403 error page.
"""
error_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('403.html'))).content).decode("utf-8") # Done
return render_template_string(error_page)
@app.route("/", methods = ['GET'])
@nocache
def start(): #WORKS
"""
- The starting page.
- Redirects to login page if not logged in.
- Redirects to dashboard if logged in.
"""
logged_in = False
if 'user' in session:
logged_in = True
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
return redirect(url_for('dashboard'))
most_viewed_video_IDs = ((requests.get('http://127.0.0.1:8080/get-most-viewed')).content).decode("utf-8") # Done
most_viewed = {}
most_viewed_video_IDs = ast.literal_eval(most_viewed_video_IDs)
for ID in most_viewed_video_IDs:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
details = [title, views, uploader]
most_viewed.update({ID : details})
homepage = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('homepage.html'))).content).decode("utf-8") # Done
return render_template_string(homepage, logged_in = logged_in, most_viewed = most_viewed)
@app.route("/login", methods = ['POST', 'GET'])
@nocache
def login_form(): #WORKS
"""
In GET request,
- Redirects to dashboard if logged in.
- Displays login form if not logged in.
"""
if request.method == 'GET':
login_error = request.args.get('l_error', False)
if 'user' in session:
return redirect(url_for("start"))
else:
login_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('login.html'))).content).decode("utf-8") # Done
return render_template_string(login_page, loginError = login_error)
"""
In POST request
- Gets data from form.
- Validates user credentials.
"""
if request.method == 'POST':
if 'user' in session:
return redirect(url_for('dashboard'))
username = (request.form['username']).lower().strip()
password = (request.form['password'])
is_valid_user = ((requests.post(url='http://127.0.0.1:8080/is-valid-user', data={'username' : username, 'password' : password})).content).decode("utf-8") # Done
if is_valid_user == "True":
session['user'] = username
return redirect(url_for("start"))
else:
return redirect(url_for("login_form", l_error = True))
@app.route("/signup", methods = ['GET', 'POST'])
@nocache
def signup_form(): #WORKS
"""
In GET request
- Displays sign up page.
"""
if request.method == 'GET':
if 'user' in session:
return redirect(url_for('start'))
signup_error = request.args.get('s_error', False)
signup_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('signup.html'))).content).decode("utf-8") # Done
return render_template_string(signup_page, signupError = signup_error)
"""
In POST request
- Gets data from form.
- Checks if username is not already present.
- Adds to database if not present.
- Redirects to dashboard.
"""
if request.method == 'POST':
username = (request.form['username']).lower().strip()
password = (request.form['password'])
is_valid_username = ((requests.get(url='http://127.0.0.1:8080/is-valid-username/{}'.format(username))).content).decode("utf-8") # Done
if is_valid_username == "False":
requests.post(url='http://127.0.0.1:8080/add-user', data={'username' : username, 'password' : password}) # Done
session['user'] = username
return redirect(url_for("start"))
else:
return redirect(url_for("signup_form", s_error = True))
@app.route("/change-password", methods = ['GET', 'POST'])
@nocache
def password_update_form(): #WORKS
"""
In GET request
- Redirects to login page if not logged in.
- Displays the password update form.
"""
if request.method == 'GET':
u_error = request.args.get('u_error', False)
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
password_update_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('password_update.html'))).content).decode("utf-8") # Done
        if not u_error:
return render_template_string(password_update_page)
else:
return render_template_string(password_update_page, update_error = True)
"""
In POST request
- Gets the old and new passwords.
- Checks the old password.
- If it matches the stored password, password is updated.
- Otherwise, error is thrown.
"""
if request.method == 'POST':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
old_password = request.form['oldPassword']
new_password = request.form['newPassword']
done = (requests.post(url='http://127.0.0.1:8080/update-password', data={'username' : username, 'old_password' : old_password, 'new_password' : new_password}).content).decode("utf-8") # Done
if done == "True":
return redirect(url_for('start'))
else:
return redirect(url_for('password_update_form', u_error = True))
@app.route("/delete", methods = ['GET', 'POST'])
@nocache
def delete_own_account(): #WORKS
"""
In GET request
- Displays confirmation page.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
confirmation_error = request.args.get('c_error', False)
confirmation_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('account_delete_confirm.html'))).content).decode("utf-8") # Done
        if not confirmation_error:
return render_template_string(confirmation_page)
else:
return render_template_string(confirmation_page, c_error = True)
"""
In POST request
- Deletes the user credentials from the database.
- Redirects to login page.
"""
if request.method == 'POST':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
password = request.form['password']
is_deleted = ((requests.post(url='http://127.0.0.1:8080/delete-user', data={'username' : username, 'password' : password})).content).decode("utf-8") # Done
if is_deleted == "True":
session.pop('user', None)
return redirect(url_for("login_form"))
else:
return redirect(url_for('delete_own_account', c_error = True))
@app.route("/logout", methods = ['GET'])
@nocache
def logout_user(): #WORKS
"""
- Removes user from session.
- Redirects to login page.
"""
session.pop('user', None)
return redirect(url_for("start"))
@app.route("/dashboard", methods = ['GET'])
@nocache
def dashboard(): #WORKS
"""
- Redirects to login page if not logged in.
- Displays dashboard page if logged in.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for("login_form"))
else:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
user_count = (requests.get(url='http://127.0.0.1:8080/user-count').content).decode("utf-8") # Done
video_count = (requests.get(url='http://127.0.0.1:8080/video-count').content).decode("utf-8") # Done
view_count = (requests.get(url='http://127.0.0.1:8080/view-count').content).decode("utf-8") # Done
flag_count = (requests.get(url='http://127.0.0.1:8080/flag-count').content).decode("utf-8") # Done
admin_dashboard = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('administrator_dashboard.html'))).content).decode("utf-8") # Done
return render_template_string(admin_dashboard, user_count = user_count, video_count = video_count, view_count = view_count, flag_count = flag_count)
else:
username = session['user']
video_count = (requests.get(url='http://127.0.0.1:8080/user-video-count/{}'.format(username)).content).decode("utf-8") # Done
view_count = (requests.get(url='http://127.0.0.1:8080/user-view-count/{}'.format(username)).content).decode("utf-8") # Done
best_vid_ID = (requests.get(url='http://127.0.0.1:8080/user-best-video/{}'.format(username)).content).decode("utf-8") # Done
best_vid_title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(best_vid_ID))).content).decode("utf-8") # Done
fav_vid_ID = (requests.get(url='http://127.0.0.1:8080/user-fav-video/{}'.format(username)).content).decode("utf-8") # Done
fav_vid_title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(fav_vid_ID))).content).decode("utf-8") # Done
user_dashboard = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('user_dashboard.html'))).content).decode("utf-8") # Done
return render_template_string(user_dashboard, username = session['user'], view_count = view_count, video_count = video_count, high_video_ID = best_vid_ID, high_title = best_vid_title, fav_video_ID = fav_vid_ID, fav_title = fav_vid_title)
def allowed_file(filename): #WORKS
"""
- Checks if the uploaded file is an MP4 file.
"""
return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
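# Illustrative only, assuming ALLOWED_EXTENSIONS is defined at module level
# as something like {'mp4'}:
#   allowed_file('clip.MP4') -> True   (the extension check is case-insensitive)
#   allowed_file('clip.avi') -> False
#   allowed_file('noext')    -> False  (no '.' in the filename)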
@app.route("/upload", methods = ['GET', 'POST'])
@nocache
def upload_form(): #WORKS
"""
In GET request
- Displays upload form.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
upload_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('upload.html'))).content).decode("utf-8")
return render_template_string(upload_page)
"""
In POST request
- Accepts video from user.
"""
if request.method == 'POST':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
file = request.files['file']
username = session['user']
title = request.form['title']
if file and allowed_file(file.filename):
video_ID = ((requests.post(url='http://127.0.0.1:8080/upload', data={'username' : username, 'title' : title, 'file' : base64.b64encode(file.read())})).content).decode("utf-8") # Done
return redirect(url_for('watch_video', v = video_ID))
else:
return redirect(url_for('upload_form'))
@app.route("/remove", methods = ['GET', 'POST'])
@nocache
def delete_own_video():
"""
In GET request
- Redirects to login page if not logged in.
    - If the uploader and current user are the same, displays the delete confirmation page.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
d_error = request.args.get('d_error', False)
video_ID = request.args.get('video_ID')
title = ((requests.get('http://127.0.0.1:8080/title/{}'.format(video_ID))).content).decode("utf-8") # Done
uploader = ((requests.get('http://127.0.0.1:8080/uploader/{}'.format(video_ID))).content).decode("utf-8") # Done
if uploader == 'Error getting username':
abort(404)
username = session['user']
if username != uploader:
abort(403)
else:
video_delete_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('video_delete_confirmation.html'))).content).decode("utf-8")
return render_template_string(video_delete_page, video_ID = video_ID, title = title, c_error = d_error)
"""
In POST request
- Accepts password from form.
- Checks if the user is valid.
- Deletes the video.
"""
if request.method == 'POST':
if 'user' not in session:
abort(403)
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
password = request.form['password']
video_ID = request.form['video_ID']
is_deleted = ((requests.post(url='http://127.0.0.1:8080/delete-video', data={'username' : username, 'password' : password, 'video_ID' : video_ID})).content).decode("utf-8") # Done
if is_deleted == "True":
return redirect(url_for('my_videos'))
else:
return redirect(url_for('delete_own_video', video_ID = video_ID, d_error = True))
@app.route("/watch", methods = ['GET'])
@nocache
def watch_video(): #WORKS
"""
In GET request
- Plays the video with the corresponding video ID.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
video_ID = request.args.get('v', None)
        if video_ID is None:
return redirect(url_for('dashboard'))
is_available = ((requests.get(url='http://127.0.0.1:8080/is-available/{}'.format(video_ID))).content).decode("utf-8")
if is_available == "False":
abort(404)
requests.post(url='http://127.0.0.1:8080/update-count', data={'video_ID' : video_ID}) # Done
vid_title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(video_ID))).content).decode("utf-8") # Done
vid_uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(video_ID))).content).decode("utf-8") # Done
vid_views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(video_ID))).content).decode("utf-8") # Done
vid_upload_date = ((requests.get(url='http://127.0.0.1:8080/upload-date/{}'.format(video_ID))).content).decode("utf-8") # Done
video_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('video.html'))).content).decode("utf-8") # Done
random_vids = {}
random_video_IDs = ((requests.get('http://127.0.0.1:8080/get-random/{}'.format(video_ID))).content).decode("utf-8") # Done
random_video_IDs = ast.literal_eval(random_video_IDs)
for ID in random_video_IDs:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
details = [title, views, uploader]
random_vids.update({ID : details})
if 'user' in session:
            username = session['user']
            requests.post(url='http://127.0.0.1:8080/update-watched', data={'username' : username, 'video_ID' : video_ID}) # Done
return render_template_string(video_page, random_vids = random_vids, video_ID = video_ID, title = vid_title, uploader = vid_uploader, views = vid_views, vid_upload_date = vid_upload_date, logged_in = True, username = username)
else:
return render_template_string(video_page, random_vids = random_vids, video_ID = video_ID, title = vid_title, uploader = vid_uploader, views = vid_views, vid_upload_date = vid_upload_date)
@app.route("/search", methods = ['POST'])
@nocache
def search_videos():
"""
In POST request
- Accepts the search key from the user.
"""
if request.method == 'POST':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
search_key = request.form['search']
return redirect(url_for('results', search_query = search_key))
@app.route("/results", methods = ['GET'])
@nocache
def results():
"""
In GET request
- Displays the search results.
"""
if request.method == 'GET':
logged_in = False
if 'user' in session:
logged_in = True
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
search_key = request.args.get('search_query', None)
        if search_key is None:
            return redirect(url_for('dashboard'))
results = ((requests.get(url='http://127.0.0.1:8080/fuzzy/{}'.format(search_key))).content).decode("utf-8") # Done
result_dict = {}
results = ast.literal_eval(results)
for ID in results:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
details = [title, views, uploader]
result_dict.update({ID : details})
search_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('search.html'))).content).decode("utf-8")
return render_template_string(search_page, results = result_dict, search = search_key, logged_in = logged_in)
@app.route("/random", methods = ['GET'])
@nocache
def random_video():
"""
In GET request
- Selects a random video from the database and redirects to the page of the video.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
random_video_ID = ((requests.get(url='http://127.0.0.1:8080/random').content)).decode("utf-8") # Done
return redirect(url_for('watch_video', v = random_video_ID))
@app.route("/watched", methods = ['GET'])
@nocache
def watched_videos():
"""
In GET request
    - Displays a page of all videos watched by the user in the WATCHED table.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
watched_IDs = ((requests.get(url='http://127.0.0.1:8080/watched/{}'.format(username))).content).decode("utf-8") # Done
watched_IDs = ast.literal_eval(watched_IDs)
watched_dictionary = {}
for ID in watched_IDs:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
watched_dictionary.update({ID : [title, views, uploader]})
watched_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('watched.html'))).content).decode("utf-8")
return render_template_string(watched_page, watched = watched_dictionary)
@app.route("/user/<username>", methods = ['GET'])
@nocache
def user_videos(username):
"""
In GET request
- Displays a page of all videos uploaded by the user in the VIDEOS table.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
if username == session['user']:
return redirect(url_for('my_videos'))
is_user_present = ((requests.get(url='http://127.0.0.1:8080/is-user-present/{}'.format(username))).content).decode("utf-8") # Done
if is_user_present == "False":
abort(404)
uploaded_IDs = ((requests.get(url='http://127.0.0.1:8080/uploaded/{}'.format(username))).content).decode("utf-8") # Done
uploaded_IDs = ast.literal_eval(uploaded_IDs)
uploaded_dictionary = {}
for ID in uploaded_IDs:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploaded_dictionary.update({ID : [title, views]})
user_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('user.html'))).content).decode("utf-8")
logged_in = False
if 'user' in session:
logged_in = True
return render_template_string(user_page, logged_in = logged_in, username = username, user_videos = uploaded_dictionary)
@app.route("/my-videos", methods = ['GET'])
@nocache
def my_videos():
"""
In GET request
- Returns a page of videos uploaded by the logged in user.
"""
if request.method == 'GET':
if 'user' not in session:
return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
uploaded_IDs = ((requests.get(url='http://127.0.0.1:8080/uploaded/{}'.format(username))).content).decode("utf-8") # Done
uploaded_IDs = ast.literal_eval(uploaded_IDs)
uploaded_dictionary = {}
for ID in uploaded_IDs:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploaded_dictionary.update({ID : [title, views]})
my_videos_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('my_videos.html'))).content).decode("utf-8") # Done
return render_template_string(my_videos_page, username = username, user_videos = uploaded_dictionary)
@app.route("/flag", methods = ['GET'])
@nocache
def flag_video():
"""
In GET request
- Flags the video.
- Redirects to home page.
"""
if request.method == 'GET':
if 'user' not in session:
            return redirect(url_for('login_form'))
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
video_ID = request.args.get('v')
username = session['user']
requests.post(url='http://127.0.0.1:8080/flag', data={'video_ID' : video_ID, 'username' : username})
return redirect(url_for('start'))
@app.route("/favourites", methods = ['GET'])
@nocache
def favourites():
"""
In GET request
- Displays a list of favourite videos.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
abort(403)
username = session['user']
fav_list = (requests.get(url='http://127.0.0.1:8080/favourites/{}'.format(username)).content).decode("utf-8")
fav_list = ast.literal_eval(fav_list)
            fav_dictionary = {}
for ID in fav_list:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
                fav_dictionary.update({ID : [title, views, uploader]})
favourites_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('favourite.html'))).content).decode("utf-8") # Done
            return render_template_string(favourites_page, fav = fav_dictionary)
else:
return redirect(url_for('login_form'))
# ADMIN PART
@app.route("/add-admin", methods = ['GET', 'POST'])
@nocache
def add_admin():
"""
In GET request
- Displays the add administrator page.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
add_admin_page = (requests.get(url='http://127.0.0.1:8080/html/{}'.format('add_admin.html')).content).decode("utf-8") # Done
name_error = request.args.get('name_error', False)
pass_error = request.args.get('pass_error', False)
return render_template_string(add_admin_page, nameError = name_error, passError = pass_error)
else:
abort(403)
else:
return redirect(url_for('login_form'))
"""
In POST request
- Checks if the administrator credentials are valid.
- Checks if the new username is not already taken.
- Adds the new administrator to the ADMINS table.
"""
if request.method == 'POST':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
admin_password = request.form['admin_password']
new_username = request.form['new_username']
new_password = request.form['new_password']
is_valid_admin = (requests.post(url='http://127.0.0.1:8080/is-valid-user', data={'username' : session['user'], 'password' : admin_password}).content).decode("utf-8") # Done
if is_valid_admin == "True":
is_valid_username = (requests.get(url='http://127.0.0.1:8080/is-valid-username/{}'.format(new_username)).content).decode("utf-8") # Done
if is_valid_username == "False":
requests.post(url='http://127.0.0.1:8080/add-admin', data={'username' : new_username, 'password' : new_password})
return redirect(url_for('dashboard'))
else:
return redirect(url_for('add_admin', name_error = True))
else:
return redirect(url_for('add_admin', pass_error = True))
else:
abort(403)
else:
return redirect(url_for('login_form'))
@app.route("/flagged", methods = ['GET'])
@nocache
def flagged_videos():
"""
In GET request
- Displays all the flagged videos.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
flagged_IDs = ((requests.get(url='http://127.0.0.1:8080/flagged')).content).decode("utf-8") # Done
flagged_IDs = ast.literal_eval(flagged_IDs)
flagged_dictionary = {}
for ID in flagged_IDs:
title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(ID))).content).decode("utf-8") # Done
views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(ID))).content).decode("utf-8") # Done
uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(ID))).content).decode("utf-8") # Done
flagger = ((requests.get(url='http://127.0.0.1:8080/flagger/{}'.format(ID))).content).decode("utf-8") # Done
flagged_dictionary.update({ID : [title, views, uploader, flagger]})
flagged_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('flagged.html'))).content).decode("utf-8") # Done
return render_template_string(flagged_page, flagged_videos = flagged_dictionary)
else:
abort(403)
else:
return redirect(url_for('login_form'))
@app.route("/admin-delete-video", methods = ['GET'])
@nocache
def admin_delete_video():
"""
In GET request
- Deletes the video with the corresponding video ID.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
video_ID = request.args.get('video_ID')
requests.post(url='http://127.0.0.1:8080/admin-delete-video', data={'video_ID' : video_ID})
return redirect(url_for('flagged_videos'))
else:
abort(403)
else:
return redirect(url_for('login_form'))
@app.route("/admin-users", methods = ['GET'])
@nocache
def admin_list_users():
"""
In GET request
- Displays a list of users.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
user_list = (requests.get(url='http://127.0.0.1:8080/user-list').content).decode("utf-8")
user_list = ast.literal_eval(user_list)
user_dictionary = {}
for username in user_list:
num_videos = (requests.get(url='http://127.0.0.1:8080/num-videos/{}'.format(username)).content).decode("utf-8")
num_flagged = (requests.get(url='http://127.0.0.1:8080/num-flags/{}'.format(username)).content).decode("utf-8")
user_dictionary.update({username : [num_videos, num_flagged]})
users_page = (requests.get(url='http://127.0.0.1:8080/html/{}'.format('user_list.html')).content).decode("utf-8")
return render_template_string(users_page, user_dict = user_dictionary)
else:
abort(403)
else:
return redirect(url_for('login_form'))
@app.route("/admin-delete-user/<username>", methods = ['GET'])
@nocache
def admin_delete_user(username):
"""
In GET request
- Deletes the user with the corresponding username.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
requests.post(url='http://127.0.0.1:8080/admin-delete-user', data={'username' : username})
return redirect(url_for('admin_list_users'))
else:
abort(403)
else:
return redirect(url_for('login_form'))
@app.route("/review", methods = ['GET'])
@nocache
def admin_review_video():
"""
In GET request
    - Displays a review page where the administrator can watch the video,
      delete it, or remove its flag.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
video_ID = request.args.get('v')
vid_title = ((requests.get(url='http://127.0.0.1:8080/title/{}'.format(video_ID))).content).decode("utf-8") # Done
vid_uploader = ((requests.get(url='http://127.0.0.1:8080/uploader/{}'.format(video_ID))).content).decode("utf-8") # Done
vid_views = ((requests.get(url='http://127.0.0.1:8080/views/{}'.format(video_ID))).content).decode("utf-8") # Done
video_page = ((requests.get(url='http://127.0.0.1:8080/html/{}'.format('review.html'))).content).decode("utf-8") # Done
return render_template_string(video_page, video_ID = video_ID, title = vid_title, uploader = vid_uploader, views = vid_views)
else:
abort(403)
else:
return redirect(url_for('login_form'))
@app.route("/admin-remove-flag", methods = ['GET'])
@nocache
def admin_remove_flag():
"""
In GET request
- Deletes the flag for the respective video ID.
"""
if request.method == 'GET':
if 'user' in session:
is_admin = (requests.get(url='http://127.0.0.1:8080/is-admin/{}'.format(session['user'])).content).decode("utf-8") # Done
if is_admin == "True":
video_ID = request.args.get('v')
requests.post(url='http://127.0.0.1:8080/remove-flag', data={'video_ID' : video_ID})
return redirect(url_for('flagged_videos'))
else:
abort(403)
else:
return redirect(url_for('login_form'))
if __name__ == "__main__":
app.run(port=5000, threaded=True, debug=True)
|
sharadbhat/Video-Sharing-Platform
|
Client/client.py
|
Python
|
mit
| 36,232
|
import pytest
from bcbio.pipeline import rnaseq
@pytest.yield_fixture
def ericscript_run(mocker):
yield mocker.patch('bcbio.pipeline.rnaseq.ericscript.run', autospec=True)
@pytest.fixture
def sample():
return {
'config': {
'algorithm': {
'fusion_mode': True,
'fusion_caller': 'ericscript'
}
}
}
def test_detect_fusion_callers_calls_for_each_sample(ericscript_run, sample):
samples = [[sample], [sample]]
rnaseq.detect_fusions(samples)
assert ericscript_run.call_count == len(samples)
def test_detect_fusions_returns_updated_samples(ericscript_run, sample):
samples = [[sample]]
result = rnaseq.detect_fusions(samples)
assert result == [[ericscript_run.return_value]]
def test_detect_fusions_calls_caller_with_sample_dict(ericscript_run, sample):
samples = [[sample]]
rnaseq.detect_fusions(samples)
ericscript_run.assert_called_once_with(sample)
|
biocyberman/bcbio-nextgen
|
tests/unit/pipeline/test_rnaseq.py
|
Python
|
mit
| 976
|
# TODO:
# 1. Fix broken functions
# 2. Network architecture changed, but functions need to use mask differently
# 3. Evaluation function
from __future__ import print_function
import cPickle as pickle
import sys
import time
from collections import OrderedDict
import numpy
import theano
import theano.tensor as tensor
from theano import config
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import encoder_decoder
# datasets = {'imdb': (imdb.load_data, imdb.prepare_data)}
# Set the random number generators' seeds for consistency
SEED = 123
numpy.random.seed(SEED)
def numpy_floatX(data):
return numpy.asarray(data, dtype=config.floatX)
def get_minibatches_idx(n, minibatch_size, shuffle=False):
"""
    Split the dataset indices into minibatches, optionally shuffling them at each iteration.
"""
idx_list = numpy.arange(n, dtype="int32")
if shuffle:
numpy.random.shuffle(idx_list)
minibatches = []
minibatch_start = 0
for i in range(n // minibatch_size):
minibatches.append(idx_list[minibatch_start:
minibatch_start + minibatch_size])
minibatch_start += minibatch_size
if (minibatch_start != n):
# Make a minibatch out of what is left
minibatches.append(idx_list[minibatch_start:])
return zip(range(len(minibatches)), minibatches)
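# Illustrative example (entries are numpy int32 arrays, shown here as lists):
#   get_minibatches_idx(5, 2) -> [(0, [0, 1]), (1, [2, 3]), (2, [4])]
# With shuffle=True the index list is permuted before being split.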
def zipp(params, tparams):
"""
When we reload the model. Needed for the GPU stuff.
"""
for kk, vv in params.items():
tparams[kk].set_value(vv)
def unzip(zipped):
"""
When we pickle the model. Needed for the GPU stuff.
"""
new_params = OrderedDict()
for kk, vv in zipped.items():
new_params[kk] = vv.get_value()
return new_params
def dropout_layer(state_before, use_noise, trng):
proj = tensor.switch(use_noise,
(state_before *
trng.binomial(state_before.shape,
p=0.5, n=1,
dtype=state_before.dtype)),
state_before * 0.5)
return proj
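# Note: this is classic (non-inverted) dropout with p=0.5. During training
# (use_noise == 1) units are zeroed by a Bernoulli mask; at test time
# (use_noise == 0) activations are scaled by 0.5 so their expected magnitude
# matches training.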
def _p(pp, name):
return '%s_%s' % (pp, name)
def init_params(options):
"""
Global (not LSTM) parameter. For the embedding and the classifier.
"""
params = OrderedDict()
# TODO(biteandbytes) : params['Wemb'] is not required anymore. Remove ?
# embedding
# randn = numpy.random.rand(options['n_words'],
# options['dim_proj'])
# params['Wemb'] = (0.01 * randn).astype(config.floatX)
params = get_layer(options['encoder'])[0](options,
params,
prefix=options['encoder'])
# classifier
params['U'] = 0.01 * numpy.random.randn(options['ydim'], options['dim_proj']).astype(config.floatX)
params['b'] = numpy.zeros((options['ydim'],)).astype(config.floatX)
return params
def load_params(path, params):
pp = numpy.load(path)
for kk, vv in params.items():
if kk not in pp:
raise Warning('%s is not in the archive' % kk)
params[kk] = pp[kk]
return params
def init_tparams(params):
tparams = OrderedDict()
for kk, pp in params.items():
tparams[kk] = theano.shared(params[kk], name=kk)
return tparams
def get_layer(name):
fns = layers[name]
return fns
def ortho_weight(ndim):
W = numpy.random.randn(ndim, ndim)
u, s, v = numpy.linalg.svd(W)
return u.astype(config.floatX)
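# The left singular vectors of a square Gaussian random matrix form an
# orthonormal basis, so the returned u is an orthogonal matrix -- the kind of
# initialization the "Better if orthogonal?" TODO in param_init_lstm below
# is referring to.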
def param_init_lstm(options, params, prefix='lstm'):
"""
Init the LSTM parameter:
:see: init_params
"""
# Weight matrices
# Hidden state of dims |H|
    # Input state of dims X (x_dim = 5 in this model)
# Cell state of dims |H|
# 4 matrices of size |H|*|H+X_dim|
# TODO: Better if orthogonal?
weight = 0.01 * numpy.random.randn( 4, options['dim_proj'], options['dim_proj'] + 5 );
# Bias vectors of length |H|
# 4 for each of the above matrices
bias = numpy.zeros(( 4, options['dim_proj'] ));
params['weight'] = weight
params['bias'] = bias.astype(config.floatX)
return params
def lstm_layer(tparams, state_below, options, prefix='lstm', mask=None):
nsteps = state_below.shape[0]
if state_below.ndim == 3:
n_samples = state_below.shape[1]
else:
n_samples = 1
assert mask is not None
# NOT USED, REMOVE ?
def _slice(_x, n, dim):
if _x.ndim == 3:
return _x[:, :, n * dim:(n + 1) * dim]
return _x[:, n * dim:(n + 1) * dim]
# Dims
# m_ : N
    # W : Hx(H+X)
# B : H
    # x_ : NxX
# h_ : NxH
# c_ : NxH
# NOTE(bitesandbytes): WHY THE CHANGE IN CONVENTION? Always keep N and T on top. Becomes extremely confusing especially when the rest
# of the code is N major.
# TODO(bitesandbytes) Use _p( prefix, "weight" ) other wise we can't stack LSTMs properly.
def _step(m_, x_, h_, c_):
        # Concat x_ and h_ to get Nx(H+X) matrix
ip_mat = tensor.concatenate([x_, h_], axis=1 )
# Compute forget gate values
# f : NxH matrix
f = tensor.nnet.sigmoid(
tensor.tensordot(ip_mat, tparams['weight'][0], axes=[1, 1]) + tparams['bias'][0, :][None, :])
#f = tensor.nnet.sigmoid(tensor.dot(tparams['weight'][0, :, :], ip_mat) + tparams['bias'][0, :][:, None])
# Compute input gate values
# i : NxH matrix
i = tensor.nnet.sigmoid(tensor.tensordot(ip_mat, tparams['weight'][1], axes=[1,1]) + tparams['bias'][1, :][None, :])
#i = tensor.nnet.sigmoid(tensor.dot(tparams['weight'][1, :, :], ip_mat) + tparams['bias'][1, :][:, None])
#c_new : NxH matrix
c_new = tensor.tanh(tensor.tensordot(ip_mat, tparams['weight'][2], axes=[1,1]) + tparams['bias'][2, :][None, :])
#c_new = tensor.tanh(tensor.dot(tparams['weight'][2, :, :], ip_mat) + tparams['bias'][2, :][:, None])
# Compute new memory
# c : NxH
c = i * c_new + f * c_
# Retain based on mask
c = m_[:, None] * c + (1. - m_)[:, None] * c_
# Compute new hidden state
# h : NxH
h = tensor.nnet.sigmoid(
tensor.tensordot(ip_mat, tparams['weight'][3], axes=[1,1]) + tparams['bias'][3, :][None, :]) * tensor.tanh(c)
#h = tensor.nnet.sigmoid(
# tensor.dot(tparams['weight'][3, :, :], ip_mat) + tparams['bias'][3, :][:, None]) * tensor.tanh(c)
# Retain based on mask
h = m_[:, None] * h + (1. - m_)[:, None] * h_
return h, c
# No idea why this is here. :/
# TODO(saipraveenb, akshay-balaji, rockermaxx) : Remove this ?
# NOTE: These are the inputs X. It is called state_below to allow for stacking multiple LSTMs on top of each other.
# And yes.. not needed anymore. This was the original Wx computation.
#state_below = (tensor.dot(state_below, tparams[_p(prefix, 'W')]) +
# tparams[_p(prefix, 'b')])
# TODO(saipraveenb) : Can you fix this scan function ?
dim_proj = options['dim_proj']
rval, updates = theano.scan(_step,
sequences=[mask, state_below],
outputs_info=[tensor.alloc(numpy_floatX(0.), n_samples, dim_proj),
tensor.alloc(numpy_floatX(0.), n_samples, dim_proj)],
name=_p(prefix, '_layers'),
n_steps=nsteps)
return rval[0]
# ff: Feed Forward (normal neural net), only useful to put after lstm
# before the classifier.
layers = {'lstm': (param_init_lstm, lstm_layer)}
def sgd(lr, tparams, grads, x, mask, y, cost):
""" Stochastic Gradient Descent
    :note: A more complicated version of SGD than strictly needed; it is
    structured this way to match adadelta and rmsprop.
"""
# New set of shared variable that will contain the gradient
# for a mini-batch.
gshared = [theano.shared(p.get_value() * 0., name='%s_grad' % k)
for k, p in tparams.items()]
gsup = [(gs, g) for gs, g in zip(gshared, grads)]
    # Function that computes gradients for a mini-batch, but does not
    # update the weights.
f_grad_shared = theano.function([x, mask, y], cost, updates=gsup,
name='sgd_f_grad_shared')
pup = [(p, p - lr * g) for p, g in zip(tparams.values(), gshared)]
# Function that updates the weights from the previously computed
# gradient.
f_update = theano.function([lr], [], updates=pup,
name='sgd_f_update')
return f_grad_shared, f_update
def adadelta(lr, tparams, grads, x, mask, y, cost):
"""
An adaptive learning rate optimizer
Parameters
----------
lr : Theano SharedVariable
Initial learning rate
    tparams: Theano SharedVariable
Model parameters
grads: Theano variable
        Gradients of cost w.r.t. parameters
x: Theano variable
Model inputs
mask: Theano variable
Sequence mask
y: Theano variable
Targets
cost: Theano variable
        Objective function to minimize
Notes
-----
For more information, see [ADADELTA]_.
.. [ADADELTA] Matthew D. Zeiler, *ADADELTA: An Adaptive Learning
Rate Method*, arXiv:1212.5701.
"""
zipped_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_grad' % k)
for k, p in tparams.items()]
running_up2 = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rup2' % k)
for k, p in tparams.items()]
running_grads2 = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rgrad2' % k)
for k, p in tparams.items()]
zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
for rg2, g in zip(running_grads2, grads)]
f_grad_shared = theano.function([x, mask, y], cost, updates=zgup + rg2up,
name='adadelta_f_grad_shared')
updir = [-tensor.sqrt(ru2 + 1e-6) / tensor.sqrt(rg2 + 1e-6) * zg
for zg, ru2, rg2 in zip(zipped_grads,
running_up2,
running_grads2)]
ru2up = [(ru2, 0.95 * ru2 + 0.05 * (ud ** 2))
for ru2, ud in zip(running_up2, updir)]
param_up = [(p, p + ud) for p, ud in zip(tparams.values(), updir)]
f_update = theano.function([lr], [], updates=ru2up + param_up,
on_unused_input='ignore',
name='adadelta_f_update')
return f_grad_shared, f_update
def rmsprop(lr, tparams, grads, x, mask, y, cost):
"""
    A variant of SGD that scales the step size by a running average of the
    recent gradient norms.
Parameters
----------
lr : Theano SharedVariable
Initial learning rate
    tparams: Theano SharedVariable
Model parameters
grads: Theano variable
        Gradients of cost w.r.t. parameters
x: Theano variable
Model inputs
mask: Theano variable
Sequence mask
y: Theano variable
Targets
cost: Theano variable
        Objective function to minimize
Notes
-----
For more information, see [Hint2014]_.
.. [Hint2014] Geoff Hinton, *Neural Networks for Machine Learning*,
lecture 6a,
http://cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
"""
zipped_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_grad' % k)
for k, p in tparams.items()]
running_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rgrad' % k)
for k, p in tparams.items()]
running_grads2 = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_rgrad2' % k)
for k, p in tparams.items()]
zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
rgup = [(rg, 0.95 * rg + 0.05 * g) for rg, g in zip(running_grads, grads)]
rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
for rg2, g in zip(running_grads2, grads)]
f_grad_shared = theano.function([x, mask, y], cost,
updates=zgup + rgup + rg2up,
name='rmsprop_f_grad_shared')
updir = [theano.shared(p.get_value() * numpy_floatX(0.),
name='%s_updir' % k)
for k, p in tparams.items()]
updir_new = [(ud, 0.9 * ud - 1e-4 * zg / tensor.sqrt(rg2 - rg ** 2 + 1e-4))
for ud, zg, rg, rg2 in zip(updir, zipped_grads, running_grads,
running_grads2)]
param_up = [(p, p + udn[1])
for p, udn in zip(tparams.values(), updir_new)]
f_update = theano.function([lr], [], updates=updir_new + param_up,
on_unused_input='ignore',
name='rmsprop_f_update')
return f_grad_shared, f_update
def build_model(tparams, options):
trng = RandomStreams(SEED)
# Used for dropout.
use_noise = theano.shared(numpy_floatX(0.))
# TxNxX float
x = tensor.tensor3('x', dtype=config.floatX)
# TxN float( logically boolean )
mask = tensor.matrix('mask', dtype=config.floatX)
# TxN int64
y = tensor.matrix('y', dtype='int64')
n_timesteps = x.shape[0]
n_samples = x.shape[1]
# TODO(biteandbytes) : This gets inputs. Change to n_timesteps*n_samples*3 tensor
# emb = tparams['Wemb'][x.flatten()].reshape([n_timesteps,
# n_samples,
# options['dim_proj']])
#
# TxNxH.
proj = get_layer(options['encoder'])[1](tparams, x, options,
prefix=options['encoder'],
mask=mask)
    # TODO(biteandbytes) : Modify this ?
    # tparams['U'] has shape OxH (not HxO), where
    # O = output one-hot vector size,
    # H = hidden state size.
if options['encoder'] == 'lstm':
#proj = (proj * mask[:, :, None]).sum(axis=0)
#proj = proj / mask.sum(axis=0)[:, None]
# B = O
# TxNxO.
proj = tensor.tensordot( proj, tparams['U'], axes=[2,1] ) + tparams['b'][None, None, :];
# TODO(saipraveenb): Check if we need dropout option.
#if options['use_dropout']:
# proj = dropout_layer(proj, use_noise, trng)
# pred = TxNxO
#pred = tensor.nnet.softmax( proj );
exp_pred = tensor.exp( proj );
# TxNxO ( last dimension softmaxed )
pred = exp_pred / exp_pred.sum( axis=2, keepdims=True );
f_pred_prob = theano.function([x, mask], pred, name='f_pred_prob')
f_pred = theano.function([x, mask], pred.argmax(axis=2), name='f_pred')
off = 1e-8
if pred.dtype == 'float16':
off = 1e-6
# NOTE: Finished adding the softmax layer with mask.
cost = -tensor.log( pred[ tensor.arange(pred.shape[0])[:,None], tensor.arange(pred.shape[1])[None,:], y ] * mask + off ).mean()
return use_noise, x, mask, y, f_pred_prob, f_pred, cost
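# Note on the cost above: the advanced indexing pred[arange(T)[:, None],
# arange(N)[None, :], y] picks the predicted probability of the target symbol
# at every (timestep, sample) position. Masked-out positions reduce to
# -log(off), a constant with zero gradient, so padding does not drive learning.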
def pred_probs(f_pred_prob, prepare_data, data, iterator, verbose=False):
""" If you want to use a trained model, this is useful to compute
the probabilities of new examples.
"""
n_samples = len(data[0])
probs = numpy.zeros((n_samples, 2)).astype(config.floatX)
n_done = 0
for _, valid_index in iterator:
x, mask, y = prepare_data([data[0][t] for t in valid_index],
numpy.array(data[1])[valid_index],
maxlen=None, x_dim = 5)
pred_probs = f_pred_prob(x, mask)
probs[valid_index, :] = pred_probs
n_done += len(valid_index)
if verbose:
print('%d/%d samples classified' % (n_done, n_samples))
return probs
def pred_error(f_pred, prepare_data, data, iterator, verbose=False):
"""
Just compute the error
f_pred: Theano fct computing the prediction
prepare_data: usual prepare_data for that dataset.
"""
valid_err = 0
for _, valid_index in iterator:
x, mask, y = prepare_data([data[0][t] for t in valid_index],
numpy.array(data[1])[valid_index],
maxlen=None, x_dim = 5)
# TxN
preds = f_pred(x, mask)
# TxN
targets = y;
#print("TARGET: ")
#print(targets)
#print("PRED: ")
#print(preds);
valid_err += (preds == targets).sum()
valid_err = 1. - numpy_floatX(valid_err) / len(data[0])
return valid_err
def train_lstm(
    dim_proj=128, # word embedding dimension and LSTM number of hidden units.
    patience=10, # Number of epochs to wait before early stopping if no progress
    max_epochs=10000, # The maximum number of epochs to run
dispFreq=10, # Display to stdout the training progress every N updates
decay_c=0., # Weight decay for the classifier applied to the U weights.
lrate=0.00005, # Learning rate for sgd (not used for adadelta and rmsprop)
n_words=42, # Vocabulary size
optimizer=adadelta,
    # sgd, adadelta and rmsprop available; sgd is very hard to use, not recommended (probably needs momentum and a decaying learning rate).
encoder='lstm', # TODO: can be removed must be lstm.
saveto='lstm_model.npz', # The best model will be saved there
    validFreq=1000, # Compute the validation error after this number of updates.
saveFreq=3000, # Save the parameters after every saveFreq updates
    maxlen=100, # Sequences longer than this get ignored
batch_size=64, # The batch size during training.
valid_batch_size=16, # The batch size used for validation/test set.
dataset='imdb',
# Parameter for extra option
noise_std=0.,
    use_dropout=False, # if False slightly faster, but worse test error
    # This frequently needs a bigger model.
reload_model=None, # Path to a saved model we want to start from.
test_size=-1, # If >0, we keep only this number of test example.
ydim=42, # Output dimensions.
w_multiplier=1,
b_multiplier=1,
exampleFreq=100,
):
# Model options
model_options = locals().copy()
print("model options", model_options)
print('Loading data')
# (N*[x], N*[y])
train, valid, test, vocab = encoder_decoder.get_raw_data("../data/complex_xs_50000.txt",
"../data/complex_targets_50000.txt")
vocab_lst = [''] * ( len(vocab.items()) + 1 );
for w,i in vocab.items():
print(i);
vocab_lst[i] = w;
# Input - seqs: num_samples*3, labels: num_samples*[list]
# Return X:maxlen*num_samples*3, X_mask: max_len*num_samples, labels: maxlen*num_samples
prepare_data = encoder_decoder.prepare_data
# Chosen as |num words| + 1 (0 -> no word | empty)
# NOTE: Added ydim as an input to the function and initialized to 22.
# ydim = 22
#model_options['ydim'] = ydim
print('Building model')
# This create the initial parameters as numpy ndarrays.
# Dict name (string) -> numpy ndarray
params = init_params(model_options)
if reload_model:
load_params('lstm_model.npz', params)
# This create Theano Shared Variable from the parameters.
# Dict name (string) -> Theano Tensor Shared Variable
# params and tparams have different copy of the weights.
tparams = init_tparams(params)
# use_noise is for dropout
(use_noise, x, mask,
y, f_pred_prob, f_pred, cost) = build_model(tparams, model_options)
if decay_c > 0.:
decay_c = theano.shared(numpy_floatX(decay_c), name='decay_c')
weight_decay = 0.
weight_decay += (tparams['U'] ** 2).sum()
weight_decay *= decay_c
cost += weight_decay
f_cost = theano.function([x, mask, y], cost, name='f_cost')
grads = tensor.grad(cost, wrt=list(tparams.values()))
f_grad = theano.function([x, mask, y], grads, name='f_grad')
lr = tensor.scalar(name='lr')
f_grad_shared, f_update = optimizer(lr, tparams, grads,
x, mask, y, cost)
print('Optimization')
# Random shuffle partition.
kf_valid = get_minibatches_idx(len(valid[0]), valid_batch_size)
kf_test = get_minibatches_idx(len(test[0]), valid_batch_size)
example_test_batch = kf_test[ int( numpy.random.rand() * len(kf_test) ) ][1];
print("%d train examples" % len(train[0]))
print("%d valid examples" % len(valid[0]))
print("%d test examples" % len(test[0]))
history_errs = []
best_p = None
bad_count = 0
if validFreq == -1:
validFreq = len(train[0]) # batch_size
if saveFreq == -1:
saveFreq = len(train[0]) # batch_size
uidx = 0 # the number of update done
estop = False # early stop
start_time = time.time()
try:
for eidx in range(max_epochs):
n_samples = 0
# Get new shuffled index for the training set.
kf = get_minibatches_idx(len(train[0]), batch_size, shuffle=True)
for _, train_index in kf:
uidx += 1
use_noise.set_value(0.)
# Select the random examples for this minibatch
y = [train[1][t] for t in train_index]
x = [train[0][t] for t in train_index]
# Get the data in numpy.ndarray format
# This swap the axis!
# Return something of shape (minibatch maxlen, n samples)
                # x = TxNxX float (X = x_dim = 5 here)
# m = TxN boolean
# y = TxN int64
x, mask, y = prepare_data(x, y, x_dim = 5)
n_samples += x.shape[1]
# Sample.
#print("SAMPLE MASK");
#print( x );
cost = f_grad_shared(x, mask, y.astype(numpy.int64))
f_update(lrate)
if numpy.isnan(cost) or numpy.isinf(cost):
print('bad cost detected: ', cost)
return 1., 1., 1.
if numpy.mod(uidx, dispFreq) == 0:
print('Epoch ', eidx, 'Update ', uidx, 'Cost ', cost)
if numpy.mod(uidx, exampleFreq) == 0:
example_index = example_test_batch;
x, mask, y = prepare_data([test[0][t] for t in example_index],
numpy.array(test[1])[example_index],
maxlen=None, x_dim = 5)
# TxN
preds = f_pred(x, mask).transpose().astype(numpy.int64);
# TxN
targets = y.transpose().astype(numpy.int64);
k = int( numpy.random.rand() * len(targets) );
print( "Targets for x=", x[0][k] );
print( ''.join([ vocab_lst[o] + ' ' for o in targets[k].tolist() ] ) )
print( "Prediction " );
print( ''.join([ vocab_lst[o] + ' ' for o in preds[k].tolist() ] ) )
if saveto and numpy.mod(uidx, saveFreq) == 0:
print('Saving...')
if best_p is not None:
params = best_p
else:
params = unzip(tparams)
numpy.savez(saveto, history_errs=history_errs, **params)
pickle.dump(model_options, open('%s.pkl' % saveto, 'wb'), -1)
print('Done')
if numpy.mod(uidx, validFreq) == 0:
use_noise.set_value(0.)
train_err = pred_error(f_pred, prepare_data, train, kf)
valid_err = pred_error(f_pred, prepare_data, valid,
kf_valid)
test_err = pred_error(f_pred, prepare_data, test, kf_test)
history_errs.append([valid_err, test_err])
#if best_p is None or valid_err <= numpy.array(history_errs)[:, 0].min():
# best_p = unzip(tparams)
# bad_counter = 0
print('Train ', train_err, 'Valid ', valid_err,
'Test ', test_err)
#if len(history_errs) > patience and valid_err >= numpy.array(history_errs)[:-patience, 0].min():
# bad_counter += 1
# if bad_counter > patience:
# print('Early Stop!')
# estop = True
# break
print('Seen %d samples' % n_samples)
if estop:
break
except KeyboardInterrupt:
print("Training interupted")
end_time = time.time()
if best_p is not None:
zipp(best_p, tparams)
else:
best_p = unzip(tparams)
use_noise.set_value(0.)
kf_train_sorted = get_minibatches_idx(len(train[0]), batch_size)
train_err = pred_error(f_pred, prepare_data, train, kf_train_sorted)
valid_err = pred_error(f_pred, prepare_data, valid, kf_valid)
test_err = pred_error(f_pred, prepare_data, test, kf_test)
print('Train ', train_err, 'Valid ', valid_err, 'Test ', test_err)
if saveto:
numpy.savez(saveto, train_err=train_err,
valid_err=valid_err, test_err=test_err,
history_errs=history_errs, **best_p)
    print('The code ran for %d epochs, with %f sec/epoch' % (
        (eidx + 1), (end_time - start_time) / (1. * (eidx + 1))))
print(('Training took %.1fs' %
(end_time - start_time)), file=sys.stderr)
return train_err, valid_err, test_err
if __name__ == '__main__':
    # See train_lstm for all possible parameters and their definitions.
train_lstm(
max_epochs=100,
test_size=500,
)
|
bitesandbytes/upgraded-system
|
src/simple_5_lstm.py
|
Python
|
mit
| 26,400
|
from django.shortcuts import render
from django.middleware.csrf import get_token
from ajaxuploader.views import AjaxFileUploader
from pandora.backends import SignalBasedLocalUploadBackend
from pandora.models import Item
def home(request):
return render(request, 'pandora/home.html', {
'items': Item.objects.all(),
'csrf_token': get_token(request)
})
import_uploader = AjaxFileUploader(SignalBasedLocalUploadBackend)
|
vrde/pandora
|
pandora/views.py
|
Python
|
mit
| 458
|
#!/usr/bin/env python
import os
import re
from setuptools import find_packages, setup
def text_of(relpath):
"""
Return string containing the contents of the file at *relpath* relative to
this file.
"""
thisdir = os.path.dirname(__file__)
file_path = os.path.join(thisdir, os.path.normpath(relpath))
with open(file_path) as f:
text = f.read()
return text
# Read the version from docx.__version__ without importing the package
# (and thus attempting to import packages it depends on that may not be
# installed yet)
version = re.search(r'__version__ = "([^"]+)"', text_of("docx/__init__.py")).group(1)
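# Illustrative: given a line like __version__ = "0.8.6" in docx/__init__.py,
# the regex above yields version == "0.8.6" without importing the package.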
NAME = "python-docx"
VERSION = version
DESCRIPTION = "Create and update Microsoft Word .docx files."
KEYWORDS = "docx office openxml word"
AUTHOR = "Steve Canny"
AUTHOR_EMAIL = "python-docx@googlegroups.com"
URL = "https://github.com/python-openxml/python-docx"
LICENSE = text_of("LICENSE")
PACKAGES = find_packages(exclude=["tests", "tests.*"])
PACKAGE_DATA = {"docx": ["templates/*.xml", "templates/*.docx"]}
INSTALL_REQUIRES = ["lxml>=2.3.2"]
TEST_SUITE = "tests"
TESTS_REQUIRE = ["behave", "mock", "pyparsing", "pytest"]
CLASSIFIERS = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Office/Business :: Office Suites",
"Topic :: Software Development :: Libraries",
]
LONG_DESCRIPTION = text_of("README.rst") + "\n\n" + text_of("HISTORY.rst")
ZIP_SAFE = False
params = {
"name": NAME,
"version": VERSION,
"description": DESCRIPTION,
"keywords": KEYWORDS,
"long_description": LONG_DESCRIPTION,
"author": AUTHOR,
"author_email": AUTHOR_EMAIL,
"url": URL,
"license": LICENSE,
"packages": PACKAGES,
"package_data": PACKAGE_DATA,
"install_requires": INSTALL_REQUIRES,
"tests_require": TESTS_REQUIRE,
"test_suite": TEST_SUITE,
"classifiers": CLASSIFIERS,
"zip_safe": ZIP_SAFE,
}
setup(**params)
|
python-openxml/python-docx
|
setup.py
|
Python
|
mit
| 2,381
|
"""
Author: Junhong Chen
"""
from Bio import SeqIO
import gzip
import sys
import os
pe1 = []
pe2 = []
pname = []
for dirName, subdirList, fileList in os.walk(sys.argv[1]):
for fname in fileList:
tmp = fname.split(".")[0]
tmp = tmp[:len(tmp)-1]
if tmp not in pname:
pname.append(tmp)
pe1.append(dirName+"/"+tmp+"1.fq.gz")
pe2.append(dirName+"/"+tmp+"2.fq.gz")
def concat(name,file_list):
with open(name, 'w') as w_file:
for filen in file_list:
print 'working with',filen
with gzip.open(filen, 'rU') as o_file:
seq_records = SeqIO.parse(o_file, 'fastq')
SeqIO.write(seq_records, w_file, 'fastq')
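# Note: this script targets Python 2 (print statements); under Python 3 the
# gzip.open mode 'rU' would also need to change, e.g. to 'rt'.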
#print pe1
#print pe2
concat(sys.argv[2]+"-pe1.fq", pe1)
concat(sys.argv[2]+"-pe2.fq", pe2)
|
macmanes-lab/MCBS913
|
code/Junhong Chen/concatReads.py
|
Python
|
mit
| 925
|
class Color:
''' print() wrappers for console colors
'''
def red(*args, **kwargs): print("\033[91m{}\033[0m".format(" ".join(map(str,args))), **kwargs)
def green(*args, **kwargs): print("\033[92m{}\033[0m".format(" ".join(map(str,args))), **kwargs)
def yellow(*args, **kwargs): print("\033[93m{}\033[0m".format(" ".join(map(str,args))), **kwargs)
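# Example usage (Python 3; the functions take no self, so call them on the class):
#   Color.red("fatal:", "something broke")  # prints the joined args in red
#   Color.green("done", end='')             # extra kwargs pass through to print()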
|
banderlog/greed
|
greed/colors.py
|
Python
|
mit
| 370
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-23 22:17
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('backups', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='backupfirewall',
name='file_location',
),
migrations.RemoveField(
model_name='backupfirewall',
name='site_name',
),
migrations.RemoveField(
model_name='backuprouter',
name='file_location',
),
migrations.RemoveField(
model_name='backuprouter',
name='site_name',
),
]
|
Landver/netmon
|
apps/backups/migrations/0002_auto_20170424_0117.py
|
Python
|
mit
| 737
|
###################################################################
# Numexpr - Fast numerical array expression evaluator for NumPy.
#
# License: MIT
# Author: See AUTHORS.txt
#
# See LICENSE.txt and LICENSES/*.txt for details about copyright and
# rights to use.
####################################################################
version = '2.4.1'
release = False
if not release:
version += '.dev'
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('numexpr.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py', 'U', 1))
version += svn.version
|
cpcloud/numexpr
|
numexpr/version.py
|
Python
|
mit
| 855
|
# coding: utf-8
# # Preprocessing Notebook
#
# ### Author: James Foster, jmfoster@gmail.com
#
# #### Install gomill: http://mjw.woodcraft.me.uk/gomill/doc/0.7.4/install.html
# In[1]:
import numpy as np
import pandas as pd
from gomill import sgf
from gomill import ascii_boards
from gomill import sgf_moves
from IPython.core.debugger import Tracer
# In[2]:
def sgf_filename_to_game(game_filename):
"""
Read in sgf game file and convert to gomill Game object
"""
with open(game_filename, 'r') as myfile:
game_string=myfile.read() #.replace('\n', '')
g = sgf.Sgf_game.from_string(game_string)
return g
# In[3]:
def game_to_string(game):
"""
Print info about Game object
"""
    print game.get_winner()
    print game.get_size()
    print game.get_root().get_raw('BR')
    print
    for node in game.get_main_sequence():
        print node
# In[4]:
def show_sgf_file(sgf_game, move_number=None):
"""
Show the position from an SGF file. If a move number is specified, the position
before that move is shown (this is to match the behaviour of GTP loadsgf).
"""
try:
board, plays = sgf_moves.get_setup_and_moves(sgf_game)
except ValueError, e:
raise StandardError(str(e))
if move_number is not None:
move_number = max(0, move_number-1)
plays = plays[:move_number]
for colour, move in plays:
if move is None:
continue
row, col = move
try:
board.play(row, col, colour)
except ValueError:
raise StandardError("illegal move in sgf file")
print ascii_boards.render_board(board)
print
# In[5]:
def game_to_board(game, move_number=None):
"""
Convert gomill Game object to Board object. If move number is
specified, the position before that move is shown (this is to
match the behaviour of GTP loadsgf).
"""
if move_number<1:
raise ValueError('Game undefined for move_number < 1')
try:
board, plays = sgf_moves.get_setup_and_moves(game)
except ValueError, e:
raise StandardError(str(e))
if move_number is not None:
move_number = max(0, move_number-1)
if move_number==0: # Special case for first move of the game
turn, _ = plays[0]
plays = plays[:move_number]
swap_dict = {'w':'b', 'b':'w'}
for colour, move in plays:
if move is None:
continue
row, col = move
try:
board.play(row, col, colour)
except ValueError:
raise StandardError("illegal move in sgf file")
turn = swap_dict[colour]
if move_number is None or move_number > len(plays): # Game is over, it's neither player's turn
turn = None
return (board, turn)
# In[6]:
def game_move_to_board(game, move_number):
"""
Convert gomill Game object to Board object that includes only the specified move.
The position before the specified move is shown (this is to match the behaviour of GTP loadsgf).
"""
try:
board, plays = sgf_moves.get_setup_and_moves(game)
except ValueError, e:
raise StandardError(str(e))
if move_number is not None:
move_number = max(0, move_number-2)
play = plays[move_number]
colour, move = play # Unpack tuple
row, col = move # Unpack tuple
try:
board.play(row, col, colour)
except ValueError:
raise StandardError("illegal move in sgf file")
turn = colour
return (board, turn)
# In[7]:
def board_to_array(board, dimension=1, turn=None, white=-1, black=1):
"""
Convert gomill Board object to numpy 1D array (default) or 2D matrix.
If turn is None, use default values for white and black stones (default is white=-1, black=1).
Else, convert stones to perspective of player whose turn it is: 1 is my stone, -1 is your stone.
"""
size = board.side
if dimension==1:
array = np.zeros(size*size, dtype=np.int8) # Initialize numpy 1D array of zeros
elif dimension==2:
array = np.zeros((size,size), dtype=np.int8) # Initialize numpy 2D array of zeros
else:
raise ValueError('Invalid number of dimensions specified: ', dimension)
points = board.board_points
for row, col in points:
colour = board.board[row][col]
if turn: # Alternate perspectivers according to whose turn it is
if colour:
value = (colour==turn)*2-1 # value is 1 for player whose turn it is, -1 for other player
else: # Point was played but was captured, is now empty?
value = 0
else: # turn is none, don't alternate perspectives according to turn
if colour=='w':
value = white
elif colour=='b':
value = black
else: # Point was played but was captured, is now empty?
value = 0
row = size-row-1 # Convert Board row index (which starts at bottom of board) into matrix row index (which starts at top)
if dimension==1:
array[row*size+col] = value
elif dimension==2:
array[row,col] = value
else:
raise ValueError('Invalid number of dimensions specified: ', dimension)
return array
# In[8]:
# Test Representation conversions
def test_representation():
game_filename = './Game_Files/9x9/Go_Seigen/1968-08-00.sgf'
g = sgf_filename_to_game(game_filename)
move = 4
print show_sgf_file(g,move)
b, turn = game_to_board(g, move)
print ascii_boards.render_board(b)
matrix = board_to_array(b, dimension=2, turn=turn)
print
print matrix
matrix = board_to_array(b, dimension=2, turn=None)
print
print matrix
print
print board_to_array(b, dimension=1, turn=turn)
print board_to_array(b, dimension=1, turn=None)
print matrix.flatten(order='C')
print board_to_array(b, dimension=1)
assert (matrix.flatten(order='C') == board_to_array(b,1)).all()
#test_representation()
# In[9]:
def test_game_move_to_board():
game_file = './Game_Files/9x9/Go_Seigen/1968-08-00.sgf'
g = sgf_filename_to_game(game_file)
move = 5
b, turn = game_to_board(g, move)
print ascii_boards.render_board(b)
matrix = board_to_array(b, dimension=2)
print
print matrix
b, turn = game_to_board(g, move+1)
print ascii_boards.render_board(b)
matrix = board_to_array(b, dimension=2)
print
print matrix
print
b, turn = game_move_to_board(g, move+1)
print ascii_boards.render_board(b)
print
matrix = board_to_array(b, dimension=2, turn=turn)
print matrix
print
vector = board_to_array(b, dimension=1, turn=turn)
print vector
print turn
#test_game_move_to_board()
# In[10]:
import os
from fnmatch import fnmatch
def directory_to_data_files(root_dir, output_filename, size, print_progress=False):
"""
Load and convert all .sgf files from a root directory into text file of data vectors
"""
pattern = "*.sgf"
sgf_files = []
for path, subdirs, files in os.walk(root_dir):
for name in files:
if fnmatch(name, pattern):
sgf_files.append(os.path.join(path, name))
n_files = len(sgf_files)
# Open data file for writing
vectors = open(output_filename, 'wb')
# Convert sgf files to numerical array data files
for i, sgf_file in enumerate(sgf_files):
try:
game = sgf_filename_to_game(sgf_file)
if print_progress:
print str(i+1)+"/"+str(n_files), 'Processing file:', sgf_file
except ValueError as ve:
print 'Exception:',str(ve)+'.','File "'+sgf_file+'"', 'is likely malformed.'
for move in range(1,len(game.get_main_sequence())):
try:
# Create current move vector
board, turn = game_to_board(game, move)
vector = board_to_array(board, dimension=1, turn=turn)
# Create next move vector
next_move_board, turn = game_move_to_board(game, move+1) # Get board containing only the move after the current move
next_move_vector = board_to_array(next_move_board, dimension=1, turn=turn)
                # Create winner: 1 means the current player won, -1 means the other player won
winner = np.int8((game.get_winner()==turn)*2-1)
if len(vector)!=size or len(next_move_vector)!=size:
msg = 'Board size is '+str(len(vector))+'. Expected size is '+str(size)
raise SizeException(msg)
# Write data arrays to files
np.savetxt(vectors, winner[None], fmt='%i', newline=';')
np.savetxt(vectors, vector[None], fmt='%i', newline=';')
np.savetxt(vectors, next_move_vector[None], fmt='%i')
except TypeError as te:
print 'Exception:',str(te)+'.','File "'+sgf_file+'"', 'is likely malformed.'
except ValueError as ve:
print 'Exception:',str(ve)+'.','File "'+sgf_file+'"', 'is likely malformed.'
except IndexError as ie:
print 'Exception:',str(ie)+'.','File "'+sgf_file+'"', 'is likely malformed.'
except Exception as e:
print 'Exception:',str(e)+'.','File "'+sgf_file+'"', 'is likely malformed.'
vectors.close()
class SizeException(Exception):
pass
# In[11]:
def parse_line(line):
"""
Parse line string into winner, vector, and next_move_vector
"""
#line = line.rstrip() # Remove '\n' at end of line
line = line.split(';') # Split line into winner, vector, and next_move_vector
winner = np.int8(line[0])
vector = np.fromstring(line[1], dtype='int8', sep=' ')
next_move_vector = np.fromstring(line[2], dtype='int8', sep=' ')
return winner, vector, next_move_vector
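# Illustrative round-trip on a hypothetical 4-point board:
#   parse_line("1;0 1 -1 0;0 0 1 0")
#   -> (1, array([ 0,  1, -1,  0], dtype=int8), array([0, 0, 1, 0], dtype=int8))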
# In[12]:
from random_sampler3 import random_sampler
def sample_data(data_filename, k):
"""
Randomly sample k lines from file, parse them.
Return lists of winners, vectors, and next_move_vectors
"""
lines = random_sampler(filename=data_filename, k=k)
    state_tuples = map(parse_line, lines) # Apply parse_line() to each sampled line
state_lists = map(list, zip(*state_tuples)) # Unzip list of state_tuples into aligned-index list of winners, vectors, next_move_vectors
winners = state_lists[0]
vectors = state_lists[1]
next_move_vectors = state_lists[2]
return winners, vectors, next_move_vectors
# In[40]:
def file_len(fname):
    """Count the lines in a file (0 for an empty file)."""
    i = -1
    with open(fname) as f:
        for i, l in enumerate(f):
            pass
    return i + 1
# In[41]:
# Main method for running from command line
if __name__ == "__main__":
print 'main method executed'
# Convert and save data to file
root_dir = './Game_Files/9x9'
output_filename = './Data/data_9x9.txt'
#directory_to_data_files(root_dir, output_filename, size=81, print_progress=True)
# Load data from file
data_filename = './Data/data_9x9.txt'
max_examples = file_len(data_filename)
k = max_examples # Number of training examples to randomly sample from data file (note: repeated sampling could give repeat examples)
k = min(k, max_examples) # Don't try to sample more examples than rows in the data file
winners, vectors, next_move_vectors = sample_data(data_filename, k=k)
X = np.array(vectors) # Convert list of vectors into 2D array X
Y = np.array(next_move_vectors) # Convert list of next_move_vectors into 2D array Y
winners = np.array(winners) # Convert list of winners into 1D array winners
# In[ ]:
|
hotfuzzy/go
|
preprocessing.py
|
Python
|
mit
| 11,844
|
def pretty_date(time=False):
"""
Get a datetime object or a int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
from datetime import datetime
now = datetime.now()
    if isinstance(time, int):
        diff = now - datetime.fromtimestamp(time)
    elif isinstance(time, datetime):
        diff = now - time
    elif not time:
        diff = now - now
    else:
        raise TypeError("expected an int epoch timestamp, a datetime, or a falsy value")
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str(second_diff) + " seconds ago"
if second_diff < 120:
return "a minute ago"
        if second_diff < 3600:
            return str(second_diff // 60) + " minutes ago"
        if second_diff < 7200:
            return "an hour ago"
        if second_diff < 86400:
            return str(second_diff // 3600) + " hours ago"
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " days ago"
    if day_diff < 31:
        return str(day_diff // 7) + " weeks ago"
    if day_diff < 365:
        return str(day_diff // 30) + " months ago"
    return str(day_diff // 365) + " years ago"
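if __name__ == '__main__':
    # Smoke test (illustrative): recent datetimes fall into the expected buckets.
    from datetime import datetime, timedelta
    print(pretty_date(datetime.now()))                       # just now
    print(pretty_date(datetime.now() - timedelta(hours=3)))  # 3 hours ago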
|
zellahenderson/PennApps2013
|
src/prettydate.py
|
Python
|
mit
| 1,307
|
'''
Pull one page of 100 results from seeclickfix using the global PARAMS
value if the parameters are not supplied. If there are more than 100
results, make another pull passing paramters that include the next page to
be pulled.
Nicole Donnelly 30May2016, updated 21Oct2016
'''
import requests
import json
def get_seeclickfix(page=1, pulled=0, search_params=None):
    # Avoid a mutable default argument: build the default query on each call
    if search_params is None:
        search_params = {'place_url': 'district-of-columbia',
                         'after': '2016-10-01', 'per_page': 100}
    # base_url for the seeclickfix api to build the request url
base_url = 'https://seeclickfix.com/api/v2/issues'
# send a get request with the url, parameters, and header
myResponse = requests.get(url=base_url, params=search_params)
# For successful API call, response code will be 200 (OK)
if(myResponse.ok):
# Loading the response data into a dict variable
data = json.loads(myResponse.content.decode('utf-8'))
# get the total search result count and set it to count_all. the
# API only allows 100 results per page
count_all = data['metadata']['pagination']['entries']
# track the number of items we have pulled with our requests
pulled = pulled + 100
# create a file name that reflects which page of results it contains
# and write that file
file_name = 'data%d.json' % page
with open(file_name, 'w') as outfile:
json.dump(data, outfile)
# check to see if we pulled all the results. If not, increment the
# page count, update the parameters dictionary to include the page
# number, and run the process again.
if pulled < count_all:
page += 1
page_param = {'page': page}
search_params.update(page_param)
print(search_params)
get_seeclickfix(page, pulled, search_params)
else:
# If response code is not ok (200), print the resulting http error
# code with description
myResponse.raise_for_status()
if __name__ == '__main__':
get_seeclickfix()
|
nd1/women_in_tech_summit_DC2017
|
api/seeclickfix_api.py
|
Python
|
mit
| 2,074
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import os
import argparse
import tensorflow as tf
from gym import wrappers
from yarll.environment.registration import make
class ModelRunner(object):
"""
Run an already learned model.
Currently only supports one variation of an environment.
"""
def __init__(self, env, model_directory: str, save_directory: str, **usercfg) -> None:
super(ModelRunner, self).__init__()
self.env = env
self.model_directory = model_directory
self.save_directory = save_directory
self.config = dict(
episode_max_length=self.env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps'),
repeat_n_actions=1
)
self.config.update(usercfg)
self.session = tf.Session()
self.saver = tf.train.import_meta_graph(os.path.join(self.model_directory, "model.meta"))
self.saver.restore(self.session, os.path.join(self.model_directory, "model"))
self.action = tf.get_collection("action")[0]
self.states = tf.get_collection("states")[0]
def choose_action(self, state):
"""Choose an action."""
return self.session.run([self.action], feed_dict={self.states: [state]})[0]
def get_trajectory(self, render: bool = False):
"""
        Run the agent-environment loop for one whole episode (trajectory),
        optionally rendering the environment at each step.
"""
state = self.env.reset()
for _ in range(self.config["episode_max_length"]):
action = self.choose_action(state)
for _ in range(self.config["repeat_n_actions"]):
                state, _, done, _ = self.env.step(action) # keep the new state so the next action is based on it
if done: # Don't continue if episode has already ended
break
if done:
break
if render:
self.env.render()
return
def run(self):
for _ in range(self.config["n_iter"]):
self.get_trajectory()
parser = argparse.ArgumentParser()
parser.add_argument("environment", metavar="env", type=str, help="Gym environment to execute the model on.")
parser.add_argument("model_directory", type=str, help="Directory from where model files are loaded.")
parser.add_argument("save_directory", type=str, help="Directory where results of running the model are saved")
parser.add_argument("--iterations", default=100, type=int, help="Number of iterations to run the algorithm.")
def main():
args = parser.parse_args()
env = make(args.environment)
runner = ModelRunner(env, args.model_directory, args.save_directory, n_iter=args.iterations)
try:
runner.env = wrappers.Monitor(runner.env, args.save_directory, video_callable=False, force=True)
runner.run()
except KeyboardInterrupt:
pass
if __name__ == "__main__":
main()
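# Usage sketch (hypothetical paths):
#   python run_model.py CartPole-v0 ./trained_models/cartpole ./results --iterations 10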
|
arnomoonens/DeepRL
|
yarll/scripts/run_model.py
|
Python
|
mit
| 2,858
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-25 15:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pages', '0012_auto_20160519_1740'),
]
operations = [
migrations.AddField(
model_name='formpage',
name='button_name',
field=models.CharField(default='Submit', max_length=500, verbose_name='Button name'),
),
]
|
bruecksen/isimip
|
isi_mip/pages/migrations/0013_formpage_button_name.py
|
Python
|
mit
| 500
|
from __future__ import print_function
import argparse
from collections import OrderedDict
import json
import os
import logging
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import normalize
from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score
from scipy.sparse import csr_matrix
from keras.utils.io_utils import HDF5Matrix
#from keras.utils.visualize_util import plot
from keras.optimizers import SGD, Adam
from sklearn.metrics import r2_score
import numpy as np
import theano.tensor as tt
import pandas as pd
import random
import common
import models
from predict import obtain_predictions
from eval import do_eval
import h5py
class Config(object):
"""Configuration for the training process."""
def __init__(self, params, normalize=False, whiten=True):
self.model_id = common.get_next_model_id()
self.norm = normalize
self.whiten = whiten
self.x_path = '%s_%sx%s' % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
self.y_path = '%s_%s_%s' % (params['dataset']['fact'],params['dataset']['dim'],params['dataset']['dataset'])
self.dataset_settings = params['dataset']
self.training_params = params['training']
self.model_arch = params['cnn']
self.predicting_params = params['predicting']
def get_dict(self):
object_dict = self.__dict__
first_key = "model_id"
conf_dict = OrderedDict({first_key: object_dict[first_key]})
conf_dict.update(object_dict)
return conf_dict
def _squared_magnitude(x):
return tt.sqr(x).sum(axis=-1)
def _magnitude(x):
return tt.sqrt(tt.maximum(_squared_magnitude(x), np.finfo(x.dtype).tiny))
def cosine(x, y):
return tt.clip((1 - (x * y).sum(axis=-1) /
(_magnitude(x) * _magnitude(y))) / 2, 0, 1)
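# Illustrative property of the distance above: cosine(x, x) evaluates to 0 and
# cosine(x, -x) to 1, since similarity is mapped from [-1, 1] to [0, 1] and clipped.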
def load_sparse_csr(filename):
loader = np.load(filename)
return csr_matrix(( loader['data'], loader['indices'], loader['indptr']),
shape = loader['shape'])
def build_model(config):
"""Builds the cnn."""
params = config.model_arch
get_model = getattr(models, 'get_model_'+str(params['architecture']))
model = get_model(params)
#model = model_kenun.build_convnet_model(params)
# Learning setup
t_params = config.training_params
sgd = SGD(lr=t_params["learning_rate"], decay=t_params["decay"],
momentum=t_params["momentum"], nesterov=t_params["nesterov"])
adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
optimizer = eval(t_params['optimizer'])
metrics = ['mean_squared_error']
if config.model_arch["final_activation"] == 'softmax':
metrics.append('categorical_accuracy')
if t_params['loss_func'] == 'cosine':
loss_func = eval(t_params['loss_func'])
else:
loss_func = t_params['loss_func']
model.compile(loss=loss_func, optimizer=optimizer,metrics=metrics)
return model
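# Illustrative shape of the params expected above (keys inferred from usage in
# this file; real values come from the JSON config files):
# params = {
#     'dataset': {...},
#     'training': {'learning_rate': 0.1, 'decay': 1e-6, 'momentum': 0.9,
#                  'nesterov': True, 'optimizer': 'sgd', 'loss_func': 'cosine', ...},
#     'cnn': {'architecture': ..., 'final_activation': ..., ...},
#     'predicting': {...},
# }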
def load_data_preprocesed(params, X_path, Y_path, dataset, val_percent, test_percent, n_samples, with_metadata=False, only_metadata=False, metadata_source='rovi'):
    factors = np.load(common.DATASETS_DIR+'/y_train_'+Y_path+'.npy') # NOTE: remove S
index_factors = open(common.DATASETS_DIR+'/items_index_train_'+dataset+'.tsv').read().splitlines()
if not only_metadata:
all_X = np.load(common.TRAINDATA_DIR+'/X_train_'+X_path+'.npy')
index_train = open(common.TRAINDATA_DIR+'/index_train_%s.tsv' % (X_path)).read().splitlines()
all_Y = np.zeros((len(index_train),factors.shape[1]))
index_factors_inv = dict()
for i,item in enumerate(index_factors):
index_factors_inv[item] = i
for i,item in enumerate(index_train):
all_Y[i,:] = factors[index_factors_inv[item]]
else:
all_Y = factors
if with_metadata:
if 'w2v' in metadata_source:
all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))[:,:int(params['cnn']['sequence_length'])]
elif 'model' in metadata_source or not params['dataset']['sparse']:
all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))
else:
all_X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,dataset)).todense()
all_X_in_meta = all_X = all_X_meta
print(all_X.shape)
print(all_Y.shape)
if n_samples != 'all':
n_samples = int(n_samples)
all_X = all_X[:n_samples]
all_Y = all_Y[:n_samples]
if with_metadata:
all_X_in_meta = all_X_in_meta[:n_samples]
    if params['training']['normalize_y']:
normalize(all_Y,copy=False)
if params['training']["val_from_file"]:
Y_val = np.load(common.DATASETS_DIR+'/y_val_'+Y_path+'.npy')
        Y_test = np.load(common.DATASETS_DIR+'/y_test_'+Y_path+'.npy') #!!! NOTE: remove S from trainS
if params['dataset']['sparse']:
X_val = load_sparse_csr(common.TRAINDATA_DIR+'/X_val_%s_%s.npz' % (metadata_source,dataset)).todense()
X_test = load_sparse_csr(common.TRAINDATA_DIR+'/X_test_%s_%s.npz' % (metadata_source,dataset)).todense()
else:
X_val = np.load(common.TRAINDATA_DIR+'/X_val_%s_%s.npy' % (metadata_source,dataset))
X_test = np.load(common.TRAINDATA_DIR+'/X_test_%s_%s.npy' % (metadata_source,dataset))
X_train = all_X
Y_train = all_Y
else:
N = all_Y.shape[0]
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
logging.debug("Training data points: %d" % N_train)
logging.debug("Validation data points: %d" % N_val)
logging.debug("Test data points: %d" % (N - N_train - N_val))
if not only_metadata:
# Slice data
X_train = all_X[:N_train]
X_val = all_X[N_train:N_train + N_val]
X_test = all_X[N_train + N_val:]
Y_train = all_Y[:N_train]
Y_val = all_Y[N_train:N_train + N_val]
Y_test = all_Y[N_train + N_val:]
if with_metadata:
if only_metadata:
X_train = all_X_in_meta[:N_train]
X_val = all_X_in_meta[N_train:N_train + N_val]
X_test = all_X_in_meta[N_train + N_val:]
else:
X_train = [X_train,all_X_in_meta[:N_train]]
X_val = [X_val,all_X_in_meta[N_train:N_train + N_val]]
X_test = [X_test,all_X_in_meta[N_train + N_val:]]
return X_train, Y_train, X_val, Y_val, X_test, Y_test
def load_data_hf5(params,val_percent, test_percent):
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%s.hdf5" % (params['dataset']['dataset'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
N = f["targets"].shape[0]
f.close()
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
X_train = HDF5Matrix(hdf5_file, 'features', start=0, end=N_train)
Y_train = HDF5Matrix(hdf5_file, 'targets', start=0, end=N_train)
X_val = HDF5Matrix(hdf5_file, 'features', start=N_train, end=N_train+N_val)
Y_val = HDF5Matrix(hdf5_file, 'targets', start=N_train, end=N_train+N_val)
X_test = HDF5Matrix(hdf5_file, 'features', start=N_train+N_val, end=N)
Y_test = HDF5Matrix(hdf5_file, 'targets', start=N_train+N_val, end=N)
return X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train
def load_data_hf5_memory(params,val_percent, test_percent, y_path, id2gt, X_meta = None, val_from_file = False):
if val_from_file:
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
index_train = f["index"][:]
index_train = np.delete(index_train, np.where(index_train == ""))
N_train = index_train.shape[0]
val_hdf5_file = common.PATCHES_DIR+"/patches_val_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f_val = h5py.File(val_hdf5_file,"r")
X_val = f_val['features'][:]
#Y_val = f_val['targets'][:]
factors_val = np.load(common.DATASETS_DIR+'/y_val_'+y_path+'.npy')
index_factors_val = open(common.DATASETS_DIR+'/items_index_val_'+params['dataset']['dataset']+'.tsv').read().splitlines()
id2gt_val = dict((index,factor) for (index,factor) in zip(index_factors_val,factors_val))
index_val = [i for i in f_val['index'][:] if i in id2gt_val]
X_val = np.delete(X_val, np.where(index_val == ""), axis=0)
index_val = np.delete(index_val, np.where(index_val == ""))
Y_val = np.asarray([id2gt_val[id] for id in index_val])
test_hdf5_file = common.PATCHES_DIR+"/patches_test_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f_test = h5py.File(test_hdf5_file,"r")
X_test = f_test['features'][:]
#Y_test = f_test['targets'][:]
factors_test = np.load(common.DATASETS_DIR+'/y_test_'+y_path+'.npy')
index_factors_test = open(common.DATASETS_DIR+'/items_index_test_'+params['dataset']['dataset']+'.tsv').read().splitlines()
id2gt_test = dict((index,factor) for (index,factor) in zip(index_factors_test,factors_test))
index_test = [i for i in f_test['index'][:] if i in id2gt_test]
X_test = np.delete(X_test, np.where(index_test == ""), axis=0)
index_test = np.delete(index_test, np.where(index_test == ""))
Y_test = np.asarray([id2gt_test[id] for id in index_test])
else:
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
index_all = f["index"][:]
N = index_all.shape[0]
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
X_val = f['features'][N_train:N_train+N_val]
index_val = f['index'][N_train:N_train+N_val]
X_val = np.delete(X_val, np.where(index_val == ""), axis=0)
index_val = np.delete(index_val, np.where(index_val == ""))
Y_val = np.asarray([id2gt[id] for id in index_val])
X_test = f['features'][N_train+N_val:N]
index_test = f['index'][N_train+N_val:N]
print(index_test.shape)
print(X_test.shape)
X_test = np.delete(X_test, np.where(index_test == ""), axis=0)
index_test = np.delete(index_test, np.where(index_test == ""))
print(index_test.shape)
print(X_test.shape)
Y_test = np.asarray([id2gt[id] for id in index_test])
print(Y_test.shape)
index_train = f['index'][:N_train]
index_train = np.delete(index_train, np.where(index_train == ""))
N_train = index_train.shape[0]
    if X_meta is not None: # 'is not None' avoids NumPy's elementwise '!=' comparison
X_val = [X_val,X_meta[N_train:N_train+N_val]]
X_test = [X_test,X_meta[N_train+N_val:N]]
return X_val, Y_val, X_test, Y_test, N_train
def batch_block_generator(params, y_path, N_train, id2gt, X_meta=None,
val_from_file=False):
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
block_step = 50000
batch_size = params['training']['n_minibatch']
randomize = True
with_meta = False
    if X_meta is not None: # avoid elementwise ndarray comparison with '!='
with_meta = True
while 1:
for i in range(0, N_train, block_step):
x_block = f['features'][i:min(N_train, i+block_step)]
index_block = f['index'][i:min(N_train, i+block_step)]
#y_block = f['targets'][i:min(N_train,i+block_step)]
x_block = np.delete(x_block, np.where(index_block == ""), axis=0)
index_block = np.delete(index_block, np.where(index_block == ""))
y_block = np.asarray([id2gt[id] for id in index_block])
if params['training']['normalize_y']:
normalize(y_block, copy=False)
            items_list = list(range(x_block.shape[0])) # materialize so random.shuffle works on Python 3 too
if randomize:
random.shuffle(items_list)
for j in range(0, len(items_list), batch_size):
if j+batch_size <= x_block.shape[0]:
items_in_batch = items_list[j:j+batch_size]
x_batch = x_block[items_in_batch]
y_batch = y_block[items_in_batch]
if with_meta:
x_batch = [x_batch, X_meta[items_in_batch]]
yield (x_batch, y_batch)
def process(params,with_predict=True,with_eval=True):
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
params['cnn']['n_out'] = int(params['dataset']['dim'])
#params['cnn']['n_frames'] = int(params['dataset']['window'] * SR / float(HR))
with_metadata = params['dataset']['with_metadata']
only_metadata = params['dataset']['only_metadata']
metadata_source = params['dataset']['meta-suffix']
if with_metadata:
if 'w2v' in metadata_source:
X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,params['dataset']['dataset']))[:,:int(params['cnn']['sequence_length'])]
params['cnn']['n_metafeatures'] = len(X_meta[0])
if 'meta-suffix2' in params['dataset']:
X_meta2 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix2'],params['dataset']['dataset']))
params['cnn']['n_metafeatures2'] = len(X_meta2[0])
if 'meta-suffix3' in params['dataset']:
X_meta3 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix3'],params['dataset']['dataset']))
params['cnn']['n_metafeatures3'] = len(X_meta3[0])
if 'meta-suffix4' in params['dataset']:
X_meta4 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix4'],params['dataset']['dataset']))
params['cnn']['n_metafeatures4'] = len(X_meta4[0])
elif 'model' in metadata_source or not params['dataset']['sparse']:
X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,params['dataset']['dataset']))
params['cnn']['n_metafeatures'] = len(X_meta[0])
if 'meta-suffix2' in params['dataset']:
X_meta2 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix2'],params['dataset']['dataset']))
params['cnn']['n_metafeatures2'] = len(X_meta2[0])
if 'meta-suffix3' in params['dataset']:
X_meta3 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix3'],params['dataset']['dataset']))
params['cnn']['n_metafeatures3'] = len(X_meta3[0])
if 'meta-suffix4' in params['dataset']:
X_meta4 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix4'],params['dataset']['dataset']))
params['cnn']['n_metafeatures4'] = len(X_meta4[0])
else:
X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,params['dataset']['dataset'])).todense()
params['cnn']['n_metafeatures'] = X_meta.shape[1]
if 'meta-suffix2' in params['dataset']:
X_meta2 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix2'],params['dataset']['dataset']))
params['cnn']['n_metafeatures2'] = X_meta2.shape[1]
            if 'meta-suffix3' in params['dataset']:
                X_meta3 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix3'],params['dataset']['dataset']))
                params['cnn']['n_metafeatures3'] = X_meta3.shape[1]
            if 'meta-suffix4' in params['dataset']:
                X_meta4 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix4'],params['dataset']['dataset']))
                params['cnn']['n_metafeatures4'] = X_meta4.shape[1]
print(X_meta.shape)
else:
X_meta = None
config = Config(params)
model_dir = os.path.join(common.MODELS_DIR, config.model_id)
common.ensure_dir(common.MODELS_DIR)
common.ensure_dir(model_dir)
model_file = os.path.join(model_dir, config.model_id + common.MODEL_EXT)
logging.debug("Building Network...")
#model = build_model(config)
model = build_model(config)
print(model.summary())
#plot(model, to_file='model2.png', show_shapes=True)
trained_model = config.get_dict()
# Save model
#plot(model, to_file=os.path.join(model_dir, config.model_id + PLOT_EXT))
common.save_model(model, model_file)
logging.debug(trained_model["model_id"])
logging.debug("Loading Data...")
with_generator = True
if only_metadata:
X_train, Y_train, X_val, Y_val, X_test, Y_test = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, metadata_source)
if 'meta-suffix2' in params['dataset']:
X_train2, Y_train2, X_val2, Y_val2, X_test2, Y_test2 = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix2'])
X_train = [X_train,X_train2]
X_val = [X_val,X_val2]
X_test = [X_test,X_test2]
print("X_train bi", len(X_train))
if 'meta-suffix3' in params['dataset']:
X_train3, Y_train3, X_val3, Y_val3, X_test3, Y_test3 = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix3'])
X_train.append(X_train3)
X_val.append(X_val3)
X_test.append(X_test3)
print("X_train tri", len(X_train))
if 'meta-suffix4' in params['dataset']:
X_train4, Y_train4, X_val4, Y_val4, X_test4, Y_test4 = \
load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"],
config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix4'])
X_train.append(X_train4)
X_val.append(X_val4)
X_test.append(X_test4)
print("X_train four", len(X_train))
else:
if with_generator:
id2gt = dict()
factors = np.load(common.DATASETS_DIR+'/y_train_'+config.y_path+'.npy')
index_factors = open(common.DATASETS_DIR+'/items_index_train_'+params['dataset']['dataset']+'.tsv').read().splitlines()
id2gt = dict((index,factor) for (index,factor) in zip(index_factors,factors))
X_val, Y_val, X_test, Y_test, N_train = load_data_hf5_memory(params,config.training_params["validation"],config.training_params["test"],config.y_path,id2gt,X_meta,config.training_params["val_from_file"])
if params['dataset']['nsamples'] != 'all':
N_train = min(N_train,params['dataset']['nsamples'])
else:
X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train = load_data_hf5(params,config.training_params["validation"],config.training_params["test"])
trained_model["whiten_scaler"] = common.TRAINDATA_DIR+'/scaler_%s.pk' % config.x_path
logging.debug("Training...")
if config.model_arch["final_activation"] == 'softmax':
monitor_metric = 'val_categorical_accuracy'
else:
monitor_metric = 'val_loss'
early_stopping = EarlyStopping(monitor=monitor_metric, patience=4)
if only_metadata:
epochs = model.fit(X_train, Y_train,
batch_size=config.training_params["n_minibatch"],
#shuffle='batch',
nb_epoch=config.training_params["n_epochs"],
verbose=1, validation_data=(X_val, Y_val),
callbacks=[early_stopping])
else:
if with_generator:
print(N_train)
epochs = model.fit_generator(batch_block_generator(params,config.y_path,N_train,id2gt,X_meta,config.training_params["val_from_file"]),
samples_per_epoch = N_train-(N_train % config.training_params["n_minibatch"]),
nb_epoch = config.training_params["n_epochs"],
verbose=1,
validation_data = (X_val, Y_val),
callbacks=[early_stopping])
else:
epochs = model.fit(X_train, Y_train,
batch_size=config.training_params["n_minibatch"],
shuffle='batch',
nb_epoch=config.training_params["n_epochs"],
verbose=1,
validation_data=(X_val, Y_val),
callbacks=[early_stopping])
model.save_weights(os.path.join(model_dir, config.model_id + common.WEIGHTS_EXT))
logging.debug("Saving trained model %s in %s..." %
(trained_model["model_id"], common.DEFAULT_TRAINED_MODELS_FILE))
common.save_trained_model(common.DEFAULT_TRAINED_MODELS_FILE, trained_model)
logging.debug("Evaluating...")
print(X_test[0].shape,X_test[1].shape)
preds=model.predict(X_test)
print(preds.shape)
if params["dataset"]["evaluation"] in ['binary','multiclass']:
y_pred = (preds > 0.5).astype('int32')
acc = accuracy_score(Y_test,y_pred)
prec = precision_score(Y_test,y_pred,average='macro')
recall = recall_score(Y_test,y_pred,average='macro')
f1 = f1_score(Y_test,y_pred,average='macro')
print('Accuracy', acc)
print("%.3f\t%.3f\t%.3f" % (prec,recall,f1))
if params["dataset"]["fact"] == 'class':
good_classes = np.nonzero(Y_test.sum(0))[0]
print(Y_test.shape,preds.shape)
#roc_auc=roc_auc_score(Y_test[:,good_classes],preds[:,good_classes])
#logging.debug('ROC-AUC '+str(roc_auc))
#pr_auc = average_precision_score(Y_test[:,good_classes],preds[:,good_classes])
#print('PR-AUC',pr_auc)
#r2 = roc_auc
elif params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']:
r2s = []
for i,pred in enumerate(preds):
r2 = r2_score(Y_test[i],pred)
r2s.append(r2)
r2 = np.asarray(r2s).mean()
logging.debug('R2 avg '+str(r2))
# Batch prediction
if X_test[1].shape == Y_test[1].shape:
score = model.evaluate(X_test, Y_test, verbose=0)
logging.debug(score)
logging.debug(model.metrics_names)
print(score)
trained_model["loss_score"] = score[0]
trained_model["mse"] = score[1]
if params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']:
trained_model["r2"] = r2
fw=open(common.DATA_DIR+'/results/train_results.txt','a')
fw.write(trained_model["model_id"]+'\n')
if params["training"]["loss_func"] == 'binary_crossentropy':
fw.write('ROC-AUC: '+str(roc_auc)+'\n')
print('ROC-AUC: '+str(roc_auc))
fw.write('Loss: '+str(score[0])+' ('+config.training_params["loss_func"]+')\n')
fw.write('MSE: '+str(score[1])+'\n')
elif params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']:
fw.write('R2 avg: '+str(r2)+'\n')
print('R2 avg: '+str(r2))
fw.write('Loss: '+str(score[0])+' ('+config.training_params["loss_func"]+')\n')
fw.write('MSE: '+str(score[1])+'\n')
fw.write(json.dumps(epochs.history)+"\n\n")
fw.close()
if with_predict:
trained_models = pd.read_csv(common.DEFAULT_TRAINED_MODELS_FILE, sep='\t')
model_config = trained_models[trained_models["model_id"] == trained_model["model_id"]]
model_config = model_config.to_dict(orient="list")
testset = open(common.DATASETS_DIR+'/items_index_test_%s.tsv' % (config.dataset_settings["dataset"])).read().splitlines()
if config.training_params["val_from_file"] and not only_metadata:
predictions, predictions_index = obtain_predictions(model_config, testset, trained_model["model_id"], config.predicting_params["trim_coeff"], model=model, with_metadata=with_metadata, only_metadata=only_metadata, metadata_source=metadata_source, with_patches=True)
else:
predictions, predictions_index = obtain_predictions(model_config, testset, trained_model["model_id"], config.predicting_params["trim_coeff"], model=model, with_metadata=with_metadata, only_metadata=only_metadata, metadata_source=metadata_source)
print("Predictions created")
if with_eval:
do_eval(trained_model["model_id"],get_roc=True,get_map=True,get_p=True,predictions=predictions,predictions_index=predictions_index)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Evaluates the model',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p',
'--params',
dest="params_file",
help='JSON file with params',
default=False)
parser.add_argument('-pred',
'--predict',
dest="with_predict",
help='Predict factors',
action='store_true',
default=False)
parser.add_argument('-eval',
'--eval',
dest="with_eval",
help='Eval factors',
action='store_true',
default=False)
parser.add_argument('-m',
'--metadata',
dest="with_metadata",
help='Use metadata',
action='store_true',
default=False)
parser.add_argument('-om',
'--only_metadata',
dest="only_metadata",
help='Use only metadata',
action='store_true',
default=False)
parser.add_argument('-ms',
'--metadata_source',
dest="metadata_source",
type=str,
help='Suffix of metadata files',
default="rovi")
args = parser.parse_args()
params = models.params_1
if args.params_file:
params = json.load(open(args.params_file))
    process(params, with_predict=args.with_predict, with_eval=args.with_eval)
|
sergiooramas/tartarus
|
src/train.py
|
Python
|
mit
| 27,751
|
from jinja2 import Template
import codecs
def render(file, props=None):
    if props is None:
        return '404'
with codecs.open('./views/' + file + '.html', 'r', encoding='utf8') as f:
content = f.read()
templated = Template(content).render(props)
return templated
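# Minimal usage sketch, assuming a template at ./views/home.html that
# contains "Hello {{ name }}!":
if __name__ == '__main__':
    print(render('home', {'name': 'World'})) # -> Hello World!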
|
avocadoinnocenceproject/farflungfruit
|
render.py
|
Python
|
mit
| 303
|
#!/usr/bin/env python3
from mathbind.types import BasicType
class BasicValueType(BasicType):
"""
Represents a basic pure type that can be passed by value, thus excluding arrays and pointers.
Attributes:
- typename (str): basic C typename (int, long long, unsigned, bool, etc)
- c_math_name (str): corresponding Mathematica C type
- math_name (str): corresponding Mathematica type (Integer, Real)
- c_name (str): corresponding C type (int, long long, float).
"""
def __init__(self, typename):
self.typename = typename
type_parts = set(typename.split())
self.c_name = typename
if not type_parts:
raise ValueError
elif {'float', 'double'} & type_parts:
self.c_math_name = 'mreal'
self.math_name = 'Real'
elif 'bool' in type_parts:
self.c_name = 'int'
self.c_math_name = 'mbool'
self.math_name = 'Boolean'
elif not type_parts - {'signed', 'unsigned', 'char', 'int', 'short', 'long'}:
self.c_math_name = 'mint'
self.math_name = 'Integer'
else:
raise ValueError('Unrecognized C type')
@classmethod
def from_str(cls, s):
"""
Tries to build a new BasicValueType from the string specification, failing if
the type is a pointer or array-like.
"""
if '*' in s or '[' in s or ']' in s:
raise ValueError('Not a valid basic C type')
        while '  ' in s:
            s = s.replace('  ', ' ')
return BasicValueType(s.strip())
@classmethod
def from_prototype_cstr(cls, s):
"""
Tries to extract (type, argname) from the string.
"""
        while '  ' in s:
            s = s.replace('  ', ' ')
s = s.strip()
if not s.replace(' ', '').replace('_', '').isalnum():
raise ValueError('Unrecognized characters')
*words, argname = s.split()
return BasicValueType.from_str(' '.join(words)), argname.strip()
def __repr__(self):
return 'BasicValueType(typename=%r)' % self.typename
def __eq__(self, other):
return self.typename == other.typename
def retrieve_cstr(self, argname, index, tab='', suffix=None):
if suffix is None:
suffix = self.default_suffix
form = '{tab}{self.c_name} {argname} = MArgument_get{self.math_name}(Args{suffix}[{index}]);\n'
return form.format(argname=argname, self=self, tab=tab, index=index, suffix=suffix)
def return_cstr(self, func_call, tab='', suffix=None):
if suffix is None:
suffix = self.default_suffix
form = (
'{tab}{self.c_name} return_value{suffix} = {func_call};\n'
'{tab}MArgument_set{self.math_name}(Res{suffix}, return_value{suffix});\n'
)
return form.format(func_call=func_call, tab=tab, self=self, suffix=suffix)
def prototype_cstr(self, argname):
return self.c_name + ' ' + argname
def prototype_return_cstr(self):
"""
Returns a C string representing the declaration in a prototype return.
"""
return self.c_name
@property
def math_convert_f(self):
"""
Returns the Mathematica function responsible for converting values
to this one.
"""
if 'float' in self.typename or 'double' in self.typename:
return 'N'
else:
return 'IntegerPart'
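if __name__ == '__main__':
    # Quick demo (illustrative declaration): parse a C prototype argument and
    # show the corresponding Mathematica/LibraryLink type names.
    t, name = BasicValueType.from_prototype_cstr('unsigned long x')
    print(t.math_name, t.c_math_name, name) # Integer mint x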
|
diogenes1oliveira/mathbind
|
mathbind/types/basicvaluetype.py
|
Python
|
mit
| 3,498
|
import os
from setuptools import setup
LONG_DESCRIPTION = """
A modular framework for mobile surveys and field data collection via offline-capable mobile web apps.
"""
def readme():
try:
readme = open('README.md')
except IOError:
return LONG_DESCRIPTION
else:
return readme.read()
setup(
name='wq',
use_scm_version=True,
author='S. Andrew Sheppard',
author_email='andrew@wq.io',
url='https://wq.io/',
license='MIT',
description=LONG_DESCRIPTION.strip(),
long_description=readme(),
long_description_content_type='text/markdown',
install_requires=[
'wq.app>=1.3.0b1,<1.4.0',
'wq.build>=1.3.0b1,<1.4.0',
'wq.create>=1.3.0b1,<1.4.0',
'wq.db>=1.3.0b1,<1.4.0',
],
python_requires='>=3',
packages=['wq'],
namespace_packages=['wq'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: JavaScript',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3 :: Only',
'Framework :: Django',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.0',
'Framework :: Django :: 2.1',
'Framework :: Django :: 2.2',
        'Framework :: Django :: 3.0',
        'Framework :: Django :: 3.1',
        'Framework :: Django :: 3.2',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Text Processing :: Markup :: HTML',
'Topic :: Scientific/Engineering :: GIS',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Pre-processors',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Text Processing :: Markup :: XML',
],
setup_requires=[
'setuptools_scm',
],
project_urls={
'Homepage': 'https://wq.io/',
'Documentation': 'https://wq.io/',
'Source': 'https://github.com/wq/wq',
'Release Notes': 'https://github.com/wq/wq/releases',
'Issues': 'https://github.com/wq/wq/issues',
},
)
|
wq/wq
|
setup.py
|
Python
|
mit
| 2,538
|
# Retrieves all text files from CNAM ABU (see license here : http://abu.cnam.fr/cgi-bin/donner_licence) for further treatment
import urllib.request
import os
from bs4 import BeautifulSoup, SoupStrainer
# Proxy handling
proxies = {'http':''}
opnr = urllib.request.build_opener(urllib.request.ProxyHandler(proxies))
urllib.request.install_opener( opnr )
with urllib.request.urlopen('http://abu.cnam.fr/BIB/') as response:
html = response.read()
# Search for all links towards a text file
for link in BeautifulSoup(html, 'html.parser', parse_only=SoupStrainer('a')):
if link.has_attr('href'):
if(link['href'].startswith('/cgi-bin/go')):
name = link['href'].split('?')[1]
print(name)
# Download text file
with urllib.request.urlopen('http://abu.cnam.fr/cgi-bin/donner_unformated?'+name) as response:
            # Decode from ISO-Latin-1
text = response.read().decode('ISO-8859-1', errors='replace')
lines = text.split('\n')
dir = 'Texts/'+name
os.mkdir(dir)
read=False
cnt=0
buffstring=''
# Reads text file from "DEBUT DU FICHIER" to "FIN DU FICHIER", regroups lines in packets of ~15000 chars and stores in text file
for i in range(len(lines)):
if "DEBUT DU FICHIER" in lines[i] or "FIN DU FICHIER" in lines[i]:
read = not(read)
l = len(buffstring)
                    if read:
                        if l>15000:
                            f=open(dir+'/'+str(cnt)+'.txt','w',encoding='utf8')
                            f.write(buffstring)
                            f.close()
                            buffstring=''
                            cnt = cnt+1
                        # Append the current line even right after a flush so it is not dropped
                        buffstring = buffstring+' '+lines[i]
if len(buffstring)>0:
f=open(dir+'/'+str(cnt)+'.txt','w',encoding='utf8')
f.write(buffstring)
f.close()
|
Mandrathax/Entropy
|
Scripts/french_texts_dl.py
|
Python
|
mit
| 2,276
|
#!/usr/bin/env python3
import math, logging, threading, concurrent.futures
import numpy
import simplespectral
from soapypower import threadpool
logger = logging.getLogger(__name__)
class PSD:
"""Compute averaged power spectral density using Welch's method"""
def __init__(self, bins, sample_rate, fft_window='hann', fft_overlap=0.5,
crop_factor=0, log_scale=True, remove_dc=False, detrend=None,
lnb_lo=0, max_threads=0, max_queue_size=0):
self._bins = bins
self._sample_rate = sample_rate
self._fft_window = fft_window
self._fft_overlap = fft_overlap
self._fft_overlap_bins = math.floor(self._bins * self._fft_overlap)
self._crop_factor = crop_factor
self._log_scale = log_scale
self._remove_dc = remove_dc
self._detrend = detrend
self._lnb_lo = lnb_lo
self._executor = threadpool.ThreadPoolExecutor(
max_workers=max_threads,
max_queue_size=max_queue_size,
thread_name_prefix='PSD_thread'
)
self._base_freq_array = numpy.fft.fftfreq(self._bins, 1 / self._sample_rate)
def set_center_freq(self, center_freq):
"""Set center frequency and clear averaged PSD data"""
psd_state = {
'repeats': 0,
'freq_array': self._base_freq_array + self._lnb_lo + center_freq,
'pwr_array': None,
'update_lock': threading.Lock(),
'futures': [],
}
return psd_state
def result(self, psd_state):
"""Return freqs and averaged PSD for given center frequency"""
freq_array = numpy.fft.fftshift(psd_state['freq_array'])
pwr_array = numpy.fft.fftshift(psd_state['pwr_array'])
if self._crop_factor:
crop_bins_half = round((self._crop_factor * self._bins) / 2)
freq_array = freq_array[crop_bins_half:-crop_bins_half]
pwr_array = pwr_array[crop_bins_half:-crop_bins_half]
if psd_state['repeats'] > 1:
pwr_array = pwr_array / psd_state['repeats']
if self._log_scale:
pwr_array = 10 * numpy.log10(pwr_array)
return (freq_array, pwr_array)
def wait_for_result(self, psd_state):
"""Wait for all PSD threads to finish and return result"""
if len(psd_state['futures']) > 1:
concurrent.futures.wait(psd_state['futures'])
elif psd_state['futures']:
psd_state['futures'][0].result()
return self.result(psd_state)
def result_async(self, psd_state):
"""Return freqs and averaged PSD for given center frequency (asynchronously in another thread)"""
return self._executor.submit(self.wait_for_result, psd_state)
def _release_future_memory(self, future):
"""Remove result from future to release memory"""
future._result = None
def update(self, psd_state, samples_array):
"""Compute PSD from samples and update average for given center frequency"""
freq_array, pwr_array = simplespectral.welch(samples_array, self._sample_rate, nperseg=self._bins,
window=self._fft_window, noverlap=self._fft_overlap_bins,
detrend=self._detrend)
if self._remove_dc:
pwr_array[0] = (pwr_array[1] + pwr_array[-1]) / 2
with psd_state['update_lock']:
psd_state['repeats'] += 1
if psd_state['pwr_array'] is None:
psd_state['pwr_array'] = pwr_array
else:
psd_state['pwr_array'] += pwr_array
def update_async(self, psd_state, samples_array):
"""Compute PSD from samples and update average for given center frequency (asynchronously in another thread)"""
future = self._executor.submit(self.update, psd_state, samples_array)
future.add_done_callback(self._release_future_memory)
psd_state['futures'].append(future)
return future
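if __name__ == '__main__':
    # Minimal sketch with synthetic data: one PSD update of complex white
    # noise at an assumed 100 MHz center frequency.
    psd = PSD(bins=256, sample_rate=2.048e6)
    state = psd.set_center_freq(100e6)
    psd.update(state, numpy.random.randn(16384) + 1j * numpy.random.randn(16384))
    freqs, powers = psd.result(state)
    print(freqs[:3], powers[:3])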
|
xmikos/soapy_power
|
soapypower/psd.py
|
Python
|
mit
| 4,040
|
# This file starts the WSGI web application.
# - Heroku starts gunicorn, which loads Procfile, which starts manage.py
# - Developers can run it from the command line: python manage.py runserver
import logging
from logging.handlers import RotatingFileHandler
from app import create_app
app = create_app()
# Start a development web server if executed from the command line
if __name__ == "__main__":
# Manage the command line parameters such as:
# - python manage.py runserver
# - python manage.py db
from app import manager
manager.run()
|
dleicht/planx
|
manage.py
|
Python
|
mit
| 559
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^sql/$', 'sqlparser.views.parse_sql'),
)
|
slack-sqlbot/slack-sqlbot
|
slack_sqlbot/urls.py
|
Python
|
mit
| 164
|
'''
Created by auto_sdk on 2015.06.23
'''
from aliyun.api.base import RestApi
class Ram20140214GetUserRequest(RestApi):
def __init__(self,domain='ram.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.AccountSpace = None
self.UserName = None
def getapiname(self):
return 'ram.aliyuncs.com.GetUser.2014-02-14'
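# Usage sketch (hypothetical; credential handling and the exact send method
# depend on the RestApi base class in aliyun.api.base):
# req = Ram20140214GetUserRequest()
# req.UserName = 'alice'
# result = req.getResponse() # assumed RestApi helper; verify against the base class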
|
francisar/rds_manager
|
aliyun/api/rest/Ram20140214GetUserRequest.py
|
Python
|
mit
| 334
|
class Paginator(object):
def __init__(self, collection, page_number=0, limit=20, total=-1):
self.collection = collection
self.page_number = int(page_number)
self.limit = int(limit)
self.total = int(total)
@property
def page(self):
start = self.page_number * self.limit
end = start + self.limit
try:
return self.collection[start:end]
except Exception as detail:
print detail
return []
@property
def current_page(self):
return self.page_number + 1
@property
    def page_count(self):
        if self.total != -1:
            # Ceiling division: e.g. 45 items at 20 per page -> 3 pages
            return (self.total + self.limit - 1) // self.limit
        else:
            return None
@property
def has_previous(self):
        return self.page_number > 0
@property
def has_next(self):
        return len(self.page) == self.limit
@property
def previous_page(self):
if self.has_previous:
return self.page_number-1
@property
def next_page(self):
if self.has_next:
return self.page_number+1
def previous_page_link(self, request):
return self.__build_url(self.previous_page, request.full_url())
def next_page_link(self, request):
return self.__build_url(self.next_page, request.full_url())
def __build_url(self, page_num, url):
import re
#check if there is a query string
if url.find('?') != -1:
            if re.search(r'page=\d', url) is not None:
page_str = "&page=%d" % page_num
return re.sub(r'&page=\d+', page_str, url)
else:
return "%s&page=%d" % (url, page_num)
else:
return "%s?page=%d" % (url, page_num)
|
michaelcontento/whirlwind
|
whirlwind/view/paginator.py
|
Python
|
mit
| 1,921
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP 9 soft forks.
Connect to a single node.
regtest lock-in with 108/144 block signalling
activation after a further 144 blocks
mine 2 block and save coinbases for later use
mine 141 blocks to transition from DEFINED to STARTED
mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
mine a further 143 blocks (LOCKED_IN)
test that enforcement has not triggered (which triggers ACTIVE)
test that enforcement has triggered
"""
from test_framework.blockstore import BlockStore
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
from io import BytesIO
import time
import itertools
class BIP9SoftForksTest(ComparisonTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
def setup_network(self):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']],
binary=[self.options.testbinary])
def run_test(self):
self.test = TestManager(self, self.options.tmpdir)
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
self.test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(rawtx))
tx.deserialize(f)
tx.nVersion = 2
return tx
def sign_transaction(self, node, tx):
signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
return tx
def generate_blocks(self, number, version, test_blocks = []):
for i in range(number):
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = version
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
return test_blocks
def get_bip9_status(self, key):
info = self.nodes[0].getblockchaininfo()
return info['bip9_softforks'][key]
def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature, bitno):
assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
assert_equal(self.get_bip9_status(bipName)['since'], 0)
# generate some coins for later
self.coinbase_blocks = self.nodes[0].generate(2)
self.height = 3 # height of the next block to build
self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = int(time.time())
assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
assert_equal(self.get_bip9_status(bipName)['since'], 0)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert(bipName not in tmpl['vbavailable'])
assert_equal(tmpl['vbrequired'], 0)
assert_equal(tmpl['version'], 0x20000000)
# Test 1
# Advance from DEFINED to STARTED
test_blocks = self.generate_blocks(141, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
assert_equal(self.get_bip9_status(bipName)['since'], 144)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert_equal(tmpl['vbavailable'][bipName], bitno)
assert_equal(tmpl['vbrequired'], 0)
assert(tmpl['version'] & activated_version)
# Test 2
# Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
assert_equal(self.get_bip9_status(bipName)['since'], 144)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert_equal(tmpl['vbavailable'][bipName], bitno)
assert_equal(tmpl['vbrequired'], 0)
assert(tmpl['version'] & activated_version)
# Test 3
# 108 out of 144 signal bit 1 to achieve LOCKED_IN
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(58, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
assert_equal(self.get_bip9_status(bipName)['since'], 432)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
# Test 4
        # 143 more blocks during the LOCKED_IN period (waiting period - 1)
test_blocks = self.generate_blocks(143, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
assert_equal(self.get_bip9_status(bipName)['since'], 432)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
# Test 5
# Check that the new rule is enforced
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0], self.nodeaddress, 1.0)
invalidate(spendtx)
spendtx = self.sign_transaction(self.nodes[0], spendtx)
spendtx.rehash()
invalidatePostSignature(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = activated_version
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
yield TestInstance([[block, True]])
assert_equal(self.get_bip9_status(bipName)['status'], 'active')
assert_equal(self.get_bip9_status(bipName)['since'], 576)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName in tmpl['rules'])
assert(bipName not in tmpl['vbavailable'])
assert_equal(tmpl['vbrequired'], 0)
assert(not (tmpl['version'] & (1 << bitno)))
# Test 6
# Check that the new sequence lock rules are enforced
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
invalidate(spendtx)
spendtx = self.sign_transaction(self.nodes[0], spendtx)
spendtx.rehash()
invalidatePostSignature(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = 5
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
# Restart all
self.test.block_store.close()
stop_nodes(self.nodes)
shutil.rmtree(self.options.tmpdir)
self.setup_chain()
self.setup_network()
self.test.block_store = BlockStore(self.options.tmpdir)
self.test.clear_all_connections()
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
def get_tests(self):
for test in itertools.chain(
self.test_BIP('csv', 0x20000001, self.sequence_lock_invalidate, self.donothing, 0),
self.test_BIP('csv', 0x20000001, self.mtp_invalidate, self.donothing, 0),
self.test_BIP('csv', 0x20000001, self.donothing, self.csv_invalidate, 0)
):
yield test
def donothing(self, tx):
return
def csv_invalidate(self, tx):
"""Modify the signature in vin 0 of the tx to fail CSV
Prepends -1 CSV DROP in the scriptSig itself.
"""
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
list(CScript(tx.vin[0].scriptSig)))
def sequence_lock_invalidate(self, tx):
"""Modify the nSequence to make it fails once sequence lock rule is
activated (high timespan).
"""
tx.vin[0].nSequence = 0x00FFFFFF
tx.nLockTime = 0
def mtp_invalidate(self, tx):
"""Modify the nLockTime to make it fails once MTP rule is activated."""
# Disable Sequence lock, Activate nLockTime
tx.vin[0].nSequence = 0x90FFFFFF
tx.nLockTime = self.last_block_time
if __name__ == '__main__':
BIP9SoftForksTest().main()
|
appop/bitcoin
|
qa/rpc-tests/bip9-softforks.py
|
Python
|
mit
| 10,528
|
from contrib import *
import re
def tokenize(text):
    tokens = re.findall(r'(?u)[\w.-]+', text)
    tokens = [t for t in tokens if not re.match(r'[\d.-]+$', t)]
#tokens = [t for t in tokens if len(t)>2]
# TODO remove stopwords
return u' '.join(tokens)
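# Quick check (illustrative input): word-like tokens are kept, purely numeric
# tokens are dropped.
assert tokenize(u'GDP grew 3.5 percent in 2023') == u'GDP grew percent in'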
## text = KV('data/text.db',5)
## tokens = KV('data/tokens.db',5)
text = KO('data/text')
tokens = KO('data/tokens')
for k,v in text.items():
print(k)
tokens[k] = tokenize(v.decode('utf8'))
tokens.sync()
|
mobarski/sandbox
|
topic/tokens.py
|
Python
|
mit
| 455
|
from ...plugin import hookimpl
from ..custom import CustomBuilder
from ..sdist import SdistBuilder
from ..wheel import WheelBuilder
@hookimpl
def hatch_register_builder():
return [CustomBuilder, SdistBuilder, WheelBuilder]
|
ofek/hatch
|
backend/src/hatchling/builders/plugin/hooks.py
|
Python
|
mit
| 229
|
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from .models import Submission
from .serializers import SubmissionSerializer
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from problem.models import Problem
from django.shortcuts import get_object_or_404
from .forms import SubmissionForm
from django_tables2 import RequestConfig
from .tables import SubmissionTable
# from guardian.shortcuts import get_objects_for_user
class SubmissionViewSet(viewsets.ModelViewSet):
queryset = Submission.objects.all()
serializer_class = SubmissionSerializer
permission_classes = (IsAuthenticated,)
class SubmissionListView(ListView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionListView, self).get_context_data(**kwargs)
submissions_table = SubmissionTable(self.get_queryset())
RequestConfig(self.request).configure(submissions_table)
# add filter here
context['submissions_table'] = submissions_table
return context
class SubmissionDetailView(DetailView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionDetailView, self).get_context_data(**kwargs)
return context
class SubmissionCreateView(CreateView):
model = Submission
form_class = SubmissionForm
template_name_suffix = '_create_form'
@method_decorator(login_required)
def dispatch(self, request, pid=None, *args, **kwargs):
pid = self.kwargs['pid']
self.problem = get_object_or_404(Problem.objects.all(), pk=pid)
return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kw = super(SubmissionCreateView, self).get_form_kwargs()
kw['qs'] = self.problem.allowed_lang.all()
return kw
def get_context_data(self, **kwargs):
context = super(SubmissionCreateView, self).get_context_data(**kwargs)
context['problem'] = self.problem
return context
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.problem = self.problem
self.object.user = self.request.user
return super(SubmissionCreateView, self).form_valid(form)
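# Wiring sketch (hypothetical project urls.py; URL names and regexes are assumptions):
# urlpatterns = [
#     url(r'^submissions/$', SubmissionListView.as_view()),
#     url(r'^submissions/(?P<pk>\d+)/$', SubmissionDetailView.as_view()),
#     url(r'^problems/(?P<pid>\d+)/submit/$', SubmissionCreateView.as_view()),
# ]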
|
wangzitian0/BOJ-V4
|
submission/views.py
|
Python
|
mit
| 2,454
|
#! /usr/bin/env python
"""
This programs plots the electronic coupling between two states.
It reads all Ham_*_im files and cache them in a tensor saved on disk.
Usage:
plot_couplings.py -p . -s1 XX -s2 YY -dt 1.0
p = path to the hamiltonian files
s1 = state 1 index
s2 = state 2 index
dt = time step in fs
"""
import numpy as np
import matplotlib.pyplot as plt
import argparse
import glob
import os.path
r2meV = 13605.698  # From Rydberg to meV
def main(path_output, s1, s2, dt):
# Check if the file with couplings exists
if not os.path.isfile('couplings.npy'):
# Check all the files stored
files_im = glob.glob('Ham_*_im')
# Read the couplings
        couplings = np.stack(
            [np.loadtxt(f'Ham_{f}_im') for f in range(len(files_im))]) # np.stack needs a sequence, not a generator
# Save the file for fast reading afterwards
np.save('couplings', couplings)
else:
couplings = np.load('couplings.npy')
ts = np.arange(couplings.shape[0]) * dt
plt.plot(ts, couplings[:, s1, s2] * r2meV)
plt.xlabel('Time (fs)')
plt.ylabel('Energy (meV)')
plt.show()
def read_cmd_line(parser):
"""
Parse Command line options.
"""
args = parser.parse_args()
attributes = ['p', 's1', 's2', 'dt']
return [getattr(args, p) for p in attributes]
if __name__ == "__main__":
msg = "plot_decho -p <path/to/hamiltonians> -s1 <State 1> -s2 <State 2>\
-dt <time step>"
parser = argparse.ArgumentParser(description=msg)
parser.add_argument('-p', required=True,
help='path to the Hamiltonian files in Pyxaid format')
parser.add_argument('-s1', required=True, type=int,
help='Index of the first state')
parser.add_argument('-s2', required=True, type=int,
help='Index of the second state')
    parser.add_argument('-dt', type=float, default=1.0,
                        help='Time step in femtoseconds (default: 1.0)')
main(*read_cmd_line(parser))
|
felipeZ/nonAdiabaticCoupling
|
scripts/hamiltonians/plot_couplings.py
|
Python
|
mit
| 2,000
|
import ast
import traceback
import os
import sys
userFunctions = {}
renames = ['vex.pragma','vex.motor','vex.slaveMotors','vex.motorReversed']
classNames = []
indent = ' '
sameLineBraces = True
compiled = {}
def module_rename(aNode):
if aNode.func.print_c() == 'vex.pragma':
asC = '#pragma '
useComma = False
pragmaDirective = aNode.args.pop(0)
asC += pragmaDirective.s
if aNode.args:
asC += '('
for arg in aNode.args:
if useComma:
asC += ', '
else:
useComma = True
asC += arg.print_c()
asC += ')'
asC += '\n'
return asC
elif aNode.func.print_c() == 'vex.motor':
asC = 'motor[' + aNode.args[0].print_c()
asC += '] = ' + aNode.args[1].print_c()
return asC
elif aNode.func.print_c() == 'vex.slaveMotors':
masterMotor = aNode.args.pop(0).print_c()
asC = ''
for slave in aNode.args:
asC += 'slaveMotor(' + slave.print_c() + ', ' + masterMotor + ');\n'
return asC[:-2]
elif aNode.func.print_c() == 'vex.motorReversed':
asC = 'bMotorReflected[' + aNode.args[0].print_c()
asC += '] = ' + aNode.args[1].print_c()
return asC
return 'Unknown function. This should not happen'
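# Illustrative translations produced by module_rename (a sketch based on the
# branches above; the motor names are hypothetical):
#
#     vex.pragma('config', 'Sensor')       ->  #pragma config("Sensor")
#     vex.motor(leftMotor, 127)            ->  motor[leftMotor] = 127
#     vex.motorReversed(rightMotor, True)  ->  bMotorReflected[rightMotor] = true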
def escape_string(s, unicode=False, max_length=200):
ret = []
# Try to split on whitespace, not in the middle of a word.
split_at_space_pos = max_length - 10
if split_at_space_pos < 10:
split_at_space_pos = None
position = 0
if unicode:
position += 1
ret.append('L')
ret.append('"')
position += 1
for c in s:
newline = False
if c == "\n":
to_add = r"\n"
newline = True
elif ord(c) < 32 or 0x80 <= ord(c) <= 0xff:
to_add = r"\x{:02X}".format(ord(c))
elif ord(c) > 0xff:
if not unicode:
raise ValueError("string contains unicode character but unicode=False")
to_add = r"\u{:04X}".format(ord(c))
elif r'\"'.find(c) != -1:
to_add = r"\{}".format(c)
else:
to_add = c
ret.append(to_add)
position += len(to_add)
if newline:
position = 0
if split_at_space_pos is not None and position >= split_at_space_pos and " \t".find(c) != -1:
ret.append("\\\n")
position = 0
elif position >= max_length:
ret.append("\\\n")
position = 0
ret.append('"')
return "".join(ret)
class C_Module(ast.Module):
def prepare(self):
pass
def print_c(self):
asC = ''
for node in self.body:
try:
asC += node.print_c()
except Exception as e:
print(traceback.format_exc())
print("Current code:")
print(asC)
return asC
class C_Bytes(ast.Bytes):
def prepare(self):
pass
def print_c(self):
return escape_string(self.s.decode('utf-8'),True)
class C_Str(ast.Str):
def prepare(self):
pass
def print_c(self):
return escape_string(self.s)
class C_Num(ast.Num):
def prepare(self):
pass
def print_c(self):
return str(self.n)
class C_FunctionDef(ast.FunctionDef):
def prepare(self):
"""Prepare for writing. Take note of return types, class names, etc..."""
if self.returns:
userFunctions[self.name] = self.returns.print_c()
def print_c(self):
asC = '\n'
if ast.get_docstring(self):
asC += '/*\n'
asC += ast.get_docstring(self)
self.body.pop(0)
asC += '\n*/\n'
asC += self.returns.id + ' ' + self.name + '('
isFirst = True
for i, argNode in enumerate(self.args.args):
arg = argNode.arg
            try:
                argType = argNode.annotation.print_c()
            except AttributeError:
                # the annotation may already be a plain string, set by
                # CNodeTransformer.visit_C_FunctionDef for class methods
                argType = argNode.annotation
if isFirst:
isFirst = False
else:
asC += ', '
asC += argType + ' ' + arg
if i >= self.args.minArgs:
asC += ' = ' + (self.args.defaults[i - self.args.minArgs]).print_c()
if sameLineBraces:
asC += ') {\n'
else:
asC += ')\n{\n'
for childNode in self.body:
try:
unindented = childNode.print_c()
unindented = '\n'.join([indent + x for x in unindented.split('\n')])
if not unindented.endswith('}'):
unindented += ';'
unindented += '\n'
asC += unindented
except Exception as e:
print(traceback.format_exc())
print(ast.dump(childNode))
return asC
asC += '}\n'
return asC
class C_arguments(ast.arguments):
def prepare(self):
self.minArgs = len(self.args) - len(self.defaults)
self.maxArgs = len(self.args)
def print_c(self):
return self
class C_Name(ast.Name):
def prepare(self):
pass
def print_c(self):
if self.id == 'True':
return 'true'
elif self.id == 'False':
return 'false'
elif self.id == 'None':
return '0'
return self.id
if "NameConstant" in ast.__dict__:
class C_NameConstant(ast.NameConstant):
def prepare(self):
pass
def print_c(self):
if self.value == True:
# True
return 'true'
elif self.value == False:
# False
return 'false'
else:
return '0'
class C_Expr(ast.Expr):
def prepare(self):
pass
def print_c(self):
return self.value.print_c()
class C_UnaryOp(ast.UnaryOp):
def prepare(self):
pass
def print_c(self):
return self.op.print_c() + self.operand.print_c()
class C_UAdd(ast.UAdd):
def prepare(self):
pass
def print_c(self):
return '+'
class C_USub(ast.USub):
def prepare(self):
pass
def print_c(self):
return '-'
class C_Not(ast.Not):
def prepare(self):
pass
def print_c(self):
return '!'
class C_Invert(ast.Invert):
def prepare(self):
pass
def print_c(self):
return '~'
class C_BinOp(ast.BinOp):
def prepare(self):
pass
def print_c(self):
return '({left} {op} {right})'.format(
left = self.left.print_c(),
op = self.op.print_c(),
right = self.right.print_c())
class C_Add(ast.Add):
def prepare(self):
pass
def print_c(self):
return '+'
class C_Sub(ast.Sub):
def prepare(self):
pass
def print_c(self):
return '-'
class C_Mult(ast.Mult):
def prepare(self):
pass
def print_c(self):
return '*'
class C_Div(ast.Div):
def prepare(self):
pass
def print_c(self):
return '/'
class C_Mod(ast.Mod):
def prepare(self):
pass
def print_c(self):
return '%'
class C_LShift(ast.LShift):
def prepare(self):
pass
def print_c(self):
return '<<'
class C_RShift(ast.RShift):
def prepare(self):
pass
def print_c(self):
return '>>'
class C_BitOr(ast.BitOr):
def prepare(self):
pass
def print_c(self):
return '|'
class C_BitXor(ast.BitXor):
def prepare(self):
pass
def print_c(self):
return '^'
class C_BitAnd(ast.BitAnd):
def prepare(self):
pass
def print_c(self):
return '&'
class C_BoolOp(ast.BoolOp):
def prepare(self):
pass
def print_c(self):
asC = '(' + self.values.pop(0).print_c()
for value in self.values:
asC += ' ' + self.op.print_c() + ' '
asC += value.print_c()
return asC + ')'
class C_And(ast.And):
def prepare(self):
pass
def print_c(self):
return '&&'
class C_Or(ast.Or):
def prepare(self):
pass
def print_c(self):
return '||'
class C_Compare(ast.Compare):
def prepare(self):
pass
def print_c(self):
asC = ''
self.comparators.insert(0,self.left)
addAnd = False
for i,op in enumerate(self.ops):
if addAnd:
asC += ' && '
else:
addAnd = True
asC += '(' + self.comparators[i].print_c() + ' '
asC += op.print_c()
asC += ' ' + self.comparators[i + 1].print_c() + ')'
return asC
class C_Eq(ast.Eq):
def prepare(self):
pass
def print_c(self):
return '=='
class C_NotEq(ast.NotEq):
def prepare(self):
pass
def print_c(self):
return '!='
class C_Lt(ast.Lt):
def prepare(self):
pass
def print_c(self):
return '<'
class C_LtE(ast.LtE):
def prepare(self):
pass
def print_c(self):
return '<='
class C_Gt(ast.Gt):
def prepare(self):
pass
def print_c(self):
return '>'
class C_GtE(ast.GtE):
def prepare(self):
pass
def print_c(self):
return '>='
class C_Call(ast.Call):
def prepare(self):
pass
def print_args(self):
asC = ''
for arg in self.args:
asC += ', '
asC += arg.print_c()
return asC
def print_c(self):
if self.func.print_c() in renames:
return module_rename(self)
if isinstance(self.func,C_Attribute):
# Convert OOP calls to regular function calls
self.args.insert(0,self.func.value)
self.func = C_Name(self.func.attr,None)
asC = self.func.print_c() + '('
useComma = False
for arg in self.args:
if useComma:
asC += ', '
else:
useComma = True
asC += arg.print_c()
asC += ')'
return asC
class C_IfExp(ast.IfExp):
def prepare(self):
pass
def print_c(self):
asC = '(' + self.test.print_c()
asC += ' ? ' + self.body.print_c()
asC += ' : ' + self.orelse.print_c() + ')'
return asC
class C_Attribute(ast.Attribute):
def prepare(self):
pass
def print_c(self):
return self.value.print_c() + '.' + self.attr
class C_Subscript(ast.Subscript):
def prepare(self):
pass
def print_c(self):
return self.value.print_c() + '[' + self.slice.print_c() + ']'
class C_Index(ast.Index):
def prepare(self):
pass
def print_c(self):
return self.value.print_c()
class C_Assign(ast.Assign):
def prepare(self):
pass
def print_c(self):
asC = ''
for target in self.targets:
asC += target.print_c() + ' = '
asC += self.value.print_c()
return asC
if "AnnAssign" in ast.__dict__:
class C_AnnAssign(ast.AnnAssign):
def prepare(self):
pass
def print_c(self):
asC = self.annotation.print_c() + ' '
asC += self.target.print_c()
if isinstance(self.value, C_Call) and self.value.func.print_c() in classNames:
asC += ';\n'
asC += self.value.func.print_c() + '___init__('
asC += self.target.print_c()
asC += self.value.print_args() + ')'
else:
if self.value:
asC += ' = ' + self.value.print_c()
return asC
class C_AugAssign(ast.AugAssign):
def prepare(self):
pass
def print_c(self):
asC = self.target.print_c() + ' '
asC += self.op.print_c() + '= '
asC += self.value.print_c()
return asC
class C_Assert(ast.Assert):
def prepare(self):
pass
def print_c(self):
return 'VERIFY(' + self.test.print_c() + ')'
class C_Pass(ast.Pass):
def prepare(self):
pass
def print_c(self):
return ''
class C_Import(ast.Import):
def prepare(self):
pass
def print_c(self):
importName = '/'.join(self.names[0].name.split('.'))
        return '#include "' + importName + '.c"\n'
class C_If(ast.If):
def prepare(self):
pass
def print_c(self):
asC = 'if ('
asC += self.test.print_c()
if sameLineBraces:
asC += ') {\n'
else:
asC += ')\n{\n'
for childNode in self.body:
try:
unindented = childNode.print_c()
unindented = '\n'.join([indent + x for x in unindented.split('\n')])
if not unindented.endswith('}'):
unindented += ';'
unindented += '\n'
asC += unindented
except Exception as e:
print(traceback.format_exc())
print(ast.dump(childNode))
return asC
asC += '}'
if self.orelse:
if sameLineBraces:
asC += ' else {\n'
else:
asC += '\nelse\n{\n'
for childNode in self.orelse:
try:
unindented = childNode.print_c()
unindented = '\n'.join([indent + x for x in unindented.split('\n')])
if not unindented.endswith('}'):
unindented += ';'
unindented += '\n'
asC += unindented
except Exception as e:
print(traceback.format_exc())
print(ast.dump(childNode))
return asC
asC += '}'
return asC
class C_For(ast.For):
def prepare(self):
pass
def print_c(self):
# Only supports for _ in range() for now
asC = ''
var = self.target.print_c()
low = '0'
step = '1'
if len(self.iter.args) > 1:
low = self.iter.args[0].print_c()
high = self.iter.args[1].print_c()
if len(self.iter.args) > 2:
step = self.iter.args[2].print_c()
else:
high = self.iter.args[0].print_c()
asC += 'for (' + var + ' = '
asC += low
asC += '; ' + var + ' < ' + high + '; ' + var + ' += ' + step
if sameLineBraces:
asC += ') {\n'
else:
asC += ')\n{\n'
for childNode in self.body:
try:
unindented = childNode.print_c()
unindented = '\n'.join([indent + x for x in unindented.split('\n')])
if not unindented.endswith('}'):
unindented += ';'
unindented += '\n'
asC += unindented
except Exception as e:
print(traceback.format_exc())
print(ast.dump(childNode))
return asC
return asC + '}'
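# Sketch of the translation C_For performs (assuming sameLineBraces is True):
#
#     for i in range(2, 10, 2):        for (i = 2; i < 10; i += 2) {
#         foo(i)                -->        foo(i);
#                                      }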
class C_While(ast.While):
def prepare(self):
pass
def print_c(self):
asC = 'while (' + self.test.print_c()
if sameLineBraces:
asC += ') {\n'
else:
asC += ')\n{\n'
for childNode in self.body:
try:
unindented = childNode.print_c()
unindented = '\n'.join([indent + x for x in unindented.split('\n')])
if not unindented.endswith('}'):
unindented += ';'
unindented += '\n'
asC += unindented
except Exception as e:
print(traceback.format_exc())
print(ast.dump(childNode))
return asC
return asC + '}'
class C_Break(ast.Break):
def prepare(self):
pass
def print_c(self):
return 'break'
class C_Continue(ast.Continue):
def prepare(self):
pass
def print_c(self):
return 'continue'
class C_Return(ast.Return):
def prepare(self):
pass
def print_c(self):
return 'return ' + self.value.print_c()
class C_ClassDef(ast.ClassDef):
def prepare(self):
classNames.append(self.name)
def print_c(self):
asC = '/*** Class: ' + self.name + ' ***/\n'
varNames = ClassVariables.scanIn(self)
if ast.get_docstring(self):
asC += '/*\n'
asC += ast.get_docstring(self)
self.body.pop(0)
asC += '\n*/\n'
asC += 'typedef struct'
if sameLineBraces:
asC += ' {\n'
else:
asC += '\n{\n'
for var,type in varNames.items():
asC += indent + type + ' ' + var + ';\n'
asC += '} ' + self.name + ';\n'
for node in self.body:
try:
asC += node.print_c()
except Exception as e:
print(traceback.format_exc())
print("Current code:")
print(asC)
asC += '\n/*** End Class: ' + self.name + ' ***/\n'
return asC
class ClassVariables(ast.NodeVisitor):
    def __init__(self, *args, **kwargs):
        super(ClassVariables, self).__init__(*args, **kwargs)
        self.varNames = {}
def visit_C_AnnAssign(self, aNode):
if aNode.target.print_c().startswith('self.'):
if aNode.target.attr in self.varNames:
if not self.varNames[aNode.target.attr] == aNode.annotation.print_c():
raise TypeError("Redefining a type not permitted in {}->{}".format(self.parentNode.name,aNode.target.print_c()))
else:
self.varNames[aNode.target.attr] = aNode.annotation.print_c()
aNode.__class__ = C_Assign
aNode.targets = [aNode.target]
self.generic_visit(aNode)
@classmethod
def scanIn(cls, aNode):
walker = cls()
walker.parentNode = aNode
walker.visit(aNode)
return walker.varNames
class CNodeTransformer(ast.NodeVisitor):
def __init__(self, *args, **kwargs):
self.toPrepare = []
self.currentClass = None
super(CNodeTransformer,self).__init__(*args,**kwargs)
def visit_C_Import(self, aNode):
# Make sure that we've compiled this file.
filePath = '/'.join(aNode.names[0].name.split('.')) + '.py'
compile_to_c(filePath)
def visit_C_ClassDef(self, aNode):
previousClass = self.currentClass
self.currentClass = aNode
self.generic_visit(aNode)
self.currentClass = previousClass
def visit_C_FunctionDef(self, aNode):
if self.currentClass:
# Since we're scanning this anyways, get this function ready for a class!
if aNode.name == '__init__':
aNode.name = self.currentClass.name + '_' + aNode.name
aNode.args.args[0].annotation = self.currentClass.name # Force use of class
self.generic_visit(aNode)
def visit(self, node):
"""Visit a node."""
if 'C_' + node.__class__.__name__ in globals():
node.__class__ = globals()['C_' + node.__class__.__name__]
self.toPrepare.append(node)
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
visitor(node) # Recursively replace classes
def compile_to_c(filename):
if not os.path.exists(filename):
if os.path.exists(os.path.join(os.path.dirname(os.path.realpath(__file__)),filename)):
filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),filename)
else:
if os.path.exists(os.path.join(os.path.dirname(os.path.realpath(sys.argv[1])),filename)):
filename = os.path.join(os.path.dirname(os.path.realpath(sys.argv[1])),filename)
else:
raise FileNotFoundError(filename)
if not os.path.abspath(filename) in compiled:
module = ast.parse(open(filename, 'r').read())
compiled[os.path.abspath(filename)] = '' # At least fill it in
transformer = CNodeTransformer()
transformer.visit(module)
for nodeToPrepare in transformer.toPrepare:
nodeToPrepare.prepare()
compiled[os.path.abspath(filename)] = module.print_c()
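# Example of the import chain (module name hypothetical): a line
# `import robot.drive` in the source is emitted by C_Import as
# `#include "robot/drive.c"`, while CNodeTransformer.visit_C_Import calls
# compile_to_c('robot/drive.py') so the included .c file is also generated.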
def commonprefix(l):
    # Unlike os.path.commonprefix, this always returns a valid path prefix:
    # it compares the paths component-wise rather than character-wise.
cp = []
ls = [p.split(os.path.sep) for p in l]
ml = min( len(p) for p in ls )
for i in range(ml):
s = set( p[i] for p in ls )
if len(s) != 1:
break
cp.append(s.pop())
return os.path.sep.join(cp)
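# Example (POSIX paths): commonprefix(['/a/b/c.py', '/a/b/d/e.py']) gives
# '/a/b', whereas os.path.commonprefix would give the character-wise
# prefix '/a/b/'.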
if __name__ == '__main__':
if len(sys.argv) < 2:
print(f"Usage: {__file__} [file]")
sys.exit(1)
compile_to_c(sys.argv[1])
common = commonprefix(compiled)
withRelNames = {os.path.relpath(abspath,common):contents for abspath,contents in compiled.items()}
for file,contents in withRelNames.items():
filename = os.path.join(os.path.dirname(os.path.realpath(sys.argv[1])),os.path.join('output',os.path.splitext(file)[0] + '.c'))
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename,'w') as c_file:
c_file.write(contents)
|
NoMod-Programming/PyRobotC
|
pyRobotC.py
|
Python
|
mit
| 19,127
|
# -*- coding: utf-8 -*-
# from __future__ import (unicode_literals, nested_scopes, generators, division,
# absolute_import, with_statement, print_function)
import datetime
import math
class Funcionario(object):
def __init__(self, nome = "Albert", sobrenome = "Einstein", idade = 29, salario = 2000, cargo = "Chefe"):
self.nome = nome
self.sobrenome = sobrenome
self.idade = idade
self.salario = salario
self.cargo = cargo
print "criou"
nome = raw_input("digite seu nome: ")
sobrenome = raw_input("digite seu sobrenome: ")
idade = raw_input("digite seu idade: ")
salario = raw_input("digite seu salario: ")
cargo = raw_input("digite seu cargo: ")
# class Programador(Funcionario):
# cargo = "Programador"
#
#
# class Professor(Funcionario):
# cargo = "Professor"
#
# class Doido(Programador, Professor):
# cargo = "Programador e Professor"
# # end of the classes
#
adriano = Funcionario(nome, sobrenome, idade, salario)  # create an instance of the class
print adriano.nome
print adriano.sobrenome
print adriano.idade
print adriano.salario
print adriano.cargo
# adriano.especializacao = "PHP"  # create an attribute outside the class scope
# print "NOME: ", adriano.nome
# print funcionario.sobrenome
# print funcionario.cargo
# print funcionario.salario
#
#
#
# marcos = Funcionario()
# print marcos.sobrenome
# print marcos.nome
#
# programador = Programador()
# print programador.sobrenome
# print programador.cargo
# print programador.salario
# print type([])
# print type((1,))
# print type(Usuario)
# print type(usuario)
# print Funcionario.__dict__
# print Funcionario.__bases__
|
josecostamartins/pythonreges
|
aula10/funcionario.py
|
Python
|
mit
| 1,679
|
""" This module provides a lexical scanner component for the `parser` package.
"""
class SettingLexer(object):
""" Simple lexical scanner that tokenizes a stream of configuration data.
See ``SettingParser`` for further information about grammar rules and
specifications.
Example Usage::
>>> lexer = SettingLexer("task.cfg")
>>> [(line_no, token) for line_no, token in lexer.tokens]
[(1, 'task'), (1, '{'), (2, 'duration'), (2, '30'), (2, ';'),
(3, '}')]
>>> lexer = SettingLexer()
>>> lexer.read("task.cfg")
True
>>> [(line_no, token) for line_no, token in lexer.tokens]
[(1, 'task'), (1, '{'), (2, 'duration'), (2, '30'), (2, ';'),
(3, '}')]
>>> lexer = SettingLexer()
>>> with open('task.cfg', 'r') as f:
... lexer.readstream(f)
>>> [(line_no, token) for line_no, token in lexer.tokens]
[(1, 'task'), (1, '{'), (2, 'duration'), (2, '30'), (2, ';'),
(3, '}')]
"""
# character classes
WHITESPACE = ' \n\r\t'
COMMENT_START = '#'
NEWLINES = '\n\r'
TOKENS = '{},;'
QUOTES = '\'"'
ESCAPE = '\\'
SPACE = ' '
# lexer states
ST_TOKEN = 1
ST_STRING = 2
ST_COMMENT = 3
def __init__(self, filename=None):
self._tokens = []
self._filename = None
self._token_info = {}
self._state_info = {}
self._reset_token()
self._state = self.ST_TOKEN
if filename:
self.read(filename)
def _reset_token(self):
""" Resets current token information.
"""
self._token_info = {'line_no': 1,
'chars': []}
def _new_token(self, chars=None, line_no=None):
""" Appends new token to token stream.
`chars`
List of token characters. Defaults to current token list.
`line_no`
Line number for token. Defaults to current line number.
"""
if not line_no:
line_no = self._line_no
if not chars:
chars = self._token_chars
if chars:
# add new token
self._tokens.append((line_no, ''.join(chars)))
self._token_chars = [] # clear values
def _process_newline(self, char):
""" Process a newline character.
"""
state = self._state
# inside string, just append char to token
if state == self.ST_STRING:
self._token_chars.append(char)
else:
# otherwise, add new token
self._new_token()
self._line_no += 1 # update line counter
# finished with comment
if state == self.ST_COMMENT:
self._state = self.ST_TOKEN
def _process_string(self, char):
""" Process a character as part of a string token.
"""
if char in self.QUOTES:
# end of quoted string:
# 1) quote must match original quote
# 2) not escaped quote (e.g. "hey there" vs "hey there\")
# 3) actual escape char prior (e.g. "hey there\\")
            if (char == self._last_quote and
                    (not self._escaped or self._double_escaped)):
# store token
self._new_token()
self._state = self.ST_TOKEN
return # skip adding token char
elif char == self.ESCAPE:
# escape character:
# double escaped if prior char was escape (e.g. "hey \\ there")
if not self._double_escaped:
self._double_escaped = self._escaped
else:
self._double_escaped = False
self._token_chars.append(char)
def _process_tokens(self, char):
""" Process a token character.
"""
if (char in self.WHITESPACE or char == self.COMMENT_START or
char in self.QUOTES or char in self.TOKENS):
add_token = True
# escaped chars, keep going
if char == self.SPACE or char in self.TOKENS:
if self._escaped:
add_token = False
# start of comment
elif char == self.COMMENT_START:
self._state = self.ST_COMMENT
# start of quoted string
elif char in self.QUOTES:
if self._escaped:
# escaped, keep going
add_token = False
else:
self._state = self.ST_STRING
self._last_quote = char # store for later quote matching
if add_token:
# store token
self._new_token()
if char in self.TOKENS:
# store char as a new token
self._new_token([char])
return # skip adding token char
self._token_chars.append(char)
def _tokenize(self, stream):
""" Tokenizes data from the provided string.
``stream``
``File``-like object.
"""
self._tokens = []
self._reset_token()
self._state = self.ST_TOKEN
for chunk in iter(lambda: stream.read(8192), ''):
for char in chunk:
if char in self.NEWLINES:
self._process_newline(char)
else:
state = self._state
if state == self.ST_STRING:
self._process_string(char)
elif state == self.ST_TOKEN:
self._process_tokens(char)
def read(self, filename):
""" Reads the file specified and tokenizes the data for parsing.
"""
try:
with open(filename, 'r') as _file:
self._filename = filename
self.readstream(_file)
return True
except IOError:
self._filename = None
return False
def readstream(self, stream):
""" Reads the file specified and tokenizes the data for parsing.
``stream``
``File``-like object.
"""
self._tokenize(stream)
def get_token(self):
""" Pops the next element off the internal token stack and returns.
Returns tuple (line_no, token) or ``None``.
"""
if not self._tokens:
return None
else:
return self._tokens.pop(0)
def push_token(self, line_no, token):
""" Pushes a token back on the internal token stack.
"""
self._tokens.insert(0, (line_no, token))
@property
def _last_quote(self):
""" Gets the last quote character encountered.
"""
return self._state_info['last_quote']
@_last_quote.setter
def _last_quote(self, value):
""" Sets the last quote character encountered.
"""
self._state_info['last_quote'] = value
@property
def _double_escaped(self):
""" Gets if last escape character was escaped.
"""
return bool(self._state_info['double_esc'])
@_double_escaped.setter
def _double_escaped(self, value):
""" Sets if last escape character was escaped.
"""
self._state_info['double_esc'] = value
@property
def _state(self):
""" Gets the current state of the lexer.
"""
return self._state_info['state']
@_state.setter
def _state(self, value):
""" Sets the current state of the lexer.
"""
self._state_info = {'state': value,
'last_quote': None,
'double_esc': False}
@property
def _line_no(self):
""" Gets the current line number.
"""
return self._token_info['line_no']
@_line_no.setter
def _line_no(self, value):
""" Sets the current line number.
"""
self._token_info['line_no'] = value
@property
def _token_chars(self):
""" Gets the accumulated characters for current token.
"""
return self._token_info['chars']
@_token_chars.setter
def _token_chars(self, value):
""" Sets the accumulated characters for current token.
"""
self._token_info['chars'] = value
@property
def _escaped(self):
""" Escape character is at end of accumulated token
character list.
"""
chars = self._token_info['chars']
count = len(chars)
# prev char is escape, keep going
if count and chars[count - 1] == self.ESCAPE:
chars.pop() # swallow escape char
return True
else:
return False
@property
def filename(self):
""" Returns filename for lexed file.
"""
return self._filename
@property
def tokens(self):
""" Returns lexed tokens.
"""
for token in self._tokens:
yield token
|
xtrementl/focus
|
focus/parser/lexer.py
|
Python
|
mit
| 9,227
|
from datetime import date
from workalendar.tests import GenericCalendarTest
from workalendar.asia import HongKong, Japan, Qatar, Singapore
from workalendar.asia import SouthKorea, Taiwan, Malaysia
class HongKongTest(GenericCalendarTest):
cal_class = HongKong
def test_year_2010(self):
""" Interesting because Christmas fell on a Saturday and CNY fell
on a Sunday, so didn't roll, and Ching Ming was on the same day
as Easter Monday """
holidays = self.cal.holidays_set(2010)
self.assertIn(date(2010, 1, 1), holidays) # New Year
self.assertIn(date(2010, 2, 13), holidays) # Chinese new year (shift)
self.assertIn(date(2010, 2, 15), holidays) # Chinese new year
self.assertIn(date(2010, 2, 16), holidays) # Chinese new year
self.assertNotIn(date(2010, 2, 17), holidays) # Not Chinese new year
self.assertIn(date(2010, 4, 2), holidays) # Good Friday
self.assertIn(date(2010, 4, 3), holidays) # Day after Good Friday
self.assertIn(date(2010, 4, 5), holidays) # Easter Monday
self.assertIn(date(2010, 4, 6), holidays) # Ching Ming (shifted)
self.assertIn(date(2010, 5, 1), holidays) # Labour Day
self.assertIn(date(2010, 5, 21), holidays) # Buddha's Birthday
self.assertIn(date(2010, 6, 16), holidays) # Tuen Ng Festival
self.assertIn(date(2010, 7, 1), holidays) # HK SAR Establishment Day
self.assertIn(date(2010, 9, 23), holidays) # Day after Mid-Autumn
self.assertIn(date(2010, 10, 1), holidays) # National Day
self.assertIn(date(2010, 10, 16), holidays) # Chung Yeung Festival
self.assertIn(date(2010, 12, 25), holidays) # Christmas Day
self.assertIn(date(2010, 12, 27), holidays) # Boxing Day (shifted)
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 1, 1), holidays) # New Year
self.assertIn(date(2013, 2, 11), holidays) # Chinese new year
self.assertIn(date(2013, 2, 12), holidays) # Chinese new year
self.assertIn(date(2013, 2, 13), holidays) # Chinese new year
self.assertIn(date(2013, 3, 29), holidays) # Good Friday
self.assertIn(date(2013, 3, 30), holidays) # Day after Good Friday
self.assertIn(date(2013, 4, 1), holidays) # Easter Monday
self.assertIn(date(2013, 4, 4), holidays) # Ching Ming
self.assertIn(date(2013, 5, 1), holidays) # Labour Day
self.assertIn(date(2013, 5, 17), holidays) # Buddha's Birthday
self.assertIn(date(2013, 6, 12), holidays) # Tuen Ng Festival
self.assertIn(date(2013, 7, 1), holidays) # HK SAR Establishment Day
self.assertIn(date(2013, 9, 20), holidays) # Day after Mid-Autumn
self.assertIn(date(2013, 10, 1), holidays) # National Day
self.assertIn(date(2013, 10, 14), holidays) # Chung Yeung Festival
self.assertIn(date(2013, 12, 25), holidays) # Christmas Day
self.assertIn(date(2013, 12, 26), holidays) # Boxing Day
def test_year_2016(self):
holidays = self.cal.holidays_set(2016)
self.assertIn(date(2016, 1, 1), holidays) # New Year
self.assertIn(date(2016, 2, 8), holidays) # Chinese new year
self.assertIn(date(2016, 2, 9), holidays) # Chinese new year
self.assertIn(date(2016, 2, 10), holidays) # Chinese new year
self.assertIn(date(2016, 3, 25), holidays) # Good Friday
self.assertIn(date(2016, 3, 26), holidays) # Day after Good Friday
self.assertIn(date(2016, 3, 28), holidays) # Easter Monday
self.assertIn(date(2016, 4, 4), holidays) # Ching Ming
self.assertIn(date(2016, 5, 2), holidays) # Labour Day (shifted)
self.assertIn(date(2016, 5, 14), holidays) # Buddha's Birthday
self.assertIn(date(2016, 6, 9), holidays) # Tuen Ng Festival
self.assertIn(date(2016, 7, 1), holidays) # HK SAR Establishment Day
self.assertIn(date(2016, 9, 16), holidays) # Day after Mid-Autumn
self.assertIn(date(2016, 10, 1), holidays) # National Day
self.assertIn(date(2016, 10, 10), holidays) # Chung Yeung Festival
self.assertIn(date(2016, 12, 26), holidays) # Christmas Day (shifted)
self.assertIn(date(2016, 12, 27), holidays) # Boxing Day (shifted)
def test_year_2017(self):
holidays = self.cal.holidays_set(2017)
self.assertIn(date(2017, 1, 2), holidays) # New Year (shifted)
self.assertIn(date(2017, 1, 28), holidays) # Chinese new year
self.assertIn(date(2017, 1, 30), holidays) # Chinese new year
self.assertIn(date(2017, 1, 31), holidays) # Chinese new year
self.assertIn(date(2017, 4, 4), holidays) # Ching Ming
self.assertIn(date(2017, 4, 14), holidays) # Good Friday
self.assertIn(date(2017, 4, 15), holidays) # Day after Good Friday
self.assertIn(date(2017, 4, 17), holidays) # Easter Monday
self.assertIn(date(2017, 5, 1), holidays) # Labour Day
self.assertIn(date(2017, 5, 3), holidays) # Buddha's Birthday
self.assertIn(date(2017, 5, 30), holidays) # Tuen Ng Festival
self.assertIn(date(2017, 7, 1), holidays) # HK SAR Establishment Day
self.assertIn(date(2017, 10, 2), holidays) # National Day (shifted)
self.assertIn(date(2017, 10, 5), holidays) # Day after Mid-Autumn
self.assertIn(date(2017, 10, 28), holidays) # Chung Yeung Festival
self.assertIn(date(2017, 12, 25), holidays) # Christmas Day
self.assertIn(date(2017, 12, 26), holidays) # Boxing Day
def test_chingming_festival(self):
# This is the same as the Taiwan test, just different spelling
# Could move this into a Core test
self.assertIn(date(2005, 4, 5), self.cal.holidays_set(2005))
self.assertIn(date(2006, 4, 5), self.cal.holidays_set(2006))
self.assertIn(date(2007, 4, 5), self.cal.holidays_set(2007))
self.assertIn(date(2008, 4, 4), self.cal.holidays_set(2008))
self.assertIn(date(2010, 4, 5), self.cal.holidays_set(2010))
self.assertIn(date(2011, 4, 5), self.cal.holidays_set(2011))
self.assertIn(date(2012, 4, 4), self.cal.holidays_set(2012))
self.assertIn(date(2013, 4, 4), self.cal.holidays_set(2013))
self.assertIn(date(2014, 4, 5), self.cal.holidays_set(2014))
self.assertIn(date(2015, 4, 4), self.cal.holidays_set(2015))
self.assertIn(date(2016, 4, 4), self.cal.holidays_set(2016))
self.assertIn(date(2017, 4, 4), self.cal.holidays_set(2017))
self.assertIn(date(2018, 4, 5), self.cal.holidays_set(2018))
class JapanTest(GenericCalendarTest):
cal_class = Japan
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 1, 1), holidays) # new year
self.assertIn(date(2013, 2, 11), holidays) # Foundation Day
self.assertIn(date(2013, 3, 20), holidays) # Vernal Equinox Day
self.assertIn(date(2013, 4, 29), holidays) # Showa Day
self.assertIn(date(2013, 5, 3), holidays) # Constitution Memorial Day
self.assertIn(date(2013, 5, 4), holidays) # Greenery Day
self.assertIn(date(2013, 5, 5), holidays) # Children's Day
self.assertIn(date(2013, 9, 23), holidays) # Autumnal Equinox Day
self.assertIn(date(2013, 11, 3), holidays) # Culture Day
self.assertIn(date(2013, 11, 23), holidays) # Labour Thanksgiving Day
self.assertIn(date(2013, 12, 23), holidays) # The Emperor's Birthday
# Variable days
self.assertIn(date(2013, 1, 14), holidays) # Coming of Age Day
self.assertIn(date(2013, 7, 15), holidays) # Marine Day
self.assertIn(date(2013, 9, 16), holidays) # Respect-for-the-Aged Day
self.assertIn(date(2013, 10, 14), holidays) # Health and Sports Day
def test_year_2016(self):
# Before 2016, no Mountain Day
holidays = self.cal.holidays_set(2014)
self.assertNotIn(date(2014, 8, 11), holidays) # Mountain Day
holidays = self.cal.holidays_set(2015)
self.assertNotIn(date(2015, 8, 11), holidays) # Mountain Day
# After 2016, yes
holidays = self.cal.holidays_set(2016)
self.assertIn(date(2016, 8, 11), holidays) # Mountain Day
holidays = self.cal.holidays_set(2017)
self.assertIn(date(2017, 8, 11), holidays) # Mountain Day
class MalaysiaTest(GenericCalendarTest):
cal_class = Malaysia
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 1, 1), holidays) # New Year's Day
self.assertIn(date(2013, 1, 28), holidays) # Thaipusam
self.assertIn(date(2013, 2, 1), holidays) # Federal Territory Day
self.assertIn(date(2013, 2, 11), holidays) # 2nd day of Lunar NY
        self.assertIn(date(2013, 2, 12), holidays)  # in lieu of 1st day (fell on Sunday)
self.assertIn(date(2013, 5, 1), holidays) # Workers' Day
self.assertIn(date(2013, 5, 24), holidays) # Vesak Day
self.assertIn(date(2013, 8, 8), holidays) # 1st day eid-al-fitr
self.assertIn(date(2013, 8, 9), holidays) # 2nd day eid-al-fitr
self.assertIn(date(2013, 8, 31), holidays) # National Day
self.assertIn(date(2013, 9, 16), holidays) # Malaysia Day
self.assertIn(date(2013, 10, 15), holidays) # Hari Raya Haji
self.assertIn(date(2013, 11, 2), holidays) # Deepavali
self.assertIn(date(2013, 11, 5), holidays) # Islamic New Year
self.assertIn(date(2013, 12, 25), holidays) # Xmas
def test_year_2012(self):
holidays = self.cal.holidays_set(2012)
self.assertIn(date(2012, 1, 1), holidays) # New Year's Day
        self.assertIn(date(2012, 1, 24), holidays)  # 2nd day of Lunar NY
        self.assertIn(date(2012, 2, 1), holidays)  # Federal Territory Day
        self.assertIn(date(2012, 5, 1), holidays)  # Workers' Day
        self.assertIn(date(2012, 5, 5), holidays)  # Vesak Day
self.assertIn(date(2012, 8, 19), holidays) # 1st day eid-al-fitr
self.assertIn(date(2012, 8, 20), holidays) # 2nd day eid-al-fitr
self.assertIn(date(2012, 8, 31), holidays) # National Day
self.assertIn(date(2012, 9, 16), holidays) # Malaysia Day
self.assertIn(date(2012, 10, 26), holidays) # Hari Raya Haji
self.assertIn(date(2012, 11, 13), holidays) # Islamic New Year
self.assertIn(date(2012, 11, 15), holidays) # Deepavali
self.assertIn(date(2012, 12, 25), holidays) # Xmas
def test_nuzul_al_quran(self):
holidays = self.cal.holidays_set(2017)
self.assertIn(date(2017, 6, 12), holidays)
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 6, 1), holidays)
class QatarTest(GenericCalendarTest):
cal_class = Qatar
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 7, 9), holidays) # start ramadan
# warning, the official date was (2013, 8, 10)
self.assertIn(date(2013, 8, 8), holidays) # eid al fitr
# The official date was (2013, 10, 14)
self.assertIn(date(2013, 10, 15), holidays) # eid al adha
self.assertIn(date(2013, 10, 16), holidays) # eid al adha
self.assertIn(date(2013, 10, 17), holidays) # eid al adha
self.assertIn(date(2013, 10, 18), holidays) # eid al adha
self.assertIn(date(2013, 12, 18), holidays) # National Day
def test_weekend(self):
        # In Qatar, week-end days are Friday / Saturday.
weekend_day = date(2017, 5, 12) # This is a Friday
non_weekend_day = date(2017, 5, 14) # This is a Sunday
self.assertFalse(self.cal.is_working_day(weekend_day))
self.assertTrue(self.cal.is_working_day(non_weekend_day))
class SingaporeTest(GenericCalendarTest):
cal_class = Singapore
def test_CNY_2010(self):
holidays = self.cal.holidays_set(2010)
self.assertIn(date(2010, 2, 14), holidays) # CNY1
self.assertIn(date(2010, 2, 15), holidays) # CNY2
self.assertIn(date(2010, 2, 16), holidays) # Rolled day for CNY
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 1, 1), holidays) # New Year
self.assertIn(date(2013, 2, 10), holidays) # CNY1
self.assertIn(date(2013, 2, 11), holidays) # CNY2
self.assertIn(date(2013, 2, 12), holidays) # Rolled day for CNY
self.assertIn(date(2013, 3, 29), holidays) # Good Friday
self.assertIn(date(2013, 5, 1), holidays) # Labour Day
self.assertIn(date(2013, 5, 24), holidays) # Vesak Day
self.assertIn(date(2013, 8, 8), holidays) # Hari Raya Puasa
self.assertIn(date(2013, 8, 9), holidays) # National Day
self.assertIn(date(2013, 10, 15), holidays) # Hari Raya Haji
self.assertIn(date(2013, 11, 3), holidays) # Deepavali
self.assertIn(date(2013, 11, 4), holidays) # Deepavali shift
self.assertIn(date(2013, 12, 25), holidays) # Christmas Day
def test_year_2018(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 1, 1), holidays) # New Year
self.assertIn(date(2018, 2, 16), holidays) # CNY
self.assertIn(date(2018, 2, 17), holidays) # CNY
self.assertIn(date(2018, 3, 30), holidays) # Good Friday
self.assertIn(date(2018, 5, 1), holidays) # Labour Day
self.assertIn(date(2018, 5, 29), holidays) # Vesak Day
self.assertIn(date(2018, 6, 15), holidays) # Hari Raya Puasa
self.assertIn(date(2018, 8, 9), holidays) # National Day
self.assertIn(date(2018, 8, 22), holidays) # Hari Raya Haji
self.assertIn(date(2018, 11, 6), holidays) # Deepavali
self.assertIn(date(2018, 12, 25), holidays) # Christmas Day
def test_fixed_holiday_shift(self):
# Labour Day was on a Sunday in 2016
holidays = self.cal.holidays_set(2016)
# Labour Day (sunday)
self.assertIn(date(2016, 5, 1), holidays)
# Shifted day (Monday)
self.assertIn(date(2016, 5, 2), holidays)
class SouthKoreaTest(GenericCalendarTest):
cal_class = SouthKorea
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 1, 1), holidays) # new year
self.assertIn(date(2013, 3, 1), holidays) # Independence day
self.assertIn(date(2013, 5, 5), holidays) # children's day
self.assertIn(date(2013, 6, 6), holidays) # Memorial day
self.assertIn(date(2013, 8, 15), holidays) # Liberation day
self.assertIn(date(2013, 10, 3), holidays) # National Foundation Day
self.assertIn(date(2013, 10, 9), holidays) # Hangul Day
self.assertIn(date(2013, 12, 25), holidays) # Christmas
# Variable days
self.assertIn(date(2013, 2, 9), holidays)
self.assertIn(date(2013, 2, 10), holidays)
self.assertIn(date(2013, 2, 11), holidays)
self.assertIn(date(2013, 5, 17), holidays)
self.assertIn(date(2013, 9, 18), holidays)
self.assertIn(date(2013, 9, 19), holidays)
self.assertIn(date(2013, 9, 20), holidays)
class TaiwanTest(GenericCalendarTest):
cal_class = Taiwan
def test_year_2013(self):
holidays = self.cal.holidays_set(2013)
self.assertIn(date(2013, 1, 1), holidays) # New Year
self.assertIn(date(2013, 2, 9), holidays) # Chinese new year's eve
self.assertIn(date(2013, 2, 10), holidays) # Chinese new year
self.assertIn(date(2013, 2, 11), holidays) # Spring Festival
self.assertIn(date(2013, 2, 12), holidays) # Spring Festival
self.assertIn(date(2013, 2, 28), holidays) # 228 Peace Memorial Day
self.assertIn(date(2013, 4, 4), holidays) # Children's Day
self.assertIn(date(2013, 6, 12), holidays) # Dragon Boat Festival
self.assertIn(date(2013, 9, 19), holidays) # Mid-Autumn Festival
self.assertIn(date(2013, 10, 10), holidays) # National Day
def test_qingming_festival(self):
self.assertIn(date(2001, 4, 5), self.cal.holidays_set(2001))
self.assertIn(date(2002, 4, 5), self.cal.holidays_set(2002))
self.assertIn(date(2005, 4, 5), self.cal.holidays_set(2005))
self.assertIn(date(2006, 4, 5), self.cal.holidays_set(2006))
self.assertIn(date(2007, 4, 5), self.cal.holidays_set(2007))
self.assertIn(date(2008, 4, 4), self.cal.holidays_set(2008))
self.assertIn(date(2010, 4, 5), self.cal.holidays_set(2010))
self.assertIn(date(2011, 4, 5), self.cal.holidays_set(2011))
self.assertIn(date(2012, 4, 4), self.cal.holidays_set(2012))
self.assertIn(date(2013, 4, 4), self.cal.holidays_set(2013))
self.assertIn(date(2014, 4, 4), self.cal.holidays_set(2014))
|
sayoun/workalendar
|
workalendar/tests/test_asia.py
|
Python
|
mit
| 17,173
|
import logging
from ask import alexa
import car_accidents
import expected_population
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def lambda_handler(request_obj, context=None):
return alexa.route_request(request_obj)
@alexa.default
def default_handler(request):
logger.info('default_handler')
return alexa.respond("Sorry, I don't understand.", end_session=True)
@alexa.request("LaunchRequest")
def launch_request_handler(request):
logger.info('launch_request_handler')
return alexa.respond('Ask me about any public data about Sweden.', end_session=True)
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
logger.info('session_ended_request_handler')
return alexa.respond('Goodbye.', end_session=True)
@alexa.intent('AMAZON.CancelIntent')
def cancel_intent_handler(request):
logger.info('cancel_intent_handler')
return alexa.respond('Okay.', end_session=True)
@alexa.intent('AMAZON.HelpIntent')
def help_intent_handler(request):
logger.info('help_intent_handler')
return alexa.respond('You can ask me about car accidents.', end_session=True)
@alexa.intent('AMAZON.StopIntent')
def stop_intent_handler(request):
logger.info('stop_intent_handler')
return alexa.respond('Okay.', end_session=True)
@alexa.intent('CarAccidents')
def car_accidents_intent_handler(request):
logger.info('car_accidents_intent_handler')
logger.info(request.get_slot_map())
city = request.get_slot_value('city')
year = request.get_slot_value('year')
    if not city:
        return alexa.respond('Sorry, which city?')
    if not year:
        return alexa.respond('Sorry, which year?')
    num_car_acc = car_accidents.get_num_accidents(year=int(year), city=city)
    logger.info('%s accidents in %s in %s', num_car_acc, city, year)
return alexa.respond(
'''
<speak>
There were
<say-as interpret-as="cardinal">%s</say-as>
car accidents in %s in
<say-as interpret-as="date" format="y">%s</say-as>,
</speak>
        ''' % (num_car_acc, city, year),
end_session=True, is_ssml=True)
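# For illustration, with num_car_acc=123, city='Stockholm', year='2016' the
# SSML template above fills in to (whitespace aside):
#
#     <speak>There were <say-as interpret-as="cardinal">123</say-as>
#     car accidents in Stockholm in
#     <say-as interpret-as="date" format="y">2016</say-as>,</speak>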
@alexa.intent('PopulationSweden')
def population_intent_handler(request):
logger.info('population_sweden_intent_handler')
logger.info(request.get_slot_map())
year = request.get_slot_value('year')
return alexa.respond(
'''
<speak>
in
<say-as interpret-as="date" format="y">%s</say-as>,
The expected population of Sweden is going to be
<say-as interpret-as="cardinal">%s</say-as>
</speak>
''' % (year, expected_population.get_expected_population(year)),
end_session=True, is_ssml=True)
@alexa.intent('WaterUsage')
def water_usage_stockholm(request):
year = request.get_slot_value('year')
logger.info('water_usage_stockholm')
logger.info(request.get_slot_map())
return alexa.respond(
'''
<speak>
the water consumption in Stockholm in <say-as interpret-as="date" format="y">%s</say-as>,
is <say-as interpret-as="cardinal">%s</say-as>
</speak>
''' % (year, car_accidents.get_water_usage_stockholm(year)),
end_session=True, is_ssml=True)
@alexa.intent('Apartments')
def housing_numbers(request):
year = request.get_slot_value('year')
logger.info('apartments')
logger.info(request.get_slot_map())
return alexa.respond(
'''
<speak>
the number of apartments built during that year in Stockholm, is <say-as interpret-as="cardinal">%s</say-as>
</speak>
        ''' % (car_accidents.get_num_apartments_stockholm(year)),
        end_session=True, is_ssml=True)
|
geoaxis/ask-sweden
|
ask_sweden/lambda_function.py
|
Python
|
mit
| 3,656
|
def lucky_search(index, ranks, keyword):
    urls = index.get(keyword)
    if urls and ranks:
        # single best hit: the URL with the highest rank
        return max(urls, key=lambda x: ranks[x])
    else:
        return None
def ordered_search(index, ranks, keyword):
    urls = index.get(keyword)
    if urls and ranks:
        # all hits, highest-ranked first
        return sorted(urls, key=lambda x: ranks[x], reverse=True)
    else:
        return None
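# A minimal usage sketch (the index/ranks shapes are assumptions inferred from
# the lookups above):
#
#     index = {'python': ['http://a.example', 'http://b.example']}
#     ranks = {'http://a.example': 0.15, 'http://b.example': 0.85}
#     lucky_search(index, ranks, 'python')    # -> 'http://b.example'
#     ordered_search(index, ranks, 'python')  # -> ['http://b.example', 'http://a.example']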
|
CrazyWearsPJs/minimalist_web_crawler
|
src/search.py
|
Python
|
mit
| 314
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class Operations(object):
"""Operations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.kubernetesconfiguration.v2021_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ResourceProviderOperationList"]
"""List all the available operations the KubernetesConfiguration resource provider supports.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceProviderOperationList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2021_03_01.models.ResourceProviderOperationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceProviderOperationList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ResourceProviderOperationList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.KubernetesConfiguration/operations'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/kubernetesconfiguration/azure-mgmt-kubernetesconfiguration/azure/mgmt/kubernetesconfiguration/v2021_03_01/operations/_operations.py
|
Python
|
mit
| 4,985
|
from django.shortcuts import render
from rest_framework import viewsets
from basin.models import Task
from basin.serializers import TaskSerializer
def index(request):
context = {}
return render(request, 'index.html', context)
def display(request):
state = 'active'
if request.method == 'POST':
state = request.POST['state']
submit = request.POST['submit']
tid = request.POST['id']
if submit == 'check':
task = Task.objects.get(id=tid)
task.completed = not task.completed
task.save()
elif request.method == 'GET':
if 'state' in request.GET:
state = request.GET['state']
context = {
'task_list': Task.objects.state(state),
'state': state,
}
return render(request, 'display.html', context)
class ActiveViewSet(viewsets.ModelViewSet):
queryset = Task.objects.active()
serializer_class = TaskSerializer
class SleepingViewSet(viewsets.ModelViewSet):
queryset = Task.objects.sleeping()
serializer_class = TaskSerializer
class BlockedViewSet(viewsets.ModelViewSet):
queryset = Task.objects.blocked()
serializer_class = TaskSerializer
class DelegatedViewSet(viewsets.ModelViewSet):
queryset = Task.objects.delegated()
serializer_class = TaskSerializer
class CompletedViewSet(viewsets.ModelViewSet):
queryset = Task.objects.filter(completed=True, trashed=False)
serializer_class = TaskSerializer
class TaskViewSet(viewsets.ModelViewSet):
model = Task
serializer_class = TaskSerializer
def get_queryset(self):
if 'state' in self.request.QUERY_PARAMS:
state = self.request.QUERY_PARAMS['state']
return Task.objects.state(state)
return Task.objects.all()
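# Example requests handled by TaskViewSet.get_queryset (the router prefix is an
# assumption; any state understood by Task.objects.state() works):
#
#     GET /tasks/?state=sleeping  ->  Task.objects.state('sleeping')
#     GET /tasks/                 ->  Task.objects.all()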
|
Pringley/basinweb
|
basin/views.py
|
Python
|
mit
| 1,780
|
"""
This module allows you to mock the config file as needed.
A default fixture that simply returns a safe-to-modify copy of
the default value is provided.
This can be overridden by parametrizing over the option you wish to
mock.
e.g.
>>> @pytest.mark.parametrize("extension_initial_dot", (True, False))
... def test_fixture(mock_config, extension_initial_dot):
... import bids
... assert bids.config.get_option("extension_initial_dot") == extension_initial_dot
"""
from unittest.mock import patch
import pytest
@pytest.fixture
def config_paths():
import bids.config
return bids.config.get_option('config_paths').copy()
@pytest.fixture
def extension_initial_dot():
import bids.config
return bids.config.get_option('extension_initial_dot')
@pytest.fixture
def mock_config(config_paths, extension_initial_dot):
import bids.config
with patch.dict('bids.config._settings'):
bids.config._settings['config_paths'] = config_paths
bids.config._settings['extension_initial_dot'] = extension_initial_dot
yield
|
INCF/pybids
|
bids/conftest.py
|
Python
|
mit
| 1,063
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
This module defines how cells are stored as tunacell's objects
"""
from __future__ import print_function
import numpy as np
import warnings
import treelib as tlib
from tunacell.base.observable import Observable, FunctionalObservable
from tunacell.base.datatools import (Coordinates, compute_rates,
extrapolate_endpoints,
derivative, logderivative, ExtrapolationError)
class CellError(Exception):
pass
class CellChildsError(CellError):
pass
class CellParentError(CellError):
pass
class CellDivisionError(CellError):
pass
class Cell(tlib.Node):
"""General class to handle cell data structure.
Inherits from treelib.Node class to facilitate tree building.
Parameters
----------
identifier : str
cell identifier
container : :class:`Container` instance
container to which cell belongs
Attributes
----------
container : :class:`Container` instance
        container to which cell belongs
childs : list of :class:`Cell` instances
daughter cells of current cell
parent : :class:`Cell` instance
mother cell of current cell
birth_time : float (default None)
time of cell birth (needs to be computed)
division_time : float (default None)
time of cell division (needs to be computed)
Methods
-------
set_division_events()
computes birth/division times when possible
build(obs)
builds timeseries, uses one of the following methods depending on obs
build_timelapse(obs)
builds and stores timeseries associated to obs, in 'dynamics' mode
    compute_cyclized(obs)
        builds and stores the cell-cycle value associated to obs, for modes
        other than 'dynamics'
"""
def __init__(self, identifier=None, container=None):
tlib.Node.__init__(self, identifier=identifier)
self._childs = []
self._parent = None
self._birth_time = None
self._division_time = None
self._sdata = {} # dictionary to contain computed data
self._protected_against_build = set() # set of obs not to re-build
self.container = container # point to Container instance
# cells are built from a specific container instance
# container can be a given field of view, a channel, a microcolony, ...
return
# We add few definitions to be able to chain between Cell instances
@property
def childs(self):
"Get list of child instances."
return self._childs
@childs.setter
def childs(self, value):
if value is None:
self._childs = []
elif isinstance(value, list):
for item in value:
self.childs = item
elif isinstance(value, Cell):
self._childs.append(value)
else:
raise CellChildsError
@property
def parent(self):
"Get parent instance."
return self._parent
@parent.setter
def parent(self, pcell):
if pcell is None:
self._parent = None
elif isinstance(pcell, Cell):
self._parent = pcell
else:
raise CellParentError
@property
def birth_time(self):
"Get cell cycle start time. See below for Setter."
return self._birth_time
@birth_time.setter
def birth_time(self, value):
"Set cell cycle start time. See above for Getter."
self._birth_time = value
@property
def division_time(self):
"Get cell cycle end time. See below for Setter."
return self._division_time
@division_time.setter
def division_time(self, value):
"Set cell cycle end time. See above for Getter."
if self.birth_time is not None:
if value < self.birth_time:
raise CellDivisionError
self._division_time = value
def set_division_event(self):
"method to call when parent is identified"
previous_frame = None
if (self.parent is not None) and (self.parent.data is not None):
previous_frame = self.parent.data['time'][-1]
first_frame = None
if self.data is not None:
first_frame = self.data['time'][0]
if previous_frame is not None and first_frame is not None:
div_time = (previous_frame + first_frame)/2. # halfway
self.birth_time = div_time
self.parent.division_time = div_time
return
def __repr__(self):
cid = str(self.identifier)
if self.parent:
pid = str(self.parent.identifier)
else:
pid = '-'
if self.childs:
ch = ','.join(['{}'.format(c.identifier) for c in self.childs])
else:
ch = '-'
return cid+';p:'+pid+';ch:'+ch
def info(self):
dic = {}
dic['a. Identifier'] = '{}'.format(self.identifier)
pid = 'None'
if self.parent:
pid = '{}'.format(self.parent.identifier)
dic['b. Parent id'] = pid
chids = 'None'
if self.childs:
chids = ', '.join(['{}'.format(ch.identifier)
for ch in self.childs])
dic['c. Childs'] = chids
dic['d. Birth time'] = '{}'.format(self.birth_time)
dic['e. Division time'] = '{}'.format(self.division_time)
if self.data is not None:
dic['f. N_frames'] = '{}'.format(len(self.data))
return dic
def protect_against_build(self, obs):
"""Protect current cell against building obs array/value"""
self._protected_against_build.add(obs)
return
def build(self, obs):
"""Builds timeseries"""
if obs in self._protected_against_build:
return
if isinstance(obs, FunctionalObservable):
# first build every single Observable
for item in obs.observables:
self.build(item)
arrays = [self._sdata[item.label] for item in obs.observables]
self._sdata[obs.label] = obs.f(*arrays)
elif isinstance(obs, Observable):
if obs.mode == 'dynamics':
self.build_timelapse(obs)
else:
self.compute_cyclized(obs)
else:
raise TypeError('obs must be of type Observable or FunctionalObservable')
def build_timelapse(self, obs):
"""Builds timeseries corresponding to observable of mode 'dynamics'.
        Result is an array of the same length as the time array, stored in the
        dictionary _sdata, whose keys are obs.label. When using sliding windows,
        an estimate in a given cell updates data in its parent cell, if and only
        if it has not been updated before (check disjoint time intervals).
Parameters
----------
obs : Observable instance
mode must be 'dynamics'
Note
-----
Some observables carry the 'local_fit' option True. In this case,
local fits over shifting time-windows are performed. If one would keep
only a given cell's data, then the constraints on shifting time-window
would let some 'empty' times, at which no evaluation can be performed.
This is solved by getting data from the cell's parent cell's data. This
        operation computes time-window fitted data in the cell's parent cycle.
Two precautions must then be taken:
1. a given cell's data must be used only once for evaluating parent
cell's data,
2. when data has been used from one daughter cell, concatenate
the current cell's evaluated data to it.
.. warning::
For some computations, the time interval between consecutive
acquisitions is needed. If it's defined in the container or the
experiment metadata, this parameter will be imported; otherwise if
there are at least 2 consecutive values, it will be inferred from
data (at the risk of making mistakes if there are too many missing
values)
"""
label = str(obs.label)
raw = obs.raw
coords = Coordinates(self.data['time'], self.data[raw])
if self.parent is not None and len(self.parent.data) > 0:
anteriors = Coordinates(self.parent.data['time'],
self.parent.data[raw])
else:
anteriors = Coordinates(np.array([], dtype=float),
np.array([], dtype=float))
        # no data in this cell: return an empty Coordinates pair
if len(self.data) == 0: # there is no data, but it has some dtype
return Coordinates(np.array([], dtype=float),
np.array([], dtype=float))
dt = self.container.period
if dt is None:
# automatically finds dt
if len(self.data) > 1:
arr = self.data['time']
time_increments = arr[1:] - arr[:-1]
dt = np.round(np.amin(np.abs(time_increments)), decimals=2)
# case : no local fit, use data, or finite differences
if not obs.local_fit:
if obs.differentiate:
if obs.scale == 'linear':
new = derivative(coords)
elif obs.scale == 'log':
new = logderivative(coords)
else:
new = coords
self._sdata[label] = new.y
# case : local estimates using compute_rates
else:
r, f, ar, af, xx, yy = compute_rates(coords.x, coords.y,
x_break=self.birth_time,
anterior_x=anteriors.x,
anterior_y=anteriors.y,
scale=obs.scale,
time_window=obs.time_window,
dt=dt,
join_points=obs.join_points)
if obs.differentiate:
to_cell = r
to_parent = ar
if len(ar) != len(anteriors.x):
                    print('Warning: parent estimate length does not match parent time array')
else:
to_cell = f
to_parent = af
self._sdata[label] = to_cell
if self.parent is not None and (not np.all(np.isnan(to_parent))):
if label not in self.parent._sdata.keys():
self.parent._sdata[label] = to_parent
else:
existing = self.parent._sdata[label]
                # where existing values are NaN, fill them with the newly computed ones
self.parent._sdata[label] = np.where(np.isnan(existing), to_parent, existing)
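                # Merge illustration (hypothetical values): NaN slots in the
                # parent's existing estimate are filled by this evaluation:
                #   existing  = [1.0, nan, nan]
                #   to_parent = [nan, 2.0, 3.0]
                #   result    = [1.0, 2.0, 3.0]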
return
def compute_cyclized(self, obs):
"""Computes observable when mode is different from 'dynamics'.
Parameters
----------
obs : Observable instance
mode must be different from 'dynamics'
Raises
------
ValueError
when Observable mode is 'dynamics'
Note
----
To compute a cell-cycle observable (e.g. birth growth rate), it is
necessary to know the value of the timelapse counterpart (e.g. growth
        rate here). The timelapse observable may work by joining values at
        divisions, and hence a single call to Cell.build_timelapse() will
        produce a different result array than when it has also been called in
        a daughter cell (potentially affecting values toward the end of the
        current cell cycle). Hence, in circumstances where continuity is used
        to join time series at divisions, enhancing results by fitting over
        sliding windows, it is the user's task to first compute the timelapse
        observable over the entire lineage, and only then evaluate cell-cycle
        values. This is why the function below first tries to read an already
        computed array for the timelapse counterpart, and only if that fails
        does it compute it using only the current cell's data.
"""
scale = obs.scale
npts = obs.join_points
label = obs.label
if obs.mode == 'dynamics':
raise ValueError('Called build_cyclized for dynamics mode')
# associate timelapse counterpart
cobs = obs.as_timelapse()
clabel = cobs.label
time = self.data['time']
# if it has been computed already, the clabel key exists in sdata
try:
array = self._sdata[clabel]
# otherwise compute the timelapse counterpart
except KeyError:
self.build_timelapse(cobs)
array = self._sdata[clabel]
# get value
try:
if obs.mode == 'birth':
value = extrapolate_endpoints(time, array, self.birth_time,
scale=scale, join_points=npts)
elif obs.mode == 'division':
value = extrapolate_endpoints(time, array, self.division_time,
scale=scale, join_points=npts)
elif 'net-increase' in obs.mode:
dval = extrapolate_endpoints(time, array, self.division_time,
scale=scale, join_points=npts)
bval = extrapolate_endpoints(time, array, self.birth_time,
scale=scale, join_points=npts)
if obs.mode == 'net-increase-additive':
value = dval - bval
elif obs.mode == 'net-increase-multiplicative':
value = dval/bval
elif obs.mode == 'average':
value = np.nanmean(array)
            elif obs.mode == 'rate':
                if len(array) < 2:
                    value = np.nan  # not enough values to estimate a rate
                else:
                    if obs.scale == 'log':
                        array = np.log(array)
                    value, intercept = np.polyfit(time, array, 1)
except ExtrapolationError as err:
# msg = '{}'.format(err)
# warnings.warn(msg)
value = np.nan # missing information
self._sdata[label] = value
return
def _disjoint_time_sets(ts1, ts2):
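    """Return True when the two time arrays span non-overlapping intervals."""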
if len(ts1) == 0 or len(ts2) == 0:
return True
min1, min2 = map(np.nanmin, [ts1, ts2])
max1, max2 = map(np.nanmax, [ts1, ts2])
return max1 < min2 or max2 < min1
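# Illustrative check (hypothetical values): empty arrays count as disjoint,
# otherwise only the extremal values of each set are compared:
#   _disjoint_time_sets(np.array([0., 1., 2.]), np.array([3., 4.]))   # -> True
#   _disjoint_time_sets(np.array([0., 1., 2.]), np.array([1.5, 4.]))  # -> False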
def filiate_from_bpointer(cells):
"""Build in place parent/childs attributes in a set of filiated cells
Parameters
----------
cells : list of Cell instances
"""
for cell in cells:
childs = []
for cc in cells:
if cc.bpointer == cell.identifier:
childs.append(cc)
cc.parent = cell
cc.set_division_event()
cell.childs = childs
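# Minimal usage sketch (hypothetical stand-in objects; the real Cell class
# carries these attributes and implements set_division_event itself):
#
#     class StubCell(object):
#         def __init__(self, identifier, bpointer=None):
#             self.identifier = identifier
#             self.bpointer = bpointer
#             self.parent = None
#             self.childs = []
#         def set_division_event(self):
#             pass  # the real method splits the time interval at division
#
#     cells = [StubCell('1'), StubCell('1.1', bpointer='1'),
#              StubCell('1.2', bpointer='1')]
#     filiate_from_bpointer(cells)
#     # [c.identifier for c in cells[0].childs] -> ['1.1', '1.2']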
|
LeBarbouze/tunacell
|
tunacell/base/cell.py
|
Python
|
mit
| 15,188
|
# -*- coding: utf-8 -*-
import scrapy
import numpy
import quandl
from mykgb import indicator
from myapp.models import Quandlset
from mykgb.items import MykgbItem
quandl.ApiConfig.api_key = "taJyZN8QXqj2Dj8SNr6Z"
quandl.ApiConfig.api_version = '2015-04-09'
class QuandlDataSpider(scrapy.Spider):
name = "quandl_data"
allowed_domains = ["www.baidu.com"]
start_urls = ['http://www.baidu.com/']
custom_settings = {
'ITEM_PIPELINES': {
# 'mykgb.pipelines.DestinyPipeline': 100
'mykgb.pipelines.MykgbPipeline': 100
},
'DEFAULT_REQUEST_HEADERS': {
'Referer': 'http://www.baidu.com'
}
}
def parse(self, response):
Quandlset.objects.update(actived=True)
qs = Quandlset.objects.filter(actived=True)
for p in qs:
symbol = p.quandlcode + "1"
if p and p.namezh:
code_str = p.namezh + ' ' + p.exchange + ' ' + p.name
else:
code_str = p.exchange + ' ' + p.name
try:
df = quandl.get(symbol)[-100:]
            except Exception:
                # failed download or unknown symbol: deactivate it and move on
                print("error", symbol)
p.actived = False
p.save()
continue
if 'Last' in df.columns:
df = df.rename(
# columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Volume': 'volume', 'Last': 'close'})
columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Last': 'close'})
elif 'Close' in df.columns:
df = df.rename(
# columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Volume': 'volume', 'Close': 'close'})
columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Close': 'close'})
elif 'Settle' in df.columns:
df = df.rename(
# columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Volume': 'volume', 'Settle': 'close'})
columns={'Open': 'open', 'High': 'high', 'Low': 'low', 'Settle': 'close'})
else:
p.actived = False
p.save()
continue
# df[df['volume'] == 0] = numpy.nan
df = df.dropna()
if not df.empty and df.shape[0] > 50:
item = MykgbItem()
item['title'] = 'sleepless money'
item['code'] = code_str
macd = indicator.get_macd(df)
kdj = indicator.get_kdj(df)
rsi = indicator.get_rsi(df)
cci = indicator.get_cci(df)
item['macd'] = sum(macd.values())
item['kdj'] = sum(kdj.values())
item['rsi'] = sum(rsi.values())
item['cci'] = sum(cci.values())
yield item
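# To run this spider (assuming a configured scrapy project with the Django
# models importable on the path), the standard scrapy CLI applies:
#
#     scrapy crawl quandl_data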
|
back1992/mezzanine-api-docker
|
web/mykgb/spiders/quandl_data.py
|
Python
|
mit
| 2,840
|
def settings(request):
"""
    Expose selected settings values to the template context.
"""
from django.conf import settings
tags = {}
tags['GOOGLE_MAPS_KEY'] = settings.GOOGLE_MAPS_KEY
tags['GOOGLE_ANALYTICS_ENABLED'] = getattr(settings, 'GOOGLE_ANALYTICS_ENABLED', True)
tags['MAP_PROVIDER'] = settings.MAP_PROVIDER
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
tags['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
return tags
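# To activate this context processor, add its dotted path (assumed here from
# this repo's layout) to the TEMPLATES setting, e.g.:
#
#     TEMPLATES = [{
#         'BACKEND': 'django.template.backends.django.DjangoTemplates',
#         'OPTIONS': {
#             'context_processors': [
#                 # ... Django defaults ...
#                 'core.context_processors.settings',
#             ],
#         },
#     }]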
|
JustinWingChungHui/electionleaflets
|
electionleaflets/apps/core/context_processors.py
|
Python
|
mit
| 467
|
from flask import Flask, request, jsonify
import random
import re
import sys
app = Flask(__name__)
SPEC = re.compile(r'^(\d+)d(\d+) ?(\w+)?$')
HIDDEN = ('hide', 'hidden', 'invisible', 'ephemeral', 'private')
USAGE = 'USAGE:\n' \
'`/roll [n]d[x] [options]`\n' \
'where:\n' \
' n == number of dice\n' \
' x == number of sides on each die\n' \
'e.g. `/roll 3d6` will roll 3 6-sided dice. ' \
'[options] may be any of (hide|hidden|invisible|ephemeral|private) ' \
'for a private roll.'
def do_roll(spec):
match = SPEC.match(spec)
if match is None:
return {
'response_type': 'ephemeral',
'text': 'ERROR: invalid roll command `%s`\n\n%s' % (
spec, USAGE)
}
num = int(match.group(1))
size = int(match.group(2))
flag = match.group(3)
if flag is not None and flag not in HIDDEN:
return {
'response_type': 'ephemeral',
'text': 'ERROR: unrecognized modifier `%s`' % flag
}
vals = []
for i in range(0, num):
vals.append(random.randint(1, size))
data = {
'response_type': 'ephemeral' if flag in HIDDEN else 'in_channel'
}
if num == 1:
data['text'] = str(vals[0])
else:
data['text'] = '%s = %d' % (
' + '.join([str(v) for v in vals]), sum(vals))
return data
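# Example results (dice values vary from run to run; the shapes below are
# what do_roll returns):
#   do_roll('3d6')        -> {'response_type': 'in_channel', 'text': '4 + 2 + 6 = 12'}
#   do_roll('1d20 hide')  -> {'response_type': 'ephemeral', 'text': '17'}
#   do_roll('banana')     -> ephemeral error message with USAGE appended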
@app.route("/", methods=['GET', 'POST'])
def roll():
try:
if request.method == 'POST':
spec = request.form['text']
else:
spec = request.args['spec']
return jsonify(do_roll(spec))
    except Exception:
return jsonify({
'response_type': 'ephemeral',
'text': USAGE
})
if __name__ == "__main__":
app.run(debug=True)
|
NUKnightLab/slackdice
|
app.py
|
Python
|
mit
| 1,779
|
# coding=utf-8
"""
desc: error handling handlers
author: congqing.li
date: 2016-10-28
"""
from werkzeug.exceptions import HTTPException
class CustomError(HTTPException):
code = None
description = "NIMABI"
    def __init__(self, description=None, response=None):
        if isinstance(description, tuple):
            description, self.code = description
        super(CustomError, self).__init__(description=description, response=response)
class ObjectNotExists(CustomError):
pass
class TokenExpired(CustomError):
pass
class BadToken(CustomError):
pass
class MissToken(CustomError):
pass
class Forbidden(CustomError):
pass
class BadRequest(CustomError):
pass
class UpdateError(CustomError):
pass
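# Usage sketch (hypothetical handler wiring; CustomError accepts a
# (description, code) tuple, as unpacked in __init__ above):
#
#     from flask import Flask, jsonify
#
#     app = Flask(__name__)
#
#     @app.errorhandler(CustomError)
#     def handle_custom_error(err):
#         return jsonify({'error': err.description}), err.code or 500
#
#     # somewhere in a view:
#     #     raise TokenExpired(('token has expired', 401))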
|
levi-lq/flask-vue-example
|
api_rest/error_handlers.py
|
Python
|
mit
| 669
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "radiocontrol.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Simon-Hohberg/Pi-Radio
|
radiocontrol/manage.py
|
Python
|
mit
| 255
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import hashlib
from django.utils import six
URL_LIST_CACHE = 'powerpages:url_list'
SITEMAP_CONTENT = 'powerpages:sitemap'
def get_cache_name(prefix, name):
"""
    Cache key constructor. Uses the same method as the Django cache system.
Examples:
*) prefix=profile.cache, name=<requestuser.id>
*) prefix=template.cache.sidebar, name=<requestuser.id>
"""
return '{0}.{1}'.format(
prefix, hashlib.md5(six.text_type(name).encode('utf-8')).hexdigest()
)
def template_source(page_pk):
"""Create cache key for page template"""
return 'powerpages:template:{0}'.format(page_pk)
def rendered_source_for_user(page_pk, user_id):
"""Create cache key for rendered page source based on current user"""
return 'powerpages:rendered_source_user:{0}:{1}'.format(page_pk, user_id)
def rendered_source_for_lang(page_pk, lang):
"""Create cache key for rendered page source based on current language"""
return 'powerpages:rendered_source_lang:{0}:{1}'.format(page_pk, lang)
def url_cache(name, *args, **kwargs):
"""
Creates cache key for url of CMS page or standard Django URL
based on hashed serialized name with optional *args and **kwargs
"""
serialized_url = json.dumps([name, args, kwargs], sort_keys=True)
return get_cache_name('powerpages:urls', serialized_url)
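# Usage sketch (hypothetical URL name): keys are stable across processes
# because the serialized arguments are hashed:
#
#     key = url_cache('article_detail', pk=42)
#     # -> 'powerpages:urls.<md5 of ["article_detail", [], {"pk": 42}]>'
#
#     from django.core.cache import cache
#     cache.set(key, '/articles/42/', timeout=300)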
|
Open-E-WEB/django-powerpages
|
powerpages/cachekeys.py
|
Python
|
mit
| 1,418
|
# -*- coding: utf-8 -*-
__author__ = 'Sergey Sobko'
class HashSet(object):
_set_dict = None
def __init__(self):
self._set_dict = dict()
def add(self, key, value):
self._set_dict[hash(key)] = value
def get(self, key):
return self._set_dict.get(hash(key))
def __repr__(self):
        return repr(self._set_dict)  # __repr__ must return a string
class LexNode(object):
letter = None
data = None
next_letters = None
def __init__(self, letter):
self.letter = letter
self.next_letters = list()
class TreeSet(object):
root_letter = None
def __init__(self):
self.root_letter = LexNode(None)
    def add(self, key, value):
        assert isinstance(key, basestring)
        current_node = self.root_letter
        for letter in key:
            # step into the next letter's LexNode, creating it when absent
            match = next((node for node in current_node.next_letters
                          if node.letter == letter), None)
            if match is None:
                match = LexNode(letter)
                current_node.next_letters.append(match)
            current_node = match
        current_node.data = value
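# Usage sketch: HashSet is a thin dict wrapper keyed by hash(key); TreeSet
# stores values in a character trie, with the value landing on the node of
# the final letter:
#
#     hs = HashSet()
#     hs.add('foo', 1)
#     hs.get('foo')     # -> 1
#
#     ts = TreeSet()
#     ts.add('foo', 1)  # builds LexNodes f -> o -> o, data=1 on the last one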
|
profitware/python-sandbox-algo
|
sandboxalgo/sets.py
|
Python
|
mit
| 861
|
import pyglet
import tkinter
WINDOW_WIDTH = 800 # x
WINDOW_HEIGHT = 600 # y
LEVEL = 6 # original was 4
X_SIZE = 2 * LEVEL
Y_SIZE = 3 * LEVEL
def get_resolution():
root = tkinter.Tk()
return root.winfo_screenwidth(), root.winfo_screenheight()
def get_position(x, y):
return ((x // X_SIZE) * X_SIZE, (y // Y_SIZE) * Y_SIZE)
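# With LEVEL = 6 the grid cells are 12 x 18 pixels, so get_position snaps any
# pixel coordinate to the origin corner of its cell, e.g.:
#   get_position(25, 40) -> (24, 36)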
def cursor(pos):
x1 = pos[0]
x2 = x1 + X_SIZE
y1 = pos[1]
y2 = y1 + Y_SIZE
return [x1, y1, x2, y1,
x2, y1, x2, y2,
x1, y1, x1, y2,
x1, y2, x2, y2]
class Rail:
def __init__(self, batch, pos):
x1 = pos[0]
x2 = x1 + X_SIZE
y1 = pos[1] + Y_SIZE / 2
batch.add(6, pyglet.gl.GL_LINES, None,
('v2f', [x1, y1-1, x2, y1-1,
x1, y1, x2, y1,
x1, y1+1, x2, y1+1]),
('c3B', (177, 57, 57,
177, 57, 57,
255, 82, 82,
255, 82, 82,
177, 57, 57,
177, 57, 57)))
class AppWin(pyglet.window.Window):
def __init__(self, **kwargs):
kwargs.update(dict(
caption='SimCTC',
))
super(AppWin, self).__init__(**kwargs)
self.batch = pyglet.graphics.Batch()
self.x = -1
self.y = -1
for m in range(0, WINDOW_WIDTH+1, X_SIZE):
for n in range(0, WINDOW_HEIGHT+1, Y_SIZE):
self.batch.add(1, pyglet.gl.GL_POINTS, None,
('v2i', (m, n)),
('c3B', (0, 255, 0)))
def on_draw(self):
self.clear()
self.batch.draw()
def on_mouse_motion(self, x, y, dx, dy):
replot = False
if self.x == -1 and self.y == -1:
pos = get_position(x, y)
self.cursor = self.batch.add(8, pyglet.gl.GL_LINES, None,
('v2f', cursor(pos)),
('c3B', (255, 0, 0)*8))
self.x = x
self.y = y
if abs(dx) > 0:
self.x = self.x + dx
replot = True
if abs(dy) > 0:
self.y = self.y + dy
replot = True
if replot:
self.cursor.vertices = cursor(get_position(self.x, self.y))
def on_mouse_release(self, x, y, button, modifiers):
if button == pyglet.window.mouse.LEFT:
Rail(self.batch, get_position(self.x, self.y))
if __name__ == '__main__':
window = AppWin(width=WINDOW_WIDTH, height=WINDOW_HEIGHT)
window.set_exclusive_mouse(True)
window.set_mouse_visible(False)
pyglet.app.run()
|
celestian/simCTC
|
sctc.py
|
Python
|
mit
| 2,726
|
import re
import numpy as np
from scipy import ndimage, spatial
import bresenham
import mpl_tools
import vtk_tools
def load_pdb(name):
with open(name+'.pdb') as fp:
points = []
conns = []
for line in fp:
if line.startswith('HET'):
                pattern = r'(-?\d+\.\d\d\d)'
x, y, z = (float(c) for c in re.findall(pattern, line))
points.append([x, y, z])
elif line.startswith('CON'):
pattern = r'(\d+)'
ids = (int(c) for c in re.findall(pattern, line))
first = next(ids)
conns.extend([(first-1, other-1) for other in ids])
return points, conns
def extract_spheres(im):
'''
credit to untubu @ stackoverflow for this
still needs a lot of improvement
'''
im = np.atleast_3d(im)
data = ndimage.morphology.distance_transform_edt(im)
max_data = ndimage.filters.maximum_filter(data, 10)
maxima = data==max_data # but this includes some globally low voids
min_data = ndimage.filters.minimum_filter(data, 10)
diff = (max_data - min_data) > 1
maxima[diff==0] = 0
labels, num_maxima = ndimage.label(maxima)
centers = [ndimage.center_of_mass(labels==i) for i in range(1, num_maxima+1)]
radii = [data[center] for center in centers]
return np.array(centers), np.array(radii)
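# Illustrative call (hypothetical toy volume): a solid cube of foreground
# voxels should yield roughly one maximal inscribed sphere near its centre:
#
#     demo = np.zeros((30, 30, 30), dtype=bool)
#     demo[5:25, 5:25, 5:25] = True
#     centers, radii = extract_spheres(demo)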
def rasterize():
pass
solid_nodes, solid_edges = map(np.array, load_pdb('CHA'))
solid_nodes -= solid_nodes.min(axis=0)
solid_nodes *= 4
coord_pair = solid_nodes[solid_edges]
discretized = []
for a, b in coord_pair:
point_list = bresenham.bresenhamline(np.atleast_2d(a), b, -1).astype(int).tolist()
discretized.extend([tuple(point) for point in point_list])
array = np.array(discretized)
size = array.max(axis=0) - array.min(axis=0) + 1
canvas = np.ones(size, dtype=bool)
offset = array.min(axis=0)
# shift line voxels by the canvas origin before carving them out of the solid
shifted = array - offset
canvas[tuple(shifted.T)] = 0
# mpl_tools.visualize(canvas)
centers, radii = extract_spheres(canvas)
vtk_tools.visualize(solid_nodes, solid_edges, centers, radii)
|
RodericDay/CIF-characterize
|
characterize.py
|
Python
|
mit
| 2,124
|