repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
algorithm-ninja/konig | legacy/legacy.py | Python | apache-2.0 | 21,057 | 0.010448 | #!/usr/bin/env pypy
# Libreria di funzioni sui grafi.
#
# classe astratta graph, derivate ugraph (grafo non diretto) e dgraph(grafo diretto).
#
# costruttore:
# graph(G[,w]) grafo copia di G
# graph(N[,w]) grafo vuoto con N nodi
# graph(N,E[,w]) grafo con lista di archi assegnata
# graph(N,[M=...],type='graph_type') grafo di tipo graph_type con N nodi,
# se type = 'forest' allora M=numero di archi.
#
# print:
# stampa in formato olimpico randomizzando l'ordine degli archi.
# se w era stato specificato, aggiunge dei pesi a ogni arco ottenendoli tramite w(e).
#
# confronti:
# i grafi si confrontano con le relazioni insiemistiche sugli archi (sottoinsieme...).
#
# container:
# l'iterazione su un oggetto graph e' sulla lista ordinata dei suoi archi.
#
# operazioni tra grafi:
# + unione disgiunta (o aggiunta di un arco)
# * prodotto cartesiano
# & intersezione
# | unione
# ~ complementare
# - trasposto (solo grafi diretti)
#
# funzioni random:
# shuffle() permuta i label dei nodi del grafo
# connect() aggiunge il minimo numero di archi necessario per connettere il grafo
# addedges(K, [candidates]) aggiunge K archi random (tra i candidates, oggetto edgerange)
#
# varie:
# N() quantita' di nodi
# M() quantita' di archi
# add(e) aggiunge un arco
# discard(e) rimuove un arco
#
#
# classe edgerange
#
# rappresenta un range di archi.
# instanziazione:
#
# edgerange(g,[0,3],[3,6]) tutti gli archi da 0,1,2 a 3,4,5
# edgerange(g,[[[0,3],[3,6]],[[6,7],[1,5]]]) come prima piu' quelli da 6 a 1,2,3,4
# edgerange(g,[0,3],[3,6]) + edgerange([6,7],[1,5]) come prima
#
# utilizzo:
#
# r = edgerange(...)
# for i in edgerange(...)
# if e in edgerange(...)
# temp = r[i] (sconsigliato)
from random import sample, shuffle, choice, randint as _randint
from math import sqrt
from bisect import bisect_right, bisect_left
from sys import stderr, maxint
from sortedcontainers import SortedSet as sortedset
# random da A a B pero' con variabili long
def randint(A, B=None):
"""Random da A a B che funziona con valori long."""
if B is None:
return _randint(0, A-1)
return _randint(A, B-1)
# campiono K elementi in Tot evitando quelli in Pop (assume Pop sottoinsieme di Tot)
def lsample(Tot, K, Pop = xrange(0)):
"""Ritorna una lista con un campione di K elementi in Tot ma non in Pop."""
if isinstance(Tot,int):
Tot = xrange(Tot)
if isinstance(Tot,long):
Tot = lrange(Tot)
if not isinstance(Tot, lrange) and not (0 <= K and len(Pop) + K <= len(Tot)):
raise StandardError("Parameter K out of bounds.")
Add = [randint((Tot.n if isinstance(Tot,lrange) else len(Tot))-len(Pop)-K+1) for i in xrange(K)]
Add.sort()
for i in xrange(K):
Add[i] += i
if isinstance(Tot, set):
Tot = list(Tot)
if isinstance(Tot, list):
Tot.sort()
if isinstance(Pop, set):
Pop = list(Pop)
if isinstance(Pop, list):
Pop.sort()
i = j = 0
ip = Pop.__iter__()
p = ip.next() if i < len(Pop) else None
while j < K:
if i >= len(Pop) or p > Tot[Add[j]+i]:
Add[j] = Tot[Add[j]+i]
j+=1
else:
i+=1
p = ip.next() if i < len(Pop) else None
return Add
class graph:
"""Implementa il concetto di grafo allo scopo di generare grafi pseudo-casuali con proprieta' fissate.
Il grafo viene rappresentato come insieme ordinato di archi.
Il costruttore consente di creare un qualsiasi tipo speciale noto di grafi, le usuali operazioni aritmetiche consentono di effettuare analoghe operazioni combinatoriali, mentre le usuali operazioni logiche consentono di effettuare le analoghe operazioni insiemistiche.
E' inoltre possibile aggiungere archi a caso (con il metodo addedges) o aggiungere archi in modo da connettere il grafo (con il metodo connect)."""
# costruttore
def __init__(self, N=0, E=None, type=None, w=None, M=None, lbl=None):
"""Costruisce un grafo vuoto con N vertici, e insieme di archi E (se specificato).
Se type e' specificato, costruisce invece un grafo di quel tipo.
I valori ammissibili per type sono cycle,path,tree,forest,clique.
Se w e' specificato, il grafo viene considerato pesato, con pesi generati dalla funzione w().
E' anche ammessa l'instanziazione graph(G) con G un grafo gia' esistente."""
if isinstance(N,graph) and E is None and M is None and type is None:
E = sortedset(N.E)
if w is None:
w = N.w
if lbl is None:
lbl = list(N.lbl)
N = N.V
if not isinstance(N,int):
raise StandardError("Incompatible parameters specified.")
if isinstance(E,int):
M=E
E=None
if E is None:
E = sortedset([])
else:
if (M is not None) or (type is not None):
raise StandardError("Incompatible par | ameters specified.")
if len(E) > 0 and isinstance(E.__iter__().next(),list):
E = [self.cod(e) for e in E]
E = sortedset(E)
if len(E)==0 and N > 1:
if type == 'cycle':
for i in xrange(N):
E.add(self.cod([i,(i+1)%N]))
if type == 'path':
for i in xrange(N-1):
E.add(self.cod([i,i+1]))
if type == 'tree':
for i | in xrange(1,N):
E.add(self.cod([randint(i),i]))
if type == 'forest':
if not (0 <= M < N):
raise StandardError("Parameter M out of bounds.")
for i in lsample(N-1,M):
E.add(self.cod([randint(i+1),i+1]))
if type == 'clique':
for i in xrange(N-1):
for j in xrange(i+1,N):
E.add(self.cod([i,j]))
if type == 'star':
for i in xrange(1,N):
E.add(self.cod([0,i]))
if type == 'wheel':
for i in xrange(1,N):
E.add(self.cod([0,i]))
E.add(self.cod([i,(i+1)%N]))
# eventualmente aggiungere: gear, caterpillar/lobster
self.V=N
self.lbl = lbl if lbl is not None else range(self.V)
if w is not None:
try:
w([0,1])
self.w=w
except TypeError:
self.w = lambda e: w()
else:
self.w=w
self.E=E
# funzioni di stampa
def __repr__(self):
"""Rappresentazione python del grafo."""
s = self.__class__.__name__ + '(' + str(self.V) + ',' + str([e for e in self])
if self.w is not None:
s += ', w=' + repr(self.w)
if not all([self.lbl[i] == i for i in xrange(self.V)]):
s += ', lbl=' + repr(self.lbl)
return s + ')'
def __str__(self):
"""Rappresentazione olimpica del grafo."""
s = str(self.V) + ' ' + str(self.M()) + '\n' + self.printedges()
return s.rstrip()
def printedges(self, zero_based = False):
"""Rappresentazione olimpica del grafo, senza la prima riga."""
s = bytearray()
Ed = list(self.E)
shuffle(Ed)
for e in Ed:
de = self.dec(e)
s += str(self.lbl[de[0]]+(0 if zero_based else 1)) + ' ' + str(self.lbl[de[1]]+(0 if zero_based else 1))
if self.w is not None:
s += ' ' + str(self.w(de))
s += '\n'
return str(s).rstrip()
# funzioni di confronto
def __lt__(self,other):
"""Relazione di sottoinsieme proprio."""
return self.E < other.E
def __le__(self,other):
"""Relazione di sottoinsieme."""
return self.E <= other.E
def __eq__(self,other):
"""Relazione di uguaglianza degli archi."""
return self.E == other.E
def __ne__(self,other):
"""Relazione di disuguaglianza degli archi."""
return self.E != other.E
|
MikrosAnim/vulture | vulture/__init__.py | Python | gpl-3.0 | 28 | 0 | f | rom vultu | re import Vulture
|
mikeh77/mi-instrument | mi/instrument/seabird/sbe16plus_v2/test/test_driver.py | Python | bsd-2-clause | 61,730 | 0.007112 | """
@package mi.instrument.seabird.sbe16plus_v2.test.test_driver
@file mi/instrument/seabird/sbe16plus_v2/test/test_driver.py
@author David Everett
@brief Test cases for InstrumentDriver
USAGE:
Make tests verbose and provide stdout
* From the IDK
$ bin/test_driver
$ bin/test_driver -u
$ bin/test_driver -i
$ bin/test_driver -q
* From pyon
$ bin/nosetests -s -v .../mi/instrument/seabird/sbe16plus_v2/ooicore
$ bin/nosetests -s -v .../mi/instrument/seabird/sbe16plus_v2/ooicore -a UNIT
$ bin/nosetests -s -v .../mi/instrument/seabird/sbe16plus_v2/ooicore -a INT
$ bin/nosetests -s -v .../mi/instrument/seabird/sbe16plus_v2/ooicore -a QUAL
"""
__author__ = 'David Everett'
__license__ = 'Apache 2.0'
# Standard lib imports
import time
import unittest
# 3rd party imports
from nose.plugins.attrib import attr
from mock import Mock
# MI logger
from mi.core.log import get_logger ; log = get_logger()
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.chunker import StringChunker
# from interface.objects import AgentCommand
from mi.idk.unit_test import DriverTestMixin
from mi.idk.unit_test import ParameterTestConfigKey
from mi.idk.unit_test import AgentCapabilityType
from mi.core.exceptions import InstrumentParameterException
from mi.core.exceptions import InstrumentProtocolException
from mi.core.exceptions import InstrumentCommandException
from mi.core.exceptions import InstrumentTimeoutException
from mi.instrument.seabird.sbe16plus_v2.driver import SBE16Protocol
from mi.instrument.seabird.sbe16plus_v2.driver import SBE16InstrumentDriver
from mi.instrument.seabird.sbe16plus_v2.driver import DataParticleType
from mi.instrument.seabird.sbe16plus_v2.driver import ConfirmedParameter
from mi.instrument.seabird.sbe16plus_v2.driver import NEWLINE
from mi.instrument.seabird.sbe16plus_v2.driver import SBE16DataParticleKey
from mi.instrument.seabird.sbe16plus_v2.driver import SBE16StatusParticleKey
from mi.instrument.seabird.sbe16plus_v2.driver import SBE16CalibrationParticleKey
from mi.instrument.seabird.sbe16plus_v2.driver import ProtocolState
from mi.instrument.seabird.sbe16plus_v2.driver import ProtocolEvent
from mi.instrument.seabird.sbe16plus_v2.driver import ScheduledJob
from mi.instrument.seabird.sbe16plus_v2.driver import Capability
from mi.instrument.seabird.sbe16plus_v2.driver import Parameter
from mi.instrument.seabird.sbe16plus_v2.driver import Command
from mi.instrument.seabird.sbe16plus_v2.driver import Prompt
from mi.instrument.seabird.driver import SBE_EPOCH
from mi.instrument.seabird.test.test_driver import SeaBirdUnitTest
from mi.instrument.seabird.test.test_driver import SeaBirdIntegrationTest
from mi.instrument.seabird.test.test_driver import SeaBirdQualificationTest
from mi.instrument.seabird.test.test_driver import SeaBirdPublicationTest
from mi.core.instrument.instrument_driver import DriverConnectionState
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import ResourceAgentState
class SeaBird16plusMixin(DriverTestMixin):
InstrumentDriver = SBE16InstrumentDriver
'''
Mixin class used for storing data particle constants and common data assertion methods.
'''
# Create some short names for the parameter test config
TYPE = ParameterTestConfigKey.TYPE
READONLY = ParameterTestConfigKey.READONLY
STARTUP = ParameterTestConfigKey.STARTUP
DA = ParameterTestConfigKey.DIRECT_ACCESS
VALUE = ParameterTestConfigKey.VALUE
REQUIRED = ParameterTestConfigKey.REQUIRED
DEFAULT = ParameterTestConfigKey.DEFAULT
STATES = ParameterTestConfigKey.STATES
###
# Instrument output (driver input) Definitions
###
VALID_SAMPLE = "#0409DB0A738C81747A84AC0006000A2E541E18BE6ED9" + NEWLINE
| VALID_SAMPLE2 = "0409DB0A738C81747A84AC0006000A2E541E18BE6ED9" + NEWLINE
VALID_DS_RESPONSE = 'SBE 16plus V 2.5 SERIAL NO. 6841 28 Feb 2013 16:39:31' + NEWLINE + \
'vbatt = 23.4, vlith = 8.0, ioper = 61.4 ma, ipump = 0.3 ma,' + NEWLINE + \
'status = not logging' + NEWLINE + \
'samples = 0, free = 4386542' + NEWLINE + \
'sample inter | val = 10 seconds, number of measurements per sample = 4' + NEWLINE + \
'pump = run pump during sample, delay before sampling = 0.0 seconds, delay after sampling = 0.0 seconds' + NEWLINE + \
'transmit real-time = yes' + NEWLINE + \
'battery cutoff = 7.5 volts' + NEWLINE + \
'pressure sensor = strain gauge, range = 160.0' + NEWLINE + \
'SBE 38 = no, SBE 50 = no, WETLABS = no, OPTODE = no, SBE63 = no, Gas Tension Device = no' + NEWLINE + \
'Ext Volt 0 = yes, Ext Volt 1 = yes' + NEWLINE + \
'Ext Volt 2 = yes, Ext Volt 3 = yes' + NEWLINE + \
'Ext Volt 4 = yes, Ext Volt 5 = yes' + NEWLINE + \
'echo characters = yes' + NEWLINE + \
'output format = raw HEX' + NEWLINE + \
'serial sync mode disabled' + NEWLINE
VALID_DCAL_QUARTZ = 'SBE 16plus V 2.5 SERIAL NO. 6841 28 Feb 2013 18:37:40' + NEWLINE + \
'temperature: 18-May-12' + NEWLINE + \
' TA0 = 1.561342e-03' + NEWLINE + \
' TA1 = 2.561486e-04' + NEWLINE + \
' TA2 = 1.896537e-07' + NEWLINE + \
' TA3 = 1.301189e-07' + NEWLINE + \
' TOFFSET = 0.000000e+00' + NEWLINE + \
'conductivity: 18-May-11' + NEWLINE + \
' G = -9.896568e-01' + NEWLINE + \
' H = 1.316599e-01' + NEWLINE + \
' I = -2.213854e-04' + NEWLINE + \
' J = 3.292199e-05' + NEWLINE + \
' CPCOR = -9.570000e-08' + NEWLINE + \
' CTCOR = 3.250000e-06' + NEWLINE + \
' CSLOPE = 1.000000e+00' + NEWLINE + \
'pressure S/N = 125270, range = 1000 psia: 02-nov-12' + NEWLINE + \
' PC1 = -4.642673e+03' + NEWLINE + \
' PC2 = -4.611640e-03' + NEWLINE + \
' PC3 = 8.921190e-04' + NEWLINE + \
' PD1 = 7.024800e-02' + NEWLINE + \
' PD2 = 0.000000e+00' + NEWLINE + \
' PT1 = 3.022595e+01' + NEWLINE + \
' PT2 = -1.549720e-04' + NEWLINE + \
' PT3 = 2.677750e-06' + NEWLINE + \
' PT4 = 1.705490e-09' + NEWLINE + \
' PSLOPE = 1.000000e+00' + NEWLINE + \
' POFFSET = 0.000000e+00' + NEWLINE + \
'volt 0: offset = -4.650526e-02, slope = 1.246381e+00' + NEWLINE + \
'volt 1: offset = -4.618105e-02, slope = 1.247197e+00' + NEWLINE + \
'volt 2: offset = -4.659790e-02, slope = 1.247601e+00' + NEWLINE + \
'volt 3: offset = -4.502421e-02, slope = 1.246911e+00' + NEWLINE + \
'volt 4: offset = -4.589158e-02, slope = 1.246346e+00' + NEWLINE + \
'volt 5: offset = -4.609895e-02, slope = 1.247868e+00' + NEWLINE + \
' EXTFREQSF = 9.999949e-01' + NEWLINE
VALID_DCAL_STRAIN ='SBE 16plus V 2.5 SERIAL NO. 6841 28 Feb 2013 18:37:40' + NEWLINE + \
'temperature: 18-May-12' + NEWLINE + \
' TA0 = 1.561342e-03' + NEWLINE + \
' TA1 = 2.561486e-04' + NEWLINE + \
' TA2 = 1.896537e-07' + NEWLINE + \
' TA3 = 1.301189e-07' + NEWLINE + \
' TOFFSET = 0.000000e+00' + NEWLINE + \
'conductivity: 18-May-11' + NEWLINE + \
' G = -9.896568e-01' + NEWLINE + \
' H = 1.316599e-01' + NEWLINE + \
' I = -2.213854e-04' + NEWLINE + \
' J = 3.292199e-05' + NEWLINE + \
' CPCOR = -9.570000e-08' + NEWLINE + \
' CTCOR = 3.250000e-06' + NEWLINE + \
' CSLOPE = 1.000000e+00' + NEWLINE + \
'pressure S/N = 3230195, range = 160 psia: 11-May-11' + NEWLINE + \
' PA0 = 4.960417e-02' + NEWLINE + \
' PA1 = 4.883682e-04' + NEWLINE + \
' PA2 = -5.687309e-12' + NEWLINE + \
' PTCA0 = 5.249802e+05' + NEWLINE + \
' PTCA1 = 7.595719e+00' + NEWLINE + \
' PTCA2 = -1.322776e-01' + NEWLINE + \
' PTCB0 = 2.503125e+01' + NEWLINE + \
' PTCB1 = 5.000000e-05' + |
SureshMatsui/SaveCoin | contrib/linearize/linearize.py | Python | mit | 3,354 | 0.034287 | #!/usr/bin/python
#
# linearize.py: Construct a linear, no-fork, best version of the blockchain.
#
#
# Copyright (c) 2013 The SaveCoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import json
import struct
import re
import base64
import httplib
import sys
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
class SaveCoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read() |
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblock(self, hash, verbose=True):
return self.rpc('getblock', [hash | , verbose])
def getblockhash(self, index):
return self.rpc('getblockhash', [index])
def getblock(rpc, settings, n):
hash = rpc.getblockhash(n)
hexdata = rpc.getblock(hash, False)
data = hexdata.decode('hex')
return data
def get_blocks(settings):
rpc = SaveCoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpassword'])
outf = open(settings['output'], 'ab')
for height in xrange(settings['min_height'], settings['max_height']+1):
data = getblock(rpc, settings, height)
outhdr = settings['netmagic']
outhdr += struct.pack("<i", len(data))
outf.write(outhdr)
outf.write(data)
if (height % 1000) == 0:
sys.stdout.write("Wrote block " + str(height) + "\n")
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: linearize.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'netmagic' not in settings:
settings['netmagic'] = 'f9beb4d9'
if 'output' not in settings:
settings['output'] = 'bootstrap.dat'
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 8332
if 'min_height' not in settings:
settings['min_height'] = 0
if 'max_height' not in settings:
settings['max_height'] = 279000
if 'rpcuser' not in settings or 'rpcpassword' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['netmagic'] = settings['netmagic'].decode('hex')
settings['port'] = int(settings['port'])
settings['min_height'] = int(settings['min_height'])
settings['max_height'] = int(settings['max_height'])
get_blocks(settings)
|
gewaltig/cython-neuron | topology/examples/grid_iaf_irr.py | Python | gpl-2.0 | 1,646 | 0.001823 | #! /usr/bin/env python
#
# grid_iaf_irr.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
'''
NEST Topology Module Example
Create layer of 12 freely placed iaf_neurons, visualize
BCCN Tutorial @ CNS*09
Hans Ekkehard Plesser, UMB
'''
import nest
import pylab
import random
import nest.topology as topo
pylab.ion()
nest.ResetKernel()
# generate list of 12 (x,y) pairs
pos = [[random.uniform(-0.75,0.75), random.uniform(-0.5,0.5)]
for j in range(12)]
l1 = topo.CreateLayer({'extent': [2., 1.5],
'positions': pos,
'elements': 'iaf_neuron'})
nest.PrintNetwork()
nest.PrintNetwork(2)
nest.PrintNetwork(2, l1)
topo.PlotLayer(l1, nodesize=50)
# beautify
pylab.axis([-1.0, 1.0, -0.75, 0.75])
pylab.axes().set_aspect('eq | ual', 'box')
pylab.axes().set_xticks((-0.75, -0.25, 0.25, 0.75))
pylab.axes().set_yticks((-0.5, 0, 0.5))
pylab.grid(True)
pylab.xlabel('4 Columns, Extent: 1.5')
pylab.ylabel('2 R | ows, Extent: 1.0')
# pylab.savefig('grid_iaf_irr.png')
|
maciejkula/glove-python | glove/corpus.py | Python | apache-2.0 | 2,667 | 0.000375 | # Cooccurrence matrix construction tools
# for fitting the GloVe model.
import numpy as np
try:
# Python 2 compat
import cPickle as pickle
except ImportError:
import pickle
from .corpus_cython import construct_cooccurrence_matrix
class Corpus(object):
"""
Class for constructing a cooccurrence matrix
from a corpus.
A dictionry mapping words to ids can optionally
be supplied. If left None, it will be constructed
from the corpus.
"""
def __init__(self, dictionary=None):
self.dictionary = {}
self.dictionary_supplied = False
self.matrix = None
if dictionary is not None:
self._check_dict(dictionary)
self.dictionary = dictionary
self.dictionary_supplied = True
def _check_dict(self, dictionary):
if (np.max(list(dictionary.values())) != (len(dictionary) - 1)):
raise Exception('The largest id in the dictionary '
'should be equal to its length minus one.')
if np.min(list(dictionary.values())) != 0:
raise Exception('Dictionary ids should start at zero')
def fit(self, corpus, window=10, ignore_missing=False):
"""
Perform a pass through the corpus to construct
the cooccurrence matrix.
Parameters:
- iterable of lists of strings corpus
- int window: the length of the (symmetric)
context window used for cooccurrence.
- bool ignore_missing: whether to ignore words missing from
the dictionar | y (if it was supplied).
| Context window distances will be preserved
even if out-of-vocabulary words are
ignored.
If False, a KeyError is raised.
"""
self.matrix = construct_cooccurrence_matrix(corpus,
self.dictionary,
int(self.dictionary_supplied),
int(window),
int(ignore_missing))
def save(self, filename):
with open(filename, 'wb') as savefile:
pickle.dump((self.dictionary, self.matrix),
savefile,
protocol=pickle.HIGHEST_PROTOCOL)
@classmethod
def load(cls, filename):
instance = cls()
with open(filename, 'rb') as savefile:
instance.dictionary, instance.matrix = pickle.load(savefile)
return instance
|
escapewindow/python-scriptharness | scriptharness/structures.py | Python | mpl-2.0 | 25,153 | 0.000398 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Data structures for configs.
There are two config dict models here:
* LoggingDict logs any changes to the dict or its children. When debugging,
config changes will be marked in the log. This is the default model.
* ReadOnlyDict recursively locks the dictionary. This is to aid in debugging;
one can assume the config hasn't changed from the moment of locking.
This is the original `mozharness` model.
Attributes:
DEFAULT_LEVEL (int): the default logging level to set
DEFAULT_LOGGER_NAME (str): the default logger name to use
QUOTES (Tuple[str, ...]): the order of quotes to use for key logging
LOGGING_STRINGS (Dict[str, Dict[str, str]]): a dict of strings to use for
logging, for easier unittesting and potentially for future localization.
MUTED_LOGGING_STRINGS (Dict[str, Dict[str, str]): a dict of strings to use for logging when
the values in the list/dict shouldn't be logged
SUPPORTED_LOGGING_TYPES (Dict[TypeVar, Class]): a non-logging to logging class map, e.g.
dict: LoggingDict. Not currently supporting sets or collections.
"""
from __future__ import absolute_import, division, print_function, \
unicode_literals
from copy import deepcopy
from scriptharness.exceptions import ScriptHarnessException
import six
import logging
import pprint
# Constants {{{1
DEFAULT_LEVEL = logging.INFO
DEFAULT_LOGGER_NAME = 'scriptharness.data_structures'
QUOTES = ("'", '"', "'''", '"""')
LOGGING_STRINGS = {
# position, self, item
"list": {
"delitem": "__delitem__ %(item)s",
"log_self": "now looks like %(self)s",
"setitem": "__setitem__ %(position)d to %(item)s",
"append": "appending %(item)s",
"extend": "extending with %(item)s",
"insert": "inserting %(item)s at position %(position)s",
"remove": "removing %(item)s",
"pop_no_args": "popping",
"pop_args": "popping position %(position)s",
"sort": "sorting",
"reverse": "reversing",
},
# key, value, default
"dict": {
"delitem": "__delitem__ %(key)s",
"setitem": "__setitem__ %(key)s to %(value)s",
"clear": "clearing dict",
"pop": {
"message_no_default": "popping dict key %(key)s",
"message_default": "popping dict key %(key)s (default %(default)s)",
},
"popitem": {
"message": "popitem",
"changed": "the popitem removed the key %(key)s",
},
"setdefault": {
"message": "setdefault %(key)s to %(default)s",
"unchanged": "setdefault: %(key)s unchanged",
"changed": "setdefault: %(key)s now %(value)s",
},
"update": {
"message": "update %(key)s to %(value)s",
"changed": "update: %(key)s now %(value)s",
"unchanged": "update: %(key)s unchanged",
},
},
}
MUTED_LOGGING_STRINGS = {
# position, self, item
"list": {
"delitem": "__delitem__ %(item)s",
"setitem": "__setitem__ %(position)d ...",
"append": "appending ...",
"extend": "extending ...",
"insert": "inserting at position %(position)s",
"remove": "removing ...",
"pop_no_args": "popping",
"pop_args": "popping position %(position)s",
"sort": "sorting",
"reverse": "reversing",
},
# key, value, default
"dict": {
"delitem": "__delitem__ %(key)s",
"setitem": "__setitem__ %(key)s ...",
"clear": "clearing dict",
"pop": {
"message_no_default": "popping dict key %(key)s",
"message_default": "popping dict key %(key)s with default ...",
},
"popitem": {
"message": "popitem",
"changed": "the popitem removed the key %(key)s",
},
"setdefault": {
"message": "setdefault %(key)s ...",
"unchanged": "setdefault: %(key)s unchanged",
"changed": "setdefault: %(key)s changed",
},
"update": {
"message": "update %(key)s ...",
"changed": "update: %(key)s changed",
"unchanged": "update: %(key)s unchanged",
},
},
}
def iterate_pairs(data):
"""Iterate over pairs of a data structure.
Usage:: for key, value in iterate_pairs(data_structure)::
Args:
data (Sequence[Any, Any]): a dict, iterable-of-iterable pairs
"""
if isinstance(data, dict):
if six.PY2:
iterable = data.iteritems()
else:
iterable = data.items()
else:
iterable = data
if len(data) >= 2 and not isinstance(data[0], (tuple, list)):
iterable = zip(data[::2], data[1::2])
return iterable
# LoggingClasses and helpers {{{1
# LoggingClass {{{2
class LoggingClass(object):
"""General logging methods for the Logging* classes to subclass.
Attributes:
level (int): the logging level for changes
logger_name (str): the logger name to use
name (str): the name of the class for logs
parent (str): the name of the parent, if applicable, for logs
"""
name = None
parent = None
level = None
logger_name = None
def items(self):
"""Return dict.items() for dicts, and enumerate(self) for lists+tuples.
This both simplifies recursively_set_parent() and silences pylint
complaining that LoggingClass doesn't have an items() method.
The main negative here might be adding an attr items to non-dict
data types.
"""
if issubclass(self.__class__, dict):
return super(LoggingClass, self).items()
else:
return enumerate(self)
def recursively_set_parent(self, name=None, parent=None):
"""Recursively set name + parent.
If our LoggingDict is a multi-level nested Logging* instance, then
seeing a log message that something in one of the Logging* instances
has changed can be confusing. If we know that it's
grandparent[parent][self][child] that has changed, then the log
message is helpful.
For each child, set name automatically. For dicts, the name is the
key. For everything else, the name is the index.
Args:
name (Optional[str]): set self.name, for later logging purposes.
Defaults to None.
parent (Optional[Logging*]): set self.parent, for logging purposes.
Defaults to None.
"""
if name is not None:
self.name = name
if parent is not None:
self.parent = parent
for child_name, child in self.items():
if is_logging_class(child):
child.recursively_set_parent(
child_name, self
)
def _child_set_parent(self, child, child_name):
| """If child is a Logging* instance, set its parent and name.
Args:
child (Any): an object, which might be a Logging* instance
| child_name (str): the name to set in the child
"""
if is_logging_class(child):
child.recursively_set_parent(child_name, parent=self)
def ancestor_child_list(self, child_list=None):
"""Get the original ancestor of self, and the descending, linear list
of descendents' names leading up to (and including) self.
Args:
child_list (List[str]): in a multi-level nested
Logging* class, generate the list of children's names. This list
will be built by prepending our name and calling
ancestor_child_list() on self.parent.
Returns:
LoggingClass, List[str]: (ancestor, child_list) for self.full_name and
self.log_change support
"""
child_list = child_list or []
if self.parent:
child_list.insert(0, self.name)
return self.parent.ancestor_child_list(child_list=child_list)
else:
return self, child_list
def full_name(self):
"""Get the full name of self.
This will call self.ancestor_c |
ctSkennerton/BioSQL-Extensions | scripts/add_annotation_to_protein.py | Python | mit | 9,369 | 0.004483 | #!/usr/bin/env python
from __future__ import print_function
import sys
from BioSQL import BioSeqDatabase
from BioSQL import Loader
from common import standard_options, get_seqfeature_id_from_qv
import csv
class CustomDBLoader(Loader.DatabaseLoader):
"""This is a slightly modified version of Loader.DatabaseLoader
"""
def _load_seqfeature_qualifiers(self, qualifiers, seqfeature_id):
"""Insert the (key, value) pair qualifiers relating to a feature (PRIVATE).
Qualifiers should be a dictionary of the form:
{key : [value1, value2]}
Before insertion, each qualifier will | be checked to make sure that there
is no collision between current annotations, if there | is the rank of the
new features will be changed to prevent collisions
"""
tag_ontology_id = self._get_ontology_id('Annotation Tags')
for qualifier_key in qualifiers:
# Treat db_xref qualifiers differently to sequence annotation
# qualifiers by populating the seqfeature_dbxref and dbxref
# tables. Other qualifiers go into the seqfeature_qualifier_value
# and (if new) term tables.
if qualifier_key != 'db_xref':
qualifier_key_id = self._get_term_id(qualifier_key,
ontology_id=tag_ontology_id)
# now add all of the values to their table
entries = qualifiers[qualifier_key]
if not isinstance(entries, list):
# Could be a plain string, or an int or a float.
# However, we exect a list of strings here.
entries = [entries]
# check for any rows for this term_id in this seqfeature_id
# and return the max rank
sql = "SELECT max(rank) FROM seqfeature_qualifier_value" \
" WHERE term_id = %s AND seqfeature_id = %s"
qual_rank_start = self.adaptor.execute_and_fetchall(sql, \
(qualifier_key_id, seqfeature_id))[0][0]
if not qual_rank_start:
# if there are no rows
qual_rank_start = 1
for i, qualifier_value in enumerate(entries):
# -1 cause enumerate begins with 1 rather than 0
qual_value_rank = i + qual_rank_start - 1
#print("qual_rank_start", qual_rank_start, i, qual_rank_start)
qualifier_value = entries[qual_value_rank]
if qualifier_value != "":
sql = r"INSERT INTO seqfeature_qualifier_value "\
r" (seqfeature_id, term_id, rank, value) VALUES"\
r" (%s, %s, %s, %s)"
self.adaptor.execute(sql, (seqfeature_id,
qualifier_key_id,
qual_value_rank,
qualifier_value))
else:
# The dbxref_id qualifier/value sets go into the dbxref table
# as dbname, accession, version tuples, with dbxref.dbxref_id
# being automatically assigned, and into the seqfeature_dbxref
# table as seqfeature_id, dbxref_id, and rank tuples
self._load_seqfeature_dbxref(qualifiers[qualifier_key],
seqfeature_id)
def parse_input(infile, key):
mapping = {}
with open(infile) as fp:
reader = csv.DictReader(fp, delimiter="\t")
for row in reader:
mapping[(key, row[key])] = {}
for k, v in row.items():
if k != key:
try:
mapping[(key, row[key])][k].append(v)
except KeyError:
mapping[(key, row[key])][k] = [v]
return mapping
def parse_gff(infile):
from BCBio import GFF
mapping = {}
with open(infile) as fp:
for rec in GFF.parse(fp):
for feature in rec.features:
try:
protein_id = feature.qualifiers['ID'][0]
mapping[('ID', protein_id)] = feature.qualifiers
except KeyError:
try:
protein_id = feature.qualifiers['locus_tag'][0]
mapping[('locus_tag', protein_id)] = feature.qualifiers
except KeyError:
pass
return mapping
def add_annotation(db, mapping, isSeqfeatureAlready=False, replace=False):
    """Load qualifier/value annotations from *mapping* onto seqfeatures.

    *mapping* is ``(term_name, protein) -> {qualifier: values}`` as produced
    by parse_input/parse_gff.  When *isSeqfeatureAlready* is true the
    protein field already holds a numeric seqfeature_id; otherwise it is
    resolved through get_seqfeature_id_from_qv.  When *replace* is true,
    existing values for each qualifier (except db_xref) are deleted first.
    Exits the process with status 1 on any load failure.
    """
    # this may be a little tricky depending on how the database is set up
    # since a bioentry is equivalent to a genbank file but genbank files could
    # be created from a whole chromosome or from an individual protein.
    # If it is from a single protein then the protein ID will be the bioentry_id
    # but if it is from a whole genome then it will be a seqfeature_id
    db_loader = CustomDBLoader(db.adaptor, db.dbid, False)
    for (term_name, protein), values in mapping.items():
        # Start by looking for bioentries that have the name
        if not isSeqfeatureAlready:
            seqfeature_id = get_seqfeature_id_from_qv(db, term_name, protein)
        else:
            seqfeature_id = int(protein)
        # now add in our qualifier and value onto that seqfeature
        if replace:
            for qualifier in values.keys():
                if qualifier == 'db_xref':
                    # db_xref rows live in separate tables; deleting them
                    # here could orphan dbxref records, so refuse.
                    print('Cannot remove any current db_xref, you must do this manually for seqfeature {}'.format(seqfeature_id), file=sys.stderr)
                else:
                    db.adaptor.execute("delete from seqfeature_qualifier_value where term_id = \
                            (select term_id from term where ontology_id = \
                            (select ontology_id from ontology where name = 'Annotation Tags')\
                            and name = %s)\
                            and seqfeature_id = %s", (qualifier, seqfeature_id))
        try:
            db_loader._load_seqfeature_qualifiers(values, seqfeature_id)
        except Exception:
            # A bare 'except:' would also trap KeyboardInterrupt/SystemExit
            # and hid the real cause; catch Exception and show the traceback
            # before bailing out.
            import traceback
            traceback.print_exc(file=sys.stderr)
            print("Fatal Error: failed to load {} with values {}".format(protein, values), file=sys.stderr)
            print("Check the input file for possible errors", file=sys.stderr)
            sys.exit(1)
if __name__ == '__main__':
from getpass import getpass
parser = standard_options()
parser.add_argument('-D', '--database-name', default=None,
dest='dbname', help='name of the sub-database')
# only one of the input formats can be given, but at least one must be given
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-i', '--input', help='provide text file, tab delimited, where the first column is the name of the sequence feature and the following columns are the values of the annotation that you want to add. The first line must be a header line, which will be used as the name of the qualifier for the seqfeature.')
group.add_argument('-g', '--gff', help='provide a gff3 formatted file whose attributes will be added to existing sequence features. Only the information in the last column of the gff file will be utilized so you must make sure that either the ID or locus_tag qualifiers are present in the gff file. If both are present then ID will be preferred over locus_tag. If neither are present then the record will be skipped. Make sure that the ID or locus_tag are unique (and present) in the database otherwise the attributes will not be loaded.')
parser.add_argum |
richardcornish/django-itunespodcast | podcast/tests/__init__.py | Python | bsd-3-clause | 5,519 | 0.003443 | from __future__ import unicode_literals
import os
import datetime
from django.test import TestCase, Client, override_settings
from django.utils import timezone
from ..models import Show, Episode, Enclosure
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
@override_settings(PODCAST_SINGULAR=False)
class PodcastTestCase(TestCase):
fixtures = [
'podcast_category.json',
]
def setUp(self):
super(PodcastTestCase, self).setUp()
self.client = Client()
# show
show = Show.objects.create(
title='All About Everything',
slug='everything',
description='All About Everything is a show about everything. Each week we dive into any subject known to man and talk about it as much as we can. Look for our podcast in the Podcasts app or in the iTunes Store',
managing_editor='john.doe@example.com',
webmaster='',
ttl=60,
subtitle='A show about everything',
summary='',
author_name='John Doe',
author_email='',
owner_name='John Doe',
owner_email='john.doe@example.com',
copyright='John Doe & Family',
image='podcast/tests/static/everything/AllAboutEverything.jpg',
explicit=False,
block=False,
complete=False,
)
show.categories.add(1, 4, 62, 63, 67)
# episode 1
episode_1 = Episode.objects.create(
show=show,
title='Shake Shake Shake Your Spices',
slug='shake-shake-shake-your-spices',
description='This week we talk about <a href="https://itunes/apple.com/us/book/antique-trader-salt-pepper/id429691295?mt=11">salt and pepper shakers</a>, comparing and contrasting pour rates, construction materials, and overall aesthetics. Come and join the party!',
pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-08T12:00:00', '%Y-%m-%dT%H:%M:%S')),
summary='A short primer on table spices',
image='podcast/tests/static/everything/AllAboutEverything/Episode1.jpg',
explicit=False,
block=False,
)
# episode 2
episode_2 = Episode.objects.create(
show=show,
title='Socket Wrench Shootout',
slug='socket-wrench-shootout',
description='This week we talk about metric vs. Old English socket wrenches. Which one is better? Do you really need both? Get all of your answers here.',
pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-09T18:00:00', '%Y-%m-%dT%H:%M:%S')),
summary='Comparing socket wrenches is fun!',
author_name='Jane Doe',
image='podcast/tests/static/everything/AllAboutEverything/Episode2.jpg',
explicit=False,
block=False,
)
# episode 3
episode_3 = Episode.objects.create(
show=show,
title='The Best Chili',
slug='best-chili',
description='This week we talk about the best Chili in the world. Which chili is better?',
pub_date=timezone.make_awa | re(datetime.datetime.strptime('2016-03-10T09:00:00', '%Y-%m-%dT%H:%M:%S')),
summary='Jane and Eric',
author_name='Jane Doe',
image='podcast/tests/static/everything/AllAboutEverything/Episode3.jpg',
explicit=False,
block=False,
)
# episode 4
episode_4 = Episode. | objects.create(
show=show,
title='Red,Whine, & Blue',
slug='red-whine-blue',
description='This week we talk about surviving in a Red state if you are a Blue person. Or vice versa.',
pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-10T22:15:00', '%Y-%m-%dT%H:%M:%S')),
summary='Red + Blue != Purple',
author_name='Various',
image='podcast/tests/static/everything/AllAboutEverything/Episode4.jpg',
explicit=False,
block=False,
)
# enclosure 1
Enclosure.objects.create(
episode=episode_1,
file='podcast/tests/static/everything/AllAboutEverythingEpisode3.m4a',
type='audio/x-m4a',
cc=False,
)
# enclosure 2
Enclosure.objects.create(
episode=episode_2,
file='podcast/tests/static/everything/AllAboutEverythingEpisode2.mp4',
type='video/mp4',
cc=False,
)
# enclosure 3
Enclosure.objects.create(
episode=episode_3,
file='podcast/tests/static/everything/AllAboutEverythingEpisode2.m4v',
type='video/x-m4v',
cc=True,
)
# enclosure 4
Enclosure.objects.create(
episode=episode_4,
file='podcast/tests/static/everything/AllAboutEverythingEpisode4.mp3',
type='audio/mpeg',
cc=False,
)
def test_show_feed(self):
response = self.client.get(reverse('podcast:show_feed'))
with open(os.path.join(os.path.dirname(__file__), 'feed.xml'), 'r') as file_1:
xml_1 = file_1.read()
xml_2 = response.content.decode('utf-8').replace('http://testserverpodcast', 'http://testserver/podcast')
self.maxDiff = None
self.assertXMLEqual(xml_1, xml_2)
|
shankari/e-mission-server | emission/core/wrapper/section.py | Python | bsd-3-clause | 2,128 | 0.012688 | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import logging
import emission.core.wrapper.wrapperbase as ecwb
import emission.core.wrapper.motionactivity as ecwm
# TODO: Figure out whether we should parse the geojson back to a geojson object
class Section(ecwb.WrapperBase):
props = {"trip_id": ecwb.WrapperBase.Access.WORM, # *******the trip that this is part of
"start_ts": ecwb.WrapperBase.Access.WORM, # ****start UTC timestamp (in secs)
"start_local_dt": ecwb.WrapperBase.Access.WORM, # ********searchable datatime in local time of start location
"start_fmt_time": ecwb.WrapperBase.Access.WORM, # ******start formatted time (in timezone of point)
"end_ts": ecwb.WrapperBase.Access.WORM, # ******end UTC timestamp (in secs)
"end_local_dt": ecwb.WrapperBase.Access.WORM, | #***** searchable datetime in local time of end location
"end_fmt_time": ecwb.WrapperBase.Access.WORM, # ******end formatted time (in timezone of point)
"start_stop": ecwb.WrapperBase.Access.WORM, # _id of place object before this one
"end_stop": ecwb.WrapperBase.Access.WORM, # _id of place object after this one
"start_loc": ecwb.WrapperBase.Access.WORM, # location of start point in geojson format
| "end_loc": ecwb.WrapperBase.Access.WORM, # location of end point in geojson format
"duration": ecwb.WrapperBase.Access.WORM, # duration of the trip in secs
"sensed_mode": ecwb.WrapperBase.Access.WORM, # the sensed mode used for the segmentation
"source": ecwb.WrapperBase.Access.WORM} # the method used to generate this trip
enums = {"sensed_mode": ecwm.MotionTypes}
geojson = ["start_loc", "end_loc"]
nullable = ["start_stop", "end_stop"]
local_dates = ['start_local_dt', 'end_local_dt']
def _populateDependencies(self):
pass
|
postla/e2-gui | lib/python/Components/opkg.py | Python | gpl-2.0 | 1,535 | 0.033225 | import os
def enumFeeds(confdir='/etc/opkg'):
    """Yield the name of every feed configured under *confdir*.

    Scans '*-feed.conf' files; each line is expected to look like
    'src/gz <name> <url>' and the second whitespace-separated token
    (the feed name) is yielded.  A malformed line (IndexError) aborts the
    rest of that file and an unreadable file (IOError) is skipped, both
    silently — matching the original best-effort behaviour.

    *confdir* defaults to the original hard-coded '/etc/opkg', so existing
    callers are unaffected.
    """
    for fn in os.listdir(confdir):
        if fn.endswith('-feed.conf'):
            try:
                # 'with' guarantees the handle is closed; the original
                # open() without close leaked a file descriptor per feed.
                with open(os.path.join(confdir, fn)) as conf:
                    for feed in conf:
                        yield feed.split()[1]
            except IndexError:
                pass
            except IOError:
                pass
def enumPlugins(filter_start=''):
for feed in enumFeeds():
package = None
try:
for line in open('/var/lib/opkg/lists/%s' % feed, 'r'):
if line.startswith('Package:'):
package = line.split(":",1)[1].strip()
version = ''
description = ''
if package.startswith(filter_start) and not package.endswith('-dev') and not package.endswith('-staticdev') and not package.endswith('-dbg') and not package.endswith('-doc') and not package.endswith('-src') and not package.endswith('-po'):
continue
package = None
if package is None:
continue
| if line.startswith('Version:'):
version = line.split(":",1)[1].strip()
elif line.startswith('Description:'):
description = line.split(":",1)[1].strip()
elif description and li | ne.startswith(' '):
description += line[:-1]
elif len(line) <= 1:
d = description.split(' ',3)
if len(d) > 3:
# Get rid of annoying "version" and package repeating strings
if d[1] == 'version':
description = d[3]
if description.startswith('gitAUTOINC'):
description = description.split(' ',1)[1]
yield package, version, description.strip()
package = None
except IOError:
pass
if __name__ == '__main__':
    # Demo: list every installed/available package whose name starts with
    # 'enigma'.  print() call syntax works under both Python 2 and 3; the
    # original 'print p' statement was Python 2 only.
    for p in enumPlugins('enigma'):
        print(p)
|
yank-team/yank-server | auth/models.py | Python | apache-2.0 | 248 | 0.012097 | from django.db | import models
class YankUser(models.Model):
username = models.CharField(max_length=98, unique=True)
password_digest = models.CharField(max_l | ength=128)
api_key = models.CharField(max_length=128, null=True)
|
sergeimoiseev/othodi_code | old/mpl_msk_spb.py | Python | mit | 1,018 | 0.034381 | # -*- coding: utf-8 -*-
# http://matplotlib.org/basemap/users/examples.html
from mpl_toolkits.basemap import Basemap
import numpy as np
import matplotlib.pyplot as plt
# create new figure, axes instances.
fig=plt.figure()
ax=fig.add_axes([0.1,0.1,0.8,0.8])
# setup mercator map projection.
m = Basemap(llcrnrlon=0.,llcrnrlat=20.,urcrnrlon=80.,urcrnrlat=70.,\
rsphere=(6378137.00,6356752.3142),\
resolution='l',projection='merc',\
lat_0=55.,lon_0=37.,lat_ts=20.)
# nylat, nylon are lat/lon of New York
nylat = 55.7522200; nylon = 37.6155600
# lonlat, lonlon are lat/lon of Lo | ndon.
lonlat = 59.9386300; lonlon = 30.3141300
# draw great circle route between NY and London
m.drawgreatcircle(nylon,nylat,lonlon,lonlat,linewidth=2,color='b')
m.drawcoastlines()
m.fillcontinents()
# draw parallels
m.drawparallels(np.arange(-20 | ,0,20),labels=[1,1,0,1])
# draw meridians
m.drawmeridians(np.arange(-180,180,30),labels=[1,1,0,1])
ax.set_title('Great Circle from New York to London')
plt.show() |
izhan/Stream-Framework | stream_framework/serializers/aggregated_activity_serializer.py | Python | bsd-3-clause | 4,616 | 0.00065 | from stream_framework.activity import Activity
from stream_framework.exceptions import SerializationException
from stream_framework.serializers.activity_serializer import ActivitySerializer
from stream_framework.serializers.utils import check_reserved
from stream_framework.utils import epoch_to_datetime, datetime_to_epoch
from stream_framework.serializers.base import BaseAggregatedSerializer
import six
import re
class AggregatedActivitySerializer(BaseAggregatedSerializer):
'''
Optimized version of the Activity serializer for AggregatedActivities
v3group;;created_at;;updated_at;;seen_at;;read_at;;aggregated_activities
Main advantage is that it prevents you from increasing the storage of
a notification without realizing you are adding the extra data
Depending on dehydrate it will either dump dehydrated aggregated activities
or store the full aggregated activity
'''
#: indicates if dumps returns dehydrated aggregated activities
dehydrate = True
identifier = 'v3'
date_fields = ['created_at', 'updated_at', 'seen_at', 'read_at']
activity_serializer_class = ActivitySerializer
def dumps(self, aggregated):
self.check_type(aggregated)
activity_serializer = self.activity_serializer_class(Activity)
# start by storing the group
parts = [aggregated.group]
check_reserved(aggregated.group, [';;'])
# store the dates
for date_field in self.date_fields:
value = getattr(aggregated, date_field)
if value is not None:
# keep the milliseconds
epoch = '%.6f' % datetime_to_epoch(value)
else:
epoch = -1
parts += [epoch]
# add the activities serialization
serialize | d_activities = []
if self.dehydrate:
if no | t aggregated.dehydrated:
aggregated = aggregated.get_dehydrated()
serialized_activities = map(str, aggregated._activity_ids)
else:
for activity in aggregated.activities:
serialized = activity_serializer.dumps(activity)
# we use semicolons as delimiter, so need to escape
serialized = serialized.replace(";", "\;")
serialized_activities.append(serialized)
serialized_activities_part = ';'.join(serialized_activities)
parts.append(serialized_activities_part)
# add the minified activities
parts.append(aggregated.minimized_activities)
# stick everything together
serialized_aggregated = ';;'.join(map(str, parts))
serialized = '%s%s' % (self.identifier, serialized_aggregated)
return serialized
def loads(self, serialized_aggregated):
activity_serializer = self.activity_serializer_class(Activity)
try:
serialized_aggregated = serialized_aggregated[2:]
parts = serialized_aggregated.split(';;')
# start with the group
group = parts[0]
aggregated = self.aggregated_activity_class(group)
# get the date and activities
date_dict = dict(zip(self.date_fields, parts[1:5]))
for k, v in date_dict.items():
date_value = None
if v != '-1':
date_value = epoch_to_datetime(float(v))
setattr(aggregated, k, date_value)
# looks for ; not \;
unescaped_semicolons_regex = re.compile("(?<=[^\\\]);")
# write the activities
serializations = unescaped_semicolons_regex.split(parts[5])
if self.dehydrate:
activity_ids = list(map(int, serializations))
aggregated._activity_ids = activity_ids
aggregated.dehydrated = True
else:
activities = []
for s in serializations:
s = s.replace("\;", ";")
deserialized = activity_serializer.loads(s)
activities.append(deserialized)
aggregated.activities = activities
aggregated.dehydrated = False
# write the minimized activities
minimized = int(parts[6])
aggregated.minimized_activities = minimized
return aggregated
except Exception as e:
msg = six.text_type(e)
raise SerializationException(msg)
class NotificationSerializer(AggregatedActivitySerializer):
    """Variant of AggregatedActivitySerializer that serializes the full
    activities inline (dehydrate = False) instead of only their ids."""
    #: indicates if dumps returns dehydrated aggregated activities
    dehydrate = False
|
tuanquanghpvn/rest_exam | apps/category/mappers.py | Python | mit | 2,523 | 0.000793 | from marshmallow import (Schema, fields, post_load, validates_schema)
from contracts.category import (GetCategoryRequest, GetCategoryResponse,
DetailCategoryRequest, DetailCategoryResponse,
PostCategoryRequest, PutCategoryRequest, DeleteCategoryRequest)
from contracts.category import (CATEGORY_ID, NAME, SLUG)
from apps.base.mappers import (PagingRequestSchema, PagingResponseSchema, _check_include_fields)
# Request
class CategorySchema(Schema):
    """Base marshmallow schema shared by category requests/responses:
    an integer id plus name and slug strings."""
    id = fields.Int()
    name = fields.Str()
    slug = fields.Str()
class GetCategoryRequestSchema(PagingRequestSchema):
    """Request schema for listing categories; paging fields come from
    PagingRequestSchema."""
    @post_load
    def make_contract(self, data):
        # Turn the validated payload into a GetCategoryRequest contract.
        return GetCategoryRequest(**data)
class DetailCategoryRequestSchema(CategorySchema):
@post_load
def make_contract(self, data):
return DetailCategoryRequest(**data)
@validates_schema
def check_require_include_fields(self, data):
require_key = [CATEGORY_ID] |
include_key = [CATEGORY_ID]
_check_include_fields(data, require_key, include_key)
class PostCategoryRequestSchema(CategorySchema) | :
@post_load
def make_contract(self, data):
return PostCategoryRequest(**data)
@validates_schema
def check_require_include_fields(self, data):
require_key = [NAME, SLUG]
include_key = [NAME, SLUG]
_check_include_fields(data, require_key, include_key)
class PutCategoryRequestSchema(CategorySchema):
    """Request schema for updating a category."""
    @post_load
    def make_contract(self, data):
        # Turn the validated payload into a PutCategoryRequest contract.
        return PutCategoryRequest(**data)
    @validates_schema
    def check_require_include_fields(self, data):
        # CATEGORY_ID is required; NAME and SLUG may accompany it.
        # NOTE(review): exact enforcement semantics live in
        # apps.base.mappers._check_include_fields — confirm there.
        require_key = [CATEGORY_ID]
        include_key = [CATEGORY_ID, NAME, SLUG]
        _check_include_fields(data, require_key, include_key)
class DeleteCategoryRequestSchema(CategorySchema):
@post_load
def make_contract(self, data):
return DeleteCategoryRequest(**data)
@validates_schema
def check_require_include_fields(self, data):
require_key = [CATEGORY_ID]
include_key = [CATEGORY_ID]
_check_include_fields(data, require_key, include_key)
# Response
class GetCategoryResponseSchema(PagingResponseSchema):
results = fields.Nested(CategorySchema, many=True)
class DetailCategoryResponseSchema(CategorySchema):
pass
class PostCategoryResponseSchema(CategorySchema):
pass
class PutCategoryResponseSchema(CategorySchema):
pass
class DeleteCategoryResponseSchema(CategorySchema):
pass
|
robin1885/algorithms-exercises-using-python | source-code-from-author-book/Listings-for-Second-Edition/listing_6_20.py | Python | mit | 488 | 0.006148 | def percDo | wn(self,i):
while (i * 2) <= self.currentSize:
mc = self.minChild(i)
if self.heapList[i] > self.heapList[mc]:
tmp = self.heapList[i]
self.heapList[i] = self.heapList[mc]
self.heapList[mc] = tmp
i = mc
def minChild(self, i):
    """Return the index of the smaller child of the heap node at index *i*.

    Assumes a 1-based binary heap stored in self.heapList with
    self.currentSize live elements.  When the right child index falls
    beyond currentSize, the left child is the only candidate and is
    returned unconditionally.

    (The final return of the original listing contained a dataset
    extraction artifact ('i * 2 + | 1'); reconstructed as 'i * 2 + 1'.)
    """
    left = i * 2
    right = left + 1
    if right > self.currentSize:
        return left
    return left if self.heapList[left] < self.heapList[right] else right
|
diana-hep/femtocode | lang/femtocode/thirdparty/meta/asttools/visitors/copy_tree.py | Python | apache-2.0 | 1,255 | 0.003984 | '''
Created on Dec 12, 2011
@author: sean
'''
from . import Visitor
import ast
#FIXME: add tests
class CopyVisitor(Visitor):
'''
Copy only ast nodes and lists
'''
def visitDefault(self, node):
Node = type(node)
new_node = Node()
for _field in Node._fields:
if hasattr(node, _field):
field = getattr(node, _field)
if isinstance(field, (list, tuple)):
new_list = []
for item in field:
if isinstance(item, ast.AST):
new_item = self.visit(item)
else:
new_item = item
new_list.append(new_item)
|
setattr(new_node, _field, new_list)
elif isinstance(field, ast.AST):
setattr(new_node, _field, self.visit(field))
else:
setattr(new_node, _field, field)
for _at | tr in node._attributes:
if hasattr(node, _attr):
setattr(new_node, _attr, getattr(node, _attr))
return new_node
def copy_node(node):
    """Return a copy of *node* made by CopyVisitor: AST nodes and the
    lists that hold them are duplicated, while other field values
    (identifiers, constants, attributes) are shared with the original."""
    return CopyVisitor().visit(node)
|
Geheimorganisation/sltv | sltv/effect/video_effect.py | Python | gpl-2.0 | 1,698 | 0.002357 | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Holoscópio Tecnologia
# Author: Luciana Fujii Pontello <luciana@holoscopio.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gobject
import pygst
pygst.require("0.10")
import gst
from core import Effect
class VideoEffect(Effect):
def __init__(self, effect_name):
E | ffect.__init__(self)
self.convertion1 = gst.element_factory_make(
"ffmpegcolorspace", "effect_colorspace1"
)
self.add(self.convertion1)
self.convertion2 = gst.element_factory_make(
"ffmpegcolorspace", "effect_colorspace2"
)
self.add(self.convertion2)
self.effect_element | = gst.element_factory_make(effect_name, effect_name)
self.add(self.effect_element)
gst.element_link_many(
self.convertion1, self.effect_element, self.convertion2
)
self.sink_pad.set_target(self.convertion1.sink_pads().next())
self.src_pad.set_target(self.convertion2.src_pads().next())
|
mLewisLogic/foursquare | foursquare/tests/test_oauth.py | Python | mit | 444 | 0.004505 | #!/usr/bin/env python
# -*- | coding: UTF-8 -*-
# (c) 2020 Mike Lewis
import logging
log = logging.getLogger(__name__)
from . import BaseAuthenticationTestCase
import six
class OAuthEndpointTestCase(BaseAuthenticationTestCase):
def test_auth_url(self):
url = self.api.oauth.auth_url()
assert isinstance(url, six.string_types)
def test_get_token(se | lf):
# Honestly, not much we can do to test here
pass
|
bitmovin/bitmovin-python | bitmovin/resources/enums/h264_b_pyramid.py | Python | unlicense | 107 | 0 | import enum
class H264BPyramid(enum.Enum):
NONE = 'NONE'
STR | ICT = 'STRIC | T'
NORMAL = 'NORMAL'
|
asyncee/cookiecutter-django | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py | Python | bsd-3-clause | 1,701 | 0.01117 | {% if cookiecutter.use_celery == 'y' %}
import os
from celery import Celery
from django. | apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the | default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('{{cookiecutter.project_slug}}')
class CeleryConfig(AppConfig):
name = '{{cookiecutter.project_slug}}.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
{% if cookiecutter.use_sentry_for_error_reporting == 'y' -%}
if hasattr(settings, 'RAVEN_CONFIG'):
# Celery signal registration
from raven import Client as RavenClient
from raven.contrib.celery import register_signal as raven_register_signal
from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
raven_register_logger_signal(raven_client)
raven_register_signal(raven_client)
{%- endif %}
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request)) # pragma: no cover
{% else %}
# Use this as a starting point for your project with celery.
# If you are not using celery, you can remove this app
{% endif -%}
|
kitaro-tn/msgiver | msgiver/configure.py | Python | mit | 1,918 | 0.00365 | # -*- coding: utf-8 -*-
import os
import yaml
from six.moves import input
class Configure:
"""
msgiver configuration class
"""
CONF_FILE_PATH = os.path.join(os.environ.get("HOME"), ".msgiver.yml")
def __init__(self):
self.__conf = None
if os.path.exists(self.CONF_FILE_PATH):
with open(self.CONF_FILE_PATH) as config_file:
self.__conf = yaml.load(config_file.read())
def all(self):
return self.__conf
def slack(self):
if self.__conf is None:
return {'token': os.environ.get("SLACK_TOKEN"), 'channel': os.environ.get("MSGIVER_SLACK_DEFAULT_CHANNE | L"), | 'bot_icon': None}
else:
return self.__conf['slack']
def generate(self):
config_data = {}
config_data["slack"] = self.__input_slack()
with open(self.CONF_FILE_PATH, "w") as config_file:
yaml.dump(config_data, config_file, default_flow_style=False)
return config_data
def __input_slack(self):
slack_conf = self.slack()
while True:
token = input("Please type for Slack api token. [required] %s : " % slack_conf["token"])
if not token:
if slack_conf["token"]:
token = slack_conf["token"]
break
else:
continue
else:
break
default_chanel = input("Please type for default channel. [not required] %s : " % slack_conf["channel"])
if not default_chanel and slack_conf["channel"]:
default_chanel = slack_conf["channel"]
bot_icon = input("Please type for image url. [not required] %s : " % slack_conf["bot_icon"])
if not bot_icon and slack_conf["bot_icon"]:
bot_icon = slack_conf["bot_icon"]
return { "token": token, "channel": default_chanel, "bot_icon": bot_icon }
|
3D-e-Chem/rdkit-react | setup.py | Python | apache-2.0 | 639 | 0.001565 | from setuptools import setup, find_packages
exec(open('react/version.py').read())
setup(
name='reac | t',
description='Generate fragments of a molecule using smirks',
version=__version__,
packages=find_packages(),
url='https://github.com/3D-e-Chem/python-modified-tanimoto',
author='Stefan Verhoeven',
author_email='s.verhoeven@esciencecenter.nl',
install_requires=['nose', 'coverage', 'mock'],
entry_points={
'console_scripts': [
'react=react.script:main',
],
},
license='Apache',
classifiers=[
'License :: OSI Approved :: Apache | Software License'
]
) |
mahim97/zulip | zerver/tests/test_typing.py | Python | apache-2.0 | 11,021 | 0.002359 | # -*- coding: utf-8 -*-
import ujson
from typing import Any, Mapping, List
from zerver.lib.test_helpers import tornado_redirected_to_list, get_display_recipient
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.models import get_realm, get_user
class TypingNotificationOperatorTest(ZulipTestCase):
    """Validation of the 'op' parameter of the /api/v1/typing endpoint:
    it must be present and must be either 'start' or 'stop'."""
    def test_missing_parameter(self) -> None:
        """
        Sending a typing notification without the 'op' parameter fails.
        """
        sender = self.example_email("hamlet")
        recipient = self.example_email("othello")
        result = self.client_post('/api/v1/typing', {'to': recipient},
                                  **self.api_auth(sender))
        self.assert_json_error(result, 'Missing \'op\' argument')
    def test_invalid_parameter(self) -> None:
        """
        Sending a typing notification with an invalid 'op' value fails.
        """
        sender = self.example_email("hamlet")
        recipient = self.example_email("othello")
        result = self.client_post('/api/v1/typing', {'to': recipient, 'op': 'foo'},
                                  **self.api_auth(sender))
        self.assert_json_error(result, 'Invalid \'op\' value (should be start or stop)')
class TypingNotificationRecipientsTest(ZulipTestCase):
def test_missing_recipient(self) -> None:
"""
Sending typing notification without recipient fails
"""
sender = self.example_email("hamlet")
result = self.client_post('/api/v1/typing', {'op': 'start'},
**self.api_auth(sender))
self.assert_json_error(result, 'Missing parameter: \'to\' (recipient)')
def test_invalid_recipient(self) -> None:
"""
Sending typing notification to invalid recipient fails
"""
sender = self.example_email("hamlet")
invalid = 'invalid email'
result = self.client_post('/api/v1/typing', {'op': 'start', 'to': invalid},
**self.api_auth(sender))
self.assert_json_error(result, 'Invalid email \'' + invalid + '\'')
def test_single_recipient(self) -> None:
"""
Sending typing notification to a single recipient is successful
"""
sender = self.example_user('hamlet')
recipient = self.example_user('othello')
expected_recipients = set([sender, recipient])
expected_recipient_emails = set([user.email for user in expected_recipients])
expected_recipient_ids = set([user.id for user in expected_recipients])
events = [] # type: List[Mapping[str, Any]]
with tornado_redirected_to_list(events):
result = self.client_post('/api/v1/typing', {'to': recipient.email,
'op': 'start'},
**self.api_auth(sender.email))
self.assert_json_success(result)
self.assertEqual(len(events), 1)
event = events[0]['event']
event_recipient_emails = set(user['email'] for user in event['recipients'])
event_user_ids = set(events[0]['users'])
event_recipient_user_ids = set(user['user_id'] for user in event['recipients'])
self.assertEqual(expected_recipient_ids, event_recipient_user_ids)
self.assertEqual(expected_recipient_ids, event_user_ids)
self.assertEqual(event['sender']['email'], sender.email)
self.assertEqual(event_recipient_emails, expected_recipient_emails)
self.assertEqual(event['type'], 'typing')
self.assertEqual(event['op'], 'start')
def test_multiple_recipients(self) -> None:
"""
Sending typing notification to a single recipient is successful
"""
sender = self.example_user('hamlet')
recipient = [self.example_user('othello'), self.example_user('cordelia')]
expected_recipients = set(recipient) | set([sender])
expected_recipient_emails = set([user.email for user in expected_recipients])
expected_recipient_ids = set([user.id for user in expected_recipients])
events = [] # type: List[Mapping[str, Any]]
with tornado_redirected_to_list(events):
result = self.client_post('/api/v1/typing', {'to': ujson.dumps([user.email for user in recipient]),
'op': 'start'},
**self.api_auth(sender.email))
self.assert_json_success(result)
self.assertEqual(len(events), 1)
event = events[0]['event']
event_recipient_emails = set(user['email'] for user in event['recipients'])
event_user_ids = set(events[0]['users'])
event_recipient_user_ids = set(user['user_id'] for user in event['recipients'])
self.assertEqual(expected_recipient_ids, event_recipient_user_ids)
self.assertEqual(expected_recipient_ids, event_user_ids)
self.assertEqual(event['sender']['email'], sender.email)
self.assertEqual(event_recipient_emails, expected_recipient_emails)
self.assertEqual(event['type'], 'typing')
self.assertEqual(event['op'], 'start')
class TypingStartedNotificationTest(ZulipTestCase):
def test_send_notification_to_self_event(self) -> None:
"""
Sending typing notification to yourself
is successful.
"""
user = self.example_user('hamlet')
email = user.email
expected_recipient_emails = set([email])
expected_recipient_ids = set([user.id])
events = [] # type: List[Mapping[str, Any]]
with tornado_redirected_to_list(events):
result = self.client_post('/api/v1/typing', {'to': email,
'op': 'start'},
**self.api_auth(email))
self.assert_json_success(result)
self.assertEqual(len(events), 1)
event = events[0]['event']
event_recipient_emails = set(user['email'] for user in event['recipients'])
event_user_ids = set(events[0]['users'])
event_recipient_user_ids = set(user['user_id'] for user in event['recipients'])
self.assertEqual(expected_recipient_ids, event_recipient_user_ids)
self.assertEqual(expected_recipient_ids, event_user_ids)
self.assertEqual(event_recipient_emails, expected_recipient_emails)
self.assertEqual(event['sender']['email'], email)
self.assertEqual(event['type'], 'typing')
self.assertEqual(event['op'], 'start')
def test_send_notification_to_another_user_event(self) -> None:
"""
Sending typing notification to another user
is successful.
"""
sender = self.example_user('hamlet')
recipient = self.example_user('othello')
expected_recipients = set([sender, recipient])
expected_recipient_emails = set([user.email for user in expected_recipients])
expected_recipient_ids = set([user.id for user in expected_recipients])
events = [] # type: List[Mapping[str, Any]]
with tornado_redirected_to_list(events):
result = self.client_post('/api/v1/typing', {'to': recipient.email,
'op': 'start'},
**self.api_auth(sender.email))
self.assert_json_success(result)
self.assertEqual(len(events), 1)
event = events[0]['event']
event_recipient_emails = | set(user['email'] for user in event['recipients'])
event_user_ids = set(events[0]['users'])
event_recipient_user_ids = set(user['user_id'] for user in event['recipients'])
self.assertEqual(expected_recipient_ids, event_recipient_user_ids)
self.assertEqual(expected_recipient_ids, even | t_user_ids)
self.assertEqual(event_recipient_emails, expected_recipient_emails)
self.assertEqual(event['sender']['email'], sender.email)
self.assertEqual(event['type'], 'typing')
self.assertEqual(event['op'], 'start')
class StoppedTypingNotificationTest(ZulipTestCase):
def test_send_notification_to_self |
plotly/plotly.py | packages/python/plotly/plotly/validators/bar/hoverlabel/_alignsrc.py | Python | mit | 403 | 0.002481 | import _plotly_utils.basevalidators
class AlignsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(s | elf, plotly_name="alignsrc", parent_name="bar.hoverlabel", **kwargs):
super(AlignsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type | ", "none"),
**kwargs
)
|
jvictor0/TiaraBoom | tiara/data_gatherer.py | Python | mit | 46,191 | 0.00656 |
import database as db
import vocab as v
import conversation
import random
import time
import json
import datetime
import os
import math
import union_find
from twitter.status import Status
from twitter.user import User
from evidence import data_manager
from util import *
import threading
import tiara_ddl
import traceback
MODES = 4
AFFLICT_FOLLOWBACK = 1
AFFLICT_DELETER = 2
AFFLICT_PROTECTED = 3
AFFLICT_SUSPENDED = 4
AFFLICT_BLOCKER = 5
AFFLICT_DEACTIVATED = 6
AFFLICT_NON_ENGLISH = 7
UNFOLLOW_REASON_NO_RESPOND = 1
class FakeGData:
def __init__(self, name):
self.myName = name
self.logger = logging.getLogger('TiaraBoom')
def TraceWarn(self,a):
self.logger.warn("(FakeGData) %s" % a)
def TraceInfo(self,a):
self.logger.info("(FakeGData) %s" % a)
def ApiHandler(self):
assert False, "Dont twitter ops with FakeGData"
def MakeFakeDataMgr(name = ""):
abs_prefix = os.path.join(os.path.dirname(__file__), "../data")
with open(abs_prefix + '/config.json','r') as f:
confs = json.load(f)
dbhost = confs["dbHost"]
return DataManager(FakeGData(name), dbhost, no_ddl = True)
class DataManager:
def __init__(self, g_data, host, no_ddl=False):
self.con = None
self.g_data = g_data
self.g_data.dbmgr = self
self.shard = 0
self.con = db.ConnectToMySQL(host=host)
self.con.query("create database if not exists tiaraboom")
self.con.query("use tiaraboom")
self.con.query('set names "utf8mb4" collate "utf8mb4_bin"')
self.user_id = None
self.horde = {}
self.hordeUpserts = {}
self.needsUpdateBotTweets = False
self.xact = False
self.timedQueryLimit = 1.0
self.extra_expr = "1"
self.evidence_mgr = None
if no_ddl:
return
self.apiHandler = g_data.ApiHandler()
self.DDL()
self.updatedUserDocumentFreq = False
try:
self.con.query("insert into bots values (%d,'%s', 10)" % (self.GetUserId(), self.g_data.myName))
except Exception as e:
assert e[0] == 1062, e # dup key
def GetEvidenceManager(self, infile=None):
if self.evidence_mgr is None:
self.evidence_mgr = data_manager.EvidenceDataMgr(self.con, self, infile=infile)
self.SetExtraAggLikeList(self.evidence_mgr.ctx.WeightedTags())
return self.evidence_mgr
def Begin(self):
assert not self.xact
self.xact = True
self.con.query("begin")
def Commit(self):
assert self.xact
self.xact = False
try:
for k,val in self.horde.iteritems():
if k in self.hordeUpserts:
values = ",".join(["(%s)" % ",".join([str(a) for a in ki + (vi,)])
for ki,vi in val.iteritems()])
q = "insert into %s values %s" % (k, values)
if self.hordeUpserts[k] is not None:
q = q + " on duplicate key update " + self.hordeUpserts[k]
else:
q = "insert into %s values %s" % (k,",".join(val))
self.con.query(q)
self.con.query("commit")
self.horde = {}
self.hordeUpserts = {}
if self.needsUpdateBotTweets:
self.UpdateConversationIds()
self.needsUpdateBotTweets = False
except Exception as e:
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
self.horde = {}
self.hordeUpserts = {}
self.needsUpdateBotTweets = False
self.con.query("rollback")
raise e
def Rollback(self):
assert self.xact
self.xact = False
self.horde = {}
self.needsUpdateBotTweets = False
self.hordeUpserts = {}
self.con.query("rollback")
def Horde(self, table, values, *parameters):
parameters = [unidecode(p) if isinstance(p,unicode) else p for p in parameters]
if parameters != None and parameters != ():
values = values % tuple([self.con._db.escape(p, self.con.encoders) for p in parameters])
if table not in self.horde:
self.horde[table] = []
self.horde[table].append(values)
def HordeUpsert(self, table, key, value, upsert):
assert isinstance(key,tuple), key
assert isinstance(value,int), value
self.hordeUpserts[table] = upsert
if table not in self.horde:
self.horde[table] = {}
if key not in self.horde[table]:
self.horde[table][key] = value
else:
self.horde[table][key] += value
def TimedQuery(self, q, name, *largs):
t0 = time.time()
result = self.con.query(q, *largs)
if time.time() - t0 > self.timedQueryLimit:
self.g_data.TraceInfo("%s took %f secs" % (name, time.time() - t0))
return result
def ApiHandler(self):
return self.apiHandler
def DDL(self):
tiara_ddl.TiaraCreateTables(self.con)
self.views = tiara_ddl.TiaraCreateViews(self.con)
def UpdateTweets(self):
if self.con is None:
return
max_id = None
count = 0
while True:
count += 1
assert count <= 30
statuses = self.ApiHandler().ShowStatuses(screen_name=self.g_data.myName,
max_id=max_id)
if statuses is None:
self.g_data.TraceWarn("Failed to update tweets")
return None
if len(statuses) == 0:
break
max_id = min([s.GetId() for s in statuses]) - 1
def ProcessUnprocessedTweets(self):
tweets = self.GetUnprocessed()
self.g_data.TraceInfo("There are currently %d unprocessed tweets" % len(tweets))
count = min(30,len(tweets))
random.shuffle(tweets)
for i in xrange(count):
self.InsertTweetById(tweets[i]["parent"])
# NOTE: this requires running RepareDocFreq eventually
#
def PurgeTweet(self, uid, tid, body):
assert False, "broken by columnarize user_token_frequency"
self.Begin()
try:
assert self.con.query("delete from tweets where user_id = %d and id = %d" % (uid,tid)) == 1
assert self.con.query("delete from tweets_storage where user_id = %d and id = %d" % (uid,tid)) == 1
tokens = [self.Feat2Id(a) for a in set(v.Vocab(self.g_data).Tokenize(body))]
for t in tokens:
assert self.con.query("update user_token_frequency set count = count - 1 where user_id = %d and token = %s" % (uid, t)) == 1
except Exception as e:
self.g_data.TraceWarn("Rollback in PurgeTweet")
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
self.Rollback()
raise e
self.Commit()
def InsertTweetById(self, tid):
s = self.ApiHandler().ShowStatus(tid, cache=False)
if not s is None:
self.InsertTweet(s)
return True
else:
if self.ApiHandler().errno in [34,179,63,144]: # | not found, not allowed, suspended, not existing
self.InsertUngettable(tid, self.ApiHandler().errno)
return True
self.g_data.Trac | eWarn("Unhandled error in InsertTweetById %d" % self.ApiHandler().errno)
assert False
return None
def InsertUser(self, user):
uid = str(user.GetId())
sn = "'%s'" % user.GetScreenName().encode("utf8")
folls = str(user.GetFollowersCount())
friens = str(user.GetFriendsCount())
language = "'%s'" % user.GetLang()
im_url = user.profile_image_url
values = ",".join([uid, sn, folls, friens, language, "NOW()", "'%s'" % im_url])
updates = ["num_followers = %s" % folls,
"num_friends = %s" % friens,
|
BorgERP/borg-erp-6of3 | verticals/garage61/acy_partner_vehicle/invoice.py | Python | agpl-3.0 | 1,362 | 0.002937 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2010 Acysos S.L. (http://acysos.com) All Rights Reserved.
# Ignacio Ibeas <ignacio@acysos.com>
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu. | org/licenses/>.
#
##############################################################################
from osv import osv, fields
import tools
import os
| # Sale order
class account_invoice(osv.osv):
_inherit = 'account.invoice'
_columns = {
'vehicle_id': fields.many2one('res.partner.vehicle', 'Vehicle', readonly=True, states={'draft': [('readonly', False)]}, required=False),
}
account_invoice() |
josiahhardacre/Week-Four-Assignment | pigify.py | Python | mit | 562 | 0.019573 | #
# File Header
#
# Define vowels
vowels = "aeiouAEIOU" #I solved the case sensitivity by adding the vowels in both lowercase and uppercase.
# Ask for word
word = input("Please enter a word: ")
if (originalwo | rd[0] in vowels):
print((originalword + endofvowel).capitalize())
else:
print((originalword[1:] + originalword[0] + endofconsonant).capitalize())
# Loop through word, one letter at a time
for letter in word:
if letter in vowels:
pig = word + "yay"
else:
# False? | Consonant
pig = word[1:] + word[0] + "ay"
print(pig)
|
leosartaj/autosign | tests/test_removeSign.py | Python | mit | 1,228 | 0.004886 | #!/usr/bin/env python2
##
# autosign
# https://github.com/leosartaj/autosign.git
#
# copyright (c) 2014 sartaj singh
# licensed under the mit license.
##
import unittest
import os, shutil
import helper
from autosign.main import removeSign, isSign
from autosign.exce import UnsignedError
class TestremoveSign(unittest.TestCase):
"""
tests the removeSign function in main module
"""
def setUp(self):
self.dire = os.path.dirname(__file__)
self.signedfile = os.path.join(self.dire, 'testData/toBeSigned.py')
self.signed = os.path.join(self.dire, 'testData/test_signedfile.py')
shu | til.copyfile(self.signedfile, self.signed)
self.unsigned = os.path.join(self.dire, 'testData/test_unsignedfile.py')
helper.newFile(self.unsigned)
| helper.readrc(self)
def test_remove_from_unsigned_file(self):
self.assertRaises(UnsignedError, removeSign, self.unsigned, self.options_py)
def test_remove_from_signed_file(self):
self.assertTrue(isSign(self.signed, self.options_py))
removeSign(self.signed, self.options_py)
self.assertFalse(isSign(self.signed, self.options_py))
def tearDown(self):
os.remove(self.unsigned)
|
Gebesa-Dev/Addons-gebesa | mrp_segment/models/procurement_order.py | Python | agpl-3.0 | 327 | 0 | # -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
related_segment = fields.Char(
strin | g='Rela | tad Segment',
default='',
)
|
open-rnd/ros3d-www | ros3dui/system/camera.py | Python | mit | 3,863 | 0.001812 | #
# Copyright (c) 2015 Open-RnD Sp. z o.o.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import dbus
import glib
from dbus.mainloop.glib import DBusGMainLoop
import logging
_log = logging.getLogger(__name__)
class CameraManagerError(Exception):
pass
class PropertiesWrapper(object):
def __init__(self, busobj):
_log.debug('properties wrapper for %s @ %s',
busobj.dbus_interface, busobj.object_path)
self.busobj = busobj
def __getattr__(self, key):
_log.debug('get property %s', key)
return self.busobj.Get(self.busobj.dbus_interface,
key,
dbus_interface=dbus.PROPERTIES_IFACE)
class CameraManager(object):
CM_SERVICE_NAME = 'org.ros3d.CameraController'
CM_SERVICE_PATH = '/org/ros3d/controller'
CM_MANAGER_IFACE = 'org.ros3d.CameraController'
CM_DEVICE_IFACE = 'org.ros3d.Camera'
def __init__(self):
self.bus = dbus.SystemBus()
# proxy to camera controller iface
self.cm = None
def _connect(self):
cmobj = self.bus.get_object(self.CM_SERVICE_NAME,
self.CM_SERVICE_PATH)
self.cm = dbus.Interface(cmobj, self.CM_MANAGER_IFACE)
def _get_bus_iface(self, devpath, iface):
"""Get proxy to interface and proxy for accessing interface properties
for bus object at path `devpath` and interface `iface`
"""
devobj = self.bus.get_object(self.CM_SERVICE_NAME,
devpath)
dev = dbus.Interface(devobj, iface)
return dev, PropertiesWrapper(dev)
def _get_device(self, devpath):
"""Proxy to device interface and it's properties"""
return self._get_bus_iface(devpath,
self.CM_DEVICE_IFACE)
def get_details(self):
try:
self._connect()
assert self.cm != None
devices = self.cm.listCameras()
_log.debug('devices: %s', devices)
camera_data = []
for devpath in devices:
_, props = self._get_device(devpath)
_log.debug('dev id: %s', props.Id)
| _log.debug('device state: %d', p | rops.State)
cam = dict(name=props.Id, value=props.State)
camera_data.append(cam)
except dbus.exceptions.DBusException, error:
_log.error('camera controller service unavaialble, error: %s', error)
raise CameraManagerError('Camera Controller service unavailable')
return camera_data
def get_camera_manager():
"""Get an instance of NetworkManagerProvider"""
glib.threads_init()
dbus.mainloop.glib.threads_init()
DBusGMainLoop(set_as_default=True)
return CameraManager()
|
zenoss/ZenPacks.community.Cfengine | ZenPacks/community/Cfengine/modeler/plugins/community/cfenginemodeler.py | Python | gpl-2.0 | 2,493 | 0.010429 | import re, os
from Products.ZenUtils.Utils import zenPath
from Products.DataCollector.plugins.DataMaps import MultiArgs
from Products.DataCollector.plugins.CollectorPlugin import PythonPlugin
class cfenginemodeler(PythonPlugin):
"""
Parse the zCfengineComplianceFile to get the compliance status for devices.
"""
deviceProperties = PythonPlugin.deviceProperties + ('zCfengineComplianceFile', )
# 62.109.39.157,/Server/Linux,97
# 62.109.39.156,/Server/Linux,96
# 128.39.89.233,/Server/Solaris,73
# 62.109.39.155,/Server/Darwin,95
# 62.109.39.151,/Server/Windows,19
# 62.109.39.152,/Ping,92
# 62.109.39.150,/Ping,80
def findPath(self):
path = []
for p in __file__.split(os.sep):
if p == 'modeler': break
path.append(p)
return os.sep.join(path)
#get the results we're looking for
def collect(self, device, log):
log.info('Parsing cfengine client list for device %s' % device.id)
compliancefile = device.zCfengineComplianceFile
log.debug('compliancefile %s' % compliancefile)
try:
output = open(compliancefile, 'r')
except IOError:
log.error('Can\'t open %s for reading.' % compliancefile)
return None
results = output.read()
output.close()
if results is None or output == '':
log.info('cfengine zCfengineComplianceFile: Unable to connect or denied access?')
return None
return resul | ts
#push the results into the model
def process(self, device, results, log):
log.info('Processing cfengine client list for device %s' % device.id)
self.modname = 'ZenPacks.community.Cfengine.CfengineClient'
self.relname = "cfengineclients"
serverId = device.getId()
rm = self.relMap()
rlines = results.split("\n")
for line in rlines:
om = self.objectMap()
i | f re.search(',', line):
om.cfcDisplayName, om.cfcDeviceClass, value = line.split(',')
if om.cfcDeviceClass == "any":
om.cfcDeviceClass == "/Discovered"
om.cfcCompliance = int(value)
log.debug('Collecting cfengine client list for device %s: Found client = %s' % (device.id,om.cfcDisplayName))
om.id = self.prepId(om.cfcDisplayName)
om.setCfengineClient = MultiArgs(om.id, om.cfcDeviceClass, serverId)
rm.append(om)
log.debug(rm)
return rm
|
eta4ever/dimmer485 | host/test2.py | Python | gpl-3.0 | 523 | 0.022945 | from comm485 import Conn485
from device485 import Device
import time
conn = Conn485()
time | .sleep(0.1)
# led1 = Device("led1","pwm")
# led1.init_as_hardware(60, [0,0], conn)
enc1 = Device("enc1", "encoder", 1)
enc1.init_as_hardware(50, [0,0], conn)
# dummy = input(">")
try:
while True:
# time.sleep(0.1)
# print(led1.write_registers([100,1],conn))
# time | .sleep(0.1)
# print(led1.write_registers([200,0],conn))
time.sleep(0.1)
enc1.read_registers(conn)
print (enc1.get_registers())
finally:
del conn |
trigger-happy/conan-packages | libharu/conanfile.py | Python | mit | 1,457 | 0.002059 | from conans import ConanFile, AutoToolsBuildEnvironment, tools
import os
class LibHaruConn | (ConanFile):
name = "libharu"
version = "2.3.0"
license = "ZLIB https://github.com/libharu/libharu/bl | ob/master/LICENCE"
url = "https://github.com/trigger-happy/conan-packages"
description = "C library for generating PDF documents"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
generators = "cmake"
def source(self):
pkgLink = 'https://github.com/libharu/libharu/archive/RELEASE_2_3_0.tar.gz'
self.run("curl -JOL " + pkgLink)
self.run("tar xf libharu-RELEASE_2_3_0.tar.gz")
def build(self):
env_build = AutoToolsBuildEnvironment(self)
install_prefix=os.getcwd()
with tools.chdir("libharu-RELEASE_2_3_0"):
with tools.environment_append(env_build.vars):
self.run("touch include/config.h.in")
self.run("aclocal")
self.run("libtoolize")
self.run("automake --add-missing")
self.run("autoconf")
self.run("./configure --prefix={0}".format(install_prefix))
self.run("make install")
def package(self):
self.copy("lib/*", dst="lib", keep_path=False)
self.copy("include/*", dst=".", keep_path=True)
def package_info(self):
self.cpp_info.libs = ["hpdf"]
|
tadhg-ohiggins/regulations-parser | regparser/tree/depth/derive.py | Python | cc0-1.0 | 5,957 | 0 | from collections import namedtuple
from constraint import Problem
from regparser.tree.depth import markers, rules
from regparser.tree.depth.pair_rules import pair_rules
from regparser.tree.struct import Node
# A paragraph's type, index, depth assignment
ParAssignment = namedtuple('ParAssignment', ('typ', 'idx', 'depth'))
class Solution(object):
"""A collection of assignments + a weight for how likely this solution is
(after applying heuristics)"""
def __init__(self, assignment, weight=1.0):
self.weight = weight
self.assignment = []
if isinstance(assignment, list):
self.assignment = assignment
else: # assignment is a dict (as returned by constraint solver)
for i in range(len(assignment) // 3): # for (type, idx, depth)
self.assignment.append(
ParAssignment(assignment['type' + str(i)],
assignment['idx' + str(i)],
assignment['depth' + str(i)]))
def copy_with_penalty(self, penalty):
"""Immutable copy while modifying weight"""
sol = Solution([], self.weight * (1 - penalty))
sol.assignment = self.assignment
return sol
def __iter__(self):
return iter(self.assignment)
def pretty_str(self):
return "\n".join(" " * 4 * par.depth + par.typ[par.idx]
for par in self.assignment)
def _compress_markerless(marker_list):
"""Remove repeated MARKERLESS markers. This will speed up depth
computations as these paragraphs are redundant for its purposes"""
result = []
saw_markerless = False
for marker in marker_list:
if not Node.is_markerless_label([marker]):
saw_markerless = False
result.append(marker)
elif not saw_markerless:
saw_markerless = True
result.append(marker)
return result
def _decompress_markerless(assignment, marker_list):
"""Now that we have a specific solution, add back in the compressed
MARKERLESS markers."""
result = {}
saw_markerless = False
a_idx = -1 # idx in the assignment dict
for m_idx, marker in enumerate(marker_list):
if not Node.is_markerless_label([marker]):
saw_markerless = False
a_idx += 1
elif not saw_markerless:
saw_markerless = True
a_idx += 1
result['type{0}'.format(m_idx)] = assignment['type{0}'.format(a_idx)]
result['idx{0}'.format(m_idx)] = assignment['idx{0}'.format(a_idx)]
result['depth{0}'.format(m_idx)] = assignment['depth{0}'.format(a_idx)]
return result
def derive_depths(original_markers, additional_constraints=None):
"""Use constraint pro | gramming to derive the paragraph depths associated
with a list of paragraph markers. Additional constraints (e.g. expected
marker types, etc.) can also be added. Such constraints are functions of
tw | o parameters, the constraint function (problem.addConstraint) and a
list of all variables"""
if additional_constraints is None:
additional_constraints = []
if not original_markers:
return []
problem = Problem()
marker_list = _compress_markerless(original_markers)
# Depth in the tree, with an arbitrary limit of 10
problem.addVariables(["depth" + str(i) for i in range(len(marker_list))],
range(10))
# Always start at depth 0
problem.addConstraint(rules.must_be(0), ("depth0",))
all_vars = []
for idx, marker in enumerate(marker_list):
type_var = "type{0}".format(idx)
depth_var = "depth{0}".format(idx)
# Index within the marker list. Though this variable is redundant, it
# makes the code easier to understand and doesn't have a significant
# performance penalty
idx_var = "idx{0}".format(idx)
typ_opts = [t for t in markers.types if marker in t]
idx_opts = [i for t in typ_opts for i in range(len(t))
if t[i] == marker]
problem.addVariable(type_var, typ_opts)
problem.addVariable(idx_var, idx_opts)
problem.addConstraint(rules.type_match(marker), [type_var, idx_var])
all_vars.extend([type_var, idx_var, depth_var])
if idx > 0:
pairs = all_vars[3 * (idx - 1):]
problem.addConstraint(pair_rules, pairs)
if idx > 1:
pairs = all_vars[3 * (idx - 2):]
problem.addConstraint(rules.triplet_tests, pairs)
# separate loop so that the simpler checks run first
for idx in range(1, len(marker_list)):
# start with the current idx
params = all_vars[3 * idx:3 * (idx + 1)]
# then add on all previous
params += all_vars[:3 * idx]
problem.addConstraint(rules.continue_previous_seq, params)
# @todo: There's probably efficiency gains to making these rules over
# prefixes (see above) rather than over the whole collection at once
problem.addConstraint(rules.same_parent_same_type, all_vars)
for constraint in additional_constraints:
constraint(problem.addConstraint, all_vars)
solutions = []
for assignment in problem.getSolutionIter():
assignment = _decompress_markerless(assignment, original_markers)
solutions.append(Solution(assignment))
return solutions
def debug_idx(marker_list, constraints=None):
"""Binary search through the markers to find the point at which
derive_depths no longer works"""
if constraints is None:
constraints = []
working, not_working = -1, len(marker_list)
while working != not_working - 1:
midpoint = (working + not_working) // 2
solutions = derive_depths(marker_list[:midpoint + 1], constraints)
if solutions:
working = midpoint
else:
not_working = midpoint
return not_working
|
david-ragazzi/nupic | examples/opf/clients/hotgym/simple/model_params.py | Python | gpl-3.0 | 8,905 | 0.002021 |
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
MODEL_PARAMS = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [('consumption', 'sum')],
'hours': 1,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalMultiStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Include the encoders we use
'encoders': {
u'consumption': {
'fieldname': u'consumption',
'resolution': 0.88,
'seed': 1,
'name': u'consumption',
'type': 'RandomDistributedScalarEncoder',
},
'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (21, 1),
'type': 'DateEncoder'},
'timestamp_weekend': { 'fieldname': u'timestamp',
'name': u'timestamp_weekend',
'type': 'DateEncoder',
'weekend': 21}
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
# Spatial Pooler implementation selector.
# Options: 'py', 'cpp' (speed optimized, new)
'spatialImp' : 'cpp',
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
# What percent of the columns's receptive field is available
# for potential synapses.
'potentialPct': 0.85,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10.
'synPermConnected': 0.1,
'synPermActiveInc': 0.04,
'synPermInactiveDec': 0.005,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# | The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selecto | r (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 20,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
|
fcalo/bbqtv | bbqtv/services/db.py | Python | gpl-3.0 | 1,892 | 0.016913 | from pymongo import MongoClient
from datetime import datetime
class DB(object):
def __init__(self):
pass
def init_app(self, app):
self.db = MongoClient(host = app.config['DB_HOST'],
port = app.config['DB_PORT'])[app.config['DB_NAME']]
def get_channels(self, limit = None):
if limit:
return self.db.channels.find().limit(limit).sort("order", 1)
else:
return | self.db.channels.find().sort("order", 1)
def get_grid(self):
grid = {}
for c in self.get_channels(limit = 7):
grid[c['name']] = self.get_channel_day(c['_id'], datetime.now())
return grid
def get_now(self):
grid = {}
for c in self.get_channels(limit = 7):
now = datetime.now()
day = self.get_channel_day(c['_id'], now)
hour, minute = (now.hour, now.minute)
last_pro | gramme = None
grid[c['name']] = {}
for programme in day['programmes']:
hour_pro, minute_pro = [int(a) for a in programme['time'].split(":")]
if hour < hour_pro or ( hour == hour_pro and minute <= minute_pro):
grid[c['name']]['now'] = last_programme
grid[c['name']]['next'] = programme
break
last_programme = programme
return grid
def get_channel_day(self, collection_name, date):
channel_data = self.db[collection_name].find_one({"date" : datetime(date.year, date.month, date.day) })
if not channel_data:
channel_data = {}
channel = self.db.channels.find_one({"_id" : collection_name})
if channel:
channel_data['name'] = channel['name']
return channel_data
|
tommeagher/pycar14 | project4/step_3_complete.py | Python | mit | 1,081 | 0.002775 | #!/usr/bin/env python
import csv
import json
import requests
def main():
# We'll use a local version of this file from now on to save on
# bandwith.
with open('bills.json', 'r') as f:
data = json.load(f)
objects = data['objects']
# Create a csv file to output
with open('bills.csv', 'w') as o:
# Create a csv writer. This will help us format the file
# corr | ectly.
writer = csv.writer(o)
# Write out the header row
writer.writerow([
u'title',
u'label',
u'number',
u'current_status'
])
# Iterate through each dict in the array `objects`
for bill in o | bjects:
writer.writerow([
bill['title_without_number'].encode('utf-8'),
bill['bill_type_label'].encode('utf-8'),
bill['number'],
bill['current_status'].encode('utf-8')
])
if __name__ == '__main__':
main()
|
ilya-ilya/nikolayfs | fs.py | Python | gpl-2.0 | 3,194 | 0.038823 | """
Main module that use fuse to provide filesystem
"""
import os
import sys
import llfuse
import errno
import auth
import requests
import json
import re
import datetime
import time
FILES = "https://www.googleapis.com/drive/v2/files/"
def countMode(meta):
    """Derive a st_mode value from a Drive file's metadata dict.

    The original implementation was left unfinished (a dangling
    ``mode +=``); this reconstruction maps Google Drive folders to
    directories and everything else to regular files.

    NOTE(review): the permission bits (0o755 / 0o644) are a sensible
    default, not something the original code established -- confirm
    against the intended mount semantics.
    """
    import stat  # local import keeps the module's import list unchanged

    if meta["mimeType"].split(".")[-1] == u"folder":
        # Drive folders use mimeType 'application/vnd.google-apps.folder'.
        return stat.S_IFDIR | 0o755
    return stat.S_IFREG | 0o644
def timeRFC3339(dateStr):
    """Convert an RFC 3339 timestamp string to seconds since the epoch.

    Fractional seconds and the timezone suffix are ignored, and the
    result is produced with time.mktime, i.e. the timestamp is
    interpreted in *local* time (same behaviour as the original code).

    Raises AttributeError (via .group(...) on None) when dateStr does not
    match, matching the original's failure mode on bad input.
    """
    # The original pattern was truncated in the source while six groups
    # were consumed below; reconstruct the obvious YYYY-MM-DDTHH:MM:SS form.
    mat = re.match(r"^(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)", dateStr)
    return time.mktime(
        datetime.datetime(
            int(mat.group(1)),
            int(mat.group(2)),
            int(mat.group(3)),
            int(mat.group(4)),
            int(mat.group(5)),
            int(mat.group(6)),
        ).timetuple()
    )
def id2inode(sid):
    """Map a Google Drive file id onto an inode number.

    The special id u"root" is pinned to inode 1; any other id is read as
    a base-128 number over its character ordinals (most significant
    character first).
    """
    if sid == u"root":
        return 1
    inode = 0
    for ch in sid:
        inode = inode * 128 + ord(ch)
    return inode
def inode2id(inode):
    """Invert id2inode: map an inode number back to a Google Drive id.

    Inode 1 is the reserved u"root" id; other inodes are decoded as
    base-128 digit strings (least significant character produced first,
    then reversed).
    """
    if inode == 1:
        return u"root"
    sid = ""
    while inode > 0:
        sid += chr(inode % 128)
        # Floor division: the original used "/=", which under Python 3
        # yields floats and breaks chr(); "//=" is identical on Python 2
        # ints and correct on Python 3.
        inode //= 128
    return sid[::-1]
class Operations(llfuse.Operations):
    """
    Redirector (Broker) of system calls to google

    Most handlers are still stubs that only trace their arguments;
    getattr() is the one call actually wired up to the Drive API.
    (print statements were converted to the function form, which emits
    identical output for a single argument on both Python 2 and 3.)
    """

    def __init__(self, init=False):
        super(Operations, self).__init__()
        # OAuth helper; `init` forces the initial authorization handshake.
        self.auth = auth.Auth(u"native.json", init)

    def access(self, inode, mode, ctx):
        print("access(%s, %s, %s)" % (inode, mode, ctx))

    def create(self, inode_parent, name, mode, flags, ctx):
        print("create(%s, %s, %s, %s, %s)" % (inode_parent, name, mode, flags, ctx))

    def flush(self, fh):
        print("flush(%s)" % (fh, ))

    def forget(self, inode_list):
        print("forget(%s)" % (inode_list, ))

    def fsync(self, fh, datasync):
        print("fsync(%s, %s)" % (fh, datasync))

    def fsyncdir(self, fh, datasync):
        print("fsyncdir(%s, %s)" % (fh, datasync))

    def getattr(self, inode):
        """Build an llfuse EntryAttributes from the file's Drive metadata."""
        print("getattr(%s)" % (inode, ))
        self.auth.check()  # refresh the OAuth token if required
        sid = inode2id(inode)
        response = requests.get(FILES + sid, headers={
            u"Authorization": "%s %s" % (self.auth.tokentype, self.auth.token)
        })
        print(response.text)
        meta = json.loads(response.text)
        entry = llfuse.EntryAttributes()
        entry.st_ino = inode
        entry.generation = 0
        # TODO: modes
        entry.st_mode = countMode(meta)
        entry.st_nlink = 2
        entry.st_uid = os.getuid()
        entry.st_gid = os.getgid()
        entry.st_rdev = 0
        # NOTE(review): the Drive API reports fileSize as a string --
        # confirm whether llfuse needs this coerced to int.
        entry.st_size = meta.get(u"fileSize", 0)
        entry.st_blksize = 512
        entry.st_blocks = 1
        # Timestamps are placeholders until RFC 3339 parsing is hooked up.
        # entry.st_atime = timeRFC3339(meta.get(u"lastViewedByMeDate"))
        entry.st_atime = 1
        entry.st_ctime = 1
        entry.st_mtime = 1
        entry.attr_timeout = 300
        entry.entry_timeout = 300
        return entry

    def getxattr(self, inode, name):
        print("getxattr(%s, %s)" % (inode, name))
        raise llfuse.FUSEError(llfuse.ENOATTR)

    def link(self, inode, new_parent_inode, new_name):
        print("link(%s, %s, %s)" % (inode, new_parent_inode, new_name))

    def listxattr(self, inode):
        print("listxattr(%s)" % (inode,))

    def lookup(self, parent_inode, name):
        print("lookup(%s, %s)" % (parent_inode, name))
if __name__ == "__main__":
    # Usage: fs.py <mountpoint> [anything] -- any extra argument triggers
    # the first-time OAuth initialisation.
    mountpoint = sys.argv[1]
    first = len(sys.argv) > 2
    operations = Operations(init=first)
    llfuse.init(operations, mountpoint, [])
    llfuse.main()
    llfuse.close()
|
lfasmpao/safecore-api | dashboard/forms.py | Python | agpl-3.0 | 4,140 | 0.007488 | from flask_wtf import FlaskForm
from . import app
from wtforms import StringField, PasswordField, SelectField, DateField, IntegerField
from wtforms.validators import DataRequired, Length, Email, EqualTo, Optional, NumberRange
from wtfrecaptcha.fields import RecaptchaField
from flask_wtf.file import FileField, FileAllowed, FileRequired
class LoginForm(FlaskForm):
    """Sign-in form: email address plus password."""
    email = StringField('Email', validators=[DataRequired(), Email(), Length(min=6, max=40)])
    password = PasswordField('Password', validators=[DataRequired()])
class RegistrationForm(FlaskForm):
    """New-account sign-up form with password confirmation and reCAPTCHA."""
    first_name = StringField('First Name', validators=[DataRequired(), Length(min=2, max=40)])
    last_name = StringField('Last Name', validators=[DataRequired(), Length(min=2, max=40)])
    email = StringField('Email', validators=[DataRequired(), Email(), Length(min=6, max=40)])
    username = StringField('Username', validators=[DataRequired(), Length(min=4, max=40)])
    password = PasswordField('New Password', [
        DataRequired(),
        EqualTo('confirm', message='Passwords must match')
    ])
    confirm = PasswordField('Repeat Password')
    # reCAPTCHA keys come from app config so they are not hard-coded here.
    captcha = RecaptchaField(public_key=app.config['RECAPTCHA_PUB_KEY'],
                             private_key=app.config['RECAPTCHA_PRIV_KEY'],
                             secure=True)
class LicenseKeyForm(FlaskForm):
    """Form for activating a license key (category + 29-char key + sender)."""
    category = SelectField('Category', choices=[(1, 'Premium'), (2, 'VIP')], coerce=int)
    license_key = StringField('License Key', validators=[DataRequired(), Length(min=29, max=29)])
    username = StringField('Sender Username', validators=[DataRequired(), Length(min=4, max=40)])
class CreditKeyForm(FlaskForm):
    """Form for redeeming a license key for credits."""
    license_key = StringField('License Key', validators=[DataRequired(), Length(min=29, max=29)])
    username = StringField('Sender Username', validators=[DataRequired(), Length(min=4, max=40)])
class ImageUpload(FlaskForm):
    """Single-file upload restricted to jpg/png images."""
    upload = FileField('Image Upload', validators=[FileRequired(), FileAllowed(['jpg', 'png'], 'Images only!')])
class BuyLicense(FlaskForm):
    """Purchase form: pick a category and a duration.

    NOTE(review): here value 1 maps to 'VIP' while CreateLicense and
    LicenseKeyForm map 1 to 'Premium' -- confirm the inversion is
    intentional before relying on the numeric values.
    """
    category = SelectField('Category', choices=[(1, 'VIP'), (2, 'Premium')], coerce=int)
    qty = SelectField('Qty', choices=[(1, '30 Days'), (2, '60 Days'), (3, '90 Days')], coerce=int)
class CreateLicense(FlaskForm):
    """Admin form for minting a license with an explicit expiration date."""
    category = SelectField('Category', choices=[(1, 'Premium'), (2, 'VIP')], coerce=int)
    qty = SelectField('Qty', choices=[(1, '30 Days'), (2, '60 Days'), (3, '90 Days')], coerce=int)
    valid_date = DateField('Expiration Date', format='%d/%m/%Y', validators=(DataRequired(),))
class ShareCredits(FlaskForm):
    """Form for sharing a fixed bundle of credits (10/20/25)."""
    qty = SelectField('Credits', choices=[(1, '10'), (2, '20'), (3, '25')], coerce=int)
class ShareKey(FlaskForm):
    """Form for sharing license keys: category, free-form quantity, expiry."""
    category = SelectField('Category', choices=[(1, 'Premium'), (2, 'VIP')], coerce=int)
    qty = IntegerField('Qty', validators=[DataRequired(), NumberRange(min=1, max=100)])
    valid_date = DateField('Expiration Date', format='%d/%m/%Y', validators=(DataRequired(),))
class GenerateCredits(FlaskForm):
    """Admin form for generating 1-100 credits."""
    qty = IntegerField('Credits', validators=[DataRequired(), NumberRange(min=1, max=100)])
class TrialForm(FlaskForm):
    """Trial-access form gated by a shared password."""
    password = StringField('Password', validators=[DataRequired(), Length(min=6, max=29)])
class MyPage(FlaskForm):
    """Public profile links; Facebook is mandatory, Twitter optional."""
    facebook_url = StringField('Facebook', validators=[DataRequired(), Length(min=6, max=29)])
    twitter_url = StringField('Twitter', validators=[Optional(), Length(min=6, max=29)])
class MyPageInfo(FlaskForm):
    """Free-form seller info and accepted payment method."""
    info = StringField('Information', validators=[DataRequired()])
    payment_method = StringField('Payment Method', validators=[DataRequired()])
class ChangePassword(FlaskForm):
    """Password change with confirmation; EqualTo ties the two fields."""
    password = PasswordField('New Password', [
        DataRequired(),
        EqualTo('confirm', message='Passwords must match')
    ])
    confirm = PasswordField('Repeat Password')
class LockAccount(FlaskForm):
    """Account-lock confirmation (user types a confirmation string)."""
    confirm = StringField('Confirm', validators=[DataRequired(), Length(min=3, max=50)])
class ProfileSettings(FlaskForm):
    """Editable profile name fields."""
    first_name = StringField('First Name', validators=[DataRequired(), Length(min=2, max=40)])
    last_name = StringField('Last Name', validators=[DataRequired(), Length(min=2, max=40)])
avkhadiev/bbtoDijet | bbtoDijetAnalyzer/test/crab_test_bbtoDijetAnalyzer_test/crab_bbtoDijetAnalyzer_test/inputs/PSet.py | Python | mit | 101 | 0 | import FWCo | re.ParameterSet.Config as cms
import pickle

# Load the pickled CMSSW process configuration produced by CRAB.
# NOTE(review): pickle.load must only ever see trusted, locally generated
# files -- never untrusted input.  The context manager also closes the
# file handle, which the original one-liner leaked.
with open('PSet.pkl', 'rb') as _pset_file:
    process = pickle.load(_pset_file)
|
atwardowski/Dramiel | src/twisted/mod/tableflip.py | Python | mit | 320 | 0.009804 | # -*- coding: utf-8 -*-
from modules.upsidedown.upsidedown import transform as upsidedown


def tableflip(tbot, user, channel, msg):
    """Flip the text following the !tableflip trigger and echo it back.

    Strips the trigger word, turns the remainder upside down, and sends
    "<user>: (table-flip emoticon) <flipped text>" to the channel.
    """
    text = msg.replace('!tableflip', '').strip()
    flipped = upsidedown(text)
    tbot.msg(channel, u"%s: (╯°□°)╯︵ %s" % (user, flipped))


# Trigger pattern used by the bot's command dispatcher; the capture group
# is the text to flip.
tableflip.rule = "!tableflip (.*)"
|
magfest/ubersystem | alembic/versions/bba880ef5bbd_add_is_loud_and_pronouns_columns_to_.py | Python | agpl-3.0 | 2,103 | 0.005231 | """Add is_loud and pronouns columns to PanelApplicant
Revision ID: bba880ef5bbd
Revises: 8f8419ebcf27
Create Date: 2019-07-20 02:57:17.794469
"""
# revision identifiers, used by Alembic.
# Alembic migration identity: this revision and the one it follows.
revision = 'bba880ef5bbd'
down_revision = '8f8419ebcf27'
# No named branches and no cross-branch dependencies for this migration.
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
# Alembic may import this module outside a live migration context (e.g.
# for offline script generation); op.get_context() raises then, and we
# assume a non-SQLite backend.
try:
    is_sqlite = op.get_context().dialect.name == 'sqlite'
except Exception:
    is_sqlite = False
if is_sqlite:
    # SQLite does not enforce foreign keys unless asked per-connection.
    op.get_context().connection.execute('PRAGMA foreign_keys=ON;')
    utcnow_server_default = "(datetime('now', 'utc'))"
else:
    utcnow_server_default = "timezone('utc', current_timestamp)"
def sqlite_column_reflect_listener(inspector, table, column_info):
    """Adds parenthesis around SQLite datetime defaults for utcnow."""
    if column_info['default'] == "datetime('now', 'utc')":
        column_info['default'] = utcnow_server_default
# Passed to batch_alter_table() so reflected defaults round-trip on SQLite.
sqlite_reflect_kwargs = {
    'listeners': [('column_reflect', sqlite_column_reflect_listener)]
}
# ===========================================================================
# HOWTO: Handle alter statements in SQLite
#
# def upgrade():
# if is_sqlite:
# with op.batch_alter_table('table_name', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
#             batch_op.alter_column('column_name', type_=sa.Unicode(), server_default='', nullable=False)
# else:
# op.alter_column('table_name', 'column_name', type_=sa.Unicode(), server_default='', nullable=False)
#
# ===========================================================================
def upgrade():
    """Add the pronoun columns to panel_applicant and is_loud to panel_application."""
    for table, column in (
        ('panel_applicant',
         sa.Column('other_pronouns', sa.Unicode(), server_default='', nullable=False)),
        ('panel_applicant',
         sa.Column('pronouns', sa.Unicode(), server_default='', nullable=False)),
        ('panel_application',
         sa.Column('is_loud', sa.Boolean(), server_default='False', nullable=False)),
    ):
        op.add_column(table, column)
def downgrade():
    """Reverse upgrade(): drop the added columns, newest first."""
    for table, column_name in (
        ('panel_application', 'is_loud'),
        ('panel_applicant', 'pronouns'),
        ('panel_applicant', 'other_pronouns'),
    ):
        op.drop_column(table, column_name)
|
jeromecc/doctoctocbot | src/bot/migrations/0005_auto_20200603_0508.py | Python | mpl-2.0 | 633 | 0 | # Generated by Django 2.2.12 on 2020-06-03 03:08
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add email and password columns to the bot Account model.

    NOTE(review): storing a password in a plain CharField suggests these
    are third-party service credentials kept in clear text -- confirm
    this is intended and consider encrypting at rest.
    """

    dependencies = [
        ('bot', '0004_auto_20191013_2322'),
    ]

    operations = [
        migrations.AddField(
            model_name='account',
            name='email',
            field=models.CharField(default='', max_length=128),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='account',
            name='password',
            field=models.CharField(default='', max_length=128),
            preserve_default=False,
        ),
    ]
|
brainheart/dce_lti_py | dce_lti_py/tool_consumer.py | Python | mit | 2,381 | 0.0021 |
from requests import Request
from oauthlib.common import unquote
from requests_oauthlib import OAuth1
from requests_oauthlib.oauth1_auth import SIGNATURE_TYPE_BODY
from tool_base import ToolBase
from launch_params import LAUNCH_PARAMS_REQUIRED
from utils import parse_qs, InvalidLTIConfigError, generate_identifier
class ToolConsumer(ToolBase):
    """LTI Tool Consumer: builds OAuth1-signed launch requests for a tool."""

    def __init__(self, consumer_key, consumer_secret,
                 params=None, launch_url=None):
        '''
        Create new ToolConsumer.

        `launch_url` may alternatively be supplied inside `params` for
        backwards compatibility; it is popped out of the dict in that
        case.  Raises InvalidLTIConfigError when no launch URL is given
        either way.
        '''
        # allow launch_url to be specified in launch_params for
        # backwards compatibility
        if launch_url is None:
            if 'launch_url' not in params:
                raise InvalidLTIConfigError('missing \'launch_url\' arg!')
            else:
                launch_url = params['launch_url']
                del params['launch_url']
        self.launch_url = launch_url
        super(ToolConsumer, self).__init__(consumer_key, consumer_secret,
                                           params=params)

    def has_required_params(self):
        """Return True when every required LTI launch parameter is non-empty."""
        return all([
            self.launch_params.get(x) for x in LAUNCH_PARAMS_REQUIRED
        ])

    def generate_launch_request(self, **kwargs):
        """
        returns a Oauth v1 "signed" requests.PreparedRequest instance

        Extra keyword arguments are forwarded to requests_oauthlib.OAuth1.
        Raises InvalidLTIConfigError if required launch params are missing.
        """
        if not self.has_required_params():
            raise InvalidLTIConfigError(
                'Consumer\'s launch params missing one of '
                + str(LAUNCH_PARAMS_REQUIRED)
            )

        # if 'oauth_consumer_key' not in self.launch_params:
        #     self.launch_params['oauth_consumer_key'] = self.consumer_key

        params = self.to_params()
        r = Request('POST', self.launch_url, data=params).prepare()
        # Sign the POST body (SIGNATURE_TYPE_BODY) as LTI launches require.
        sign = OAuth1(self.consumer_key, self.consumer_secret,
                      signature_type=SIGNATURE_TYPE_BODY, **kwargs)
        return sign(r)

    def generate_launch_data(self, **kwargs):
        """
        Provided for backwards compatibility

        Returns the signed launch parameters as a dict rather than a
        prepared request.
        """
        r = self.generate_launch_request(**kwargs)
        return parse_qs(unquote(r.body))

    def set_config(self, config):
        '''
        Set launch data from a ToolConfig.

        Only fills launch_url if it was not already set; custom params
        are merged in unconditionally.
        '''
        if self.launch_url is None:  # `is None`: identity test is the idiom
            self.launch_url = config.launch_url
        self.launch_params.update(config.custom_params)
|
wodo/django-ct | tests/settings.py | Python | bsd-2-clause | 5,129 | 0.002925 | # Django settings for tests project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'tests/data.sqlite', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Berlin'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGES = (
('en', 'English'),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): acceptable for a test-only settings module, but a real
# deployment must never commit its SECRET_KEY to version control.
SECRET_KEY = '72il!jy-w%wo*964y9za4i@q4ooi20(j*xks)#0z*g2ff5odzy'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'tests.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
#'django.contrib.auth',
#'django.contrib.contenttypes',
    #'django.contrib.sessions',
#'django.contrib.sites',
#'django.contrib.messages',
#'django.contrib.staticfiles',
#'django.contrib.admin',
'ct',
'tests',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
Dawny33/Code | CodeChef/CDAV14/PND.py | Python | gpl-3.0 | 134 | 0 | T = | input()
while(T):
T -= 1
s = input()
| s = str(oct(s))
if s == s[::-1]:
print 1
else:
print -1
|
InUrSys/PescArt2.0 | GeneratedFiles/ui_addRulesToForm.py | Python | gpl-3.0 | 5,232 | 0.002867 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/Users/chernomirdinmacuvele/Documents/workspace/PescArt2.0/UserInt/ui_addRulesToForm.ui'
#
# Created by: PyQt5 UI code generator 5.8.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(838, 550)
self.TVForms = QtWidgets.QTableView(Form)
self.TVForms.setGeometry(QtCore.QRect(10, 90, 821, 441))
self.TVForms.setObjectName("TVForms")
self.layoutWidget = QtWidgets.QWidget(Form)
self.layoutWidget.setGeometry(QtCore.QRect(10, 10, 421, 71))
self.layoutWidget.setObjectName("layoutWidget")
self.gridLayout = QtWidgets.QGridLayout(self.layoutWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.CBRules = QtWidgets.QComboBox(self.layoutWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.CBRules.sizePolicy().hasHeightForWidth())
self.CBRules.setSizePolicy(sizePolicy)
self.CBRules.setObjectName("CBRules")
self.gridLayout.addWidget(self.CBRules, 1, 0, 1, 1)
self.label = QtWidgets.QLabel(self.layoutWidget)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.layoutWidget1 = QtWidgets.QWidget(Form)
self.layoutWidget1.setGeometry(QtCore.QRect(470, 10, 291, 71))
self.layoutWidget1.setObjectName("layoutWidget1")
self.gridLayout_2 = QtWidgets.QGridLayout(self.layoutWidget1)
self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label_2 = QtWidgets.QLabel(self.layoutWidget1)
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.CBForm = QtWidgets.QComboBox(self.layoutWidget1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.CBForm.sizePolicy().hasHeightForWidth())
self.CBForm.setSizePolicy(sizePolicy)
self.CBForm.setObjectName("CBForm")
self.gridLayout_2.addWidget(self.CBForm, 1, 0, 1, 1)
self.layoutWidget2 = QtWidgets.QWidget(Form)
self.layoutWidget2.setGeometry(QtCore.QRect(770, 0, 62, 81))
self.layoutWidget2.setObjectName("layoutWidget2")
self.gridLayout_3 = QtWidgets.QGridLayout(self.layoutWidget2)
self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
self.gridLayout_3.setObjectName("gridLayout_3")
self.PBAdd = QtWidgets.QPushButton(self.layoutWidget2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.PBAdd.sizePolicy().hasHeightForWidth())
self.PBAdd.setSizePolicy(sizePolicy)
self.PBAdd.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/plus.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.PBAdd.setIcon(icon)
self.PBAdd.setObjectName("PBAdd")
self.gridLayout_3. | addWidget(self.PBAdd, 1, 1, 1, 1)
self.PBEdit = QtWidgets.QPushButton(self.layoutWidget2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.PBEdit.sizePolicy().hasHeightForWidth())
self.PBEdit.setSizePolicy(sizePolicy)
| self.PBEdit.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/005-writer.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.PBEdit.setIcon(icon1)
self.PBEdit.setObjectName("PBEdit")
self.gridLayout_3.addWidget(self.PBEdit, 0, 1, 1, 1)
self.PBDelete = QtWidgets.QPushButton(self.layoutWidget2)
self.PBDelete.setText("")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/003-error.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.PBDelete.setIcon(icon2)
self.PBDelete.setObjectName("PBDelete")
self.gridLayout_3.addWidget(self.PBDelete, 2, 1, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Install the user-visible (translatable) strings on the widgets."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.label.setText(_translate("Form", "Descricao da Regra"))
        self.label_2.setText(_translate("Form", "Nome do Form "))
import icons_rc
|
dahlstrom-g/intellij-community | python/testData/inspections/PyAbstractClassInspection/quickFix/AddABCToSuperclasses/main_import.py | Python | apache-2.0 | 85 | 0.023529 | import abc
class A1(abc.ABC):
    """Abstract base class with a single abstract method.

    (IDE test fixture; the decorator/def lines were mangled in the
    source and are restored here.)
    """

    @abc.abstractmethod
    def m1(self):
        pass
ormnv/os_final_project | osfinalproject/migrations/0001_initial.py | Python | bsd-3-clause | 1,936 | 0.002066 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema: Question, Answer and Vote tables.

    NOTE(review): the FK fields are deliberately named *_id in the
    models, so Django appends another _id at the database level --
    confirm this naming is intended.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Answer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('answer_text', models.TextField()),
                ('pub_date', models.DateTimeField(auto_now_add=True)),
                ('votes', models.IntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('question_text', models.TextField()),
                ('pub_date', models.DateTimeField(auto_now_add=True)),
                ('votes', models.IntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Vote',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('pub_date', models.DateTimeField(auto_now_add=True)),
                ('up_or_down', models.BooleanField(default=False)),
                ('answer_id', models.ForeignKey(to='osfinalproject.Answer')),
                ('question_id', models.ForeignKey(to='osfinalproject.Question')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='answer',
            name='question_id',
            field=models.ForeignKey(to='osfinalproject.Question'),
            preserve_default=True,
        ),
    ]
|
lilida/teletraan | deploy-agent/tests/unit/deploy/staging/test_helper.py | Python | apache-2.0 | 4,299 | 0.001861 | # Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import os.path
import shutil
import unittest
import tempfile
import getpass
from deployd.common.status_code import Status
from deployd.staging.stager import Stager
class TestHelper(unittest.TestCase):
    """Tests for Stager: package enabling and current-build discovery."""

    @classmethod
    def setUpClass(cls):
        # Build a throwaway deploy layout (builds dir + target symlink
        # location) and a stub config object that points at it.
        cls.base_dir = tempfile.mkdtemp()
        builds_dir = os.path.join(cls.base_dir, 'builds')
        target = os.path.join(cls.base_dir, 'test')
        cls.target = target
        cls.builds_dir = builds_dir
        cls.user_role = getpass.getuser()
        if not os.path.exists(builds_dir):
            os.mkdir(builds_dir)

        def mock_get_var(var_name):
            # Minimal stand-in for the agent config lookup.
            if var_name == 'builds_dir':
                return builds_dir
            elif var_name == 'env_directory':
                return cls.base_dir
            elif var_name == 'package_format':
                return 'tar.gz'
            elif var_name == "deploy_agent_dir":
                return cls.base_dir
            elif var_name == "user_role":
                return getpass.getuser()

        cls.config = mock.Mock()
        cls.config.get_var = mock.MagicMock(side_effect=mock_get_var)
        cls.config.get_target = mock.MagicMock(return_value=target)
        cls.config.get_builds_directory = mock.MagicMock(return_value=builds_dir)
        cls.transformer = mock.Mock()
        cls.transformer.dict_size = mock.Mock(return_value=1)
        cls.transformer.transform_scripts = mock.Mock()

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.base_dir)

    def test_enable_new(self):
        """Enabling a build with no previous target creates the symlink."""
        tarball_dir = os.path.join(self.builds_dir, '24714bc')
        self.config.get_user_role = mock.MagicMock(return_value=self.user_role)
        if not os.path.exists(tarball_dir):
            os.mkdir(tarball_dir)
        script_dir = os.path.join(tarball_dir, "teletraan")
        if not os.path.exists(script_dir):
            os.mkdir(script_dir)
        self.assertTrue(Stager(config=self.config, transformer=self.transformer,
                               build="24714bc", target=self.target,
                               env_name="test").enable_package() == Status.SUCCEEDED)
        self.assertTrue(os.path.exists(self.target))
        self.assertTrue(os.path.exists(os.path.join(self.target, "teletraan_template")))
        self.assertEqual(os.readlink(self.target), os.path.join(self.builds_dir, '24714bc'))
        os.remove(self.target)

    def test_enable_build_with_old(self):
        """Enabling a new build repoints the target symlink away from the old build."""
        self.config.get_user_role = mock.MagicMock(return_value=self.user_role)
        old_tarball_dir = os.path.join(self.builds_dir, '24714bc')
        if not os.path.exists(old_tarball_dir):
            os.mkdir(old_tarball_dir)
        os.symlink(os.path.join(self.builds_dir, '24714bc'), self.target)
        tarball_dir = os.path.join(self.builds_dir, '1234567')
        if not os.path.exists(tarball_dir):
            os.mkdir(tarball_dir)
        Stager(config=self.config, transformer=self.transformer,
               build="1234567", target=self.target,
               env_name="test").enable_package()
        self.assertEqual(os.readlink(self.target), os.path.join(self.builds_dir, '1234567'))
        os.remove(self.target)

    def test_get_enabled_build(self):
        """get_enabled_build() is None for a dangling symlink, else the build name."""
        missing_target = os.path.join(self.builds_dir, 'foo')
        os.symlink(missing_target, self.target)
        stager = Stager(config=self.config, transformer=self.transformer,
                        build="24714bc", target=self.target, env_name="test")
        self.assertEqual(None, stager.get_enabled_build())
        # now let's make our missing_target a real target!
        os.mkdir(missing_target)
        self.assertEqual('foo', stager.get_enabled_build())
|
brennie/reviewboard | reviewboard/scmtools/tests/test_svn.py | Python | mit | 22,242 | 0 | # coding=utf-8
from __future__ import unicode_literals
import os
from hashlib import md5
import nose
from django.conf import settings
from kgb import SpyAgency
from reviewboard.diffviewer.diffutils import patch
from reviewboard.scmtools.core import (Branch, Commit, Revision, HEAD,
PRE_CREATION)
from reviewboard.scmtools.errors import SCMError, FileNotFoundError
from reviewboard.scmtools.models import Repository, Tool
from reviewboard.scmtools.svn import recompute_svn_backend
from reviewboard.scmtools.tests.testcases import SCMTestCase
class CommonSVNTestCase(SpyAgency, SCMTestCase):
"""Common unit tests for Subversion.
This is meant to be subclassed for each backend that wants to run
the common set of tests.
"""
backend = None
backend_name = None
fixtures = ['test_scmtools']
    def setUp(self):
        """Point the SVN tool at the bundled test repository for self.backend."""
        super(CommonSVNTestCase, self).setUp()
        # Restrict Review Board to the backend under test; restored in
        # tearDown().
        self._old_backend_setting = settings.SVNTOOL_BACKENDS
        settings.SVNTOOL_BACKENDS = [self.backend]
        recompute_svn_backend()
        self.svn_repo_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         '..', 'testdata', 'svn_repo'))
        self.svn_ssh_path = ('svn+ssh://localhost%s'
                             % self.svn_repo_path.replace('\\', '/'))
        self.repository = Repository(name='Subversion SVN',
                                     path='file://' + self.svn_repo_path,
                                     tool=Tool.objects.get(name='Subversion'))
        try:
            self.tool = self.repository.get_scmtool()
        except ImportError:
            # Skip (rather than fail) when the backend's dependency
            # (e.g. pysvn/subvertpy) is not installed.
            raise nose.SkipTest('The %s backend could not be used. A '
                                'dependency may be missing.'
                                % self.backend)
        # Sanity-check that the intended client implementation was loaded.
        assert self.tool.client.__class__.__module__ == self.backend
    def tearDown(self):
        """Restore the global SVN backend setting changed in setUp()."""
        super(CommonSVNTestCase, self).tearDown()
        settings.SVNTOOL_BACKENDS = self._old_backend_setting
        recompute_svn_backend()
def shortDescription(self):
desc = super(CommonSVNTestCase, self).shortDescription()
desc = desc.replace('<backend>', self.backend_name)
return desc
    def test_ssh(self):
        """Testing SVN (<backend>) with a SSH-backed Subversion repository"""
        # Delegates to the shared SSH harness provided by SCMTestCase.
        self._test_ssh(self.svn_ssh_path, 'trunk/doc/misc-docs/Makefile')
    def test_ssh_with_site(self):
        """Testing SVN (<backend>) with a SSH-backed Subversion repository
        with a LocalSite
        """
        # Same as test_ssh, but scoped to a LocalSite via the shared harness.
        self._test_ssh_with_site(self.svn_ssh_path,
                                 'trunk/doc/misc-docs/Makefile')
    def test_get_file(self):
        """Testing SVN (<backend>) get_file"""
        expected = (b'include ../tools/Makefile.base-vars\n'
                    b'NAME = misc-docs\n'
                    b'OUTNAME = svn-misc-docs\n'
                    b'INSTALL_DIR = $(DESTDIR)/usr/share/doc/subversion\n'
                    b'include ../tools/Makefile.base-rules\n')
        # There are 3 versions of this test in order to get 100% coverage of
        # the svn module.
        rev = Revision('2')
        file = 'trunk/doc/misc-docs/Makefile'
        value = self.tool.get_file(file, rev)
        self.assertTrue(isinstance(value, bytes))
        self.assertEqual(value, expected)
        # Relative, leading-slash, and fully repo-prefixed paths must all
        # resolve to the same file contents.
        self.assertEqual(self.tool.get_file('/' + file, rev), expected)
        self.assertEqual(
            self.tool.get_file(self.repository.path + '/' + file, rev),
            expected)
        self.assertTrue(self.tool.file_exists('trunk/doc/misc-docs/Makefile'))
        self.assertTrue(
            not self.tool.file_exists('trunk/doc/misc-docs/Makefile2'))
        # Empty paths and pre-creation revisions must fail loudly.
        self.assertRaises(FileNotFoundError, lambda: self.tool.get_file(''))
        self.assertRaises(FileNotFoundError,
                          lambda: self.tool.get_file('hello', PRE_CREATION))
def test_revision_parsing(self):
"""Testing SVN (<backend>) revision number parsing"""
self.assertEqual(
self.tool.parse_diff_revision('', '(working copy)')[1],
HEAD)
self.assertEqual(
self.tool.parse_diff_revision('', ' (revision 0)')[1],
PRE_CREATION)
self.assertEqual(self.tool.parse_diff_revision('', '(revision 1)')[1],
'1')
self.assertEqual(self.tool.parse_diff_revision('', '(revision 23)')[1],
'23')
# Fix for bug 2176
self.assertEqual(
self.tool.parse_diff_revision('', '\t(revision 4)')[1], '4')
self.assertEqual(
self.tool.parse_diff_revision(
'', '2007-06-06 15:32:23 UTC (rev 10958)')[1],
'10958')
# Fix for bug 2632
self.assertEqual(self.tool.parse_diff_revision('', '(revision )')[1],
PRE_CREATION)
self.assertRaises(SCMError,
lambda: self.tool.parse_diff_revision('', 'hello'))
# Verify that 'svn diff' localized revision strings parse correctly.
self.assertEqual(self.tool.parse_diff_revision('', '(revisión: 5)')[1],
'5')
self.assertEqual(self.tool.parse_diff_revision('',
'(リビジョン 6)')[1], '6')
self.assertEqual(se | lf.tool.parse_diff_revision('', '(版本 7)')[1],
'7')
def test_revision_parsing_with_nonexistent(self):
"""Testing SVN (<backend>) revision parsing with "(nonexistent)"
revision indicator
"""
# English
self.assertEqual(
self.tool.parse_diff_revision('', '(nonexistent)')[1],
PRE_CREATION)
# German
self.assertEqual(
self.tool.parse_diff_revision('', '(nicht existent)')[1],
| PRE_CREATION)
# Simplified Chinese
self.assertEqual(
self.tool.parse_diff_revision('', '(不存在的)')[1],
PRE_CREATION)
    def test_revision_parsing_with_nonexistent_and_branches(self):
        """Testing SVN (<backend>) revision parsing with relocation
        information and nonexistent revision specifier.
        """
        # A leading "(.../path)" relocation marker must be skipped before the
        # (possibly localized) "nonexistent" indicator is recognized.
        self.assertEqual(
            self.tool.parse_diff_revision(
                '', '(.../trunk) (nonexistent)')[1],
            PRE_CREATION)
        self.assertEqual(
            self.tool.parse_diff_revision(
                '', '(.../branches/branch-1.0) (nicht existent)')[1],
            PRE_CREATION)
        # Leading whitespace before the relocation marker is also tolerated.
        self.assertEqual(
            self.tool.parse_diff_revision(
                '', ' (.../trunk) (不存在的)')[1],
            PRE_CREATION)
    def test_interface(self):
        """Testing SVN (<backend>) with basic SVNTool API"""
        # SVN diffs use repository-relative paths, not absolute ones.
        self.assertFalse(self.tool.diffs_use_absolute_paths)
        # SVN has no changeset concept, so get_changeset() must not be
        # implemented by this tool.
        self.assertRaises(NotImplementedError,
                          lambda: self.tool.get_changeset(1))
    def test_binary_diff(self):
        """Testing SVN (<backend>) parsing SVN diff with binary file"""
        # Minimal output as produced by 'svn diff' for a binary file: no
        # hunks, just the "Cannot display" marker and the mime-type property.
        diff = (b'Index: binfile\n'
                b'============================================================'
                b'=======\n'
                b'Cannot display: file marked as a binary type.\n'
                b'svn:mime-type = application/octet-stream\n')
        file = self.tool.get_parser(diff).parse()[0]
        # The parser must record the filename and flag the file as binary
        # instead of attempting to parse diff content.
        self.assertEqual(file.origFile, 'binfile')
        self.assertEqual(file.binary, True)
def test_keyword_diff(self):
"""Testing SVN (<backend>) parsing diff with keywords"""
# 'svn cat' will expand special variables in svn:keywords,
# but 'svn diff' doesn't expand anything. This causes the
# patch to fail if those variables appear in the patch context.
diff = (b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Rev$\n'
b' # $Revi |
lkhomenk/integration_tests | cfme/tests/configure/test_tag_category.py | Python | gpl-2.0 | 3,472 | 0.000864 | # -*- coding: utf-8 -*-
import fauxfactory
import pytest
from cfme.configure.configuration.region_settings import Category
from cfme.rest.gen_data import categories as _categories
from cfme.utils.rest import (
assert_response,
delete_resources_from_collection,
delete_resources_from_detail,
)
from cfme.utils.update import update
from cfme.utils.wait import wait_for
@pytest.mark.tier(2)
@pytest.mark.sauce
def test_category_crud():
    """Exercise create, update and delete of a tag Category through the UI."""
    category = Category(
        name=fauxfactory.gen_alphanumeric(8).lower(),
        description=fauxfactory.gen_alphanumeric(32),
        display_name=fauxfactory.gen_alphanumeric(32),
    )
    category.create()
    # The `update` context manager pushes the changed fields when it exits.
    with update(category):
        category.description = fauxfactory.gen_alphanumeric(32)
    category.delete(cancel=False)
class TestCategoriesViaREST(object):
    """CRUD tests for tag categories driven through the REST API."""

    @pytest.fixture(scope="function")
    def categories(self, request, appliance):
        """Create five throwaway categories and return their REST entities."""
        response = _categories(request, appliance.rest_api, num=5)
        assert_response(appliance)
        assert len(response) == 5
        return response

    @pytest.mark.tier(3)
    def test_create_categories(self, appliance, categories):
        """Tests creating categories.

        Metadata:
            test_flag: rest
        """
        # Each created category must be retrievable by id with matching name.
        for ctg in categories:
            record = appliance.rest_api.collections.categories.get(id=ctg.id)
            assert_response(appliance)
            assert record.name == ctg.name

    @pytest.mark.tier(3)
    @pytest.mark.parametrize(
        "multiple", [False, True],
        ids=["one_request", "multiple_requests"])
    def test_edit_categories(self, appliance, categories, multiple):
        """Tests editing categories.

        Metadata:
            test_flag: rest
        """
        collection = appliance.rest_api.collections.categories
        categories_len = len(categories)
        new = []
        for _ in range(categories_len):
            new.append(
                {'description': 'test_category_{}'.format(fauxfactory.gen_alphanumeric().lower())})
        if multiple:
            # Single bulk-edit request: each payload needs the entity ref.
            for index in range(categories_len):
                new[index].update(categories[index]._ref_repr())
            edited = collection.action.edit(*new)
            assert_response(appliance)
        else:
            # One edit request per category.
            edited = []
            for index in range(categories_len):
                edited.append(categories[index].action.edit(**new[index]))
                assert_response(appliance)
        assert categories_len == len(edited)
        # Wait until every new description is visible through the collection.
        # (The lambda closes over `index`, but wait_for runs inside the same
        # loop iteration, so the binding is correct.)
        for index in range(categories_len):
            record, _ = wait_for(
                lambda: collection.find_by(description=new[index]['description']) or False,
                num_sec=180,
                delay=10,
            )
            assert record[0].id == edited[index].id
            assert record[0].description == edited[index].description

    @pytest.mark.tier(3)
    @pytest.mark.parametrize("method", ["post", "delete"], ids=["POST", "DELETE"])
    def test_delete_categories_from_detail(self, categories, method):
        """Tests deleting categories from detail.

        Metadata:
            test_flag: rest
        """
        delete_resources_from_detail(categories, method=method)

    @pytest.mark.tier(3)
    def test_delete_categories_from_collection(self, categories):
        """Tests deleting categories from collection.

        Metadata:
            test_flag: rest
        """
        delete_resources_from_collection(categories, not_found=True)
|
jaeminSon/V-GAN | codes/train.py | Python | mit | 6,553 | 0.020449 | import numpy as np
from model import GAN, discriminator_pixel, discriminator_image, discriminator_patch1, discriminator_patch2, generator, discriminator_dummy
import utils
import os
from PIL import Image
import argparse
from keras import backend as K
# arrange arguments
parser=argparse.ArgumentParser()
parser.add_argument(
'--ratio_gan2seg',
type=int,
help="ratio of gan loss to seg loss",
required=True
)
parser.add_argument(
'--gpu_index',
type=str,
help="gpu index",
required=True
)
parser.add_argument(
'--discriminator',
type=str,
help="type of discriminator",
required=True
)
parser.add_argument(
'--batch_size',
type=int,
help="batch size",
required=True
)
parser.add_argument(
'--dataset',
type=str,
help="dataset name",
required=True
)
FLAGS,_= parser.parse_known_args()
# training settings
os.environ['CUDA_VISIBLE_DEVICES']=FLAGS.gpu_index
n_rounds=10
batch_size=FLAGS.batch_size
n_filters_d=32
n_filters_g=32
val_ratio=0.05
init_lr=2e-4
schedules={'lr_decay':{}, # learning rate and step have the same decay schedule (not necessarily the values)
'step_decay':{}}
alpha_recip=1./FLAGS.ratio_gan2seg if FLAGS.ratio_gan2seg>0 else 0
rounds_for_evaluation=range(n_rounds)
# set dataset
dataset=FLAGS.dataset
img_size= (640,640) if dataset=='DRIVE' else (720,720) # (h,w) [original img size => DRIVE : (584, 565), STARE : (605,700) ]
img_out_dir="{}/segmentation_results_{}_{}".format(FLAGS.dataset,FLAGS.discriminator,FLAGS.ratio_gan2seg)
model_out_dir="{}/model_{}_{}".format(FLAGS.dataset,FLAGS.discriminator,FLAGS.ratio_gan2seg)
auc_out_dir="{}/auc_{}_{}".format(FLAGS.dataset,FLAGS.discriminator,FLAGS.ratio_gan2seg)
train_dir="../data/{}/training/".format(dataset)
test_dir="../data/{}/test/".format(dataset)
if not os.path.isdir(img_out_dir):
os.makedirs(img_out_dir)
if not os.path.isdir(model_out_dir):
os.makedirs(model_out_dir)
if not os.path.isdir(auc_out_dir):
os.makedirs(auc_out_dir)
# set training and validation dataset
train_imgs, train_vessels =utils.get_imgs(train_dir, augmentation=True, img_size=img_size, dataset=dataset)
train_vessels=np.expand_dims(train_vessels, axis=3)
n_all_imgs=train_imgs.shape[0]
n_train_imgs=int((1-val_ratio)*n_all_imgs)
train_indices=np.random.choice(n_all_imgs,n_train_imgs,replace=False)
train_batch_fetcher=utils.TrainBatchFetcher(train_imgs[train_indices,...], train_vessels[train_indices,...], batch_size)
val_imgs, val_vessels=train_imgs[np.delete(range(n_all_imgs),train_indices),...], train_vessels[np.delete(range(n_all_imgs),train_indices),...]
# set test dataset
test_imgs, test_vessels, test_masks=utils.get_imgs(test_dir, augmentation=False, img_size=img_size, dataset=dataset, mask=True)
# create networks
# Build the generator, then pick the discriminator architecture requested on
# the command line; anything unrecognized falls back to the dummy (no-GAN)
# discriminator.
g = generator(img_size, n_filters_g)
if FLAGS.discriminator=='pixel':
    d, d_out_shape = discriminator_pixel(img_size, n_filters_d,init_lr)
elif FLAGS.discriminator=='patch1':
    d, d_out_shape = discriminator_patch1(img_size, n_filters_d,init_lr)
elif FLAGS.discriminator=='patch2':
    d, d_out_shape = discriminator_patch2(img_size, n_filters_d,init_lr)
elif FLAGS.discriminator=='image':
    d, d_out_shape = discriminator_image(img_size, n_filters_d,init_lr)
else:
    d, d_out_shape = discriminator_dummy(img_size, n_filters_d,init_lr)
# Combined model: generator trained against the (frozen) discriminator plus
# the segmentation loss weighted by alpha_recip.
gan=GAN(g,d,img_size, n_filters_g, n_filters_d,alpha_recip, init_lr)
g.summary()
d.summary()
gan.summary()
# start training
scheduler=utils.Scheduler(n_train_imgs//batch_size, n_train_imgs//batch_size, schedules, init_lr) if alpha_recip>0 else utils.Scheduler(0, n_train_imgs//batch_size, schedules, init_lr)
print "training {} images :".format(n_train_imgs)
for n_round in range(n_rounds):
# train D
utils.make_trainable(d, True)
for i in range(scheduler.get_dsteps()):
real_imgs, real_vessels = next(train_batch_fetcher)
d_x_batch, d_y_batch = utils.input2discriminator(real_imgs, real_vessels, g.predict(real_imgs,batch_size=batch_size), d_out_shape)
d.train_on_batch(d_x_batch, d_y_batch)
# train G (freeze discriminator)
utils.make_trainable(d, False)
for i in range(scheduler.get_gsteps()):
real_imgs, real_vessels = next(train_batch_fetcher)
g_x_batch, g_y_batch=utils.input2gan(real_imgs, real_vessels, d_out_shape)
gan.train_on_batch(g_x_batch, g_y_batch)
# evaluate on validation set
if n_round in rounds_for_evaluation:
# D
d_x_test, d_y_test=utils.input2discriminator(val_imgs, val_vessels, g.predict(val_imgs,batch_size=batch_size), d_out_shape)
loss, acc=d.evaluate(d_x_test,d_y_test, batch_size=batch_size, verbose=0)
utils.print_metrics(n_round+1, loss=loss, acc=acc, type='D')
# G
gan_x_test, gan_y_test=utils.input2gan(val_imgs, val_vessels, d_out_shape)
loss,acc=gan.evaluate(gan_x_test,gan_y_test, batch_size=batch_size, verbose=0)
utils.print_metrics(n_round+1, acc=acc, loss=loss, type='GAN')
# save the model and weights with the best validation loss
with open(os.path.join(model_out_dir,"g_{}_{}_{}.json".format( | n_round,FLAGS.discriminator,FLAGS.ratio_gan2seg)),'w') as f:
f.write(g.to_json())
g.save_weights(os.path.join(model_out_dir,"g_{}_{}_{}.h5".format(n_round,FLAGS.discriminator,FLAGS.ratio_gan2seg) | ))
# update step sizes, learning rates
scheduler.update_steps(n_round)
K.set_value(d.optimizer.lr, scheduler.get_lr())
K.set_value(gan.optimizer.lr, scheduler.get_lr())
# evaluate on test images
if n_round in rounds_for_evaluation:
generated=g.predict(test_imgs,batch_size=batch_size)
generated=np.squeeze(generated, axis=3)
vessels_in_mask, generated_in_mask = utils.pixel_values_in_mask(test_vessels, generated , test_masks)
auc_roc=utils.AUC_ROC(vessels_in_mask,generated_in_mask,os.path.join(auc_out_dir,"auc_roc_{}.npy".format(n_round)))
auc_pr=utils.AUC_PR(vessels_in_mask, generated_in_mask,os.path.join(auc_out_dir,"auc_pr_{}.npy".format(n_round)))
utils.print_metrics(n_round+1, auc_pr=auc_pr, auc_roc=auc_roc, type='TESTING')
# print test images
segmented_vessel=utils.remain_in_mask(generated, test_masks)
for index in range(segmented_vessel.shape[0]):
Image.fromarray((segmented_vessel[index,:,:]*255).astype(np.uint8)).save(os.path.join(img_out_dir,str(n_round)+"_{:02}_segmented.png".format(index+1)))
|
dubrayn/dubrayn.github.io | examples/threading/example0.py | Python | mit | 143 | 0.048951 | #! | /usr/bin/env python3
import threading


def worker():
    """Print a short message from the spawned thread."""
    print('new worker')


# Spawn eight daemonless threads; each runs worker() once and exits.
for i in range(8):
    threading.Thread(target=worker).start()
|
AuthentiqID/examples-flask | example_dance.py | Python | mit | 2,000 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Flask-Dance example.
This example demonstrates how to integrate a server application with
Authentiq Connect. It uses the popular Flask-Dance package to make
this trivial in Flask.
"""
from __future__ import (
    absolute_import,
    division,
    print_function,
    unicode_literals
)

import os

from flask import Flask, redirect, url_for
from flask_dance.contrib.authentiq import make_authentiq_blueprint, authentiq
from werkzeug.contrib.fixers import ProxyFix
class Config(object):
    """
    Flask configuration container.
    """
    # Development settings: disable DEBUG and rotate SECRET_KEY in production.
    DEBUG = True
    TESTING = False
    # Key used by Flask to sign the session cookie.
    SECRET_KEY = "aicahquohzieRah5ZooLoo3a"
# The following app is registered at Authentiq Connect.
CLIENT_ID = os.environ.get("CLIENT_ID", "examples-flask-basic")
CLIENT_SECRET = os.environ.get("CLIENT_SECRET", "ed25519")

# Personal details requested from the user. See the "scopes_supported" key in
# the following JSON document for an up to date list of supported scopes:
#
#   https://connect.authentiq.io/.well-known/openid-configuration
#
REQUESTED_SCOPES = ["openid", "aq:name", "email~s", "aq:push", "userinfo"]

# Local port the example listens on, and the OAuth2 callback derived from it.
PORT = 8000
REDIRECT_URL = "http://localhost:%d/authorized" % PORT
app = Flask(__name__)
app.config.from_object(Config)
# Trust X-Forwarded-* headers when running behind a reverse proxy.
app.wsgi_app = ProxyFix(app.wsgi_app)

# Flask-Dance blueprint wiring the Authentiq Connect OAuth2 flow.
# NOTE(review): REDIRECT_URL is defined above but not passed here -- confirm
# the blueprint's default callback route matches it.
blueprint = make_authentiq_blueprint(
    client_id=CLIENT_ID,
    client_secret=CLIENT_SECRET,
    scope=" ".join(REQUESTED_SCOPES),
)
app.register_blueprint(blueprint, url_prefix="/login")
@app.route("/")
def index():
    """Redirect to the Authentiq login until authorized, then greet the user
    with their name (falling back to email, then "anonymous")."""
    if not authentiq.authorized:
        return redirect(url_for("authentiq.login"))
    resp = authentiq.get("/userinfo")
    assert resp.ok
    data = resp.json()
    return "You are {name} on Authentiq!".format(
        name=data.get("name") or data.get("email") or "anonymous")
if __name__ == "__main__":
    if app.debug:
        # Local re-import is redundant (os is imported above) but harmless.
        import os
        # Allow insecure oauth2 when debugging
        os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
    app.run()
|
openai/universe | universe/wrappers/experimental/action_space.py | Python | mit | 6,525 | 0.003525 | import logging
import gym
import numpy as np
from universe import spaces
from universe import vectorized
from universe.wrappers.gym_core import gym_core_action_space
logger = logging.getLogger(__name__)
def slither_vnc(space=False, left=False, right=False):
    """Return the VNC key events encoding one Slither.io action."""
    pressed = (('space', space), ('left', left), ('right', right))
    return [spaces.KeyEvent.by_name(key, down=down) for key, down in pressed]
def racing_vnc(up=False, left=False, right=False):
    """Return the VNC key events encoding one driving-game action."""
    return [spaces.KeyEvent.by_name('up', down=up),
            spaces.KeyEvent.by_name('left', down=left),
            spaces.KeyEvent.by_name('right', down=right)]
def platform_vnc(up=False, left=False, right=False, space=False):
    """Return the VNC key events encoding one platformer action."""
    return [spaces.KeyEvent.by_name('up', down=up),
            spaces.KeyEvent.by_name('left', down=left),
            spaces.KeyEvent.by_name('right', down=right),
            spaces.KeyEvent.by_name('space', down=space)]
class SafeActionSpace(vectorized.Wrapper):
    """
    Recall that every universe environment receives a list of VNC events as action.
    There exist many environments for which the set of relevant action is much smaller
    and is known. For example, Atari environments have a modest number of keys,
    so this wrapper, when applied to an Atari environment will reduce its action space.
    Doing so is very convenient for research, since today's RL algorithms rely on random
    exploration, which is hurt by small action spaces. As our algorithms get better
    and we switch to using the raw VNC commands, this wrapper will become less important.
    """
    def __init__(self, env):
        super(SafeActionSpace, self).__init__(env)
        # Check for a missing spec first: the original code tested
        # `self.spec is None` only after dereferencing `self.spec.tags`,
        # which made that branch unreachable and crashed on a None spec.
        if self.spec is None:
            pass
        elif self.spec.tags.get('runtime') == 'gym-core':
            self.action_space = gym_core_action_space(self.spec._kwargs['gym_core_id'])
        elif self.spec.id in ['internet.SlitherIO-v0',
                              'internet.SlitherIOErmiyaEskandaryBot-v0',
                              'internet.SlitherIOEasy-v0']:
            self.action_space = spaces.Hardcoded([slither_vnc(left=True),
                                                  slither_vnc(right=True),
                                                  slither_vnc(space=True),
                                                  slither_vnc(left=True, space=True),
                                                  slither_vnc(right=True, space=True)])
        elif self.spec.id in ['flashgames.DuskDrive-v0']:
            # TODO: be more systematic
            self.action_space = spaces.Hardcoded([racing_vnc(up=True),
                                                  racing_vnc(left=True),
                                                  racing_vnc(right=True)])
        elif self.spec.id in ['flashgames.RedBeard-v0']:
            self.action_space = spaces.Hardcoded([platform_vnc(up=True),
                                                  platform_vnc(left=True),
                                                  platform_vnc(right=True),
                                                  platform_vnc(space=True)])
class SoftmaxClickMouse(vectorized.ActionWrapper):
    """
    Creates a Discrete action space of mouse clicks.

    This wrapper divides the active region into cells and creates an action for
    each which clicks in the middle of the cell.
    """
    # NOTE(review): `noclick_regions=[]` is a mutable default argument; it is
    # only read here so it is safe, but `None` would be the conventional choice.
    def __init__(self, env, active_region=(10, 75 + 50, 10 + 160, 75 + 210), discrete_mouse_step=10, noclick_regions=[]):
        super(SoftmaxClickMouse, self).__init__(env)
        logger.info('Using SoftmaxClickMouse with action_region={}, noclick_regions={}'.format(active_region, noclick_regions))
        # active_region is (xlow, ylow, xhigh, yhigh) in screen pixels.
        xlow, ylow, xhigh, yhigh = active_region
        xs = range(xlow, xhigh, discrete_mouse_step)
        ys = range(ylow, yhigh, discrete_mouse_step)
        self.active_region = active_region
        self.discrete_mouse_step = discrete_mouse_step
        self.noclick_regions = noclick_regions
        # One candidate click point per grid cell (the cell center, clamped to
        # stay inside the region); cells overlapping a noclick region are dropped.
        self._points = []
        removed = 0
        for x in xs:
            for y in ys:
                xc = min(x+int(discrete_mouse_step/2), xhigh-1) # click to center of a cell
                yc = min(y+int(discrete_mouse_step/2), yhigh-1)
                if any(self.is_contained((xc, yc), r) for r in noclick_regions):
                    removed += 1
                    continue
                self._points.append((xc, yc))
        logger.info('SoftmaxClickMouse noclick regions removed {} of {} actions'.format(removed, removed + len(self._points)))
        self.action_space = gym.spaces.Discrete(len(self._points))

    def _action(self, action_n):
        # Map a batch of discrete action indices to VNC pointer-event lists.
        return [self._discrete_to_action(int(i)) for i in action_n]

    def _discrete_to_action(self, i):
        # One click = release, press, release at the cell-center point.
        xc, yc = self._points[i]
        return [
            spaces.PointerEvent(xc, yc, buttonmask=0), # release
            spaces.PointerEvent(xc, yc, buttonmask=1), # click
            spaces.PointerEvent(xc, yc, buttonmask=0), # release
        ]

    def _reverse_action(self, action):
        """Map a raw VNC action back to a one-hot vector over click cells."""
        xlow, ylow, xhigh, yhigh = self.active_region
        try:
            # find first valid mousedown, ignore everything else
            click_event = next(e for e in action if isinstance(e, spaces.PointerEvent) and e.buttonmask == 1)
            index = self._action_to_discrete(click_event)
            if index is None:
                return np.zeros(len(self._points))
            else:
                # return one-hot vector, expected by demo training code
                # FIXME(jgray): move one-hot translation to separate layer
                return np.eye(len(self._points))[index]
        except StopIteration:
            # no valid mousedowns
            return np.zeros(len(self._points))

    def _action_to_discrete(self, event):
        """Snap a pointer event to its grid cell and return the action index."""
        assert isinstance(event, spaces.PointerEvent)
        x, y = event.x, event.y
        step = self.discrete_mouse_step
        xlow, ylow, xhigh, yhigh = self.active_region
        # NOTE(review): `step / 2` here is integer division on Python 2 only;
        # under Python 3 it yields a float, which would never match the
        # int-valued points built in __init__ -- confirm the target runtime.
        xc = min((int((x - xlow) / step) * step) + xlow + step / 2, xhigh - 1)
        yc = min((int((y - ylow) / step) * step) + ylow + step / 2, yhigh - 1)
        try:
            return self._points.index((xc, yc))
        except ValueError:
            # ignore clicks outside of active region or in noclick regions
            return None

    @classmethod
    def is_contained(cls, point, coords):
        # coords is (x, width, y, height); inclusive on all edges.
        px, py = point
        x, width, y, height = coords
        return x <= px <= x + width and y <= py <= y + height
|
mkaluza/python-enum | enum/__init__.py | Python | gpl-3.0 | 70 | 0 | from e | num import Enum, EnumFactory
# Public API of the enum package.
__all__ = ["Enum", "EnumFactory"]
|
igmhub/pyLyA | py/picca/fitter2/parser.py | Python | gpl-3.0 | 8,093 | 0.007167 | from functools import partial
import sys
import numpy as np
import os.path
from pkg_resources import resource_filename
# ConfigParser was renamed to configparser in Python 3; alias it so the rest
# of the module can use the Python 2 name on either interpreter.
if (sys.version_info > (3, 0)):
    # Python 3 code in this block
    import configparser as ConfigParser
else:
    import ConfigParser
from picca.utils import userprint
from . import data, utils, priors
def parse_chi2(filename):
    """Parse the top-level chi2 configuration file.

    Reads the fiducial power spectrum from the referenced FITS file, builds
    the per-dataset data objects, and collects optional sections (fast mc,
    minos, chi2 scan, Polychord, control) into one settings dictionary.
    """
    cp = ConfigParser.ConfigParser()
    cp.optionxform=str
    cp.read(filename)

    dic_init = {}

    dic_init['fiducial'] = {}
    # Resolve the fiducial Pk file: absolute/relative path first, then fall
    # back to the models shipped with the fitter2 package.
    p = cp.get('fiducial','filename')
    p = os.path.expandvars(p)
    if not os.path.isfile(p):
        p = resource_filename('picca', 'fitter2')+'/models/{}'.format(p)
    userprint('INFO: reading input Pk {}'.format(p))

    h = fitsio.FITS(p)
    zref = h[1].read_header()['ZREF']
    dic_init['fiducial']['zref'] = zref
    dic_init['fiducial']['Om'] = h[1].read_header()['OM']
    dic_init['fiducial']['OL'] = h[1].read_header()['OL']
    dic_init['fiducial']['k'] = h[1]['K'][:]
    dic_init['fiducial']['pk'] = h[1]['PK'][:]
    dic_init['fiducial']['pksb'] = h[1]['PKSB'][:]
    h.close()

    try: ## For Python2.7 compatibility
        dic_init['fiducial']['full-shape'] = int(cp['fiducial']['full-shape'])==1
    except (KeyError, AttributeError):
        dic_init['fiducial']['full-shape'] = False
    if dic_init['fiducial']['full-shape']:
        userprint('WARNING!!!: Using full-shape fit to the correlation function. Sailor you are reaching unexplored territories, precede at your own risk.')

    zeff = float(cp.get('data sets','zeff'))
    dic_init['data sets'] = {}
    dic_init['data sets']['zeff'] = zeff
    dic_init['data sets']['data'] = [data.data(parse_data(os.path.expandvars(d),zeff,dic_init['fiducial'])) for d in cp.get('data sets','ini files').split()]

    utils.cosmo_fit_func = getattr(utils, cp.get('cosmo-fit type','cosmo fit func'))

    dic_init['outfile'] = cp.get('output','filename')

    if 'verbosity' in cp.sections():
        dic_init['verbosity'] = int(cp.get('verbosity','level'))

    if 'hesse' in cp.sections():
        dic_init['hesse'] = int(cp.get('hesse','level'))==1

    if 'fast mc' in cp.sections():
        dic_init['fast mc'] = {}
        dic_init['fast mc']['fiducial'] = {}
        dic_init['fast mc']['fiducial']['values'] = {}
        dic_init['fast mc']['fiducial']['fix'] = {}
        for item, value in cp.items('fast mc'):
            if item in ['niterations','seed']:
                dic_init['fast mc'][item] = int(value)
            elif item=='forecast':
                # NOTE(review): bool(value) is True for any non-empty string
                # (including "0"/"False"); confirm this matches the intended
                # config semantics.
                dic_init['fast mc'][item] = bool(value)
            elif item=='covscaling':
                value = value.split()
                dic_init['fast mc'][item] = np.array(value).astype(float)
                # One scaling factor is required per data set.
                if not len(dic_init['fast mc'][item])==len(dic_init['data sets']['data']):
                    raise AssertionError()
            else:
                # Any other key is a fiducial parameter: "<value> fixed|free".
                value = value.split()
                dic_init['fast mc']['fiducial']['values'][item] = float(value[0])
                if not value[1] in ['fixed','free']:
                    raise AssertionError()
                dic_init['fast mc']['fiducial']['fix']['fix_'+item] = value[1] == 'fixed'

    if cp.has_section('minos'):
        dic_init['minos'] = {}
        for item, value in cp.items('minos'):
            if item=='sigma':
                value = float(value)
            elif item=='parameters':
                value = value.split()
            dic_init['minos'][item] = value

    if cp.has_section('chi2 scan'):
        dic_init['chi2 scan'] = parse_chi2scan(cp.items('chi2 scan'))

    # Extract the settings for the sampler
    # These are just passed to PolyChord
    if cp.has_section('Polychord'):
        dic_init['Polychord'] = cp['Polychord']

    # Extract control settings. Used by the control classes
    if cp.has_section('control'):
        dic_init['control'] = cp['control']

    return dic_init
def parse_data(filename,zeff,fiducial):
    """Parse one per-dataset ini file into a settings dictionary.

    Combines the file's [data], [cuts], [model] and optional sections with
    the effective redshift and the fiducial cosmology, and registers any
    [priors] entries in the global priors.prior_dic.
    """
    cp = ConfigParser.ConfigParser()
    cp.optionxform=str
    cp.read(filename)

    dic_init = {}

    dic_init['data'] = {}
    userprint("INFO: reading {}".format(filename))
    for item, value in cp.items('data'):
        # Bug fix: the original tested `value == "rt_binsize"` (a copy-paste
        # slip), so rt_binsize was never converted to float.
        if item == "rp_binsize" or item == "rt_binsize":
            value = float(value)
        if item == "ell-max":
            value = int(value)
        dic_init['data'][item] = value

    dic_init['cuts'] = {}
    for item, value in cp.items('cuts'):
        dic_init['cuts'][item] = float(value)

    # Model settings start from the fiducial cosmology and may be overridden
    # by the [model] section.
    dic_init['model'] = {}
    dic_init['model']['zeff'] = zeff
    dic_init['model']['zref'] = fiducial['zref']
    dic_init['model']['Om'] = fiducial['Om']
    dic_init['model']['OL'] = fiducial['OL']
    dic_init['model']['pk'] = fiducial['pk']
    for item, value in cp.items('model'):
        dic_init['model'][item] = value

    if 'hcd_model' in cp.sections():
        dic_init['hcd_model'] = {}
        for item, value in cp.items('hcd_model'):
            dic_init['hcd_model'][item] = value

    # Each parameter line is "<value> <error> <lim_inf> <lim_sup> fixed|free".
    dic_init['parameters'] = {}
    dic_init['parameters']['values'] = {}
    dic_init['parameters']['errors'] = {}
    dic_init['parameters']['limits'] = {}
    dic_init['parameters']['fix'] = {}
    for item, value in cp.items('parameters'):
        value = value.split()
        dic_init['parameters']['values'][item] = float(value[0])
        dic_init['parameters']['errors']['error_'+item] = float(value[1])
        lim_inf = None
        lim_sup = None
        if value[2] != 'None': lim_inf = float(value[2])
        if value[3] != 'None': lim_sup = float(value[3])
        dic_init['parameters']['limits']['limit_'+item]=(lim_inf,lim_sup)
        assert value[4] == 'fixed' or value[4] == 'free'
        dic_init['parameters']['fix']['fix_'+item] = value[4] == 'fixed'

    if 'metals' in cp.sections():
        dic_init['metals']={}
        for item, value in cp.items('metals'):
            dic_init['metals'][item] = value
        if 'in tracer1' in dic_init['metals']:
            dic_init['metals']['in tracer1'] = dic_init['metals']['in tracer1'].split()
        if 'in tracer2' in dic_init['metals']:
            dic_init['metals']['in tracer2'] = dic_init['metals']['in tracer2'].split()

    if 'broadband' in cp.sections():
        dic_init['broadband'] = []
        for item, value in cp.items('broadband'):
            # Each broadband line: "add|mul pre|pos rp,rt|r,mu
            # <min:max:step in r> <min:max:step in mu> [func]".
            dic_bb = {}
            value = value.split()
            assert value[0] == 'add' or value[0] == 'mul'
            dic_bb['type'] = value[0]
            assert value[1] == 'pre' or value[1] == 'pos'
            dic_bb['pre'] = value[1]
            assert value[2]=='rp,rt' or value[2]=='r,mu'
            dic_bb['rp_rt'] = value[2]

            if len(value)==6:
                dic_bb['func'] = value[5]
            else:
                dic_bb['func'] = 'broadband'

            deg_r_min,deg_r_max,ddeg_r = value[3].split(':')
            dic_bb['deg_r_min'] = int(deg_r_min)
            dic_bb['deg_r_max'] = int(deg_r_max)
            dic_bb['ddeg_r'] = int(ddeg_r)

            deg_mu_min, deg_mu_max, ddeg_mu = value[4].split(':')
            dic_bb['deg_mu_min'] = int(deg_mu_min)
            dic_bb['deg_mu_max'] = int(deg_mu_max)
            dic_bb['ddeg_mu'] = int(ddeg_mu)
            dic_init['broadband'].append(dic_bb)

    if 'priors' in cp.sections():
        for item, value in cp.items('priors'):
            if item in priors.prior_dic.keys():
                userprint("WARNING: prior on {} will be overwritten".format(item))
            value = value.split()
            # Bind the prior function with its parameters; registered globally.
            priors.prior_dic[item] = partial(getattr(priors, value[0]), prior_pars=np.array(value[1:]).astype(float), name=item)

    return dic_init
def parse_chi2scan(items):
    """Parse the ``[chi2 scan]`` section into scan-grid descriptors.

    Each item maps a parameter name to a string "min max nb_bin"; the result
    maps the name to a dict holding the parsed bounds, the bin count and the
    corresponding linear grid (endpoints included). At most two parameters
    may be scanned.
    """
    assert len(items) == 1 or len(items) == 2
    scans = {}
    for name, spec in items:
        fields = spec.split()
        lo = float(fields[0])
        hi = float(fields[1])
        nbin = int(fields[2])
        scans[name] = {
            'min': lo,
            'max': hi,
            'nb_bin': nbin,
            'grid': np.linspace(lo, hi, num=nbin, endpoint=True),
        }
    return scans
|
hzlf/openbroadcast | website/apps/actstream/views.py | Python | gpl-3.0 | 6,439 | 0.004659 | from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse
from django.views.generic import DetailView, ListView, FormView, UpdateView
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
from django.contrib.auth.models import User
from actstream import actions, models
from pure_pagination.mixins import PaginationMixin
from pure_pagination import Paginator, EmptyPage, PageNotAnInteger
from actstream.models import *
# Allowed items-per-page choices and the default, overridable via settings.
PAGINATE_BY = getattr(settings, 'ACTSTREAM_PAGINATE_BY', (30,60,120))
PAGINATE_BY_DEFAULT = getattr(settings, 'ACTSTREAM_PAGINATE_BY_DEFAULT', 30)
class ActionListView(PaginationMixin, ListView):
    """Paginated list of activity Actions, optionally filtered by username."""
    context_object_name = "action_list"
    # template_name = "alibrary/artist_list.html"
    paginate_by = PAGINATE_BY_DEFAULT

    def get_paginate_by(self, queryset):
        # Honour an ?ipp=<n> query parameter, but only if it is one of the
        # allowed page sizes; anything else falls back to the default.
        ipp = self.request.GET.get('ipp', None)
        if ipp:
            try:
                if int(ipp) in PAGINATE_BY:
                    return int(ipp)
            # Broad catch is deliberate: a malformed ipp silently falls back.
            except Exception, e:
                pass
        return self.paginate_by

    def get_queryset(self):
        kwargs = {}
        qs = Action.objects.filter(**kwargs)
        # Optional ?username=<name> filter restricts to that actor's actions.
        user_filter = self.request.GET.get('username', None)
        if user_filter:
            user = get_object_or_404(User, username=user_filter)
            qs = qs.filter(actor_object_id=user.pk).distinct()
        return qs

    def get_context_data(self, **kwargs):
        context = super(ActionListView, self).get_context_data(**kwargs)
        # Also expose the logged-in user's own activity stream.
        context['user_stream'] = actor_stream(self.request.user)
        return context
class ActionDetailView(DetailView):
    """Detail view for a single activity ``Action``."""
    context_object_name = "action"
    model = Action

    def render_to_response(self, context):
        # Force a plain text/html response regardless of content negotiation.
        return super(ActionDetailView, self).render_to_response(context, mimetype="text/html")

    def get_context_data(self, **kwargs):
        context = super(ActionDetailView, self).get_context_data(**kwargs)
        return context
def respond(request, code):
    """
    Responds to the request with the given response code.
    If ``next`` is in the form, it will redirect instead.
    """
    # NOTE(review): request.REQUEST was removed in Django 1.9; confirm the
    # Django version this app targets.
    if 'next' in request.REQUEST:
        return HttpResponseRedirect(request.REQUEST['next'])
    # Dynamically build an HttpResponse subclass carrying the status code.
    return type('Response%d' % code, (HttpResponse, ), {'status_code': code})()
@login_required
@csrf_exempt
def follow_unfollow(request, content_type_id, object_id, do_follow=True, actor_only=True):
    """
    Creates or deletes the follow relationship between ``request.user`` and the
    actor defined by ``content_type_id``, ``object_id``.
    """
    ctype = get_object_or_404(ContentType, pk=content_type_id)
    actor = get_object_or_404(ctype.model_class(), pk=object_id)
    if do_follow:
        actions.follow(request.user, actor, actor_only=actor_only)
        return respond(request, 201) # CREATED
    actions.unfollow(request.user, actor)
    return respond(request, 204) # NO CONTENT
@login_required
def stream(request):
    """
    Index page for authenticated user's activity stream. (Eg: Your feed at
    github.com)
    """
    # Template tuple: first existing template wins.
    return render_to_response(('actstream/actor.html', 'activity/actor.html'), {
        'ctype': ContentType.objects.get_for_model(User),
        'actor': request.user, 'action_list': models.user_stream(request.user)
    }, context_instance=RequestContext(request))
def followers(request, content_type_id, object_id):
    """
    Creates a listing of ``User``s that follow the actor defined by
    ``content_type_id``, ``object_id``.
    """
    ctype = get_object_or_404(ContentType, pk=content_type_id)
    actor = get_object_or_404(ctype.model_class(), pk=object_id)
    return render_to_response(('actstream/followers.html', 'activity/followers.html'), {
        'followers': models.followers(actor), 'actor': actor
    }, context_instance=RequestContext(request))
def following(request, user_id):
    """
    Returns a list of actors that the user identified by ``user_id`` is following (eg who im following).
    """
    user = get_object_or_404(User, pk=user_id)
    return render_to_response(('actstream/following.html', 'activity/following.html'), {
        'following': models.following(user), 'user': user
    }, context_instance=RequestContext(request))
def user(request, username):
    """
    ``User`` focused activity stream. (Eg: Profile page twitter.com/justquick)
    """
    # Only active accounts have a public stream.
    user = get_object_or_404(User, username=username, is_active=True)
    return render_to_response(('actstream/actor.html', 'activity/actor.html'), {
        'ctype': ContentType.objects.get_for_model(User),
        'actor': user, 'action_list': models.user_stream(user)
    }, context_instance=RequestContext(request))
def detail(request, action_id):
    """
    ``Action`` detail view (pretty boring, mainly used for get_absolute_url)
    """
    return render_to_response(('actstream/detail.html', 'activity/detail.html'), {
        'action': get_object_or_404(models.Action, pk=action_id)
    }, context_instance=RequestContext(request))
def actor(request, content_type_id, object_id):
    """
    ``Actor`` focused activity stream for actor defined by ``content_type_id``,
    ``object_id``.
    """
    ctype = get_object_or_404(ContentType, pk=content_type_id)
    actor = get_object_or_404(ctype.model_class(), pk=object_id)
    return render_to_response(('actstream/actor.html', 'activity/actor.html'), {
        'action_list': models.actor_stream(actor), 'actor': actor,
        'ctype': ctype
    }, context_instance=RequestContext(request))
def model(request, content_type_id):
    """
    Model-level activity stream: all actions whose actor is any instance of
    the model identified by ``content_type_id``.
    """
    ctype = get_object_or_404(ContentType, pk=content_type_id)
    # Note: the model class itself (not an instance) is passed as the actor.
    actor = ctype.model_class()
    return render_to_response(('actstream/actor.html', 'activity/actor.html'), {
        'action_list': models.model_stream(actor), 'ctype': ctype,
        'actor': ctype
    }, context_instance=RequestContext(request))
|
catapult-project/catapult | trace_processor/trace_uploader/appengine_config.py | Python | bsd-3-clause | 1,441 | 0.004858 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""App Engine config.
This module is loaded before others and can be used to set up the
App Engine environment. See:
https://cloud.google.com/appengine/docs/python/tools/appengineconfig
"""
import os
from google.appengine.ext import vendor
# Allow the appstats interactive shell in this App Engine app.
appstats_SHELL_OK = True

# Directories in catapult/third_party required by uploader/corpus cleanup.
THIRD_PARTY_LIBRARIES = [
    'apiclient',
    'cloudstorage',
    'uritemplate',
]
# Libraries bundled with the App Engine SDK.
THIRD_PARTY_LIBRARIES_IN_SDK = [
    'httplib2',
    'oauth2client',
    'six',
]
def _AddThirdPartyLibraries():
"""Registers the third party libraries with App Engine.
In order for third-party libraries to be available in the App Engine
runtime environment, they must be added | with vendor.add. The directories
added this way must be inside the App Engine project directory.
"""
# The deploy script is expected to add links to third party libraries
# before deploying. If the directories aren't there (e.g. when running tests)
# then just ignore it.
for library_dir in (THIRD_PARTY_LIBRARIES +
THIRD_PARTY_LIBRARIES_IN_SDK):
if os.path.exists(library_dir):
| vendor.add(os.path.join(os.path.dirname(__file__), library_dir))
_AddThirdPartyLibraries()
|
google/yapf | yapftests/__init__.py | Python | apache-2.0 | 596 | 0 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "Lice | nse");
# you may not use this file except in compliance with the License.
# You may o | btain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
orangain/helloscrapy | helloscrapy/settings.py | Python | mit | 525 | 0.00381 | # Scrapy settings for helloscrapy project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http:// | doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'helloscr | apy'
SPIDER_MODULES = ['helloscrapy.spiders']
NEWSPIDER_MODULE = 'helloscrapy.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'helloscrapy (+http://www.yourdomain.com)'
DOWNLOAD_DELAY = 3
ROBOTSTXT_OBEY = True
|
coddingtonbear/jirafs | jirafs/commands/create.py | Python | mit | 2,936 | 0.001362 | from jirafs import utils
from jirafs.plugin import CommandPlugin
from jirafs.utils import run_command_method_with_kwargs
class Command(CommandPlugin):
"""Create a new Jira issue"""
MIN_VERSION = "2.0.0"
MAX_VERSION = "3.0.0"
AUTOMATICALLY_INSTANTIATE_FOLDER = False
FIELDS = (
{
"name": "project",
"prompt": "Project",
"required": True,
"path": "project.key",
},
{
"name": "issuetype",
"prompt": "Issue Type",
"default": "Task",
"path": "issuetype.name",
}, |
{"name": "summary", "prompt": "Summary", "required": True},
{"name": "description", "prompt": "Description", "required": False},
)
def set_field_value(self, data, field, value):
starting_reference = data
path = field.get("path", field["name"])
for part in path.split("."):
if part not in data:
data[part] = {}
last_reference = data
data = data[part]
| last_reference[path.split(".")[-1]] = value
return starting_reference
def prompt_for_input(self, field):
while True:
if field.get("default"):
value = input(
"%s (%s): "
% (
field.get("prompt"),
field.get("default"),
)
)
else:
value = input("%s: " % (field.get("prompt")))
value = value.strip()
if value:
return value
elif not field.get("required") and field.get("default"):
return field.get("default")
elif not field.get("required"):
return value
def main(self, args, jira, path, parser, **kwargs):
server = args.server
if not server:
server = utils.get_default_jira_server()
issue_data = {}
for field in self.FIELDS:
if getattr(args, field["name"]) is not None:
self.set_field_value(issue_data, field, getattr(args, field["name"]))
elif args.quiet:
self.set_field_value(issue_data, field, field.get("default"))
else:
self.set_field_value(issue_data, field, self.prompt_for_input(field))
jira_client = jira(server)
issue = jira_client.create_issue(issue_data)
return run_command_method_with_kwargs(
"clone",
path=None,
url=issue.permalink(),
jira=jira,
)
def add_arguments(self, parser):
parser.add_argument("--server", default=None)
parser.add_argument("--quiet", "-q", default=False, action="store_true")
for argument in self.FIELDS:
parser.add_argument("--%s" % argument["name"], default=None, type=str)
|
drx/rom-info | handlers/dreamcast.py | Python | mit | 4,779 | 0.003139 | import mmap
import os.path
import re
from collections import OrderedDict
from .base_handler import BaseHandler
from .iso9660 import ISO9660Handler
from utils import MmappedFile, ConcatenatedFile
class GDIParseError(ValueError):
pass
class GDIHandler(BaseHandler):
def test(self):
if not re.match('^.*\.gdi', self.file_name, re.IGNORECASE):
return False
try:
self.parse()
except GDIParseError:
return False
return True
def parse(self):
text = self.read(0, 8*1024)
lines = text.decode('ascii').splitlines()
if len(lines) == 1:
raise GDIParseError
try:
n_tracks = int(lines.pop(0))
except ValueError:
raise GDIParseError
if len(lines) != n_tracks:
print(len(lines), n_tracks)
raise GDIParseError
# TODO figure out complete format
tracks = []
for track_i, line in enumerate(lines):
try:
match = re.match('(?P<index>\d+) (?P<sector>\d+) (?P<type>\d+) (?P<sector_size>\d+)'
' (?P<file_name>\S+) (\d+)', line)
if not match:
raise GDIParseError
track = match.groupdict()
for key in ('index', 'sector', 'type', 'sector_size'):
track[key] = int(track[key])
if track['index'] != track_i + 1:
raise GDIParseError
tracks.append(track)
except ValueError:
raise GDIParseError
return tracks
def get_info(self):
tracks = self.parse()
for track in tracks:
track['path'] = os.path.join(os.path.dirname(self.file_name), track['file_name'])
if len(tracks) > 3 and tracks[2]['type'] == 4 and tracks[-1]['type'] == 4:
# Dreamcast discs often contain two data tracks (track 3 and the last track) in addition to track 1.
mixed_mode = True
else:
mixed_mode = False
track_info = OrderedDict()
for track in tracks:
if mixed_mode and track == tracks[-1]:
continue
if mixed_mode and track['index'] == 3:
last_track = tracks[-1]
offset_gap = (last_track['sector'] - track['sector']) * 2352
track_name = 'Track {}+{}'.format(track['index'], last_track['index'])
file = ConcatenatedFile(file_names=[track['path'], last_track['path']],
offsets=[0, offset_gap]) # TODO handle different sector sizes
else:
track_name = 'Track {}'.format(track['index'])
file = MmappedFile(track['path'])
with file:
if track['type'] == 4:
handler = DCDataTrackHandler(file=file, file_name=track['file_name'], sector_offset=track['sector'], track_name=track_name)
if handler.test():
handler.get_info()
track_info[track_name] = handler.info
else:
track_info[track_name] = 'Data track in unknown format'
elif track['type'] == 0:
track_info[track_name] = 'Audio track'
else:
track_info[track_name] = 'Unknown'
self.info['Tracks'] = track_info
class DCDataTrackHandler(ISO9660Handler):
def test | (self):
if not super().test():
return False
if self.read(0, 16) == b'SEGA SEGAKATANA ':
return True
else:
return False
def get_info(self):
header_info = OrderedDict()
header_info['Hardware ID'] = self.unpack('string' | , 0x00, 16, 0)
header_info['Maker ID'] = self.unpack('string', 0x10, 16, 0)
header_info['CRC'] = self.unpack('string', 0x20, 4, 0)
header_info['Device'] = self.unpack('string', 0x25, 6, 0)
header_info['Disc'] = self.unpack('string', 0x2b, 3, 0)
header_info['Region'] = self.unpack('string', 0x30, 8, 0).strip()
header_info['Peripherals'] = self.unpack('string', 0x38, 8, 0)
header_info['Product number'] = self.unpack('string', 0x40, 10, 0)
header_info['Product version'] = self.unpack('string', 0x4a, 6, 0)
header_info['Release date'] = self.unpack('string', 0x50, 16, 0)
header_info['Boot file'] = self.unpack('string', 0x60, 16, 0)
header_info['Company name'] = self.unpack('string', 0x70, 16, 0)
header_info['Software name'] = self.unpack('string', 0x80, 16, 0)
self.info['Header'] = header_info
super().get_info()
|
floppp/programming_challenges | project_euler/001-050/25.py | Python | mit | 344 | 0.011628 | import time
start = time.time()
def fib():
a, b = 0, 1
while True:
yield a
| a, b = b, a + b
f = fib()
for i in range(10000):
d = next(f)
if len(str(d)) == 1000:
print(i)
break
print("t = {:.5f} seg".format(time.time() - start))
# 4782
# t = 0. | 03068 seg
10
print(len(str(10)))
# 2 |
isaac-philip/loolu | common/django/contrib/admin/views/decorators.py | Python | mit | 3,535 | 0.002263 | import base64
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.3, 2.4 fallback.
from django import http, template
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django.shortcuts import render_to_response
from django.utils.translation import ugettext_lazy, ugettext as _
ERROR_MESSAGE = ugettext_lazy("Please enter a correct username and password. Note that both fields are case-sensitive.")
LOGIN_FORM_KEY = 'this_is_the_login_form'
def _display_login_form(request, error_message=''):
request.session.set_test_cookie()
return render_to_response('admin/login.html', {
'title': _('Log in'),
'app_path': request.get_full_path(),
'error_message': error_message
}, context_instance=template.RequestContext(request))
def staff_member_required(view_func):
"""
Decorator for views that checks that the user is logged in and is a staff
member, displaying the login page if necessary.
"""
def _checklogin(request, *args, **kwargs):
if request.user.is_authenticated() and request.user.is_staff:
# The user is valid. Continue to the admin page.
return view_func(request, *args, **kwargs)
assert hasattr(request, 'session'), "The Django admin requires session middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'."
# If this isn't already the login page, display it.
if LOGIN_FORM_KEY not in request.POST:
if request.POST:
message = _("Please log in again, because your session has expired.")
else:
message = ""
return _display_login_form(request, message)
# Check that the user accepts cookies.
if not request.session.test_cookie_worked():
message = _("Looks like your browser isn't configured to accept cookies. Please enable cookies, reload this page, and try again.")
return _display_login_form(request, message)
else:
request.session.delete_test_cookie()
# Check the password.
username = request.POST.get('username', None)
password = request.POST.get('password', None)
user = authenticate(username=username, password=password)
if user is None:
message = ERROR_MESSAGE
if '@' in username:
# Mistakenly entered e-mail address instead of username? Look it up.
users = list(User.all().filter('email =', username))
if len(users) == 1 and users[0].check_password(password):
message = _("Your e-mail address is not your username. Try '%s' instead.") % users[0].username
else:
# Either | we cannot find the user, or if more than 1
# we cannot guess which user is the correct one.
message = _("Usernames cannot contain the '@' character.")
return _display_login_form(request, message)
# The user data is correct; log in the user in and continue.
else:
if user.is_active and user.is_staff:
login(request, user)
return http.HttpR | esponseRedirect(request.get_full_path())
else:
return _display_login_form(request, ERROR_MESSAGE)
return wraps(view_func)(_checklogin)
|
Janhouse/usb-guard | usb-guard.py | Python | mit | 16,696 | 0.012578 | #!/usr/bin/env python2
#
# Copyright 2012 Janis Jansons (janis.jansons@janhouse.lv)
#
import gi
# make sure you use gtk+ 3
gi.require_version('Gtk', '3.0')
import sys, os
import struct
from gi.repository import GObject, Polkit, GLib, Notify, GUdev, Gio, Gtk
class UsbGuard:
def __init__(self):
print "Starting USB Guard"
### Udev
# empty array for all subsystems, char array for subsystem e.g. ["usb","video4linux"] etc
self.client = GUdev.Client (subsystems=[])
# or client = GUdev.Client.new ([])
# Start listening to udev events
self.client.connect("uevent", self.on_uevent)
self.devices = self.client.query_by_subsystem("usb")
### Random stuff
# USB device relation store
self.devtree={}
self.notifications={}
### Main loop
self.mainloop = GLib.MainLoop ()
### Systray stuff
self.statusicon = Gtk.StatusIcon()
self.statusicon.set_from_icon_name("security-medium")
self.statusicon.set_tooltip_text("USB Guard")
#self.statusicon.connect("popup-menu", self.OnShowPopupMenu)
### Polkit
"""
#mainloop = GObject.MainLoop()
authority = Polkit.Authority.get()
subject = Polkit.UnixProcess.new(os.getppid())
cancellable = Gio.Cancellable()
GObject.timeout_add(10 * 1000, do_cancel, cancellable)
authority.check_authorization(subject,
"org.freedesktop.policykit.exec", #"org.freedesktop.policykit.exec",
None,
Polkit.CheckAuthorizationFlags.ALLOW_USER_INTERACTION,
cancellable,
check_authorization_cb,
mainloop)
"""
### Main GTK window
self.builder = Gtk.Builder()
self.builder.add_from_file("main.ui")
self.window = self.builder.get_object("window1")
self.style = self.window.get_style_context()
# pack the table into the scrolled window
self.container = self.builder.get_object("dev_container")#.add_with_viewport(table)
self.container_box = Gtk.Box(spacing=10, orientation=Gtk.Orientation.VERTICAL)
self.container.add_with_viewport(self.container_box)
self.container_box.show()
###############
self.sort_devices()
#for device in self.devices:
# self.print_device(device)
###############
# Make the main window visible
self.window.show()
# Start the main loop
self.mainloop.run ()
return
def print_device(self, device):
print "subsystem", device.get_subsystem()
print "devtype", device.get_devtype()
print "name", device.get_name()
print "number", device.get_number()
print "sysfs_path:", device.get_sysfs_path()
print "driver:", device.get_driver()
print "action:", device.get_action()
print "seqnum:", device.get_seqnum()
print "device type:", device.get_device_type()
print "device number:", device.get_device_number()
print "device file:", device.get_device_file()
print "device file symlinks:", ", ".join(device.get_device_file_symlinks())
print "device keys:", ", ".join(device.get_property_keys())
#for device_key in device.get_property_keys():
# print " device property %s: %s" % (device_key, device.get_property(device_key))
def on_tensec_timeout(self, loop):
print("Ten seconds have passed. Now exiting.")
loop.quit()
return False
def device_enabled(self, path):
f = open(path+'authorized', 'r')
enabled=f.read().rstrip()
f.close
#print enabled
if enabled == '1':
return True
elif enabled == '0':
return False
def check_authorization_cb(self, authority, res, loop):
try:
result = authority.check_authorization_finish(res)
if result.get_is_authorized():
print("Authorized")
elif result.get_is_challenge():
print("Challenge")
else:
print("Not authorized")
except GObject.GError as error:
print("Error checking authorization: %s" % error.message)
print("Authorization check has been cancelled "
"and the dialog should now be hidden.\n"
"This process will exit in ten seconds.")
GObject.timeout_add(10000, on_tensec_timeout, loop)
def do_cancel(self ,cancellable):
print("Timer has expired; cancelling authorization check")
cancellable.cancel()
return False
def action_whitelist(self, notifyObj, two, three):
print "Device added to whitelist."
notifyObj.close()
def action_once(self, notifyObj, two, three):
print "Enabling device once."
notifyObj.close()
def action_toggle(self, button, something, dev):
if not dev in self.devtree:
print "Device "+dev+" not found in device list"
return False
try:
if self.devtree[dev]['enabled'] == True:
self.action_disable(dev)
button.set_label("Enable device")
else:
self.action_enable(dev)
button.set_label("Disable device")
except RuntimeError:
button.set_label("Error...")
raise
#else:
#button.set_label("Error...")
#print "Clicked button"
def action_enable(self, dev):
self.action_set(self.devtree[dev]['path']+'authorized', '1')
self.devtree[dev]['enabled'] = True
def action_disable(self, dev):
self.action_set(self.devtree[dev]['path'] | +'authorized', '0')
self.devtree[dev]['enabled'] = False
def action_set(self, path, value):
f = open(path, 'w')
enabled=f.write(value)
f.close
def device_vendor_string(self, device):
if device.get_property("ID_VENDOR_FROM_DATABASE"):
return device.get_property("ID_VENDOR_FROM_DATABASE")
elif device.get_property("ID_VENDOR_ENC"):
return device.get_property("ID_VENDOR_ENC")
|
else:
return device.get_property("ID_VENDOR")
def device_model_string(self, device):
if device.get_property("ID_PRODUCT_FROM_DATABASE"):
return device.get_property("ID_PRODUCT_FROM_DATABASE")
elif device.get_property("ID_MODEL_ENC"):
return device.get_property("ID_MODEL_ENC")
else:
return device.get_property("ID_MODEL")
def on_change(self, device):
if not device.get_property("DEVNUM"):
return False
devname = get_dev_path(device)
if device_is_hub(device):
# check if authorized_default has changed
print "Checking authorized_default state"
# check if device authorized state has changed
print "Checking device state"
def on_remove(self, device):
if not device.get_property("DEVNUM"):
return False
devname = get_dev_path(device)
if devname in self.notifications:
del self.notifications[devname]
def on_add(self, device):
if not device.get_property("DEVNUM"):
return False
devname = get_dev_path(device)
if devname not in self.notifications:
text="%s\n%s" % ( self.device_vendor_string(device), self.device_model_string(device) )
Notify.init('My Application Name')
self.notifications[devname]=(device)
self.notifications[devname] = Notify.Notification.new(
'New USB device connected',
text,
'security-medium'
)
self.notifications[devname].add_action('whitelis |
jrleeman/MetPy | metpy/plots/ctables.py | Python | bsd-3-clause | 8,974 | 0.002006 | # Copyright (c) 2014,2015,2017 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Work with custom color tables.
Contains a tools for reading color tables from files, and creating instances based on a
specific set of constraints (e.g. step size) for mapping.
.. plot::
import numpy as np
import matplotlib.pyplot as plt
import metpy.plots.ctables as ctables
def plot_color_gradients(cmap_category, cmap_list, nrows):
fig, axes = plt.subplots(figsize=(7, 6), nrows=nrows)
fig.subplots_adjust(top=.93, bottom=0.01, left=0.32, right=0.99)
axes[0].set_title(cmap_category + ' colormaps', fontsize=14)
for ax, name in zip(axes, cmap_list):
ax.imshow(gradient, aspect='auto', cmap=ctables.registry.get_colortable(name))
pos = list(ax.get_position().bounds)
x_text = pos[0] - 0.01
y_text = pos[1] + pos[3]/2.
fig.text(x_text, y_text, name, va='center', ha='right', fontsize=10)
# Turn off *all* ticks & spines, not just the ones with colormaps.
for ax in axes:
ax.set_axis_off()
cmaps = list(ctables.registry)
cmaps = [name for name in cmaps if name[-2:]!='_r']
nrows = len(cmaps)
gradient = np.linspace(0, 1, 256)
gradient = np.vstack((gradient, gradient))
plot_color_gradients('MetPy', cmaps, nrows)
plt.show()
"""
from __future__ import division
import ast
import contextlib
import glob
import logging
import os.path
import posixpath
import matplotlib.colors as mcolors
from pkg_resources import resource_listdir, resource_stream
from ..package_tools import Exporter
exporter = Exporter(globals())
TABLE_EXT = '.tbl'
log = logging.getLogger(__name__)
def _parse(s):
if hasattr(s, 'decode'):
s = s.decode('ascii')
if not s.startswith('#'):
return ast.literal_eval(s)
return None
@exporter.export
def read_colortable(fobj):
r"""Read colortable information from a file.
Reads a colortable, which consists of one color per line of the file, where
a color can be one of: a tuple of 3 floats, a string with a HTML color name,
or a string with a HTML hex color.
Parameters
----------
fobj : a file-like object
A file-like object to read the colors from
Returns
-------
List of tuples
A list of the RGB color values, where each RGB color is a tuple of 3 floats in the
range of [0, 1].
"""
ret = []
try:
for line in fobj:
literal = _parse(line)
if literal:
ret.append(mcolors.colorConverter.to_rgb(literal))
return ret
except (SyntaxError, ValueError):
raise RuntimeError('Malformed colortable.')
def convert_gempak_table(infile, outfile):
r"""Convert a GEMPAK color table to one MetPy can read.
Reads lines from a GEMPAK-style color table file, and writes them to another file in
a format that MetPy can parse.
Parameters
----------
infile : file-like object
The file-like object to read from
outfile : file-like object
The file-like object to write to
"""
for line in infile:
if not line.startswith('!') and line.strip():
r, g, b = map(int, line.split())
outfile | .write('({0:f}, {1:f}, {2:f})\n'.format(r / 255, g / 255, b / 255))
class ColortableRegistr | y(dict):
r"""Manages the collection of color tables.
Provides access to color tables, read collections of files, and generates
matplotlib's Normalize instances to go with the colortable.
"""
def scan_resource(self, pkg, path):
r"""Scan a resource directory for colortable files and add them to the registry.
Parameters
----------
pkg : str
The package containing the resource directory
path : str
The path to the directory with the color tables
"""
for fname in resource_listdir(pkg, path):
if fname.endswith(TABLE_EXT):
table_path = posixpath.join(path, fname)
with contextlib.closing(resource_stream(pkg, table_path)) as stream:
self.add_colortable(stream,
posixpath.splitext(posixpath.basename(fname))[0])
def scan_dir(self, path):
r"""Scan a directory on disk for color table files and add them to the registry.
Parameters
----------
path : str
The path to the directory with the color tables
"""
for fname in glob.glob(os.path.join(path, '*' + TABLE_EXT)):
if os.path.isfile(fname):
with open(fname, 'r') as fobj:
try:
self.add_colortable(fobj, os.path.splitext(os.path.basename(fname))[0])
log.debug('Added colortable from file: %s', fname)
except RuntimeError:
# If we get a file we can't handle, assume we weren't meant to.
log.info('Skipping unparsable file: %s', fname)
def add_colortable(self, fobj, name):
r"""Add a color table from a file to the registry.
Parameters
----------
fobj : file-like object
The file to read the color table from
name : str
The name under which the color table will be stored
"""
self[name] = read_colortable(fobj)
self[name + '_r'] = self[name][::-1]
def get_with_steps(self, name, start, step):
r"""Get a color table from the registry with a corresponding norm.
Builds a `matplotlib.colors.BoundaryNorm` using `start`, `step`, and
the number of colors, based on the color table obtained from `name`.
Parameters
----------
name : str
The name under which the color table will be stored
start : float
The starting boundary
step : float
The step between boundaries
Returns
-------
`matplotlib.colors.BoundaryNorm`, `matplotlib.colors.ListedColormap`
The boundary norm based on `start` and `step` with the number of colors
from the number of entries matching the color table, and the color table itself.
"""
from numpy import arange
# Need one more boundary than color
num_steps = len(self[name]) + 1
boundaries = arange(start, start + step * num_steps, step)
return self.get_with_boundaries(name, boundaries)
def get_with_range(self, name, start, end):
r"""Get a color table from the registry with a corresponding norm.
Builds a `matplotlib.colors.BoundaryNorm` using `start`, `end`, and
the number of colors, based on the color table obtained from `name`.
Parameters
----------
name : str
The name under which the color table will be stored
start : float
The starting boundary
end : float
The ending boundary
Returns
-------
`matplotlib.colors.BoundaryNorm`, `matplotlib.colors.ListedColormap`
The boundary norm based on `start` and `end` with the number of colors
from the number of entries matching the color table, and the color table itself.
"""
from numpy import linspace
# Need one more boundary than color
num_steps = len(self[name]) + 1
boundaries = linspace(start, end, num_steps)
return self.get_with_boundaries(name, boundaries)
def get_with_boundaries(self, name, boundaries):
r"""Get a color table from the registry with a corresponding norm.
Builds a `matplotlib.colors.BoundaryNorm` using `boundaries`.
Parameters
----------
name : str
The name under which the color table will be stored
boundaries : array_like
The list of boundaries for the norm
Returns
-------
`matplotlib.colors.BoundaryNorm`, `matplotlib.color |
soarlab/FPTaylor | scripts/plot/gappa_data.py | Python | mit | 6,162 | 0.001623 | #!/usr/bin/env python
import sys
import os
import re
import glob
import shutil
import argparse
from fractions import *
import common
log = common.get_log()
# Global paths
base_path = os.path.dirname(os.path.normpath(sys.argv[0]))
tmp_base_path = os.path.join(base_path, "tmp")
tmp_path = os.path.join(tmp_base_path, "tmp_gappa_data")
cache_path = os.path.join(tmp_base_path, "cache_gappa_data")
fptaylor_base = os.path.normpath(os.path.join(base_path, ".."))
fptaylor_tmp = os.path.join(tmp_base_path, "tmp_fptaylor")
fptaylor_log = os.path.join(tmp_base_path, "log_export_fptaylor")
fptaylor = os.path.join(fptaylor_base, "fptaylor")
gappa = os.path.expanduser(os.path.normpath("~/Work/tools/gappa-1.3.1/src/gappa"))
fpbench_path = os.path.normpath(
os.path.join(base_path, "..", "..", "forks", "FPBench", "tools"))
core2gappa = os.path.join(fpbench_path, "core2gappa.rkt")
racket = "racket"
def basename(fname):
return os.path.splitext(os.path.basename(fname))[0]
# Parse arguments
parser = argparse.ArgumentParser(
description="Splits input intervals into small pieces and runs Gappa on each subinterval")
parser.add_argument('--debug', action='store_true',
help="debug mode")
parser.add_argument('-e', '--error', choices=['abs', 'rel'], default='abs',
help="error type")
parser.add_argument('-t', '--type', default='64',
choices=['16', '32', '64', 'real'],
help="default type of variables and rounding operations")
parser.add_argument('-v', '--verbosity', type=int, default=0,
help="FPTaylor's verbosity level")
parser.add_argument('-o', '--output-path', default=".",
help="specifies where to save data files")
parser.add_argument('-n', '--segments', type=int, default=100,
help="number of subintervals")
parser.add_argument('input',
help="input FPTaylor file")
args = parser.parse_args()
if args.debug:
log.setLevel(logging.DEBUG)
if not os.path.isdir(tmp_path):
os.makedirs(tmp_path)
if not os.path.isdir(cache_path):
os.makedirs(cache_path)
common.remove_all(tmp_path, "*")
def decode_binary(s):
pat = r'([0-9+-]+)(b([0-9+-]+))?'
m = re.match(pat, s)
v = Fraction(m.group(1))
if m.group(3):
p = Fraction(2) ** int(m.group(3))
v *= p
return v
def get_input_bounds(input_file):
pat = r'in[\s]*\[([0-9.e+-]+)[\s]*,[\s]*([0-9.e+-]+)\]'
with open(input_file, 'r') as f:
data = f.read()
m = re.search(pat, data)
return float(m.group(1)), float(m.group(2))
class GappaTask:
def __init__(self, name):
self.input_files = []
self.name = name
def __repr__(self):
s = "GappaTask({0}): {1}".format(self.name, self.input_files)
return s
def create_data(self, out_file):
pat = r'in[\s]*\[([0-9b+-]+)[\s]*(\{[^}]*\})?,[\s]*([0-9b+-]+)'
self.input_files.sort(key=lambda x: x[0])
data = []
log.info("Running Gappa...")
total = len(self.input_files)
i = 0
for _, input_file in self.input_files:
bounds = get_input_bounds(input_file)
cmd = [gappa, input_file]
output = common.run_output(cmd, silent=True)
m = re.search(pat, output)
v1 = decode_binary(m.group(1))
v2 = decode_binary(m.group(3))
v = max(abs(v1), abs(v2))
data.append((bounds, v))
i += 1
sys.stdout.write("\r{0}/{1} ".format(i, total))
print("")
log.info("done")
with open(out_file, 'w') as f:
n = len(data)
if n != args.segments:
log.error("Wrong number of results: {0} (expected {1})".format(n, args.segments))
lo = 1
hi = 2
else:
lo = data[0][0][0]
hi = dat | a[n - 1][0][1]
f.write("[Gappa]{0}\n".format(self.name))
i = 1
for ((low, high), v) in data:
if args.error == 'abs':
abs_err = float(v)
rel_err = 0
else:
abs_err = 0
| rel_err = float(v)
if i == 1:
low = lo
if i == n:
high = hi
f.write("{0}, {1}, {2}, {3}, {4}, 0\n".format(i, low, high, abs_err, rel_err))
i += 1
# Export FPTaylor tasks to FPCore
out_file = os.path.join(tmp_path, basename(args.input) + ".fpcore")
cmd = [fptaylor, args.input,
"--fpcore-out", out_file,
"--log-base-dir", fptaylor_log,
"--log-append-date", "none",
"--tmp-base-dir", fptaylor_tmp,
"--tmp-date", "false",
"-v", str(args.verbosity)]
rnd_types = {
"16": ("float16", "rnd16", "binary16"),
"32": ("float32", "rnd32", "binary32"),
"64": ("float64", "rnd64", "binary64"),
"real": ("real", "rnd64", "real")
}
var_type, rnd_type, fpcore_type = rnd_types[args.type]
cmd += ["--default-var-type", var_type]
cmd += ["--default-rnd", rnd_type]
common.run(cmd, log=log)
# Run core2gappa.rkt
cmd = [racket, core2gappa,
"--var-precision", fpcore_type,
"--split", str(args.segments)]
if args.error == 'rel':
cmd += ["--rel-error"]
cmd += ["--out-path", tmp_path]
common.run(cmd + ["--", out_file], log=log)
# Collect files corresponding to each task
gappa_tasks = dict()
for file_path in glob.glob(os.path.join(tmp_path, "*.g")):
pat = r'(.+)\_case([0-9]+)\.g'
fname = os.path.basename(file_path)
m = re.match(pat, fname)
if not m:
task_name = os.path.splitext(fname)[0]
case = 0
else:
task_name = m.group(1)
case = int(m.group(2))
if task_name not in gappa_tasks:
gappa_tasks[task_name] = GappaTask(task_name)
gappa_tasks[task_name].input_files.append((case, file_path))
for task_name, task in gappa_tasks.iteritems():
out_file = os.path.join(args.output_path, "gappa-data-" + task_name + ".txt")
task.create_data(out_file) |
klmcwhirter/huntwords | manager/__main__.py | Python | mit | 1,563 | 0.003839 | '''
Usage:
manager puzzle_load (--file <filename>) [--url <url>]
manager puzzle_del (--name <name>) [--url <url>]
manager puzzles [--url <url>]
manager puzzleboards_clear [--url <url>]
manager puzzleboard_consume [--async-url <url>] (--name <name>) [--size <size>]
manager puzzleboard_pop (--name <name>) [--url <url>]
Options:
--async-url <url> The url to the async function endpoint [default: http://localhost:8080/async-function/huntwordsapi]
--url <url> The url to the function [default: http://localhost:8080/function/huntwordsapi]
--file <filename> The filename from which to read the words; one per line
--name <name> The puzzle name to give the dictionary of words
--size <size> The length of a side of the grid on which to place words [default: 15]
-h, --help Print this help text and exit
--version Print the version and exit
'''
from docopt import docopt
from .commands_puzzleboard import command_puzzleboards_clear, command_puzzleboard_consume, command_puzzleboard_pop
from .commands_puzzle import command_puzzle_load, command_puzzles
# Command pattern
verbs = {
'puzzle_load': command_puzzle_load,
'puzzles': command_puzzles,
'puzzleboards_clear': command_puzzleboards_clear,
'puzzleboard_consume': command_puzzleboard_consume,
'puzzleboard_pop': command_puzzleboard_pop
}
if __name__ == '__main__':
opts = docopt(__d | oc__, version='0.1')
comm | and = [v for k, v in verbs.items() if opts[k]][0]
command(**opts)
|
ForceBru/PyVM | test_memory.py | Python | mit | 3,339 | 0.001198 | import unittest
import os
import ctypes
from VM.Memory import Memory
class TestMemory(unittest.TestCase):
MEM_SIZE = 512
MAX_RANDOM_REPEAT = 10_000
def setUp(self):
self.mem = Memory(self.MEM_SIZE)
self.random_data = os.urandom(self.MEM_SIZE)
ctypes.memmove(self.mem.mem, self.random_data, self.MEM_SIZE)
def test_1_size(self):
self.assertEqual(self.mem.size, self.MEM_SIZE)
def test_get_8(self):
for offset in range(self.mem.size):
ret = self.mem.get(offset, 1)
correct = self.random_data[offset]
self.assertEqual(ret, correct)
def do_test_get(self, size: int):
for offset in range(self.mem.size - size):
ret = self.mem.get(offset, size)
correct = int.from_bytes(self.random_data[offset:offset + size], 'little')
self.assertEqual(ret, correct)
for offset in range(1, self.mem.size - size):
ret = self.mem.get(offset, size)
correct = int.from_bytes(self.random_data[offset:offset + size], 'little')
self.assertEqual(ret, correct)
if size == 4:
# test all possible permutations
for offset in range(2, self.mem.size - size):
ret = self.mem.get(offset, size)
correct = int.from_bytes(self.random_data[offset:offset + size], 'little')
self.assertEqual(ret, correct)
for offset in range(3, self.mem.size - size):
ret = self.mem.get(offset, size)
correct = int.from_bytes(self.random_data[offset:offset + size], 'little')
self.assertEqual(ret, correct)
def test_get_16(self):
self.do_test_get(2)
def test_get_32(self):
self.do_test_get(4)
def test_set_8(self):
for offset in range(self.mem.size):
correct, = os.urandom(1)
self.mem.set(offset, 1, correct)
ret = self.mem.get(offset, 1)
self.assertEqual(ret, correct)
def do_test_set(self, size: int):
for offset in range(self.mem.size - size):
correct = int.from_bytes(os.urandom(size), 'little')
self.mem.set(offset, size, correct)
ret = self.mem.get(offset, size)
self.assertEqual(ret, correct)
for offset in range(1, self.mem.size - size):
correct = int.from_bytes(os.urandom(size), 'littl | e')
self.mem.set(offset, size, correct)
ret = self.mem.get(offset, size)
self.assertEqual(ret, correct)
if size == 4:
for offset in range(2, self.mem.size - size):
correct = int.from_bytes(os.urandom(size), 'little')
self.mem.set(offset, size, correct)
ret = s | elf.mem.get(offset, size)
self.assertEqual(ret, correct)
for offset in range(3, self.mem.size - size):
correct = int.from_bytes(os.urandom(size), 'little')
self.mem.set(offset, size, correct)
ret = self.mem.get(offset, size)
self.assertEqual(ret, correct)
def test_set_16(self):
self.do_test_set(2)
def test_set_32(self):
self.do_test_set(4)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
acklabs/kytos | kytos/rest.py | Python | gpl-3.0 | 10,831 | 0.003324 | # This file is part of kytos.
#
# Copyright (c) 2014 by ACK Labs
#
# Authors:
# Beraldo Leal <beraldo AT acklabs DOT io>
# Gabriel von. Winckler <winckler AT acklabs DOT io>
# Gustavo Luiz Duarte <gustavo AT acklabs DOT io>
#
# kytos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kytos is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Gener | al Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Foobar. If not, see <http://www.gnu.org/licenses/>.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# | -*- coding: utf-8 -*-
#!/usr/bin/env python
from kytos import models
from kytos.db import Session
from flask import Flask, redirect
from flask import url_for
from flask.ext.restful import abort, Resource, fields, reqparse, marshal_with, Api
from sqlalchemy.orm import subqueryload, scoped_session
from sqlalchemy.orm import exc
db_session = scoped_session(Session)
webserver = Flask(__name__)
api = Api(webserver)
@webserver.route('/')
def index():
return redirect("/static/index.html", code=302)
#Proposal /api/v1/
#topology:
# - GET (list):
# return list of nodes with attributes type (switch, nic, host),
# name, resource_uri, connections (list of nodes index)
# eg: [ {name: switch1, type: switch,
# resource_uri: /api/v1/switch/1/, connections:[1]},
# {name: host1, type: host,
# resource_uri: /api/v1/host/1/, connections:[0]}
#switch:
# - GET (list):
# return list of all switches with basic attibutes only: name, resource_uri.
# - GET <ID> (show):
# return all attibutes (relations to {switch, nic, host, flow,
# segment} by resource_uri, others expanded)
# - PATCH <ID> (edit):
# change posted attributes on the model. O2M and M2M replace all
# existent values. return GET<ID> equivalence.
#host:
# - GET (list):
# return list of all hosts with basic attibutes only: name, resource_uri.
# - GET <ID> (show):
# return all attibutes (relations to {switch, nic, host, flow,
# segment} by resource_uri, others expanded)
# - POST (create):
# create a new object with posted attributes. return GET<ID> equivalence.
# - PATCH <ID> (edit):
# change posted attributes on the model. O2M and M2M replace all
# existent values. return GET<ID> equivalence.
# - DELETE <ID> (delete):
# delete the object. no return value (except 200 response code)
#nic:
# - GET (list):
# return list of all nics with basic attibutes only: name, resource_uri.
# - GET <ID> (show):
# return all attibutes (relations to {switch, nic, host, flow,
# segment} by resource_uri, others expanded)
# - POST (create):
# create a new object with posted attributes. return GET<ID> equivalence.
# - PATCH <ID> (edit):
# change posted attributes on the model. O2M and M2M replace all
# existent values. return GET<ID> equivalence.
# - DELETE <ID> (delete):
# delete the object. no return value (except 200 response code)
#segment:
# - GET (list):
# return list of all network segments with basic attibutes only:
# name, resource_uri.
# - GET <ID> (show):
# return all attibutes (relations to {switch, nic, host, flow,
# segment} by resource_uri, others expanded)
# - POST (create):
# create a new object with posted attributes. return GET<ID> equivalence.
# - PATCH <ID> (edit):
# change posted attributes on the model. O2M and M2M replace all
# existent values. return GET<ID> equivalence.
# - DELETE <ID> (delete):
# delete the object. no return value (except 200 response code)
#flow:
# - GET (list):
# return list of all switches with basic attibutes only:
#resource_uri. Will allow search in future.
# - GET <ID> (show):
# return all attibutes (relations to {switch, nic, host, flow,
# segment} by resource_uri, others expanded)
#TODO: A better json serialize (with datetime)
# Marshal Templates
flow = {
'id': fields.Integer,
'created_at': fields.DateTime,
'is_active': fields.String,
'duration_sec': fields.Integer,
'in_port': fields.Integer,
'dl_src': fields.String,
'dl_dst': fields.String,
'dl_vlan': fields.Integer,
'dl_vlan_pcp': fields.Integer,
'dl_type': fields.Integer,
'nw_proto': fields.Integer,
'nw_src': fields.String,
'nw_dst': fields.String,
'nw_tos': fields.Integer,
'tp_src': fields.Integer,
'tp_dst': fields.Integer,
'packet_count': fields.Integer,
'byte_count': fields.Integer,
}
flows = {
'flows': fields.List(fields.Nested(flow)),
}
port_detail = {
'id': fields.Integer,
'port_number': fields.Integer,
'state': fields.String,
'speed': fields.Integer,
'hardware_address': fields.String,
}
switch_list = {
'id': fields.Integer,
'resource_uri': fields.Url('switch'),
'name': fields.String,
'datapath_id': fields.String
}
switch_detail = {
'id': fields.Integer,
'resource_uri': fields.Url('switch'),
'name': fields.String,
'datapath_id': fields.String,
'description': fields.String,
'manufacturer': fields.String,
'serial_number': fields.String,
'version': fields.String,
'address': fields.String,
'source_port': fields.String,
'capabilities': fields.String,
'last_seen': fields.DateTime,
'is_active': fields.String,
'ports': fields.List(fields.Nested(port_detail)),
'uplink': fields.List(fields.Nested(switch_list)),
'flows_count': fields.Integer
}
nic_list = {
'id': fields.Integer,
'resource_uri': fields.Url('nic'),
'name': fields.String,
}
nic_detail = {
'id': fields.Integer,
'resource_uri': fields.Url('nic'),
'name': fields.String,
'description': fields.String,
'hardware_address': fields.String,
'last_seen': fields.DateTime,
'port': fields.Integer(attribute='port_id'),
'switch': fields.String(attribute='port.switch.name'),
'segment': fields.String(attribute='segment.name'),
}
host_list = {
'id': fields.Integer,
'resource_uri': fields.Url('host'),
'name': fields.String,
}
host_detail = {
'id': fields.Integer,
'resource_uri': fields.Url('host'),
'name': fields.String,
'description': fields.String,
'nics': fields.List(fields.Nested(nic_detail)),
}
class Topology(Resource):
# list
def get(self):
topology = { 'nodes': [], 'segments': [] }
segments = db_session.query(models.Segment).all()
for s in segments:
topology['segments'].append({'id':s.id, 'name':s.name})
nodes = topology['nodes']
switches = db_session.query(models.Switch).all()
switches_map = {}
for s in switches:
nodes.append({'name': s.name,
'resource_uri': url_for('switch', id=s.id),
'type': 'switch',
'segments': [],
'connections':[]})
switches_map[s.id] = len(nodes) - 1
hosts = db_session.query(models.Host).all()
hosts_map = {}
for h in hosts:
nodes.append({'name': h.name,
'resource_uri': url_for('host', id=h.id),
'type': 'host',
'segments': map(lambda x: x.segment_id, h.nics),
'connections':[]})
hosts_map[h.id] = len(nodes) - 1
for s in switches:
node = nodes[switches_map[s.id]]
# connect to other switches
for neighbour in s.get_neighbours():
node['connections'].append(switches_map[neighbour.id])
# connect to hosts
for nic in s.get_all_nics():
if nic.host:
# connect to the host
node['connections'].append(hosts_map[nic.host_id])
for h in hosts:
node = nodes[hosts_ma |
wonjunetai/pulse | features/uniprot_core.py | Python | mit | 2,151 | 0.003719 | # reads uniprot core file and generates core features
from features_helpers import score_differences
def build_uniprot_to_index_to_core(sable_db_obj):
uniprot_to_index_to_core = {}
for line in sable_db_obj:
tokens = line.split()
try:
# PARSING ID
prot = tokens[0]
index = int(tokens[1])
core = tokens[2]
# PARSING ID
if uniprot_to_index_to_core.has_key(prot):
uniprot_to_index_to_core[prot][index] = core
else:
uniprot_to_index_to_core[prot] = {index: core}
except ValueError:
print "Cannot parse: " + line[0:len(line) - 1]
return uniprot_to_index_to_core
def get_sable_scores(map_file, f_sable_db_location, uniprot_core_output_location): |
map_file_obj = open(map_file, 'r')
sable_db_obj = open(f_sable_db_location, 'r')
write_to = open(uniprot_core_output_location, 'w')
uniprot_to_index_to_core = build_uniprot_to_index_to_core(sable_db_obj)
for line in map_file_obj:
tokens = line.split()
asid = tokens[0].split("_")[0]
prot = tokens[1]
sstart = int(tokens[2])
start = int(toke | ns[3])
end = int(tokens[4])
eend = int(tokens[5])
rough_a_length = int(int(tokens[0].split("_")[-1].split("=")[1]) / 3)
if asid[0] == "I":
rough_a_length = 0
c1_count = 0
a_count = 0
c2_count = 0
canonical_absolute = 0
if prot in uniprot_to_index_to_core:
c1_count = score_differences(uniprot_to_index_to_core, prot, sstart, start)
a_count = score_differences(uniprot_to_index_to_core, prot, start, end)
c2_count = score_differences(uniprot_to_index_to_core, prot, end, eend)
prot_len = int(line.split("\t")[7].strip())
canonical_absolute = score_differences(uniprot_to_index_to_core, prot, 1, prot_len)
print >> write_to, tokens[0] + "\t" + prot + "\t" + repr(c1_count) + "\t" + repr(a_count) + "\t" + repr(
c2_count) + "\t" + repr(canonical_absolute)
write_to.close() |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/operations/_network_interfaces_operations.py | Python | mit | 64,353 | 0.005035 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations(object):
"""NetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal p | olling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(res | ponse)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkInterface"
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the dir |
tnewman/newmansound | server/newmansound/model.py | Python | gpl-3.0 | 1,028 | 0 | from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
Base = declarative_base()
class Artist(Base):
__tablename__ = 'artist'
id = Column(Integer, primary_key=True)
name = Column(String)
class Album(Base):
__tablename__ = 'album'
id = Column(Integer, primary_key=True)
name = Col | umn(String)
artist_id = Column(Integer, ForeignKey('artist.id'))
artist = relationship('Artist')
class Song(Base):
__tablename__ = 'song'
id = Column(Integer, primary_key=True)
name = Column(String)
album_id = Column(Integer, ForeignKey('album.id'))
album = relationship('Album')
path = Column(String)
class Playlist(Base):
__tablename__ = 'playlist'
id = Column(Integer, primary_key=True)
song_id = Column(Integer, ForeignKey('song.id'))
positi | on = Column(Integer)
song = relationship('Song')
|
otsaloma/helsinki-transit-stops | hts/test/test_util.py | Python | gpl-3.0 | 1,528 | 0 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hts.test
import os
import tempfile
class TestModule(hts.test.TestCase):
def test_atomic_open__file_exists(self):
text = "testing\ntesting\n"
handle, path = tempfile.mkstemp()
with hts.util.atomic_open(path, "w") as f:
f.write(text)
assert open(path, "r").read() == text
os.remove(path)
def test_atomic_open__new_file(self):
text = "test | ing\ntesting\n"
handle, path = tempfile.mkstemp()
os.remove(path)
with hts.util.atomic_open(path, "w") as f:
f.write(text)
asser | t open(path, "r").read() == text
os.remove(path)
def test_calculate_distance(self):
# From Helsinki to Lissabon.
dist = hts.util.calculate_distance(24.94, 60.17, -9.14, 38.72)
assert round(dist/1000) == 3361
|
conda/kapsel | conda_kapsel/test/__init__.py | Python | bsd-3-clause | 331 | 0 | # -*- coding: utf-8 -*-
# ------------------------------------ | ----------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# --------------------------- | -------------------------------------------------
|
tatigo/XBMC-BestRussianTVPlugin | archive.py | Python | gpl-3.0 | 9,702 | 0.004741 | # coding=utf-8
#
# <BestRussianTV plugin for XBMC>
# Copyright (C) <2011> <BestRussianTV>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import httplib, urllib, urllib2, re
import xml.parsers.expat
import config1
class GetChannels:
req = \
'<?xml version="1.0" encoding="utf-8"?>' \
'<soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">' \
'<soap:Body>' \
'<GetPVRChannels xmlns="http://iptv-distribution.net/ds/epg">' \
'<sApplicationName>{AppName}</sApplicationName>' \
'<sUserLogin>{Username}</sUserLogin>' \
'<sUserPassword>{Password}</sUserPassword>' \
'</GetPVRChannels>' \
'</soap:Body>' \
'</soap:Envelope>'
channels = []
id = ""
name = ""
icon = ""
element = None
def __init__(self, Username, Pa | ssword):
self.req = self.req.replace('{SiteId}', config1.siteId).replace('{AppName}', config1.appName) \
.replace('{Username}', Username).replace('{Password}', Password)
def Request(self):
conn = httplib.HTTPConnection('iptv-distribution.net')
conn.request('POST', config1.epgService, self.req, {
|
'SOAPAction': 'http://iptv-distribution.net/ds/epg/GetPVRChannels',
'Content-Type': 'text/xml; charset=utf-8'
})
response = conn.getresponse()
data = response.read()
p = xml.parsers.expat.ParserCreate()
p.StartElementHandler = self.start_element
p.EndElementHandler = self.end_element
p.CharacterDataHandler = self.char_data
p.Parse(str(data))
return self.channels
def start_element(self, name, attrs):
#print 'Start element:', name, attrs
if name == 'TVChannelData':
self.id = ""
self.name = ""
self.icon = ""
self.element = name
def end_element(self, name):
if name == 'TVChannelData':
temp = str(len(self.channels) + 1).zfill(2) + '. ' + self.name
#temp = temp.encode('utf-8')
self.channels.append((temp, self.id, self.icon))
self.id = None
self.name = None
def char_data(self, data):
if data.strip():
data = data.encode('utf-8')
if self.element == 'ID':
self.id += data
elif self.element == 'Name':
self.name += data
elif self.element == 'IconURL':
self.icon += data
class GetPVREPG:
req = \
'<?xml version="1.0" encoding="utf-8"?>' \
'<soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">' \
'<soap:Body>' \
'<GetPVREPG xmlns="http://iptv-distribution.net/ds/epg">' \
'<sApplicationName>{AppName}</sApplicationName>' \
'<dtStart>{Date}</dtStart>' \
'<nPeriod>1440</nPeriod>' \
'<nChannelID>{Channel}</nChannelID>' \
'<sUserLogin>{Username}</sUserLogin>' \
'<sUserPassword>{Password}</sUserPassword>' \
'</GetPVREPG>' \
'</soap:Body>' \
'</soap:Envelope>'
programs = []
start = None
id = None
name = None
description = None
isWatchable = None
element = None
def __init__(self, Username, Password, Channel, Date):
self.req = self.req.replace('{AppName}', config1.appName) \
.replace('{Username}', Username).replace('{Password}', Password) \
.replace('{Channel}', Channel).replace('{Date}', Date)
def Request(self):
conn = httplib.HTTPConnection('iptv-distribution.net', 80)
conn.request('POST', config1.epgService, self.req, {
'Host' : 'iptv-distribution.net',
'SOAPAction': 'http://iptv-distribution.net/ds/epg/GetPVREPG',
'Content-Type': 'text/xml; charset=utf-8'
})
response = conn.getresponse()
data = response.read()
p = xml.parsers.expat.ParserCreate()
p.StartElementHandler = self.start_element
p.EndElementHandler = self.end_element
p.Parse(str(data))
return self.programs
def start_element(self, name, attrs):
if name == 'TVProgramData':
self.id = attrs['TvGuideID']
self.name = attrs['TvProgramName']
# description is not returned by the service
self.description = ""
#self.description = attrs['Description']#.encode('utf-8')
self.start = attrs['Date'][11:16].encode('utf-8')
self.isWatchable = attrs['IsWatchable']
def end_element(self, name):
if name == 'TVProgramData' and self.isWatchable != 'false':
test = self.start + ' ' + self.name
test = test.encode('utf-8')
self.programs.append((test, self.id, self.description))
self.id = None
self.start = None
self.name = None
self.description = None
class GetArchStream:
req = \
'<?xml version="1.0" encoding="utf-8"?>' \
'<soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">' \
'<soap:Body>' \
'<GetPVRStreamURL xmlns="http://www.iptv-distribution.com/ucas/">' \
'<nPVRID>{Program}</nPVRID>' \
'<sUserLogin>{Username}</sUserLogin>' \
'<sUserPassword>{Password}</sUserPassword>' \
'<sVideoProtocol>{Protocol}</sVideoProtocol>' \
'</GetPVRStreamURL>' \
'</soap:Body>' \
'</soap:Envelope>'
streamUrl = None
def __init__(self, Username, Password, Program):
self.req = self.req.replace('{AppName}', config1.appName).replace('{Protocol}', config1.protocol) \
.replace('{Username}', Username).replace('{Password}', Password).replace('{Program}', Program)
def Request(self):
conn = httplib.HTTPConnection('iptv-distribution.net', 80)
conn.request('POST', config1.vodService, self.req, {
'Host': 'iptv-distribution.net',
'SOAPAction': 'http://www.iptv-distribution.com/ucas/GetPVRStreamURL',
'Content-Type': 'text/xml; charset=utf-8'
})
response = conn.getresponse()
data = response.read()
p = xml.parsers.expat.ParserCreate()
p.StartElementHandler = self.start_element
p.CharacterDataHandler = self.char_data
p.Parse(str(data))
return self.streamUrl
element = None
def start_element(self, name, attrs):
if name == 'GetPVRStreamURLResult':
self.streamUrl = ""
self.element = name
def char_data(self, data):
if self.element == 'GetPVRStreamURLResult':
self.streamUrl += data
class GetPvrPlaylist:
req = \
'<?xml version="1.0" encoding="utf-8"?>' \
'<soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">' \
'<soap:Body>' \
'<GetPvrPlaylist xmlns="http://iptv-distribution.net/ds/cas/streams/generic">' \
'<sApplicationName>{AppName}</sApplicationName>' \
' |
Teknologforeningen/teknologr.io | teknologr/registration/migrations/0004_auto_20190610_2250.py | Python | mit | 350 | 0 | # Generated by Django 2.2.1 on 2019-06-10 | 19:50
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('registration', '0003_limbomember_mother_tongue'),
]
operations = [
migrations.RenameModel(
old_name='LimboMember',
new_name='Ap | plicant',
),
]
|
tillraab/thunderfish | docs/pulseplots.py | Python | gpl-3.0 | 40,213 | 0.035635 | """
## Plot and save key steps in pulses.py for visualizing the alorithm.
"""
import pickle, glob
import numpy as np
from scipy import stats
from matplotlib import rcParams, gridspec, ticker
import matplotlib.pyplot as plt
try:
from matplotlib.colors import colorConverter as cc
except ImportError:
import matplotlib.colors as cc
try:
from matplotlib.colors import to_hex
except ImportError:
from matplotlib.colors import rgb2hex as to_hex
from matplotlib.patches import ConnectionPatch, Rectangle
from matplotlib.lines import Line2D
import warnings
def warn(*args,**kwargs):
"""
Ignore all warnings.
"""
pass
warnings.warn=warn
# plotting parameters and colors
rcParams['font.family'] = 'monospace'
cmap = plt.get_cmap("Dark2")
c_g = cmap(0)
c_o = cmap(1)
c_grey = cmap(7)
cmap_pts = [cmap(2),cmap(3)]
def darker(color, saturation):
"""Make a color darker.
From bendalab/plottools package.
Parameters
----------
color: dict or matplotlib color spec
A matplotlib color (hex string, name color string, rgb tuple)
or a dictionary with an 'color' or 'facecolor' key.
saturation: float
The smaller the saturation, the darker the returned color.
A saturation of 0 returns black.
A saturation of 1 leaves the color untouched.
A saturation of 2 returns white.
Returns
-------
color: string or dictionary
The darker color as a hexadecimal RGB string (e.g. '#rrggbb').
If `color` is a dictionary, a copy of the dictionary is returned
with the value of 'color' or 'facecolor' set to the darker color.
"""
try:
c = color['color']
cd = dict(**color)
cd['color'] = darker(c, saturation)
return cd
except (KeyError, TypeError):
try:
c = color['facecolor']
cd = dict(**color)
cd['facecolor'] = darker(c, saturation)
return cd
except (KeyError, TypeError):
if saturation > 2:
sauration = 2
if saturation > 1:
return lighter(color, 2.0-saturation)
if saturation < 0:
saturation = 0
r, g, b = cc.to_rgb(color)
rd = r*saturation
gd = g*saturation
bd = b*saturation
return to_hex((rd, gd, bd)).upper()
def lighter(color, lightness):
"""Make a color lighter.
From bendalab/plottools package.
Parameters
----------
color: dict or matplotlib color spec
A matplotlib color (hex string, name color string, rgb tuple)
or a dictionary with an 'color' or 'facecolor' key.
lightness: float
The smaller the lightness, the lighter the returned color.
A lightness of 0 returns white.
A lightness of 1 leaves the color untouched.
A lightness of 2 returns black.
Returns
-------
color: string or dict
The lighter color as a hexadecimal RGB string (e.g. '#rrggbb').
If `color` is a dictionary, a copy of the dictionary is returned
with the value of 'color' or 'facecolor' set to the lighter color.
"""
try:
c = color['color']
cd = dict(**color)
cd['color'] = lighter(c, lightness)
return cd
except (KeyError, TypeError):
try:
c = color['facecolor']
cd = dict(**color)
cd['facecolor'] = lighter(c, lightness)
return cd
except (KeyError, TypeError):
if lightness > 2:
lightness = 2
if lightness > 1:
return darker(color, 2.0-lightness)
if lightness < 0:
lightness = 0
r, g, b = cc.to_rgb(color)
rl = r + (1.0-lightness)*(1.0 - r)
gl = g + (1.0-lightness)*(1.0 - g)
bl = b + (1.0-lightness)*(1.0 - b)
return to_hex((rl, gl, bl)).upper()
def xscalebar(ax, x, y, width, wunit=None, wformat=None, ha='left', va='bottom',
              lw=None, color=None, capsize=None, clw=None, **kwargs):
    """Horizontal scale bar with label.
    From bendalab/plottools package.
    Parameters
    ----------
    ax: matplotlib axes
        Axes where to draw the scale bar.
    x: float
        x-coordinate where to draw the scale bar in relative units of the axes.
    y: float
        y-coordinate where to draw the scale bar in relative units of the axes.
    width: float
        Length of the scale bar in units of the data's x-values.
    wunit: string or None
        Optional unit of the data's x-values.
    wformat: string or None
        Optional format string for formatting the label of the scale bar
        or simply a string used for labeling the scale bar.
    ha: 'left', 'right', or 'center'
        Scale bar aligned left, right, or centered to (x, y)
    va: 'top' or 'bottom'
        Label of the scale bar either above or below the scale bar.
    lw: int, float, None
        Line width of the scale bar.
    color: matplotlib color
        Color of the scalebar.
    capsize: float or None
        If larger then zero draw cap lines at the ends of the bar.
        The length of the lines is given in points (same unit as linewidth).
    clw: int, float, None
        Line width of the cap lines.
    kwargs: key-word arguments
        Passed on to `ax.text()` used to print the scale bar label.

    Returns
    -------
    x0, x1, y: float
        Start and end of the bar and its y position, in data coordinates.
    """
    ax.autoscale(False)
    # ax dimensions in figure pixels:
    pixelx = np.abs(np.diff(ax.get_window_extent().get_points()[:,0]))[0]
    pixely = np.abs(np.diff(ax.get_window_extent().get_points()[:,1]))[0]
    # FIX: repaired extraction-garbled line 'a | x.get_xlim()' -> 'ax.get_xlim()'.
    xmin, xmax = ax.get_xlim()
    ymin, ymax = ax.get_ylim()
    unitx = xmax - xmin
    unity = ymax - ymin
    # data units per pixel, used to convert pixel offsets back to data coords:
    dxu = np.abs(unitx)/pixelx
    dyu = np.abs(unity)/pixely
    # transform x, y from relative units to axis units:
    x = xmin + x*unitx
    y = ymin + y*unity
    # bar length label; a non-format wformat string is used verbatim:
    if wformat is None:
        wformat = '%.0f'
        if width < 1.0:
            wformat = '%.1f'
    try:
        ls = wformat % width
        width = float(ls)
    except TypeError:
        # wformat was a plain label string, not a %-format:
        ls = wformat
    # bar extent depending on horizontal anchoring:
    if ha == 'left':
        x0 = x
        x1 = x+width
    elif ha == 'right':
        x0 = x-width
        x1 = x
    else:
        x0 = x-0.5*width
        x1 = x+0.5*width
    # line width:
    if lw is None:
        lw = 2
    # color:
    if color is None:
        color = 'k'
    # scalebar:
    lh = ax.plot([x0, x1], [y, y], '-', color=color, lw=lw,
                 solid_capstyle='butt', clip_on=False)
    # get y position of line in figure pixel coordinates:
    ly = np.array(lh[0].get_window_extent(ax.get_figure().canvas.get_renderer()))[0,1]
    # caps:
    if capsize is None:
        capsize = 0
    if clw is None:
        clw = 0.5
    if capsize > 0.0:
        dy = capsize*dyu
        ax.plot([x0, x0], [y-dy, y+dy], '-', color=color, lw=clw,
                solid_capstyle='butt', clip_on=False)
        ax.plot([x1, x1], [y-dy, y+dy], '-', color=color, lw=clw,
                solid_capstyle='butt', clip_on=False)
    # label, with a thin space before the unit:
    if wunit:
        ls += u'\u2009%s' % wunit
    if va == 'top':
        th = ax.text(0.5*(x0+x1), y, ls, clip_on=False,
                     ha='center', va='bottom', **kwargs)
        # get y coordinate of text bottom in figure pixel coordinates:
        ty = np.array(th.get_window_extent(ax.get_figure().canvas.get_renderer()))[0,1]
        dty = ly+0.5*lw + 2.0 - ty
    else:
        th = ax.text(0.5*(x0+x1), y, ls, clip_on=False,
                     ha='center', va='top', **kwargs)
        # get y coordinate of text bottom in figure pixel coordinates:
        ty = np.array(th.get_window_extent(ax.get_figure().canvas.get_renderer()))[1,1]
        dty = ly-0.5*lw - 2.0 - ty
    # nudge the label clear of the bar by the measured pixel offset:
    th.set_position((0.5*(x0+x1), y+dyu*dty))
    return x0, x1, y
def yscalebar(ax, x, y, height, hunit=None, hformat=None, ha='left', v |
creasyw/IMTAphy | modules/phy/imtaphy/testConfigs/config.py | Python | gpl-2.0 | 29,929 | 0.021718 | ################################################################################
# This file is part of IMTAphy
# _____________________________________________________________________________
#
# Copyright (C) 2011
# Institute of Communication Networks (LKN)
# Department of Electrical Engineering and Information Technology (EE & IT)
# Technische Universitaet Muenchen
# Arcisstr. 21
# 80333 Muenchen - Germany
# http://www.lkn.ei.tum.de/~jan/imtaphy/index.html
#
# _____________________________________________________________________________
#
# IMTAphy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# IMTAphy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IMTAphy. If not, see <http://www.gnu.org/licenses/>.
#
#################################################################################
import openwns
import openwns.node
import openwns.geometry.position
import imtaphy.Station
import imtaphy.Logger
import imtaphy.Channel
import imtaphy.Pathloss
import imtaphy.Scanner
import imtaphy.LinkManagement
import imtaphy.SCM
import imtaphy.ScenarioSupport
import imtaphy.Antenna
import imtaphy.Logger
import imtaphy.Receiver
import imtaphy.covarianceEstimation
import imtaphy.channelEstimation
import imtaphy.Feedback
import openwns.probebus
from openwns import dB, dBm, fromdB, fromdBm
from openwns.evaluation import *
import math
import random
import ltea.dll.schedulers.downlink
import ltea.dll.schedulers.uplink
import ltea.dll.linkAdaptation.downlink
import ltea.dll.linkAdaptation.uplink
import ltea.evaluation.default
import ltea.helper
simTime = 0.11 # total simulation duration in seconds; choose simTime slightly larger than setting + N*windowSize
windowSize = 0.0750 # window size during which to measure, e.g., throughput
settlingTime = 0.0250 # time at the beginning during which no measurements are taken; windowing starts after settling time
# makes the UEs (see end of file) probe time/frequency samples of the channel gain
# and installs suitable probes
# visualize channels with, e.g. /testConfigs/plotChannel.py output/channelGain_UE3_scmLinkId0_antennaPair00_max.m
dumpChannel = False
# for plotting a scenario view (e.g. SINR / geometry over area)
# enables suitable probes and places mobiles on a uniform grid to sample whole area
plotting = False
# define the resolution for the grid in x and y direction
class probeConfig:
    # number of grid bins along the x and y axes for scenario-view probes
    xBins = 25
    yBins = 25
# dumps a trace file of all received uplink and downlink transmissions to the output
# directory; can be viewed with IMTAphyViewer. Disabled by default. See bottom of
# config for further options (e.g. restricting to certain cells for speed/file size reasons)
phyTracing = False
# When running standalone, comment the "from openw..." import
# When running a campaign, uncomment the import statement and comment the 2 lines
# class params:
#    pass
#from openwns.wrowser.simdb.SimConfig import params
class params:
    # empty namespace object; simulation parameters are attached below
    pass
# Simulation parameters; comments list the accepted alternative values.
params.fdd = "DL" # "DL", "DUPLEX"
params.scenario = "UMa" # "InH", "UMa", "UMi", "RMa", "SMa"
params.scmLinks = "all" #"serving" "all" or "no"
params.seed = 42
params.fullBuffer = True
if not params.fullBuffer:
    params.offeredDLtrafficBps = 1E7 #
    params.offeredULtrafficBps = 1E7 #
    packetSize = 500 # bytes
params.receiver = "MRC" #"NoFilter" # "MMSE" # "MRC"
params.numBSAntennas = 2
params.numMSAntennas = 2
params.numMSperBS = 10
params.msSpeed = 0 # speed in km/h, negative values (msSpeed < 0) means scenario-specific default speed
params.numULPRBs = 50
params.numDLPRBs = 50
params.feedbackDelay = 6
params.cqiUpdateFrequency = 5
params.dlScheduler = "ProportionalFair" #"ProportionalFair" # "ZF""ProportionalFair" #"PU2RC" # ProportionalFair "RoundRobin"
params.pfAlpha = 0.001 # ProportionalFair scheduling fairness tuner with 0 => greedy, 1 => fair
params.laThreshold = 0 #positive value in dB => more conservative link-adaptation
params.precodingMode = "ClosedLoopCodebookBased" #"SingleAntenna" #"NoPrecoding", "ClosedLoopCodebookBased"
params.fixedPMIs = False # true: assign fixed PMIs to each PRB, see below
params.outdoorOnlyUMiLoS = True # assign UMi LoS probabiltiy on outdoor part of distance only. 3GPP pathgain+geometry assumes False, otherwise True is used
params.powerControl = "calibration" # "calibration" or "3GPPdefault"
params.thresholdUL = 0 # uplink LA offset in dB
params.adaptiveUplinkLA = True
params.bsAntennaConfiguration = "C" #"BASESTATIONITU" # "BASESTATIONITU", or "A", "B", "C", "D", "E" for the corresponding 3GPP configs from 36.814
params.channelEstimation = "perfect" # "thermalNoiseBased", "IandNCovarianceBased" with further parameters see below
params.covarianceEstimation = "perfect"# "Wishart32.829" # "Wishart32.829" "None" "equalDiagonal", "perfect", "gaussianError" and "distinguish" (with further parameters)
params.maxRank = 4 # affectes MMSE only: 0 means determine from min(numRx,numTx) antennas; MRC is rank 1 by default
params.pmis = 5 # 1,2, 3, 4, 5, or 15
# FIX: repaired extraction-garbled name 'numberOfCircle | s' -> 'numberOfCircles'.
numberOfCircles = 1 # tier of cell sites surrounding center site (0: 1 site, 1: 7 sites, 2: 19 sites)
random.seed(params.seed) # this fixes the seed for Python within this config.py
# simulator setup stuff
# FIX: repaired extraction-garbled keyword 'si | mulationModel' -> 'simulationModel'.
WNS = openwns.Simulator(simulationModel = openwns.node.NodeSimulationModel())
openwns.setSimulator(WNS)
WNS.maxSimTime = simTime
WNS.rng.seed = params.seed # this fixes the seed for the C++ simulator#
WNS.masterLogger.backtrace.enabled = False
WNS.masterLogger.enabled = True #False
WNS.outputStrategy = openwns.simulator.OutputStrategy.DELETE
WNS.statusWriteInterval = 30 # in seconds
WNS.probesWriteInterval = 3600 # in seconds
######## scenario params ########
wrapAround = True # allows evaluating all cells because it virtually surrounds all cells by all others
msHeight = 1.5 # meters
scenarioConfig = imtaphy.ScenarioSupport.Scenario(params.scenario, numberOfCircles, msHeight)
if plotting:
    scenarioConfig.extendBoundingBoxToMultiplesOf(probeConfig.xBins, probeConfig.yBins)
if params.msSpeed < 0:
    msSpeedKmh = scenarioConfig.msSpeedKmh
else:
    msSpeedKmh = params.msSpeed
# Wrap-around is not used for the indoor hotspot (InH) scenario.
if wrapAround and not (params.scenario == 'InH'):
    wrapAroundShiftVectors = imtaphy.LinkManagement.computeShiftVectors(scenarioConfig.getInterSiteDistance(), numberOfCircles)
else:
    wrapAroundShiftVectors = []
# "scenario" is the variable the wrowser looks for to display the scenario
scenario = scenarioConfig.getBoundingBox()
# NOTE(review): 'filter' shadows the builtin of the same name; kept as-is
# because later parts of the config reference it.
if params.receiver == "MMSE":
    filter = imtaphy.Receiver.MMSEFilter(maxRank = params.maxRank)
#    covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
elif params.receiver == "MMSE-IRC":
    filter = imtaphy.Receiver.MMSEFilter(maxRank = params.maxRank)
#    covarianceEstimation = imtaphy.covarianceEstimation.Perfect()
elif params.receiver == "MRC":
    filter = imtaphy.Receiver.MRCFilter()
    # actually, the MRC does not care about the covariance
#    covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
else:
    raise Exception("Bad receiver filter option")
#covarianceEstimation = imtaphy.covarianceEstimation.GaussianError(relativeError_dB = 0.0)
#channelEstimation = imtaphy.channelEstimation.ThermalNoiseBasedGaussianError(errorPowerRelativeToNoise_dB = 3)
if params.channelEstimation == "perfect":
channelEstimation = None
elif params.channelEstimation == "thermalNoiseBased":
channelEstimation = imtaphy.channelEstimation.ThermalNoiseBasedGaussianError(errorPowerRelativeToNoise_dB = 3)
elif params.channelEstimation == "IandNCovarianceBased":
channelEstimation = imtaphy.channelEstimation.IandNCovarianceBasedGaussianError(gainOverIandN_dB = 10, |
jaywink/diaspora-hub | thefederation/migrations/0001_initial.py | Python | agpl-3.0 | 4,467 | 0.003582 | # Generated by Django 2.0.3 on 2018-03-25 19:18
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
import enumfields.fields
import thefederation.enums
class Migration(migrations.Migration):
    """Initial schema: Node, Platform, Protocol and Service models plus
    the Node -> Platform/Protocol/Service relations.

    FIX: repaired two extraction-garbled lines ('| (\'uuid\', ...' and
    'models | .CharField') that made the file unparseable; field definitions
    are otherwise unchanged.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Node',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(auto_created=True, db_index=True, unique=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('admin_email', models.EmailField(blank=True, max_length=254)),
                ('blocked', models.BooleanField(default=False)),
                ('country', django_countries.fields.CountryField(blank=True, max_length=2)),
                ('failures', models.PositiveIntegerField(default=0)),
                # NOTE(review): mutable default {} is shared between instances;
                # model-side 'default=dict' is preferable — verify against model.
                ('features', django.contrib.postgres.fields.jsonb.JSONField(default={})),
                ('hide_from_list', models.BooleanField(default=False)),
                ('host', models.CharField(max_length=128, unique=True)),
                ('ip', models.GenericIPAddressField(blank=True, null=True)),
                ('name', models.CharField(max_length=300)),
                ('open_signups', models.BooleanField()),
                ('relay', enumfields.fields.EnumField(default='none', enum=thefederation.enums.Relay, max_length=10)),
                ('server_meta', django.contrib.postgres.fields.jsonb.JSONField(default={})),
                ('version', models.CharField(blank=True, max_length=128)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Platform',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(auto_created=True, db_index=True, unique=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('latest_version', models.CharField(blank=True, max_length=128)),
                ('icon', models.CharField(default='unknown', max_length=80)),
                ('name', models.CharField(max_length=80, unique=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Protocol',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(auto_created=True, db_index=True, unique=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=80, unique=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Service',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uuid', models.UUIDField(auto_created=True, db_index=True, unique=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=80, unique=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='node',
            name='platform',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='nodes', to='thefederation.Platform'),
        ),
        migrations.AddField(
            model_name='node',
            name='protocols',
            field=models.ManyToManyField(related_name='nodes', to='thefederation.Protocol'),
        ),
        migrations.AddField(
            model_name='node',
            name='services',
            field=models.ManyToManyField(related_name='nodes', to='thefederation.Service'),
        ),
    ]
|
ihmeuw/vivarium | src/vivarium/examples/boids/visualization.py | Python | bsd-3-clause | 500 | 0 | import matplotlib.pyplot as plt
def plot_birds(simulation, plot_velocity=False):
    """Scatter-plot the boid population of a vivarium simulation.

    Parameters
    ----------
    simulation
        A vivarium simulation whose population has x/y positions,
        vx/vy velocities and a per-boid color.
    plot_velocity: bool
        If True, overlay velocity vectors as a quiver plot.
    """
    width = simulation.configuration.location.width
    height = simulation.configuration.location.height
    pop = simulation.get_population()
    plt.figure(figsize=[12, 12])
    plt.scatter(pop.x, pop.y, color=pop.color)
    if plot_velocity:
        # FIX: repaired extraction-garbled call ('pop.v | y', 'width=0.00 | 2').
        plt.quiver(pop.x, pop.y, pop.vx, pop.vy, color=pop.color, width=0.002)
    plt.xlabel("x")
    plt.ylabel("y")
    plt.axis([0, width, 0, height])
    plt.show()
|
jmp0xf/raven-python | setup.py | Python | bsd-3-clause | 3,611 | 0.000831 | #!/usr/bin/env python
"""
Raven
=====
Raven is a Python client for `Sentry <http://getsentry.com/>`_. It provides
full out-of-the-box support for many of the popular frameworks, including
`Django <djangoproject.com>`_, `Flask <http://flask.pocoo.org/>`_, and `Pylons
<http://www.pylonsproject.org/>`_. Raven also includes drop-in support for any
`WSGI <http://wsgi.readthedocs.org/>`_-compatible web application.
"""
# Hack to prevent stupid "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when running `python
# setup.py test` (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
for m in ('multiprocessing', 'billiard'):
    try:
        __import__(m)
    except ImportError:
        pass
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import sys
# Requirements needed at setup time (test runner integration).
setup_requires = [
    'pytest',
]
# Extra tools for working on raven itself.
dev_requires = [
    'flake8>=2.0,<2.1',
]
unittest2_requires = ['unittest2']
# Optional Flask integration dependencies.
flask_requires = [
    'Flask>=0.8',
    'blinker>=1.1',
]
flask_tests_requires = [
    'Flask-Login>=0.2.0',
]
webpy_tests_requires = [
    'paste',
    'web.py',
]
# If it's python3, remove unittest2 & web.py
if sys.version_info[0] == 3:
    unittest2_requires = []
    webpy_tests_requires = []
# Everything needed to run the full test suite across supported frameworks.
tests_require = [
    'bottle',
    'celery>=2.5',
    'Django>=1.4',
    'django-celery>=2.5',
    'exam>=0.5.2',
    'logbook',
    'mock',
    'nose',
    'pep8',
    'pytz',
    'pytest>=2.7.0,<2.8.0',
    'pytest-cov>=1.4',
    # BUG FIX: upper bound was '<2.7.0', which contradicts '>=2.8.0' and made
    # the requirement unsatisfiable; '<2.9.0' restores the intended pin.
    'pytest-django>=2.8.0,<2.9.0',
    'pytest-timeout==0.4',
    'requests',
    'tornado',
    'webob',
    'webtest',
    'anyjson',
] + (flask_requires + flask_tests_requires +
     unittest2_requires + webpy_tests_requires)
class PyTest(TestCommand):
    """Setuptools 'test' command that delegates to pytest."""
    def initialize_options(self):
        TestCommand.initialize_options(self)
        # extra arguments forwarded to pytest.main()
        self.pytest_args = []
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        """Run pytest and exit with its return code."""
        # import here, cause outside the eggs aren't loaded
        import pytest
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
# Distribution metadata and packaging configuration.
setup(
    name='raven',
    version='5.7.0.dev0',
    author='David Cramer',
    author_email='dcramer@gmail.com',
    url='https://github.com/getsentry/raven-python',
    description='Raven is a client for Sentry (https://www.getsentry.com)',
    long_description=__doc__,
    packages=find_packages(exclude=("tests", "tests.*",)),
    zip_safe=False,
    # optional feature sets installable as raven[flask], raven[tests], raven[dev]
    extras_require={
        'flask': flask_requires,
        'tests': tests_require,
        'dev': dev_requires,
    },
    license='BSD',
    tests_require=tests_require,
    # wire `python setup.py test` to the pytest-based command above
    cmdclass={'test': PyTest},
    include_package_data=True,
    entry_points={
        'console_scripts': [
            'raven = raven.scripts.runner:main',
        ],
        'paste.filter_app_factory': [
            'raven = raven.contrib.paste:sentry_filter_factory',
        ],
    },
    classifiers=[
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python',
        'Topic :: Software Development',
    ],
)
|
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.2/django/contrib/flatpages/admin.py | Python | bsd-3-clause | 1,072 | 0.00653 | from django import forms
from django.contrib import admin
from django.contrib.flatpages.models import FlatPage
from django.utils.translation import ugettext_lazy as _
class FlatpageForm(forms.ModelForm):
    """Admin form for FlatPage that validates the URL field format."""
    # FIX: repaired extraction-garbled '| error_message' keyword argument.
    url = forms.RegexField(label=_("URL"), max_length=100, regex=r'^[-\w/]+$',
        help_text = _("Example: '/about/contact/'. Make sure to have leading"
                      " and trailing slashes."),
        error_message = _("This value must contain only letters, numbers,"
                          " underscores, dashes or slashes."))

    class Meta:
        model = FlatPage
class FlatPageAdmin(admin.ModelAdmin):
    """Admin configuration for FlatPage using the validating form above."""
    form = FlatpageForm
    fieldsets = (
        (None, {'fields': ('url', 'title', 'content', 'sites')}),
        # less-used options hidden behind a collapsible section
        (_('Advanced options'), {'classes': ('collapse',), 'fields': ('enable_comments', 'registration_required', 'template_name')}),
    )
    list_display = ('url', 'title')
    list_filter = ('sites', 'enable_comments', 'registration_required')
    search_fields = ('url', 'title')
admin.site.register(FlatPage, FlatPageAdmin)
|
wnereiz/pxeat | pxeat/views.py | Python | gpl-3.0 | 12,105 | 0.012557 | #!/usr/bin/python3
import sqlite3
import os, sys, time, datetime, random, string
import urllib.request, urllib.error
import configparser
from flask import Flask, request, session, redirect
from flask import render_template, g, flash, url_for
from contextlib import closing
from .modules import Pagi
from pxeat import app
from config import *
def prt_help():
    """Print command-line usage information to stdout."""
    usage = "To start the service:\n\n\t" + sys.argv[0] + " server\n"
    print(usage)
    print('Listen to "localhost:5000" by default, \n'
          'Deploy on production (Apache, Nginx...) with "pxeat.wsgi"')
def chk_args():
    """Validate command-line arguments; exit with a message when invalid.

    Accepts exactly one argument: 'server' (start, after checking that the
    database, PXE file and config file exist) or '--initdb' (create the DB).
    Anything else prints usage help and exits.
    """
    if len(sys.argv) == 2:
        if sys.argv[1] == 'server':
            # refuse to start until all required files are in place
            if not os.path.isfile(DATABASE):
                print("Database is not available!\nCreate with --initdb")
                sys.exit()
            if not os.path.isfile(PXE_FILE):
                print("PXE file is not available!\nPlease check the configuration")
                sys.exit()
            if not os.path.isfile("./config.py"):
                print("PXEAT Config file is missing!")
                sys.exit()
        elif sys.argv[1] == '--initdb':
            init_db()
        else:
            prt_help()
            sys.exit()
    else:
        prt_help()
        sys.exit()
# Defaults
items_num = int(ITEMS_NUM)  # number of PXE menu entries kept/displayed
# initial values for the web form fields, in template order:
# title, repo_url, repo_kernel, repo_initrd, inst_method, comment
form_default = ["", \
        "http://", \
        REPO_KERNEL_DEFAULT, \
        REPO_INITRD_DEFAULT, \
        "def", \
        ""]
loader_dir = TFTP_ROOT + LOADER_PATH  # where downloaded kernels/initrds are stored
# filename suffixes distinguishing the kernel (-0) from the initrd (-1)
postfix_kernelfn = '-0'
postfix_initrdfn = '-1'
items = {}
def chk_input(chk_string, chk_type):
    """Validate one submitted form field.

    Parameters
    ----------
    chk_string: str
        The raw value submitted by the user.
    chk_type: str
        Which validation to apply: 'pxe_title', 'file_path' or 'repo_url'.

    Raises
    ------
    ValueError
        If the value fails validation; the message is shown to the user.
    """
    if chk_type == 'pxe_title':
        if chk_string == '':
            raise ValueError("The title can not be empty!")
        return
    elif chk_type == 'file_path':
        # BUG FIX: an empty path used to crash with IndexError on chk_string[0].
        if not chk_string or chk_string[0] != '/' or chk_string[-1] == '/':
            raise ValueError("Path format is invalid!")
        return
    elif chk_type == 'repo_url':
        chk_elements = chk_string.split('//')
        # BUG FIX: a URL without '//' used to crash with IndexError;
        # report it as an invalid format instead.
        if len(chk_elements) < 2:
            raise ValueError("Invalid format!"
                             " (Only support http:// or https://)")
        if chk_elements[1] == '':
            raise ValueError("The repository can not be empty!")
        elif chk_elements[0] not in ['http:','https:']:
            raise ValueError("Invalid format!"+\
                    " (Only support http:// or https://)")
        return
    else:
        # programming error, not user input: abort loudly
        sys.exit("chk_type error!")
def grab_file(base_url, file_path, saved_file):
    """Download base_url + file_path and store it at saved_file.

    Returns
    -------
    None on success, or an HTML error-message string for display on failure.
    """
    errmsg0 = "<br />Something wrong, please contact the administrator."
    errmsg1 = "<br />Something wrong, please check the repository link \
            and kernel/initrd file path."
    dbginfo_local = "Debug info: Configuration error! \
Failed to open/write local kernel&initrd file. \
Check your \'LOADER_PATH\' setting in config file. \
Make sure the path exist and you have permission to write.\n\
Current path: " + saved_file
    file_url = base_url + file_path
    try:
        remote = urllib.request.urlopen(file_url)
    except urllib.error.HTTPError as e:
        return str(e.code) + " " + str(e.reason) + str(errmsg1)
    # BUG FIX: was a bare 'except:' which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        return str(errmsg0)
    try:
        # BUG FIX: use context managers so both the HTTP response and the
        # local file are closed even when the write fails (previously the
        # response was never closed and the file leaked on error).
        with remote, open(saved_file, "wb") as local_file:
            local_file.write(remote.read())
    except Exception:
        print(dbginfo_local)
        return str(errmsg0)
def boot_opts_gen(opt_flag):
    """Return the kernel boot options for the given installation method.

    'vnc' and 'ssh' append serial-console remote-install parameters;
    anything else yields the plain default options.
    """
    if opt_flag == "vnc":
        extra = " console=ttyS0 vnc=1 vncpassword=" + VNC_PASSWD
    elif opt_flag == "ssh":
        extra = " console=ttyS0 usessh=1 sshpassword=" + SSH_PASSWD
    else:
        extra = ""
    return DEFAULT_BOOT_OPTS + extra
def connect_db():
    """Open a new connection to the configured SQLite database."""
    return sqlite3.connect(DATABASE)
def init_db():
    """(Re)create the database schema from schema.sql (used by --initdb)."""
    with closing(connect_db()) as db:
        with app.open_resource('schema.sql', mode='r') as f:
            db.cursor().executescript(f.read())
        db.commit()
@app.before_request
def before_request():
    # give every request its own database handle on the app context
    g.db = connect_db()
@app.teardown_request
def teardown_request(exception):
    # close the per-request handle, if one was opened
    db = getattr(g, 'db', None)
    if db is not None:
        db.close()
@app.route('/')
def form():
    """Render the main input form pre-filled with the default values."""
    field_names = ['title', 'repo_url', 'repo_kernel',
                   'repo_initrd', 'inst_method', 'comment']
    default_val = dict(zip(field_names, form_default))
    return render_template('form.html', default_val=default_val)
@app.route('/history/', defaults={'page': 1})
@app.route('/history/page/<int:page>')
def history(page):
    """Render a paginated history of all PXE entries, newest first."""
    count = g.db.execute('select count(*) from pxeitems').fetchone()[0]
    per_page = 10
    pagination = Pagi(page, per_page, count)
    try:
        cur = g.db.execute('select id,\
                            pxe_title,\
                            repo_url,\
                            repo_kernel,\
                            repo_initrd,\
                            pxe_comment,\
                            unix_time,\
                            inst_flag from pxeitems order by id desc')
    except sqlite3.Error as e:
        return render_template('failed.html', \
                failed_msg = "Database error: "+str(e))
    # slice the result set to the requested page and map rows to dicts
    history_entries = [ dict(pxe_id=row[0], \
                        pxe_title=row[1], \
                        repo_url=row[2], \
                        repo_kernel=row[3], \
                        repo_initrd=row[4], \
                        pxe_comment=row[5], \
                        unix_time=datetime.datetime.fromtimestamp(int(row[6])), \
                        inst_flag=row[7]) \
                        for row in cur.fetchall()[(page-1)*per_page:page*per_page]\
                        ]
    if not history_entries and page != 1:
        # Should do something here other than pass or abort(404)
        pass
    return render_template('history.html',\
                            pagination=pagination,\
                            history_entries=history_entries)
@app.route('/clone/<int:clone_id>')
def clone(clone_id):
    """Pre-fill the input form with the values of an existing history entry."""
    row = g.db.execute('select pxe_title,\
                        repo_url,\
                        repo_kernel,\
                        repo_initrd,\
                        inst_flag,\
                        pxe_comment from pxeitems where id=?',[clone_id]).fetchone()
    # map the row columns onto the form field names, in order
    default_val = {}
    for i,k in enumerate(['title', \
                          'repo_url', \
                          'repo_kernel', \
                          'repo_initrd', \
                          'inst_method', \
                          'comment']):
        default_val[k] = row[i]
    flash(u'Cloned Entry!','green')
    return render_template('form.html', default_val=default_val)
@app.route('/about')
@app.route('/about/')
def about():
    """Render the static about page."""
    return render_template('about.html')
# For the pagination
def url_for_other_page(page):
    """Return the URL of the current endpoint with *page* substituted.

    Used by the pagination macro in the templates (registered as a Jinja
    global below).
    """
    # FIX: function name was extraction-garbled ('url_for_ot | her_page');
    # restored to match the Jinja global registered on the next line.
    args = request.view_args.copy()
    args['page'] = page
    return url_for(request.endpoint, **args)
app.jinja_env.globals['url_for_other_page'] = url_for_other_page
@app.route('/confirm', methods | =['POST'])
def confirm_entry():
#Input checking
try:
for x,y in [[request.form['pxe_title'],'pxe_title'], \
[request.form['repo_url'], 'repo_url'], \
[request.form['repo_kernel'], 'file_path'], \
[request.form['repo_initrd'], 'file_path']]:
chk_input(x,y)
except ValueError as e:
flash(e.args[0],'error')
return redirect(url_for('form'))
# Assign to the dictionary
items['repo_kernel'] = request.form['repo_kernel']
items['repo_url'] = request.form['repo_url']
items['repo_initrd'] = request.form['repo_initrd']
items['pxe_title'] = request.form['pxe_title']
items['pxe_comment'] = request.form['pxe_comment']
items['inst_flag'] = request.form['inst_method']
# Generate a random string
items['random_str'] = ''.join(random.choice(string.ascii_lowercase) for _ in range(4))
items['unix_time'] = ''
# Show the entry which will be generated on the confirm page
gen_format = ["menu label ^a - " + items['pxe_title'], \
|
youtube/cobalt | build/android/gyp/apkbuilder.py | Python | bsd-3-clause | 22,278 | 0.009202 | #!/usr/bin/env python3
#
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Adds the code parts to a resource APK."""
import argparse
import logging
import os
import shutil
import sys
import tempfile
import zipfile
import zlib
import finalize_apk
from util import build_utils
from util import diff_utils
from util import zipalign
# Input dex.jar files are zipaligned.
zipalign.ApplyZipFileZipAlignFix()
# Taken from aapt's Package.cpp:
_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
'.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
'.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
'.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
'.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
def _ParseArgs(args):
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
parser.add_argument(
'--assets',
help='GYP-list of files to add as assets in the form '
'"srcPath:zipPath", where ":zipPath" is optional.')
parser.add_argument(
'--java-resources', help='GYP-list of java_resources JARs to include.')
parser.add_argument('--write-asset-list',
action='store_true',
help='Whether to create an assets/assets_list file.')
parser.add_argument(
'--uncompressed-assets',
help='Same as --assets, except disables compression.')
parser.add_argument('--resource-apk',
help='An .ap_ file built using aapt',
required=True)
parser.add_argument('--output-apk',
help='Path to the output file',
required=True)
parser.add_argument('--format', choices=['apk', 'bundle-module'],
default='apk', help='Specify output format.')
parser.add_argument('--dex-file',
help='Path to the classes.dex to use')
parser.add_argument(
'--jdk-libs-dex-file',
help='Path to classes.dex created by dex_jdk_libs.py')
parser.add_argument('--uncompress-dex', action='store_true',
help='Store .dex files uncompressed in the APK')
parser.add_argument('--native-libs',
action='append',
help='GYP-list of native libraries to include. '
'Can be specified multiple times.',
default=[])
parser.add_argument('--secondary-native-libs',
action='append',
help='GYP-list of native libraries for secondary '
'android-abi. Can be specified multiple times.',
default=[])
parser.add_argument('--android-abi',
help='Android architecture to use for native libraries')
parser.add_argument('--secondary-android-abi',
help='The secondary Android architecture to use for'
'secondary native libraries')
parser.add_argument(
'--is-multi-abi',
action='store_true',
help='Will add a placeholder for the missing ABI if no native libs or '
'placeholders are set for either the primary or secondary ABI. Can only '
'be set if both --android-abi and --secondary-android-abi are set.')
parser.add_argument(
'--native-lib-placeholders',
help='GYP-list of native library placeholders to add.')
parser.add_argument(
'--secondary-native-lib-placeholders',
help='GYP-list of native library placeholders to add '
'for the secondary ABI')
parser.add_argument('--uncompress-shared-libraries', default='False',
choices=['true', 'True', 'false', 'False'],
help='Whether to uncompress native shared libraries. Argument must be '
'a boolean value.')
parser.add_argument(
'--apksigner-jar', help='Path to the apksigner executable.')
parser.add_argument('--zipalign-path',
help='Path to the zipalign executable.')
parser.add_argument('--key-path',
help='Path to keystore for signing.')
parser.add_argument('--key-passwd',
help='Keystore password')
parser.add_argument('--key-name',
help='Keystore name')
parser.add_argument(
'--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion')
parser.add_argument(
'--best-compression',
action='store_true',
help='Use zip -9 rather than zip -1')
parser.add_argument(
'--library-always-compress',
action='append',
help='The list of library files that we always compress.')
parser.add_argument(
'--library-renames',
action='append',
help='The list of library files that we prepend crazy. to their names.')
parser.add_argument('--warnings-as-errors',
action='store_true',
help='Treat all warnings as errors.')
diff_utils.AddCommandLineFlags(parser)
options = parser.parse_args(args)
options.assets = build_utils.ParseGnList(options.assets)
options.uncompressed_assets = build_utils.ParseGnList(
options.uncompressed_assets)
options.native_lib_placeholders = build_utils.ParseGnList(
options.native_lib_placeholders)
options.secondary_native_lib_placeholders = build_utils.ParseGnList(
options.secondary_native_lib_placeholders)
options.java_resources = build_utils.ParseGnList(options.java_resources)
options.native_libs = build_utils.ParseGnList(options.native_libs)
options.secondary_native_libs = build_utils.ParseGnList(
options.secondary_native_libs)
options.library_always_compress = build_utils.ParseGnList(
options.library_always_compress)
options.library_renames = build_utils.ParseGnList(options.library_renames)
# --apksigner-jar, --zipalign-path, --key-xxx arguments are
# required when building an APK, but not a bundle module.
if options.format == 'apk':
required_args = [
'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name'
]
for required in required_args:
if not vars(options)[required]:
raise Exception('Argument --%s is required for APKs.' % (
required.replace('_', '-')))
options.uncompress_shared_libraries = \
options.uncompress_shared_libraries in [ 'true', 'True' ]
if not options.android_abi and (options.native_libs or
options.native_lib_placeholders):
raise Exception('Must specify --android-abi with --native-libs')
if not options.secondary_android_abi and (options.secondary_native_libs or
options.secondary_native_lib_placeholders):
raise Exception('Must specify --secondary-android-abi with'
' --secondary-native-libs')
if options.is_multi_abi and not (options.android_abi
and options.secondary_android_abi):
raise Exception('Must specify --is-multi-abi with both --android-abi '
'and --secondary-android-abi.')
return options
def _SplitAssetPath(path):
"""Returns (src, dest) given an asset path in the form src[:dest]."""
path_parts = path.split(':')
src_path = path_parts[0]
if len(path_parts) > 1:
dest_path = path_parts[1]
else:
dest_path = os.path.basename(src_path)
return src_path, dest_path
def _ExpandPaths(paths):
"""Converts src:dst into tuples and enumerates files within directories.
Args:
paths: Paths in the form "src_path:dest_path"
Returns:
A list of (src_path, dest_path) tuples sorted by dest_path (for stable
ordering within output .apk).
"""
ret = []
for path in paths: |
src_path, dest_path = _SplitAssetPath(path)
if os.path.isdir(src_path):
for f in build_utils.FindInDirectory(src_path, '*'):
ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
else:
ret.append((src_path, dest_path))
ret.sort(key=lambda t:t[1])
return ret
def _GetAssetsToAdd(path_tu | ples,
|
msadegh97/IoT_first-project | server/server/urls.py | Python | mit | 326 | 0.01227 | from django.conf.urls import url,include
from django.contrib import admin
from cn_device import views
urlpatterns = [
url(r'^admin/', admin.site | .urls),
url(r'^send/(?P<id_ras>[0-9]+)/$',views.payam,name='send condition'),
url(r'^give/(?P<id_ras>[0-9]+)/(?P<bl>[0-1])/$', views.give_req | , name='give condition'),
]
|
F5Networks/f5-common-python | f5/bigip/tm/asm/policies/test/unit/test_character_sets.py | Python | apache-2.0 | 1,194 | 0 | # Copyright 2015 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed | under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from f5.bigip.tm.asm.policies.character_sets import Character_Sets
from f5.sdk_exception import UnsupportedOperation
import mock
import pytest
@pytest.fixture
def FakeChar():
fake_policy = mock.MagicMock()
fake_e = Character_Sets(fake_policy)
fake | _e._meta_data['bigip'].tmos_version = '11.6.0'
return fake_e
class TestCharacterSets(object):
def test_create_raises(self, FakeChar):
with pytest.raises(UnsupportedOperation):
FakeChar.create()
def test_delete_raises(self, FakeChar):
with pytest.raises(UnsupportedOperation):
FakeChar.delete()
|
ewheeler/rapidsms-core | lib/rapidsms/backends/email.py | Python | lgpl-3.0 | 4,516 | 0.007972 | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from __future__ import absolute_import
from rapidsms.message import Message
from rapidsms.connection import Connection
from . import backend
from email import message_from_string
from django.core.mail import *
class Backend(backend.Backend):
'''Uses the django mail object utilities to send outgoing messages
via email. Messages can be formatted in standard smtp, and these
parameters will end up going into the subject/to/from of the
email. E.g.
==
Subject: Test message
Hello Alice.
This is a test message with 5 header fields and 4 lines in the message body.
Your friend,
Bob
==
The following defaults are currently used in place of the expected
fields from smtp:
From: <configured login>
To: <connection identity>
Date: <datetime.now()>
'''
_title = "Email"
_connection = None
def configure(self, host="localhost", port=25, username="demo-user@domain.com",
password="secret", use_tls=True, fail_silently=False):
# the default information will not work, users need to configure this
# in their settings
# this is some commented out code that doesn't call django email packages
self._username = username
self._host = host
self._port = port
self._password = password
self._use_tls = use_tls
self._fail_silently = fail_silently
self._connection = SMTPConnection(username=username,
port=port,
host=host,
password=password,
use_tls=use_tls,
fail_silently=fail_silently)
def send(self, message):
destination = "%s" % (message.connection.identity)
subject, from_email, to_email, text = self._get_email_params(message)
email_message = EmailMessage(subject, text, from_email, to_email,
connection=self._connection)
# this is a fairly ugly hack to get html emails working properly
if text.startswith("<html>"):
email_message.content_subtype = "html"
result = email_message.send(fail_silently=self._fail_silently)
def start(self):
backend.Backend.start(self)
def stop(self):
backend.Backend.stop(self)
self.info("Shutting down...")
def _get_email_params(self, message):
"""Get the parameters needed by the Django email client
from a rapidsms message object. What is returned is a
4-element tuple containing:
(subject: a string
from_email: a string
to_email: a tuple
text: the message body )
"""
# todo: parsing of the subject/other params
# check CRLFs and USE THEM! if there are only newlines.
# this assumes that the message contains all or no CRLFs.
# see: http://tools.ietf.org/html/rfc2822.html
# another thing to note: this format doesn't like unicode
text = str(message.text)
if not "\r\n" in text:
text = text.replace("\n", "\r\n")
email_message = message_from_string(text)
# amazingly these keys are actually not case sensitive
if email_message.has_key("subject"):
subject = email_message["subject"]
else:
subject = ""
# todo: Django email doesn't appear to honor this.
# Maybe that's a good thing, as it prevents easy spoofing.
if email_message.has_key("from"):
from_string = email_m | essage[" | from"]
else:
from_string = self._username
# always use the identity in the message for "to",
# even if they specified one in the headers
to_string = message.connection.identity
if "," in to_string:
to_ple = to_string.split(",")
else:
to_ple = (to_string, )
# todo: honor dates? other params? would all be
# made much easier by moving to standard python email
# instead of django. left as a future decision.
return (subject, from_string, to_ple, email_message.get_payload())
|
ajaygarg84/sugar | src/jarabe/model/session.py | Python | gpl-2.0 | 3,705 | 0.00081 | # Copyright (C) 2008, Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gtk
import dbus
import os
import signal
import sys
import logging
from sugar3 import session
from sugar3 import env
_session_manager = None
def have_systemd():
return os.access("/run/systemd/seats", 0) >= 0
class SessionManager(session.SessionManager):
MODE_LOGOUT = 0
MODE_SHUTDOWN = 1
MODE_REBOOT = 2
def __init__(self):
session.SessionManager.__init__(self)
self._logout_mode = None
def logout(self):
self._logout_mode = self.MODE_LOGOUT
self.initiate_shutdown()
def shutdown(self):
self._logout_mode = self.MODE_SHUTDOWN
self.initiate_shutdown()
def reboot(self):
self._logout_mode = self.MODE_REBOOT
self.initiate_shutdown()
def shutdown_completed(self):
if env.is_emulator():
self._close_emulator()
elif self._logout_mode != self.MODE_LOGOUT:
bus = dbus.SystemBus()
if have_systemd():
try:
proxy = bus.get_object('org.freedesktop.login1',
'/org/freedesktop/login1')
pm = dbus.Interface(proxy,
'org.freedesktop.login1.Manager')
if self._logout_mode == self.MODE_SHUTDOWN:
pm.PowerOff(False)
elif self._logout_mode == self.MODE_REBOOT:
pm.Reboot(True)
except:
logging.exception('Can not stop sugar')
self.session.cancel_shutdown()
return
else:
CONSOLEKIT_DBUS_PATH = '/org/freedesktop/ConsoleKit/Manager'
try:
proxy = bus.get_object('org.freedesktop.ConsoleKit',
CONSOLEKIT_DBUS_PATH)
pm = dbus.Interface(proxy,
'org.freedesktop.ConsoleKit.Manager')
if self._logout_mode == self.MODE_SHUTDOWN:
pm.Stop()
| elif self._logout_m | ode == self.MODE_REBOOT:
pm.Restart()
except:
logging.exception('Can not stop sugar')
self.session.cancel_shutdown()
return
session.SessionManager.shutdown_completed(self)
Gtk.main_quit()
def _close_emulator(self):
Gtk.main_quit()
if 'SUGAR_EMULATOR_PID' in os.environ:
pid = int(os.environ['SUGAR_EMULATOR_PID'])
os.kill(pid, signal.SIGTERM)
# Need to call this ASAP so the atexit handlers get called before we
# get killed by the X (dis)connection
sys.exit()
def get_session_manager():
global _session_manager
if _session_manager == None:
_session_manager = SessionManager()
return _session_manager
|
adsorensen/girder | plugins/user_quota/server/__init__.py | Python | apache-2.0 | 1,930 | 0.002073 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2015 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitat | ions under the Licens | e.
###############################################################################
from girder import events
from girder.models.model_base import ValidationException
from girder.utility import setting_utilities
from . import constants
from .quota import QuotaPolicy, ValidateSizeQuota
@setting_utilities.validator((
constants.PluginSettings.QUOTA_DEFAULT_USER_QUOTA,
constants.PluginSettings.QUOTA_DEFAULT_COLLECTION_QUOTA
))
def validateSettings(doc):
val = doc['value']
val, err = ValidateSizeQuota(val)
if err:
raise ValidationException(err, 'value')
doc['value'] = val
def load(info):
quota = QuotaPolicy()
info['apiRoot'].collection.route('GET', (':id', 'quota'), quota.getCollectionQuota)
info['apiRoot'].collection.route('PUT', (':id', 'quota'), quota.setCollectionQuota)
info['apiRoot'].user.route('GET', (':id', 'quota'), quota.getUserQuota)
info['apiRoot'].user.route('PUT', (':id', 'quota'), quota.setUserQuota)
events.bind('model.upload.assetstore', 'userQuota', quota.getUploadAssetstore)
events.bind('model.upload.save', 'userQuota', quota.checkUploadStart)
events.bind('model.upload.finalize', 'userQuota', quota.checkUploadFinalize)
|
LiquidCode/Leap | leap/static_pages.py | Python | mit | 585 | 0 | from pyramid.httpexceptions import HTTPFound
from pyramid.view import view_config
@view_config(
route_name='leap.projects',
renderer='projects.mako',
)
def blankPage(request): return {}
@view_config(route_name='leap.source')
def github(request):
return HTTPFound(location="https://github.com/LiquidCode")
def configure(config):
config.add_navigation_link(route="leap.projects", text="Projects")
config.add_navigation_link(ro | ute="leap.source", text="Sources")
config.add_route("leap.projects", "/projects") |
config.add_route("leap.source", "/sources")
|
rkhleics/wagtailmenus | wagtailmenus/__init__.py | Python | mit | 1,864 | 0 | from wagtailmenus.utils.version import get_version, get_stable_branch_name
# major.minor.patch.release.number
# release must be one of alpha, beta, rc, or final
VERSION = (3, 1, 0, "alpha", 0)
__version__ = get_version(VERSION)
stable_branch_name = get_stable_branch_name(VERSION)
default_app_config = "wagtailmenus.apps.WagtailMenusConfig"
def get_main_menu_model_string():
"""
Get the dotted ``app.Model`` name for the main menu model as a string.
Useful for developers extending wagtailmenus, that need to refer to the
main menu model (such as in foreign keys), but the model itself is not
required.
"""
| from wagtailmenus.conf import settings
return settings.MAIN_MENU_MODEL
def get_flat_menu_model_string():
"""
Get the dotted ``app.Model`` name for the flat menu model as a string.
| Useful for developers extending wagtailmenus, that need to refer to the
flat menu model (such as in foreign keys), but the model itself is not
required.
"""
from wagtailmenus.conf import settings
return settings.FLAT_MENU_MODEL
def get_main_menu_model():
"""
Get the model from the ``WAGTAILMENUS_MAIN_MENU_MODEL`` setting.
Useful for developers extending wagtailmenus, and need the actual model.
Defaults to the standard :class:`~wagtailmenus.models.MainMenu` model
if no custom model is defined.
"""
from wagtailmenus.conf import settings
return settings.models.MAIN_MENU_MODEL
def get_flat_menu_model():
"""
Get the model from the ``WAGTAILMENUS_FLAT_MENU_MODEL`` setting.
Useful for developers extending wagtailmenus, and need to the actual model.
Defaults to the standard :class:`~wagtailmenus.models.FlatMenu` model
if no custom model is defined.
"""
from wagtailmenus.conf import settings
return settings.models.FLAT_MENU_MODEL
|
vedujoshi/os_tempest | tempest/api_schema/compute/v3/keypairs.py | Python | apache-2.0 | 1,333 | 0 | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in complianc | e with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND | , either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api_schema.compute import keypairs
get_keypair = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'keypair': {
'type': 'object',
'properties': {
'public_key': {'type': 'string'},
'name': {'type': 'string'},
'fingerprint': {'type': 'string'}
},
'required': ['public_key', 'name', 'fingerprint']
}
},
'required': ['keypair']
}
}
create_keypair = {
'status_code': [201],
'response_body': keypairs.create_keypair
}
delete_keypair = {
'status_code': [204],
}
|
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_10_01/operations/virtual_network_gateway_connections_operations.py | Python | mit | 38,583 | 0.002514 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import Clo | udError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class VirtualNetworkGatewayConnectionsOperations(object):
"""VirtualNetworkGatewayConnectionsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deseriali | zer: An objec model deserializer.
:ivar api_version: Client API version. Constant value: "2017-10-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-10-01"
self.config = config
def _create_or_update_initial(
self, resource_group_name, virtual_network_gateway_connection_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualNetworkGatewayConnection')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGatewayConnection', response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGatewayConnection', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, virtual_network_gateway_connection_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates or updates a virtual network gateway connection in the
specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the
virtual network gateway connection.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the create or update virtual
network gateway connection operation.
:type parameters:
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayConnection
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns
VirtualNetworkGatewayConnection or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayConnection]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = self._deserialize('VirtualNetworkGatewayConnection', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, virtual_network_gateway_connection_name, custom_headers=None, raw=False, **operation_config):
"""Gets the specified virtual network gateway connection by resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the
virtual network gateway connection.
:type virtual_network_gateway_connection_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: VirtualNetworkGatewayConnection or ClientRawResponse if
raw=true
:rtype:
~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayConnection
or ~msrest.pipeline.ClientRawResponse |
thomasem/nova | nova/scheduler/client/__init__.py | Python | apache-2.0 | 2,731 | 0 | # Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo_utils import importutils
from nova.scheduler import utils
class LazyLoader(object):
def __init__(self, klass, *args, **kwargs):
self.klass = klass
self.args = args
self.kwargs = kwargs
self.instance = None
def __getattr__(self, name):
return functools.partial(self.__run_method, name)
def __run_method(self, __name, *args, **kwargs):
if self.instance is None:
self.instance = self.klass(*self.args, **self.kwargs)
return getattr(self.instance, __name)(*args, **kwargs)
class SchedulerClient(object):
"""Client library for placing calls to the scheduler."""
def __init__(self):
self.queryclient = LazyLoader(importutils.import_class(
'nova.scheduler.client.query.SchedulerQueryClient'))
self.reportclient = LazyLoader(importutils.import_class(
'nova.scheduler.client.report.SchedulerReportClient'))
@utils.retry_select_destinations
def select_destinations(self, context, request_spec, filter_properties):
return self.queryclient.select_destinations(
context, request_spec, filter_properties)
def update_aggregates(self, context, aggregates):
self.queryclient.update_aggregates(context, aggregates)
def delete_aggregate(self, context, aggrega | te):
self.queryclient.delete_aggregate(context, aggregate)
def update_resource_stats(self, context, name, stats):
self.reportclient.update_resource_stats(context, name, stats)
def update_instance_info(self, context, host_name, instance_info):
self.queryclient.up | date_instance_info(context, host_name,
instance_info)
def delete_instance_info(self, context, host_name, instance_uuid):
self.queryclient.delete_instance_info(context, host_name,
instance_uuid)
def sync_instance_info(self, context, host_name, instance_uuids):
self.queryclient.sync_instance_info(context, host_name, instance_uuids)
|
jacksingleton/twitter-censorship | main.py | Python | mit | 623 | 0.009631 | #!/usr/bin/env python3
import os
import logging
import download_table_pages
i | mport download_notices
import parse_notice_tweets
import find_censored_tweets
#logging.basicConfig(level=logging.DEBUG)
for path in ['store/table_pages', 'store/notices', 'store/tweets']:
if not os.path.exists(path):
os.mkdirs(path)
withheld_tweets = (tweet
for table_page in download_table_page | s.fetch_table_pages()
for notice in download_notices.fetch_notices(table_page)
for tweet in parse_notice_tweets.parse_tweets_from(notice)
if find_censored_tweets.is_withheld(tweet))
for t in withheld_tweets: print(t)
|
PirateLearner/pi | PirateLearner/core/forms.py | Python | gpl-2.0 | 725 | 0.011034 | from djan | go import forms
from django.contrib.admin import widgets
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from crispy_forms.layout import Layout, Field, Fieldset, ButtonHolder
class PreviewForm(forms.Form):
object_list = forms.CharField(
label = "List of objects",
max_length = 500,
)
def __init__(self, *args, **kwargs):
super(PreviewForm, self).__init__(*args, **kwargs)
self.helper = | FormHelper()
self.helper.form_id = 'id-previewForm'
self.helper.form_class = 'blueForms'
self.helper.form_method = 'post'
self.helper.form_action = ''
self.helper.add_input(Submit('submit', 'Submit')) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.