repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
mlperf/training_results_v0.7 | Google/benchmarks/gnmt/implementations/gnmt-research-TF-tpu-v4-16/utils/nmt_utils.py | Python | apache-2.0 | 1,514 | 0.007926 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain | a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distribut | ed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions specifically for NMT."""
from __future__ import print_function
from REDACTED.tensorflow_models.mlperf.models.rough.nmt.utils import misc_utils as utils
__all__ = ["get_translation"]
def get_translation(nmt_outputs, tgt_eos, subword_option):
"""Given batch decoding outputs, select a sentence and turn to text."""
if tgt_eos: tgt_eos = tgt_eos.encode("utf-8")
# Select a sentence
output = nmt_outputs.tolist()
# If there is an eos symbol in outputs, cut them at that point.
if tgt_eos and tgt_eos in output:
output = output[:output.index(tgt_eos)]
if subword_option == "bpe": # BPE
translation = utils.format_bpe_text(output)
elif subword_option == "spm": # SPM
translation = utils.format_spm_text(output)
else:
translation = utils.format_text(output)
return translation
|
STLInspector/STLInspector | STLInspector/core/temporallogic/next.py | Python | apache-2.0 | 4,414 | 0.004078 | from clause import *
class NEXT(Clause):
"""
This class represents the temporal logic operation 'next'.
The class can be used for both STL and LTL Next-operators.
When it is used for LTL the interval bound is simply set to None.
Attributes:
operand (Clause) : Operand of this unary temporal logic expression.
bound (int) : Boundary for STL Nexts.
Clause attributes that are set: namestring (str) : String "N" used for printing.
Examples:
NEXT(AP("a")) - N (a)
NEXT(FINALLY(AP("a"), 1, 2), 3) - N[3](F[1,2](a))
NEXT(AND(AP("a"), OR(AP("b"), NEXT(AP("c")))) - N(a & (b | N(c)))
"""
def __init__(self, operand, bound=None):
Clause.__init__(self, "N", operand, None, bound)
# Overwrites method of class Clause
def expand(self):
return NEXT(self.operand1.expand(), self.lower_bound)
# Overwrites method of class Clause
def negationnormalform(self):
return NEXT(self.operand1.negationnormalform(), self.lower_bound)
# Overwrites method of class Clause
def nextnormalform(self, ininterval=0):
return NEXT(self.operand1.nextnormalform(ininterval), self.lower_bound)
# Overwrites method of class Clause
def encode(self, state=0):
state += 1 if self.lower_bound is None else self.lower_bound
return self.operand1.encode(state)
# Overwrites method of class Clause
def get_aps(self):
return self.operand1.get_aps()
def length(self):
""" Calculates the length (number of time steps) of a given formula. """
return self.operand1.length() + self.lower_bound
def adjust(self, c):
""" Adjusts formula to a given step width c by dividing all interval bounds by c. """
return NEXT(self.operand1.adjust(c), self.lower_bound / float(c))
def getLimits(self):
""" Returns list of integers used as interval limits. """
return self.operand1.getLimits() + [self.lower_bound]
@remove_duplicates
def aso(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.aso()]
@remove_duplicates
def mto(self):
return [self.operand1] + [NEXT(y, self.lower_bound) for y in self.operand1.mto()]
@remove_duplicates
def tio(self, lower_bound=None, upper_bound=None):
return [NEXT(y, self.lower_bound) for y in self.operand1.tio(lower_bound, upper_bound)]
@remove_duplicates
def ano(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.ano()]
@remove_duplicates
def oro(self, atomic_props):
return [NEXT(y, self.lower_bound) for y in self.operand1.oro(atomic_props)]
@remove_duplicates
def lro(self):
return [NEXT(y, self.lower_bound) for | y in self.operand1.lro()]
@remove_duplicates
def tro(self):
from _finally import FINALLY
from globally import GLOBALLY
return [FINALLY(self.operand1, self.lower_bound, self.lower_bound),
GLOBALLY(self.operand1, self.lower_bound, self.lower_bound)] \
| + [NEXT(y, self.lower_bound) for y in self.operand1.tro()]
@remove_duplicates
def io(self):
return [NEXT(self.operand1, self.lower_bound + 1)] \
+ [NEXT(self.operand1, self.lower_bound - 1) for fake_iterator in [1] if not self.lower_bound == 0] \
+ [NEXT(y, self.lower_bound) for y in self.operand1.io()]
@remove_duplicates
def eno(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.eno()]
@remove_duplicates
def rro(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.rro()]
@remove_duplicates
def mco(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.mco()]
@remove_duplicates
def sto(self, onezero):
return [NEXT(y, self.lower_bound) for y in self.operand1.sto(onezero)]
def ufc_plus(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.ufc_plus()]
def ufc_minus(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.ufc_minus()]
@remove_duplicates
def picc(self):
return [NEXT(y, self.lower_bound) for y in self.operand1.picc()]
|
red-hood/calendarserver | txdav/common/datastore/upgrade/sql/upgrades/addressbook_upgrade_from_1_to_2.py | Python | apache-2.0 | 3,445 | 0.002903 | # -*- test-case-name: txdav.common.datastore.upgrade.sql.test -*-
# #
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is dist | ributed on an "AS I | S" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# #
from twext.enterprise.dal.syntax import Update
from twisted.internet.defer import inlineCallbacks
from txdav.base.propertystore.base import PropertyName
from txdav.common.datastore.sql_tables import _ABO_KIND_GROUP, schema
from txdav.common.datastore.upgrade.sql.upgrades.util import updateAddressBookDataVersion, \
doToEachHomeNotAtVersion, removeProperty, cleanPropertyStore, \
logUpgradeStatus
from txdav.xml import element
"""
AddressBook Data upgrade from database version 1 to 2
"""
UPGRADE_TO_VERSION = 2
@inlineCallbacks
def doUpgrade(sqlStore):
"""
fill in members tables and increment data version
"""
yield populateMemberTables(sqlStore)
yield removeResourceType(sqlStore)
# bump data version
yield updateAddressBookDataVersion(sqlStore, UPGRADE_TO_VERSION)
@inlineCallbacks
def populateMemberTables(sqlStore):
"""
Set the group kind and and members tables
"""
@inlineCallbacks
def doIt(txn, homeResourceID):
"""
KIND is set to person by schema upgrade.
To upgrade MEMBERS and FOREIGN_MEMBERS:
1. Set group KIND (avoids assert)
2. Write groups. Write logic will fill in MEMBERS and FOREIGN_MEMBERS
(Remember that all members resource IDs must already be in the address book).
"""
home = yield txn.addressbookHomeWithResourceID(homeResourceID)
abObjectResources = yield home.addressbook().objectResources()
for abObject in abObjectResources:
component = yield abObject.component()
lcResourceKind = component.resourceKind().lower() if component.resourceKind() else component.resourceKind()
if lcResourceKind == "group":
# update kind
abo = schema.ADDRESSBOOK_OBJECT
yield Update(
{abo.KIND: _ABO_KIND_GROUP},
Where=abo.RESOURCE_ID == abObject._resourceID,
).on(txn)
abObject._kind = _ABO_KIND_GROUP
# update rest
yield abObject.setComponent(component)
logUpgradeStatus("Starting Addressbook Populate Members")
# Do this to each calendar home not already at version 2
yield doToEachHomeNotAtVersion(sqlStore, schema.ADDRESSBOOK_HOME, UPGRADE_TO_VERSION, doIt, "Populate Members")
@inlineCallbacks
def removeResourceType(sqlStore):
logUpgradeStatus("Starting Addressbook Remove Resource Type")
sqlTxn = sqlStore.newTransaction(label="addressbook_upgrade_from_1_to_2.removeResourceType")
yield removeProperty(sqlTxn, PropertyName.fromElement(element.ResourceType))
yield sqlTxn.commit()
yield cleanPropertyStore()
logUpgradeStatus("End Addressbook Remove Resource Type")
|
ApocalypticOctopus/automaticat | msbuild.py | Python | gpl-2.0 | 345 | 0.005797 | '''
Created on Oct 29, 2015
@author: Apocalyptic Oct | opus
'''
from subprocess import Popen, PIPE
ms | build_path = "C:\\Program Files (x86)\\MSBuild\\14.0\\Bin\\amd64\\MSBuild.exe"
def build(path):
cmd = '"' + msbuild_path + '" "' + path + '"'
print(cmd)
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
|
SALib/SALib | src/SALib/analyze/morris.py | Python | mit | 16,738 | 0.00006 | from typing import Dict, List
import numpy as np
from scipy.stats import norm
from . import common_args
from ..util import (read_param_file, compute_groups_matrix, ResultDict,
_define_problem_with_groups, _compute_delta, _check_groups)
def analyze(problem: Dict, X: np.ndarray, Y: np.ndarray,
num_resamples: int = 100, conf_level: float = 0.95,
print_to_console: bool = False, num_levels: int = 4,
seed=None) -> np.ndarray:
"""Perform Morris Analysis on model outputs.
Returns a dictionary with keys 'mu', 'mu_star', 'sigma', and
'mu_star_conf', where each entry is a list of parameters containing
the indices in the same order as the parameter file.
Notes
-----
Compatible with:
`morris` : :func:`SALib.sample.morris.sample`
Parameters
----------
problem : dict
The problem definition
X : numpy.array
The NumPy matrix containing the model inputs of dtype=float
Y : numpy.array
The NumPy array containing the model outputs of dtype=float
num_resamples : int
The number of resamples used to compute the confidence
intervals (default 1000)
conf_level : float
The confidence interval level (default 0.95)
print_to_console : bool
Print results directly to console (default False)
num_levels : int
The number of grid levels, must be identical to the value
passed to SALib.sample.morris (default 4)
seed : int
Seed to generate a random number
Returns
-------
Si : dict
A dictionary of sensitivity indices containing the following entries.
- `mu` - the mean elementary effect
- `mu_star` - the absolute of the mean elementary effect
- `sigma` - the standard deviation of the elementary effect
- `mu_star_conf` - the bootstrapped confidence interval
- `names` - the names of the parameters
References
----------
.. [1] Morris, M. (1991). "Factorial Sampling Plans for Preliminary
Computational Experiments." Technometrics, 33(2):161-174,
doi:10.1080/00401706.1991.10484804.
.. [2] Campolongo, F., J. Cariboni, and A. Saltelli (2007). "An effective
screening design for sensitivity analysis of large models."
Environmental Modelling & Software, 22(10):1509-1518,
doi:10.1016/j.envsoft.2006.10.004.
Examples
--------
>>> X = morris.sample(problem, 1000, num_levels=4)
>>> Y = Ishigami.evaluate(X)
>>> Si = morris.analyze(problem, X, Y, conf_level=0.95,
>>> print_to_console=True, num_levels=4)
"""
if seed:
np.random.seed(seed)
_define_problem_with_groups(problem)
_check_if_array_of_floats(X)
_check_if_array_of_floats(Y)
delta = _compute_delta(num_levels)
num_vars = problem['num_vars']
groups = _check_groups(problem)
if not groups:
number_of_groups = num_vars
else:
groups, unique_group_names = compute_groups_matrix(groups)
number_of_groups = len(set(unique_group_names))
# End if
num_trajectories = int(Y.size / (number_of_groups + 1))
trajectory_size = int(Y.size / num_trajectories)
elementary_effects = _compute_elementary_effects(X, Y,
trajectory_size, delta)
Si = _compute_statistical_outputs(elementary_effects, num_vars,
num_resamples, conf_level, groups,
unique_group_names)
if print_to_console:
print(Si.to_df())
return Si
def _compute_statistical_outputs(elementary_effects: np.ndarray, num_vars: int,
num_resamples: int, conf_level: float,
groups: np.ndarray,
unique_group_names: List) -> ResultDict:
""" Computes the statistical parameters related to Morris method.
Parameters
----------
elementary_effects: np.ndarray
Morris elementary effects.
num_vars: int
Number of problem's variables
num_resamples: int
Number of resamples
conf_level: float
Confidence level
groups: np.ndarray
Array defining the distribution of groups
unique_group_names: List
Names of the groups
Returns
-------
Si: ResultDict
Morris statistical parameters.
"""
Si = ResultDict((k, [None] * num_vars) for k in ['names', 'mu', 'mu_star',
'sigma', 'mu_star_conf'])
mu = np.average(elementary_effects, 1)
mu_star = np.average(np.abs(elementary_effects), 1)
sigma = np.std(elementary_effects, axis=1, ddof=1)
mu_star_conf = _compute_mu_star_confidence(elementary_effects, num_vars,
num_resamples, conf_level)
Si['names'] = unique_group_names
Si['mu'] = _compute_grouped_sigma(mu, groups)
Si['mu_star'] = _compute_grouped_metric(mu_star, groups)
Si['sigma'] = _compute_grouped_sigma(sigma, groups)
Si['mu_star_conf'] = _compute_grouped_metric(mu_star_conf, groups)
return Si
def _compute_grouped_sigma(ungrouped_sigma: np.ndarray,
groups: np.ndarray) -> np.ndarray:
""" Sigma values for the groups.
Returns sigma for the groups of parameter values in the argument
ungrouped_metric where the group consists of no more than
one parameter
Parameters
----------
ungrouped_sigma: np.ndarray
Sigma values calculated without considering the groups
groups: np.ndarray
Array defining the distribution of groups
Returns
-------
sigma: np.ndarray
Sigma values for the groups.
"""
sigma_agg = _compute_grouped_metric(ungrouped_sigma, groups)
sigma = np.zeros(groups.shape[1], dtype=float)
np.copyto(sigma, sigma_agg, where=groups.sum(axis=0) == 1)
np.copyto(sigma, np.NAN, where=groups.sum(axis=0) != 1)
return sigma
def _compute_grouped_metric(ungrouped_metric: np.ndarray,
groups: np.ndarray) -> np.ndarray:
""" Computes the mean value for the groups of parameter values.
Parameters
----------
ungrouped_metric: np.ndarray
Metric calculated without considering the groups
groups: np.ndarray
Array defining the distribution of groups
Returns
-------
mean_of_mu_star: np.ndarray
Mean value f | or the groups of parameter values
"""
groups = np.array(groups, dtype=bool)
mu_star_masked = np.ma.masked_array(u | ngrouped_metric * groups.T,
mask=(groups ^ 1).T)
mean_of_mu_star = np.ma.mean(mu_star_masked, axis=1)
return mean_of_mu_star
def _reorganize_output_matrix(output_array: np.ndarray,
value_increased: np.ndarray,
value_decreased: np.ndarray,
increase: bool = True) -> np.ndarray:
"""Reorganize the output matrix.
This method reorganizes the output matrix in a way that allows the
elementary effects to be computed as a simple subtraction between two
arrays. It repositions the outputs in the output matrix according to the
order they changed during the formation of the trajectories.
Parameters
----------
output_array: np.ndarray
Matrix of model output values
value_increased: np.ndarray
Input variables that had their values increased when forming the
trajectories matrix
value_decreased: np.ndarray
Input variables that had their values decreased when forming the
trajectories matrix
increase: bool
Direction to consider (values that increased or decreased). "Increase"
is the default value.
Returns
-------
"""
if increase:
pad_up = (1, 0)
pad_lo = (0, 1)
else:
pad_up = (0, 1)
pad_lo = (1, 0)
value_increased = np.pad(value_increased, ((0, 0), pad_up, ( |
DavidMikeSimon/bolero | plottimes.py | Python | gpl-3.0 | 1,597 | 0.035066 | #!/usr/bin/python
import sys, numpy, pylab, matplotlib, re
pat = re.compile(r"[^-]+-[^-]+-(\S+)")
# First pass | : Figure out the minimum and maximum values
vmin, vmax = -1, -1
for name in sys.argv[1:]:
match = pat.match(name)
if match:
fh = open(name)
vals = []
for line in fh:
vals.append(int(line.strip())/1000.0)
if vmin == -1:
vmin = numpy.min(vals)
else:
v | min = min(numpy.min(vals), vmin)
if vmax == -1:
vmax = numpy.max(vals)
else:
vmax = max(numpy.max(vals), vmax)
fh.close()
binwidth = (vmax-vmin)/200.0
# Second pass: Generate the plots
fig = pylab.figure(facecolor = "white")
a1rect = (0.09, 0.08, 0.67, 0.85)
a2rect = (0.82, 0.08, 0.16, 0.85)
lines = []
ptitles = []
n = 0
for name in sys.argv[1:]:
match = pat.match(name)
if match:
fh = open(name)
vals = []
for line in fh:
vals.append(int(line.strip())/1000.0)
# Time graph
chr = ('x', 'o', 's')[n]
n += 1
a = fig.add_axes(a1rect)
lines.append(a.plot(vals, chr))
# Histogram
flabels = []
fvals = []
x = vmin
while x < vmax:
flabels.append(x + binwidth/2)
fvals.append(len([v for v in vals if v >= x and v < (x+binwidth)]))
x += binwidth
a = fig.add_axes(a2rect)
a.plot(fvals, flabels, '-')
ptitles.append(match.group(1))
fh.close()
# Time graph
a = fig.add_axes(a1rect)
a.set_xlabel('Test #')
a.set_ylabel('Elapsed (sec)')
# Frequency graph
a = fig.add_axes(a2rect)
a.set_title("Distribution")
a.set_xticks([])
fig.legend(lines, ptitles, 'upper center', prop = matplotlib.font_manager.FontProperties(size='smaller'))
pylab.show()
|
schollz/extract_recipe | get_recipes/downloadRecipes.py | Python | apache-2.0 | 3,312 | 0.023249 | import json
import os
import urllib2
import urllib
import html2text
from unidecode import unidecode
import time
import urllib
import logging
import os
import os.path
import sys
import threading
if os.path.isfile('recipeitems-latest.json'):
pass
else:
os.system('wget http://openrecipes.s3.amazonaws.com/recipeitems-latest.json.gz')
os.system('gunzip recipeitems-latest.json.gz')
if not os.path.exists('recipes'):
os.makedirs('recipes')
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M:%S',
filename='log',
filemode='a')
def get_url_markdown(baseurl,start,increment):
'''
opener = urllib2.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0')]
try:
j = opener.open(baseurl)
except:
return None
data = j.read()
'''
urlHandler = urllib2.urlopen(baseurl)
data = urlHandler.read()
'''
os.system('wget -O temp' + str(start)+"_"+str(increment) + ' ' + baseurl)
data = open('temp' + s | tr(start)+"_"+str(increment),'rU').read() |
'''
h = html2text.HTML2Text()
h.ignore_links = True
h.ignore_images = True
h.body_width = 10000
data = h.handle(unidecode(unicode(data,errors='ignore')))
return unidecode(data)
def worker(start,increment):
logger = logging.getLogger('worker'+str(start)+"_"+str(increment))
"""thread worker function"""
print 'Worker: %s/%s' % (start,increment)
indexFile = 'recipes/index'+str(start)+"_"+str(increment)+'.txt'
lastLine = ""
if os.path.isfile(indexFile):
with open(indexFile,'rb') as f:
for line in f:
lastLine = line
lastfileNum = int(lastLine.split()[0])
else:
lastfileNum = -1
fileNum = 0
t = time.time()
with open('recipeitems-latest.json','rb') as f:
for line in f:
fileNum = fileNum + 1
if fileNum % increment == start:
folderSave = str(int(fileNum/500))
if not os.path.exists('recipes/' + folderSave):
os.makedirs('recipes/' + folderSave)
if fileNum>lastfileNum:
recipe = json.loads(line)
logger.info(str(fileNum) + "\t" + recipe['url'] + '\t' + recipe['name'])
t=time.time()
recipeMD = get_url_markdown(recipe['url'],start,increment)
logger.info('%s seconds' % str(round(time.time()-t,1)))
if recipeMD is not None:
with open('recipes/' + folderSave + '/' + str(fileNum) + '.md','wb') as g:
g.write(recipeMD)
#os.system('bzip2 ' + 'recipes/' + folderSave + '/' + str(fileNum) + '.md')
with open(indexFile,'a') as g:
g.write(str(fileNum) + "\t" + recipe['url'] + '\t' + unidecode(recipe['name']) + '\n')
else:
with open(indexFile,'a') as g:
g.write(str(fileNum) + "\t" + recipe['url'] + '\t' + 'None' + '\n')
return
threads = []
numThreads = 15
for i in range(numThreads):
t = threading.Thread(target=worker, args=(i,numThreads,))
threads.append(t)
t.start() |
stormi/tsunami | src/secondaires/navigation/commandes/matelot/renommer.py | Python | bsd-3-clause | 3,241 | 0.00031 | # -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO Ematelot SHA | LL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED T | O, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'renommer' de la commande 'matelot'."""
from primaires.interpreteur.masque.parametre import Parametre
from secondaires.navigation.equipage.ordres.revenir import Revenir
class PrmRenommer(Parametre):
"""Commande 'matelot renommer'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "renommer", "rename")
self.schema = "<ancien:nom_matelot> <nouveau:nom_matelot>"
self.tronquer = True
self.aide_courte = "renomme un matelot"
self.aide_longue = \
"Cette commande permet de changer le nom d'un matelot. " \
"Vous devez entrer en premier paramètre son ancien nom " \
"et en second paramètre son nouveau nom (un mot seulement)."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
nouveau = self.noeud.get_masque("nouveau")
nouveau.proprietes["nouveau"] = "True"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
salle = personnage.salle
navire = salle.navire
matelot = dic_masques["ancien"].matelot
nouveau_nom = dic_masques["nouveau"].nom_matelot.capitalize()
equipage = navire.equipage
if not navire.a_le_droit(personnage, "maître d'équipage"):
personnage << "|err|Vous ne pouvez donner d'ordre sur ce " \
"navire.|ff|"
return
personnage << "{} se nomme désormais {}.".format(
matelot.nom.capitalize(), nouveau_nom)
equipage.renommer_matelot(matelot, nouveau_nom)
|
rflynn/sqlacodegen | sqlacodegen/main.py | Python | mit | 2,382 | 0.007137 | """ """
from __future__ import unicode_literals, division, print_function, absolute_import
import argparse
import codecs
import sys
from sqlalchemy.engine import create_engine
from sqlalchemy.schema import MetaData
from sqlacodegen.codegen import CodeGenerator
import sqlacodegen
def main():
parser = argparse.ArgumentParser(description='Generates SQLAlchemy model code from an existing database.')
parser.add_argument('url', nargs='?', help='SQLAlchemy url to the database')
parser.add_argument('--version', action='store_true', help="print the version number and exit")
parser.add_argument('--schema', help='load tables from an alternate schema')
parser.add_argument('--tables', help='tables to process (comma-separated, default: all)')
parser.add_argument('--noviews', action='store_true', help="ignore views")
parser.add_argument('--noindexes', action='store_true', | help='ignore indexes')
parser.add_argument('--noconstraints', action='store_true', help='ig | nore constraints')
parser.add_argument('--nojoined', action='store_true', help="don't autodetect joined table inheritance")
parser.add_argument('--noinflect', action='store_true', help="don't try to convert tables names to singular form")
parser.add_argument('--noclasses', action='store_true', help="don't generate classes, only tables")
parser.add_argument('--alwaysclasses', action='store_true', help="always generate classes")
parser.add_argument('--nosequences', action='store_true', help="don't auto-generate postgresql sequences")
parser.add_argument('--outfile', help='file to write output to (default: stdout)')
args = parser.parse_args()
if args.version:
print(sqlacodegen.version)
return
if not args.url:
print('You must supply a url\n', file=sys.stderr)
parser.print_help()
return
engine = create_engine(args.url)
metadata = MetaData(engine)
tables = args.tables.split(',') if args.tables else None
metadata.reflect(engine, args.schema, not args.noviews, tables)
outfile = codecs.open(args.outfile, 'w', encoding='utf-8') if args.outfile else sys.stdout
generator = CodeGenerator(metadata, args.noindexes, args.noconstraints, args.nojoined, args.noinflect,
args.noclasses, args.alwaysclasses, args.nosequences)
generator.render(outfile)
|
rspeer/solvertools | scripts/build_search_index.py | Python | mit | 3,318 | 0.000904 | from solvertools.wordlist import WORDS
from solvertools.normalize import slugify, sanitize
from solvertools.util import data_path, corpus_path
from whoosh.fields import Schema, ID, TEXT, KEYWORD, NUMERIC
from whoosh.analysis import StandardAnalyzer
from whoosh.index import create_in
import nltk
import os
from tqdm import tqdm
schema = Schema(
slug=ID,
text=TEXT(stored=True, analyzer=StandardAnalyzer()),
definition=TEXT(stored=True, analyzer=StandardAnalyzer()),
length=NUMERIC
)
def init_search_index():
nltk.download('wordnet')
from nltk.corpus import wordnet
get_synset = wordnet._synset_from_pos_and_offset
def get_adjacent(synset):
return [
name
for pointer_tuples in synset._pointers.values()
for pos, offset in pointer_tuples
for name in get_synset(pos, offset).lemma_names()
]
os.makedirs(data_path('search'), exist_ok=True)
ix = create_in(data_path('search'), schema)
writer = ix.writer(procs=4)
# Add Wikipedia links
for line in tqdm(o | pen(data_path('corpora/wikipedia.txt')), desc='wikipedia'):
title, summary = line.split('\t', 1)
summary = summary.rstrip()
if title and summary:
slug = slugify(title)
writer | .add_document(
slug=slug,
text=title,
definition=summary,
length=len(slug)
)
# Add lookups from a phrase to a word in that phrase
for slug, freq, text in tqdm(WORDS.iter_all_by_freq(), desc='phrases'):
words = text.split()
if freq < 10000:
break
if len(words) > 1:
for word in words:
if WORDS.logprob(word) < -7:
writer.add_document(
slug=slug,
text=word,
definition=text,
length=len(slug)
)
# Add crossword clues
for corpus in ('crossword_clues.txt', 'more_crossword_clues.txt'):
for line in tqdm(open(corpus_path(corpus), encoding='utf-8'), desc=corpus):
text, defn = line.rstrip().split('\t')
slug = slugify(text)
writer.add_document(
slug=slug,
text=text,
definition=defn,
length=len(slug)
)
# Add WordNet glosses and links
synsets = wordnet.all_synsets()
for syn in tqdm(synsets, desc='wordnet'):
lemmas = [lem.replace('_', ' ') for lem in syn.lemma_names()]
related = [lem.replace('_', ' ') for lem in get_adjacent(syn)]
related2 = lemmas + related
links = ', '.join(related2).upper()
defn_parts = [syn.definition()]
for example in syn.examples():
defn_parts.append('"%s"' % example)
defn_parts.append(links)
defn = '; '.join(defn_parts)
for name in lemmas:
this_slug = slugify(name)
writer.add_document(
slug=this_slug,
text=name.upper(),
definition=defn,
length=len(this_slug)
)
print("Committing.")
writer.commit(optimize=True)
return ix
if __name__ == '__main__':
init_search_index()
|
org-arl/dronesim | dronesim.py | Python | bsd-3-clause | 6,968 | 0.014351 | ###############################################################################
#
# Drone Simulator
#
# Copyright (c) 2017, Mandar Chitre
#
# This file is part of dronesim which is released under Simplified BSD License.
# See file LICENSE or go to http://www.opensource.org/licenses/BSD-3-Clause
# for full license details.
#
# Developed at the National University of Singapore (NUS)
# as part of EG1112: Engineering Principles & Practice (EPP) II
#
###############################################################################
import numpy as _np
import vpython as _vp
import transforms3d.euler as _euler
### settings
_dt = 0.025
_update_dt = 0.1
_follow_drone = True
_size = 0.5
_mass = 1.0
_air_density = 1.2
_gravity = 9.8
_ground_friction = 0.8
_lin_drag_coef = 0.5 * _air_density * _np.pi * 0.47
_rot_drag_coef = _size**2
_power_coef = 5.0
### initialize public variables
time = 0.0
canvas = _vp.canvas(background=_vp.color.cyan, range=10, forward=_vp.vector(1,-0.2,0), caption='')
ground = _vp.box(pos=_vp.vector(0,-_size-0.1,0), length=1000, height=0.2, width=1000, color=_vp.color.green, texture=_vp.textures.rough)
### class definition
class Drone:
    """Simulated quadcopter: simple rigid-body physics plus VPython display.

    World coordinates use +y as up; attitude is stored as Euler angles in
    ``pqr`` and converted to axis/angle for drawing and force resolution.
    """
    def __init__(self):
        self.updated = None  # optional callback invoked after each update()
        self.size = _size
        self.cgpos = -0.25 * _size  # centre of gravity sits below body centre
        self.energy = 0.0  # accumulated energy use (see update())
        self.body = _vp.sphere(radius=1.0*_size, color=_vp.color.red)
        self.top = _vp.sphere(radius=0.2*_size, color=_vp.color.blue)
        # prop1 is orange so the drone's heading is visible; the rest are blue
        self.prop1 = _vp.ring(radius=0.3*_size, thickness=0.05*_size, color=_vp.color.orange)
        self.prop2 = _vp.ring(radius=0.3*_size, thickness=0.05*_size, color=_vp.color.blue)
        self.prop3 = _vp.ring(radius=0.3*_size, thickness=0.05*_size, color=_vp.color.blue)
        self.prop4 = _vp.ring(radius=0.3*_size, thickness=0.05*_size, color=_vp.color.blue)
        self.set_mass(_mass)
        self.set_wind(0.0)
        self.set_thrust(0.0, 0.0, 0.0, 0.0)
        self.reset()
    def reset(self):
        """Return the drone to the origin at rest and redraw it."""
        # NOTE(review): assigns mass directly rather than via set_mass(), so
        # inertia is not recomputed -- harmless while _mass is constant.
        self.mass = _mass
        self.xyz = _vp.vector(0,0,0)
        self.xyz_dot = _vp.vector(0,0,0)
        self.pqr = _vp.vector(0,0,0)
        self.pqr_dot = _vp.vector(0,0,0)
        self.wind = _vp.vector(0,0,0)
        self.draw()
    def draw(self):
        """Move all display objects to match the current pose."""
        axis, theta = _euler.euler2axangle(self.pqr.x, self.pqr.y, self.pqr.z)
        axis = _vp.vector(axis[0], axis[1], axis[2])
        up = _vp.rotate(_vp.vector(0,1,0), theta, axis)  # body-up in world frame
        self.body.pos = self.xyz
        self.top.pos = self.xyz + up*_size
        # rotors sit 1.3 radii out along the body x/z axes
        self.prop1.pos = self.xyz + _vp.rotate(_vp.vector(1.3*_size,0,0), theta, axis)
        self.prop2.pos = self.xyz + _vp.rotate(_vp.vector(0,0,1.3*_size), theta, axis)
        self.prop3.pos = self.xyz + _vp.rotate(_vp.vector(-1.3*_size,0,0), theta, axis)
        self.prop4.pos = self.xyz + _vp.rotate(_vp.vector(0,0,-1.3*_size), theta, axis)
        self.prop1.axis = up
        self.prop2.axis = up
        self.prop3.axis = up
        self.prop4.axis = up
        if _follow_drone:
            canvas.center = self.xyz
        canvas.caption = 'time = %0.1f, pos = (%0.1f, %0.1f, %0.1f), energy = %0.1f' % (time, self.xyz.x, self.xyz.y, self.xyz.z, self.energy)
    def update(self, dt):
        """Integrate the physics forward by dt seconds and redraw."""
        # forces
        axis, theta = _euler.euler2axangle(self.pqr.x, self.pqr.y, self.pqr.z)
        axis = _vp.vector(axis[0], axis[1], axis[2])
        up = _vp.rotate(_vp.vector(0,1,0), theta, axis)
        a = _vp.vector(0, -_gravity, 0)
        # total rotor thrust acts along body-up; wind enters as a force
        a = a + (self.thrust1+self.thrust2+self.thrust3+self.thrust4)/self.mass * up + self.wind/self.mass
        # NOTE(review): |v|**2 multiplied by the velocity vector gives a
        # |v|**3 drag magnitude -- confirm quadratic drag was intended
        a = a - (_lin_drag_coef * _vp.mag(self.xyz_dot)**2)/self.mass * self.xyz_dot
        self.xyz_dot = self.xyz_dot + a * dt
        # torques (ignoring propeller torques)
        cg = self.cgpos * up  # gravity lever arm from the offset CG
        tpos1 = _vp.rotate(_vp.vector(1.3*_size,0,0), theta, axis)
        tpos2 = _vp.rotate(_vp.vector(0,0,1.3*_size), theta, axis)
        tpos3 = _vp.rotate(_vp.vector(-1.3*_size,0,0), theta, axis)
        tpos4 = _vp.rotate(_vp.vector(0,0,-1.3*_size), theta, axis)
        torque = _vp.cross(cg, _vp.vector(0, -_gravity, 0))
        torque = torque + _vp.cross(tpos1, self.thrust1 * up)
        torque = torque + _vp.cross(tpos2, self.thrust2 * up)
        torque = torque + _vp.cross(tpos3, self.thrust3 * up)
        torque = torque + _vp.cross(tpos4, self.thrust4 * up)
        torque = torque - _rot_drag_coef * self.pqr_dot  # rotational damping
        aa = torque/self.inertia
        if _vp.mag(aa) > 0:
            # reinterpret the axis-angle acceleration as Euler-angle rates
            # NOTE(review): approximation -- axis-angle and Euler rates are
            # not strictly interchangeable; confirm this is acceptable here
            aai, aaj, aak = _euler.axangle2euler((aa.x, aa.y, aa.z), _vp.mag(aa))
            aa = _vp.vector(aai, aaj, aak)
            self.pqr_dot = self.pqr_dot + aa * dt
        else:
            self.pqr_dot = _vp.vector(0,0,0)
        # ground interaction
        if self.xyz.y <= 0:
            self.xyz.y = 0
            if self.xyz_dot.y <= 0:
                # cancel downward motion and damp the rest via ground friction
                self.xyz_dot.x = self.xyz_dot.x * _ground_friction
                self.xyz_dot.y = 0
                self.xyz_dot.z = self.xyz_dot.z * _ground_friction
                self.pqr_dot = self.pqr_dot * _ground_friction
        # energy update
        # power model: P = _power_coef * thrust**1.5 summed over rotors
        self.energy += _power_coef * (self.thrust1**1.5 + self.thrust2**1.5 + self.thrust3**1.5 + self.thrust4**1.5) * dt
        # time update
        self.xyz += self.xyz_dot * dt
        self.pqr += self.pqr_dot * dt
        # callback
        if self.updated is not None:
            self.updated(self)
        self.draw()
    # convenience accessors (world-frame position and Euler attitude)
    def altitude(self):
        return self.xyz.y
    def roll(self):
        return self.pqr.x
    def yaw(self):
        return self.pqr.y
    def pitch(self):
        return self.pqr.z
    def x(self):
        return self.xyz.x
    def y(self):
        return self.xyz.y
    def z(self):
        return self.xyz.z
    def set_mass(self, m):
        """Set the mass and recompute the rotational inertia."""
        self.mass = m
        # I = 2/3 m r^2 (thin spherical shell) -- TODO confirm a shell, not
        # a solid sphere (2/5 m r^2), is the intended model
        self.inertia = 2.0/3.0 * self.mass * self.size**2
    def set_wind(self, v):
        """Set a horizontal wind force of scale v in a random direction."""
        self.wind = v * _vp.vector(_np.random.normal(), 0.0, _np.random.normal())
    def set_thrust(self, t1, t2, t3, t4):
        """Set the four rotor thrusts individually."""
        self.thrust1 = t1
        self.thrust2 = t2
        self.thrust3 = t3
        self.thrust4 = t4
    def set_updated_callback(self, cb):
        """Register cb(drone) to be invoked after every physics step."""
        self.updated = cb
### initialize drone (public variable)
drone = Drone()  # module-level singleton used by the helper functions below
### utility functions (public)
def reset():
    """Reset the simulation clock and the drone state, then sync the display."""
    global canvas, drone, time
    time = 0.0
    drone.reset()
    canvas.waitfor('redraw')
def delay(t):
    """Advance the simulation by t seconds in _dt steps, redrawing periodically."""
    global canvas, _update_dt, time, _dt, drone
    t0 = time
    t1 = time + t
    while time < t1:
        drone.update(_dt)
        time += _dt
        # throttle rendering: redraw only every _update_dt of simulated time
        if time-t0 > _update_dt:
            canvas.waitfor('redraw')
            t0 = time
    canvas.waitfor('redraw')
def thrust(t1, t2=None, t3=None, t4=None):
    """Set the four rotor thrusts; rotors left unspecified default to t1.

    Passing a single value applies symmetric thrust to all four rotors.
    """
    global drone
    # test for None with 'is' (identity) rather than '==': idiomatic, and
    # robust against argument objects with a permissive __eq__
    if t2 is None: t2 = t1
    if t3 is None: t3 = t1
    if t4 is None: t4 = t1
    drone.set_thrust(t1, t2, t3, t4)
def wind(v):
    """Set wind strength v; the direction is randomized in Drone.set_wind."""
    global drone
    drone.set_wind(v)
|
Mikhus/wsgikit | setup.py | Python | mit | 1,542 | 0.049287 | from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
import re
module_src = "wsgikit/wsgikit.pyx"

def version():
    """Return the __version__ string declared in the Cython source file.

    Raises AttributeError if no ``__version__`` assignment is found.
    """
    # context manager guarantees the file is closed even if the lookup
    # fails; raw string keeps the \s escapes valid regex (not string) escapes
    with open(module_src) as fp:
        return re.search(r"^__version__\s*=\s*['\"]([^'\"]*)['\"]",
                         fp.read(), re.M).group(1)
__version__ = version()
ext_modules = cythonize([
Extension( "wsgikit", [module_src])
]) + [
Extension( "wsgikit", [module_src])
]
setup(
name = "wsgikit",
version = __version__,
description = "Python tools for WSGI applications",
author = "Mykhailo Stadnyk",
author_email = "mikhus@gmail.com",
url = "https://github.com/Mikhus/wsgikit",
download_url = "https://github.com/Mikhus/wsgikit/zipball/master",
keywords = ["HTTP request", "file upload"],
platforms = ['OS Independent'],
license = 'MIT License',
ext_modules = ext_modules,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
long_description = """\
Python tools for WSGI applications
-------------------------------------
Fast HTTP request parsing, PHP-like params represe | ntation,
file upload handling, HTTP requests s | ecurity, etc.
"""
)
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit | esstoolkit/external/networkx/algorithms/tests/test_link_prediction.py | Python | gpl-3.0 | 18,091 | 0.000884 | import math
from functools import partial
import pytest
import networkx as nx
def _test_func(G, ebunch, expected, predict_func, **kwargs):
    """Run predict_func on (G, ebunch) and compare its scores, keyed by the
    sorted node pair, against the expected (u, v, score) triples."""
    predicted = predict_func(G, ebunch, **kwargs)
    want = {}
    for u, v, score in expected:
        want[tuple(sorted((u, v)))] = score
    got = {}
    for u, v, score in predicted:
        got[tuple(sorted((u, v)))] = score
    assert len(want) == len(got)
    for pair in want:
        assert nx.testing.almost_equal(want[pair], got[pair])
class TestResourceAllocationIndex:
    """Unit tests for :func:`networkx.resource_allocation_index`."""
    @classmethod
    def setup_class(cls):
        # bind the predictor once; self.test(...) delegates to _test_func
        cls.func = staticmethod(nx.resource_allocation_index)
        cls.test = partial(_test_func, predict_func=cls.func)
    def test_K5(self):
        G = nx.complete_graph(5)
        self.test(G, [(0, 1)], [(0, 1, 0.75)])
    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 2)], [(0, 2, 0.5)])
    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(1, 2)], [(1, 2, 0.25)])
    def test_notimplemented(self):
        # directed and multigraph inputs are rejected
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiDiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])
    def test_equal_nodes(self):
        G = nx.complete_graph(4)
        self.test(G, [(0, 0)], [(0, 0, 1)])
    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge"
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
class TestJaccardCoefficient:
    """Unit tests for :func:`networkx.jaccard_coefficient`."""
    @classmethod
    def setup_class(cls):
        # bind the predictor once; self.test(...) delegates to _test_func
        cls.func = staticmethod(nx.jaccard_coefficient)
        cls.test = partial(_test_func, predict_func=cls.func)
    def test_K5(self):
        G = nx.complete_graph(5)
        self.test(G, [(0, 1)], [(0, 1, 0.6)])
    def test_P4(self):
        G = nx.path_graph(4)
        self.test(G, [(0, 2)], [(0, 2, 0.5)])
    def test_notimplemented(self):
        # directed and multigraph inputs are rejected
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiDiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (2, 3)])
        self.test(G, [(0, 2)], [(0, 2, 0)])
    def test_isolated_nodes(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])
    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge"
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
class TestAdamicAdarIndex:
    """Unit tests for :func:`networkx.adamic_adar_index`."""
    @classmethod
    def setup_class(cls):
        # bind the predictor once; self.test(...) delegates to _test_func
        cls.func = staticmethod(nx.adamic_adar_index)
        cls.test = partial(_test_func, predict_func=cls.func)
    def test_K5(self):
        G = nx.complete_graph(5)
        self.test(G, [(0, 1)], [(0, 1, 3 / math.log(4))])
    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 2)], [(0, 2, 1 / math.log(2))])
    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))])
    def test_notimplemented(self):
        # directed and multigraph inputs are rejected
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiDiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])
    def test_equal_nodes(self):
        G = nx.complete_graph(4)
        self.test(G, [(0, 0)], [(0, 0, 3 / math.log(3))])
    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge"
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(
            G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)]
        )
class TestPreferentialAttachment:
    """Unit tests for :func:`networkx.preferential_attachment`."""
    @classmethod
    def setup_class(cls):
        # bind the predictor once; self.test(...) delegates to _test_func
        cls.func = staticmethod(nx.preferential_attachment)
        cls.test = partial(_test_func, predict_func=cls.func)
    def test_K5(self):
        G = nx.complete_graph(5)
        self.test(G, [(0, 1)], [(0, 1, 16)])
    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 1)], [(0, 1, 2)])
    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(0, 2)], [(0, 2, 4)])
    def test_notimplemented(self):
        # directed and multigraph inputs are rejected
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
        assert pytest.raises(
            nx.NetworkXNotImplemented,
            self.func,
            nx.MultiDiGraph([(0, 1), (1, 2)]),
            [(0, 2)],
        )
    def test_zero_degrees(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])
    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge"
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)])
class TestCNSoundarajanHopcroft:
@classmethod
def setup_class(cls):
cls.func = staticmethod(nx.cn_soundarajan_hopcroft)
cls.test = partial(_test_func, predict_func=cls.func, community="community")
def test_K5(self):
G = nx.complete_graph(5)
G.nodes[0]["community"] = 0
G.nodes[1]["community"] = 0
G.nodes[2]["community"] = 0
G.nodes[3]["community"] = 0
G.nodes[4]["community"] = 1
self.test(G, [(0, 1)], [(0, 1, 5)])
def test_P3(self):
G = nx.path_graph(3)
G.nodes[0]["community"] = 0
G.nodes[1]["community"] = 1
G.nodes[2]["community"] = 0
self.test(G, [(0, 2)], [(0, 2, 1)])
def test_S4(self):
G = nx.star_graph(4)
G.nodes[0]["community"] = 1
G.nodes[1]["community"] = 1
G.nodes[2]["community"] = 1
G.nodes[3]["community"] = 0
G.nodes[4]["communi | ty"] = 0
self.test(G, [(1, 2)], [(1, 2, 2)])
def test_notimplemented(self):
G = nx.DiGraph([(0, 1), (1, 2)])
G.add | _nodes_from([0, 1, 2], community=0)
assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
G = nx.MultiGraph([(0, 1), (1, 2)])
G.add_nodes_from([0, 1, 2], community=0)
assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
G = nx.MultiDiGraph([(0, 1), (1, 2)])
G.add_nodes_from([0, 1, 2], community=0)
assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
def test_no_common_neighbor(self):
G = nx.Graph()
G.add_nodes_from([0, 1])
G.nodes[0]["community"] = 0
G.nodes[1]["community"] = 0
self.test(G, [(0, 1)], [(0, 1, 0)])
def test_equal_nodes(self):
G = nx.complete_graph(3)
G.nodes[0]["community"] = 0
G.nodes[1]["community"] = 0
G.nodes[2]["community"] = 0
self.test(G, [(0, 0)], [(0, 0, 4)])
def test_different_community(self):
G = nx.Graph()
G.add_edges_from([( |
LICEF/edx-platform | cms/djangoapps/contentstore/management/commands/course_id_clash.py | Python | agpl-3.0 | 2,127 | 0.003761 | """
Script for finding all courses whose org/name pairs == other courses when ignoring case
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
#
# To run from command line: ./manage.py cms --settings dev course_id_clash
#
class Command(BaseCommand):
    """
    Script for finding all courses in the Mongo Modulestore whose org/name pairs == other courses when ignoring case
    """
    help = 'List all courses ids in the Mongo Modulestore which may collide when ignoring case'
    def handle(self, *args, **options):
        """Map/reduce the 'course' documents and print case-colliding ids."""
        mstore = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo)  # pylint: disable=protected-access
        # only the old-style Mongo modulestore exposes a raw pymongo collection
        if hasattr(mstore, 'collection'):
            # map: key every course by its lowercased org+course, so ids that
            # differ only in case land on the same key
            map_fn = '''
            function () {
                emit(this._id.org.toLowerCase()+this._id.course.toLowerCase(), {target: this._id});
            }
            '''
            # reduce: collect all colliding course ids into one target list
            reduce_fn = '''
            function (idpair, matches) {
                var result = {target: []};
                matches.forEach(function (match) {
                    result.target.push(match.target);
                });
                return result;
            }
            '''
            # finalize: keys with a single course never went through reduce
            # (target is not an array) -- null them out so they are skipped
            finalize = '''
            function(key, reduced) {
                if (Array.isArray(reduced.target)) {
                    return reduced;
                }
                else {return null;}
            }
            '''
            results = mstore.collection.map_reduce(
                map_fn, reduce_fn, {'inline': True}, query={'_id.category': 'course'}, finalize=finalize
            )
            results = results.get('results')
            for entry in results:
                # finalize nulled unique keys; only genuine clashes remain
                if entry.get('value') is not None:
                    print '{:-^40}'.format(entry.get('_id'))
                    for course_id in entry.get('value').get('target'):
                        print '   {}/{}/{}'.format(course_id.get('org'), course_id.get('course'), course_id.get('name'))
|
andrew-lundgren/gwpy | gwpy/plotter/axes.py | Python | gpl-3.0 | 7,620 | 0.000262 | # -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2013)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Extension of the :class:`~matplotlib.axes.Axes` class with
user-friendly attributes
"""
from six import string_types
from matplotlib.axes import Axes as _Axes
from matplotlib.artist import Artist
from matplotlib.projections import register_projection
from .decorators import auto_refresh
from . import (rcParams, tex, html)
__author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'
class Axes(_Axes):
    """An extension of the core matplotlib :class:`~matplotlib.axes.Axes`.

    These custom `Axes` provide only some simpler attribute accessors.

    Notes
    -----
    A new set of `Axes` should be constructed via::

        >>> plot.add_subplots(111, projection='xxx')

    where plot is a :class:`~gwpy.plotter.Plot` figure, and ``'xxx'``
    is the name of the `Axes` you want to add.
    """
    projection = 'rectilinear'

    def __init__(self, *args, **kwargs):
        super(Axes, self).__init__(*args, **kwargs)
        # push the x-axis label away from the tick labels for readability
        self.xaxis.labelpad = 10
    __init__.__doc__ = _Axes.__init__.__doc__

    # -----------------------------------------------
    # text properties

    # x-axis label
    @property
    def xlabel(self):
        """Label for the x-axis

        :type: :class:`~matplotlib.text.Text`
        """
        return self.xaxis.label

    @xlabel.setter
    @auto_refresh
    def xlabel(self, text):
        # accept either a plain string or a pre-built Text artist
        if isinstance(text, string_types):
            self.set_xlabel(text)
        else:
            self.xaxis.label = text

    @xlabel.deleter
    @auto_refresh
    def xlabel(self):
        self.set_xlabel("")

    # y-axis label
    @property
    def ylabel(self):
        """Label for the y-axis

        :type: :class:`~matplotlib.text.Text`
        """
        return self.yaxis.label

    @ylabel.setter
    @auto_refresh
    def ylabel(self, text):
        # accept either a plain string or a pre-built Text artist
        if isinstance(text, string_types):
            self.set_ylabel(text)
        else:
            self.yaxis.label = text

    @ylabel.deleter
    @auto_refresh
    def ylabel(self):
        self.set_ylabel("")

    # -----------------------------------------------
    # limit properties

    @property
    def xlim(self):
        """Limits for the x-axis

        :type: `tuple`
        """
        return self.get_xlim()

    @xlim.setter
    @auto_refresh
    def xlim(self, limits):
        self.set_xlim(*limits)

    @xlim.deleter
    @auto_refresh
    def xlim(self):
        # deleting the limits re-enables autoscaling on this axis only
        self.relim()
        self.autoscale_view(scalex=True, scaley=False)

    @property
    def ylim(self):
        """Limits for the y-axis

        :type: `tuple`
        """
        return self.get_ylim()

    @ylim.setter
    @auto_refresh
    def ylim(self, limits):
        self.set_ylim(*limits)

    @ylim.deleter
    @auto_refresh
    def ylim(self):
        # BUGFIX: decorated with @auto_refresh for consistency with the
        # xlim deleter, so interactive figures redraw after the delete
        self.relim()
        self.autoscale_view(scalex=False, scaley=True)

    # -----------------------------------------------
    # scale properties

    @property
    def logx(self):
        """Display the x-axis with a logarithmic scale

        :type: `bool`
        """
        return self.get_xscale() == "log"

    @logx.setter
    @auto_refresh
    def logx(self, log):
        # only touch the scale when the requested state actually differs
        if log and not self.logx:
            self.set_xscale('log')
        elif self.logx and not log:
            self.set_xscale('linear')

    @property
    def logy(self):
        """Display the y-axis with a logarithmic scale

        :type: `bool`
        """
        return self.get_yscale() == "log"

    @logy.setter
    @auto_refresh
    def logy(self, log):
        # only touch the scale when the requested state actually differs
        if log and not self.logy:
            self.set_yscale('log')
        elif self.logy and not log:
            self.set_yscale('linear')

    # -------------------------------------------
    # Axes methods

    @auto_refresh
    def resize(self, pos, which='both'):
        """Set the axes position with::

            pos = [left, bottom, width, height]

        in relative 0,1 coords, or *pos* can be a
        :class:`~matplotlib.transforms.Bbox`

        There are two position variables: one which is ultimately
        used, but which may be modified by :meth:`apply_aspect`, and a
        second which is the starting point for :meth:`apply_aspect`.
        """
        return super(Axes, self).set_position(pos, which=which)

    @auto_refresh
    def add_label_unit(self, unit, axis='x'):
        """Append the given unit to the current label of an axis.

        Parameters
        ----------
        unit : `~astropy.units.UnitBase`
            the unit to render, via LaTeX when `text.usetex` is set
        axis : `str`, optional
            which axis to label, one of ``'x'`` or ``'y'``, default ``'x'``
        """
        label = getattr(self, 'get_%slabel' % axis)()
        # fall back to the unit's docstring when no label has been set
        if not label:
            label = unit.__doc__
        if rcParams.get("text.usetex", False):
            unitstr = tex.unit_to_latex(unit)
        else:
            unitstr = unit.to_string()
        set_ = getattr(self, 'set_%slabel' % axis)
        if label:
            set_("%s [%s]" % (label, unitstr))
        else:
            set_(unitstr)

    def legend(self, *args, **kwargs):
        # pop custom styling kwargs before handing off to matplotlib
        alpha = kwargs.pop("alpha", 0.8)
        linewidth = kwargs.pop("linewidth", 8)
        # make legend
        legend = super(Axes, self).legend(*args, **kwargs)
        # style the frame, and thicken the line samples so they stand out
        if legend is not None:
            lframe = legend.get_frame()
            lframe.set_alpha(alpha)
            # plain loop, not a list comprehension: this is a side effect
            for line in legend.get_lines():
                line.set_linewidth(linewidth)
        return legend
    legend.__doc__ = _Axes.legend.__doc__

    def html_map(self, imagefile, data=None, **kwargs):
        """Create an HTML map for some data contained in these `Axes`

        Parameters
        ----------
        data : `~matplotlib.artist.Artist`, `~gwpy.types.Series`, `array-like`
            data to map, one of an `Artist` already drawn on these axes (
            via :meth:`plot` or :meth:`scatter`, for example) or a data set

        imagefile : `str`
            path to image file on disk for the containing `Figure`

        mapname : `str`, optional
            ID to connect <img> tag and <map> tags, default: ``'points'``. This
            should be unique if multiple maps are to be written to a single
            HTML file.

        shape : `str`, optional
            shape for <area> tag, default: ``'circle'``

        standalone : `bool`, optional
            wrap map HTML with required HTML5 header and footer tags,
            default: `True`

        title : `str`, optional
            title name for standalone HTML page

        jquery : `str`, optional
            URL of jquery script, defaults to googleapis.com URL

        Returns
        -------
        HTML : `str`
            string of HTML markup that defines the <img> and <map>
        """
        if data is None:
            # auto-detect the single artist drawn on these axes
            artists = self.lines + self.collections + self.images
            if len(artists) != 1:
                raise ValueError("Cannot determine artist to map, %d found."
                                 % len(artists))
            data = artists[0]
        if isinstance(data, Artist):
            return html.map_artist(data, imagefile, **kwargs)
        else:
            return html.map_data(data, self, imagefile, **kwargs)
register_projection(Axes)
|
zrhans/python | exemplos/Examples.lnk/bokeh/plotting/file/bollinger.py | Python | gpl-2.0 | 567 | 0 | import numpy as np
from bokeh.plotting import *

# Define Bollinger Bands with random stand-in data.
# np.random.random_integers is deprecated; randint samples the half-open
# interval [low, high), so the bound becomes 151 to keep values in 100..150.
upperband = np.random.randint(100, 151, size=100)
lowerband = upperband - 100
x_data = np.arange(1, 101)

# Bollinger shading glyph: walk x forward along the lower band and backward
# along the upper band so the patch outline closes on itself.
band_x = np.append(x_data, x_data[::-1])
band_y = np.append(lowerband, upperband[::-1])

output_file('bollinger.html', title='Bollinger bands (file)')

p = figure(x_axis_type='datetime')
p.patch(band_x, band_y, color='#7570B3', fill_alpha=0.2)
p.title = 'Bollinger Bands'
p.plot_height = 600
p.plot_width = 800
p.grid.grid_line_alpha = 0.4

show(p)
|
phrocker/accumulo | test/system/auto/simple/nativeMap.py | Python | apache-2.0 | 1,323 | 0.003779 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work | for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless requi | red by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import time
from TestUtils import TestUtilsMixin
class NativeMapTest(TestUtilsMixin, unittest.TestCase):
    "Native Map Unit Test"
    # position of this test within the auto test sequence -- presumably
    # consumed by the framework's ordering logic; TODO confirm
    order = 21
    testClass=""
    def setUp(self):
        pass
    def runTest(self):
        # delegate to the JVM-side unit test and wait up to 20s for it to exit
        handle = self.runClassOn('localhost', 'org.apache.accumulo.test.functional.NativeMapTest', [])
        self.waitForStop(handle, 20)
    def tearDown(self):
        pass
def suite():
    """Assemble the test suite for this module."""
    # TestSuite accepts an iterable of tests directly
    return unittest.TestSuite([NativeMapTest()])
|
lihui7115/ChromiumGStreamerBackend | tools/telemetry/telemetry/internal/platform/power_monitor/powermetrics_power_monitor_unittest.py | Python | bsd-3-clause | 3,002 | 0.007328 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import unittest
from telemetry.core import os_version
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.platform import mac_platform_backend
from telemetry.interna | l.platform.power_monitor import powermetrics_power_monitor
def _parsePowerMetricsDataFromTestFile(output_file):
  """Parse a canned powermetrics capture stored in the unittest data dir."""
  test_data_path = os.path.join(util.GetUnittestDataDir(), output_file)
  with open(test_data_path, 'r') as f:
    process_output = f.read()
  return (powermetrics_power_monitor.PowerMetricsPowerMonitor.
      ParsePowerMetricsOutput(process_output))
class PowerMetricsPowerMonitorTest(unittest.TestCase):
  """Tests for parsing `powermetrics` output on Mac platform backends."""

  @decorators.Enabled('mac')
  def testCanMonitorPowerUsage(self):
    backend = mac_platform_backend.MacPlatformBackend()
    power_monitor = powermetrics_power_monitor.PowerMetricsPowerMonitor(backend)
    mavericks_or_later = (
        backend.GetOSVersionName() >= os_version.MAVERICKS)
    # Should always be able to monitor power usage on OS Version >= 10.9 .
    self.assertEqual(power_monitor.CanMonitorPower(), mavericks_or_later,
        "Error checking powermetrics availability: '%s'" % '|'.join(os.uname()))

  @decorators.Enabled('mac')
  def testParseEmptyPowerMetricsOutput(self):
    # Important to handle zero length powermetrics output - crbug.com/353250 .
    self.assertIsNone(powermetrics_power_monitor.PowerMetricsPowerMonitor.
        ParsePowerMetricsOutput(''))

  @decorators.Enabled('mac')
  def testParsePowerMetricsOutputFromVM(self):
    # Don't fail when running on VM - crbug.com/423688.
    # (assertEqual replaces the deprecated assertEquals alias.)
    self.assertEqual({},
        _parsePowerMetricsDataFromTestFile('powermetrics_vmware.output'))

  @decorators.Enabled('mac')
  def testParsePowerMetricsOutput(self):
    power_monitor = powermetrics_power_monitor.PowerMetricsPowerMonitor(
        mac_platform_backend.MacPlatformBackend())
    if not power_monitor.CanMonitorPower():
      logging.warning('Test not supported on this platform.')
      return

    # Not supported on Mac at this time.
    self.assertFalse(power_monitor.CanMeasurePerApplicationPower())

    # Supported hardware reports power samples and energy consumption.
    result = _parsePowerMetricsDataFromTestFile('powermetrics_output.output')
    self.assertTrue(result['energy_consumption_mwh'] > 0)

    # Verify that all component entries exist in output.
    component_utilization = result['component_utilization']
    for k in ['whole_package', 'gpu'] + ['cpu%d' % x for x in range(8)]:
      self.assertTrue(component_utilization[k]['average_frequency_hz'] > 0)
      self.assertTrue(component_utilization[k]['idle_percent'] > 0)

    # Unsupported hardware doesn't.
    result = _parsePowerMetricsDataFromTestFile(
        'powermetrics_output_unsupported_hardware.output')
    self.assertNotIn('energy_consumption_mwh', result)
|
glaudsonml/kurgan-ai | libs/WebServer.py | Python | apache-2.0 | 3,716 | 0.016954 | '''
Kurgan AI Web Application Security Analyzer.
http://www.kurgan.com.br/
Author: Glaudson Ocampos - <glaudson@vortexai.com.br>
Created in May, 11th 2016.
'''
import db.db as db
import config as cf
class WebServer(object):
    """Fingerprint state for a remote web server: raw banner, guessed OS,
    server product/version, and the HTTP methods it advertises."""
    banner = None
    os = None
    server = None
    framework = None
    version = None
    options = None
    def set_banner(self, val):
        self.banner = val
    def get_banner(self):
        return self.banner
    def set_os(self, val):
        self.os = val
    def get_os(self):
        return self.os
    def set_server(self, val):
        self.server = val
    def get_server(self):
        return self.server
    def set_version(self,val):
        self.version = val
    def get_version(self):
        return self.version
    def set_options(self,val):
        self.options = val
    def get_options(self):
        return self.options
    def check_os(self):
        """Guess the operating system from substrings of the banner."""
        os_possibles = {"Debian","Fedora","Windows","SuSE","marrakesh","RedHat","Unix"}
        for i in os_possibles:
            if i in self.banner:
                self.os = i
                break
    def check_server(self):
        """Match the banner against server names stored in the database."""
        #server_possibles = {"nginx", "Apache", "Tomcat", "JBoss", "IIS", "X-Varnish"}
        mydb = db.DB();
        query = "SELECT DISTINCT name FROM server"
        database = cf.DB_WEBSERVERS
        servers_in_database = mydb.getData(query,database)
        server_possibles = list(servers_in_database)
        # rows come back as sequences; scan every candidate name in each row
        for j in server_possibles:
            for i in j:
                if i in self.banner:
                    self.server = i
                    break
    def check_version(self):
        """Look for a known version string of the detected server in the banner."""
        if self.server is None:
            return None
        else:
            mydb = db.DB();
            name = self.server;
            # NOTE(review): query is built by string concatenation; 'name'
            # comes from the server table matched above, but parameterize
            # this if the db layer supports placeholders.
            query = "SELECT DISTINCT version FROM server WHERE name='" + name + "'"
            database = cf.DB_WEBSERVERS
            servers_in_database = mydb.getData(query,database)
            v_possibles = list(servers_in_database)
            for j in v_possibles:
                for i in j:
                    if i in self.banner:
                        self.version = i
                        break
    def check_options(self):
        """Return the HTTP methods found in the stored OPTIONS/Allow response."""
        op_possibles = {'GET','POST','PUT','HEAD','OPTIONS','DELETE','TRACE','PATCH','CONNECT'}
        op_in_server = []
        # NOTE(review): iterating a set literal -- the order of the returned
        # list is hash-dependent and not stable across interpreter runs.
        for i in op_possibles:
            if i in self.options:
                op_in_server.append(i)
        return op_in_server
class Framework(object):
    """Server-side framework fingerprint, driven by the X-Powered-By header."""
    framework = None
    X_Powered_By = None
    def set_X_Powered_By(self, val):
        """Record the raw X-Powered-By response header."""
        self.X_Powered_By = val
    def get_X_Powered_By(self):
        """Return the stored X-Powered-By header (or None)."""
        return self.X_Powered_By
    def set_framework(self, val):
        """Force the detected framework name."""
        self.framework = val
    def get_framework(self):
        """Return the detected framework name (or None)."""
        return self.framework
    # TODO: also check the file extension (comment translated from Portuguese)
    def check_framework(self):
        """Match the X-Powered-By header against known framework names."""
        known = {"PHP", "ASP.NET", "JSP", "Perl", "CGI"}
        for candidate in known:
            if candidate in self.X_Powered_By:
                self.framework = candidate
                break
class Application(object):
    """Fingerprint state for the web application itself (file extension,
    cookie, JavaScript usage)."""
    extension = None
    cookie = None
    has_javascript = None
    def set_extension(self, val):
        self.extension = val
    def get_extension(self):
        return self.extension
    def set_cookie(self, val):
        self.cookie = val
    def get_cookie(self):
        return self.cookie
    def set_has_javascript(self, val):
        self.has_javascript = val
    def get_has_javascript(self):
        return self.has_javascript
    def check_extension(self):
        """Score the framework weight implied by the URL extension.

        Returns the accumulated weight (currently 10 for 'html', else 0).
        """
        # BUGFIX: the original compared with 'is' (identity, not equality)
        # and incremented an undefined global 'weight_html_framework',
        # raising NameError whenever the branch was taken.  Track the
        # weight locally and return it instead.
        weight_html_framework = 0
        if self.extension == 'html':
            weight_html_framework += 10
        return weight_html_framework
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/libxml2.py | Python | gpl-3.0 | 341,257 | 0.004542 | import libxml2mod
import types
import sys
# The root of all libxml2 errors; catching this type handles every
# exception the wrapper classes below can raise.
class libxmlError(Exception): pass
#
# id() is sometimes negative ...
#
def pos_id(o):
    # map id() into a non-negative range so it can be used as a stable key
    # NOTE(review): relies on Python 2's sys.maxint, which no longer exists
    # in Python 3
    i = id(o)
    if (i < 0):
        return (sys.maxint - i)
    return i
#
# Errors raised by the wrappers when some tree handling failed.
#
# Each subclass carries a plain message string; all derive from libxmlError
# so callers can catch the whole family with one except clause.
class treeError(libxmlError):
    # raised for failures while building or manipulating document trees
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return self.msg
class parserError(libxmlError):
    # raised for XML parsing failures
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return self.msg
class uriError(libxmlError):
    # raised for URI handling failures
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return self.msg
class xpathError(libxmlError):
    # raised for XPath evaluation failures
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return self.msg
class ioWrapper:
    """Adapt a Python file-like object to libxml2's C-style I/O callbacks.

    Each io_* method returns -1 once the wrapper has been closed, mirroring
    the status codes the C side expects.
    """
    def __init__(self, _obj):
        self.__io = _obj
        self._o = None
    def io_close(self):
        """Close the underlying object; -1 if already closed, else 0."""
        if self.__io is None:
            return -1
        self.__io.close()
        self.__io = None
        return 0
    def io_flush(self):
        """Flush the underlying object; -1 if closed, else 0."""
        if self.__io is None:
            return -1
        self.__io.flush()
        return 0
    def io_read(self, len = -1):
        """Read up to ``len`` bytes (everything when len < 0); -1 if closed."""
        if self.__io is None:
            return -1
        return self.__io.read() if len < 0 else self.__io.read(len)
    def io_write(self, str, len = -1):
        """Write ``str`` to the underlying object; -1 if closed."""
        if self.__io is None:
            return -1
        return self.__io.write(str) if len < 0 else self.__io.write(str, len)
class ioReadWrapper(ioWrapper):
    """Read-side wrapper: binds the Python object to an xmlParserInputBuffer."""
    def __init__(self, _obj, enc = ""):
        ioWrapper.__init__(self, _obj)
        self._o = libxml2mod.xmlCreateInputBuffer(self, enc)
    def __del__(self):
        print "__del__"  # NOTE(review): debug print left in the destructor
        self.io_close()
        if self._o != None:
            libxml2mod.xmlFreeParserInputBuffer(self._o)
            self._o = None
    def close(self):
        """Release both the Python object and the C-side input buffer."""
        self.io_close()
        if self._o != None:
            libxml2mod.xmlFreeParserInputBuffer(self._o)
            self._o = None
class ioWriteWrapper(ioWrapper):
    """Write-side wrapper: binds the Python object to an xmlOutputBuffer."""
    def __init__(self, _obj, enc = ""):
#        print "ioWriteWrapper.__init__", _obj
        if type(_obj) == type(''):
            print "write io from a string"
            # NOTE(review): 'self.o' looks like a typo for 'self._o', and
            # ioWrapper.__init__ is never called on this branch, so the
            # instance is left half-initialized -- confirm intended.
            self.o = None
        elif type(_obj) == types.InstanceType:
            # Python 2 old-style class instance: wrap it directly
            print "write io from instance of %s" % (_obj.__class__)
            ioWrapper.__init__(self, _obj)
            self._o = libxml2mod.xmlCreateOutputBuffer(self, enc)
        else:
            # assume an existing C-level output buffer; recover its Python
            # file if one is attached
            file = libxml2mod.outputBufferGetPythonFile(_obj)
            if file != None:
                ioWrapper.__init__(self, file)
            else:
                ioWrapper.__init__(self, _obj)
            self._o = _obj
    def __del__(self):
#        print "__del__"
        self.io_close()
        if self._o != None:
            libxml2mod.xmlOutputBufferClose(self._o)
            self._o = None
    def flush(self):
        """Flush and close both the Python object and the C output buffer."""
        self.io_flush()
        if self._o != None:
            libxml2mod.xmlOutputBufferClose(self._o)
            self._o = None
    def close(self):
        """Alias of flush(): flushes and releases the output buffer."""
        self.io_flush()
        if self._o != None:
            libxml2mod.xmlOutputBufferClose(self._o)
            self._o = None
#
# Example of a class to handle SAX events
#
class SAXCallback:
"""Base class for SAX handlers"""
def startDocument(self):
"""called at the start of the document"""
pass
def endDocument(self):
"""called at the end of the document"""
pass
def startElement(self, tag, attrs):
"""called at the start of every element, tag is the name of
the element, attrs is a dictionary of the element's attributes"""
pass
def endElement(self, tag):
"""called at the start of every element, tag is the name of
the element"""
pass
def characters(self, data):
"""called when character data have been read, data is the string
containing the data, multiple consecutive characters() callback
are possible."""
pass
def cdataBlock(self, data):
"""called when CDATA section have been read, data is the string
containing the data, multiple consecutive cdataBlock() callback
are possible."""
pass
def reference(self, name):
"""called when an entity reference has been found"""
pass
def ignorableWhitespace(self, data):
"""called when potentially ignorable white spaces have been found"""
pass
def processingInstruction(self, target, data):
"""called when a PI has been found, target contains the PI name and
data is the associated data in the PI"""
pass
def comment(self, content):
"""called when a comment has been found, content contains the comment"""
pass
def externalSubset(self, name, externalID, systemID):
"""called when a DOCTYPE declaration has been found, name is the
DTD name and externalID, systemID are the DTD public and system
identifier for that DTd if available"""
pass
def internalSubset(self, name, externalID, systemID):
"""called when a DOCTYPE declaration has been found, name is the
DTD name and externalID, systemID are the DTD public and system
identifier for that DTD if available"""
pass
def entityDecl(self, name, type, externalID, systemID, content):
"""called when an ENTITY declaration has been found, name is the
entity name and externalID, systemID are the entity public and
system identifier for that entity if available, type indicates
the entity type, and content reports it's string content"""
pass
def notationDecl(self, name, externalID, systemID):
"""called when an NOTATION declaration has been found, name is the
notation name and externalID, systemID are the notation public and
system identifier for that notation if available"""
pass
def attributeDecl(self, elem, name, type, defi, defaultValue, nameList):
"""called when an ATTRIBUTE definition has been found"""
pass
    def elementDecl(self, name, type, content):
        """Called when an ELEMENT definition has been found."""
        pass
    def entityDecl(self, name, publicId, systemID, notationName):
        # NOTE(review): this 4-argument definition shadows the 5-argument
        # entityDecl above, making that handler unreachable.  In upstream
        # libxml2 bindings the unparsed-entity callback is named
        # unparsedEntityDecl — confirm which name the SAX driver dispatches
        # to before relying on either definition.
        """called when an unparsed ENTITY declaration has been found;
        *name* is the entity name and *publicId*, *systemID* are the entity
        public and system identifier for that entity if available,
        and *notationName* indicates the associated NOTATION"""
        pass
    def warning(self, msg):
        # Parser warnings are silently dropped by default; override (or
        # re-enable the print below) to surface them.
        #print msg
        pass
    def error(self, msg):
        # Recoverable parser error: escalate to the caller as an exception.
        raise parserError(msg)
    def fatalError(self, msg):
        # Unrecoverable parser error: always raised, parsing cannot continue.
        raise parserError(msg)
#
# This class is the ancestor of all the Node classes. It provides
# the basic functionality shared by all nodes (and handles
# exceptions gracefully), like name, navigation in the tree,
# doc reference, content access and serializing to a string or URI
#
class xmlCore:
def __init__(self, _obj=None):
if _obj != None:
self._o = _obj;
return
self._o = None
def __eq__(self, other):
if other == None:
return False
ret = libxml2mod.compareNodesEqual(self._o, other._o)
if ret == None:
return False
return ret == True
def __ne__(self, other):
if other == None:
return True
ret = libxml2mod.compareNodesEqual(self._o, other._o)
return not ret
def __hash__(self):
ret = libxml2mod.nodeHash(self._o)
return ret
    def __str__(self):
        """Serialize the node (and its subtree) to its XML text form."""
        return self.serialize()
def get_parent(self):
ret = libxml2mod.parent(self._o)
if ret == None:
return None
return xmlNode(_obj=ret)
def get_children(self):
ret = libxml2mod.children(self._o)
if ret == None:
return None
return xmlNode(_obj=ret)
def get_last(self):
ret = libxml2mod.la |
stackdump/txbitwrap | txbitwrap/test/__init__.py | Python | mit | 2,474 | 0.002425 | """
run tests against a webserver running in the same reactor
NOTE: this test uses port 8888 on localhost
"""
import os
import ujson as json
import cyclone.httpclient
from twisted.internet import defer
from twisted.application import internet
from twisted.trial.unittest import TestCase
from twisted.python import log
from txbitwrap.api import factory as Api
from txbitwrap.machine import set_pnml_path
import txbitwrap.event
# Endpoint the API under test binds to (local-only, fixed port 8888).
IFACE = '127.0.0.1'
PORT = 8888
# Service + Postgres settings handed to the Api factory in setUp().
# NOTE(review): credentials here are local test fixtures; keep them out of
# any production configuration.
OPTIONS = {
    'listen-ip': IFACE,
    'listen-port': PORT,
    'machine-path': os.path.abspath(os.path.dirname(__file__) + '/../../schemata'),
    'pg-host': '127.0.0.1',
    'pg-port': 5432,
    'pg-username': 'bitwrap',
    'pg-password': 'bitwrap',
    'pg-database': 'bitwrap'
}
class ApiTest(TestCase):
    """Run the bitwrap HTTP API inside the trial reactor and exercise it
    with cyclone's asynchronous HTTP clients."""

    def setUp(self):
        """Start the TCP endpoint serving the API."""
        set_pnml_path(OPTIONS['machine-path'])
        self.options = OPTIONS
        #pylint: disable=no-member
        self.service = internet.TCPServer(PORT, Api(self.options), interface=self.options['listen-ip'])
        #pylint: enable=no-member
        self.service.startService()

    @defer.inlineCallbacks
    def tearDown(self):
        """Stop the TCP endpoint and drain the event dispatch queue."""
        self.service.stopService()
        yield txbitwrap.event.rdq.stop()

    @staticmethod
    def url(resource):
        """Build an absolute URL for *resource* on the test endpoint."""
        return 'http://%s:%s/%s' % (IFACE, PORT, resource)

    @staticmethod
    def client(resource):
        """JSON-RPC client bound to *resource*."""
        return cyclone.httpclient.JsonRPC(ApiTest.url(resource))

    @staticmethod
    def fetch(resource, **kwargs):
        """Plain async HTTP request against *resource*."""
        return cyclone.httpclient.fetch(ApiTest.url(resource), **kwargs)

    @staticmethod
    def dispatch(**event):
        """POST an event to the dispatch endpoint; event['payload'] may be a
        raw string or any JSON-serializable object."""
        resource = 'dispatch/%s/%s/%s' % (event['schema'], event['oid'], event['action'])
        url = ApiTest.url(resource)
        if isinstance(event['payload'], str):
            data = event['payload']
        else:
            data = json.dumps(event['payload'])
        return cyclone.httpclient.fetch(url, postdata=data)

    @staticmethod
    def broadcast(**event):
        """POST a complete event to the broadcast endpoint."""
        resource = 'broadcast/%s/%s' % (event['schema'], event['id'])
        url = ApiTest.url(resource)
        data = json.dumps(event)
        return cyclone.httpclient.fetch(url, postdata=data)
|
tuanvu216/udacity-course | designing-restful-apis/Lesson_3/06_Adding Features to your Mashup/Starter Code/findARestaurant.py | Python | mit | 3,690 | 0.01084 | # -*- coding: utf-8 -*-
import json
import httplib2
import sys
import codecs
# Force UTF-8 on stdout/stderr so non-ASCII venue names print cleanly
# (Python 2 streams default to ASCII).
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
sys.stderr = codecs.getwriter('utf8')(sys.stderr)
# NOTE(review): API credentials are hard-coded in source — move them to
# environment variables or a config file before publishing.
foursquare_client_id = 'SMQNYZFVCIOYIRAIXND2D5SYBLQUOPDB4HZTV13TT22AGACD'
foursquare_client_secret = 'IHBS4VBHYWJL53NLIY2HSVI5A1144GJ3MDTYYY1KLKTMC4BV'
google_api_key = 'AIzaSyBz7r2Kz6x7wO1zV9_O5Rcxmt8NahJ6kos'
def getGeocodeLocation(inputString):
    """Geocode a free-form location string via the Google Maps API and
    return its (latitude, longitude) pair."""
    # Encode spaces the simple way the Maps API accepts.
    query = inputString.replace(" ", "+")
    url = ('https://maps.googleapis.com/maps/api/geocode/json?address=%s&key=%s'% (query, google_api_key))
    http = httplib2.Http()
    payload = json.loads(http.request(url,'GET')[1])
    coords = payload['results'][0]['geometry']['location']
    return (coords['lat'], coords['lng'])
def findARestaurant(mealType, location):
    """Geocode *location*, then return info about the first Foursquare venue
    matching *mealType* near those coordinates.

    Returns a dict with 'name', 'address' and 'image' keys, or the string
    "No Restaurants Found" when the search comes back empty.
    """
    latitude, longitude = getGeocodeLocation(location)
    url = ('https://api.foursquare.com/v2/venues/search?client_id=%s&client_secret=%s&v=20130815&ll=%s,%s&query=%s' % (foursquare_client_id, foursquare_client_secret,latitude,longitude,mealType))
    h = httplib2.Http()
    result = json.loads(h.request(url,'GET')[1])
    if not result['response']['venues']:
        return "No Restaurants Found"
    # Grab the first restaurant returned by the search.
    restaurant = result['response']['venues'][0]
    venue_id = restaurant['id']
    restaurant_name = restaurant['name']
    # formattedAddress is a list of address lines; flatten to one string.
    # Each part keeps a trailing space to match the historical output format.
    restaurant_address = "".join(part + " " for part in restaurant['location']['formattedAddress'])
    # Fetch a 300x300 picture of the restaurant using the venue_id (change
    # the 300x300 segment, or use 'original', for other sizes); fall back to
    # a stock image when the venue has no photos.
    url = ('https://api.foursquare.com/v2/venues/%s/photos?client_id=%s&v=20150603&client_secret=%s' % ((venue_id,foursquare_client_id,foursquare_client_secret)))
    result = json.loads(h.request(url,'GET')[1])
    if result['response']['photos']['items']:
        firstpic = result['response']['photos']['items'][0]
        imageURL = firstpic['prefix'] + "300x300" + firstpic['suffix']
    else:
        imageURL = "http://pixabay.com/get/8926af5eb597ca51ca4c/1433440765/cheeseburger-34314_1280.png?direct"
    return {'name': restaurant_name, 'address': restaurant_address, 'image': imageURL}
if __name__ == '__main__':
    # Smoke-test queries across several cities.
    # NOTE(review): "Sydney Austrailia" is misspelled and lacks a comma in
    # the sample data — left untouched since it is the literal geocoder
    # input; TODO confirm intended spelling.
    findARestaurant("Pizza", "Tokyo, Japan")
    findARestaurant("Tacos", "Jakarta, Indonesia")
    findARestaurant("Tapas", "Maputo, Mozambique")
    findARestaurant("Falafel", "Cairo, Egypt")
    findARestaurant("Spaghetti", "New Delhi, India")
    findARestaurant("Cappuccino", "Geneva, Switzerland")
    findARestaurant("Sushi", "Los Angeles, California")
    findARestaurant("Steak", "La Paz, Bolivia")
    findARestaurant("Gyros", "Sydney Austrailia")
daniel-j/lutris | lutris/sysoptions.py | Python | gpl-3.0 | 12,134 | 0.000165 | """Options list for system config."""
import os
from collections import OrderedDict
from lutris import runners
from lutris.util import display, system
def get_optirun_choices():
    """Return menu choices (label, value) for Optimus"""
    available = [("Off", "off")]
    # Offer each launcher only when its binary is actually installed.
    for label, binary in (("primusrun", "primusrun"),
                          ("optirun/virtualgl", "optirun")):
        if system.find_executable(binary):
            available.append((label, binary))
    return available
system_options = [ # pylint: disable=invalid-name
{
"option": "game_path",
"type": "directory_chooser",
"label": "Default installation folder",
"default": os.path.expanduser("~/Games"),
"scope": ["runner", "system"],
"help": "The default folder where you install your games."
},
{
"option": "disable_runtime",
"type": "bool",
"label": "Disable Lutris Runtime",
"default": False,
"help": (
"The Lutris Runtime loads some libraries before running the "
"game. Which can cause some incompatibilities in some cases. "
"Check this option to disable it."
),
},
{
"option": "prefer_system_libs",
"type": "bool",
"label": "Prefer system libraries",
"default": True,
"help": (
"When the runtime is enabled, prioritize the system libraries"
" over the provided ones."
),
},
{
"option": "reset_desktop",
"type": "bool",
"label": "Restore resolution on game exit",
"default": False,
"help": (
"Some games don't restore your screen resolution when \n"
"closed or when they crash. This is when this option comes \n"
"into play to save your bacon."
),
},
{
"option": "single_cpu",
"type": "bool",
"label": "Restrict to single core",
"advanced": True,
"default": False,
"help": "Restrict the game to a single CPU core.",
},
{
"option": "restore_gamma",
"type": "bool",
"default": False,
"label": "Restore gamma on game exit",
"advanced": True,
"help": (
"Some games don't correctly restores gamma on exit, making "
"your display too bright. Select this option to correct it."
),
},
{
"option": "disable_compositor",
"label": "Disable desktop effects",
"type": "bool",
"default": False,
"advanced": True,
"help": (
"Disable desktop effects while game is running, "
"reducing stuttering and increasing performance"
),
},
{
"option": "reset_pulse",
"type": "bool",
"label": "Reset PulseAudio",
"default": False,
"advanced": True,
"condition": system.find_executable("pulseaudio"),
"help": "Restart PulseAudio before launching the game.",
},
{
"option": "pulse_latency",
"type": "bool",
"label": "Reduce PulseAudio latency",
"default": False,
"advanced": True,
"condition": system.find_executable("pulseaudio"),
"help": (
"Set the environment variable PULSE_LATENCY_MSEC=60 "
"to improve audio quality on some games"
),
},
{
"option": "use_us_layout",
"type": "bool",
"label": "Switch to US keyboard layout",
"default": False,
"advanced": True,
"help": "Switch to US keyboard qwerty layout while game is running",
},
{
"option": "optimus",
"type": "choice",
"default": "off",
"choices": get_optirun_choices,
"label": "Optimus launcher (NVIDIA Optimus laptops)",
"advanced": True,
"help": (
"If you have installed the primus or bumblebee packages, "
"select what launcher will run the game with the command, "
"activating your NVIDIA graphic chip for high 3D "
"performance. primusrun normally has better performance, but"
"optirun/virtualgl works better for more games."
),
},
{
"option": "fps_limit",
"type": "string",
"size": "small",
"label": "Fps limit",
"advanced": True,
"condition": bool(system.find_executable("strangle")),
"help": "Limit the game's fps to desired number",
},
{
"option": "gamemode",
"type": "bool",
"default": system.LINUX_SYSTEM.is_feature_supported("GAMEMODE"),
"condition": system.LINUX_SYSTEM.is_feature_supported("GAMEMODE"),
"label": "Enable Feral gamemode",
"help": "Request a set of optimisations be temporarily applied to the host OS",
},
{
"option": "dri_prime",
"type": "bool",
"default": False,
"condition": display.USE_DRI_PRIME,
"label": "Use PRIME (hybrid graphics on laptops)",
"advanced": True,
"help": (
"If you have open source graphic drivers (Mesa), selecting this "
"option will run the game with the 'DRI_PRIME=1' environment variable, "
"activating your discrete graphic chip for high 3D "
"performance."
),
},
{
"option": "sdl_video_fullscreen",
"type": "choice",
"label": "SDL 1.2 Fullscreen Monitor",
"choices": display.get_output_list,
"default": "off",
"advanced": True,
"help": (
"Hint SDL 1.2 games to use a specific monitor when going "
"fullscreen by setting the SDL_VIDEO_FULLSCREEN "
"environment variable"
),
},
{
"option": "display",
"type": "choice",
"label": "Turn off monitors except",
"choices": display.get_output_choices,
"default": "off",
"advanced": True,
"help": (
"Only keep the selected screen active while the game is "
"running. \n"
"This is useful if you have a dual-screen setup, and are \n"
"having display issues when running a game in fullscreen."
),
},
{
"option": "resolution",
"type": "choice",
"label": "Switch resolution to",
"choices": display.get_resolution_choices,
"default": "off",
"help": "Switch to this screen resolution while the game is running.",
},
{
"option": "terminal",
"label": "Run in a terminal",
"type": "bool",
"default": False,
"advanced": True,
"help": "Run the game in a new terminal window.",
},
{
"option": "terminal_app",
"label": "Terminal application",
"type": "choice_with_entry",
"choices": system.get_terminal_apps,
"default": system.get_default_terminal(),
"advanced": True,
"help": (
"The terminal emulator to be run with the previous option."
"Choose from the list of detected terminal apps or enter "
"the ter | minal's command or path."
"Note: Not all terminal emulators are guaranteed to work."
),
},
{
"option": "env",
"type": "mapping",
"label": "Environment variables",
"help": "Environment variables loaded at run time",
},
{
"option": "prefix_command",
"typ | e": "string",
"label": "Command prefix",
"advanced": True,
"help": (
"Command line instructions to add in front of the game's "
"execution command."
),
},
{
"option": "manual_command",
"type": "file",
"label": "Manual command",
"advanced": True,
"help": ("Script to execute from the game's contextual menu"),
},
{
"option": "prelaunch_command",
"type": "file",
"label": "Pre-launch command",
"advanced": True,
"help": "Script to execute before the game starts",
},
{
"option": "prelaunch_wait",
"t |
obulpathi/poppy | poppy/transport/validators/schemas/flavor.py | Python | apache-2.0 | 2,741 | 0 | # Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from poppy.transport.validators import schema_base
class FlavorSchema(schema_base.SchemaBase):
    """JSON schema validation for /flavor."""

    # Draft-3 style schema ("required" is a boolean on each property),
    # matching the validator driven by schema_base.SchemaBase.
    schema = {
        "flavor": {
            "POST": {
                "type": "object",
                "properties": {
                    # Flavor identifier chosen by the caller.
                    "id": {
                        "type": "string",
                        "minLength": 3,
                        "maxLength": 64,
                        "required": True
                    },
                    # At least one CDN provider, each with provider_url links.
                    "providers": {
                        "type": "array",
                        "required": True,
                        "items": {
                            "type": "object",
                            "properties": {
                                "provider": {
                                    "type": "string",
                                    "required": True
                                },
                                "links": {
                                    "type": "array",
                                    "required": True,
                                    "items": {
                                        "type": "object",
                                        "properties": {
                                            "href": {
                                                "type": "string",
                                                "minLength": 2,
                                                "required": True
                                            },
                                            "rel": {
                                                "type": "string",
                                                "enum": ["provider_url"],
                                                "required": True
                                            }
                                        }
                                    },
                                    "minItems": 1
                                }
                            }
                        },
                        "minItems": 1
                    }
                }
            }
        }
    }
|
Architektor/PySnip | venv/lib/python2.7/site-packages/twisted/internet/test/test_iocp.py | Python | gpl-3.0 | 5,182 | 0.002316 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.internet.iocpreactor}.
"""
import errno
from array import array
from struct import pack
from socket import AF_INET6, AF_INET, SOCK_STREAM, SOL_SOCKET, error, socket
from zope.interface.verify import verifyClass
from twisted.trial import unittest
from twisted.python.log import msg
from twisted.internet.interfaces import IPushProducer
try:
from twisted.internet.iocpreactor import iocpsupport as _iocp, tcp, udp
from twisted.internet.ioc | preactor.reactor import IOCPReactor, EVENTS_PER_LOOP, KEY_NORMAL
from twisted.internet.iocpreactor.interfaces import IReadWriteHandle
from twisted.internet.iocpreactor.c | onst import SO_UPDATE_ACCEPT_CONTEXT
from twisted.internet.iocpreactor.abstract import FileHandle
except ImportError:
skip = 'This test only applies to IOCPReactor'
try:
socket(AF_INET6, SOCK_STREAM).close()
except error, e:
ipv6Skip = str(e)
else:
ipv6Skip = None
class SupportTests(unittest.TestCase):
    """
    Tests for L{twisted.internet.iocpreactor.iocpsupport}, low-level reactor
    implementation helpers.
    """
    def _acceptAddressTest(self, family, localhost):
        """
        Create a C{SOCK_STREAM} connection to localhost using a socket with an
        address family of C{family} and assert that the result of
        L{iocpsupport.get_accept_addrs} is consistent with the result of
        C{socket.getsockname} and C{socket.getpeername}.
        """
        msg("family = %r" % (family,))
        port = socket(family, SOCK_STREAM)
        self.addCleanup(port.close)
        port.bind(('', 0))
        port.listen(1)
        client = socket(family, SOCK_STREAM)
        self.addCleanup(client.close)
        client.setblocking(False)
        # A non-blocking connect is expected to report "in progress" rather
        # than complete immediately.
        try:
            client.connect((localhost, port.getsockname()[1]))
        except error, (errnum, message):
            self.assertIn(errnum, (errno.EINPROGRESS, errno.EWOULDBLOCK))
        server = socket(family, SOCK_STREAM)
        self.addCleanup(server.close)
        # Buffer the accept call fills with local and remote address data.
        buff = array('c', '\0' * 256)
        self.assertEqual(
            0, _iocp.accept(port.fileno(), server.fileno(), buff, None))
        # SO_UPDATE_ACCEPT_CONTEXT must be set before address queries work on
        # a socket accepted this way.
        server.setsockopt(
            SOL_SOCKET, SO_UPDATE_ACCEPT_CONTEXT, pack('P', server.fileno()))
        self.assertEqual(
            (family, client.getpeername()[:2], client.getsockname()[:2]),
            _iocp.get_accept_addrs(server.fileno(), buff))
    def test_ipv4AcceptAddress(self):
        """
        L{iocpsupport.get_accept_addrs} returns a three-tuple of address
        information about the socket associated with the file descriptor passed
        to it. For a connection using IPv4:
          - the first element is C{AF_INET}
          - the second element is a two-tuple of a dotted decimal notation IPv4
            address and a port number giving the peer address of the connection
          - the third element is the same type giving the host address of the
            connection
        """
        self._acceptAddressTest(AF_INET, '127.0.0.1')
    def test_ipv6AcceptAddress(self):
        """
        Like L{test_ipv4AcceptAddress}, but for IPv6 connections. In this case:
          - the first element is C{AF_INET6}
          - the second element is a two-tuple of a hexadecimal IPv6 address
            literal and a port number giving the peer address of the connection
          - the third element is the same type giving the host address of the
            connection
        """
        self._acceptAddressTest(AF_INET6, '::1')
    # Skip the IPv6 test when the host lacks IPv6 (probed at import time).
    if ipv6Skip is not None:
        test_ipv6AcceptAddress.skip = ipv6Skip
class IOCPReactorTests(unittest.TestCase):
    def test_noPendingTimerEvents(self):
        """
        Test reactor behavior (doIteration) when there are no pending time
        events.
        """
        ir = IOCPReactor()
        ir.wakeUp()
        self.assertFalse(ir.doIteration(None))
    def test_reactorInterfaces(self):
        """
        Verify that IOCP socket-representing classes implement IReadWriteHandle
        """
        self.assertTrue(verifyClass(IReadWriteHandle, tcp.Connection))
        self.assertTrue(verifyClass(IReadWriteHandle, udp.Port))
    def test_fileHandleInterfaces(self):
        """
        Verify that L{Filehandle} implements L{IPushProducer}.
        """
        self.assertTrue(verifyClass(IPushProducer, FileHandle))
    def test_maxEventsPerIteration(self):
        """
        Verify that we don't lose an event when more than EVENTS_PER_LOOP
        events occur in the same reactor iteration
        """
        # Minimal stand-in for a file descriptor: only the completion
        # callback and log prefix are exercised by the event loop here.
        class FakeFD:
            counter = 0
            def logPrefix(self):
                return 'FakeFD'
            def cb(self, rc, bytes, evt):
                self.counter += 1
        ir = IOCPReactor()
        fd = FakeFD()
        event = _iocp.Event(fd.cb, fd)
        # Queue one more event than a single iteration is allowed to drain.
        for _ in range(EVENTS_PER_LOOP + 1):
            ir.port.postEvent(0, KEY_NORMAL, event)
        ir.doIteration(None)
        # First pass handles exactly EVENTS_PER_LOOP events ...
        self.assertEqual(fd.counter, EVENTS_PER_LOOP)
        ir.doIteration(0)
        # ... and the overflow event survives to the next iteration.
        self.assertEqual(fd.counter, EVENTS_PER_LOOP + 1)
|
mlflow/mlflow | examples/hyperparam/search_hyperopt.py | Python | apache-2.0 | 6,367 | 0.002984 | """
Example of hyperparameter search in MLflow using Hyperopt.
The run method will instantiate and run Hyperopt optimizer. Each parameter configuration is
evaluated in a new MLflow run invoking main entry point with selected parameters.
The runs are evaluated based on validation set loss. Test set score is calculated to verify the
results.
This example currently does not support parallel execution.
"""
import click
import numpy as np
from hyperopt import fmin, hp, tpe, rand
import mlflow.projects
from mlflow.tracking.client import MlflowClient
# Largest representable float64 — sentinel loss for the "null model" bounds.
_inf = np.finfo(np.float64).max
@click.command(
    help="Perform hyperparameter search with Hyperopt library. Optimize dl_train target."
)
@click.option("--max-runs", type=click.INT, default=10, help="Maximum number of runs to evaluate.")
@click.option("--epochs", type=click.INT, default=500, help="Number of epochs")
@click.option("--metric", type=click.STRING, default="rmse", help="Metric to optimize on.")
@click.option("--algo", type=click.STRING, default="tpe.suggest", help="Optimizer algorithm.")
@click.option("--seed", type=click.INT, default=97531, help="Seed for the random generator")
@click.argument("training_data")
def train(training_data, max_runs, epochs, metric, algo, seed):
    """
    Run hyperparameter optimization.

    Searches over (learning_rate, momentum) with Hyperopt; each candidate is
    evaluated by launching this project's ``train`` entry point as a nested
    MLflow run, and the metrics of the best child run (lowest validation
    loss) are logged on the parent run.
    """
    tracking_client = mlflow.tracking.MlflowClient()

    def new_eval(
        nepochs, experiment_id, null_train_loss, null_valid_loss, null_test_loss, return_all=False
    ):
        """
        Create a new eval function.

        :param nepochs: Number of epochs to train the model.
        :param experiment_id: Experiment id for the training run.
        :param null_train_loss: Loss of a null model on the training dataset.
        :param null_valid_loss: Loss of a null model on the validation dataset.
        :param null_test_loss: Loss of a null model on the test dataset.
        :param return_all: Return train, validation and test loss if set.
        :return: new eval function.
        """

        def eval(params):
            """
            Train Keras model with given parameters by invoking MLflow run.

            Notice we store runUuid and resulting metric in a file. We will later use these to pick
            the best run and to log the runUuids of the child runs as an artifact. This is a
            temporary workaround until MLflow offers better mechanism of linking runs together.

            :param params: Parameters to the train_keras script we optimize over:
                           learning_rate, momentum
            :return: The metric value evaluated on the validation data.
            """
            import mlflow.tracking

            lr, momentum = params
            with mlflow.start_run(nested=True) as child_run:
                p = mlflow.projects.run(
                    uri=".",
                    entry_point="train",
                    run_id=child_run.info.run_id,
                    parameters={
                        "training_data": training_data,
                        "epochs": str(nepochs),
                        "learning_rate": str(lr),
                        "momentum": str(momentum),
                        "seed": seed,
                    },
                    experiment_id=experiment_id,
                    use_conda=False,  # We are already in the environment
                    synchronous=False,  # Allow the run to fail if a model is not properly created
                )
                succeeded = p.wait()
                mlflow.log_params({"lr": lr, "momentum": momentum})
            if succeeded:
                training_run = tracking_client.get_run(p.run_id)
                metrics = training_run.data.metrics
                # cap the loss at the loss of the null model
                train_loss = min(null_train_loss, metrics["train_{}".format(metric)])
                valid_loss = min(null_valid_loss, metrics["val_{}".format(metric)])
                test_loss = min(null_test_loss, metrics["test_{}".format(metric)])
            else:
                # run failed => return null loss
                tracking_client.set_terminated(p.run_id, "FAILED")
                train_loss = null_train_loss
                valid_loss = null_valid_loss
                test_loss = null_test_loss
            mlflow.log_metrics(
                {
                    "train_{}".format(metric): train_loss,
                    "val_{}".format(metric): valid_loss,
                    "test_{}".format(metric): test_loss,
                }
            )
            if return_all:
                return train_loss, valid_loss, test_loss
            return valid_loss

        return eval

    # Search space over the two tuned hyperparameters.
    space = [
        hp.uniform("lr", 1e-5, 1e-1),
        hp.uniform("momentum", 0.0, 1.0),
    ]

    with mlflow.start_run() as run:
        experiment_id = run.info.experiment_id
        # Evaluate the null model first; its losses cap every real candidate.
        train_null_loss, valid_null_loss, test_null_loss = new_eval(
            0, experiment_id, _inf, _inf, _inf, True
        )(params=[0, 0])
        best = fmin(
            fn=new_eval(epochs, experiment_id, train_null_loss, valid_null_loss, test_null_loss),
            space=space,
            algo=tpe.suggest if algo == "tpe.suggest" else rand.suggest,
            max_evals=max_runs,
        )
        mlflow.set_tag("best params", str(best))
        # find the best run, log its metrics as the final metrics of this run.
        client = MlflowClient()
        runs = client.search_runs(
            [experiment_id], "tags.mlflow.parentRunId = '{run_id}' ".format(run_id=run.info.run_id)
        )
        best_val_train = _inf
        best_val_valid = _inf
        best_val_test = _inf
        best_run = None
        for r in runs:
            if r.data.metrics["val_rmse"] < best_val_valid:
                best_run = r
                best_val_train = r.data.metrics["train_rmse"]
                best_val_valid = r.data.metrics["val_rmse"]
                best_val_test = r.data.metrics["test_rmse"]
        mlflow.set_tag("best_run", best_run.info.run_id)
        mlflow.log_metrics(
            {
                "train_{}".format(metric): best_val_train,
                "val_{}".format(metric): best_val_valid,
                "test_{}".format(metric): best_val_test,
            }
        )
if __name__ == "__main__":
    # Click parses the command-line flags and invokes the decorated command.
    train()
|
wanghongjuan/crosswalk-test-suite | apptools/apptools-android-tests/apptools/manifest_multiple_icons.py | Python | bsd-3-clause | 7,176 | 0.002508 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Liu, Yun <yunx.liu@intel.com>
import unittest
import os
import comm
from xml.etree import ElementTree
import json
class TestCrosswalkApptoolsFunctions(unittest.TestCase):
    """Build the org.xwalk.test app with various icon entries in its
    manifest.json and check whether ``crosswalk-app build`` succeeds."""

    # Path of the generated app's manifest, relative to comm.ConstPath.
    MANIFEST = "/../tools/org.xwalk.test/app/manifest.json"

    def _build_with_manifest(self, mutate):
        """Create the test app, apply *mutate* to its parsed manifest.json,
        rebuild and run the app, then clean up.

        Returns the exit status of the ``crosswalk-app build`` command."""
        comm.setUp()
        comm.create(self)
        os.chdir('org.xwalk.test')
        manifest_path = comm.ConstPath + self.MANIFEST
        with open(manifest_path, "r") as jsonfile:
            jsonDict = json.loads(jsonfile.read())
        mutate(jsonDict)
        with open(manifest_path, "w") as jsonfile:
            json.dump(jsonDict, jsonfile)
        buildcmd = comm.HOST_PREFIX + comm.PackTools + "crosswalk-app build"
        buildstatus = os.system(buildcmd)
        comm.run(self)
        comm.clear("org.xwalk.test")
        return buildstatus

    def _set_icon_field(self, field, value):
        """Return a mutator that sets icons[0][field] = value."""
        def mutate(manifest):
            manifest["icons"][0][field] = value
        return mutate

    def test_icon_change_size(self):
        # An explicit pixel size must be accepted.
        status = self._build_with_manifest(self._set_icon_field("sizes", "528x528"))
        self.assertEquals(status, 0)

    def test_icon_change_any_size(self):
        # The special "any" size must be accepted.
        status = self._build_with_manifest(self._set_icon_field("sizes", "any"))
        self.assertEquals(status, 0)

    def test_icon_gif(self):
        status = self._build_with_manifest(
            self._set_icon_field("src", "../../../icon/icon.gif"))
        self.assertEquals(status, 0)

    def test_icon_jpg(self):
        status = self._build_with_manifest(
            self._set_icon_field("src", "../../../icon/icon.jpg"))
        self.assertEquals(status, 0)

    def test_icon_bmp(self):
        status = self._build_with_manifest(
            self._set_icon_field("src", "../../../icon/icon.bmp"))
        self.assertEquals(status, 0)

    def test_icon_webp(self):
        status = self._build_with_manifest(
            self._set_icon_field("src", "../../../icon/icon.webp"))
        self.assertEquals(status, 0)

    def test_non_exist_icon(self):
        # A src pointing at a missing file must fail the build.
        status = self._build_with_manifest(self._set_icon_field("src", "icon/icon.png"))
        self.assertNotEquals(status, 0)

    def test_icons_default(self):
        # An empty icons list must fall back to the default icon.
        def clear_icons(manifest):
            manifest["icons"] = []
        status = self._build_with_manifest(clear_icons)
        self.assertEquals(status, 0)
if __name__ == '__main__':
    # Run this module's tests directly.
    unittest.main()
|
chyla/pat-lms | web/slas-web/web/views.py | Python | mit | 912 | 0.004386 | # -*- coding: utf-8 -*-
from django.shortcuts import render_to_response, render, redirect
from django.http import HttpResponseRedirect
from django.contrib import auth
from django.contrib.auth import authenticate, login, logout
def user_login(request):
    """Render the login form."""
    context = {}
    return render(request, 'user/login.html', context)
def user_auth(request):
    """Authenticate POSTed credentials; start a session and redirect home on
    success, or send the user to the invalid-login page on failure."""
    username = request.POST.get('username', '')
    password = request.POST.get('password', '')
    user = authenticate(username=username, password=password)
    if user is not None:
        login(request, user)
        return HttpResponseRedirect("/")
    else:
        return HttpResponseRedirect("/user/invalid_login")
def user_logout(request):
    """End the current session and return to the login page."""
    logout(request)
    return HttpResponseRedirect("/user/login")
def user_invalid_login(request):
    """Show the invalid-credentials page."""
    context = {}
    return render(request, 'user/invalid_login.html', context)
|
robhudson/zamboni | apps/zadmin/tests/test_views.py | Python | bsd-3-clause | 87,057 | 0.000138 | # -*- coding: utf-8 -*-
import csv
import json
from cStringIO import StringIO
from datetime import datetime
from django.conf import settings
from django.core import mail, management
from django.core.cache import cache
import mock
from nose.plugins.attrib import attr
from nose.tools import eq_
from piston.models import Consumer
from pyquery import PyQuery as pq
import amo
import amo.tests
from amo.tests import (assert_no_validation_errors, assert_required, formset,
initial)
from access.models import Group, GroupUser
from addons.models import Addon, CompatOverride, CompatOverrideRange
from amo.urlresolvers import reverse
from amo.utils import urlparams
from applications.models import AppVersion
from bandwagon.models import FeaturedCollection, MonthlyPick
from compat.cron import compatibility_report
from compat.models import CompatReport
from devhub.models import ActivityLog
from files.models import Approval, File
from stats.models import UpdateCount
from users.models import UserProfile
from users.utils import get_task_user
from versions.models import ApplicationsVersions, Version
from zadmin import forms, tasks
from zadmin.forms import DevMailerForm
from zadmin.models import EmailPreviewTopic, ValidationJob, ValidationResult
from zadmin.views import completed_versions_dirty, find_files
no_op_validation = dict(errors=0, warnings=0, notices=0, messages=[],
compatibility_summary=dict(errors=0, warnings=0,
notices=0))
class TestSiteEvents(amo.tests.TestCase):
fixtures = ['base/users', 'zadmin/tests/siteevents']
def setUp(self):
self.client.login(username='admin@mozilla.com', password='password')
def test_get(self):
url = reverse('zadmin.site_events')
response = self.client.get(url)
eq_(response.status_code, 200)
events = response.context['events']
eq_(len(events), 1)
def test_add(self):
url = reverse('zadmin.site_events')
new_event = {
'event_type': 2,
'start': '2012-01-01',
'description': 'foo',
}
response = self.client.post(url, new_event, follow=True)
eq_(response.status_code, 200)
events = response.context['events']
eq_(len(events), 2)
def test_edit(self):
url = reverse('zadmin.site_events', args=[1])
modified_event = {
'event_type': 2,
'start': '2012- | 01-01',
'description': 'bar',
}
| response = self.client.post(url, modified_event, follow=True)
eq_(response.status_code, 200)
events = response.context['events']
eq_(events[0].description, 'bar')
def test_delete(self):
url = reverse('zadmin.site_events.delete', args=[1])
response = self.client.get(url, follow=True)
eq_(response.status_code, 200)
events = response.context['events']
eq_(len(events), 0)
class TestFlagged(amo.tests.TestCase):
fixtures = ['base/users', 'zadmin/tests/flagged']
def setUp(self):
super(TestFlagged, self).setUp()
self.client.login(username='admin@mozilla.com', password='password')
self.url = reverse('zadmin.flagged')
@mock.patch.object(settings, 'MARKETPLACE', False)
def test_get(self):
response = self.client.get(self.url, follow=True)
addons = dict((a.id, a) for a in response.context['addons'])
eq_(len(addons), 3)
# 1. an addon should have latest version and approval attached
addon = Addon.objects.get(id=1)
eq_(addons[1], addon)
eq_(addons[1].version.id,
Version.objects.filter(addon=addon).latest().id)
eq_(addons[1].approval.id,
Approval.objects.filter(addon=addon).latest().id)
# 2. missing approval is ok
addon = Addon.objects.get(id=2)
eq_(addons[2], addon)
eq_(addons[2].version.id,
Version.objects.filter(addon=addon).latest().id)
eq_(addons[2].approval, None)
# 3. missing approval is ok
addon = Addon.objects.get(id=3)
eq_(addons[3], addon)
eq_(addons[3].approval.id,
Approval.objects.filter(addon=addon).latest().id)
eq_(addons[3].version, None)
@mock.patch.object(settings, 'MARKETPLACE', False)
def test_post(self):
response = self.client.post(self.url, {'addon_id': ['1', '2']},
follow=True)
self.assertRedirects(response, self.url)
assert not Addon.objects.no_cache().get(id=1).admin_review
assert not Addon.objects.no_cache().get(id=2).admin_review
addons = response.context['addons']
eq_(len(addons), 1)
eq_(addons[0], Addon.objects.get(id=3))
@mock.patch.object(settings, 'MARKETPLACE', False)
def test_empty(self):
Addon.objects.update(admin_review=False)
res = self.client.get(self.url)
eq_(set(res.context['addons']), set([]))
@mock.patch.object(settings, 'MARKETPLACE', False)
def test_addons_only(self):
Addon.objects.get(id=2).update(type=amo.ADDON_WEBAPP)
res = self.client.get(self.url)
eq_(set([r.pk for r in res.context['addons']]),
set([1, 3]))
class BulkValidationTest(amo.tests.TestCase):
fixtures = ['base/apps', 'base/platforms', 'base/addon_3615',
'base/appversion', 'base/users']
def setUp(self):
assert self.client.login(username='admin@mozilla.com',
password='password')
self.addon = Addon.objects.get(pk=3615)
self.creator = UserProfile.objects.get(username='editor')
self.version = self.addon.get_version()
ApplicationsVersions.objects.filter(
application=1, version=self.version).update(
max=AppVersion.objects.get(application=1, version='3.7a1pre'))
self.application_version = self.version.apps.all()[0]
self.application = self.application_version.application
self.min = self.application_version.min
self.max = self.application_version.max
self.curr_max = self.appversion('3.7a1pre')
self.counter = 0
self.old_task_user = settings.TASK_USER_ID
settings.TASK_USER_ID = self.creator.id
def tearDown(self):
settings.TASK_USER_ID = self.old_task_user
def appversion(self, version, application=amo.FIREFOX.id):
return AppVersion.objects.get(application=application,
version=version)
def create_job(self, **kwargs):
kw = dict(application_id=amo.FIREFOX.id,
curr_max_version=kwargs.pop('current', self.curr_max),
target_version=kwargs.pop('target',
self.appversion('3.7a3')),
creator=self.creator)
kw.update(kwargs)
return ValidationJob.objects.create(**kw)
def create_file(self, version=None, platform_id=amo.PLATFORM_ALL.id):
if not version:
version = self.version
return File.objects.create(version=version,
filename='file-%s' % self.counter,
platform_id=platform_id,
status=amo.STATUS_PUBLIC)
def create_result(self, job, f, **kwargs):
self.counter += 1
kw = dict(file=f,
validation='{}',
errors=0,
warnings=0,
notices=0,
validation_job=job,
task_error=None,
valid=0,
completed=datetime.now())
kw.update(kwargs)
return ValidationResult.objects.create(**kw)
def start_validation(self, new_max='3.7a3'):
self.new_max = self.appversion(new_max)
r = self.client.post(reverse('zadmin.start_validation'),
{'application': amo.FIREFOX.id,
'curr_max_version': self.curr_max.id,
'target_vers |
totoro72/pt1 | ep/tests/test_item_32_use_get_attr_for_lazy_attributes.py | Python | mit | 459 | 0.004357 | import unittest
from item_32_use_get_attr_for_lazy_attributes import EasyLookup
cl | ass TestGetSetAttr(unittest.TestCase):
def test_easy_lookup(self):
"""simple est to make sure it runs"""
d = {'asdf': 1, 'APPLE': 'hi'}
r = EasyLookup(d)
self.assertEqual(r.asdf, 1)
self.assertEqual(r.APPLE, 'hi')
with self.assertRaises(Attrib | uteError):
r._data # because _data is not in self._data! muahaha
|
ngannguyen/immunoseq | src/clusterBlastXml.py | Python | mit | 16,441 | 0.011313 | #!/usr/bin/env python2.6
"""
Tue Dec 4 11:54:18 PST 2012
Parse Blast XML output file and cluster sequences using greedy approach.
Input: Blast xml file
Output: Text file, each line = 1 cluster, each element of a cluster is space-separated
Algorithm summary:
Sorted sequences by descending in size
Start with the largest sequence and use that as seed
For each sequence:
Search for the closest seed that have >= %X similarity cutoff
If found such seed: add the sequence to the seed's cluster
else: the sequence becomes a seed of a new cluster
Cluster types:
1/ Seed has multiple expanded clones as matches.
1.1: The expanded clones are from the same sample with seed
1.2: The expanded clones are from at least one sample different from seed sample
2/ Seed has many | small clones | with similar motifs
Cutoffs includes: a/ minimum number of clones contribute to one motif, b/ number of samples
2.1: the clones carrying the motif are from the same sample with seed
2.2: the clones carrying the motif are from at least one different sample than seed sample
3/ Seed has no similar clones:
3.1: seed is expanded
3.2: seed is not expanded
4/ Everything else (similar to type 2 but did not pass the cutoffs, i.e seed has a small number of low-frequency hits, or too many motifs but not enough clones to support a single motif)
"""
import os, re, sys
from Bio.Blast import NCBIXML
from optparse import OptionParser
def getCloneInfo(clonestr):
#as11D;183042;size=8925
items = clonestr.lstrip('>').split(';')
sample = items[0]
size = int(items[-1].lstrip("size="))
id = items[1]
return sample, id, size
class Clone():
def __init__(self, clonestr):
sample, id, size = getCloneInfo(clonestr)
self.desc = clonestr.lstrip('>')
self.sample = sample
self.id = id
self.size = size
self.seq = ''
self.hits = {} #key = hitCloneId, val = Hit
def setSeq(self, seq):
self.seq = seq
def addHit(self, hitid, hit):
self.hits[hitid] = hit
def setFreq(self, total):
if total == 0:
raise ValueError("Error: Total sequences of sample %s is 0." %(self.sample))
else:
self.freq = 100.0*self.size/total
def __cmp__(self, other):
return cmp(self.size, other.size)
class Cluster():
def __init__(self, seed):
self.clones = [seed]
self.totalReads = seed.size
self.numClones = 1
self.seed = seed
self.motif2count = {}
def addClone(self, clone):
if clone not in self.clones:
self.totalReads += clone.size
self.numClones += 1
self.clones.append(clone)
def setType(self, type):
self.type = type
def setMotifs(self, motif2count):
self.motif2count = motif2count
def __cmp__(self, other):
return cmp(self.totalReads, other.totalReads)
def typeid2desc(id):
id2desc = { 1.1: "Multiple expanded clones from 1 sample",
1.2: "Multiple expanded clones from at least 2 samples",
2.1: "Multiple non-expanded clones carrying the same motif, from 1 sample",
2.2: "Multiple non-expanded clones carrying the same motif, from >=2 samples",
3.1: "Clone with no hit, expanded",
3.2: "Clone with no hit, non-expanded",
4: "Others"}
return id2desc[id]
def isExpanded(clone, minSize, minFreq):
if clone.size >= minSize and clone.freq >= minFreq: #expanded
return True
return False
def isSuper(motif1, motif2):
#Return True if motif1 is a superset of motif2, otherwise return False
if motif1 == motif2 or len(motif1) != len(motif2):
return False
for i, m1 in enumerate(motif1):
if m1 != '.' and m1 != motif2[i]:
return False
return True
def getClusterType(seed2cluster, options):
for seed, cluster in seed2cluster.iteritems():
seedclone = cluster.seed
if cluster.numClones == 1: #single element cluster
if isExpanded( seedclone, options.minExpSize, options.minExpFreq ):
cluster.setType(3.1)
else:
cluster.setType(3.2)
else:
numExp = 0
expSamples = []
motif2count = {}
motif2samples = {}
if isExpanded(seedclone, options.minExpSize, options.minExpFreq):
numExp += 1
expSamples.append(seedclone.sample)
for hitclone in cluster.clones:
if hitclone.desc != seed:
if isExpanded(hitclone, options.minExpSize, options.minExpFreq):
numExp += 1
if hitclone.sample not in expSamples:
expSamples.append(hitclone.sample)
if hitclone.desc not in seedclone.hits:
hit = hitclone.hits[seed]
#sys.stderr.write("Seed: %s. Hitclone: %s is not in the hits list: %s.\n" %(seed, hitclone.desc, " ".join(seedclone.hits.keys())))
else:
hit = seedclone.hits[hitclone.desc]
motif = ""
for i, q in enumerate(hit.query):
s = hit.sbjct[i]
if q == s:
motif += q
else:
motif += "."
#Search to see if any existing motif is a superset of current motif or if current motif is a super set of existing motif:
added = False
for prevmotif in motif2count.keys():
if motif == prevmotif or isSuper(prevmotif, motif):#prevmotif is a superset of current motif, update its count and don't add curr motif
motif2count[prevmotif] += 1
if hitclone.sample not in motif2samples[prevmotif]:
motif2samples[prevmotif].append(hitclone.sample)
added = True
break
if not added: #no prev motif is a super set of current motif
#check if current motif is a superset of prevmotif, add curr motif, remove previous motif
for prevmotif in motif2count.keys():
if isSuper(motif, prevmotif):
if motif not in motif2count:
motif2count[motif] = motif2count[prevmotif]
motif2samples[motif] = [ seedclone.sample ]
else:
motif2count[motif] += motif2count[prevmotif]
for sample in motif2samples[prevmotif]:
if sample not in motif2samples[motif]:
motif2samples[motif].append(sample)
del motif2count[prevmotif]
del motif2samples[prevmotif]
if motif not in motif2count:
motif2count[motif] = 1
motif2samples[motif] = [ seedclone.sample ]
else:
motif2count[motif] += 1
if hitclone.sample not in motif2samples[motif]:
motif2samples[motif].append(hitclone.sample)
if numExp >= options.minExpClones: #type 1
if len(expSamples) == 1: #only the seed clone
type = 1.1
else:
type = 1.2
else:
type = 4
for motif, count in motif2count.iteritems():
if count >= options.minMotifClones:#type 2
if len( motif2samples[motif] ) == 1:
type = 2.1
else:
type = 2.2
|
tgquintela/Mscthesis | FirmsLocations/IO/io_aggfile.py | Python | mit | 557 | 0.001795 |
"""
Module which groups all the aggregated precomputed information in order to
save computational power.
"""
import pandas as pd
fr | om FirmsLocations.Preprocess.preprocess_cols import cp2str
def read_agg(filepath):
"Read file of aggregated info."
table = pd.read_csv(filepath, sep=';')
table = cp2str(table)
return table
def read_aggregation(filepath, typevars):
## TODO
aggtable = read_agg(filepath)
aggfeatures = aggtable[typevars['feat_vars']]
agglocs = aggtable[typevars['loc_vars']]
return agglocs, aggfea | tures
|
meissnert/StarCluster-Plugins | root_6_04_14.py | Python | mit | 1,099 | 0.018198 | from starcluster.clustersetup import ClusterSetup
from starcluster.logger import log
class RootInstaller(ClusterSetup):
def run(self, nodes, master, user, user_shell, volumes):
for node in nodes:
log.info("Installing Root 6.04/14 on %s " % (node.alias))
node.ssh.execute('mkdir -p /opt/software/root')
node.ssh.execute('wget -c -P /opt/software/root https://root.cern.ch/download/root_v6.04.14.source.tar.gz')
node.ssh.execute('tar -xf /opt/software/root/root_v6.04.14.source.tar.gz -C /opt/software/root/')
node.ssh.execute('cd /opt/software/root/root-6.04.14 && ./configure')
node.ssh.execute('cd /opt/software/root/root-6.04.14 && | make')
node.ssh.execute('mkdir -p /usr/local/Modules/applications/root/;touch /usr/local/Modules/applications/root/6.04.14')
node.ssh.execute('echo "#%Module" >> /usr/local/Modules/applications/root/6.04.14')
node.ssh.execute('echo "set root /opt/software/root/root-6.04.14" >> /usr/local/Modules/applications/root/6.04.14')
node.ssh.execute('echo -e "prepen | d-path\tPATH\t\$root" >> /usr/local/Modules/applications/root/6.04.14')
|
mbouchar/xc2424scan | src/xc2424scan/ui/widgets/scanwidget.py | Python | gpl-2.0 | 27,979 | 0.004147 | # -*- coding: utf-8 -*-
# This file is part of the xc2424scan package
# Copyright (C) 2005 Mathieu Bouchard | <mbouchar@bioinfo.ulaval.ca>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY o | r FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
This is the main widget of the xc2424scan application
This widget is self contained and can be included in any other Qt4
application.
"""
__all__ = ["ScanWidget"]
from PyQt4.QtCore import QDir, QObject, QRect, Qt, SIGNAL
from PyQt4.QtGui import QWidget, QFileDialog, QListWidgetItem, QPixmap, \
QIcon, QMessageBox, QInputDialog, QLineEdit, QPainter, \
QProgressDialog, QMessageBox, QSizePolicy, QDialog, \
QLabel, QVBoxLayout, QHBoxLayout, QSpacerItem, \
QSizePolicy, QPushButton
import os
from xc2424scan import config
from xc2424scan.threadedscanlib import ThreadedXeroxC2424
from xc2424scan.scanlib import ProtectedError, SocketError, NoPreviewError
from xc2424scan.ui.widgets.scanwidgetbase import Ui_ScanWidgetBase
class ProgressFullDialog(QProgressDialog):
def __init__(self, parent = None):
QProgressDialog.__init__(self, parent)
self.setWindowTitle(_("Downloading"))
# Top level fixed size dialog
self.setWindowModality(Qt.WindowModal)
# Do not close when reaching 100%
self.setAutoClose(False)
self.setAutoReset(False)
self.__nbr_pages_ = -1
def setNbrPages(self, nbr_pages):
self.__nbr_pages_ = nbr_pages
def newpage(self, current_page, file_size):
if self.isVisible():
# Set progress value to 0 and range to file size
self.setValue(0)
self.setRange(0, file_size)
# Set label text
if self.__nbr_pages_ == 1:
self.setLabelText(_("Getting page %d") % current_page)
else:
self.setLabelText(_("Getting page %d of %d") % \
(current_page, self.__nbr_pages_))
def progress(self, received_size):
if self.isVisible():
self.setValue(self.value() + received_size)
class ProgressDialog(QDialog):
def __init__(self, parent = None):
QDialog.__init__(self, parent)
self.setWindowTitle(_("Downloading"))
# Top level fixed size dialog
self.setWindowModality(Qt.WindowModal)
self.__page_ = QLabel(self)
self.__progress_ = QLabel(self)
self.__cancel_ = QPushButton(self)
self.__downloaded_ = 0
self.__nbr_pages_ = 0
vboxlayout = QVBoxLayout(self)
# Page status
labellayout = QHBoxLayout()
labellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
labellayout.addWidget(self.__page_)
labellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
vboxlayout.addLayout(labellayout)
# Progress status
progresslayout = QHBoxLayout()
progresslayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
progresslayout.addWidget(self.__progress_)
progresslayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
vboxlayout.addLayout(progresslayout)
# Cancel button
cancellayout = QHBoxLayout()
cancellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
cancellayout.addWidget(self.__cancel_)
vboxlayout.addLayout(cancellayout)
self.__cancel_.setDefault(True)
self.__cancel_.setText("Cancel")
QObject.connect(self.__cancel_, SIGNAL("clicked()"),
self.__ui_progress_canceled_)
QObject.connect(self, SIGNAL("rejected()"),
self.__ui_progress_canceled_)
def __ui_progress_canceled_(self):
self.emit(SIGNAL("canceled()"))
def setLabelText(self, text):
self.__page_.setText(text)
def setValue(self, value):
self.__downloaded_ = value
self.progress(0)
def setNbrPages(self, nbr_pages):
self.__nbr_pages_ = nbr_pages
def newpage(self, current_page, file_size = None):
if self.isVisible():
# Set progress value to 0
self.setValue(0)
# Set label text
if self.__nbr_pages_ == 0:
# Only happens when getting a pdf file
self.__page_.setText(_("Getting file"))
elif self.__nbr_pages_ == 1:
self.__page_.setText(_("Getting page %d") % current_page)
else:
self.__page_.setText(_("Getting page %d of %d") % \
(current_page, self.__nbr_pages_))
def progress(self, received_size):
self.__downloaded_ += received_size
if self.isVisible():
size = self.__downloaded_ / 1024
if size > 1024:
size = float(size) / 1024
self.__progress_.setText("Received %.3f mb" % size)
else:
self.__progress_.setText("Received %d kb" % size)
class ProgressWrapper(QObject):
def __init__(self, parent = None):
QObject.__init__(self)
self.__progress_full_ = ProgressFullDialog(parent)
self.__progress_ = ProgressDialog(parent)
self.__current_ = None
QObject.connect(self.__progress_full_, SIGNAL("canceled()"),
self.__ui_progress_canceled_)
QObject.connect(self.__progress_, SIGNAL("canceled()"),
self.__ui_progress_canceled_)
def show(self, format, nbr_pages):
if format in ["tiff", "bmp"]:
self.__current_ = self.__progress_full_
else:
self.__current_ = self.__progress_
self.__current_.setLabelText(_("Waiting for transfer to begin"))
self.__current_.setValue(0)
self.__current_.setNbrPages(nbr_pages)
self.__current_.show()
def __ui_progress_canceled_(self):
self.emit(SIGNAL("canceled()"))
def newpage(self, current_page, file_size):
if self.__current_ is not None:
self.__current_.newpage(current_page, file_size)
def progress(self, received_size):
if self.__current_ is not None:
self.__current_.progress(received_size)
def isVisible(self):
if self.__current_ is not None:
return self.__current_.isVisible()
else:
return False
def hide(self):
if self.__current_ is not None:
self.__current_.hide()
class ScanWidget(QWidget):
"""The main scanning widget"""
def __init__(self, parent = None):
"""Create a new scanning widget
@param parent: The parent widget
@type parent: QWidget
"""
QWidget.__init__(self, parent)
self.__basewidget_ = Ui_ScanWidgetBase()
self.__basewidget_.setupUi(self)
# The threaded scanner object
self.__scanner_ = ThreadedXeroxC2424()
# List of files available on the scanner
self.__scanned_files_ = None
# Last folder visited
self.__old_folder_ = "Public"
# Progress dialog
self.__progress_ = ProgressWrapper(self)
# UI: Buttons
QObject.connect(self.__basewidget_.refresh, SIGNAL("clicked()"),
self |
openNSS/enigma2 | RecordTimer.py | Python | gpl-2.0 | 54,247 | 0.028739 | import os
from enigma import eEPGCache, getBestPlayableServiceReference, eStreamServer, eServiceReference, iRecordableService, quitMainloop, eActionMap, setPreferredTuner
from Components.config import config
from Components.UsageConfig import defaultMoviePath
from Components.SystemInfo import SystemInfo
from Components.TimerSanityCheck import TimerSanityCheck
from Screens.MessageBox import MessageBox
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
import Screens.InfoBar
import Components.ParentalControl
from Tools import Directories, Notifications, ASCIItranslit, Trashcan
from Tools.XMLTools import stringToXML
from Tools.Alternatives import ResolveCiAlternative
from Tools.CIHelper import cihelper
import timer
import xml.etree.cElementTree
import NavigationInstance
from ServiceReference import ServiceReference, isPlayableForCur
from time import localtime, strftime, ctime, time
from bisect import insort
from sys import maxint
# ok, for descriptions etc we have:
# service reference (to get the service name)
# name (title)
# description (description)
# event data (ONLY for time adjustments etc.)
# parses an event, and gives out a (begin, end, name, duration, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev, description = True):
if description:
name = ev.getEventName()
description = ev.getShortDescription()
if description == "":
description = ev.getExtendedDescription()
else:
name = ""
description = ""
begin = ev.getBeginTime()
end = begin + ev.getDuration()
eit = ev.getEventId()
begin -= config.recording.margin_before.value * 60
end += config.recording.margin_after.value * 60
return (begin, end, name, description, eit)
class AFTEREVENT:
NONE = 0
STANDBY = 1
DEEPSTANDBY = 2
AUTO = 3
def findSafeRecordPath(dirname):
if not dirname:
return None
from Components import Harddisk
dirname = os.path.realpath(dirname)
mountpoint = Harddisk.findMountPoint(dirname)
if mountpoint in ('/', '/media'):
print '[RecordTimer] media is not mounted:', dirname
return None
if not os.path.isdir(dirname):
try:
os.makedirs(dirname)
except Exception, ex:
print '[RecordTimer] Failed to create dir "%s":' % dirname, ex
return None
return dirname
def checkForRecordings():
if NavigationInstance.instance.getRecordings():
return True
rec_time = NavigationInstance.instance.RecordTimer.getNextTimerTime(isWakeup=True)
return rec_time > 0 and (rec_time - time()) < 360
def createRecordTimerEntry(timer):
return RecordTimerEntry(timer.service_ref, timer.begin, timer.end, timer.name, timer.description,\
timer.eit, timer.disabled, timer.justplay, timer.afterEvent, dirname = timer.dirname,\
tags = timer.tags, descramble = timer.descramble, record_ecm = timer.record_ecm, always_zap = timer.always_zap,\
zap_wakeup = timer.zap_wakeup, rename_repeat = timer.rename_repeat, conflict_detection = timer.conflict_detection,\
pipzap = timer.pipzap)
# please do not translate log messages
class RecordTimerEntry(timer.TimerEntry, object):
######### the following static methods and members are only in use when the box is in (soft) standby
wasInStandby = False
wasInDeepStandby = False
receiveRecordEvents = False
@staticmethod
def keypress(key=None, flag=1):
if flag and (RecordTimerEntry.wasInStandby or RecordTimerEntry.wasInDeepStandby):
RecordTimerEntry.wasInStandby = False
RecordTimerEntry.wasInDeepStandby = False
eActionMap.getInstance().unbindAction('', RecordTimerEntry.keypress)
@staticmethod
def setWasInDeepStandby():
RecordTimerEntry.wasInDeepStandby = Tru | e
eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerE | ntry.keypress)
@staticmethod
def setWasInStandby():
if not RecordTimerEntry.wasInStandby:
if not RecordTimerEntry.wasInDeepStandby:
eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
RecordTimerEntry.wasInDeepStandby = False
RecordTimerEntry.wasInStandby = True
@staticmethod
def shutdown():
quitMainloop(1)
@staticmethod
def staticGotRecordEvent(recservice, event):
if event == iRecordableService.evEnd:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evEnd)"
if not checkForRecordings():
print "No recordings busy of sceduled within 6 minutes so shutdown"
RecordTimerEntry.shutdown() # immediate shutdown
elif event == iRecordableService.evStart:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evStart)"
@staticmethod
def stopTryQuitMainloop():
print "RecordTimer.stopTryQuitMainloop"
NavigationInstance.instance.record_event.remove(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = False
@staticmethod
def TryQuitMainloop():
if not RecordTimerEntry.receiveRecordEvents and Screens.Standby.inStandby:
print "RecordTimer.TryQuitMainloop"
NavigationInstance.instance.record_event.append(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = True
# send fake event.. to check if another recordings are running or
# other timers start in a few seconds
RecordTimerEntry.staticGotRecordEvent(None, iRecordableService.evEnd)
#################################################################
def __init__(self, serviceref, begin, end, name, description, eit, disabled = False, justplay = False, afterEvent = AFTEREVENT.AUTO, checkOldTimers = False, dirname = None, tags = None, descramble = True, record_ecm = False, always_zap = False, zap_wakeup = "always", rename_repeat = True, conflict_detection = True, pipzap = False):
timer.TimerEntry.__init__(self, int(begin), int(end))
if checkOldTimers:
if self.begin < time() - 1209600:
self.begin = int(time())
if self.end < self.begin:
self.end = self.begin
assert isinstance(serviceref, ServiceReference)
if serviceref and serviceref.isRecordable():
self.service_ref = serviceref
else:
self.service_ref = ServiceReference(None)
self.eit = eit
self.dontSave = False
self.name = name
self.description = description
self.disabled = disabled
self.timer = None
self.__record_service = None
self.rec_ref = None
self.start_prepare = 0
self.justplay = justplay
self.always_zap = always_zap
self.zap_wakeup = zap_wakeup
self.pipzap = pipzap
self.afterEvent = afterEvent
self.dirname = dirname
self.dirnameHadToFallback = False
self.autoincrease = False
self.autoincreasetime = 3600 * 24 # 1 day
self.tags = tags or []
self.descramble = descramble
self.record_ecm = record_ecm
self.rename_repeat = rename_repeat
self.conflict_detection = conflict_detection
self.external = self.external_prev = False
self.setAdvancedPriorityFrontend = None
if SystemInfo["DVB-T_priority_tuner_available"] or SystemInfo["DVB-C_priority_tuner_available"] or SystemInfo["DVB-S_priority_tuner_available"] or SystemInfo["ATSC_priority_tuner_available"]:
rec_ref = self.service_ref and self.service_ref.ref
str_service = rec_ref and rec_ref.toString()
if str_service and '%3a//' not in str_service and not str_service.rsplit(":", 1)[1].startswith("/"):
type_service = rec_ref.getUnsignedData(4) >> 16
if type_service == 0xEEEE:
if SystemInfo["DVB-T_priority_tuner_available"] and config.usage.recording_frontend_priority_dvbt.value != "-2":
if config.usage.recording_frontend_priority_dvbt.value != config.usage.frontend_priority.value:
self.setAdvancedPriorityFrontend = config.usage.recording_frontend_priority_dvbt.value
if SystemInfo["ATSC_priority_tuner_available"] and config.usage.recording_frontend_priority_atsc.value != "-2":
if config.usage.recording_frontend_priority_atsc.value != config.usage.frontend_priority.value:
self.setAdvancedPriorityFrontend = config.usage.recording_frontend_priority_atsc.value
elif type_service == 0xFFFF:
if SystemInfo["DVB-C_priority_tuner_available"] and config.usage.recording_frontend_priority_dvbc.value != "-2":
if config.usage.recording_frontend_priority_dvbc.value != config.usage.frontend_priority.value:
self.setAdvancedPriorityFrontend = config.usage.recording_fronte |
sgraham/nope | tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py | Python | bsd-3-clause | 4,879 | 0.009633 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry import decorators
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.core.backends.chrome import cros_test_case
class CrOSCryptohomeTest(cros_test_case.CrOSTestCase):
@decorators.Enabled('chromeos')
def testCryptohome(self):
"""Verifies cryptohome mount status for regular and guest user and when
logged out"""
with self._CreateBrowser() as b:
self.assertEquals(1, len(b.tabs))
self.assertTrue(b.tabs[0].url)
self.assertTrue(self._IsCryptohomeMounted())
# TODO(achuith): Remove dependency on /home/chronos/user.
chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user')
self.assertTrue(chronos_fs)
if self._is_guest:
self.assertEquals(chronos_fs, 'guestfs')
else:
crypto_fs = self._cri.FilesystemMountedAt(
self._cri.CryptohomePath(self._username))
self.assertEquals(crypto_fs, chronos_fs)
self.assertFalse(self._IsCryptohomeMounted())
self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'),
'/dev/mapper/encstateful')
class CrOSLoginTest(cros_test_case.CrOSTestCase):
@decorators.Enabled('chromeos')
def testLoginStatus(self):
"""Tests autotestPrivate.loginStatus"""
if self._is_guest:
return
with self._CreateBrowser(autotest_ext=True) as b:
login_status = self._GetLoginStatus(b)
self.assertEquals(type(login_status), dict)
self.assertEquals(not self._is_guest, login_status['isRegular | User'])
self.assertEquals(self._is_guest, login_status['isGuest'])
self.asser | tEquals(login_status['email'], self._username)
self.assertFalse(login_status['isScreenLocked'])
@decorators.Enabled('chromeos')
def testLogout(self):
"""Tests autotestPrivate.logout"""
if self._is_guest:
return
with self._CreateBrowser(autotest_ext=True) as b:
extension = self._GetAutotestExtension(b)
try:
extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')
except exceptions.Error:
pass
util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)
@decorators.Enabled('chromeos')
def testGaiaLogin(self):
"""Tests gaia login. Credentials are expected to be found in a
credentials.txt file, with a single line of format username:password."""
if self._is_guest:
return
(username, password) = self._Credentials('credentials.txt')
if username and password:
with self._CreateBrowser(gaia_login=True,
username=username,
password=password):
self.assertTrue(util.WaitFor(self._IsCryptohomeMounted, 10))
class CrOSScreenLockerTest(cros_test_case.CrOSTestCase):
  """Exercises locking and unlocking the Chrome OS screen via
  autotestPrivate and the OOBE/lock-screen web UI."""
  def _IsScreenLocked(self, browser):
    # autotestPrivate.loginStatus reports the lock state.
    return self._GetLoginStatus(browser)['isScreenLocked']
  def _LockScreen(self, browser):
    """Locks the screen and waits for the lock-screen UI to come up."""
    self.assertFalse(self._IsScreenLocked(browser))
    extension = self._GetAutotestExtension(browser)
    self.assertTrue(extension.EvaluateJavaScript(
        "typeof chrome.autotestPrivate.lockScreen == 'function'"))
    logging.info('Locking screen')
    extension.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();')
    logging.info('Waiting for the lock screen')
    def ScreenLocked():
      # The lock screen reuses the OOBE web UI; it is ready once the Oobe
      # test hooks are reachable from the oobe context.
      return (browser.oobe_exists and
          browser.oobe.EvaluateJavaScript("typeof Oobe == 'function'") and
          browser.oobe.EvaluateJavaScript(
          "typeof Oobe.authenticateForTesting == 'function'"))
    util.WaitFor(ScreenLocked, 10)
    self.assertTrue(self._IsScreenLocked(browser))
  def _AttemptUnlockBadPassword(self, browser):
    """Tries a wrong password; the error bubble must appear and the screen
    must stay locked."""
    logging.info('Trying a bad password')
    def ErrorBubbleVisible():
      return not browser.oobe.EvaluateJavaScript('''
          document.getElementById('bubble').hidden
          ''')
    self.assertFalse(ErrorBubbleVisible())
    browser.oobe.ExecuteJavaScript('''
        Oobe.authenticateForTesting('%s', 'bad');
        ''' % self._username)
    util.WaitFor(ErrorBubbleVisible, 10)
    self.assertTrue(self._IsScreenLocked(browser))
  def _UnlockScreen(self, browser):
    """Unlocks with the real password and waits for the lock UI to go away."""
    logging.info('Unlocking')
    browser.oobe.ExecuteJavaScript('''
        Oobe.authenticateForTesting('%s', '%s');
        ''' % (self._username, self._password))
    util.WaitFor(lambda: not browser.oobe_exists, 10)
    self.assertFalse(self._IsScreenLocked(browser))
  # Disabled: lock/unlock flow; see _LockScreen/_UnlockScreen helpers above.
  @decorators.Disabled
  def testScreenLock(self):
    """Tests autotestPrivate.screenLock"""
    if self._is_guest:
      return
    with self._CreateBrowser(autotest_ext=True) as browser:
      self._LockScreen(browser)
      self._AttemptUnlockBadPassword(browser)
      self._UnlockScreen(browser)
|
leleopard/pyXPArduino | gui/alert_dialog.py | Python | gpl-3.0 | 2,716 | 0.002946 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'alert_dialog.ui'
#
# Created by: PyQt5 UI code generator 5.10
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """pyuic5-generated UI for a simple alert dialog: an icon/message row,
    an exception-details label and a Close-only button box.

    NOTE: generated from alert_dialog.ui -- manual edits are lost on
    regeneration (see the header warning).
    """
    def setupUi(self, Dialog):
        """Build the widget hierarchy and layouts on *Dialog*."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(458, 128)
        self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        # Top row: fixed-size error icon next to a wrapping message label.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.icon_label = QtWidgets.QLabel(Dialog)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.icon_label.sizePolicy().hasHeightForWidth())
        self.icon_label.setSizePolicy(sizePolicy)
        self.icon_label.setText("")
        self.icon_label.setPixmap(QtGui.QPixmap(":/newPrefix/error_icon.png"))
        self.icon_label.setObjectName("icon_label")
        self.horizontalLayout.addWidget(self.icon_label)
        self.msg_label = QtWidgets.QLabel(Dialog)
        self.msg_label.setWordWrap(True)
        self.msg_label.setObjectName("msg_label")
        self.horizontalLayout.addWidget(self.msg_label)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Smaller, top-aligned, wrapping label for the exception details.
        self.exception_label = QtWidgets.QLabel(Dialog)
        font = QtGui.QFont()
        font.setPointSize(9)
        self.exception_label.setFont(font)
        self.exception_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
        self.exception_label.setWordWrap(True)
        self.exception_label.setObjectName("exception_label")
        self.verticalLayout.addWidget(self.exception_label)
        # Close-only button box; both signals simply dismiss the dialog.
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Close)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)
        self.retranslateUi(Dialog)
        self.buttonBox.accepted.connect(Dialog.accept)
        self.buttonBox.rejected.connect(Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)
    def retranslateUi(self, Dialog):
        """Set all user-visible strings (placeholder texts from the .ui file)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.msg_label.setText(_translate("Dialog", "TextLabel"))
        self.exception_label.setText(_translate("Dialog", "TextLabel"))
import resources_rc
|
gangadharkadam/shfr | frappe/website/doctype/blog_post/blog_post.py | Python | mit | 3,543 | 0.024273 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, re
from frappe.website.website_generator import WebsiteGenerator
from frappe.website.render import clear_cache
from frappe.utils import today, cint, global_date_format, get_fullname
from frappe.website.utils import find_first_image, get_comment_list
from frappe.templates.pages.blog import get_children
class BlogPost(WebsiteGenerator):
	# WebsiteGenerator configuration: only published posts get a route,
	# rendered with the template below, listed newest-first and nested
	# under their blog category's website route.
	condition_field = "published"
	template = "templates/generators/blog_post.html"
	save_versions = True
	order_by = "published_on desc"
	parent_website_route_field = "blog_category"
	page_title_field = "title"
	def validate(self):
		"""Derive the intro text, default the publish date and keep the
		blogger's denormalized post count up to date."""
		super(BlogPost, self).validate()
		if not self.blog_intro:
			# Default the intro to the start of the content, stripped of tags.
			self.blog_intro = self.content[:140]
			self.blog_intro = re.sub("\<[^>]*\>", "", self.blog_intro)
		if self.blog_intro:
			# Cap at 140 chars (also applies to user-supplied intros).
			self.blog_intro = self.blog_intro[:140]
		if self.published and not self.published_on:
			self.published_on = today()
		# update posts: refresh the blogger's cached post count
		frappe.db.sql("""update tabBlogger set posts=(select count(*) from `tabBlog Post`
			where ifnull(blogger,'')=tabBlogger.name)
			where name=%s""", (self.blogger,))
	def on_update(self):
		# Invalidate the cached writers page whenever a post changes.
		WebsiteGenerator.on_update(self)
		clear_cache("writers")
	def get_context(self, context):
		"""Build the template context used to render this post's page."""
		# this is for double precaution. usually it wont reach this code if not published
		if not cint(self.published):
			raise Exception, "This blog has not been published yet!"
		# temp fields
		context.full_name = get_fullname(self.owner)
		context.updated = global_date_format(self.published_on)
		if self.blogger:
			context.blogger_info = frappe.get_doc("Blogger", self.blogger).as_dict()
		context.description = self.blog_intro or self.content[:140]
		context.metatags = {
			"name": self.title,
			"description": context.description,
		}
		# Use the first image found in the content (if any) for previews.
		image = find_first_image(self.content)
		if image:
			context.metatags["image"] = image
		context.categories = frappe.db.sql_list("""select name from
			`tabBlog Category` order by name""")
		context.comment_list = get_comment_list(self.doctype, self.name)
		context.children = get_children()
		return context
def clear_blog_cache():
	"""Invalidate the cached page of every published blog post, plus the
	writers listing page."""
	published_pages = frappe.db.sql_list("""select page_name from
		`tabBlog Post` where ifnull(published,0)=1""")
	for page_name in published_pages:
		clear_cache(page_name)
	clear_cache("writers")
@frappe.whitelist(allow_guest=True)
def get_blog_list(start=0, by=None, category=None):
	"""Return one page (up to 20) of published blog posts, newest first.

	:param start: pagination offset into the result set
	:param by: optional blogger name to filter on
	:param category: optional blog category name to filter on
	:return: list of dicts with post metadata and a trimmed intro text
	"""
	# This endpoint is reachable by unauthenticated guests, so the filter
	# values must never be spliced into the SQL string.  The previous
	# `by.replace("'", "\'")` was a no-op ("\'" is just "'") and allowed
	# SQL injection; use parameterized values instead.
	condition = ""
	values = {"start": cint(start)}
	if by:
		condition += " and t1.blogger=%(by)s"
		values["by"] = by
	if category:
		condition += " and t1.blog_category=%(category)s"
		values["category"] = category
	query = """\
		select
			t1.title, t1.name,
			concat(t1.parent_website_route, "/", t1.page_name) as page_name,
			t1.published_on as creation,
			day(t1.published_on) as day, monthname(t1.published_on) as month,
			year(t1.published_on) as year,
			ifnull(t1.blog_intro, t1.content) as content,
			t2.full_name, t2.avatar, t1.blogger,
			(select count(name) from `tabComment` where
				comment_doctype='Blog Post' and comment_docname=t1.name) as comments
		from `tabBlog Post` t1, `tabBlogger` t2
		where ifnull(t1.published,0)=1
		and t1.blogger = t2.name
		""" + condition + """
		order by published_on desc, name asc
		limit %(start)s, 20"""
	result = frappe.db.sql(query, values, as_dict=1)
	# Format the date for display and trim the intro shown on listing pages.
	for res in result:
		res['published'] = global_date_format(res['creation'])
		res['content'] = res['content'][:140]
	return result
|
byrnereese/uphold-sdk-python | uphold/uphold.py | Python | mit | 15,514 | 0.00419 | """
Uphold Python SDK
This is a python module to ease integration between python apps and the Uphold API.
Repo: http://github.com/byrnereese/uphold-python-sdk
TODO
* Create custom exceptions for common errors
* Add support for updating records
* Turn off authentication/authorization for public transactions (or make that optional)
* Transmit a User-Agent field
METHODS TO ADD SUPPORT FOR
url = 'https://api.uphold.com/v1/me/transactions'
url = 'https://api.uphold.com/v1/me/cards/2b2eb351-b1cc-48f7-a3d0-cb4f1721f3a3'
url = 'https://api.uphold.com/v1/me/cards/2b2eb351-b1cc-48f7-a3d0-cb4f1721f3a3/transactions'
url = 'https://api.uphold.com/v1/reserve/transactions/a97bb994-6e24-4a89-b653-e0a6d0bcf634'
"""
from __future__ import print_function, unicode_literals
import urllib3
import requests
import json
import ssl
from .version import __version__
class VerificationRequired(Exception):
    """Raised when the API requires a one-time verification (OTP) code."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return "%r" % (self.value,)
class RateLimitError(Exception):
    """Raised when the Uphold API reports that the rate limit was exceeded."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return "{!r}".format(self.value)
class NotSupportedInProduction(Exception):
    """Raised for operations that only work against the sandbox API."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return "%r" % (self.value,)
class Uphold(object):
    """
    Use this SDK to simplify interaction with the Uphold API
    """
    def __init__(self, sandbox=False):
        """Configure the client.

        :param Boolean sandbox When True, talk to the api-sandbox host
            instead of the production API.
        """
        if sandbox:
            self.host = 'api-sandbox.uphold.com'
        else:
            self.host = 'api.uphold.com'
        self.in_sandbox = sandbox
        self.debug = False
        self.version = 0
        # One shared requests session so connections are reused across calls.
        self.session = requests.Session()
        self.headers = {
            'Content-type': 'application/x-www-form-urlencoded',
            'User-Agent': 'uphold-python-sdk/' + __version__
        }
        # pat: personal access token; otp: one-time verification code.
        self.pat = None
        self.otp = None
    def _debug(self, s):
        # Internal helper: print *s* only when debug output is enabled.
        if self.debug:
            print(s)

    def verification_code(self, code):
        """Store a one-time verification (OTP) code to send with later requests."""
        self.otp = code

    def auth_basic(self, username, password):
        """
        Authenticates against the Uphold backend using a username and password. Uphold
        return an User Auth Token, which is persisted for the life of the session.

        :param String username An Uphold username or email address.

        :param String password The password corresponding to the specified username.
        """
        # Storing the credentials and clearing `pat` switches subsequent
        # requests back to basic authentication.
        self.username = username
        self.password = password
        self.pat = None

    def auth_pat(self, pat):
        """
        Sets the authentication method to PAT, or "Personal Access Token." Before calling this
        method, a PAT needs to be created using the create_pat() method.

        :param String pat The personal access token
        """
        self.username = None
        self.password = None
        self.pat = pat
def create_pat(self, desc):
"""
Creates a personal access token.
:param String desc A description for the token
:rtype:
A string representing the Personal Access Token
"""
params = {
'description': desc
}
self.headers['Content-Type'] = 'application/json'
data = self._post('/me/tokens', params)
return data.get('accessToken')
def get_pats(self):
"""
Returns a list of personal access tokens.
:rtype:
A list of personal access tokens
"""
self.headers['Content-Type'] = 'application/json'
data = self._get('/me/tokens')
return data
    def get_me(self):
        """
        Returns a hash containing a comprehensive summary of the current user in content. The data
        returned contains profile data, a list of the users cards, recent transactions and more.

        :rtype:
          A hash containing all user's properties.
        """
        return self._get('/me')

    def get_contacts(self):
        """
        Returns all of the contacts associated with the current users.

        :rtype:
          An array of hashes containing all the contacts of the current user's properties.
        """
        return self._get('/me/contacts')

    def get_contact(self, contact):
        """
        Returns the contact associated with the contact id.

        :param String contact The id of the contact to retrieve.

        :rtype:
          An hash containing the contact requested.
        """
        return self._get('/me/contacts/{}'.format(contact))
def create_contact(self, first_name, last_name, company, emails=[], bitcoin_addresses=[]):
fields = {
'firstName': first_name,
'lastName': last_name,
'company': company,
'emails': emails,
'addresses': bitcoin_addresses
}
return self._post('/me/contacts', fields)
    def get_cards(self):
        """
        Returns all of the cards associated with the current users.

        :rtype:
          An array of hashes containing all the cards of the current user.
        """
        return self._get('/me/cards')

    def get_card(self, c):
        """
        Return the details of a single card belonging to the current user.

        :param String c The card ID of the card you wish to retrieve.

        :rtype:
          A hash describing the requested card.
        """
        return self._get('/me/cards/' + c)

    def get_card_transactions(self, card):
        """
        Requests a list of transactions associated with a specific card.

        :param String card The card ID whose transactions are requested.

        :rtype:
          An array of hashes containing all the card transactions.
        """
        return self._get('/me/cards/{}/transactions'.format(card))

    def get_phones(self):
        """
        Returns all of the phone numbers associated with the current user.

        :rtype:
          An array of hashes containing all the phone numbers of the current user.
        """
        return self._get('/me/phones')

    def get_reserve_statistics(self):
        """
        Returns statistics about the Uphold reserve.

        :rtype:
          The payload of the /reserve/statistics endpoint.
        """
        return self._get('/reserve/statistics')

    def get_reserve_ledger(self):
        """
        Returns all the rows belonging to the ledger. Each row documents a change in
        the reserve's assets or its liabilities.

        :rtype:
          An array of ledger entries.
        """
        return self._get('/reserve/ledger')

    def get_reserve_chain(self):
        """
        Returns the entire Reservechain consisting of all of the transactions conducted
        by its members. These transactions are 100% anonymous.

        :rtype:
          An array of transactions.
        """
        return self._get('/reserve/transactions')

    def get_reserve_transaction(self, transaction):
        """
        Returns a public transaction from the Reservechain. These transactions are 100% anonymous.

        :param String transaction The id of the transaction to retrieve.

        :rtype:
          An array with the transaction.
        """
        return self._get('/reserve/transactions/{}'.format(transaction))

    def get_transactions(self):
        """
        Requests a list of transactions associated with the current user.

        :rtype:
          An array of hashes containing all the current user's transactions.
        """
        return self._get('/me/transactions')
def prepare_txn(self, card, to, amount, denom):
"""
Developers can optionally prepare a transaction in order to preview a transaction
prior to it being executed. A prepared transaction has a TTL (time-to-live) of 30
seconds. Within that time, the transaction can be executed at a guaranteed price.
:param String card_id The card ID from which to draw funds.
:param String to The recipient of the funds. Can be in the form of a bitcoin
address, an email address, or an Uphold username.
:param Float/Decimal amount The amount to send.
:param String denom The denomination to send. Permissible values are USD, GBP,
CNY, JPY, EUR, and BTC.
:rtype:
A transaction object.
"""
fields = {
'denomination[currency]': denom,
'denomination[amount]' |
carlospalol/money | money/tests/test_docs.py | Python | mit | 424 | 0.004717 | # -*- coding: utf-8 -*-
"""
Money doctests as unittest Suite
"""
# RADAR: Python2
from __future__ import absolute_import
import doctest
import unittest
# RADAR: Python2
import money.six
# Files whose doctests are collected into the suite, relative to this module.
FILES = (
    '../../README.rst',
)
def load_tests(loader, tests, pattern):
    """unittest load_tests hook exposing the README doctests as a suite."""
    # The doctests are written for Python 3; under Python 2 return an
    # empty suite instead of collecting them.
    if not money.six.PY2:
        return doctest.DocFileSuite(*FILES)
    return unittest.TestSuite()
|
dhh1128/lect | docs/source/conf.py | Python | apache-2.0 | 9,538 | 0.007968 | # -*- coding: utf-8 -*-
#
# lect documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 2 17:40:25 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all pos | sible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commen | ted out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Lect'
copyright = u'(for docs) 2013, Daniel Hardman'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'lectdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'lect.tex', u'lect Documentation',
u'Daniel Hardman', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'lect', u'lect Documentation',
[u'Daniel Hardman'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'lect', u'lect Documentation',
u'Daniel Hardman', 'lect', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'lect'
epub_author = u'Daniel Hardman'
epub_publisher = u'Daniel Hardman'
epub_copyright = u'2013, Dani |
cppisfun/GameEngine | foreign/boost/tools/build/v2/test/conditionals3.py | Python | gpl-3.0 | 713 | 0.001403 | #!/usr/bin/python
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Test that conditional properties work, even if property is free, and | value
# includes a colon.
import BoostBuild
t = BoostBuild.Tester()
t.write("jamroot.jam", """
exe hello : hello.cpp : <variant>debug:<define>CLASS=Foo::Bar ;
""")
t.write("hello.cpp", """
namespace Foo { class Bar { } ; }
int main()
{
CLASS c;
c; // Disables the unused variable warning.
}
""")
t.run_build_system(stdout=None, stderr=None)
t.exp | ect_addition("bin/$toolset/debug/hello.exe")
t.cleanup()
|
Zen-CODE/kivy | kivy/adapters/dictadapter.py | Python | mit | 5,874 | 0.00017 | '''
DictAdapter
===========
.. versionadded:: 1.5
.. warning::
This code is still experimental, and its API is subject to change in a
future version.
A :class:`~kivy.adapters.dictadapter.DictAdapter` is an adapter around a
python dictionary of records. It extends the list-like capabilities of the
:class:`~kivy.adapters.listadapter.ListAdapter`.
If you wish to have a bare-bones list adapter, without selection, use the
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter`.
'''
__all__ = ('DictAdapter', )
from kivy.properties import ListProperty, DictProperty
from kivy.adapters.listadapter import ListAdapter
class DictAdapter(ListAdapter):
    '''A :class:`~kivy.adapters.dictadapter.DictAdapter` is an adapter around a
    python dictionary of records. It extends the list-like capabilities of
    the :class:`~kivy.adapters.listadapter.ListAdapter`.
    '''

    sorted_keys = ListProperty([])
    '''The sorted_keys list property contains a list of hashable objects (can
    be strings) that will be used directly if no args_converter function is
    provided. If there is an args_converter, the record received from a
    lookup of the data, using keys from sorted_keys, will be passed
    to it for instantiation of list item view class instances.

    :attr:`sorted_keys` is a :class:`~kivy.properties.ListProperty` and
    defaults to [].
    '''

    data = DictProperty(None)
    '''A dict that indexes records by keys that are equivalent to the keys in
    sorted_keys, or they are a superset of the keys in sorted_keys.

    The values can be strings, class instances, dicts, etc.

    :attr:`data` is a :class:`~kivy.properties.DictProperty` and defaults
    to None.
    '''

    def __init__(self, **kwargs):
        # sorted_keys, when given, must be an ordered sequence; otherwise it
        # is derived by sorting the data keys.
        if 'sorted_keys' in kwargs:
            if type(kwargs['sorted_keys']) not in (tuple, list):
                msg = 'DictAdapter: sorted_keys must be tuple or list'
                raise Exception(msg)
        else:
            self.sorted_keys = sorted(kwargs['data'].keys())

        super(DictAdapter, self).__init__(**kwargs)

        # Re-validate sorted_keys whenever it is reassigned.
        self.bind(sorted_keys=self.initialize_sorted_keys)

    def bind_triggers_to_view(self, func):
        # Notify the attached view when either the keys or the data change.
        self.bind(sorted_keys=func)
        self.bind(data=func)

    # self.data is paramount to self.sorted_keys. If sorted_keys is reset to
    # mismatch data, force a reset of sorted_keys to data.keys(). So, in order
    # to do a complete reset of data and sorted_keys, data must be reset
    # first, followed by a reset of sorted_keys, if needed.
    def initialize_sorted_keys(self, *args):
        # sorted_keys is "stale" if it references a key missing from data, or
        # (for-else: no missing key found) if it does not cover all of data.
        stale_sorted_keys = False
        for key in self.sorted_keys:
            if not key in self.data:
                stale_sorted_keys = True
                break
        else:
            if len(self.sorted_keys) != len(self.data):
                stale_sorted_keys = True
        if stale_sorted_keys:
            self.sorted_keys = sorted(self.data.keys())

        self.delete_cache()
        self.initialize_selection()

    # Override ListAdapter.update_for_new_data().
    def update_for_new_data(self, *args):
        self.initialize_sorted_keys()

    # Note: this is not len(self.data).
    def get_count(self):
        # sorted_keys may be a subset of data's keys, so it is authoritative.
        return len(self.sorted_keys)

    def get_data_item(self, index):
        # Out-of-range indices yield None rather than raising.
        if index < 0 or index >= len(self.sorted_keys):
            return None
        return self.data[self.sorted_keys[index]]

    # [TODO] Also make methods for scroll_to_sel_start, scroll_to_sel_end,
    # scroll_to_sel_middle.

    def trim_left_of_sel(self, *args):
        '''Cut list items with indices in sorted_keys that are less than the
        index of the first selected item, if there is a selection.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            # NOTE(review): selected view items are assumed to carry their
            # data key in `text` -- confirm against the item view class.
            selected_keys = [sel.text for sel in self.selection]
            first_sel_index = self.sorted_keys.index(selected_keys[0])
            desired_keys = self.sorted_keys[first_sel_index:]
            self.data = dict([(key, self.data[key]) for key in desired_keys])

    def trim_right_of_sel(self, *args):
        '''Cut list items with indices in sorted_keys that are greater than
        the index of the last selected item, if there is a selection.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            last_sel_index = self.sorted_keys.index(selected_keys[-1])
            desired_keys = self.sorted_keys[:last_sel_index + 1]
            self.data = dict([(key, self.data[key]) for key in desired_keys])

    def trim_to_sel(self, *args):
        '''Cut list items with indices in sorted_keys that are less than the
        index of the first selected item or greater than the index of the
        last selected item, if there is a selection. This preserves
        intervening list items within the selected range.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            first_sel_index = self.sorted_keys.index(selected_keys[0])
            last_sel_index = self.sorted_keys.index(selected_keys[-1])
            desired_keys = self.sorted_keys[first_sel_index:last_sel_index + 1]
            self.data = dict([(key, self.data[key]) for key in desired_keys])

    def cut_to_sel(self, *args):
        '''Same as trim_to_sel, but intervening list items within the selected
        range are also cut, leaving only list items that are selected.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            self.data = dict([(key, self.data[key]) for key in selected_keys])
|
thusser/rtml-parse | rtmlparse/__init__.py | Python | mit | 227 | 0.004405 | __desc | ription__ = "MASTER event parser and VOEvent publisher"
__url__ = "http://rtml.saao.ac.za/"
__author__ = | "Tim-Oliver Husser"
__contact__ = "husser@astro.physik.uni-goettingen.de"
__version__ = "0.1"
from rtml import RTML |
mvidalgarcia/indico | indico/modules/events/tracks/operations.py | Python | mit | 2,614 | 0.001913 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import session
from indico.core.db import db
from indico.modules.events.logs import EventLogKind, EventLogRealm
from indico.modules.events.tracks import logger
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.modules.events.tracks.models.tracks import Track
from indico.modules.events.tracks.settings import track_settings
def create_track(event, data):
    """Create a new track for *event* from the given form data and log it."""
    new_track = Track(event=event)
    new_track.populate_from_dict(data)
    db.session.flush()
    logger.info('Track %r created by %r', new_track, session.user)
    event.log(EventLogRealm.management, EventLogKind.positive, 'Tracks',
              'Track "{}" has been created.'.format(new_track.title), session.user)
    return new_track
def update_track(track, data):
    """Apply *data* to an existing track and record the change in the
    event log."""
    track.populate_from_dict(data)
    db.session.flush()
    logger.info('Track %r modified by %r', track, session.user)
    track.event.log(EventLogRealm.management, EventLogKind.change, 'Tracks',
                    'Track "{}" has been modified.'.format(track.title), session.user)


def delete_track(track):
    """Mark *track* for deletion; the DB delete happens on session commit."""
    db.session.delete(track)
    logger.info('Track deleted by %r: %r', session.user, track)


def update_program(event, data):
    """Store the event's track/program settings and log the update."""
    track_settings.set_multi(event, data)
    logger.info('Program of %r updated by %r', event, session.user)
    event.log(EventLogRealm.management, EventLogKind.change, 'Tracks', 'The program has been updated', session.user)
def create_track_group(event, data):
    """Create a new track group for *event* and log the creation.

    :param event: the event the track group belongs to
    :param data: dict of attributes accepted by ``populate_from_dict``
    :return: the newly created ``TrackGroup``
    """
    track_group = TrackGroup()
    track_group.event = event
    track_group.populate_from_dict(data)
    db.session.flush()
    logger.info('Track group %r created by %r', track_group, session.user)
    event.log(EventLogRealm.management, EventLogKind.positive, 'Track Groups',
              'Track group "{}" has been created.'.format(track_group.title), session.user)
    # Return the new group for parity with create_track(); callers that
    # ignore the return value are unaffected.
    return track_group
def update_track_group(track_group, data):
    """Apply *data* to an existing track group and record the change.

    :param track_group: the ``TrackGroup`` to update
    :param data: dict of attributes accepted by ``populate_from_dict``
    """
    track_group.populate_from_dict(data)
    db.session.flush()
    logger.info('Track group %r updated by %r', track_group, session.user)
    # Log with `change` (not `positive`) for a modification, consistent with
    # how update_track() logs its event; the message already says "updated".
    track_group.event.log(EventLogRealm.management, EventLogKind.change, 'Track Groups',
                          'Track group "{}" has been updated.'.format(track_group.title), session.user)
def delete_track_group(track_group):
    """Mark *track_group* for deletion; the DB delete happens on commit."""
    db.session.delete(track_group)
    logger.info('Track group deleted by %r: %r', session.user, track_group)
|
sonntagsgesicht/regtest | .aux/venv/lib/python3.9/site-packages/pygments/lexers/teal.py | Python | apache-2.0 | 3,519 | 0.001137 | # -*- coding: utf-8 -*-
"""
pygments.lexers.teal
~~~~~~~~~~~~~~~~~~~~
Lexer for TEAL.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, bygroups, include, words
from pygments.token import Comment, Name, Number, String, Text, Keyword
__all__ = ['TealLexer']
class TealLexer(RegexLexer):
    """
    For the `Transaction Execution Approval Language (TEAL)
    <https://developer.algorand.org/docs/reference/teal/specification/>`

    For more information about the grammar, see:
    https://github.com/algorand/go-algorand/blob/master/data/transactions/logic/assembler.go

    .. versionadded:: 2.9
    """
    name = 'teal'
    aliases = ['teal']
    filenames = ['*.teal']

    # Transaction fields, on-completion constants and global parameters.
    keywords = words({
        'Sender', 'Fee', 'FirstValid', 'FirstValidTime', 'LastValid', 'Note',
        'Lease', 'Receiver', 'Amount', 'CloseRemainderTo', 'VotePK',
        'SelectionPK', 'VoteFirst', 'VoteLast', 'VoteKeyDilution', 'Type',
        'TypeEnum', 'XferAsset', 'AssetAmount', 'AssetSender', 'AssetReceiver',
        'AssetCloseTo', 'GroupIndex', 'TxID', 'ApplicationID', 'OnCompletion',
        'ApplicationArgs', 'NumAppArgs', 'Accounts', 'NumAccounts',
        'ApprovalProgram', 'ClearStateProgram', 'RekeyTo', 'ConfigAsset',
        'ConfigAssetTotal', 'ConfigAssetDecimals', 'ConfigAssetDefaultFrozen',
        'ConfigAssetUnitName', 'ConfigAssetName', 'ConfigAssetURL',
        'ConfigAssetMetadataHash', 'ConfigAssetManager', 'ConfigAssetReserve',
        'ConfigAssetFreeze', 'ConfigAssetClawback', 'FreezeAsset',
        'FreezeAssetAccount', 'FreezeAssetFrozen',
        'NoOp', 'OptIn', 'CloseOut', 'ClearState', 'UpdateApplication',
        'DeleteApplication',
        'MinTxnFee', 'MinBalance', 'MaxTxnLife', 'ZeroAddress', 'GroupSize',
        'LogicSigVersion', 'Round', 'LatestTimestamp', 'CurrentApplicationID',
        'AssetBalance', 'AssetFrozen',
        'AssetTotal', 'AssetDecimals', 'AssetDefaultFrozen', 'AssetUnitName',
        'AssetName', 'AssetURL', 'AssetMetadataHash', 'AssetManager',
        'AssetReserve', 'AssetFreeze', 'AssetClawback',
    }, suffix=r'\b')

    # Restored from upstream pygments: extraction junk (" | ") had corrupted
    # this regex; an identifier is any run of non-whitespace, optionally
    # terminated by an inline "//" comment.
    identifier = r'[^ \t\n]+(?=\/\/)|[^ \t\n]+'
    newline = r'\r?\n'
    tokens = {
        'root': [
            include('whitespace'),
            # pragmas match specifically on the space character
            (r'^#pragma .*' + newline, Comment.Directive),
            # labels must be followed by a space,
            # but anything after that is ignored
            ('(' + identifier + ':' + ')' + '([ \t].*)',
             bygroups(Name.Label, Comment.Single)),
            (identifier, Name.Function, 'function-args'),
        ],
        'function-args': [
            include('whitespace'),
            (r'"', String, 'string'),
            (r'(b(?:ase)?(?:32|64) ?)(\(?[a-zA-Z0-9+/=]+\)?)',
             bygroups(String.Affix, String.Other)),
            (r'[A-Z2-7]{58}', Number),  # address
            (r'0x[\da-fA-F]+', Number.Hex),
            (r'\d+', Number.Integer),
            (keywords, Keyword),
            (identifier, Name.Attributes),  # branch targets
            (newline, Text, '#pop'),
        ],
        'string': [
            (r'\\(?:["nrt\\]|x\d\d)', String.Escape),
            (r'[^\\\"\n]+', String),
            (r'"', String, '#pop'),
        ],
        'whitespace': [
            (r'[ \t]+', Text),
            (r'//[^\n]+', Comment.Single),
        ],
    }
|
bndr/pycycle | tests/_projects/large_without_circle/a_module/b_module/b_file.py | Python | mit | 58 | 0.034483 | from c_module.c_file | im | port c_func
def b_func():
    """Placeholder used by the pycycle test project; does nothing."""
    return None
xsunfeng/cir | cir/phase5.py | Python | mit | 3,009 | 0.02559 | import json
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.utils impor | t timezone
from django.shortcuts import render_to_re | sponse
from cir.models import *
import claim_views
from cir.phase_control import PHASE_CONTROL
import utils
def get_statement_comment_list(request):
    """Render the current forum's comment thread as a JSON HTML fragment."""
    response = {}
    context = {}
    forum = Forum.objects.get(id = request.session['forum_id'])
    thread_comments = ForumComment.objects.filter(forum = forum)
    context['comments'] = thread_comments
    # Removed leftover debug statement (`print thread_comments`).
    response['forum_comment'] = render_to_string("phase5/forum-comment.html", context)
    return HttpResponse(json.dumps(response), mimetype='application/json')
def put_statement_comment(request):
    """Create a forum comment; an empty parent_id marks a root-level comment."""
    forum = Forum.objects.get(id=request.session['forum_id'])
    parent_id = request.REQUEST.get('parent_id')
    comment = ForumComment(
        author=request.user,
        text=request.REQUEST.get('text'),
        forum=forum,
        created_at=timezone.now(),
    )
    if parent_id != "":  # non-empty means "reply to an existing comment"
        comment.parent = ForumComment.objects.get(id=parent_id)
    comment.save()
    return HttpResponse(json.dumps({}), mimetype='application/json')
def vote_issue(request):
    """Record or update the current user's support/oppose vote on the forum."""
    forum = Forum.objects.get(id=request.session['forum_id'])
    vote, _created = ForumVote.objects.get_or_create(forum=forum, author=request.user)
    vote.reason = request.REQUEST.get('reason')
    # Anything other than the literal string "false" counts as support.
    vote.support = request.REQUEST.get('support') != "false"
    vote.save()
    return HttpResponse(json.dumps({}), mimetype='application/json')
def render_support_bar(request):
    """Return JSON tallies of support/oppose votes, plus the caller's own."""
    forum = Forum.objects.get(id=request.session['forum_id'])
    counts = {
        "num_support": ForumVote.objects.filter(forum=forum, support=True).count(),
        "num_oppose": ForumVote.objects.filter(forum=forum, support=False).count(),
    }
    if request.user.is_authenticated():
        me = request.user
        counts["my_num_support"] = ForumVote.objects.filter(forum=forum, support=True, author=me).count()
        counts["my_num_oppose"] = ForumVote.objects.filter(forum=forum, support=False, author=me).count()
    return HttpResponse(json.dumps(counts), mimetype='application/json')
def view_vote_result(request):
    """Render the table of all votes in the current forum as a JSON fragment."""
    author = request.user  # NOTE(review): unused; kept for parity with siblings
    forum = Forum.objects.get(id = request.session['forum_id'])
    response = {}
    context = {}
    context["entries"] = ForumVote.objects.filter(forum = forum)
    response["vote_result_table"] = render_to_string('phase5/vote-result-table.html', context)
    return HttpResponse(json.dumps(response), mimetype='application/json')
enfancemill/litchi | litchi/urls.py | Python | gpl-3.0 | 894 | 0 | """litchi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including anot | her URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
    url(r'^admin/', admin.site.urls),                                      # Django admin
    url(r'^captcha/', include('captcha.urls')),                            # django-simple-captcha
    url(r'^session/', include('apps.session.urls', namespace='session')),  # session app
]
|
gjermv/potato | sccs/gpx/dtmdata.py | Python | gpl-2.0 | 6,663 | 0.025514 | '''
Created on 14 Jun 2016
@author: gjermund.vingerhagen
'''
import numpy as np
import scipy.interpolate as intp
import linecache
import utmconverter as utm
def splitHead(inp):
    """Identity helper: returns *inp* unchanged."""
    return inp
def lineToArr(l1):
    """Parse one 30720-byte DEM profile block into a 1-D int array.

    The block consists of an initial 1024-byte record whose first 144 bytes
    are a header, followed by 29 further 1024-byte records of
    whitespace-separated integer elevation values.
    """
    chunks = [l1[144:1024]]
    chunks.extend(l1[1024 * i:1024 * (i + 1)] for i in range(1, 30))
    # np.fromstring(..., sep=' ') is deprecated; parse each record via
    # str.split and concatenate once (also avoids O(n^2) repeated np.append).
    parts = [np.array(c.split(), dtype=int) for c in chunks]
    return np.concatenate(parts)
def findClosestPoint(east, north):
    """Map a UTM (east, north) coordinate to grid indices in its DEM tile.

    Returns ``[east_index, north_index, filename, east_remainder,
    north_remainder, tile_min_east, tile_min_north]`` on the 10 m grid.
    Raises if no known DEM tile covers the point.
    """
    try:
        dtminfo = getDTMFile(east, north)
        eastLine = round((east - dtminfo[1]) // 10)
        northLine = round((north - dtminfo[2]) // 10)
        east_delta = (east - dtminfo[1]) % 10
        # BUG FIX: the north remainder must be relative to the tile's minimum
        # *northing* (dtminfo[2]); the original mistakenly used dtminfo[1].
        north_delta = (north - dtminfo[2]) % 10
        return [eastLine, northLine, dtminfo[0], east_delta, north_delta,
                dtminfo[1], dtminfo[2]]
    except Exception:
        raise Exception("Closest point has no DTM file ")
def readFile(filename):
    """Dump header fields of a DEM file to stdout (debug/inspection aid).

    The byte offsets below are assumed to follow the USGS DEM 'A' record
    layout -- verify against the DEM specification before relying on the
    labels printed here.
    """
    line1 = open("C:\\python\\dtms\\{}".format(filename), 'r').read(500000)
    print(line1[0:134])
    print(line1[150:156])
    print(line1[156:162])
    print(line1[162:168])
    print(line1[529:535])
    print(line1[535:541])
    print('{:9}{}'.format('MinEast:',line1[546:570]))
    print('{:9}{}'.format('MinNorth:',line1[570:594]))
    print(line1[594:618])
    print(line1[618:642])
    print(line1[642:666])
    print(line1[666:690])
    print(line1[690:714])
    print(line1[714:738])
    print(line1[738:762])
    print(line1[762:786])
    print('{:9}{}'.format('dy:',line1[816:828]))
    print('{:9}{}'.format('dx:',line1[828:840]))
    print('{:10}{}'.format('Rows:',line1[858:864]))
    print('-----')
    print()
    # Parsed but only used for the debug dump below.
    minEast = float(line1[546:570])
    minNorth = float(line1[570:594])
    # First profile ('B') record header.
    print(line1[1024+30720*0:1024+144+30720*0])
    #===============================================================================
    # print(line1[1168:2048])
    # print(line1[1024*2:1024*3])
    # print(line1[1024*4:1024*5])
    #===============================================================================
def getElevation(eastL, northL, dtmfile):
    """Read one elevation value (metres) from a DEM file.

    ``eastL``/``northL`` are 10 m grid indices within the tile.  Values are
    stored as 6-character decimetre fields, hence the final division by 10.
    """
    head = 1024        # file header ('A' record) size in bytes
    blockSize = 30720  # bytes per east-west profile block
    with open("C:\\python\\dtms\\{}".format(dtmfile), 'r') as fin:
        fin.seek(head + blockSize * eastL)
        data = fin.read(blockSize)
    # The first 1024-byte record holds 146 values after its 144-byte header;
    # subsequent records hold 170 values each.
    if northL < 146:
        s = 144 + northL * 6
    else:
        c = (northL - 146) // 170 + 1
        d = (northL - 146) % 170
        s = 1024 * c + d * 6
    return float(data[s:s + 6]) / 10
def getElevationArea(eastLmin, northLmin, eastLmax, northLmax, dtmfile):
    """Read a rectangle of raw elevation values (decimetre ints), one row
    per east index.

    NOTE(review): the east range is inclusive of ``eastLmax`` while the
    north range excludes ``northLmax`` -- preserved from the original;
    confirm this asymmetry is intended.
    """
    head = 1024        # file header ('A' record) size in bytes
    blockSize = 30720  # bytes per east-west profile block
    rect = []
    with open("C:\\python\\dtms\\{}".format(dtmfile), 'r') as fin:
        for eastLine in range(eastLmin, eastLmax + 1):
            line = []
            fin.seek(head + blockSize * eastLine)
            data = fin.read(blockSize)
            for northLine in range(northLmin, northLmax):
                # Same record layout as getElevation: 146 values in the
                # first 1024-byte record, 170 in each later record.
                if northLine < 146:
                    s = 144 + northLine * 6
                else:
                    c = (northLine - 146) // 170 + 1
                    d = (northLine - 146) % 170
                    s = 1024 * c + d * 6
                line.append(int(data[s:s + 6]))
            rect.append(line)
    return rect
def calculateEle(x,y,coordsys='utm'):
    """Return the interpolated terrain elevation at (x, y).

    With ``coordsys='latlon'`` the input is converted to UTM first;
    otherwise (x, y) are taken as UTM easting/northing.  Elevation is the
    bilinear interpolation of the four surrounding 10 m grid posts.
    """
    if coordsys == 'latlon':
        east, north, zone_number, zone_letter = utm.from_latlon(x, y)
    else:
        east,north = x,y
    try:
        p = findClosestPoint(east, north)
        dpx = p[3]
        dpy = p[4]
        # Elevations at the four corners of the enclosing grid cell.
        ele1 = getElevation(p[0], p[1],p[2])
        ele2 = getElevation(p[0]+1, p[1],p[2])
        ele3 = getElevation(p[0], p[1]+1,p[2])
        ele4 = getElevation(p[0]+1, p[1]+1,p[2])
        # Planar (triangle) interpolation kept for reference:
        #c_ele = getInterpolatedEle(ele1,ele2,ele3,ele4,[dpx,dpy])[2]
        d_ele = interpolateEle2(ele1,ele2,ele3,ele4,[dpx,dpy])
        return d_ele
    except Exception:
        # NOTE(review): swallows the original cause; consider exception chaining.
        raise Exception("Something went wrong")
def getInterpolatedEle(p1e=10, p2e=5, p3e=5, p4e=0, pxc=[5, 5]):
    """Planar (triangle) interpolation of elevation inside a 10x10 cell.

    The square cell is split along its anti-diagonal; the triangle that
    contains *pxc* (decided by whether x + y exceeds 10) defines the
    interpolation plane.  Returns the 3-D point [x, y, z] as a numpy array.
    """
    px, py = pxc[0], pxc[1]
    # Anchor the plane at the corner of the triangle containing (px, py).
    if px + py > 10:
        anchor = np.array([10, 10, p4e])
    else:
        anchor = np.array([0, 0, p1e])
    edge_a = np.array([10, 0, p2e]) - anchor
    edge_b = np.array([0, 10, p3e]) - anchor
    normal = np.cross(edge_a, edge_b)
    rel = np.array([px, py]) - anchor[:2]
    # Solve N . (rel_x, rel_y, dz) = 0 for the height offset dz.
    dz = -(normal[0] * rel[0] + normal[1] * rel[1]) / normal[2]
    return anchor + np.array([rel[0], rel[1], dz])
def interpolateEle2(p1e=10, p2e=5, p3e=5, p4e=0, pxc=[5, 5]):
    """Bilinear interpolation of elevation at *pxc* inside a 10x10 cell."""
    axis = np.array([0, 10])
    corners = np.array([[p1e, p3e], [p2e, p4e]])
    # kx=ky=1 makes the spline exactly bilinear.
    spline = intp.RectBivariateSpline(axis, axis, corners, kx=1, ky=1, s=0)
    return spline(pxc[0], pxc[1])[0][0]
def getDTMFile(east, north):
    """Return ``[filename, min_east, min_north]`` of the 50 km DEM tile
    covering (east, north).

    Raises when no known tile contains the point (the original silently
    fell through and returned ``None``, which crashed callers expecting a
    list).
    """
    dtmfile = getDTMdict()
    for key in dtmfile:
        min_east, min_north = dtmfile[key]
        if min_north <= north <= min_north + 50000 and \
                min_east <= east <= min_east + 50000:
            return [key, int(min_east), int(min_north)]
    raise Exception('DTM file not available')
def getDTMdict():
    """Catalogue of available 50 km DEM tiles: filename -> [min_east, min_north]."""
    return {
        '6404_3_10m_z32.dem': [399800, 6399900],
        '6404_4_10m_z32.dem': [399800, 6449800],
        '7005_2_10m_z32.dem': [549800, 6999800],
        '6503_3_10m_z32.dem': [299800, 6499800],
        '6903_1_10m_z32.dem': [349800, 6949800],
        '6904_4_10m_z32.dem': [399795, 6949795],
        '6505_4_10m_z32.dem': [499800, 6549800],
        '6504_1_10m_z32.dem': [449800, 6549800],
        '6604_2_10m_z32.dem': [449800, 6599800],
        '6605_3_10m_z32.dem': [499800, 6599800],
        '6603_2_10m_z32.dem': [349800, 6599800],
        '6506_1_10m_z32.dem': [649800, 6549800],
        '6506_2_10m_z32.dem': [649800, 6503000],
        '6506_3_10m_z32.dem': [599800, 6503000],
        '6506_4_10m_z32.dem': [599800, 6549800],
    }
def hasDTMFile(minEast, minNorth, maxEast, maxNorth):
    """Return True when a single known DEM tile covers the whole box.

    The original compared against a ``-1`` sentinel that ``getDTMFile``
    never returns, and crashed (TypeError) when no tile matched; any
    failure to locate a tile now simply yields False.
    """
    try:
        dtm = getDTMFile(minEast, minNorth)
    except Exception:
        return False
    if not dtm:
        return False
    return (maxEast - 50000) < dtm[1] and (maxNorth - 50000) < dtm[2]
if __name__ == "__main__":
readFile('6506_3_10m_z32.dem')
|
Parcks/core | test/domain/parse/test_installation_file_parser.py | Python | gpl-2.0 | 4,254 | 0.007522 | """
Scriptable Packages Installer - Parcks
Copyright (C) 2017 JValck - Setarit
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
Setarit - parcks[at]setarit.com
"""
from __future__ import absolute_import
import unittest, shutil, tempfile
from os import path
from src.domain.log.logger import Logger
from src.exceptions.invalid_installation_file_extension_error import InvalidInstallationFileExtensionError
import json
from src.domain.parse.installation_file_parser import InstallationFileParser
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from unittest.mock import patch
except ImportError:
from mock import patch
class TestInstallationFileParser(unittest.TestCase):
    """Tests for InstallationFileParser: valid, structurally invalid and
    wrongly-suffixed installation files.

    (Restores tokens corrupted by extraction artifacts: the method name
    ``create_valid_json`` and the ``open(tempJsonPath, 'w')`` call.)
    """

    def setUp(self):
        # One temp dir per test holding the three fixture files; silence logs.
        self.testDir = tempfile.mkdtemp()
        self.tempFile = self.create_test_install_file()
        self.invalidTempFile = self.create_invalid_test_install_file()
        self.invalid_extension_file = self.create_invalid_file_extension_install_file()
        Logger.disable_all()

    def tearDown(self):
        shutil.rmtree(self.testDir)
        Logger.enable()

    def create_test_install_file(self):
        """Write a valid .parcks file and return its path."""
        jsonData = self.create_valid_json()
        tempJsonPath = path.join(self.testDir, "test.parcks")
        with open(tempJsonPath, 'w') as ofile:
            json.dump(jsonData, ofile)
        return tempJsonPath

    def create_valid_json(self):
        """Return the parsed content of a minimal valid installation file."""
        JSON = """\
        {
            "name":"PHP installer",
            "install": [
                {"package": "php",
                 "post-installation": []
                }
            ]
        }
        """
        return json.loads(JSON)

    def create_invalid_test_install_file(self):
        """Write a .parcks file missing the required 'install' key."""
        JSON = """\
        {
            "name":"PHP installer"
        }
        """
        jsonData = json.loads(JSON)
        tempJsonPath = path.join(self.testDir, "invalidTest.parcks")
        with open(tempJsonPath, 'w') as ofile:
            json.dump(jsonData, ofile)
        return tempJsonPath

    def create_invalid_file_extension_install_file(self):
        """Write valid content under a .json (wrong) extension."""
        jsonData = self.create_valid_json()
        tempJsonPath = path.join(self.testDir, "invalid_extension.json")
        with open(tempJsonPath, 'w') as ofile:
            json.dump(jsonData, ofile)
        return tempJsonPath

    def test_parse_returns_software_catalog_on_valid_installation_file(self):
        parser = InstallationFileParser(self.tempFile)
        self.assertNotEqual(None, parser.parse())

    def test_parse_raises_file_not_found_error_on_non_existing_file(self):
        parser = InstallationFileParser("fakeFile.parcks")
        with self.assertRaises(FileNotFoundError):
            parser.parse()

    def test_parse_raises_key_error_on_invalid_install_file(self):
        parser = InstallationFileParser(self.invalidTempFile)
        with self.assertRaises(KeyError):
            parser.parse()

    def test_parse_sets_correct_software_catalog_name(self):
        parser = InstallationFileParser(self.tempFile)
        catalog = parser.parse()
        self.assertEqual("PHP installer", catalog.name)

    def test_validate_file_extension_raises_InvalidInstallationFileExtensionError_if_no_valid_extension(self):
        parser = InstallationFileParser(self.invalid_extension_file)
        with self.assertRaises(InvalidInstallationFileExtensionError):
            parser.validate_file_extension()

    @patch.object(InstallationFileParser, 'validate_file_extension')
    def test_parse_calls_validate_file_extension(self, mock):
        parser = InstallationFileParser(self.tempFile)
        parser.parse()
        self.assertEqual(1, mock.call_count)
|
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractMiratlsWordpressCom.py | Python | bsd-3-clause | 554 | 0.034296 |
def extractMiratlsWordpressCom(item):
    '''
    Parser for 'miratls.wordpress.com'

    Returns a release message for known tags, None for non-chapter posts,
    or False when no tag matches.  (Restores tokens corrupted by
    extraction artifacts in the tagmap literal and the return call.)
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
|
xczheng/swift-encrypt-compress-middleware | myswift/encrypt.py | Python | apache-2.0 | 3,355 | 0.004173 | from swift.common.swob import Request
from swift.common.utils import split_path
from Crypto.Cipher import AES
from Crypto import Random
import hashlib
AES_BLOCK = 32
def create_encrypt(raw, password):
    """AES-CBC encrypt *raw*; returns the random IV prepended to the ciphertext."""
    raw = _pad(raw)
    key = hashlib.sha256(password.encode()).digest()  # derive a 32-byte key
    iv = Random.new().read(AES.block_size)  # fresh random IV per call
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return iv + cipher.encrypt(raw)
def create_decrypt(enc, password):
    """Inverse of create_encrypt: split off the leading IV, decrypt, unpad."""
    key = hashlib.sha256(password.encode()).digest()
    iv = enc[:AES.block_size]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    # return _unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')
    return _unpad(cipher.decrypt(enc[AES.block_size:]))
def _pad(s):
    """Pad *s* to a multiple of AES_BLOCK bytes; the pad char encodes the pad length."""
    pad_len = AES_BLOCK - len(s) % AES_BLOCK
    return s + chr(pad_len) * pad_len
def _unpad(s):
return s[:-ord(s[len(s)-1:])]
class EncryptMiddleware(object):
    """
    Encrypt middleware used for object Encryption.

    PUTs are encrypted transparently; GETs are decrypted unless the
    request carries an ``encrypt`` query parameter.  Encrypted objects
    are stored under ``path + encrypt_suffix``.  (Restores tokens
    corrupted by extraction artifacts in the container-listing branch.)
    """

    def __init__(self, app, conf):
        self.app = app
        self.conf = conf
        self.encrypt_suffix = conf.get('encrypt_suffix', '')
        self.password = conf.get('password', '')

    def __call__(self, env, start_response):
        request = Request(env)
        try:
            (version, account, container, objname) = split_path(request.path_info, 1, 4, True)
        except ValueError:
            # Not an object-store path; pass through untouched.
            response = request.get_response(self.app)
            return response(env, start_response)
        if not objname:
            # Account/container request: nothing to decrypt, but hide the
            # suffix from container listings unless ?encrypt is passed.
            response = request.get_response(self.app)
            if container:
                if not request.params.has_key('encrypt'):
                    response.body = response.body.replace(self.encrypt_suffix, '')
            return response(env, start_response)
        original_path_info = request.path_info
        request.path_info += self.encrypt_suffix
        if request.method == 'GET':
            if not request.params.has_key('encrypt'):
                # we need to decrypt
                response = request.get_response(self.app)
                if response.status_int == 404:
                    # it may not be encrypted, if admin added the encrypt filter
                    # after some files have been uploaded
                    request.path_info = original_path_info
                    response = request.get_response(self.app)
                    return response(env, start_response)
                response.body = create_decrypt(response.body, self.password)
                return response(env, start_response)
        if request.method == 'PUT':
            if hasattr(request, 'body_file'):
                # Streamed upload: drain the body before encrypting.
                data = ""
                while True:
                    chunk = request.body_file.read()
                    if not chunk:
                        break
                    data += chunk
                encrypt_data = create_encrypt(data, self.password)
            else:
                encrypt_data = create_encrypt(request.body, self.password)
            if encrypt_data:
                request.body = encrypt_data
        response = request.get_response(self.app)
        return response(env, start_response)
def filter_factory(global_conf, **local_conf):
    """Paste filter factory: returns a callable wrapping an app in EncryptMiddleware."""
    conf = dict(global_conf)
    conf.update(local_conf)

    def encrypt_filter(app):
        return EncryptMiddleware(app, conf)

    return encrypt_filter
|
lawrencebenson/thefuck | tests/shells/conftest.py | Python | mit | 459 | 0 | import pytest
@pytest.fixture
def builtins_open(mocker):
    """Patch the built-in ``open`` so tests never touch the real filesystem."""
    return mocker.patch('six.moves.builtins.open')
@pytest.fixture
def isfile(mocker):
    """Make ``os.path.isfile`` report every path as existing."""
    return mocker.patch('os.path.isfile', return_value=True)
@pytest.fixture
@pytest.mark.usefixtures('isfile')
def history_lines(mocker):
    """Return a helper that stubs ``io.open`` to yield the given lines."""
    def aux(lines):
        mock = mocker.patch('io.open')
        # Any `with io.open(...)` block will read back *lines*.
        mock.return_value.__enter__ \
            .return_value.readlines.return_value = lines
    return aux
|
ngtuna/kubeless | docker/runtime/python-3.6/http-trigger/kubeless.py | Python | apache-2.0 | 2,152 | 0.003717 | #!/usr/bin/env python
import os
import imp
from multiprocessing import Process, Queue
import bottle
import prometheus_client as prom
mod = imp.load_source('function',
'/kubeless/%s.py' % os.getenv('MOD_NAME'))
func = getattr(mod, os.getenv('FUNC_HANDLER'))
func_port = os.getenv('FUNC_PORT', 8080)
timeout = float(os.getenv('FUNC_TIMEOUT', 180))
app = application = bottle.app()
func_hist = prom.Histogram('function_duration_seconds',
'Duration of user function in seconds',
['method'])
func_calls = prom.Counter('function_calls_total',
'Number of calls to user function',
['method'])
func_errors = prom.Counter('function_failures_total',
'Number of exceptions in user function',
['method'])
def funcWrap(q, req):
    """Child-process entry point: run the user handler and queue its result."""
    result = func() if req is None else func(req)
    q.put(result)
@app.route('/', method=['GET', 'POST', 'PATCH', 'DELETE'])
def handler():
    """Run the user function in a child process, enforcing FUNC_TIMEOUT.

    Records Prometheus call/duration/error metrics per HTTP method and
    returns HTTP 408 when the function exceeds the timeout.
    """
    req = bottle.request
    method = req.method
    func_calls.labels(method).inc()
    with func_errors.labels(method).count_exceptions():
        with func_hist.labels(method).time():
            q = Queue()
            p = Process(target=funcWrap, args=(q, bottle.request,))
            p.start()
            p.join(timeout)
            # If the child is still alive the timeout expired: kill it.
            if p.is_alive():
                p.terminate()
                p.join()
                return bottle.HTTPError(408, "Timeout while processing the function")
            else:
                return q.get()
@app.get('/healthz')
def healthz():
    """Liveness-probe endpoint."""
    return 'OK'
@app.get('/metrics')
def metrics():
    """Expose Prometheus metrics in text exposition format."""
    bottle.response.content_type = prom.CONTENT_TYPE_LATEST
    return prom.generate_latest(prom.REGISTRY)
if __name__ == '__main__':
    import logging
    import sys
    import requestlogger
    # Wrap the app with Apache-style request logging to stdout.
    loggedapp = requestlogger.WSGILogger(
        app,
        [logging.StreamHandler(stream=sys.stdout)],
        requestlogger.ApacheFormatter())
    bottle.run(loggedapp, server='cherrypy', host='0.0.0.0', port=func_port)
|
jf87/smap | python/tinyos/__init__.py | Python | bsd-2-clause | 1,659 | 0 | #
# Copyright (c) 2005
# The President and Fellows of Harvard College.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the University nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OR CONTRIBUTORS BE LIABLE
# FOR A | NY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
| # OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# Author: Geoffrey Mainland <mainland@eecs.harvard.edu>
#
__all__ = ["message", "packet", "utils", "tossim"]
|
openconnectome/ocptilecache | tilecache/migrations/0003_auto_20160317_1521.py | Python | apache-2.0 | 512 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-17 19:21
from __future__ import unicode_literals
from django.db import mig | rations
class Migration(migrations.Migration):
    """Auto-generated (Django 1.9.4): mark Channel as managed and rename its
    database table to ``channels``.  (Restores the ``operations`` token
    corrupted by an extraction artifact.)"""

    dependencies = [
        ('tilecache', '0002_auto_20160317_1519'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='channel',
            options={'managed': True},
        ),
        migrations.AlterModelTable(
            name='channel',
            table='channels',
        ),
    ]
|
dronekit/dronekit-python | dronekit/test/sitl/test_waypoints.py | Python | apache-2.0 | 4,352 | 0.001838 | import time
from dronekit import connect, LocationGlobal, Command
from pymavlink import mavutil
from dronekit.test import with_sitl
from nose.tools import assert_not_equals, assert_equals
@with_sitl
def test_empty_clear(connpath):
    """clear()+upload() of an empty mission must succeed and stay empty."""
    vehicle = connect(connpath)
    # Calling clear() on an empty object should not crash.
    vehicle.commands.clear()
    vehicle.commands.upload()
    assert_equals(len(vehicle.commands), 0)
    vehicle.close()
@with_sitl
def test_set_home(connpath):
    """Setting home_location must round-trip through a mission download."""
    vehicle = connect(connpath, wait_ready=True)
    # Wait for home position to be real and not 0, 0, 0
    # once we request it via cmds.download()
    time.sleep(10)
    vehicle.commands.download()
    vehicle.commands.wait_ready()
    assert_not_equals(vehicle.home_location, None)
    # Note: If the GPS values differ heavily from EKF values, this command
    # will basically fail silently. This GPS coordinate is tailored for that
    # the with_sitl initializer uses to not fail.
    vehicle.home_location = LocationGlobal(-35, 149, 600)
    vehicle.commands.download()
    vehicle.commands.wait_ready()
    assert_equals(vehicle.home_location.lat, -35)
    assert_equals(vehicle.home_location.lon, 149)
    assert_equals(vehicle.home_location.alt, 600)
    vehicle.close()
@with_sitl
def test_parameter(connpath):
    """Exercise mission download/upload, iteration, slicing and `next`.

    (Restores tokens corrupted by extraction artifacts in the Command
    list: the ``for command in`` header and the 149.175839 longitude.)
    """
    vehicle = connect(connpath, wait_ready=True)

    # Home should be None at first.
    assert_equals(vehicle.home_location, None)

    # Wait for home position to be real and not 0, 0, 0
    # once we request it via cmds.download()
    time.sleep(10)

    # Initial
    vehicle.commands.download()
    vehicle.commands.wait_ready()
    assert_equals(len(vehicle.commands), 0)
    assert_not_equals(vehicle.home_location, None)

    # Save home for comparison.
    home = vehicle.home_location

    # After clearing
    vehicle.commands.clear()
    vehicle.commands.upload()
    vehicle.commands.download()
    vehicle.commands.wait_ready()
    assert_equals(len(vehicle.commands), 0)

    # Upload
    for command in [
        Command(0, 0, 0, 0, 16, 1, 1, 0.0, 0.0, 0.0, 0.0, -35.3605, 149.172363, 747.0),
        Command(0, 0, 0, 3, 22, 0, 1, 0.0, 0.0, 0.0, 0.0, -35.359831, 149.166334, 100.0),
        Command(0, 0, 0, 3, 16, 0, 1, 0.0, 0.0, 0.0, 0.0, -35.363489, 149.167213, 100.0),
        Command(0, 0, 0, 3, 16, 0, 1, 0.0, 0.0, 0.0, 0.0, -35.355491, 149.169595, 100.0),
        Command(0, 0, 0, 3, 16, 0, 1, 0.0, 0.0, 0.0, 0.0, -35.355071, 149.175839, 100.0),
        Command(0, 0, 0, 3, 113, 0, 1, 0.0, 0.0, 0.0, 0.0, -35.362666, 149.178715, 22222.0),
        Command(0, 0, 0, 3, 115, 0, 1, 2.0, 22.0, 1.0, 3.0, 0.0, 0.0, 0.0),
        Command(0, 0, 0, 3, 16, 0, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
    ]:
        vehicle.commands.add(command)
    vehicle.commands.upload()

    # After upload
    vehicle.commands.download()
    vehicle.commands.wait_ready()
    assert_equals(len(vehicle.commands), 8)

    # Test iteration.
    count = 0
    for cmd in vehicle.commands:
        assert_not_equals(cmd, None)
        count += 1
    assert_equals(count, 8)

    # Test slicing
    count = 3
    for cmd in vehicle.commands[2:5]:
        assert_not_equals(cmd, None)
        assert_equals(cmd.seq, count)
        count += 1
    assert_equals(count, 6)

    # Test next property
    assert_equals(vehicle.commands.next, 0)
    vehicle.commands.next = 3
    while vehicle.commands.next != 3:
        time.sleep(0.1)
    assert_equals(vehicle.commands.next, 3)

    # Home should be preserved
    assert_equals(home.lat, vehicle.home_location.lat)
    assert_equals(home.lon, vehicle.home_location.lon)
    assert_equals(home.alt, vehicle.home_location.alt)

    vehicle.close()
@with_sitl
def test_227(connpath):
    """
    Tests race condition when downloading items
    """
    vehicle = connect(connpath, wait_ready=True)
    def assert_commands(count):
        # Download + wait, then check the mission length is as expected.
        vehicle.commands.download()
        vehicle.commands.wait_ready()
        assert_equals(len(vehicle.commands), count)
    assert_commands(0)
    vehicle.commands.add(Command(0, 0, 0, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
                                 mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, 0, 0, 0, 0, 0, 10, 10,
                                 10))
    vehicle.flush()  # Send commands
    assert_commands(1)
    vehicle.close()
|
lavish/drs | snippets/tracker/tracker.py | Python | mit | 1,608 | 0.005597 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division
from time import sleep
from ev3.ev3dev import Motor
from ev3.lego import LargeMotor
from ev3.lego import ColorSensor
import sys
# left motor
motor_b = LargeMotor(port = Motor.PORT.B)
# right motor
motor_c = LargeMotor(port = Motor.PORT.C)
color = ColorSensor()
tic = 0.01
def main():
    """Proportional line follower: steer toward the black/white edge.

    (Restores the ``(black - gray)`` divisor corrupted by an extraction
    artifact in the turn-right branch.)
    """
    speed = 30
    black = 16  # calibrated reflectance reading on the line
    white = 21  # calibrated reflectance reading off the line
    gray = (black + white) // 2  # edge set-point
    motor_b.run_forever(speed, regulation_mode=False)
    motor_c.run_forever(speed, regulation_mode=False)
    while True:
        try:
            sleep(tic)
            reflect = color.reflect
            if reflect > gray:
                # turn left: slow the left motor proportionally to the error,
                # clamped to [-speed, speed]
                cur_speed = int(round(max(min((white - reflect) * (speed * 2) / (white - gray) - speed, speed), -speed)))
                motor_b.run_forever(cur_speed, regulation_mode=False)
                motor_c.run_forever(speed, regulation_mode=False)
            elif reflect < gray:
                # turn right: same control law applied to the right motor
                cur_speed = int(round(max(min((black - reflect) * (speed * 2) / (black - gray) - speed, speed), -speed)))
                motor_b.run_forever(speed, regulation_mode=False)
                motor_c.run_forever(cur_speed, regulation_mode=False)
            else:
                # go straight
                motor_b.run_forever(speed, regulation_mode=False)
                motor_c.run_forever(speed, regulation_mode=False)
        except KeyboardInterrupt:
            motor_b.stop()
            motor_c.stop()
            sys.exit()
if __name__ == '__main__':
main()
|
PythonT/Crawler | pickling.py | Python | apache-2.0 | 800 | 0.016949 | #!/usr/bin/python
#coding:utf-8
# Filename: pickling.py
import pickle
# the name of the file where we will store the object
shoplistfile = 'shoplist.data'
# the list of things to buy
shoplist = ['apple', 'mango', 'carrot']
# Write to the file
f = open(shoplistfile, 'wb')
pickle.dump(shoplist, f) # dump the object to the file
f.close()
del shoplist # destroy the shoplist variable
# Retrieve the object from the file
f = open(shoplistfile, 'rb')
storedlist = pickle.load(f) # load the object from the file
print(storedlist)
# To store an object in a file, we must first open the file in 'wb'
# (write binary) mode and then call pickle.dump(). This process is called
# pickling the object. Next, pickle.load() retrieves the object from the
# file again. This process is called unpickling the object.
frankwiles/django-pagebits | pagebits/utils.py | Python | bsd-3-clause | 171 | 0.005848 | fro | m django.conf import settings
def bitgroup_cache_key(slug):
    """Build the cache key for a page-bit group, honouring PAGEBIT_CACHE_PREFIX."""
    prefix = getattr(settings, 'PAGEBIT_CACHE_PREFIX', 'pagebits')
    return "%s:%s" % (prefix, slug)
|
SenseHawk/dispatch | app.py | Python | apache-2.0 | 2,442 | 0.009009 | #!flask/bin/python
import json
import sqlite3

from flask import Flask, abort, render_template, request, send_from_directory

import config
import emails
from data import func
# FLASK part
app = Flask(__name__)
@app.route('/send', methods=['POST'])
def capture_post_send():
    """POST /send: relay a JSON-described email via send_email.

    (Restores tokens corrupted by extraction artifacts in the route
    decorator and the ``data['email_from']`` argument.)
    """
    if not request.json or not 'subject' in request.json:
        abort(400)
    data = request.get_json()
    send = send_email(data['html'], data['raw'], data['subject'], data['email_from'], data['email_to'])
    return send
def send_email(html, raw, subject, email_from, email_to):
    """Send a multipart (text+html) email to the comma-separated *email_to* list."""
    email_to = str(email_to.encode('utf8'))  # NOTE(review): on Python 3 this yields "b'...'"; verify intent
    email_to = email_to.split(",")
    # Sending the email
    email = emails.Email(to=email_to, from_addr=email_from, subject=subject)
    email.text(raw)
    email.html(html)
    email.send()
    return "Sent", 201
@app.route('/add_template', methods=['POST'])
def add_template():
    """POST /add_template: persist a new email template; returns its id with 201."""
    if not request.json or not 'subject' in request.json:
        abort(400)
    data = request.get_json()
    email_type = data['email_type']
    subject = data['subject']
    html = data['html']
    raw = data['raw']
    email_from = data['email_from']
    id = func.template_create(email_type, subject, html, raw, email_from)
    return str(id), 201
@app.route('/get_templates', methods=['GET'])
def get_template():
    """GET /get_templates: list all stored templates as JSON."""
    templates = func.get_templates()
    return json.dumps(templates)
@app.route('/get_template_variables', methods=['POST'])
def get_template_vars():
    """POST: list the substitution variables of template ``id`` as JSON."""
    if not request.json or not 'id' in request.json:
        abort(400)
    data = request.get_json()
    variables = func.get_template_variables(data['id'])
    return json.dumps(variables)
@app.route('/send_template_email', methods=['POST'])
def send_template_email():
    """POST: render template ``id`` with ``variables`` and email it to ``to``."""
    if not request.json or not 'id' in request.json:
        abort(400)
    data = request.get_json()
    email_vars = func.replace_and_return(data['id'],data['variables'])
    send_email(email_vars['html'], email_vars['raw'], email_vars['subject'], email_vars['email_from'], data['to'])
    return json.dumps(email_vars)
@app.route('/admin/templates')
def templates():
    """Admin UI page listing the stored templates."""
    return render_template('/admin/templates.html', name='name')
@app.route('/static/<path:path>')
def send_js(path):
    """Serve a static asset from /static via flask's send_from_directory."""
    return send_from_directory('/static', path)
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True) |
amancevice/agador | agador/microservice.py | Python | mit | 1,138 | 0.000879 | """ Agador Metaservice
Usage:
agador [options]
Options:
-c --config URI # Service config URI
-d --debug # Run in debug mode
-h --host HOST # Host IP [default: 0.0.0.0]
-p --port PORT # Port no [default: 8500]
"""
import furi
from envopt import envopt
from flask import Flask
from flask imp | ort jsonify
from . import __version__
APP = Flask("Agador")
OPT = envopt(__doc__, env_prefix="MRMET")
def config():
    """Load the service map from the configured ``--config`` URI.

    Returns a dict of service name -> location, or an empty dict when no
    config URI is set or it cannot be mapped.
    """
    try:
        return dict(furi.map(OPT["--config"]))
    except AttributeError:
        # NOTE(review): presumably raised when OPT["--config"] is None
        # (no --config given), so furi.map gets a non-string — TODO confirm.
        return {}
@APP.route("/")
def version():
""" Health Check. """
| return jsonify(version=__version__, services=config())
@APP.route("/<svc>")
def service(svc):
""" Get microservice. """
return jsonify(service=config().get(svc, {}))
def runserver():
    """Parse CLI options and start the Flask microservice.

    Strips the ``--`` prefix from docopt-style option names, coerces the
    port to int, drops the ``config`` option (consumed by config()), and
    hands the rest to Flask's APP.run().
    """
    # Strip the leading "--" so keys match Flask's run() keyword arguments.
    # NOTE(review): OPT.iteritems() is Python 2 only — this module appears
    # to target Python 2; use .items() if ever ported.
    opts = dict((key.lstrip("-"), val) for key, val in OPT.iteritems())
    opts["port"] = int(opts["port"])
    # "config" is not a Flask run() argument; it is consumed via config().
    del opts["config"]
    # Eagerly load the config once so failures surface at startup.
    config()
    # Start engine!
    APP.run(**opts)
|
plotly/python-api | packages/python/plotly/plotly/validators/scatter3d/marker/_symbol.py | Python | mit | 863 | 0.001159 | import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="symbol", parent_name="scatter3d.marker", **kwargs):
super(SymbolValidator, self). | __init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop(
"values",
[
"circle",
"circle-open",
"square",
"square-open",
"diamond",
| "diamond-open",
"cross",
"x",
],
),
**kwargs
)
|
operasoftware/dragonfly-build-tools | df2/codegen/node.py | Python | apache-2.0 | 4,544 | 0.006382 | INDENT = " "
def escape_text_html(str):
    """Escape character data for use as HTML element content.

    Ampersands are replaced first so already-escaped output is not
    double-mangled in the wrong order; then ``<`` is escaped.
    """
    escaped = str.replace("&", "&amp;")
    return escaped.replace("<", "&lt;")
def escape_attr_html(str):
    """Escape a value for a double-quoted HTML attribute.

    Only the double quote is replaced; the serializer always emits
    attributes wrapped in double quotes.
    """
    return str.replace('"', "&quot;")
class Node(object):
    """Base class for DOM-like tree nodes (Text, Element, Root).

    Subclasses set `type` to one of the kind constants below; container
    subclasses maintain a `children` list.  `parent` is the owning
    container node, or None while the node is detached.
    """

    # Node-kind constants; subclasses override `type` with one of these.
    ELEMENT = 1
    TEXT = 2
    ROOT = 3
    type = 0
    # Owning container node, or None for a detached node.
    parent = None

    def insert_before(self, node):
        """Insert `node` directly before self among self's siblings.

        Detaches `node` from any previous parent first so it is never
        linked into two places at once.  Returns `node`.
        """
        if node.parent: node.parent.remove(node)
        children = self.parent.children
        children.insert(children.index(self), node)
        node.parent = self.parent
        return node

    def insert_after(self, node):
        """Insert `node` directly after self among self's siblings.

        Returns `node`.
        """
        # When self is the last child, `self.next` is None and the attribute
        # access raises AttributeError, so fall back to appending.
        try: self.next.insert_before(node)
        except AttributeError: self.parent.append(node)
        return node

    @property
    def depth(self):
        """Number of ancestors above self (0 for a detached or root node)."""
        depth = 0
        node = self
        while node.parent:
            depth += 1
            node = node.parent
        return depth

    @property
    def next(self):
        """The following sibling, or None when self is last or detached."""
        try:
            children = self.parent.children
            return children[children.index(self) + 1]
        except (AttributeError, IndexError): return None

    @property
    def previous(self):
        """The preceding sibling, or None when self is first or detached.

        Bug fix: the previous implementation indexed `children[index - 1]`
        unconditionally, so for the first child (index 0) the -1 wrapped
        around and returned the *last* sibling instead of None.
        """
        try:
            children = self.parent.children
            index = children.index(self)
            return children[index - 1] if index > 0 else None
        except AttributeError: return None

    @property
    def is_text(self): return self.type == Node.TEXT

    @property
    def is_element(self): return self.type == Node.ELEMENT

    @property
    def is_root(self): return self.type == Node.ROOT

    def __str__(self):
        # Subclasses implement serialize(); -1 offsets the node's own depth
        # so top-level output starts unindented — TODO confirm against
        # Element.serialize's indent arithmetic.
        return self.serialize(-1)
class Text(Node):
    """A leaf node holding character data."""

    type = Node.TEXT
    name = "#text"

    def __init__(self, value=""):
        # The raw (unescaped) character data of this node.
        self.value = value

    def split(self, pos):
        """Split this text node at `pos` into two siblings.

        Self keeps value[:pos]; a new Text node with value[pos:] is
        inserted directly after self (so self must have a parent) and
        returned.
        """
        text = Text(self.value[pos:])
        self.insert_after(text)
        self.value = self.value[0:pos]
        return text

    def serialize(self, initial_depth=0):
        """Return the value escaped for HTML text content.

        `initial_depth` is accepted for interface parity with
        Element.serialize but is unused for text nodes.
        """
        return escape_text_html(self.value)
class Element(Node):
type = Node.ELEMENT
BLOCKLEVELS = ["ul", "li", "div", "p", "h2", "pre", "ol", "table", "tr", "td", "th"]
def __init__(self, name, text=""):
self.children = []
self.name = name
self.attrs = {}
if text: self.append(Text(text))
def append(self, node):
if node.parent: node.parent.remove(node)
node.parent = self
self.children.append(node)
return node
def remove(self, node):
try: self.children.pop(self.children.index(node))
except IndexError: pass
return node
def normalize(self):
node = self.first_child
text_node = None
while node:
if node.is_text:
if text_node:
text_node.value += node.parent.remove(node).value
node = text_node
else: text_node = node
else: text_node = None
node = node.next
def set_attr(self, key, value):
self.attrs[key] = value
@property
def first_child(self):
try: return self.children[0]
except IndexError: return None
@property
def is_blocklevel(self):
return self.name in self.BLOCKLEVELS
@property
def contains_blocklevel(self):
for child in self.children:
if child.is_element and (child.is_blocklevel or child.contains_blocklevel):
return True
return False
@property
def text_content(self):
text = []
for child in self.children:
if child | .is_text: text.append(child.value)
elif child.is_element: text.append(child.text_content)
return "".join(text)
@text_content.setter
def text_content(self, value):
self.children = [Text(value)]
def serialize(self, initial_depth=0):
attrs = "".join((" %s=\"%s\"" % (key, escape_attr_html(value)) for key, value in self.attrs.items()))
content = "".join((child.serialize(initial_depth) for child in self.children))
name = self.name
indent | = (initial_depth + self.depth) * INDENT
if self.contains_blocklevel:
return "\n%s<%s%s>%s\n%s</%s>" % (indent, name, attrs, content, indent, name)
if self.is_blocklevel:
return "\n%s<%s%s>%s</%s>" % (indent, self.name, attrs, content, self.name)
return "<%s%s>%s</%s>" % (self.name, attrs, content, self.name)
class Root(Element):
    """The document root: a tag-less container that serializes only its
    children."""

    type = Node.ROOT
    name = "#root"

    def __init__(self):
        # Deliberately does not call Element.__init__ — a root has no tag
        # name, attributes, or initial text; only a child list.
        self.children = []

    def serialize(self, initial_depth=0):
        """Concatenate the serialization of every child, with no wrapper
        tag of its own."""
        return "".join((child.serialize(initial_depth) for child in self.children))
|
fgaudin/aemanager | core/templatetags/filetags.py | Python | agpl-3.0 | 268 | 0.003731 | import os
from django import template
from django.template.default | filters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def base | name(path):
"""
Returns os.path.basename from a path
"""
return os.path.basename(path)
|
steveb/heat | heat/tests/test_validate.py | Python | apache-2.0 | 54,212 | 0 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_messaging.rpc import dispatcher
import six
import webob
from heat.common import exception
from heat.common.i18n import _
from heat.common import template_format
from heat.engine.clients.os import glance
from heat.engine.clients.os import nova
from heat.engine import environment
from heat.engine.hot import template as hot_tmpl
from heat.engine import resources
from heat.engine import service
from heat.engine import stack as parser
from heat.engine import template as tmpl
from heat.tests import common
from heat.tests.openstack.nova import fakes as fakes_nova
from heat.tests import utils
test_template_volumeattach = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"DeletionPolicy": "Delete",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": "test_KeyName"
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/%s"
}
}
}
}
'''
test_template_ref = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2KeyPair",
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName" }
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "%s" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdb"
}
}
}
}
'''
test_template_findinmap_valid = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2KeyPair",
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName" }
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdb"
}
}
}
}
'''
test_template_findinmap_invalid = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2KeyPair",
"Type" : "String"
}
},
"Mappings" : {
"AWSInstanceType2Arch" : {
"t1.micro" : { "Arch" : "64" },
"m1.small" : { "Arch" : "64" },
"m1.medium" : { "Arch" : "64" },
"m1.large" : { "Arch" : "64" },
"m1.xlarge" : { "Arch" : "64" },
"m2.xlarge" : { "Arch" : "64" },
"m2.2xlarge" : { "Arch" : "64" },
"m2.4xlarge" : { "Arch" : "64" },
"c1.medium" : { "Arch" : "64" },
"c1.xlarge" : { "Arch" : "64" },
"cc1.4xlarge" : { "Arch" : "64HVM" },
"cc2.8xlarge" : { "Arch" : "64HVM" },
"cg1.4xlarge" : { "Arch" : "64HVM" }
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId" : {
"Fn::FindInMap" : [
"DistroArch2AMI", { "Ref" : "LinuxDistribution" },
{ "Fn::FindInMap" : [
"AWSInstanceType2Arch",
{ "Ref" : "InstanceType" }, "Arch" ] } ]
},
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName"}
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "6",
"AvailabilityZone" : "nova"
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdb"
}
}
}
}
'''
test_template_invalid_resources = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "AWS CloudFormation Sample Template for xyz.",
"Parameters" : {
"InstanceType" : {
"Description" : "Defined instance type",
"Type" : "String",
"Default" : "node.ee",
"AllowedValues" : ["node.ee", "node.apache", "node.api"],
"ConstraintDescription" : "must be a valid instance type."
}
},
"Resources" : {
"Type" : "AWS::EC2::Instance"
}
}
'''
test_template_invalid_property = '''
{
"AWSTemplateFormatVersion" : "2010-09-09 | ",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2 KeyPai",
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName" },
"UnknownProperty": "unk | nown"
}
}
}
}
'''
test_template_unimplemented_property = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2KeyPair",
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName" },
"SourceDestCheck": "false"
}
}
}
}
'''
test_template_invalid_deletion_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2KeyPair",
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"DeletionPolicy": "Destroy",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName" }
}
}
}
}
'''
test_template_snapshot_deletion_policy = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Parameters" : {
"KeyName" : {
"Description" : "Name of an existing EC2KeyPair",
"Type" : "String"
}
},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"DeletionPolicy": "Snapshot",
"Properties": {
"ImageId": "image_name",
"InstanceType": "m1.large",
"KeyName": { "Ref" : "KeyName" }
}
}
}
}
'''
test_template_volume_snapshot = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "test.",
"Resources" : {
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"DeletionPoli |
sander76/home-assistant | tests/components/onewire/test_switch.py | Python | apache-2.0 | 2,860 | 0.001049 | """Tests for 1-Wire devices connected on OWServer."""
import copy
from unittest.mock import patch
import pytest
from homeassistant.components.onewire.switch import DEVICE_SWITCHES
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TOGGLE, STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from . import setup_onewire_patched_owserver_integration, setup_owproxy_mock_devices
from .const import MOCK_OWPROXY_DEVICES
from tests.common import mock_registry
MOCK_SWITCHES = {
key: value
for (key, value) in MOCK_OWPROXY_DEVICES.items()
if SWITCH_DOMAIN in value
}
@pytest.mark.parametrize("device_id", MOCK_SWITCHES.keys())
@patch("homeassistant.components.onewire.onewirehub.protocol.proxy")
async def test_owserver_switch(owproxy, hass, device_id):
"""Test for 1-Wire switch.
This test forces all entities to be enabled.
"""
await async_setup_component(hass, "persistent_notification", {})
entity_registry = mock_registry(hass)
setup_owproxy_mock_devices(owproxy, SWITCH_DOMAIN, [device_id])
mock_device = MOCK_SWITCHES[device_id]
expected_entities = mock_device[SW | ITCH_DOMAIN]
# Force enable switches
patch_device_switches = copy.deepcopy(DEVICE_SWITCHES)
for item in patch_device_switches[device_id[0:2]]:
item.entity_registry_enabled_default = True
with patch(
"homeassistant.components.onewire.PLATFORMS", [SWITCH_DOMAIN]
), patch.dict(
"homeassistant.components.onewire.switch.DEVICE_SWITCHES", patc | h_device_switches
):
await setup_onewire_patched_owserver_integration(hass)
await hass.async_block_till_done()
assert len(entity_registry.entities) == len(expected_entities)
for expected_entity in expected_entities:
entity_id = expected_entity["entity_id"]
registry_entry = entity_registry.entities.get(entity_id)
assert registry_entry is not None
state = hass.states.get(entity_id)
assert state.state == expected_entity["result"]
if state.state == STATE_ON:
owproxy.return_value.read.side_effect = [b" 0"]
expected_entity["result"] = STATE_OFF
elif state.state == STATE_OFF:
owproxy.return_value.read.side_effect = [b" 1"]
expected_entity["result"] = STATE_ON
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TOGGLE,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == expected_entity["result"]
assert state.attributes["device_file"] == expected_entity.get(
"device_file", registry_entry.unique_id
)
|
vsoch/singularity-python | singularity/package/clone.py | Python | agpl-3.0 | 2,880 | 0.007292 | '''
Copyright (C) 2017-2019 Vanessa Sochat.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
from spython.main import Client
import tempfile
from singularity.logger import bot
from singularity.utils import run_command
import platform
import os
import sys
def package_node(root=None, name=None):
'''package node aims to package a (present working node) for a user into
a container. This assumes that the node is a si | ngle partition.
:param root: the root of the node to package, default is /
:param name: the name for the image. If not specified, will use machine's
psutil.disk_partitions()
'''
if | name is None:
name = platform.node()
if root is None:
root = "/"
tmpdir = tempfile.mkdtemp()
image = "%s/%s.tgz" %(tmpdir, name)
excludes = ['--exclude', "'/tmp'", '--exclude', image]
print("Preparing to package root %s into %s" %(root, name))
cmd = ["tar","--one-file-system","-czvSf", image, root] + excludes
output = run_command(cmd)
return image
def unpack_node(image_path, name=None, output_folder=None, size=None):
    """Unpack a .tgz produced by package_node into a Singularity image.

    The general steps are to:
      1. Package the node using the package_node function.
      2. Transfer the package somewhere that Singularity is installed.
      3. Run this function to gunzip the archive and import it.

    :param image_path: path to the .tgz archive created by package_node.
    :param name: name for the resulting image; defaults to the archive
        name with a .img suffix (the suffix is appended if missing).
    :param output_folder: destination directory; defaults to the folder
        containing image_path.
    :param size: currently unused — accepted for interface compatibility.
    :returns: path of the unpacked image directory.
    """
    if not image_path.endswith(".tgz"):
        bot.error("The image_path should end with .tgz. Did you create with package_node?")
        sys.exit(1)

    # Default the destination to the directory holding the archive.
    if output_folder is None:
        output_folder = os.path.dirname(os.path.abspath(image_path))

    image_name = os.path.basename(image_path)
    if name is None:
        name = image_name.replace('.tgz','.img')

    # Ensure the image name always carries the .img suffix.
    if not name.endswith('.img'):
        name = "%s.img" %(name)

    bot.debug("Preparing to unpack %s to %s." %(image_name,name))
    unpacked_image = "%s/%s" %(output_folder,name)
    if not os.path.exists(unpacked_image):
        os.mkdir(unpacked_image)

    # NOTE(review): "|" appears as a literal argv element here; this only
    # acts as a shell pipe if run_command joins the list into a shell
    # command line — verify run_command's semantics, otherwise gunzip will
    # receive "|" as a filename.
    cmd = ["gunzip","-dc",image_path,"|","sudo","singularity","import", unpacked_image]
    output = run_command(cmd)

    # TODO: singularity mount the container, cleanup files (/etc/fstab,...)
    # and add your custom singularity files.
    return unpacked_image
|
wangwei7175878/tutorials | theanoTUT/theano7_activation_function.py | Python | mit | 705 | 0.007092 | # View more python tutorials on my Youtube | and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
# 7 - Activation function
"""
The available activation functions in theano ca | n be found in this link:
http://deeplearning.net/software/theano/library/tensor/nnet/nnet.html
The activation functions include but not limited to softplus, sigmoid, relu, softmax, elu, tanh...
For the hidden layer, we could use relu, tanh, softplus...
For classification problems, we could use sigmoid or softmax for the output layer.
For regression problems, we could use a linear function for the output layer.
""" |
GoogleCloudPlatform/python-compat-runtime | appengine-compat/exported_appengine_sdk/google/net/proto2/python/public/descriptor.py | Python | apache-2.0 | 31,685 | 0.006596 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Descriptors essentially contain exactly the information found in a .proto
file, in types that make this information accessible in Python.
"""
from google.net.proto2.python.internal import api_implementation
_USE_C_DESCRIPTORS = False
if api_implementation.Type() == 'cpp':
import os
import uuid
from google.net.proto2.python.internal.cpp import _message
_USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False)
class Error(Exception):
"""Base error for this module."""
class TypeTransformationError(Error):
"""Error transforming between python proto type and corresponding C++ type."""
if _USE_C_DESCRIPTORS:
class DescriptorMetaclass(type):
def __instancecheck__(cls, obj):
if super(DescriptorMetaclass, cls).__instancecheck__(obj):
return True
if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
return True
return False
else:
DescriptorMetaclass = type
class DescriptorBase(object):
"""Descriptors base class.
This class is the base of all descriptor classes. It provides common options
related functionality.
Attributes:
has_options: True if the descriptor has non-default options. Usually it
is not necessary to read this -- just ca | ll GetOptions() which will
happily return the default instance. However, it's sometimes useful
for efficiency, and also useful inside the protobuf implementation to
avoid some bootstrapping issues.
"""
__metaclass__ = DescriptorMetaclass
if _USE_C_DESCRIPTORS:
_C_DESCRIPTOR_CLASS = ()
def __init__(self, options, options_class_name):
"""Initialize the descriptor gi | ven its options message and the name of the
class of the options message. The name of the class is required in case
the options message is None and has to be created.
"""
self._options = options
self._options_class_name = options_class_name
self.has_options = options is not None
def _SetOptions(self, options, options_class_name):
"""Sets the descriptor's options
This function is used in generated proto2 files to update descriptor
options. It must not be used outside proto2.
"""
self._options = options
self._options_class_name = options_class_name
self.has_options = options is not None
def GetOptions(self):
"""Retrieves descriptor options.
This method returns the options set or creates the default options for the
descriptor.
"""
if self._options:
return self._options
from google.net.proto2.proto import descriptor_pb2
try:
options_class = getattr(descriptor_pb2, self._options_class_name)
except AttributeError:
raise RuntimeError('Unknown options class name %s!' %
(self._options_class_name))
self._options = options_class()
return self._options
class _NestedDescriptorBase(DescriptorBase):
"""Common class for descriptors that can be nested."""
def __init__(self, options, options_class_name, name, full_name,
file, containing_type, serialized_start=None,
serialized_end=None):
"""Constructor.
Args:
options: Protocol message options or None
to use default message options.
options_class_name: (str) The class name of the above options.
name: (str) Name of this protocol message type.
full_name: (str) Fully-qualified name of this protocol message type,
which will include protocol "package" name and the name of any
enclosing types.
file: (FileDescriptor) Reference to file info.
containing_type: if provided, this is a nested descriptor, with this
descriptor as parent, otherwise None.
serialized_start: The start index (inclusive) in block in the
file.serialized_pb that describes this descriptor.
serialized_end: The end index (exclusive) in block in the
file.serialized_pb that describes this descriptor.
"""
super(_NestedDescriptorBase, self).__init__(
options, options_class_name)
self.name = name
self.full_name = full_name
self.file = file
self.containing_type = containing_type
self._serialized_start = serialized_start
self._serialized_end = serialized_end
def GetTopLevelContainingType(self):
"""Returns the root if this is a nested type, or itself if its the root."""
desc = self
while desc.containing_type is not None:
desc = desc.containing_type
return desc
def CopyToProto(self, proto):
"""Copies this to the matching proto in descriptor_pb2.
Args:
proto: An empty proto instance from descriptor_pb2.
Raises:
Error: If self couldnt be serialized, due to to few constructor arguments.
"""
if (self.file is not None and
self._serialized_start is not None and
self._serialized_end is not None):
proto.ParseFromString(self.file.serialized_pb[
self._serialized_start:self._serialized_end])
else:
raise Error('Descriptor does not contain serialization.')
class Descriptor(_NestedDescriptorBase):
"""Descriptor for a protocol message type.
A Descriptor instance has the following attributes:
name: (str) Name of this protocol message type.
full_name: (str) Fully-qualified name of this protocol message type,
which will include protocol "package" name and the name of any
enclosing types.
containing_type: (Descriptor) Reference to the descriptor of the
type containing us, or None if this is top-level.
fields: (list of FieldDescriptors) Field descriptors for all
fields in this type.
fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
objects as in |fields|, but indexed by "number" attribute in each
FieldDescriptor.
fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
objects as in |fields|, but indexed by "name" attribute in each
FieldDescriptor.
nested_types: (list of Descriptors) Descriptor references
for all protocol message types nested within this one.
nested_types_by_name: (dict str -> Descriptor) Same Descriptor
objects as in |nested_types|, but indexed by "name" attribute
in each Descriptor.
enum_types: (list of EnumDescriptors) EnumDescriptor references
for all enums contained within this type.
enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
objects as in |enum_types|, but indexed by "name" attribute
in each EnumDescriptor.
enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
from enum value name to EnumValueDescriptor for that value.
extensions: (list of FieldDescriptor) All extensions defined directly
within this message type (NOT within a nested type).
extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
objects as |extensions|, but indexed by "name" attribute of each
FieldDescriptor.
is_extendable: Does this type define any extension ranges?
options: (descriptor_pb2.MessageOptions) Protocol message options or None
to use default message options.
oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
in this message.
oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
but indexed by "name" attribute.
file: (FileDescriptor) Reference to file descriptor.
"""
if _USE_C_DESCRIPTORS:
_C_DESCRIPTOR_CLASS = _message.Descriptor
def __new__(cls, name, full_name, |
quattor/aquilon | lib/aquilon/worker/commands/reconfigure_membersof.py | Python | apache-2.0 | 1,488 | 0 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2016 Contributor
#
# Licensed under the Apache Lice | nse, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the | License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq reconfigure --membersof`."""
from aquilon.aqdb.model import Cluster, MetaCluster
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.reconfigure_list import CommandReconfigureList
class CommandReconfigureMembersof(CommandReconfigureList):
    """Broker logic for `aq reconfigure --membersof`.

    Resolves the named cluster and reconfigures every host it contains;
    for a metacluster, the hosts of all member clusters are collected.
    """

    required_parameters = ["membersof"]

    def get_hostlist(self, session, membersof, **_):
        """Return the list of hosts belonging to the named (meta)cluster."""
        # TODO: add eager loading options
        dbcluster = Cluster.get_unique(session, membersof, compel=True)
        if isinstance(dbcluster, MetaCluster):
            # Flatten the hosts of every member cluster into one list.
            return [host
                    for member in dbcluster.members
                    for host in member.hosts]
        # Return a copy so callers may mutate the list without touching
        # the ORM-managed collection.
        return dbcluster.hosts[:]
|
GoogleCloudPlatform/functions-framework-python | src/functions_framework/_function_registry.py | Python | apache-2.0 | 4,301 | 0.000698 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib.util
import os
import sys
import types
from functions_framework.exceptions import (
InvalidConfigurationException,
InvalidTargetTypeException,
MissingTargetException,
)
DEFAULT_SOURCE = os.path.realpath("./main.py")
FUNCTION_SIGNATURE_TYPE = "FUNCTION_SIGNATURE_TYPE"
HTTP_SIGNATURE_TYPE = "http"
CLOUDEVENT_SIGNATURE_TYPE = "cloudevent"
BACKGROUNDEVENT_SIGNATURE_TYPE = "event"
# REGISTRY_MAP stores the registered functions.
# Keys are user function names, values are user function signature types.
REGISTRY_MAP = {}
def get_user_function(source, source_module, target):
    """Return the user-defined function named `target` from `source_module`.

    Args:
        source: path of the user's source file (used in error messages only).
        source_module: the already-loaded module object to search.
        target: attribute name of the function to extract.

    Returns:
        The function object.

    Raises:
        MissingTargetException: `source_module` has no attribute `target`.
        InvalidTargetTypeException: the attribute exists but is not a
            plain function.
    """
    # Extract the target function from the source file.
    if not hasattr(source_module, target):
        raise MissingTargetException(
            "File {source} is expected to contain a function named {target}".format(
                source=source, target=target
            )
        )
    function = getattr(source_module, target)
    # Check that the attribute is actually a function (not a class,
    # constant, etc.).  Note: the garbled string literal from the source
    # dump has been reconstructed here.
    if not isinstance(function, types.FunctionType):
        raise InvalidTargetTypeException(
            "The function defined in file {source} as {target} needs to be of "
            "type function. Got: invalid type {target_type}".format(
                source=source, target=target, target_type=type(function)
            )
        )
    return function
def load_function_module(source):
    """Create (but do not execute) a module object for a user source file.

    Args:
        source: path to the user's .py file.

    Returns:
        (module, spec) — the fresh module object and its import spec;
        the caller is expected to run ``spec.loader.exec_module(module)``.

    Side effects: appends the file's directory to sys.path and registers
    the module in sys.modules under its stem name.
    """
    # 1. Extract the module name from the source path.
    realpath = os.path.realpath(source)
    directory, filename = os.path.split(realpath)
    # Extension is unused; only the stem becomes the module name.
    # (The garbled tuple-unpack from the source dump is reconstructed here.)
    name, _extension = os.path.splitext(filename)
    # 2. Create a new module from its file location.
    spec = importlib.util.spec_from_file_location(
        name, realpath, submodule_search_locations=[directory]
    )
    source_module = importlib.util.module_from_spec(spec)
    # 3. Add the directory of the source to sys.path to allow the function
    # to import modules relative to its location.
    sys.path.append(directory)
    # 4. Register the module so recursive imports see it.
    sys.modules[name] = source_module
    return source_module, spec
def get_function_source(source):
    """Get the configured function source.

    Falls back to the FUNCTION_SOURCE environment variable, then to
    DEFAULT_SOURCE, when no explicit source is given.
    """
    if not source:
        source = os.environ.get("FUNCTION_SOURCE", DEFAULT_SOURCE)
    # Coerce to str because callers may pass a PosixPath and, on
    # Python 3.5, os.path.* does not accept path objects.
    return str(source)
def get_function_target(target):
    """Get the configured function target.

    Falls back to the FUNCTION_TARGET environment variable when no
    explicit target is given, mirrors the resolved value back into the
    environment, and raises InvalidConfigurationException when no target
    can be determined at all.
    """
    resolved = target if target else os.environ.get("FUNCTION_TARGET", "")
    # Persist the resolved target so later lookups see the same value.
    os.environ["FUNCTION_TARGET"] = resolved
    if not resolved:
        raise InvalidConfigurationException(
            "Target is not specified (FUNCTION_TARGET environment variable not set)"
        )
    return resolved
def get_func_signature_type(func_name: str, signature_type: str) -> str:
    """Get user function's signature type.

    Signature type is searched in the following order:

    1. Decorator user used to register their function
    2. --signature-type flag
    3. environment variable FUNCTION_SIGNATURE_TYPE

    If none of the above is set, signature type defaults to be "http".
    """
    # Decorator registration (highest priority); empty string when the
    # function was never registered via a decorator.
    registered_type = REGISTRY_MAP[func_name] if func_name in REGISTRY_MAP else ""
    sig_type = (
        registered_type
        or signature_type
        or os.environ.get(FUNCTION_SIGNATURE_TYPE, HTTP_SIGNATURE_TYPE)
    )
    # Set the environment variable if it wasn't already.
    os.environ[FUNCTION_SIGNATURE_TYPE] = sig_type
    # Legacy GCF Python 3.7 reads FUNCTION_TRIGGER_TYPE; mirror the value
    # there when running under that runtime (detected via ENTRY_POINT).
    if os.environ.get("ENTRY_POINT"):
        os.environ["FUNCTION_TRIGGER_TYPE"] = sig_type
    return sig_type
|
openNSS/enigma2 | lib/python/Screens/SleepTimerEdit.py | Python | gpl-2.0 | 10,279 | 0.02724 | from Screens.InfoBar import InfoBar
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.config import config, getConfigListEntry
from enigma import eEPGCache
from time import time, localtime, mktime
class SleepTimerEdit(ConfigListScreen, Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = ["SleepTimerSetup", "Setup"]
self.setup_title = _("SleepTimer Configuration")
self.setTitle(self.setup_title)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["description"] = Label("")
self.list = []
ConfigListScreen.__init__(self, self.list, session = session)
self.createSetup()
self["setupActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"green": self.ok,
"red": self.cancel,
"cancel": self.cancel,
"ok": self.ok,
}, -2)
def createSetup(self):
self.list = []
if InfoBar.instance and InfoBar.instance.sleepTimer.isActive():
statusSleeptimerText = _("(activated +%d min)") % InfoBar.instance.sleepTimerState()
else:
statusSleeptimerText = _("(not activated)")
self.list.append(getConfigListEntry(_("Sleeptimer") + " " + statusSleeptimerText,
config.usage.sleep_timer,
_("Configure the duration in minutes for the sleeptimer. Select this entry and click OK or green to start/stop the sleeptimer")))
self.list.append(getConfigListEntry(_("Inactivity Sleeptimer"),
config.usage.inactivity_timer,
_("Configure the duration in hours the receiver should go to standby when the receiver is not controlled.")))
if int(config.usage.inactivity_timer.value):
self.list.append(getConfigListEntry(_("Specify timeframe to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime,
_("When enabled you can specify a timeframe when the inactivity sleeptimer is ignored. Not the detection is disabled during this timeframe but the inactivity timeout is disabled")))
if config.usage.inactivity_timer_blocktime.value:
self.list.append(getConfigListEntry(_("Set blocktimes by weekday"),
config.usage.inactivity_timer_blocktime_by_weekdays,
_("Specify if you want to set the blocktimes separately by weekday")))
if config.usage.inactivity_timer_blocktime_by_weekdays.value:
for i in range(7):
self.list.append(getConfigListEntry([_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), _("Friday"), _("Saturday"), _("Sunday")][i],
config.usage.inactivity_timer_blocktime_day[i]))
if config.usage.inactivity_timer_blocktime_day[i].value:
self.list.append(getConfigListEntry(_("Start time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_begin_day[i],
_("Specify the start time when the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("End time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_end_day[i],
_("Specify the end time until the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("Specify extra timeframe to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_extra_day[i],
_("When enabled you can specify an extra timeframe when the inactivity sleeptimer is ignored. Not the detection is disabled during this timeframe but the inactivity timeout is disabled")))
if config.usage.inactivity_timer_blocktime_extra_day[i].value:
self.list.append(getConfigListEntry(_("Extra start time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_extra_begin_day[i],
_("Specify the extra start time when the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("Extra end time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_extra_end_day[i],
_("Specify the extra end time until the inactivity sleeptimer should be ignored")))
else:
self.list.append(getConfigListEntry(_("Start time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_begin,
_("Specify the start time when the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("End time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_end,
_("Specify the end time until the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("Specify extra timeframe to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_extra,
_("When enabled you can specify an extra timeframe when the inactivity sleeptimer is ignored. Not the detection is disabled during this timeframe but the inactivity timeout is disabled")))
if config.usage.inactivity_timer_blocktime_extra.value:
self.list.append(getConfigListEntry(_("Extra start time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_extra_begin,
_("Specify the extra start time when the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("Extra end time to ignore inactivity sleeptimer"),
config.usage.inactivity_timer_blocktime_extra_end,
_("Specify the extra end time until the inactivity sleeptimer should be ignored")))
self.list.append(getConfigListEntry(_("Shutdown when in Standby"),
config.usage.standby_to_shutdown_timer,
_("Configure the duration when the receiver should go to shut down in case the receiver is in standby mode.")))
if int(config.usage.standby_to_shutdown_timer.value):
self.list.append(getConfigListEntry(_("Specify timeframe to ignore the shutdown in standby"),
config.usage.standby_to_shutdown_timer_blocktime,
_("When enabled you can specify a timeframe to ignore the shutdown timer when the receiver is in standby mode")))
if config.usage.standby_to_shutdown_timer_blocktime.value:
self.list.append(getConfigListEntry(_("Start time to ignore shutdown in standby"),
config.usage.standby_to_shutdown_timer_blocktime_begin,
_("Specify the start time to ignore the shutdown timer when the receiver is in standby mode")))
self.list.append(getConfigListEntry(_("End time to ignore shutdown in standby"),
config.usage.standby_to_shutdown_timer_blocktime_end,
_("Specify the end time to ignore the shutdown timer when the receiver is in standby mode")))
self.list.append(getConfigListEntry(_("Enable wakeup timer"),
config.usage.wakeup_enabled,
_("Note: when enabled, and you do want standby mode after wake up, set option 'Startup to Standby' as 'No, except Wakeup timer'.")))
if config.usage.wakeup_enabled.value != "no":
for i in range(7):
self.list.append(getConfigListEntry([_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), _("Friday"), _("Saturday"), _("Sunday")][i],
config.usage.wakeup_day[i]))
if config.usage.wakeup_day[i].value:
self.list.append(getConfigListEntry(_("Wakeup time"),
config.usage.wakeup_time[i]))
self["config"].list = self.list
self["config"].l.setList(self.list | )
def ok(self):
if self["config"].isChanged():
for x in self["config"].list:
x[1].save()
if self.getCurrentEntry().startswith(_ | ("Sleeptimer")):
sleepTimer = config.usage.sleep_timer.value
if sleepTimer == "event_standby":
sleepTimer = self.currentEventTime()
else:
sleepTimer = int(sleepTimer)
if sleepTimer or not self.getCurrentEntry().endswith(_("(not activated)")):
InfoBar.instance.setSleepTimer(sleepTimer)
self.close(True)
self.close()
def cancel(self, answer = None):
if answer is None:
if self["config"].isChanged():
self.session.openWithCallback(self.cancel, MessageBox, _("Really close without saving settings?"))
else:
self.close()
elif answer:
for x in self["config"].list:
x[1].cancel()
self.close()
def keyLeft(self):
ConfigListScreen.keyLeft( |
Carryzhou/MyPython- | bubble_sort.py | Python | gpl-3.0 | 511 | 0.037182 | #!/usr/bin/python
#Python version: bubble sort algorithm
import random
def bubble_sort(numbers):
for j in xrange(len(numbers) - 1, -1, | -1):
for i in xrange(j):
if numbers[ i ] > numbers[ i + 1 ]:
numbers[ i ], numbers[ i + 1 ] = numbers[ i + 1], numbers[ i ]
print numbers
def main():
| numbers = []
for i in range(10):
numbers.append(random.randint(1,100))
bubble_sort(numbers)
print numbers
if __name__ == '__main__':
main() |
astropy/astropy | astropy/io/misc/asdf/tags/coordinates/spectralcoord.py | Python | bsd-3-clause | 1,569 | 0.000637 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from asdf.tags.core import NDArrayType
from astropy.coordinates.spectral_coordinate import SpectralCoord
from astropy.io.misc.asdf.types import AstropyType
from astropy.io.misc.asdf.tags.unit.unit import UnitType
__all__ = ['SpectralCoordType']
class SpectralCoordType(AstropyType):
"""
ASDF tag implementation used to serialize/derialize SpectralCoord objects
"""
name = 'coordinates/spectralcoord'
types = [SpectralCoord]
version = '1.0.0'
@classmethod
def to_tree(cls, spec_coord, ctx):
node = {}
if isinstance(spec_coord, SpectralCoord):
node['value'] = spec_coord.value
node['unit'] = spec_coord.unit
if spec_coord.observer is not None:
node['observer'] = spec_coord.observer
if spec_coord.target is not None:
node['target'] = spec_coord.target
return node
raise TypeError(f"'{spec_coord}' is not a valid SpectralCoord")
@classmethod
def from | _tree(cls, node, ctx):
if isinstance(node, SpectralCoord):
return node
| unit = UnitType.from_tree(node['unit'], ctx)
value = node['value']
observer = node['observer'] if 'observer' in node else None
target = node['target'] if 'observer' in node else None
if isinstance(value, NDArrayType):
value = value._make_array()
return SpectralCoord(value, unit=unit, observer=observer, target=target)
|
heuermh/cloudbiolinux | contrib/flavor/pjotrp/biotest/biotestflavor.py | Python | mit | 1,800 | 0.007778 | from fabric.api import *
from fabric.contrib.files import *
from cloudbio.flavor import Flavor
from cloudbio.custom.shared import (_fetch_and_unpack)
class BioTestFlavor(Flavor):
"""A Flavor for cross Bio* tests
"""
def __init__(self, env):
Flavor.__init__(self,env)
self.name = "Bio* cross-lang flavor"
def rewrite_config_items(self, name, items):
if name == "packages":
# list.remove('screen')
# list.append('test')
return items
elif name == "python":
return [ 'biopython' ]
elif name == "perl":
return [ 'bioperl' ]
elif name == "ruby":
return [ 'bio' ]
elif name == "custom":
return []
else:
return item | s
def post_install(self):
env.logger.info("Starting post-install")
env.logger.info("Load Scalability tests")
if exists('Scalability'):
with cd('Scalability'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com | /pjotrp/Scalability.git")
# Now run a post installation routine (for the heck of it)
run('./Scalability/scripts/hello.sh')
env.logger.info("Load Cross-language tests")
if exists('Cross-language-interfacing'):
with cd('Cross-language-interfacing'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Cross-language-interfacing.git")
# Special installs for the tests
with cd('Cross-language-interfacing'):
sudo('./scripts/install-packages-root.sh ')
run('./scripts/install-packages.sh')
run('./scripts/create_test_files.rb')
env.flavor = BioTestFlavor(env)
|
googleapis/python-aiplatform | samples/generated_samples/aiplatform_v1beta1_generated_job_service_list_custom_jobs_sync.py | Python | apache-2.0 | 1,526 | 0.000655 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListCustomJobs
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1beta1_generated_JobService_ListCustomJobs_sync]
from google | .cloud import aiplatform_v1beta1
def sample_list_custom_jobs():
# Create a client
client = aiplatform_v1beta1.JobServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.ListCustomJobsRequest(
parent="parent_value",
)
# Make the | request
page_result = client.list_custom_jobs(request=request)
# Handle the response
for response in page_result:
print(response)
# [END aiplatform_v1beta1_generated_JobService_ListCustomJobs_sync]
|
DFEC-R2D2/r2d2 | pygecko/states/remote.py | Python | mit | 11,721 | 0.013224 | import time
import random
from random import randint
# from library import Trigger, Axis
# from library import PS4
from library import Joystick
import RPi.GPIO as GPIO # remove!!!
from emotions import angry, happy, confused
# from pysabertooth import Sabertooth
# from smc import SMC
from library import LEDDisplay
from library import factory
from library import reset_all_hw
# Leg Motor Speed Global
global_LegMotor = 70
# # Happy Emotion
# def happy(leds, servos, mc, audio):
# print("4")
# print("Happy")
#
# # Dome Motor Initialization
# # mc = SMC(dome_motor_port, 115200)
# # mc.init()
#
# # Spins Motor
# # mc.init()
# mc.speed(3200)
#
# # LED Matrix Green
# # breadboard has mono
# # R2 has bi-color leds
# # mono:0 bi:1
# # led_type = 0
# # leds = [0]*5
# # leds[1] = LEDDisplay(0x70, led_type)
# # leds[2] = LEDDisplay(0x71, led_type)
# # leds[3] = LEDDisplay(0x72, led_type)
# # leds[4] = LEDDisplay(0x73, led_type)
#
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# for i in range(1, 5):
# leds[i].set(x, y, 1)
#
# for i in range(1, 5):
# leds[i].write()
#
# # Servo Wave
# # s0.angle = 0
# # time.sleep(0.2)
# # s1.angle = 0
# # time.sleep(0.2)
# # s2.angle = 0
# # time.sleep(0.2)
# # s3.angle = 0
# # time.sleep(0.2)
# # s4.angle = 0
# # time.sleep(0.5)
# # s4.angle = 130
# # time.sleep(0.2)
# # s3.angle = 130
# # time.sleep(0.2)
# # s2.angle = 130
# # time.sleep(0.2)
# # s1.angle = 130
# # time.sleep(0.2)
# # s0.angle = 130
#
# for a in [0, 130]:
# for i in range(4):
# servos[i].angle = a
# time.sleep(0.2)
# time.sleep(0.5)
#
# time.sleep(1.5)
# mc.stop()
# time.sleep(1.5)
# for i in range(1, 5):
# leds[i].clear()
#
#
# # Confused Emotion
# def confused(leds, servos, mc, audio):
# print("5")
# print("Confused")
# # LED Matrix Yellow
# # leds = [0]*5
# # leds[1] = LEDDisplay(0x70, 1)
# # leds[2] = LEDDisplay(0x71, 1)
# # leds[3] = LEDDisplay(0x72, 1)
# # leds[4] = LEDDisplay(0x73, 1)
#
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# for i in range(1, 5):
# leds[i].set(x, y, 3)
# for i in range(1, 5):
# leds[i].write()
# time.sleep(3)
# for i in range(1, 5):
# leds[i].clear()
#
#
# # Angry Emotion
# def angry(leds, servos, mc, audio):
# print("6")
# print("Angry")
# # LED Matrix Red
# # leds = [0]*5
# # leds[1] = LEDDisplay(0x70, 1)
# # leds[2] = LEDDisplay(0x71, 1)
# # leds[3] = LEDDisplay(0x72, 1)
# # leds[4] = LEDDisplay(0x73, 1)
#
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [0, 1, 2, 3, 4, 5, 6, 7]:
# for i in range(1, 5):
# leds[i].set(x, y, 2)
#
# for i in range(1, 5):
# leds[i].write()
#
# # Plays Imperial Theme Sound
# audio.sound('imperial')
#
# # Servo Open and Close
# # s0.angle = 0
# # s1.angle = 0
# # s2.angle = 0
# # s3.angle = 0
# # s4.angle = 0
# # time.sleep(1)
# # s4.angle = 130
# # s3.angle = 130
# # s2.angle = 130
# # s1.angle = 130
# # s0.angle = 130
#
# for a in [0, 130]:
# for i in range(5):
# servos[i].angle = a
# time.sleep(1)
#
# time.sleep(3)
# for i in range(1, 5):
# leds[i].clear()
#######################################
# original remote
#######################################
# # Remote Mode
# def remote(remoteflag, namespace):
# print("Remote")
#
# # create objects
# (leds, dome, legs, servos, Flash) = factory(['leds', 'dome', 'legs', 'servos', 'flashlight'])
#
# # initalize everything
# dome.init()
# dome.speed(0)
#
# legs.drive(1, 0)
# legs.drive(2, 0)
#
# for s in servos:
# s.angle = 0
# time.sleep(0.25)
#
# # what is this???
# GPIO.setmode(GPIO.BCM)
# GPIO.setwarnings(False)
# GPIO.setup(26, GPIO.OUT)
#
# # Joystick Initialization
# js = Joystick()
#
# # get audio
# audio = namespace.audio
#
# # Flash = FlashlightPWM(15)
# # Flash = namespace.flashlight
#
# while(remoteflag.is_set()):
# try:
# # Button Initialization
# ps4 = js.get()
# btnSquare = ps4.buttons[0]
# btnTriangle = ps4.buttons[1]
# btnCircle = ps4.buttons[2]
# btnX = ps4.buttons[3]
# btnLeftStickLeftRight = ps4.leftStick.y
# btnLeftStickUpDown = ps4.leftStick.x
# btnRightStickLeftRight = ps4.rightStick.y
# btnRightStickUpDown = ps4.rightStick.x
# Left1 = ps4.shoulder[0]
# Right1 = ps4.shoulder[1]
# Left2 = ps4.triggers.x
# Right2 = ps4.triggers.y
# hat = ps4.hat
#
# # print("PRINT")
#
# # Button Controls
# if hat == 1:
# # Happy Emotion
# print("Arrow Up Pressed")
# happy(leds, servos, dome, audio) # namespace.emotions['happy'](leds, servos, mc, audio)
# if hat == 8:
# # Confused Emotion
# print("Arrow Left Pressed")
# confused(leds, servos, dome, audio)
# if hat == 2:
# # Angry Emotion
# print("Arrow Right Pressed")
# angry(leds, servos, dome, audio)
# if hat == 4:
# print("Arrow Down Pressed")
# if btnSquare == 1:
# # word = random_char(2)
# audio.speak_random(2)
# time.sleep(0.5)
# if btnTriangle == 1:
# # FlashLight ON
# GPIO.output(26, GPIO.HIGH)
# | Flash.pwm.set_pwm(15, 0, 130)
# if btnCircle == 1:
# # FlashLight OFF
# GPIO.output(26, GPIO.LOW)
# Flash.pwm.set_pwm(15, 0, 0)
# if btnX == 1:
# for x in [0, 1, 2, 3, 4, 5, 6, 7]:
# for y in [ | 0, 1, 2, 3, 4, 5, 6, 7]:
# if x == randint(0, 8) or y == randint(0, 8):
# for i in range(1, 5):
# leds[i].set(x, y, randint(0, 4))
# else:
# for i in range(1, 5):
# leds[i].set(x, y, 4)
# for i in range(1, 5):
# leds[i].write()
# time.sleep(0.1)
# for i in range(1, 5):
# leds[i].clear()
# if Left1 == 1:
# # Dome Motor Forward
# dome.speed(3200)
# time.sleep(2)
# dome.speed(0)
# if Right1 == 1:
# # Dome Motor Backward
# dome.speed(-3200)
# time.sleep(2)
# dome.speed(0)
# # if Left1 == 0 or Right1 == 0:
# # # Dome Motor Stop
# # dome.speed(0)
# # if Left2 > 1:
# # # Servo Open
# # s0.angle = 0
# # s1.angle = 0
# # s2.angle = 0
# # s3.angle = 0
# # s4.angle = 0
# # Flash.pwm.set_pwm(15, 0, 3000)
# #
# # if Right2 > 1:
# # # Servo Close
# # s0.angle = 130
# # s1.angle = 130
# # s2.angle = 130
# # s3.angle = 130
# # s4.angle = 130
# # Flash.pwm.set_pwm(15, 0, 130)
# if Left2 > 1:
# for s in servos:
# s.angle = 0
# time.sleep(0.25)
# Flash.pwm.set_pwm(15, 0, 300)
# if Right2 > 1:
# for s in servos:
# s.angle = 130
# time.sleep(0.25)
# Flash.pwm.set_pwm(15, 0, 130)
# if btnLeftStickLeftRight < 0.3 and btnLeftStickLeftRight > -0.3:
# legs.drive(1, 0)
# if btnRightStickUpDown < 0.3 and btnRightStickUpDown > -0.3:
# legs.drive(2, 0)
# if btnRightStickUpDown >= 0.3:
# # Right and Left Motor Forward
# legs.drive(1, btnRightStickUpDown*global_LegMotor)
# legs.drive(2, btnRightStickUpDown*-global_LegMotor)
# if btnRightStickUpDown <= -0.3:
# # Right and Left Motor Backward
# legs.drive(1, btnRightStickUpDown*global_LegMotor)
# legs.drive(2, btnRightStickUpDown*-global_LegMotor)
# if btnLeftStickLeftRight <= 0.3:
# # Turn Left
# legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
# legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
# if btnLeftStickLeftRight >= -0.3:
# # Turn Right
# legs.drive(1, btnLeftStickLeftRight*(-global_LegMotor))
# legs.drive(2, btnLeftStickLeftRight*-global_LegMotor)
#
# except KeyboardInterrupt:
# print('js exiting ...')
# return
# return
def remote_func(hw, ns):
print("Remote")
dome = hw['dome']
dome.speed(0)
legs = hw['legs']
legs.drive(1, 0)
legs.drive(2, 0)
flashlight = hw['flashlight']
audio = hw['audio']
audio.speak('start')
while ns.current_state == 3:
print('remote ...')
spd = random.randint(0, 40)
legs.drive(1, spd)
legs.drive(2, spd)
dome.speed(spd)
time.sleep(0.5)
legs.drive(1, 0)
legs.drive(2, 0)
dome.speed(0)
time.sleep(0.1)
return
###### real loop here #####
# Joystick Initialization
js = Joystick()
while ns. |
pataquets/namecoin-core | test/functional/feature_maxuploadtarget.py | Python | mit | 6,579 | 0.001368 | #!/usr/bin/env python3
# Copyright (c) 2015-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of -maxuploadtarget.
* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
"""
from collections import defaultdict
import time
from test_framework.messages import CInv, MSG_BLOCK, msg_getdata
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, mine_large_block
class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.block_receive_map = defaultdict(int)
def on_inv(self, message):
pass
def on_block(self, message):
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
class MaxUploadTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[
"-maxuploadtarget=800M",
"-acceptnonstdtxn=1",
]]
self.supports_cli = False
# Cache for utxos, as the listunspent may take a long time later in the test
self.utxo_cache = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Before we connect anything, we first set the time on the node
# to be in the past, otherwise things break because the CNode
# time counters can't be reset backward after initialization
old_time = int(time.time() - 2*60*60*24*7)
self.nodes[0].setmocktime(old_time)
# Generate some old blocks
self.generate(self.nodes[0], 130)
# p2p_conns[0] will only request old blocks
# p2p_conns[1] will only request new blocks
# p2p_conns[2] will test resetting the counters
p2p_conns = []
for _ in range(3):
p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn()))
# Now mine a big block
mine_large_block(self, self.nodes[0], self.utxo_cache)
# Store the hash; we'll request this later
big_old_block = self.nodes[0].getbestblockhash()
old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
big_old_block = int(big_old_block, 16)
# Advance to two days ago
self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
# Mine one more block, so that the prior block looks old
mine_large_block(self, self.nodes[0], self.utxo_cache)
# We'll be requesting this new block too
big_new_block = self.nodes[0].getbestblockhash()
big_new_block = int(big_new_block, 16)
# p2p_conns[0] will test what happens if we just keep requesting the
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(MSG_BLOCK, big_old_block))
max_bytes_per_day = 800*1024*1024
daily_buffer = 144 * 4000000
max_bytes_available = max_bytes_per_day - daily_buffer
success_count = max_bytes_available // old_block_size
# 576MB will be reserved for relaying new blocks, so expect this to
# succeed for ~235 tries.
for i in range(success_count):
p2p_conns[0].send_and_ping(getdata_request)
assert_equal(p2p_conns[0].block_receive_map[big_old_block], i+1)
assert_equal(len(self.nodes[0].getpeerinfo()), 3)
# At most a couple more tries should succeed (depending on how long
# the test has been running so far).
for _ in range(3):
p2p_conns[0].send_message(getdata_request)
p2p_conns[0].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
self.log.info("Peer 0 disconnected after downloading old block too many times")
# Requesting the current block on p2p_conns[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(800):
p2p_conns[1].send_and_ping(getdata_request)
assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)
self.log.info("Peer 1 able to repeatedly download new block")
# But if p2p_conns[1] tries for an old block, it gets disconnected too.
getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
self.log.info("Peer 1 disconnected after trying to download old block")
self.log.info("Advancing system time on node to clear counters...")
# If we advance the time by 24 hours, then the counters should reset,
# and p2p_conns[2] should be able to retrieve the old block.
self.nodes[0].setmocktime(int(time.time()))
p2p_conns[2].sync_with_ping()
p2p_conns[2].send_and_ping(getdata_request)
assert_equal(p2p_conns[2].block_receive_map[big_old_block], 1)
self.log.info("Peer 2 able to download old block")
self.nodes[0].disconnect_p2ps()
self.log.info("Restarting node 0 with download permission and 1MB maxuploadtarget")
self.restart_node(0, ["-whitelist=download@127.0.0.1", "-maxuploadtarget=1"])
# Reconnect to self.nodes[0]
peer = self.nodes[0].add_p2p_connection(TestP2PConn())
#retrieve 20 blocks which should be enough to break the 1MB limit
getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(20):
peer.send_and_ping(getdata_request)
assert_equal(peer.block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(MSG_BLOCK, bi | g_old_block)]
peer.send_and_ping(getdata_request)
self.log.info("Peer still connected after trying to download old block (download permission)")
peer_info = self.nodes[0].getpeerinf | o()
assert_equal(len(peer_info), 1) # node is still connected
assert_equal(peer_info[0]['permissions'], ['download'])
if __name__ == '__main__':
MaxUploadTest().main()
|
saltstack/salt | salt/modules/ifttt.py | Python | apache-2.0 | 2,334 | 0.002142 | """
Support for IFTTT
.. versionadded:: 2015.8.0
Requires an ``api_key`` in ``/etc/salt/minion``:
.. code-block:: yaml
ifttt:
secret_key: '280d4699-a817-4719-ba6f-ca56e573e44f'
"""
import logging
import time
import salt.utils.http
import salt.utils.json
log = logging.getLogger(__name__)
def __virtual__():
"""
Only load the module if apache is installed
"""
if not __salt__["config.get"]("ifttt.secret_key") and not __salt__["config.get"](
"ifttt:secret_key"
):
return (False, "IFTTT Secret Key Unavailable, not loading.")
return True
def _query(event=None, method="GET", args=None, header_dict=None, data=None):
"""
Make a web call to IFTTT.
"""
secret_key = __salt__["config.get"]("ifttt.secret_key") or __salt__["config.get"](
"ifttt:secret_key"
)
path = "https://maker.ifttt.com/trigger/{}/with/key/{}".format(event, secret_key)
if header_dict is None:
header_dict = {"Content-type": "application/json"}
if method != "POST":
header_dict["Accept"] = "application/json"
result = salt.utils.http.query(
path,
method,
params={},
data=data,
header_dict=header_dict,
decode=True,
decode_type="auto",
text=True,
status=True,
cookies=True,
persist_session=True,
opts=__opts__,
backend="requests",
)
return result
def trigger_event(event=None, **kwargs):
"""
Trigger a configured event in IFTTT.
| :param event: The name of the event to trigger.
:return: A dictionary with status, text, and error if result was failure.
"""
| res = {"result": False, "message": "Something went wrong"}
data = {}
for value in ("value1", "value2", "value3", "Value1", "Value2", "Value3"):
if value in kwargs:
data[value.lower()] = kwargs[value]
data["occurredat"] = time.strftime("%B %d, %Y %I:%M%p", time.localtime())
result = _query(event=event, method="POST", data=salt.utils.json.dumps(data))
if "status" in result:
if result["status"] == 200:
res["result"] = True
res["message"] = result["text"]
else:
if "error" in result:
res["message"] = result["error"]
return res
|
rueberger/MJHMC | mjhmc/samplers/markov_jump_hmc.py | Python | gpl-2.0 | 14,817 | 0.002025 | """
This file contains the core MJHMC algorithm, as well the algorithms for several HMC variants
including standard HMC
As there is a significant amount of logic common to all algorithms, all of the different variants
are implemented as classes that inherit from a common base class.
"""
import numpy as np
from mjhmc.misc.utils import overrides, min_idx, draw_from
from mjhmc.misc.distributions import Distribution
from .hmc_state import HMCState
#pylint: disable=too-many-instance-attributes
#pylint: disable=too-many-arguments
class HMCBase(object):
"""
The base class for all HMC samplers in this file.
Not a useful sampler in of itself but provides a useful structure
and serves as a control
"""
def __init__(self, Xinit=None, E=None, dEdX=None,
epsilon=1e-4, alpha=0.2, beta=None,
num_leapfrog_steps=5, distribution=None):
""" Construct and return a new HMCBase instance
:param Xinit: Initial configuration for position variables. Of shape (n_dims, n_batch)
:param E: function: R^{n_dims x n_batch} -> R^{n_batch}.
Specifies the energy of the current configuration
:param dEdX: function: R^{n_dims x n_batch} -> R^{n_batch}
Specifies the energy gradient of the current configuration
:param distribution: Optional. An instance of mjhmc.misc.distributions.Distribution
Specifies E and dEdX in place of explicit keyword arguments
:param epsilon: step length for leapfrog integrator
:param alpha: specifies momentum corruption rate in terms of fraction
of momentum corrupted per sample step
:param beta: specifies momentum corruption rate
:param num_leapfrog_steps: number of leapfrog integration steps per application
of L operator
:returns: a new instance
:rtype: HMCBase
"""
# do not execute this block if I am an instance of MarkovJumpHMC
if not isinstance(self, MarkovJumpHMC):
if isinstance(distribution, Distribution):
distribution.mjhmc = False
distribution.reset()
self.ndims = distribution.Xinit.shape[0]
self.nbatch = distribution.Xinit.shape[1]
self.energy_func = distribution.E
self.grad_func = distribution.dEdX
self.state = HMCState(distribution.Xinit.copy(), self)
self.distribution = distribution
else:
assert Xinit is not None
assert E is not None
assert dEdX is not None
self.ndims = Xinit.shape[0]
self.nbatch = Xinit.shape[1]
self.energy_func = E
self.grad_func = dEdX
self.state = HMCState(Xinit.copy(), self)
self.num_leapfrog_steps = num_leapfrog_steps
self.epsilon = epsilon
self.beta = beta or alpha**(1./(self.epsilon*self.num_leapfrog_steps))
self.original_epsilon = epsilon
self.original_l = self.num_leapfrog_steps
self.n_burn_in = 500
# these settings for the base class only
self.p_flip = 0.5
self.p_r = 1
# total operator counts. counted per particle
self.l_count = 0
self.f_count = 0
# this one is necessary since we're not always flipping the momentum
self.fl_count = 0
self.r_count = 0
# only approximate!! lower bound
self.grad_per_sample_step = self.num_leapfrog_steps
# to deprecate
def E(self, X):
"""compute energy function at X"""
E = self.energy_func(X).reshape((1,-1))
return E
# to deprecate
def dEdX(self, X):
"""compute energy function gradient at X"""
dEdX = self.grad_func(X)
return dEdX
def leap_prob(self, Z1, Z2):
"""
Metropolis-Hastings Probability of transitioning from state Z1 to
state Z2.
"""
Ediff = Z1.H() - Z2.H()
p_acc = np.ones((1, Ediff.shape[1]))
p_acc[Ediff < 0] = np.exp(Ediff[Ediff < 0])
return p_acc
def sampling_iteration(self):
"""Perform a single sampling step
"""
# FL operator
proposed_state = self.state.copy().L().F()
# Metropolis-Hasting acceptance probabilities
p_acc = self.leap_prob(self.state, proposed_state)
# accepted states
fl_idx = np.arange(self.nbatch).reshape(1, self.nbatch)[np.random.rand(self.nbatch) < p_acc]
#update accepted FL transitions
self.state.update(fl_idx, proposed_state)
# flip momentum with prob p_flip (.5 for control)
# crank p_flip up to 1 to recover standard HMC
p_half = self.p_flip * np.ones((1, self.nbatch))
flip_idx = np.arange(self.nbatch).reshape(1, self.nbatch)[np.random.rand(self.nbatch) < p_half]
curr_state = self.state.copy().F()
self.state.update(flip_idx, curr_state)
# do it particle | wise
if np.random.random() < self.p_r:
# corrupt the momentum
self.r_count += self.nbatch
self.state.R()
FL_idx | = set(fl_idx)
F_idx = set(flip_idx)
self.l_count += len(FL_idx & F_idx)
self.f_count += len(F_idx - FL_idx)
self.fl_count += len(FL_idx - F_idx)
def sample(self, n_samples=1000, preserve_order=False):
"""
Draws nsamples, returns them all
Args:
n_samples: number of samples to draw - int
preserve_order: if True, time is given it's own axis.
otherwise, it is rolled into the batch axis
Returns:
if preserve_order:
samples - [n_dim, n_batch, n_samples]
else:
samples - [n_dim, n_batch * n_samples]
"""
# to do: unroll samples
samples = []
for _ in xrange(n_samples):
self.sampling_iteration()
samples.append(self.state.copy().X)
if preserve_order:
return np.stack(samples, axis=-1)
else:
return np.concatenate(samples, axis=1)
    def burn_in(self):
        """Runs the sample for a number of burn in sampling iterations
        """
        # discard self.n_burn_in iterations so the chain forgets its init;
        # the drawn states are intentionally thrown away
        for _ in xrange(self.n_burn_in):
            self.sampling_iteration()
class HMC(HMCBase):
    """Implements standard HMC
    """
    def __init__(self, *args, **kwargs):
        super(HMC, self).__init__(*args, **kwargs)
        # always flip the momentum after a leapfrog proposal -> textbook HMC
        self.p_flip = 1
class ControlHMC(HMCBase):
    """Standard HMC but randomize all of the momentum some of the time
    """
    def __init__(self, *args, **kwargs):
        super(ControlHMC, self).__init__(*args, **kwargs)
        self.p_flip = 1
        # NOTE: p_r is derived from the discrete beta computed in the base
        # class *before* beta is overwritten below -- the order matters
        self.p_r = - np.log(1 - self.beta) * 0.5
        # tells hmc state to randomize all of the momentum when R is called
        self.beta = 1
class ContinuousTimeHMC(HMCBase):
"""Base class for all markov jump HMC samplers
"""
def __init__(self, *args, **kwargs):
""" Initalizer method for continuous-time samplers
:param resample: boolean flag whether to resample or not. ALWAYS set to true unless you
have a specific reason not to. Produced samples will be biased if resample is false
:returns: the constructed instance
:rtype: ContinuousTimeHMC
"""
self.resample = kwargs.pop('resample', True)
distribution = kwargs.get('distribution')
super(ContinuousTimeHMC, self).__init__(*args, **kwargs)
# transformation from discrete beta to insure matching autocorrelation
# maybe assert that beta is less than 1 if necessary
# corrupt all of the momentum with some fixed probability
self.p_r = - np.log(1 - self.beta) * 0.5
# tells hmc state to randomize all of the momentum when R is called
self.beta = 1
if isinstance(distribution, Distribution):
distribution.mjhmc = True
if not distribution.generation_instance:
distribution.reset()
self.ndims = distribution.Xinit.shape[0]
|
DigitalPublishingToolkit/epubtrailer.py | epubtrailer.py | Python | lgpl-3.0 | 8,278 | 0.022348 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
(C) 2014 Contributors to the Digital Publishing Toolkit
License: GPL3
This code has been developed as part of the [Digital Publishing Toolkit](http://digitalpublishingtoolkit.org).
with the support of Institute for [Network Cultures](http://networkcultures.org)
and [Creating 010](http://creating010.com).
"""
import sys, zipfile, os, shutil, glob, textwrap, struct
from os.path import join
from xml.etree import ElementTree as ET
from PIL import Image, ImageFont, ImageDraw
from images2gif import writeGif
import argparse
def decode_hexcolor(p):
    "#FFFFFF => (255, 255, 255)"
    # int(..., 16) replaces the Python-2-only str.decode('hex'), so this
    # also runs under Python 3; lstrip drops an optional leading '#'
    p = p.lstrip("#")
    return tuple(int(p[i:i + 2], 16) for i in (0, 2, 4))
###############################
# Parse the arguments
###############################
# command-line interface and derived rendering configuration
parser = argparse.ArgumentParser(description='Make a gif-format book trailer from an epub')
parser.add_argument('epub', help='epub file')
parser.add_argument('-o', '--output', help='output file')
parser.add_argument('--width', type=int, default=720, help='Width (default: 720)')
# fixed: help text for --height previously said 'Width' (copy/paste slip)
parser.add_argument('--height', type=int, default=576, help='Height (default: 576)')
parser.add_argument('--duration', type=float, default=0.25, help='Base slide duration (Default: 0.25 secs)')
parser.add_argument('--backgroundcolor', default="#FFFFFF", help='Background color (default: #FFFFFF aka white)')
parser.add_argument('--textcolor', default="#000000", help='Text color (default: #000000 aka black)')
parser.add_argument('--font', help='Font to use (default: search epub for font, use first found)')
parser.add_argument('--fontratio', type=float, help='Used to compute font sizes (default is width/80)')
parser.add_argument('--padding', type=float, help='Used to position text (default is width/7)')
parser.add_argument('--wrapchars', default=20, type=int, help='Charwidth for text wrapping (default: 20)')
parser.add_argument('--valign', default="top", help='Vertical alignment for text, can be top (default), center, or bottom')
quality = 100  # JPEG quality for every intermediate frame
args = parser.parse_args()
duration = args.duration # 0.25
W = args.width # 720
H = args.height # 576
titleDuration = 1
epubFont = 1
if args.fontratio:
    fontratio = args.fontratio
else:
    fontratio = int(W/80.0) # 6
bgColor = decode_hexcolor(args.backgroundcolor)
fontColor = decode_hexcolor(args.textcolor)
if args.padding:
    padding = args.padding
else:
    padding = int(W/7.0)
wrapchars = args.wrapchars
filename = args.epub
outfilename = args.output
###############################
# Make the Trailer
###############################
# copy file
# work on a throwaway copy so the user's epub is never modified
copy = 'new-' + filename
shutil.copy2(filename, 'new-' + filename)
# rename file with zip extension
# an epub is a zip archive; '.epub'[:-4] keeps the dot, so + 'zip' works
if filename.endswith('.epub'):
    os.rename(copy, copy[:-4] + 'zip')
    zipname = copy[:-4] + 'zip'
    print "converted extension for " + str(zipname)
else:
    print "File is not an Epub"
    zipname = filename
# unzip ePub
# extract everything into ./temp for later scanning
fh = open(str(zipname), 'rb')
z = zipfile.ZipFile(fh)
for name in z.namelist():
    outpath = "temp"
    z.extract(name, outpath)
fh.close()
# remove copy
os.remove(zipname)
# find content.opf
# the OPF package document holds the book metadata (title/authors/...)
lookfor = "content.opf"
for root, dirs, files in os.walk('temp'):
    print "searching", root
    if lookfor in files:
        opfpath = join(root, lookfor)
        print "found: %s" % join(root, lookfor)
        break
def innerhtml(tag):
    """Return the tag's text followed by the serialized child elements."""
    pieces = [tag.text or '']
    for child in tag:
        pieces.append(ET.tostring(child))
    return ''.join(pieces)
# parse Dublin Core metadata out of content.opf
t = ET.parse(opfpath)
#Get Title
titletag = t.findall('.//{http://purl.org/dc/elements/1.1/}title')
title = titletag[0].text
# Get Authors
cap = t.findall('.//{http://purl.org/dc/elements/1.1/}creator')
authors = []
for tag in cap:
    inner = innerhtml(tag)
    authors.append(inner)
# Get Publisher
pubtag = t.findall('.//{http://purl.org/dc/elements/1.1/}publisher')
publisher = None
if pubtag:
    publisher = pubtag[0].text
# Get Date
datetag = t.findall('.//{http://purl.org/dc/elements/1.1/}date')
date = None
if datetag:
    date = datetag[0].text
# Show Metadata
if title:
    print "Title:", title
if authors:
    print "Authors:"
    for n in authors:
        print " ",n
if publisher:
    print "Publisher:", publisher
if date:
    print "Date:", date
# create new directory
# all intermediate frames are written into ./new-pictures
if not os.path.exists('new-pictures'):
    os.makedirs('new-pictures')
# Search for fonts
# priority: explicit --font, then any .otf/.ttf inside the epub
fonts = []
if args.font:
    fonts.append(args.font)
for root, subdirs, files in os.walk('temp'):
    for file in files:
        if os.path.splitext(file)[1].lower() in ('.otf', '.ttf'):
            fonts.append(os.path.join(root, file))
if len(fonts) == 0:
    # try for fonts in the working directory
    for root, subdirs, files in os.walk('.'):
        for file in files:
            if os.path.splitext(file)[1].lower() in ('.otf', '.ttf'):
                fonts.append(os.path.join(root, file))
            break
        if len(fonts) > 0:
            break
def _bytes(x, encoding="latin-1"):
""" imagemagick seems to want latin-1 bytes """
if type(x) == unicode:
return x.encode(encoding, errors='ignore')
return x
def screen(text, seq, fontsize, frames, valign=args.valign):
    """Render `text` as `frames` identical JPEG slides.

    Frames are written to new-pictures/<seq>-<i>.jpeg so that a lexical
    sort of the directory yields the slideshow order.
    """
    # pick the epub-supplied font when available, otherwise fall back to
    # PIL's built-in default (the original left usr_font undefined on the
    # fonts-present-but-epubFont-falsy path)
    if fonts and epubFont:
        usr_font = ImageFont.truetype(fonts[0], fontsize)
    else:
        usr_font = ImageFont.load_default()
    lines = textwrap.wrap(text, width=wrapchars)
    for frame in range(0, frames):
        image = Image.new("RGBA", (W, H), bgColor)
        d_usr = ImageDraw.Draw(image)
        # preflight: total height of the wrapped block, to position it
        text_height = 0
        for line in lines:
            text_height += usr_font.getsize(_bytes(line))[1]
        if valign == "center":
            y_text = (H/2) - (text_height/2)
        elif valign == "bottom":
            y_text = H - padding - text_height
        else:
            # "top" (the default)
            y_text = padding
        for line in lines:
            height = usr_font.getsize(_bytes(line))[1]
            d_usr.text((padding, y_text), _bytes(line), fontColor, font=usr_font)
            y_text += height
        filename = 'new-pictures/' + seq + '-' + str(frame) + '.jpeg'
        image.save(filename, 'jpeg', quality=quality)
# Create Screen for Title
screen(title, '00', int(fontratio * 6), titleDuration*8)
# Create Screen for 'by'
screen('A book by', '01', int(fontratio * 2.8), titleDuration*4)
# Create Screens for Authors
i = 0
for name in authors:
    screen(name, '02-' + str(i), int(fontratio * 4), titleDuration*4)
    i = i + 1
if pubtag:
    # Create Screens for "published by"
    screen('Published by', '03', int(fontratio * 2.8), titleDuration*4)
    # Create Screen for Publisher
    screen(publisher, '04', int(fontratio * 4), titleDuration*4)
# Search for pictures
epubImages = []
for root, subdirs, files in os.walk('temp'):
    for file in files:
        if os.path.splitext(file)[1].lower() in ('.jpg', '.jpeg', '.png', '.gif', '.bmp'):
            epubImages.append(os.path.join(root, file))
# Convert and resize all Picz to Jpeg
# each image is letterboxed onto a W x H canvas; integer '/' centers it (py2)
i = 0
for picture in epubImages:
    background = Image.new("RGBA", (W,H), bgColor)
    image = Image.open(picture)
    image.thumbnail((W, H), Image.CUBIC)
    (w, h) = image.size
    background.paste(image, ((W-w)/2,(H-h)/2))
    pictitle = 'new-pictures/05-respic' + str(i) + '.jpeg'
    background.save(pictitle, quality=quality)
    i = i + 1
# add some black frames
# despite the name these use bgColor, not black
for x in range(0, titleDuration):
    seq = '06-' + str(x)
    image = Image.new("RGBA", (W,H), bgColor)
    filename = 'new-pictures/' + seq + '-black.jpeg'
    image.save(filename, 'jpeg', quality=quality)
# create screen for date
if date:
    screen('In your Public Library since ' + date, '07', int(fontratio * 3), titleDuration*4)
# add some black frames
for x in range(0, titleDuration):
    seq = '08-' + str(x)
    image = Image.new("RGBA", (W,H), bgColor)
    filename = 'new-pictures/' + seq + '-black.jpeg'
    image.save(filename, 'jpeg', quality=quality)
# Make gif!!
# frames are gathered in lexical order, which matches the seq numbering
images = [Image.open(image) for image in sorted(glob.glob("new-pictures/*.jpeg"))]
if outfilename == None:
    outfilename = 'trailer-' + title.replace("/", "").replace(" ", "_").strip() + '.gif'
writeGif(outfilename, ima |
jmohr/conrad | conrad/test/resources/rest_test_server.py | Python | bsd-3-clause | 1,320 | 0.007576 | #!/usr/bin/env python
import flask
# in-memory fixture data for the test REST server, keyed by artist id
artist_map = {
    1: {
        'id': 1,
        'name': 'James Brown',
    },
    2: {
        'id': 2,
        'name': 'Richard D. James',
    },
    3: {
        'id': 3,
        'name': 'Fugazi',
    },
    4: {
        'id': 4,
        'name': 'Dinosaur Jr.',
    },
}
app = flask.Flask('conrad_test')
@app.route('/api/artist/<int:id>')
def get_artist(id):
    """Return one artist as JSON; 404 when the id is unknown."""
    # `in` replaces dict.has_key(), which was removed in Python 3
    if id not in artist_map:
        flask.abort(404)
    return flask.jsonify(artist_map[id])
@app.route('/api/artist', methods=['POST'])
def create_artist():
    """Create an artist from the 'name' form field and return the record."""
    # max() on an empty dict raises ValueError (e.g. after deleting every
    # artist), so seed the first id at 1 in that case
    id = max(artist_map) + 1 if artist_map else 1
    artist_map[id] = {
        'id': id,
        'name': flask.request.form['name'],
    }
    return flask.jsonify(artist_map[id])
@app.route('/api/artist/<int:id>', methods=['PUT'])
def update_artist(id):
    """Replace an existing artist's record; 404 when the id is unknown."""
    # `in` replaces dict.has_key(), which was removed in Python 3
    if id not in artist_map:
        flask.abort(404)
    artist_map[id] = {
        'id': id,
        'name': flask.request.form['name'],
    }
    return flask.jsonify(artist_map[id])
@app.route('/api/artist/<int:id>', methods=['DELETE'])
def delete_artist(id):
    """Delete an artist; 404 when the id is unknown."""
    # `in` replaces dict.has_key(), which was removed in Python 3
    if id not in artist_map:
        flask.abort(404)
    del artist_map[id]
    return flask.jsonify({})
@app.route('/api/artists')
def list_artists():
    # full collection dump keyed by artist id
    return flask.jsonify(artist_map)
if __name__ == '__main__': app.run()
|
dimagi/commcare-hq | corehq/apps/registration/migrations/0002_alter_request_ip.py | Python | bsd-3-clause | 477 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-02-13 22:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('registration', '0001_initial'),
    ]
    operations = [
        # alter SQLRegistrationRequest.request_ip to a nullable CharField(31)
        migrations.AlterField(
            model_name='sqlregistrationrequest',
            name='request_ip',
            field=models.CharField(max_length=31, null=True),
        ),
    ]
|
davidnotplay/mpiece | mpiece/renderer.py | Python | bsd-3-clause | 5,413 | 0.027526 | """
mpiece.renderer
~~~~~~~~~~~~~~~~~~~~~~~~~
Renderer Classes.
:license: BSD, see LICENSE for details.
:author: David Casado Martinez <dcasadomartinez@gmail.com>
"""
class Renderer(object):
    """
    Base renderer class.
    All renderer classes should be subclasses of this class.
    This class and their subclass are used in the ``mpiece.markdown()`` function or
    in the ``mpiece.core.MPiece.parse()`` method.
    """
    def __init__(self):
        # map token name -> bound render_* method, discovered by reflection
        self.all_render_funcs = {}
        for item in dir(self):
            if item.startswith('render_'):
                self.all_render_funcs[item[7:]] = getattr(self, item)
    def render__only_text(self, text):
        return text
    def post_process_text(self, text):
        """ Process the rendered text.
        :param str text: Rendered text
        :return str:
        """
        return text
class HtmlRenderer(Renderer):
    """
    Transform the lexer results in html code.
    :param bool use_underline:
        - ``True``: The markdown ``_text_`` will transform in ``<ins>text</ins>``
        - ``False``: The markdown ``_text_`` will transform in ``<em>text</em>``
    :param bool use_paragraph:
        - ``True``: The new line in the markdown text will transform in ``<p></p>`` html tag.
        - ``False``: The new line in the markdown text will transform in ``<br>`` html tag.
    :param bool escape_html:
        - ``True``: Escape the html tag in the markdown text.
        - ``False``: No escape the html tag in the markdown text.
    """
    #: Blacklist of link schemes
    scheme_blacklist = ('javascript', 'data', 'vbscript')
    def __init__(self, use_underline=True, use_paragraph=True, escape_html=True):
        super(HtmlRenderer, self).__init__()
        self.use_underline = use_underline
        self.use_paragraph = use_paragraph
        self.escape_html = escape_html
    def escape(self, text):
        """ Escape dangerous html characters.
        :param str text: Html text without escape.
        :return: Html text escaped.
        """
        if not self.escape_html or text is None:
            return text
        # '&' must be replaced first so already-escaped entities stay intact
        return (
            text.replace('&', '&amp;').replace('<', '&lt;')
            .replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;')
        )
    def escape_args(self, *args):
        """ Escape html characters of all arguments
        :param [str] \*args: List of html text without escape.
        :return: list of all arguments escaped.
        """
        return tuple((self.escape(arg) for arg in args))
    def escape_link(self, link, smart_amp=True):
        """ Check if a link has an invalid scheme.
        Also transform the ``&`` character in ``&amp;`` character.
        :param str link: Link checked.
        :param bool smart_amp: Transform the '&' characters in '&amp;' characters.
        :return: Return the link if the scheme is valid. If not return an empty string.
        """
        data = link.split(':', 1)
        scheme = data[0]
        if scheme in self.scheme_blacklist:
            return ''
        if smart_amp:
            return link.replace('&', '&amp;')
        return link
    #
    # Render functions
    #
    def render_escape_backslash(self, text):
        return self.escape(text)
    def render_bold(self, text):
        return '<strong>%s</strong>' % self.escape(text)
    def render_italic(self, text):
        return '<em>%s</em>' % self.escape(text)
    def render_underline(self, text):
        if self.use_underline:
            return '<ins>%s</ins>' % self.escape(text)
        else:
            # fixed: previously called self.use_italic(), a method that does
            # not exist anywhere in this class hierarchy (AttributeError)
            return self.render_italic(text)
    def render_strike(self, text):
        return '<del>%s</del>' % self.escape(text)
    def render_code_inline(self, code):
        return '<code>%s</code>' % self.escape(code)
    def render_link(self, text, href, title=''):
        text = self.escape(text)
        href = self.escape_link(href)
        if title:
            return '<a href="%s" title="%s">%s</a>' % (href, self.escape(title), text)
        return '<a href="%s">%s</a>' % (href, text)
    def render_image(self, src, alt, title=''):
        alt = self.escape(alt)
        src = self.escape_link(src)
        if title:
            title = self.escape(title)
            return '<img src="%s" alt="%s" title="%s">' % (src, alt, title)
        return '<img src="%s" alt="%s">' % (src, alt)
    def render_new_line(self, text):
        if self.use_paragraph:
            return '<p>%s</p>' % self.escape(text) if text else ''
        else:
            return '%s<br/>' % self.escape(text) if text else ''
    def render_olist(self, text, start):
        text = self.escape(text)
        return '<ol start="%d">%s</ol>' % (start, text)
    def render_olist_item(self, text):
        return '<li>%s</li>' % self.escape(text)
    def render_ulist(self, text, start):
        # `start` is accepted for signature parity with render_olist but unused
        return '<ul>%s</ul>' % self.escape(text)
    def render_ulist_item(self, text):
        return '<li>%s</li>' % self.escape(text)
    def render_blockquote(self, text):
        return '<blockquote>%s</blockquote>' % self.escape(text)
    def render_header(self, text, level):
        return '<h{level}>{text}</h{level}>'.format(level=level, text=self.escape(text))
    def render_fenced_code(self, code, lang='', title=''):
        return '<pre>%s</pre>' % self.escape(code)
    def render_break_line(self, symbol):
        return '<hr/>'
    def render_table(self, text):
        return '<table>%s</table>' % text
    def render_table_header(self, text):
        return '<thead><tr>%s</tr></thead>' % text
    def render_table_header_cell(self, text):
        return '<th>%s</th>' % text
    def render_table_body(self, text):
        return '<tbody>%s</tbody>' % text
    def render_table_body_row(self, text):
        return '<tr>%s</tr>' % text
    def render_table_body_cell(self, text, align=''):
        if align and align != 'left':
            return '<td style="text-align:%s;">%s</td>' % (align, text)
        else:
            return '<td>%s</td>' % text
|
ballotify/django-backend | ballotify/apps/accounts/factories.py | Python | agpl-3.0 | 353 | 0 | import factory
from .models import User
USER_PASSWORD = "2fast2furious"
class UserFactory(factory.DjangoModelFactory):
    """Deterministic User fixture; email is unique per sequence counter."""
    name = "John Doe"
    email = factory.Sequence(lambda n: "john{}@example.com".format(n))
    # hashes USER_PASSWORD via User.set_password after instance creation
    password = factory.PostGenerationMethodCall('set_password', USER_PASSWORD)
    gender = "male"
    class Meta:
        model = User
|
yashchandak/GNN | Sample_Run/DOPE/parser.py | Python | mit | 4,549 | 0.009453 | import argparse
class Parser(object):
    """Builds the command-line argument parser for the DOPE experiments."""
    def __init__(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("--path", default='/home/priyesh/Desktop/Expt_Deep_CC',
                            help="Base path for the code")
        parser.add_argument("--project", default='DOPE', help="Project folder")
        parser.add_argument("--folder_suffix", default='Default', help="folder name suffix")
        parser.add_argument("--run_test", default=False, type=self.str2bool, help="Run test at every inner fit")
        parser.add_argument("--dataset", default='citeseer', help="Dataset to evluate")
        parser.add_argument("--labels", default='labels_random', help="Label type")
        parser.add_argument("--percents", default='20', help="Training percent")
        parser.add_argument("--folds", default='1_2_3_4_5', help="Training folds")
        parser.add_argument("--max_walks", default=0, help="Maximum No of walks | 0 - sample by degree", type=int)
        parser.add_argument("--max_depth", default=1, help="Maximum path depth", type=int)
        parser.add_argument("--retrain", default=False, type=self.str2bool, help="Retrain flag")
        parser.add_argument("--debug", default=False, type=self.str2bool, help="Debug flag")
        parser.add_argument("--batch_size", default=30, help="Batch size", type=int)
        parser.add_argument("--lr", default=0.01, help="Learning rate", type=float)
        parser.add_argument("--lu", default=0.75, help="Label update rate", type=float)
        parser.add_argument("--l2", default=1e-2, help="L2 loss", type=float)
        parser.add_argument("--opt", default='adam', help="Optimizer type (adam, rmsprop, sgd)")
        parser.add_argument("--pat", default=2, help="Patience", type=int)
        parser.add_argument("--pat_inc", default=2, help="Patience Increase", type=int)
        parser.add_argument("--pat_improve", default=1, help="Improvement threshold for patience", type=float)
        parser.add_argument("--save_after", default=0, help="Save after epochs", type=int)
        parser.add_argument("--val_freq", default=1, help="Validation frequency", type=int)
        parser.add_argument("--bin_upd", default=0, help="Binary updates for labels", type=int)
        parser.add_argument("--gradients", default=0, help="Print gradients of trainable variables", type=int)
        parser.add_argument("--max_outer", default=1, help="Maximum outer epoch", type=int)
        parser.add_argument("--max_inner", default=200, help="Maximum inner epoch", type=int)
        parser.add_argument("--boot_epochs", default=1, help="Epochs for first bootstrap", type=int)
        parser.add_argument("--boot_reset", default=1, help="Reset weights after first bootstrap", type=int)
        parser.add_argument("--concat", default=0, help="Concat attribute to hidden state", type=int)
        parser.add_argument("--drop_in", default=0.5, help="Dropout for input", type=float)
        parser.add_argument("--drop_out", default=0.6, help="Dropout for pre-final layer", type=float)
        parser.add_argument("--wce", default=0, help="Weighted cross entropy", type=int)
        parser.add_argument("--attention", default=1, help="Attention module (0: no, 1: HwC, 2: tanh(wH + wC))",
                            type=int)
        parser.add_argument("--ssl", default=0, help="Semi-supervised loss", type=int)
        parser.add_argument("--inner_converge", default=0, help="Convergence during bootstrap", type=int)
        parser.add_argument("--cell", default='myRNN', help="RNN cell (LSTM, myLSTM, GRU, LSTMgated)")
        parser.add_argument("--reduce", default=0, help="Reduce Attribute dimensions to", type=int)
        parser.add_argument("--hidden", default=16, help="Hidden units", type=int)
        parser.add_argument("--node_loss", default=0, help="Node Loss", type=int)
        parser.add_argument("--path_loss", default=0, help="Path Loss", type=int)
        parser.add_argument("--consensus_loss", default=0, help="Consensus Loss", type=int)
        self.parser = parser
    def str2bool(self, text):
        """Map the literal strings 'True'/'False' to booleans; reject anything else."""
        if text == 'True':
            arg = True
        elif text == 'False':
            arg = False
        else:
            raise argparse.ArgumentTypeError('Boolean value expected.')
        return arg
    def get_parser(self):
        """Return the configured argparse.ArgumentParser."""
        return self.parser
|
MungoRae/home-assistant | tests/components/alarm_control_panel/test_manual.py | Python | apache-2.0 | 16,012 | 0 | """The tests for the manual Alarm Control Panel component."""
from datetime import timedelta
import unittest
from unittest.mock import patch
from homeassistant.setup import setup_component
from homeassistant.const import (
STATE_ALARM_DISARMED, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_NIGHT, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED)
from homeassistant.components import alarm_control_panel
import homeassistant.util.dt as dt_util
from tests.common import fire_time_changed, get_test_home_assistant
CODE = 'HELLO_CODE'
class TestAlarmControlPanelManual(unittest.TestCase):
"""Test the manual alarm module."""
    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        # fresh test Home Assistant instance for every test case
        self.hass = get_test_home_assistant()
    def tearDown(self):  # pylint: disable=invalid-name
        """Stop down everything that was started."""
        # releases the threads/resources held by the test instance
        self.hass.stop()
def test_arm_home_no_pending(self):
"""Test arm home method."""
self.assertTrue(setup_component(
self.hass, alarm_control_panel.DOMAIN,
{'alarm_control_panel': {
'platform': 'manual',
'name': 'test',
'code': CODE,
'pending_time': 0,
'disarm_after_trigger': False
}}))
entity_id = 'alarm_control_panel.test'
self.assertEqual(STATE_ALARM_DISARMED,
self.hass.states.get(entity_id).state)
alarm_control_panel.alarm_arm_home(self.hass, CODE)
self.hass.block_till_done()
self.assertEqual(STATE_ALARM_ARME | D_HOME,
self.hass.states.get(entity_id).state)
def test_arm_home_with_pending(self):
"""Test arm home method."""
self.assertTrue(setup_component(
self.hass, alarm_control_panel.DOMAIN,
{'alarm_control_panel': {
'platform': 'manual',
'name': 'test',
| 'code': CODE,
'pending_time': 1,
'disarm_after_trigger': False
}}))
entity_id = 'alarm_control_panel.test'
self.assertEqual(STATE_ALARM_DISARMED,
self.hass.states.get(entity_id).state)
alarm_control_panel.alarm_arm_home(self.hass, CODE, entity_id)
self.hass.block_till_done()
self.assertEqual(STATE_ALARM_PENDING,
self.hass.states.get(entity_id).state)
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(('homeassistant.components.alarm_control_panel.manual.'
'dt_util.utcnow'), return_value=future):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
self.assertEqual(STATE_ALARM_ARMED_HOME,
self.hass.states.get(entity_id).state)
    def test_arm_home_with_invalid_code(self):
        """Attempt to arm home without a valid code."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual',
                'name': 'test',
                'code': CODE,
                'pending_time': 1,
                'disarm_after_trigger': False
            }}))
        entity_id = 'alarm_control_panel.test'
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
        # wrong code: the panel must refuse to arm and stay disarmed
        alarm_control_panel.alarm_arm_home(self.hass, CODE + '2')
        self.hass.block_till_done()
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
    def test_arm_away_no_pending(self):
        """Test arm away method with no pending time."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual',
                'name': 'test',
                'code': CODE,
                'pending_time': 0,
                'disarm_after_trigger': False
            }}))
        entity_id = 'alarm_control_panel.test'
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
        alarm_control_panel.alarm_arm_away(self.hass, CODE, entity_id)
        self.hass.block_till_done()
        self.assertEqual(STATE_ALARM_ARMED_AWAY,
                         self.hass.states.get(entity_id).state)
    def test_arm_away_with_pending(self):
        """Test arm away method with a pending time."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual',
                'name': 'test',
                'code': CODE,
                'pending_time': 1,
                'disarm_after_trigger': False
            }}))
        entity_id = 'alarm_control_panel.test'
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
        alarm_control_panel.alarm_arm_away(self.hass, CODE)
        self.hass.block_till_done()
        self.assertEqual(STATE_ALARM_PENDING,
                         self.hass.states.get(entity_id).state)
        # advance mocked time past the pending window so the arm completes
        future = dt_util.utcnow() + timedelta(seconds=1)
        with patch(('homeassistant.components.alarm_control_panel.manual.'
                    'dt_util.utcnow'), return_value=future):
            fire_time_changed(self.hass, future)
            self.hass.block_till_done()
        self.assertEqual(STATE_ALARM_ARMED_AWAY,
                         self.hass.states.get(entity_id).state)
    def test_arm_away_with_invalid_code(self):
        """Attempt to arm away without a valid code."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual',
                'name': 'test',
                'code': CODE,
                'pending_time': 1,
                'disarm_after_trigger': False
            }}))
        entity_id = 'alarm_control_panel.test'
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
        # wrong code: the panel must refuse to arm and stay disarmed
        alarm_control_panel.alarm_arm_away(self.hass, CODE + '2')
        self.hass.block_till_done()
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
    def test_arm_night_no_pending(self):
        """Test arm night method."""
        self.assertTrue(setup_component(
            self.hass, alarm_control_panel.DOMAIN,
            {'alarm_control_panel': {
                'platform': 'manual',
                'name': 'test',
                'code': CODE,
                'pending_time': 0,
                'disarm_after_trigger': False
            }}))
        entity_id = 'alarm_control_panel.test'
        self.assertEqual(STATE_ALARM_DISARMED,
                         self.hass.states.get(entity_id).state)
        # zero pending time: arming night is immediate
        alarm_control_panel.alarm_arm_night(self.hass, CODE)
        self.hass.block_till_done()
        self.assertEqual(STATE_ALARM_ARMED_NIGHT,
                         self.hass.states.get(entity_id).state)
def test_arm_night_with_pending(self):
"""Test arm night method."""
self.assertTrue(setup_component(
self.hass, alarm_control_panel.DOMAIN,
{'alarm_control_panel': {
'platform': 'manual',
'name': 'test',
'code': CODE,
'pending_time': 1,
'disarm_after_trigger': False
}}))
entity_id = 'alarm_control_panel.test'
self.assertEqual(STATE_ALARM_DISARMED,
self.hass.states.get(entity_id).state)
alarm_control_panel.alarm_arm_night(self.hass, CODE, entity_id)
self.hass.block_till_done()
self.assertEqual(STATE_ALARM_PENDING,
self.hass.states.get(entity_id).state)
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(('homeassistant.components.alarm_control_panel.manua |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/host/lib/scons-2.3.1/SCons/Scanner/Dir.py | Python | gpl-2.0 | 3,822 | 0.002616 | #
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Scanner/Dir.py 2014/03/02 14:18:15 garyo"
import SCons.Node.FS
import SCons.Scanner
def only_dirs(nodes):
    """Return only those nodes that disambiguate to directory Nodes."""
    # repaired extraction-garbled isinstance target; a list comprehension
    # replaces list(filter(lambda, ...)) with identical results
    return [n for n in nodes if isinstance(n.disambiguate(), SCons.Node.FS.Dir)]
def DirScanner(**kw):
    """Return a prototype Scanner instance for scanning
    directories for on-disk files"""
    kw.update(node_factory=SCons.Node.FS.Entry, recursive=only_dirs)
    return SCons.Scanner.Base(scan_on_disk, "DirScanner", **kw)
def DirEntryScanner(**kw):
    """Return a prototype Scanner instance for "scanning"
    directory Nodes for their in-memory entries"""
    kw.update(node_factory=SCons.Node.FS.Entry, recursive=None)
    return SCons.Scanner.Base(scan_in_memory, "DirEntryScanner", **kw)
# bookkeeping files that must never be treated as scannable entries;
# stored as a dict (used as a set) under both the raw and normcased name
skip_entry = {}
skip_entry_list = [
    '.',
    '..',
    '.sconsign',
    # Used by the native dblite.py module.
    '.sconsign.dblite',
    # Used by dbm and dumbdbm.
    '.sconsign.dir',
    # Used by dbm.
    '.sconsign.pag',
    # Used by dumbdbm.
    '.sconsign.dat',
    '.sconsign.bak',
    # Used by some dbm emulations using Berkeley DB.
    '.sconsign.db',
]
for skip in skip_entry_list:
    skip_entry[skip] = 1
    skip_entry[SCons.Node.FS._my_normcase(skip)] = 1
# predicate: True for names that should be scanned
do_not_scan = lambda k: k not in skip_entry
def scan_on_disk(node, env, path=()):
    """
    Scan a directory for the files and directories it contains on disk.
    Looking each entry up adds it to the in-memory Node tree representation
    of the file system, after which the in-memory scan returns them all.
    """
    try:
        listing = node.fs.listdir(node.abspath)
    except (IOError, OSError):
        return []
    make_entry = node.Entry
    for name in listing:
        if do_not_scan(name):
            # Add ./ to the beginning of the file name so if it begins with a
            # '#' we don't look it up relative to the top-level directory.
            make_entry('./' + name)
    return scan_in_memory(node, env, path)
def scan_in_memory(node, env, path=()):
    """
    "Scans" a Node.FS.Dir for its in-memory entries.
    """
    try:
        entries = node.entries
    except AttributeError:
        # Not a Node.FS.Dir (or not enough like one for our purposes); this
        # happens when a mixed target list (Dirs and Files) starts with a Dir.
        return []
    names = sorted(k for k in entries.keys() if do_not_scan(k))
    return [entries[n] for n in names]
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
bmi-forum/bmi-pyre | pythia-0.8/packages/pyre/applications/app.py | Python | gpl-2.0 | 3,881 | 0.002577 | #!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from pyre.applications.Script import Script
class App(Script):
class Inventory(Script.Inventory):
import pyre.inventory
name = pyre.inventory.str("name", default="simple")
name.meta['tip'] = 'the name of the application to generate'
path = pyre.inventory.list("path")
path.meta['tip'] = 'a list of directories to include in the python path'
    def main(self, *args, **kwds):
        """Generate a skeleton pyre application script and write it to disk."""
        # Render the template through the weaver, which adds the
        # language-specific framing (shebang line, comment banners).
        self.weaver.begin()
        self.weaver.contents(self._template())
        self.weaver.end()
        appname = self.inventory.name.capitalize()
        filename = self.inventory.name + '.py'
        print "creating application '%s' in '%s'" % (appname, filename)
        stream = file(filename, "w")
        for line in self.weaver.document():
            print >> stream, line
        stream.close()
        import os
        # Make the generated script executable (mode rwxrwxr-x).
        os.chmod(filename, 0775)
        return
def __init__(self):
Script.__init__(self, "app")
return
def _init(self):
Script._init(self)
self.weaver.language = 'python'
return
def _template(self):
name = self.inventory.name
appName = name.capitalize()
text = [
"",
"",
"def main():",
"",
"",
" from pyre.applications.Script i | mport Script",
"",
"",
" class %sApp(Script):" % appName,
"",
"",
| " class Inventory(Script.Inventory):",
"",
" import pyre.inventory",
"",
" name = pyre.inventory.str('name', default='world')",
" name.meta['tip'] = 'the entity to greet'",
"",
"",
" def main(self, *args, **kwds):",
" print 'Hello %s!' % self.friend",
" return",
"",
"",
" def __init__(self):",
" Script.__init__(self, %r)" % name,
" self.friend = ''",
" return",
"",
"",
" def _defaults(self):",
" Script._defaults(self)",
" return",
"",
"",
" def _configure(self):",
" Script._configure(self)",
" self.friend = self.inventory.name",
" return",
"",
"",
" def _init(self):",
" Script._init(self)",
" return",
"",
"",
" app = %sApp()" % appName,
" return app.run()",
"",
"",
"# main",
"if __name__ == '__main__':",
]
path = self.inventory.path
if path:
text += [
" # adjust the python path",
" import sys",
" sys.path = %r + sys.path" % path,
""
]
text += [
" # invoke the application shell",
" main()",
"",
]
return text
# main
if __name__ == "__main__":
app = App()
app.run()
# version
__id__ = "$Id: app.py,v 1.3 2005/03/10 21:34:42 aivazis Exp $"
# End of file
|
xuru/pyvisdk | pyvisdk/do/ip_pool.py | Python | mit | 1,191 | 0.010915 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def IpPool(vim, *args, **kwargs):
'''Specifications of the network configuration to be used on a network. This is
used to generate IP addresses and for self-customization of vApps.'''
obj = vim.client.factory.create('ns0:IpPool')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'dnsDomain', 'dnsSearchPath', 'hostPrefix', 'httpProxy', 'id', 'ipv4Config',
'ipv6Config', 'name', 'networkAssociation', 'dynamicProperty', 'dynamicType' ]
for name, arg in | zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if | name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
|
MLAB-project/weewx | bin/weewx/drivers/cc3000.py | Python | gpl-3.0 | 42,551 | 0.000705 | #!/usr/bin/env python
#
# Copyright 2014 Matthew Wall
# See the file LICENSE.txt for your rights.
"""Driver for CC3000 data logger
http://www.rainwise.com/products/attachments/6832/20110518125531.pdf
There are a few variants:
CC-3000_ - __
| |
| 41 = 418 MHz
| 42 = 433 MHz
| __ = 2.4 GHz (LR compatible)
R = serial (RS232, RS485)
_ = USB 2.0
The CC3000 communicates using FTDI USB serial bridge. The CC3000R has both
RS-232 and RS-485 serial ports, only one of which may be used at a time.
A long range (LR) version transmits up to 2 km using 2.4GHz.
The RS232 communicates using 115200 N-8-1
The instrument cluster contains a DIP switch controls with value 0-3 and a
default of 0. This setting prevents interference when there are multiple
weather stations within radio range.
The CC3000 includes a temperature sensor - that is the source of inTemp. The
manual indicates that the CC3000 should run for 3 or 4 hours before applying
any calibration to offset the heat generated by CC3000 electronics.
The CC3000 uses 4 AA batteries to maintain its clock. Use only rechargeable
NiMH batteries.
The logger contains 2MB of memory, with a capacity of 49834 records (over 11
months of data at a 10 minute logging interval). The exact capacity depends
on the sensors; the basic sensor record is 42 bytes.
The logger does not delete old records when it fills up; once the logger is
full, new data are lost. So the driver must periodically clear the logger
memory.
This driver does not support hardware record_generation. It does support
catchup on startup.
If you request many history records then interrupt the receive, the logger will
continue to send history records until it sends all that were requested. As a
result, any queries made while the logger is still sending will fail.
The rainwise rain bucket measures 0.01 inches per tip. The logger firmware
automatically converts the bucket tip count to the measure of rain in ENGLISH
or METRIC units.
Logger uses the following units:
ENGLISH METRIC
wind mph m/s
rain inch mm
pressure inHg mbar
temperature F C
This driver was tested with:
Rainwise CC-3000 Version: 1.3 Build 006 Sep 04 2013
Rainwise CC-3000 Version: 1.3 Build 016 Aug 21 2014
"""
# FIXME: confirm that rain field in archive records is a total, not a delta
# FIXME: figure out whether logger retains non-fixed interval data
# FIXME: clear logger memory after successful read
# FIXME: periodically clear the logger memory while running
from __future__ import with_statement
import datetime
import serial
import string
import syslog
import | time
import weeutil.weeutil
import weewx.drivers
DRIVER_NAME = 'CC3000'
DRIVER_VERSION = '0.16'
def loader(config_dict, engine):
return CC3000Driver(**config_dict[DRIVER_NAME])
def configurator_loader(config_dict):
return CC3000Configurator()
def confeditor_loader():
return CC3000ConfEditor()
DEBUG_SERIAL = 0
DEBUG_CHECKSUM = 0
DEBUG_OPENCLOSE = 0
def logmsg(level, msg):
syslog.syslog(level, 'cc3000: %s' % msg)
def logdbg(msg):
logmsg(syslog.LOG_DEBUG, msg)
| def loginf(msg):
logmsg(syslog.LOG_INFO, msg)
def logerr(msg):
logmsg(syslog.LOG_ERR, msg)
class ChecksumError(weewx.WeeWxIOError):
    """Base class for checksum/CRC failures while talking to the CC3000."""
    def __init__(self, msg):
        weewx.WeeWxIOError.__init__(self, msg)
class ChecksumMismatch(ChecksumError):
    """Raised when a computed checksum differs from the one received."""

    def __init__(self, a, b, buf=None):
        # Include the raw buffer (when available) to aid debugging.
        message = "Checksum mismatch: 0x%04x != 0x%04x" % (a, b)
        if buf is None:
            ChecksumError.__init__(self, message)
        else:
            ChecksumError.__init__(self, "%s (%s)" % (message, _fmt(buf)))
class BadCRC(ChecksumError):
    """Raised when the CRC reported by the station disagrees with ours.

    `a` is the locally computed CRC (int); `b` is the CRC value received
    from the logger; `buf`, when given, is the raw data that was
    checksummed and is appended to the message for debugging.
    """
    def __init__(self, a, b, buf=None):
        msg = "Bad CRC: 0x%04x != '%s'" % (a, b)
        if buf is not None:
            msg = "%s (%s)" % (msg, _fmt(buf))
        ChecksumError.__init__(self, msg)
class CC3000Configurator(weewx.drivers.AbstractConfigurator):
    def add_options(self, parser):
        """Register the CC3000-specific command-line options.

        Extends the base configurator's option set with actions for
        inspecting and changing the station: current/history readings,
        memory and rain-counter resets, clock, archive interval, units,
        daylight-savings settings, and radio channel.
        """
        super(CC3000Configurator, self).add_options(parser)
        parser.add_option("--info", dest="info", action="store_true",
                          help="display weather station configuration")
        parser.add_option("--current", dest="current", action="store_true",
                          help="display current weather readings")
        parser.add_option("--history", dest="nrecords", type=int, metavar="N",
                          help="display N records (0 for all records)")
        parser.add_option("--history-since", dest="nminutes", metavar="N",
                          type=int, help="display records since N minutes ago")
        parser.add_option("--clear-memory", dest="clear", action="store_true",
                          help="clear station memory")
        parser.add_option("--reset-rain", dest="reset", action="store_true",
                          help="reset the rain counter")
        parser.add_option("--get-clock", dest="getclock", action="store_true",
                          help="display station clock")
        parser.add_option("--set-clock", dest="setclock", action="store_true",
                          help="set station clock to computer time")
        parser.add_option("--get-interval", dest="getint", action="store_true",
                          help="display logger archive interval, in minutes")
        parser.add_option("--set-interval", dest="interval", metavar="N",
                          type=int,
                          help="set logging interval to N minutes (0-60)")
        parser.add_option("--get-units", dest="getunits", action="store_true",
                          help="show units of logger")
        parser.add_option("--set-units", dest="units", metavar="UNITS",
                          help="set units to METRIC or ENGLISH")
        parser.add_option('--get-dst', dest='getdst', action='store_true',
                          help='display daylight savings settings')
        parser.add_option('--set-dst', dest='dst',
                          metavar='mm/dd HH:MM,mm/dd HH:MM,[MM]M',
                          help='set daylight savings start, end, and amount')
        parser.add_option("--get-channel", dest="getch", action="store_true",
                          help="display the station channel")
        parser.add_option("--set-channel", dest="ch", metavar="CHANNEL",
                          type=int,
                          help="set the station channel")
def do_options(self, options, parser, config_dict, prompt):
self.driver = CC3000Driver(**config_dict[DRIVER_NAME])
if options.current:
print self.driver.get_current()
elif options.nrecords is not None:
for r in self.driver.station.gen_records(nrecords):
print r
elif options.clear:
self.clear_memory(prompt)
elif options.reset:
self.reset_rain(prompt)
elif options.getclock:
print self.driver.station.get_time()
elif options.setclock:
self.set_clock(prompt)
elif options.getdst:
print self.driver.station.get_dst()
elif options.dst is not None:
self.set_dst(options.setdst, prompt)
elif options.getint:
print self.driver.station.get_interval() * 60
elif options.interval is not None:
self.set_interval(options.interval / 60, prompt)
elif options.getunits:
print self.driver.station.get_units()
elif options.units is not None:
self.set_units(options.units, prompt)
elif options.getch:
print self.driver.station.get_channel()
elif options.ch is not None:
self.set_channel(options.ch, prompt)
else:
print "firmware:", self.driver.station.get_version()
print "time:", self.driver.station.get_time()
print "dst:", self.driver.station.get_dst()
print "units:", self.driver.station.get_units()
print "memory:", self.driver.station.get_memory_status()
print "interval:", self.driver.station.get_interval() * 60
print "channel: |
loveyoupeng/rt | modules/web/src/main/native/Tools/Scripts/webkitpy/style/checkers/cmake_unittest.py | Python | gpl-2.0 | 5,918 | 0.002366 | # Copyright (C) 2012 Intel Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for cmake.py."""
import unittest2 as unittest
from cmake import CMakeChecker
class CMakeCheckerTest(unittest.TestCase):
"""Tests CMakeChecker class."""
    def test_init(self):
        """Test __init__() method."""
        # A no-op stand-in; the checker should store the callable as-is.
        def _mock_handle_style_error(self):
            pass
        checker = CMakeChecker("foo.cmake", _mock_handle_style_error)
        # The handler passed to the constructor must be retained verbatim.
        self.assertEqual(checker._handle_style_error, _mock_handle_style_error)
def test_check(self):
"""Test check() method."""
errors = []
def _mock_handle_style_error(line_number, category, confidence,
message):
error = (line_number, category, confidence, message)
errors.append(error)
checker = CMakeChecker("foo.cmake", _mock_handle_style_error)
lines = [
'# This file is sample input for cmake_unittest.py and includes below problems:\n',
'IF ()',
'\tmessage("Error line with Tab")\n',
' message("Error line with endding spaces") \n',
' message( "Error line with space after (")\n',
' message("Error line with space before (" )\n',
' MESSAGE("Error line with upper case non-condtional command")\n',
' MESSage("Error line with upper case non-condtional command")\n',
' message("correct message line")\n',
'ENDif ()\n',
'\n',
'if()\n',
'endif ()\n',
'\n',
'macro ()\n',
'ENDMacro()\n',
'\n',
'function ()\n',
'endfunction()\n',
'\n',
'set(name a)\n',
'set(name a b c)\n',
'set(name a\n',
'b)\n',
'set(name',
'abc\n',
')\n',
'list(APPEND name a)\n',
'list(APPEND name\n',
'a\n',
'a\n',
')\n',
'list(APPEND name\n',
'b\n',
'a\n',
'\n',
'c/a.a\n',
'\n',
'c/b/a.a\n',
'${aVariable}\n',
'\n',
'c/c.c\n',
'\n',
'c/b/a.a\n',
')\n',
'list(REMOVE_ITEM name a)\n',
'list(REMOVE_ITEM name\n',
'a\n',
'\n',
'b\n',
')\n',
'list(REMOVE_ITEM name\n',
'a/a.a\n',
'a/b.b\n',
'b/a.a\n',
'\n',
'\n',
'c/a.a\n',
')\n',
]
checker.check(lines)
self.maxDiff = None
self.assertEqual(errors, [
(3, 'whitespace/tab', 5, 'Line contains tab character.'),
(2, 'command/lowercase', 5, 'Use lowercase command "if"'),
(4, 'whitespace/trailing', 5, 'No trailing spaces'),
(5, 'whitespace/parentheses', 5, 'No space after "("'),
(6, 'whitespace/parentheses', 5, 'No space before ")"'),
(7, 'command/lowercase', 5, 'Use lowercase command "message"'),
(8, 'command/lowercase', 5, 'Use lowercase command "message"'),
(10, 'command/lowercase', 5, 'Use lowercase command "endif"'),
(12, 'whitespace/parentheses', 5, 'One space between command "if" and its parentheses, should be "if ("'),
(15, 'whitespace/parentheses', 5, 'No space between command "macro" and its parentheses, should be "macro("'),
(16, 'command/lowercase', 5, 'Use lowercase command "endmacro"'),
(18, 'whitespace/parentheses', 5, 'No space between command "function" and its parentheses, should be "function("'),
(23, 'list/parentheses', 5, 'First listitem "a" should be in a new line.'),
(24, 'list/parentheses', 5, 'The parentheses after the last listitem "b" should be in a new line.'),
(31, 'list/ | duplicate', 5, 'The item "a" should be added only once to the list.'),
(35, 'list/order', 5, 'Alphabetical sorting problem. "a" should | be before "b".'),
(41, 'list/order', 5, 'Alphabetical sorting problem. "c/c.c" should be before "c/b/a.a".'),
(49, 'list/emptyline', 5, 'There should be no empty line between "a" and "b".'),
(54, 'list/emptyline', 5, 'There should be exactly one empty line instead of 0 between "a/b.b" and "b/a.a".'),
(57, 'list/emptyline', 5, 'There should be exactly one empty line instead of 2 between "b/a.a" and "c/a.a".'),
])
|
felipenaselva/repo.felipe | plugin.video.uwc/cam4.py | Python | gpl-2.0 | 3,936 | 0.017022 | '''
Ultimate Whitecream
Copyright (C) 2016 mortael
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib, urllib2, re, cookielib, os, sys, socket
import xbmc, xbmcplugin, xbmcgui, xbmcaddon
import utils, sqlite3
mobileagent = {'User-Agent': 'Mozilla/5.0 (iPad; CPU OS 9_2_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13D15 Safari/601.1'}
def Main():
utils.addDir('[COLOR red]Refresh Cam4 images[/COLOR]','',283,'',Folder=False)
utils.addDir('[COLOR hotpink]Featured[/COLOR]','http://www.cam4.com/featured/1',281,'',1)
utils.addDir('[COLOR hotpink]Females[/COLOR]','http://www.cam4.com/female/1',281,'',1)
utils.addDir('[COLOR hotpink]Couples[/COLOR]','http://www.cam4.com/couple/1',281,'',1)
utils.addDir('[COLOR hotpink]Males[/COLOR]','http://www.cam4.com/male/1',281,'',1)
utils.addDir('[COLOR hotpink]Transsexual[/COLOR]','http://www.cam4.com/shemale/1',281,'',1)
xbmcplugin.endOfDirectory(utils.addon_handle)
def clean_database(showdialog=False):
conn = sqlite3.connect(xbmc.translatePath("special://database/Textures13.db"))
try:
with conn:
list = conn.execute("SELECT id, cachedurl FROM texture WHERE url LIKE '%%%s%%';" % ".systemcdn.net")
for row in list:
conn.execute("DELETE FROM sizes WHERE idtexture LIKE '%s';" % row[0])
try: os.remove(xbmc.translatePath("special://thumbnails/" + row[1]))
except: pass
conn.execute("DELETE FROM texture WHERE url LIKE '%%%s%%';" % ".systemcdn.net")
if showdialog:
utils.notify('Finished','C | am4 images cleared')
except:
pass
def List(url, page):
if utils.addon.getSetting("chaturbate") == "true":
clean_database()
listhtml = utils.getHtml(url, url)
match = re.compile('profileDataBox"> <a href="([^"]+)".*?src="([^"]+)" title | ="Chat Now Free with ([^"]+)"', re.DOTALL | re.IGNORECASE).findall(listhtml)
for videourl, img, name in match:
name = utils.cleantext(name)
videourl = "http://www.cam4.com" + videourl
utils.addDownLink(name, videourl, 282, img, '', noDownload=True)
if re.search('<link rel="next"', listhtml, re.DOTALL | re.IGNORECASE):
npage = page + 1
url = url.replace('/'+str(page),'/'+str(npage))
utils.addDir('Next Page ('+str(npage)+')', url, 281, '', npage)
xbmcplugin.endOfDirectory(utils.addon_handle)
def Playvid(url, name):
    """Resolve the HLS stream URL from a cam page and hand it to Kodi."""
    page = utils.getHtml(url, '', mobileagent)
    pattern = re.compile('<video id=Cam4HLSPlayer class="SD" controls autoplay src="([^"]+)"> </video>', re.DOTALL | re.IGNORECASE)
    streams = pattern.findall(page)
    if not streams:
        # No playable stream found on the page; nothing to do.
        return
    stream_url = streams[0]
    thumb = xbmc.getInfoImage("ListItem.Thumb")
    item = xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=thumb)
    item.setInfo('video', {'Title': name, 'Genre': 'Porn'})
    item.setProperty("IsPlayable", "true")
    if int(sys.argv[1]) == -1:
        # Not invoked as a plugin listing: play through a fresh playlist.
        playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
        playlist.clear()
        playlist.add(stream_url, item)
        xbmc.Player().play(playlist)
    else:
        # Normal plugin invocation: resolve the URL for the calling item.
        item.setPath(str(stream_url))
        xbmcplugin.setResolvedUrl(utils.addon_handle, True, item)
plotly/python-api | packages/python/plotly/plotly/validators/sunburst/marker/colorbar/_tickvals.py | Python | mit | 483 | 0.00207 | import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
self, plotly_name="tickvals", parent_name="sunburst.marker.colorbar", **kwargs
):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
p | arent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", | "data"),
**kwargs
)
|
pculka/SkyDrop | skydrop/utils/hex2bin/main.py | Python | gpl-2.0 | 2,182 | 0.017874 | #!/usr/bin/python
import sys
import serial
from intelhex impor | t IntelHex
import time
import datetime
import struct
def add8(a, b):
    """Return the 8-bit sum of a and b (addition modulo 256).

    Any carry out of bit 7 is discarded, matching single-byte
    checksum arithmetic.
    """
    # '+' binds tighter than '&'; the explicit parentheses make the
    # intent (mask the full sum) unambiguous.
    return (a + b) & 0xFF
page_size = 255
class Hex2BinConv():
    def __init__(self, out):
        # out: path of the binary image file that conv() will write.
        self.hex = IntelHex()  # parsed Intel HEX image, populated by load()
        self.out = out
def load(self, filename):
print
| print "Loading application from hex"
self.hex.loadfile(filename, "hex")
size = self.hex.maxaddr() - self.hex.minaddr()
print " size: %0.2f KiB (%d B)" % (size/1024, size)
def conv(self, label):
done = False
adr = self.hex.minaddr()
max_adr = self.hex.maxaddr()
out_file = open(self.out, "wb");
lab_str = '';
if (label == "ee"):
f = open("../utils/build/build_number.txt", "r")
number = int(f.readline())
f.close()
#lab_str += struct.pack("<H", number)
else:
for i in range(32):
if i >= len(label):
c = chr(0)
else:
c = label[i]
lab_str += c
out_file.write(lab_str)
print " label: %s" % lab_str
print "Converting HEX 2 BIN ...",
while(adr <= max_adr):
out_file.write(chr(self.hex[adr]))
adr += 1
out_file.close()
print "Done"
    def batch(self, filename, label):
        """Load `filename` (Intel HEX) and convert it to self.out.

        Prints the elapsed processing time when finished.
        """
        # NOTE(review): time.clock() is deprecated and removed in
        # Python 3.8; fine for this Python 2 script, but use
        # time.perf_counter() if this is ever ported.
        start = time.clock()
        self.load(filename)
        self.conv(label)
        end = time.clock()
        print
        print "That's all folks! (%.2f seconds)" % (end - start)
# Command line: hex_file output_file [label]
if (len(sys.argv) < 3 or len(sys.argv) > 4):
    print "Usage %s hex_file output_file [label]" % __file__
    sys.exit(-1)
hex = sys.argv[1]  # NOTE(review): shadows the builtin hex(); unused here but consider renaming
out = sys.argv[2]
label = ""
if (len(sys.argv) == 4):
    label = sys.argv[3]
# The build number is always read so a default label can be derived.
f = open("../utils/build/build_number.txt", "r")
number = int(f.readline())
f.close()
# Missing or "auto" label: derive one from the current build number.
if (label == "" or label == "auto"):
    label = "skydrop-build-%04d" % number
a = Hex2BinConv(out)
a.batch(hex, label)
|
dustymabe/ansible-modules-core | cloud/amazon/ec2_elb_lb.py | Python | gpl-3.0 | 52,548 | 0.002836 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: ec2_elb_lb
description:
- Returns information about the load balancer.
- Will be marked changed when called only if state is changed.
short_description: Creates or destroys Amazon ELB.
version_added: "1.5"
author:
- "Jim Dalton (@jsdalton)"
options:
state:
description:
- Create or destroy the ELB
choices: ["present", "absent"]
required: true
name:
description:
- The name of the ELB
required: true
listeners:
description:
- List of ports/protocols for this ELB to listen on (see example)
required: false
purge_listeners:
description:
- Purge existing listeners on ELB that are not found in listeners
required: false
default: true
instance_ids:
description:
- List of instance ids to attach to this ELB
required: false
default: false
version_added: "2.1"
purge_instance_ids:
description:
- Purge existing instance ids on ELB that are not found in instance_ids
required: false
default: false
version_added: "2.1"
zones:
description:
- List of availability zones to enable on this ELB
required: false
purge_zones:
description:
- Purge existing availability zones on ELB that are not found in zones
required: false
default: false
security_group_ids:
description:
- A list of security groups to apply to the elb
require: false
default: None
version_added: "1.6"
security_group_names:
description:
- A list of security group names to apply to the elb
require: false
default: None
version_added: "2.0"
health_check:
description:
- An associative array of health check configuration settings (see example)
require: false
default: None
access_logs:
description:
- An associative array of access logs configuration settings (see example)
require: false
default: None
version_added: "2.0"
subnets:
description:
- A list of VPC subnets to use when creating ELB. Zones should be empty if using this.
required: false
default: None
aliases: []
version_added: "1.7"
purge_subnets:
description:
- Purge existing subnet on ELB that are not found in subnets
required: false
default: false
version_added: "1.7"
scheme:
description:
- The scheme to use when creating the ELB. For a private VPC-visible ELB use 'internal'.
required: false
default: 'internet-facing'
version_added: "1.7"
validate_certs:
description:
- When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
required: false
default: "yes"
choices: ["yes", "no"]
aliases: []
version_added: "1.5"
connection_draining_timeout:
description:
- Wait a specified timeout allowing connections to drain before terminating an instance
required: false
aliases: []
version_added: "1.8"
idle_timeout:
description:
- ELB connections from clients and to servers are timed out after this amount of time
required: false
version_added: "2.0"
cross_az_load_balancing:
description:
- Distribute load across all configured Availability Zones
required: false
default: "no"
choices: ["yes", "no"]
aliases: []
version_added: "1.8"
stickiness:
description:
      - An associative array of stickiness policy settings. Policy will be applied to all listeners (see example)
required: false
version_added: "2.0"
wait:
description:
- When specified, Ansible will check the status of the load balancer to ensure it has been successfully
removed from AWS.
required: false
default: no
choices: ["yes", "no"]
version_added: "2.1"
wait_timeout:
description:
- Used in conjunction with wait. Number of seconds to wait for the elb to be terminated.
A maximum of 600 seconds (10 minutes) is allowed.
required: false
default: 60
version_added: "2.1"
tags:
description:
- An associative array of tags. To delete all tags, supply an empty dict.
required: false
version_added: "2.1"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic provisioning example (non-VPC)
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
proxy_protocol: True
- protocol: https
load_balancer_port: 443
instance_protocol: http # optional, defaults to value of protocol setting
instance_port: 80
# ssl certificate required for https or ssl
ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert"
# Internal ELB example
- local_action:
module: ec2_elb_lb
name: "test-vpc"
scheme: internal
state: present
instance_ids:
- i-abcd1234
purge_instance_ids: true
subnets:
- subnet-abcd1234
- subnet-1a2b3c4d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
# Configure a health check and the access logs
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
health_check:
ping_protocol: http # options are http, https, ssl, tcp
ping_port: 80
ping_path: "/index.html" # not required for tcp or ssl
response_timeout: 5 # seconds
interval: 30 # seconds
unhealthy_threshold: 2
healthy_threshold: 10
access_logs:
interval: 5 # minutes (defaults to 60)
s3_location: "my-bucket" # This value is required if access_logs is set
s3_prefix: "logs"
# Ensure ELB is gone
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
# Ensure ELB is gone and wait for check (for default timeout)
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
wait: yes
# Ensure ELB is gone and wait for check with timeout value
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
wait: yes
wait_timeout: 600
# Normally, this module will purge any listeners that exist on the ELB
# but aren't specified in the listeners parameter. If purge_listeners is
# false it leaves them alone
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
| load_balancer_port: 80
instance_port: 80
purge_listeners: no
# Normally, this module will leave availability zones that are enabled
# on the ELB alone. If purge_zones is true, then any extraneous zones
# will be removed
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
| purge_zones: yes
# Creates a ELB and |
metpy/MetPy | v0.5/_downloads/GINI_Water_Vapor.py | Python | bsd-3-clause | 1,923 | 0.00156 | # Copyright (c) 2008-2016 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
GINI Water Vapor Imagery
========================
Use MetPy's support for GINI files to read in a water vapor satellite image and plot the
data using CartoPy.
"""
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
from metpy.cbook import get_test_data
from metpy.io import GiniFile
from metpy.plots import ctables
###########################################
# Open the GINI file from the test data
f = GiniFile(get_test_data('WEST-CONUS_4km_WV_20151208_2200.gini'))
print(f)
###########################################
# Get a Dataset view of the data (essentially a NetCDF-like interface to the
# underlying data). Pull out th | e data, (x, y) coordinates, and the projection
# information.
ds = f.to_dataset()
x = ds.variables['x'][:]
y = ds.variables['y'][:]
dat = ds.variables['WV']
proj_var = ds.variables[dat.grid_mappin | g]
print(proj_var)
###########################################
# Create CartoPy projection information for the file
globe = ccrs.Globe(ellipse='sphere', semimajor_axis=proj_var.earth_radius,
semiminor_axis=proj_var.earth_radius)
proj = ccrs.LambertConformal(central_longitude=proj_var.longitude_of_central_meridian,
central_latitude=proj_var.latitude_of_projection_origin,
standard_parallels=[proj_var.standard_parallel],
globe=globe)
###########################################
# Plot the image
fig = plt.figure(figsize=(10, 12))
ax = fig.add_subplot(1, 1, 1, projection=proj)
wv_norm, wv_cmap = ctables.registry.get_with_steps('WVCIMSS', 0, 1)
im = ax.imshow(dat[:], cmap=wv_cmap, norm=wv_norm, zorder=0,
extent=ds.img_extent, origin='upper')
ax.coastlines(resolution='50m', zorder=2, color='black')
plt.show()
|
ESOedX/edx-platform | lms/djangoapps/courseware/date_summary.py | Python | agpl-3.0 | 22,031 | 0.001997 | """
This module provides date summary blocks for the Course Info
page. Each block gives information about a particular
course-run-specific date which will be displayed to the user.
"""
from __future__ import absolute_import
import datetime
import crum
from babel.dates import format_timedelta
from django.conf import settings
from django.urls import reverse
from django.utils.formats import date_format
from django.utils.functional import cached_property
from django.utils.translation import get_language, to_locale
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from lazy import lazy
from pytz import utc
from course_modes.models import CourseMode, get_cosmetic_verified_display_price
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.verify_student.models import VerificationDeadline
from lms.djangoapps.verify_student.services import IDVerificationService
from openedx.core.djangoapps.certificates.api import can_show_certificate_available_date_field
from openedx.core.djangolib.markup import HTML, Text
from openedx.features.course_experience import UPGRADE_DEADLINE_MESSAGE, CourseHomeMessages
from student.models import CourseEnrollment
from .context_processor import user_timezone_locale_prefs
class DateSummary(object):
    """Base class for all date summary blocks.

    A date summary describes a single course-run-specific date (for
    example the course start date or the upgrade deadline) that is
    displayed to the user on the Course Info page.
    """

    # Memoized "now", set on first access so that every date comparison
    # made by one summary instance uses the same instant.
    _current_time = None

    def __init__(self, course, user, course_id=None):
        """
        Arguments:
            course: the course this summary describes a date for.
            user: the user the summary is being rendered for.
            course_id: optional course key; defaults to ``course.id``.
        """
        self.course = course
        self.user = user
        self.course_id = course_id or self.course.id

    @property
    def current_time(self):
        """
        Returns a consistent, timezone-aware current time (memoized on
        first access).
        """
        if self._current_time is None:
            self._current_time = datetime.datetime.now(utc)
        return self._current_time

    @property
    def css_class(self):
        """
        The CSS class of this summary. Indicates the type of information
        this summary block contains, and its urgency.
        """
        return ''

    @property
    def title(self):
        """The title of this summary."""
        return ''

    @property
    def description(self):
        """The detail text displayed by this summary."""
        return ''

    def register_alerts(self, request, course):
        """
        Registers any relevant course alerts given the current request.

        Subclasses override this to push messages into CourseHomeMessages;
        the base implementation registers nothing.
        """
        pass

    @property
    def date(self):
        """This summary's date, or None when no date applies."""
        return None

    @property
    def date_format(self):
        """
        The format to display this date in. By default, displays like Jan
        01, 2015.
        """
        return u'%b %d, %Y'

    @property
    def link(self):
        """The location to link to for more information."""
        return ''

    @property
    def link_text(self):
        """The text of the link."""
        return ''

    @property
    def relative_datestring(self):
        """
        Return this block's date in a human-readable format. If the date
        is None, returns the empty string.
        """
        if self.date is None:
            return ''
        locale = to_locale(get_language())
        delta = self.date - self.current_time
        try:
            relative_date = format_timedelta(delta, locale=locale)
        # Babel doesn't have translations for Esperanto, so we get
        # a KeyError when testing translations with
        # ?preview-lang=eo. This should not happen with any other
        # languages. See https://github.com/python-babel/babel/issues/107
        except KeyError:
            relative_date = format_timedelta(delta)
        date_has_passed = delta.days < 0
        # Translators: 'absolute' is a date such as "Jan 01,
        # 2020". 'relative' is a fuzzy description of the time until
        # 'absolute'. For example, 'absolute' might be "Jan 01, 2020",
        # and if today were December 5th, 2020, 'relative' would be "1
        # month".
        # (Renamed from ``date_format`` to avoid shadowing the property
        # of the same name on this class.)
        template = _(u"{relative} ago - {absolute}") if date_has_passed else _(u"in {relative} - {absolute}")
        return template.format(
            relative=relative_date,
            # '{date}' is left as a placeholder, substituted later with
            # the client-side-localized datetime span.
            absolute='{date}',
        )

    @property
    def is_enabled(self):
        """
        Whether or not this summary block should be shown.

        By default, the summary is only shown if its date is in the
        future (compared at day granularity, so "today" still counts).
        """
        if self.date is not None:
            return self.current_time.date() <= self.date.date()
        return False

    def deadline_has_passed(self):
        """
        Return True if a deadline (the date) exists, and has already passed.
        Returns False otherwise.
        """
        deadline = self.date
        return deadline is not None and deadline <= self.current_time

    @property
    def time_remaining_string(self):
        """
        Returns the time remaining until ``self.date`` as a localized string.
        """
        locale = to_locale(get_language())
        return format_timedelta(self.date - self.current_time, locale=locale)

    def date_html(self, date_format='shortDate'):
        """
        Returns a representation of the date as HTML.

        Note: this returns a span that will be localized on the client
        (client-side JS picks up the ``localized-datetime`` class and the
        data attributes).
        """
        locale = to_locale(get_language())
        user_timezone = user_timezone_locale_prefs(crum.get_current_request())['user_timezone']
        return HTML(
            u'<span class="date localized-datetime" data-format="{date_format}" data-datetime="{date_time}"'
            u' data-timezone="{user_timezone}" data-language="{user_language}">'
            u'</span>'
        ).format(
            date_format=date_format,
            date_time=self.date,
            user_timezone=user_timezone,
            user_language=locale,
        )

    @property
    def long_date_html(self):
        """
        Returns a long representation of the date as HTML.

        Note: this returns a span that will be localized on the client.
        """
        # NOTE(review): despite the name, this uses the client-side
        # 'shortDate' format -- confirm whether 'longDate' was intended.
        return self.date_html(date_format='shortDate')

    @property
    def short_time_html(self):
        """
        Returns a short representation of the time as HTML.

        Note: this returns a span that will be localized on the client.
        """
        # Reconstructed: this line was garbled in the source chunk
        # (extraction artifact); mirrors long_date_html above.
        return self.date_html(date_format='shortTime')

    def __repr__(self):
        return u'DateSummary: "{title}" {date} is_enabled={is_enabled}'.format(
            title=self.title,
            date=self.date,
            is_enabled=self.is_enabled
        )
class TodaysDate(DateSummary):
    """Summary block that simply shows today's date."""

    css_class = 'todays-date'
    is_enabled = True

    @property
    def date(self):
        # Always "now" -- this block has no fixed course date.
        return self.current_time

    @property
    def title(self):
        return 'current_datetime'

    def get_context(self):
        # The date already appears in the block title, so blank out the
        # context's date field to avoid displaying it twice.
        # NOTE(review): DateSummary as defined in this file exposes no
        # get_context; confirm the base class provides it in this version.
        ctx = super(TodaysDate, self).get_context()
        ctx['date'] = ''
        return ctx
class CourseStartDate(DateSummary):
"""
Displays the start date of the course.
"""
css_class = 'start-date'
title = ugettext_lazy('Course Starts')
@property
def date(self):
return self.course.start
def register_alerts(self, request, course):
"""
Registers an alert if the course has not started yet.
"""
is_enrolled = CourseEnrollment.get_enrollment(request.user, course.id)
if not course.start or not is_enrolled:
return
days_until_start = (course.start - self.current_time).days
if course.start > self.current_time:
if days_until_start > 0:
CourseHomeMessages.register_info_message(
request,
Text(_(
"Don't forget to add a calendar reminder!"
)),
title=Text(_(u"Course starts in {time_remaining_string} on {course_start_date}.")).format(
time_remaining_string=self.time_remaining_string,
course_start_date=self.long_date_html,
)
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.