| repo_name (string, lengths 5-100) | path (string, lengths 4-231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, lengths 0-8.16k) | middle (string, lengths 3-512) | suffix (string, lengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
| crisely09/horton | horton/meanfield/occ.py | Python | gpl-3.0 | 8,958 | 0.001786 |
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2016 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
"""Occupation number models"""
import numpy as np
from horton.exceptions import ElectronCountError
from horton.quadprog import find_1d_root
from horton.constants import boltzmann
from horton.log import log
from horton.utils import doc_inherit
__all__ = [
'FixedOccModel', 'AufbauOccModel', 'AufbauSpinOccModel', 'FermiOccModel',
]
class OccModel(object):
'''Base class for the occupation models'''
def assign(self, *exps):
'''Assign occupation numbers to the expansion objects
**Arguments:**
exp_alpha, exp_beta, ...
Expansion objects
'''
raise NotImplementedError
def check_dms(self, overlap, *dms, **kwargs):
'''Test if the given density matrices contain the right number of electrons
**Arguments:**
overlap
The overlap operator.
dm1, dm2, ...
             Density matrices to be tested.
        **Optional keyword arguments:**
eps (default=1e-4)
The allowed deviation.
'''
raise NotImplementedError
class FixedOccModel(OccModel):
def __init__(self, *occ_arrays):
self.occ_arrays = occ_arrays
@doc_inherit(OccModel)
def assign(self, *exps):
if len(exps) != len(self.occ_arrays):
            raise TypeError('Expected %i expansion objects, got %i.' % (len(self.occ_arrays), len(exps)))
for exp, occ_array in zip(exps, self.occ_arrays):
exp.occupations[:len(occ_array)] = occ_array
exp.occupations[len(occ_array):] = 0.0
@doc_inherit(OccModel)
def check_dms(self, overlap, *dms, **kwargs):
eps = kwargs.pop('eps', 1e-4)
if len(kwargs) > 0:
raise TypeError('Unexpected keyword arguments: %s' % kwargs.keys())
if len(dms) != len(self.occ_arrays):
raise TypeError('The number of density matrices is incorrect.')
for dm, occ_array in zip(dms, self.occ_arrays):
assert abs(overlap.contract_two('ab,ba', dm) - occ_array.sum()) < eps
class AufbauOccModel(OccModel):
'''The standard Aufbau occupation number model.
This model just fills up all the lowest lying orbitals. When the total
number of electrons in one channel is fractional, the fractional electron
is put in the HOMO orbital.
'''
def __init__(self, *noccs):
'''
**Arguments:**
nalpha, nbeta, ...
The number of electrons in each channel.
'''
for nocc in noccs:
if nocc < 0:
raise ElectronCountError('Negative number of electrons is not allowed.')
if sum(noccs) == 0:
raise ElectronCountError('At least one electron is required.')
self.noccs = noccs
@doc_inherit(OccModel)
def assign(self, *exps):
if len(exps) != len(self.noccs):
            raise TypeError('Expected %i expansion objects, got %i.' % (len(self.noccs), len(exps)))
for exp, nocc in zip(exps, self.noccs):
if exp.nfn < nocc:
raise ElectronCountError('The number of orbitals must not be lower than the number of alpha or beta electrons.')
# It is assumed that the orbitals are sorted from low to high energy.
if nocc == int(nocc):
                exp.occupations[:int(nocc)] = 1.0
                exp.occupations[int(nocc):] = 0.0
else:
exp.occupations[:int(np.floor(nocc))] = 1.0
exp.occupations[int(np.floor(nocc))] = nocc - np.floor(nocc)
exp.occupations[int(np.ceil(nocc)):] = 0.0
@doc_inherit(OccModel)
def check_dms(self, overlap, *dms, **kwargs):
eps = kwargs.pop('eps', 1e-4)
if len(kwargs) > 0:
raise TypeError('Unexpected keyword arguments: %s' % kwargs.keys())
if len(dms) != len(self.noccs):
raise TypeError('The number of density matrices is incorrect.')
for dm, nocc in zip(dms, self.noccs):
assert abs(overlap.contract_two('ab,ba', dm) - nocc) < eps
class AufbauSpinOccModel(OccModel):
'''This Aufbau model only applies to unrestricted wavefunctions'''
def __init__(self, nel):
'''
**Arguments:**
nel
The total number of electrons (alpha + beta)
'''
if nel <= 0:
raise ElectronCountError('The number of electron must be positive.')
self.nel = nel
@doc_inherit(OccModel)
def assign(self, exp_alpha, exp_beta):
nel = self.nel
ialpha = 0
ibeta = 0
while nel > 0:
if exp_alpha.energies[ialpha] <= exp_beta.energies[ibeta]:
exp_alpha.occupations[ialpha] = min(1.0, nel)
ialpha += 1
else:
exp_beta.occupations[ibeta] = min(1.0, nel)
ibeta += 1
nel -= 1
@doc_inherit(OccModel)
def check_dms(self, overlap, *dms, **kwargs):
eps = kwargs.pop('eps', 1e-4)
if len(kwargs) > 0:
raise TypeError('Unexpected keyword arguments: %s' % kwargs.keys())
assert abs(sum(overlap.contract_two('ab,ba', dm) for dm in dms) - self.nel) < eps
class FermiOccModel(AufbauOccModel):
'''Fermi smearing electron occupation model'''
def __init__(self, *noccs, **kwargs):
r'''
**Arguments:**
nalpha, nbeta, ...
The number of electrons in each channel.
**Optional keyword arguments:**
temperature
Controls the width of the distribution (derivative)
eps
The error on the sum of the occupation number when searching for
the right Fermi level.
For each channel, the orbital occupations are assigned with the Fermi
distribution:
.. math::
n_i = \frac{1}{1 + e^{(\epsilon_i - \mu)/k_B T}}
where, for a given set of energy levels, :math:`\{\epsilon_i\}`, the
chemical potential, :math:`\mu`, is optimized as to satisfy the
following constraint:
.. math::
\sum_i n_i = n_\text{occ}
where :math:`n_\text{occ}` can be set per (spin) channel. This is
only a part of the methodology presented in [rabuck1999]_.
'''
temperature = kwargs.pop('temperature', 300)
eps = kwargs.pop('eps', 1e-8)
if len(kwargs) > 0:
raise TypeError('Unknown keyword arguments: %s' % kwargs.keys())
if temperature <= 0:
raise ValueError('The temperature must be strictly positive')
if eps <= 0:
raise ValueError('The root-finder threshold (eps) must be strictly positive.')
self.temperature = float(temperature)
self.eps = eps
AufbauOccModel.__init__(self, *noccs)
        log.cite('rabuck1999', 'the Fermi broadening method to assign orbital occupations')
@doc_inherit(OccModel)
def assign(self, *exps):
beta = 1.0/self.temperature/boltzmann
for exp, nocc in zip(exps, self.noccs):
def get_occ(mu):
occ = np.zeros(exp.nfn)
mask = exp.energies < mu
e = np.exp(beta*(exp.energies[mask] - mu))
occ[mask] = 1.0/(e + 1.0)
mask = ~mask
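The suffix column clips this record just after `mask = ~mask`, so the rest of `get_occ` is lost. The natural continuation is the complementary, numerically stable half of the Fermi function; here is a standalone sketch of that idea (the function name and signature are assumptions for illustration, not HORTON's code, and the root search over `mu` via `find_1d_root` is left out):

```python
import numpy as np

def fermi_occupations(energies, mu, beta):
    """Numerically stable n_i = 1/(1 + exp(beta*(e_i - mu)))."""
    occ = np.zeros(len(energies))
    mask = energies < mu
    e = np.exp(beta*(energies[mask] - mu))   # exponent <= 0, cannot overflow
    occ[mask] = 1.0/(e + 1.0)
    mask = ~mask
    e = np.exp(-beta*(energies[mask] - mu))  # exponent <= 0 on this side too
    occ[mask] = e/(e + 1.0)
    return occ
```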
| hobinyoon/apache-cassandra-2.2.3-src | mtdb/process-log/calc-latency-multiple-runs/Latency.py | Python | apache-2.0 | 6,312 | 0.029943 |
import os
import sys
sys.path.insert(0, "../../util/python")
import Cons
import Conf
def Load():
    _LoadLogs()
    # NOTE: sys.exit(0) here means _WritePlotData() below never runs;
    # this looks like a debugging short-circuit left in place.
    sys.exit(0)
    _WritePlotData()
_cass = None
_mutants = None
class LogEntry(object):
# simulation_time_dur_ms simulated_time OpW_per_sec running_behind_cnt read_latency_ms read_cnt
# simulation_time num_OpW_requested running_behind_avg_in_ms write_cnt
# percent_completed write_latency_ms
# running_on_time_cnt
# running_on_time_sleep_avg_in_ms
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14
# 1013 160209-165822.613 100117-105129.802 215 0.4 215 0 0 724 -130 131 168 215 293
# 12345678901234567
def __init__(self, tokens):
self.simulation_time_dur_ms = int(tokens[0])
self.simulation_time = tokens[1]
self.simulated_time = tokens[2]
self.write_latency_ms = int(tokens[10])
self.read_latency_ms = int(tokens[11])
def __str__(self):
return " ".join("%s=%s" % item for item in vars(self).items())
class LoadgenLog(object):
def __init__(self, exp_datetime):
self.exp_datetime = exp_datetime
self.fn_log = "../../logs/loadgen/%s" % self.exp_datetime
self._LoadFile()
def WritePlotData(self):
fn = "plot-data/%s-latency-by-time" % self.exp_datetime
with Cons.MeasureTime("Writing file %s ..." % fn):
with open(fn, "w") as fo:
fmt = "%17s %4d %4d"
for e in _cass.entries:
#Cons.P(fmt % (e.simulated_time, e.write_latency_ms, e.read_latency_ms))
fo.write((fmt + "\n") % (e.simulated_time, e.write_latency_ms, e.read_latency_ms))
Cons.P("Created %s %d" % (fn, os.path.getsize(fn)))
# TODO: Keep all the latencies and generate a CDF
def _LoadFile(self):
self.entries = []
#with Cons.MeasureTime("Loading file %s ..." % self.fn_log):
with open(self.fn_log) as fo:
parse_status = "header"
for line in fo.readlines():
if parse_status == "header":
if len(line) == 0:
continue
t = line.split("simulated_time_years: ")
if len(t) == 2:
self.simulated_time_years = float(t[1])
t = line.split("simulation_time_in_min: ")
if len(t) == 2:
                        self.simulation_time_mins = float(t[1])
t = line.split()
detect_body_start = 0
if len(t) > 5 and t[0] == "#":
for i in range(1, 6):
if t[i] == str(i):
detect_body_start += 1
if detect_body_start == 5:
#Cons.P(line)
parse_status = "body"
elif parse_status == "body":
t = line.split()
if (len(t) > 0) and (t[0] == "#"):
parse_status = "footer"
continue
#Cons.P(line)
le = LogEntry(t)
if le.simulation_time_dur_ms > (int(Conf.Get("simulation_time_exclude_first_secs")) * 1000):
self.entries.append(le)
#Cons.P("%s: Loaded %d log entries" % (self.exp_datetime, len(self.entries)))
self._GenStat()
def _GenStat(self):
w_sum = 0
r_sum = 0
time_intervals = []
time_prev = None
first = True
for e in self.entries:
if first:
w_min = e.write_latency_ms
                w_max = e.write_latency_ms
                r_min = e.read_latency_ms
                r_max = e.read_latency_ms
                first = False
            else:
                w_min = min(w_min, e.write_latency_ms)
                w_max = max(w_max, e.write_latency_ms)
                r_min = min(r_min, e.read_latency_ms)
r_max = max(r_max, e.read_latency_ms)
time_intervals.append(e.simulation_time_dur_ms - time_prev)
w_sum += e.write_latency_ms
r_sum += e.read_latency_ms
time_prev = e.simulation_time_dur_ms
self.w_avg = float(w_sum) / len(self.entries)
self.r_avg = float(r_sum) / len(self.entries)
self.w_min = w_min
self.w_max = w_max
self.r_min = r_min
self.r_max = r_max
#Cons.P(" w_avg=%f w_min=%d w_max=%d" % (self.w_avg, self.w_min, self.w_max))
#Cons.P(" r_avg=%f r_min=%d r_max=%d" % (self.r_avg, self.r_min, self.r_max))
self.time_intervals_avg_ms = sum(time_intervals) / float(len(time_intervals))
#Cons.P(" avg time interval simulated time days=%f" % (self._ToSimulatedTimeSecs(self.time_intervals_avg_ms) / (24.0 * 3600)))
def _ToSimulatedTimeSecs(self, simulation_time_ms):
return simulation_time_ms / 1000.0 \
* (self.simulated_time_years * 365.25 * 24 * 60) \
            / self.simulation_time_mins
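A quick sanity check on the conversion above (a worked example, not part of the original log reader): with one simulated year compressed into a 60-minute run, one second of simulation time maps to 8766 simulated seconds.

```python
# simulation ms -> simulated s: scale by simulated minutes / simulation minutes
assert 1000 / 1000.0 * (1 * 365.25 * 24 * 60) / 60.0 == 8766.0
```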
def _GenStat(logs):
read_lat_ms = []
write_lat_ms = []
for l in logs:
for e in l.entries:
write_lat_ms.append(e.write_latency_ms)
read_lat_ms.append(e.read_latency_ms)
w_avg = sum(write_lat_ms) / float(len(write_lat_ms))
r_avg = sum(read_lat_ms) / float(len(read_lat_ms))
Cons.P("w_avg=%f r_avg=%f" % (w_avg, r_avg))
def _FilterOutSlowestN(logs, ratio):
num_exp_to_filter_out = int(len(logs) * ratio)
Cons.P("Filtering out the slowest %d experiments" % num_exp_to_filter_out)
#Cons.P("Before")
#for l in logs:
# Cons.P(" %s r_max=%d" % (l.exp_datetime, l.r_max))
logs.sort(key=lambda x: x.r_max)
#Cons.P("After sorting")
#for l in logs:
# Cons.P(" %s r_max=%d" % (l.exp_datetime, l.r_max))
    # Guard: logs[:-0] would drop every log when the ratio rounds to zero.
    if num_exp_to_filter_out > 0:
        logs = logs[:-num_exp_to_filter_out]
#Cons.P("After filtering out")
#for l in logs:
# Cons.P(" %s r_max=%d" % (l.exp_datetime, l.r_max))
return logs
def _LoadLogs():
# TODO
#global _cass, _mutants
# Play around with this. On mt-s7, Mutants beats Cassandra with 0, 0.1, 0.2
filter_out_slowest_n_ratio = 0.3
_cass_logs = []
with Cons.MeasureTime("Loading Cassandra loadgen log files ..."):
for dt in Conf.Get("cassandra_experiment_datetime"):
#Cons.P(dt)
_cass_logs.append(LoadgenLog(dt))
Cons.P("Loaded %d experiments" % len(_cass_logs))
_cass_logs = _FilterOutSlowestN(_cass_logs, filter_out_slowest_n_ratio)
#Cons.P(len(_cass_logs))
_GenStat(_cass_logs)
_mutants_logs = []
with Cons.MeasureTime("Loading Mutants loadgen log files ..."):
for dt in Conf.Get("mutants_experiment_datetime"):
#Cons.P(dt)
_mutants_logs.append(LoadgenLog(dt))
Cons.P("Loaded %d experiments" % len(_mutants_logs))
    _mutants_logs = _FilterOutSlowestN(_mutants_logs, filter_out_slowest_n_ratio)
#Cons.P(len(_cass_logs))
_GenStat(_mutants_logs)
def _WritePlotData():
global _cass, _mutants
_cass.WritePlotData()
_mutants.WritePlotData()
| disenone/zsync | test/parallel_task/ventilator.py | Python | mit | 694 | 0.005764 |
# -*- coding: utf-8 -*-
import zmq
import random
import time
def run():
context = zmq.Context()
sender = context.socket(zmq.PUSH)
sender.bind('tcp://*:5557')
    sink = context.socket(zmq.PUSH)
    sink.connect('tcp://localhost:5558')
print 'Press Enter when the workers are ready: '
_ = raw_input()
    print('sending tasks to workers...')
sink.send(b'0')
random.seed()
total_msec = 0
for task_nbr in xrange(100):
workload = random.randint(1, 100)
total_msec += workload
sender.send_string(u'%i' % workload)
print 'Total expected cost: %s msec' % total_msec
time.sleep(1)
if __name__ == '__main__':
    run()
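This is the classic ZeroMQ ventilator pattern: tasks fan out over PUSH/PULL sockets and the `b'0'` message tells the sink a batch is starting. A minimal worker counterpart (a sketch, not part of this repo) pulls tasks from port 5557 and signals completions to the sink on 5558:

```python
import time
import zmq

context = zmq.Context()
receiver = context.socket(zmq.PULL)     # tasks from the ventilator
receiver.connect('tcp://localhost:5557')
sender = context.socket(zmq.PUSH)       # completion signals to the sink
sender.connect('tcp://localhost:5558')
while True:
    msec = int(receiver.recv_string())  # workload, in milliseconds
    time.sleep(msec * 0.001)            # pretend to work that long
    sender.send(b'')                    # tell the sink this task is done
```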
| beeftornado/sentry | tests/snuba/api/endpoints/test_discover_saved_queries.py | Python | bsd-3-clause | 18,996 | 0.001263 |
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.discover.models import DiscoverSavedQuery
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now
class DiscoverSavedQueryBase(APITestCase, SnubaTestCase):
def setUp(self):
super(DiscoverSavedQueryBase, self).setUp()
self.login_as(user=self.user)
self.org = self.create_organization(owner=self.user)
self.projects = [
self.create_project(organization=self.org),
self.create_project(organization=self.org),
]
self.project_ids = [project.id for project in self.projects]
self.project_ids_without_access = [self.create_project().id]
query = {"fields": ["test"], "conditions": [], "limit": 10}
model = DiscoverSavedQuery.objects.create(
organization=self.org, created_by=self.user, name="Test query", query=query, version=1
)
model.set_projects(self.project_ids)
class DiscoverSavedQueriesTest(DiscoverSavedQueryBase):
feature_name = "organizations:discover"
def setUp(self):
super(DiscoverSavedQueriesTest, self).setUp()
self.url = reverse("sentry-api-0-discover-saved-queries", args=[self.org.slug])
def test_get(self):
with self.feature(self.feature_name):
response = self.client.get(self.url)
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert response.data[0]["name"] == "Test query"
assert response.data[0]["projects"] == self.project_ids
assert response.data[0]["fields"] == ["test"]
assert response.data[0]["conditions"] == []
assert response.data[0]["limit"] == 10
assert response.data[0]["version"] == 1
assert "createdBy" in response.data[0]
assert response.data[0]["createdBy"]["username"] == self.user.username
def test_get_version_filter(self):
with self.feature(self.feature_name):
response = self.client.get(self.url, format="json", data={"query": "version:1"})
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert response.data[0]["name"] == "Test query"
with self.feature(self.feature_name):
response = self.client.get(self.url, format="json", data={"query": "version:2"})
assert response.status_code == 200, response.content
assert len(response.data) == 0
def test_get_name_filter(self):
with self.feature(self.feature_name):
            response = self.client.get(self.url, format="json", data={"query": "Test"})
            assert response.status_code == 200, response.content
            assert len(response.data) == 1
assert response.data[0]["name"] == "Test query"
with self.feature(self.feature_name):
# Also available as the name: filter.
response = self.client.get(self.url, format="json", data={"query": "name:Test"})
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert response.data[0]["name"] == "Test query"
with self.feature(self.feature_name):
response = self.client.get(self.url, format="json", data={"query": "name:Nope"})
assert response.status_code == 200, response.content
assert len(response.data) == 0
def test_get_all_paginated(self):
for i in range(0, 10):
query = {"fields": ["test"], "conditions": [], "limit": 10}
model = DiscoverSavedQuery.objects.create(
organization=self.org,
created_by=self.user,
name="My query {}".format(i),
query=query,
version=1,
)
model.set_projects(self.project_ids)
with self.feature(self.feature_name):
response = self.client.get(self.url, data={"per_page": 1})
assert response.status_code == 200, response.content
assert len(response.data) == 1
with self.feature(self.feature_name):
# The all parameter ignores pagination and returns all values.
response = self.client.get(self.url, data={"per_page": 1, "all": 1})
assert response.status_code == 200, response.content
assert len(response.data) == 11
def test_get_sortby(self):
query = {"fields": ["message"], "query": "", "limit": 10}
model = DiscoverSavedQuery.objects.create(
organization=self.org,
created_by=self.user,
name="My query",
query=query,
version=2,
date_created=before_now(minutes=10),
date_updated=before_now(minutes=10),
)
model.set_projects(self.project_ids)
sort_options = {
"dateCreated": True,
"-dateCreated": False,
"dateUpdated": True,
"-dateUpdated": False,
"name": True,
"-name": False,
}
for sorting, forward_sort in sort_options.items():
with self.feature(self.feature_name):
response = self.client.get(self.url, data={"sortBy": sorting})
assert response.status_code == 200
values = [row[sorting.strip("-")] for row in response.data]
if not forward_sort:
values = list(reversed(values))
assert list(sorted(values)) == values
def test_get_sortby_myqueries(self):
uhoh_user = self.create_user(username="uhoh")
self.create_member(organization=self.org, user=uhoh_user)
whoops_user = self.create_user(username="whoops")
self.create_member(organization=self.org, user=whoops_user)
query = {"fields": ["message"], "query": "", "limit": 10}
model = DiscoverSavedQuery.objects.create(
organization=self.org,
created_by=uhoh_user,
name="a query for uhoh",
query=query,
version=2,
date_created=before_now(minutes=10),
date_updated=before_now(minutes=10),
)
model.set_projects(self.project_ids)
model = DiscoverSavedQuery.objects.create(
organization=self.org,
created_by=whoops_user,
name="a query for whoops",
query=query,
version=2,
date_created=before_now(minutes=10),
date_updated=before_now(minutes=10),
)
model.set_projects(self.project_ids)
with self.feature(self.feature_name):
response = self.client.get(self.url, data={"sortBy": "myqueries"})
assert response.status_code == 200, response.content
values = [int(item["createdBy"]["id"]) for item in response.data]
assert values == [self.user.id, uhoh_user.id, whoops_user.id]
def test_post(self):
with self.feature(self.feature_name):
response = self.client.post(
self.url,
{
"name": "New query",
"projects": self.project_ids,
"fields": [],
"range": "24h",
"limit": 20,
"conditions": [],
"aggregations": [],
"orderby": "-time",
},
)
assert response.status_code == 201, response.content
assert response.data["name"] == "New query"
assert response.data["projects"] == self.project_ids
assert response.data["range"] == "24h"
assert not hasattr(response.data, "start")
assert not hasattr(response.data, "end")
def test_post_invalid_projects(self):
with self.feature(self.feature_name):
response = self.client.post(
self.url,
{
"name": "New query",
"projects": self.project_ids_without_access,
"fields": [],
"range": "24h",
"limit": 20,
| KirillTim/crashsimilarity | crashsimilarity/cli/signatures_similarity.py | Python | mpl-2.0 | 1,820 | 0.002747 |
import pprint
from crashsimilarity.downloader import SocorroDownloader
import argparse
import sys
from crashsimilarity.models.gensim_model_wrapper import Doc2vecModelWrapper
from crashsimilarity.models.similarity.doc2vec_similarity import Doc2VecSimilarity
from crashsimilarity.models.wmd_calculator import WMDCalculator
from crashsimilarity.utils import StackTracesGetter
def parse_args(args):
parser = argparse.ArgumentParser(description='Test similarities between two signatures')
parser.add_argument('-1', '--one', required=True, help='First signature')
parser.add_argument('-2', '--two', required=True, help='Second signature')
parser.add_argument('-p', '--product', required=True, help='Product for which crash data is needed to be downloaded')
parser.add_argument('-t', '--top', help='Number of top similar and different stack traces(Default 10)', default=10, type=int)
    return parser.parse_args(args)
if __name__ == '__main__':
args = parse_args(sys.argv[1:])
SocorroDownloader.download_and_save_crashes(days=3, product=args.product)
paths = SocorroDownloader.get_dump_paths(days=3, product=args.product)
model_with_corpus = Doc2vecModelWrapper.read_corpus(paths).train_model()
algo = Doc2VecSimilarity(WMDCalculator.build_with_all_distances(model_with_corpus.model, model_with_corpus.corpus))
print(args.one + ' vs ' + args.two)
traces1 = StackTracesGetter.get_stack_traces_for_signature(paths, args.one)
traces2 = StackTracesGetter.get_stack_traces_for_signature(paths, args.two)
similarities = algo.signatures_similarity(traces1, traces2)
print('first signature:')
for t in traces1:
print(t)
print('second signature:')
for t in traces2:
print(t)
print('similarities matrix:')
pprint.pprint(similarities)
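`parse_args` can be exercised directly; the signature and product values below are placeholders, not real Socorro signatures:

```python
# hypothetical values, just to show the flag layout
args = parse_args(['-1', 'sigA', '-2', 'sigB', '-p', 'Firefox', '-t', '5'])
assert args.top == 5 and args.product == 'Firefox'
```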
| irisdianauy/bioscripts | gbk_to_bed.py | Python | gpl-2.0 | 1,705 | 0 |
#! /usr/bin/env python
"""
author @irisdianauy
The script converts a multi-genbank file into
a single bed file.
Feature: Specify features to extract (allows all).
"""
import pandas as pd
from os import path
from sys import argv, exit
from Bio import SeqIO
def get_id(ofeat):
sid = (ofeat.qualifiers.get("db_xref", []))[0]
sid = sid.replace("SEED:", "")
return(sid)
def get_cds_info(ofeat):
bpos = ofeat.strand > 0
dcds = {"str": ofeat.location.start.position,
"end": ofeat.location.end.position,
"name": get_id(ofeat),
"dxn": "+" if bpos else "-"}
return(dcds)
def get_all_cds(ogb, lfeats):
lcds_all = []
for orec in ogb:
dcds_ = {"chr": orec.id, "score": 1000}
for ofeat in orec.features:
if ofeat.type in lfeats:
dcds = {**get_cds_info(ofeat), **dcds_}
lcds_all.append(dcds)
return(lcds_all)
def write_bed(lcds, pbed):
    odf = pd.DataFrame(lcds)
    odf.to_csv(pbed, sep="\t", index=False, header=False,
               columns=["chr", "str", "end", "name", "score", "dxn"])
def main(pgb, pbed, lfeats):
# Open genbank file
    ogb = SeqIO.parse(pgb, "genbank")
# Get all CDSs + info
lcds_all = get_all_cds(ogb, lfeats)
# Write locations in BED format
    write_bed(lcds_all, pbed)
# """
if __name__ == "__main__":
    try:
        pgb = path.abspath(argv[1])
pbed = path.abspath(argv[2])
lfeats = argv[3:]
except IndexError:
ppy = path.basename(path.abspath(__file__))
shelp = f"Usage:\n{ppy} <input.gbk> <output.bed> <type1 type2 typen>"
print(shelp)
exit()
main(pgb, pbed, lfeats)
# """
| dimV36/webtests | webtests/auth.py | Python | gpl-2.0 | 486 | 0 |
# coding=utf-8
__author__ = 'dimv36'
from flask_login import AnonymousUserMixin, LoginManager
from webtests.models import User
# User login manager initialization module
login_manager = LoginManager()
class AnonymousUser(AnonymousUserMixin):
id = None
login_manager.anonymous_user = AnonymousUser
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return User.query.get(user_id)
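The manager is configured here but never attached to an app in this module; presumably that happens at application setup. A minimal sketch of the usual Flask-Login wiring (the app object is assumed):

```python
from flask import Flask
from webtests.auth import login_manager

app = Flask(__name__)
login_manager.init_app(app)  # standard Flask-Login attachment
```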
| Fjobbe/xbmc-swefilmer | navigation.py | Python | gpl-3.0 | 9,678 | 0.000723 |
# -*- coding: utf-8 -*-
from mocks import Xbmc, Xbmcplugin, Xbmcgui, Xbmcaddon
import swefilmer
import sys
import urllib
ACTION_NEW = 'new'
ACTION_TOP = 'top'
ACTION_FAVORITES = 'favorites'
ACTION_CATEGORIES = 'categories'
ACTION_CATEGORY = 'category'
ACTION_SEARCH = 'search'
ACTION_VIDEO = 'video'
ACTION_NEXT_PAGE = 'next'
class Navigation(object):
def __init__(self, xbmc, xbmcplugin, xbmcgui, xbmcaddon, swefilmer,
plugin_url, handle, params):
self.xbmc = xbmc
self.xbmcplugin = xbmcplugin
self.xbmcgui = xbmcgui
self.xbmcaddon = xbmcaddon
self.swefilmer = swefilmer
self.plugin_url = plugin_url
self.handle = int(handle)
self.params = self.swefilmer.parameters_string_to_dict(params)
self.settings = xbmcaddon.Addon(id='plugin.video.swefilmer')
self.localize = self.settings.getLocalizedString
def get_credentials(self):
username = self.settings.getSetting('username')
password = self.settings.getSetting('password')
return (username, password)
def unikeyboard(self, default, message):
keyboard = self.xbmc.Keyboard(default, message)
keyboard.doModal()
if (keyboard.isConfirmed()):
return keyboard.getText()
else:
return None
def quality_select_dialog(self, stream_urls):
qualities = [s[0] for s in stream_urls]
dialog = self.xbmcgui.Dialog()
answer = 0
if len(qualities) > 1:
answer = dialog.select(self.localize(30201), qualities)
if answer == -1:
return None
        url = stream_urls[answer][1]
return url
def add_menu_item(self, caption, action, total_items, logged_in, url=None):
li = self.xbmcgui.ListItem(caption)
infoLabels = {'Title': caption}
li.setInfo(type='Video', infoLabels=infoLabels)
params = {'action': action, 'logged_in': logged_in}
if url:
params['url'] = url
item_url = self.plugin_url + '?' + urllib.urlencode(params)
        self.xbmcplugin.addDirectoryItem(handle=self.handle, url=item_url,
listitem=li, isFolder=True,
totalItems=total_items)
def add_video_item(self, caption, url, image, action, total_items, logged_in):
li = self.xbmcgui.ListItem(caption)
li.setProperty('IsPlayable', 'true')
if image:
li.setThumbnailImage(image)
infoLabels = {'Title': caption}
li.setInfo(type='Video', infoLabels=infoLabels)
params = {'action': action, 'url': url, 'logged_in': logged_in}
item_url = self.plugin_url + '?' + urllib.urlencode(params)
self.xbmcplugin.addDirectoryItem(handle=self.handle, url=item_url,
listitem=li, isFolder=False,
totalItems=total_items)
def start_menu(self):
logged_in = False
(username, password) = self.get_credentials()
if username and len(username) > 0:
logged_in = self.swefilmer.login(username, password)
if not logged_in:
self.xbmcgui.Dialog().ok(self.localize(30501),
self.localize(30502))
total_items = 5 if logged_in else 4
self.add_menu_item(self.localize(30101), ACTION_NEW, total_items,
logged_in)
self.add_menu_item(self.localize(30102), ACTION_TOP, total_items,
logged_in)
if logged_in:
self.add_menu_item(self.localize(30103), ACTION_FAVORITES,
total_items, logged_in)
self.add_menu_item(self.localize(30104), ACTION_CATEGORIES,
total_items, logged_in)
self.add_menu_item(self.localize(30105), ACTION_SEARCH, total_items,
logged_in)
return True
def new_menu(self):
html = self.swefilmer.new_menu_html()
return self.scrape_list(html)
def top_menu(self):
html = self.swefilmer.top_menu_html()
return self.scrape_list(html)
def favorites_menu(self):
html = self.swefilmer.favorites_menu_html()
return self.scrape_list(html)
def categories_menu(self):
html = self.swefilmer.categories_menu_html()
ret, pagination = self.swefilmer.scrape_categories(html)
total_items = len(ret)
for (url, name), img in ret:
self.add_menu_item(name, ACTION_CATEGORY, total_items,
self.params['logged_in'], url)
return True
def category_menu(self):
        if 'browse-serier' not in self.params['url']:
return self.next_page()
html = self.swefilmer.menu_html(self.params['url'])
ret, pagination = self.swefilmer.scrape_series(html)
total_items = len(ret) + len(pagination)
for (url, name), img in ret:
self.add_menu_item(name, ACTION_CATEGORY, total_items,
self.params['logged_in'], url)
if pagination:
self.add_menu_item(self.localize(30301), ACTION_NEXT_PAGE,
total_items,
self.params['logged_in'], pagination[0])
return True
def search_menu(self):
try:
latest_search = self.settings.getSetting("latestSearch")
except KeyError:
latest_search = ""
search_string = self.unikeyboard(latest_search, "")
        if not search_string: return
self.settings.setSetting("latestSearch", search_string)
html = self.swefilmer.search_menu_html(search_string)
return self.scrape_list(html)
def next_page(self):
url = self.params['url']
html = self.swefilmer.menu_html(url)
return self.scrape_list(html)
def scrape_list(self, html):
ret, pagination = self.swefilmer.scrape_list(html)
total_items = len(ret) + len(pagination)
for (url, name), img in ret:
self.add_video_item(name, url, img, ACTION_VIDEO, total_items,
self.params['logged_in'])
if pagination:
self.add_menu_item(self.localize(30301), ACTION_NEXT_PAGE,
total_items, self.params['logged_in'],
pagination[0])
return True
def video(self):
url = self.params['url']
html = self.swefilmer.video_html(url)
result = self.swefilmer.scrape_video(html)
if not result:
if not self.params['logged_in']:
self.xbmcgui.Dialog().ok(self.localize(30401),
self.localize(30402),
self.localize(30403))
else:
self.xbmcgui.Dialog().ok(self.localize(30601),
self.localize(30602))
self.xbmcplugin.setResolvedUrl(
self.handle, succeeded=False,
listitem=self.xbmcgui.ListItem(''))
return False
name, description, img, streams = result
self.xbmc.log('video: name=' + str(name))
self.xbmc.log('video: description=' + str(description))
self.xbmc.log('video: img=' + str(img))
self.xbmc.log('video: streams=' + str(streams))
        if len(streams) > 1:
url = self.quality_select_dialog(streams)
if not url:
self.xbmcplugin.setResolvedUrl(
self.handle, succeeded=False,
listitem=self.xbmcgui.ListItem(''))
return False
        else:
            url = streams[0][1]
list_item = self.xbmcgui.ListItem(name)
if img:
list_item.setThumbnailImage(img[0])
infoLabels = {'Title': name}
if description:
infoLabels['Plot'] = description[0]
list_item.setInfo(type="Video", infoLabels=infoLabels)
list_item.setPath(url)
        self.xbmcplugin.setResolvedUrl(
            self.handle, succeeded=True,
            listitem=list_item)
| aitgon/wopmars | wopmars/tests/resource/model/FooBaseH2.py | Python | mit | 192 | 0 |
from sqlalchemy import Column, String
from FooBaseH import FooBaseH
class FooBaseH2(FooBaseH):
name2 = Column(String)
__mapper_args__ = {
'polymorphic_identity': "2"
}
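`FooBaseH` itself is not in this record, but for `polymorphic_identity` to work the base typically declares a discriminator column. A sketch of that shape (every name here is a guess for illustration, not wopmars code):

```python
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class FooBaseH(Base):
    __tablename__ = 'foobaseh'
    id = Column(Integer, primary_key=True)
    type = Column(String)  # discriminator for single-table inheritance
    __mapper_args__ = {'polymorphic_on': type,
                       'polymorphic_identity': '1'}
```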
| Scalr/packages | pkgs/six/setup.py | Python | apache-2.0 | 274 | 0.00365 |
import setuptools
version = '1.8.0'
setuptools.setup(
name='six',
version=version,
    url='https://pypi.python.org/packages/source/s/six/six-%s.tar.gz' % version,
license='MIT License',
author='Benjamin Peterson',
    author_email='benjamin@python.org'
)
| dufferzafar/mitmproxy | mitmproxy/contrib/tnetstring.py | Python | mit | 8,799 | 0.000909 |
"""
tnetstring: data serialization using typed netstrings
======================================================
This is a custom Python 3 implementation of tnetstrings.
Compared to other implementations, the main difference
is that this implementation supports a custom unicode datatype.
An ordinary tnetstring is a blob of data prefixed with its length and postfixed
with its type. Here are some examples:
>>> tnetstring.dumps("hello world")
11:hello world,
>>> tnetstring.dumps(12345)
5:12345#
>>> tnetstring.dumps([12345, True, 0])
19:5:12345#4:true!1:0#]
This module gives you the following functions:
:dump: dump an object as a tnetstring to a file
:dumps: dump an object as a tnetstring to a string
:load: load a tnetstring-encoded object from a file
:loads: load a tnetstring-encoded object from a string
Note that since parsing a tnetstring requires reading all the data into memory
at once, there's no efficiency gain from using the file-based versions of these
functions. They're only here so you can use load() to read precisely one
item from a file or socket without consuming any extra data.
The tnetstrings specification explicitly states that strings are binary blobs
and forbids the use of unicode at the protocol level.
**This implementation decodes dictionary keys as surrogate-escaped ASCII**,
all other strings are returned as plain bytes.
:Copyright: (c) 2012-2013 by Ryan Kelly <ryan@rfk.id.au>.
:Copyright: (c) 2014 by Carlo Pires <carlopires@gmail.com>.
:Copyright: (c) 2016 by Maximilian Hils <tnetstring3@maximilianhils.com>.
:License: MIT
"""
import collections
import six
from typing import io, Union, Tuple # noqa
TSerializable = Union[None, bool, int, float, bytes, list, tuple, dict]
def dumps(value):
# type: (TSerializable) -> bytes
"""
This function dumps a python object as a tnetstring.
"""
# This uses a deque to collect output fragments in reverse order,
# then joins them together at the end. It's measurably faster
# than creating all the intermediate strings.
q = collections.deque()
_rdumpq(q, 0, value)
return b''.join(q)
def dump(value, file_handle):
# type: (TSerializable, io.BinaryIO) -> None
"""
This function dumps a python object as a tnetstring and
writes it to the given file.
"""
file_handle.write(dumps(value))
def _rdumpq(q, size, value):
# type: (collections.deque, int, TSerializable) -> int
"""
Dump value as a tnetstring, to a deque instance, last chunks first.
This function generates the tnetstring representation of the given value,
pushing chunks of the output onto the given deque instance. It pushes
the last chunk first, then recursively generates more chunks.
When passed in the current size of the string in the queue, it will return
the new size of the string in the queue.
Operating last-chunk-first makes it easy to calculate the size written
for recursive structures without having to build their representation as
a string. This is measurably faster than generating the intermediate
strings, especially on deeply nested structures.
"""
write = q.appendleft
if value is None:
write(b'0:~')
return size + 3
elif value is True:
write(b'4:true!')
return size + 7
elif value is False:
write(b'5:false!')
return size + 8
elif isinstance(value, six.integer_types):
data = str(value).encode()
ldata = len(data)
span = str(ldata).encode()
write(b'%s:%s#' % (span, data))
return size + 2 + len(span) + ldata
elif isinstance(value, float):
# Use repr() for float rather than str().
# It round-trips more accurately.
# Probably unnecessary in later python versions that
# use David Gay's ftoa routines.
data = repr(value).encode()
ldata = len(data)
span = str(ldata).encode()
write(b'%s:%s^' % (span, data))
return size + 2 + len(span) + ldata
elif isinstance(value, bytes):
data = value
ldata = len(data)
span = str(ldata).encode()
write(b',')
write(data)
write(b':')
write(span)
return size + 2 + len(span) + ldata
elif isinstance(value, six.text_type):
data = value.encode("utf8")
ldata = len(data)
span = str(ldata).encode()
write(b';')
write(data)
write(b':')
write(span)
return size + 2 + len(span) + ldata
elif isinstance(value, (list, tuple)):
write(b']')
init_size = size = size + 1
for item in reversed(value):
size = _rdumpq(q, size, item)
span = str(size - init_size).encode()
write(b':')
write(span)
return size + 1 + len(span)
elif isinstance(value, dict):
write(b'}')
init_size = size = size + 1
for (k, v) in value.items():
size = _rdumpq(q, size, v)
size = _rdumpq(q, size, k)
span = str(size - init_size).encode()
write(b':')
write(span)
return size + 1 + len(span)
else:
raise ValueError("unserializable object: {} ({})".format(value, type(value)))
def loads(string):
# type: (bytes) -> TSerializable
"""
This function parses a tnetstring into a python object.
"""
return pop(string)[0]
def load(file_handle):
# type: (io.BinaryIO) -> TSerializable
"""load(file) -> object
This function reads a tnetstring from a file and parses it into a
python object. The file must support the read() method, and this
function promises not to read more data than necessary.
"""
# Read the length prefix one char at a time.
# Note that the netstring spec explicitly forbids padding zeros.
c = file_handle.read(1)
data_length = b""
while c.isdigit():
data_length += c
if len(data_length) > 9:
raise ValueError("not a tnetstring: absurdly large length prefix")
c = file_handle.read(1)
if c != b":":
raise ValueError("not a tnetstring: missing or invalid length prefix")
data = file_handle.read(int(data_length))
data_type = file_handle.read(1)[0]
return parse(data_type, data)
def parse(data_type, data):
# type: (int, bytes) -> TSerializable
if six.PY2:
data_type = ord(data_type)
if data_type == ord(b','):
return data
if data_type == ord(b';'):
return data.decode("utf8")
if data_type == ord(b'#'):
try:
if six.PY2:
return long(data)
return int(data)
except ValueError:
raise ValueError("not a tnetstring: invalid integer literal: {}".format(data))
if data_type == ord(b'^'):
try:
return float(data)
except ValueError:
            raise ValueError("not a tnetstring: invalid float literal: {}".format(data))
if data_type == ord(b'!'):
if data == b'true':
return True
elif data == b'false':
return False
else:
raise ValueError("not a tnetstring: invalid boolean literal: {}".format(data))
if data_type == ord(b'~'):
if data:
raise ValueError("not a tnetstring: invalid null l
|
iteral")
return None
if data_type == ord(b']'):
l = []
while data:
item, data = pop(data)
l.append(item)
return l
if data_type == ord(b'}'):
d = {}
while data:
key, data = pop(data)
val, data = pop(data)
d[key] = val
return d
raise ValueError("unknown type tag: {}".format(data_type))
def pop(data):
# type: (bytes) -> Tuple[TSerializable, bytes]
"""
This function parses a tnetstring into a python object.
It returns a tuple giving the parsed object and a string
containing any unparsed data from the end of the string.
"""
# Parse out data length, type and remaining string.
try:
        length, data = data.split(b':', 1)
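The record is clipped inside `pop()`, but the docstring examples above are enough for a round trip (bytes throughout, since this implementation keeps strings as blobs):

```python
assert dumps(12345) == b'5:12345#'
assert dumps(b'hello world') == b'11:hello world,'
assert loads(b'19:5:12345#4:true!1:0#]') == [12345, True, 0]
```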
| eroicaleo/LearningPython | interview/leet/011_Container_With_Most_Water.py | Python | mit | 845 | 0.027219 |
#!/usr/bin/env python
class Solution:
def maxArea(self, height):
"""
:type height: List[int]
:rtype: int
"""
i, j = 0, len(height)-1
l, r = height[i], height[j]
maxArea = (j - i) * min(l, r)
        while j > i:
if l < r:
while height[i] <= l:
i += 1
elif r < l:
                while height[j] <= r:
j -= 1
else:
i, j = i+1, j-1
l, r = height[i], height[j]
print(i, j, l, r)
area = (j - i) * min(l, r)
if area > maxArea:
maxArea = area
return maxArea
sol = Solution()
height_list = [
[1,8,6,2,5,4,8,3,7],
[1,2],
[1,2,4,3],
[2,3,4,5,18,17,6],
]
for height in height_list:
print(sol.maxArea(height))
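The two-pointer move is justified because the area is capped by the shorter wall: only advancing the shorter side can ever improve the bound. For the four inputs above, the classic answers are:

```python
expected = [49, 1, 4, 17]
for height, want in zip(height_list, expected):
    assert sol.maxArea(height) == want
```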
| kaeawc/django-auth-example | test/controllers/info/test_authors.py | Python | mit | 425 | 0 |
# -*- coding: utf-8 -*-
from test import DjangoTestCase
class AuthorsSpec(DjangoTestCase):
def test_success(self):
"""
        Anyone should be able to view the authors page
"""
response = self.http_get(u"/authors")
assert response is not None
assert response.ok is True, response
assert response.controller == u"authors", response
        assert response.duration < 10
| zaibacu/wutu | wutu/bench/compiler_bench.py | Python | mit | 535 | 0.001869 |
from wutu.compiler.common import create_stream
from wutu.compiler.service import create_service_js
from wutu.compiler.controller import create_controller_js
from wutu.util import *
def handle_creation(module, stream):
create_service_js(stream, module)
create_controller_js(stream, module)
def main():
mod = Module()
mod.__name__ = "test_module"
stream = create_stream()
    for i in range(0, 1000):
handle_creation(mod, stream)
if __name__ == "__main__":
import cProfile
cProfile.run("main()")
| japaniel/CloudFerry | cloudferrylib/os/actions/deploy_snapshots.py | Python | apache-2.0 | 4,490 | 0 |
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cloudferrylib.base.action import action
from cloudferrylib.os.actions import snap_transfer
from cloudferrylib.os.actions import task_transfer
from cloudferrylib.utils.drivers import ssh_ceph_to_ceph
from cloudferrylib.utils import rbd_util
from cloudferrylib.utils import utils as utl
import copy
OLD_ID = 'old_id'
class DeployVolSnapshots(action.Action):
def run(self, storage_info=None, identity_info=None, **kwargs):
storage_info = copy.deepcopy(storage_info)
deploy_info = copy.deepcopy(storage_info)
deploy_info.update(identity_info)
storage_info.update(identity_info)
volume_resource = self.cloud.resources[utl.STORAGE_RESOURCE]
for vol_id, vol in deploy_info[utl.VOLUMES_TYPE].iteritems():
if vol['snapshots']:
vol_info = vol[utl.VOLUME_BODY]
snapshots_list = \
[snap_info for snap_info in vol['snapshots'].values()]
snapshots_list.sort(key=lambda x: x['created_at'])
for snap in snapshots_list:
                    if snapshots_list.index(snap) == 0:
act_snap_transfer = \
snap_transfer.SnapTransfer(
self.init,
ssh_ceph_to_ceph.SSHCephToCeph,
1)
else:
snap_num = snapshots_list.index(snap)
snap['prev_snapname'] = \
snapshots_list[snap_num - 1]['name']
                        act_snap_transfer = \
snap_transfer.SnapTransfer(
self.init,
ssh_ceph_to_ceph.SSHCephToCeph,
2)
act_snap_transfer.run(volume=vol_info, snapshot_info=snap)
volume_resource.create_snapshot(
volume_id=vol_id,
display_name=snap['display_name'],
display_description=snap['display_description'])
act_snap_transfer = snap_transfer.SnapTransfer(
self.init,
ssh_ceph_to_ceph.SSHCephToCeph,
3)
act_snap_transfer.run(volume=vol_info,
snapshot_info=snapshots_list[-1])
for snap in snapshots_list:
if volume_resource.config.storage.host:
act_delete_redundant_snap = \
rbd_util.RbdUtil(cloud=self.cloud,
config_migrate=self.cfg.migrate,
host=vol_info[utl.HOST_DST])
act_delete_redundant_snap.snap_rm(
vol_info[utl.PATH_DST],
snap['name'])
else:
act_delete_redundant_snap = \
rbd_util.RbdUtil(cloud=self.cloud,
config_migrate=self.cfg.migrate)
act_delete_redundant_snap.snap_rm(
vol_info[utl.PATH_DST],
snap['name'], vol_info[utl.HOST_DST])
else:
one_volume_info = {
'one_volume_info': {
utl.VOLUMES_TYPE: {
vol_id: vol
}
}
}
act_transport_vol_data = \
task_transfer.TaskTransfer(self.init,
'SSHCephToCeph',
input_info='one_volume_info')
act_transport_vol_data.run(**one_volume_info)
return {}
| mbiciunas/libnix | src/libnix/config/script/run_script.py | Python | gpl-3.0 | 1,491 | 0 |
"""
LibNix
Copyright (C) 2017 Mark Biciunas
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from libnix.config.config import Config
class RunScript:
def __init__(self):
self._config = Config()
def run(self, name: str):
_script = self._config.get_scripts().find_by_name(name)
try:
code = compile(_script.get_code(), name, 'exec')
exec(code)
except SyntaxError as e:
            # print("Syntax Error: {}".format(e))
print("Syntax Error - filename: {}".format(e.filename))
print("Syntax Error - line: {}".format(e.lineno))
print("Syntax Error - msg: {}".format(e.msg))
print("Syntax Error - offset: {}".format(e.offset))
print("Syntax Error - text: {}".format(e.text))
except NameError as e:
for _arg in e.args:
print("Syntax Error - args: {}".format(_arg))
| RaitoBezarius/mangaki | mangaki/mangaki/management/commands/lookup.py | Python | agpl-3.0 | 616 | 0.001623 |
from django.core.management.base import BaseCommand, CommandError
from mangaki.models import Work, Rating
from django.db import connection
from django.db.models import Count
from collections import Counter
import sys
class Command(BaseCommand):
args = ''
help = 'Lookup some work'
def handle(self, *args, **options):
work = Work.objects.filter(title__icontains=args[0]).annotate(Count('rating')).order_by('-rating__count')[0]
print(work.title, work.id)
nb = Counter()
        for rating in Rating.objects.filter(work=work):
nb[rating.choice] += 1
print(nb)
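`handle(*args)` relies on the pre-1.8 Django positional-args convention; on modern Django the argument must be declared. A sketch of the equivalent using Django's documented `add_arguments` API:

```python
class Command(BaseCommand):
    help = 'Lookup some work'

    def add_arguments(self, parser):
        parser.add_argument('title')  # replaces the old args = '' convention

    def handle(self, *args, **options):
        work = Work.objects.filter(title__icontains=options['title']) \
                           .annotate(Count('rating')).order_by('-rating__count')[0]
        print(work.title, work.id)
```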
| vpadillar/pventa | pventa/wsgi.py | Python | mit | 389 | 0 |
"""
WSGI config for pventa project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SET
|
TINGS_MODULE", "pventa.settings")
application = get_wsgi_application()
| jawilson/home-assistant | tests/components/tasmota/test_binary_sensor.py | Python | apache-2.0 | 13,393 | 0.000971 |
"""The tests for the Tasmota binary sensor platform."""
import copy
from datetime import timedelta
import json
from unittest.mock import patch
from hatasmota.utils import (
get_topic_stat_result,
get_topic_stat_status,
get_topic_tele_sensor,
get_topic_tele_will,
)
from homeassistant.components import binary_sensor
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import (
ATTR_ASSUMED_STATE,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
import homeassistant.util.dt as dt_util
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.common import async_fire_mqtt_message, async_fire_time_changed
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test normal state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"ON"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_ON
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"OFF"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
# Test periodic state update
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"ON"}')
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"OFF"}')
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
# Test polled state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Switch1":"ON"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_ON
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Switch1":"OFF"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
# Test force update flag
entity = hass.data["entity_components"]["binary_sensor"].get_entity(
"binary_sensor.tasmota_binary_sensor_1"
)
assert entity.force_update
async def test_controlling_state_via_mqtt_switchname(hass, mqtt_mock, setup_tasmota):
"""Test state upda
|
te via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
config["swn"][0] = "Custom Name"
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.custom_name")
assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test normal state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/RESULT", '{"Custom Name":{"Action":"ON"}}'
)
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_ON
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/RESULT", '{"Custom Name":{"Action":"OFF"}}'
)
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_OFF
# Test periodic state update
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Custom Name":"ON"}')
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Custom Name":"OFF"}')
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_OFF
# Test polled state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Custom Name":"ON"}}'
)
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_ON
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Custom Name":"OFF"}}'
)
state = hass.states.get("binary_sensor.custom_name")
assert state.state == STATE_OFF
async def test_pushon_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 13
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test normal state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"ON"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_ON
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"OFF"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
# Test periodic state update is ignored
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"ON"}')
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
# Test polled state update is ignored
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Switch1":"ON"}}'
)
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == STATE_OFF
async def test_friendly_names(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
config["swc"][1] = 1
config["swn"][1] = "Beer"
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == "unavailable"
assert state.attributes.get("friendly_name") == "Tasmota binary_sensor 1"
state = hass.states.get("binary_sensor.beer")
assert state.state == "unavailable"
assert state.attributes.get("friendly_name") == "Beer"
async def test_off_delay(hass, mqtt_mock, setup_tasmota):
"""Test off_delay option."""
    config = copy.deepcopy(DEFAULT_CONFIG)
| tobetter/linaro-image-tools_packaging | linaro_image_tools/testing.py | Python | gpl-3.0 | 2,121 | 0 |
# Copyright (C) 2010, 2011 Linaro
#
# Author: James Westby <james.westby@linaro.org>
#
# This file is part of Linaro Image Tools.
#
# Linaro Image Tools is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Linaro Image Tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Linaro Image Tools; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import os
import tempfile
from testtools import TestCase
class TestCaseWithFixtures(TestCase):
"""A TestCase with the ability to easily add 'fixtures'.
A fixture is an object which can be created and cleaned up, and
this test case knows how to manage them to ensure that they will
always be cleaned up at the end of the test.
"""
def useFixture(self, fixture):
"""Make use of a fixture, ensuring that it will be cleaned up.
Given a fixture, this method will run the `setUp` method of
the fixture, and ensure that its `tearDown` method will be
called at the end of the test, regardless of success or failure.
        :param fixture: the fixture to use.
:type fixture: an object with setUp and tearDown methods.
:return: the fixture that was passed in.
"""
        self.addCleanup(fixture.tearDown)
fixture.setUp()
return fixture
def createTempFileAsFixture(self, prefix='tmp', dir=None):
"""Create a temp file and make sure it is removed on tearDown.
:return: The filename of the file created.
"""
_, filename = tempfile.mkstemp(prefix=prefix, dir=dir)
self.addCleanup(os.unlink, filename)
return filename
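Typical use inside a test, following the docstring's contract (`MyFixture` is a hypothetical object with `setUp`/`tearDown` methods):

```python
class ExampleTest(TestCaseWithFixtures):
    def test_something(self):
        fixture = self.useFixture(MyFixture())      # torn down automatically
        tmp = self.createTempFileAsFixture(prefix='demo-')
        self.assertTrue(os.path.exists(tmp))        # unlinked on cleanup
```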
| jeffery-do/Vizdoombot | doom/lib/python3.5/site-packages/dask/tests/test_async.py | Python | mit | 6,053 | 0.00033 |
from __future__ import absolute_import, division, print_function
import dask
from dask.async import (start_state_from_dask, get_sync, finish_task, sortkey,
remote_exception)
from dask.order import order
from dask.utils_test import GetFunctionTestMixin, inc, add
fib_dask = {'f0': 0, 'f1': 1, 'f2': 1, 'f3': 2, 'f4': 3, 'f5': 5, 'f6': 8}
def test_start_state():
dsk = {'x': 1, 'y': 2, 'z': (inc, 'x'), 'w': (add, 'z', 'y')}
result = start_state_from_dask(dsk)
expected = {'cache': {'x': 1, 'y': 2},
'dependencies': {'w': set(['y', 'z']),
'x': set([]),
'y': set([]),
'z': set(['x'])},
'dependents': {'w': set([]),
'x': set(['z']),
'y': set(['w']),
'z': set(['w'])},
'finished': set([]),
'released': set([]),
'running': set([]),
'ready': ['z'],
'waiting': {'w': set(['z'])},
'waiting_data': {'x': set(['z']),
'y': set(['w']),
'z': set(['w'])}}
assert result == expected
def test_start_state_looks_at_cache():
dsk = {'b': (inc, 'a')}
cache = {'a': 1}
result = start_state_from_dask(dsk, cache)
assert result['dependencies']['b'] == set(['a'])
assert result['ready'] == ['b']
def test_start_state_with_redirects():
dsk = {'x': 1, 'y': 'x', 'z': (inc, 'y')}
result = start_state_from_dask(dsk)
assert result['cache'] == {'x': 1}
def test_start_state_with_independent_but_runnable_tasks():
assert start_state_from_dask({'x': (inc, 1)})['ready'] == ['x']
def test_start_state_with_tasks_no_deps():
dsk = {'a': [1, (inc, 2)],
'b': [1, 2, 3, 4],
'c': (inc, 3)}
state = start_state_from_dask(dsk)
assert list(state['cache'].keys()) == ['b']
assert 'a' in state['ready'] and 'c' in state['ready']
deps = dict((k, set()) for k in 'abc')
assert state['dependencies'] == deps
assert state['dependents'] == deps
def test_finish_task():
dsk = {'x': 1, 'y': 2, 'z': (inc, 'x'), 'w': (add, 'z', 'y')}
sortkey = order(dsk).get
state = start_state_from_dask(dsk)
state['ready'].remove('z')
state['running'] = set(['z', 'other-task'])
task = 'z'
result = 2
state['cache']['z'] = result
finish_task(dsk, task, state, set(), sortkey)
assert state == {'cache': {'y': 2, 'z': 2},
'dependencies': {'w': set(['y', 'z']),
'x': set([]),
'y': set([]),
'z': set(['x'])},
'finished': set(['z']),
'released': set(['x']),
'running': set(['other-task']),
'dependents': {'w': set([]),
'x': set(['z']),
'y': set(['w']),
'z': set(['w'])},
'ready': ['w'],
'waiting': {},
'waiting_data': {'y': set(['w']),
'z': set(['w'])}}
class TestGetAsync(GetFunctionTestMixin):
get = staticmethod(get_sync)
def test_get_sync_num_workers(self):
self.get({'x': (inc, 'y'), 'y': 1}, 'x', num_workers=2)
def test_cache_options():
try:
from chest import Chest
except ImportError:
return
cache = Chest()
def inc2(x):
assert 'y' in cache
return x + 1
with dask.set_options(cache=cache):
get_sync({'x': (inc2, 'y'), 'y': 1}, 'x')
def test_sort_key():
L = ['x', ('x', 1), ('z', 0), ('x', 0)]
assert sorted(L, key=sortkey) == ['x', ('x', 0), ('x', 1), ('z', 0)]
def test_callback():
f = lambda x: x + 1
dsk = {'a': (f, 1)}
from dask.threaded import get
def start_callback(key, d, state):
assert key == 'a' or key is None
assert d == dsk
assert isinstance(state, dict)
def end_callback(key, value, d, state, worker_id):
assert key == 'a' or key is None
assert value == 2 or value is None
assert d == dsk
assert isinstance(state, dict)
get(dsk, 'a', start_callback=start_callback, end_callback=end_callback)
def test_order_of_startstate():
dsk = {'a': 1, 'b': (inc, 'a'), 'c': (inc, 'b'),
'x': 1, 'y': (inc, 'x')}
result = start_state_from_dask(dsk)
assert result['ready'] == ['y', 'b']
dsk = {'x': 1, 'y': (inc, 'x'), 'z': (inc, 'y'),
'a': 1, 'b': (inc, 'a')}
result = start_state_from_dask(dsk)
assert result['ready'] == ['b', 'y']
def test_nonstandard_exceptions_propagate():
class MyException(Exception):
def __init__(self, a, b):
self.a = a
self.b = b
def __str__(self):
return "My Exception!"
def f():
raise MyException(1, 2)
from dask.threaded import get
try:
get({'x': (f,)}, 'x')
assert False
except MyException as e:
assert "My Exception!" in str(e)
assert "Traceback" in str(e)
assert 'a' in dir(e)
assert 'traceback' in dir(e)
assert e.exception.a == 1 and e.exception.b == 2
assert e.a == 1 and e.b == 2
def test_remote_exception():
e = TypeError("hello")
a = remote_exception(e, 'traceback')
b = remote_exception(e, 'traceback')
assert type(a) == type(b)
assert isinstance(a, TypeError)
assert 'hello' in str(a)
assert 'traceback' in str(a)
def test_ordering():
L = []
def append(i):
L.append(i)
dsk = {('x', i): (append, i) for i in range(10)}
x_keys = sorted(dsk)
dsk['y'] = (lambda *args: None, list(x_keys))
get_sync(dsk, 'y')
assert L == sorted(L)
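# Added sketch (not in the original module): a dask graph is a plain dict
# whose values are either data or task tuples (callable, arg1, ...); string
# arguments that name other keys are replaced by their computed values
# before the callable runs.
def test_minimal_graph_sketch():
    dsk = {'a': 1, 'b': (inc, 'a'), 'c': (add, 'a', 'b')}
    assert get_sync(dsk, 'c') == 3  # inc(1) == 2, then add(1, 2) == 3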
|
JustinTulloss/harmonize.fm
|
uploader/publish_win.py
|
Python
|
mit
| 800
| 0.025
|
import os, sys
import config
def notify_user(msg):
sys.stderr.write(msg+'\n')
raw_input('Press enter to exit ')
    sys.exit(1)
def run_cmd(cmd):
if os.system(cmd) != 0:
notify_user('Command "%s" failed!'%cmd)
def run_cmds(*cmds):
for cmd in cmds:
run_cmd(cmd)
if config.current != config.production:
notify_user('Change current configuration to production before running')
logo_dir = r'..\sandbox\logos\icons'
run_cmds(
    r'png2ico harmonize_icon.ico %s\orangecircle16.png %s\orangecircle32.png %s\orangecircle48.png' % (logo_dir, logo_dir, logo_dir),
'python setup.py py2exe',
r'"C:\Program Files\Inno Setup 5\iscc" windows_installer.iss',
'pscp "Output\Harmonizer Setup.exe" harmonize.fm:/var/opt/uploaders')
raw_input('Publish completed successfully')
|
rkmaddox/mne-python
|
mne/io/meas_info.py
|
Python
|
bsd-3-clause
| 97,239
| 0
|
# -*- coding: utf-8 -*-
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Matti Hämäläinen <msh@nmr.mgh.harvard.edu>
# Teon Brooks <teon.brooks@gmail.com>
# Stefan Appelhoff <stefan.appelhoff@mailbox.org>
#
# License: BSD (3-clause)
from collections import Counter, OrderedDict
import contextlib
from copy import deepcopy
import datetime
from io import BytesIO
import operator
from textwrap import shorten
import numpy as np
from .pick import (channel_type, pick_channels, pick_info,
get_channel_type_constants, pick_types)
from .constants import FIFF, _coord_frame_named
from .open import fiff_open
from .tree import dir_tree_find
from .tag import (read_tag, find_tag, _ch_coord_dict, _update_ch_info_named,
_rename_list)
from .proj import (_read_proj, _write_proj, _uniquify_projs, _normalize_proj,
_proj_equal, Projection)
from .ctf_comp import _read_ctf_comp, write_ctf_comp
from .write import (start_file, end_file, start_block, end_block,
write_string, write_dig_points, write_float, write_int,
write_coord_trans, write_ch_info, write_name_list,
write_julian, write_float_matrix, write_id, DATE_NONE)
from .proc_history import _read_proc_history, _write_proc_history
from ..transforms import invert_transform, Transform, _coord_frame_name
from ..utils import (logger, verbose, warn, object_diff, _validate_type,
_stamp_to_dt, _dt_to_stamp, _pl, _is_numeric,
_check_option, _on_missing, _check_on_missing)
from ._digitization import (_format_dig_points, _dig_kind_proper, DigPoint,
_dig_kind_rev, _dig_kind_ints, _read_dig_fif)
from ._digitization import write_dig as _dig_write_dig
from .compensator import get_current_comp
from ..data.html_templates import info_template
b = bytes # alias
_SCALAR_CH_KEYS = ('scanno', 'logno', 'kind', 'range', 'cal', 'coil_type',
'unit', 'unit_mul', 'coord_frame')
_ALL_CH_KEYS_SET = set(_SCALAR_CH_KEYS + ('loc', 'ch_name'))
# XXX we need to require these except when doing simplify_info
_MIN_CH_KEYS_SET = set(('kind', 'cal', 'unit', 'loc', 'ch_name'))
def _get_valid_units():
"""Get valid units according to the International System of Units (SI).
The International System of Units (SI, :footcite:`WikipediaSI`) is the
default system for describing units in the Brain Imaging Data Structure
(BIDS). For more information, see the BIDS specification
:footcite:`BIDSdocs` and the appendix "Units" therein.
References
----------
.. footbibliography::
"""
valid_prefix_names = ['yocto', 'zepto', 'atto', 'femto', 'pico', 'nano',
'micro', 'milli', 'centi', 'deci', 'deca', 'hecto',
'kilo', 'mega', 'giga', 'tera', 'peta', 'exa',
'zetta', 'yotta']
valid_prefix_symbols = ['y', 'z', 'a', 'f', 'p', 'n', u'µ', 'm', 'c', 'd',
'da', 'h', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
valid_unit_names = ['metre', 'kilogram', 'second', 'ampere', 'kelvin',
'mole', 'candela', 'radian', 'steradian', 'hertz',
'newton', 'pascal', 'joule', 'watt', 'coulomb', 'volt',
'farad', 'ohm', 'siemens', 'weber', 'tesla', 'henry',
'degree Celsius', 'lumen', 'lux', 'becquerel', 'gray',
'sievert', 'katal']
valid_unit_symbols = ['m', 'kg', 's', 'A', 'K', 'mol', 'cd', 'rad', 'sr',
                          'Hz', 'N', 'Pa', 'J', 'W', 'C', 'V', 'F', u'Ω', 'S',
'Wb', 'T', 'H', u'°C', 'lm', 'lx', 'Bq', 'Gy', 'Sv',
'kat']
# Valid units are all possible combinations of either prefix name or prefix
# symbol together with either unit name or unit symbol. E.g., nV for
# nanovolt
    valid_units = []
valid_units += ([''.join([prefix, unit]) for prefix in valid_prefix_names
for unit in valid_unit_names])
valid_units += ([''.join([prefix, unit]) for prefix in valid_prefix_names
for unit in valid_unit_symbols])
valid_units += ([''.join([prefix, unit]) for prefix in valid_prefix_symbols
for unit in valid_unit_names])
valid_units += ([''.join([prefix, unit]) for prefix in valid_prefix_symbols
for unit in valid_unit_symbols])
# units are also valid without a prefix
valid_units += valid_unit_names
valid_units += valid_unit_symbols
# we also accept "n/a" as a unit, which is the default missing value in
# BIDS
valid_units += ["n/a"]
return tuple(valid_units)
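# Added illustration (not in the original source): the tuple returned above
# contains the bare units, every prefix/unit combination, and "n/a", e.g.
#     units = _get_valid_units()
#     'V' in units          # bare unit symbol
#     u'µV' in units        # prefix symbol + unit symbol
#     'nanovolt' in units   # prefix name + unit name
#     'n/a' in units        # BIDS missing-value marker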
@verbose
def _unique_channel_names(ch_names, max_length=None, verbose=None):
"""Ensure unique channel names."""
if max_length is not None:
ch_names[:] = [name[:max_length] for name in ch_names]
unique_ids = np.unique(ch_names, return_index=True)[1]
if len(unique_ids) != len(ch_names):
dups = {ch_names[x]
for x in np.setdiff1d(range(len(ch_names)), unique_ids)}
warn('Channel names are not unique, found duplicates for: '
'%s. Applying running numbers for duplicates.' % dups)
for ch_stem in dups:
overlaps = np.where(np.array(ch_names) == ch_stem)[0]
# We need an extra character since we append '-'.
# np.ceil(...) is the maximum number of appended digits.
if max_length is not None:
n_keep = (
max_length - 1 - int(np.ceil(np.log10(len(overlaps)))))
else:
n_keep = np.inf
n_keep = min(len(ch_stem), n_keep)
ch_stem = ch_stem[:n_keep]
for idx, ch_idx in enumerate(overlaps):
ch_name = ch_stem + '-%s' % idx
if ch_name not in ch_names:
ch_names[ch_idx] = ch_name
else:
raise ValueError('Adding a running number for a '
'duplicate resulted in another '
'duplicate name %s' % ch_name)
return ch_names
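# Added illustration (not in the original source): duplicates are renamed by
# appending '-0', '-1', ... (after optional truncation to max_length), e.g.
#     _unique_channel_names(['EEG1', 'EEG1', 'EEG2'])
#     -> ['EEG1-0', 'EEG1-1', 'EEG2']  (a warning is emitted)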
class MontageMixin(object):
"""Mixin for Montage setting."""
@verbose
def set_montage(self, montage, match_case=True, match_alias=False,
on_missing='raise', verbose=None):
"""Set EEG sensor configuration and head digitization.
Parameters
----------
%(montage)s
%(match_case)s
%(match_alias)s
%(on_missing_montage)s
%(verbose_meth)s
Returns
-------
inst : instance of Raw | Epochs | Evoked
The instance.
Notes
-----
Operates in place.
"""
# How to set up a montage to old named fif file (walk through example)
# https://gist.github.com/massich/f6a9f4799f1fbeb8f5e8f8bc7b07d3df
from ..channels.montage import _set_montage
info = self if isinstance(self, Info) else self.info
_set_montage(info, montage, match_case, match_alias, on_missing)
return self
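# Added usage sketch (not in the original source; assumes the public mne API
# for montage construction, applied to a Raw/Epochs/Evoked instance `raw`):
#     montage = mne.channels.make_standard_montage('standard_1020')
#     raw.set_montage(montage, on_missing='warn')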
def _format_trans(obj, key):
try:
t = obj[key]
except KeyError:
pass
else:
if t is not None:
obj[key] = Transform(t['from'], t['to'], t['trans'])
def _check_ch_keys(ch, ci, name='info["chs"]', check_min=True):
ch_keys = set(ch)
bad = sorted(ch_keys.difference(_ALL_CH_KEYS_SET))
if bad:
raise KeyError(
f'key{_pl(bad)} errantly present for {name}[{ci}]: {bad}')
if check_min:
bad = sorted(_MIN_CH_KEYS_SET.difference(ch_keys))
if bad:
raise KeyError(
f'key{_pl(bad)} missing for {name}[{ci}]: {bad}',)
class Info(dict, MontageMixin):
"""Measurement information.
This data structure behaves like a dictionary. It contains all metadata
that is available for a recording. However, its keys are restricted to
those provided by the
`FIF format specification <https://g
|
Wasper256/evo-departments
|
extentions.py
|
Python
|
gpl-3.0
| 103
| 0
|
"""Initialazation "db" sqlalchemy object."""
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
|
brburns/netdisco
|
setup.py
|
Python
|
apache-2.0
| 513
| 0
|
from setuptools import setup, find_packages
setup(name='netdisco',
version='0.9.2',
description='Discover devices on your local network',
url='https://github.com/home-assistant/netdisco',
      author='Paulus Schoutsen',
author_email='Paulus@PaulusSchoutsen.nl',
license='Apache License 2.0',
      install_requires=['netifaces>=0.10.0', 'requests>=2.0',
'zeroconf==0.17.6'],
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False)
|
violine1101/MCEdit-Unified
|
albow/tree.py
|
Python
|
isc
| 20,371
| 0.004124
|
# -*- coding: utf-8 -*-
#
# tree.py
#
# (c) D.C.-G. 2014
#
# Tree widget for albow
#
from albow.widget import Widget
from albow.menu import Menu
from albow.fields import IntField, FloatField, TextFieldWrapped
from albow.controls import CheckBox, AttrRef, Label, Button
from albow.dialogs import ask, alert, input_text_buttons
from albow.translate import _
from extended_widgets import ChoiceButton
from theme import ThemeProperty
from layout import Column, Row
from dialogs import Dialog
from palette_view import PaletteView
from scrollpanel import ScrollRow
from utils import blit_in_rect
from pygame import image, Surface, Rect, SRCALPHA, draw, event
import copy
#-----------------------------------------------------------------------------
item_types_map = {dict: ("Compound", None, {}),
int: ("Integer", IntField, 0),
float: ("Floating point", FloatField, 0.0),
unicode: ("Text", TextFieldWrapped, ""),
bool: ("Boolean", CheckBox, True),
}
def setup_map_types_item(mp=None):
if not mp:
mp = item_types_map
map_types_item = {}
for k, v in mp.items():
if v[0] in map_types_item.keys():
_v = map_types_item.pop(v[0])
map_types_item[u"%s (%s)"%(_(v[0]), _v[0].__name__)] = _v
map_types_item[u"%s (%s)"%(_(v[0]), k.__name__)] = (k, v[1], v[2])
else:
map_types_item[v[0]] = (k, v[1], v[2])
return map_types_item
map_types_item = setup_map_types_item()
#-----------------------------------------------------------------------------
# Tree item builder methods
def create_base_item(self, i_type, i_name, i_value):
return i_name, type(i_type)(i_value)
create_dict = create_int = create_float = create_unicode = create_bool = create_base_item
#-----------------------------------------------------------------------------
class SetupNewItemPanel(Dialog):
def __init__(self, type_string, types=map_types_item, ok_action=None):
self.type_string = type_string
self.ok_action = ok_action
title = Label("Choose default data")
self.t, widget, self.v = types[type_string]
self.n = u""
w_name = TextFieldWrapped(ref=AttrRef(self, 'n'))
self.w_value = self.get_widget(widget)
col = Column([Column([title,]), Label(_("Item Type: %s")%type_string, doNotTranslate=True), Row([Label("Name"), w_name], margin=0), Row([Label("Value"), self.w_value], margin=0), Row([Button("OK", action=ok_action or self.dismiss_ok), Button("Cancel", action=self.dismiss)], margin=0)], margin=0, spacing=2)
Dialog.__init__(self, client=col)
def dismiss_ok(self):
self.dismiss((self.t, self.n, getattr(self.w_value, 'value', map_types_item.get(self.type_string, [None,] * 3)[2])))
def get_widget(self, widget):
if hasattr(widget, 'value'):
value = widget(value=self.v)
elif hasattr(widget, 'text'):
value = widget(text=self.v)
elif widget is None:
value = Label("This item type is a container. Add chlidren later.")
else:
msg = "*** Error in SelectItemTypePanel.__init__():\n Widget <%s> has no 'text' or 'value' member."%widget
print msg
value = Label(msg)
return value
#-----------------------------------------------------------------------------
class SelectItemTypePanel(Dialog):
def __init__(self, title, responses, default=None, ok_action=None):
self.response = responses[0]
self.ok_action = ok_action
title = Label(title)
        self.w_type = ChoiceButton(responses)
col = Column([title, self.w_type, Row([Button("OK", action=ok_action or self.dismiss_ok), Button("Cancel", action=ok_action or self.dismiss)], margin=0)], margin=0, spacing=2)
Dialog.__init__(self, client=col)
def dismiss_ok(self):
self.dismiss(self.w_type.selectedChoice)
#-----------------------------------------------------------------------------
def select_item_type(ok_action, types=map_types_item):
if len(types) > 1:
choices = types.keys()
choices.sort()
result = SelectItemTypePanel("Choose item type", responses=choices, default=None).present()
else:
result = types.keys()[0]
if type(result) in (str, unicode):
return SetupNewItemPanel(result, types, ok_action).present()
return None
#-----------------------------------------------------------------------------
class TreeRow(ScrollRow):
def click_item(self, n, e):
self.parent.click_item(n, e.local)
def mouse_down(self, e):
if e.button == 3:
_e = event.Event(e.type, {'alt': e.alt, 'meta': e.meta, 'ctrl': e.ctrl,
'shift': e.shift, 'button': 1, 'cmd': e.cmd,
'local': e.local, 'pos': e.pos,
'num_clicks': e.num_clicks})
ScrollRow.mouse_down(self, _e)
self.parent.show_menu(e.local)
else:
ScrollRow.mouse_down(self, e)
#-----------------------------------------------------------------------------
class Tree(Column):
"""..."""
rows = []
row_margin = 2
column_margin = 2
bullet_size = ThemeProperty('bullet_size')
bullet_color_active = ThemeProperty('bullet_color_active')
bullet_color_inactive = ThemeProperty('bullet_color_inactive')
def __init__(self, *args, **kwargs):
self.menu = [("Add", "add_item"),
("Delete", "delete_item"),
("New child", "add_child"),
("Rename", "rename_item"),
("", ""),
("Cut", "cut_item"),
("Copy", "copy_item"),
("Paste", "paste_item"),
("Paste as child", "paste_child"),
]
if not hasattr(self, 'map_types_item'):
global map_types_item
self.map_types_item = setup_map_types_item()
self.selected_item_index = None
self.selected_item = None
self.clicked_item = None
self.copyBuffer = kwargs.pop('copyBuffer', None)
self._parent = kwargs.pop('_parent', None)
self.styles = kwargs.pop('styles', {})
self.compound_types = [dict,] + kwargs.pop('compound_types', [])
self.item_types = self.compound_types + kwargs.pop('item_types', [a[0] for a in self.map_types_item.values()] or [int, float, unicode, bool])
for t in self.item_types:
if 'create_%s'%t.__name__ in globals().keys():
setattr(self, 'create_%s'%t.__name__, globals()['create_%s'%t.__name__])
self.show_fields = kwargs.pop('show_fields', False)
self.deployed = []
self.data = data = kwargs.pop("data", {})
self.draw_zebra = draw_zebra = kwargs.pop('draw_zebra', True)
# self.inner_width = kwargs.pop('inner_width', 'auto')
self.inner_width = kwargs.pop('inner_width', 500)
self.__num_rows = len(data.keys())
self.build_layout()
# row_height = self.font.size(' ')[1]
row_height = self.font.get_linesize()
self.treeRow = treeRow = TreeRow((self.inner_width, row_height), 10, draw_zebra=draw_zebra)
Column.__init__(self, [treeRow,], **kwargs)
def dispatch_key(self, name, evt):
if not hasattr(evt, 'key'):
return
if name == "key_down":
keyname = self.root.getKey(evt)
if keyname == "Up" and self.selected_item_index > 0:
if self.selected_item_index == None:
self.selected_item_index = -1
self.selected_item_index = max(self.selected_item_index - 1, 0)
elif keyname == "Down" and self.selected_item_index < len(self.rows) - 1:
if self.selected_item_index == None:
self.selected_item_index = -1
self.selected_item_index += 1
elif keyname == 'Page down':
if self.selected_item_index == None:
self.selected_item_index = -1
|
sileht/gnocchi
|
gnocchi/cli/manage.py
|
Python
|
apache-2.0
| 3,776
| 0
|
# Copyright (c) 2013 Mirantis Inc.
# Copyright (c) 2015-2017 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import os
import sys
import daiquiri
from oslo_config import cfg
from oslo_config import generator
import six
from gnocchi import archive_policy
from gnocchi import incoming
from gnocchi import indexer
from gnocchi import service
from gnocchi import storage
LOG = daiquiri.getLogger(__name__)
def config_generator():
args = sys.argv[1:]
if args is None:
args = ['--output-file', 'etc/gnocchi/gnocchi.conf']
return generator.main(['--config-file',
'%s/../gnocchi-config-generator.conf' %
os.path.dirname(__file__)]
+ args)
_SACK_NUMBER_OPT = cfg.IntOpt(
"sacks-number", min=1, max=65535, required=True,
help="Number of incoming storage sacks to create.")
def upgrade():
conf = cfg.ConfigOpts()
sack_number_opt = copy.copy(_SACK_NUMBER_OPT)
sack_number_opt.default = 128
conf.register_cli_opts([
cfg.BoolOpt("skip-index", default=False,
help="Skip index upgrade."),
cfg.BoolOpt("skip-storage", default=False,
help="Skip storage upgrade."),
cfg.BoolOpt("skip-incoming", default=False,
help="Skip incoming storage upgrade."),
cfg.BoolOpt("skip-archive-policies-creation", default=False,
help="Skip default archive policies creation."),
sack_number_opt,
])
conf = service.prepare_service(conf=conf, log_to_std=True)
if not conf.skip_index:
index = indexer.get_driver(conf)
LOG.info("Upgrading indexer %s", index)
index.upgrade()
if not conf.skip_storage:
s = storage.get_driver(conf)
LOG.info("Upgrading storage %s", s)
s.upgrade()
if not conf.skip_incoming:
i = incoming.get_driver(conf)
LOG.info("Upgrading incoming storage %s", i)
i.upgrade(conf.sacks_number)
if (not conf.skip_archive_policies_creation
and not index.list_archive_policies()
and not index.list_archive_policy_rules()):
if conf.skip_index:
index = indexer.get_driver(conf)
for name, ap in six.iteritems(archive_policy.DEFAULT_ARCHIVE_POLICIES):
index.create_archive_policy(ap)
index.create_archive_policy_rule("default", "*", "low")
def change_sack_size():
conf = cfg.ConfigOpts()
conf.register_cli_opts([_SACK_NUMBER_OPT])
conf = service.prepare_service(conf=conf, log_to_std=True)
s = incoming.get_driver(conf)
try:
report = s.measures_report(details=False)
except incoming.SackDetectionError:
LOG.error('Unable to detect the number of storage sacks.\n'
'Ensure gnocchi-upgrade has been executed.')
return
remainder = report['summary']['measures']
if remainder:
LOG.error('Cannot change sack when non-empty backlog. Process '
'remaining %s measures and try again', remainder)
return
LOG.info("Removing current %d sacks", s.NUM_SACKS)
s.remove_sacks()
LOG.info("Creating new %d sacks", conf.sacks_number)
s.upgrade(conf.sacks_number)
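# Added usage sketch (console-script names are assumed, not shown in this
# file):
#     gnocchi-upgrade --sacks-number 64
#     gnocchi-change-sack-size --sacks-number 256
# change_sack_size() refuses to run while the incoming backlog is non-empty,
# since resizing the sacks would drop unprocessed measures.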
|
ozamiatin/oslo.messaging
|
oslo_messaging/tests/functional/test_functional.py
|
Python
|
apache-2.0
| 13,879
| 0
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
import uuid
import concurrent.futures
from oslo_config import cfg
import six.moves
from testtools import matchers
import oslo_messaging
from oslo_messaging.tests.functional import utils
class CallTestCase(utils.SkipIfNoTransportURL):
def setUp(self):
super(CallTestCase, self).setUp(conf=cfg.ConfigOpts())
if self.url.startswith("kafka://"):
self.skipTest("kafka does not support RPC API")
self.conf.prog = "test_prog"
self.conf.project = "test_project"
self.config(heartbeat_timeout_threshold=0,
group='oslo_messaging_rabbit')
def test_specific_server(self):
group = self.useFixture(utils.RpcServerGroupFixture(
self.conf, self.url)
)
client = group.client(1)
client.append(text='open')
self.assertEqual('openstack', client.append(text='stack'))
client.add(increment=2)
self.assertEqual(12, client.add(increment=10))
self.assertEqual(9, client.subtract(increment=3))
self.assertEqual('openstack', group.servers[1].endpoint.sval)
self.assertEqual(9, group.servers[1].endpoint.ival)
for i in [0, 2]:
self.assertEqual('', group.servers[i].endpoint.sval)
self.assertEqual(0, group.servers[i].endpoint.ival)
def test_server_in_group(self):
group = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url)
)
client = group.client()
data = [c for c in 'abcdefghijklmn']
for i in data:
client.append(text=i)
for s in group.servers:
self.assertThat(len(s.endpoint.sval), matchers.GreaterThan(0))
actual = [[c for c in s.endpoint.sval] for s in group.servers]
self.assertThat(actual, utils.IsValidDistributionOf(data))
def test_different_exchanges(self):
# If the different exchanges are not honoured, then the
# teardown may hang unless we broadcast all control messages
# to each server
group1 = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url,
use_fanout_ctrl=True))
group2 = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url, exchange="a",
use_fanout_ctrl=True))
group3 = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url, exchange="b",
use_fanout_ctrl=True))
        client1 = group1.client(1)
data1 = [c for c in 'abcdefghijklmn']
for i in data1:
client1.append(text=i)
client2 = group2.client()
data2 = [c for c in 'opqrstuvwxyz']
for i in data2:
client2.append(text=i)
actual1 = [[c for c in s.endpoint.sval] for s in group1.servers]
self.assertThat(actual1, utils.IsValidDistributionOf(data1))
actual1 = [c for c in group1.servers[1].endpoint.sval]
self.assertThat([actual1], utils.IsValidDistributionOf(data1))
for s in group1.servers:
expected = len(data1) if group1.servers.index(s) == 1 else 0
self.assertEqual(expected, len(s.endpoint.sval))
self.assertEqual(0, s.endpoint.ival)
actual2 = [[c for c in s.endpoint.sval] for s in group2.servers]
for s in group2.servers:
self.assertThat(len(s.endpoint.sval), matchers.GreaterThan(0))
self.assertEqual(0, s.endpoint.ival)
self.assertThat(actual2, utils.IsValidDistributionOf(data2))
for s in group3.servers:
self.assertEqual(0, len(s.endpoint.sval))
self.assertEqual(0, s.endpoint.ival)
def test_timeout(self):
transport = self.useFixture(
utils.TransportFixture(self.conf, self.url)
)
target = oslo_messaging.Target(topic="no_such_topic")
c = utils.ClientStub(transport.transport, target, timeout=1)
self.assertThat(c.ping,
matchers.raises(oslo_messaging.MessagingTimeout))
def test_exception(self):
group = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url)
)
client = group.client(1)
client.add(increment=2)
self.assertRaises(ValueError, client.subtract, increment=3)
def test_timeout_with_concurrently_queues(self):
transport = self.useFixture(
utils.TransportFixture(self.conf, self.url)
)
target = oslo_messaging.Target(topic="topic_" + str(uuid.uuid4()),
server="server_" + str(uuid.uuid4()))
server = self.useFixture(
utils.RpcServerFixture(self.conf, self.url, target,
executor="threading"))
client = utils.ClientStub(transport.transport, target,
cast=False, timeout=5)
def short_periodical_tasks():
for i in range(10):
client.add(increment=1)
time.sleep(1)
with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
future = executor.submit(client.long_running_task, seconds=10)
executor.submit(short_periodical_tasks)
self.assertRaises(oslo_messaging.MessagingTimeout, future.result)
self.assertEqual(10, server.endpoint.ival)
class CastTestCase(utils.SkipIfNoTransportURL):
# Note: casts return immediately, so these tests utilise a special
# internal sync() cast to ensure prior casts are complete before
# making the necessary assertions.
def setUp(self):
super(CastTestCase, self).setUp()
if self.url.startswith("kafka://"):
self.skipTest("kafka does not support RPC API")
def test_specific_server(self):
group = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url)
)
client = group.client(1, cast=True)
client.append(text='open')
client.append(text='stack')
client.add(increment=2)
client.add(increment=10)
time.sleep(0.3)
client.sync()
group.sync(1)
self.assertIn(group.servers[1].endpoint.sval,
["openstack", "stackopen"])
self.assertEqual(12, group.servers[1].endpoint.ival)
for i in [0, 2]:
self.assertEqual('', group.servers[i].endpoint.sval)
self.assertEqual(0, group.servers[i].endpoint.ival)
def test_server_in_group(self):
if self.url.startswith("amqp:"):
self.skipTest("QPID-6307")
group = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url)
)
client = group.client(cast=True)
for i in range(20):
client.add(increment=1)
for i in range(len(group.servers)):
# expect each server to get a sync
client.sync()
group.sync(server="all")
total = 0
for s in group.servers:
ival = s.endpoint.ival
self.assertThat(ival, matchers.GreaterThan(0))
self.assertThat(ival, matchers.LessThan(20))
total += ival
self.assertEqual(20, total)
def test_fanout(self):
group = self.useFixture(
utils.RpcServerGroupFixture(self.conf, self.url)
)
client = group.client('all', cast=True)
client.append(text='open')
client.append(text='stack')
client.add(increment=2)
|
clarammdantas/Online-Jugde-Problems
|
online_judge_solutions/python.py
|
Python
|
mit
| 531
| 0.037665
|
klein = raw_input()
gross = raw_input()
zahl = []
if gross[0] == '0':
zahl.append((1,0))
else:
zahl.append((0,1))
for i in range(1,len(gross)):
if gross[i] == '0':
zahl.append((zahl[i - 1][0] + 1, zahl[i - 1][1]))
else:
zahl.append((zahl[i - 1][0], zahl[i - 1][1] + 1))
plus = 0
for i in range(len(klein)):
    if klein[i] == '0':
plus += zahl[len(gross) - len(klein) + i][1]
if i > 0:
plus -= zahl[i - 1][1]
else:
plus += zahl[len(gross) - len(klein) + i][0]
if i > 0:
plus -= zahl[i - 1][0]
print plus
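# Added explanation (not in the original submission): zahl[i] holds the
# prefix counts (#'0', #'1') of gross[:i+1]. Sliding klein over gross,
# position i of klein overlaps gross[i : len(gross)-len(klein)+i+1], so each
# loop iteration adds, via two prefix-sum lookups, the number of opposite
# bits in that window; the printed total is the Hamming distance summed over
# all alignments. Worked example: klein='0', gross='01' gives windows '0'
# and '1', so plus == 0 + 1 == 1.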
|
TheTimeTunnel/scout-camp
|
PROJECT/src/scoutcamp/database.py
|
Python
|
gpl-3.0
| 7,206
| 0.005291
|
# -*- encoding: utf-8 -*-
import yaml
import sys
import os
import json
import sqlite3
from .exceptions import *
class DataList(object):
__data_list = None
def __init__(self, _path="", _list_file=""):
self.__data_list = list()
self.path = _path
self.listfile = _list_file
self.extension = _list_file.split('.')[-1]
        # Open the list file (YAML)
try:
list_file = open(_path+_list_file,"r", encoding='utf-8')
except IOError:
DataBaseException("Não foi possível encontrar a lista \""+_path+_list_file+"\"!")
sys.exit(1)
        # Load the file into a dict
yml_list = yaml.load(list_file.read())
list_file.close()
        # Try to convert the dict into a list
self.__set_data_list(yml_list)
        # If that fails, raise a template exception
if type(self.__data_list) is not list:
DataBaseException("Erro ao ler a lista \""+_path+_list_file+"\"!")
def __set_data_list(self, temp_dict={}):
"""Método privado __set_data_list
- Percorre uma dict para forçar que seja usada somente a
- primeira posição e atribui o resultado ao atributo privado
- __template_list (list).
Argumentos:
- self (object): instância da própria classe
- temp_dict (dict): dict herdada do yaml (default=dict vazio)
Retorno:
- Sem retorno
"""
temp_list = []
        # Walk the dict to build a list
for i in temp_dict:
temp_list.append(temp_dict[i])
list_ = temp_list[0]
if list_[0] == '*':
temp_list = os.listdir(self.path)
list_ = list()
for item in temp_list:
if item == self.listfile: continue
                list_.append(item.replace('.'+self.extension, ''))
        # Keep only the first position of the list in the new list
self.__data_list = list_
def get_data_list(self):
"""Método público get_data_list
- Retorna a lista dos templates disponíveis.
|
Argumentos:
- self (object): instância da própria classe
Retorno:
- self.__data_list (list): lista de templates
"""
return self.__data_list
class DataBase(object):
__id = None
__attributes = None
__relations = None
def __init__(self, path="", scout="", ext=".yml"):
try:
scout_file = open(path+scout+ext,'r', encoding='utf-8')
scout_dict = yaml.load(scout_file.read())
scout_file.close()
except IOError:
DataBaseException("Não foi possível encontrar o arquivo \""+path+scout+ext+"\"!")
sys.exit(1)
if "id" in scout_dict:
self.__id = scout_dict["id"]
            # Prevent duplicated instantiation
self.__attributes = dict()
self.__relations = dict()
for key in scout_dict.keys():
if type(scout_dict[key]) is not list:
self.__attributes[key] = scout_dict[key]
else:
self.__relations[key] = scout_dict[key]
else:
DataBaseException("O atributo id é obrigatório e não foi definido para {}".format(scout))
sys.exit(1)
def get_attributes(self):
return self.__attributes
def get_id(self):
return self.__id
def get_relations(self):
return self.__relations
class JsonParser(object):
__default_out = None
__default_ext = None
__default_indent = None
def __init__ (self, _default_out, _default_ext=".json", _default_indent=8):
self.__default_out = _default_out
self.__default_ext = _default_ext
self.__default_indent = _default_indent
def to_json(self, _dict, _indent=None):
if _indent is None:
_indent = self.__default_indent
return json.dumps(_dict, sort_keys=True, indent=_indent)
def parse_list(self, _list):
main_string = "{"
for i in _list:
main_string += '\n\t"'+i.get_id()+'":'
main_dict = dict()
main_dict.update(i.get_attributes())
main_dict.update(i.get_relations())
main_string += self.to_json(main_dict)
main_string += ','
main_string = main_string[0:len(main_string)-1]
main_string += '\n}'
return main_string
def save(self, _json, _outfile, _alt_path=""):
output = self.__default_out + _alt_path + _outfile+self.__default_ext
json_output = open(output, "w", encoding='utf-8')
json_output.write(_json)
json_output.close()
def save_all(self, _list, _outfile, _alt_path=""):
self.save(self.parse_list(_list), _outfile, _alt_path)
class SQLiteExport(object):
connection = None
__tables = []
__inserts = []
def __init__(self, database=None):
        # Prevent duplicated instantiation
self.__tables = list()
self.__inserts = list()
self.connection = sqlite3.connect(database)
self.cursor = self.connection.cursor()
def save(self):
self.connection.commit()
def close(self):
self.connection.close()
def new_column(self, name, c_type=None):
if not c_type:
c_type = "TEXT"
table = "`"+name+"` "+c_type+","
return table
def new_table(self, columns=None, table=None, relationship=None):
if relationship:
table_header = "CREATE TABLE \""+table+"_"+relationship+"_relationship\"("
table_header += "\n\t"+self.new_column(table+"_id")+"\n\t"+self.new_column(relationship+"_id")
else:
table_header = "CREATE TABLE \""+table+"\"("
for attr in range(len(columns)):
table_header += "\n\t"+self.new_column(columns[attr])
table_header = table_header[0:len(table_header)-1]+")"
self.__tables.append(table_header)
def new_insert(self, table, attributes, relationship=None):
if relationship:
insert_header = "INSERT INTO "+table+"_"+relationship+"_relationship ("+table+"_id, "+relationship+"_id)\n VALUES ("
insert_header += "'"+attributes[0]+"','"+attributes[1]+"',"
else:
insert_header = "INSERT INTO "+table+" ("
for key in attributes.keys():
insert_header += key+","
insert_header = insert_header[0:len(insert_header)-1]+")\n VALUES ("
for val in attributes.values():
val = "%s" % val
insert_header += "'%s'," % val
insert_header = insert_header[0:len(insert_header)-1]+")"
self.__inserts.append(insert_header)
def crate_tables(self):
for table in self.__tables:
print(table)
self.cursor.execute(table)
def insert_into(self):
for insert in self.__inserts:
print(insert)
self.cursor.execute(insert)
if __name__ == '__main__':
pass
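# Added usage sketch (paths and names are hypothetical):
#     data_list = DataList('data/', 'list.yml')
#     records = [DataBase('data/', name) for name in data_list.get_data_list()]
#     JsonParser('output/').save_all(records, 'all_records')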
|
ichi23de5/ichi_Repo
|
sale_order_line_edit/__openerp__.py
|
Python
|
gpl-3.0
| 342
| 0.04386
|
# -*- coding: utf-8 -*-
{
"name": "Sale Order Line Edit",
"summary": "Edit Sale Order For
|
m",
"version": "9.0.0.1.1",
"category": "Sales",
"website": "http://www.toyo-kiki.co.jp",
"author": "ichi",
"license": "AGPL-3",
"application": Fal
|
se,
"installable": True,
"depends": [
"sale",
],
"data": [
"views/sale_view.xml",
],
}
|
mailund/CoaSim
|
Python/customMarkerTest.py
|
Python
|
gpl-2.0
| 2,572
| 0.018663
|
#!/bin/env python
from CoaSim import *
cm = CustomMarker(0.1)
assert cm.position == 0.1
class MyMarker(CustomMarker):
def __init__(self,pos):
CustomMarker.__init__(self,pos)
def defaultValue(self):
return 1
def mutate(self, parentAllele, edgeLength):
return parentAllele+1
mm = MyMarker(0.5)
assert mm.position == 0.5
# regression test...
assert simulate([mm],5,seed=1).sequences == [[4], [5], [3], [5], [2]]
class NonMarker(object):
pass
try:
simulate([NonMarker()],2)
assert False
except TypeError, e:
assert str(e) == 'arg #1 contains a non-marker'
class Uninitialized(Marker): pass
try:
simulate([Uninitialized()],2)
assert False
except ValueError, e:
assert str(e) == 'arg #1 contains an un-initialized marker'
class Uninitialized(CustomMarker):
def __init__(self): pass
try:
simulate([Uninitialized()],2)
assert False
except ValueError, e:
assert str(e) == 'arg #1 contains an un-initialized marker'
class MissingDefaultValue(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
try:
    simulate([MissingDefaultValue()],2)
assert False
except AttributeError, e:
assert str(e) == 'defaultValue'
class IncorrectDefaultValue(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self, x):
return 3
try:
simulate([IncorrectDefaultValue()],2)
assert False
except TypeError, e:
pass
class IncorrectDefaultValue(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self):
return None
try:
simulate([IncorrectDefaultValue()],2)
assert False
except TypeError, e:
assert str(e) == 'defaultValue() must return an integer.'
class MissingMutate(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self): return 0
try:
simulate([MissingMutate()],2)
assert False
except AttributeError, e:
assert str(e) == 'mutate'
class IncorrectMutate(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self): return 0
def mutate(self): return 0
try:
simulate([IncorrectMutate()],2)
assert False
except TypeError, e:
pass
class IncorrectMutate(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self): return 0
def mutate(self,parentAllele,edgeLength): return ""
try:
simulate([IncorrectMutate()],2)
assert False
except TypeError, e:
pass
|
ChinaMassClouds/copenstack-server
|
openstack/src/nova-2014.2/nova/api/openstack/compute/plugins/v3/block_device_mapping_v1.py
|
Python
|
gpl-2.0
| 2,738
| 0
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The legacy block device mappings extension."""
from webob import exc
from nova.api.openstack import extensions
from nova import block_device
from nova import exception
from nova.i18n import _
from nova.openstack.common import strutils
ALIAS = "os-block-device-mapping-v1"
ATTRIBUTE_NAME = "block_device_mapping"
ATTRIBUTE_NAME_V2 = "block_device_mapping_v2"
class BlockDeviceMappingV1(extensions.V3APIExtensionBase):
"""Block device mapping boot support."""
name = "BlockDeviceMappingV1"
alias = ALIAS
version = 1
def get_resources(self):
return []
def get_controller_extensions(self):
return []
# use nova.api.extensions.server.extensions entry point to modify
# server create kwargs
# NOTE(gmann): This function is not supposed to use 'body_deprecated_param'
# parameter as this is placed to handle scheduler_hint extension for V2.1.
def server_create(self, server_dict, create_kwargs, body_deprecated_param):
block_device_mapping = server_dict.get(ATTRIBUTE_NAME, [])
block_device_mapping_v2 = server_dict.get(ATTRIBUTE_NAME_V2, [])
if block_device_mapping and block_device_mapping_v2:
expl = _('Using different block_device_mapping syntaxes '
'is not allowed in the same request.')
raise exc.HTTPBadRequest(explanation=expl)
for bdm in block_device_mapping:
try:
block_device.validate_device_name(bdm.get("device_name"))
block_device.validate_and_default_volume_size(bdm)
except exception.InvalidBDMFormat as e:
raise exc.HTTPBadRequest(explanation=e.format_message())
if 'delete_on_termination' in bdm:
bdm['delete_on_termination'] = strutils.bool_from_string(
bdm['delete_on_termination'])
if block_device_mapping:
create_kwargs['block_device_mapping'] = block_device_mapping
# Sets the legacy_bdm flag if we got a legacy block device mapping.
create_kwargs['legacy_bdm'] = True
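# Added usage sketch (values illustrative only): a legacy mapping entry looks
# like
#     {'device_name': '/dev/vdb', 'volume_size': '1',
#      'delete_on_termination': 'true'}
# server_create() validates each entry, coerces delete_on_termination to a
# bool, copies the list into create_kwargs and flags it with legacy_bdm=True.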
|
lorensen/VTKExamples
|
src/Python/Visualization/ViewFrog.py
|
Python
|
apache-2.0
| 5,808
| 0.001722
|
#!/usr/bin/env python
"""
"""
import vtk
def view_frog(fileName, tissues):
colors = vtk.vtkNamedColors()
tissueMap = CreateTissueMap()
colorLut = CreateFrogLut()
# Setup render window, renderer, and interactor.
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
for tissue in tissues:
actor = CreateFrogActor(fileName, tissueMap[tissue])
actor.GetProperty().SetDiffuseColor( colorLut.GetTableValue(tissueMap[tissue])[:3])
actor.GetProperty().SetSpecular(.5)
actor.GetProperty().SetSpecularPower(10)
renderer.AddActor(actor)
# print("Tissue:", tissue, ", Label:", tissueMap[tissue])
renderer.GetActiveCamera().SetViewUp(0, 0, -1)
renderer.GetActiveCamera().SetPosition(0, -1, 0)
renderer.GetActiveCamera().Azimuth(210)
renderer.GetActiveCamera().Elevation(30)
renderer.ResetCamera()
renderer.ResetCameraClippingRange()
renderer.GetActiveCamera().Dolly(1.5)
renderer.SetBackground(colors.GetColor3d("SlateGray"))
renderWindow.SetSize(640, 480)
renderWindow.Render()
    renderWindowInteractor.Start()
def main():
fileName, tissues = get_program_parameters()
view_frog(fileName, tissues)
def get_program_parameters():
import argparse
description = 'The complete frog without skin.'
epilogue = '''
For Figure 12-9b in the VTK Book:
Specify these tissues as parameters after the file name:
blood brain duodenum eyeRetina eyeWhite heart ileum kidney intestine liver lung nerve skeleton spleen stomach
'''
parser = argparse.ArgumentParser(description=description, epilog=epilogue,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('filename', help='frogtissue.mhd.')
parser.add_argument('tissues', nargs='+', help='List of one or more tissues.')
args = parser.parse_args()
return args.filename, args.tissues
def CreateFrogLut():
colors = vtk.vtkNamedColors()
colorLut = vtk.vtkLookupTable()
colorLut.SetNumberOfColors(17)
colorLut.SetTableRange(0, 16)
colorLut.Build()
colorLut.SetTableValue(0, 0, 0, 0, 0)
colorLut.SetTableValue(1, colors.GetColor4d("salmon")) # blood
colorLut.SetTableValue(2, colors.GetColor4d("beige")) # brain
colorLut.SetTableValue(3, colors.GetColor4d("orange")) # duodenum
colorLut.SetTableValue(4, colors.GetColor4d("misty_rose")) # eye_retina
colorLut.SetTableValue(5, colors.GetColor4d("white")) # eye_white
colorLut.SetTableValue(6, colors.GetColor4d("tomato")) # heart
colorLut.SetTableValue(7, colors.GetColor4d("raspberry")) # ileum
colorLut.SetTableValue(8, colors.GetColor4d("banana")) # kidney
colorLut.SetTableValue(9, colors.GetColor4d("peru")) # l_intestine
colorLut.SetTableValue(10, colors.GetColor4d("pink")) # liver
colorLut.SetTableValue(11, colors.GetColor4d("powder_blue")) # lung
colorLut.SetTableValue(12, colors.GetColor4d("carrot")) # nerve
colorLut.SetTableValue(13, colors.GetColor4d("wheat")) # skeleton
colorLut.SetTableValue(14, colors.GetColor4d("violet")) # spleen
colorLut.SetTableValue(15, colors.GetColor4d("plum")) # stomach
return colorLut
def CreateTissueMap():
tissueMap = dict()
tissueMap["blood"] = 1
tissueMap["brain"] = 2
tissueMap["duodenum"] = 3
tissueMap["eyeRetina"] = 4
tissueMap["eyeWhite"] = 5
tissueMap["heart"] = 6
tissueMap["ileum"] = 7
tissueMap["kidney"] = 8
tissueMap["intestine"] = 9
tissueMap["liver"] = 10
tissueMap["lung"] = 11
tissueMap["nerve"] = 12
tissueMap["skeleton"] = 13
tissueMap["spleen"] = 14
tissueMap["stomach"] = 15
return tissueMap
def CreateFrogActor(fileName, tissue):
reader = vtk.vtkMetaImageReader()
reader.SetFileName(fileName)
reader.Update()
selectTissue = vtk.vtkImageThreshold()
selectTissue.ThresholdBetween(tissue, tissue)
selectTissue.SetInValue(255)
selectTissue.SetOutValue(0)
selectTissue.SetInputConnection(reader.GetOutputPort())
gaussianRadius = 1
gaussianStandardDeviation = 2.0
gaussian = vtk.vtkImageGaussianSmooth()
gaussian.SetStandardDeviations(gaussianStandardDeviation, gaussianStandardDeviation, gaussianStandardDeviation)
gaussian.SetRadiusFactors(gaussianRadius, gaussianRadius, gaussianRadius)
gaussian.SetInputConnection(selectTissue.GetOutputPort())
isoValue = 127.5
mcubes = vtk.vtkMarchingCubes()
mcubes.SetInputConnection(gaussian.GetOutputPort())
mcubes.ComputeScalarsOff()
mcubes.ComputeGradientsOff()
mcubes.ComputeNormalsOff()
mcubes.SetValue(0, isoValue)
smoothingIterations = 5
passBand = 0.001
featureAngle = 60.0
smoother = vtk.vtkWindowedSincPolyDataFilter()
smoother.SetInputConnection(mcubes.GetOutputPort())
smoother.SetNumberOfIterations(smoothingIterations)
smoother.BoundarySmoothingOff()
smoother.FeatureEdgeSmoothingOff()
smoother.SetFeatureAngle(featureAngle)
smoother.SetPassBand(passBand)
smoother.NonManifoldSmoothingOn()
smoother.NormalizeCoordinatesOn()
smoother.Update()
normals = vtk.vtkPolyDataNormals()
normals.SetInputConnection(smoother.GetOutputPort())
normals.SetFeatureAngle(featureAngle)
stripper = vtk.vtkStripper()
stripper.SetInputConnection(normals.GetOutputPort())
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(stripper.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
return actor
if __name__ == '__main__':
main()
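# Added usage sketch (data file name per the argparse help above):
#     python ViewFrog.py frogtissue.mhd brain heart liver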
|
nwjs/chromium.src
|
build/android/pylib/local/device/local_device_gtest_run_test.py
|
Python
|
bsd-3-clause
| 2,907
| 0.005504
|
#!/usr/bin/env vpython3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for local_device_gtest_test_run."""
# pylint: disable=protected-access
import os
import tempfile
import unittest
from pylib.gtest import gtest_test_instance
from pylib.local.device import local_device_environment
from pylib.local.device import local_device_gtest_run
from py_utils import tempfile_ext
import mock # pylint: disable=import-error
class LocalDeviceGtestRunTest(unittest.TestCase):
def setUp(self):
self._obj = local_device_gtest_run.LocalDeviceGtestRun(
mock.MagicMock(spec=local_device_environment.LocalDeviceEnvironment),
mock.MagicMock(spec=gtest_test_instance.GtestTestInstance))
def testExtractTestsFromFilter(self):
# Checks splitting by colons.
self.assertEqual([
'b17',
'm4e3',
'p51',
], local_device_gtest_run._ExtractTestsFromFilter('b17:m4e3:p51'))
# Checks the '-' sign.
self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('-mk2'))
    # Checks more than one asterisk.
self.assertIsNone(
local_device_gtest_run._ExtractTestsFromFilter('.mk2*:.M67*'))
    # Checks just an asterisk without a period.
self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('M67*'))
    # Checks an asterisk at the end with a period.
self.assertEqual(['.M67*'],
local_device_gtest_run._ExtractTestsFromFilter('.M67*'))
def testGetLLVMProfilePath(self):
path = local_device_gtest_run._GetLLVMProfilePath('test_dir', 'sr71', '5')
self.assertEqual(path, os.path.join('test_dir', 'sr71_5_%2m.profraw'))
  @mock.patch('subprocess.check_output')
def testMergeCoverageFiles(self, mock_sub):
with tempfile_ext.NamedTemporaryDirectory() as cov_tempd:
pro_tempd = os.path.join(cov_tempd, 'profraw')
os.mkdir(pro_tempd)
profdata = tempfile.NamedTemporaryFile(
          dir=pro_tempd,
delete=False,
suffix=local_device_gtest_run._PROFRAW_FILE_EXTENSION)
local_device_gtest_run._MergeCoverageFiles(cov_tempd, pro_tempd)
# Merged file should be deleted.
self.assertFalse(os.path.exists(profdata.name))
self.assertTrue(mock_sub.called)
@mock.patch('pylib.utils.google_storage_helper.upload')
def testUploadTestArtifacts(self, mock_gsh):
link = self._obj._UploadTestArtifacts(mock.MagicMock(), None)
self.assertFalse(mock_gsh.called)
self.assertIsNone(link)
result = 'A/10/warthog/path'
mock_gsh.return_value = result
with tempfile_ext.NamedTemporaryFile() as temp_f:
link = self._obj._UploadTestArtifacts(mock.MagicMock(), temp_f)
self.assertTrue(mock_gsh.called)
self.assertEqual(result, link)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
ThreatConnect-Inc/tcex
|
tcex/api/tc/v2/threat_intelligence/mappings/group/group_types/campaign.py
|
Python
|
apache-2.0
| 1,625
| 0.004308
|
"""ThreatConnect TI Campaign"""
# standard library
from typing import TYPE_CHECKING
# first-party
from tcex.api.tc.v2.threat_intelligence.mappings.group.group import Group
if TYPE_CHECKING:
# first-party
    from tcex.api.tc.v2.threat_intelligence.threat_intelligence import ThreatIntelligence
class Campaign(Group):
"""Unique API calls for Campaign API Endpoints
Args:
ti (ThreatIntelligence): An instance of the ThreatIntelligence Class.
name (str, kwargs): [Required for Create] The name for this Group.
owner (str, kwargs): The name for this Group. Default to default Org when not provided
first_seen (str, kwargs): The first seen datetime expression for this Group.
"""
def __init__(self, ti: 'ThreatIntelligence', **kwargs):
"""Initialize Class Properties."""
super().__init__(
ti, sub_type='Campaign', api_entity='campaign', api_branch='campaigns', **kwargs
)
def first_seen(self, first_seen):
"""Update the campaign with the new first_seen date.
Args:
first_seen (str): The first_seen date. Converted to %Y-%m-%dT%H:%M:%SZ date format
Returns:
requests.Response: The response from the API call.
"""
if not self.can_update():
self._handle_error(910, [self.type])
first_seen = self._utils.any_to_datetime(first_seen).strftime('%Y-%m-%dT%H:%M:%SZ')
self._data['firstSeen'] = first_seen
request = {'firstSeen': first_seen}
return self.tc_requests.update(self.api_type, self.api_branch, self.unique_id, request)
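# Added usage sketch (construction of `ti` is assumed, not shown here):
#     campaign = Campaign(ti, name='Dust Storm', owner='MyOrg')
#     response = campaign.first_seen('2017-01-01')
# first_seen() normalizes the date to %Y-%m-%dT%H:%M:%SZ before issuing the
# update request.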
|
estnltk/suffix-lemmatizer
|
bootstrap.py
|
Python
|
gpl-2.0
| 7,459
| 0.00067
|
##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os
import shutil
import sys
import tempfile
from optparse import OptionParser
__version__ = '2015-07-01'
# See zc.buildout's changelog if this version is up to date.
tmpeggs = tempfile.mkdtemp(prefix='bootstrap-')
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --find-links to point to local resources, you can keep
this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("--version",
action="store_true", default=False,
help=("Return bootstrap.py version."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", "--config-file",
help=("Specify the path to the buildout configuration "
"file to be used."))
parser.add_option("-f", "--find-links",
help=("Specify a URL to search for buildout releases"))
parser.add_option("--allow-site-packages",
action="store_true", default=False,
help=("Let bootstrap.py use existing site packages"))
parser.add_option("--buildout-version",
help="Use a specific zc.buildout version")
parser.add_option("--setuptools-version",
help="Use a specific setuptools version")
parser.add_option("--setuptools-to-dir",
help=("Allow for re-use of existing directory of "
"setuptools versions"))
options, args = parser.parse_args()
if options.version:
print("bootstrap.py version %s" % __version__)
sys.exit(0)
######################################################################
# load/install setuptools
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
ez = {}
if os.path.exists('ez_setup.py'):
exec(open('ez_setup.py').read(), ez)
else:
exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez)
if not options.allow_site_packages:
# ez_setup imports site, which adds site packages
# this will remove them from the path to ensure that incompatible versions
# of setuptools are not in the path
import site
# inside a virtualenv, there is no 'getsitepackages'.
# We can't remove these reliably
if hasattr(site, 'getsitepackages'):
for sitepackage_path in site.getsitepackages():
# Strip all site-packages directories from sys.path that
# are not sys.prefix; this is because on Windows
# sys.prefix is a site-package directory.
if sitepackage_path != sys.prefix:
sys.path[:] = [x for x in sys.path
if sitepackage_path not in x]
setup_args = dict(to_dir=tmpeggs, download_delay=0)
if options.setuptools_version is not None:
setup_args['version'] = options.setuptools_version
if options.setuptools_to_dir is not None:
setup_args['to_dir'] = options.setuptools_to_dir
ez['use_setuptools'](**setup_args)
import setuptools
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
######################################################################
# Install buildout
ws = pkg_resources.working_set
setuptools_path = ws.find(
pkg_resources.Requirement.parse('setuptools')).location
# Fix sys.path here as easy_install.pth added before PYTHONPATH
cmd = [sys.executable, '-c',
'import sys; sys.path[0:0] = [%r]; ' % setuptools_path +
'from setuptools.command.easy_install import main; main()',
'-mZqNxd', tmpeggs]
find_links = os.environ.get(
'bootstrap-testing-find-links',
options.find_links or
('http://downloads.buildout.org/'
if options.accept_buildout_test_releases else None)
)
if find_links:
cmd.extend(['-f', find_links])
requirement = 'zc.buildout'
version = options.buildout_version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
try:
return not parsed_version.is_prerelease
except AttributeError:
# Older setuptools
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
index = setuptools.package_index.PackageIndex(
search_path=[setuptools_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
import subprocess
if subprocess.call(cmd) != 0:
raise Exception(
"Failed to execute command:\n%s" % repr(cmd)[1:-1])
######################################################################
# Import and run buildout
ws.add_entry(tmpeggs)
ws.require(requirement)
import zc.buildout.buildout
if not [a for a in args if '=' not in a]:
args.append('bootstrap')
# if -c was provided, we push it back into args for buildout' main function
if options.config_file is not None:
args[0:0] = ['-c', options.config_file]
zc.buildout.buildout.main(args)
shutil.rmtree(tmpeggs)
|
reyiyo/eventoL
|
eventol/manager/models.py
|
Python
|
gpl-3.0
| 40,847
| 0.001689
|
# pylint: disable=arguments-differ
# pylint: disable=too-many-lines
import datetime
import itertools
import json
import logging
import re
from uuid import uuid4
from random import SystemRandom
from string import digits, ascii_lowercase, ascii_uppercase
from ckeditor.fields import RichTextField
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
from django.core.exceptions import ValidationError
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.formats import date_format
from django.utils.translation import ugettext_lazy as _, ugettext_noop as _noop
from forms_builder.forms.models import AbstractField, AbstractForm
from image_cropping import ImageCropField, ImageRatioField
from vote.models import VoteModel
from manager.utils.report import count_by
from manager.utils.slug import get_unique_slug
logger = logging.getLogger('eventol')
def validate_url(url):
if not re.match('^[a-zA-Z0-9-_]+$', url):
        raise ValidationError(_('URL can only contain letters, numbers, hyphens or underscores'))
def generate_ticket_code():
chars = digits + ascii_lowercase + ascii_uppercase
length = 21
return ''.join([SystemRandom().choice(chars) for _ in range(length)])
class EventManager(models.Manager):
def get_queryset(self):
today = timezone.localdate()
return super() \
.get_queryset() \
.annotate(attendees_count=models.Count('attendee', distinct=True)) \
.annotate(last_date=models.Max('eventdate__date')) \
.annotate(activity_proposal_is_open=models.Case(
models.When(models.Q(limit_proposal_date__gte=today), then=True),
default=False,
output_field=models.BooleanField()
)) \
.annotate(registration_is_open=models.Case(
models.When(models.Q(last_date__gte=today), then=True),
default=False,
output_field=models.BooleanField()
))
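    # Usage sketch (an assumption, not from the original file): the annotations
    # above become filterable fields on the default manager, e.g.
    #   Event.objects.filter(registration_is_open=True)
    #   Event.objects.order_by('-attendees_count')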
@staticmethod
def get_event_by_user(user, tag_slug=None):
if user.is_authenticated():
event_users = EventUser.objects.filter(user=user)
event_ids = [event_user.event.pk for event_user in list(event_users)]
queryset = Event.objects.filter(pk__in=event_ids)
if tag_slug:
queryset = queryset.filter(tags__slug=tag_slug)
else:
queryset = Event.objects.none()
return queryset
@staticmethod
def get_event_private_data():
events = []
for event in Event.objects.all():
organizers = Organizer.objects.filter(event_user__event=event)
users = map(lambda organizer: organizer.event_user.user, organizers)
full_names = [user.get_full_name() for user in users]
            events.append({
'organizers': ','.join(full_names),
'email': event.email,
'id': event.id
})
return events
class EventTag(models.Model):
"""A Event grouper"""
name = models.CharField(_('EventTag Name'), max_length=50, unique=True,
help_text=_("This name will be used as a slug"))
created_at = models.DateTimeField(_('Created At'), auto_now_add=True)
    updated_at = models.DateTimeField(_('Updated At'), auto_now=True)
background = models.ImageField(
null=True, blank=True,
help_text=_("A image to show in the background of"))
logo_header = models.ImageField(
null=True, blank=True,
help_text=_("This logo will be shown in the right corner of the page"))
logo_landing = models.ImageField(
null=True, blank=True,
help_text=_("Logo to show in the center of the page"))
message = models.TextField(max_length=280, null=True, blank=True,
help_text=_("A message to show in the center of the page"))
slug = models.SlugField(_('URL'), max_length=100,
help_text=_('For example: flisol-caba'), unique=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
"""
Override default save
it will add the slug field using slugify.
"""
if not self.slug:
self.slug = get_unique_slug(self, 'name', 'slug')
super().save(*args, **kwargs)
class CustomForm(AbstractForm):
def published(self, for_user=None):
return True
def __str__(self):
return self.title
class Meta:
ordering = ['title']
verbose_name = _('Custom Form')
verbose_name_plural = _('Custom Forms')
class CustomField(AbstractField):
form = models.ForeignKey(CustomForm, related_name='fields', on_delete=models.CASCADE)
order = models.IntegerField(_('Order'), null=False, blank=False)
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
fields_after = self.form.fields.filter(order__gte=self.order)
fields_after.update(order=models.F("order") - 1)
super().delete(*args, **kwargs)
def __str__(self):
return '{0}: {1} ({2})'.format(self.form, self.label, self.slug)
class Meta:
ordering = ['form', 'order']
verbose_name = _('Custom Field')
verbose_name_plural = _('Custom Fields')
unique_together = ('form', 'slug',)
class Event(models.Model):
objects = EventManager()
created_at = models.DateTimeField(_('Created At'), auto_now_add=True)
updated_at = models.DateTimeField(_('Updated At'), auto_now=True)
name = models.CharField(_('Event Name'), max_length=50)
abstract = models.TextField(_('Abstract'), max_length=250,
help_text=_('Idea of the event \
(one or two sentences)'))
limit_proposal_date = models.DateField(_('Limit Proposals Date'),
help_text=_('Limit date to submit talk proposals'))
registration_closed = models.BooleanField(
default=False, help_text=_("set it to True to force the registration to be closed"))
tags = models.ManyToManyField(
EventTag, blank=True, help_text=_("Select tags to show this event in the EventTag landing"))
event_slug = models.SlugField(_('URL'), max_length=100,
help_text=_('For example: flisol-caba'), unique=True)
customForm = models.ForeignKey(CustomForm, verbose_name=_noop('Custom form'),
blank=True, null=True)
cname = models.CharField(_('CNAME'), max_length=50, blank=True, null=True,
help_text=_('For example: flisol-caba'),
validators=[validate_url])
registration_code = models.UUIDField(
default=uuid4,
editable=False,
unique=True,
verbose_name=_('code'),
help_text=_('Code validator for in-place event self-registration'),
)
external_url = models.URLField(_('External URL'), blank=True, null=True, default=None,
help_text=_('http://www.my-awesome-event.com'))
email = models.EmailField(verbose_name=_('Email'))
event_information = RichTextField(verbose_name=_('Event Info'),
help_text=_('Event Info HTML'),
blank=True, null=True)
schedule_confirmed = models.BooleanField(_('Schedule Confirmed'), default=False)
use_installations = models.BooleanField(_('Use Installations'), default=True)
use_installers = models.BooleanField(_('Use Installers'), default=True)
use_collaborators = models.BooleanField(_('Use Collaborators'), default=True)
use_proposals = models.BooleanField(_('Use Proposals'), default=True)
use_talks = models.BooleanField(_('Use Talks'), default=True)
is_flisol = models.BooleanField(_('Is FLISoL'), default=False)
use_schedule = models.BooleanField(_('Use Schedule'), default=True)
place = models.TextF
|
mitsuhiko/sentry
|
src/sentry/web/frontend/error_page_embed.py
|
Python
|
bsd-3-clause
| 5,673
| 0.001234
|
from __future__ import absolute_import
from django import forms
from django.db import IntegrityError, transaction
from django.http import HttpResponse
from django.views.generic import View
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_exempt
from sentry.models import (
EventMapping, Group, ProjectKey, ProjectOption, UserReport
)
from sentry.web.helpers import render_to_response
from sentry.utils import json
from sentry.utils.http import is_valid_origin
from sentry.utils.validators import is_event_id
class UserReportForm(forms.ModelForm):
name = forms.CharField(max_length=128, widget=forms.TextInput(attrs={
'placeholder': 'Jane Doe',
}))
email = forms.EmailField(max_length=75, widget=forms.TextInput(attrs={
'placeholder': 'jane@example.com',
'type': 'email',
}))
comments = forms.CharField(widget=forms.Textarea(attrs={
'placeholder': "I clicked on 'X' and then hit 'Confirm'",
}))
class Meta:
model = UserReport
fields = ('name', 'email', 'comments')
class ErrorPageEmbedView(View):
def _get_project_key(self, request):
try:
dsn = request.GET['dsn']
except KeyError:
return
try:
key = ProjectKey.from_dsn(dsn)
except ProjectKey.DoesNotExist:
return
return key
def _get_origin(self, request):
return request.META.get('HTTP_ORIGIN', request.META.get('HTTP_REFERER'))
def _json_response(self, request, context=None, status=200):
if context:
content = json.dumps(context)
else:
content = ''
response = HttpResponse(content, status=status, content_type='application/json')
response['Access-Control-Allow-Origin'] = request.META.get('HTTP_ORIGIN', '')
response['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS'
response['Access-Control-Max-Age'] = '1000'
response['Access-Control-Allow-Headers'] = 'Content-Type, Authorization, X-Requested-With'
return response
@csrf_exempt
def dispatch(self, request):
try:
event_id = request.GET['eventId']
except KeyError:
return self._json_response(request, status=400)
if not is_event_id(event_id):
return self._json_response(request, status=400)
key = self._get_project_key(request)
if not key:
return self._json_response(request, status=404)
origin = self._get_origin(request)
if not origin:
return self._json_response(request, status=403)
if not is_valid_origin(origin, key.project):
return HttpResponse(status=403)
if request.method == 'OPTIONS':
return self._json_response(request)
        # TODO(dcramer): since we can't use a csrf cookie we should at the very
        # least sign the request / add some kind of nonce
initial = {
            'name': request.GET.get('name'),
'email': request.GET.get('email'),
}
form = UserReportForm(request.POST if request.method == 'POST' else None,
initial=initial)
if form.is_valid():
# TODO(dcramer): move this to post to the internal API
report = form.save(commit=False)
report.project = key.project
report.event_id = event_id
try:
mapping = EventMapping.objects.get(
event_id=report.event_id,
project_id=key.project_id,
)
except EventMapping.DoesNotExist:
# XXX(dcramer): the system should fill this in later
pass
else:
report.group = Group.objects.get(id=mapping.group_id)
try:
with transaction.atomic():
report.save()
except IntegrityError:
# There was a duplicate, so just overwrite the existing
# row with the new one. The only way this ever happens is
# if someone is messing around with the API, or doing
# something wrong with the SDK, but this behavior is
# more reasonable than just hard erroring and is more
# expected.
UserReport.objects.filter(
project=report.project,
event_id=report.event_id,
).update(
name=report.name,
email=report.email,
comments=report.comments,
date_added=timezone.now(),
)
return self._json_response(request)
elif request.method == 'POST':
return self._json_response(request, {
"errors": dict(form.errors),
}, status=400)
show_branding = ProjectOption.objects.get_value(
project=key.project,
key='feedback:branding',
default='1'
) == '1'
template = render_to_string('sentry/error-page-embed.html', {
'form': form,
'show_branding': show_branding,
})
context = {
'endpoint': mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
'template': mark_safe('*/' + json.dumps(template) + ';/*'),
}
return render_to_response('sentry/error-page-embed.js', context, request,
content_type='text/javascript')
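# Client-side usage sketch (an assumption, not part of this module): the embed
# script is loaded from the error page with the DSN and event id, e.g.
#   <script src="https://sentry.example.com/api/embed/error-page/?dsn=...&eventId=..."></script>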
|
srcc-msu/job_statistics
|
modules/job_analyzer/controllers.py
|
Python
|
mit
| 1,969
| 0.028441
|
from functools import partial
import time
from typing import List
from flask import Blueprint, Response, render_template, current_app, request
from core.job.helpers import expand_nodelist
from core.job.models import Job
from application.helpers import requires_auth
from core.monitoring.models import SENSOR_CLASS_MAP
from modules.job_table.helpers import get_color
job_analyzer_pages = Blueprint('job_analyzer', __name__
, template_folder='templates/', static_folder='static')
def assign_job_class(data: dict) -> str:
try:
if int(data["stats"]["cpu"]["avg"]) < 10 and float(data["stats"]["la"]["avg"]) < 0.9:
return "hanged"
else:
return "ok"
except TypeError:
return "no_data"
def get_running_stats(interval: int) -> List[dict]:
jobs = Job.query.filter(Job.state.in_(current_app.app_config.cluster["ACTIVE_JOB_STATES"])).all()
results = []
timestamp = int(time.time())
offset = current_app.app_config.monitoring["aggregation_interval"]
for job in jobs:
data = {
"stats" : {"cpu": {"avg": -1}, "la": {"avg": -1}}
, "job" : job.to_dict() # TODO: ???
}
results.append(data)
if timestamp - job.t_start < interval:
data["class"] = "recent"
continue
if timestamp > job.t_end < interval:
data["class"] = "outdated"
continue
data["stats"]["cpu"] = SENSOR_CLASS_MAP["cpu_user"].get_stats(job.expanded_nodelist, timestamp - interval + offset, timestamp)
data["stats"]["la"] = SENSOR_CLASS_MAP["loadavg"].get_stats(job.expanded_nodelist, timestamp - interval + offset, timestamp)
data["class"] = assign_job_class(data)
return results
@job_analyzer_pages.route("/running")
@requires_auth
def running_stats() -> Response:
interval = int(request.args.get("interval", 60*60)) # last hour by default
return render_template("running.html"
, stats=get_running_stats(interval)
, app_config=current_app.app_config
        , get_color=partial(get_color, thresholds=current_app.app_config.monitoring["thresholds"]))
|
srinivasanmit/all-in-all
|
puzzles/isprime.py
|
Python
|
gpl-3.0
| 548
| 0.018248
|
'''
Check for primality for both decimal inputs and binary inputs
'''
lst = [2, 4, 6, 7, 9, 13, 17, 99, 127, 139]
print lst
prime = []
def is_composite(n) :
for i in range(2, n/2 + 1) :
if n % i == 0 :
return True
return False
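# Illustrative examples: is_composite(9) -> True, is_composite(7) -> False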
for n in lst :
    if is_composite(n) :
continue
else :
prime.append(n)
print prime
print "Enter number to check for Primality : "
no = raw_input()
if not is_composite(int(no, 2)):
print "Entered number is prime"
else :
print "Entered number is composite"
|
SurfasJones/djcmsrc3
|
venv/lib/python2.7/site-packages/cms/tests/extensions.py
|
Python
|
mit
| 13,596
| 0.003898
|
from django.contrib.auth.models import Permission
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from cms.api import create_page
from cms.constants import PUBLISHER_STATE_DIRTY
from cms.models import Page
from cms.test_utils.project.extensionapp.models import MyPageExtension, MyTitleExtension
from cms.test_utils.testcases import SettingsOverrideTestCase as TestCase
from cms.extensions import extension_pool
from cms.extensions import TitleExtension
from cms.extensions import PageExtension
from cms.tests import AdminTestsBase
from cms.compat import get_user_model
class ExtensionsTestCase(TestCase):
def test_register_extension(self):
initial_extension_count = len(extension_pool.page_extensions)
# --- None extension registering -----------------------------
from cms.exceptions import SubClassNeededError
none_extension = self.get_none_extension_class()
self.assertRaises(SubClassNeededError, extension_pool.register, none_extension)
self.assertEqual(len(extension_pool.page_extensions), initial_extension_count)
self.assertEqual(len(extension_pool.title_extensions), initial_extension_count)
# --- Page registering ---------------------------------------
page_extension = self.get_page_extension_class()
# register first time
extension_pool.register(page_extension)
self.assertEqual(len(extension_pool.page_extensions), initial_extension_count+1)
# register second time
extension_pool.register(page_extension)
self.assertEqual(len(extension_pool.page_extensions), initial_extension_count+1)
self.assertIs(extension_pool.signaling_activated, True)
# --- Title registering --------------------------------------
title_extension = self.get_title_extension_class()
# register first time
extension_pool.register(title_extension)
self.assertEqual(len(extension_pool.title_extensions), initial_extension_count+1)
# register second time
extension_pool.register(title_extension)
self.assertEqual(len(extension_pool.title_extensions), initial_extension_count+1)
self.assertIs(extension_pool.signaling_activated, True)
# --- Unregister ---------------------------------------------
extension_pool.unregister(page_extension)
self.assertEqual(len(extension_pool.page_extensions), initial_extension_count)
extension_pool.unregister(title_extension)
self.assertEqual(len(extension_pool.title_extensions), initial_extension_count)
# Unregister an object that is not registered yet
extension_pool.unregister(page_extension)
extension_pool.unregister(title_extension)
def get_page_extension_class(self):
from django.db import models
class TestPageExtension(PageExtension):
content = models.CharField('Content', max_length=50)
return TestPageExtension
def get_title_extension_class(self):
from django.db import models
class TestTitleExtension(TitleExtension):
content = models.CharField('Content', max_length=50)
return TestTitleExtension
def get_none_extension_class(self):
class TestNoneExtension(object):
pass
return TestNoneExtension
def test_publish_page_extension(self):
page = create_page('Test Page Extension', "nav_playground.html", "en")
page_extension = MyPageExtension(extended_object=page, extra='page extension 1')
page_extension.save()
page.mypageextension = page_extension
# publish first time
page.publish('en')
self.assertEqual(page_extension.extra, page.publisher_public.mypageextension.extra)
self.assertEqual(page.get_publisher_state('en'), 0)
# change and publish again
page = Page.objects.get(pk=page.pk)
page_extension = page.mypageextension
page_extension.extra = 'page extension 1 - changed'
page_extension.save()
self.assertEqual(page.get_publisher_state('en', True), PUBLISHER_STATE_DIRTY)
page.publish('en')
self.assertEqual(page.get_publisher_state('en', True), 0)
# delete
page_extension.delete()
self.assertFalse(MyPageExtension.objects.filter(pk=page_extension.pk).exists())
self.assertEqual(page.get_publisher_state('en', True), PUBLISHER_STATE_DIRTY)
def test_publish_title_extension(self):
page = create_page('Test Title Extension', "nav_playground.html", "en")
title = page.get_title_obj()
title_extension = MyTitleExtension(extended_object=title, extra_title='title extension 1')
title_extension.save()
page.mytitleextension = title_extension
# publish first time
page.publish('en')
# import ipdb; ipdb.set_trace()
self.assertEqual(page.get_publisher_state('en'), 0)
self.assertEqual(title_extension.extra_title, page.publisher_public.get_title_obj().mytitleextension.extra_title)
# change and publish again
page = Page.objects.get(pk=page.pk)
title = page.get_title_obj()
title_extension = title.mytitleextension
title_extension.extra_title = 'title extension 1 - changed'
title_extension.save()
self.assertEqual(page.get_publisher_state('en', True), PUBLISHER_STATE_DIRTY)
page.publish('en')
self.assertEqual(page.get_publisher_state('en', True), 0)
# delete
title_extension.delete()
self.assertFalse(MyTitleExtension.objects.filter(pk=title_extension.pk).exists())
class ExtensionAdminTestCase(AdminTestsBase):
def setUp(self):
User = get_user_model()
self.admin, self.normal_guy = self._get_guys()
if get_user_model().USERNAME_FIELD == 'email':
self.no_page_permission_user = User.objects.create_user('no_page_permission', 'test2@test.com', 'test2@test.com')
else:
self.no_page_permission_user = User.objects.create_user('no_page_permission', 'test2@test.com', 'no_page_permission')
self.no_page_permission_user.is_staff = True
self.no_page_permission_user.is_active = True
self.no_page_permission_user.save()
[self.no_page_permission_user.user_permissions.add(p) for p in Permission.objects.filter(
codename__in=[
'change_mypageextension', 'change_mytitleextension',
'add_mypageextension', 'add_mytitleextension',
            'delete_mypageextension', 'delete_mytitleextension',
]
)]
self.site = Site.objects.get(pk=1)
self.page = create_page(
'My Extension Page', 'nav_playground.html', 'en',
site=self.site, created_by=self.admin)
self.page_title = self.page.get_title_obj()
self.page_extension = MyPageExtension.objects.create(
extended_object=self.page,
extra="page extension text")
self.title_extension = MyTitleExtension.objects.create(
extended_object=self.page.get_title_obj(),
extra_title="title extension text")
self.page_without_extension = create_page(
'A Page', 'nav_playground.html', 'en',
site=self.site, created_by=self.admin)
self.page_title_without_extension = self.page_without_extension.get_title_obj()
def test_admin_page_extension(self):
with self.login_user_context(self.admin):
# add a new extension
response = self.client.get(
reverse('admin:extensionapp_mypageextension_add') + '?extended_object=%s' % self.page_without_extension.pk
)
self.assertEqual(response.status_code, 200)
# make sure there is no extension yet
self.assertFalse(MyPageExtension.objects.filter(extended_object=self.page_without_extension).exists())
post_data = {
'extra': 'my extra'
}
response = self.client.post(
reverse('admin:extensionapp_mypageextens
|
sebastianlach/zerows
|
zerows/__init__.py
|
Python
|
mit
| 2,638
| 0.002658
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
usage: zerows [-h]
"""
__author__ = "Sebastian Łach"
__copyright__ = "Copyright 2015, Sebastian Łach"
__credits__ = ["Sebastian Łach", ]
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "Sebastian Łach"
__email__ = "root@slach.eu"
from json import loads
from zmq import Context as ZMQContext, REQ
from zmq.eventloop.zmqstream import ZMQStream
from zmq.eventloop.ioloop import install as zmq_ioloop_install
zmq_ioloop_install()
import tornado
import tornado.web
import tornado.websocket
from tornado.log import app_log
from tornado.options import define, parse_command_line, options
from tornado.web import Application
from tornado.ioloop import IOLoop
# define application options
define('port', type=int, default=8080, help='application port number')
define('router', type=str, default='tcp://localhost:5559', help='router url')
ERROR_INVALID_REQUEST = b'{"error": "invalid request"}'
def load_message(message):
try:
return loads(message)
except ValueError as e:
app_log.debug(e)
        return None
class ZeroMQHandler(tornado.websocket.WebSocketHandler):
def __init__(self, *args, **kwargs):
super(ZeroMQHandler, self).__init__(*args, **kwargs)
self.socket = None
self.stream = None
def open(self):
settings = self.application.settings
self.socket = settings['zeromq']['context'].socket(REQ)
        self.socket.connect(settings['zeromq']['url'])
self.stream = ZMQStream(self.socket, settings['ioloop'])
self.stream.on_recv(self.on_dispatch)
def on_message(self, message):
request = load_message(message)
if request:
data = message.encode('utf8')
self.stream.send(data)
else:
self.write_message(ERROR_INVALID_REQUEST)
def on_dispatch(self, messages):
for message in messages:
data = message.encode('utf8')
self.write_message(data)
def on_close(self):
self.stream.close()
self.socket.close()
def check_origin(self, origin):
return True
def data_received(self, chunk):
pass
def main():
"""Main entry-point"""
parse_command_line()
application = Application(
[
(r'/', ZeroMQHandler),
],
ioloop=IOLoop.current(),
zeromq=dict(
context=ZMQContext(),
url=options.router,
)
)
app_log.info(application.settings)
application.listen(options.port)
application.settings['ioloop'].start()
if __name__ == '__main__':
main()
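# Usage sketch (an assumption): with a ZeroMQ REP/ROUTER service listening on the
# router URL, run
#   zerows --port=8080 --router=tcp://localhost:5559
# then connect a WebSocket client to ws://localhost:8080/ and send JSON requests.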
|
IS-ENES-Data/esgf-pid
|
tests/testcases/rabbit/asyn/rabbit_asynchronous_tests.py
|
Python
|
apache-2.0
| 18,090
| 0.006578
|
import unittest
import mock
import logging
import datetime
import time
import esgfpid.rabbit.asynchronous
from esgfpid.rabbit.asynchronous.exceptions import OperationNotAllowed
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.NullHandler())
# Test resources:
from resources.TESTVALUES import *
import resources.TESTVALUES as TESTHELPERS
import globalvar
if globalvar.QUICK_ONLY:
print('Skipping slow tests in module "%s".' % __name__)
class RabbitAsynConnectorTestCase(unittest.TestCase):
slow_message = '\nRunning a slow test (avoid by using -ls flag).'
def setUp(self):
LOGGER.info('######## Next test (%s) ##########', __name__)
def tearDown(self):
LOGGER.info('#############################')
def assert_messages_are_in_queue(self, msg_queue, list_of_messages):
queue_content = []
while not msg_queue.empty():
queue_content.append(msg_queue.get(False))
for msg in list_of_messages:
self.assertIn(msg, queue_content)
#
# Init
#
'''
Test whether instances of rabbitconnector and thread
are created.
'''
def test_init_ok(self):
# Test variables:
nodemanager = TESTHELPERS.get_nodemanager()
# Run code to be tested:
testrabbit = esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector(nodemanager)
# Check result:
self.assertIsInstance(testrabbit, esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector, 'Constructor fail.')
self.assertTrue(testrabbit._AsynchronousRabbitConnector__not_started_yet)
thread = testrabbit._AsynchronousRabbitConnector__thread
self.assertIsInstance(thread, esgfpid.rabbit.asynchronous.rabbitthread.RabbitThread, 'Constructor fail.')
#
# Start thread
#
'''
Test whether the start method does the necessary things.
We expect it to change the state, and to run the thread.
'''
@unittest.skipIf(globalvar.QUICK_ONLY, '(this test is slow)')
def test_start_thread_ok(self):
print(self.slow_message)
# Preparation:
nodemanager = TESTHELPERS.get_nodemanager()
testrabbit = esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector(nodemanager)
# Mock the builder (so it cannot start a connection):
def side_effect_first_connection():
LOGGER.debug('Pretending to do something in the thread.run()')
time.sleep(0.5)
LOGGER.debug('Finished pretending to do something in the thread.run()')
buildermock = mock.MagicMock()
buildermock.first_connection = mock.MagicMock()
buildermock.first_connection.side_effect = side_effect_first_connection
testrabbit._AsynchronousRabbitConnector__thread._RabbitThread__builder = buildermock
# Check preconditions:
self.assertTrue(testrabbit._AsynchronousRabbitConnector__not_started_yet)
self.assertTrue(testrabbit._AsynchronousRabbitConnector__statemachine.is_NOT_STARTED_YET())
# Run code to be tested:
# This runs the thread, which triggers building a connection.
# In this test, it calls the side effect defined above, which blocks for
# a second. Afterwards, the thread should be finished.
testrabbit.start_rabbit_thread()
# Check results:
# Check if thread is alive:
self.assertTrue(testrabbit._AsynchronousRabbitConnector__thread.is_alive())
# Join the thread...
print("Joining...")
testrabbit._AsynchronousRabbitConnector__thread.join()
print("Joining done...")
# Check if the thread has ended:
self.assertFalse(testrabbit._AsynchronousRabbitConnector__thread.is_alive())
# Check state:
self.assertFalse(testrabbit._AsynchronousRabbitConnector__not_started_yet)
self.assertTrue(testrabbit._AsynchronousRabbitConnector__statemachine.is_WAITING_TO_BE_AVAILABLE())
# Check if run was called:
buildermock.first_connection.assert_called()
#
# Sending messages
#
'''
Test behaviour when we try sending messages but the
thread was not started yet.
It should raise an exception.
'''
def test_send_message_not_started_yet(self):
# Preparation:
nodemanager = TESTHELPERS.get_nodemanager()
testrabbit = esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector(nodemanager)
self.assertTrue(testrabbit._AsynchronousRabbitConnector__not_started_yet)
# Run code to be tested:
with self.assertRaises(OperationNotAllowed):
testrabbit.send_message_to_queue('message-foo')
with self.assertRaises(OperationNotAllowed):
testrabbit.send_many_messages_to_queue(['a','b','c'])
'''
Test behaviour when we try sending messages but the
thread was not started yet.
It should raise an exception.
'''
def test_send_message_not_started_yet_2(self):
# Preparation:
nodemanager = TESTHELPERS.get_nodemanager()
testrabbit = esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector(nodemanager)
testrabbit._AsynchronousRabbitConnector__not_started_yet = False
self.assertFalse(testrabbit._AsynchronousRabbitConnector__not_started_yet)
self.assertTrue(testrabbit._AsynchronousRabbitConnector__statemachine.is_NOT_STARTED_YET())
# Run code to be tested:
with self.assertRaises(OperationNotAllowed):
testrabbit.send_message_to_queue('message-foo')
with self.assertRaises(OperationNotAllowed):
testrabbit.send_many_messages_to_queue(['a','b','c'])
'''
Test behaviour when we send messages when the thread
was properly started.
We expect the message to be put into the queue.
We expect the publish event to be handed by the connection
to the feeder module.
'''
def test_send_message_ok(self):
# Preparations
nodemanager = TESTHELPERS.get_nodemanager()
testrabbit = esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector(nodemanager)
testrabbit._AsynchronousRabbitConnector__statemachine.set_to_available()
testrabbit._AsynchronousRabbitConnector__not_started_yet = False
# Mock the connection (it has to hand the event over to the feeder mock):
connectionmock = testrabbit._AsynchronousRabbitConnector__thread._connection = TESTHELPERS.get_connection_mock()
# Mock the feeder (it has to receive the publish event):
feedermock = testrabbit._AsynchronousRabbitConnector__thread._RabbitThread__feeder = mock.MagicMock()
# Run code to be tested:
testrabbit.send_message_to_queue('foo')
testrabbit.send_many_messages_to_queue(['a','b','c'])
# Check that publish was called:
feedermock.publish_message.assert_called()
self.assertTrue(feedermock.publish_message.call_count>=4)
# Check that the four messages were put into the queue:
msg_queue = testrabbit._AsynchronousRabbitConnector__unpublished_messages_queue
self.assert_messages_are_in_queue(msg_queue, ['foo', 'a', 'b', 'c'])
def test_send_message_waiting(self):
# Preparations
nodemanager = TESTHELPERS.get_nodemanager()
testrabbit = esgfpid.rabbit.asynchronous.AsynchronousRabbitConnector(nodemanager)
testrabbit._AsynchronousRabbitConnector__statemachine.set_to_waiting_to_be_available()
testrabbit._AsynchronousRabbitConnector__not_started_yet = False
# Mock the connection (it has to hand the event over to the feeder mock):
connectionmock = testrabbit._AsynchronousRabbitConnector__thread._connection = TESTHELPERS.get_connection_mock()
# Mock the feeder (it has to receive the publish event):
feedermock = testrabbit._AsynchronousRabbitConnector__thread._RabbitThread__feeder = mock.MagicMock()
# Run code to be tested:
testrabbit.send_many_messages_to_queue(['a','b','c'])
testrabbit.send_message_to_queue('foo')
# Check that publish was NOT called:
        feedermock.publish_message.assert_not_called()
|
barche/k3d
|
share/k3d/scripts/MeshSourceScript/cubes.py
|
Python
|
gpl-2.0
| 771
| 0.001297
|
#python
import k3d
k3d.check_node_environment(context, "MeshSourceScript")
# Construct a cube mesh primitive ...
cubes = context.output.primitives().create("cube")
matrices = cubes.topology().create("matrices", "k3d::matrix4")
materials = cubes.topology().create("materials", "k3d::imaterial*")
uniform = cubes.attributes().create("uniform")
color = uniform.create("Cs", "k3d::color")
# Add three cubes ...
matrices.append(k3d.translate3(k3d.vector3(-7, 0, 0)))
materials.append(None)
color.append(k3d.color(1, 0, 0))
matrices.append(k3d.translate3(k3d.vector3(0, 0, 0)))
materials.append(None)
color.append(k3d.color(0, 1, 0))
matrices.append(k3d.translate3(k3d.vector3(7, 0, 0)))
materials.append(None)
color.append(k3d.color(0, 0, 1))
print repr(context.output)
|
bhermansyah/DRR-datacenter
|
avatar/tests.py
|
Python
|
gpl-3.0
| 5,578
| 0.004661
|
import os.path
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.conf import settings
from django.contrib.auth import get_user_model
from avatar.settings import AVATAR_DEFAULT_URL, AVATAR_MAX_AVATARS_PER_USER
from avatar.util import get_primary_avatar
from avatar.models import Avatar
try:
from PIL import Image
dir(Image) # Placate PyFlakes
except ImportError:
import Image
def upload_helper(o, filename):
f = open(os.path.join(o.testdatapath, filename), "rb")
response = o.client.post(reverse('avatar_add'), {
'avatar': f,
}, follow=True)
f.close()
return response
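# Illustrative call: upload_helper(self, "test.png") posts testdata/test.png
# to the avatar_add view and follows the redirect.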
class AvatarUploadTests(TestCase):
def setUp(self):
self.testdatapath = os.path.join(os.path.dirname(__file__), "testdata")
self.user = get_user_model().objects.create_user('test', 'lennon@thebeatles.com', 'testpassword')
self.user.save()
self.client.login(username='test', password='testpassword')
Image.init()
def testNonImageUpload(self):
response = upload_helper(self, "nonimagefile")
self.failUnlessEqual(response.status_code, 200)
self.failIfEqual(response.context['upload_avatar_form'].errors, {})
def testNormalImageUpload(self):
response = upload_helper(self, "test.png")
self.failUnlessEqual(response.status_code, 200)
self.failUnlessEqual(len(response.redirect_chain), 1)
self.failUnlessEqual(response.context['upload_avatar_form'].errors, {})
avatar = get_primary_avatar(self.user)
self.failIfEqual(avatar, None)
def testImageWithoutExtension(self):
# use with AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png')
response = upload_helper(self, "imagefilewithoutext")
self.failUnlessEqual(response.status_code, 200)
self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
self.failIfEqual(response.context['upload_avatar_form'].errors, {})
def testImageWithWrongExtension(self):
# use with AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png')
response = upload_helper(self, "imagefilewithwrongext.ogg")
self.failUnlessEqual(response.status_code, 200)
self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
self.failIfEqual(response.context['upload_avatar_form'].errors, {})
def testImageTooBig(self):
# use with AVATAR_MAX_SIZE = 1024 * 1024
response = upload_helper(self, "testbig.png")
self.failUnlessEqual(response.status_code, 200)
self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
self.failIfEqual(response.context['upload_avatar_form'].errors, {})
def testDefaultUrl(self):
response = self.client.get(reverse('avatar_render_primary', kwargs={
'user': self.user.username,
'size': 80,
}))
loc = response['Location']
base_url = getattr(settings, 'STATIC_URL', None)
if not base_url:
base_url = settings.MEDIA_URL
self.assertTrue(base_url in loc)
self.assertTrue(loc.endswith(AVATAR_DEFAULT_URL))
def testNonExistingUser(self):
a = get_primary_avatar("nonexistinguser")
self.failUnlessEqual(a, None)
def testThereCanBeOnlyOnePrimaryAvatar(self):
for i in range(1, 10):
self.testNormalImageUpload()
count = Avatar.objects.filter(user=self.user, primary=True).count()
self.failUnlessEqual(count, 1)
def testDeleteAvatar(self):
self.testNormalImageUpload()
avatar = Avatar.objects.filter(user=self.user)
self.failUnlessEqual(len(avatar), 1)
response = self.client.post(reverse('avatar_delete'), {
'choices': [avatar[0].id],
}, follow=True)
self.failUnlessEqual(response.status_code, 200)
self.failUnlessEqual(len(response.redirect_chain), 1)
count = Avatar.objects.filter(user=self.user).count()
self.failUnlessEqual(count, 0)
def testDeletePrimaryAvatarAndNewPrimary(self):
self.testThereCanBeOnlyOnePrimaryAvatar()
primary = get_primary_avatar(self.user)
oid = primary.id
response = self.client.post(reverse('avatar_delete'), {
'choices': [oid],
})
primaries = Avatar.objects.filter(user=self.user, primary=True)
self.failUnlessEqual(len(primaries), 1)
        self.failIfEqual(oid, primaries[0].id)
avatars = Avatar.objects.filter(user=self.user)
self.failUnlessEqual(avatars[0].id, primaries[0].id)
def testTooManyAvatars(self):
for i in range(0, AVATAR_MAX_AVATARS_PER_USER):
self.testNormalImageUpload()
count_before = Avatar.objects.filter(user=self.user).count()
response = upload_helper(self, "test.png")
count_after = Avatar.objects.filter(user=self.user).count()
self.failUnlessEqual(response.status_code, 200)
self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
self.failIfEqual(response.context['upload_avatar_form'].errors, {})
self.failUnlessEqual(count_before, count_after)
# def testAvatarOrder
# def testReplaceAvatarWhenMaxIsOne
# def testHashFileName
# def testHashUserName
# def testChangePrimaryAvatar
# def testDeleteThumbnailAndRecreation
# def testAutomaticThumbnailCreation
|
adamhaney/airflow
|
airflow/contrib/operators/jenkins_job_trigger_operator.py
|
Python
|
apache-2.0
| 11,480
| 0.001394
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
import socket
import json
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.contrib.hooks.jenkins_hook import JenkinsHook
import jenkins
from jenkins import JenkinsException
from six.moves.urllib.request import Request, urlopen
from six.moves.urllib.error import HTTPError, URLError
try:
basestring
except NameError:
basestring = str # For python3 compatibility
# TODO Use jenkins_urlopen instead when it will be available
# in the stable python-jenkins version (> 0.4.15)
def jenkins_request_with_headers(jenkins_server, req, add_crumb=True):
"""
We need to get the headers in addition to the body answer
to get the location from them
This function is just a copy of the one present in python-jenkins library
with just the return call changed
:param jenkins_server: The server to query
:param req: The request to execute
:param add_crumb: Boolean to indicate if it should add crumb to the request
:return:
"""
try:
if jenkins_server.auth:
req.add_header('Authorization', jenkins_server.auth)
if add_crumb:
            jenkins_server.maybe_add_crumb(req)
response = urlopen(req, timeout=jenkins_server.timeout)
response_body = response.read()
response_headers = response.info()
if response_body is None:
raise jenkins.EmptyResponseException(
"Error communicating with server[%s]: "
"empty response" % jenkins_server
|
.server)
return {'body': response_body.decode('utf-8'), 'headers': response_headers}
except HTTPError as e:
        # Jenkins's funky authentication means it's nigh impossible to
        # distinguish errors.
if e.code in [401, 403, 500]:
# six.moves.urllib.error.HTTPError provides a 'reason'
# attribute for all python version except for ver 2.6
# Falling back to HTTPError.msg since it contains the
# same info as reason
raise JenkinsException(
'Error in request. ' +
'Possibly authentication failed [%s]: %s' % (
e.code, e.msg)
)
elif e.code == 404:
raise jenkins.NotFoundException('Requested item could not be found')
else:
raise
except socket.timeout as e:
raise jenkins.TimeoutException('Error in request: %s' % e)
except URLError as e:
# python 2.6 compatibility to ensure same exception raised
# since URLError wraps a socket timeout on python 2.6.
if str(e.reason) == "timed out":
raise jenkins.TimeoutException('Error in request: %s' % e.reason)
raise JenkinsException('Error in request: %s' % e.reason)
class JenkinsJobTriggerOperator(BaseOperator):
"""
    Trigger a Jenkins job and monitor its execution.
    This operator depends on the python-jenkins library,
    version >= 0.4.15, to communicate with the Jenkins server.
You'll also need to configure a Jenkins connection in the connections screen.
:param jenkins_connection_id: The jenkins connection to use for this job
:type jenkins_connection_id: str
:param job_name: The name of the job to trigger
:type job_name: str
:param parameters: The parameters block to provide to jenkins. (templated)
:type parameters: str
:param sleep_time: How long will the operator sleep between each status
request for the job (min 1, default 10)
:type sleep_time: int
:param max_try_before_job_appears: The maximum number of requests to make
        while waiting for the job to appear on the jenkins server (default 10)
:type max_try_before_job_appears: int
"""
template_fields = ('parameters',)
template_ext = ('.json',)
ui_color = '#f9ec86'
@apply_defaults
def __init__(self,
jenkins_connection_id,
job_name,
parameters="",
sleep_time=10,
max_try_before_job_appears=10,
*args,
**kwargs):
super(JenkinsJobTriggerOperator, self).__init__(*args, **kwargs)
self.job_name = job_name
self.parameters = parameters
if sleep_time < 1:
sleep_time = 1
self.sleep_time = sleep_time
self.jenkins_connection_id = jenkins_connection_id
self.max_try_before_job_appears = max_try_before_job_appears
def build_job(self, jenkins_server):
"""
        This function makes an API call to Jenkins to trigger a build for 'job_name'.
        It returns a dict with 2 keys: body and headers.
headers contains also a dict-like object which can be queried to get
the location to poll in the queue.
:param jenkins_server: The jenkins server where the job should be triggered
:return: Dict containing the response body (key body)
and the headers coming along (headers)
"""
# Warning if the parameter is too long, the URL can be longer than
# the maximum allowed size
if self.parameters and isinstance(self.parameters, basestring):
import ast
self.parameters = ast.literal_eval(self.parameters)
if not self.parameters:
# We need a None to call the non parametrized jenkins api end point
self.parameters = None
request = Request(jenkins_server.build_job_url(self.job_name,
self.parameters, None), b'')
return jenkins_request_with_headers(jenkins_server, request)
def poll_job_in_queue(self, location, jenkins_server):
"""
        This method polls the jenkins queue until the job is executed.
When we trigger a job through an API call,
the job is first put in the queue without having a build number assigned.
        Thus we have to wait for the job to exit the queue to know its build number.
To do so, we have to add /api/json (or /api/xml) to the location
returned by the build_job call and poll this file.
        When an 'executable' block appears in the json, it means the job execution started
and the field 'number' then contains the build number.
:param location: Location to poll, returned in the header of the build_job call
:param jenkins_server: The jenkins server to poll
:return: The build_number corresponding to the triggered job
"""
try_count = 0
location = location + '/api/json'
# TODO Use get_queue_info instead
# once it will be available in python-jenkins (v > 0.4.15)
self.log.info('Polling jenkins queue at the url %s', location)
while try_count < self.max_try_before_job_appears:
location_answer = jenkins_request_with_headers(jenkins_server,
Request(location))
if location_answer is not None:
json_response = json.loads(location_answer['body'])
if 'executable' in json_response:
build_number = json_response['executable']['number']
                    self.log.info('Job executed on Jenkins side with the build number %s',
                                  build_number)
|
dminca/python-sandbox
|
udp_client.py
|
Python
|
gpl-3.0
| 285
| 0.003509
|
#!/usr/bin/env python2
# SIMPLE UDP CLIENT
import socket
host = '127.0.0.1'
port = 80
# create socket obj
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# send data
client.sendto("AAABBBCCC", (host, port))
# receive data
data, addr = client.recvfrom(4096)
print data
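# Quick local test sketch (an assumption): start a UDP listener first, e.g.
#   nc -u -l 80    # flags vary by netcat variant; port 80 may need root,
#                  # so adjust `port` above to a free high port if needed
# then run this script and type a reply in the nc terminal.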
|
kvar/ansible
|
lib/ansible/utils/unsafe_proxy.py
|
Python
|
gpl-3.0
| 4,891
| 0.001022
|
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
# --------------------------------------------
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
# retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
#
# Original Python Recipe for Proxy:
# http://code.activestate.com/recipes/496741-object-proxying/
# Author: Tomer Filiba
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.common._collections_compat import Mapping, MutableSequence, Set
from ansible.module_utils.six import string_types, binary_type, text_type
__all__ = ['AnsibleUnsafe', 'wrap_var']
class AnsibleUnsafe(object):
__UNSAFE__ = True
class AnsibleUnsafeBytes(binary_type, AnsibleUnsafe):
pass
class AnsibleUnsafeText(text_type, AnsibleUnsafe):
pass
class UnsafeProxy(object):
def __new__(cls, obj, *args, **kwargs):
from ansible.utils.display import Display
Display().deprecated(
'UnsafeProxy is being deprecated. Use wrap_var or AnsibleUnsafeBytes/AnsibleUnsafeText directly instead',
version='2.13'
)
# In our usage we should only receive unicode strings.
# This conditional and conversion exists to sanity check the values
# we're given but we may want to take it out for testing and sanitize
# our input instead.
if isinstance(obj, AnsibleUnsafe):
return obj
if isinstance(obj, string_types):
            obj = AnsibleUnsafeText(to_text(obj, errors='surrogate_or_strict'))
return obj
def _wrap_dict(v):
for k in v.keys():
if v[k] is not None:
v[wrap_var(k)] = wrap_var(v[k])
return v
def _wrap_list(v):
for idx, item in enumerate(v):
if item is not None:
v[idx] = wrap_var(item)
return v
def _wrap_set(v):
return set(item if item is None else wrap_var(item) for item in v)
def wrap_var(v):
if v is None or isinstance(v, AnsibleUnsafe):
return v
if isinstance(v, Mapping):
v = _wrap_dict(v)
elif isinstance(v, MutableSequence):
v = _wrap_list(v)
elif isinstance(v, Set):
v = _wrap_set(v)
elif isinstance(v, binary_type):
v = AnsibleUnsafeBytes(v)
elif isinstance(v, text_type):
v = AnsibleUnsafeText(v)
return v
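# Illustrative behaviour: under Python 3, wrap_var('x') returns
# AnsibleUnsafeText('x'), and containers are wrapped recursively, e.g.
# wrap_var({'k': ['v']}) yields the same structure with AnsibleUnsafeText leaves.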
def to_unsafe_bytes(*args, **kwargs):
return wrap_var(to_bytes(*args, **kwargs))
def to_unsafe_text(*args, **kwargs):
return wrap_var(to_text(*args, **kwargs))
|
canvasnetworks/canvas
|
website/settings_drawquest.py
|
Python
|
bsd-3-clause
| 8,754
| 0.001714
|
from settings import *
PROJECT = 'drawquest'
CANVAS_SUB_SITE = '/admin/'
if PRODUCTION:
DOMAIN = "example.com"
SELF_PORT = 9000
SELF = 'localhost:9000'
UGC_HOST = 'i.canvasugc.com'
FACEBOOK_APP_ACCESS_TOKEN = "REDACTED"
FACEBOOK_APP_ID = "REDACTED"
FACEBOOK_APP_SECRET = "REDACTED"
FACEBOOK_NAMESPACE = "REDACTED"
URBANAIRSHIP_APP_KEY = "REDACTED"
URBANAIRSHIP_APP_SECRET = "REDACTED"
URBANAIRSHIP_APP_MASTER_SECRET = "REDACTED"
else:
DOMAIN = "dq.savnac.com"
# We're port forwarding 80 -> 9000
SELF_PORT = 80
SELF = 'localhost'
UGC_HOST = 'ugc.savnac.com'
FACEBOOK_APP_ACCESS_TOKEN = "REDACTED"
FACEBOOK_APP_ID = "REDACTED"
FACEBOOK_APP_SECRET = "REDACTED"
FACEBOOK_NAMESPACE = "REDACTED"
URBANAIRSHIP_APP_KEY = "REDACTED"
URBANAIRSHIP_APP_SECRET = "REDACTED"
URBANAIRSHIP_APP_MASTER_SECRET = "REDACTED"
# To get to the mysql shell:
# mysql -h <hostname> -u drawquest -p<press enter><paste pw from below>
# Useful commands:
# See long-running transactions:
# SHOW ENGINE INNODB STATUS;
if PRODUCTION:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'drawquest',
'USER': 'drawquest',
'PASSWORD': 'E78Sg38TNNmP',
'HOST': 'drawquestdb.ccop1gmd625s.us-east-1.rds.amazonaws.com',
'PORT': '3306',
}
}
elif TESTING_USE_MYSQL:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'drawquest',
'USER': 'root',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '',
'OPTIONS': {
                # http://stackoverflow.com/questions/11853141/foo-objects-getid-none-returns-foo-instance-sometimes
'init_command': 'SET SQL_AUTO_IS_NULL=0;',
},
}
}
else:
DATABASES = {
'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'drawquest/db.sqlite', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
MIDDLEWARE_CLASSES = (
'drawquest.middleware.PingMiddleware',
'drawquest.middleware.DrawquestShimMiddleware',
'canvas.middleware.ExceptionLogger',
'canvas.middleware.HandleLoadBalancerHeaders',
'canvas.middleware.DeferredWorkMiddleware',
#TODO remove
'drawquest.middleware.Log403',
'django.middleware.common.CommonMiddleware',
'canvas.middleware.UploadifyIsALittleBitchMiddleware',
'drawquest.apps.drawquest_auth.middleware.SessionHeaderMiddleware',
'canvas.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'apps.canvas_auth.middleware.AnonymousUserMiddleware',
#TODO 'canvas.middleware.RedirectToHttpsMiddleware',
'canvas.experiments.ForceExperimentMiddleware',
'canvas.middleware.FacebookMiddleware',
'canvas.middleware.ImpersonateMiddleware',
'canvas.middleware.RequestSetupMiddleware',
'drawquest.middleware.InactiveUserMiddleware',
'drawquest.middleware.StaffOnlyMiddleware',
'canvas.middleware.StaffOnlyMiddleware',
'canvas.middleware.IPHistoryMiddleware',
'canvas.middleware.GlobalExperimentMiddleware',
'canvas.middleware.HttpRedirectExceptionMiddleware',
'canvas.middleware.Django403Middleware',
'canvas.middleware.HttpExceptionMiddleware',
'canvas.middleware.TimeDilationMiddleware',
'apps.share_tracking.middleware.TrackShareViewsMiddleware',
'apps.share_tracking.middleware.TrackClickthroughMiddleware',
#'django.contrib.messages.middleware.MessageMiddleware',
#'debug_toolbar.middleware.DebugToolbarMiddleware',
'canvas.middleware.ResponseGuard',
)
AUTHENTICATION_BACKENDS = (
'drawquest.apps.drawquest_auth.backends.DrawquestAuthBackend',
)
TEMPLATE_CONTEXT_PROCESSORS = DJANGO_DEFAULT_CONTEXT_PROCESSORS + (
'django.core.context_processors.request',
'canvas.context_processors.base_context',
'apps.features.context_processors.features_context',
)
ROOT_URLCONF = 'drawquest.urls'
REDIS_HOST = Config['drawquest_redis_host']
# Avoid colliding with example.com redis DBs in testrunner and locally.
if not TESTING:
REDIS_DB_MAIN = 10
REDIS_DB_CACHE = 11
SESSION_REDIS_DB = 12
else:
REDIS_DB_MAIN = 13
REDIS_DB_CACHE = 14
SESSION_REDIS_DB = 15
MEMCACHE_HOSTS = Config['drawquest_memcache_hosts']
if PRODUCTION:
CACHE_BACKEND = 'memcached://{}'.format(';'.join(Config['drawquest_memcache_hosts']))
else:
CACHE_BACKEND = 'locmem://?max_entries=1000'
# Bump this to wipe out all caches which use cachecow.
CACHE_KEY_PREFIX = 'DQv6'
FACT_HOST = Config['drawquest_fact_host']
FACT_BUCKET = 'drawquest-facts'
IMAGE_FS = Config['drawquest_image_fs']
HTTPS_ENABLED = True
UGC_HTTPS_ENABLED = False
API_PROTOCOL = 'https' if HTTPS_ENABLED else 'http'
API_PREFIX = API_PROTOCOL + '://example.com/api/'
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, 'drawquest', 'templates'),
os.path.join(PROJECT_PATH, 'templates'),
)
INSTALLED_APPS = (
'apps.monkey_patch_django',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sitemaps',
'django.contrib.sites',
'django.contrib.humanize',
'django.contrib.messages',
'south',
'compressor',
'debug_toolbar',
'django_bcrypt',
'apps.activity',
'apps.analytics',
'apps.canvas_auth',
'apps.comments',
'apps.features',
'apps.feed',
'apps.ip_blocking',
'apps.jinja_adapter',
'apps.post_thread',
'apps.share_tracking',
'apps.signup',
'apps.user_settings',
'apps.threads',
'drawquest.apps.api_console',
'drawquest.apps.comment_freeze',
'drawquest.apps.drawquest_auth',
'drawquest.apps.following',
'drawquest.apps.iap',
'drawquest.apps.palettes',
'drawquest.apps.playback',
'drawquest.apps.push_notifications',
'drawquest.apps.quest_comments',
'drawquest.apps.quests',
'drawquest.apps.stars',
'drawquest.apps.submit_quest',
'drawquest.apps.timeline',
'drawquest.apps.tumblr',
'drawquest.apps.whitelisting',
'canvas',
'drawquest',
)
if PRODUCTION:
INSTALLED_APPS += ('sentry.client',)
else:
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['--exclude=compressor', '-d']
INSTALLED_APPS += ('apps.sentry_debug',)
SENTRY_CLIENT = 'sentry.client.base.DummyClient'
MINIFY_HTML = False
# We're going to stop using django-compressor if we ever launch any public-facing pages in DrawQuest.
COMPRESS_OFFLINE = False
logging.basicConfig(
level=(logging.DEBUG if PRODUCTION else logging.DEBUG),
format='%(asctime)s - %(levelname)s - %(message)s',
filename=os.path.join(PROJECT_PATH, "drawquest/run/gunicorn.log"),
filemode='a',
)
DKIM_SELECTOR = "amazonses"
DKIM_DOMAIN = "example.com"
DKIM_PRIVATE_KEY_PATH = "/etc/canvas/dkim.private.key"
DKIM_PRIVATE_KEY = open(DKIM_PRIVATE_KEY_PATH).read() if os.path.exists(DKIM_PRIVATE_KEY_PATH) else None
# For now, because the password reset template in Django 1.2 is dumb and doesn't take a from_email
DEFAULT_FROM_EMAIL = "passwordreset@example.com"
UPDATES_EMAIL = "DrawQuest <updates@example.com>"
INCLUDE_ERROR_TYPE_IN_API = True
STAR_STICKER_TYPE_ID = 9001
ACTIVITY_TYPE_CLASSES = (
'apps.activity.redis_models.FollowedByUserActivity',
'drawquest.activities.StarredActivity',
'drawquest.activities.PlaybackActivity',
'drawquest.activities.FolloweePostedActivity',
'drawquest.activities.Welc
|
Informatik-AG-KGN-2016/Dokumente
|
2016-11-28/aufgabe-addierer.py
|
Python
|
gpl-3.0
| 587
| 0.001712
|
# Adder using += 1
# Get the inputs
a = input("This is an adder!\nEnter a: ")
b = input("Enter b: ")
# Convert the strings to numbers
a = int(a)
b = int(b)
# use a new variable, do not modify the inputs
result = a
i = 0
if b > 0: # if b is greater than zero
    while i < b: # then run the loop counting up
        result += 1
        i += 1
elif b < 0: # if b is less than zero
    while i > b: # then run the loop counting down
        result -= 1
        i -= 1
print("\nThe result is: " + str(result))
|
maier/repo-mirror-mgr
|
Mirror/__init__.py
|
Python
|
mit
| 52
| 0
|
from Config import Config
from Distro import Distro
|
wlashell/lyrical_page
|
site_tracking/admin.py
|
Python
|
apache-2.0
| 493
| 0.014199
|
from django.contrib.admin import site, ModelAdmin
from site_tracking.models import VerificationCode, TrackingCode
class VerificationCodeAdmin(ModelAdmin):
list_display = ('site', 'verification_type', 'code',)
list_editable = ('code',)
site.register(VerificationCode, VerificationCodeAdmin)
class TrackingCodeAdmin(ModelAdmin):
list_display = ('site', 'tracking_type', 'code')
    list_editable = ('tracking_type', 'code')
site.register(TrackingCode, TrackingCodeAdmin)
|
anistark/opiniohll
|
OpinioDelivery/constants.py
|
Python
|
mit
| 522
| 0.001916
|
# All the constants
# Get keys
import config
# All the Base Endpoints
STAGING_URL_HOST = 'test.deliver.opinioapp.com'
PRODUCTION_URL_HOST = 'deliver.opinioapp.com'
ACCESS_KEY = config.ACCESS_KEY
SECRET_KEY = config.SECRET_KEY
# Api Endpoints
API_BASE_ENDPOINT = '/api'
API_VERSION = 'v1'
API_ENDPOINT = API_BASE_ENDPOINT + '/' + API_VERSION + '/orders'
API_MERCHANT_ENDPOINT = API_BASE_ENDPOINT + '/' + API_VERSION + '/merchants'
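# e.g. API_ENDPOINT == '/api/v1/orders' and API_MERCHANT_ENDPOINT == '/api/v1/merchants'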
API_SERVICEABILITY_ENDPOINT = API_BASE_ENDPOINT + '/' + API_VERSION + '/serviceability'
|
erinspace/osf.io
|
addons/base/utils.py
|
Python
|
apache-2.0
| 2,446
| 0.006132
|
import markupsafe
from os.path import basename
from website import settings
def serialize_addon_config(config, user):
lookup = config.template_lookup
user_addon = user.get_addon(config.short_name)
ret = {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
'is_enabled': user_addon is not None,
'addon_icon_url': config.icon_url,
}
ret.update(user_addon.to_json(user) if user_addon else {})
return ret
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
return [serialize_addon_config(addon_config, user) for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower())]
def format_last_known_metadata(auth, node, file, error_type):
msg = """
</div>"""
# None is default
if error_type != 'FILE_SUSPENDED' and ((auth.user and node.is_contributor(auth.user)) or (auth.private_key and auth.private_key in node.private_link_keys_active)):
last_meta = file.last_known_metadata
last_seen = last_meta.get('last_seen', None)
hashes = last_meta.get('hashes', None)
path = last_meta.get('path', None)
size = last_meta.get('size', None)
parts = [
"""</br>This file was """ if last_seen or hashes or path or siz
|
e else '',
"""last seen on {} UTC """.format(last_seen.strftime('%c')) if last_seen else '',
"""and found at path {} """.format(markupsafe.escape(path)) if last_seen and path else '',
"""last found at path {} """.format(markupsafe.escape(path)) if not last_seen and path else '',
"""with a file size of {} bytes""".format(size) if size and (last_seen or path) else '',
"""last seen with a file size of {} bytes""".format(size) if size and not (last_seen or path) else '',
""".</br></br>""" if last_seen or hashes or path or size else '',
"""Hashes of last seen version:</br><p>{}</p>""".format(
'</br>'.join(['{}: {}'.format(k, v) for k, v in hashes.items()])
) if hashes else '', # TODO: Format better for UI
msg
]
return ''.join(parts)
return msg
|
ktok07b6/polyphony
|
tests/if/if23.py
|
Python
|
mit
| 439
| 0
|
from polyphony import testbench
class C:
def __init__(self):
self.v1 = 0
self.v2 = 0
def set_v(self, i, v):
if i == 0:
pass
elif i == 1:
self.v1 = v
elif i == 2:
self.v2 = v
else:
return
def if23():
c = C()
c.set_v(1, 10)
c.set_v(2, 20)
return c.v1 + c.v2
@testbench
def test():
    assert 30 == if23()
test()
|
vrga/pyFanController
|
pyfc/common.py
|
Python
|
mit
| 4,243
| 0.001414
|
import logging
from abc import ABCMeta, abstractmethod
from collections import deque
from typing import List, Union, Iterable, Sequence
log = logging.getLogger(__name__)
class NoSensorsFoundException(RuntimeError):
pass
class Controller(metaclass=ABCMeta):
@abstractmethod
def run(self):
raise NotImplementedError
@abstractmethod
def enable(self):
raise NotImplementedError
@abstractmethod
def disable(self):
raise NotImplementedError
@abstractmethod
def valid(self) -> bool:
raise NotImplementedError
class InputDevice(metaclass=ABCMeta):
"""
Abstract class for input devices.
"""
def __init__(self, name):
self.name = name
self.values = ValueBuffer(name, 128)
@abstractmethod
def get_value(self) -> float:
raise NotImplementedError
class OutputDevice(metaclass=ABCMeta):
"""
Abstract class for output devices.
"""
def __init__(self, name):
self.name = name
self.values = ValueBuffer(name, 128)
def set_value(self, value: Union[int, float]):
self.values.update(value)
@abstractmethod
def apply(self):
        raise NotImplementedError
@abstractmethod
def enable(self):
raise NotImplementedError
@abstractmethod
def disable(self):
raise NotImplementedError
class PassthroughController(Controller):
    def __init__(self, inputs: Sequence[InputDevice], outputs: Sequence[OutputDevice], speeds=None):
self.inputs = list(inputs)
self.outputs = list(outputs)
def run(self):
        for idx, input_reader in enumerate(self.inputs):
output = self.outputs[idx]
output.name = input_reader.name
output.values.name = input_reader.name
output.set_value(input_reader.get_value())
output.apply()
log.debug('ran loop')
def apply_candidates(self):
return self.outputs
def enable(self):
for output_dev in self.outputs:
output_dev.enable()
def disable(self):
for output_dev in self.outputs:
output_dev.disable()
def valid(self) -> bool:
return bool(self.inputs and self.outputs) and len(self.inputs) == len(self.outputs)
class DummyInput(InputDevice):
def __init__(self):
super().__init__('dummy')
self.temp = 0
def get_value(self):
return self.temp
def set_value(self, value):
self.temp = value
class DummyOutput(OutputDevice):
def __init__(self):
super().__init__('dummy')
self.speed = None
self.enabled = False
def apply(self):
if self.enabled:
self.speed = round(self.values.mean())
def enable(self):
self.enabled = True
def disable(self):
self.enabled = False
def mean(seq: Iterable) -> float:
if not isinstance(seq, Iterable):
raise ValueError('provided sequence MUST be iterable')
if not isinstance(seq, Sequence):
seq = list(seq)
if len(seq) == 1:
return float(seq[0])
if len(seq) == 0:
raise ValueError('sequence must have at least one value.')
return sum(seq) / len(seq)
def lerp(value: Union[float, int], input_min: Union[float, int], input_max: Union[float, int], output_min: Union[float, int], output_max: Union[float, int]) -> float:
if value <= input_min:
return float(output_min)
if value >= input_max:
return float(output_max)
return (output_min * (input_max - value) + output_max * (value - input_min)) / (input_max - input_min)
def lerp_range(seq: Iterable[Union[float, int]], input_min, input_max, output_min, output_max) -> List[float]:
return [lerp(val, input_min, input_max, output_min, output_max) for val in seq]
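# Worked examples for lerp/lerp_range (derived from the definitions above):
#   lerp(5, 0, 10, 0, 100)  -> 50.0   (midpoint maps to midpoint)
#   lerp(-1, 0, 10, 0, 100) -> 0.0    (inputs below input_min clamp to output_min)
#   lerp_range([0, 5, 10], 0, 10, 0, 100) -> [0.0, 50.0, 100.0]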
class ValueBuffer:
def __init__(self, name, default_value=0.0):
self.name = name
self.buffer = deque(maxlen=32)
self._default_value = default_value
def update(self, value: float):
self.buffer.append(value)
def mean(self) -> float:
try:
return mean(self.buffer)
except (ValueError, ZeroDivisionError):
return self._default_value
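# Minimal usage sketch (hypothetical wiring, assuming the classes above):
# route one dummy input to one dummy output and run a single loop pass.
if __name__ == '__main__':
    source = DummyInput()
    sink = DummyOutput()
    source.set_value(42)
    sink.enable()
    controller = PassthroughController(inputs=[source], outputs=[sink])
    controller.run()
    assert sink.speed == 42  # the mean of a single sample is the sample itself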
|
beetbox/beets
|
beets/dbcore/query.py
|
Python
|
mit
| 29,107
| 0
|
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""The Query type hierarchy for DBCore.
"""
import re
from operator import mul
from beets import util
from datetime import datetime, timedelta
import unicodedata
from functools import reduce
class ParsingError(ValueError):
"""Abstract class for any unparseable user-requested album/query
specification.
"""
class InvalidQueryError(ParsingError):
"""Represent any kind of invalid query.
The query should be a unicode string or a list, which will be space-joined.
"""
def __init__(self, query, explanation):
if isinstance(query, list):
query = " ".join(query)
message = f"'{query}': {explanation}"
super().__init__(message)
class InvalidQueryArgumentValueError(ParsingError):
"""Represent a query argument that could not be converted as expected.
It exists to be caught in upper stack levels so a meaningful (i.e. with the
query) InvalidQueryError can be raised.
"""
def __init__(self, what, expected, detail=None):
message = f"'{what}' is not {expected}"
if detail:
message = f"{message}: {detail}"
super().__init__(message)
class Query:
"""An abstract class representing a query into the item database.
"""
def clause(self):
"""Generate an SQLite expression implementing the query.
Return (clause, subvals) where clause is a valid sqlite
WHERE clause implementing the query and subvals is a list of
items to be substituted for ?s in the clause.
"""
return None, ()
def match(self, item):
"""Check whether this query matches a given Item. Can be used to
perform queries on arbitrary sets of Items.
"""
raise NotImplementedError
def __repr__(self):
return f"{self.__class__.__name__}()"
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return 0
class FieldQuery(Query):
"""An abstract query that searches in a specific field for a
pattern. Subclasses must provide a `value_match` class method, which
determines whether a certain pattern string matches a certain value
string. Subclasses may also provide `col_clause` to implement the
same matching functionality in SQLite.
"""
def __init__(self, field, pattern, fast=True):
self.field = field
self.pattern = pattern
self.fast = fast
def col_clause(self):
return None, ()
def clause(self):
if self.fast:
return self.col_clause()
else:
# Matching a flexattr. This is a slow query.
return None, ()
@classmethod
def value_match(cls, pattern, value):
"""Determine whether the value matches the pattern. Both
arguments are strings.
"""
raise NotImplementedError()
def match(self, item):
return self.value_match(self.pattern, item.get(self.field))
def __repr__(self):
return ("{0.__class__.__name__}({0.field!r}, {0.pattern!r}, "
"{0.fast})".format(self))
def __eq__(self, other):
return super().__eq__(other) and \
self.field == other.field and self.pattern == other.pattern
def __hash__(self):
return hash((self.field, hash(self.pattern)))
class MatchQuery(FieldQuery):
"""A query that looks for exact matches in an item field."""
def col_clause(self):
return self.field + " = ?", [self.pattern]
@classmethod
def value_match(cls, pattern, value):
return pattern == value
class NoneQuery(FieldQuery):
"""A query that checks whether a field is
|
null."""
def __init__(self, field, fast=True):
super().__init__(field, None, fast)
def col_clause(self):
return self.field + " IS NULL", ()
def match(self, item):
return item.get(self.field) is None
def __repr__(self):
return "{0.__class__.__name__}({0.field!r}, {0.fast})".format(self)
class StringFieldQuery(FieldQuery):
"""A FieldQuery that converts values to strings before matching
them.
"""
@classmethod
    def value_match(cls, pattern, value):
"""Determine whether the value matches the pattern. The value
may have any type.
"""
return cls.string_match(pattern, util.as_string(value))
@classmethod
def string_match(cls, pattern, value):
"""Determine whether the value matches the pattern. Both
arguments are strings. Subclasses implement this method.
"""
raise NotImplementedError()
class StringQuery(StringFieldQuery):
"""A query that matches a whole string in a specific item field."""
def col_clause(self):
search = (self.pattern
.replace('\\', '\\\\')
.replace('%', '\\%')
.replace('_', '\\_'))
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals
@classmethod
def string_match(cls, pattern, value):
return pattern.lower() == value.lower()
class SubstringQuery(StringFieldQuery):
"""A query that matches a substring in a specific item field."""
def col_clause(self):
pattern = (self.pattern
.replace('\\', '\\\\')
.replace('%', '\\%')
.replace('_', '\\_'))
search = '%' + pattern + '%'
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals
@classmethod
def string_match(cls, pattern, value):
return pattern.lower() in value.lower()
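# Example of the generated SQL (a sketch, assuming the classes above):
#   SubstringQuery('title', '50%').col_clause()
#   -> ("title like ? escape '\\'", ['%50\\%%'])
# i.e. LIKE wildcards inside user input are escaped so they match literally.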
class RegexpQuery(StringFieldQuery):
"""A query that matches a regular expression in a specific item
field.
Raises InvalidQueryError when the pattern is not a valid regular
expression.
"""
def __init__(self, field, pattern, fast=True):
super().__init__(field, pattern, fast)
pattern = self._normalize(pattern)
try:
            self.pattern = re.compile(pattern)
except re.error as exc:
# Invalid regular expression.
raise InvalidQueryArgumentValueError(pattern,
"a regular expression",
format(exc))
@staticmethod
def _normalize(s):
"""Normalize a Unicode string's representation (used on both
patterns and matched values).
"""
return unicodedata.normalize('NFC', s)
@classmethod
def string_match(cls, pattern, value):
return pattern.search(cls._normalize(value)) is not None
class BooleanQuery(MatchQuery):
"""Matches a boolean field. Pattern should either be a boolean or a
string reflecting a boolean.
"""
def __init__(self, field, pattern, fast=True):
super().__init__(field, pattern, fast)
if isinstance(pattern, str):
self.pattern = util.str2bool(pattern)
self.pattern = int(self.pattern)
class BytesQuery(MatchQuery):
"""Match a raw bytes field (i.e., a path). This is a necessary hack
to work around the `sqlite3` module's desire to treat `bytes` and
`unicode` equivalently in Python 2. Always use this query instead of
`MatchQuery` when matching on BLOB values.
"""
def __init__(self, field, pattern):
super().__init__(field, pattern)
# Use a buffer/memoryview representation of the pattern for SQLite
        # matching.
|
anubhav929/eden
|
modules/eden/doc.py
|
Python
|
mit
| 14,543
| 0.010314
|
# -*- coding: utf-8 -*-
""" Sahana Eden Document Library
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["S3DocumentLibrary",
"doc_image_represent"]
import os
from gluon import *
from gluon.storage import Storage
from ..s3 import *
# =============================================================================
class S3DocumentLibrary(S3Model):
names = ["doc_entity",
"doc_document",
"doc_image"]
def model(self):
T = current.T
db = current.db
s3 = current.response.s3
person_comment = self.pr_person_comment
person_id = self.pr_person_id
location_id = self.gis_location_id
organisation_id = self.org_organisation_id
messages = current.messages
NONE = messages.NONE
UNKNOWN_OPT = messages.UNKNOWN_OPT
# Shortcuts
add_component = self.add_component
configure = self.configure
crud_strings = s3.crud_strings
define_table = self.define_table
super_link = self.super_link
# ---------------------------------------------------------------------
# Document-referencing entities
#
entity_types = Storage(asset_asset=T("Asset"),
irs_ireport=T("Incident Report"),
project_project=T("Project"),
project_activity=T("Project Activity"),
project_task=T("Task"),
hms_hospital=T("Hospital"))
tablename = "doc_entity"
doc_entity = self.super_entity(tablename, "doc_id", entity_types)
# Components
add_component("doc_document", doc_entity=self.super_key(doc_entity))
add_component("doc_image", doc_entity=self.super_key(doc_entity))
# ---------------------------------------------------------------------
# Documents
#
tablename = "doc_document"
table = define_table(tablename,
super_link("site_id", "org_site"),
super_link("doc_id", doc_entity),
Field("file", "upload", autodelete=True),
Field("name", length=128,
notnull=True,
# Allow Name to be added onvalidation
requires = IS_NULL_OR(IS_LENGTH(128)),
label=T("Name")),
Field("url", label=T("URL"),
requires = IS_NULL_OR(IS_URL()),
represent = lambda url: \
url and A(url,_href=url) or NONE),
person_id(label=T("Author"),
comment=person_comment(T("Author"),
T("The Author of this Document (optional)"))),
organisation_id(
widget = S3OrganisationAutocompleteWidget(default_from_profile=True)
),
s3_date(label = T("Date Published")),
location_id(),
s3_comments(),
#Field("entered", "boolean", label=T("Entered")),
Field("checksum", readable=False,
|
writable=False),
*s3_meta_fields())
# Field configuration
table.file.represent = lambda file, table=table: \
self.doc_file_represent(file, table)
#table.location_id.readable = False
#table.location_id.writable = False
#table.entered.comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Entered"),
# T("Has data from this Reference Document been entered into Sahana?")))
# CRUD Strings
ADD_DOCUMENT = T("Add Reference Document")
crud_strings[tablename] = Storage(
title_create = ADD_DOCUMENT,
title_display = T("Document Details"),
title_list = T("Documents"),
title_update = T("Edit Document"),
title_search = T("Search Documents"),
subtitle_create = T("Add New Document"),
label_list_button = T("List Documents"),
label_create_button = ADD_DOCUMENT,
label_delete_button = T("Delete Document"),
msg_record_created = T("Document added"),
msg_record_modified = T("Document updated"),
msg_record_deleted = T("Document deleted"),
msg_list_empty = T("No Documents found")
)
# Search Method?
# Resource Configuration
configure(tablename,
onvalidation=self.document_onvalidation)
# ---------------------------------------------------------------------
# Images
#
# @ToDo: Field to determine which is the default image to use for
# e.g. a Map popup (like the profile picture)
# readable/writable=False except in the cases where-needed
#
doc_image_type_opts = {
1:T("Photograph"),
2:T("Map"),
3:T("Document Scan"),
99:T("other")
}
tablename = "doc_image"
table = define_table(tablename,
super_link("site_id", "org_site"),
super_link("pe_id", "pr_pentity"),
super_link("doc_id", doc_entity),
Field("file", "upload", autodelete=True,
requires = IS_NULL_OR(
IS_IMAGE(extensions=(s3.IMAGE_EXTENSIONS)
)),
# upload folder needs to be visible to the download() function as well as the upload
uploadfolder = os.path.join(current.request.folder,
"uploads",
"images")),
Field("name", length=128,
notnull=True,
# Allow Name to be added onvalidation
requires = IS_NULL_OR(IS_LENGTH(128)),
label=T("Name")),
Field("url", label=T("URL"),
requires = IS_NULL_OR(IS_URL())),
Field("type", "integer",
requires = IS_IN_SET(doc_image_type_opts, zero=None),
default = 1,
|
acs3ss/austinsullivan.github.io
|
files/Gamebox/game.py
|
Python
|
unlicense
| 28,200
| 0.002163
|
# Grayson Gatti (gtg8cd) AND Austin Sullivan (acs3ss)
# CS 1110 - Spring 2017
# Monday, May 1st 2017
import pygame
import gamebox
import random
import math
# this code uses stop_loop to exit ticks() function. Then changes level, resets the screen, etc before re-entering ticks
def tick(keys):
global level, room, char_health, char_speed, frames, half_frames, attack_cooldown, attack_burst, flipped, \
ability_burst, ability, kill_count, dead_enemies, level_screen, game_over, boss_appear, boss_health, dead_boss
if game_over:
end_game()
half_frames += 1
if half_frames % 2 == 0:
frames += 1
attacking = False
walking = False
left = False
ability = False
# causes an error, quitting the game when F1 hit
if pygame.K_F1 in keys:
level = -3
gamebox.stop_loop()
# home screen
if level == -1:
if camera.mouseclick:
if clicking(start):
gamebox.stop_loop()
# select char screen
    if level == 0:
        # depending on which character is clicked, set variables for future use,
        # then stop the loop to go to the next level
        global character, char_sheet, ability_sheet
        if camera.mouseclick:
            if clicking(knight):
                character = knight
                char_sheet = knight_sheet
                ability_sheet = knight_ability_sheet
                gamebox.stop_loop()
            if clicking(ranger):
                character = ranger
                char_sheet = ranger_sheet
                ability_sheet = ranger_ability_sheet
                gamebox.stop_loop()
# setup is over, we're ready to play! These are the controls for actual gameplay
if 0 < level <= 2 and not level_screen:
# level one setup
if level == 1:
camera.draw(background)
room_setup()
# level two setup
if level == 2:
camera.draw(background)
room_setup()
# use the arrows to move
if pygame.K_w in keys:
character.y -= char_speed
walking = True
if pygame.K_s in keys:
character.y += char_speed
walking = True
if pygame.K_d in keys:
character.x += char_speed
walking = True
if pygame.K_a in keys:
character.x -= char_speed
walking = True
left = True
# space bar to attack
if camera.mouseclick:
if attack_burst <= 20 and attack_cooldown <= 0: # can't attack continuously. Can attack for 1/3 sec at time
attacking = True
attack_burst += 1
if attack_burst >= 20:
attack_cooldown = 15 # cooldown before you're allowed to attack again
if attack_burst == 1 and character == ranger:
fire_arrow()
else:
attack_burst = 0
attack_cooldown -= 1
# Q to use abilities
if pygame.K_q in keys:
if kill_count >= 3: # can use ability if ability bar is full enough
ability = True
kill_count -= 3
ability_burst = 0
ability_burst += 1
if ability_burst <= 60: # can't use ability continuously. Can use 2 sec at a time
ability = True
# arrow animations and removal
moving_arrows(arrows)
moving_arrows(skeleton_arrows)
moving_arrows(boss_arrows)
# enemy AI, boss AI
enemy_movement()
if boss_appear and not dead_boss:
boss_movement(frames)
# running into doors
room_movement()
# collecting hearts
for heart in life:
if character.touches(heart):
life.remove(heart)
# you can't have more than 100% health, silly
if char_health > 90:
char_health = 100
else:
char_health += 20
# interactions with enemies. If knight is attacking, they die. If not, you take damage and get knocked back
# arrows kill enemies
for room_num, where in enumerate(enemies):
if room_num == room:
for index, (enemy, alive, species) in enumerate(where):
if character.touches(enemy) and alive:
if not ability:
rebound(character, enemy, attacking)
if not attacking or character == ranger:
char_health = take_damage(20, char_health)
else:
enemies[room_num][index][1] = False
dead_enemy(enemy)
                    if ability and (attacking or character == knight):
enemies[room_num][index][1] = False
dead_enemy(enemy)
for sharp in arrows:
if sharp[0].touches(enemy) and alive:
enemies[room_num][index][1] = False
dead_enemy(enemy)
for sharp in skeleton_arrows:
if sharp[0].touches(character):
skeleton_arrows.remove(sharp)
char_health = take_damage(20, char_health)
# only draws living enemies. Also, checks to see if all enemies are dead
num_enemies = 0
for room_num, where in enumerate(enemies):
for index, (enemy, alive, species) in enumerate(where):
if alive:
num_enemies += 1
if room_num == room:
camera.draw(enemy)
if num_enemies == 0:
dead_enemies = True
    # handles interactions between character and bosses. Boss must have appeared.
if character.touches(boss) and boss_appear:
if not attacking and not ability:
char_health = take_damage(20, char_health)
rebound(character, boss, attacking)
if attacking and character == knight:
boss_health = take_damage(20, boss_health)
rebound(character, boss, attacking)
# ranger arrow interactions with bosses
for arrow in arrows:
if arrow[0].touches(boss) and boss_appear:
boss_health = take_damage(10, boss_health)
arrows.remove(arrow)
# boss arrow interactions with character
for sharp in boss_arrows:
if sharp[0].touches(character):
boss_arrows.remove(sharp)
char_health = take_damage(20, char_health)
# you can't run through walls
[character.move_to_stop_overlapping(wall) for wall in border]
[boss.move_to_stop_overlapping(wall) for wall in border]
# draws walls, hearts after enemy death
[camera.draw(wall) for wall in border]
[camera.draw(heart) for heart in life]
# exits level once you get to the end of the level. Also resets room to 0
if character.touches(ladder) and dead_enemies and dead_boss:
level_screen = True
gamebox.stop_loop()
# your health. Game pauses when you run out of life
if char_health <= 0:
game_over = True
if character == ranger:
char_health_bar = gamebox.from_image(75, 550, char_health_sheet[5 - int(char_health/20)])
if character == knight:
char_health_bar = gamebox.from_image(75, 550, char_health_sheet[5 - int(char_health/40)])
camera.draw(char_health_bar)
if boss_health <= 0:
dead_boss = True
boss_appear = False
char_ability_bar = gamebox.from_image(75, 50, ability_bar_sheet[5 - kill_count])
char_ability_b
|
ic-labs/django-icekit
|
icekit/page_types/layout_page/migrations/0008_auto_20170518_1629.py
|
Python
|
mit
| 686
| 0.002915
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('layout_page', '0007_auto_20170509_1148'),
]
operations = [
migrations.AddField(
model_name='layoutpage',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='layoutpage',
name='brief',
            field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
Arvedui/picuplib
|
setup.py
|
Python
|
lgpl-2.1
| 950
| 0.015789
|
#!/usr/bin/env python
# -*- coding:utf8 -*-
from setuptools import setup
import picuplib
setup(
name = 'picuplib',
packages = ['picuplib'],
version = picuplib.__version__,
description = 'Picflash upload library',
author = 'Arvedui',
author_email = 'arvedui@posteo.de',
url = 'https://github.com/Arvedui/picuplib',
install_requires=['requests', 'requests-toolbelt'],
classifiers=[
'Development Status :: 4 - Beta',
        'Topic :: Software Development :: Libraries',
'Intended Audience :: Developers',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
        'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
matham/cplcom
|
cplcom/moa/device/ftdi.py
|
Python
|
mit
| 21,337
| 0.000047
|
'''Barst FTDI Wrapper
=====================
'''
from pybarst.ftdi import FTDIChannel
from pybarst.ftdi.switch import SerializerSettings, PinSettings
from pybarst.ftdi.adc import ADCSettings
from kivy.properties import NumericProperty, DictProperty, StringProperty, \
ListProperty, ObjectProperty, BooleanProperty
from moa.threads import ScheduledEventLoop
from moa.device.digital import ButtonViewPort
from moa.device.adc import ADCPort
from moa.logger import Logger
from moa.device import Device
from cplcom.moa.device import DeviceExceptionBehavior
__all__ = ('FTDIDevChannel', 'FTDISerializerDevice', 'FTDIPinDevice',
'FTDIADCDevice')
class FTDIDevChannel(DeviceExceptionBehavior, Device, ScheduledEventLoop):
'''A :class:`moa.device.Device` wrapper around a
:class:`pybarst.ftdi.FTDIChannel` instance and controls the
:class:`FTDISerializerDevice`, :class:`FTDIPinDevice`, and the
:class:`FTDIADCDevice` instances.
'''
__settings_attrs__ = ('ftdi_serial', 'ftdi_desc')
def activate(self, *largs, **kwargs):
kwargs['state'] = 'activating'
if not super(FTDIDevChannel, self).activate(*largs, **kwargs):
return False
self.start_thread()
self.chan = FTDIChannel(
channels=[dev.settings for dev in self.devs],
server=self.server.server, desc=self.ftdi_desc,
serial=self.ftdi_serial)
def finish_activate(*largs):
self.activation = 'active'
self.request_callback(self._start_channel, finish_activate)
return True
def _start_channel(self):
if self.restart:
self.chan.open_channel(alloc=True)
self.chan.close_channel_server()
devs = self.chan.open_channel(alloc=True)
for moadev, ftdev in zip(self.devs, devs):
moadev.chan = ftdev
def deactivate(self, *largs, **kwargs):
kwargs['state'] = 'deactivating'
if not super(FTDIDevChannel, self).deactivate(*largs, **kwargs):
return False
def finish_deactivate(*largs):
self.activation = 'inactive'
self.stop_thread()
self.request_callback(self.chan.close_channel_server,
finish_deactivate)
return True
ftdi_serial = StringProperty('')
'''The serial number of the FTDI hardware board. Can be empty if
:attr:`ftdi_desc` is provided.
'''
ftdi_desc = StringProperty('')
'''The description of the FTDI hardware board. This a name written to the
hardware device.
:attr:`ftdi_serial` or :attr:`ftdi_desc` are used to locate the correct
board to open. An example is `'Alder Board'` for the Alder board.
'''
server = ObjectProperty(None, allownone=True)
'''The internal barst :class:`pybarst.core.server.BarstServer`. It
must be provided to the instance.
'''
chan = ObjectProperty(None)
'''The internal :class:`pybarst.ftdi.FTDIChannel` instance.
It is read only and is automatically created.
'''
devs = ListProperty([])
'''A list of the :class:`FTDISerializerDevice`, :class:`FTDIPinDevice`, and
the :class:`FTDIADCDevice` instances connected to this channel.
'''
restart = BooleanProperty(True)
'''If True we will restart the channel if it already exists. Should be set
to False if multiple users of the channel exist.
Defaults to ``True``
'''
class FTDISerializerDevice(DeviceExceptionBehavior, ButtonViewPort,
ScheduledEventLoop):
    '''A :class:`moa.device.digital.ButtonViewPort` wrapper around a
:class:`pybarst.ftdi.switch.FTDISerializerIn` or
:class:`pybarst.ftdi.switch.FTDISerializerOut` instance
(depending on the value of :attr:`output`).
For this class, :class:`moa.device.digital.ButtonViewPort.dev_map` must be
provided upon creation and it's a dict whose keys are the property names
and whose values are the serial device's port numbers that the
property controls.
E.g. for a group of odors connected to channel 3-4 output port, define the
class::
class MyFTDISerializerDevice(FTDISerializerDevice):
p3 = BooleanProperty(False)
p4 = BooleanProperty(False)
And then create the instance with::
dev = FTDISerializerDevice(dev_map={'p3': 3, 'p4': 4})
And then we can set the state by calling e.g.::
dev.set_state(high=['p3'], low=['p4'])
For an input serial devices it can defined similarly and the state of the
property reflects the value of the port.
'''
__settings_attrs__ = (
'clock_size', 'num_boards', 'clock_bit', 'data_bit', 'latch_bit',
'output')
_read_event = None
_write_event = None
def __init__(self, **kwargs):
super(FTDISerializerDevice, self).__init__(**kwargs)
self.direction = 'o' if self.output else 'i'
self.settings = SerializerSettings(
clock_bit=self.clock_bit, data_bit=self.data_bit,
latch_bit=self.latch_bit, num_boards=self.num_boards,
output=self.output, clock_size=self.clock_size)
def _write_callback(self, result, kw_in):
high = kw_in['set_high']
low = kw_in['set_low']
dev_map = self.chan_dev_map
self.timestamp = result
for idx in high:
setattr(self, dev_map[idx], True)
for idx in low:
setattr(self, dev_map[idx], False)
self.dispatch('on_data_update', self)
def _read_callback(self, result, **kwargs):
t, val = result
self.timestamp = t
for idx, name in self.chan_dev_map.items():
setattr(self, name, val[idx])
self.dispatch('on_data_update', self)
def activate(self, *largs, **kwargs):
kwargs['state'] = 'activating'
if not super(FTDISerializerDevice, self).activate(*largs, **kwargs):
return False
self.start_thread()
def finish_activate(*largs):
self.activation = 'active'
self._write_event = self.request_callback(
self.chan.write, callback=self._write_callback, trigger=False,
repeat=True)
if 'i' in self.direction:
self._read_event = self.request_callback(
self.chan.read, callback=self._read_callback, trigger=True,
repeat=True)
self.request_callback(self._start_channel, finish_activate)
return True
def _start_channel(self):
odors = self.chan
odors.open_channel()
odors.set_state(True)
odors.write(set_low=list(range(8 * self.num_boards)))
def deactivate(self, *largs, **kwargs):
kwargs['state'] = 'deactivating'
if not super(FTDISerializerDevice, self).deactivate(*largs, **kwargs):
return False
self.remove_request(self.chan.read, self._read_event)
self.remove_request(self.chan.write, self._write_event)
self._write_event = self._read_event = None
def finish_deactivate(*largs):
self.activation = 'inactive'
self.stop_thread()
self.request_callback(self._stop_channel, finish_deactivate)
return True
def _stop_channel(self, *largs, **kwargs):
chan = self.chan
chan.write(set_low=list(range(8 * self.num_boards)))
if self.settings.continuous:
chan.cancel_read(flush=True)
chan.set_state(False)
chan.close_channel_client()
def set_state(self, high=[], low=[], **kwargs):
if 'o' not in self.direction:
raise TypeError('Cannot write state for a input device')
dev_map = self.dev_map
self.request_callback(self.chan.write,
set_high=[dev_map[name] for name in high],
set_low=[dev_map[name] for name in low])
clock_size = Numer
|
deployed/django
|
django/utils/text.py
|
Python
|
bsd-3-clause
| 14,829
| 0.001416
|
from __future__ import unicode_literals
import re
import unicodedata
from gzip import GzipFile
from io import BytesIO
from django.utils.encoding import force_text
from django.utils.functional import allow_lazy, SimpleLazyObject
from django.utils import six
from django.utils.six.moves import html_entities
from django.utils.translation import ugettext_lazy, ugettext as _, pgettext
from django.utils.safestring import mark_safe
if six.PY2:
# Import force_unicode even though this module doesn't use it, because some
# people rely on it being here.
from django.utils.encoding import force_unicode # NOQA
# Capitalizes the first letter of a string.
capfirst = lambda x: x and force_text(x)[0].upper() + force_text(x)[1:]
capfirst = allow_lazy(capfirst, six.text_type)
# Set up regular expressions
re_words = re.compile(r'<.*?>|((?:\w[-\w]*|&.*?;)+)', re.U | re.S)
re_chars = re.compile(r'<.*?>|(.)', re.U | re.S)
re_tag = re.compile(r'<(/)?([^ ]+?)(?:(\s*/)| .*?)?>', re.S)
re_newlines = re.compile(r'\r\n|\r') # Used in normalize_newlines
re_camel_case = re.compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')
def wrap(text, width):
"""
A word-wrap function that preserves existing line breaks and most spaces in
the text. Expects that existing line breaks are posix newlines.
"""
text = force_text(text)
def _generator():
it = iter(text.split(' '))
word = next(it)
yield word
pos = len(word) - word.rfind('\n') - 1
for word in it:
if "\n" in word:
lines = word.split('\n')
else:
lines = (word,)
pos += len(lines[0]) + 1
if pos > width:
yield '\n'
pos = len(lines[-1])
else:
yield ' '
if len(lines) > 1:
pos = len(lines[-1])
yield word
return ''.join(_generator())
wrap = allow_lazy(wrap, six.text_type)
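# Worked example (derived from the generator above): wrapping at width 3
# breaks before a word that would overflow the line, preserving spaces:
#   wrap('a b c', 3) -> 'a b\nc'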
class Truncator(SimpleLazyObject):
"""
An object used to truncate text, either by characters or words.
"""
def __init__(self, text):
super(Truncator, self).__init__(lambda: force_text(text))
def add_truncation_text(self, text, truncate=None):
if truncate is None:
truncate = pgettext(
'String to return when truncating text',
'%(truncated_text)s...')
truncate = force_text(truncate)
if '%(truncated_text)s' in truncate:
return truncate % {'truncated_text': text}
# The truncation text didn't contain the %(truncated_text)s string
# replacement argument so just append it to the text.
if text.endswith(truncate):
# But don't append the truncation text if the current text already
# ends in this.
return text
return '%s%s' % (text, truncate)
def chars(self, num, truncate=None, html=False):
"""
        Returns the text truncated to be no longer than the specified number
of characters.
Takes an optional argument of what should be used to notify that the
string has been truncated, defaulting to a translatable string of an
        ellipsis (...).
"""
length = int(num)
text = unicodedata.normalize('NFC', self._wrapped)
# Calculate the length to truncate to (max length - end_text length)
truncate_len = length
for char in self.add_truncation_text('', truncate):
if not unicodedata.combining(char):
truncate_len -= 1
if truncate_len == 0:
break
if html:
return self._truncate_html(length, truncate, text, truncate_len, False)
return self._text_chars(length, truncate, text, truncate_len)
chars = allow_lazy(chars)
def _text_chars(self, length, truncate, text, truncate_len):
"""
Truncates a string after a certain number of chars.
"""
s_len = 0
end_index = None
for i, char in enumerate(text):
if unicodedata.combining(char):
# Don't consider combining characters
# as adding to the string length
continue
s_len += 1
if end_index is None and s_len > truncate_len:
end_index = i
if s_len > length:
# Return the truncated string
return self.add_truncation_text(text[:end_index or 0],
truncate)
# Return the original string since no truncation was necessary
return text
def words(self, num, truncate=None, html=False):
"""
Truncates a string after a certain number of words. Takes an optional
argument of what should be used to notify that the string has been
truncated, defaulting to ellipsis (...).
"""
length = int(num)
if html:
return self._truncate_html(length, truncate, self._wrapped, length, True)
return self._text_words(length, truncate)
words = allow_lazy(words)
def _text_words(self, length, truncate):
"""
Truncates a string after a certain number of words.
Newlines in the string will be stripped.
"""
words = self._wrapped.split()
if len(words) > length:
words = words[:length]
return self.add_truncation_text(' '.join(words), truncate)
return ' '.join(words)
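    # Worked examples (assuming the default translatable ellipsis):
    #   Truncator('one two three').words(2) -> 'one two...'
    #   Truncator('one two').words(5)       -> 'one two'  (no truncation needed)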
def _truncate_html(self, length, truncate, text, truncate_len, words):
"""
Truncates HTML to a certain number of chars (not counting tags and
comments), or, if words is True, then to a certain number of words.
Closes opened tags if they were correctly closed in the given HTML.
Newlines in the HTML are preserved.
"""
if words and length <= 0:
return ''
html4_singlets = (
'br', 'col', 'link', 'base', 'img',
'param', 'area', 'hr', 'input'
)
# Count non-HTML chars/words and keep note of open tags
pos = 0
end_text_pos = 0
current_len = 0
open_tags = []
regex = re_words if words else re_chars
while current_len <= length:
m = regex.search(text, pos)
if not m:
# Checked through whole string
break
pos = m.end(0)
if m.group(1):
# It's an actual non-HTML word or char
current_len += 1
if current_len == truncate_len:
end_text_pos = pos
continue
# Check for tag
tag = re_tag.match(m.group(0))
if not tag or current_len >= truncate_len:
# Don't worry about non tags or tags after our truncate point
continue
closing_tag, tagname, self_closing = tag.groups()
# Element names are always case-insensitive
tagname = tagname.lower()
if self_closing or tagname in html4_singlets:
pass
elif closing_tag:
# Check for match in open tags list
try:
i = open_tags.index(tagname)
except ValueError:
pass
else:
# SGML: An end tag closes, back to the matching start tag,
# all unclosed intervening start tags with omitted end tags
open_tags = open_tags[i + 1:]
else:
# Add it to the start of the open tags list
open_tags.insert(0, tagname)
if current_len <= length:
return text
out = text[:end_text_pos]
truncate_text = self.add_truncation_text('', truncate)
if truncate_text:
out += truncate_text
# Close any tags still open
for tag in open_tags:
out += '</%s>' % tag
# Return string
return out
def get_valid_filename(s):
"""
Returns the given stri
|
furbrain/tingbot-python
|
tests/button_test.py
|
Python
|
bsd-2-clause
| 1,851
| 0.002161
|
import unittest
from tingbot.button import Button
class ButtonTestCase(unittest.TestCase):
def setUp(self):
self.button = Button()
def assertActions(self, action_types):
self.assertEqual(action_types, [a.type for a in self.button.actions])
def testPress(self):
self.button.add_event('down', timestamp=1)
self.button.add_event('up', timestamp=1.5)
self.button.process_events(time=2)
self.assertActions(['down', 'up', 'press'])
def testHold(self):
self.button.add_event('down', timestamp=1)
self.button.add_event('up', timestamp=3)
self.button.process_events(3.1)
self.assertActions(['down', 'hold', 'up'])
def testIncrementalHold(self):
self.button.add_event('down', timestamp=1)
self.button.process_events(time=1.1)
self.assertActions(['down'])
self.button.process_events(time=2.1)
self.assertActions(['down', 'hold'])
self.button.add_event('up', timestamp=3)
self.button.process_events(time=3.1)
self.assertActions(['down', 'hold', 'up'])
def testRepeatedPress(self):
self.button.add_event('down', timestamp=1)
self.button.add_event('up', timestamp=1.5)
self.button.add_event('down', timestamp=3.5)
self.button.add_event('up', timestamp=4.0)
self.button.process_events(time=4.1)
self.assertActions(['down', 'up', 'press','down', 'up', 'press'])
def testRepeatedQuickPress(self):
self.button.add_event('down', timestamp=1)
self.button.add_event('up', timestamp=1.5)
self.button.add_event('down', timestamp=1.6)
self.button.add_event('up', timestamp=2.2)
self.button.process_events(time=4.1)
self.assertActions(['down', 'up', 'press','down', 'up', 'press'])
|
yelu/leetcode
|
ReverseWords.py
|
Python
|
gpl-2.0
| 295
| 0.020339
|
class Solution:
    def reverseWords(self, s):
        tks = s.split(' ')
        tks = filter(None, tks)
        tks.reverse()
        return ' '.join(tks).strip()

test = ["the sky is blue", " a b "]
sol = Solution()
for t in test:
    print sol.reverseWords(t)
|
idcodeoverflow/SocialNetworkAnalyzer
|
DBLayout/FacebookPostControlDB.py
|
Python
|
mit
| 3,492
| 0.002864
|
from DBLayout.DBConnection import DBConnection, mysql
from EntitiesLayout.FacebookPostControl import FacebookPostControl
__author__ = 'David'
class FacebookPostControlDB:
def __init__(self):
self.db = DBConnection()
print('Create object to access table user in DB.')
def insertPostControl(self, post: FacebookPostControl):
try:
cnx = self.db.openConnection()
cursor = cnx.cursor()
addFBPostControlQuery = 'INSERT INTO postControl(idpostControl, fbid, facebookUserID, visited) ' \
'VALUES (NULL, %s, %s, %s);'
data = (post.fbid, post.facebookUser.facebookUserId, post.visited)
cursor.execute(addFBPostControlQuery, data)
cnx.commit()
cursor.close()
self.db.closeConnection()
except mysql.connector.Error as err:
print('Error writing a Facebook Post Control in the DB.' + str(err))
except AttributeError as err:
print('Register can\'t be stored.' + str(err))
def readPostControl(self, id: int):
postControl = FacebookPostControl()
try:
cnx = self.db.openConnection()
cursor = cnx.cursor()
readFBPostControlQuery = ("SELECT idpostControl, fbid, facebookUserID, visited "
"FROM postControl WHERE fbid = %s;")
data = (id,)
cursor.execute(readFBPostControlQuery, data)
for (idpostControl, fbid, facebookUserID, visited) in cursor:
postControl = (FacebookPostControl(idpostControl, fbid, facebookUserID, visited))
cursor.close()
self.db.closeConnection()
except mysql.connector.Error as ex:
print(ex)
print('Error reading a Facebook Post Control in the DB.')
return postControl
def readNotVisitedPostsControl(self):
postsControl = []
try:
cnx = self.db.openConnection()
cursor = cnx.cursor()
readFBPostControlQuery = ("SELECT idpostControl, fbid, facebookUserID, visited "
"FROM postControl WHERE visited = %s")
data = (False,)
cursor.execute(readFBPostControlQuery, data)
for (idpostControl, fbid, facebookUserID, visited) in cursor:
postsControl.append(FacebookPostControl(idpostControl, fbid, facebookUserID, visited))
cursor.close()
self.db.closeConnection()
        except mysql.connector.Error as ex:
print(ex)
print('Error reading not Visited Facebook Posts Control in the DB.')
return postsControl
def readPostsControl(self):
postsControl = []
try:
cnx = self.db.openConnection()
cursor = cnx.cursor()
readFBPostControlQuery = ("SELECT idpostControl, fbid, facebookUserID, visited "
"FROM postControl")
cursor.execute(readFBPostControlQuery)
for (idpostControl, fbid, facebookUserID, visited) in cursor:
postsControl.append(FacebookPostControl(idpostControl, fbid, facebookUserID, visited))
cursor.close()
self.db.closeConnection()
except mysql.connector.Error as ex:
print(ex)
print('Error reading Facebook Posts Control in the DB.')
return postsControl
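# Minimal usage sketch (the fbid below is hypothetical):
# dao = FacebookPostControlDB()
# control = dao.readPostControl(1234567890)
# pending = dao.readNotVisitedPostsControl()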
|
django-salesforce/django-salesforce
|
salesforce/testrunner/example/tests.py
|
Python
|
mit
| 363
| 0
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
# from django.test import TestCase
|
Macemann/Georgetown-Capstone
|
app/db/connect.py
|
Python
|
mit
| 493
| 0.008114
|
import pymongo
def connect():
    '''
Create the connection to the MongoDB and create 3 collections needed
'''
try:
# Create the connection to the local host
conn = pymongo.MongoClient()
print 'MongoDB Connection Successful'
except pymongo.errors.ConnectionFailure, err:
print 'MongoDB Connection Unsuccessful'
        return False
# This is the name of the database -'GtownTwitter'
db = conn['GtownTwitter_PROD']
return db
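# Minimal usage sketch (collection name is hypothetical):
# db = connect()
# if db is not False:
#     tweets = db['tweets']
#     print tweets.count()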
|
tombs/Water-Billing-System
|
waterbilling/tasks/models.py
|
Python
|
agpl-3.0
| 250
| 0.004
|
from django.db import models
# Create your models here.
from core.models import BillingSchedule
from core.models import Bill
from core.models import Account
from core.models import FileRepo
from core.models import Config
from core.models import Task
|
kervi/kervi
|
kervi-core/kervi/dashboards/__init__.py
|
Python
|
mit
| 9,756
| 0.004818
|
#Copyright 2016 Tim Wentlau.
#Distributed under the MIT License. See LICENSE in root of project.
"""
A dashboard is the main ui component in a kervi application.
"""
_DASHBOARDS = []
from kervi.core.utility.component import KerviComponent
import kervi.spine as spine
class DashboardPanelGroup(object):
r"""
Create a group of dashboard panels.
:param group_id:
id of the group.
:type group_id: str
:param \**kwargs:
See below
:Keyword Arguments:
* *title* (``str``) -- Title of the group.
* *columns* (``int``) -- Number of columns in this group, default is 1.
"""
def __init__(self, panels=None, **kwargs):
self.spine = spine.Spine()
self.group_id = kwargs.get("title", None)
self.ui_parameters = {
"label":kwargs.get("title", ""),
"width":kwargs.get("width", 0),
"height":kwargs.get("height", 0),
"layout":kwargs.get("layout", "row"),
"gauge_width":kwargs.get("gauge_width", 0),
"gauge_height":kwargs.get("gauge_height", 0),
"panel_width":kwargs.get("panel_width", 0),
"panel_height":kwargs.get("panel_height", 0),
}
self._dashboard = None
self._user_groups = kwargs.get("user_groups", [])
self._panels = []
        if panels:
            for panel in panels:
                self.add_panel(panel)
@property
def user_groups(self):
return self._user_groups
@user_groups.setter
def user_groups(self, value):
self._user_groups.clear()
self._user_groups += value
@property
def dashboard(self):
return self._dashboard
@dashboard.setter
def dashboard(self, dashboard):
self._dashboard = dashboard
for panel in self._panels:
panel.dashboard = dashboard
def add_panel(self, panel):
self._panels += [panel]
panel.dashboard = self.dashboard
def _get_info(self, **kwargs):
self.spine.log.debug("Query dashboard components:{0} - {1}", self.dashboard.dashboard_id, self.group_id)
session = kwargs.get("session", None)
authorized = True
if session and len(self.user_groups) > 0:
for group in self.user_groups:
if group in session["groups"]:
break
else:
authorized = False
if authorized:
panels = []
for panel in self._panels:
panels += [panel._get_info(**kwargs)]
return {
"id": self.group_id,
"type": "group",
"uiParameters": self.ui_parameters,
                "dashboard": self.dashboard.get_reference(),
"panels": panels
}
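# Minimal usage sketch (ids and titles are hypothetical):
# group = DashboardPanelGroup(
#     [DashboardPanel("temperature", title="Temperature")],
#     title="Sensors",
# )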
class DashboardPanel(object):
r"""
Create a dashboard panel.
:param panel_id:
id of the panel.
This id is used in other components to reference this panel.
:type panel_id: str
:param \**kwargs:
See below
:Keyword Arguments:
* *title* (``str``) -- Title of the panel.
* *user_log* (``bool``) -- This panel shows user log messages. Any components that are linked to a user log panel are ignored.
* *collapsed* (``bool``) -- If true the body of the panel is collapsed.
"""
def __init__(self, panel_id, **kwargs):
self.spine = spine.Spine()
self.panel_id = panel_id
self.ui_parameters = {
"label":kwargs.pop("title", ""),
"width":kwargs.pop("width", 0),
"height":kwargs.pop("height", 0),
"userLog":kwargs.pop("user_log", False),
"app_health":kwargs.pop("app_health", False),
"logLength":kwargs.pop("log_length", 5),
"gauge_width":kwargs.pop("gauge_width", 0),
"gauge_height":kwargs.pop("gauge_height", 0),
}
self.dashboard = None
self.panel_id = panel_id
self._user_groups = kwargs.pop("user_groups", [])
@property
def user_groups(self):
return self._user_groups
@user_groups.setter
def user_groups(self, value):
self._user_groups.clear()
self._user_groups += value
def _get_panel_components(self, components):
result = []
if hasattr(components, "__len__") and not isinstance(components, dict):
for component in components:
result += self._get_panel_components(component)
else:
result += [components]
return result
def _get_info(self, **kwargs):
self.spine.log.debug("Query dashboard components:{0} - {1}", self.dashboard.dashboard_id, self.panel_id)
session = kwargs.get("session", None)
authorized = True
if session and len(self.user_groups) > 0:
for group in self.user_groups:
if group in session["groups"]:
break
else:
authorized = False
if authorized:
components = []
#components = self.spine.send_query(
# "getDashboardComponents",
# self.dashboard.dashboard_id,
# self.panel_id
#)
#panel_components = self._get_panel_components(components)
return {
"id": self.panel_id,
"type": "panel",
"uiParameters": self.ui_parameters,
"dashboard": self.dashboard.get_reference(),
"components": []
}
class Dashboard(KerviComponent):
r"""
Create a UI dashboard. The dashboard will show up in the dashboard menu in the UI.
A dashboard contains one or more panels. Kervi components like *sensors*,
*controller inputs* and other dynamic values are able to link to a panel on a dashboard.
All dashboard have the following system defined panels:
* header_right
* header_center
* footer_left
* footer_center
* footer_right
Besides from these panels each dashboard has two *controller pad* areas where
it is possible to link to the x and y coordinates of the pads.
A dynamic value like controller input may link to one of the following panels:
* left_pad_x
* left_pad_y
* right_pad_x
* right_pad_y
:param dashboard_id:
Unique id of the dashboard. Used when referencing this dashboard.
:type dashboard_id: ``str``
:param name:
Name of the dahsboard. Used when this dashboard is listed in the dashboard menu in the UI.
:type name: ``str``
:param \**kwargs:
See below
:Keyword Arguments:
* *is_default* (``bool``) --
If true this dashboard will show up as the active dashboard when web dashboards loads.
"""
def __init__(self, dashboard_id, name, panels=None, **kwargs):
global _DASHBOARDS
KerviComponent.__init__(self, dashboard_id, "dashboard", name, **kwargs)
_DASHBOARDS.append(self)
self.dashboard_id = dashboard_id
self.is_default = kwargs.pop("is_default", False)
self.gauge_width = kwargs.pop("gauge_width", 0)
self.gauge_height = kwargs.pop("gauge_height", 0)
        self.panel_width = kwargs.pop("panel_width", 0)
        self.panel_height = kwargs.pop("panel_height", 0)
self.panels = []
self.add_panel(DashboardPanel("header_right"))
self.add_panel(DashboardPanel("header_center"))
self.add_panel(DashboardPanel("footer_center"))
self.add_panel(DashboardPanel("footer_left"))
self.add_panel(DashboardPanel("footer_right"))
self.add_panel(DashboardPanel("background"))
self.add_panel(DashboardPanel("left_pad_x"))
self.add_panel(DashboardPanel("left_pad_y"))
self.add_panel(DashboardPanel("right_pad_x"))
self.add_panel(DashboardPanel("right_pad_y"))
if panels:
for panel in panels:
self.add_panel(panel)
self.bac
|
nstanke/mailinabox
|
management/status_checks.py
|
Python
|
cc0-1.0
| 41,760
| 0.024315
|
#!/usr/bin/python3
#
# Checks that the upstream DNS has been set correctly and that
# TLS certificates have been signed, etc., and if not tells the user
# what to do next.
import sys, os, os.path, re, subprocess, datetime, multiprocessing.pool
import dns.reversename, dns.resolver
import dateutil.parser, dateutil.tz
import idna
import psutil
from dns_update import get_dns_zones, build_tlsa_record, get_custom_dns_config, get_secondary_dns, get_custom_dns_record
from web_update import get_web_domains, get_domains_with_a_records
from ssl_certificates import get_ssl_certificates, get_domain_ssl_files, check_certificate
from mailconfig import get_mail_domains, get_mail_aliases
from utils import shell, sort_domains, load_env_vars_from_file, load_settings
def run_checks(rounded_values, env, output, pool):
# run systems checks
output.add_heading("System")
# check that services are running
if not run_services_checks(env, output, pool):
# If critical services are not running, stop. If bind9 isn't running,
# all later DNS checks will timeout and that will take forever to
# go through, and if running over the web will cause a fastcgi timeout.
return
# clear bind9's DNS cache so our DNS checks are up to date
# (ignore errors; if bind9/rndc isn't running we'd already report
# that in run_services checks.)
shell('check_call', ["/usr/sbin/rndc", "flush"], trap=True)
run_system_checks(rounded_values, env, output)
# perform other checks asynchronously
run_network_checks(env, output)
run_domain_checks(rounded_values, env, output, pool)
def get_ssh_port():
# Returns ssh port
try:
output = shell('check_output', ['sshd', '-T'])
except FileNotFoundError:
# sshd is not installed. That's ok.
return None
returnNext = False
for e in output.split():
if returnNext:
return int(e)
if e == "port":
returnNext = True
# Did not find port!
return None
def run_services_checks(env, output, pool):
# Check that system services are running.
services = [
{ "name": "Local DNS (bind9)", "port": 53, "public": False, },
#{ "name": "NSD Control", "port": 8952, "public": False, },
{ "name": "Local DNS Control (bind9/rndc)", "port": 953, "public": False, },
{ "name": "Dovecot LMTP LDA", "port": 10026, "public": False, },
{ "name": "Postgrey", "port": 10023, "public": False, },
{ "name": "Spamassassin", "port": 10025, "public": False, },
{ "name": "OpenDKIM", "port": 8891, "public": False, },
{ "name": "OpenDMARC", "port": 8893, "public": False, },
{ "name": "Memcached", "port": 11211, "public": False, },
{ "name": "Mail-in-a-Box Management Daemon", "port": 10222, "public": False, },
{ "name": "SSH Login (ssh)", "port": get_ssh_port(), "public": True, },
{ "name": "Public DNS (nsd4)", "port": 53, "public": True, },
{ "name": "Incoming Mail (SMTP/postfix)", "port": 25, "public": True, },
{ "name": "Outgoing Mail (SMTP 587/postfix)", "port": 587, "public": True, },
#{ "name": "Postfix/master", "port": 10587, "public": True, },
{ "name": "IMAPS (dovecot)", "port": 993, "public": True, },
{ "name": "Mail Filters (Sieve/dovecot)", "port": 4190, "public": True, },
{ "name": "HTTP Web (nginx)", "port": 80, "public": True, },
{ "name": "HTTPS Web (nginx)", "port": 443, "public": True, },
]
all_running = True
fatal = False
ret = pool.starmap(check_service, ((i, service, env) for i, service in enumerate(services)), chunksize=1)
for i, running, fatal2, output2 in sorted(ret):
if output2 is None: continue # skip check (e.g. no port was set, e.g. no sshd)
all_running = all_running and running
fatal = fatal or fatal2
output2.playback(output)
if all_running:
output.print_ok("All system services are running.")
return not fatal
def check_service(i, service, env):
if not service["port"]:
# Skip check (no port, e.g. no sshd).
return (i, None, None, None)
output = BufferedOutput()
running = False
fatal = False
# Helper function to make a connection to the service, since we try
# up to three ways (localhost, IPv4 address, IPv6 address).
def try_connect(ip):
# Connect to the given IP address on the service's port with a one-second timeout.
import socket
s = socket.socket(socket.AF_INET if ":" not in ip else socket.AF_INET6, socket.SOCK_STREAM)
s.settimeout(1)
try:
s.connect((ip, service["port"]))
return True
except OSError as e:
# timed out or some other odd error
return False
finally:
s.close()
if service["public"]:
# Service should be publicly accessible.
if try_connect(env["PUBLIC_IP"]):
# IPv4 ok.
if not env.get("PUBLIC_IPV6") or service.get("ipv6") is False or try_connect(env["PUBLIC_IPV6"]):
# No IPv6, or service isn't meant to run on IPv6, or IPv6 is good.
running = True
# IPv4 ok but IPv6 failed. Try the PRIVATE_IPV6 address to see if the service is bound to the interface.
elif service["port"] != 53 and try_connect(env["PRIVATE_IPV6"]):
output.print_error("%s is running (and available over IPv4 and the local IPv6 address), but it is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
else:
output.print_error("%s is running and available over IPv4 but is not accessible over IPv6 at %s port %d." % (service['name'], env['PUBLIC_IPV6'], service['port']))
# IPv4 failed. Try the private IP to see if the service is running but not accessible (except DNS because a different service runs on the private IP).
elif service["port"] != 53 and try_connect("127.0.0.1"):
output.print_error("%s is running but is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
else:
output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
# Why is nginx not running?
if not running and service["port"] in (80, 443):
output.print_line(shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)[1].strip())
else:
# Service should be running locally.
if try_connect("127.0.0.1"):
running = True
else:
output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
# Flag if local DNS is not running.
if not running and service["port"] == 53 and service["public"] == False:
fatal = True
return (i, running, fatal, output)
def run_system_checks(rounded_values, env, output):
check_ssh_password(env, output)
check_software_updates(env, output)
check_miab_version(env, output)
check_system_aliases(env, output)
check_free_disk_space(rounded_values, env, output)
check_free_memory(rounded_values, env, output)
def check_ssh_password(env, output):
# Check that SSH login with password is disabled. The openssh-server
# package may not be installed so check that before trying to access
# the configuration file.
if not os.path.exists("/etc/ssh/sshd_config"):
return
sshd = open("/etc/ssh/sshd_config").read()
if re.search("\nPasswordAuthentication\s+yes", sshd) \
or not re.search("\nPasswordAuthentication\s+no", sshd):
output.print_error("""The SSH server on this machine permits password-based login. A more secure
way to log in is using a public key. Add your SSH public key to $HOME/.ssh/authorized_keys, check
that you can log in without a password, set the option 'PasswordAuthentication no' in
			/etc/ssh/sshd_config, and then restart openssh via 'sudo service ssh restart'.""")
else:
output.print_ok("SSH disallows password-based login.")
def is_reboot_needed_due_to_package_installation():
return os.path.exists("/var/run/reboot-required")
def check_software_updates(env, output):
# Check for any software package updates.
pkgs = list_apt_updates(apt_update=False)
if is_reboot_needed_due_to_package_installation():
output.print_error("System updates have been installed and a reboot of the machine is required.
|
")
elif len(pkgs) == 0:
output.print_ok("System software is up to date.")
else:
output.print_error("There are %d software packages that can be updated." % len(pkgs))
		for p in pkgs:
output.print_line("%s (%s)" % (p["package"], p["version"]))
def check_system_aliases(env, output):
# Check that the administrator alias e
|
strugee/pumptweet
|
pumptweet/MLStripper.py
|
Python
|
mit
| 831
| 0.038882
|
# coding=utf-8
from HTMLParser import HTMLParser
# Class for stripping HTML from text.
class MLStripper(HTMLParser):
def __init__(self):
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
# Replaces HTML entities with actual characters.
def replace_entities(html):
	unifiable = [
		('&amp;', '&'),
		('&nbsp;', ' '),
		('&#39;', "'"),
		('&quot;', "'"),
		('&ndash;', '-'),
		('&mdash;', u'–'),
		('&rarr;', u'→'),
		('&larr;', u'←'),
		('&lrarr;', u'↔'),
		('&ldquo;', '"'),
		('&rdquo;', '"'),
	]
for (entity, char) in unifiable:
html = html.replace(entity, char)
return html
# Strips tags from HTML, returning regular text.
def strip_tags(html):
html = replace_entities(html)
s = MLStripper()
s.feed(html)
return s.get_data()
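# A quick usage sketch (the sample string here is made up for illustration):
if __name__ == '__main__':
	sample = '<p>Hello &amp; welcome &ndash; enjoy!</p>'
	print strip_tags(sample)  # -> Hello & welcome - enjoy!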
|
openrightsgroup/OrgProbe
|
test/test_result.py
|
Python
|
gpl-3.0
| 936
| 0.004296
|
# coding: utf-8
from orgprobe.result import Result
import logging
def test_unicode():
title = u"Some text here with a \u00a3 sign"
r = Result('ok', 200, title=title)
assert isinstance(title, unicode)
assert r.title == "Some text here with a £ sign"
assert isinstance(r.title, str)
assert str(r) == """<Result: status="ok" code="200" category="None" type="None" ip="None" body_length="
|
0" """ \
"""ssl_verified="None" ssl_fingerprint="None" final_url="None" resolved_ip="None" title="Some text here with a £ sign">"""
logging.info("result: %s", r)
def test_utf8():
r = Result('ok', 200, title="£20")
assert r.title == "£20"
    assert isinstance(r.title, str)
assert str(r) == """<Result: status="ok" code="200" category="None" type="None" ip="None" body_length="0" """ \
"""ssl_verified="None" ssl_fingerprint="None" final_url="None" resolved_ip="None" title="£20">"""
|
bblacey/FreeCAD-MacOS-CI
|
src/Tools/embedded/PySide/mainwindow2.py
|
Python
|
lgpl-2.1
| 1,203
| 0.012469
|
import sys
#sys.path.append("")
from PySide import QtCore, QtGui
import FreeCADGui
class MainWindow(QtGui.QMainWindow):
    def __init__(self, parent = None):
super(MainWindow, self).__init__(parent)
from PySide import QtNetwork
# Webkit is used to create icons from SVG files. This could cause a deadlock
# when setting up the internally used network interface. Doing this before
# creating the icons fixes the issue.
QtNetwork.QNetworkConfigurationManager()
def showEvent(self, event):
FreeCADGui.showMainWindow()
self.setCentralWidget(FreeCADGui.getMainWindow())
# Need version >= 0.16.5949
class BlankWorkbench(FreeCADGui.Workbench):
MenuText = "Blank"
ToolTip = "Blank workbench"
def Initialize(self):
self.appendMenu("Menu",["Std_New", "Part_Box"])
return
def GetClassName(self):
return "Gui::PythonBlankWorkbench"
FreeCADGui.addWorkbench(BlankWorkbench)
FreeCADGui.activateWorkbench("BlankWorkbench")
app=QtGui.QApplication(sys.argv)
mw=MainWindow()
mw.resize(1200,800)
mw.show()
app.exec_()
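# Note: this example assumes it is run with a Python interpreter that can
# import FreeCADGui (e.g. the one bundled with FreeCAD), and per the comment
# above it needs FreeCAD version >= 0.16.5949.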
|
whitegreyblack/Spaceship
|
spaceship/classes/dungeon.py
|
Python
|
mit
| 25,031
| 0.004115
|
# Dungeon.py
# builds a random dungeon of size MxN
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+'/../')
from bearlibterminal import terminal as term
from copy import deepcopy
from random import choice, randint, shuffle
from collections import namedtuple
from tools import bresenhams
from spaceship.setup_game import setup_font
import math
import time
from copy import deepcopy
debug = False
X_TEMP, Y_TEMP = 78, 40
# X_TEMP, Y_TEMP = 160, 80
# X_TEMP, Y_TEMP = 80, 50
WALL, FLOOR = -1, 1
box = namedtuple("Box", "x1 y1 x2 y2")
point = namedtuple("Point", "x y")
def mst(graph):
q, p = {}, {}
for k in graph.keys():
q[k] = math.inf
p[k] = 0
q[0] = 0
p[0] = 0
while q:
        u = min(q, key=q.get)
for z in graph[u].keys():
if z in q.keys() and 0 < graph[u][z] < q[z]:
p[z] = u
q[z] = graph[u][z]
q.pop(u)
if choice([0, 1]) == 1 and q.keys():
            u = min(q, key=q.get)
for z in graph[u].keys():
if z in q.keys() and 0 < graph[u][z] < q[z]:
p[z] = u
q[z] = graph[u][z]
return p
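# mst() expects an adjacency mapping like {0: {1: 2.0, 2: 5.0}, ...} and
# returns a parent map describing a spanning tree rooted at node 0, e.g.
# (made-up weights):
#   parents = mst({0: {1: 2, 2: 5}, 1: {0: 2, 2: 1}, 2: {0: 5, 1: 1}})
# Because of the randomized extra relaxation pass inside the loop, the
# exact tree can differ between calls.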
def distance(p1, p2):
try:
return math.sqrt((p2.x-p1.x)**2+(p2.y-p1.y)**2)
except AttributeError:
return math.sqrt((p2[0]-p1[0])**2+(p2[1]-p1[1])**2)
def intersect(b1, b2):
o = offset = 1
return (b1.x1+o <= b2.x2 and b1.x2-o >= b2.x1 and
b1.y1+o <= b2.y2 and b1.y2-o >= b2.y1)
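# The one-unit offset shrinks b1 before the standard AABB overlap test, so
# rooms that merely touch (e.g. box(0, 0, 10, 10) and box(10, 0, 20, 10),
# which share the edge x = 10) do not count as intersecting; the boxes must
# genuinely overlap.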
def center(box):
return point((box.x1 + box.x2)//2, (box.y1 + box.y2)//2)
def volume(box):
return (box.x2-box.x1) * (box.y2-box.y1)
def rotate(box):
return list(zip(*box[::-1]))
def equal(p1, p2):
try:
# point comparison
return p1.x == p2.x and p1.y == p2.y
except AttributeError:
# room comparison
return center(p1) == center(p2)
except:
raise
def ooe(i, j):
h = rx = X_TEMP//2-1
k = ry = Y_TEMP//2-1
return ((i-h)**2)/(rx**2) + ((j-k)**2)/(ry**2) <= 1
def smooth(dungeon):
    def neighbor(x, y):
        # Count FLOOR tiles among the eight surrounding cells; neighbors
        # outside the map are ignored.
        val = 0
        wall = dungeon[x][y] == WALL
        for i in range(-1, 2):
            for j in range(-1, 2):
                if (i, j) != (0, 0):
                    try:
                        if dungeon[x+i][y+j] == FLOOR:
                            val += 1
                    except IndexError:
                        pass
        if wall:
            return WALL if val < 5 else FLOOR
        return FLOOR if val > 4 else WALL
newmap = deepcopy(dungeon)
for i in range(len(dungeon)):
for j in range(len(dungeon[0])):
newmap[i][j] = neighbor(i, j)
return newmap
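# smooth() is a single cellular-automaton pass (the classic "4-5 rule": a
# tile becomes FLOOR when five or more of its eight neighbours are FLOOR,
# otherwise WALL). It is typically chained over several passes, e.g.:
#   for _ in range(4):
#       dungeon = smooth(dungeon)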
def path(p1, p2, dungeon):
node = namedtuple("Node", "df dg dh parent node")
openlist = set()
closelist = []
openlist.add(node(0, 0, 0, None, p1))
if debug:
print("PATH: DISTANCE - {}".format(int(distance(p1, p2)*10)))
while openlist:
nodeq = min(sorted(openlist))
openlist.remove(nodeq)
for i in range(-1, 2):
for j in range(-1, 2):
if (i, j) != (0, 0):
neighbor = nodeq.node[0]+i, nodeq.node[1]+j
if neighbor == p2:
closelist.append(nodeq)
return closelist
if dungeon[neighbor[1]][neighbor[0]] in ('.', '+'):
sg = nodeq.dg + int(distance(nodeq.node, neighbor) * 10)
sh = int(distance(neighbor, p2) * 10)
sf = sg + sh
if any(n.node == neighbor and n.df < sf for n in openlist):
pass
elif any(n.node == neighbor and n.df < sf for n in closelist):
pass
else:
openlist.add(node(sf, sg, sh, nodeq.node, neighbor))
closelist.append(nodeq)
return closelist
def decay(dungeon, n=1000):
"""More of a fantasy concept where a pristine dungeon layout has
    experienced years of degeneration along with decay and collapse. This
leads to growth of fauna, broken tunnels and such. Should start with
a well-formed dungeon and then start decay for n turns"""
def point_oob(i, j):
'''Uses size of dungeon to determine out of bounds error'''
return 0 <= i < len(dungeon[0])-1 and 0 <= j < len(dungeon)-1
def point_oob_ext(i, j, xlim, ylim):
'''Same as function of the same name but uses custom bounds'''
return xlim[0] <= i < xlim[1] and ylim[0] <= j < ylim[1]
def cellauto(i, j):
val = 0
# check the value of the cell
doors = set()
space = set()
for ii in range(-1, 2):
for jj in range(-1, 2):
if (i, j) != (i+ii, j+jj):
if dungeon[j+jj][i+ii] == '%':
val += 1
if dungeon[j+jj][i+ii] == '+':
doors.add((i+ii, j+jj))
if dungeon[j+jj][i+ii] == ' ':
space.add((i+ii, j+jj))
# turn the cell into a floor cell if surrounded by 4 walls or more
if (val >= 4 or val <= 1) and point_oob_ext(i+ii, j+jj, (2, X_TEMP-2), (2, Y_TEMP-2)):
# if choice([i in range(0, 2)]):
decayed[j][i] = '.'
floors.append((i, j))
for di, dj in doors:
decayed[dj][di] = '.'
floors.append((di, dj))
            '''
            else:
                decayed[j][i] = '~'
                liquid.append((i, j))
            '''
# make sure any free spaces bordering changed cell turn into wall tiles
# for ii in range(-1, 2):
# for jj in range(-1, 2):
# within = point_oob_ext(i+ii, j+jj, (0, X_TEMP-1), (0, Y_TEMP-1))
# if within and decayed[j+jj][i+ii] == ' ':
# decayed[j+jj][i+ii] = '%'
# walls.append((i+ii, j+jj))
for si, sj in space:
within = point_oob_ext(si, sj, (0, X_TEMP-1), (0, Y_TEMP-1))
if within:
decayed[sj][si] = '%'
walls.append((si, sj))
return True
return False
def cellpath(p1, p2):
# creates a bsf from two points
frontier = set()
frontier.add((0, p1))
camefrom = { p1:None }
costfrom = { p1:0 }
# print(p1, p2)
found = False
for j in range(len(decayed)):
for i in range(len(decayed[0])):
if dungeon[j][i] == '%':
term.puts(i, j, "[c=#ffffff]{}[/c]".format(decayed[j][i]))
elif dungeon[j][i] == '.':
term.puts(i, j, "[c=#806040]{}[/c]".format(decayed[j][i]))
elif dungeon[j][i] == '~':
term.puts(i, j, "[c=#0080C0]{}[/c]".format(decayed[j][i]))
elif dungeon[j][i] == '=':
term.puts(i, j, "[c=#D02020]{}[/c]".format(decayed[j][i]))
elif dungeon[j][i] == ',':
term.puts(i, j, "[c=#80C080]{}[/c]".format(decayed[j][i]))
else:
term.puts(i, j, decayed[j][i])
while frontier:
current = min(sorted(frontier))
frontier.remove(current)
curnode = current[1]
# print(curnode)
i, j = curnode
if curnode == p2:
found = True
break
for ii in range(-1, 2):
for jj in range(-1, 2):
ni, nj = i+ii, j+jj
neighbor
|
yilei0620/3D_Conditional_Gan
|
lib/ops.py
|
Python
|
mit
| 4,533
| 0.015442
|
import theano
import theano.tensor as T
from theano.sandbox.cuda.basic_ops import (as_cuda_ndarray_variable,
host_from_gpu,
gpu_contiguous, HostFromGpu,
gpu_alloc_empty)
from theano.sandbox.cuda.dnn import GpuDnnConvDesc, GpuDnnConv, GpuDnnConvGradI, dnn_conv, dnn_pool
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from theano.tensor.nnet.abstract_conv import conv3d_grad_wrt_inputs
from theano.tensor.nnet import conv3d
from rng import t_rng
t_rng = RandomStreams()
def l2normalize(x, axis=1, e=1e-8, keepdims=True):
return x/l2norm(x, axis=axis, e=e, keepdims=keepdims)
def l2norm(x, axis=1, e=1e-8, keepdims=True):
return T.sqrt(T.sum(T.sqr(x), axis=axis, keepdims=keepdims) + e)
def cosine(x, y):
d = T.dot(x, y.T)
d /= l2norm(x).dimshuffle(0, 'x')
d /= l2norm(y).dimshuffle('x', 0)
return d
def euclidean(x, y, e=1e-8):
xx = T.sqr(T.sqrt((x*x).sum(axis=1) + e))
yy = T.sqr(T.sqrt((y*y).sum(axis=1) + e))
dist = T.dot(x, y.T)
dist *= -2
dist += xx.dimshuffle(0, 'x')
dist += yy.dimshuffle('x', 0)
dist = T.sqrt(dist)
return dist
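# euclidean() uses the expansion ||a - b||^2 = ||a||^2 + ||b||^2 - 2<a, b>,
# so all pairwise distances fall out of a single matrix multiply; the small
# constant e only keeps the square roots away from exact zeros.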
def dropout(X, p=0.):
"""
dropout using activation scaling to avoid test time weight rescaling
"""
if p > 0:
retain_prob = 1 - p
X *= t_rng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX)
X /= retain_prob
return X
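# This is "inverted" dropout: dividing by retain_prob during training keeps
# the expected activation unchanged, which is why no rescaling is needed at
# test time (as the docstring notes).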
def conv_cond_concat(x, y):
"""
concatenate conditioning vector on feature map axis
"""
return T.concatenate([x, y*T.ones((x.shape[0], y.shape[1], x.shape[2], x.shape[3], x.shape[4]))], axis=1)
def batchnorm(X ,g = None, b = None, u=None, s=None, a=1., e=1e-7):
"""
batchnorm with support for not using scale and shift parameters
as well as inference values (u and s) and partial batchnorm (via a)
will detect and use convolutional or fully connected version
"""
if X.ndim == 5:
if u is not None and s is not None:
b_u = u.dimshuffle('x', 0, 'x', 'x', 'x')
b_s = s.dimshuffle('x', 0, 'x', 'x', 'x')
else:
b_u = T.mean(X, axis=[0, 2, 3, 4]).dimshuffle('x', 0, 'x', 'x', 'x')
b_s = T.mean(T.sqr(X - b_u), axis=[0, 2, 3, 4]).dimshuffle('x', 0, 'x', 'x', 'x')
if a != 1:
b_u = (1. - a)*0. + a*b_u
b_s = (1. - a)*1. + a*b_s
X = (X - b_u) / T.sqrt(b_s + e)
if g is not None and b is not None:
X = X*g.dimshuffle('x', 0, 'x', 'x', 'x') + b.dimshuffle('x', 0, 'x', 'x', 'x')
elif X.ndim == 2:
if u is None and s is None:
u = T.mean(X, axis=0)
s = T.mean(T.sqr(X - u), axis=0)
if a != 1:
u = (1. - a)*0. + a*u
s = (1. - a)*1. + a*s
X = (X - u) / T.sqrt(s + e)
if g is not None and b is not None:
X = X*g + b
else:
raise NotImplementedError
return X
def conv(X, w, input_shape = None, filter_shape = None, subsample=(2, 2, 2), border_mode=(1,1,1), conv_mode='conv',output_shape = None):
"""
sets up dummy convolutional forward pass and uses its grad as deconv
currently only tested/working with same padding
input_shape: (batch size, num input feature maps, voxel height, voxel width, voxel depth)
filter_shape: (output channels, input channels, filter height, filter width, filter depth)
"""
if conv_mode == 'conv':
return conv3d(
input = X,
filters = w,
input_shape = input_shape,
filter_shape = filter_shape,
border_mode = border_mode,
subsample = subsample,
filter_flip = True
)
elif conv_mode == 'deconv':
        if output_shape is None:
            input_shape = (None,None,(input_shape[2]-1)*subsample[0] + filter_shape[2] - 2*border_mode[0]
                ,(input_shape[3]-1)*subsample[1] + filter_shape[3] - 2*border_mode[1]
                ,(input_shape[4]-1)*subsample[2] + filter_shape[4] - 2*border_mode[2])
else:
input_shape = output_shape
return conv3d_grad_wrt_inputs(
output_grad = X,
filters = w,
input_shape = input_shape,
filter_shape = filter_shape,
border_mode = border_mode,
subsample = subsample,
)
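# For the 'deconv' branch, the inferred spatial size per axis follows the
# standard transposed-convolution relation
#     out = (in - 1) * stride + kernel - 2 * pad
# which is exactly what the input_shape computation above implements.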
|
henrykironde/deletedret
|
scripts/vertnet_amphibians.py
|
Python
|
mit
| 10,575
| 0.000567
|
# -*- coding: latin-1 -*-
#retriever
"""Retriever script for direct download of vertnet-amphibians data"""
import os
from builtins import str
from pkg_resources import parse_version
from retriever.lib.models import Table
from retriever.lib.templates import Script
try:
from retriever.lib.defaults import VERSION
except ImportError:
from retriever import VERSION
class main(Script):
def __init__(self, **kwargs):
Script.__init__(self, **kwargs)
self.title = "Vertnet Amphibians"
self.name = "vertnet-amphibians"
self.retriever_minimum_version = '2.0.dev'
self.version = '1.1.2'
self.ref = "http://vertnet.org/
|
resources/datatoolscode.html"
self.urls = {
            'amphibians': 'https://de.iplantcollaborative.org/anon-files//iplant/home/shared/commons_repo/curated/Vertnet_Amphibia_Sep2016/VertNet_Amphibia_Sept2016.zip'
}
self.citation = "Bloom, D., Wieczorek J., Russell, L. (2016). VertNet_Amphibia_Sept. 2016. CyVerse Data Commons. http://datacommons.cyverse.org/browse/iplant/home/shared/commons_repo/curated/VertNet_Amphibia_Sep2016"
self.description = "Compilation of digitized museum records of amphibians including locations, dates of collection, and some trait data."
self.keywords = ['amphibians']
if parse_version(VERSION) <= parse_version("2.0.0"):
self.shortname = self.name
self.name = self.title
self.tags = self.keywords
def download(self, engine=None, debug=False):
Script.download(self, engine, debug)
engine = self.engine
filename = 'vertnet_latest_amphibians.csv'
tablename = 'amphibians'
table = Table(str(tablename), delimiter=',')
table.columns = [
("record_id", ("pk-auto",)),
("beginrecord", ("char",)),
("icode", ("char",)),
("title", ("char",)),
("citation", ("char",)),
("contact", ("char",)),
("email", ("char",)),
("emlrights", ("char",)),
("gbifdatasetid", ("char",)),
("gbifpublisherid", ("char",)),
("doi", ("char",)),
("migrator", ("char",)),
("networks", ("char",)),
("orgcountry", ("char",)),
("orgname", ("char",)),
("orgstateprovince", ("char",)),
("pubdate", ("char",)),
("source_url", ("char",)),
("iptrecordid", ("char",)),
("associatedmedia", ("char",)),
("associatedoccurrences", ("char",)),
("associatedorganisms", ("char",)),
("associatedreferences", ("char",)),
("associatedsequences", ("char",)),
("associatedtaxa", ("char",)),
("bed", ("char",)),
("behavior", ("char",)),
("catalognumber", ("char",)),
("continent", ("char",)),
("coordinateprecision", ("char",)),
("coordinateuncertaintyinmeters", ("char",)),
("country", ("char",)),
("countrycode", ("char",)),
("county", ("char",)),
("dateidentified", ("char",)),
("day", ("char",)),
("decimallatitude", ("char",)),
("decimallongitude", ("char",)),
("disposition", ("char",)),
("earliestageorloweststage", ("char",)),
("earliesteonorlowesteonothem", ("char",)),
("earliestepochorlowestseries", ("char",)),
("earliesteraorlowesterathem", ("char",)),
("earliestperiodorlowestsystem", ("char",)),
("enddayofyear", ("char",)),
("establishmentmeans", ("char",)),
("eventdate", ("char",)),
("eventid", ("char",)),
("eventremarks", ("char",)),
("eventtime", ("char",)),
("fieldnotes", ("char",)),
("fieldnumber", ("char",)),
("footprintspatialfit", ("char",)),
("footprintsrs", ("char",)),
("footprintwkt", ("char",)),
("formation", ("char",)),
("geodeticdatum", ("char",)),
("geologicalcontextid", ("char",)),
("georeferencedby", ("char",)),
("georeferenceddate", ("char",)),
("georeferenceprotocol", ("char",)),
("georeferenceremarks", ("char",)),
("georeferencesources", ("char",)),
("georeferenceverificationstatus", ("char",)),
("group", ("char",)),
("habitat", ("char",)),
("highergeography", ("char",)),
("highergeographyid", ("char",)),
("highestbiostratigraphiczone", ("char",)),
("identificationid", ("char",)),
("identificationqualifier", ("char",)),
("identificationreferences", ("char",)),
("identificationremarks", ("char",)),
("identificationverificationstatus", ("char",)),
("identifiedby", ("char",)),
("individualcount", ("char",)),
("island", ("char",)),
("islandgroup", ("char",)),
("latestageorhigheststage", ("char",)),
("latesteonorhighesteonothem", ("char",)),
("latestepochorhighestseries", ("char",)),
("latesteraorhighesterathem", ("char",)),
("latestperiodorhighestsystem", ("char",)),
("lifestage", ("char",)),
("lithostratigraphicterms", ("char",)),
("locality", ("char",)),
("locationaccordingto", ("char",)),
("locationid", ("char",)),
("locationremarks", ("char",)),
("lowestbiostratigraphiczone", ("char",)),
("materialsampleid", ("char",)),
("maximumdepthinmeters", ("char",)),
("maximumdistanceabovesurfaceinmeters", ("char",)),
("maximumelevationinmeters", ("char",)),
("member", ("char",)),
("minimumdepthinmeters", ("char",)),
("minimumdistanceabovesurfaceinmeters", ("char",)),
("minimumelevationinmeters", ("char",)),
("month", ("char",)),
("municipality", ("char",)),
("occurrenceid", ("char",)),
("occurrenceremarks", ("char",)),
("occurrencestatus", ("char",)),
("organismid", ("char",)),
("organismname", ("char",)),
("organismremarks", ("char",)),
("organismscope", ("char",)),
("othercatalognumbers", ("char",)),
("pointradiusspatialfit", ("char",)),
("preparations", ("char",)),
("previousidentifications", ("char",)),
("recordedby", ("char",)),
("recordnumber", ("char",)),
("reproductivecondition", ("char",)),
("samplingeffort", ("char",)),
("samplingprotocol", ("char",)),
("sex", ("char",)),
("startdayofyear", ("char",)),
("stateprovince", ("char",)),
("typestatus", ("char",)),
("verbatimcoordinates", ("char",)),
("verbatimcoordinatesystem", ("char",)),
("verbatimdepth", ("char",)),
("verbatimelevation", ("char",)),
("verbatimeventdate", ("char",)),
("verbatimlatitude", ("char",)),
("verbatimlocality", ("char",)),
("verbatimlongitude", ("char",)),
("verbatimsrs", ("char",)),
("waterbody", ("char",)),
("year", ("char",)),
("dctype", ("char",)),
("modified", ("char",)),
("language", ("char",)),
("license", ("char",)),
("rightsholder", ("char",)),
("accessrights", ("char",)),
("bibliographiccitation", ("char",)),
("dc_references", ("char",)),
("institutionid", ("char",)),
("collectionid", ("char",)),
("datasetid", ("char",)),
("institutioncode", ("char",)),
("collectioncode", ("char",)),
("datasetname", ("char",)),
("ownerinstitutioncode", ("char",)),
|
TakayukiSakai/tensorflow
|
tensorflow/contrib/learn/python/learn/estimators/tensor_signature_test.py
|
Python
|
apache-2.0
| 5,029
| 0.000994
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for learn.estimators.tensor_signature."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.estimators import tensor_signature
class TensorSignatureTest(tf.test.TestCase):
def testTensorSignatureCompatible(self):
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
placeholder_b = tf.placeholder(name='another',
shape=[256, 100],
dtype=tf.int32)
placeholder_c = tf.placeholder(name='mismatch',
shape=[256, 100],
dtype=tf.float32)
placeholder_d = tf.placeholder(name='mismatch',
shape=[128, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
self.assertTrue(tensor_signature.tensors_compatible(placeholder_a,
signatures))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_b,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_c,
signatures))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_d,
signatures))
inputs = {'a': placeholder_a}
signatures = tensor_signature.create_signatures(inputs)
self.assertTrue(tensor_signature.tensors_compatible(inputs, signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_a,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_b,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(
{'b': placeholder_b}, signatures))
self.assertTrue(tensor_signature.tensors_compatible(
{'a': placeholder_b,
'c': placeholder_c}, signatures))
self.assertFalse(tensor_signature.tensors_compatible(
{'a': placeholder_c}, signatures))
def testSparseTensorCompatible(self):
t = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4])
signatures = tensor_signature.create_signatures(t)
self.assertTrue(tensor_signature.tensors_compatible(t, signatures))
def testTensorSignaturePlaceholders(self):
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
placeholder_out = tensor_signature.create_placeholders_from_signatures(
signatures)
self.assertEqual(placeholder_out.dtype, placeholder_a.dtype)
self.assertTrue(placeholder_out.get_shape().is_compatible_with(
placeholder_a.get_shape()))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_out,
signatures))
inputs = {'a': placeholder_a}
signatures = tensor_signature.create_signatures(inputs)
placeholders_out = tensor_signature.create_placeholders_from_signatures(
signatures)
self.assertEqual(placeholders_out['a'].dtype, placeholder_a.dtype)
self.assertTrue(
placeholders_out['a'].get_shape().is_compatible_with(
placeholder_a.get_shape()))
self.assertTrue(tensor_signature.tensors_compatible(placeholders_out,
signatures))
def testSparseTensorSignaturePlaceholders(self):
tensor = tf.SparseTensor(values=[1.0, 2.0], indices=[[0, 2], [0, 3]],
shape=[5, 5])
signature = tensor_signature.create_signatures(tensor)
placeholder = tensor_signature.create_placeholders_from_signatures(
signature)
self.assertTrue(isinstance(placeholder, tf.SparseTensor))
self.assertEqual(placeholder.values.dtype, tensor.values.dtype)
if __name__ == '__main__':
tf.test.main()
|
Juniper/nova
|
nova/tests/unit/virt/xenapi/test_vm_utils.py
|
Python
|
apache-2.0
| 98,282
| 0.000824
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
from eventlet import greenthread
import mock
from mox3 import mox
import os_xenapi
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import fixture as config_fixture
from oslo_utils import fixture as utils_fixture
from oslo_utils import timeutils
from oslo_utils import units
from oslo_utils import uuidutils
import six
from nova.compute import flavors
from nova.compute import power_state
import nova.conf
from nova import context
from nova import exception
from nova import objects
from nova.objects import fields as obj_fields
from nova import test
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_flavor
from nova.tests.unit.virt.xenapi import stubs
from nova.tests import uuidsentinel as uuids
from nova import utils
from nova.virt import hardware
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake
from nova.virt.xenapi import vm_utils
CONF = nova.conf.CONF
XENSM_TYPE = 'xensm'
ISCSI_TYPE = 'iscsi'
def get_fake_connection_data(sr_type):
fakes = {XENSM_TYPE: {'sr_uuid': 'falseSR',
'name_label': 'fake_storage',
'name_description': 'test purposes',
'server': 'myserver',
'serverpath': '/local/scratch/myname',
'sr_type': 'nfs',
'introduce_sr_keys': ['server',
'serverpath',
'sr_type'],
'vdi_uuid': 'falseVDI'},
ISCSI_TYPE: {'volume_id': 'fake_volume_id',
'target_lun': 1,
'target_iqn': 'fake_iqn:volume-fake_volume_id',
'target_portal': u'localhost:3260',
'target_discovered': False}, }
return fakes[sr_type]
@contextlib.contextmanager
def contextified(result):
yield result
def _fake_noop(*args, **kwargs):
return
class VMUtilsTestBase(stubs.XenAPITestBaseNoDB):
pass
class LookupTestCase(VMUtilsTestBase):
def setUp(self):
super(LookupTestCase, self).setUp()
self.session = self.mox.CreateMockAnything('Fake Session')
self.name_label = 'my_vm'
def _do_mock(self, result):
self.session.call_xenapi(
"VM.get_by_name_label", self.name_label).AndReturn(result)
self.mox.ReplayAll()
def test_normal(self):
self._do_mock(['x'])
result = vm_utils.lookup(self.session, self.name_label)
self.assertEqual('x', result)
def test_no_result(self):
self._do_mock([])
result = vm_utils.lookup(self.session, self.name_label)
self.assertIsNone(result)
def test_too_many(self):
self._do_mock(['a', 'b'])
self.assertRaises(exception.InstanceExists,
vm_utils.lookup,
self.session, self.name_label)
def test_rescue_none(self):
self.session.call_xenapi(
"VM.get_by_name_label", self.name_label + '-rescue').AndReturn([])
self._do_mock(['x'])
result = vm_utils.lookup(self.session, self.name_label,
check_rescue=True)
self.assertEqual('x', result)
def test_rescue_found(self):
self.session.call_xenapi(
"VM.get_by_name_label",
self.name_label + '-rescue').AndReturn(['y'])
self.mox.ReplayAll()
result = vm_utils.lookup(self.session, self.name_label,
check_rescue=True)
self.assertEqual('y', result)
def test_rescue_too_many(self):
self.session.call_xenapi(
"VM.get_by_name_label",
self.name_label + '-rescue').AndReturn(['a', 'b', 'c'])
self.mox.ReplayAll()
self.assertRaises(exception.InstanceExists,
vm_utils.lookup,
self.session, self.name_label,
check_rescue=True)
class GenerateConfigDriveTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, 'safe_find_sr')
@mock.patch.object(vm_utils, "create_vdi", return_value='vdi_ref')
@mock.patch.object(vm_utils.instance_metadata, "InstanceMetadata")
@mock.patch.object(vm_utils.configdrive, 'ConfigDriveBuilder')
@mock.patch.object(vm_utils.utils, 'execute')
@mock.patch.object(vm_utils.volume_utils, 'stream_to_vdi')
@mock.patch.object(vm_utils.os.path, 'getsize', return_value=100)
@mock.patch.object(vm_utils, 'create_vbd', return_value='vbd_ref')
@mock.patch.object(vm_utils.utils, 'tempdir')
def test_no_admin_pass(self, mock_tmpdir, mock_create_vbd, mock_size,
mock_stream, mock_execute, mock_builder,
mock_instance_metadata, mock_create_vdi,
mock_find_sr):
mock_tmpdir.return_value.__enter__.return_value = '/mock'
with mock.patch.object(six.moves.builtins, 'open') as mock_open:
mock_open.return_value.__enter__.return_value = 'open_fd'
vm_utils.generate_configdrive('session', 'context', 'instance',
'vm_ref', 'userdevice',
'network_info')
mock_size.assert_called_with('/mock/configdrive.vhd')
mock_open.assert_called_with('/mock/configdrive.vhd')
mock_execute.assert_called_with('qemu-img', 'convert', '-Ovpc',
'/mock/configdrive',
'/mock/configdrive.vhd')
mock_instance_metadata.assert_called_with(
'instance', content=None, extra_md={},
network_info='network_info', request_context='context')
mock_stream.assert_called_with('session', 'instance', 'vhd',
'open_fd', 100, 'vdi_ref')
@mock.patch.object(vm_utils, "destroy_vdi")
@mock.patch.object(vm_utils, 'safe_find_sr')
@mock.patch.object(vm_utils, "create_vdi", return_value='vdi_ref')
@mock.patch.object(vm_utils.instance_metadata, "InstanceMetadata",
side_effect=test.TestingException)
def test_vdi_cleaned_up(self, mock_instance_metadata, mock_create,
mock_find_sr, mock_destroy):
self.assertRaises(test.TestingException, vm_utils.generate_configdrive,
'session', None, None, None, None, None)
mock_destroy.assert_called_once_with('session', 'vdi_ref')
class XenAPIGetUUID(VMUtilsTestBase):
def test_get_this_vm_uuid_new_kernel(self):
self.mox.StubOutWithMock(vm_utils, '_get_sys_hypervisor_uuid')
vm_utils._get_sys_hypervisor_uuid().AndReturn(
'2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f')
self.mox.ReplayAll()
self.assertEqual('2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f',
vm_utils.get_this_vm_uuid(None))
self.mox.VerifyAll()
def test_get_this_vm_uuid_old_kernel_reboot(self):
self.mox.StubOutWithMock(vm_utils, '_get_sys_hypervisor_uuid')
self.mox.StubOutWithMock(utils, 'execute')
vm_utils._get_sys_hypervisor_uuid().AndRaise(
IOError(13, 'Permission denied'))
utils.execute('xenstore-read', 'domid', run_as_root=True).AndRetur
|
aaronn/django-rest-framework-passwordless
|
tests/test_verification.py
|
Python
|
mit
| 8,084
| 0.002474
|
from rest_framework import status
from rest_framework.authtoken.models import Token
from django.utils.translation import ugettext_lazy as _
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from drfpasswordless.settings import api_settings, DEFAULTS
from drfpasswordless.utils import CallbackToken
User = get_user_model()
class AliasEmailVerificationTests(APITestCase):
def setUp(self):
api_settings.PASSWORDLESS_AUTH_TYPES = ['EMAIL']
api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = 'noreply@example.com'
api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED = True
self.url = reverse('drfpasswordless:auth_email')
self.callback_url = reverse('drfpasswordless:auth_token')
self.verify_url = reverse('drfpasswordless:verify_email')
self.callback_verify = reverse('drfpasswordless:verify_token')
self.email_field_name = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
self.email_verified_field_name = api_settings.PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME
def test_email_unverified_to_verified_and_back(self):
email = 'aaron@example.com'
email2 = 'aaron2@example.com'
data = {'email': email}
# create a new user
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
user = User.objects.get(**{self.email_field_name: email})
self.assertNotEqual(user, None)
self.assertEqual(getattr(user, self.email_verified_field_name), False)
# Verify a token exists for the user, sign in and check verified again
callback = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_AUTH, is_active=True).first()
callback_data = {'email': email, 'token': callback}
callback_response = self.client.post(self.callback_url, callback_data)
self.assertEqual(callback_response.status_code, status.HTTP_200_OK)
# Verify we got the token, then check and see that email_verified is now verified
token = callback_response.data['token']
self.assertEqual(token, Token.objects.get(user=user).key)
# Refresh and see that the endpoint is now verified as True
user.refresh_from_db()
self.assertEqual(getattr(user, self.email_verified_field_name), True)
# Change email, should result in flag changing to false
setattr(user, self.email_field_name, email2)
user.save()
user.refresh_from_db()
self.assertEqual(getattr(user, self.email_verified_field_name), False)
# Verify
self.client.force_authenticate(user)
verify_response = self.client.post(self.verify_url)
self.assertEqual(verify_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.email_field_name: email2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.email_field_name), None)
self.assertEqual(getattr(user, self.email_verified_field_name), False)
# Post callback token back.
verify_token = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_VERIFY, is_active=True).first()
self.assertNotEqual(verify_token, None)
verify_callback_response = self.client.post(self.callback_verify, {'email': email2, 'token': verify_token.key})
self.assertEqual(verify_callback_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.email_field_name: email2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.email_field_name), None)
self.assertEqual(getattr(user, self.email_verified_field_name), True)
def tearDown(self):
api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = DEFAULTS['PASSWORDLESS_EMAIL_NOREPLY_ADDRESS']
        api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED = DEFAULTS['PASSWORDLESS_USER_MARK_EMAIL_VERIFIED']
class AliasMobileVerificationTests(APITestCase):
def setUp(self):
api_settings.PASSWORDLESS_TEST_SUPPRESSION = True
api_settings.PASSWORDLESS_AUTH_TYPES = ['MOBILE']
api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = '+15550000000'
api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED = True
self.url = reverse('drfpasswordless:auth_mobile')
self.callback_url = reverse('drfpasswordless:auth_token')
self.verify_url = reverse('drfpasswordless:verify_mobile')
self.callback_verify = reverse('drfpasswordless:verify_token')
self.mobile_field_name = api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME
self.mobile_verified_field_name = api_settings.PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME
def test_mobile_unverified_to_verified_and_back(self):
mobile = '+15551234567'
mobile2 = '+15557654321'
data = {'mobile': mobile}
# create a new user
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
user = User.objects.get(**{self.mobile_field_name: mobile})
self.assertNotEqual(user, None)
self.assertEqual(getattr(user, self.mobile_verified_field_name), False)
# Verify a token exists for the user, sign in and check verified again
callback = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_AUTH, is_active=True).first()
        callback_data = {'mobile': mobile, 'token': callback}
callback_response = self.client.post(self.callback_url, callback_data)
self.assertEqual(callback_response.status_code, status.HTTP_200_OK)
        # Verify we got the token, then check and see that mobile_verified is now verified
token = callback_response.data['token']
self.assertEqual(token, Token.objects.get(user=user).key)
# Refresh and see that the endpoint is now verified as True
user.refresh_from_db()
self.assertEqual(getattr(user, self.mobile_verified_field_name), True)
# Change mobile, should result in flag changing to false
        setattr(user, self.mobile_field_name, mobile2)
user.save()
user.refresh_from_db()
self.assertEqual(getattr(user, self.mobile_verified_field_name), False)
# Verify
self.client.force_authenticate(user)
verify_response = self.client.post(self.verify_url)
self.assertEqual(verify_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.mobile_field_name: mobile2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.mobile_field_name), None)
self.assertEqual(getattr(user, self.mobile_verified_field_name), False)
# Post callback token back.
verify_token = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_VERIFY, is_active=True).first()
self.assertNotEqual(verify_token, None)
verify_callback_response = self.client.post(self.callback_verify, {'mobile': mobile2, 'token': verify_token.key})
self.assertEqual(verify_callback_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.mobile_field_name: mobile2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.mobile_field_name), None)
self.assertEqual(getattr(user, self.mobile_verified_field_name), True)
def tearDown(self):
api_settings.PASSWORDLESS_TEST_SUPPRESSION = DEFAULTS['PASSWORDLESS_TEST_SUPPRESSION']
api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = DEFAULTS['PASSWORDLESS_MOBILE_NOREPLY_NUMBER']
api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED = DEFAULTS['PASSWORDLESS_USER_MARK_MOBILE_VERIFIED']
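# The flow exercised by both test classes: POST the alias (email or mobile)
# to request a CallbackToken, POST that token back to the auth endpoint to
# obtain a DRF Token, and, once the alias changes, repeat the same dance
# against the verify endpoints with a TOKEN_TYPE_VERIFY token.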
|
bonzanini/luigi-slack
|
tests/test_api.py
|
Python
|
mit
| 5,897
| 0.001696
|
import sys
import unittest
from unittest import mock
import luigi
import luigi_slack.api
from luigi_slack.api import SlackBot
from luigi_slack.api import notify
from luigi_slack.events import *
class TestSlackBot(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch('luigi_slack.api.SlackAPI')
self.mock_SlackAPI = self.patcher.start()
self.token = 'dummy-token'
self.channels = ['channel1', 'channel2']
self.bot = SlackBot(self.token, channels=self.channels)
def tearDown(self):
self.patcher.stop()
def test_send_notification(self):
"""Test SlackAPI is called by send_notification()"""
self.bot.send_notification()
def test_events_not_list(self):
"""Raise a ValueError if events is not a list"""
with self.assertRaises(ValueError):
bot = SlackBot(self.token, events='FOOBAR')
bot = SlackBot(self.token, events=123)
bot = SlackBot(self.token, events=None)
bot = SlackBot(self.token, events=True)
def test_set_handlers_valid(self):
"""Test set_handlers() for valid events"""
bot = SlackBot(self.token, events=[SUCCESS, FAILURE])
bot.set_handlers()
def test_set_handlers_invalid(self):
"""Test set_handlers for invalid events"""
bot = SlackBot(self.token, events=['THIS-IS-NOT-A-VALID-EVENT'])
        with self.assertRaises(ValueError):
bot.set_handlers()
class TestEvents(unittest.TestCase):
def test_event_label(self):
"""Test event labels for output"""
fixtures = {
'SUCCESS': 'Success',
'FAILURE': 'Failure',
'MISSING': 'Missing',
}
for event, expected in fixtures.items():
self.assertEqual(event_label(event), expected)
class TestHandlers(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch('luigi_slack.api.SlackAPI')
self.mock_SlackAPI = self.patcher.start()
self.token = 'dummy-token'
self.channels = ['channel1']
def tearDown(self):
self.patcher.stop()
def test_success(self):
"""Test successful task if queued"""
bot = SlackBot(self.token, events=[SUCCESS], channels=self.channels)
bot.set_handlers()
task = luigi.Task()
self.assertEqual(len(bot.event_queue.get(SUCCESS, [])), 0)
task.trigger_event(luigi.event.Event.SUCCESS, task)
self.assertEqual(len(bot.event_queue.get(SUCCESS)), 1)
def test_success_empties_queue(self):
"""Test success event empties the failure queue"""
bot = SlackBot(self.token, events=[SUCCESS, FAILURE], channels=self.channels)
bot.set_handlers()
task1 = luigi.Task() # task1 and task2 have the same task_id
task2 = luigi.Task()
self.assertEqual(len(bot.event_queue.get(FAILURE, [])), 0)
task2.trigger_event(luigi.event.Event.FAILURE, task2, Exception())
self.assertEqual(len(bot.event_queue.get(FAILURE)), 1)
task1.trigger_event(luigi.event.Event.SUCCESS, task1)
self.assertEqual(len(bot.event_queue.get(FAILURE)), 0)
def test_different_task_doesnt_empty_queue(self):
"""Test a successful task doesn't empty queue with different task"""
class CustomTask(luigi.Task):
pass
bot = SlackBot(self.token, events=[SUCCESS, FAILURE], channels=self.channels)
bot.set_handlers()
task1 = luigi.Task() # task1 and task2 have different task_id
task2 = CustomTask()
self.assertEqual(len(bot.event_queue.get(FAILURE, [])), 0)
task2.trigger_event(luigi.event.Event.FAILURE, task2, Exception())
self.assertEqual(len(bot.event_queue.get(FAILURE)), 1)
task1.trigger_event(luigi.event.Event.SUCCESS, task1)
self.assertEqual(len(bot.event_queue.get(FAILURE)), 1)
def test_start(self):
"""Test start event adds task in queue"""
bot = SlackBot(self.token, events=[START], channels=self.channels)
bot.set_handlers()
task = luigi.Task()
self.assertEqual(len(bot.event_queue.get(START, [])), 0)
task.trigger_event(luigi.event.Event.START, task)
self.assertEqual(len(bot.event_queue.get(START)), 1)
def test_failure(self):
"""Test failure event adds task in queue"""
bot = SlackBot(self.token, events=[FAILURE], channels=self.channels)
bot.set_handlers()
task = luigi.Task()
self.assertEqual(len(bot.event_queue.get(FAILURE, [])), 0)
task.trigger_event(luigi.event.Event.FAILURE, task, Exception())
self.assertEqual(len(bot.event_queue.get(FAILURE)), 1)
def test_missing(self):
"""Test missing dependency event adds task in queue"""
bot = SlackBot(self.token, events=[MISSING], channels=self.channels)
bot.set_handlers()
task = luigi.Task()
self.assertEqual(len(bot.event_queue.get(MISSING, [])), 0)
task.trigger_event(luigi.event.Event.DEPENDENCY_MISSING, task)
self.assertEqual(len(bot.event_queue.get(MISSING)), 1)
def test_event_not_implemented(self):
"""Test processing time event is not implemented yet"""
bot = SlackBot(self.token, events=[PROCESSING_TIME], channels=self.channels)
bot.set_handlers()
task = luigi.Task()
self.assertRaises(NotImplementedError, task.trigger_event(luigi.event.Event.PROCESSING_TIME, task))
class MockSlackBot(object):
def set_handlers(self):
return True
def send_notification(self):
return True
class TestNotify(unittest.TestCase):
def test_notify(self):
"""Test notify() performs pre/post operations"""
slacker = MockSlackBot()
some_test = False
with notify(slacker):
some_test = True
self.assertTrue(some_test)
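# For context, typical production wiring looks roughly like this (a sketch;
# the token, channel, and MyTask names are placeholders):
#
#   slacker = SlackBot('xoxb-your-token', events=[SUCCESS, FAILURE],
#                      channels=['#pipelines'])
#   with notify(slacker):
#       luigi.run(main_task_cls=MyTask)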
|
Relrin/aiorest-ws
|
examples/auth_token/settings.py
|
Python
|
bsd-3-clause
| 187
| 0
|
# -*- coding: utf-8 -*-
# store User and Token tables in the memory
DATABASES = {
'default': {
'backend': 'aiorest_ws.db.backends.sqlite3',
        'name': ':memory:'
}
}
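# SQLite treats the special name ':memory:' as a throwaway in-memory database
# that lives only for the lifetime of the process, which suits an example app.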
|
Cadasta/django-jsonattrs
|
tests/test_mixins.py
|
Python
|
agpl-3.0
| 6,431
| 0
|
from django.test import TestCase
from django.views.generic import TemplateView
from django.contrib.contenttypes.models import ContentType
from jsonattrs import models, mixins
from . import factories
class XLangLabelsTest(TestCase):
def test_dict(self):
res = mixins.template_xlang_labels({'en': 'Field 1', 'de': 'Feld 1'})
assert 'data-label-en="Field 1"' in res
assert 'data-label-de="Feld 1"' in res
def test_string(self):
assert mixins.template_xlang_labels('Field 1') == ''
def test_none(self):
assert mixins.template_xlang_labels(None) == ''
class JsonAttrsView(mixins.JsonAttrsMixin, TemplateView):
attributes_field = 'attrs'
class JsonAttrsMixinTest(TestCase):
def test_get_context(self):
models.create_attribute_types()
org = factories.OrganizationFactory.create()
project = factories.ProjectFactory.create(organization=org)
content_type = ContentType.objects.get(
app_label='tests', model='party')
schema1 = models.Schema.objects.create(
content_type=content_type,
selectors=(org.id, project.id))
models.Attribute.objects.create(
schema=schema1,
name='field_1',
long_name='Field 1',
attr_type=models.AttributeType.objects.get(name='text'),
index=0
)
models.Attribute.objects.create(
schema=schema1,
name='field_2',
long_name='Field 2',
attr_type=models.AttributeType.objects.get(name='text'),
index=1
)
models.Attribute.objects.create(
schema=schema1,
name='field_3',
long_name='Field 3',
attr_type=models.AttributeType.objects.get(name='select_multiple'),
choices=['one', 'two', 'three'],
choice_labels=['Choice 1', 'Choice 2', 'Choice 3'],
index=2,
)
models.Attribute.objects.create(
schema=schema1,
name='field_4',
long_name='Field 4',
attr_type=models.AttributeType.objects.get(name='select_one'),
choices=['one', 'two', 'three'],
choice_labels=['Choice 1', 'Choice 2', 'Choice 3'],
index=3,
)
party = factories.PartyFactory.create(
project=project,
attrs={'field_1': 'Some value',
                   'field_3': ['one', 'three'],
'field_4': 'two'}
)
view = JsonAttrsView()
view.object = party
context = view.get_context_data()
assert len(context['attrs']) == 4
assert context['attrs'][0] == ('Field 1', 'Some value', '', '')
assert context['attrs'][1] == ('Field 2', '—', '', '')
        assert context['attrs'][2] == ('Field 3', 'Choice 1, Choice 3', '', '')
assert context['attrs'][3] == ('Field 4', 'Choice 2', '', '')
def test_get_context_xlang(self):
models.create_attribute_types()
org = factories.OrganizationFactory.create()
project = factories.ProjectFactory.create(organization=org)
content_type = ContentType.objects.get(
app_label='tests', model='party')
schema1 = models.Schema.objects.create(
content_type=content_type,
selectors=(org.id, project.id),
default_language='en')
models.Attribute.objects.create(
schema=schema1,
name='field_1',
long_name={'en': 'Field 1', 'de': 'Feld 1'},
attr_type=models.AttributeType.objects.get(name='text'),
index=0
)
models.Attribute.objects.create(
schema=schema1,
name='field_2',
long_name={'en': 'Field 2', 'de': 'Feld 2'},
attr_type=models.AttributeType.objects.get(name='text'),
index=1
)
models.Attribute.objects.create(
schema=schema1,
name='field_3',
long_name={'en': 'Field 3', 'de': 'Feld 3'},
attr_type=models.AttributeType.objects.get(name='select_multiple'),
choices=['one', 'two', 'three'],
choice_labels=[{'en': 'Choice 1', 'de': 'Wahl 1'},
{'en': 'Choice 2', 'de': 'Wahl 2'},
{'en': 'Choice 3', 'de': 'Wahl 3'}],
index=2,
)
models.Attribute.objects.create(
schema=schema1,
name='field_4',
long_name={'en': 'Field 4', 'de': 'Feld 4'},
attr_type=models.AttributeType.objects.get(name='select_one'),
choices=['one', 'two', 'three'],
choice_labels=[{'en': 'Choice 1', 'de': 'Wahl 1'},
{'en': 'Choice 2', 'de': 'Wahl 2'},
{'en': 'Choice 3', 'de': 'Wahl 3'}],
index=3,
)
party = factories.PartyFactory.create(
project=project,
attrs={'field_1': 'Some value',
'field_3': ['one', 'three'],
'field_4': 'two'}
)
view = JsonAttrsView()
view.object = party
context = view.get_context_data()
assert len(context['attrs']) == 4
field_1 = context['attrs'][0]
assert field_1[0] == 'Field 1'
assert field_1[1] == 'Some value'
assert 'data-label-en="Field 1"' in field_1[2]
assert 'data-label-de="Feld 1"' in field_1[2]
field_2 = context['attrs'][1]
assert field_2[0] == 'Field 2'
assert field_2[1] == '—'
assert 'data-label-en="Field 2"' in field_2[2]
assert 'data-label-de="Feld 2"' in field_2[2]
field_3 = context['attrs'][2]
assert field_3[0] == 'Field 3'
assert field_3[1] == 'Choice 1, Choice 3'
assert 'data-label-en="Field 3"' in field_3[2]
assert 'data-label-de="Feld 3"' in field_3[2]
assert 'data-label-en="Choice 1, Choice 3"' in field_3[3]
assert 'data-label-de="Wahl 1, Wahl 3"' in field_3[3]
field_4 = context['attrs'][3]
assert field_4[0] == 'Field 4'
assert field_4[1] == 'Choice 2'
assert 'data-label-en="Field 4"' in field_4[2]
assert 'data-label-de="Feld 4"' in field_4[2]
assert 'data-label-en="Choice 2"' in field_4[3]
assert 'data-label-de="Wahl 2"' in field_4[3]
|
dakiri/splunk-app-twitter
|
twitter2/bin/twython/streaming/types.py
|
Python
|
apache-2.0
| 2,822
| 0.002126
|
# -*- coding: utf-8 -*-
"""
twython.streaming.types
~~~~~~~~~~~~~~~~~~~~~~~
This module contains classes and methods for :class:`TwythonStreamer` to use.
"""
class TwythonStreamerTypes(object):
"""Class for different stream endpoints
Not all streaming endpoints have nested endpoints.
User Streams and Site Streams are single streams with no nested endpoints
Status Streams include filter, sample and firehose endpoints
"""
def __init__(self, streamer):
self.streamer = streamer
self.statuses = TwythonStreamerTypesStatuses(streamer)
def user(self, **params):
"""Stream user
Accepted params found at:
https://dev.twitter.com/docs/api/1.1/get/user
"""
url = 'https://userstream.twitter.com/%s/user.json' \
% self.streamer.api_version
self.streamer._request(url, params=params)
def site(self, **params):
"""Stream site
Accepted params found at:
https://dev.twitter.com/docs/api/1.1/get/site
"""
url = 'https://sitestream.twitter.com/%s/site.json' \
% self.streamer.api_version
self.streamer._request(url, params=params)
class TwythonStreamerTypesStatuses(object):
"""Class for different statuses endpoints
Available so TwythonStreamer.statuses.filter() is available.
Just a bit cleaner than TwythonStreamer.statuses_filter(),
statuses_sample(), etc. all being single methods in TwythonStreamer
"""
def __init__(self, streamer):
self.streamer = streamer
    def filter(self, **params):
"""Stream statuses/filter
        :param \*\*params: Parameters to send with your stream request
Accepted params found at:
        https://dev.twitter.com/docs/api/1.1/post/statuses/filter
"""
url = 'https://stream.twitter.com/%s/statuses/filter.json' \
% self.streamer.api_version
self.streamer._request(url, 'POST', params=params)
def sample(self, **params):
"""Stream statuses/sample
        :param \*\*params: Parameters to send with your stream request
Accepted params found at:
https://dev.twitter.com/docs/api/1.1/get/statuses/sample
"""
url = 'https://stream.twitter.com/%s/statuses/sample.json' \
% self.streamer.api_version
self.streamer._request(url, params=params)
def firehose(self, **params):
"""Stream statuses/firehose
        :param \*\*params: Parameters to send with your stream request
Accepted params found at:
https://dev.twitter.com/docs/api/1.1/get/statuses/firehose
"""
url = 'https://stream.twitter.com/%s/statuses/firehose.json' \
% self.streamer.api_version
self.streamer._request(url, params=params)
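# Typical use (a sketch; the credentials are placeholders and MyStreamer is a
# hypothetical subclass):
#
#   class MyStreamer(TwythonStreamer):
#       def on_success(self, data):
#           print(data)
#
#   stream = MyStreamer(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
#   stream.statuses.filter(track='python')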
|
chrisortman/CIS-121
|
k0458928/assignment04.py
|
Python
|
mit
| 914
| 0.014223
|
#This line asks the user to input an integer that will be recorded as my age
my_age = input("Enter your age:")
#This line asks the user to input an integer that will be recorded as days_in_a_year
days_in_a_year = input("How many days are in a ye
|
ar?")
#This line states how many hours are in a day
hours_in_a_day = 24
#This line tells you how many seconds you've been alive based on the recorded integers
print "how many seconds have i been alive?", my_age * days_in_a_year * hours_in_a_day * 60 * 60
#This line says that there are 8 black cars
black_cars = 8
#This line says that there are 6 red cars
red_cars = 6
#This line states the total amount of black and red cars combined
print "What is the total number of black and red cars?", (black_cars + red_cars)
#This line tells you how many more black cars there are than red cars
print "How many more black cars are there than red cars?", (black_cars - red_cars)
|
arunkgupta/gramps
|
gramps/gen/datehandler/_date_es.py
|
Python
|
gpl-2.0
| 6,538
| 0.017781
|
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2004-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Spanish-specific classes for parsing and displaying dates.
"""
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import re
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from ..lib.date import Date
from _dateparser import DateParser
from _datedisplay import DateDisplay
from _datehandler import register_datehandler
#-------------------------------------------------------------------------
#
# Spanish parser
#
#-------------------------------------------------------------------------
class DateParserES(DateParser):
modifier_to_int = {
u'antes de' : Date.MOD_BEFORE,
u'antes' : Date.MOD_BEFORE,
u'ant.' : Date.MOD_BEFORE,
u'ant' : Date.MOD_BEFORE,
u'después de' : Date.MOD_AFTER,
u'después' : Date.MOD_AFTER,
u'desp.' : Date.MOD_AFTER,
u'desp' : Date.MOD_AFTER,
u'aprox.' : Date.MOD_ABOUT,
u'aprox' : Date.MOD_ABOUT,
u'apr.' : Date.MOD_ABOUT,
u'apr' : Date.MOD_ABOUT,
u'circa' : Date.MOD_ABOUT,
u'ca.' : Date.MOD_ABOUT,
u'ca' : Date.MOD_ABOUT,
u'c.' : Date.MOD_ABOUT,
u'hacia' : Date.MOD_ABOUT,
}
calendar_to_int = {
u'gregoriano' : Date.CAL_GREGORIAN,
u'g' : Date.CAL_GREGORIAN,
u'juliano' : Date.CAL_JULIAN,
u'j' : Date.CAL_JULIAN,
u'hebreo' : Date.CAL_HEBREW,
u'h' : Date.CAL_HEBREW,
u'islámico' : Date.CAL_ISLAMIC,
u'i' : Date.CAL_ISLAMIC,
u'revolucionario' : Date.CAL_FRENCH,
u'r' : Date.CAL_FRENCH,
u'persa' : Date.CAL_PERSIAN,
u'p' : Date.CAL_PERSIAN,
u'swedish' : Date.CAL_SWEDISH,
u's' : Date.CAL_SWEDISH,
}
quality_to_int = {
u'estimado' : Date.QUAL_ESTIMATED,
u'est.' : Date.QUAL_ESTIMATED,
u'est' : Date.QUAL_ESTIMATED,
u'calc.' : Date.QUAL_CALCULATED,
u'calc' : Date.QUAL_CALCULATED,
u'calculado' : Date.QUAL_CALCULATED,
}
def init_strings(self):
DateParser.init_strings(self)
_span_1 = [u'de']
_span_2 = [u'a']
_range_1 = [u'entre', u'ent\.', u'ent']
        _range_2 = [u'y']
self._span = re.compile("(%s)\s+(?P<start>.+)\s+(%s)\s+(?P<stop>.+)" %
('|'.join(_span_1), '|'.join(_span_2)),
re.IGNORECASE)
self._range = re.compile("(%s)\s+(?P<start>.+
|
)\s+(%s)\s+(?P<stop>.+)" %
('|'.join(_range_1), '|'.join(_range_2)),
re.IGNORECASE)
#-------------------------------------------------------------------------
#
# Spanish display
#
#-------------------------------------------------------------------------
class DateDisplayES(DateDisplay):
"""
Spanish language date display class.
"""
# TODO: Translate these month strings:
long_months = ( u"", u"enero", u"febrero", u"marzo", u"abril", u"mayo",
u"junio", u"julio", u"agosto", u"septiembre", u"octubre",
u"noviembre", u"diciembre" )
short_months = ( u"", u"enero", u"feb.", u"marzo", u"abr.", u"mayo",
u"jun.", u"jul.", u"agosto", u"set.", u"oct.", u"nov.",
u"dic" )
calendar = (
"", u"Juliano", u"Hebreo",
u"Revolucionario", u"Persa", u"Islámico",
u"Swedish"
)
_mod_str = ("", u"antes de ", u"después de ", u"hacia ", "", "", "")
_qual_str = ("", "estimado ", "calculado ")
formats = (
"AAAA-MM-DD (ISO)", "Numérica", "Mes Día, Año",
"MES Día, Año", "Día Mes, Año", "Día MES, Año"
)
def display(self, date):
"""
Return a text string representing the date.
"""
mod = date.get_modifier()
cal = date.get_calendar()
qual = date.get_quality()
start = date.get_start_date()
newyear = date.get_new_year()
qual_str = self._qual_str[qual]
if mod == Date.MOD_TEXTONLY:
return date.get_text()
elif start == Date.EMPTY:
return ""
elif mod == Date.MOD_SPAN:
d1 = self.display_cal[cal](start)
d2 = self.display_cal[cal](date.get_stop_date())
scal = self.format_extras(cal, newyear)
return "%s%s %s %s %s%s" % (qual_str, u'de', d1, u'a', d2, scal)
elif mod == Date.MOD_RANGE:
d1 = self.display_cal[cal](start)
d2 = self.display_cal[cal](date.get_stop_date())
scal = self.format_extras(cal, newyear)
return "%s%s %s %s %s%s" % (qual_str, u'entre', d1, u'y', d2, scal)
else:
text = self.display_cal[date.get_calendar()](start)
scal = self.format_extras(cal, newyear)
return "%s%s%s%s" % (qual_str, self._mod_str[mod], text, scal)
#-------------------------------------------------------------------------
#
# Register classes
#
#-------------------------------------------------------------------------
register_datehandler(('es_ES', 'es', 'spanish', 'Spanish'), DateParserES, DateDisplayES)
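# Illustrative sketch only (hypothetical values, assuming Gramps' DateParser
# set_date() entry point): with the handlers registered above, a Spanish
# locale can round-trip dates such as:
#
#     parser, displayer = DateParserES(), DateDisplayES()
#     date = Date()
#     parser.set_date(date, u"entre 1900 y 1910")  # parsed as Date.MOD_RANGE
#     displayer.display(date)                      # -> u"entre 1900 y 1910"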
|
jtwp470/my-programming-learning-book
|
nlp100/python/14.py
|
Python
|
unlicense
| 278
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# 14. Print the first N lines of a file
import sys
if len(sys.argv) != 3:
print("Usage: python
|
{} filename [num]".format(sys.argv[0]))
sys.exit()
with open(sys.argv[1]) as f:
print("".join(f.readlines()[:int(sys.argv[2])]))
|
tbjoern/adventofcode
|
Twentyfour/script.py
|
Python
|
mit
| 2,404
| 0.037022
|
import Queue
import copy
laby = []
spots = []
def isInt(c):
try:
int(c)
return True
except:
return False
def shortestPath(map, start, dest):
height = len(map[0])
width = len(map)
    dist = [[-1 for y in range(height)] for x in range(width)]
prev = [[None for y in range(height)] for x in range(width)]
dist[start[0]][start[1]] = 0
shortestPath = []
Q = []
for i in range(width):
for j in range(height):
if map != "#":
Q.append((i,j))
while dest in Q:
min = 100000
minq = (-1,-1)
for q in Q:
if dist[q[0]][q[1]] > -1 and dist[q[0]][q[1]] < min:
min = dist[q[0]][q[1]]
minq = q
if minq == (-1,-1):
break
Q.remove(minq)
#print len(Q)
offset = [-1,0,1]
for i in offset:
for j in offset:
offsetQ = (i+minq[0],j+minq[1])
if i != j and j+i != 0 and offsetQ in Q:
#print dist[i][j]
altDist = min + 1
if altDist < dist[offsetQ[0]][offsetQ[1]] or dist[offsetQ[0]][offsetQ[1]] == -1:
dist[offsetQ[0]][offsetQ[1]] = altDist
prev[offsetQ[0]][offsetQ[1]] = minq
shortestPath = [dest]
start = dest
while prev[start[0]][start[1]]:
start = prev[start[0]][start[1]]
shortestPath.insert(0, start)
return shortestPath
zero = (1,1)
with open("test.txt") as f:
for line in f:
laby.append([])
index = len(laby) - 1
for c in line:
laby[index].append(c)
if isInt(c):
spots.append((index,len(laby[index]) - 1))
            if c == '0':
zero = (index,len(laby[index]) - 1)
distances = {}
print spots
print
print "calculating distances"
for start in spots:
distances[start] = {}
for dest in spots:
if dest != start:
            distances[start][dest] = len(shortestPath(laby, start, dest)) - 1
print "one done"
visitOrder = []
notVisited = copy.copy(spots)
start = spots[0]
notVisited.remove(start)
visitOrder.append(start)
totaldist = 0
edges = []
print distances
print "calculating mst"
while len(notVisited) > 0:
startspot = None
nextSpot = None
mindist = 10000000
for start in visitOrder:
for dest in notVisited:
if distances[start][dest] < mindist:
mindist = distances[start][dest]
nextSpot = dest
startspot = start
edges.append((startspot,nextSpot,mindist))
notVisited.remove(nextSpot)
visitOrder.append(nextSpot)
totaldist += mindist
print totaldist
print edges
|
tomkralidis/geonode
|
geonode/messaging/consumer.py
|
Python
|
gpl-3.0
| 9,159
| 0.001201
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2017 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import logging
import time
import json
from datetime import datetime
# from django.conf import settings
from kombu.mixins import ConsumerMixin
from geonode.geoserver.signals import geoserver_post_save_local
from geonode.security.views import send_email_consumer # , send_email_owner_on_view
# from geonode.social.signals import notification_post_save_resource2
from geonode.layers.views import layer_view_counter
from geonode.layers.models import Layer
from geonode.geoserver.helpers import gs_slurp
from .queues import (
queue_email_events,
queue_geoserver_events,
queue_notifications_events,
queue_all_events,
queue_geoserver_catalog,
queue_geoserver_data,
queue_geoserver,
queue_layer_viewers
)
logger = logging.getLogger(__package__)
class Consumer(ConsumerMixin):
def __init__(self, connection, messages_limit=None):
self.last_message = None
self.connection = connection
self.messages_limit = messages_limit
def get_consumers(self, Consumer, channel):
return [
Consumer(queue_all_events,
callbacks=[self.on_message]),
Consumer(queue_email_events,
callbacks=[self.on_email_messages]),
Consumer(queue_geoserver_events,
callbacks=[self.on_geoserver_messages]),
Consumer(queue_notifications_events,
callbacks=[self.on_notifications_messages]),
Consumer(queue_geoserver_catalog,
callbacks=[self.on_geoserver_catalog]),
Consumer(queue_geoserver_data,
callbacks=[self.on_geoserver_data]),
Consumer(queue_geoserver,
callbacks=[self.on_geoserver_all]),
Consumer(queue_layer_viewers,
callbacks=[self.on_layer_viewer]),
]
def _check_message_limit(self):
if self.messages_limit is not None:
self.messages_limit -= 1
if self.messages_limit < 1:
self.should_stop = True
return True
def on_consume_end(self, connection, channel):
super(Consumer, self).on_consume_end(connection, channel)
logger.debug("finished.")
def on_message(self, body, message):
logger.debug("broadcast: RECEIVED MSG - body: %r" % (body,))
message.ack()
self._check_message_limit()
def on_email_messages(self, body, message):
logger.debug("on_email_messages: RECEIVED MSG - body: %r" % (body,))
layer_uuid = body.get("layer_uuid")
user_id = body.get("user_id")
send_email_consumer(layer_uuid, user_id)
# Not sure if we need to send ack on this fanout version.
message.ack()
logger.debug("on_email_messages: finished")
self._check_message_limit()
def on_geoserver_messages(self, body, message):
logger.debug("on_geoserver_messages: RECEIVED MSG - body: %r" % (body,))
layer_id = body.get("id")
try:
layer = _wait_for_layer(layer_id)
except Layer.DoesNotExist as err:
logger.debug(err)
return
geoserver_post_save_local(layer)
# Not sure if we need to send ack on this fanout version.
message.ack()
logger.debug("on_geoserver_messages: finished")
self._check_message_limit()
def on_notifications_messages(self, body, message):
logger.debug("on_notifications_message: RECEIVED MSG - body: %r" % (body,))
body.get("id")
body.get("app_label")
body.get("model")
body.get("created")
# notification_post_save_resource2(instance_id, app_label, model, created)
message.ack()
logger.debug("on_notifications_message: finished")
self._check_message_limit()
def on_geoserver_all(self, body, message):
logger.debug("on_geoserver_all: RECEIVED MSG - body: %r" % (body,))
message.ack()
logger.debug("on_geoserver_all: finished")
        # TODO: Add consumer's producers.
self._check_message_limit()
def on_geoserver_catalog(self, body, message):
logger.debug("on_geoserver_catalog: RECEIVED MSG - body: %r" % (body,))
try:
_update_layer_data(body, self.last_message)
self.last_message = json.loads(body)
except Exception:
logger.debug("Could not encode message {!r}".format(body))
message.ack()
logger.debug("on_geoserver_catalog: finished")
self._check_message_limit()
def on_geoserver_data(self, body, message):
logger.debug("on_geoserver_data: RECEIVED MSG - body: %r" % (body,))
try:
_update_layer_data(body, self.last_message)
self.last_message = json.loads(body)
except Exception:
logger.debug("Could not encode message {!r}".format(body))
message.ack()
logger.debug("on_geoserver_data: finished")
self._check_message_limit()
def on_consume_ready(self, connection, channel, consumers, **kwargs):
logger.debug(">>> Ready:")
logger.debug(connection)
logger.debug("{} consumers:".format(len(consumers)))
for i, consumer in enumerate(consumers, start=1):
logger.debug("{0} {1}".format(i, consumer))
super(Consumer, self).on_consume_ready(connection, channel, consumers,
**kwargs)
def on_layer_viewer(self, body, message):
logger.debug("on_layer_viewer: RECEIVED MSG - body: %r" % (body,))
viewer = body.get("viewer")
# owner_layer = body.get("owner_layer")
layer_id = body.get("layer_id")
layer_view_counter(layer_id, viewer)
        # TODO Disabled for now. This should be handled through Notifications
# if settings.EMAIL_ENABLE:
# send_email_owner_on_view(owner_layer, viewer, layer_id)
message.ack()
logger.debug("on_layer_viewer: finished")
self._check_message_limit()
def _update_layer_data(body, last_message):
message = json.loads(body)
workspace = message["source"]["workspace"] if "workspace" in message["source"] else None
store = message["source"]["store"] if "store" in message["source"] else None
filter = message["source"]["name"]
update_layer = False
if not last_message:
last_message = message
update_layer = True
last_workspace = message["source"]["workspace"] if "workspace" in message["source"] else None
last_store = message["source"]["store"] if "store" in message["source"] else None
last_filter = last_message["source"]["name"]
if (last_workspace, last_store, last_filter) != (workspace, store, filter):
update_layer = True
else:
        timestamp_t1 = datetime.strptime(last_message["timestamp"], '%Y-%m-%dT%H:%MZ')
timestamp_t2 = datetime.strptime(message["timestamp"], '%Y-%m-%dT%H:%MZ')
timestamp_delta = timestamp_t2 - timestamp_t1
if timestamp_t2 > timestamp_t1 and timestamp_delta.seconds > 60:
update_layer = True
if update_layer:
gs_slurp(True, workspace=workspace, store=store, filter=filter, remove_deleted=True, execute_signals=True)
def _wait_for_layer(layer_id, num_attempts=5, wait_seconds=1):
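    # The body is truncated in this copy; the following is a minimal sketch
    # of the retry loop implied by the signature and by the
    # Layer.DoesNotExist handling in on_geoserver_messages() above (the
    # exact upstream logic may differ):
    for attempt in range(num_attempts):
        try:
            return Layer.objects.get(id=layer_id)
        except Layer.DoesNotExist:
            if attempt == num_attempts - 1:
                raise
            time.sleep(wait_seconds)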
|
ray-project/ray
|
ci/travis/py_dep_analysis.py
|
Python
|
apache-2.0
| 11,800
| 0.000508
|
#!/usr/bin/env python
#
# This file contains utilities for understanding dependencies between python
# source files and tests.
#
# Utils are assumed to be used from top level ray/ folder, since that is how
# our tests are defined today.
#
# Example usage:
# To find all circular dependencies under ray/python/:
# python ci/travis/py_dep_analysis.py --mode=circular-dep
# To find all the RLlib tests that depend on a file:
# python ci/travis/py_dep_analysis.py --mode=test-dep \
# --file=python/ray/tune/tune.py
# For testing, add --smoke-test to any commands, so it doesn't spend
# tons of time querying for available RLlib tests.
import argparse
import ast
import os
import re
import subprocess
import sys
from typing import Dict, List, Tuple
class DepGraph(object):
def __init__(self):
self.edges: Dict[str, Dict[str, bool]] = {}
self.ids: Dict[str, int] = {}
self.inv_ids: Dict[int, str] = {}
def _run_shell(args: List[str]) -> str:
return subprocess.check_output(args).decode(sys.stdout.encoding)
def list_rllib_tests(n: int = -1, test: str = None) -> Tuple[str, List[str]]:
"""List RLlib tests.
Args:
n: return at most n tests. all tests if n = -1.
test: only return information about a specific test.
"""
tests_res = _run_shell(
["bazel", "query", "tests(//python/ray/rllib:*)", "--output", "label"]
)
all_tests = []
# Strip, also skip any empty lines
tests = [t.strip() for t in tests_res.splitlines() if t.strip()]
for t in tests:
if test and t != test:
continue
src_out = _run_shell(
[
"bazel",
"query",
'kind("source file", deps({}))'.format(t),
"--output",
"label",
]
)
srcs = [f.strip() for f in src_out.splitlines()]
srcs = [f for f in srcs if f.startswith("//python") and f.endswith(".py")]
if srcs:
all_tests.append((t, srcs))
# Break early if smoke test.
if n > 0 and len(all_tests) >= n:
break
return all_tests
def _new_dep(graph: DepGraph, src_module: str, dep: str):
"""Create a new dependency between src_module and dep."""
if dep not in graph.ids:
graph.ids[dep] = len(graph.ids)
src_id = graph.ids[src_module]
dep_id = graph.ids[dep]
if src_id not in graph.edges:
graph.edges[src_id] = {}
graph.edges[src_id][dep_id] = True
def _new_import(graph: DepGraph, src_module: str, dep_module: str):
"""Process a new import statement in src_module."""
# We don't care about system imports.
if not dep_module.startswith("ray"):
return
_new_dep(graph, src_module, dep_module)
def _is_path_module(module: str, name: str, _base_dir: str) -> bool:
"""Figure out if base.sub is a python module or not."""
# Special handling for _raylet, which is a C++ lib.
if module == "ray._raylet":
return False
bps = ["python"] + module.split(".")
path = os.path.join(_base_dir, os.path.join(*bps), name + ".py")
if os.path.isfile(path):
return True # file module
return False
def _new_from_import(
graph: DepGraph, src_module: str, dep_module: str, dep_name: str, _base_dir: str
):
"""Process a new "from ... import ..." statement in src_module."""
# We don't care about imports outside of ray package.
if not dep_module or not dep_module.startswith("ray"):
return
if _is_path_module(dep_module, dep_name, _base_dir):
# dep_module.dep_name points to a file.
_new_dep(graph, src_module, _full_module_path(dep_module, dep_name))
else:
# sub is an obj on base dir/file.
_new_dep(graph, src_module, dep_module)
def _process_file(graph: DepGraph, src_path: str, src_module: str, _base_dir=""):
"""Create dependencies from src_module to all the valid imports in src_path.
Args:
graph: the DepGraph to be added to.
src_path: .py file to be processed.
src_module: full module path of the source file.
_base_dir: use a different base dir than current dir. For unit testing.
"""
with open(os.path.join(_base_dir, src_path), "r") as in_f:
tree = ast.parse(in_f.read())
for node in ast.walk(tree):
if isinstance(node, ast.Import):
for alias in node.names:
_new_import(graph, src_module, alias.name)
elif isinstance(node, ast.ImportFrom):
for alias in node.names:
_new_from_import(
graph, src_module, node.module, alias.name, _base_dir
)
def build_dep_graph() -> DepGraph:
"""Build index
|
from py files to their immediate dependees."""
graph = DepGraph()
# Assuming we run from root /ray directory.
# Follow links since rllib is linked to /rllib.
for root, sub_dirs, files in os.walk("python", followlinks=True):
if _should_skip(root):
continue
        module = _bazel_path_to_module_path(root)
# Process files first.
for f in files:
if not f.endswith(".py"):
continue
full = _full_module_path(module, f)
if full not in graph.ids:
graph.ids[full] = len(graph.ids)
# Process file:
_process_file(graph, os.path.join(root, f), full)
# Build reverse index for convenience.
graph.inv_ids = {v: k for k, v in graph.ids.items()}
return graph
def _full_module_path(module, f) -> str:
if f == "__init__.py":
# __init__ file for this module.
# Full path is the same as the module name.
return module
fn = re.sub(r"\.py$", "", f)
if not module:
return fn
return module + "." + fn
def _should_skip(d: str) -> bool:
"""Skip directories that should not contain py sources."""
if d.startswith("python/.eggs/"):
return True
if d.startswith("python/."):
return True
if d.startswith("python/build"):
return True
if d.startswith("python/ray/cpp"):
return True
return False
def _bazel_path_to_module_path(d: str) -> str:
"""Convert a Bazel file path to python module path.
Example: //python/ray/rllib:xxx/yyy/dd -> ray.rllib.xxx.yyy.dd
"""
# Do this in 3 steps, so all of 'python:', 'python/', or '//python', etc
# will get stripped.
d = re.sub(r"^\/\/", "", d)
d = re.sub(r"^python", "", d)
d = re.sub(r"^[\/:]", "", d)
return d.replace("/", ".").replace(":", ".")
def _file_path_to_module_path(f: str) -> str:
"""Return the corresponding module path for a .py file."""
dir, fn = os.path.split(f)
return _full_module_path(_bazel_path_to_module_path(dir), fn)
def _depends(
graph: DepGraph, visited: Dict[int, bool], tid: int, qid: int
) -> List[int]:
"""Whether there is a dependency path from module tid to module qid.
Given graph, and without going through visited.
"""
if tid not in graph.edges or qid not in graph.edges:
return []
if qid in graph.edges[tid]:
# tid directly depends on qid.
return [tid, qid]
for c in graph.edges[tid]:
if c in visited:
continue
visited[c] = True
# Reduce to a question of whether there is a path from c to qid.
ds = _depends(graph, visited, c, qid)
if ds:
# From tid -> c -> qid.
return [tid] + ds
return []
def test_depends_on_file(
graph: DepGraph, test: Tuple[str, Tuple[str]], path: str
) -> List[int]:
"""Give dependency graph, check if a test depends on a specific .py file.
Args:
graph: the dependency graph.
test: information about a test, in the format of:
[test_name, (src files for the test)]
"""
query = _file_path_to_module_path(path)
if query not in graph.ids:
# Not a file that we care about.
return []
t, srcs = test
# Skip tuned_examples/ and examples/ tests.
|
flexiant/xen
|
tools/python/xen/util/xmlrpcclient.py
|
Python
|
gpl-2.0
| 4,533
| 0.001985
|
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2006 Anthony Liguori <aliguori@us.ibm.com>
# Copyright (C) 2007 XenSource Inc.
#============================================================================
from httplib import FakeSocket, HTTPConnection, HTTP
import socket
import string
import xmlrpclib
from types import StringTypes
from sys import hexversion
try:
import SSHTransport
ssh_enabled = True
except ImportError:
# SSHTransport is disabled on Python <2.4, because it uses the subprocess
# package.
ssh_enabled = False
# A new ServerProxy that also supports httpu urls. An http URL comes in the
# form:
#
# httpu:///absolute/path/to/socket.sock
#
# It assumes that the RPC handler is /RPC2. This probably needs to be improved
class HTTPUnixConnection(HTTPConnection):
def connect(self):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.connect(self.host)
class HTTPUnix(HTTP):
_connection_class = HTTPUnixConnection
class UnixTransport(xmlrpclib.Transport):
def request(self, host, handler, request_body, verbose=0):
self.__handler = handler
return xmlrpclib.Transport.request(self, host, '/RPC2',
request_body, verbose)
def make_connection(self, host):
if hexversion < 0x02070000:
# python 2.6 or earlier
return HTTPUnix(self.__handler)
else:
# xmlrpclib.Transport changed in python 2.7
return HTTPUnixConnection(self.__handler)
# We need our own transport for HTTPS, because xmlrpclib.SafeTransport is
# broken -- it does not handle ERROR_ZERO_RETURN properly.
class HTTPSTransport(xmlrpclib.SafeTransport):
def _parse_response(self, file, sock):
p, u = self.getparser()
        while 1:
try:
if sock:
response = sock.recv(1024)
else:
response = file.read(1024)
except socket.sslerror, exn:
if exn[0] == socket.SSL_ERROR_ZERO_RETURN:
break
raise
if not response:
break
            if self.verbose:
print 'body:', repr(response)
p.feed(response)
file.close()
p.close()
return u.close()
# See xmlrpclib2.TCPXMLRPCServer._marshalled_dispatch.
def conv_string(x):
if isinstance(x, StringTypes):
s = string.replace(x, "'", r"\047")
exec "s = '" + s + "'"
return s
else:
return x
class ServerProxy(xmlrpclib.ServerProxy):
def __init__(self, uri, transport=None, encoding=None, verbose=0,
allow_none=1):
if transport == None:
(protocol, rest) = uri.split(':', 1)
if protocol == 'httpu':
uri = 'http:' + rest
transport = UnixTransport()
elif protocol == 'https':
transport = HTTPSTransport()
elif protocol == 'ssh':
global ssh_enabled
if ssh_enabled:
(transport, uri) = SSHTransport.getHTTPURI(uri)
else:
raise ValueError(
"SSH transport not supported on Python <2.4.")
xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding,
verbose, allow_none)
def __request(self, methodname, params):
response = xmlrpclib.ServerProxy.__request(self, methodname, params)
if isinstance(response, tuple):
return tuple([conv_string(x) for x in response])
else:
return conv_string(response)
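# Minimal usage sketch (the socket path below is hypothetical; assumes the
# server exposes standard XML-RPC introspection):
#
#     server = ServerProxy('httpu:///var/run/xend/xmlrpc.sock')
#     server.system.listMethods()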
|
nirbheek/cerbero
|
cerbero/tools/osxrelocator.py
|
Python
|
lgpl-2.1
| 5,546
| 0.000721
|
#!/usr/bin/env python3
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.utils import shell
INT_CMD = 'install_name_tool'
OTOOL_CMD = 'otool'
class OSXRelocator(object):
'''
Wrapper for OS X's install_name_tool and otool commands to help
relocating shared libraries.
    It parses the lib/, libexec/ and bin/ directories, changes the prefix
    path of the shared libraries that an object file uses, and changes its
    library ID if the file is a shared library.
'''
def __init__(self, root, lib_prefix, recursive, logfile=None):
self.root = root
self.lib_prefix = self._fix_path(lib_prefix)
self.recursive = recursive
self.use_relative_paths = True
        self.logfile = logfile
def relocate(self):
self.parse_dir(self.root)
def relocate_dir(self, dirname):
self.parse_dir(os.path.join(self.root, dirname))
def relocate_file(self, object_file):
self.change_libs_path(object_file)
def change_id(self, object_file, id=None):
id = id or object_file.replace(self.lib_prefix, '@rpath')
filename = os.path.basename(object_file)
if not self._is_mach_o_file(filename):
return
cmd = [INT_CMD, '-id', id, object_file]
shell.new_call(cmd, fail=False, logfile=self.logfile)
def change_libs_path(self, object_file):
depth = len(object_file.split('/')) - len(self.root.split('/')) - 1
p_depth = '/..' * depth
rpaths = ['.']
rpaths += ['@loader_path' + p_depth, '@executable_path' + p_depth]
rpaths += ['@loader_path' + '/../lib', '@executable_path' + '/../lib']
if not self._is_mach_o_file(object_file):
return
if depth > 1:
rpaths += ['@loader_path/..', '@executable_path/..']
for p in rpaths:
cmd = [INT_CMD, '-add_rpath', p, object_file]
shell.new_call(cmd, fail=False)
for lib in self.list_shared_libraries(object_file):
if self.lib_prefix in lib:
new_lib = lib.replace(self.lib_prefix, '@rpath')
cmd = [INT_CMD, '-change', lib, new_lib, object_file]
shell.new_call(cmd, fail=False, logfile=self.logfile)
def change_lib_path(self, object_file, old_path, new_path):
for lib in self.list_shared_libraries(object_file):
if old_path in lib:
                new_lib = lib.replace(old_path, new_path)
                cmd = [INT_CMD, '-change', lib, new_lib, object_file]
shell.new_call(cmd, fail=True, logfile=self.logfile)
def parse_dir(self, dir_path, filters=None):
for dirpath, dirnames, filenames in os.walk(dir_path):
for f in filenames:
if filters is not None and \
os.path.splitext(f)[1] not in filters:
continue
self.change_libs_path(os.path.join(dirpath, f))
if not self.recursive:
break
@staticmethod
def list_shared_libraries(object_file):
res = shell.check_output([OTOOL_CMD, '-L', object_file]).splitlines()
# We don't use the first line
libs = res[1:]
# Remove the first character tabulation
libs = [x[1:] for x in libs]
# Remove the version info
libs = [x.split(' ', 1)[0] for x in libs]
return libs
def _fix_path(self, path):
if path.endswith('/'):
return path[:-1]
return path
def _is_mach_o_file(self, filename):
return os.path.splitext(filename)[1] in ['.dylib', '.so'] or \
shell.check_output(['file', '-bh', filename]).startswith('Mach-O')
class Main(object):
def run(self):
# We use OptionParser instead of ArgumentsParse because this script
# might be run in OS X 10.6 or older, which do not provide the argparse
# module
import optparse
usage = "usage: %prog [options] library_path old_prefix new_prefix"
        description = 'Relocates object files, changing the dependent '\
                      'dynamic libraries location path to a new one'
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-r', '--recursive', action='store_true',
default=False, dest='recursive',
help='Scan directories recursively')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_usage()
exit(1)
relocator = OSXRelocator(args[1], args[2], options.recursive)
relocator.relocate_file(args[0])
exit(0)
if __name__ == "__main__":
main = Main()
main.run()
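# Example invocation (paths are hypothetical), matching the usage string
# above -- relocate one library, scanning directories recursively:
#
#     python3 osxrelocator.py -r libfoo.dylib /old/install/prefix @rpath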
|
snailwalker/python
|
burness/0000/test.py
|
Python
|
mit
| 607
| 0.07084
|
from PIL import Image, ImageDraw, ImageFont
class Image_unread_message:
def open(self,path):
self.im=Image.open(path)
return True
def __init__(self):
self.fnt=None
self.im=None
def setFont(self,font_path,size):
self.fnt=ImageFont.truetype(font_path,size)
return True
def draw_text(self,position,str,colour,fnt):
draw=ImageDraw.Draw(self.im)
draw.text(position,str,fill=colour,font=fnt)
        self.im.show()
self.im.save(str+'num'+'.jpg')
return True
test=Image_unread_message()
test.open('test.jpg')
test.setFont('ahronbd.ttf',80)
test.draw_text((160,-20),'4',(255,0,0),test.fnt)
|
scheib/chromium
|
tools/perf/core/services/luci_auth.py
|
Python
|
bsd-3-clause
| 1,316
| 0.009878
|
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import subprocess
import sys
import six
_RE_INFO_USER_EMAIL = r'Logged in as (?P<email>\S+)\.$'
class AuthorizationError(Exception):
pass
def _RunCommand(command):
try:
return six.ensure_str(
subprocess.check_output(['luci-auth', command],
stderr=subprocess.STDOUT,
universal_newlines=True))
except subprocess.CalledProcessError as exc:
raise AuthorizationError(exc.output.strip())
def CheckLoggedIn():
"""Check that the user is currently logged in.
Otherwise sys.exit immediately with the error message from luci-auth
instructing the user how to log in.
"""
try:
    GetAccessToken()
except AuthorizationError as exc:
sys.exit(str(exc))
def GetAccessToken():
"""Get an access token to make requests on behalf of the logged in user."""
return _RunCommand('token').rstrip()
def GetUserEmail():
"""Get the email address of the currently logged in user."""
output = _RunCommand('info')
m = re.match(_RE_INFO_USER_EMAIL, output, re.MULTILINE)
assert m, 'Failed to parse luci-auth info output.'
return m.group('email')
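# Minimal usage sketch (assumes `luci-auth` is on PATH and the user has
# already run `luci-auth login`):
#
#     CheckLoggedIn()
#     token = GetAccessToken()
#     email = GetUserEmail()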
|
GertBurger/pygame_cffi
|
pygame/compat.py
|
Python
|
lgpl-2.1
| 2,767
| 0.004698
|
"""Python 2.x/3.x compatibility tools
|
"""
import sys
__all__ = ['geterror', 'long_', 'xrange_', 'ord_', 'unichr_',
'unicode_', 'raw_input_', 'as_bytes', 'as_unicode']
def geterror ():
return sys.exc_info()[1]
try:
long_ = long
except NameError:
long_ = int
try:
xrange_ = xrange
except NameError:
xrange_ = range
def get_BytesIO():
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
return BytesIO
def get_StringIO():
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
return StringIO
def ord_(o):
try:
return ord(o)
except TypeError:
return o
try:
unichr_ = unichr
except NameError:
unichr_ = chr
try:
unicode_ = unicode
except NameError:
unicode_ = str
try:
bytes_ = bytes
except NameError:
bytes_ = str
try:
raw_input_ = raw_input
except NameError:
raw_input_ = input
if sys.platform == 'win32':
filesystem_errors = "replace"
elif sys.version_info >= (3, 0, 0):
filesystem_errors = "surrogateescape"
else:
filesystem_errors = "strict"
def filesystem_encode(u):
return u.encode(sys.getfilesystemencoding(), filesystem_errors)
# Represent escaped bytes and strings in a portable way.
#
# as_bytes: Allow a Python 3.x string to represent a bytes object.
# e.g.: as_bytes("a\x01b") == b"a\x01b" # Python 3.x
# as_bytes("a\x01b") == "a\x01b" # Python 2.x
# as_unicode: Allow a Python "r" string to represent a unicode string.
# e.g.: as_unicode(r"Bo\u00F6tes") == u"Bo\u00F6tes" # Python 2.x
# as_unicode(r"Bo\u00F6tes") == "Bo\u00F6tes" # Python 3.x
try:
unicode
def as_bytes(string):
""" '<binary literal>' => '<binary literal>' """
return string
def as_unicode(rstring):
""" r'<Unicode literal>' => u'<Unicode literal>' """
return rstring.decode('unicode_escape', 'strict')
except NameError:
def as_bytes(string):
""" '<binary literal>' => b'<binary literal>' """
return string.encode('latin-1', 'strict')
def as_unicode(rstring):
""" r'<Unicode literal>' => '<Unicode literal>' """
return rstring.encode('ascii', 'strict').decode('unicode_escape',
                                                        'strict')
# Include a next compatible function for Python versions < 2.6
try:
next_ = next
except NameError:
def next_(i, *args):
try:
return i.next()
except StopIteration:
if args:
return args[0]
raise
# itertools.imap is missing in 3.x
try:
    from itertools import imap as imap_
except ImportError:
imap_ = map
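# Usage sketch: next_(iter([]), 'default') returns 'default' on both Python
# 2 and 3, mirroring the built-in next(iterator[, default]).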
|
beni55/nixops
|
nixops/deployment.py
|
Python
|
lgpl-3.0
| 46,436
| 0.004427
|
# -*- coding: utf-8 -*-
import sys
import os.path
import subprocess
import json
import string
import tempfile
import shutil
import threading
import exceptions
import errno
from collections import defaultdict
from xml.etree import ElementTree
import nixops.statefile
import nixops.backends
import nixops.logger
import nixops.parallel
import nixops.util
import nixops.resources.ssh_keypair
import nixops.resources.ec2_keypair
import nixops.resources.sqs_queue
import nixops.resources.iam_role
import nixops.resources.s3_bucket
import nixops.resources.ec2_security_group
import nixops.resources.ebs_volume
import nixops.resources.elastic_ip
from nixops.nix_expr import RawValue, Function, nixmerge, py2nix
import re
from datetime import datetime
import getpass
import traceback
import glob
import fcntl
import itertools
import platform
class NixEvalError(Exception):
pass
class UnknownBackend(Exception):
pass
debug = False
class Deployment(object):
"""NixOps top-level deployment manager."""
default_description = "Unnamed NixOps network"
name = nixops.util.attr_property("name", None)
nix_exprs = nixops.util.attr_property("nixExprs", [], 'json')
nix_path = nixops.util.attr_property("nixPath", [], 'json')
args = nixops.util.attr_property("args", {}, 'json')
description = nixops.util.attr_property("description", default_description)
configs_path = nixops.util.attr_property("configsPath", None)
rollback_enabled = nixops.util.attr_property("rollbackEnabled", False)
def __init__(self, statefile, uuid, log_file=sys.stderr):
self._statefile = statefile
self._db = statefile._db
self.uuid = uuid
self._last_log_prefix = None
self.extra_nix_path = []
self.extra_nix_flags = []
self.extra_nix_eval_flags = []
self.nixos_version_suffix = None
self.logger = nixops.logger.Logger(log_file)
self._lock_file_path = None
self.expr_path = os.path.realpath(os.path.dirname(__file__) + "/../../../../share/nix/nixops")
if not os.path.exists(self.expr_path):
self.expr_path = os.path.realpath(os.path.dirname(__file__) + "/../../../../../share/nix/nixops")
if not os.path.exists(self.expr_path):
self.expr_path = os.path.dirname(__file__) + "/../nix"
self.tempdir = nixops.util.SelfDeletingDir(tempfile.mkdtemp(prefix="nixops-tmp"))
self.resources = {}
with self._db:
c = self._db.cursor()
c.execute("select id, name, type from Resources where deployment = ?", (self.uuid,))
for (id, name, type) in c.fetchall():
r = nixops.backends.create_state(self, type, name, id)
self.resources[name] = r
self.logger.update_log_prefixes()
self.definitions = None
@property
def machines(self):
return {n: r for n, r in self.resources.items() if is_machine(r)}
@property
def active(self): # FIXME: rename to "active_machines"
return {n: r for n, r in self.resources.items() if is_machine(r) and not r.obsolete}
@property
def active_resources(self):
return {n: r for n, r in self.resources.items() if not r.obsolete}
def get_typed_resource(self, name, type):
res = self.active_resources.get(name, None)
if not res:
raise Exception("resource ‘{0}’ does not exist".format(name))
if res.get_type() != type:
raise Exception("resource ‘{0}’ is not of type ‘{1}’".format(name, type))
return res
def _set_attrs(self, attrs):
"""Update deployment attributes in the state file."""
with self._db:
c = self._db.cursor()
for n, v in attrs.iteritems():
if v == None:
c.execute("delete from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, n))
else:
c.execute("insert or replace into DeploymentAttrs(deployment, name, value) values (?, ?, ?)",
(self.uuid, n, v))
def _set_attr(self, name, value):
"""Update one deployment attribute in the state file."""
self._set_attrs({name: value})
def _del_attr(self, name):
"""Delete a deployment attribute from the state file."""
with self._db:
self._db.execute("delete from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, name))
def _get_attr(self, name, default=nixops.util.undefined):
"""Get a deployment attribute from the state file."""
with self._db:
c = self._db.cursor()
c.execute("select value from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, name))
row = c.fetchone()
if row != None: return row[0]
return nixops.util.undefined
def _create_resource(self, name, type):
c = self._db.cursor()
c.execute("select 1 from Resources where deployment = ? and name = ?", (self.uuid, name))
if len(c.fetchall()) != 0:
raise Exception("resource already exists in database!")
c.execute("insert into Resources(deployment, name, type) values (?, ?, ?)",
(self.uuid, name, type))
id = c.lastrowid
r = nixops.backends.create_state(self, type, name, id)
self.resources[name] = r
return r
def export(self):
with self._db:
c = self._db.cursor()
            c.execute("select name, value from DeploymentAttrs where deployment = ?", (self.uuid,))
rows = c.fetchall()
res = {row[0]: row[1] for row in rows}
            res['resources'] = {r.name: r.export() for r in self.resources.itervalues()}
return res
def import_(self, attrs):
with self._db:
for k, v in attrs.iteritems():
if k == 'resources': continue
self._set_attr(k, v)
for k, v in attrs['resources'].iteritems():
if 'type' not in v: raise Exception("imported resource lacks a type")
r = self._create_resource(k, v['type'])
r.import_(v)
def clone(self):
with self._db:
new = self._statefile.create_deployment()
self._db.execute("insert into DeploymentAttrs (deployment, name, value) " +
"select ?, name, value from DeploymentAttrs where deployment = ?",
(new.uuid, self.uuid))
new.configs_path = None
return new
def _get_deployment_lock(self):
if self._lock_file_path is None:
lock_dir = os.environ.get("HOME", "") + "/.nixops/locks"
if not os.path.exists(lock_dir): os.makedirs(lock_dir, 0700)
self._lock_file_path = lock_dir + "/" + self.uuid
class DeploymentLock(object):
def __init__(self, depl):
self._lock_file_path = depl._lock_file_path
self._logger = depl.logger
self._lock_file = None
def __enter__(self):
self._lock_file = open(self._lock_file_path, "w")
fcntl.fcntl(self._lock_file, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
try:
fcntl.flock(self._lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
self._logger.log(
"waiting for exclusive deployment lock..."
)
fcntl.flock(self._lock_file, fcntl.LOCK_EX)
def __exit__(self, exception_type, exception_value, exception_traceback):
self._lock_file.close()
return DeploymentLock(self)
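    # Usage sketch: callers are expected to serialize operations on the same
    # deployment via
    #     with self._get_deployment_lock():
    #         ...critical section...
    # so concurrent NixOps invocations block on the flock taken in __enter__.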
def delete_resource(self, m):
del self.resources[m.name]
with self._db:
self._db.execute("delete from Resources where deployment = ? and id = ?", (self.uuid, m.id))
def delete(self, force=False):
"""Delete this deployment from the state file."""
with self._db:
if not force and len(self.resources) > 0:
raise Exception("cannot delete this deploy
|
ksmit799/Toontown-Source
|
toontown/estate/DistributedStatuary.py
|
Python
|
mit
| 5,336
| 0.002436
|
import DistributedLawnDecor
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.ShowBase import *
import GardenGlobals
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownGlobals
from toontown.toontowngui import TTDialog
from toontown.toonbase import TTLocalizer
from pandac.PandaModules import NodePath
from pandac.PandaModules import Point3
class DistributedStatuary(DistributedLawnDecor.DistributedLawnDecor):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedStatuary')
def __init__(self, cr):
self.notify.debug('constructing DistributedStatuary')
DistributedLawnDecor.DistributedLawnDecor.__init__(self, cr)
self.confirmDialog = None
self.resultDialog = None
return
def loadModel(self):
self.rotateNode = self.plantPath.attachNewNode('rotate')
self.model = loader.loadModel(self.modelPath)
colNode = self.model.find('**/+CollisionNode')
if not colNode.isEmpty():
score, multiplier = ToontownGlobals.PinballScoring[ToontownGlobals.PinballStatuary]
if self.pinballScore:
score = self.pinballScore[0]
multiplier = self.pinballScore[1]
            scoreNodePath = NodePath('statuary-%d-%d' % (score, multiplier))
colNode.setName('statuaryCol')
scoreNodePath.reparentTo(colNode.getParent())
colNode.reparentTo(scoreNodePath)
self.model.setScale(self.worldScale)
self.model.reparentTo(self.rotateNode)
attrib = GardenGlobals.PlantAttributes[self.typeIndex]
self.stick2Ground()
def setTypeIndex(self, typeIndex):
self.typeIndex = typeIndex
self.name = GardenGlobals.PlantAttributes[typeIndex]['name']
self.plantType = GardenGlobals.PlantAttributes[typeIndex]['plantType']
self.modelPath = GardenGlobals.PlantAttributes[typeIndex]['model']
self.pinballScore = None
if GardenGlobals.PlantAttributes[typeIndex].has_key('pinballScore'):
self.pinballScore = GardenGlobals.PlantAttributes[typeIndex]['pinballScore']
self.worldScale = 1.0
if GardenGlobals.PlantAttributes[typeIndex].has_key('worldScale'):
self.worldScale = GardenGlobals.PlantAttributes[typeIndex]['worldScale']
return
def getTypeIndex(self):
return self.typeIndex
def setWaterLevel(self, waterLevel):
self.waterLevel = waterLevel
def getWaterLevel(self):
return self.waterLevel
def setGrowthLevel(self, growthLevel):
self.growthLevel = growthLevel
def getGrowthLevel(self):
return self.growthLevel
def setupCollision(self):
DistributedLawnDecor.DistributedLawnDecor.setupCollision(self)
minPt = Point3(0, 0, 0)
maxPt = Point3(0, 0, 0)
self.model.calcTightBounds(minPt, maxPt)
self.notify.debug('max=%s min=%s' % (maxPt, minPt))
xDiff = maxPt[0] - minPt[0]
yDiff = maxPt[1] - minPt[1]
radius = (xDiff * xDiff + yDiff * yDiff) ** 0.5
radius /= 3
self.notify.debug('xDiff=%s yDiff=%s radius = %s' % (xDiff, yDiff, radius))
self.colSphereNode.setScale(radius)
def getShovelCommand(self):
return self.handlePicking
def getShovelAction(self):
return TTLocalizer.GardeningRemove
def handleEnterPlot(self, colEntry = None):
if self.canBePicked():
self.notify.debug('entering if')
base.localAvatar.addShovelRelatedDoId(self.doId)
base.localAvatar.setShovelAbility(TTLocalizer.GardeningRemove)
else:
self.notify.debug('entering else')
def handlePicking(self):
fullName = self.name
messenger.send('wakeup')
self.confirmDialog = TTDialog.TTDialog(style=TTDialog.YesNo, text=TTLocalizer.ConfirmRemoveStatuary % {'item': fullName}, command=self.confirmCallback)
self.confirmDialog.show()
base.cr.playGame.getPlace().detectedGardenPlotUse()
def confirmCallback(self, value):
self.notify.debug('value=%d' % value)
if self.confirmDialog:
self.confirmDialog.destroy()
self.confirmDialog = None
if value > 0:
self.doPicking()
else:
base.cr.playGame.getPlace().detectedGardenPlotDone()
return
def doPicking(self):
if not self.canBePicked():
self.notify.debug("I don't own this flower, just returning")
return
self.handleRemove()
def handleExitPlot(self, entry = None):
DistributedLawnDecor.DistributedLawnDecor.handleExitPlot(self, entry)
base.localAvatar.removeShovelRelatedDoId(self.doId)
def doResultDialog(self):
self.startInteraction()
itemName = GardenGlobals.PlantAttributes[self.typeIndex]['name']
stringToShow = TTLocalizer.getResultPlantedSomethingSentence(itemName)
self.resultDialog = TTDialog.TTDialog(style=TTDialog.Acknowledge, text=stringToShow, command=self.resultsCallback)
def resultsCallback(self, value):
self.notify.debug('value=%d' % value)
if self.resultDialog:
self.resultDialog.destroy()
self.resultDialog = None
self.finishInteraction()
return
|
birdland/dlkit-doc
|
dlkit/mongo/repository/sessions.py
|
Python
|
mit
| 237,798
| 0.001409
|
"""Mongodb implementations of repository sessions."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package mongo package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from bson.objectid import ObjectId
from . import objects
from . import queries
from . import searches
from .. import MONGO_LISTENER
from .. import utilities
from ...abstract_osid.id.primitives import Id as ABCId
from ...abstract_osid.repository import sessions as abc_repository_sessions
from ...abstract_osid.repository.objects import AssetForm as ABCAssetForm
from ...abstract_osid.repository.objects import CompositionForm as ABCCompositionForm
from ...abstract_osid.repository.objects import RepositoryForm as ABCRepositoryForm
from ...abstract_osid.type.primitives import Type as ABCType
from ..id.objects import IdList
from ..list_utilities import move_id_ahead, move_id_behind, order_ids
from ..osid.sessions import OsidSession
from ..primitives import Id
from ..primitives import Type
from ..utilities import MongoClientValidated
from dlkit.abstract_osid.osid import errors
from dlkit.mongo.osid import sessions as osid_sessions
from dlkit.primordium.id.primitives import Id
DESCENDING = -1
ASCENDING = 1
CREATED = True
UPDATED = True
ENCLOSURE_RECORD_TYPE = Type(
identifier='enclosure',
namespace='osid-object',
authority='ODL.MIT.EDU')
CREATED = True
UPDATED = True
COMPARATIVE = 0
PLENARY = 1
ACTIVE = 0
ANY_STATUS = 1
SEQUESTERED = 0
UNSEQUESTERED = 1
class AssetLookupSession(abc_repository_sessions.AssetLookupSession, osid_sessions.OsidSession):
"""This session defines methods for retrieving assets.
An ``Asset`` represents an element of content stored in a
Repository.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
* isolated repository view: All asset methods in this session
operate, retrieve and pertain to assets defined explicitly in
the current repository. Using an isolated view is useful for
managing ``Assets`` with the ``AssetAdminSession.``
* federated repository view: All asset methods in this session
operate, retrieve and pertain to all assets defined in this
repository and any other assets implicitly available in this
    repository through repository inheritance.
The methods ``use_federated_repository_view()`` and
``use_isolated_repository_view()`` behave as a radio group and one
should be selected before invoking any lookup methods.
    Assets may have additional records indicated by their respective
record types. The record may not be accessed through a cast of the
``Asset``.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetLookupSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_lookup_assets(self):
"""Tests if this user can perform ``Asset`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.can_lookup_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_asset_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_comparative_resource_view
self._use_comparative_object_view()
def use_plenary_asset_view(self):
"""A complete view of the ``Asset`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_plenary_resource_view
self._use_plenary_object_view()
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include assets in repositories which are
children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
@utilities.arguments_not_none
def get_asset(self, asset_id):
"""Gets the ``Asset`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Asset`` may have a different
``Id`` than requested, such as the case where a duplicate ``Id``
was assigned to an ``Asset`` and retained for compatibility.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
retrieve
return: (osid.repository.Asset) - the returned ``Asset``
raise: NotFound - no ``Asset`` found with the given ``Id``
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to c
|
nizox/circuits
|
circuits/web/errors.py
|
Python
|
mit
| 7,638
| 0.000262
|
"""Errors
This module implements a set of standard HTTP Errors.
"""
import json
import traceback
try:
from html import escape
except ImportError:
from cgi import escape # Deprecated since version 3.2
try:
from urllib.parse import urljoin as _urljoin
except ImportError:
from urlparse import urljoin as _urljoin # NOQA
from circuits import Event
from ..six import string_types
from .constants import SERVER_URL, SERVER_VERSION, POWERED_BY
from .constants import DEFAULT_ERROR_MESSAGE, HTTP_STATUS_CODES
class httperror(Event):
"""An event for signaling an HTTP error"""
code = 500
description = ""
def __init__(self, request, response, code=None, **kwargs):
"""
The constructor creates a new instance and modifies the *response*
argument to reflect the error.
"""
super(httperror, self).__init__(request, response, code, **kwargs)
# Override HTTPError subclasses
self.name = "httperror"
self.request = request
self.response = response
if code is not None:
self.code = code
self.error = kwargs.get("error", None)
self.description = kwargs.get(
"description", getattr(self.__class__, "description", "")
)
if self.error is not None:
#self.traceback = "ERROR: (%s) %s\n%s" % (
# self.error[0], self.error[1], "".join(self.error[2])
#)
self.traceback = "\n".join(traceback.format_exception(*self.error))
else:
self.traceback = ""
self.response.close = True
self.response.status = self.code
powered_by = POWERED_BY % ({
"url": SERVER_URL,
"version": SERVER_VERSION
}) if getattr(request.server, 'display_banner', False) else ""
self.data = {
"code": self.code,
"name": HTTP_STATUS_CODES.get(self.code, "???"),
"description": self.description,
"traceback": escape(self.traceback),
"powered_by": powered_by
}
def sanitize(self):
if self.code != 201 and not (299 < self.code < 400):
if "Location" in self.response.headers:
del self.response.headers["Location"]
def __str__(self):
self.sanitize()
if "json" in self.response.headers.get("Content-Type", ""):
index = ["code", "name", "description"]
if self.request.print_debug:
index.append("traceback")
return json.dumps(dict((key, self.data[key]) for key in index))
if not self.request.print_debug:
self.data["traceback"] = ''
return DEFAULT_ERROR_MESSAGE % self.data
def __repr__(self):
return "<%s %d %s>" % (
self.__class__.__name__, self.code, HTTP_STATUS_CODES.get(
self.code, "???"
)
)
class forbidden(httperror):
"""An event for signaling the HTTP Forbidden error"""
code = 403
class unauthorized(httperror):
"""An event for signaling the HTTP Unauthorized error"""
code = 401
class notfound(httperror):
"""An event for signaling the HTTP Not Fouond error"""
code = 404
class redirect(httperror):
"""An event for signaling the HTTP Redirect response"""
def __init__(self, request, response, urls, code=None):
"""
The constructor creates a new instance and modifies the
*response* argument to reflect a redirect response to the
given *url*.
"""
if isinstance(urls, string_types):
urls = [urls]
abs_urls = []
for url in urls:
# Note that urljoin will "do the right thing" whether url is:
# 1. a complete URL with host (e.g. "http://www.example.com/test")
# 2. a URL relative to root (e.g. "/dummy")
# 3. a URL relative to the current path
# Note that any query string in request is discarded.
url = request.uri.relative(url).unicode()
abs_urls.append(url)
self.urls = urls = abs_urls
# RFC 2616 indicates a 301 response code fits our goal; however,
# browser support for 301 is quite messy. Do 302/303 instead. See
# http://ppewww.ph.gla.ac.uk/~flavell/www/post-redirect.html
if code is None:
if request.protocol >= (1, 1):
code = 303
else:
code = 302
else:
if code < 300 or code > 399:
raise ValueError("status code must be between 300 and 399.")
super(redirect, self).__init__(request, response, code)
if code in (300, 301, 302, 303, 307):
response.headers["Content-Type"] = "text/html"
# "The ... URI SHOULD be given by the Location field
# in the response."
response.headers["Location"] = urls[0]
# "Unless the request method was HEAD, the entity of the response
# SHOULD contain a short hypertext note with a hyperlink to the
# new URI(s)."
msg = {300: "This resource can be found at <a href='%s'>%s</a>.",
301: ("This resource has permanently moved to "
"<a href='%s'>%s</a>."),
302: ("This resource resides temporarily at "
"<a href='%s'>%s</a>."),
303: ("This resource can be found at "
"<a href='%s'>%s</a>."),
307: ("This resource has moved temporarily to "
"<a href='%s'>%s</a>."),
}[code]
response.body = "<br />\n".join([msg % (u, u) for u in urls])
# Previous code may have set C-L, so we have to reset it
# (allow finalize to set it).
response.headers.pop("Content-Length", None)
elif code == 304:
# Not Modified.
# "The response MUST include the following header fields:
# Date, unless its omission is required by section 14.18.1"
# The "Date" header should have been set in Response.__init__
# "...the response SHOULD NOT include other entity-headers."
for key in ("Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-Location", "Content-MD5",
"Content-Range", "Content-Type", "Expires",
"Last-Modified"):
if key in response.headers:
del response.headers[key]
# "The 304 response MUST NOT contain a message-body."
response.body = None
# Previous code may have set C-L, so we have to reset it.
response.headers.pop("Content-Length", None)
elif code == 305:
# Use Proxy.
# urls[0] should be the URI of the proxy.
response.headers["Location"] = urls[0]
response.body = None
# Previous code may have set C-L, so we have to reset it.
response.headers.pop("Content-Length", None)
else:
raise ValueError("The %s status code is unknown." % code)
def __repr__(self):
if len(self.channels) > 1:
channels = repr(self.channels)
elif len(self.channels) == 1:
channels = str(self.channels[0])
else:
channels = ""
return "<%s %d[%s.%s] %s>" % (
self.__class__.__name__, self.code, channels, self.name,
" ".join(self.urls)
)
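# Minimal usage sketch (the component context is hypothetical; `fire` is the
# standard circuits event dispatch):
#
#     self.fire(httperror(request, response, 404))
#     self.fire(redirect(request, response, "/new-location"))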
|