Dataset columns (with observed value ranges):

  repo_name   string, length 5 to 100
  path        string, length 4 to 231
  language    string, 1 distinct value
  license     string, 15 distinct values
  size        int64, 6 to 947k
  score       float64, 0 to 0.34
  prefix      string, length 0 to 8.16k
  middle      string, length 3 to 512
  suffix      string, length 0 to 8.17k
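The rows below are a preview of the records: each one pairs repository metadata with a Python source file stored as three text columns (prefix, middle, suffix). As a minimal sketch of how a dataset with this schema could be loaded and inspected, assuming the Hugging Face `datasets` library; the identifier "user/python-fim-corpus" and the split name "train" are placeholders, since the real dataset id is not shown here:

```python
# Minimal sketch: load a dataset with the schema shown above and inspect one record.
# "user/python-fim-corpus" and the split name "train" are placeholders,
# not the actual identifiers of this dataset.
from datasets import load_dataset

ds = load_dataset("user/python-fim-corpus", split="train")
row = ds[0]

# Repository-level metadata columns.
print(row["repo_name"], row["path"], row["license"], row["size"], row["score"])

# Each record stores one source file cut into three pieces: the text before a
# masked span ("prefix"), the masked span itself ("middle", 3-512 characters),
# and the text after it ("suffix").
print(len(row["prefix"]), len(row["middle"]), len(row["suffix"]))
```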
repo_name: atloiaco/vivo-pump | path: examples/positions/make_enum.py | language: Python | license: bsd-3-clause | size: 1,483 | score: 0.001349
#!/usr/bin/env/python """ make_enum.py -- make enumerations for positions """ __author__ = "Michael Conlon" __copyright__ = "Copyright 2015 (c) Michael Conlon" __license__ = "BSD 3-Clause license" __version__ = "0.1.1" from datetime import datetime from vivopump import get_parms, create_enum def main(): """ Generate the enums for positions """ print datetime.now(), "Start" parms = get_parms() # perso
n via Orcid query = """ SELECT (MIN (?xshort) AS ?short) ?vivo WHERE { ?vivo vivo:orcidId ?xs
hort . } GROUP BY ?vivo ORDER BY ?short """ create_enum("orcid_enum.txt", query, parms) # department via label query = """ SELECT (MIN (?xlabel) AS ?short) ?vivo WHERE { {?vivo a vivo:Department . } UNION {?vivo a vivo:Institute . } UNION {?vivo a vivo:School . } UNION {?vivo a vivo:Center .} UNION {?vivo a vivo:College . } ?vivo rdfs:label ?xlabel . } GROUP BY ?vivo ORDER BY ?short """ create_enum("dept_enum.txt", query, parms) # dates via datetime query = """ SELECT ?short ?vivo WHERE { ?vivo a vivo:DateTimeValue . # ?vivo vivo:dateTimePrecision vivo:yearMonthDayPrecision . ?vivo vivo:dateTime ?short . } ORDER BY ?short """ create_enum("date_enum.txt", query, parms, trim=10) print datetime.now(), "End" if __name__ == "__main__": main()
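Note how the record above splits one source file (examples/positions/make_enum.py) mid-word: the prefix ends with "perso", the short middle span begins with "n via Orcid", and the suffix carries the rest of the file. Reassembling the file, or turning a record into a fill-in-the-middle style prompt, is plain string concatenation; a sketch follows, in which the <fim_*> sentinel strings are illustrative placeholders rather than tokens defined by this dataset:

```python
def reassemble(row: dict) -> str:
    """Recover the original source file from one record of this dataset."""
    return row["prefix"] + row["middle"] + row["suffix"]


def fim_prompt(row: dict) -> str:
    """Build a fill-in-the-middle prompt in prefix-suffix-middle order.

    The <fim_*> markers are illustrative placeholders; a real setup would use
    whatever special tokens its tokenizer actually defines.
    """
    return (
        "<fim_prefix>" + row["prefix"]
        + "<fim_suffix>" + row["suffix"]
        + "<fim_middle>" + row["middle"]
    )
```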
repo_name: while519/SME | path: Tensor_exp.py | language: Python | license: bsd-3-clause | size: 11,310 | score: 0.002918
#! /usr/bin/python from model import * # Utils ---------------------------------------------------------------------- def load_file(path): return scipy.sparse.csr_matrix(cPickle.load(open(path)), dtype=theano.config.floatX) def compute_prauc(pred, lab): pred = np.asarray(pred) lab = np.asarray(lab) order = np.argsort(pred) lab_ordered = lab[order] pred_ordered = pred[order] precision = {} recall = {} # All examples are classified 1 precision[np.min(pred_ordered) - 1.0] = (np.sum(lab_ordered) / float(len(lab))) recall[np.min(pred_ordered) - 1.0] = 1. for i in range(len(lab)): if len(lab) - i - 1 == 0: # No examples are classified 1 precision[pred_ordered[i]] = 1 else: precision[pred_ordered[i]] = (np.sum(lab_ordered[i + 1:]) / float(len(lab) - i - 1)) recall[pred_ordered[i]] = (np.sum(lab_ordered[i + 1:]) / float(np.sum(lab_ordered))) # Precision-Recall curve points points = [] for i in np.sort(precision.keys())[::-1]: points += [(float(recall[i]), float(precision[i]))] # Compute area auc = sum((y0 + y1) / 2. * (x1 - x0) for (x0, y0), (x1, y1) in zip(points[:-1], points[1:])) return auc class DD(dict): """This class is only used to replace a state variable of Jobman""" def __getattr__(self, attr): if attr == '__getstate__': return super(DD, self).__getstate__ elif attr == '__setstate__': return super(DD, self).__setstate__ elif attr == '__slots__': return super(DD, self).__slots__ return self[attr] def __setattr__(self, attr, value): assert attr not in ('__getstate__', '__setstate__', '__slots__') self[attr] = value def __str__(self): return 'DD%s' % dict(self) def __repr__(self): return str(self) def __deepcopy__(self, memo): z = DD() for k, kv in self.iteritems(): z[k] = copy.deepcopy(kv, memo) return z # ---------------------------------------------------------------------------- # Experiment function -------------------------------------------------------- def Tensorexp(state, channel): # Show experiment parameters print >> sys.stderr, state np.random.seed(state.seed) # Experiment folder if hasattr(channel, 'remote_path'): state.savepath = channel.remote_path + '/' elif hasattr(channel, 'path'): state.savepath = channel.path + '/' else: if not os.path.isdir(state.savepath): os.mkdir(state.savepath) # Positives trainl = load_file(state.datapath + state.dataset + '-train-pos-lhs-fold%s.pkl' % state.fold) trainr = load_file(state.datapath + state.dataset + '-train-pos-rhs-fold%s.pkl' % state.fold) traino = load_file(state.datapath + state.dataset + '-train-pos-rel-fold%s.pkl' % state.fold) if state.op == 'SE': traino = traino[-state.Nrel:, :] # Negatives trainln = load_file(state.datapath + state.dataset + '-train-neg-lhs-fold%s.pkl' % state.fold) trainrn = load_file(state.datapath + state.dataset + '-train-neg-rhs-fold%s.pkl' % state.fold) trainon = load_file(state.datapath + state.dataset + '-train-neg-rel-fold%s.pkl' % state.fold) if state.op == 'SE': trainon = trainon[-state.Nrel:, :] # Valid set validl = load_file(state.datapath + state.dataset + '-valid-lhs-fold%s.pkl' % state.fold) validr = load_file(state.datapath + state.dataset + '-valid-rhs-fold%s.pkl' % state.fold) valido = load_file(state.datapath + state.dataset + '-valid-rel-fold%s.pkl' % state.fold)
if state.op == 'SE': valido = valido[-state.Nrel:, :] outvalid = cPickle.load(open(state.datapath + '%s-valid-targets-fold%s.pkl' % (state.dataset, state.fold))) # Test set testl = load_file(state.datapath + state.dataset + '-test-lhs-fold%s.pkl' % state.fold) testr = load_file(state.datapath + state.dataset + '-test-rhs-fold%s.
pkl' % state.fold) testo = load_file(state.datapath + state.dataset + '-test-rel-fold%s.pkl' % state.fold) if state.op == 'SE': testo = testo[-state.Nrel:, :] outtest = cPickle.load(open(state.datapath + '%s-test-targets-fold%s.pkl' % (state.dataset, state.fold))) # Model declaration if not state.loadmodel: # operators if state.op == 'Unstructured': leftop = Unstructured() rightop = Unstructured() elif state.op == 'SME_lin': leftop = LayerLinear(np.random, 'lin', state.ndim, state.ndim, state.nhid, 'left') rightop = LayerLinear(np.random, 'lin', state.ndim, state.ndim, state.nhid, 'right') elif state.op == 'SME_bil': leftop = LayerBilinear(np.random, 'lin', state.ndim, state.ndim, state.nhid, 'left') rightop = LayerBilinear(np.random, 'lin', state.ndim, state.ndim, state.nhid, 'right') elif state.op == 'SE': leftop = LayerMat('lin', state.ndim, state.nhid) rightop = LayerMat('lin', state.ndim, state.nhid) # embeddings if not state.loademb: embeddings = Embeddings(np.random, state.Nent, state.ndim, 'emb') else: f = open(state.loademb) embeddings = cPickle.load(f) f.close() if state.op == 'SE' and type(embeddings) is not list: relationl = Embeddings(np.random, state.Nrel, state.ndim * state.nhid, 'rell') relationr = Embeddings(np.random, state.Nrel, state.ndim * state.nhid, 'relr') embeddings = [embeddings, relationl, relationr] simfn = eval(state.simfn + 'sim') else: f = open(state.loadmodel) embeddings = cPickle.load(f) leftop = cPickle.load(f) rightop = cPickle.load(f) simfn = cPickle.load(f) f.close() # Functions compilation trainfunc = TrainFn(simfn, embeddings, leftop, rightop, marge=state.marge) testfunc = SimFn(simfn, embeddings, leftop, rightop) out = [] outb = [] state.bestvalid = -1 batchsize = trainl.shape[1] / state.nbatches print >> sys.stderr, "BEGIN TRAINING" timeref = time.time() for epoch_count in xrange(1, state.totepochs + 1): # Shuffling order = np.random.permutation(trainl.shape[1]) trainl = trainl[:, order] trainr = trainr[:, order] traino = traino[:, order] order = np.random.permutation(trainln.shape[1]) trainln = trainln[:, order] trainrn = trainrn[:, order] trainon = trainon[:, order] for i in range(state.nbatches): tmpl = trainl[:, i * batchsize:(i + 1) * batchsize] tmpr = trainr[:, i * batchsize:(i + 1) * batchsize] tmpo = traino[:, i * batchsize:(i + 1) * batchsize] tmpln = trainln[:, i * batchsize:(i + 1) * batchsize] tmprn = trainrn[:, i * batchsize:(i + 1) * batchsize] tmpon = trainon[:, i * batchsize:(i + 1) * batchsize] # training iteration outtmp = trainfunc(state.lremb, state.lrparam / float(batchsize), tmpl, tmpr, tmpo, tmpln, tmprn, tmpon) out += [outtmp[0] / float(batchsize)] outb += [outtmp[1]] # embeddings normalization if type(embeddings) is list: embeddings[0].normalize() else: embeddings.normalize() if (epoch_count % state.test_all) == 0: # model evaluation print >> sys.stderr, "-- EPOCH %s (%s seconds per epoch):" % ( epoch_count, round(time.time() - timeref, 3) / float(state.test_all)) timeref = time.time() print >>
repo_name: chenyujie/hybrid-murano | path: murano/packages/load_utils.py | language: Python | license: apache-2.0 | size: 3,350 | score: 0
# Copyright (c) 2014 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import shutil import sys import tempfile import zipfile import yaml from murano.engine import yaql_yaml_loader import murano.packages.application_package import murano.packages.exceptions as e import murano.packages.versions.hot_v1 import murano.packages.versions.mpl_v1 def load_from_file(archive_path, target_dir=None, drop_dir=False, loader=yaql_yaml_loader.YaqlYamlLoader): if not os.path.isfile(archive_path): raise e.PackageLoadError('Unable to find package file') created = False if not target_dir: target_dir = tempfile.mkdtemp() created = True elif not os.path.exists(target_dir): os.mkdir(target_dir) created = True else: if os.listdir(target_dir): raise e.PackageLoadError('Target directory is not empty') try: if not zipfile.is_zipfile(archive_path): raise e.PackageFormatError("Uploaded file {0} is not a " "zip archive".format(archive_path)) package = zipfile.ZipFile(archive_path) package.extractall(path=target_dir) return load_from_dir(target_dir, preload=True, loader=loader) finally: if drop_dir: if created: shutil.rmtree(target_dir) else: for f in os.listdir(target_dir): os.unlink(os.path.join(target_dir, f)) def load_from_dir(source_directory, filename='manifest.yaml', preload=False, loader=yaql_yaml_loader.YaqlYamlLoader): formats = { '1.0': murano.packages.versions.mpl_v1, 'MuranoPL/1.0': murano.packages.versions.mpl_v1, 'Heat.HOT/1.0': murano.packages.versions.hot_v1 } if not os.path.isdir(source_directory) or not os.path.exists( source_directory): raise e.PackageLoadError('Invalid package directory') full_path = os.path.join(source_directory, filename) if not os.path.isfile(full_path): raise e.PackageLoadError('Unable to find package manifest') try: with open(full_path) as stream: content = yaml.safe_load(stream) except Excep
tion as ex: trace = sys.exc_info()[2] raise e.PackageLoadError( "Unable to load due to '{0}'".format(str(ex))), None, trace if content: p_format = str(content.get('Format')) if not p_format or p_format not in formats: raise e.PackageFormatError( 'Unknown or missing format version') package = form
ats[p_format].create(source_directory, content, loader) formats[p_format].load(package, content) if preload: package.validate() return package
repo_name: santisiri/popego | path: envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/protocols/amp.py | language: Python | license: bsd-3-clause | size: 60,989 | score: 0.002246
# -*- test-case-name: twisted.test.test_amp -*- # Copyright 2005 Divmod, Inc. See LICENSE file for details """ This module implements AMP, the Asynchronous Messaging Protocol. AMP is a protocol for sending multiple asynchronous request/response pairs over the same connection. Requests and responses are both collections of key/value pairs. AMP is a very simple protocol which is not an application. This module is a "protocol construction kit" of sorts; it attempts to be the simplest wire-level implementation of Deferreds. AMP provides the following base-level features: - Asynchronous request/response handling (hence the name) - Requests and responses are both key/value pairs - Binary transfer of all data: all data is length-prefixed. Your application will never need to worry about quoting. - Command dispatching (like HTTP Verbs): the protocol is extensible, and multiple AMP sub-protocols can be grouped together easily. The protocol implementation also provides a few additional features which are not part of the core wire protocol, but are nevertheless very useful: - Tight TLS integration, with an included StartTLS command. - Handshaking to other protocols: because AMP has well-defined message boundaries and maintains all incoming and outgoing requests for you, you can start a connection over AMP and then switch to another protocol. This makes it ideal for firewall-traversal applications where you may have only one forwarded port but multiple applications that want to use it. Using AMP with Twisted is simple. Each message is a command, with a response. You begin by defining a command type. Commands specify their input and output in terms of the types that they expect to see in the request and response key-value pairs. Here's an example of a command that adds two integers, 'a' and 'b':: class Sum(amp.Command): arguments = [('a', amp.Integer()), ('b', amp.Integer())] response = [('total', amp.Integer())] Once you have specified a command, you need to make it part of a protocol, and define a responder for it. Here's a 'JustSum' protocol that includes a responder for our 'Sum' command:: class JustSum(amp.AMP): def sum(self, a, b): total = a + b print 'Did a sum: %d + %d = %d' % (a, b, total) return {'total': total} Sum.responder(sum) Later, when you want to actually do a sum, the following expression will return a Deferred whilch will fire with the result:: ClientCreator(reactor, amp.AMP).connectTCP(...).addCallback( lambda p: p.callRemote(Sum, a=13, b=81)).addCallback( lambda result: result['total']) You can also define the propogation of specific errors in AMP. For example, for the slightly more complicated case of division, we might have to deal with division by zero:: class Divide(amp.Command): arguments = [('numerator', amp.Integer()), ('denominator', amp.Integer())] response = [('result', amp.Float())] errors = {ZeroDivisionError: 'ZERO_DIVISION'} The 'errors' mapping here tells AMP that if a responder to Divide emits a L{ZeroDivisionError}, then the other side should be informed that an error of the type 'ZERO_DIVISION' has occurred. Writing a responder which takes advantage of this is very simple - just raise your exception normally:: class JustDivide(amp.AMP): def divide(self, numerator, denominator): result = numerator / denominator pr
int 'Divided: %d / %d = %d' % (numerator, denominator, total) return {'result': result} Divide.responder(divide) On the client side, the errors mapping will be used to
determine what the 'ZERO_DIVISION' error means, and translated into an asynchronous exception, which can be handled normally as any L{Deferred} would be:: def trapZero(result): result.trap(ZeroDivisionError) print "Divided by zero: returning INF" return 1e1000 ClientCreator(reactor, amp.AMP).connectTCP(...).addCallback( lambda p: p.callRemote(Divide, numerator=1234, denominator=0) ).addErrback(trapZero) For a complete, runnable example of both of these commands, see the files in the Twisted repository:: doc/core/examples/ampserver.py doc/core/examples/ampclient.py On the wire, AMP is a protocol which uses 2-byte lengths to prefix keys and values, and empty keys to separate messages:: <2-byte length><key><2-byte length><value> <2-byte length><key><2-byte length><value> ... <2-byte length><key><2-byte length><value> <NUL><NUL> # Empty Key == End of Message And so on. Because it's tedious to refer to lengths and NULs constantly, the documentation will refer to packets as if they were newline delimited, like so:: C: _command: sum C: _ask: ef639e5c892ccb54 C: a: 13 C: b: 81 S: _answer: ef639e5c892ccb54 S: total: 94 Notes: Values are limited to the maximum encodable size in a 16-bit length, 65535 bytes. Keys are limited to the maximum encodable size in a 8-bit length, 255 bytes. Note that we still use 2-byte lengths to encode keys. This small redundancy has several features: - If an implementation becomes confused and starts emitting corrupt data, or gets keys confused with values, many common errors will be signalled immediately instead of delivering obviously corrupt packets. - A single NUL will separate every key, and a double NUL separates messages. This provides some redundancy when debugging traffic dumps. - NULs will be present at regular intervals along the protocol, providing some padding for otherwise braindead C implementations of the protocol, so that <stdio.h> string functions will see the NUL and stop. - This makes it possible to run an AMP server on a port also used by a plain-text protocol, and easily distinguish between non-AMP clients (like web browsers) which issue non-NUL as the first byte, and AMP clients, which always issue NUL as the first byte. """ __metaclass__ = type import types from cStringIO import StringIO from struct import pack from twisted.python.reflect import accumulateClassDict from twisted.python.failure import Failure from twisted.python import log, filepath from twisted.internet.main import CONNECTION_LOST from twisted.internet.error import PeerVerifyError from twisted.internet.defer import Deferred, maybeDeferred, fail from twisted.protocols.basic import Int16StringReceiver, StatefulStringProtocol from twisted.internet._sslverify import problemsFromTransport # I'd like this to use the exposed public API, but for some reason, when it was # moved, these names were not exposed by internet.ssl. from twisted.internet.ssl import CertificateOptions, Certificate, DN, KeyPair ASK = '_ask' ANSWER = '_answer' COMMAND = '_command' ERROR = '_error' ERROR_CODE = '_error_code' ERROR_DESCRIPTION = '_error_description' UNKNOWN_ERROR_CODE = 'UNKNOWN' UNHANDLED_ERROR_CODE = 'UNHANDLED' MAX_KEY_LENGTH = 0xff MAX_VALUE_LENGTH = 0xffff class AmpError(Exception): """ Base class of all Amp-related exceptions. """ class ProtocolSwitched(Exception): """ Connections which have been switched to other protocols can no longer accept traffic at the AMP level. This is raised when you try to send it. 
""" class OnlyOneTLS(AmpError): """ This is an implementation limitation; TLS may only be started once per connection. """ class NoEmptyBoxes(AmpError): """ You can't have empty boxes on the connection. This is raised when you receive or attempt to send one. """ class InvalidSignature(AmpError): """ You didn't pass all the required arguments. """ class TooLong(AmpError): """ One of the protocol's length limitations was violated. @ivar isKey: true if the string being encoded in a key position, false if it was in a value position. @ivar isLocal: Was the string encod
repo_name: tsdmgz/ansible | path: lib/ansible/utils/module_docs_fragments/dellos9.py | language: Python | license: gpl-3.0 | size: 2,591 | score: 0.003088
# # (c) 2015, Peter Sprygada <psprygada@ansible.com> # # Copyright (c) 2016 Dell Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. class ModuleDocFragment(object): # Standard files documentation fragment DOCUMENTATION = """ options: provider: description: - A dic
t object containing connection details. default: null suboptions: host: description: - Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport. required: true port: description: - Specifies t
he port to use when building the connection to the remote device. default: 22 username: description: - User to authenticate the SSH session to the remote device. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead. password: description: - Password to authenticate the SSH session to the remote device. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead. default: null ssh_keyfile: description: - Path to an ssh key used to authenticate the SSH session to the remote device. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead. timeout: description: - Specifies idle timeout (in seconds) for the connection. Useful if the console freezes before continuing. For example when saving configurations. default: 10 notes: - For more information on using Ansible to manage Dell EMC Network devices see U(https://www.ansible.com/ansible-dell-networking). """
repo_name: straup/buildingequalsyes | path: bin/sqlite2solr.py | language: Python | license: bsd-2-clause | size: 3,932 | score: 0.006358
#!/usr/bin/env python import pysolr import os.path import sqlite3 import sys import json import Geohash import re import geojson import shapely.wkt from shapely.geometry import Polygon from shapely.geometry import LineString solr = pysolr.Solr('http://localhost:9999/solr/buildings') solr.delete(q='*:*') dbconn = sqlite3.connect('buildings.osm.db') dbcurs = dbconn.cursor() last_woeid = 2147483647 uid = last_woeid count = 0 offset = 0 limit = 10000 counter = 0 docs = [] sql = "SELECT COUNT(id) FROM ways" dbcurs.execute(sql) row = dbcurs.fetchone() count = row[0] while offset < count : sql = "SELECT * FROM ways
LIMIT %s, %s" % (offset, limit) print "%s (%s)" % (sql, count) dbcurs.execute(sql) for row in dbcurs.fetchall(): counter += 1 uid = uid + 1 way_
id, lat, lon, woeid, nodes, tags = row if not lat or not lon: continue if float(lat) < -90. or float(lat) > 90.: continue if float(lon) < -180. or float(lon) > 180.: continue if not woeid: woeid = 0 nodes = nodes.split(',') points = [] poly = None center = None alltags = {} name = None tags = json.loads(tags) if tags.get('name', False): name = tags['name'] for node_id in nodes: dbcurs.execute("SELECT * FROM nodes WHERE id=?", (node_id, )) node = dbcurs.fetchone() points.append((node[2], node[1])) try: _tags = json.loads(node[3]) for k,v in _tags.items(): alltags[k] = v except Exception, e: pass # TO DO: fix me (define line) if len(points) == 2: line = LineString(points) poly = line.centroid center = line.centroid else : points.append(points[0]) poly = Polygon(points) center = poly.centroid # TO DO : trim decimal coordinates if poly: # poly = shapely.wkt.dumps(poly) poly = geojson.dumps(poly) if center : lat = center.y lon = center.x # tags for k,v in tags.items(): alltags[k] = v if alltags.get('building') and alltags['building'] == 'yes': del(alltags['building']) _alltags = [] for k,v in alltags.items(): tmp = k.split(":") v = unicode(v) v = re.sub("8", "88", v) v = re.sub("/", "8s", v) v = re.sub(":", "8c", v) tmp.append(v) _alltags.append("/".join(map(unicode, tmp))) alltags = _alltags # go! lat = float("%.6f" % lat) lon = float("%.6f" % lon) # def stupid_floating_points(m): return m.group(1) poly = re.sub(r'(\.\d{6})\d+', stupid_floating_points, poly) # doc = { 'id' : uid, 'parent_woeid' : woeid, 'way_id' : way_id, 'nodes' : nodes, 'centroid' : "%s,%s" % (lat,lon), } if poly != None : doc['polygon'] = poly if len(alltags): doc['tags'] = alltags if name != None: doc['name'] = name for k,v in doc.items(): if v == None or v == '': print "WTF %s : %s" % (k, v) sys.exit() print "[%s] add doc" % counter docs.append(doc) # if doc.get('tags'): # print doc['tags'] try: solr.add(docs) except Exception, e: fh = open('add.json', 'w') fh.write(json.dumps(docs, indent=2)) fh.close() raise Exception, e docs = [] offset += limit if len(docs): solr.add(docs)
repo_name: pwyliu/cloud-init-0.6.3 | path: cloudinit/CloudConfig/cc_rsyslog.py | language: Python | license: gpl-3.0 | size: 3,237 | score: 0.000618
# vi: ts=4 expandtab syntax=python # # Copyright (C) 2009-2010 Canonical Ltd. # Copyright (C) 2012 Hewlett-Packard Development Company, L.P. # # Author: Scott Moser <scott.moser@canonical.com> # Author: Juerg Haefliger <juerg.haefliger@hp.com> # # This program is free softw
are: you can redistribute it and/or modify # it under the terms
of the GNU General Public License version 3, as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import cloudinit import logging import cloudinit.util as util import traceback DEF_FILENAME = "20-cloud-config.conf" DEF_DIR = "/etc/rsyslog.d" def handle(_name, cfg, _cloud, log, _args): # rsyslog: # - "*.* @@192.158.1.1" # - content: "*.* @@192.0.2.1:10514" # - filename: 01-examplecom.conf # content: | # *.* @@syslogd.example.com # process 'rsyslog' if not 'rsyslog' in cfg: return def_dir = cfg.get('rsyslog_dir', DEF_DIR) def_fname = cfg.get('rsyslog_filename', DEF_FILENAME) files = [] elst = [] for ent in cfg['rsyslog']: if isinstance(ent, dict): if not "content" in ent: elst.append((ent, "no 'content' entry")) continue content = ent['content'] filename = ent.get("filename", def_fname) else: content = ent filename = def_fname if not filename.startswith("/"): filename = "%s/%s" % (def_dir, filename) omode = "ab" # truncate filename first time you see it if filename not in files: omode = "wb" files.append(filename) try: util.write_file(filename, content + "\n", omode=omode) except Exception as e: log.debug(traceback.format_exc(e)) elst.append((content, "failed to write to %s" % filename)) # need to restart syslogd restarted = False try: # if this config module is running at cloud-init time # (before rsyslog is running) we don't actually have to # restart syslog. # # upstart actually does what we want here, in that it doesn't # start a service that wasn't running already on 'restart' # it will also return failure on the attempt, so 'restarted' # won't get set log.debug("restarting rsyslog") util.subp(['service', 'rsyslog', 'restart']) restarted = True except Exception as e: elst.append(("restart", str(e))) if restarted: # this only needs to run if we *actually* restarted # syslog above. cloudinit.logging_set_from_cfg_file() log = logging.getLogger() log.debug("rsyslog configured %s" % files) for e in elst: log.warn("rsyslog error: %s\n" % ':'.join(e)) return
repo_name: anupam-mitra/PySpikeSort | path: spikesort/features/spectral.py | language: Python | license: gpl-3.0 | size: 1,567 | score: 0.008296
#!/usr/bin/env python # -*- coding: utf-8 -*- # # ${FILENAME} # # Copyright 2015 Anupam Mitra <anupam.mitra@gmail.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed
in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth
Floor, Boston, # MA 02110-1301, USA. # # import numpy as np import pywt def wavelet_decomp (s, wavelet='haar', levels=4): """ Wavelet decomposition based features Parameters ---------- s: Signal segments from which to compute first difference with lag. The shape should be (n_signals, n_samples) wavelet: str Wavelet basis to use for decomposition levels: int Level of wavelet decomposition """ n_features = np.shape(s)[1] n_spikes = np.shape(s)[0] features = np.empty((n_spikes, n_features)) for j in range(n_spikes): wd = pywt.wavedec(s[j, :], wavelet=wavelet, level=levels) features[j,:] = np.hstack(wd) return features
repo_name: liqd/a4-meinberlin | path: tests/dashboard/test_dashboard_views_module_delete.py | language: Python | license: agpl-3.0 | size: 3,903 | score: 0
import pytest from django.urls import reverse from adhocracy4.modules.models import Module from adhocracy4.test.helpers import redirect_target @pytest.mark.django_db def test_module_delete_perms(client, phase, user, user2): module = phase.module module_delete_url = reverse('a4dashboard:module-delete', kwargs={ 'slug': module.slug}) response = client.post(module_delete_url) assert redirect_target(response) == 'account_login' client.login(username=user, password='password') response = client.post(module_delete_url) assert response.status_code == 403 organisation = module.project.organisation organisation.initiators.add(user2) client.login(username=user2, password='password') response = client.post(module_delete_url) assert redirect_target(response) == 'project-edit' @pytest.mark.django_db def test_module_delete(client, phase, user2): module = phase.module module.is_draft = False module.save() organisation = module.project.organisation organisation.initiators.add(user2) assert Module.objects.all().count() == 1 module_delete_url = reverse('a4dashboard:module-delete', kwargs={ 'slug': module.slug}) # deletin
g published modules has no effect client.login(username=user2, password='password') response = client.post(m
odule_delete_url) assert response.status_code == 302 assert Module.objects.all().count() == 1 # unpublish module module.is_draft = True module.save() client.login(username=user2, password='password') response = client.post(module_delete_url) assert redirect_target(response) == 'project-edit' assert Module.objects.all().count() == 0 @pytest.mark.django_db def test_module_delete_redirect(client, module_factory, user2): module = module_factory(is_draft=True) organisation = module.project.organisation organisation.initiators.add(user2) module_2 = module_factory(project=module.project, is_draft=True) module_3 = module_factory(project=module.project, is_draft=True) module_delete_url = reverse('a4dashboard:module-delete', kwargs={ 'slug': module.slug}) module_delete_url_2 = reverse('a4dashboard:module-delete', kwargs={ 'slug': module_2.slug}) module_delete_url_3 = reverse('a4dashboard:module-delete', kwargs={ 'slug': module_3.slug}) client.login(username=user2, password='password') referrer = reverse('a4dashboard:dashboard-information-edit', kwargs={ 'project_slug': module.project.slug}) response = client.post(module_delete_url, {'referrer': referrer}) assert response.status_code == 302 assert response['location'] == referrer response = client.post(module_delete_url_2, {}, HTTP_REFERER=referrer) assert response.status_code == 302 assert response['location'] == referrer response = client.post(module_delete_url_3, {}) assert redirect_target(response) == 'project-edit' @pytest.mark.django_db def test_module_unsuccessful_delete_redirect(client, module_factory, user2): module = module_factory(is_draft=False) organisation = module.project.organisation organisation.initiators.add(user2) module_delete_url = reverse('a4dashboard:module-delete', kwargs={ 'slug': module.slug}) client.login(username=user2, password='password') referrer = reverse('a4dashboard:dashboard-information-edit', kwargs={ 'project_slug': module.project.slug}) response = client.post(module_delete_url, {'referrer': referrer}) assert response.status_code == 302 assert response['location'] == referrer response = client.post(module_delete_url, {}, HTTP_REFERER=referrer) assert response.status_code == 302 assert response['location'] == referrer response = client.post(module_delete_url, {}) assert redirect_target(response) == 'project-edit'
repo_name: ternaris/marv-robotics | path: code/marv/marv_node/testing/_robotics_tests/test_section_topics.py | language: Python | license: agpl-3.0 | size: 960 | score: 0.002083
# Copyright 2016 - 2018 Ternaris. # SPDX-License-Identifier: AGPL-3.0-only from pkg_resources import resource_filename import marv_node.testing fro
m marv_node.testing import make_dataset, run_nodes, temporary_directory from marv_robotics.detail import connections_section as node from marv_store import Store class TestCase(marv_node.testing.TestCase): # TODO: Generate bags instead, but with connection info! BAGS = [ resource_filename('marv_node.testing._robotics_tests', 'data/test_0.bag'), resource_filename('marv_node.testing._robotics_tests', 'data/test_1.bag'), ] async d
ef test_node(self): with temporary_directory() as storedir: store = Store(storedir, {}) dataset = make_dataset(self.BAGS) store.add_dataset(dataset) streams = await run_nodes(dataset, [node], store) self.assertNodeOutput(streams[0], node) # TODO: test also header
repo_name: tvansteenburgh/PerfKitBenchmarker | path: tests/sample_test.py | language: Python | license: apache-2.0 | size: 1,098 | score: 0.001821
# Copyright 2014
Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except
in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from perfkitbenchmarker import sample class SampleTestCase(unittest.TestCase): def testMetadataOptional(self): instance = sample.Sample(metric='Test', value=1.0, unit='Mbps') self.assertDictEqual({}, instance.metadata) def testProvidedMetadataSet(self): metadata = {'origin': 'unit test'} instance = sample.Sample(metric='Test', value=1.0, unit='Mbps', metadata=metadata.copy()) self.assertDictEqual(metadata, instance.metadata)
repo_name: alessio/laditools | path: laditools/jack.py | language: Python | license: gpl-3.0 | size: 8,258 | score: 0.013684
#!/usr/bin/python # LADITools - Linux Audio Desktop Integration Tools # Copyright (C) 2011-2012 Alessio Treglia <quadrispro@ubuntu.com> # Copyright (C) 2007-2010, Marc-Olivier Barre <marco@marcochapeau.org> # Copyright (C) 2007-2009, Nedko Arnaudov <nedko@arnaudov.name> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import sys import dbus from .controller import LadiController name_base = 'org.jackaudio' ctrl_iface_name = name_base + '.JackControl' conf_iface_name = name_base + '.Configure' service_name = name_base + '.service' obj_path = '/org/jackaudio/Controller' def _dbus_type_to_python_type (dbus_value): if type (dbus_value) == dbus.Boolean: return bool(dbus_value) if type (dbus_value) == dbus.Int32 or type (dbus_value) == dbus.UInt32: return int(dbus_value) if type (dbus_value) == dbus.String: return str(dbus_value) if type (dbus_value) == dbus.Byte: return str (dbus_value) return dbus_value class JackController(LadiController): """Wrapper for controlling and monitoring JACK. This class provides an (almost) complete control on configured JACK servers. """ def __init__ (self): LadiController.__init__(self, dbus_type='SessionBus', service_name=service_name, obj_path=obj_path, iface_name=ctrl_iface_name) def is_started (self): return self.controller_iface.IsStarted () def name_owner_changed (name = None, old_owner = None, new_owner = None): sys.stderr.write("Name changed : %r\n" % name) sys.stderr.flush() def is_realtime (self): return self.controller_iface.IsRealtime () def get_load (self): return self.controller_iface.GetLoad () def get_xruns (self): return self.controller_iface.GetXruns () def get_sample_rate (self): return self.controller_iface.GetSampleRate () def get_latency (self): return self.controller_iface.GetLatency () def reset_xruns (self): return self.controller_iface.ResetXruns () def start (self): self.controller_iface.StartServer () def stop (self): self.controller_iface.StopServer () def kill (self): self.controller_iface.Exit () class JackConfigParameter(object): """Wrapper for JACK's parameters. This class provides an (almost) complete control to JACK's configuration parameters. 
""" def __init__(self, jack, path): self._jack = jack self.path = path self.name = path[-1:] def get_name(self): return self.name def get_type(self): return self._jack.get_param_type(self.path) def get_value(self): return self._jack.get_param_value(self.path) def set_value(self, value): self._jack.set_param_value(self.path, value) def reset_value(self): self._jack.reset_param_value(self.path) def get_short_description(self): return self._jack.get_param_short_description(self.path) def get_long_description(self): descr = self._jack.get_param_long_description(self.path) if not descr: descr = self.get_short_description() return descr def has_range(self): return self._jack.param_has_range(self.path) def get_range(self): return self._jack.param_get_range(self.path) def has_enum(self): return self._jack.param_has_enum(self.path) def is_strict_enum(self): return self._jack.param_is_strict_enum(self.path) def is_fake_values_enum(self): return self._jack.param_is_fake_value(self.path) def get_enum_values(self): return self._jack.param_get_enum_values(self.path) class JackConfigProxy(LadiController): """Wrapper for JACK's configuration. This controller provides access to the JACK's whole configuration. """ def __init__ (self): LadiController.__init__(self, dbus_type='SessionBus', service_name=service_name, obj_path=obj_path, iface_name=conf_iface_name) def name_owner_changed (name = None, old_owner = None, new_owner = None): print "Name changed : %r" % name def get_selected_driver (self): isset, default, value = self.controller_iface.GetParameterValue (['engine', 'driver']) return value def read_container (self, path): is_leaf, children = self.controller_iface.ReadContainer (path) if is_leaf: return [] return children def get_param_names (self, path): is_leaf, children = self.controller_iface.ReadContainer (path) if not is_leaf: return [] return children def get_param_short_description (self, path): type_char, name, short_descr, long_descr = self.controller_iface.GetParameterInfo (path) return short_descr def get_param_long_description (self, path): type_char, name, short_descr, long_descr = self.controller_iface.GetParameterInfo (path) return long_descr def get_param_type (self, path): type_char, name, short_descr, long_descr = self.controller_iface.GetParameterInfo (path) return str (type_char) def get_param_value (self, path): isset, default, value = self.controller_iface.GetParameterValue (path) isset = bool (isset) default = _dbus_type_to_python_type (default) value = _dbus_type_to_python_type (value) return isset, default, value def set_param_value (self, path, value): typestr = self.get_param_type (path) if typestr == "b": value = dbus.Boolean (value) elif typestr == "y": value = dbus.Byte (value) elif typestr == "i": value = dbus.Int32 (value) elif typestr == "u": value = dbus.UInt32 (value) self.controller_iface.SetParameterValue (path, value) def reset_param_value (self, path): self.controller_iface.ResetParameterValue (path) def param_has_range (self, path): is_range, is_strict, is_fake_value, values = self.controller_iface.GetParameterConstraint (path) return bool (is_range) def param_get_range (self, path): is_range, is_strict, is_fake_value, values = self.controller_iface.GetParameterConstraint (path) if not is_range or len (values) != 2: return -1, -1
return _dbus_type_to_python_type (values[0][0]), _dbus_type_to_python_type (values[1][0]) def param_has_enum (self, path): is_range, is_strict, is_fake_value, values = self.controller_iface.GetParameterConstraint (path) return not is_range and len (values) != 0 def param_is_strict_enum (self, path): is_range, is_strict, is_fake_value, values = self.control
ler_iface.GetParameterConstraint (path) return is_strict def param_is_fake_value (self, path): is_range, is_strict, is_fake_value, values = self.controller_iface.GetParameterConstraint (path) return is_fake_value def param_get_enum_values (self, path): is_range, is_strict, is_fake_value, dbus_values = self.controller_iface.GetParameterConstraint (path) values = [] if not is_range and len (dbus_values) != 0: for dbus_value in dbus_values: values.append ([_dbus_type_to_python_type (dbus
repo_name: openzim/zimfarm | path: dispatcher/backend/supervisor-listener.py | language: Python | license: gpl-3.0 | size: 1,917 | score: 0.000522
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: ai ts=4 sts=4 et sw=4 nu """ supervisor event listenner for generic script launching launches a script (passed as $1+) and communicates with supervisor """ import sys import pathlib import datetime import subprocess def to_supervisor(text): # only eventlistener protocol messages may be sent to stdout sys.stdout.write(text) sys.stdout.flush() def to_log(text): sys.stderr.write(text) sys.stderr.flush() def main(interval, command, args=[]): last_run = None while True: # transition from ACKNOWLEDGED to READY to_supervisor("READY\n") # read header line and print it to stderr line = sys.stdin.readline() # re
ad event payload and print it to stderr
headers = dict([x.split(":") for x in line.split()]) sys.stdin.read(int(headers["len"])) now = datetime.datetime.now() if last_run is None or last_run <= now - datetime.timedelta(seconds=interval): last_run = now script = subprocess.run( [command] + args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, ) to_log(script.stdout) # if script.returncode != 0: # to_supervisor("RESULT 4\nFAIL") # transition from READY to ACKNOWLEDGED to_supervisor("RESULT 2\nOK") if __name__ == "__main__": if len(sys.argv) < 3: to_log("missing interval and/or script, crashing") sys.exit(1) try: interval = int(sys.argv[1]) except Exception: to_log(f"incorrect interval `{sys.argv[1]}, crashing") sys.exit(1) args = sys.argv[2:] if not pathlib.Path(args[0]).exists(): to_log("script path `{cmd}` doesnt exists. crashing") sys.exit(1) main(interval, args[0], args[1:])
repo_name: olivierdalang/stdm | path: third_party/sqlalchemy/engine/interfaces.py | language: Python | license: gpl-2.0 | size: 31,317 | score: 0.000032
# engine/interfaces.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Define core interfaces used by the engine system.""" from .. import util, event # backwards compat from ..sql.compiler import Compiled, TypeCompiler class Dialect(object): """Define the behavior of a specific database and DB-API combination. Any aspect of metadata definition, SQL query generation, execution, result-set handling, or anything else which varies between databases is defined under the general category of the Dialect. The Dialect acts as a factory for other database-specific object implementations including ExecutionContext,
Compiled, D
efaultGenerator, and TypeEngine. All Dialects implement the following attributes: name identifying name for the dialect from a DBAPI-neutral point of view (i.e. 'sqlite') driver identifying name for the dialect's DBAPI positional True if the paramstyle for this Dialect is positional. paramstyle the paramstyle to be used (some DB-APIs support multiple paramstyles). convert_unicode True if Unicode conversion should be applied to all ``str`` types. encoding type of encoding to use for unicode, usually defaults to 'utf-8'. statement_compiler a :class:`.Compiled` class used to compile SQL statements ddl_compiler a :class:`.Compiled` class used to compile DDL statements server_version_info a tuple containing a version number for the DB backend in use. This value is only available for supporting dialects, and is typically populated during the initial connection to the database. default_schema_name the name of the default schema. This value is only available for supporting dialects, and is typically populated during the initial connection to the database. execution_ctx_cls a :class:`.ExecutionContext` class used to handle statement execution execute_sequence_format either the 'tuple' or 'list' type, depending on what cursor.execute() accepts for the second argument (they vary). preparer a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to quote identifiers. supports_alter ``True`` if the database supports ``ALTER TABLE``. max_identifier_length The maximum length of identifier names. supports_unicode_statements Indicate whether the DB-API can receive SQL statements as Python unicode strings supports_unicode_binds Indicate whether the DB-API can receive string bind parameters as Python unicode strings supports_sane_rowcount Indicate whether the dialect properly implements rowcount for ``UPDATE`` and ``DELETE`` statements. supports_sane_multi_rowcount Indicate whether the dialect properly implements rowcount for ``UPDATE`` and ``DELETE`` statements when executed via executemany. preexecute_autoincrement_sequences True if 'implicit' primary key functions must be executed separately in order to get their value. This is currently oriented towards Postgresql. implicit_returning use RETURNING or equivalent during INSERT execution in order to load newly generated primary keys and other column defaults in one execution, which are then available via inserted_primary_key. If an insert statement has returning() specified explicitly, the "implicit" functionality is not used and inserted_primary_key will not be available. dbapi_type_map A mapping of DB-API type objects present in this Dialect's DB-API implementation mapped to TypeEngine implementations used by the dialect. This is used to apply types to result sets based on the DB-API types present in cursor.description; it only takes effect for result sets against textual statements where no explicit typemap was present. colspecs A dictionary of TypeEngine classes from sqlalchemy.types mapped to subclasses that are specific to the dialect class. This dictionary is class-level only and is not accessed from the dialect instance itself. supports_default_values Indicates if the construct ``INSERT INTO tablename DEFAULT VALUES`` is supported supports_sequences Indicates if the dialect supports CREATE SEQUENCE or similar. sequences_optional If True, indicates if the "optional" flag on the Sequence() construct should signal to not generate a CREATE SEQUENCE. Applies only to dialects that support sequences. 
Currently used only to allow Postgresql SERIAL to be used on a column that specifies Sequence() for usage on other backends. supports_native_enum Indicates if the dialect supports a native ENUM construct. This will prevent types.Enum from generating a CHECK constraint when that type is used. supports_native_boolean Indicates if the dialect supports a native boolean construct. This will prevent types.Boolean from generating a CHECK constraint when that type is used. """ _has_events = False def create_connect_args(self, url): """Build DB-API compatible connection arguments. Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple consisting of a `*args`/`**kwargs` suitable to send directly to the dbapi's connect function. """ raise NotImplementedError() @classmethod def type_descriptor(cls, typeobj): """Transform a generic type to a dialect-specific type. Dialect classes will usually use the :func:`.types.adapt_type` function in the types module to accomplish this. The returned result is cached *per dialect class* so can contain no dialect-instance state. """ raise NotImplementedError() def initialize(self, connection): """Called during strategized creation of the dialect with a connection. Allows dialects to configure options based on server version info or other properties. The connection passed here is a SQLAlchemy Connection object, with full capabilities. The initialize() method of the base dialect should be called via super(). """ pass def reflecttable( self, connection, table, include_columns, exclude_columns): """Load table description from the database. Given a :class:`.Connection` and a :class:`~sqlalchemy.schema.Table` object, reflect its columns and properties from the database. The implementation of this method is provided by :meth:`.DefaultDialect.reflecttable`, which makes use of :class:`.Inspector` to retrieve column information. Dialects should **not** seek to implement this method, and should instead implement individual schema inspection operations such as :meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`, etc. """ raise NotImplementedError() def get_columns(self, connection, table_name, schema=None, **kw): """Return information about columns in `table_name`. Given a :class:`.Connection`, a string `table_name`, and an optional string `schema`, return column information as a list of dictionaries with these keys: name the column's name type [sqlalchemy.types#TypeEngine] nullable boolean default the column's default value autoincrement boolean sequence
repo_name: grahamhayes/pdns | path: regression-tests.dnsdist/test_Trailing.py | language: Python | license: gpl-2.0 | size: 2,718 | score: 0.004047
#!/usr/bin/env python import threading import dns from dnsdisttests import DNSDistTest class TestTrailing(DNSDistTest): # this test suite uses a different responder port # because, contrary to the other ones, its # responders allow trailing data and we don't want # to mix things up. _testServerPort = 5360 _config_template = """
newServer{address="127.0.0.1:%s"} addAction(AndRule({QTypeRule(dnsdist.AAAA), TrailingDataRule()}), DropAction()) """
@classmethod def startResponders(cls): print("Launching responders..") cls._UDPResponder = threading.Thread(name='UDP Responder', target=cls.UDPResponder, args=[cls._testServerPort, True]) cls._UDPResponder.setDaemon(True) cls._UDPResponder.start() cls._TCPResponder = threading.Thread(name='TCP Responder', target=cls.TCPResponder, args=[cls._testServerPort, True]) cls._TCPResponder.setDaemon(True) cls._TCPResponder.start() def testTrailingAllowed(self): """ Trailing: Allowed """ name = 'allowed.trailing.tests.powerdns.com.' query = dns.message.make_query(name, 'A', 'IN') response = dns.message.make_response(query) rrset = dns.rrset.from_text(name, 3600, dns.rdataclass.IN, dns.rdatatype.A, '127.0.0.1') response.answer.append(rrset) raw = query.to_wire() raw = raw + 'A'* 20 (receivedQuery, receivedResponse) = self.sendUDPQuery(raw, response, rawQuery=True) self.assertTrue(receivedQuery) self.assertTrue(receivedResponse) receivedQuery.id = query.id self.assertEquals(query, receivedQuery) self.assertEquals(response, receivedResponse) (receivedQuery, receivedResponse) = self.sendTCPQuery(raw, response, rawQuery=True) self.assertTrue(receivedQuery) self.assertTrue(receivedResponse) receivedQuery.id = query.id self.assertEquals(query, receivedQuery) self.assertEquals(response, receivedResponse) def testTrailingDropped(self): """ Trailing: dropped """ name = 'dropped.trailing.tests.powerdns.com.' query = dns.message.make_query(name, 'AAAA', 'IN') raw = query.to_wire() raw = raw + 'A'* 20 (_, receivedResponse) = self.sendUDPQuery(raw, response=None, rawQuery=True) self.assertEquals(receivedResponse, None) (_, receivedResponse) = self.sendTCPQuery(raw, response=None, rawQuery=True) self.assertEquals(receivedResponse, None)
repo_name: Hybrid-Cloud/conveyor | path: conveyor/conveyorheat/engine/resources/aws/cfn/wait_condition_handle.py | language: Python | license: apache-2.0 | size: 2,385 | score: 0
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from conveyor.conveyorheat.engine.resources import signal_responder from conveyor.conveyorheat.engine.resources import wait_condition as wc_base from conveyor.conveyorheat.engine import support class WaitConditionHandle(wc_base.BaseWaitConditionHandle): """AWS WaitConditionHandle resource. the main point of this class is to : have no dependencies (so the instance can reference it) generate a unique url (to be returned in the reference) then the cfn-signal will use this url to post to and WaitCondition will poll it to see if has been written to. """ support_s
tatus = support.SupportStatus(version='2014.1') METADATA_KEYS = (
DATA, REASON, STATUS, UNIQUE_ID ) = ( 'Data', 'Reason', 'Status', 'UniqueId' ) def get_reference_id(self): if self.resource_id: wc = signal_responder.WAITCONDITION return six.text_type(self._get_ec2_signed_url(signal_type=wc)) else: return six.text_type(self.name) def metadata_update(self, new_metadata=None): """DEPRECATED. Should use handle_signal instead.""" self.handle_signal(details=new_metadata) def handle_signal(self, details=None): """Validate and update the resource metadata. metadata must use the following format: { "Status" : "Status (must be SUCCESS or FAILURE)", "UniqueId" : "Some ID, should be unique for Count>1", "Data" : "Arbitrary Data", "Reason" : "Reason String" } """ if details is None: return return super(WaitConditionHandle, self).handle_signal(details) def resource_mapping(): return { 'AWS::CloudFormation::WaitConditionHandle': WaitConditionHandle, }
repo_name: TiddlySpace/tiddlyspace | path: tiddlywebplugins/tiddlyspace/betaserialization.py | language: Python | license: bsd-3-clause | size: 3,007 | score: 0.000998
""" extend TiddlyWiki serialization to optionally use beta or externalized releases and add the UniversalBackstage. activated via "twrelease=beta" URL parameter or ServerSettings, see build_config_var """ import logging from tiddlyweb.util import read_utf8_file from tiddlywebwiki.serialization import Serialization as WikiSerialization from tiddlywebplugins.tiddlyspace.web import (determine_host, determine_space, determine_space_recipe) LOGGER = logging.getLogger(__name__) def build_config_var(beta=False, external=False): """ Create the configuration key which will be used to locate the base tiddlywiki file. """ base = 'base_tiddlywiki' if external: base += '_external' if beta: base += '_beta' return base class Serialization(WikiSerialization): """ Subclass of the standard TiddlyWiki serialization to allow choosing beta or externalized versions of the base empty.html in which the tiddlers will be servered. Also, if the TiddlyWiki is not being downloaded, add the UniversalBackstage by injecting a script tag. """ def list_tiddlers(self, tiddlers): """ Override tiddlers.link so the location in noscript is to /tiddlers. """ http_host, _ = determine_host(self.environ) space_name = determine_space(self.environ, http_host) if space_name: recipe_name = determine_space_recipe(self.environ, space_name) if '/recipes/%s' % recipe_
name in tiddlers.link: tiddlers.link = '/tiddlers' return WikiSerialization.list_tiddlers(self, tiddlers) def _get_wiki(self): beta = external = False release = self.environ.get('tiddlyweb.query', {}).get( 'twrelease', [False])[0] externali
ze = self.environ.get('tiddlyweb.query', {}).get( 'external', [False])[0] download = self.environ.get('tiddlyweb.query', {}).get( 'download', [False])[0] if release == 'beta': beta = True if externalize: external = True # If somebody is downloading, don't allow them to # externalize. if download: external = False wiki = None if beta or external: config_var = build_config_var(beta, external) LOGGER.debug('looking for %s', config_var) base_wiki_file = self.environ.get('tiddlyweb.config', {}).get(config_var, '') if base_wiki_file: LOGGER.debug('using %s as base_tiddlywiki', base_wiki_file) wiki = read_utf8_file(base_wiki_file) if not wiki: wiki = WikiSerialization._get_wiki(self) tag = "<!--POST-SCRIPT-START-->" if not download: wiki = wiki.replace(tag, '<script type="text/javascript" ' 'src="/bags/common/tiddlers/backstage.js"></script> %s' % tag) return wiki
repo_name: JMill/edX-Learning-From-Data-Solutions-jm | path: Homework_6/Python/hw6_by_kirbs.py | language: Python | license: apache-2.0 | size: 3,869 | score: 0.010856
#------------------------------------------------------------------------------- # Name: hom
ework 6 # Author: kirbs # Created: 11/9/2013 #--------------------------------------------------------------------
----------- #!/usr/bin/env python import urllib import numpy # ################################################### # ##################Question 2-6 Helpers ############# # ################################################### def in_dta(): fpin = urllib.urlopen("http://work.caltech.edu/data/in.dta") return ([map(float,(line.strip('\n').split('\r')[0].split())) for line in fpin]) def out_dta(): fpin = urllib.urlopen("http://work.caltech.edu/data/out.dta") return ([map(float,(line.strip('\n').split('\r')[0].split())) for line in fpin]) def transform(point): return [1, point[0], point[1], point[0]**2, point[1]**2, point[0]* point[1],abs(point[0] - point[1]), abs(point[0] + point[1]), point[2]] def transformPoints(points): transformedPoints = [] for point in points: transformedPoints.append(transform(point)) return transformedPoints """ Calculate weights using linear regression. Return list of weights. """ def linearRegression(samplePoints): X = [] y = [] y_location = len(samplePoints[0]) -1 # y's location is assumed to be the last element in the list # Construct X space and split y values out for point in samplePoints: X.append(numpy.array(point[:y_location])) y.append(point[y_location]) X = numpy.array(X) y = numpy.array(y) X_inverse = numpy.linalg.pinv(X) return numpy.dot(X_inverse, y) def linRegWithRegularization(samplePoints, l): X = [] y = [] y_location = len(samplePoints[0]) -1 # y's location is assumed to be the last element in the list for point in samplePoints: X.append(numpy.array(point[:y_location])) y.append(point[y_location]) weights = linearRegression(samplePoints) X = numpy.array(X) X_inverse = numpy.linalg.pinv(X + numpy.array(l/len(samplePoints)*numpy.dot(weights, weights))) return numpy.dot(X_inverse, y) """ Returns E_in error percentage for given weights and sample points. Assumes samplePoints is a list of lists, and the last element in given list is the y value. """ def Ein(weights, samplePoints): errorCount = 0 y_location = len(samplePoints[0]) - 1 for point in samplePoints: if numpy.sign(numpy.dot(weights,point[:y_location])) != point[y_location]: errorCount += 1 return errorCount/float(len(samplePoints)) # ################################################################## """ Print in and out of sample errors. """ def q2(): points = in_dta() testPoints = out_dta() transformedPoints = transformPoints(points) transformedTestPoints = transformPoints(testPoints) weights = linearRegression(transformedPoints) print "E_in: {}, E_out: {}".format(Ein(weights, transformedPoints), Ein(weights, transformedTestPoints)) def q3(l): points = in_dta() testPoints = out_dta() transformedPoints = transformPoints(points) transformedTestPoints = transformPoints(testPoints) weights = linRegWithRegularization(transformedPoints, l) print "E_in: {}, E_out: {}".format(Ein(weights, transformedPoints), Ein(weights, transformedTestPoints)) # Question 3 #q3(10**-3) # Question 4 #q3(10**3) # Question 5 def q5(start, end): points = in_dta() testPoints = out_dta() transformedPoints = transformPoints(points) transformedTestPoints = transformPoints(testPoints) smallest_k = -2 for i in range(start, end + 1): e_out = Ein(linRegWithRegularization(transformedPoints, 10**i), transformedTestPoints) print "k={}, E_out={}".format(i, e_out) # Question 5 #q5(-2, 2) # Question 6 #q5(-20, 20)
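The nonlinear transform used throughout this homework maps a point (x1, x2, y) to the eight-feature vector built in transform(); a quick check with an arbitrary sample point (values chosen here, not taken from the assignment) makes the feature ordering explicit:

# Illustrative call to the script's transform(); the sample point is arbitrary
point = [0.5, -0.2, 1]          # (x1, x2, y)
z = transform(point)
# z == [1, x1, x2, x1**2, x2**2, x1*x2, |x1 - x2|, |x1 + x2|, y]
#   == [1, 0.5, -0.2, 0.25, 0.04, -0.1, 0.7, 0.3, 1]   (up to float rounding)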
kwilliams-mo/iris
lib/iris/tests/test_grib_save.py
Python
gpl-3.0
10,295
0.002817
# (C) British Crown Copyright 2010 - 2013, Met Office # # This file is part of Iris. # # Iris is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Iris is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see <http://www.gnu.org/licenses/>. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests import os import warnings import datetime import gribapi import numpy as np import iris import iris.cube import iris.coord_systems import iris.coords @iris.tests.skip_data class TestLoadSave(tests.IrisTest): # load and save grib def setUp(self): iris.fileformats.grib.hindcast_workaround = True def tearDown(self): iris.fileformats.grib.hindcast_workaround = False def save_and_compare(self, source_grib, reference_text): """Load and save grib data, generate diffs, compare with expected diffs.""" # load and save from Iris cubes = iris.load(source_grib) saved_grib = iris.util.create_temp_filename(suffix='.grib2') iris.save(cubes, saved_grib) # missing reference? (the expected diffs between source_grib and saved_grib) if not os.path.exists(reference_text): warnings.warn("Creating grib compare reference %s" % reference_text) os.system("grib_compare %s %s > %s" % (source_grib, saved_grib, reference_text)) # generate and compare diffs compare_text = iris.util.create_temp_filename(suffix='.grib_compare.txt') os.system("grib_compare %s %s > %s" % (source_grib, saved_grib, compare_text)) self.assertTextFile(compare_text, reference_text, "grib_compare output") os.remove(saved_grib) os.remove(compare_text) def test_latlon_forecast_plev(self): source_grib = tests.get_data_path
(("GRIB", "uk_t", "uk_t.grib2")) reference_text = tests.get_result_path(("grib_save", "latlon_forecast_plev.grib_compare.txt")) self.save_and_compare(source_grib, reference_text) def test_rotated_latlon(self): source_gr
ib = tests.get_data_path(("GRIB", "rotated_nae_t", "sensible_pole.grib2")) reference_text = tests.get_result_path(("grib_save", "rotated_latlon.grib_compare.txt")) # TODO: Investigate small change in test result: # long [iDirectionIncrement]: [109994] != [109993] # Consider the change in dx_dy() to "InDegrees" too. self.save_and_compare(source_grib, reference_text) # XXX Addressed in #1118 pending #1039 for hybrid levels # def test_hybrid_pressure_levels(self): # source_grib = tests.get_data_path(("GRIB", "ecmwf_standard", "t0.grib2")) # reference_text = tests.get_result_path(("grib_save", "hybrid_pressure.grib_compare.txt")) # self.save_and_compare(source_grib, reference_text) def test_time_mean(self): # This test for time-mean fields also tests negative forecast time. # Because the results depend on the presence of our api patch, # we currently have results for both a patched and unpatched api. # If the api ever allows -ve ft, we should revert to a single result. source_grib = tests.get_data_path(("GRIB", "time_processed", "time_bound.grib2")) reference_text = tests.get_result_path(("grib_save", "time_mean.grib_compare.txt")) # TODO: It's not ideal to have grib patch awareness here... import unittest try: self.save_and_compare(source_grib, reference_text) except unittest.TestCase.failureException: reference_text = tests.get_result_path(( "grib_save", "time_mean.grib_compare.FT_PATCH.txt")) self.save_and_compare(source_grib, reference_text) @iris.tests.skip_data class TestCubeSave(tests.IrisTest): # save fabricated cubes def _load_basic(self): path = tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2")) return iris.load(path)[0] def test_params(self): # TODO pass def test_originating_centre(self): # TODO pass def test_irregular(self): cube = self._load_basic() lat_coord = cube.coord("latitude") cube.remove_coord("latitude") new_lats = np.append(lat_coord.points[:-1], lat_coord.points[0]) # Irregular cube.add_aux_coord(iris.coords.AuxCoord(new_lats, "latitude", units="degrees", coord_system=lat_coord.coord_system), 0) saved_grib = iris.util.create_temp_filename(suffix='.grib2') self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_non_latlon(self): cube = self._load_basic() cube.coord(dimensions=[0]).coord_system = None saved_grib = iris.util.create_temp_filename(suffix='.grib2') self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_forecast_period(self): # unhandled unit cube = self._load_basic() cube.coord("forecast_period").units = iris.unit.Unit("years") saved_grib = iris.util.create_temp_filename(suffix='.grib2') self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib) os.remove(saved_grib) def test_unhandled_vertical(self): # unhandled level type cube = self._load_basic() # Adjust the 'pressure' coord to make it into an "unrecognised Z coord" p_coord = cube.coord("pressure") p_coord.rename("not the messiah") p_coord.units = 'K' p_coord.attributes['positive'] = 'up' saved_grib = iris.util.create_temp_filename(suffix='.grib2') with self.assertRaises(iris.exceptions.TranslationError): iris.save(cube, saved_grib) os.remove(saved_grib) def test_scalar_int32_pressure(self): # Make sure we can save a scalar int32 coordinate with unit conversion. 
cube = self._load_basic() cube.coord("pressure").points = np.array([200], dtype=np.int32) cube.coord("pressure").units = "hPa" with self.temp_filename(".grib2") as testfile: iris.save(cube, testfile) def test_bounded_level(self): cube = iris.load_cube(tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2"))) # Changing pressure to altitude due to grib api bug: # https://github.com/SciTools/iris/pull/715#discussion_r5901538 cube.remove_coord("pressure") cube.add_aux_coord(iris.coords.AuxCoord( 1030.0, long_name='altitude', units='m', bounds=np.array([111.0, 1949.0]))) with self.temp_filename(".grib2") as testfile: iris.save(cube, testfile) with open(testfile, "rb") as saved_file: g = gribapi.grib_new_from_file(saved_file) self.assertEqual( gribapi.grib_get_double(g, "scaledValueOfFirstFixedSurface"), 111.0) self.assertEqual( gribapi.grib_get_double(g, "scaledValueOfSecondFixedSurface"), 1949.0) class TestHandmade(tests.IrisTest): def _lat_lon_cube_no_time(self): """Returns a cube with a latitude and longitude suitable for testing saving to PP/NetCDF etc.""" cube = iris.cube.Cube(np.arange(12, dtype=np.int32).reshape
jeffbaumes/jeffbaumes-vtk
Wrapping/Python/vtk/charts.py
Python
bsd-3-clause
230
0
""" This module loads all the classes from t
he VTK Charts library into its namespace. This is an optional module.""" import os if os.name == 'posix': from libvtkChartsPython import * else: from vtkChartsPython
import *
shantnu/PyEng
WordCount/count_lines_fixed.py
Python
mit
299
0.003344
#! /
usr/bin/python f = open("birds.txt", "r") data = f.read() f.close() lines = data.split("\n") print("Wrong: The number of lines is", len(lines)) for l in lines: if not l: # Can also do this: if len(l) == 0
lines.remove(l) print("Right: The number of lines is", len(lines))
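The loop above removes items from lines while iterating over it, a pattern that can skip adjacent empty lines; a sketch of a more defensive equivalent (same filename assumed) keeps the filtering separate from the iteration:

# Alternative sketch that avoids mutating the list while iterating over it
with open("birds.txt", "r") as f:
    non_empty = [line for line in f.read().split("\n") if line]
print("Right: The number of lines is", len(non_empty))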
ZerpaTechnology/AsenZor
apps/votSys2/admin/settings/config.py
Python
lgpl-3.0
220
0.045455
#!/usr/bin/python #
-*- coding: utf-8 -*- libs_python=["functions"] libs_php=[] #the database names go here dbs=["main"] #variable for parameter passing p={} consola=True host="localhost" consola
_port=9999
alexandercrosson/ml
neural_network/basic.py
Python
mit
1,043
0.004794
"""A basic implementation of a Neural Network by following the tutorial by Andrew Trask http://iamtrask.github.io/2015/07/12/basic-python-network/ """ import numpy as np # sigmoid function def nonlin(x, deriv=False): if deriv==True: return x * (1-x) return 1 / (1 + np.exp(-x)) # input dataset x = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]]) # output dataset y = np.array([[0, 0, 1, 1]]).T # seed random numbers to make calculation # deterministic (good practice) np.random.seed(1) # initialize weights randomly with mean 0 syn0 = 2*np.ra
ndom.random((3, 1)) - 1 for i in xrange(10000): # forward propagation l0 = x l1 = nonlin(np.dot(l0, syn0)) # how much did we miss l1_error = y - l1 # multiply how much we missed by the # slope of the si
gmoid at the values in l1 l1_delta = l1_error * nonlin(l1, True) # update weights syn0 += np.dot(l0.T, l1_delta) print 'Output after training:' print l1
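Since nonlin() doubles as the sigmoid and, with deriv=True, its derivative evaluated on already-activated values, a tiny numeric check clarifies the convention the script relies on (deriv expects the sigmoid output, not the raw input):

# Quick check of the nonlin() convention used above
import numpy as np
s = 1 / (1 + np.exp(-0.0))   # nonlin(0.0) -> 0.5
assert s == 0.5
assert s * (1 - s) == 0.25   # nonlin(0.5, deriv=True) -> 0.25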
ronnyandersson/zignal
zignal/tests/test_music_scales.py
Python
mit
5,855
0.006319
''' Created on 24 Feb 2015 @author: Ronny Andersson (ronny@andersson.tk) @copyright: (c) 2015 Ronny Andersson @license: MIT ''' # Standard library import unittest # Third party import nose # Internal from zignal.music import scales class Test_midi_scales(unittest.TestCase): # Benson, DJ. (2006). Music: A Mathematical Offering. Cambridge University Press. # http://homepages.abdn.ac.uk/mth192/pages/html/maths-music.html def test_freq2key_quantise(self): # 70 466.164 # 69 440.00 # 68 415.305 self.assertAlmostEqual(scales.midi_freq2key(416.4, quantise=True), 68, places=7) self.assertAlmostEqual(scales.midi_freq2key(438.0, quantise=True), 69, places=7) self.assertAlmostEqual(scales.midi_freq2key(441.0, quantise=True), 69, places=7) self.assertAlmostEqual(scales.midi_freq2key(442.0, quantise=True), 69, places=7) self.assertAlmostEqual(scales.midi_freq2key(452.1, quantise=True), 69, places=7) self.assertAlmostEqual(scales.midi_freq2key(453.1, quantise=True), 70, places=7) self.assertAlmostEqual(scales.midi_freq2key(460.0, quantise=True), 70, places=7) self.assertAlmostEqual(scales.midi_freq2key(470.0, quantise=True), 70, places=7) def test_key2freq(self): self.assertAlmostEqual(scales.midi_key2freq(69), 440.0, places=7) self.assertAlmostEqual(scales.midi_key2freq(81), 880.0, places=7) self.assertAlmostEqual(scales.midi_key2freq(21), 27.5, places=7) self.assertAlmostEqual(scales.midi_key2freq(43), 97.9989, places=4) def test_freq2key(self): self.assertAlmostEqual(scales.midi_freq2key(440), 69.0, places=7) self.assertAlmostEqual(scales.midi_freq2key(880), 81.0, places=7) def test_key2freq_tuning(self): self.assertAlmostEqual(scales.midi_key2freq(69, tuning=450), 450.0, places=7) self.assertAlmostEqual(scales.midi_key2freq(81, tuning=450), 900.0, places=7) self.assertAlmostEqual(scales.midi_key2freq(21, tuning=400), 25.0, places=7) def test_freq2key_tuning(self): self.assertAlmostEqual(scales.midi_freq2key(450, tuning=450), 69.0, places=7) self.assertAlmostEqual(scales.midi_freq2key(900, tuning=450), 81.0, places=7) def test_back2back_key(self): self.assertAlmostEqual( scales.midi_key2freq(scales.midi_freq2key(1234)), 1234, places=7) self.assertAlmostEqual( scales.midi_key2freq(scales.midi_freq2key(45.67)), 45.67, places=7) def test_back2back_freq(self): self.assertAlmostEqual( scales.midi_freq2key(scales.midi_key2freq(76.543)), 76.543, places=7) self.assertAlmostEqual( scales.midi_freq2key(scales.midi_key2freq(124)), 124, places=7) class Test_piano_note_to_freq(unittest.TestCase): def test_octaves(self): self.assertAlmostEqual(scales.piano_note2freq('A2'), 110.0, places=7) self.assertAlmostEqual(scales.piano_note2freq('A3'), 220.0, places=7) self.assertAlmostEqual(scales.piano_note2freq('A4'), 440.0, places=7) self.assertAlmostEqual(scales.piano_note2freq('A5'), 880.0, places=7) self.assertAlmostEqual(scales.piano_note2freq('A6'), 1760.0, places=7) def test_values(self): self.assertAlmostEqual(scales.piano_note2freq('C6'), 1046.50, places=2) self.assertAlmostEqual(scales.piano_note2freq('D1'), 36.7081, places=4) class Test_piano_freq_to_note(unittest.TestCase): def test_values(self): self.assertEqual(scales.piano_freq2note(1046.50), 'C6') self.assertEqual(scales.piano_freq2note(36.7051), 'D1') self.assertEqual(scales.piano_freq2note(440), 'A4') def test_quantise(self): self.assertEqual(scales.piano_freq2note(435.00), 'A4') self.assertEqual(scales.piano_freq2note(439.00), 'A4') self.assertEqual(scales.piano_freq2note(440.00), 'A4') self.assertEqual(scales.piano_freq2note(441.00), 'A4') 
self.assertEqual(scales.piano_freq2note(447.00), 'A4') class Test_piano(unittest.TestCase): def test_back2back_key(self): self.assertAlmostEqual( scales.piano_key2freq(scales.piano_freq2key(100)), 100, places=7) self.assertAlmostEqual( scales.piano_key2freq(scales.piano_freq2key(32)), 32, places=7) self.assertAlmostEqual( scales.piano_key2freq(scales.piano_freq2key(997)), 997, places=7) self.assertAlmostEqual( scales.piano_key2freq(scales.piano_freq2key(12345)), 12345, places=7) self.assertAlmostEqual( scales.piano_key2freq(scales.piano_freq2key(4.563)), 4.563, places=7) def test_back2back_freq(self): self.assertAlmostEqual( scales.piano_freq2key(scales.piano_key2freq(10)), 10, places=7) self.assertAlmostEqual( scales.piano_freq2key(scales.piano_key2freq(49)), 49, places=7) self.assertAlmostEqual( scales.piano_freq2key(scales.piano_key2freq(30.3)), 30.3, places=7) def test_back2back_freq_quantised(self): self.assertAlmostEqual(sca
les.piano_freq2key(scales.piano_key2freq(10.2),
quantise=True), 10, places=7) self.assertAlmostEqual(scales.piano_freq2key(scales.piano_key2freq(34.678), quantise=True), 35, places=7) if __name__ == "__main__": noseargs = [__name__, "--verbosity=2", "--logging-format=%(asctime)s %(levelname)-8s: %(name)-15s " + "%(module)-15s %(funcName)-20s %(message)s", "--logging-level=DEBUG", __file__, ] nose.run(argv=noseargs)
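The expected values in these tests are consistent with the standard twelve-tone equal-temperament MIDI mapping (key 69 = A4 = 440 Hz); the relation the assertions imply, written out as a sketch rather than zignal's actual implementation, is:

# Equal-temperament relation implied by the assertions above (not zignal's code)
def midi_key_to_freq(key, tuning=440.0):
    return tuning * 2 ** ((key - 69) / 12.0)

assert abs(midi_key_to_freq(81) - 880.0) < 1e-9   # one octave above A4
assert abs(midi_key_to_freq(21) - 27.5) < 1e-9    # lowest piano A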
pk-python/basics
basics/files.py
Python
mit
169
0.029586
flight_file=open("flight.txt","w+") flight_file.write("Hello") flight_file.seek(0)
text=fl
ight_file.read() flight_file.close() flight_file.closed # Returns whether the file is closed or not.
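With the file opened in "w+" and rewound before the read, the snippet runs; the more idiomatic pattern (a sketch, reusing the same filename) is a context manager so the close cannot be forgotten:

# Sketch of the same write-then-read round trip using a context manager
with open("flight.txt", "w+") as flight_file:
    flight_file.write("Hello")
    flight_file.seek(0)      # rewind so the write is visible to read()
    text = flight_file.read()
print(flight_file.closed)    # True: the with-block closed the file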
tilogaat/blueflood
demo/ingest.py
Python
apache-2.0
3,541
0.001977
#!/usr
/bin/env python # Licensed to Rackspace under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # Rackspace licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless req
uired by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time import uuid import random from optparse import OptionParser try: import simplejson as json except ImportError: import json try: import requests except ImportError: raise ImportError('Missing dependency requests. ' + 'Please install it using pip.') def _generate_metrics_data(tenantId, metricName): data = [] # Blueflood understands millis since epoch only now = long(time.time() * 1000) # Publish metrics with older timestamps (2 hrs before current time) startTimestamp = now - 2 * 60 * 60 * 1000 endTimestamp = startTimestamp for i in range(100): metric = {} metric['collectionTime'] = endTimestamp metric['metricName'] = metricName metric['metricValue'] = random.randint(1, 100) metric['ttlInSeconds'] = 2 * 24 * 60 * 60 # 2 days metric['unit'] = 'seconds' data.append(metric) endTimestamp += 30 * 1000 # 30s spaced metric samples return data, startTimestamp, endTimestamp def _get_metrics_url(host, port, scheme, tenantId): return scheme + '://' + host + ':' + port + '/v1.0/'\ + tenantId + '/experimental/metrics' def main(): usage = 'usage: %prog \n' + \ '--host=<host running blueflood> \n' + \ '--port=<blueflood HTTP metrics ingestion port>' parser = OptionParser(usage=usage) parser.add_option('--host', dest='host', help='Blueflood host') parser.add_option('--port', dest='port', help='HTTP ingestion port') (options, args) = parser.parse_args() if not options.host: options.host = 'localhost' if not options.port: options.port = '19000' tenantId = 'ac' + str(uuid.uuid1()) metricName = 'met.' + str(uuid.uuid1()) (payload, start, end) = _generate_metrics_data(tenantId, metricName) prettyjsondata = json.dumps(payload, indent=4, separators=(',', ': ')) print(prettyjsondata) url = _get_metrics_url(options.host, options.port, 'http', tenantId) print(url) try: print('Writing metrics for tenant: %s, metric name: %s,\ start: %d, end: %d' % (tenantId, metricName, start, end)) r = requests.post(url, data=json.dumps(payload)) print('Response from server %s' % (r)) print('To retrive the generated data with retrieve.py script, use the following command (assuming port number 20000):') print('') print('./retrieve.py --host %s --port 20000 --metric %s --tenant %s --from %s --to %s --points 100' \ % (options.host, metricName, tenantId, start - 100000000, end + 100000000)) print('') except Exception, ex: print(ex) raise Exception('Cannot ingest metrics into bluflood') main()
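Each element produced by _generate_metrics_data has the shape below; the values here are placeholders, but the keys and the milliseconds-since-epoch timestamps match what the script sends to the ingestion endpoint:

# Shape of one ingested metric (placeholder values; keys mirror _generate_metrics_data)
sample_metric = {
    "collectionTime": 1500000000000,    # millis since epoch, samples spaced 30s apart
    "metricName": "met.example",
    "metricValue": 42,
    "ttlInSeconds": 2 * 24 * 60 * 60,   # 2 days, as in the script
    "unit": "seconds",
}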
Landver/netmon
apps/backups/migrations/0002_auto_20170424_0117.py
Python
mit
737
0
# -*- coding: utf-8 -*- # Generated by Django 1.11 on
2017-04-23 22:17 from __future__ import unicode_literals
from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('backups', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='backupfirewall', name='file_location', ), migrations.RemoveField( model_name='backupfirewall', name='site_name', ), migrations.RemoveField( model_name='backuprouter', name='file_location', ), migrations.RemoveField( model_name='backuprouter', name='site_name', ), ]
tboyce021/home-assistant
homeassistant/components/vizio/media_player.py
Python
apache-2.0
18,119
0.000883
"""Vizio SmartCast Device support.""" from datetime import timedelta import logging from typing import Any, Callable, Dict, List, Optional, Union from pyvizio import VizioAsync from pyvizio.api.apps import find_app_name from pyvizio.const import APP_HOME, INPUT_APPS, NO_APP_RUNNING, UNKNOWN_APP from homeassistant.components.media_player import ( DEVICE_CLASS_SPEAKER, DEVICE_CLASS_TV, SUPPORT_SELECT_SOUND_MODE, MediaPlayerEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_DEVICE_CLASS, CONF_EXCLUDE, CONF_HOST, CONF_INCLUDE, CONF_NAME, STATE_OFF, STATE_ON, ) from homeassistant.core import callback from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import entity_platform from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import HomeAssistantType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import ( CONF_ADDITIONAL_CONFIGS, CONF_APPS, CONF_VOLUME_STEP, DEFAULT_TIMEOUT, DEFAULT_VOLUME_STEP, DEVICE_ID, DOMAIN, ICON, SERVICE_UPDATE_SETTING, SUPPORTED_COMMANDS, UPDATE_SETTING_SCHEMA, VIZIO_AUDIO_SETTINGS, VIZIO_DEVICE_CLASSES, VIZIO_MUTE, VIZIO_MUTE_ON, VIZIO_SOUND_MODE, VIZIO_VOLUME, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=30) PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up a Vizio media player entry.""" host = config_entry.data[CONF_HOST] token = config_entry.data.get(CONF_ACCESS_TOKEN) name = config_entry.data[CONF_NAME] device_class = config_entry.data[CONF_DEVICE_CLASS] # If config entry options not set up, set them up, otherwise assign values managed in options volume_step = config_entry.options.get( CONF_VOLUME_STEP, config_entry.data.get(CONF_VOLUME_STEP, DEFAULT_VOLUME_STEP) ) params = {} if not config_entry.options: params["options"] = {CONF_VOLUME_STEP: volume_step} include_or_exclude_key = next( ( key for key in config_entry.data.get(CONF_APPS, {}) if key in [CONF_INCLUDE, CONF_EXCLUDE] ), None, ) if include_or_exclude_key: params["options"][CONF_APPS] = { include_or_exclude_key: config_entry.data[CONF_APPS][ include_or_exclude_key ].copy() } if not config_entry.data.get(CONF_VOLUME_STEP): new_data = config_entry.data.copy() new_data.update({CONF_VOLUME_STEP: volume_step}) params["data"] = new_data if params: hass.config_entries.async_update_entry(config_entry, **params) device = VizioAsync( DEVICE_ID, host, name, auth_token=token, device_type=VIZIO_DEVICE_CLASSES[device_class], session=async_get_clientsession(hass, False), timeout=DEFAULT_TIMEOUT, ) if not await device.can_connect_with_auth_check(): _LOGGER.warning("Failed to connect to %s", host) raise PlatformNotReady apps_coordinator = hass.data[DOMAIN].get(CONF_APPS) entity = VizioDevice(config_entry, device, name, device_class, apps_coordinator) async_add_entities([entity], update_before_add=True) platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_UPDATE_SETTING, UPDATE_SETTING_SCHEMA, "async_update_setting" ) class VizioDevice(MediaPlayerEntity): """Media Player implementation which performs REST requests to device.""" def __init__( self, config_entry: ConfigEntry, device: 
VizioAsync, name: str, device_class: str, apps_coordinator: DataUpdateCoordinator, ) -> None: """Initialize Vizio device.""" self._config_entry = config_entry self._apps_coordinator = apps_coordinator self._name = name self._state = None self._volume_level = None self._volume_step = config_entry.options[CONF_VOLUME_STEP] self._is_volume_muted = None self._current_input = None self._current_app = None self._current_app_config = None self._current_sound_mode = None self._available_sound_modes = [] self._available_inputs = [] self._available_apps = [] self._all_apps = apps_coordinator.data if apps_coordinator else None self._conf_apps = config_entry.options.get(CONF_APPS, {}) self._additional_app_configs = config_entry.data.get(CONF_APPS, {}).get( CONF_ADDITIONAL_CONFIGS, [] ) self._device_class = device_class self._supported_commands = SUPPORTED_COMMANDS[device_class] self._device = device self._max_volume = float(self._device.get_max_volume()) self._icon = ICON[device_class] self._available = True self._model = None self._sw_version = None def _apps_list(self, apps: List[str]) -> List[str]: """Return process apps list based on configured filters.""" if self._conf_apps.get(CONF_INCLUDE): return [app for app in apps if app in self._conf_apps[CONF_INCLUDE]] if self._conf_apps.get(CONF_EXCLUDE): return [app for app in apps if app not in self._conf_apps[CONF_EXCLUDE]] return apps async def async_update(self) -> None: """Retrieve latest state of the device.""" if not self._model: self._model = await self._device.get_model_name() if not self._sw_version: self._sw_version = await self._device.get_version() is_on = await self._device.get_power_state(log_api_exception=False) if is_on is None: if self._available: _LOGGER.warning( "Lost connection to %s", self._config_entry.data[CONF_HOST] ) self._available = False return if not self._available: _LOGGER.info( "Restored connection to %s", self._config_entry.data[CONF_HOST] ) self._available = True if not is_on: self._state = STATE_OFF self._volume_level = None self._is_volume_muted = None self._current_input = None self._current_app = None self._current_app_config = None self._current_sound_mod
e = None return self._state = STATE_ON audio_settings = await self._device.get_all_settings( VIZIO_AUDIO_SETTINGS, log_api_exception=False ) if audio_settings: self._volume_level = float(audio_settings[VIZIO_VOLUME]) / self._max_volume if VIZIO_MUTE in audio_settings:
self._is_volume_muted = ( audio_settings[VIZIO_MUTE].lower() == VIZIO_MUTE_ON ) else: self._is_volume_muted = None if VIZIO_SOUND_MODE in audio_settings: self._supported_commands |= SUPPORT_SELECT_SOUND_MODE self._current_sound_mode = audio_settings[VIZIO_SOUND_MODE] if not self._available_sound_modes: self._available_sound_modes = ( await self._device.get_setting_options( VIZIO_AUDIO_SETTINGS, VIZIO_SOUND_MODE ) ) else: # Explicitly remove SUPPORT_SELECT_SOUND_MODE from supported features self._supported_commands &= ~SUPPORT_SELECT_SOUND_MODE input_ = await self._device.get_
theicfire/djangofun
src/firsty/settings.py
Python
bsd-3-clause
5,085
0.001573
# Django settings for firsty project. DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': '/home/chase/django/nrfirst/firsty/src/sqlite.db', # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale USE_L10N = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = '' # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = '/static/' # URL prefix for admin static files -- CSS, JavaScript and images. # Make sure to use a trailing slash. # Examples: "http://foo.com/static/admin/", "/static/admin/". ADMIN_MEDIA_PREFIX = '/static/admin/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = '11l4c!ngykol5x#rsqed+$bv9ln$(oefcf@ovzjhx+_56e7u6%' # List of callables that know how to import templates from various sources. 
TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ) ROOT_URLCONF = 'firsty.urls' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: # 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.a
dmindocs', ) # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 er
ror. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } }
iti-luebeck/HANSE2012
hanse_ros/hanse_pipefollowing/nodes/pipefollowing.py
Python
bsd-3-clause
10,171
0.033133
#!/usr/bin/env python PACKAGE = 'hanse_pipefollowing' import roslib; roslib.load_manifest('hanse_pipefollowing') import rospy import dynamic_reconfigure.server import math import smach import smach_ros import numpy from geometry_msgs.msg import PoseStamped, Point, Twist, Vector3 from hanse_msgs.msg import Object, sollSpeed from std_msgs.msg import String from hanse_pipefollowing.cfg import PipeFollowingConfig from hanse_pipefollowing.msg import PipeFollowingAction ################# IMAGE_COLS = 640 IMAGE_ROWS = 480 ################# # TODO Global.x/y/lastX/lastY locken? # The pipe is seen if: # 1. at least 1/20th of the image is "pipe" # 2. at most half of the image is "pipe" # We have successfully passed the pipe if # 1. we see the pipe # 2. the pipe angle is about 0 # 3. the pipe center is in the lower quater of the image. class Config: minSize = 0.05 maxSize = 0.4 fwSpeed = 0.8 deltaAngle = 0.192 # 0.192 radians = 11 degrees deltaDist = 100 kpAngle = 1.0 kpDist = 1.0 robCenterX = 320 robCenterY = 240 maxDistance = 320 mirror = False class Global: x = 0.0 y = 0.0 size = 0.0 orientation = 0.0 lastX = 0.0 lastY = 0.0 isSizeTooSmall = False currentPosition = Point() is_seen = True state = " " distance = 0 pipe_passed = 63.6 #============================================================================== # Constants #============================================================================== class States: NotSeenYet = 'NotSeenYet' Passed = 'Passed' IsSeen = 'IsSeen' Lost = 'Lost' class Transitions: IsSeen = 'IsSeen' Passed = 'Passed' Lost = 'Lost' Aborted = 'Aborted' class LostTypes: LostLeft = 'LostLeft' LostRight = 'LostRight' LostBottom = 'LostBottom' LostTop = 'LostTop' Lost = 'Lost' #============================================================================== # State classes #============================================================================== class AbortableState(smach.State): def abort(self): setMotorSpeed(0,0) self.service_preempt() return Transitions.Aborted class NotSeenYet(AbortableState): def __init__(self): smach.State.__init__(self, outcomes=[Transitions.IsSeen, Transitions.Aborted]) def execute(self, userdata): rospy.loginfo('Executing state '+States.NotSeenYet) while not rospy.is_shutdown() and not self.preempt_requested(): # if size between min und max.. if Config.minSize < Global.size < Config.maxSize: return Transitions.IsSeen setMotorSpeed(Config.fwSpeed, 0.0) rospy.sleep(0.2) return self.abort() class IsSeen(AbortableState): def __init__(self): smach.State.__init__(self, outcomes=[Transitions.Lost, Transitions.Passed, Transitions.Aborted], output_keys=['lost_type']) def execute(self, userdata): rospy.loginfo('Executing state '+States.IsSeen) while not rospy.is_shutdown() and not self.preempt_requested(): # if size between min und max.. if Config.minSize < Global.size < Config.maxSize: # end of pipe reached? 
#Coordiantes for end of pipe if Global.currentPosition.x < abs(Global.pipe_passed): setMotorSpeed(0,0) return Transitions.Passed # lost else: tmp_x = 0.0 tmp_y = 0.0 # lost if less than minSize is seen if Global.size <= Config.minSize: tmp_x = Global.x tmp_y = Global.y # lost if more than maxSize is seen elif Global.size >= Config.maxSize: tmp_x = Global.lastX tmp_y = Global.lastY tmp_x /= IMAGE_COLS tmp_y /= IMAGE_ROWS if tmp_x < 0.5: userdata.lost_type = LostTypes.LostLeft elif tmp_x >= 0.5: userdata.lost_type = LostTypes.LostRight elif tmp_y < 0.5: userdata.lost_type = LostTypes.LostTop elif tmp_y >= 0.5: userdata.lost_type = LostTypes.LostBottom else: userdata.lost_type = LostTypes.Lost return Transitions.Lost distanceY = computeIntersection(Global.x, Global.y, Global.orientation) #if not Config.mirror: distanceY = -distanceY #rospy.loginfo('distanceY: ' + repr(distanceY)) angularSpeed = 0.0 if math.fabs(Global.orientation) > Config.deltaAngle: angularSpeed = Config.kpAngle * Global.orientation / (math.pi/2) if math.fabs(distanceY) > Config.deltaDist: angularSpeed += Config.kpDist * distanceY / Config.maxDistance #rospy.loginfo('angularSpeed: ' + repr(angularSpeed) + '\t\t ('+repr(Global.x)+','+repr(Global.y)+')') setMotorSpeed(Config.fwSpeed, angularSpeed) rospy.sleep(0.2) return self.abort() class Lost(AbortableState): def __init__(self): smach.State.__init__(self, outcomes=[Transitions.IsSeen, Transitions.Aborted], input_keys=['lost_type']) def execute(self, userdata): rospy.loginfo('Executing state '+States.Lost+' ('+userdata.lost_type +')') if userdata.lost_type == LostTypes.Lost: while not rospy.is_shutdown(): rospy.loginfo('PANIC: lost'); return self.abort() else: # linear-/angularspeed tuples speedDict = { LostTypes.LostLeft: (Config.fwSpeed, -0.2), LostTypes.LostRight: (Config.fwSpeed, 0.2), LostTypes.LostBottom:(Config.fwSpeed, 0.0), LostTypes.LostTop: (Config.fwSpeed, 0.0), } while not rospy.is_shutdown() and not self.preempt_requested(): if Config.minSize < Global.size < Config.maxSize: return Transitions.IsSeen setMotorSpeed(*speedDict[userdata.lost_type]) rospy.sleep(0.2) return self.abort() #============================================================================== # Callback functions #============================================================================== def objectCallback(msg): #rospy.loginfo('objectCallback: size='+repr(msg.size)+'\t\t orientation='+repr(msg.orientation)); Global.lastX = Global.x Global.lastY = Global.y Global.size = msg.size Global.is_seen = msg.is_seen if Config.mirror: Global.x = (IMAGE_COLS - msg.x) Global.y = (IMAGE_ROWS - msg.y) Global.orientation = -msg.orientation else: Global.x = msg.x Global.y = msg.y Global.orientation = msg.orientation distanceY = computeIntersection(Global.x, Global.y, Global.orientation) #r
ospy.loginfo('distY: '+repr(distanceY / Config.maxDistance)) def configCallback(config, level): rospy.loginfo('Reconfigure Request: ') Config.minSize = config['minSize'
] Config.maxSize = config['maxSize'] Config.fwSpeed = config['fwSpeed'] Config.deltaAngle = config['deltaAngle'] Config.deltaDist = config['deltaDist'] Config.kpAngle = config['kpAngle'] Config.kpDist = config['kpDist'] Config.robCenterX = config['robCenterX'] Config.robCenterY = config['robCenterY'] Config.maxDistance = config['maxDistance'] Config.mirror = config['mirror'] return config def positionCallback(msg): Global.currentPosition = msg.pose.position #============================================================================== # Helper functions #============================================================================== def hasPassed(): return (math.fabs(Global.orientation) < math.pi/6.0) and (Global.y > 0.75*IMAGE_ROWS) and (0.2*IMAGE_COLS < Global.x < 0.8*IMAGE_COLS) def computeIntersection(meanX, meanY, theta): robX = Config.robCenterX robY = Config.robCenterY nzero = (math.cos(theta), math.sin(theta)) d = meanX * nzero[0] + meanY * nzero[1]; # nzero * p - d return (nzero[0] * robX) + (nzero[1] * robY) - d; # werte im bereich [-1, 1] def setMotorSpeed(lin, ang): linearVector = Vector3(x=lin,z=0) angularVector = Vector3(z=ang) twist = Twist(linear=linearVector, angular=angularVector) pub_cmd_vel.publish(twist) # #ang = ang # geschwindigkeitswerte fuer thruster berechnen #left = lin*127 + ang*127 #right = lin*127 - ang*127 # auf den wertebereich -127 bis 127 beschraenken #left = numpy.clip(left, -127, 127) #right = numpy.clip(right, -127, 127) # nachrichten an motoren publishen #pub_motor_left.publish(sollSpeed(data = left)) #pub_motor_right.publish(sollSpeed(data = right)) def timerCallback(event): pub_behaviour_info.publish(String(data = 'Orientation: '+str(Global.orientation))) #===========================================================================
dries007/Basys3
python/RouletteMaker.py
Python
mit
1,814
0.001654
index = 20 bets = 25 names = ("Plain", "Cheval H", "Cheval V", "Trans", "Trans S", "Carre", "Colonne", "Simple") for bet in range(bets): col = 40 # --------------------------------------- money print(""" when %d => if bets_index > %d then fb_a_addr <= std_logic_vector(to_unsigned(COLS*21 + COLS*%d + %d, 14)); fb_a_dat_in <= x"24"; -- $ end if;""" % (index, bet, bet, col)) index += 1 col += 2 # extra space for m in range(5, -1, -1): print("""when %d => if bets_index > %d then fb_a_addr <= std_logic_vector(to_unsigned(COLS*21 + COLS*%d + %d, 14)); fb_a_dat_in <= ascii_i(bets(%d).money, %d); end if;""" % (index, bet, bet, col, bet, m)) index += 1 col += 1 if m == 5: col += 1 # extra space if m == 2: print("""when %d => if bets_index
> %d then fb_a_addr <= std_logic_vector(to_unsigned(COLS*21 + COLS*%d + %d, 14)); fb_a_dat_in <= x"2e"; -- . end if;""" % (index, bet, bet, col)) index += 1 col += 1 # --------------------------------------- name col += 1 for n in range(8): # n = index of letter print("""when %d => if bets_index > %d then fb_a_addr <= std_logic_vector(to_unsigned(COLS*21 + COLS*%d + %d, 14)); case bets(
%d).kind is""" % (index, bet, bet, col, bet)) for kind in range(1, 9): if n < len(names[kind-1]) and names[kind-1][n] != ' ': print(""" when %d => fb_a_dat_in <= x"%02x"; -- %c""" % (kind, ord(names[kind-1][n]), names[kind-1][n])) print(""" when others => fb_a_dat_in <= x"20"; -- space end case; fb_a_dat_in <= x"2e"; -- . end if;""") index += 1 col += 1
PyBossa/random-scheduler
random_scheduler/__init__.py
Python
agpl-3.0
1,346
0.001486
import pybossa.sched as sched from pybossa.forms.forms import TaskSchedulerForm from pybossa.core import project_repo from flask.ext.plugins import
Plugin from functools import wraps import random __plugin__ = "RandomScheduler" __version__ = "0.0.1" SCHEDULER_NAME = 'random' def get_random_task(project_id, user_id=None, user_ip=None, n_answers=30, offset=0): """Return a random task for the user.""" project = project_repo.get(project_id) if project and l
en(project.tasks) > 0: return random.choice(project.tasks) else: return None def with_random_scheduler(f): @wraps(f) def wrapper(project_id, sched, user_id=None, user_ip=None, offset=0): if sched == SCHEDULER_NAME: return get_random_task(project_id, user_id, user_ip, offset=offset) return f(project_id, sched, user_id=user_id, user_ip=user_ip, offset=offset) return wrapper def variants_with_random_scheduler(f): @wraps(f) def wrapper(): return f() + [(SCHEDULER_NAME, 'Random')] return wrapper class RandomScheduler(Plugin): def setup(self): sched.new_task = with_random_scheduler(sched.new_task) sched.sched_variants = variants_with_random_scheduler(sched.sched_variants) TaskSchedulerForm.update_sched_options(sched.sched_variants())
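The plugin never replaces pybossa's scheduler outright: with_random_scheduler only intercepts calls whose sched argument equals 'random' and defers to the original new_task otherwise. The same wrap-and-dispatch shape, reduced to a standalone sketch with stand-in names (not pybossa APIs), is:

# Standalone sketch of the wrap-and-dispatch pattern used above (stand-in names)
from functools import wraps

def with_custom_scheduler(original, name, picker):
    @wraps(original)
    def wrapper(project_id, sched, **kwargs):
        if sched == name:
            return picker(project_id, **kwargs)       # handle our scheduler
        return original(project_id, sched, **kwargs)  # fall through to the default
    return wrapper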
ThiefMaster/indico
indico/modules/events/static/util.py
Python
mit
6,995
0.002001
# This file is part of Indico. # Copyright (C) 2002 - 2021 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. import base64 import mimetypes import re from contextlib import contextmanager from urllib.parse import urlsplit, urlunsplit import requests from flask import current_app, g, request from flask_webpackext import current_webpack from flask_webpackext.manifest import JinjaManifestEntry from pywebpack import Manifest from werkzeug.urls import url_parse from indico.core.config import config from indico.modules.events.layout.models.images import ImageFile from indico.web.flask.util import endpoint_for_url _css_url_pattern = r'''url\((['"]?)({}|https?:)?([^)'"]+)\1\)''' _url_has_extension_re = re.compile(r'.*\.([^/]+)$') _plugin_url_pattern = r'(?:{})?/static/plugins/([^/]+)/(.*?)(?:__v[0-9a-f]+)?\.([^.]+)$' _static_url_pattern = r'(?:{})?/(images|dist|fonts)(.*)/(.+?)(?:__v[0-9a-f]+)?\.([^.]+)$' _custom_url_pattern = r'(?:{})?/static/custom/(.+)$' def rewrite_static_url(path): """Remove __vxxx prefix from static URLs.""" plugin_pattern = _plugin_url_pattern.format(url_parse(config.BASE_URL).path) static_pattern = _static_url_pattern.format(url_parse(config.BASE_URL).path) custom_pattern = _custom_url_pattern.format(url_parse(config.BASE_URL).path) if re.match(plugin_pattern, path): return re.sub(plugin_pattern, r'static/plugins/\1/\2.\3', path) elif re.match(static_pattern, path): return re.sub(static_pattern, r'static/\1\2/\3.\4', path) else: return re.sub(custom_pattern, r'static/custom/\1', path) def _create_data_uri(url, filename): """Create a data url that contains the file in question.""" response = requests.get(url, verify=False) if response.status_code != 200: # couldn't access the file return url data = base64.b64encode(response.content).decode() content_type = (mimetypes.guess_type(filename)[0] or response.headers.get('Content-Type', 'application/octet-stream')) return f'data:{content_type};base64,{data}' def _rewrite_event_asset_url(event, url): """Rewrite URLs of assets such as event images. Only assets contained within the event will be taken into account """ scheme, netloc, path, qs, anchor = urlsplit(url) netloc = netloc or current_app.config['SERVER_NAME'] scheme = scheme or 'https' # internal URLs (same server) if netloc == current_app.config['SERVER_NAME']: # this piece of Flask magic finds the endpoint that corresponds to # the URL and checks that it points to an image belonging to this event endpoint_info = endpoint_for_url(path) if endpoint_info: endpoint, data = endpoint_info if endpoint == 'event_images.image_display' and data['event_id'] == event.id: image_file = ImageFile.get(data['image_id']) if image_file and image_file.event == event: return f'images/{image_file.id}-{image_file.filename}', image_file # if the URL is not internal or just not an image, # we embed the contents using a data URI data_uri = _create_data_uri(urlunsplit((scheme, netloc, path, qs, '')), urlsplit(path)[-1]) return data_uri, None def _remove_anchor(url): """Remove the anchor from a URL.""" scheme, netloc, path, qs, anchor = urlsplit(url) return urlunsplit((scheme, netloc, path, qs, '')) def rewrite_css_urls(event, css): """Rewrite CSS in order to handle url(...) 
properly.""" # keeping track of used URLs used_urls = set() used_images = set() def _replace_url(m): prefix = m.group(2) or '' url = m.group(3) if url.startswith('/event/') or re.match(r'https?:', prefix): rewritten_url, image_file = _rewrite_event_asset_url(event, prefix + url) if image_file: used_images.add(image_file) return f'url({rewritten_url})' else: rewritten_url = rewrite_static_url(url) used_urls.add(_remove_anchor(rewritten_url)) if url.startswith('/static/plugins/'): return f"url('../../../../../{rewritten_url}')" else: return f"url('../../../{rewritten_url}')" indico_path = url_parse(config.BASE_URL).path new_css = re.sub(_css_url_pattern.format(indico_path), _replace_url, css, flags=re.MULTILINE) return new_css, used_urls, used_images def url_to_static_filename(endpoint, url): """Handle special endpoint/URLs so that they link to offline content.""" if re.match(r'(events)?\.display(_overview)?$', endpoint): return 'index.html' elif endpoint == 'event_layout.css_display': return 'custom.css' elif endpoint == 'event_images.logo_display': return 'logo.png' indico_path = url_parse(config.BASE_URL).path if re.match(_static_url_pattern.format(indico_path), url): url = rewrite_static_url(url) else: # get rid of [/whatever]/event/1234 url = re.sub(fr'{indico_path}(?:/event/\d+)?/(.*)', r'\1', url) if not url.startswith('assets/'): # replace all remaining slashes url = url.rstrip('/').replace('/', '--') # it's not executed in a webserver, so we do
need
a .html extension if not _url_has_extension_re.match(url): url += '.html' return url def _rule_for_endpoint(endpoint): return next((x for x in current_app.url_map.iter_rules(endpoint) if 'GET' in x.methods), None) @contextmanager def override_request_endpoint(endpoint): rule = _rule_for_endpoint(endpoint) assert rule is not None old_rule = request.url_rule request.url_rule = rule try: yield finally: request.url_rule = old_rule class RewrittenManifest(Manifest): """A manifest that rewrites its asset paths.""" def __init__(self, manifest): super().__init__() self._entries = {k: JinjaManifestEntry(entry.name, self._rewrite_paths(entry._paths)) for k, entry in manifest._entries.items()} self.used_assets = set() def _rewrite_paths(self, paths): return [rewrite_static_url(path) for path in paths] def __getitem__(self, key): self.used_assets.add(key) return super().__getitem__(key) @contextmanager def collect_static_files(): """Keep track of URLs used by manifest and url_for.""" g.custom_manifests = {None: RewrittenManifest(current_webpack.manifest)} g.used_url_for_assets = set() used_assets = set() yield used_assets for manifest in g.custom_manifests.values(): used_assets |= {p for k in manifest.used_assets for p in manifest[k]._paths} used_assets |= {rewrite_static_url(url) for url in g.used_url_for_assets} del g.custom_manifests del g.used_url_for_assets
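rewrite_static_url() only strips the __v cache-busting suffix and any BASE_URL path prefix; assuming config.BASE_URL has an empty path component (an assumption, since the patterns embed url_parse(config.BASE_URL).path), the rewrites behave like this:

# Illustrative rewrites, assuming config.BASE_URL has an empty path component
#   rewrite_static_url('/static/plugins/myplugin/js/app__v0badc0de.js')
#       -> 'static/plugins/myplugin/js/app.js'
#   rewrite_static_url('/dist/js/main__vdeadbeef.js')
#       -> 'static/dist/js/main.js'
#   rewrite_static_url('/static/custom/site.css')
#       -> 'static/custom/site.css'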
wgwoods/blivet
tests/imagebackedtestcase.py
Python
gpl-2.0
3,326
0.001203
import os import unittest from blivet import Blivet from blivet import util from blivet.size import Size from blivet.flags import flags @unittest.skipUnless(os.environ.get("JENKINS_HOME"), "jenkins only test") @unittest.skipUnless(os.geteuid() == 0, "requires root access") class ImageBackedTestCase(unittest.TestCase): """ A class to encapsulate testing of blivet using block devices. The basic idea is you create some scratch block devices and then run some test code on them. :attr:`~.ImageBackedTestCase.disks` defines the set of disk images. :meth:`~.ImageBackedTestCase._set_up_storage` is where you specify the initial layout of the disks. It will be written to the disk images in :meth:`~.ImageBackedTestCase.set_up_storage`. You then write test methods as usual that use the disk images, which will be cleaned up and removed when each test method finishes. """ initialize_disks = True """ Whether or not to create a disklabel on the disks. """ disks = { "disk1": Size("2 GiB"), "disk2": Size("2 GiB") } """ The names and sizes of the disk images to create/use. """ def set_up_disks(self): """ Create disk image files to build the test's storage on. If you are actually creating the disk image files here don't forget to set the initializeDisks flag so they get a fresh disklabel when clearPartitions gets called from create_storage later. """ for (name, size) in iter(self.disks.items()): path = util.create_sparse_tempfile(name, size) self.blivet.config.diskImages[name] = path # # set up the disk images with a disklabel # self.blivet.config.initializeDisks = self.initialize_disks def _set_up_storage(self): """ Schedule creation of storage devices on the disk images. .. note:: The disk images should already be in a populated devicetree. """ pass def set_up_storage(self): """ Create a device stack on top of disk images for this test to run on. This will write the configuration to whatever disk images are defined in set_up_disks. """ # # create disk images # self.set_up_disks() # # populate the devicetree # self.blivet.reset() # # clear and/or initialize disks as specified in set_up_disks #
self.blivet.clearPartitions() # # create the rest of the stack # self._set_up_storage() # # write configuration to disk images # self.blivet.doIt()
def setUp(self): """ Do any setup required prior to running a test. """ flags.image_install = True self.blivet = Blivet() self.addCleanup(self._cleanUp) self.set_up_storage() def _cleanUp(self): """ Clean up any resources that may have been set up for a test. """ self.blivet.reset() self.blivet.devicetree.teardownDiskImages() for fn in self.blivet.config.diskImages.values(): if os.path.exists(fn): os.unlink(fn) flags.image_install = False
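The class docstring describes the intended subclassing pattern: declare disks, build a layout in _set_up_storage, then write ordinary test methods. A minimal hypothetical subclass (made-up disk size, empty layout) would look like:

# Hypothetical subclass sketch; the disk name and size are made up
class ExampleLayoutTestCase(ImageBackedTestCase):
    disks = {"disk1": Size("1 GiB")}

    def _set_up_storage(self):
        # schedule layout creation on self.blivet here; left empty in this sketch
        pass

    def test_devicetree_populated(self):
        self.assertIsNotNone(self.blivet.devicetree)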
PTAug/fashion-analytics
fashion-analytics/image-processing/testcolor.py
Python
apache-2.0
56
0.017857
from colordetection
imp
ort * topColors(992780587437103)
kamagatos/django-registration-withemail
registration_withemail/auth_urls.py
Python
bsd-2-clause
1,200
0.0075
""" URL patterns for the views included in ``django.contrib.auth``. """ from django.conf.urls import patterns, url from registration_withemail.forms import EldonUserAuth
enticationForm urlpatterns = patterns('', url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'registration/login.html', 'authentication_form': EldonUserAuthenticationForm}, name='login'), url(r'^logout/$', 'django.contrib.auth.views.logout', name='logout'), url(r'^password_change/$', 'django.contrib.auth.views.password_change', name='password_change'), url(r'^password_change/done/$', 'dj
ango.contrib.auth.views.password_change_done', name='password_change_done'), url(r'^password_reset/$', 'django.contrib.auth.views.password_reset', name='password_reset'), url(r'^password_reset/done/$', 'django.contrib.auth.views.password_reset_done', name='password_reset_done'), url(r'^reset/(?P<uidb36>[0-9A-Za-z]{1,13})-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', 'django.contrib.auth.views.password_reset_confirm', name='password_reset_confirm'), url(r'^reset/done/$', 'django.contrib.auth.views.password_reset_complete', name='password_reset_complete'), )
Cuile/Grab-the-National-Train-Timetable
grab.py
Python
mit
7,660
0.002094
# -*- coding: utf-8 -*- import json from concurrent.futures import ThreadPoolExecutor from datetime import date, timedelta import requests import common # ------------------------------------------------------------------------ def grab(url): requests.packages.urllib3.disable_warnings() # 请求头 headers = { 'Host': 'kyfw.12306.cn', 'Connection': 'keep-alive', 'Cache-Control': 'no-cache', 'Accept': '*/*', 'X-Requested-With': 'XMLHttpRequest', # 'If-Modified-Since': '0', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36', 'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init', 'Accept-Encoding': 'gzip, deflate, sdch, br', 'Accept-Language': 'zh-CN,zh;q=0.8'} try: return requests.get(url, headers=headers, verify=False, timeout=15).text except (requests.exceptions.ReadTimeout, requests.exceptions.ConnectionError): print(url) return '["timeout"]' # ------------------------------------------------------------------------ def grab_station_name(): url = 'https://kyfw.12306.cn/otn/resources/js/framework/station_name.js' r = grab(url) r = r.replace('var station_names =\'@', '') r = r.replace('\';', '') r = r.split('@') station_name = [] for i in r: sn = i.split('|') sn = {'id': sn[5], 'telecode': sn[2], 'station_name': sn[1], 'pinyin': sn[3], 'initials': sn[4], 'pinyin_code': sn[0]} station_name.append(sn) with open('station_name.json', 'w', encoding='utf-8') as fp: json.dump(station_name, fp, ensure_ascii=False, sort_keys=True, indent=2) return True # ------------------------------------------------------------------------ def grab_train_list(): url = 'https://kyfw.12306.cn/otn/resources/js/query/train_list.js' r = grab(url) r = r.replace('var train_list =', '') d = json.loads(r) train_list = [] for key in d: for i in d[key]: for j in d[key][i]: j['station_train_code'] = j['station_train_code'].replace('(', '|') j['station_train_code'] = j['station_train_code'].replace(')', '|') j['station_train_code'] = j['station_train_code'].replace('-', '|') j['station_train_code'] = j['station_train_code'].split('|') j['train_code'] = j['station_train_code'][0] j['from_station'] = j['station_train_code'][1] j['to_station'] = j['station_train_code'][2] del j['station_train_code'] train_list.append(json.dumps(j, ensure_ascii=False, sort_keys=True)) train_list = list(set(train_list)) for i in train_list: train_list[train_list.index(i)] = json.loads(i) with open('train_list.json', 'w', encoding='utf-8') as fp: json.dump(train_list, fp, ensure_ascii=False, sort_keys=True, indent=2) return True # ------------------------------------------------------------------------ class train_schedule(): def get_train_schedule(self, d, max_workers): # self.grab_train_schedule(d, max_workers) self.parse_train_schedule() def parse_train_schedule(self): with open('train_schedule.json', 'r', encoding='utf-8') as ts: train_schedule = json.load(ts) get_train_schedule = [] get_train_schedule_err = [] for ts in train_schedule: try: train = ts['data'] ts.clear() ts['train'] = train ts['train']['schedule'] = ts['train']['data'] del ts['train']['data'] ts['train']['start_station_name'] = ts['train']['schedule'][0][ 'start_station_name'] del ts['train']['schedule'][0]['start_station_name'] ts['train']['end_station_name'] = ts['train']['schedule'][0]['end_station_name'] del ts['train']['schedule'][0]['end_station_name'] ts['train']['station_train_code'] = ts['train']['schedule'][0][ 'station_train_code'] del ts['train']['schedule'][0]['station_train_code'] 
ts['train']['train_class_name'] = ts['train']['schedule'][0]['train_class_name'] del ts['train']['schedule'][0]['train_class_name'] ts['train']['service_type'] = ts['train']['schedule'][0]['service_type'] del ts['train']['schedule'][0]['service_type'] ts['train']['s'] = sorted(ts['train']['schedule'], key=lambda x: x['station_no']) ts['train']['schedule'] = {} schedule_index = 0 for td in ts['train']['s']: ts['train']['schedule'].update({schedule_index: td}) schedule_index += 1 del ts['train']['s'] get_train_schedule.append(ts) except (IndexError, TypeError, json.decoder.JSONDecodeError): pass with open('get_train_schedule.json', 'w', encoding='utf-8') as fp: json.dump(get_train_schedule, fp, ensure_ascii=False, sort_keys=True, indent=2) def grab_train_schedule_callback(self, url): try: ts = json.loads(grab(url)) # print(ts) return [True, ts] except json.decoder.JSONDecodeError: # print(ts) return [False, ts] def grab_train_schedule(self, d, max_workers): common.timing_starts() with open('station_name.json', 'r', encoding='utf-8') as sn: station_name = json.load(sn) with open('train_list.json', 'r', encoding='utf-8') as tl: train_list = json.load(tl) year, month, day = d.split('-') start_time = date(int(year), int(month), int(day)) # dates = [ # start_time - timedelta(days=3), # start_time - timedelta(days=2), # start_time - timedelta(days=1), # start_time, # start_time + timedelta(days=1), # start_time + timedelta(days=2), # start_
time + timedelta(days=3) # ] dates = [start_time] urls = [] for d in dates: for i in train_list: train_no
= i['train_no'] for j in station_name: if i['from_station'] == j['station_name']: from_station_telecode = j['telecode'] if i['to_station'] == j['station_name']: to_station_telecode = j['telecode'] urls.append( 'https://kyfw.12306.cn/otn/czxx/queryByTrainNo?train_no=%s&from_station_telecode=%s&to_station_telecode=%s&depart_date=%s' % ( train_no, from_station_telecode, to_station_telecode, d.isoformat() )) train_schedule = [] train_schedule_err = [] with ThreadPoolExecutor(max_workers=max_workers) as pool: for i in pool.map(self.grab_train_schedule_callback, urls): if i[0]: train_schedule.append(i[1]) else: train_schedule_err.append(i[1]) with open('train_schedule.json', 'w', encoding='utf-8') as fp: json.dump(train_schedule, fp, ensure_ascii=False, sort_keys=True, indent=2) common.timing_ends('抓取列车时刻表') return True
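The crawler record above fans its timetable requests out with ThreadPoolExecutor.map and splits the results into success and error lists. A minimal, self-contained sketch of that fan-out pattern follows; fetch() is a hypothetical stand-in for the record's grab() so the snippet runs without any network access.

from concurrent.futures import ThreadPoolExecutor

def fetch(url):
    # Stand-in for grab(): a real version would issue the HTTP request and
    # return the response body (or a sentinel string on timeout).
    return '{"data": "stub for %s"}' % url

urls = ['https://example.invalid/page/%d' % i for i in range(10)]
ok, failed = [], []
with ThreadPoolExecutor(max_workers=4) as pool:
    # map() preserves input order, so results can be zipped back to their URLs.
    for url, body in zip(urls, pool.map(fetch, urls)):
        (ok if body.startswith('{') else failed).append((url, body))
print(len(ok), 'succeeded,', len(failed), 'failed')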
gisce/enerdata
spec/contracts/tariff_spec.py
Python
mit
226,020
0.007597
from expects.testing import failure from expects import * from enerdata.contracts.tariff import * from datetime import datetime, timedelta from enerdata.datetime.timezone import TIMEZONE from mamba import before, context, description, it with description('Create a period'): with it('accepts "te"'): TariffPeriod('P1', 'te') with it('accepts "tp"'): TariffPeriod('P1', 'tp') with it('fails when is not "te" nor "tp"'): with failure: TariffPeriod('P1', 'foo') with it('should raise an exception if range of hours is invalid'): expect(lambda: TariffPeriod('P1', 'te', winter_hours=[ (0, 12), (11, 23) ])).to(raise_error(ValueError, 'Invalid winter hours')) expect(lambda: TariffPeriod('P1', 'te', summer_hours=[ (0, 12), (11, 23) ])).to(raise_error(ValueError, 'Invalid summer hours')) with description('A period'): with it('should have a range of hours 24 for winter by default'): p1 = TariffPeriod('P1', 'te') assert p1.winter_hours == [(0, 24)] with it('should be possible to set the range of winter hours in creation'): p1 = TariffPeriod('P1', 'te', winter_hours=[(0, 12)]) assert p1.winter_hours == [(0, 12)] with it('should have a range of hours 24 for summer by default'): p1 = TariffPeriod('P1', 'te') assert p1.summer_hours == [(0, 24)] with it('should be possible to set the range of sumer hours in creation'): p1 = TariffPeriod('P1', 'te', summer_hours=[(0, 12)]) assert p1.summer_hours == [(0, 12)] with it('has to validate range hours is correct'): assert check_range_hours([(0, 12)]) is True assert check_range_hours([(0, 12), (12, 24)]) is True assert check_range_hours([(-1, 0)]) is False assert check_range_hours([(0, 25)]) is False assert check_range_hours([(0, 0)]) is False assert check_range_hours([(4, 1)]) is False assert check_range_hours([(0, 1), (0, 2)]) is False assert check_range_hours([(0, 12), (12, 24)]) is True with it('should know the total of hours in summer'): p1 = TariffPeriod('P1', 'te', summer_hours=[(0, 12), (22, 24)]) assert p1.total_summer_hours == 14 with it('should know the total of hours in winter'): p1 = TariffPeriod('P1', 'te', winter_hours=[(12, 22)]) assert p1.total_winter_hours == 10 with context('A tariff'): with before.all: self.tariff = TariffPreTD('T1') with it('periods should be a tuple type'): assert isinstance(self.tariff.periods, tuple) with it('should return the number of periods of te'): self.tariff.periods = ( TariffPeriod('1', 'te', winter_hours=[(12, 22)], summer_hours=[(13, 23)]), TariffPeriod('2', 'te', winter_hours=[(0, 12), (22, 24)], summer_hours=[(0, 13), (23, 24)]) ) assert self.tariff.get_number_of_periods() == 2 with it('should return the periods of energy'): assert len(self.tariff.energy_periods) == 2 assert self.tariff.energy_periods.keys() == ['1', '2'] with it('should return the periods of power'): assert len(self.tariff.power_periods) == 0 self.tariff.periods += (TariffPeriod('1', 'tp'),) assert len(self.tariff.power_periods) == 1 assert self.tariff.power_periods.keys() == ['1'] with it('should have 24h of range ours in its energy periods'): def set_periods(): self.tariff.periods = ( TariffPeriod('1', 'te', summer_hours=[(12, 22)]), TariffPeriod('2', 'te', summer_hours=[(0, 12), (22, 23)]) ) expect(set_periods).to(raise_error(ValueError)) with it('should check range of hours'): def set_periods(): self.tariff.periods = ( TariffPeriod('1', 'te', summer_hours=[(13, 23)]), TariffPeriod('2', 'te', summer_hours=[(0, 12), (22, 24)]) ) expect(set_periods).to(raise_error(ValueError)) with it('should check range and hours if a holiday period is defined'): 
def set_periods(): self.tariff.periods = ( TariffPeriod('P1', 'te', winter_hours=[(18, 22)], summer_hours=[(11, 15)]), TariffPeriod('P2', 'te', winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]), TariffPeriod('P3', 'te', winter_hours=[(0, 8)], summer_hours=[(0, 8)]), TariffPeriod('P4', 'te', holiday=True, winter_hours=[(18, 22)], summer_hours=[(11, 15)]), TariffPeriod('P5', 'te', holiday=True, winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]), TariffPeriod('P6', 'te', holiday=True, winter_hours=[(0, 8)], summer_hours=[(1, 8)]) ) expect(set_periods).to(raise_error(ValueError, 'The sum of hours in summer (in holidays) must be 24h: [(1, 8), (8, 11), (11, 15), (15, 24)]')) with it('should find the period by datetime'): self.tariff.periods = ( TariffPeriod('P1', 'te', winter_hours=[(18, 22)], summer_hours=[(11, 15)]), TariffPeriod('P2', 'te', winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]), TariffPeriod('P3', 'te', winter_hours=[(0, 8)], summer_hours=[(0, 8)]), TariffPeriod('P4', 'te', holiday=True, winter_hours=[(18, 22)], summer_hours=[(11, 15)]), TariffPeriod('P5', 'te', holiday=True, winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]), TariffPeriod('P6', 'te', holiday=True, winter_hours=[(0, 8)], summer_hours=[(0, 8)]) ) dt = TIMEZONE.localize(datetime(2015, 12, 24, 19, 0, 0)) period = self.tariff.get_period_by_date(dt) assert period.code == 'P1' dt = TIMEZONE.localize(datetime(2015, 12, 25, 19, 0, 0)) period = self.tariff.get_period_by_date(dt) assert period.code == 'P4' dt = TIMEZONE.localize(datetime(2015, 12, 27, 19, 0, 0)) period = self.tariff.get_period_by_date(dt) assert period.code == 'P4' dt = TIMEZONE.localize(datetime(2015, 12, 27, 19, 0, 0)) period = self.tariff.get_period_by_date(dt) assert period.code == 'P4' dt = TIMEZONE.localize(datetime(2015, 12, 27, 17, 0, 0)) period = self.tariff.get_period_by_date(dt) assert period.code == 'P5' dt = TIMEZONE.localize(datetime(2015, 12, 27, 1, 0, 0)) period = self.tariff.get_period_by_date(dt) assert period.code == 'P6' with it('should allow to check if a set of powers is correct'): tari_T20A = T20A() expect(lambda: tari_T20A.evaluate_powers([-10])).to( raise_error(NotPositivePower)) expect(lambda: tari_T20A.evaluate_powers([0])).to( raise_error(NotPositivePower)) expect(lambda: tari_T20A.evaluate_powers([5.55])).to( raise_error(NotNormalizedPower)) assert tari_T20A.evaluate_powers([5.5]) expect(lambda: tari_T20A.evaluate_powers([5, 7])).to( raise_error(IncorrectPowerNumber, 'Expected 1 power(s) and got 2')) expect(lambda: tari_T20A.evaluate_powers([100])).to( raise_error(IncorrectMaxPower)) tari_T30A = T30A() expect(lambda: tari_T30A.evaluate_powers([-10, -5, 0])).to( raise_error(NotPositivePower)) expect(lambda: tari_T30A.evaluate_powers([15, 15, 15])).to( raise_error(IncorrectMaxPower)) expect(
lambda: tari_T30A.evaluate_powers([16, 17.1, 16])).to( raise_error(NotNormalizedPower)) expect(lambda: tari_T30A.evaluate_powers([14, 15.242, 15.242])).to( raise_error(IncorrectMinPower)) assert tari_T30A.evaluate_powers([15.242, 15.242, 16.454]) expect(lambda: tari_T30A.evaluate_powers([16, 17])).to( raise_error(IncorrectPowerNumber, 'Expected 3 power(s) and got 2')) tari_T31A = T31A() expect(lambda: tari_T3
1A.evaluate_powers([-10, -5, 0])).to( raise_error(NotPositivePower)) assert tari_T31A.evaluate_powers([10,
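The spec above pins down the contract of check_range_hours() through examples. The sketch below is not the enerdata implementation, only an illustrative validator that satisfies the asserted cases: ranges must lie within 0-24, be non-empty, and not overlap (adjacent ranges are allowed).

def check_range_hours(ranges):
    # Illustrative only -- mirrors the behaviour asserted in the spec above.
    previous_end = 0
    for start, end in sorted(ranges):
        if not (0 <= start < end <= 24):   # inside the day and non-empty
            return False
        if start < previous_end:           # overlapping ranges are rejected
            return False
        previous_end = end
    return True

assert check_range_hours([(0, 12), (12, 24)]) is True
assert check_range_hours([(0, 1), (0, 2)]) is False
assert check_range_hours([(4, 1)]) is False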
plotly/plotly.py
packages/python/plotly/plotly/graph_objs/scatter/_hoverlabel.py
Python
mit
17,848
0.000952
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType import copy as _copy class Hoverlabel(_BaseTraceHierarchyType): # class properties # -------------------- _parent_path_str = "scatter" _path_str = "scatter.hoverlabel" _valid_props = { "align", "alignsrc", "bgcolor", "bgcolorsrc", "bordercolor", "bordercolorsrc", "font", "namelength", "namelengthsrc", } # align # ----- @property def align(self): """ Sets the horizontal alignment of the text content within hover label box. Has an effect only if the hover label text spans more two or more lines The 'align' property is an enumeration that may be specified as: - One of the following enumeration values: ['left', 'right', 'auto'] - A tuple, list, or one-dimensional numpy array of the above Returns ------- Any|numpy.ndarray """ return self["align"] @align.setter def align(self, val): self["align"] = val # alignsrc # -------- @property def alignsrc(self): """ Sets the source reference on Chart Studio Cloud for `align`. The 'alignsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["alignsrc"] @alignsrc.setter def alignsrc(self, val): self["alignsrc"] = val # bgcolor # ------- @property def bgcolor(self): """ Sets the background color of the hover labels for this trace The 'bgcolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A list or array of any of the above Returns ------- str|numpy.ndarray """ return self["bgcolor"] @bgcolor.setter def bgcolor(self, val): self["bgcolor"] = val # bgcolorsrc # ---------- @property def bgcolorsrc(self): """ Sets the source reference on Chart Studio Cloud for `bgcolor`. 
The 'bgcolorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["bgcolorsrc"] @bgcolorsrc.setter def bgcolorsrc(self, val): self["bgcolorsrc"] = val # bordercolor # ----------- @property def bordercolor(self): """ Sets the border color of the hover labels for this trace. The 'bordercolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown,
burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, da
rkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, rebeccapurple, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A list or array of any of the above Returns ------- str|numpy.ndarray """ return self["bordercolor"] @bordercolor.setter def bordercolor(self, val): self["bordercolor"] = val # bordercolorsrc # -------------- @property def bordercolorsrc(self): """ Sets the source reference on Chart Studio Cloud for `bordercolor`. The 'bordercolorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str """ return self["bordercol
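In practice these generated property classes are rarely instantiated directly; the hover label is usually passed as a plain dict when building a trace. A short usage sketch, assuming the plotly package is installed and using arbitrary data:

import plotly.graph_objects as go

fig = go.Figure(
    go.Scatter(
        x=[1, 2, 3],
        y=[4, 1, 2],
        hoverlabel=dict(bgcolor="white", bordercolor="black", align="left"),
    )
)
fig.show()  # opens a browser/notebook renderer when run interactively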
hannosch/genc
genc/__init__.py
Python
apache-2.0
935
0
from genc.regions import (
    Region,
    REGIONS,
)

try:
    basestring
except NameError:  # pragma: no cover
    basestring = str


def _build_cache(name):
    idx = Region._fields.index(name)
    return dict([(reg[idx].upper(), reg)
                 for reg in REGIONS if reg[idx] is not None])

_alpha2 = _build_cache('alpha2')
_alpha3 = _build_cache('alpha3')
_name = _build_cache('name')


def region_by_alpha2(code, default=None):
    if isinstance(code, basestring):
        code = code.upper()
    return _alpha2.get(code, default)


def region_by_alpha3(code, default=None):
    if isinstance(code, basestring):
        code = code.upper()
    return _alpha3.get(code, default)


def region_by_name(name, default=None):
    if isinstance(name, basestring):
        name = name.upper()
    return _name.get(name, default)


__all__ = (
    'region_by_alpha2',
    'region_by_alpha3',
    'region_by_name',
    'REGIONS',
)
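The module above builds one upper-cased lookup dict per Region field and resolves queries against it. A self-contained sketch of that lookup-cache pattern with a tiny made-up dataset (the real REGIONS tuple in genc is far larger):

from collections import namedtuple

Region = namedtuple('Region', ['alpha2', 'alpha3', 'name'])
REGIONS = [
    Region('US', 'USA', 'UNITED STATES'),
    Region('DE', 'DEU', 'GERMANY'),
]

def build_cache(field):
    # Index the records by one field, upper-cased for case-insensitive lookup.
    idx = Region._fields.index(field)
    return {reg[idx].upper(): reg for reg in REGIONS if reg[idx] is not None}

by_alpha2 = build_cache('alpha2')
print(by_alpha2.get('us'.upper()))   # Region(alpha2='US', alpha3='USA', ...)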
empty/django-reversion
test_project/test_app/admin.py
Python
bsd-3-clause
780
0.017949
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericStackedInline

from reversion.admin import VersionAdmin

from test_project.test_app.models import ChildModel, RelatedModel, GenericRelatedModel, ProxyModel


class RelatedModelInline(admin.StackedInline):
    model = RelatedModel


class GenericRelatedInline(GenericStackedInline):
    model = GenericRelatedModel


class ChildModelAdmin(VersionAdmin):
    inlines = RelatedModelInline, GenericRelatedInline,
    list_display = ("parent_name", "child_name",)
    list_editable = ("child_name",)
    readonly_fields = ("parent_name",)


admin.site.register(ChildModel, ChildModelAdmin)
admin.site.register(ProxyModel, ChildModelAdmin)
arongdari/sparse-graph-prior
sgp/MixGGPgraphmcmc.py
Python
mit
7,905
0.002657
import time import numpy as np from numpy import log, exp from scipy.sparse import triu, csr_matrix from scipy.special import gammaln from scipy.stats import norm, lognorm from .NCRMmcmc import NGGPmcmc from .GGPgraphmcmc import tpoissonrnd def MixGGPgraphmcmc(G, modelparam, mcmcparam, typegraph, verbose=True): """ Run MCMC for the GGP graph model Convert the same function used in BNPGraph matlab package by Francois Caron http://www.stats.ox.ac.uk/~caron/code/bnpgraph/index.html :param G:sparse logical adjacency matrix :param modelparam: dictionary of model parameters with the following fields: - alpha: if scalar, the value of alpha. If vector of length 2, parameters of the gamma prior over alpha - sigma: if scalar, the value of sigma. If vector of length 2, parameters of the gamma prior over (1-sigma) - tau: if scalar, the value of tau. If vector of length 2, parameters of the gamma prior over tau :param mcmcparam: dictionary of mcmc parameters with the following fields: - niter: number of MCMC iterations - nburn: number of burn-in iterations - thin: thinning of the MCMC output - latent.MH_nb: number of MH iterations for latent (if 0: Gibbs update) - hyper.MH_nb: number of MH iterations for hyperparameters - store_w: logical. If true, returns MCMC draws of w :param typegraph: type of graph ('undirected' or 'simple') simple graph does not contain any self-loop :param verbose: logical. If true (default), print information :return: - samples: dictionary with the MCMC samples for the variables - w - w_rem - alpha - logalpha - sigma - tau - stats: dictionary with summary stats about the MCMC algorithm - w_rate: acceptance rate of the HMC step at each iteration - hyper_rate: acceptance rate of the MH for the hyperparameters at each iteration """ n_mixture = modelparam['n_mixture'] if typegraph is 'simple': issimple = True else: issimple = False if modelparam['estimate_alpha']: alpha = 100. * np.random.random(size=n_mixture) if verbose: print('Random Init: alpha', alpha) else: alpha = modelparam['alpha'] if modelparam['estimate_sigma']: sigma = 1 - np.random.lognormal(1, 1, size=n_mixture) else: sigma = modelparam['sigma'] if modelparam['estimate_tau']: tau = 10. 
* np.random.random(size=n_mixture) else: tau = modelparam['tau'] u = exp(np.random.normal(0, 1 / 4, size=n_mixture)) K = G.shape[0] # nodes pi = np.random.randint(0, n_mixture, size=K) if issimple: G2 = triu(G + G.T, k=1) else: G2 = triu(G + G.T, k=0) ind1, ind2 = G2.nonzero() n = np.random.randint(1, 5, size=len(ind1)) count = csr_matrix((n, (ind1, ind2)), shape=(K, K), dtype=int) N = count.sum(0).T + count.sum(1) niter = mcmcparam['niter'] nburn = mcmcparam['nburn'] thin = mcmcparam['thin'] dir_alpha = modelparam['dir_alpha'] J = np.zeros(K) J_rem = np.zeros(n_mixture) n_samples = int((niter - nburn) / thin) w_st = np.zeros((n_samples, K)) w_rem_st = np.zeros((n_samples, n_mixture)) alpha_st = np.zeros((n_samples, n_mixture)) tau_st = np.zeros((n_samples, n_mixture)) sigma_st = np.zeros((n_samples, n_mixture)) rate = np.zeros(niter) rate2 = np.zeros(niter) logdist = np.zeros(n_mixture) tic = time.time() for iter in range(niter): if verbose: print('Iteration=%d' % iter, flush=True) print('\talpha =', alpha, flush=True) print('\tsigma =', sigma, flush=True) print('\ttau =', tau, flush=True) print('\tu =', u, flush=True) print('\t# node for each mixture', [np.sum(pi == m) for m in range(n_mixture)], flush=True) print('\tJoint log likelihood', logdist, np.sum(logdist), flush=True) # update jump size & update hyperparams for m in range(n_mixture): J[pi == m], J_rem[m], alpha[m], sigma[m], tau[m], u[m] = NGGPmcmc(np.sum(N[pi == m]), N[pi == m], alpha[m], sigma[m], tau[m], u[m], modelparam, mcmcparam) logJ = log(J) # update node membership n_sum = np.zeros(n_mixture) for m in range(n_mixture): logdist[m] = joint_logdist(N[pi == m], alpha[m], sigma[m], tau[m], u[m]) n_sum[m] = np.sum(N[pi == m]) # print("DEBUG", logdist, exp(log_normalise(logdist))) for k in range(K): prev_m = pi[k] logdist[prev_m] += -log(alpha[prev_m]) - N[k] * log(u[prev_m]) + gammaln(n_sum[prev_m]) \ - gammaln(n_sum[prev_m] - N[k]) + (N[k] - sigma[prev_m]) * log(u[prev_m] + tau[prev_m]) n_sum[prev_m] -= N[k] tmp = np.zeros(n_mixture) for m in range(n_mixture): tmp[m] = logdist[m] + log(alpha[m]) + N[k] * log(u[m]) - gammaln(n_sum[m]) \ - gammaln(n_sum[m] + N[k]) - (N[k] - sigma[m]) * log(u[m] + tau[m]) tmp = log_normalise(tmp) pi[k] = np.random.multinomial(1, tmp).argmax() # print(tmp, pi[k]) new_m = pi[k] logdist[new_m] += log(alpha[new_m]) + N[k] * log(u[new_m]) - gammaln(n_sum[new_m]) \ - gammaln(n_sum[new_m] + N[k]) - (N[k] - sigma[new_m]) * log(u[new_m] + tau[new_m]) n_sum[new_m] += N[k] # update latent count n lograte_poi = log(2.) + logJ[ind1] + logJ[ind2] lograte_poi[ind1 == ind2] = 2. * logJ[ind1[ind1 == ind2]] n
= tpoissonrnd(lograte_poi) count
= csr_matrix((n, (ind1, ind2)), (K, K)) N = count.sum(0).T + count.sum(1) if iter == 10: toc = (time.time() - tic) * niter / 10. hours = np.floor(toc / 3600) minutes = (toc - hours * 3600.) / 60. print('-----------------------------------', flush=True) print('Start MCMC for GGP graphs', flush=True) print('Nb of nodes: %d - Nb of edges: %d' % (K, G2.sum()), flush=True) print('Number of iterations: %d' % niter, flush=True) print('Estimated computation time: %.0f hour(s) %.0f minute(s)' % (hours, minutes), flush=True) print('Estimated end of computation: ', time.strftime('%b %dth, %H:%M:%S', time.localtime(tic + toc)), flush=True) print('-----------------------------------', flush=True) if iter > nburn and (iter - nburn) % thin == 0: ind = int((iter - nburn) / thin) if mcmcparam['store_w']: w_st[ind] = J w_rem_st[ind] = J_rem alpha_st[ind] = alpha sigma_st[ind] = sigma tau_st[ind] = tau def log_normalise(log_prob): log_prob -= np.max(log_prob) return exp(log_prob) def joint_logdist(pi, alpha, sigma, tau, u): abs_pi = len(pi) n = np.sum(pi) tmp = abs_pi * log(alpha) + (n - 1.) * log(u) - gammaln(n) - (n - sigma * abs_pi) * log(u + tau) \ - (alpha / sigma) * ((u + tau) ** sigma - tau ** sigma) tmp += np.sum(gammaln(pi - sigma) - gammaln(1. - sigma)) return tmp def dirichlet_multinomial(hyper_alpha, pi, n_mixture): pi_m = np.array([np.sum(pi == m) for m in range(n_mixture)]) return gammaln(n_mixture * hyper_alpha) + np.sum(gammaln(pi_m + hyper_alpha)) - n_mixture * gammaln( hyper_alpha) - gammaln(len(pi) + hyper_alpha)
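The membership update above works with log-probabilities and draws the new mixture component with np.random.multinomial. A standalone numpy sketch of that step; note that multinomial expects weights summing to one, so the sketch also divides by the total after exponentiating:

import numpy as np
from numpy import exp

def log_normalise(log_prob):
    log_prob = log_prob - np.max(log_prob)   # subtract max to avoid overflow
    prob = exp(log_prob)
    return prob / prob.sum()                 # proper probabilities

log_weights = np.array([-1050.0, -1052.3, -1049.1])
p = log_normalise(log_weights)
new_component = np.random.multinomial(1, p).argmax()
print(p, new_component)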
tedder/ansible-modules-core
utilities/helper/_fireball.py
Python
gpl-3.0
1,209
0.000827
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

DOCUMENTATION = '''
---
module: fireball
short_description: Enable fireball mode on remote node
version_added: "0.9"
deprecated: "in favor of SSH with ControlPersist"
description:
    - Modern SSH clients support ControlPersist which is just as fast as
      fireball was. Please enable that in ansible.cfg as a replacement
      for fireball.
    - Removed in ansible 2.0.
author:
    - "Ansible Core Team"
    - "Michael DeHaan"
'''

EXAMPLES = '''
'''
giuseppe/virt-manager
virtManager/about.py
Python
gpl-2.0
1,472
0
#
# Copyright (C) 2006, 2013 Red Hat, Inc.
# Copyright (C) 2006 Daniel P. Berrange <berrange@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#

import logging

from virtManager.baseclass import vmmGObjectUI


class vmmAbout(vmmGObjectUI):
    def __init__(self):
        vmmGObjectUI.__init__(self, "about.ui", "vmm-about")
        self.builder.connect_signals({
            "on_vmm_about_delete_event": self.close,
            "on_vmm_about_response": self.close,
        })

    def show(self):
        logging.debug("Showing about")
        self.topwin.set_version(self.config.get_appversion())
        self.topwin.present()

    def close(self, ignore1=None, ignore2=None):
        logging.debug("Closing about")
        self.topwin.hide()
        return 1

    def _cleanup(self):
        pass
google/google-ctf
2017/quals/2017-re-food/dex_to_bytes.py
Python
apache-2.0
705
0.001418
#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

data = open('output.dex', 'rb').read()

with open('output.txt', 'wb') as f:
    f.write(str(map(ord, data)))
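The working part of the script above is Python 2, where str(map(...)) renders the full list of byte values. A hedged Python 3 equivalent, assuming output.dex sits next to the script; in Python 3 a bytes object already iterates as integers:

data = open('output.dex', 'rb').read()
with open('output.txt', 'w') as f:
    f.write(str(list(data)))   # bytes -> list of ints, written as text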
fabioz/Pydev
plugins/org.python.pydev.core/pysrc/conftest.py
Python
epl-1.0
12,827
0.002495
import pytest import sys from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_IRONPYTHON from tests_python.debug_constants import TEST_CYTHON from tests_python.debug_constants import PYDEVD_TEST_VM import site import os from _pydev_bundle import pydev_log def pytest_report_header(config): print('PYDEVD_USE_CYTHON: %s' % (TEST_CYTHON,)) print('PYDEVD_TEST_VM: %s' % (PYDEVD_TEST_VM,)) try: import multiprocessing except ImportError: pass else: print('Number of processors: %s' % (multiprocessing.cpu_count(),)) print('Relevant system paths:') print('sys.executable: %s' % (sys.executable,)) print('sys.prefix: %s' % (sys.prefix,)) if hasattr(sys, 'base_prefix'): print('sys.base_prefix: %s' % (sys.base_prefix,)) if hasattr(sys, 'real_prefix'): print('sys.real_prefix: %s' % (sys.real_prefix,)) if hasattr(site, 'getusersitepackages'): print('site.getusersitepackages(): %s' % (site.getusersitepackages(),)) if hasattr(site, 'getsitepackages'): print('site.getsitepackages(): %s' % (site.getsitepackages(),)) for path in sys.path: if os.path.exists(path) and os.path.basename(path) == 'site-packages': print('Folder with "site-packages" in sys.path: %s' % (path,)) _started_monitoring_threads = False def _start_monitoring_threads(): # After the session finishes, wait 20 seconds to see if everything finished properly # and if it doesn't report an error. global _started_monitoring_threads if _started_monitoring_threads: return _started_monitoring_threads = True import threading if hasattr(sys, '_current_frames') and hasattr(threading, 'enumerate'): import time import traceback class DumpThreads(threading.Thread): def run(self): time.sleep(20) thread_id_to_name = {} try: for t in threading.enumerate(): thread_id_to_name[t.ident] = '%s (daemon: %s)' % (t.name, t.daemon) except: pass stack_trace = [ '===============================================================================', 'pydev pyunit runner: Threads still found running after tests finished', '================================= Thread Dump ================================='] for thread_id, stack in sys._current_frames().items(): stack_trace.append('\n-------------------------------------------------------------------------------') stack_trace.append(" Thread %s" % thread_id_to_name.get(thread_id, thread_id)) stack_trace.append('') if 'self' in stack.f_locals: sys.stderr.write(str(stack.f_locals['self']) + '\n') for filename, lineno, name, line in traceback.extract_stack(stack): stack_trace.append(' File "%s", line %d, in %s' % (filename, lineno, name)) if line: stack_trace.append(" %s" % (line.strip())) stack_trace.append('\n=============================== END Thread Dump ===============================') sys.stderr.write('\n'.join(stack_trace)) # Force thread run to finish import os os._exit(123) dump_current_frames_thread = DumpThreads() dump_current_frames_thread.daemon = True # Daemon so that this thread doesn't halt it! 
dump_current_frames_thread.start() def pytest_unconfigure(): _start_monitoring_threads() @pytest.fixture(scope="session", autouse=True) def check_no_threads(): yield _start_monitoring_threads() # see: http://goo.gl/kTQMs SYMBOLS = { 'customary': ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'), 'customary_ext': ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa', 'zetta', 'iotta'), 'iec': ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'), 'iec_ext': ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi', 'zebi', 'yobi'), } def bytes2human(n, format='%(value).1f %(symbol)s', symbols='customary'): """ Bytes-to-human / human-to-bytes converter. Based on: http://goo.gl/kTQMs Working with Python 2.x and 3.x. Author: Giampaolo Rodola' <g.rodola [AT] gmail [DOT] com> License: MIT """ """ Convert n bytes into a human readable string based on format. symbols can be either "customary", "customary_ext", "iec" or "iec_ext", see: http://goo.gl/kTQMs >>> bytes2human(0) '0.0 B' >>> bytes2human(0.9) '0.0 B' >>> bytes2human(1) '1.0 B' >>> bytes2human(1.9) '1.0 B' >>> bytes2human(1024) '1.0 K' >>> bytes2human(1048576) '1.0 M' >>> bytes2human(1099511627776127398123789121) '909.5 Y' >>> bytes2human(9856, symbols="customary") '9.6 K' >>> bytes2human(9856, symbols="customary_ext") '9.6 kilo' >>> bytes2human(9856, symbols="iec") '9.6 Ki' >>> bytes2human(9856, symbols="iec_ext") '9.6 kibi' >>> bytes2human(10000, "%(value).1f %(symbol)s/sec") '9.8 K/sec' >>> # precision can be adjusted by playing with %f operator >>> bytes2human(10000, format="%(value).5f %(symbol)s") '9.76562 K' """ n = int(n) if n < 0: raise ValueError("n < 0") symbols = SYMBOLS[symbols] prefix = {} for i, s in enumerate(symbols[1:]): prefix[s] = 1 << (i + 1) * 10 for symbol in reversed(symbols[1:]): if n >= prefix[symbol]: value = float(n) / prefix[symbol] return format % locals() return format % dict(symbol=symbols[0], value=n) def format_memory_info(memory_info, curr_proc_memory_info): return 'Total: %s, Available: %s, Used: %s %%, Curr process: %s' % ( bytes2human(memory_info.total), bytes2human(memory_info.available), memory_info.percent, format_process_memory_info(curr_proc_memory_info)) def format_process_memory_info(proc_memory_info): return bytes2human(proc_memory_info.rss) DEBUG_MEMORY_INFO = False _global_collect_info = False PRINT_MEMORY_BEFORE_AFTER_TEST = False # This makes running tests slower (but it may be handy to diagnose memory issues). @pytest.fixture(autouse=PRINT_MEMORY_BEFORE_AFTER_TEST) def before_after_each_function(request): global _global_collect_info try: import psutil # Don't fail if not there except ImportError: yield return current_pids = set(proc.pid for proc in psutil.process_iter()) before_curr_proc_memory_info = psutil.Process().memory_info() if _global_collect_info and DEBUG_MEMORY_INFO: try: from pympler import summary, muppy sum1 = summary.summarize(muppy.get_objects()) except: pydev_log.exception() sys
.stdout.write( ''' =============================================================================== Memory before: %s %s =============================================================================== ''' % (request.function, format_memory_info(psutil.virtual_memory(), before_curr_proc_memory_info))) yield processes_info = [] for proc in psutil.process
_iter(): if proc.pid not in current_pids: try: try: cmdline = proc.cmdline() except: cmdline = '<unable to get>' processes_info.append( 'New Process: %s(%s - %s) - %s' % ( proc.name(), proc.pid, cmdline, format_process_memory_info(proc.memory_info()) ) ) except (psutil.NoSuchProcess, psutil.AccessDenied): pass # The process could've died in the meanwhile after_curr_proc_memory_info = psutil.Pro
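The DumpThreads helper above relies on pairing sys._current_frames() with traceback.extract_stack to print a stack trace per live thread. A standalone sketch of that technique (it simply dumps the current interpreter's threads when run):

import sys
import threading
import traceback

def dump_threads():
    names = {t.ident: '%s (daemon: %s)' % (t.name, t.daemon)
             for t in threading.enumerate()}
    for thread_id, frame in sys._current_frames().items():
        print('--- Thread %s' % names.get(thread_id, thread_id))
        for filename, lineno, func, line in traceback.extract_stack(frame):
            print('  File "%s", line %d, in %s' % (filename, lineno, func))
            if line:
                print('    %s' % line.strip())

dump_threads()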
wbtuomela/mezzanine
mezzanine/utils/sites.py
Python
bsd-2-clause
4,584
0.000218
from __future__ import unicode_literals import os import sys import threading from contextlib import contextmanager from django.contrib.sites.models import Site from mezzanine.conf import settings from mezzanine.core.request import current_request from mezzanine.utils.conf import middlewares_or_subclasses_installed SITE_PERMISSION_MIDDLEWARE = \ "mezzanine.core.middleware.SitePermissionMiddleware" def current_site_id(): """ Responsible for determining the current ``Site`` instance to use when retrieving data for any ``SiteRelated`` models. If we're inside an override_current_site_id context manager, return the overriding site ID. Otherwise, try to determine the site using the following methods in order: - ``site_id`` in session. Used in the admin so that admin users can switch sites and stay on the same domain for the admin. - The id of the Site object corresponding to the hostname in the current request. This result is cached. - ``MEZZANINE_SITE_ID`` environment variable, so management commands or anything else outside of a request can specify a site. - ``SITE_ID`` setting. If a current request exists and the current site is not overridden, the site ID is stored on the request object to speed up subsequent calls. """ if hasattr(override_current_site_id.thread_local, "site_id"): return override_current_site_id.thread_local.site_id from mezzanine.utils.cache import cache_installed, cache_get, cache_set request = current_request() site_id = getattr(request, "site_id", None) if request and not site_id: site_id = request.session.get("site_id", None) if not site_id: domain = request.get_host().lower() if cache_installed(): # Don't use Mezzanine's cache_key_prefix here, since it # uses this very function we're in right now to create a # per-site cache key. bits = (settings.CACHE_MIDDLEWARE_KEY_PREFIX, domain) cache_key = "%s.site_id.%s" % bits site_id = cache_get(cache_key) if not site_id: try: site = Site.objects.get(domain__iexact=domain) except Site.DoesNotExist: pass else: site_id = site.id if cache_installed(): cache_set(cache_key, site_id) if not site_id: site_id = os.environ.get("MEZZANINE_SITE_ID", settings.SITE_ID) if request and site_id and not getattr(settings, "TESTING", False): request.site_id = site_id return site_id @contextmanager def override_current_site_id(site_id): """ Context manager that overrides the current site id for code executed within it. Used to access SiteRelated objects outside the current site. """ override_current_site_id.thread_local.site_id = site_id yield del override_current_site_id.thread_local.site_id override_current_site_id.thread_local = threading.local() def has_site_permission(user): """ Checks if a staff user has staff-level access for the current site. The actual permission lookup occurs in ``SitePermissionMiddleware`` which then marks the request with the ``has_site_permission`` flag, so that we only query the db once per request, so this function serves as the entry point for everything else to check access. We also fall back to an ``is_staff`` check if the middleware is not installed, to ease migration. """ if not middlewares_or_su
bclasses_installed([SITE_PERMISSION_MIDDLEWARE]): return user.is_staff and user.is_active
return getattr(user, "has_site_permission", False) def host_theme_path(): """ Returns the directory of the theme associated with the given host. """ # Set domain to None, which we'll then query for in the first # iteration of HOST_THEMES. We use the current site_id rather # than a request object here, as it may differ for admin users. domain = None for (host, theme) in settings.HOST_THEMES: if domain is None: domain = Site.objects.get(id=current_site_id()).domain if host.lower() == domain.lower(): try: __import__(theme) module = sys.modules[theme] except ImportError: pass else: return os.path.dirname(os.path.abspath(module.__file__)) return ""
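override_current_site_id() above is an instance of the thread-local override pattern: stash a value on threading.local, yield, then remove it. A generic sketch with no Django dependency (the try/finally is an addition here so the attribute is cleaned up even if the block raises):

import threading
from contextlib import contextmanager

_local = threading.local()

@contextmanager
def override_value(value):
    _local.value = value
    try:
        yield
    finally:
        del _local.value

def current_value(default=None):
    return getattr(_local, 'value', default)

with override_value(42):
    print(current_value())           # 42 inside the block
print(current_value('fallback'))     # 'fallback' outside it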
artemsok/sockeye
test/unit/test_attention.py
Python
apache-2.0
18,317
0.005186
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You may not # use this file except in compliance with the License. A copy of the License # is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express or implied. See the License for the specific language governing # permissions and limitations under the License. import mxnet as mx import numpy as np import pytest import sockeye.constants as C import sockeye.coverage import sockeye.rnn_attention from test.common import gaussian_vector, integer_vector attention_types = [C.ATT_BILINEAR, C.ATT_DOT, C.ATT_LOC, C.ATT_MLP] def test_att_bilinear(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_BILINEAR, num_hidden=None, input_previous_word=True, source_num_hidden=None, query_num_hidden=6, layer_normalization=False, config_coverage=None) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=None) assert type(attention) == sockeye.rnn_attention.BilinearAttention assert not attention._input_previous_word assert attention.num_hidden == 6 def test_att_dot(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_DOT, num_hidden=2, input_previous_word=True, source_num_hidden=4, query_num_hidden=6,
layer_normalization=False,
config_coverage=None, is_scaled=False) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=None) assert type(attention) == sockeye.rnn_attention.DotAttention assert attention._input_previous_word assert attention.project_source assert attention.project_query assert attention.num_hidden == 2 assert attention.is_scaled is False assert not attention.scale def test_att_dot_scaled(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_DOT, num_hidden=16, input_previous_word=True, source_num_hidden=None, query_num_hidden=None, layer_normalization=False, config_coverage=None, is_scaled=True) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=None) assert type(attention) == sockeye.rnn_attention.DotAttention assert attention._input_previous_word assert attention.project_source assert attention.project_query assert attention.num_hidden == 16 assert attention.is_scaled is True assert attention.scale == 0.25 def test_att_mh_dot(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_MH_DOT, num_hidden=None, input_previous_word=True, source_num_hidden=8, query_num_hidden=None, layer_normalization=False, config_coverage=None, num_heads=2) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=None) assert type(attention) == sockeye.rnn_attention.MultiHeadDotAttention assert attention._input_previous_word assert attention.num_hidden == 8 assert attention.heads == 2 assert attention.num_hidden_per_head == 4 def test_att_fixed(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_FIXED, num_hidden=None, input_previous_word=True, source_num_hidden=None, query_num_hidden=None, layer_normalization=False, config_coverage=None) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=None) assert type(attention) == sockeye.rnn_attention.EncoderLastStateAttention assert attention._input_previous_word def test_att_loc(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_LOC, num_hidden=None, input_previous_word=True, source_num_hidden=None, query_num_hidden=None, layer_normalization=False, config_coverage=None) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=10) assert type(attention) == sockeye.rnn_attention.LocationAttention assert attention._input_previous_word assert attention.max_source_seq_len == 10 def test_att_mlp(): config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_MLP, num_hidden=16, input_previous_word=True, source_num_hidden=None, query_num_hidden=None, layer_normalization=True, config_coverage=None) attention = sockeye.rnn_attention.get_attention(config_attention, max_seq_len=10) assert type(attention) == sockeye.rnn_attention.MlpAttention assert attention._input_previous_word assert attention.attention_num_hidden == 16 assert attention.dynamic_source_num_hidden == 1 assert attention._ln assert not attention.coverage def test_att_cov(): config_coverage = sockeye.coverage.CoverageConfig(type='tanh', num_hidden=5, layer_normalization=True) config_attention = sockeye.rnn_attention.AttentionConfig(type=C.ATT_COV, num_hidden=16, input_previous_word=True, source_num_hidden=None, query_num_hidden=None, layer_normalization=True, config_coverage=config_coverage) attention = sockeye.rnn_attention.
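The scaled-dot test above expects scale == 0.25 for num_hidden=16, i.e. the usual 1/sqrt(d) factor. A standalone numpy illustration of that scaling (not sockeye's implementation, which is built from mxnet symbols):

import numpy as np

def scaled_dot_attention(query, keys, values):
    d = query.shape[-1]
    scores = keys @ query / np.sqrt(d)      # one score per source position
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()                # softmax over source positions
    return weights @ values                 # context vector

num_hidden = 16
print(1.0 / np.sqrt(num_hidden))            # 0.25, matching the assertion
q = np.random.rand(num_hidden)
K = np.random.rand(5, num_hidden)
V = np.random.rand(5, num_hidden)
print(scaled_dot_attention(q, K, V).shape)  # (16,)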
adrienbrault/home-assistant
homeassistant/helpers/aiohttp_client.py
Python
apache-2.0
5,482
0.000547
"""Helper for aiohttp webclient stuff.""" from __future__ import annotations import asyncio from contextlib import suppress from ssl import SSLContext import sys from typing import Any, Awaitable, cast import aiohttp from aiohttp import web from aiohttp.hdrs import CONTENT_TYPE, USER_AGENT from aiohttp.web_exceptions import HTTPBadGateway, HTTPGatewayTimeout import async_timeout from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE, __version__ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers.frame import warn_use from homeassistant.loader import bind_hass from homeassistant.util import ssl as ssl_util DATA_CONNECTOR = "aiohttp_connector" DATA_CONNECTOR_NOTVERIFY = "aiohttp_connector_notverify" DATA_CLIENTSESSION = "aiohttp_clientsession" DATA_CLIENTSESSION_NOTVERIFY = "aiohttp_clientsession_notverify" SERVER_SOFTWARE = "HomeAssistant/{0} aiohttp/{1} Python/{2[0]}.{2[1]}".format( __version__, aiohttp.__version__, sys.version_info ) @callback @bind_hass def async_get_clientsession( hass: HomeAssistant, verify_ssl: bool = True ) -> aiohttp.ClientSession: """Return default aiohttp ClientSession. This method must be run in the event loop. """ key = DATA_CLIENTSESSION_NOTVERIFY if verify_ssl: key = DATA_CLIENTSESSION if key not in hass.data: hass.data[key] = async_create_clientsession(hass, verify_ssl) return cast(aiohttp.ClientSession, hass.data[key]) @callback @bind_hass def async_create_clientsession( hass: HomeAssistant, verify_ssl: bool = True, au
to_cleanup: bool = True, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies. If auto_cleanup is False, you need to call detach() after the session returned is no longer used. Default is True, the session will
be automatically detached on homeassistant_stop. This method must be run in the event loop. """ connector = _async_get_connector(hass, verify_ssl) clientsession = aiohttp.ClientSession( connector=connector, headers={USER_AGENT: SERVER_SOFTWARE}, **kwargs, ) clientsession.close = warn_use( # type: ignore clientsession.close, "closes the Home Assistant aiohttp session" ) if auto_cleanup: _async_register_clientsession_shutdown(hass, clientsession) return clientsession @bind_hass async def async_aiohttp_proxy_web( hass: HomeAssistant, request: web.BaseRequest, web_coro: Awaitable[aiohttp.ClientResponse], buffer_size: int = 102400, timeout: int = 10, ) -> web.StreamResponse | None: """Stream websession request to aiohttp web response.""" try: with async_timeout.timeout(timeout): req = await web_coro except asyncio.CancelledError: # The user cancelled the request return None except asyncio.TimeoutError as err: # Timeout trying to start the web request raise HTTPGatewayTimeout() from err except aiohttp.ClientError as err: # Something went wrong with the connection raise HTTPBadGateway() from err try: return await async_aiohttp_proxy_stream( hass, request, req.content, req.headers.get(CONTENT_TYPE) ) finally: req.close() @bind_hass async def async_aiohttp_proxy_stream( hass: HomeAssistant, request: web.BaseRequest, stream: aiohttp.StreamReader, content_type: str | None, buffer_size: int = 102400, timeout: int = 10, ) -> web.StreamResponse: """Stream a stream to aiohttp web response.""" response = web.StreamResponse() if content_type is not None: response.content_type = content_type await response.prepare(request) # Suppressing something went wrong fetching data, closed connection with suppress(asyncio.TimeoutError, aiohttp.ClientError): while hass.is_running: with async_timeout.timeout(timeout): data = await stream.read(buffer_size) if not data: break await response.write(data) return response @callback def _async_register_clientsession_shutdown( hass: HomeAssistant, clientsession: aiohttp.ClientSession ) -> None: """Register ClientSession close on Home Assistant shutdown. This method must be run in the event loop. """ @callback def _async_close_websession(event: Event) -> None: """Close websession.""" clientsession.detach() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_websession) @callback def _async_get_connector( hass: HomeAssistant, verify_ssl: bool = True ) -> aiohttp.BaseConnector: """Return the connector pool for aiohttp. This method must be run in the event loop. """ key = DATA_CONNECTOR if verify_ssl else DATA_CONNECTOR_NOTVERIFY if key in hass.data: return cast(aiohttp.BaseConnector, hass.data[key]) if verify_ssl: ssl_context: bool | SSLContext = ssl_util.client_context() else: ssl_context = False connector = aiohttp.TCPConnector(enable_cleanup_closed=True, ssl=ssl_context) hass.data[key] = connector async def _async_close_connector(event: Event) -> None: """Close connector pool.""" await connector.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_connector) return connector
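async_get_clientsession() above caches one session per key on hass.data so repeated callers share a connection pool. A dependency-free sketch of that keyed-singleton pattern; the names and the plain dict are stand-ins for hass.data and the real ClientSession:

DATA_SESSION = "demo_clientsession"
DATA_SESSION_NOVERIFY = "demo_clientsession_noverify"

def get_session(data, verify_ssl=True):
    key = DATA_SESSION if verify_ssl else DATA_SESSION_NOVERIFY
    if key not in data:
        data[key] = object()   # stands in for building an aiohttp.ClientSession
    return data[key]

shared = {}
assert get_session(shared) is get_session(shared)              # cached per key
assert get_session(shared) is not get_session(shared, False)   # separate pools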
harikishen/addons-server
src/olympia/zadmin/management/commands/addusertogroup.py
Python
bsd-3-clause
1,528
0
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError

import olympia.core.logger

from olympia.access.models import Group, GroupUser
from olympia.users.models import UserProfile


class Command(BaseCommand):
    help = 'Add a new user to a group.'

    log = olympia.core.logger.getLogger('z.users')

    def add_arguments(self, parser):
        parser.add_argument('user', type=unicode, help='User id or email')
        parser.add_argument('group_id', type=int, help='Group id')

    def handle(self, *args, **options):
        do_adduser(options['user'], options['group_id'])
        msg = 'Adding {user} to {group}\n'.format(
            user=options['user'], group=options['group_id'])
        self.log.info(msg)
        self.stdout.write(msg)


def do_adduser(user, group):
    try:
        if '@' in user:
            user = UserProfile.objects.get(email=user)
        elif user.isdigit():
            user = UserProfile.objects.get(pk=user)
        else:
            raise CommandError('Unknown input for user.')

        group = Group.objects.get(pk=group)
        GroupUser.objects.create(user=user, group=group)
    except IntegrityError, e:
        raise CommandError('User is already in that group? %s' % e)
    except UserProfile.DoesNotExist:
        raise CommandError('User ({user}) does not exist.'.format(user=user))
    except Group.DoesNotExist:
        raise CommandError('Group ({group}) does not exist.'
                           .format(group=group))
angelblue05/plugin.video.emby
libraries/emby/core/connection_manager.py
Python
gpl-3.0
28,487
0.002457
# -*- coding: utf-8 -*- ################################################################################################# import json import logging import hashlib import socket import time from datetime import datetime from distutils.version import LooseVersion from credentials import Credentials from http import HTTP from exceptions import HTTPException ################################################################################################# LOG = logging.getLogger('Emby.'+__name__) CONNECTION_STATE = { 'Unavailable': 0, 'ServerSelection': 1, 'ServerSignIn': 2, 'SignedIn': 3, 'ConnectSignIn': 4, 'ServerUpdateNeeded': 5 } CONNECTION_MODE = { 'Local': 0, 'Remote': 1, 'Manual': 2 } ################################################################################################# def get_server_address(server, mode): modes = { CONNECTION_MODE['Local']: server.get('LocalAddress'), CONNECTION_MODE['Remote']: server.get('RemoteAddress'), CONNECTION_MODE['Manual']: server.get('ManualAddress') } return modes.get(mode) or server.get('ManualAddress', server.get('LocalAddress', server.get('RemoteAddress'))) class ConnectionManager(object): min_server_version = "3.0.5930" server_version = min_server_version user = {} server_id = None timeout = 10 def __init__(self, client): LOG.debug("ConnectionManager initializing...") self.client = client self.config = client.config self.credentials = Credentials() self.http = HTTP(client) def __shortcuts__(self, key): if key == "clear": return self.clear_data elif key == "servers": return self.get_available_servers() elif key in ("reconnect", "refresh"): return self.connect elif key == "login": return self.login elif key == "login-connect": return self.login_to_connect elif key == "connect-user": return self.connect_user() elif key == "connect-token": return self.connect_token() elif key == "connect-user-id": return self.connect_user_id() elif key == "server": return self.get_server_info(self.server_id) elif key == "server-id": return self.server_id elif key == "server-version": return self.server_version elif key == "user-id": return self.emby_user_id() elif key == "public-users": return self.get_public_users() elif key == "token": return self.emby_token() elif key == "manual-server": return self.connect_to_address elif key == "connect-to-server": return self.connect_to_server elif key == "server-address": server = self.get_server_info(self.server_id) return get_server_address(server, server['LastConnectionMode']) elif key == "revoke-token": return self.revoke_token() elif key == "server-mode": server = self.get_server_info(self.server_id) return server['LastConnectionMode'] return def __getitem__(self
, key): return self.__shortcuts__(key) def clear_data(self):
LOG.info("connection manager clearing data") self.user = None credentials = self.credentials.get_credentials() credentials['ConnectAccessToken'] = None credentials['ConnectUserId'] = None credentials['Servers'] = list() self.credentials.get_credentials(credentials) self.config.auth(None, None) def revoke_token(self): LOG.info("revoking token") self['server']['AccessToken'] = None self.credentials.get_credentials(self.credentials.get_credentials()) self.config['auth.token'] = None def get_available_servers(self): LOG.debug("Begin getAvailableServers") # Clone the credentials credentials = self.credentials.get_credentials() connect_servers = self._get_connect_servers(credentials) found_servers = self._find_servers(self._server_discovery()) if not connect_servers and not found_servers and not credentials['Servers']: # back out right away, no point in continuing LOG.info("Found no servers") return list() servers = list(credentials['Servers']) self._merge_servers(servers, found_servers) self._merge_servers(servers, connect_servers) servers = self._filter_servers(servers, connect_servers) try: servers.sort(key=lambda x: datetime.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ"), reverse=True) except TypeError: servers.sort(key=lambda x: datetime(*(time.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ")[0:6])), reverse=True) credentials['Servers'] = servers self.credentials.get_credentials(credentials) return servers def login_to_connect(self, username, password): if not username: raise AttributeError("username cannot be empty") if not password: raise AttributeError("password cannot be empty") try: result = self._request_url({ 'type': "POST", 'url': self.get_connect_url("user/authenticate"), 'data': { 'nameOrEmail': username, 'password': self._get_connect_password_hash(password) }, 'dataType': "json" }) except Exception as error: # Failed to login LOG.error(error) return False else: credentials = self.credentials.get_credentials() credentials['ConnectAccessToken'] = result['AccessToken'] credentials['ConnectUserId'] = result['User']['Id'] credentials['ConnectUser'] = result['User']['DisplayName'] self.credentials.get_credentials(credentials) # Signed in self._on_connect_user_signin(result['User']) return result def login(self, server, username, password=None, clear=True, options={}): if not username: raise AttributeError("username cannot be empty") if not server: raise AttributeError("server cannot be empty") try: request = { 'type': "POST", 'url': self.get_emby_url(server, "Users/AuthenticateByName"), 'json': { 'username': username, 'password': hashlib.sha1(password or "").hexdigest(), } } if clear: request['json']['pw'] = password or "" result = self._request_url(request, False) except Exception as error: # Failed to login LOG.error(error) return False self._on_authenticated(result, options) return result def connect_to_address(self, address, options={}): if not address: return False address = self._normalize_address(address) def _on_fail(): LOG.error("connectToAddress %s failed", address) return self._resolve_failure() try: public_info = self._try_connect(address, options=options) except Exception: return _on_fail() else: LOG.info("connectToAddress %s succeeded", address) server = { 'ManualAddress': address, 'LastConnectionMode': CONNECTION_MODE['Manual'] } self._update_server_info(server, public_info) server = self.connect_to_server(server, options) if server is False: return _on_fail() return server def connect_to_server(self, server, options={}): LOG.debug("Begin connectToServer") 
tests = [] if server.get('LastConnectionMode') != CONNECTION_MODE['Remote'] and server.get('AccessToken'):
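get_server_address() at the top of the record resolves an address by connection mode and falls back through Manual, Local and Remote. A small standalone usage sketch with a made-up server record (the CONNECTION_MODE values mirror the module's constants):

CONNECTION_MODE = {'Local': 0, 'Remote': 1, 'Manual': 2}

def get_server_address(server, mode):
    modes = {
        CONNECTION_MODE['Local']: server.get('LocalAddress'),
        CONNECTION_MODE['Remote']: server.get('RemoteAddress'),
        CONNECTION_MODE['Manual']: server.get('ManualAddress'),
    }
    return modes.get(mode) or server.get(
        'ManualAddress', server.get('LocalAddress', server.get('RemoteAddress')))

server = {
    'LocalAddress': 'http://192.168.1.10:8096',
    'RemoteAddress': 'https://emby.example.invalid',
    'ManualAddress': 'http://emby.lan:8096',
}
print(get_server_address(server, CONNECTION_MODE['Remote']))   # remote URL
print(get_server_address(server, 99))   # unknown mode -> ManualAddress fallback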
wolendranh/movie_radio
admin/models.py
Python
apache-2.0
415
0.00241
from config.settings import STREAM_COLLECTION

from bson.objectid import ObjectId

from radio_db.models import BaseModel


class Stream(BaseModel):

    def __init__(self, db, data):
        super().__init__(db, collection=STREAM_COLLECTION)
        self.stream_ip = data.get('stream_ip')
        self.name = data.get('name')
        self.id = data.get('id')
        self.user_id = ObjectId(data.get('user_id'))
leaffan/pynhldb
db/takeaway.py
Python
mit
991
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import uuid

from db.common import Base
from db.specific_event import SpecificEvent
from db.event import Event
from db.player import Player
from db.team import Team


class Takeaway(Base, SpecificEvent):
    __tablename__ = 'takeaways'
    __autoload__ = True

    HUMAN_READABLE = 'takeaway'

    STANDARD_ATTRS = [
        "team_id", "player_id", "zone", "taken_from_team_id"
    ]

    def __init__(self, event_id, data_dict):
        self.takeaway_id = uuid.uuid4().urn
        self.event_id = event_id
        for attr in self.STANDARD_ATTRS:
            if attr in data_dict:
                setattr(self, attr, data_dict[attr])
            else:
                setattr(self, attr, None)

    def __str__(self):
        plr = Player.find_by_id(self.player_id)
        event = Event.find_by_id(self.event_id)
        team = Team.find_by_id(self.team_id)
        return "Takeaway: %s (%s) - %s" % (
            plr.name, team.abbr, event)
arbiterofcool/fig-seed
template/fig-django/figexample/settings.py
Python
mit
2,114
0
""" Django set
tings for figexample project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the
project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'pp&p7ex-&+#n4waijg96v&txz$=y*rh=t$u-!hri@(-s@6^51=' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'figexample.urls' WSGI_APPLICATION = 'figexample.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'postgres', 'USER': 'postgres', 'HOST': 'db_1', 'PORT': 5432, } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/'
Thx3r/survivalAlertWin
SurvivalAlertWin.py
Python
gpl-2.0
2,493
0.015243
#!/usr/bin/env python #PYTHON 3 only # Sample Windows Startup check -- Mail alert # SurvivalAlert v1.0 # By thxer.com # N-pn.fr Communi
ty and Hexpresso CTF team import os import socket import ctypes import smtplib #Global Variables HOSTNAME = str(socket.gethostname()) IPLAN = str(socket.gethostbyna
me(socket.gethostname())) AUTHORIZE_USER = ['Users','Utilisateur'] #User wich are allow to use computers LIMIT_FREE_HDD_SPACE = 11 # Limit of free HDD space alert in GB #Email Settings TO = "admin@1337.com" # User who recept mail alert USER = "smtp_user@1337.com" PWD = "smtp_passwd" SMTPSERV = "smtp.server_addres.com" #Check HDD Status def check_hdd(): """Check HDD disk with windows tools """ Hdd_status = os.popen("wmic diskdrive get status").read() for word in Hdd_status.split(): if not word in ["Status", "OK"]: ctypes.windll.user32.MessageBoxW(None, u"ALERT: HDD ERROR", u"ERROR CONTACT ADMIN NOW !", 0) send_mail("Warning HDD not SAFE !","Windows claims About unsafe HDD !") return Hdd_status def get_free_space(): """ Test first Drive Free space then alert < LIMIT_HDD_FREE_SPACE """ free_space = round(int(os.popen("wmic logicaldisk get freespace").read().split()[1])/1024/1024/1024) if free_space < LIMIT_FREE_HDD_SPACE : ctypes.windll.user32.MessageBoxW(None, u"ALERT: HDD FREE SPACE ERROR", u"ERROR CONTACT ADMIN NOW !", 0) msg = "Warning Free space is : " + str(free_space) + "GB" send_mail("Warning C: Free SPACE !",msg) return free_space def whois_log(): """ Get user Login name and alert if not in AUTHORIZE_USER list """ if not os.getlogin() in AUTHORIZE_USER : msg = "SUSPECT Login IN : " + os.getlogin() send_mail("SUSPECT LOGIN",msg) def send_mail(subject,message): subject = str(subject) message = str(message) server = smtplib.SMTP(SMTPSERV,25) # 587 for STARTLS server.ehlo() #server.starttls() # Un comment for use STARTTLS server.login(USER, PWD) header = 'TO:' + TO + '\n' + 'From: ' + USER + '\n' + 'Subject:'+ HOSTNAME + " | " + IPLAN + " " + subject +'\n' mail = header + '\n' + "PC : " + HOSTNAME + " IP LAN : " + IPLAN + "\n" + message + '\n\n' server.sendmail(USER, TO, mail ) server.close() if __name__ == '__main__': # Uncomment for test mail configuration #send_mail("Send a Test Mail","1337 Are In place N-pn") whois_log() get_free_space() check_hdd()
amwelch/a10sdk-python
a10sdk/core/cgnv6/cgnv6_lsn_stun_timeout_udp.py
Python
apache-2.0
1,644
0.009732
from a10sdk.common.A10BaseClass import A10BaseClass class Udp(A10BaseClass): """Class Description:: Set UDP STUN timeout. Class udp supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` :param port_start: {"description": "Port Range (Port Range Start)", "format": "number", "type": "number", "maximum": 65535, "minimum": 1, "optional": false} :param port_end: {"description": "Port Range (Port Range End)", "format": "number", "type": "number", "maximum": 65535, "minimum": 1, "optional": false} :param timeout: {"description": "STUN timeout in minutes (default: 2 minutes)", "format": "number", "type": "number", "maximum": 60, "minimum": 0, "optional": true} :param uuid: {"description": "uuid of the object", "format": "string"
, "minLength": 1, "modify-not-allowed": 1, "
optional": true, "maxLength": 64, "type": "string"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/cgnv6/lsn/stun-timeout/udp/{port_start}+{port_end}`. """ def __init__(self, **kwargs): self.ERROR_MSG = "" self.required = [ "port_start","port_end"] self.b_key = "udp" self.a10_url="/axapi/v3/cgnv6/lsn/stun-timeout/udp/{port_start}+{port_end}" self.DeviceProxy = "" self.port_start = "" self.port_end = "" self.timeout = "" self.uuid = "" for keys, value in kwargs.items(): setattr(self,keys, value)
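The docstring above documents Udp as a simple parameter container whose keyword arguments become attributes. A short usage sketch, assuming the a10sdk package is installed and importable under the file path shown; the port range and timeout values are hypothetical, and actually pushing the object to a device would additionally need a configured session/DeviceProxy, which is not shown.

from a10sdk.core.cgnv6.cgnv6_lsn_stun_timeout_udp import Udp

# Keyword arguments are copied onto attributes by the setattr loop in __init__.
udp = Udp(port_start=1024, port_end=2048, timeout=5)
print(udp.required)  # ['port_start', 'port_end']
print(udp.a10_url)   # URL template with {port_start}+{port_end} placeholders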
yebrahim/pydatalab
google/datalab/bigquery/commands/_bigquery.py
Python
apache-2.0
49,966
0.011348
# Copyright 2015 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at # # http://www.apache.org/li
censes/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the L
icense for the specific language governing permissions and limitations under # the License. """Google Cloud Platform library - BigQuery IPython Functionality.""" from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from builtins import str from past.builtins import basestring try: import IPython import IPython.core.display import IPython.core.magic except ImportError: raise Exception('This module can only be loaded in ipython.') import datetime import jsonschema import fnmatch import json import re import google.datalab.bigquery as bigquery import google.datalab.data import google.datalab.utils import google.datalab.utils.commands from google.datalab.bigquery._query_output import QueryOutput from google.datalab.bigquery._sampling import Sampling class BigQuerySchema(object): """A container class for commonly used BQ-related constants.""" DATATYPES = ['STRING', 'BYTES', 'INTEGER', 'INT64', 'FLOAT', 'FLOAT64', 'BOOLEAN', 'BOOL', 'TIMESTAMP', 'DATE', 'TIME', 'DATETIME', 'RECORD'] DATATYPES_LOWER = [t.lower() for t in DATATYPES] MODES = ['NULLABLE', 'REQUIRED', 'REPEATED'] MODES_LOWER = [m.lower() for m in MODES] TABLE_SCHEMA_SCHEMA = { 'definitions': { 'field': { 'title': 'field', 'type': 'object', 'properties': { 'name': {'type': 'string'}, 'type': {'type': 'string', 'enum': DATATYPES + DATATYPES_LOWER}, 'mode': {'type': 'string', 'enum': MODES + MODES_LOWER}, 'description': {'type': 'string'}, 'fields': { 'type': 'array', 'items': { 'allOf': [{'$ref': '#/definitions/field'}] } } }, 'required': ['name', 'type'], 'additionalProperties': False } }, 'type': 'object', 'properties': { 'schema': { 'type': 'array', 'items': { 'allOf': [{'$ref': '#/definitions/field'}] } } }, 'required': ['schema'], 'additionalProperties': False } QUERY_PARAMS_SCHEMA = { 'type': 'object', 'properties': { 'parameters': { 'type': 'array', 'items': [ { 'type': 'object', 'properties': { 'name': {'type': 'string'}, 'type': {'type': 'string', 'enum': DATATYPES + DATATYPES_LOWER}, 'value': {'type': ['string', 'integer', 'number']} }, 'required': ['name', 'type', 'value'], 'additionalProperties': False } ] } }, 'required': ['parameters'], 'additionalProperties': False } def _create_dataset_subparser(parser): dataset_parser = parser.subcommand('datasets', 'Operations on BigQuery datasets') sub_commands = dataset_parser.add_subparsers(dest='command') # %%bq datasets list list_parser = sub_commands.add_parser('list', help='List datasets') list_parser.add_argument('-p', '--project', help='The project whose datasets should be listed') list_parser.add_argument('-f', '--filter', help='Optional wildcard filter string used to limit the results') # %%bq datasets create create_parser = sub_commands.add_parser('create', help='Create a dataset.') create_parser.add_argument('-n', '--name', help='The name of the dataset to create.', required=True) create_parser.add_argument('-f', '--friendly', help='The friendly name of the dataset.') # %%bq datasets delete delete_dataset_parser = sub_commands.add_parser('delete', help='Delete a dataset.') delete_dataset_parser.add_argument('-n', '--name', help='The name of the dataset to delete.', required=True) return dataset_parser def _create_table_subparser(parser): table_parser = parser.subcommand('tables', 'Operations on BigQuery tables') sub_commands = table_parser.add_subparsers(dest='command') # %%bq tables list list_parser = sub_commands.add_parser('list', help='List the tables in a BigQuery project or dataset.') 
list_parser.add_argument('-p', '--project', help='The project whose tables should be listed') list_parser.add_argument('-d', '--dataset', help='The dataset to restrict to') list_parser.add_argument('-f', '--filter', help='Optional wildcard filter string used to limit the results') # %%bq tables create create_parser = sub_commands.add_parser('create', help='Create a table.') create_parser.add_argument('-n', '--name', help='The name of the table to create.', required=True) create_parser.add_argument('-o', '--overwrite', help='Overwrite table if it exists.', action='store_true') # %%bq tables describe describe_parser = sub_commands.add_parser('describe', help='View a table\'s schema') describe_parser.add_argument('-n', '--name', help='Name of table to show', required=True) # %%bq tables delete delete_parser = sub_commands.add_parser('delete', help='Delete a table.') delete_parser.add_argument('-n', '--name', help='The name of the table to delete.', required=True) # %%bq tables view delete_parser = sub_commands.add_parser('view', help='View a table.') delete_parser.add_argument('-n', '--name', help='The name of the table to view.', required=True) return table_parser def _create_sample_subparser(parser): sample_parser = parser.subcommand('sample', help='Display a sample of the results of a BigQuery SQL query. ' 'The cell can optionally contain arguments for expanding ' 'variables in the query, if -q/--query was used, or it ' 'can contain SQL for a query.') group = sample_parser.add_mutually_exclusive_group() group.add_argument('-q', '--query', help='the name of the query object to sample') group.add_argument('-t', '--table', help='the name of the table object to sample') group.add_argument('-v', '--view', help='the name of the view object to sample') sample_parser.add_argument('-nc', '--nocache', help='Don\'t use previously cached results', action='store_true') sample_parser.add_argument('-b', '--billing', type=int, help='BigQuery billing tier') sample_parser.add_argument('-m', '--method', help='The type of sampling to use', choices=['limit', 'random', 'hashed', 'sorted'], default='limit') sample_parser.add_argument('--fields', help='Comma separated field names for projection') sample_parser.add_argument('-c', '--count', type=int, default=10, help='The number of rows to limit to, if sampling') sample_parser.add_argument('-p', '--percent', type=int, default=1, help='For random or hashed sampling, what percentage to sample from') sample_parser.add_argument('--key-field', help='The field to use for sorted or hashed sampling') sample_parser.add_argument('-o', '--order', choices=['ascending', 'descending'], default='ascending', help='The sort order to use for sorted sampling') sample_parser.add_argument('-P', '--profile', action='store_true', default=False, help='Generate an interactive profile of the data') sample_parser.add_argument('--verbose', help='Show the expanded SQL that is being executed',
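TABLE_SCHEMA_SCHEMA above is a JSON Schema for BigQuery table definitions. A small check of a hand-written schema against it, run inside the module (or with BigQuerySchema imported from it) using the jsonschema package the module already imports; the field names here are invented.

import jsonschema

candidate = {
    'schema': [
        {'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED'},
        {'name': 'payload', 'type': 'RECORD',
         'fields': [{'name': 'value', 'type': 'FLOAT'}]},
    ]
}

# Raises jsonschema.ValidationError if the structure or the type/mode enums do not match.
jsonschema.validate(candidate, BigQuerySchema.TABLE_SCHEMA_SCHEMA)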
Jorge-Rodriguez/ansible
lib/ansible/modules/system/cron.py
Python
gpl-3.0
24,897
0.002691
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2012, Dane Summers <dsummers@pinedesk.biz> # Copyright: (c) 2013, Mike Grozak <mike.grozak@gmail.com> # Copyright: (c) 2013, Patrick Callahan <pmc@patrickcallahan.com> # Copyright: (c) 2015, Evan Kaufman <evan@digitalflophouse.com> # Copyright: (c) 2015, Luca Berruti <nadirio@gmail.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: cron short_description: Manage cron.d and crontab entries description: - Use this module to manage crontab and environment variables entries. This module allows you to create environment variables and named crontab entries, update, or delete them. - 'When crontab jobs are managed: the module includes one line with the description of the crontab entry C("#Ansible: <name>") corresponding to the "name" passed to the module, which is used by future ansible/module calls to find/check the state. The "name" parameter should be unique, and changing the "name" value will result in a new cron task being created (or a different one being removed).' - 'When environment variables are managed: no comment line is added, but, when the module needs to find/check the state, it uses the "name" parameter to find the environment variable definition line.' - 'When using symbols such as %, they must be properly escaped.' version_added: "0.9" options: name: description: - Description of a crontab entry or, if env is set, the name of environment variable. Required if state=absent. Note that if name is not set and state=present, then a new crontab entry will always be created, regardless of existing ones. user: description: - The specific user whose crontab should be modified. default: root job: description: - The command to execute or, if env is set, the value of environment variable. The command should not contain line breaks. Required if state=present. aliases: [ value ] state: description: - Whether to ensure the job or environment variable is present or absent. choices: [ absent, present ] default: present cron_file: description: - If specified, uses this file instead of an individual user's crontab. If this is a relative path, it is interpreted with respect to /etc/cron.d. (If it is absolute, it will typically be /etc/crontab). Many linux distros expect (and some require) the filename portion to consist solely of upper- and lower-case letters, digits, underscores, and hyphens. To use the C(cron_file) parameter you must specify the C(user) as well. backup: descript
ion: - If set, create a backup of the crontab before it is modified. The location of the backup is returned in the C(backup_file) variable by this module. type: bool default: 'no' minute: description: - Minute when the job should run ( 0-59, *, */2, etc ) default: "*" hour: description: - Hour when the job should run ( 0-23, *, */2, etc ) default: "*" day: description: - Day of the month the job should
run ( 1-31, *, */2, etc ) default: "*" aliases: [ dom ] month: description: - Month of the year the job should run ( 1-12, *, */2, etc ) default: "*" weekday: description: - Day of the week that the job should run ( 0-6 for Sunday-Saturday, *, etc ) default: "*" aliases: [ dow ] reboot: description: - If the job should be run at reboot. This option is deprecated. Users should use special_time. version_added: "1.0" type: bool default: "no" special_time: description: - Special time specification nickname. choices: [ annually, daily, hourly, monthly, reboot, weekly, yearly ] version_added: "1.3" disabled: description: - If the job should be disabled (commented out) in the crontab. - Only has effect if C(state=present). type: bool default: 'no' version_added: "2.0" env: description: - If set, manages a crontab's environment variable. New variables are added on top of crontab. "name" and "value" parameters are the name and the value of environment variable. type: bool default: "no" version_added: "2.1" insertafter: description: - Used with C(state=present) and C(env). If specified, the environment variable will be inserted after the declaration of specified environment variable. version_added: "2.1" insertbefore: description: - Used with C(state=present) and C(env). If specified, the environment variable will be inserted before the declaration of specified environment variable. version_added: "2.1" requirements: - cron author: - Dane Summers (@dsummersl) - Mike Grozak (@rhaido) - Patrick Callahan (@dirtyharrycallahan) - Evan Kaufman (@EvanK) - Luca Berruti (@lberruti) """ EXAMPLES = ''' - name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null" cron: name: "check dirs" minute: "0" hour: "5,2" job: "ls -alh > /dev/null" - name: 'Ensure an old job is no longer present. 
Removes any job that is prefixed by "#Ansible: an old job" from the crontab' cron: name: "an old job" state: absent - name: Creates an entry like "@reboot /some/job.sh" cron: name: "a job for reboot" special_time: reboot job: "/some/job.sh" - name: Creates an entry like "PATH=/opt/bin" on top of crontab cron: name: PATH env: yes job: /opt/bin - name: Creates an entry like "APP_HOME=/srv/app" and insert it after PATH declaration cron: name: APP_HOME env: yes job: /srv/app insertafter: PATH - name: Creates a cron file under /etc/cron.d cron: name: yum autoupdate weekday: 2 minute: 0 hour: 12 user: root job: "YUMINTERACTIVE=0 /usr/sbin/yum-autoupdate" cron_file: ansible_yum-autoupdate - name: Removes a cron file from under /etc/cron.d cron: name: "yum autoupdate" cron_file: ansible_yum-autoupdate state: absent - name: Removes "APP_HOME" environment variable from crontab cron: name: APP_HOME env: yes state: absent ''' import os import platform import pipes import pwd import re import sys import tempfile from ansible.module_utils.basic import AnsibleModule, get_platform CRONCMD = "/usr/bin/crontab" class CronTabError(Exception): pass class CronTab(object): """ CronTab object to write time based crontab file user - the user of the crontab (defaults to root) cron_file - a cron file under /etc/cron.d, or an absolute path """ def __init__(self, module, user=None, cron_file=None): self.module = module self.user = user self.root = (os.getuid() == 0) self.lines = None self.ansible = "#Ansible: " self.existing = '' if cron_file: if os.path.isabs(cron_file): self.cron_file = cron_file else: self.cron_file = os.path.join('/etc/cron.d', cron_file) else: self.cron_file = None self.read() def read(self): # Read in the crontab from the system self.lines = [] if self.cron_file: # read the cronfile try: f = open(self.cron_file, 'r') self.existing = f.read() self.lines = self.existing.splitlines() f.close() except IOError: # cron file does not exist return except Exception: raise CronTabError("Unexpected error:", sys.exc_info()[0]) else: # using safely quoted shell for now, but this really should be two non-shell cal
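As the documentation above explains, every crontab job managed by the module is preceded by a "#Ansible: <name>" comment line, which later runs use to find and check the entry. A minimal sketch of locating a job by that marker; the crontab text is invented.

crontab_text = """\
#Ansible: check dirs
0 5,2 * * * ls -alh > /dev/null
#Ansible: a job for reboot
@reboot /some/job.sh
"""

def find_job(text, name, marker="#Ansible: "):
    # Return the line immediately after the "#Ansible: <name>" comment, or None.
    lines = text.splitlines()
    for i, line in enumerate(lines):
        if line == marker + name and i + 1 < len(lines):
            return lines[i + 1]
    return None

print(find_job(crontab_text, "check dirs"))  # -> 0 5,2 * * * ls -alh > /dev/null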
phw/picard
picard/ui/ui_options_metadata.py
Python
gpl-2.0
9,138
0.003174
# -*- coding: utf-8 -*- # Automatically generated - don't edit. # Use `python setup.py build_ui` to update it. from PyQt5 import QtCore, QtGui, QtWidgets class Ui_MetadataOptionsPage(object): def setupUi(self, MetadataOptionsPage): MetadataOptionsPage.setObjectName("MetadataOptionsPage") MetadataOptionsPage.resize(423, 553) self.verticalLayout = QtWidgets.QVBoxLayout(MetadataOptionsPage) self.verticalLayout.setObjectName("verticalLayout") self.metadata_groupbox = QtWidgets.QGroupBox(MetadataOptionsPage) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.metadata_groupbox.sizePolicy().hasHeightForWidth()) self.metadata_groupbox.setSizePolicy(sizePolicy) self.metadata_groupbox.setMinimumSize(QtCore.QSize(397, 135)) self.metadata_groupbox.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.metadata_groupbox.setObjectName("metadata_groupbox") self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.metadata_groupbox) self.verticalLayout_3.setObjectName("verticalLayout_3") self.translate_artist_names = QtWidgets.QCheckBox(self.metadata_groupbox) self.translate_artist_names.setObjectName("translate_artist_names") self.verticalLayout_3.addWidget(self.translate_artist_names) self.horizontalLayout = QtWidgets.QHBoxLayout() self.horizontalLayout.setContentsMargins(-1, -1, -1, 0) self.horizontalLayout.setObjectName("horizontalLayout") self.selected_locales = QtWidgets.QLineEdit(self.metadata_groupbox) self.selected_locales.setReadOnly(True) self.selected_locales.setObjectName("selected_locales") self.horizontalLayout.addWidget(self.selected_locales) self.select_locales = QtWidgets.QPushButton(self.metadata_groupbox) self.select_locales.setObjectName("select_locales") self.horizontalLayout.addWidget(self.select_locales) self.verticalLayout_3.addLayout(self.horizontalLayout) self.translate_artist_names_script_exception = QtWidgets.QCheckBox(self.metadata_groupbox) self.translate_artist_names_script_exception.setObjectName("translate_artist_names_script_exception") self.verticalLayout_3.addWidget(self.translate_artist_names_script_exception) self.horizontalLayout_4 = QtWidgets.QHBoxLayout() self.horizontalLayout_4.setContentsMargins(-1, -1, -1, 0) self.horizontalLayout_4.setObjectName("horizontalLayout_4") self.selected_scripts = QtWidgets.QLineEdit(self.metadata_groupbox) self.selected_scripts.setReadOnly(True) self.selected_scripts.setObjectName("selected_scripts") self.horizontalLayout_4.addWidget(self.selected_scripts) self.select_scripts = QtWidgets.QPushButton(self.metadata_groupbox) self.select_scripts.setObjectName("select_scripts") self.horizontalLayout_4.addWidget(self.select_scripts) self.verticalLayout_3.addLayout(self.horizontalLayout_4) self.standardize_artists = QtWidgets.QCheckBox(self.metadata_groupbox) self.standardize_artists.setObjectName("standardize_artists") self.verticalLayout_3.addWidget(self.standardize_artists) self.standardize_instruments = QtWidgets.QCheckBox(self.metadata_groupbox) self.standardize_instruments.setObjectName("standardize_instruments") self.verticalLayout_3.addWidget(self.standardize_instruments) self.convert_punctuation = QtWidgets.QCheckBox(self.metadata_groupbox) self.convert_punctuation.setObjectName("convert_punctuation") self.vertic
alLayout_3.addWidget(self.convert_punctuation) self.release_ars = QtWidgets.QCheckBox(self.metadata_groupbox) self.release_ars.setObjectName("release_ars") self.verticalLayout_3.addWidget(self.release_ars) self.track_ars = QtWidgets.QCheckBox(self.metadata_groupbox) self.track_ars.setObjectName("track_ars") self.verticalLayout_3.addWidget(self.track_ars)
self.guess_tracknumber_and_title = QtWidgets.QCheckBox(self.metadata_groupbox) self.guess_tracknumber_and_title.setObjectName("guess_tracknumber_and_title") self.verticalLayout_3.addWidget(self.guess_tracknumber_and_title) self.verticalLayout.addWidget(self.metadata_groupbox) self.custom_fields_groupbox = QtWidgets.QGroupBox(MetadataOptionsPage) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.custom_fields_groupbox.sizePolicy().hasHeightForWidth()) self.custom_fields_groupbox.setSizePolicy(sizePolicy) self.custom_fields_groupbox.setMinimumSize(QtCore.QSize(397, 0)) self.custom_fields_groupbox.setObjectName("custom_fields_groupbox") self.gridlayout = QtWidgets.QGridLayout(self.custom_fields_groupbox) self.gridlayout.setSpacing(2) self.gridlayout.setObjectName("gridlayout") self.label_6 = QtWidgets.QLabel(self.custom_fields_groupbox) self.label_6.setObjectName("label_6") self.gridlayout.addWidget(self.label_6, 0, 0, 1, 2) self.label_7 = QtWidgets.QLabel(self.custom_fields_groupbox) self.label_7.setObjectName("label_7") self.gridlayout.addWidget(self.label_7, 2, 0, 1, 2) self.nat_name = QtWidgets.QLineEdit(self.custom_fields_groupbox) self.nat_name.setObjectName("nat_name") self.gridlayout.addWidget(self.nat_name, 3, 0, 1, 1) self.nat_name_default = QtWidgets.QPushButton(self.custom_fields_groupbox) self.nat_name_default.setObjectName("nat_name_default") self.gridlayout.addWidget(self.nat_name_default, 3, 1, 1, 1) self.va_name_default = QtWidgets.QPushButton(self.custom_fields_groupbox) self.va_name_default.setObjectName("va_name_default") self.gridlayout.addWidget(self.va_name_default, 1, 1, 1, 1) self.va_name = QtWidgets.QLineEdit(self.custom_fields_groupbox) self.va_name.setObjectName("va_name") self.gridlayout.addWidget(self.va_name, 1, 0, 1, 1) self.verticalLayout.addWidget(self.custom_fields_groupbox) spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout.addItem(spacerItem) self.label_6.setBuddy(self.va_name_default) self.label_7.setBuddy(self.nat_name_default) self.retranslateUi(MetadataOptionsPage) QtCore.QMetaObject.connectSlotsByName(MetadataOptionsPage) MetadataOptionsPage.setTabOrder(self.translate_artist_names, self.translate_artist_names_script_exception) MetadataOptionsPage.setTabOrder(self.translate_artist_names_script_exception, self.standardize_artists) MetadataOptionsPage.setTabOrder(self.standardize_artists, self.standardize_instruments) MetadataOptionsPage.setTabOrder(self.standardize_instruments, self.convert_punctuation) MetadataOptionsPage.setTabOrder(self.convert_punctuation, self.release_ars) MetadataOptionsPage.setTabOrder(self.release_ars, self.track_ars) MetadataOptionsPage.setTabOrder(self.track_ars, self.guess_tracknumber_and_title) MetadataOptionsPage.setTabOrder(self.guess_tracknumber_and_title, self.va_name) MetadataOptionsPage.setTabOrder(self.va_name, self.va_name_default) MetadataOptionsPage.setTabOrder(self.va_name_default, self.nat_name) MetadataOptionsPage.setTabOrder(self.nat_name, self.nat_name_default) def retranslateUi(self, MetadataOptionsPage): _translate = QtCore.QCoreApplication.translate self.metadata_groupbox.setTitle(_("Metadata")) self.translate_artist_names.setText(_("Translate artist names to these locales where possible:")) self.select_locales.setText(_("Select..."))
gkc1000/pyscf
pyscf/geomopt/__init__.py
Python
apache-2.0
977
0.002047
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the L
icense for the specific language governing permissions and # limitations under the License. #from . import berny_solver as berny from .addons import as_pyscf_method def optimize(method, *args, **kwargs): try:
from . import geometric_solver as geom except ImportError as e1: try: from . import berny_solver as geom except ImportError as e2: raise e1 return geom.optimize(method, *args, **kwargs)
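A brief usage sketch for the optimize() helper defined above. It assumes either geomeTRIC or pyberny is installed (otherwise the ImportError from the first backend is re-raised, as the code shows); the molecule is just a small test case.

from pyscf import gto, scf
from pyscf.geomopt import optimize

mol = gto.M(atom='H 0 0 0; H 0 0 0.74', basis='sto-3g')
mf = scf.RHF(mol)

# Returns a Mole object at the relaxed geometry, using whichever backend was found.
mol_eq = optimize(mf)
print(mol_eq.atom_coords())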
demisto/content
Packs/AzureDevOps/Integrations/AzureDevOps/AzureDevOps.py
Python
mit
59,271
0.002733
# type: ignore import demistomock as demisto # noqa: F401 from CommonServerPython import * # noqa: F401 import copy from requests import Response from typing import Callable INCIDENT_TYPE_NAME = "Azure DevOps" OUTGOING_MIRRORED_FIELDS = {'status': 'The status of the pull request.', 'title': 'The title of the pull request.', 'description': 'The description of the pull request.', 'project': 'The name of the project.', 'repository_id': 'The repository ID of the pull request target branch.', 'pull_request_id': 'the ID of the pull request'} class Client: """ API Client to communicate with AzureDevOps. """ def __init__(self, client_id: str, organization: str, verify: bool, proxy: bool): if '@' in client_id: # for use in test-playbook client_id, refresh_token = client_id.split('@') integration_context = get_integration_context() integration_context.update(current_refresh_token=refresh_token) set_integration_context(integration_context) self.ms_client = MicrosoftClient( self_deployed=True, auth_id=client_id, token_retrieval_url='https://login.microsoftonline.com/organizations/oauth2/v2.0/token', grant_type=DEVICE_CODE, base_url=f'https://dev.azure.com/{organ
ization}', verify=verify, proxy=proxy, scope='499b84ac-1321-427f-aa17-267ca6975798/user_impersonation offline_access') self.organization = organization def pipeline_run_request(self, project: str, pipeline_id: str, branch_name: str) -> dict: """ Run a pipeline. Args:
project (str): The name or the ID of the project. pipeline_id (str): The ID of the pipeline. branch_name (str): The name of the repository branch which run the pipeline. Returns: dict: API response from Azure. """ params = {'api-version': '6.1-preview.1'} data = {"resources": {"repositories": {"self": {"refName": f'refs/heads/{branch_name}'}}}} url_suffix = f"{project}/_apis/pipelines/{pipeline_id}/runs" response = self.ms_client.http_request(method='POST', url_suffix=url_suffix, params=params, json_data=data, resp_type='json') return response def user_add_request(self, user_email: str, account_license_type: str, group_type: str, project_id: str) -> dict: """ Add a user, assign license and extensions and make them a member of a project group in an account. Args: user_email (str): The Email of the user to add to the organization. account_license_type (str): The type of account license (e.g. Express, Stakeholder etc.). group_type (str): Project Group (e.g. Contributor, Reader etc.). project_id (str): The ID of the project. Returns: dict: API response from Azure. """ params = {'api-version': '6.1-preview.3'} data = { "accessLevel": { "accountLicenseType": account_license_type }, "projectEntitlements": [ { "group": { "groupType": group_type }, "projectRef": { "id": project_id} } ], "user": { "principalName": user_email, "subjectKind": "user" } } full_url = f"https://vsaex.dev.azure.com/{self.organization}/_apis/UserEntitlements" response = self.ms_client.http_request(method='POST', full_url=full_url, params=params, json_data=data, resp_type='json') return response def user_remove_request(self, user_id: str) -> Response: """ Delete a user from the account. Args: user_id (str): The ID of the user to remove from the account. Returns: Response: API response from Azure. """ params = {'api-version': '6.1-preview.3'} full_url = f'https://vsaex.dev.azure.com/{self.organization}/_apis/userentitlements/{user_id}' response = self.ms_client.http_request(method='DELETE', full_url=full_url, params=params, resp_type='response') return response def pull_request_create_request(self, project: str, repository_id: str, source_branch: str, target_branch: str, title: str, description: str, reviewers: list) -> dict: """ Create a new pull request in Azure DevOps. Args: project (str): The name or the ID of the project. repository_id (str): The repository ID of the pull request's target branch. source_branch (str): The name of the source branch of the pull request. target_branch (str): The name of the target branch of the pull request. title (str): The title of the pull request. description (str): The description of the pull request. reviewers (list): Pull-request reviewers IDs. Returns: dict: API response from Azure. """ params = {'api-version': '6.1-preview.1'} data = { "sourceRefName": source_branch, "targetRefName": target_branch, "description": description, "reviewers": reviewers, "title": title } url_suffix = f'{project}/_apis/git/repositories/{repository_id}/pullrequests' response = self.ms_client.http_request(method='POST', url_suffix=url_suffix, params=params, json_data=data, resp_type='json') return response def pull_request_update_request(self, project: str, repository_id: str, pull_request_id: str, title: str = None, description: str = None, status: str = None, last_merge_source_commit: dict = None) -> dict: """ Update a pull request. Args: project (str): The name or the ID of the project. repository_id (str): The repository ID of the pull request's target branch. 
pull_request_id (str): The ID of the pull-request. title (str): The updated pull-request title. description (str): The updated pull-request description. status (str): The updated pull-request status. last_merge_source_commit (dict): Commit object at the head of the source branch at the time of the last pull request merge. Returns: dict: API response from Azure. """ params = {'api-version': '6.1-preview.1'} data = remove_empty_elements({"description": description, "status": status, "title": title, "LastMergeSourceCommit": last_merge_source_commit}) url_suffix = f'{project}/_apis/git/repositories/{repository_id}/pullrequests/{pull_request_id}' response = self.ms_client.http_request(method='PATCH', url_suffix=url_suffix, params=params, json_data=data,
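pipeline_run_request() above wraps a single REST call. A standalone sketch of the same request shape with the requests library; the organization, project, pipeline id, and the use of a personal access token over basic auth are assumptions standing in for the MicrosoftClient session used by the integration.

import requests

organization = "my-org"      # hypothetical
project = "my-project"       # hypothetical
pipeline_id = "42"           # hypothetical
branch_name = "main"

url = "https://dev.azure.com/%s/%s/_apis/pipelines/%s/runs" % (organization, project, pipeline_id)
params = {"api-version": "6.1-preview.1"}
data = {"resources": {"repositories": {"self": {"refName": "refs/heads/%s" % branch_name}}}}

response = requests.post(url, params=params, json=data, auth=("", "<personal-access-token>"))
print(response.status_code)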
sunqm/pyscf
pyscf/eph/eph_fd.py
Python
apache-2.0
5,884
0.008158
#!/usr/bin/env python # Copyright 2014-2020 The PySCF Developers. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Author: Yang Gao <younggao1994@gmail.com> # ''' electron-phonon matrix from finite difference ''' import copy import numpy as np from pyscf import scf, dft, gto, hessian from pyscf.eph import rhf as rhf_eph from pyscf.lib import logger from pyscf.data.nist import MP_ME CUTOFF_FREQUENCY = rhf_eph.CUTOFF_FREQUENCY KEEP_IMAG_FREQUENCY = rhf_eph.KEEP_IMAG_FREQUENCY def run_mfs(mf, mols_a, mols_b): nconfigs = len(mols_a) dm0 = mf.make_rdm1() mflist = [] for i in range(nconfigs): mf1 = copy.copy(mf) mf1.reset(mols_a[i]) mf2 = copy.copy(mf) mf2.reset(mols_b[i]) mf1.kernel(dm0=dm0) mf2.kernel(dm0=dm0) if not (mf1.converged): logger.warn(mf, "%ith config mf1 not converged", i) if not (mf2.converged): logger.warn(mf, "%ith config mf2 not converged", i) mflist.append((mf1, mf2)) return mflist def get_mode(mf, cutoff_frequency=CUTOFF_FREQUENCY, keep_imag_frequency=KEEP_IMAG_FREQUENCY): hmat = mf.Hessian().kernel() w_new, c_new = rhf_eph.solve_hmat(mf.mol, hmat, cutoff_frequency, keep_imag_frequency) return w_new, c_new def gen_moles(mol, disp): """From the given equilibrium molecule, generate 3N molecules with a shift on + displacement(mol_a) and - displacement(mol_s) on each Cartesian coordinates """ coords = mol.atom_coords() natoms = len(coords) mol_a, mol_s = [],[] for i in range(natoms): for x in range(3): new_coords_a, new_coords_s = coords.copy(), coords.copy() new_coords_a[i][x] += disp new_coords_s[i][x] -= disp atoma = [[mol.atom_symbol(j), coord] for (j, coord) in zip(range(natoms), new_coords_a)] atoms = [[mol.atom_symbol(j), coord] for (j, coord) in zip(range(natoms), new_coords_s)] mol_a.append(mol.set_geom_(atoma, inplace=False, unit='B')) mol_s.append(mol.set_geom_(atoms, inplace=False, unit='B')) return mol_a, mol_s def get_vmat(mf, mfset, disp): vmat=[] mygrad = mf.nuc_grad_method() ve = mygrad.get_veff() + mygrad.get_hcore() + mf.mol.intor("int1e_ipkin") RESTRICTED = (ve.ndim==3) aoslice = mf.mol.aoslice_by_atom() for ki, (mf1, mf2) in enumerate
(mfset): atmid, axis = np.divmod(k
i, 3) p0, p1 = aoslice[atmid][2:] vfull1 = mf1.get_veff() + mf1.get_hcore() - mf1.mol.intor_symmetric('int1e_kin') # <u+|V+|v+> vfull2 = mf2.get_veff() + mf2.get_hcore() - mf2.mol.intor_symmetric('int1e_kin') # <u-|V-|v-> vfull = (vfull1 - vfull2)/disp # (<p+|V+|q+>-<p-|V-|q->)/dR if RESTRICTED: vfull[p0:p1] -= ve[axis,p0:p1] vfull[:,p0:p1] -= ve[axis,p0:p1].T else: vfull[:,p0:p1] -= ve[:,axis,p0:p1] vfull[:,:,p0:p1] -= ve[:,axis,p0:p1].transpose(0,2,1) vmat.append(vfull) return np.asarray(vmat) def kernel(mf, disp=1e-4, mo_rep=False, cutoff_frequency=CUTOFF_FREQUENCY, keep_imag_frequency=KEEP_IMAG_FREQUENCY): if hasattr(mf, 'xc'): mf.grids.build() if not mf.converged: mf.kernel() RESTRICTED = (mf.mo_coeff.ndim==2) mol = mf.mol omega, vec = get_mode(mf, cutoff_frequency, keep_imag_frequency) mass = mol.atom_mass_list() * MP_ME vec = rhf_eph._freq_mass_weighted_vec(vec, omega, mass) mols_a, mols_b = gen_moles(mol, disp/2.0) # generate a bunch of molecules with disp/2 on each cartesion coord mfset = run_mfs(mf, mols_a, mols_b) # run mean field calculations on all these molecules vmat = get_vmat(mf, mfset, disp) # extracting <p|dV|q>/dR if mo_rep: if RESTRICTED: vmat = np.einsum('xuv,up,vq->xpq', vmat, mf.mo_coeff.conj(), mf.mo_coeff) else: vmat = np.einsum('xsuv,sup,svq->xspq', vmat, mf.mo_coeff.conj(), mf.mo_coeff) if RESTRICTED: mat = np.einsum('xJ,xpq->Jpq', vec, vmat) else: mat = np.einsum('xJ,xspq->sJpq', vec, vmat) return mat, omega if __name__ == '__main__': mol = gto.M() mol.atom = '''O 0.000000000000 0.00000000136 0.459620634131 H 0.000000000000 -0.77050867841 1.139170094494 H 0.000000000000 0.77050867841 1.139170094494''' mol.unit = 'angstrom' mol.basis = 'sto3g' mol.verbose=4 mol.build() # this is a pre-computed relaxed geometry mf = dft.RKS(mol) mf.grids.level=4 mf.grids.build() mf.xc = 'b3lyp' mf.conv_tol = 1e-14 mf.conv_tol_grad = 1e-8 mf.kernel() grad = mf.nuc_grad_method().kernel() print("Force on the atoms/au:") print(grad) assert(abs(grad).max()<1e-5) mat, omega = kernel(mf) matmo, _ = kernel(mf, mo_rep=True) from pyscf.eph.rks import EPH myeph = EPH(mf) eph, _ = myeph.kernel() ephmo, _ = myeph.kernel(mo_rep=True) print("***Testing on RKS***") for i in range(len(mat)): print("AO",min(np.linalg.norm(eph[i]-mat[i]), np.linalg.norm(eph[i]+mat[i]))) print("AO", min(abs(eph[i]-mat[i]).max(), abs(eph[i]+mat[i]).max())) print("MO",min(np.linalg.norm(ephmo[i]-matmo[i]), np.linalg.norm(ephmo[i]+matmo[i]))) print("MO", min(abs(ephmo[i]-matmo[i]).max(), abs(ephmo[i]+matmo[i]).max()))
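get_vmat() above forms a central difference, (V(R + d/2) - V(R - d/2)) / d, from the pair of displaced calculations. A tiny numeric illustration of why that stencil is preferred over a one-sided difference; the test function is arbitrary.

import numpy as np

f = np.sin         # arbitrary smooth function standing in for a matrix element
x, d = 0.3, 1e-4   # displacement comparable to the disp argument above

central = (f(x + d / 2) - f(x - d / 2)) / d   # same stencil as get_vmat
forward = (f(x + d) - f(x)) / d
exact = np.cos(x)

print(abs(central - exact))  # error of order d**2
print(abs(forward - exact))  # error of order d, noticeably larger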
brianwc/courtlistener
cl/visualizations/migrations/0001_initial.py
Python
agpl-3.0
3,649
0.005755
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ ('search', '0003_auto_20150826_0632'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='JSONVersions', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('date_created', models.DateTimeField(help_text=b'The time when this item was created', auto_now_add=True, db_index=True)), ('date_modified', models.DateTimeField(help_text=b'The last moment when the item was modified.', auto_now=True, db_index=True)), ('json_data', models.TextField(help_text=b'The JSON data for a particular version of the visualization.')), ], ), migrations.CreateModel( name='SCOTUSMap', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('date_created', models.DateTimeField(help_text=b'The time when this item was created', auto_now_add=True, db_index=True)), ('date_modified', models.DateTimeField(help_text=b'The last moment when the item was modified.', auto_now=True, db_index=True)), ('title', models.CharField(help_text=b"The title of the visualization that you're creating.", max_length=200)), ('subtitle', models.CharField(help_text=b"The subtitle of the visualization that you're creating.", max_length=300, blank=True)), ('slug', models.SlugField(help_text=b'The URL path that the visua
lization will map to (the slug)', max_length=75)), ('notes', models.TextFi
eld(help_text=b'Any notes that help explain the diagram, in Markdown format', blank=True)), ('degree_count', models.IntegerField(help_text=b'The number of degrees to display between cases')), ('view_count', models.IntegerField(default=0, help_text=b'The number of times the visualization has been seen.')), ('published', models.BooleanField(default=False, help_text=b'Whether the visualization can be seen publicly.')), ('deleted', models.BooleanField(default=False, help_text=b'Has a user chosen to delete this visualization?')), ('generation_time', models.FloatField(default=0, help_text=b'The length of time it takes to generate a visuzalization, in seconds.')), ('cluster_end', models.ForeignKey(related_name='visualizations_ending_here', to='search.OpinionCluster', help_text=b'The ending cluster for the visualization')), ('cluster_start', models.ForeignKey(related_name='visualizations_starting_here', to='search.OpinionCluster', help_text=b'The starting cluster for the visualization')), ('clusters', models.ManyToManyField(help_text=b'The clusters involved in this visualization', related_name='visualizations', to='search.OpinionCluster', blank=True)), ('user', models.ForeignKey(related_name='scotus_maps', to=settings.AUTH_USER_MODEL, help_text=b'The user that owns the visualization')), ], ), migrations.AddField( model_name='jsonversions', name='map', field=models.ForeignKey(related_name='json_versions', to='visualizations.SCOTUSMap', help_text=b'The visualization that the json is affiliated with.'), ), ]
indictranstech/erpnext
erpnext/stock/doctype/delivery_trip/test_delivery_trip.py
Python
agpl-3.0
2,568
0.025312
# -*- coding: utf-8 -*- # Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors # See license.txt from __future__ import unicode_literals import frappe import erpnext import unittest from frappe.utils import nowdate, add_days from erpnext.tests.utils import create_test_contact_and_address from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address class TestDeliveryTrip(unittest.TestCase): def setUp(self): create_driver() create_vehicle() create_delivery_notfication() create_test_contact_and_address() def test_delivery_trip(self): contact = get_contact_and_address("_Test Customer") if not frappe.db.exists("Delivery Trip", "TOUR-00000"): delivery_trip = frappe.new_doc("Delivery Trip") delivery_trip.company = erpnext.get_default_company() delivery_trip.date = add_days(nowdate(), 5) delivery_trip.driver = "DRIVER-00001" delivery_trip.vehicle = "JB 007" delivery_trip.append("delivery_stops", { "customer": "_Test Customer", "address": contact.shipping_address.parent, "contact": contact.contact_person.parent }) delivery_trip.delivery_notification = 'Delivery Notification' delivery_trip.insert() sender_email = frappe.db.get_value("User", frappe.session.user, "email") notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver, vehicle=delivery_trip.vehicle, sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification) self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0) def create_driver(): if not frappe.db.exists("Driver", "Newton Scmander"): driver = frappe.new_doc("Driver") driver.full_name = "Newton Scmander" driver.cell_number = "98343424242" driver.license_number = "B809" driver.insert() def create_delivery_notfication(): if not frappe.db.exists("Standard Reply", "Delivery Notification"): frappe.get_doc({ 'doctype': 'Standard Reply', 'name': 'Delivery Notification', 'response': 'Test
Delivery Trip', 'subject': 'Test Subject', 'owner': frappe.session.user }).insert() def create_vehicle(): if not frappe.db.exists("Vehicle", "JB 007"): vehicle = frappe.get_doc({ "doctype": "Vehicle", "license_plate": "JB 007", "make": "Maruti", "model": "PCM", "last_odometer": 5000, "acquisition_date": frappe.utils.nowdate(), "location": "Mumbai", "chassis_no": "1234ABCD", "uom": "Litre", "vehicle_value": frappe.utils.flt(500000) }) vehic
le.insert()
Frankkkkk/arctic
tests/unit/scripts/test_arctic_fsck.py
Python
lgpl-2.1
2,098
0.006673
from mock import patch, sentinel, call from arctic.scripts.arctic_fsck import main from ...util import run_as_main import sys import pytest def test_main(): with patch('arctic.scripts.arctic_fsck.Arctic') as Arctic, \ patch('arctic.scripts.arctic_fsck.get_mongodb_uri') as get_mongodb_uri, \ patch('arctic.scripts.arctic_fsck.do_db_auth') as do_db_auth: run_as_main(main, '--host', '%s:%s' % (sentinel.host, sentinel.port), '-v', '--library', 'sentinel.library', 'lib2', '-f') get_mongodb_uri.assert_called_once_with('sentinel.host:sentinel.port') Arctic.assert_called_once_with(get_mongodb_uri.return_value) assert do_db_auth.call_args_list == [call('%s:%s' % (sentinel.host, sentinel.port),
Ar
ctic.return_value._conn, 'arctic_sentinel'), call('%s:%s' % (sentinel.host, sentinel.port), Arctic.return_value._conn, 'arctic')] assert Arctic.return_value.__getitem__.return_value._fsck.call_args_list == [call(False), call(False), ] def test_main_dry_run(): with patch('arctic.scripts.arctic_fsck.Arctic') as Arctic, \ patch('arctic.scripts.arctic_fsck.get_mongodb_uri') as get_mongodb_uri, \ patch('arctic.scripts.arctic_fsck.do_db_auth') as do_db_auth: run_as_main(main, '--host', '%s:%s' % (sentinel.host, sentinel.port), '-v', '--library', 'sentinel.library', 'sentinel.lib2') get_mongodb_uri.assert_called_once_with('sentinel.host:sentinel.port') Arctic.assert_called_once_with(get_mongodb_uri.return_value) assert do_db_auth.call_count == 0 assert Arctic.return_value.__getitem__.return_value._fsck.call_args_list == [call(True), call(True), ]
Meisterschueler/ogn-python
migrations/versions/7f5b8f65a977_add_receiverranking.py
Python
agpl-3.0
2,677
0.004483
"""Add ReceiverRanking Revision ID: 7f5b8f65a977 Revises: c53fdb39f5a5 Create Date: 2020-12-02 22:33:58.821112 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '7f5b8f65a977' down_revision = 'c53fdb39f5a5' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('receiver_rankings', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.Date(), nullable=True), sa.Column('local_rank', sa.Integ
er(), nullable=True), sa.Column('global_rank', sa.Integer(), nullable=True), sa.Column('max_distance', sa.Float(precision=2), nullable=True), sa.Column('max_normalized_quality', sa.Float(precision=2), nulla
ble=True), sa.Column('messages_count', sa.Integer(), nullable=True), sa.Column('coverages_count', sa.Integer(), nullable=True), sa.Column('senders_count', sa.Integer(), nullable=True), sa.Column('receiver_id', sa.Integer(), nullable=True), sa.Column('country_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['country_id'], ['countries.gid'], ondelete='CASCADE'), sa.ForeignKeyConstraint(['receiver_id'], ['receivers.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('id') ) op.create_index('idx_receiver_rankings_uc', 'receiver_rankings', ['date', 'receiver_id'], unique=True) op.create_index(op.f('ix_receiver_rankings_country_id'), 'receiver_rankings', ['country_id'], unique=False) op.create_index(op.f('ix_receiver_rankings_receiver_id'), 'receiver_rankings', ['receiver_id'], unique=False) op.drop_column('receiver_statuses', 'agl') op.drop_column('receiver_statuses', 'location_mgrs') op.drop_column('receiver_statuses', 'location_mgrs_short') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('receiver_statuses', sa.Column('location_mgrs_short', sa.VARCHAR(length=9), autoincrement=False, nullable=True)) op.add_column('receiver_statuses', sa.Column('location_mgrs', sa.VARCHAR(length=15), autoincrement=False, nullable=True)) op.add_column('receiver_statuses', sa.Column('agl', sa.REAL(), autoincrement=False, nullable=True)) op.drop_index(op.f('ix_receiver_rankings_receiver_id'), table_name='receiver_rankings') op.drop_index(op.f('ix_receiver_rankings_country_id'), table_name='receiver_rankings') op.drop_index('idx_receiver_rankings_uc', table_name='receiver_rankings') op.drop_table('receiver_rankings') # ### end Alembic commands ###
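The migration above is applied through Alembic's usual upgrade machinery. A minimal programmatic sketch, assuming an alembic.ini in the working directory that points at this migrations/ folder; running the equivalent CLI command is the more common route.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")           # path is an assumption
command.upgrade(cfg, "7f5b8f65a977")  # apply up to the revision defined above
# command.downgrade(cfg, "c53fdb39f5a5")  # revert to the previous revision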
cdw/multifil
multifil/aws/userdata.py
Python
mit
3,807
0.008406
#!/usr/bin/env python3 # encoding: utf-8 """ userdata_script.py - control an aws instance from a sqs queue Run this guy on startup as a userdata scrip
t and he will connect to s3 to download code
to a directory, and run commands in it that are provided by an SQS queue, one job at a time per core Processing as a string template, we replace the following keys with their equivalents: - aws_access_key - aws_secret_key - job_queue_name - code_zip_key Created by Dave Williams on 2011-02-08 """ ## Import present packages import os import sys import time import traceback import subprocess as subp import multiprocessing as mp ## Handle logging and thrown fatal errors def log_it(log_message): print(log_message) with open('/dev/console', 'w') as console: console.write("USER DATA: "+log_message+'\n') def fatal_error(error_log_message, feed_me = "differently"): log_it("ERROR: " + error_log_message) log_it("SHUTTING DOWN: feed me " + feed_me + " next time") #os.system("shutdown now -h") def try_and_log(command, message): out = subp.call(command, shell=True) log_it(message + str(out)) ## Install extra software on the node log_it("#"*60 + "\n START OF USERDATA SCRIPT\n"*3 + "#"*60) try_and_log("apt-get -qq update", "Synced package index with result: ") try_and_log("apt-get -qq install python3-scipy python3-pip unzip > \\dev\\null", "Installed scipy, pip, unzip with result: ") try_and_log("pip3 install boto ujson", "Installed boto, ujson: ") ## Userdata runs as root, but in /, let's move os.chdir('/root') HOMEDIR = os.getcwd()+'/' ## Configure control parameters ACCESS_KEY = '$aws_access_key' SECRET_KEY = '$aws_secret_key' JOB_QUEUE = '$job_queue_name' CODE_ZIP_KEY = '$code_zip_key' ## Write out boto configuration lines = """[Credentials] aws_access_key_id = %s aws_secret_access_key = %s \n"""%(ACCESS_KEY, SECRET_KEY) with open('.boto', 'w') as config_file: config_file.writelines(lines) ## Connect to aws with boto try: log_it("Connecting to boto") import boto # Had to wait until .boto was written S3 = boto.connect_s3() SQS = boto.connect_sqs() SQS.get_all_queues() # Call to test if our keys were accepted except (boto.exception.NoAuthHandlerFound, boto.exception.SQSError) as e: fatal_error("Probably gave bad aws keys", "valid credentials") ## Download files from passed bucket try: log_it("Downloading from code bucket") bucket_name = [n for n in CODE_ZIP_KEY.split('/') if len(n)>3][0] #s3:// & / key_name = CODE_ZIP_KEY[len(bucket_name)+CODE_ZIP_KEY.index(bucket_name)+1:] code_bucket = S3.get_bucket(bucket_name) key = code_bucket.get_key(key_name) key.get_contents_to_filename(key_name) try_and_log("unzip %s"%key_name, "Unzipped local code file %s with result: "%key_name) time.sleep(3) # poor man's race condition control! except boto.exception.S3ResponseError: fatal_error("No bucket with given name %s"%(CODE_ZIP_KEY), "a valid bucket") except IOError: fatal_error("Couldn't write code_bucket contents locally") ## Turn control over to the job queue try: log_it(str(dir())) log_it("Turning things over to queue eater processes") commandment = "python3 -c \"import multifil;\ multifil.aws.instance.multi_eaters('%s',shutdown=True)\""%JOB_QUEUE try_and_log(commandment, "Called sub-process to manage queue eaters") log_it("All done") except Exception as e: log_it("### An error occurred while running jobs") log_it("Exception of type " + str(type(e))) exc_type, exc_value, exc_traceback = sys.exc_info() log_it(repr(traceback.format_exception(exc_type, exc_value, exc_traceback))) log_it("Going no further, shutting down now") finally: os.system('shutdown now -h')
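The docstring above says the script is processed as a string template, with keys such as $aws_access_key replaced before launch. A small sketch of that substitution on the launching side using string.Template from the standard library; the file name and the values are hypothetical.

from string import Template

with open("userdata.py") as handle:   # the script above, kept as a template
    template = Template(handle.read())

userdata = template.substitute(       # safe_substitute() ignores unknown $names instead of raising
    aws_access_key="AKIA...",
    aws_secret_key="...",
    job_queue_name="multifil-jobs",
    code_zip_key="s3://my-bucket/code.zip",
)
# 'userdata' would then be passed as the instance user-data at launch time.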
catlinman/campdown
campdown/discography.py
Python
mit
10,389
0.001636
import html from .helpers import * from .track import Track from .album import Album class Discography: """ Discography class of Campdown. This class takes in a URL and treats it as a Bandcamp discography page. Takes over downloading of files as well as fetching of general information which can be used by other modules Args: url (str): Bandcamp URL to analyse and download from. output (str): relative or absolute path to write to. request (request): if supplied this given request's content will be analysed instead of making a new request to the mandatory URL. verbose (bool): sets if status messages and general information should be printed. Errors are still printed regardless of this. silent (bool): sets if error messages should be hidden. short (bool): omits arist and album fields from downloaded track filenames. sleep (number): timeout duration between failed requests in seconds. art_enabled (bool): if True the Bandcamp page's artwork will be downloaded and saved alongside each of the found albums/tracks. id3_enabled (bool): if True tracks downloaded will receive new ID3 tags. """ def __init__(self, url, output, request=None, verbose=False, silent=False, short=False, sleep=30, art_enabled=True, id3_enabled=True, abort_missing=False): # Requests and other information can optionally be filled to remove unneccessary # operations such as making a request to a URL that has already been fetched # by another component. self.url = url # URL to get information from. self.output = output # Basic information used when writing tracks. self.artist = None # Queue array to store album tracks in. self.queue = [] # Store the album request object for later reference. self.request = request self.content = None # Base Bandcamp URL. self.base_url = None # Set if status messages should be printed to the console. self.verbose = verbose # Set if error messages should be silenced. self.silent = silent # Set if the filename should be kept short. self.short = short # Store the timeout duration between failed requests. self.sleep = sleep # Set if the cover should be downloaded as well. self.art_enabled = art_enabled # Set if ID3 tags should be written to files. self.id3_enabled = id3_enabled # Sets if a missing album track aborts the entire album download. self.abort_missing = abort_missing def prepare(self): """ Prepares the discography class by gathering information about albums and tracks. If no previous request was made and supplied during instantiation one will be made at this point. This process does not require making requests to the album and track URLs. """ if not valid_url(self.url): # Validate the URL print("The supplied URL is not a valid URL.") return False if not self.request: # Make a request to the album URL. self.request = safe_get(self.url) if self.request.status_code != 200: print("An error occurred while trying to access your supplied URL. Status code: {}".format( self.request.status_code)) self.request = None return False # Get the content from the request and decode it correctly. self.content = self.request.content.decode('utf-8') # Verify that this is an discography page. if not page_type(self.content) == "discography": print("The supplied URL is not a discography page.") # Retrieve the base page URL. self.base_url = "{}//{}".format(str(self.url).split("/")[ 0], str(self.url).split("/")[2]) print(self.base_url) meta = html.unescape(s
tring_between(self.content, '<meta name="Description" content="', ">")).strip() self.artist = meta.split(".\n", 1)[0] if self.artist:
self.output = os.path.join(self.output, self.artist, "") # Create a new artist folder if it doesn't already exist. if not os.path.exists(self.output): os.makedirs(self.output) safe_print( '\nSet "{}" as the working directory.'.format(self.output)) # Make the artist name safe for file writing. self.artist = safe_filename(self.artist) # Define search markers to find the index of for track URLs. track_search_markers = [ '<a href="/track/', '<a href="{}/track/'.format(self.base_url), '<a href="https://\w+.bandcamp.com/track/' ] # Run a search through our track markers and handle regex options and duplicates. track_filtered_markers = [] for marker in track_search_markers: results = re.findall(marker, self.content) for result in results: if result not in track_filtered_markers: track_filtered_markers.append(result) # Create a list of indices for track links. tracks = [] for marker in track_filtered_markers: tracks.extend(find_string_indices(self.content, marker)) # Define search markers to find the index of for album URLs. album_search_markers = [ '<a href="/album/', '<a href="{}/album/'.format(self.base_url), '<a href="https://\w+.bandcamp.com/album/' ] # Run a search through our album markers and handle regex options and duplicates. album_filtered_markers = [] for marker in album_search_markers: results = re.findall(marker, self.content) for result in results: if result not in album_filtered_markers: album_filtered_markers.append(result) # Create a list of indices for album links. albums = [] for marker in album_filtered_markers: albums.extend(find_string_indices(self.content, marker)) if self.verbose: print('\nListing found discography content') for i, position in enumerate(albums): album_url = "" # Begin iteration over characters until the string begins. while self.content[position] != '"': position += 1 # Begin iteration over characters until the string closes. while self.content[position + 1] != '"' and self.content[position + 1] != '?': album_url += self.content[position + 1] position += 1 if album_url == "": continue if "http://" not in album_url and "https://" not in album_url: album_url = self.base_url + album_url # Print the prepared track. if self.verbose: safe_print(album_url) # Create a new track instance with the given URL. album = Album( album_url, self.output, verbose=self.verbose, silent=self.silent, short=self.short, sleep=self.sleep, art_enabled=self.art_enabled, id3_enabled=self.id3_enabled, abort_missing=self.abort_missing ) self.queue.insert(len(self.queue), album) for i, position in enumerate(tracks): track_url = "" # Begin iteration over characters until the string begins. while self.content[position] != '"': position += 1 # Begin iteration over characters until the string closes. while self.content[position + 1] != '"' and self.content[position + 1] != '?': track_url += self.content[position + 1] position += 1 if track_url == "": continue if not "ht
LAMAC-IFUNAM/lafrioc-electronics-labjack
Python_LJM/Examples/eReadAddresses.py
Python
gpl-3.0
1,058
0.008507
""" Demonstrates how to use the labjack.ljm.eReadAddresses (LJM_eReadAddresses) function. """ from labjack import ljm # Open first found LabJack handle = ljm.open(ljm.constants.dtANY, ljm.constants.ctANY, "ANY") #handle = ljm.openS("ANY", "ANY", "ANY") info = ljm.getHandleInfo(handle) print("Opened a LabJack with Device type: %i, Connection type: %i,\n" \ "Serial number: %i, IP address: %s, Port: %i,\nMax bytes per MB: %i" % \ (info[0], info[1], info[2], ljm.numberToIP(info[3]), info[4], info[5])) # Setup and call eReadAddresses to read values from the LabJack. nu
mFrames = 3 aAddresses = [60028, 60000, 60004] # [serial number, product ID, firmware version] aDataTypes = [ljm.constants.UINT32, ljm.constants.FLOAT32, ljm.constants.FLOAT32] results = ljm.eReadAddresses(handle, numFrames, aAddresses, aDataTypes) print("\neReadAddresses res
ults: ") for i in range(numFrames): print(" Address - %i, data type - %i, value : %f" % \ (aAddresses[i], aDataTypes[i], results[i])) # Close handle ljm.close(handle)
zepto/musio
musio/portaudio/__init__.py
Python
gpl-3.0
830
0.003614
#!/usr/bin/env python # -*- coding: UTF8 -*- # # Provides access to portaudio. # Copyright (C) 2010 Josiah Gordon <josiahg@gmail.com> # # This pr
ogram is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ A portaudio module. """ __all__ = ['_portaudio']
lyoniionly/django-cobra
src/cobra/apps/dashboard/autocheck/app.py
Python
apache-2.0
480
0
from django.co
nf.urls import url from cobra.core.application import Application from cobra.core.loading import get_class class AutoCheckDashboardApplication(Application): name = None index_view = get_class('dashboard.autocheck.views', 'IndexView') def get_urls(self): urls = [ url(r'^$', self.index_view.as_view(), name='autocheck-index'), ] return self.post_process_urls(urls) application = AutoCheckDashboardApp
lication()
oppia/oppia
core/controllers/resources_test.py
Python
apache-2.0
33,854
0.000148
# Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for Oppia resource handling (e.g. templates, images).""" from __future__ import annotations import os from core import feconf from core import utils from core.constants import constants from core.domain import exp_services from core.domain import fs_domain from core.domain import fs_services from core.domain import rights_manager from core.domain import skill_services from core.domain import story_services from core.domain import topic_domain from core.domain import topic_fetchers from core.domain import user_services from core.tests import test_utils class AssetDevHandlerImageTests(test_utils.GenericTestBase): ASSET_HANDLER_URL_PREFIX = '/assetsdevhandler' def _get_image_url(self, entity_type, entity_id, filename): """Gets the image URL.""" return '%s/%s/%s/assets/image/%s' % ( self.ASSET_HANDLER_URL_PREFIX, entity_type, entity_id, filename) def setUp(self): """Load a demo exploration and register self.EDITOR_EMAIL.""" super(AssetDevHandlerImageTests, self).setUp() exp_services.delete_demo('0') self.system_user = user_services.get_system_user()
exp_services.load_demo('0') rights_manager.release_ownership_of_exploration( self.system_user, '0') self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME) def test_image_upload_with_no_filename_raises_error(self): self.login(self.EDITOR_EMAIL) csrf_token = self.get_new_csrf_token() with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None ) as f: raw_image = f.read() response_dict = self.post_json( '%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, {}, csrf_token=csrf_token, upload_files=(('image', 'unused_filename', raw_image),), expected_status_int=400) self.assertEqual( response_dict['error'], 'Missing key in handler args: filename.') self.logout() def test_get_image_with_invalid_page_context_raises_error(self): self.login(self.EDITOR_EMAIL) # Only 404 is raised here due to the try - except block in the # controller. self.get_json( self._get_image_url('invalid_context', '0', 'filename'), expected_status_int=404) self.logout() def test_image_upload_with_invalid_filename_raises_error(self): self.login(self.EDITOR_EMAIL) csrf_token = self.get_new_csrf_token() with utils.open_file( os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None ) as f: raw_image = f.read() response_dict = self.post_json( '%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, {'filename': '.png'}, csrf_token=csrf_token, upload_files=(('image', 'unused_filename', raw_image),), expected_status_int=400) error_msg = ( 'Schema validation for \'filename\' failed: Validation' ' failed: is_regex_matched ({\'regex_pattern\': ' '\'\\\\w+[.]\\\\w+\'}) for object .png' ) self.assertEqual(response_dict['error'], error_msg) self.logout() def test_cannot_upload_duplicate_image(self): self.login(self.EDITOR_EMAIL) csrf_token = self.get_new_csrf_token() with utils.open_file( os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None ) as f: raw_image = f.read() response_dict = self.post_json( '%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, {'filename': 'test.png'}, csrf_token=csrf_token, upload_files=(('image', 'unused_filename', raw_image),)) filename = response_dict['filename'] response = self.get_custom_response( self._get_image_url('exploration', '0', filename), 'image/png') self.assertEqual(response.body, raw_image) response_dict = self.post_json( '%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, {'filename': 'test.png'}, csrf_token=csrf_token, upload_files=(('image', 'unused_filename', raw_image),), expected_status_int=400) self.assertEqual( response_dict['error'], 'A file with the name test.png already exists. Please choose a ' 'different name.') def test_image_upload_and_download(self): """Test image uploading and downloading.""" self.signup(self.CURRICULUM_ADMIN_EMAIL, self.CURRICULUM_ADMIN_USERNAME) admin_id = self.get_user_id_from_email(self.CURRICULUM_ADMIN_EMAIL) self.set_curriculum_admins([self.CURRICULUM_ADMIN_USERNAME]) subtopic = topic_domain.Subtopic.create_default_subtopic( 1, 'Subtopic Title') story_id = story_services.get_new_story_id() topic_id = topic_fetchers.get_new_topic_id() skill_id = skill_services.get_new_skill_id() self.save_new_story(story_id, admin_id, topic_id) self.save_new_topic( topic_id, admin_id, name='Name', description='Description', canonical_story_ids=[story_id], additional_story_ids=[], uncategorized_skill_ids=[], subtopics=[subtopic], next_subtopic_id=2) self.save_new_skill(skill_id, admin_id, description='Description') # Page context: Exploration. 
self.login(self.EDITOR_EMAIL) csrf_token = self.get_new_csrf_token() with utils.open_file( os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None ) as f: raw_image = f.read() response_dict = self.post_json( '%s/exploration/0' % feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, {'filename': 'test.png'}, csrf_token=csrf_token, upload_files=(('image', 'unused_filename', raw_image),) ) filename = response_dict['filename'] self.logout() response = self.get_custom_response( self._get_image_url('exploration', '0', filename), 'image/png') self.assertEqual(response.body, raw_image) # Page context: Topic. self.login(self.CURRICULUM_ADMIN_EMAIL) csrf_token = self.get_new_csrf_token() with utils.open_file( os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None ) as f: raw_image = f.read() response_dict = self.post_json( '%s/topic/%s' % ( feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, topic_id), {'filename': 'test.png'}, csrf_token=csrf_token, upload_files=(('image', 'unused_filename', raw_image),) ) filename = response_dict['filename'] self.logout() response = self.get_custom_response( self._get_image_url('topic', topic_id, filename), 'image/png') self.assertEqual(response.body, raw_image) # Page context: Story. self.login(self.CURRICULUM_ADMIN_EMAIL) csrf_token = self.get_new_csrf_token() with utils.open_file( os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb', encoding=None ) as f: raw_image = f.read() response_dict = self.post_json( '%s/story/%s' % ( feconf.EXPLORATION_IMAGE_UPLOAD_PREFIX, story_
listyque/TACTIC-Handler
thlib/side/ntlm/des_c.py
Python
epl-1.0
9,208
0.016725
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/ # Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru> # # This library is free software: you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation, either # version 3 of the License, or (at your option) any later version. # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more deta
ils. # # You should have received a copy of the GNU Lesser General Public # License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/lic
enses/lgpl.txt>. from U32 import U32 # --NON ASCII COMMENT ELIDED-- #typedef unsigned char des_cblock[8]; #define HDRSIZE 4 def c2l(c): "char[4] to unsigned long" l = U32(c[0]) l = l | (U32(c[1]) << 8) l = l | (U32(c[2]) << 16) l = l | (U32(c[3]) << 24) return l def c2ln(c,l1,l2,n): "char[n] to two unsigned long???" c = c + n l1, l2 = U32(0), U32(0) f = 0 if n == 8: l2 = l2 | (U32(c[7]) << 24) f = 1 if f or (n == 7): l2 = l2 | (U32(c[6]) << 16) f = 1 if f or (n == 6): l2 = l2 | (U32(c[5]) << 8) f = 1 if f or (n == 5): l2 = l2 | U32(c[4]) f = 1 if f or (n == 4): l1 = l1 | (U32(c[3]) << 24) f = 1 if f or (n == 3): l1 = l1 | (U32(c[2]) << 16) f = 1 if f or (n == 2): l1 = l1 | (U32(c[1]) << 8) f = 1 if f or (n == 1): l1 = l1 | U32(c[0]) return (l1, l2) def l2c(l): "unsigned long to char[4]" c = [] c.append(int(l & U32(0xFF))) c.append(int((l >> 8) & U32(0xFF))) c.append(int((l >> 16) & U32(0xFF))) c.append(int((l >> 24) & U32(0xFF))) return c def n2l(c, l): "network to host long" l = U32(c[0] << 24) l = l | (U32(c[1]) << 16) l = l | (U32(c[2]) << 8) l = l | (U32(c[3])) return l def l2n(l, c): "host to network long" c = [] c.append(int((l >> 24) & U32(0xFF))) c.append(int((l >> 16) & U32(0xFF))) c.append(int((l >> 8) & U32(0xFF))) c.append(int((l ) & U32(0xFF))) return c def l2cn(l1, l2, c, n): "" for i in range(n): c.append(0x00) f = 0 if f or (n == 8): c[7] = int((l2 >> 24) & U32(0xFF)) f = 1 if f or (n == 7): c[6] = int((l2 >> 16) & U32(0xFF)) f = 1 if f or (n == 6): c[5] = int((l2 >> 8) & U32(0xFF)) f = 1 if f or (n == 5): c[4] = int((l2 ) & U32(0xFF)) f = 1 if f or (n == 4): c[3] = int((l1 >> 24) & U32(0xFF)) f = 1 if f or (n == 3): c[2] = int((l1 >> 16) & U32(0xFF)) f = 1 if f or (n == 2): c[1] = int((l1 >> 8) & U32(0xFF)) f = 1 if f or (n == 1): c[0] = int((l1 ) & U32(0xFF)) f = 1 return c[:n] # array of data # static unsigned long des_SPtrans[8][64]={ # static unsigned long des_skb[8][64]={ from des_data import des_SPtrans, des_skb def D_ENCRYPT(tup, u, t, s): L, R, S = tup #print 'LRS1', L, R, S, u, t, '-->', u = (R ^ s[S]) t = R ^ s[S + 1] t = ((t >> 4) + (t << 28)) L = L ^ (des_SPtrans[1][int((t ) & U32(0x3f))] | \ des_SPtrans[3][int((t >> 8) & U32(0x3f))] | \ des_SPtrans[5][int((t >> 16) & U32(0x3f))] | \ des_SPtrans[7][int((t >> 24) & U32(0x3f))] | \ des_SPtrans[0][int((u ) & U32(0x3f))] | \ des_SPtrans[2][int((u >> 8) & U32(0x3f))] | \ des_SPtrans[4][int((u >> 16) & U32(0x3f))] | \ des_SPtrans[6][int((u >> 24) & U32(0x3f))]) #print 'LRS:', L, R, S, u, t return ((L, R, S), u, t, s) def PERM_OP (tup, n, m): "tup - (a, b, t)" a, b, t = tup t = ((a >> n) ^ b) & m b = b ^ t a = a ^ (t << n) return (a, b, t) def HPERM_OP (tup, n, m): "tup - (a, t)" a, t = tup t = ((a << (16 - n)) ^ a) & m a = a ^ t ^ (t >> (16 - n)) return (a, t) shifts2 = [0,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0] class DES: KeySched = None # des_key_schedule def __init__(self, key_str): # key - UChar[8] key = [] for i in key_str: key.append(ord(i)) #print 'key:', key self.KeySched = des_set_key(key) #print 'schedule:', self.KeySched, len(self.KeySched) def decrypt(self, str): # block - UChar[] block = [] for i in str: block.append(ord(i)) #print block block = des_ecb_encrypt(block, self.KeySched, 0) res = '' for i in block: res = res + (chr(i)) return res def encrypt(self, str): # block - UChar[] block = [] for i in str: block.append(ord(i)) block = des_ecb_encrypt(block, self.KeySched, 1) res = '' for i in block: res = res + (chr(i)) return res #------------------------ def des_encript(input, ks, encrypt): # input - U32[] # 
output - U32[] # ks - des_key_shedule - U32[2][16] # encrypt - int # l, r, t, u - U32 # i - int # s - U32[] l = input[0] r = input[1] t = U32(0) u = U32(0) r, l, t = PERM_OP((r, l, t), 4, U32(0x0f0f0f0fL)) l, r, t = PERM_OP((l, r, t), 16, U32(0x0000ffffL)) r, l, t = PERM_OP((r, l, t), 2, U32(0x33333333L)) l, r, t = PERM_OP((l, r, t), 8, U32(0x00ff00ffL)) r, l, t = PERM_OP((r, l, t), 1, U32(0x55555555L)) t = (r << 1)|(r >> 31) r = (l << 1)|(l >> 31) l = t s = ks # ??????????????? #print l, r if(encrypt): for i in range(0, 32, 4): rtup, u, t, s = D_ENCRYPT((l, r, i + 0), u, t, s) l = rtup[0] r = rtup[1] rtup, u, t, s = D_ENCRYPT((r, l, i + 2), u, t, s) r = rtup[0] l = rtup[1] else: for i in range(30, 0, -4): rtup, u, t, s = D_ENCRYPT((l, r, i - 0), u, t, s) l = rtup[0] r = rtup[1] rtup, u, t, s = D_ENCRYPT((r, l, i - 2), u, t, s) r = rtup[0] l = rtup[1] #print l, r l = (l >> 1)|(l << 31) r = (r >> 1)|(r << 31) r, l, t = PERM_OP((r, l, t), 1, U32(0x55555555L)) l, r, t = PERM_OP((l, r, t), 8, U32(0x00ff00ffL)) r, l, t = PERM_OP((r, l, t), 2, U32(0x33333333L)) l, r, t = PERM_OP((l, r, t), 16, U32(0x0000ffffL)) r, l, t = PERM_OP((r, l, t), 4, U32(0x0f0f0f0fL)) output = [l] output.append(r) l, r, t, u = U32(0), U32(0), U32(0), U32(0) return output def des_ecb_encrypt(input, ks, encrypt): # input - des_cblock - UChar[8] # output - des_cblock - UChar[8] # ks - des_key_shedule - U32[2][16] # encrypt - int #print input l0 = c2l(input[0:4]) l1 = c2l(input[4:8]) ll = [l0] ll.append(l1) #print ll ll = des_encript(ll, ks, encrypt) #print ll l0 = ll[0] l1 = ll[1] output = l2c(l0) output = output + l2c(l1) #print output l0, l1, ll[0], ll[1] = U32(0), U32(0), U32(0), U32(0) return output def des_set_key(key): # key - des_cblock - UChar[8] # schedule - des_key_schedule # register unsigned long c,d,t,s; # register unsigned char *in; # register unsigned long *k; # register int i; #k = schedule # in = key k = [] c = c2l(key[0:4]) d = c2l(key[4:8]) t = U32(0) d, c, t = PERM_OP((d, c, t), 4, U32(0x0f0f0f0fL)) c, t = HPERM_OP((c, t), -2, U32(0xcccc0000L)) d, t = HPERM_OP((d, t), -2, U32(0xcccc0000L)) d, c, t = PERM_OP((d, c, t), 1, U32(0x55555555L)) c, d, t = PERM_OP((c, d, t), 8, U32(0x00ff00ffL)) d, c, t = PERM_OP((d, c, t), 1, U32(0x55555555L)) d = (((d & U32(0x000000ffL)) << 16)|(d & U32(0x0000ff00L))|((d & U32(0x00ff0000L)) >> 16)|((c & U32(0xf0000000L)) >> 4)) c = c & U32(0x0fffffffL) for i in range(16): if (shifts2[i]): c = ((c >> 2)|(c <<
tadgh/Shizuka
src/test_client/test_MonitorManager.py
Python
mit
2,845
0.004569
import unittest import MonitorManager import RamByteMonitor import Constants import logging import Utils #TODO figure out proper logging practice. logging.basicConfig(level=logging.INFO) class TestMonitorManager(unittest.TestCase): def setUp(self): self.manager = MonitorManager.MonitorManager() #have to manually wipe monitors in between tests because this singleton stubbornly holds onto data. self.manager.clear_monitors() def test_adding_monitor_to_manager(self): monitor_1 = RamByteMonitor.RamByteMonitor() self.manager.add_monitor(monitor_1) self.assertTrue(Constants.RAM_BYTE_MONITOR in self.manager.monitor_list.keys(), "uh oh, guess its not in the keys,.") def test_monitor_properly_deleted_through_object_deletion(self): monitor_1 = RamByteMonitor.RamByteMonitor() self.manager.add_monitor(monitor_1) self.manager.remove_monitor(monitor_1) self.assertFalse(Constants.RAM_BYTE_MONITOR in self.manager.monitor_list.keys(), "Not Properly Deleted, found ID in the keys.") def test_empty_list_raises_error(self): self.assertRaises(KeyError, self.manager.remove_monitor_by_type(Constants.RAM_BYTE_MONITOR)) def test_remove_monitor_by_type(self): self.manager.add_monitor(RamByteMonitor.RamByteMonitor()) self.manager.remove_monitor_by_type(Constants.RAM_BYTE_MONITOR) print(self.manager.list_monitors()) self.assertTrue(len(self.manager.list_monitors()) == 0) def test_monitor_factory_generates_correct_monitor(self): mon1 = self.manager.create_monitor(Constants.RAM_BYTE_MONITOR) self.assertIsInstance(mon1, RamByteMonitor.RamByteMonitor) def test_monitor_factory_fails_on_unknown_type(self): self.assertRaises(ValueError, self.manager.create_monitor, "Some Garbage Type") def test_monitor_factory_fails_on_bad_option_parse(self): mon1 = self.manager.create_monitor(Constants.STORAGE_BYTE_MONITOR + "ASDAS")#a garbage mount point. self.assertIsNone(mon1) def test_handle_config_successful_c
all(self): mp = Utils.get_drive_mountpoints()[0] cpu_mon = self.manager.create_monitor(Constants.CPU_PERCENT_MONITOR) self.manager.add_monitor(cpu_mon) config_dict = { "add": [ Constants.RAM_BYTE_MONITOR, Constants.BYTES_RECEIVED_MONITOR, Constants.STORAGE_BYTE_MONITOR + mp ], "remove": [ Constants.CPU_PERCENT_MONITOR ] } self.manager.h
andle_config(config_dict) self.assertTrue(len(self.manager.list_monitors()) == 3 and Constants.CPU_PERCENT_MONITOR not in self.manager.list_monitors().keys()) if __name__ == "__main__": unittest.main()
flyingSprite/spinelle
task_inventory/order_1_to_30/order_17_show_time_with_current_and_given.py
Python
mit
1,185
0
"""Order 17: show time with give time and current time. """ class ShowTimeWithCurrentAndGiven(object): @staticmethod def show(current_timestamp=0, given_timestamp=0): if current_tim
estamp - given_timestamp < 0: return '' if current_timestamp - given_timestamp < 120: return '1分钟前' if current_timestamp - given_timestamp < 60 * 60: munite = int((current_timestamp - given_timestamp) / 60) return f'{munite}分钟前'
if current_timestamp - given_timestamp < 24 * 60 * 60: hour = int((current_timestamp - given_timestamp) / 60 / 60) return f'{hour}小时前' day = int((current_timestamp - given_timestamp) / 24 / 60 / 60) return f'{day}天前' # # 1分钟前 # print(ShowTimeWithCurrentAndGiven.show(1502173717, 1502173700)) # # 3分钟前 # print(ShowTimeWithCurrentAndGiven.show(1502173717, 1502173517)) # # 2小时前 # print(ShowTimeWithCurrentAndGiven.show(1502173717, 1502163517)) # # 11天前 # print(ShowTimeWithCurrentAndGiven.show(1502173717, 1501163517)) # # 127天前 # print(ShowTimeWithCurrentAndGiven.show(1502173717, 1491163517))
javihernandez/accerciser-mirror
src/lib/accerciser/__init__.py
Python
bsd-3-clause
1,039
0.014437
''' Configures the path to pyatspi. Exposes all other package conten
ts. @author: Eitan Isaacson @author: Peter Parente @organization: IBM Corporation @copyright: Copyright (c) 2006, 2007 IBM Corporation @license: BSD All rights reserved. This program and the accompanying materials are made available under the terms of the BSD which accompanies this distribution, and is available at U{http://www.opensource.org/licenses/bsd-license.php} ''' import sys, os from i18n import
_ import signal def signal_handler(signal, frame): print _( 'You pressed Ctrl+Z. This would normally freeze your keyboard') print _( 'Ctrl+Z has been disabled; use "accerciser &" instead from the command line') signal.signal(signal.SIGTSTP, signal_handler) # If pyatspi not installed seperately, add pyatspi zip file to the path try: import pyatspi except ImportError: sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'pyatspi.zip')) def main(): ''' Run program. ''' from accerciser import Main main = Main() main.run()
isloux/Shannon
python/rbawz2.py
Python
bsd-3-clause
3,690
0.066667
#!/usr/bin/env python # # Computation of the rate-distortion function for source coding with side # information at the decoder using the Blahut-Arimoto algorithm. # # Formulation similar to R.E. Blahut "Computation of Channel Capacity and # Rate-Distortion Functions," IEEE Transactions on Information Theory, 18, # no. 4, 1972. # # Author: Christophe Ramananjaona # (c) 2005, Department of Electrical and Computer Engineering, Duke University. # (c) 2017, Isloux, for the Python version. from numpy import shape,sum,zeros,ones,arange,log,exp,array,longdouble,finfo from sys import float_info from os.path import isfile from sys import argv #from code_generator0 import code_generator from code_generator import code_generator def distortion_measure(n): # Hamming distance D=ones((n,n),dtype='longdouble') for i in range(n): D[i][i]=0.0 return(D) def blahut_arimoto(q): nx,ny=shape(q) qx=[] for i in range(nx): qx.append(longdouble(sum(q[i,:]))) qy=[] for j in range(ny): qy.append(longdouble(sum(q[:,j]))) nz=nx #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # The array t contains all the possible codes that map Y into Z nt=nx+1 t,nt=code_generator(nz,ny) # If nx!=nz t
here is a problem D=distortion_measure(max(nx,ny)) npoints=100 ds=arange(-10.0,0.0,0.1) c=zeros((nx,nt),dtype='longdou
ble') vtx=zeros((nt,nx),dtype='longdouble') sexp=zeros(nt,dtype='longdouble') #epsilon=finfo(longdouble(1.0)).eps epsilon=1.0e-7 for s in range(npoints): qty=ones((nt,ny),dtype='longdouble') qty=qty/nt/ny # Initialise stop test stop=longdouble(1.0e5) n=0 while stop>epsilon: n=n+1 for i in range(nx): if qx[i]!=0.0: qxid=longdouble(1.0)/qx[i] for k in range(nt): ssum=longdouble(0.0) for j in range(ny): if qx[i]!=0.0: ssum+=q[i][j]*qxid*log(qty[k][j]) else: ssum+=qy[j]*log(qty[k][j]) c[i][k]=exp(ssum) for i in range(nx): sexp=zeros(nt,dtype='longdouble') sd=longdouble(0.0) if qx[i]!=0.0: qxid=longdouble(1.0)/qx[i] for k in range(nt): for j in range(ny): if qx[i]!=0.0: sexp[k]+=q[i][j]*qxid*D[i,t[2][k,j]] else: sexp[k]+=qy[j]*D[i,t[2][k,j]] sd+=c[i][k]*exp(ds[s]*sexp[k]) sd=longdouble(1.0)/sd for k in range(nt): vtx[k][i]=c[i][k]*exp(ds[s]*sexp[k])*sd qtym=qty qty=zeros((nt,ny),dtype='longdouble') stop=longdouble(0.0) for j in range(ny): qyjd=longdouble(1.0)/qy[j] for k in range(nt): for i in range(nx): qty[k][j]+=q[i][j]*qyjd*vtx[k][i] stop+=qy[j]*qty[k][j]*log(qty[k][j]/qtym[k][j]) ssum=longdouble(0.0) dv=longdouble(0.0) for i in range(nx): ssum2=longdouble(0.0) if qx[i]!=0.0: qxid=longdouble(1.0)/qx[i] for k in range(nt): ssexp=longdouble(0.0) for j in range(ny): if qx[i]!=0.0: ssexp+=q[i][j]*qxid*D[i,t[2][k,j]] else: ssexp+=qy[j]*D[i,t[2][k,j]] dv+=q[i][j]*vtx[k][i]*D[i,t[2][k,j]] ssum2+=c[i][k]*exp(ds[s]*ssexp) ssum+=qx[i]*log(ssum2) R=ds[s]*dv-ssum print dv,R,n def readinputfile(inputfile): a=[ line.split() for line in file(inputfile) ] nx=len(a) # Number of lines ny=len(a[0]) # Number of columns q=zeros((nx,ny),dtype='longdouble') for i in range(nx): for j in range(ny): q[i][j]=a[i][j] return(q) def main(inputfile="q.txt"): if isfile(inputfile): q=readinputfile(inputfile) else: nx=2 ny=2 q=array([[0.3,0.2],[0.24,0.26]],dtype='longdouble') blahut_arimoto(q) if __name__=="__main__": if len(argv)>1: main(argv[1]) else: main()
slimta/python-slimta
test/test_slimta_edge_smtp.py
Python
mit
5,342
0
import unittest from mox import MoxTestBase, IsA, IgnoreArg import gevent from gevent.socket import create_connection from gevent.ssl import SSLSocket from slimta.edge.smtp import SmtpEdge, SmtpSession from slimta.envelope import Envelope from slimta.queue import QueueError from slimta.smtp.reply import Reply from slimta.smtp import ConnectionLost, MessageTooBig from slimta.smtp.client import Client class TestEdgeSmtp(MoxTestBase, unittest.TestCase): def test_call_validator(self): mock = self.mox.CreateMockAnything() mock.__call__(IsA(SmtpSession)).AndReturn(mock) mock.handle_t
est('arg') self.mox.ReplayAll() h = SmtpSession(None, mock, None) h._call_validator('test', 'arg') def test_protocol_attribute(self):
h = SmtpSession(None, None, None) self.assertEqual('SMTP', h.protocol) h.extended_smtp = True self.assertEqual('ESMTP', h.protocol) h.security = 'TLS' self.assertEqual('ESMTPS', h.protocol) h.auth = 'test' self.assertEqual('ESMTPSA', h.protocol) def test_simple_handshake(self): mock = self.mox.CreateMockAnything() mock.__call__(IsA(SmtpSession)).AndReturn(mock) mock.handle_banner(IsA(Reply), ('127.0.0.1', 0)) mock.handle_helo(IsA(Reply), 'there') self.mox.ReplayAll() h = SmtpSession(('127.0.0.1', 0), mock, None) h.BANNER_(Reply('220')) h.HELO(Reply('250'), 'there') self.assertEqual('there', h.ehlo_as) self.assertFalse(h.extended_smtp) def test_extended_handshake(self): creds = self.mox.CreateMockAnything() creds.authcid = 'testuser' creds.authzid = 'testzid' ssl_sock = self.mox.CreateMock(SSLSocket) mock = self.mox.CreateMockAnything() mock.__call__(IsA(SmtpSession)).AndReturn(mock) mock.handle_banner(IsA(Reply), ('127.0.0.1', 0)) mock.handle_ehlo(IsA(Reply), 'there') mock.handle_tls() mock.handle_tls2(IsA(SSLSocket)) mock.handle_auth(IsA(Reply), creds) self.mox.ReplayAll() h = SmtpSession(('127.0.0.1', 0), mock, None) h.BANNER_(Reply('220')) h.EHLO(Reply('250'), 'there') h.TLSHANDSHAKE2(ssl_sock) h.AUTH(Reply('235'), creds) self.assertEqual('there', h.ehlo_as) self.assertTrue(h.extended_smtp) self.assertEqual('TLS', h.security) self.assertEqual(('testuser', 'testzid'), h.auth) self.assertEqual('ESMTPSA', h.protocol) def test_mail_rcpt_data_rset(self): mock = self.mox.CreateMockAnything() mock.__call__(IsA(SmtpSession)).AndReturn(mock) mock.handle_mail(IsA(Reply), 'sender@example.com', {}) mock.handle_rcpt(IsA(Reply), 'rcpt@example.com', {}) mock.handle_data(IsA(Reply)) self.mox.ReplayAll() h = SmtpSession(None, mock, None) h.MAIL(Reply('250'), 'sender@example.com', {}) h.RCPT(Reply('250'), 'rcpt@example.com', {}) self.assertEqual('sender@example.com', h.envelope.sender) self.assertEqual(['rcpt@example.com'], h.envelope.recipients) h.DATA(Reply('550')) h.RSET(Reply('250')) self.assertFalse(h.envelope) def test_have_data_errors(self): h = SmtpSession(None, None, None) reply = Reply('250') h.HAVE_DATA(reply, None, MessageTooBig()) self.assertEqual('552', reply.code) with self.assertRaises(ValueError): h.HAVE_DATA(reply, None, ValueError()) def test_have_data(self): class PtrLookup(object): def finish(self, *args): return 'localhost' env = Envelope() handoff = self.mox.CreateMockAnything() handoff(env).AndReturn([(env, 'testid')]) self.mox.ReplayAll() h = SmtpSession(('127.0.0.1', 0), None, handoff) h.envelope = env h._ptr_lookup = PtrLookup() reply = Reply('250') h.HAVE_DATA(reply, b'', None) self.assertEqual('250', reply.code) self.assertEqual('2.6.0 Message accepted for delivery', reply.message) self.assertEqual('localhost', env.client['host']) def test_have_data_queueerror(self): env = Envelope() handoff = self.mox.CreateMockAnything() handoff(env).AndReturn([(env, QueueError())]) self.mox.ReplayAll() h = SmtpSession(('127.0.0.1', 0), None, handoff) h.envelope = env reply = Reply('250') h.HAVE_DATA(reply, b'', None) self.assertEqual('451', reply.code) self.assertEqual('4.3.0 Error queuing message', reply.message) def test_smtp_edge(self): queue = self.mox.CreateMockAnything() queue.enqueue(IsA(Envelope)).AndReturn([(Envelope(), 'testid')]) self.mox.ReplayAll() server = SmtpEdge(('127.0.0.1', 0), queue) server.start() gevent.sleep(0) client_sock = create_connection(server.server.address) client = Client(client_sock) client.get_banner() client.ehlo('there') 
client.mailfrom('sender@example.com') client.rcptto('rcpt@example.com') client.data() client.send_empty_data() client.quit() client_sock.close() # vim:et:fdm=marker:sts=4:sw=4:ts=4
bolt-project/bolt
test/spark/test_spark_basic.py
Python
apache-2.0
4,090
0.003912
from numpy import arange, dtype, int64, float64 from bolt import array, ones from bolt.utils import allclose def test_shape(sc): x = arange(2*3).reshape((2, 3)) b = array(x, sc) assert b.shape == x.shape x = arange(2*3*4).reshape((2, 3, 4)) b = array(x, sc) assert b.shape == x.shape def test_size(sc): x = arange(2*3*4).reshape((2, 3, 4)) b = array(x, sc, axis=0) assert b.size == x.size def test_split(sc): x = arange(2*3*4).reshape((2, 3, 4)) b = array(x, sc, axis=0) assert b.split == 1 b = array(x, sc, axis=(0, 1)) assert b.split == 2 def test_ndim(sc): x = arange(2**5).reshape(2, 2, 2, 2, 2) b = array(x, sc, axis=(0, 1, 2)) assert b.keys.ndim == 3 assert b.values.ndim == 2 assert b.ndim == 5 def test_mask(sc): x = arange(2*3*4).reshape((2, 3, 4)) b = array(x, sc, axis=0) assert b.mask == (1, 0, 0) b = array(x, sc, axis=(0, 1)) assert b.mask == (1, 1, 0) b = array(x, sc, axis=(0, 1, 2)) assert b.mask == (1, 1, 1) def test_cache(sc): x = arange(2*3).reshape((2, 3)) b = array(x, sc) b.cache() assert b._rdd.is_cached b.unpersist() assert not b._rdd.is_cached def test_repartition(sc): x = arange(2 * 3).reshape((2, 3)) b = array(x, sc) assert b._ordered b = b.repartition(10) assert not b._ordered assert b._rdd.getNumPartitions() == 10 def test_concatenate(sc): from numpy import concatenate x = arange(2*3).reshape((2, 3)) b = array(x, sc) c = array(x) assert allclose(b.concatenate(x).toarray(), concatenate((x, x))) assert allclose(b.concatenate(b).toarray(), concatenate((x, x))) assert allclose(b.concatenate(c).toarray(), concatenate((x, x))) def test_dtype(sc): a = arange(2**8, dtype=int64) b = array(a, sc, dtype=int64) assert a.dtype == b.dtype assert b.dtype == dtype(int64) dtypes = b._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(int64) a = arange(2.0**8) b = array(a, sc) assert a.dtype == b.dtype assert b.dtype == dtype(float64) dtypes = b._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(float64) a = arange(2**8) b = array(a, sc) assert a.dtype == b.dtype assert b.dtype == dtype(int64) dtypes = b._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(int64) from numpy import ones as npones a = npones(2**8, dtype=bool) b = array(a, sc) assert a.dtype == b.dtype assert b.dtype == dtype(bool) dtypes = b._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(bool) b = ones(2**8, sc) assert b.dtype == dtype(float64) dtypes = b._rdd.map(la
mbda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(float64) b = ones(2**8, sc, dtype=bool) assert b.dtype == dtype(bool) dtypes = b._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(bool) def test_astype(sc): from numpy
import ones as npones a = npones(2**8, dtype=int64) b = array(a, sc, dtype=int64) c = b.astype(bool) assert c.dtype == dtype(bool) dtypes = c._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(bool) b = ones((100, 100), sc, dtype=int64) c = b.astype(bool) assert c.dtype == dtype(bool) dtypes = c._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(bool) b = ones((100, 100), sc) c = b.astype(bool) assert c.dtype == dtype(bool) dtypes = c._rdd.map(lambda x: x[1].dtype).collect() for dt in dtypes: assert dt == dtype(bool) def test_clip(sc): from numpy import arange a = arange(4).reshape(2, 2) b = array(a, sc) assert allclose(b.clip(0).toarray(), a.clip(0)) assert allclose(b.clip(2).toarray(), a.clip(2)) assert allclose(b.clip(1, 2).toarray(), a.clip(1, 2))
codefisher/web_games
million/migrations/0002_auto_20150622_0949.py
Python
mit
2,654
0.00113
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('million', '0001_initial'), ] operations = [ migrations.AlterModelOptions( name='game', options={'verbose_name': 'Game', 'verbose_name_plural': 'Games'}, ), migrations.AlterField( model_name='game', name='name', field=models.CharField(max_length=50, verbose_name='Name'), ), migrations.AlterField( model_name='question', name='answer_four', field=models.CharField(max_length=50, verbose_name='Fourth Answer'), ), migrations.AlterField( model_name='question', name='answer_four_correct', field=models.BooleanField(verbose_name='Fourth Answer is Correct'), ), migrations.AlterField( model_name='question', name='answer_one', field=models.CharField(max_length=50, verbose_name='First Answer'), ), migrations.AlterField( model_name='question', name='answer_one_correct', field=models.BooleanField(verbose_name='First Answer is Correct'), ), migrations.AlterField( model_name='question', name='answer_three', field=models.CharField(max_length=50, verbose_name='Third Answer'), ), migrations.AlterField( model_name='question', name='answer_three_correct', field=models.BooleanField(verbose_name='Third Answer is Co
rrect'), ), migrations.AlterField( model_name='question', name='answer_two', field=models.CharField(max_length=50, verbose_name='Second Answer'), ), migrations.AlterField( model_name='question', name='answer_two_correct', field=models.BooleanField(verbose_name='Second Answer is Cor
rect'), ), migrations.AlterField( model_name='question', name='game', field=models.ForeignKey(verbose_name='Game', to='million.Game', on_delete=models.CASCADE), ), migrations.AlterField( model_name='question', name='question', field=models.CharField(max_length=255, verbose_name='Question'), ), migrations.AlterField( model_name='question', name='value', field=models.IntegerField(verbose_name='Value'), ), ]
samgoodgame/sf_crime
iterations/spark-sklearn/spark_sklearn_test.py
Python
mit
1,383
0.004345
## Non-optimized: # # from sklearn import grid_search, datasets # from sklearn.ensemble import RandomForestClassifier # from sklearn.grid_search import GridSearchCV # # # digits = datasets.load_digits() # X, y = digits.data, digits.target # param_grid = {"max_depth": [3, None], # "max_features": [1, 3, 10], # "min_samples_split": [2, 3, 10], # "min_samples_leaf": [1, 3, 10], # "bootstrap": [True, False], # "criterion": ["gini", "entropy"], # "n_es
timators": [10, 20, 40, 80]} # gs = grid_search.GridSearchCV(RandomForestClassifier(), param_grid=param_grid) # print(gs.fit(X, y)) ## Spark-optimized: print("Spark-optimized grid search:") from sklearn import grid_search, datasets from sklearn.ensemble import RandomForestClassifier # Use spark_sklearn’s grid search instead: from spark_sklearn import GridSearchCV digits = datasets.load_digits() X, y = digits.data, digits.target param_grid =
{"max_depth": [3, None], "max_features": [1, 3, 10], "min_samples_split": [2, 3, 10], "min_samples_leaf": [1, 3, 10], "bootstrap": [True, False], "criterion": ["gini", "entropy"], "n_estimators": [10, 20, 40, 80]} gs = grid_search.GridSearchCV(RandomForestClassifier(), param_grid=param_grid) print(gs.fit(X, y))
stonewell/pymterm
pymterm/term_pylibui/main.py
Python
mit
10,403
0.007402
#coding=utf-8 import json import logging import os from pylibui.core import App from pylibui.controls import Window, Tab, OpenGLArea import cap.cap_manager from session import create_session from term import TextAttribute, TextMode, reserve import term.term_keyboard from term.terminal_gui import TerminalGUI from term.terminal_widget import TerminalWidget from term_menu import basic_menus from file_transfer import FileTransferDialog, FileTransferProgressDialog padding = 10 class TermWindow(Window): def __init__(self, *args, **kwargs): Window.__init__(self, *args, **kwargs) class TerminalApp(App): def __init__(self, cfg): Application.__init__(self) self.cfg = cfg self.current_tab = None self.conn_history = [] self.menus = basic_menus(self.cfg.get_session_names()) self._cls_view = self._select_view_render() def _try_import_render(self, render): if render == 'cairo': try: from glview_pycairo import TerminalPyGUIGLView as TerminalPyGUIView logging.getLogger('term_pylibui').info('using opengl cairo/pango render') return TerminalPyGUIView except: logging.getLogger('term_pylibui').exception('failed load opengl cairo render') return None if render == 'pygame': try: from glview_pygame import TerminalPyGUIGLView as TerminalPyGUIView logging.getLogger('term_pylibui').info('using opengl pygame render') return TerminalPyGUIView except: logging.getLogger('term_pylibui').exception('failed load opengl pygame render') return None if render == 'native': try: from view import TerminalPyGUIView as TerminalPyGUIView logging.getLogger('term_pylibui').info('using native pygui render') return TerminalPyGUIView except: return None logging.getLogger('term_pylibui').info('unsupported render:{}'.format(render)) return None def _select_view_render(self): render = self.cfg.render _cls_view = None if render and render in self.cfg.gui_renders: _cls_view = self._try_import_render(render) if _cls_view: return _cls_view for render in self.cfg.gui_renders: _cls_view = self._try_import_render(render) if _cls_view: return _cls_view logging.getLogger('term_pylibui').error("unable to find a valid render") stop_alert("unable to find a valid render, supported render:{}".format(self.cfg.renders)) def get_application_name(self): return 'Multi-Tab Terminal Emulator in Python & pyGUI' def _create_view(self): return self._cls_view() def connect_to(self, conn_str = None, port = None, session_name = None, win = None): cfg = self.cfg.clone() if conn_str: cfg.set_conn_str(conn_str) elif session_name: cfg.session_name = session_name cfg.config_session() if port: cfg.port = port doc = self.make_new_document() doc.new_contents() doc.cfg = cfg if win: view = self._create_view(doc) self._create_new_tab(win, view) else: self.make_window(doc) def create_terminal(self, cfg): return TerminalPyGUI(cfg) def start(self): self.run() def open_app(self): self.connect_to() def open_window_cmd(self): self.connect_to() def make_window(self
, document): view = self._create_view(document) w, h = view.get_prefered_size() win = TermWindow(bounds = (0, 0, w + 10, h + 50), document = document) win.tabview = tabview = TermTabView() win.auto_position = False self._create_new_tab(win, view) win.place(tabview, left = 0, top = 0, right = 0, bottom = 0, sticky = 'nsew') win.c
enter() win.show() view.become_target() def _remove_session_tab(self, win, view): selected_index = win.tabview.selected_index count = len(win.tabview.items) if selected_index < 0 or selected_index >= count: return win.tabview.remove_item(view) count = len(win.tabview.items) win.tabview.selected_index = -1 if count == 0: win.close_cmd() application()._check_for_no_windows() elif selected_index < count and selected_index >= 0: win.tabview.selected_index = selected_index else: win.tabview.selected_index = count - 1 def _on_session_stop(self, session): if not session.window or not session.term_widget: logging.getLogger('term_pylibui').warn('invalid session, window:{}, term_widget:{}'.format(session.window, session.term_widget)) return win = session.window view = session.term_widget self._remove_session_tab(win, view) def _create_new_tab(self, win, view): win.tabview.add_item(view) cfg = view.model.cfg session = create_session(cfg, self.create_terminal(cfg)) session.on_session_stop = self._on_session_stop session.term_widget = view session.window = win session.terminal.term_widget = view view.session = session view.tab_width = session.get_tab_width() self._session_task = Task(session.start, .1) #session.start() win.tabview.selected_index = len(win.tabview.items) - 1 def make_document(self, fileref): doc = TerminalPyGUIDoc() doc.cfg = self.cfg.clone() doc.title = 'Multi-Tab Terminal Emulator in Python & pyGUI' return doc def new_window_cmd(self): self.connect_to() def next_tab_cmd(self): self._change_cur_tab(1) def prev_tab_cmd(self): self._change_cur_tab(-1) def _change_cur_tab(self, step): win = self.get_target_window() tab_view = win.tabview count = len(tab_view.items) if count == 0: return selected_index = 0 if tab_view.selected_index < 0 else tab_view.selected_index new_index = selected_index + step if new_index < 0: new_index = count - 1 elif new_index >= count: new_index = 0 if new_index != selected_index: tab_view.selected_index = new_index def close_tab_cmd(self): win = self.get_target_window() tab_view = win.tabview if tab_view.selected_index < 0: return view = tab_view.items[tab_view.selected_index] if view.session.stopped: self._remove_session_tab(win, view) else: view.session.stop() def new_cmd(self): self.connect_to(win = self.get_target_window()) def open_session_cmd(self, *args): index, = args self.connect_to(session_name=self.cfg.get_session_names()[index], win=self.get_target_window()) def transfer_file_cmd(self): win = self.get_target_window() tab_view = win.tabview if tab_view.selected_index < 0: return view = tab_view.items[tab_view.selected_index] dlog = FileTransferDialog(view.session) dlog.present() class TerminalPyGUIDoc(Document): def new_contents(self): pass def read_contents(self, file): pass def write_contents(self, file): pass class TermTabView(TabView): def __init__(self, *args, **kwargs): TabView.__init__(self, *args, **kwargs) self._generic_tabbing = False def tab_changed(self, tab_index): if tab_index >= 0 and tab_index < len(self.items): v = self.items[tab_index] self.__focus_task = Task(lambda:v.become_target(), .01) class TerminalPyGUI(TerminalGUI): def __init__(self, cfg): super(TerminalPyGUI, self).__init__(cfg) def prompt_log
openstack/nova
nova/virt/powervm/disk/localdisk.py
Python
apache-2.0
8,561
0.000117
# Copyright 2013 OpenStack Foundation # Copyright 2015, 2018 IBM Corp. # # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import oslo_log.log as logging from pypowervm import const as pvm_const from pypowervm.tasks import scsi_mapper as tsk_map from pypowervm.tasks import storage as tsk_stg from pypowervm.wrappers import storage as pvm_stg from pypowervm.wrappers import virtual_io_server as pvm_vios from nova import conf from nova import exception from nova.image import glance from nova.virt.powervm.disk import driver as disk_dvr from nova.virt.powervm import vm LOG = logging.getLogger(__name__) CONF = conf.CONF IMAGE_API = glance.API() class LocalStorage(disk_dvr.DiskAdapter): def __init__(self, adapter, host_uuid): super(LocalStorage, self).__init__(adapter, host_uuid) self.capabilities = { 'shared_storage': False, 'has_imagecache': False, # NOTE(efried): 'snapshot' capability set dynamically below. } # Query to get the Volume Group UUID if not CONF.powervm.volume_group_name: raise exception.OptRequiredIfOtherOptValue( if_opt='disk_driver', if_value='localdisk', then_opt='volume_group_name') self.vg_name = CONF.powervm.volume_group_name vios_w, vg_w = tsk_stg.find_vg(adapter, self.vg_name) self._vios_uuid = vios_w.uuid self.vg_uuid = vg_w.uuid # Set the 'snapshot' capability dynamically. If we're hosting I/O on # the management partition, we can snapshot. If we're hosting I/O on # traditional VIOS, we are limited by the fact that a VSCSI device # can't be mapped to two partitions (the VIOS and the management) at # once. self.capabilities['snapshot'] = self.mp_uuid == self._vios_uuid LOG.info("Local Storage driver initialized: volume group: '%s'", self.vg_name) @property def _vios_uuids(self): """List the UUIDs of the Virtual I/O Servers hosting the storage. For localdisk, there's only one. """ return [self._vios_uuid] @staticmethod def _disk_match_func(disk_type, instance): """Return a matching function to locate the disk for an instance. :param disk_type: One of the DiskType enum values. :param instance: The instance whose disk is to be found. :return: Callable suitable for the match_func parameter of the pypowervm.tasks.scsi_mapper.find_maps method. """ disk_name = LocalStorage._get_disk_name( disk_type, instance, short=True) return tsk_map.gen_match_func(pvm_stg.VDisk, names=[disk_name]) @property def capacity(self): """Capacity of the storage in gigabytes.""" vg_wrap = self._get_vg_wrap() return float(vg_wrap.capacity) @property def capacity_used(self): """Capacity of the storage in gigabytes that is used.""" vg_wrap = self._get_vg_wrap() # Subtract available from capacity return float(vg_wrap.capacity) - float(vg_wrap.available_size) def delete_disks(self, storage_elems): """Removes the specified disks. :param storage_elems: A list of the storage elements that are to be deleted. Derived from the return value from detach_disk. """ # All of localdisk is done against the volume group. So reload # that (to get new etag) and then update against it. 
tsk_stg.rm_vg_storage(self._get_vg_wrap(), vdisks=storage_elems) def detach_disk(self, instance): """Detaches the storage adapters from the image disk. :param instance: Instance to disconnect the image for. :return: A list of all the backing storage elements that were disconnected from the I/O Server and VM. """ lpar_uuid = vm.get_pvm_uuid(instance) # Build the match function match_func = tsk_map.gen_match_func(pvm_stg.VDisk) vios_w = pvm_vios.VIOS.get( self._adapter, uuid=self._vios_uuid, xag=[pvm_const.XAG.VIO_SMAP]) # Remove the mappings. mappings = tsk_map.remove_maps( vios_w, lpar_uuid, match_func=match_func) # Update the VIOS with the removed mappings. vios_w.update() return [x.backing_storage for x in mappings] def disconnect_disk_from_mgmt(self, vios_uuid, disk_name): """Disconnect a disk from the management partition. :param vios_uuid: The UUID of the Virtual I/O Server serving the mapping. :param disk_name: The name of the disk to unmap. """ tsk_map.remove_vdisk_mapping(self._adapter, vios_uuid, self.mp_uuid, disk_names=[disk_name]) LOG.info("Unmapped boot disk %(disk_name)s from the management " "partition from Virtual I/O Server %(vios_name)s.", {'disk_name': disk_name, 'mp_uuid': self.mp_uuid, 'vios_name': vios_uuid}) def create_disk_from_image(self, context, instance, image_meta): """Creates a disk and copies the specified image to it. Cleans up the created disk if an error occurs. :param context: nova context used to retrieve image from glance :param instance: instance to create the disk for. :param image_meta: The metadata of the image of the instance. :return: The backing pypowervm storage object that was created. """ LOG.info('Create disk.', instance=instance) return self._upload_image(context, instance, image_meta) # TODO(esberglu): Copy vdisk when implementing image cache. def _upload_image(self, context, instance, image_meta): """Upload a new image. :param context: Nova context used to retrieve image from glance. :param image_meta: The metadata of the image of the instance. :return: The virtual disk containing the image. """ img_name = self._get_disk_name(disk_dvr.DiskType.BOOT, instance, short=True) # TODO(esberglu) Add check for cached image when adding imagecache. return
ts
k_stg.upload_new_vdisk( self._adapter, self._vios_uuid, self.vg_uuid, disk_dvr.IterableToFileAdapter( IMAGE_API.download(context, image_meta.id)), img_name, image_meta.size, d_size=image_meta.size, upload_type=tsk_stg.UploadType.IO_STREAM, file_format=image_meta.disk_format)[0] def attach_disk(self, instance, disk_info, stg_ftsk): """Attaches the disk image to the Virtual Machine. :param instance: nova instance to connect the disk to. :param disk_info: The pypowervm storage element returned from create_disk_from_image. Ex. VOptMedia, VDisk, LU, or PV. :param stg_ftsk: The pypowervm transaction FeedTask for the I/O Operations. The Virtual I/O Server mapping updates will be added to the FeedTask. This defers the updates to some later point in time. """ lpar_uuid = vm.get_pvm_uuid(instance) def add_func(vios_w): LOG.info("Adding logical volume disk connection to VIOS %(vios)s.", {'vios': vios_w.name}, instance=instance) mapping = tsk_map.build_vscsi_mapping( self._
idies/pyJHTDB
tests.py
Python
apache-2.0
1,897
0.01845
######################################################################## # # Copyright 2014 Johns Hopkins University # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtai
n a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under t
he License. # # Contact: turbulence@pha.jhu.edu # Website: http://turbulence.pha.jhu.edu/ # ######################################################################## import sys sys.path[0] = '' import argparse parser = argparse.ArgumentParser( description = 'Test pyJHTDB installation.') parser.add_argument( '-p', '--plain', dest = 'plain', action = 'store_true', help = 'run plain test, i.e. turbc clone.') parser.add_argument( '--grid-splines', dest = 'grid_splines', action = 'store_true', help = 'run basic grid spline test.') parser.add_argument( '--cutout', dest = 'cutout', action = 'store_true', help = 'run cutout test.') parser.add_argument( '--misc', dest = 'misc', action = 'store_true', help = 'run misc test.') parser.add_argument( '--interpolator', dest = 'interpolator', action = 'store_true', help = 'run interpolator test.') opt = parser.parse_args() import pyJHTDB if opt.plain: pyJHTDB.test_plain() if opt.grid_splines: pyJHTDB.test_gs() if opt.interpolator: pyJHTDB.test_interpolator() if opt.misc and pyJHTDB.found_matplotlib: pyJHTDB.test_misc() if opt.cutout and pyJHTDB.found_h5py: pyJHTDB.test_cutout()
magenta/magenta
magenta/pipelines/lead_sheet_pipelines.py
Python
apache-2.0
7,193
0.003337
# Copyright 2022 The Magenta Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Data processing pipelines for lead sheets.""" import copy from magenta.pipelines import chord_pipelines from magenta.pipelines import melody_pipelines from magenta.pipelines import pipeline from magenta.pipelines import statistics from note_seq import chord_symbols_lib from note_seq import chords_lib from note_seq import events_lib from note_seq import lead_sheets_lib from note_seq import LeadSheet from note_seq import sequences_lib from note_seq.protobuf import music_pb2 import tensorflow.compat.v1 as tf class LeadSheetExtractor(pipeline.Pipeline): """Extracts lead sheet fragments from a quantized NoteSequence.""" def __init__(self, min_bars=7, max_steps=512, min_unique_pitches=5, gap_bars=1.0, ignore_polyphonic_notes=False, filter_drums=True, require_chords=True, all_transpositions=True, name=None): super(LeadSheetExtractor, self).__init__( input_type=music_pb2.NoteSequence, output_type=lead_sheets_lib.LeadSheet, name=name) self._min_bars = min_bars self._max_steps = max_steps self._min_unique_pitches = min_unique_pitches self._gap_bars = gap_bars self._ignore_polyphonic_notes = ignore_polyphonic_notes self._filter_drums = filter_drums self._require_chords = require_chords self._all_transpositions = all_transpositions def transform(self, input_object): quantized_sequence = input_object try: lead_sheets, stats = extract_lead_sheet_fragments( quantized_sequence, min_bars=self._min_bars, max_steps_truncate=self._max_steps, min_unique_pitches=self._min_unique_pitches, gap_bars=self._gap_bars, ignore_polyphonic_notes=self._ignore_polyphonic_notes, filter_drums=self._filter_drums, require_chords=self._require_chords, all_transpositions=self._all_transpositions) except events_lib.NonIntegerStepsPerBarError as detail: tf.logging.warning('Skipped sequence: %s', detail) lead_sheets = [] stats = [statistics.Counter('non_integer_steps_per_bar', 1)] except chord_symbols_lib.ChordSymbolError as detail: tf.logging.warning('Skipped sequence: %s', detail) lead_sheets = [] stats = [statistics.Counter('chord_symbol_exception', 1)] self._set_stats(stats) return lead_sheets def extract_lead_sheet_fragments(quantized_sequence, search_start_step=0, min_bars=7, max_steps_truncate=None, max_steps_discard=None, gap_bars=1.0, min_unique_pitches=5, ignore_polyphonic_notes=True, pad_end=False, filter_drums=True, require_chords=False, all_transpositions=False): """Extracts a list of lead sheet fragments from a quantized NoteSequence. This function first extracts melodies using melodies_lib.extract_melodies, then extracts the chords underlying each melod
y using chords_lib.extract_chords_for_melodies. Args: quantized_sequence: A quantized NoteSequence object. search_start_step: Start searching for a melody at this time step. Assumed to be the first step of a bar. min_bars: Minimum length of melodies in number of bars. Shorter melodies are discarded. max_steps_truncate: Maximum number of steps in extracted melodies. If defined, longer melodies are truncated to this threshold. If pad_end is also True
, melodies will be truncated to the end of the last bar below this threshold. max_steps_discard: Maximum number of steps in extracted melodies. If defined, longer melodies are discarded. gap_bars: A melody comes to an end when this number of bars (measures) of silence is encountered. min_unique_pitches: Minimum number of unique notes with octave equivalence. Melodies with too few unique notes are discarded. ignore_polyphonic_notes: If True, melodies will be extracted from `quantized_sequence` tracks that contain polyphony (notes start at the same time). If False, tracks with polyphony will be ignored. pad_end: If True, the end of the melody will be padded with NO_EVENTs so that it will end at a bar boundary. filter_drums: If True, notes for which `is_drum` is True will be ignored. require_chords: If True, only return lead sheets that have at least one chord other than NO_CHORD. If False, lead sheets with only melody will also be returned. all_transpositions: If True, also transpose each lead sheet fragment into all 12 keys. Returns: A python list of LeadSheet instances. Raises: NonIntegerStepsPerBarError: If `quantized_sequence`'s bar length (derived from its time signature) is not an integer number of time steps. """ sequences_lib.assert_is_relative_quantized_sequence(quantized_sequence) stats = dict([('empty_chord_progressions', statistics.Counter('empty_chord_progressions'))]) melodies, melody_stats = melody_pipelines.extract_melodies( quantized_sequence, search_start_step=search_start_step, min_bars=min_bars, max_steps_truncate=max_steps_truncate, max_steps_discard=max_steps_discard, gap_bars=gap_bars, min_unique_pitches=min_unique_pitches, ignore_polyphonic_notes=ignore_polyphonic_notes, pad_end=pad_end, filter_drums=filter_drums) chord_progressions, chord_stats = chord_pipelines.extract_chords_for_melodies( quantized_sequence, melodies) lead_sheets = [] for melody, chords in zip(melodies, chord_progressions): # If `chords` is None, it's because a chord progression could not be # extracted for this particular melody. if chords is not None: if require_chords and all(chord == chords_lib.NO_CHORD for chord in chords): stats['empty_chord_progressions'].increment() else: lead_sheet = LeadSheet(melody, chords) if all_transpositions: for amount in range(-6, 6): transposed_lead_sheet = copy.deepcopy(lead_sheet) transposed_lead_sheet.transpose(amount) lead_sheets.append(transposed_lead_sheet) else: lead_sheets.append(lead_sheet) return lead_sheets, list(stats.values()) + melody_stats + chord_stats
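The all_transpositions option above fans every extracted fragment out over all 12 keys. The sketch below is a minimal, self-contained illustration of that fan-out pattern only; ToyMelody is a hypothetical stand-in for note_seq's LeadSheet, not the real class.

import copy

class ToyMelody:
    def __init__(self, pitches):
        self.pitches = list(pitches)

    def transpose(self, amount):
        # Shift every pitch by `amount` semitones, in place.
        self.pitches = [p + amount for p in self.pitches]

fragments = [ToyMelody([60, 62, 64]), ToyMelody([67, 69])]
transposed = []
for fragment in fragments:
    for amount in range(-6, 6):            # offsets -6..+5: 12 keys per fragment
        shifted = copy.deepcopy(fragment)  # keep the original fragment untouched
        shifted.transpose(amount)
        transposed.append(shifted)

print(len(transposed))  # 2 fragments * 12 keys = 24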
alfredo/django-simpleblocks
src/simpleblocks/tests.py
Python
bsd-3-clause
1,975
0.000506
from django.contrib.sites.models import Site from django.db.utils import IntegrityError from django.test import TestCase from django.template import Context, Template from simpleblocks.models import SimpleBlock def render_to_string(template, data): t = Template(template) c = Context(data) return t.render(c) class SimpleBlocksTest(TestCase): def setUp(self): """Actions to be executed before each test""" self.body = 'Test Body' self.site = Site.objects.get_current() self.template = '{% load simpleblocks_tags %}{% get_block "test" %}' self.data = {} def tearDown(self): """Actions to be executed after each test""" SimpleBlock.objects.all().delete() def create_block(self, key='test'):
"""Helper to create block""" data = {'body': self.body, 'key': key, 'site': self.site} return SimpleBlock.objects.create(**data) def testCreateBlock(self): """Test block creation""" data = {'body': self.body, 'key': 'test', 'site': self.site} block = SimpleBlock.objects.create(**data) assert block, 'Failed to create block' d
ef testRenderedStatic(self): """Test the tag with a static key""" self.create_block() rendered = render_to_string(self.template, self.data) self.assertEquals(rendered, self.body) def testRenderedVariable(self): """Test the tag with a variable key""" self.create_block() data = {'test_variable': 'test'} template = '{% load simpleblocks_tags %}{% get_block test_variable %}' rendered = render_to_string(template, data) self.assertEquals(rendered, self.body) def testFailedDuplicated(self): """Test failure upon duplicated key and site""" self.create_block() with self.assertRaises(IntegrityError): self.create_block()
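The tests above rely on rendering a template string directly with Template and Context. The snippet below is a standalone sketch of that harness under the assumption that Django is installed; it configures a minimal in-memory settings module and does not load the simpleblocks tag library itself.

import django
from django.conf import settings

settings.configure(
    TEMPLATES=[{"BACKEND": "django.template.backends.django.DjangoTemplates"}],
)
django.setup()

from django.template import Context, Template

def render_to_string(template, data):
    # Same helper as in the test module: compile the string, render with a Context.
    return Template(template).render(Context(data))

print(render_to_string("block: {{ key }}", {"key": "test"}))  # -> block: test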
vishnu2kmohan/dcos-commons
__init__.py
Python
apache-2.0
191
0
import sys import os.path # Add /testing/ to PYTHONPATH: this_file_dir = os.path.dirname(os.path.abspath(__file__)
) sys.path.append(os.path.normpath(os.path.join(this_file_
dir, 'testing')))
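The tiny __init__.py above boils down to one pattern: locate the directory of the current file and append a sibling testing/ folder to sys.path so helper modules placed there become importable. A readable restatement, with the folder name kept as in the original and the imported module name purely hypothetical:

import os.path
import sys

this_file_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.normpath(os.path.join(this_file_dir, 'testing')))

# From here on, modules that live under testing/ can be imported directly,
# e.g. `import some_test_helper`, assuming such a module exists there.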
bigzz/autotest
server/autoserv.py
Python
gpl-2.0
8,355
0.001317
""" Library for autotest-remote usage. """ import sys import os import re import traceback import signal import time import logging import getpass try: import autotest.common as common except ImportError: import common from autotest.client.shared.settings import settings require_atfork = settings.get_value('AUTOSERV', 'require_atfork_module', type=bool, default=True) try: import atfork atfork.monkeypatch_os_fork_functions() import atfork.stdlib_fixer # Fix the Python standard library for threading+fork safety with its # internal locks. http://code.google.com/p/python-atfork/ import warnings warnings.filterwarnings('ignore', 'logging module already imported') try: atfork.stdlib_fixer.fix_logging_module() except Exception: pass except ImportError, e: from autotest.client.shared.settings import settings if settings.get_value('AUTOSERV', 'require_atfork_module', type=bool, default=False): print >>sys.stderr, 'Please run utils/build_externals.py' print e sys.exit(1) from autotest.server import server_logging_config from autotest.server import server_job, autoserv_parser from autotest.server import autotest_remote from autotest.client.shared import pidfile, logging_manager def run_autoserv(pid_file_manager, results, parser): # send stdin to /dev/null dev_null = os.open(os.devnull, os.O_RDONLY) os.dup2(dev_null, sys.stdin.fileno()) os.close(dev_null) # Create separate process group os.setpgrp() # Implement SIGTERM handler def handle_sigterm(signum, frame): if pid_file_manager: pid_file_manager.close_file(1, signal.SIGTERM) os.killpg(os.getpgrp(), signal.SIGKILL) # Set signal handler signal.signal(signal.SIGTERM, handle_sigterm) # Ignore SIGTTOU's generated by output from forked children. signal.signal(signal.SIGTTOU, signal.SIG_IGN) # Server side tests that call shell scripts often depend on $USER being set # but depending on how you launch your autotest scheduler it may not be set. 
os.environ['USER'] = getpass.getuser() if parser.options.machines: machines = parser.options.machines.replace(',', ' ').strip().split() else: machines = [] machines_file = parser.options.machines_file label = parser.options.label group_name = parser.options.group_name user = parser.options.user client = parser.options.client server = parser.options.server install_before = parser.options.install_before install_after = parser.options.install_after verify = parser.options.verify repair = parser.options.repair cleanup = parser.options.cleanup no_tee = parser.options.no_tee parse_job = parser.options.parse_job execution_tag = parser.options.execution_tag if not execution_tag: execution_tag = parse_job host_protection = parser.options.host_protection ssh_user = parser.options.ssh_user ssh_port = parser.options.ssh_port ssh_pass = parser.options.ssh_pass collect_crashinfo = parser.options.collect_crashinfo control_filename = parser.options.control_filename # can't be both a client and a server side test if client and server: parser.parser.error("Can not specify a test as both server and client!") if len(parser.args) < 1 and not (verify or repair or cleanup or collect_crashinfo): parser.parser.error("Missing argument: control file") # We have a control file unless it's just a verify/repair/cleanup job if len(parser.args) > 0: control = parser.args[0] else: control = None if machines_file: machines = [] for m in open(machines_file, 'r').readlines(): # remove comments, spaces m = re.sub('#.*', '', m).strip() if m: machines.append(m) print "Read list of machines from file: %s" % machines_file print ','.join(machines) if machines: for machine in machines: if not machine or re.search('\s', machine): parser.parser.error("Invalid machine: %s" % str(machine)) machines = list(set(machines)) machines.sort() if group_name and len(machines) < 2: parser.parser.error("-G %r may only be supplied with more than one machine." % group_name) kwargs = {'group_name': group_name, 'tag': execution_tag} if control_filename: kwargs['control_filename'] = control_filename job = server_job.server_job(control, parser.args[1:], results, label, user, machines, client, parse_job, ssh_user, ssh_port, ssh_pass, **kwargs) job.logging.start_logging() job.init_parser() # perform checks job.precheck() # run the job exit_code = 0 try: try: if repair: job.repair(host_protection) elif verify: job.verify() else: job.run(cleanup, install_before, install_after, only_collect_crashinfo=collect_crashinfo) finally: while job.hosts: host = job.hosts.pop() host.close() except: exit_code = 1 traceback.print_exc() if pid_file_manager: pid_file_manager.num_tests_failed = job.num_tests_failed pid_file_manager.close_file(exit_code) job.cleanup_parser() sys.exit(exit_code) def main(): # grab the parser parser = autoserv_parser.autoserv_parser parser.parse_args() if len(sys.argv) == 1: parser.parser.print_help() sys.exit(1) if parser.options.no_logging: results = None else: output_dir = settings.get_value('COMMON', 'test_output_dir', def
ault="") results = parser.options.results if not results: results = 're
sults.' + time.strftime('%Y-%m-%d-%H.%M.%S') if output_dir: results = os.path.join(output_dir, results) results = os.path.abspath(results) resultdir_exists = False for filename in ('control.srv', 'status.log', '.autoserv_execute'): if os.path.exists(os.path.join(results, filename)): resultdir_exists = True if not parser.options.use_existing_results and resultdir_exists: error = "Error: results directory already exists: %s\n" % results sys.stderr.write(error) sys.exit(1) # Now that we certified that there's no leftover results dir from # previous jobs, lets create the result dir since the logging system # needs to create the log file in there. if not os.path.isdir(results): os.makedirs(results) logging_manager.configure_logging( server_logging_config.ServerLoggingConfig(), results_dir=results, use_console=not parser.options.no_tee, verbose=parser.options.verbose, no_console_prefix=parser.options.no_console_prefix) if results: logging.info("Results placed in %s" % results) # wait until now to perform this check, so it get properly logged if parser.options.use_existing_results and not resultdir_exists: logging.error("No existing results directory found: %s", results) sys.exit(1) if parser.options.write_pidfile: pid_file_manager = pidfile.PidFileManager(parser.options.pidfile_label, results) pid_file_manager.open_file() else: pid_file_manager = None autotest_remote.BaseAutotest.set_install_in_tmpdir( parser.options.install_in_tmpdir) exit_code = 0 try: try: run_autoserv(pid_file_manager, results, parser) except SystemExit, e: exit_code = e.code except: traceback.print_exc() # If we don't know what happened, we'll classify it as
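run_autoserv() above reads an optional machines file, strips comments and blank lines, rejects names containing whitespace, then de-duplicates and sorts the list. A self-contained sketch of just that parsing step (file name and contents are illustrative):

import re

def read_machines(path):
    machines = []
    with open(path) as fh:
        for line in fh:
            name = re.sub('#.*', '', line).strip()  # drop comments and spaces
            if name:
                machines.append(name)
    for machine in machines:
        if re.search(r'\s', machine):
            raise ValueError('Invalid machine: %r' % machine)
    return sorted(set(machines))

# A file containing "hostb  # lab rack 3", "hosta" and a duplicate "hosta"
# yields ['hosta', 'hostb'].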
nicococo/scRNA
scRNA/simulation.py
Python
mit
24,906
0.003132
import pdb import random import sys import numpy as np from sklearn.model_selection import train_test_split def recursive_dirichlet(cluster_spec, num_cells, dirichlet_parameter_cluster_size): num_clusters = len(cluster_spec) cluster_sizes = np.ones(num_clusters) while min(cluster_sizes) == 1: cluster_sizes = np.floor( np.random.dirichlet(np.ones(num_clusters) * dirichlet_parameter_cluster_size, size=None) * num_cells) # Because of the floor call we always have a little too few cells if np.sum(cluster_sizes) != num_cells: cluster_sizes[0] = cluster_sizes[0] - (np.sum(cluster_sizes) - num_cells) # if min(cluster_sizes)<=1: # pdb.set_trace() assert min(cluster_sizes) > 1 assert sum(cluster_sizes) == num_cells cluster_sizes = cluster_sizes.astype(int).tolist() for i, spec in enumerate(cluster_spec): if type(spec) is list: cluster_sizes[i] = recursive_dirichlet( spec, cluster_sizes[i], dirichlet_parameter_cluster_size ) return (cluster_sizes) def generate_de_logfc(ngenes, prop_genes_de, de_logfc): nde_genes = int(np.floor(ngenes * prop_genes_de)) up_down = np.sign(np.random.normal(size=nde_genes)) logfc = list(map((lambda x: x * de_logfc), up_down)) logfc = logfc + [0] * (ngenes - nde_genes) random.shuffle(logfc) return (logfc) def recursive_generate_counts(cluster_nums, num_genes, true_means, parent_logfc, nb_dispersion, min_prop_genes_de, max_prop_genes_de, mean_de_logfc, sd_de_logfc): cluster_counts = [0] * len(cluster_nums) for i, num_cells in enumerate(cluster_nums): # Set DE for th
is cluster or set of cluster
s prop_genes_de = np.random.uniform(min_prop_genes_de, max_prop_genes_de) de_logfc = np.random.normal(mean_de_logfc, sd_de_logfc) logfc = np.add( parent_logfc, generate_de_logfc(num_genes, prop_genes_de, de_logfc) ) if type(num_cells) is list: cluster_counts[i] = \ recursive_generate_counts( num_cells, num_genes, true_means, logfc, nb_dispersion, min_prop_genes_de, max_prop_genes_de, mean_de_logfc, sd_de_logfc ) else: cluster_counts[i] = \ generate_counts( num_cells, num_genes, true_means, logfc, nb_dispersion ) return (np.hstack(cluster_counts)) def generate_counts(num_cells, num_genes, true_means, logfc, nb_dispersion): # Per cell noise all_facs = np.power( 2, np.random.normal( loc=0, scale=0.5, size=num_cells ) ) effective_means = np.outer(true_means, all_facs) effective_means = np.transpose( np.multiply(np.transpose(effective_means), np.power(2, logfc)) ) # Generate data sample = np.random.negative_binomial( p=(1 / nb_dispersion) / ((1 / nb_dispersion) + effective_means), n=1 / nb_dispersion, size=[num_genes, num_cells] ) return (sample) def generate_toy_data( num_genes=10000, num_cells=1000, cluster_spec=None, dirichlet_parameter_cluster_size=10, gamma_shape=2, gamma_rate=2, nb_dispersion=0.1, min_prop_genes_de=0.1, max_prop_genes_de=0.4, mean_de_logfc=1, sd_de_logfc=0.5, ): # Toy experiment parameters # Data generation parameters # num_genes = 10000 # 10000, number of genes # num_cells = 1000 # 1000, number of cells # Cluster spec = None # Definition of cluster hierarchy # dirichlet_parameter_cluster_size = 10 # 10, Dirichlet parameter for cluster sizes, between 0 and inf, bigger values make cluster sizes more similar # Generate Cluster sizes cluster_sizes = recursive_dirichlet( cluster_spec, num_cells, dirichlet_parameter_cluster_size ) # Define the 'true' population mean expression levels true_means = np.random.gamma( gamma_shape, scale=1 / float(gamma_rate), size=num_genes ) counts = recursive_generate_counts( cluster_sizes, num_genes, true_means, [0] * num_genes, nb_dispersion, min_prop_genes_de, max_prop_genes_de, mean_de_logfc, sd_de_logfc ) def flatten(l): if type(l) is list: return flatten(l[0]) + (flatten(l[1:]) if len(l) > 1 else []) else: return ([l]) flat_sizes = flatten(cluster_sizes) flat_labels = flatten(cluster_spec) labels = [] for x in zip(flat_labels, flat_sizes): labels = labels + ([x[0]] * x[1]) return [counts, labels] def flatten(xs): res = [] def loop(ys): for i in ys: if isinstance(i, list): loop(i) else: res.append(i) loop(xs) if res == []: return res else: return np.hstack(res) def split_source_target(toy_data, true_toy_labels, target_ncells=1000, source_ncells=1000, mode=2, source_clusters=None, noise_target=False, noise_sd=0.5, common=0, cluster_spec=None): # Parameters for splitting data in source and target set: # target_ncells = 1000 # How much of the data will be target data? # source_ncells = 1000 # How much of the data will be source data? # splitting_mode = 2 # Splitting mode: 1 = split randomly, # 2 = split randomly, but stratified, # 3 = split randomly, but anti-stratified [not implemented] # 4 = Have some overlapping and some exclusive clusters, # 5 = have only non-overlapping clusters # 6 = Define source matrix clusters # 7 = Define number of overlapping clusters # source_clusters = None # Array of cluster ids to use in mode 6 # noise_target = False # Add some per gene gaussian noise to the target? 
# noise_sd = 0.5 # SD of gaussian noise # nscr = 2 # number of source clusters # ntrg = 2 # number of target clusters # common = 2 # number of shared clusters assert (target_ncells + source_ncells <= toy_data.shape[1]) go_back_flag = False # First split the 'truth' matrix into a set we will use and a set we wont # For mode 6,4,7 we do this differently if target_ncells + source_ncells < toy_data.shape[1] and mode != 6 and mode != 4 and mode != 7: toy_data, _, true_toy_labels, _ = \ train_test_split( np.transpose(toy_data), true_toy_labels, test_size=toy_data.shape[1] - (target_ncells + source_ncells), stratify=true_toy_labels ) toy_data = np.transpose(toy_data) proportion_target = float(target_ncells) / (source_ncells + target_ncells) if mode == 1: toy_data_source, \ toy_data_target, \ true_toy_labels_source, \ true_toy_labels_target = \ train_test_split( np.transpose(toy_data), true_toy_labels, test_size=target_ncells ) toy_data_source = np.transpose(toy_data_source) toy_data_target = np.transpose(toy_data_target) elif mode == 2: toy_data_source, \ toy_data_target, \ true_toy_labels_source, \ true_toy_labels_target = \ train_test_split( np.transpose(toy_data), true_toy_labels, test_size=target_ncells, stratify=true_toy_labels ) toy_data_source = np.transpose(toy_data_source) toy_data_target = np.transpose(toy_data_target) elif mode == 3: print("Mode 3 not implemented!") toy_data_source = [] toy_data_target = [] true_toy_labels_source = []
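recursive_dirichlet() above draws cluster proportions from a Dirichlet distribution, scales them to the requested cell count, and re-draws until no cluster degenerates to a single cell. A flat (non-recursive) sketch of that sizing step, with illustrative parameter values:

import numpy as np

def dirichlet_cluster_sizes(num_clusters, num_cells, concentration=10.0):
    sizes = np.ones(num_clusters)
    while sizes.min() <= 1:
        props = np.random.dirichlet(np.ones(num_clusters) * concentration)
        sizes = np.floor(props * num_cells)
        # floor() drops a few cells; hand the remainder to the first cluster
        sizes[0] += num_cells - sizes.sum()
    return sizes.astype(int)

print(dirichlet_cluster_sizes(4, 1000))  # e.g. [261 254 240 245], summing to 1000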
chamikaramj/incubator-beam
sdks/python/apache_beam/examples/snippets/snippets.py
Python
apache-2.0
40,001
0.0134
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Code snippets used in webdocs. The examples here are written specifically to read well with the accompanying web docs. Do not rewrite them until you make sure the webdocs still read well and the rewritten code supports the concept being described. For example, there are snippets that could be shorter but they are written like this to make a specific point in the docs. The code snippets are all organized as self contained functions. Parts of the function body delimited by [START tag] and [END tag] will be included automatically in the web docs. The naming convention for the tags is to have as prefix the PATH_TO_HTML where they are included followed by a descriptive string. The tags can contain only letters, digits and _. """ import apache_beam as beam from apache_beam.test_pipeline import TestPipeline from apache_beam.metrics import Metrics # Quiet some pylint warnings that happen because of the somewhat special # format for the code snippets. # pylint:disable=invalid-name # pylint:disable=expression-not-assigned # pylint:disable=redefined-outer-name # pylint:disable=reimported # pylint:disable=unused-variable # pylint:disable=wrong-import-order, wrong-import-position class SnippetUtils(object): from apache_beam.pipeline import PipelineVisitor class RenameFiles(PipelineVisitor): """RenameFiles will rewire read/write paths for unit testing. RenameFiles will replace the GCS files specified in the read and write transforms to local files so the pipeline can be run as a unit test. This assumes that read and write transforms defined in snippets have already been replaced by transforms 'DummyReadForTesting' and 'DummyReadForTesting' (see snippets_test.py). This is as close as we can get to have code snippets that are executed and are also ready to presented in webdocs. """ def __init__(self, renames): self.renames = renames def visit_transform(self, transform_node)
: if transform_node.full_label.find('DummyReadForTesting') >= 0: transform_node.transform.fn.file_to_read = self.renames['read'] elif transform_node.full_label.find('DummyWriteForTesting') >= 0: transform_node.transform.fn.file_to_wri
te = self.renames['write'] def construct_pipeline(renames): """A reverse words snippet as an example for constructing a pipeline.""" import re class ReverseWords(beam.PTransform): """A PTransform that reverses individual elements in a PCollection.""" def expand(self, pcoll): return pcoll | beam.Map(lambda e: e[::-1]) def filter_words(unused_x): """Pass through filter to select everything.""" return True # [START pipelines_constructing_creating] from apache_beam.utils.pipeline_options import PipelineOptions p = beam.Pipeline(options=PipelineOptions()) # [END pipelines_constructing_creating] p = TestPipeline() # Use TestPipeline for testing. # [START pipelines_constructing_reading] lines = p | 'ReadMyFile' >> beam.io.ReadFromText('gs://some/inputData.txt') # [END pipelines_constructing_reading] # [START pipelines_constructing_applying] words = lines | beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x)) reversed_words = words | ReverseWords() # [END pipelines_constructing_applying] # [START pipelines_constructing_writing] filtered_words = reversed_words | 'FilterWords' >> beam.Filter(filter_words) filtered_words | 'WriteMyFile' >> beam.io.WriteToText( 'gs://some/outputData.txt') # [END pipelines_constructing_writing] p.visit(SnippetUtils.RenameFiles(renames)) # [START pipelines_constructing_running] p.run() # [END pipelines_constructing_running] def model_pipelines(argv): """A wordcount snippet as a simple pipeline example.""" # [START model_pipelines] import re import apache_beam as beam from apache_beam.utils.pipeline_options import PipelineOptions class MyOptions(PipelineOptions): @classmethod def _add_argparse_args(cls, parser): parser.add_argument('--input', dest='input', default='gs://dataflow-samples/shakespeare/kinglear' '.txt', help='Input file to process.') parser.add_argument('--output', dest='output', required=True, help='Output file to write results to.') pipeline_options = PipelineOptions(argv) my_options = pipeline_options.view_as(MyOptions) p = beam.Pipeline(options=pipeline_options) (p | beam.io.ReadFromText(my_options.input) | beam.FlatMap(lambda x: re.findall(r'[A-Za-z\']+', x)) | beam.Map(lambda x: (x, 1)) | beam.combiners.Count.PerKey() | beam.io.WriteToText(my_options.output)) result = p.run() # [END model_pipelines] result.wait_until_finish() def model_pcollection(argv): """Creating a PCollection from data in local memory.""" from apache_beam.utils.pipeline_options import PipelineOptions class MyOptions(PipelineOptions): @classmethod def _add_argparse_args(cls, parser): parser.add_argument('--output', dest='output', required=True, help='Output file to write results to.') pipeline_options = PipelineOptions(argv) my_options = pipeline_options.view_as(MyOptions) # [START model_pcollection] p = beam.Pipeline(options=pipeline_options) (p | beam.Create([ 'To be, or not to be: that is the question: ', 'Whether \'tis nobler in the mind to suffer ', 'The slings and arrows of outrageous fortune, ', 'Or to take arms against a sea of troubles, ']) | beam.io.WriteToText(my_options.output)) result = p.run() # [END model_pcollection] result.wait_until_finish() def pipeline_options_remote(argv): """Creating a Pipeline using a PipelineOptions object for remote execution.""" from apache_beam import Pipeline from apache_beam.utils.pipeline_options import PipelineOptions # [START pipeline_options_create] options = PipelineOptions(flags=argv) # [END pipeline_options_create] # [START pipeline_options_define_custom] class MyOptions(PipelineOptions): @classmethod def _add_argparse_args(cls, 
parser): parser.add_argument('--input') parser.add_argument('--output') # [END pipeline_options_define_custom] from apache_beam.utils.pipeline_options import GoogleCloudOptions from apache_beam.utils.pipeline_options import StandardOptions # [START pipeline_options_dataflow_service] # Create and set your PipelineOptions. options = PipelineOptions(flags=argv) # For Cloud execution, set the Cloud Platform project, job_name, # staging location, temp_location and specify DataflowRunner. google_cloud_options = options.view_as(GoogleCloudOptions) google_cloud_options.project = 'my-project-id' google_cloud_options.job_name = 'myjob' google_cloud_options.staging_location = 'gs://my-bucket/binaries' google_cloud_options.temp_location = 'gs://my-bucket/temp' options.view_as(StandardOptions).runner = 'DataflowRunner' # Create the Pipeline with the specified options. p = Pipeline(options=options) # [END pipeline_options_dataflow_service] my_options = options.view_as(MyOptions) my_input = my_options.input my_output = my_options.output
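The model_pipelines() snippet above is a word count; the condensed version below keeps the same transform chain but swaps the GCS text source for an in-memory beam.Create so it runs locally on the default DirectRunner. It assumes apache_beam is installed and Python 3; the counts in the comment are for this toy input.

import re

import apache_beam as beam

lines = [
    'To be, or not to be: that is the question:',
    'Whether tis nobler in the mind to suffer',
]

with beam.Pipeline() as p:
    (p
     | beam.Create(lines)
     | beam.FlatMap(lambda x: re.findall(r"[A-Za-z\']+", x))
     | beam.Map(lambda word: (word, 1))
     | beam.combiners.Count.PerKey()
     | beam.Map(print))  # e.g. ('to', 2), ('be', 2), one pair per distinct word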
LaoLiulaoliu/hzkgelastic2-doc-manager
setup.py
Python
apache-2.0
2,047
0.000489
try: from setuptools import setup except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup import sys test_suite = "tests" tests_require = ["mongo-orchestration >= 0.2, < 0.4", "requests >= 2.5.1"] if sys.version_info[:2] == (2, 6): # Need unittest2 to run unittests in Python 2.6 tests_require.append("unittest2") test_suite = "unittest2.collector" try: with open("README.rst", "r") as fd: long_description = fd.read() except IOError: long_description = None # Install without README.rst setup(name='hzkgelastic2-doc-manager', version='0.2.1.dev0', maintainer='mongodb', description='Elastic2 plugin for mongo-connector', long_de
scription=lo
ng_description, platforms=['any'], author='anna herlihy', author_email='mongodb-user@googlegroups.com', url='https://github.com/mongodb-labs/hzkgelastic2-doc-manager', install_requires=['mongo-connector >= 2.3.0', "elasticsearch>=2.0.0,<3.0.0"], packages=["mongo_connector", "mongo_connector.doc_managers"], extras_require={'aws': ['boto3 >= 1.4.0', 'requests-aws-sign >= 0.1.1']}, license="Apache License, Version 2.0", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: Unix", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX" ], keywords=['mongo-connector', "mongodb", "elastic", "elasticsearch"], test_suite=test_suite, tests_require=tests_require )
wolverton-research-group/qmpy
qmpy/analysis/tests.py
Python
mit
1,996
0.001503
import os from qmpy import * from django.test import TestCase peak_locations = [] class MiedemaTestCase(TestCase): def setUp(self): read_elements() def test_methods(self): ## test that it generally works self.assertEqual(Miedema("FeNi").energy, -0.03) self.assertEqual(Miedema("FeNi").energy, -0.03) c = Composition.get("LiBe") self.assertEqual(Miedema(c).energy, -0.08) self.assertEqual(Miedema({"Pt": 1, "Ti": 3}).energy, -0.76) ## test that non-metals are ignored self.assertEqual(Miedema("Fe2O3").energy, None) ## test that it is quantity invariant self.assertEqual(Miedema("Fe5Ni5").energy, -0.03) class PDFTestCase(TestCase): def test_distances(self): pass class NearestNeighborTestCase(TestCase):
def setUp(self): read_elements() sample_files_loc = os.path.join(INSTALL_PATH, "io", "files") self.fcc = io.poscar.read(os.path.join(sample_files_loc, "POSCAR_FCC")) self.bcc = io.poscar.read(os.path.join(sample_files_loc, "POSCAR_BCC")) self.sc = io.poscar.read(os.path.join(sample_files_loc, "POSCAR_SC")) def test_heuristic(self): self.fcc.find_nearest_neighbors() self.a
ssertEqual(len(self.fcc[0].neighbors), 12) self.bcc.find_nearest_neighbors() self.assertEqual(len(self.bcc[0].neighbors), 8) self.sc.find_nearest_neighbors() self.assertEqual(len(self.sc[0].neighbors), 6) def test_voronoi(self): self.fcc.find_nearest_neighbors(method="voronoi") self.assertEqual(len(self.fcc[0].neighbors), 12) self.bcc.find_nearest_neighbors(method="voronoi") self.assertEqual(len(self.bcc[0].neighbors), 14) self.bcc.find_nearest_neighbors(method="voronoi", tol=5) self.assertEqual(len(self.bcc[0].neighbors), 8) self.sc.find_nearest_neighbors(method="voronoi") self.assertEqual(len(self.sc[0].neighbors), 6)
sapcc/monasca-agent
tests_to_fix/test_redis.py
Python
bsd-3-clause
5,369
0.001863
""" Redis check tests. """ import logging import os import unittest import subprocess import time import pprint import redis from tests.common import load_check from nose.plugins.skip import SkipTest logger = logging.getLogger() MAX_WAIT = 20 NOAUTH_PORT = 16379 AUTH_PORT = 26379 DEFAULT_PORT = 6379 MISSING_KEY_TOLERANCE = 0.5 class TestRedis(unittest.TestCase): def is_travis(self): global logger logger.info("Running on travis-ci") return "TRAVIS" in os.environ def wait4(self, p, pattern): """Waits until a specific pattern shows up in the stdout """ out = p.stdout loop = 0 while True: l = out.readline() if l.find(pattern) > -1: break else: time.sleep(0.1) loop += 1 if loop >= MAX_WAIT: break def setUp(self): raise SkipTest("Requires Redis installed") if not self.is_travis(): self.redis_noauth = subprocess.Popen( ["redis-server", "tests/redisnoauth.cfg"], stdout=subprocess.PIPE) self.wait4(self.redis_noauth, "The server is now ready to accept connections") self.redis_auth = subprocess.Popen( ["redis-server", "tests/redisauth.cfg"], stdout=subprocess.PIPE) self.wait4(self.redis_auth, "The server is now ready to accept connections") def tearDown(self): if not self.is_travis(): self.redis_noauth.terminate() self.redis_auth.terminate() def test_redis_auth(self): # Test connection with password if not self.is_travis(): # correct password r =
load_check('redisdb', {}, {}) instance = { 'host': 'localhost', 'port': AUTH_PORT, 'password': 'datadog-is-devops-best-friend' } r.check(instance)
metrics = self._sort_metrics(r.get_metrics()) assert len(metrics) > 0, "No metrics returned" # wrong passwords instances = [ { 'host': 'localhost', 'port': AUTH_PORT, 'password': '' }, { 'host': 'localhost', 'port': AUTH_PORT, 'password': 'badpassword' } ] for instance in instances: r = load_check('redisdb', {}, {}) r.check(instance) metrics = self._sort_metrics(r.get_metrics()) assert len( metrics) == 0, "Should have failed with bad password; got %s instead" % metrics def test_redis_default(self): # Base test, uses the noauth instance if self.is_travis(): port = DEFAULT_PORT else: port = NOAUTH_PORT instance = { 'host': 'localhost', 'port': port } db = redis.Redis(port=port, db=14) # Datadog's test db db.flushdb() db.set("key1", "value") db.set("key2", "value") db.setex("expirekey", "expirevalue", 1000) r = load_check('redisdb', {}, {}) r.check(instance) metrics = self._sort_metrics(r.get_metrics()) assert metrics, "No metrics returned" # Assert we have values, timestamps and dimensions for each metric. for m in metrics: assert isinstance(m[1], int) # timestamp assert isinstance(m[2], (int, float, long)) # value dimensions = m[3]["dimensions"] expected_dimensions = {"redis_host": "localhost", "redis_port": port} for e in expected_dimensions: assert e in dimensions def assert_key_present(expected, present, tolerance): "Assert we have the rest of the keys (with some tolerance for missing keys)" e = set(expected) p = set(present) assert len(e - p) < tolerance * len(e), pprint.pformat((p, e - p)) # gauges collected? remaining_keys = [m[0] for m in metrics] expected = r.GAUGE_KEYS.values() assert_key_present(expected, remaining_keys, MISSING_KEY_TOLERANCE) # Assert that the keys metrics are tagged by db. just check db0, since # it's the only one we can guarantee is there. db_metrics = self._sort_metrics( [m for m in metrics if m[0] in ['redis.keys', 'redis.expires'] and "redis_db:db14" in m[3]["dimensions"]]) self.assertEqual(2, len(db_metrics)) self.assertEqual('redis.expires', db_metrics[0][0]) self.assertEqual(1, db_metrics[0][2]) self.assertEqual('redis.keys', db_metrics[1][0]) self.assertEqual(3, db_metrics[1][2]) # Run one more check and ensure we get total command count # and other rates time.sleep(5) r.check(instance) metrics = self._sort_metrics(r.get_metrics()) keys = [m[0] for m in metrics] assert 'redis.net.commands' in keys def _sort_metrics(self, metrics): def sort_by(m): return m[0], m[1], m[3] return sorted(metrics, key=sort_by) if __name__ == "__main__": unittest.main()
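test_redis_default() above tolerates some missing metric keys rather than requiring an exact match: the check fails only if the fraction of expected keys absent from the collected ones reaches the tolerance. A pure-Python sketch of that check, with illustrative key names:

MISSING_KEY_TOLERANCE = 0.5

def assert_key_present(expected, present, tolerance):
    missing = set(expected) - set(present)
    assert len(missing) < tolerance * len(set(expected)), sorted(missing)

collected = ['redis.keys', 'redis.expires', 'redis.net.commands']
expected = ['redis.keys', 'redis.expires', 'redis.net.commands', 'redis.mem.used']
assert_key_present(expected, collected, MISSING_KEY_TOLERANCE)  # 1 of 4 missing: passes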
elcolie/fight
hopes/views.py
Python
mit
2,543
0.002359
from django.core.urlresolvers import reverse_lazy from django.views.generic import View from hopes.forms import StudentForm, SchoolForm, OneTimeForm, SpecificDateTimeForm from hopes.models import Student, School, OneTime, SpecificDateTime from vanilla import CreateView, DeleteView, ListView, UpdateView class ListStudents(ListView): model = Student class CreateStudent(CreateView): model = Student form_class = StudentForm success_url = reverse_lazy('list_students') class EditStudent(UpdateView): model = Student form_class = StudentForm success_url = reverse_lazy('list_students') class DeleteStudent(DeleteView): model = Student success_url = reverse_lazy('list_students') """-----------------------------------------------------------""" class ListSchools(ListView): model = School class CreateSchool(CreateView): model = School form_class = SchoolForm success_url = reverse_lazy('list_schools') class EditSchool(UpdateView): model = School form_class = SchoolForm success_url = reverse_lazy('list_schools') class DeleteSchool(DeleteView): model = School success_url = reverse_lazy('list_schools') """----------------------------------------------------------""" class ListOneTime(ListView): model = OneTime class CreateOneTime(CreateView): model = OneTime form_class = OneTimeForm success_url = reverse_lazy('list_onetime') class EditOneTime(UpdateView): model = OneTime form_class = OneTimeForm success_url = reverse_lazy('list_onetime') class DeleteOneTime(DeleteView): model = OneTime success_url = reverse_lazy('list_onetime') """-----------------------------------------------------------""" from django.shortcuts import render_to_response, render from django.template import RequestContext, loader class ListSpecDateTime(View): def index(self, request): template = loader.get_template('hopes/simple_spec.html') context = { 'spec_list' : 'Test my string', } return render(request, 'hopes/simple_spec.html', context) class CreateSpecDateTime(CreateView): model = SpecificDateTime form_class = SpecificDateTimeForm success_url = reverse_lazy('list_spec_time') class EditSpecDateTime(UpdateView):
model = SpecificDateTime form_class = SpecificDateTimeForm success_url =
reverse_lazy('list_spec_time') class DeleteSpecDateTime(DeleteView): model = SpecificDateTime success_url = reverse_lazy('list_spec_time')
emanueldima/b2share
b2share/modules/files/cli.py
Python
gpl-2.0
3,385
0.001182
# -*- coding: utf-8 -*- # # This file is part of EUDAT B2Share. # Copyright (C) 2016 CERN. # # B2Share is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # B2Share is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with B2Share; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. """B2Share Schemas module Command Line Interface.""" from __future__ import absolute_import import json import os import sys from sqlalchemy import exists, or_ import click from flask.cli import with_appcontext from invenio_db import db from invenio_files_rest.models import Location @click.group() def files(): """Files management commands.""" @files.command('add-location') @with_appcontext @click.argument('name') @click.argument('uri') @click.option('-d', '--default', is_flag=True, default=False, help='Use this location as the default location.') def add_location(name, uri, default): """Add a file storage location. The URI should point to the location where the files will be stored. The NAME will be used to reference this location. """ matching_locations = Location.query.filter( or_(Location.uri == uri, Location.name == name)).all() if len(matching_l
ocations) > 0: if matching_loca
tions[0].name == name: raise click.BadParameter( 'Another location with the same name already exists.') else: raise click.BadParameter( 'Existing location "{}" has the same uri.'.format( matching_locations[0].name)) if default: db.session.query(Location).filter(Location.default == True).update( {Location.default: False}) location = Location(name=name, uri=uri, default=default) db.session.add(location) db.session.commit() @files.command('set-default-location') @with_appcontext @click.argument('name') def set_default_location(name): """Change the default file storage location. The NAME should point to an existing location. """ location = Location.query.filter(Location.name == name).one() if location.default: return db.session.query(Location).filter(Location.default == True).update( {Location.default: False}) location.default = True db.session.commit() @files.command('list-locations') @with_appcontext def list_location(): """List all file storage locations.""" locations = Location.query.order_by(Location.default.desc(), Location.name).all() for location in locations: click.echo(json.dumps({c.name: str(getattr(location, c.name)) for c in Location.__table__.columns}, sort_keys=True))
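The CLI above is built from a click group with subcommands; the Flask app context and database access are what tie it to B2Share. A minimal standalone sketch of the same command structure, assuming only that click is installed and with the command body reduced to an echo:

import click

@click.group()
def files():
    """Files management commands."""

@files.command('add-location')
@click.argument('name')
@click.argument('uri')
@click.option('-d', '--default', is_flag=True, default=False,
              help='Use this location as the default location.')
def add_location(name, uri, default):
    click.echo('would add location %s -> %s (default=%s)' % (name, uri, default))

if __name__ == '__main__':
    files()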
UnbDroid/robomagellan
Codigos/Raspberry/desenvolvimentoRos/devel/lib/python2.7/dist-packages/tf2_ros/__init__.py
Python
gpl-3.0
1,035
0.000966
# -*- coding: utf-8 -*- # generated from catkin/cmake/template/__init__.py.in # keep symbol table as clean as possible by deleting all unnecessary symbols from os import path as os_path from sys import path as sys_path from pkgutil import extend_path __extended_path = "/home/pi/Documents/desenvolvimentoRos/src/tf2_ros/src".split(";") for p in reversed(__extended_path):
sys_path.insert(0, p) del p del sys_path __path__ = extend_path(__path__, __name__) del extend_path __execfiles = [] for p in __extended_path: src_init_file = os_path.join(p, __name__ + '.py') if os_path.isfile(src_init_file): __execfiles.append(src_init_file) else: src_init_file = os_path.join(p, __name__, '__init__.py') if
os_path.isfile(src_init_file): __execfiles.append(src_init_file) del src_init_file del p del os_path del __extended_path for __execfile in __execfiles: with open(__execfile, 'r') as __fh: exec(__fh.read()) del __fh del __execfile del __execfiles
RegioHelden/django-datawatch
django_datawatch/management/commands/datawatch_run_checks.py
Python
mit
701
0.001427
# -*- coding: UTF-8 -*- from django.core.management.base import BaseCommand from django_datawatch.datawatch import Scheduler class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--force', acti
on='store_true', dest='force', default=False, help='Execute all checks.', ) parser.add_argument( '--slug', dest='slug', default=None, help='Slug of check to refresh, all checks will be
refreshed if slug is not provided', ) def handle(self, force, slug, *args, **options): Scheduler().run_checks(force=force, slug=slug)
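The command above exposes a --force flag and an optional --slug. Django's BaseCommand.add_arguments wraps argparse, so the same interface can be expressed with the standard library alone; the sketch below is that argparse analogue, not the Django command itself.

import argparse

parser = argparse.ArgumentParser(description='Run datawatch checks.')
parser.add_argument('--force', action='store_true', default=False,
                    help='Execute all checks.')
parser.add_argument('--slug', default=None,
                    help='Slug of the check to refresh; all checks if omitted.')

args = parser.parse_args(['--force'])
print(args.force, args.slug)  # True None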
cliftonmcintosh/openstates
openstates/mn/__init__.py
Python
gpl-3.0
5,612
0.000713
from pupa.scrape import Jurisdiction, Organization from .bills import MNBillScraper from .committees import MNCommitteeScraper from .people import MNPersonScraper from .vote_events import MNVoteScraper from .events import MNEventScraper from .common import url_xpath """ Minnesota legislative data can be found at the Office of the Revisor of Statutes: https://www.revisor.mn.gov/ Votes: There are not detailed vote data for Senate votes, simply yes and no counts. Bill pages have vote counts and links to House details, so it makes more sense to get vote data from the bill pages. """ class Minnesota(Jurisdiction): division_id = "ocd-division/country:us/state:mn" classification = "government" name = "Minnesota" url = "http://state.mn.us/" check_sessions = True scrapers = { "bills": MNBillScraper, "committees": MNCommitteeScraper, "people": MNPersonScraper, "vote_events": MNVoteScraper, "events": MNEventScraper, } parties = [{'name': 'Republican'}, {'name': 'Democratic-Farmer-Labor'}] legislative_sessions = [ { '_scraped_name': '86th Legislature, 2009-2010', 'classification': 'primary', 'identifier': '2009-2010', 'name': '2009-2010 Regular Session' }, { '_scraped_name': '86th Legislature, 2010 1st Special Session', 'classification': 'special', 'identifier': '2010 1st Special Session', 'name': '2010, 1st Special Session' }, { '_scraped_name': '86th Legislature, 2010 2nd Special Session', 'classification': 'special', 'identifier': '2010 2nd Special Session', 'name': '2010, 2nd Special Session' }, { '_scraped_name': '87th Legislature, 2011-2012', 'classification': 'primary', 'identifier': '2011-2012', 'name': '2011-2012 Regular Session' }, { '_scraped_name': '87th Legislature, 2011 1st Special Session', 'classification': 'special', 'identifier': '2011s1', 'name': '2011, 1st Special Session' }, { '_scraped_name': '87th Legislature, 2012 1st Special Session', 'classification': 'special', 'identifier': '2012s1', 'name': '2012, 1st Special Session' }, { '_scraped_name': '88th Legislature, 2013-2014', 'classification': 'primary', 'identifier': '2013-2014', 'name': '2013-2014 Regular Session' }, { '_scraped_name': '88th Legislature, 2013 1st Special Session', 'classification': 'special', 'identifier': '2013s1', 'name': '2013, 1st Special Session' }, { '_scraped_name': '89th Legislature, 2015-2016', 'classification': 'primary', 'identifier': '2015-2016', 'name': '2015-2016 Regular Session' }, { '_scraped_name': '89th Legislature, 2015 1st Special Session', 'classification': 'special', 'identifier': '2015s1', 'name': '2015, 1st Special Session' }, { '_scraped_name': '90th Legislature, 2017-2018', 'classification': 'primary', 'identifier': '2017-2018', 'name': '2017-2018 Regular Session' }, ] ignored_scraped_sessions = [ '85th Legislature, 2007-2008', '85th Legislature, 2007 1st Special Session', '84th Legislature, 2005-2006', '84th Legislature, 2005 1st Special Session', '83rd Legislature, 2003-2004', '83rd Legislature, 2003 1st Special Session', '82nd Legislature, 2001-2002', '82nd Legislature, 2002 1st Special Session', '82nd Legislature, 2001 1st Special Session', '81st Legislature, 1999-2000', '80th Legislature, 1997-1998', '80th Legislature, 1998 1st Special Session', '80th Legislature, 1997 3rd Special Session
', '80th Legislature, 1997 2nd Special Session', '80th Legislature, 1
997 1st Special Session', '79th Legislature, 1995-1996', '79th Legislature, 1995 1st Special Session', '89th Legislature, 2015-2016', ] def get_organizations(self): legis = Organization('Minnesota Legislature', classification='legislature') upper = Organization('Minnesota Senate', classification='upper', parent_id=legis._id) lower = Organization('Minnesota House of Representatives', classification='lower', parent_id=legis._id) for n in range(1, 68): upper.add_post(label=str(n), role='Senator', division_id='ocd-division/country:us/state:mn/sldu:{}'.format(n)) lower.add_post(label=str(n) + 'A', role='Representative', division_id='ocd-division/country:us/state:mn/sldl:{}a'.format(n)) lower.add_post(label=str(n) + 'B', role='Representative', division_id='ocd-division/country:us/state:mn/sldl:{}b'.format(n)) yield legis yield upper yield lower def get_session_list(self): return url_xpath('https://www.revisor.mn.gov/revisor/pages/' 'search_status/status_search.php?body=House', '//select[@name="session"]/option/text()')
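get_organizations() above builds one post per Minnesota senate district (67 of them) and splits each into an A and a B house district, encoded as OCD division ids. A self-contained sketch of just that loop:

upper_posts = []
lower_posts = []
for n in range(1, 68):
    upper_posts.append('ocd-division/country:us/state:mn/sldu:{}'.format(n))
    lower_posts.append('ocd-division/country:us/state:mn/sldl:{}a'.format(n))
    lower_posts.append('ocd-division/country:us/state:mn/sldl:{}b'.format(n))

print(len(upper_posts), len(lower_posts))  # 67 134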
mattdelhey/kaggle-galaxy
saveData.py
Python
mit
440
0.004545
def saveData(X, f_out, colfmt='%i'): ''' Quick alias for saving data matrices. If X and f_out are tuples, this function will save multiple matrices at once. ''' import numpy as np if isinstance(X, tuple): assert(len(X) == len(f_out)) for idx,Z in enumerate(X): np.
savetxt(f_out[idx], Z, delimiter=',', fmt=colfmt) else: np.savet
xt(f_out, X, delimiter=',', fmt=colfmt)
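saveData() above is a thin wrapper over np.savetxt that also accepts matched tuples of arrays and output paths. The equivalent underlying calls, fully self-contained (file names are illustrative):

import numpy as np

X = np.arange(6).reshape(3, 2)
Y = np.eye(2, dtype=int)

# Equivalent of saveData((X, Y), ('X.csv', 'Y.csv')): one savetxt call per pair.
for arr, path in zip((X, Y), ('X.csv', 'Y.csv')):
    np.savetxt(path, arr, delimiter=',', fmt='%i')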
andrevmatos/Librix-ThinClient
src/ui/export/ssh_export/__init__.py
Python
gpl-2.0
739
0
#!/usr/bin/env python3 # # # # This file is part of librix-thinclient. # # librix-thinclient is free software: you can redistribute it and/
or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # librix-thinclient is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A P
ARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with librix-thinclient. If not, see <http://www.gnu.org/licenses/>. __all__ = [ ]