Dataset schema (one column per field, with the viewer's min/max stats):

    repo_name : string  (length 5 – 100)
    path      : string  (length 4 – 231)
    language  : string  (1 class)
    license   : string  (15 classes)
    size      : int64   (6 – 947k)
    score     : float64 (0 – 0.34)
    prefix    : string  (length 0 – 8.16k)
    middle    : string  (length 3 – 512)
    suffix    : string  (length 0 – 8.17k)
repo_name: elena/django | path: tests/template_tests/syntax_tests/i18n/test_blocktranslate.py | language: Python | license: bsd-3-clause | size: 27,773 | score: 0.003497
import inspect
import os
from functools import partial, wraps

from asgiref.local import Local

from django.template import Context, Template, TemplateSyntaxError
from django.test import SimpleTestCase, override_settings
from django.utils import translation
from django.utils.safestring import mark_safe
from django.utils.translation import trans_real

from ...utils import setup as base_setup
from .base import MultipleLocaleActivationTestCase, extended_locale_paths, here


def setup(templates, *args, **kwargs):
    blocktranslate_setup = base_setup(templates, *args, **kwargs)
    blocktrans_setup = base_setup({
        name: template.replace(
            '{% blocktranslate ', '{% blocktrans '
        ).replace(
            '{% endblocktranslate %}', '{% endblocktrans %}'
        )
        for name, template in templates.items()
    })

    tags = {
        'blocktrans': blocktrans_setup,
        'blocktranslate': blocktranslate_setup,
    }

    def decorator(func):
        @wraps(func)
        def inner(self, *args):
            signature = inspect.signature(func)
            for tag_name, setup_func in tags.items():
                if 'tag_name' in signature.parameters:
                    setup_func(partial(func, tag_name=tag_name))(self)
                else:
                    setup_func(func)(self)
        return inner
    return decorator


class I18nBlockTransTagTests(SimpleTestCase):
    libraries = {'i18n': 'django.templatetags.i18n'}

    @setup({'i18n03': '{% load i18n %}{% blocktranslate %}{{ anton }}{% endblocktranslate %}'})
    def test_i18n03(self):
        """simple translation of a variable"""
        output = self.engine.render_to_string('i18n03', {'anton': 'Å'})
        self.assertEqual(output, 'Å')

    @setup({'i18n04': '{% load i18n %}{% blocktranslate with berta=anton|lower %}{{ berta }}{% endblocktranslate %}'})
    def test_i18n04(self):
        """simple translation of a variable and filter"""
        output = self.engine.render_to_string('i18n04', {'anton': 'Å'})
        self.assertEqual(output, 'å')

    @setup({'legacyi18n04': '{% load i18n %}'
                            '{% blocktranslate with anton|lower as berta %}{{ berta }}{% endblocktranslate %}'})
    def test_legacyi18n04(self):
        """simple translation of a variable and filter"""
        output = self.engine.render_to_string('legacyi18n04', {'anton': 'Å'})
        self.assertEqual(output, 'å')

    @setup({'i18n05': '{% load i18n %}{% blocktranslate %}xxx{{ anton }}xxx{% endblocktranslate %}'})
    def test_i18n05(self):
        """simple translation of a string with interpolation"""
        output = self.engine.render_to_string('i18n05', {'anton': 'yyy'})
        self.assertEqual(output, 'xxxyyyxxx')

    @setup({'i18n07': '{% load i18n %}'
                      '{% blocktranslate count counter=number %}singular{% plural %}'
                      '{{ counter }} plural{% endblocktranslate %}'})
    def test_i18n07(self):
        """translation of singular form"""
        output = self.engine.render_to_string('i18n07', {'number': 1})
        self.assertEqual(output, 'singular')

    @setup({'legacyi18n07': '{% load i18n %}'
                            '{% blocktranslate count number as counter %}singular{% plural %}'
                            '{{ counter }} plural{% endblocktranslate %}'})
    def test_legacyi18n07(self):
        """translation of singular form"""
        output = self.engine.render_to_string('legacyi18n07', {'number': 1})
        self.assertEqual(output, 'singular')

    @setup({'i18n08': '{% load i18n %}'
                      '{% blocktranslate count number as counter %}singular{% plural %}'
                      '{{ counter }} plural{% endblocktranslate %}'})
    def test_i18n08(self):
        """translation of plural form"""
        output = self.engine.render_to_string('i18n08', {'number': 2})
        self.assertEqual(output, '2 plural')

    @setup({'legacyi18n08': '{% load i18n %}'
                            '{% blocktranslate count counter=number %}singular{% plural %}'
                            '{{ counter }} plural{% endblocktranslate %}'})
    def test_legacyi18n08(self):
        """translation of plural form"""
        output = self.engine.render_to_string('legacyi18n08', {'number': 2})
        self.assertEqual(output, '2 plural')

    @setup({'i18n17': '{% load i18n %}'
                      '{% blocktranslate with berta=anton|escape %}{{ berta }}{% endblocktranslate %}'})
    def test_i18n17(self):
        """
        Escaping inside blocktranslate and translate works as if it was
        directly in the template.
        """
        output = self.engine.render_to_string('i18n17', {'anton': 'α & β'})
        self.assertEqual(output, 'α &amp; β')

    @setup({'i18n18': '{% load i18n %}'
                      '{% blocktranslate with berta=anton|force_escape %}{{ berta }}{% endblocktranslate %}'})
    def test_i18n18(self):
        output = self.engine.render_to_string('i18n18', {'anton': 'α & β'})
        self.assertEqual(output, 'α &amp; β')

    @setup({'i18n19': '{% load i18n %}{% blocktranslate %}{{ andrew }}{% endblocktranslate %}'})
    def test_i18n19(self):
        output = self.engine.render_to_string('i18n19', {'andrew': 'a & b'})
        self.assertEqual(output, 'a &amp; b')

    @setup({'i18n21': '{% load i18n %}{% blocktranslate %}{{ andrew }}{% endblocktranslate %}'})
    def test_i18n21(self):
        output = self.engine.render_to_string('i18n21', {'andrew': mark_safe('a & b')})
        self.assertEqual(output, 'a & b')

    @setup({'legacyi18n17': '{% load i18n %}'
                            '{% blocktranslate with anton|escape as berta %}{{ berta }}{% endblocktranslate %}'})
    def test_legacyi18n17(self):
        output = self.engine.render_to_string('legacyi18n17', {'anton': 'α & β'})
        self.assertEqual(output, 'α &amp; β')

    @setup({'legacyi18n18': '{% load i18n %}'
                            '{% blocktranslate with anton|force_escape as berta %}'
                            '{{ berta }}{% endblocktranslate %}'})
    def test_legacyi18n18(self):
        output = self.engine.render_to_string('legacyi18n18', {'anton': 'α & β'})
        self.assertEqual(output, 'α &amp; β')

    @setup({'i18n26': '{% load i18n %}'
                      '{% blocktranslate with extra_field=myextra_field count counter=number %}'
                      'singular {{ extra_field }}{% plural %}plural{% endblocktranslate %}'})
    def test_i18n26(self):
        """
        translation of plural form with extra field in singular form (#13568)
        """
        output = self.engine.render_to_string('i18n26', {'myextra_field': 'test', 'number': 1})
        self.assertEqual(output, 'singular test')

    @setup({'legacyi18n26': '{% load i18n %}'
                            '{% blocktranslate with myextra_field as extra_field count number as counter %}'
                            'singular {{ extra_field }}{% plural %}plural{% endblocktranslate %}'})
    def test_legacyi18n26(self):
        output = self.engine.render_to_string('legacyi18n26', {'myextra_field': 'test', 'number': 1})
        self.assertEqual(output, 'singular test')

    @setup({'i18n27': '{% load i18n %}{% blocktranslate count counter=number %}'
                      '{{ counter }} result{% plural %}{{ counter }} results'
                      '{% endblocktranslate %}'})
    def test_i18n27(self):
        """translation of singular form in Russian (#14126)"""
        with translation.override('ru'):
            output = self.engine.render_to_string('i18n27', {'number': 1})
        self.assertEqual(output, '1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442')

    @setup({'legacyi18n27': '{% load i18n %}'
                            '{% blocktranslate count number as counter %}{{ counter }} result'
                            '{% plural %}{{ counter }} results{% endblocktranslate %}'})
    def test_legacyi18n27(self):
        with translation.override('ru'):
            output = self.engine.render_to_string('legacyi18n27', {'number': 1})
            self.assertEqual(output, '1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u04
repo_name: tensorflow/similarity | path: tensorflow_similarity/retrieval_metrics/bndcg.py | language: Python | license: apache-2.0 | size: 5,659 | score: 0.00053
# Copyright 2021 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math

import tensorflow as tf

from .retrieval_metric import RetrievalMetric
from tensorflow_similarity.types import FloatTensor, IntTensor, BoolTensor


class BNDCG(RetrievalMetric):
    """Binary normalized discounted cumulative gain.

    This is normalized discounted cumulative gain where the relevancy weights
    are binary, i.e., either a correct match or an incorrect match.

    The NDCG is a score between [0,1] representing the rank weighted results.
    The DCG represents the sum of the correct matches weighted by the log2 of
    the rank and is normalized by the 'ideal DCG'. The IDCG is computed as the
    match_mask, sorted descending, weighted by the log2 of the post sorting
    rank order. This metric takes into account both the correctness of the
    match and the position.

    The normalized DCG is computed as:

    $$
    nDCG_{p} = \frac{DCG_{p}}{IDCG_{p}}
    $$

    The DCG is computed for each query using the match_mask as:

    $$
    DCG_{p} = \sum_{i=1}^{p} \frac{match_mask_{i}}{\log_{2}(i+1)}
    $$

    The IDCG uses the same equation but sorts the match_mask descending
    along axis=-1. Additionally, all positive matches with a distance above
    the threshold are set to 0, and the closest K matches are taken.

    Args:
        name: Name associated with the metric object, e.g., precision@5

        canonical_name: The canonical name associated with metric, e.g.,
        precision@K

        k: The number of nearest neighbors over which the metric is computed.

        distance_threshold: The max distance below which a nearest neighbor
        is considered a valid match.

        average: {'micro', 'macro'} Determines the type of averaging
        performed on the data.

            * 'micro': Calculates metrics globally over all data.
            * 'macro': Calculates metrics for each label and takes the
              unweighted mean.
    """

    def __init__(
        self,
        name: str = "ndcg",
        k: int = 5,
        distance_threshold: float = math.inf,
        **kwargs,
    ) -> None:
        if "canonical_name" not in kwargs:
            kwargs["canonical_name"] = "ndcg@k"

        super().__init__(
            name=name, k=k, distance_threshold=distance_threshold, **kwargs
        )

    def compute(
        self,
        *,  # keyword only arguments see PEP-570
        query_labels: IntTensor,
        lookup_distances: FloatTensor,
        match_mask: BoolTensor,
        **kwargs,
    ) -> FloatTensor:
        """Compute the metric

        Computes the binary NDCG. The query labels are only used when the
        averaging is set to "macro".

        Args:
            query_labels: A 1D array of the labels associated with the
            embedding queries.

            lookup_distances: A 2D array where the jth row is the distances
            between the jth query and the set of k neighbors.

            match_mask: A 2D mask where a 1 indicates a match between the
            jth query and the kth neighbor and a 0 indicates a mismatch.

        Returns:
            A rank 0 tensor containing the metric.
        """
        self._check_shape(query_labels, match_mask)

        if tf.shape(lookup_distances)[0] != tf.shape(query_labels)[0]:
            raise ValueError(
                "The number of lookup distance rows must equal the number "
                "of query labels. Number of lookup distance rows is "
                f"{tf.shape(lookup_distances)[0]} but the number of query "
                f"labels is {tf.shape(query_labels)[0]}."
            )

        dist_mask = tf.math.less_equal(
            lookup_distances, self.distance_threshold
        )
        k_slice = tf.math.multiply(
            tf.cast(match_mask, dtype="float"),
            tf.cast(dist_mask, dtype="float"),
        )[:, : self.k]

        rank = tf.range(1, self.k + 1, dtype="float")
        rank_weights = tf.math.divide(tf.math.log1p(rank), tf.math.log(2.0))

        # the numerator is simpler here because we are using binary weights
        dcg = tf.math.reduce_sum(k_slice / rank_weights, axis=1)

        # generate the "ideal ordering".
        ideal_ordering = tf.sort(k_slice, direction="DESCENDING", axis=1)
        idcg = tf.math.reduce_sum(ideal_ordering / rank_weights, axis=1)

        per_example_ndcg = tf.math.divide_no_nan(dcg, idcg)

        if self.average == "micro":
            ndcg = tf.math.reduce_mean(per_example_ndcg)
        elif self.average == "macro":
            per_class_metrics = 0
            class_labels = tf.unique(query_labels)[0]
            for label in class_labels:
                idxs = tf.where(query_labels == label)
                c_slice = tf.gather(per_example_ndcg, indices=idxs)
                per_class_metrics += tf.math.reduce_mean(c_slice)
            ndcg = tf.math.divide(per_class_metrics, len(class_labels))
        else:
            raise ValueError(
                f"{self.average} is not a supported average option"
            )

        result: FloatTensor = ndcg
        return result
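A minimal usage sketch for the metric above. The tensor values are made up for illustration, and the import path is assumed from the package layout shown in the row metadata; the constructor and compute() signature come from the class itself.

import tensorflow as tf
from tensorflow_similarity.retrieval_metrics import BNDCG  # assumed import path

# Hypothetical lookup results for two queries, each with k=3 neighbors.
query_labels = tf.constant([0, 1])                    # 1D: one label per query
lookup_distances = tf.constant([[0.1, 0.4, 0.9],      # 2D: distances to the k neighbors
                                [0.2, 0.3, 0.8]])
match_mask = tf.constant([[True, False, True],        # 2D: neighbor label == query label
                          [True, True, False]])

metric = BNDCG(k=3, distance_threshold=0.5, average="micro")
score = metric.compute(
    query_labels=query_labels,
    lookup_distances=lookup_distances,
    match_mask=match_mask,
)
print(float(score))  # rank-0 tensor in [0, 1]; matches beyond the threshold are zeroed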
repo_name: Neluso/SIFPAF | path: plot.py | language: Python | license: gpl-3.0 | size: 1,454 | score: 0
import matplotlib.pyplot as plt
from h5py import File
from numpy import array


def launch_plots():  # TODO set activation of different plots
    plot3d = plt.figure('Plot 3D')
    xy_plane = plt.figure('XY')
    xz_plane = plt.figure('XZ')
    yz_plane = plt.figure('YZ')
    ax_plot3d = plot3d.add_subplot(111, projection='3d')
    ax_xy = xy_plane.add_subplot(111)
    ax_xz = xz_plane.add_subplot(111)
    ax_yz = yz_plane.add_subplot(111)
    ax_plot3d.set_title('3D')
    ax_plot3d._axis3don = False
    ax_xy.set_ylabel('y')
    ax_xy.set_xlabel('x')
    ax_xz.set_ylabel('z')
    ax_xz.set_xlabel('x')
    ax_yz.set_ylabel('z')
    ax_yz.set_xlabel('y')
    fh5 = File('data.h5', 'r')
    total_particles = len(list(fh5['/particles'])) + 1
    for particle_count in range(1, total_particles):
        route = '/particles/' + str(particle_count) + '/'
        trace = fh5[route + 'trace'].value[0]
        initial_position = fh5[route + 'initial_position']
        final_position = fh5[route + 'final_position']
        xs = array([initial_position[0], final_position[0]])
        ys = array([initial_position[1], final_position[1]])
        zs = array([initial_position[2], final_position[2]])
        ax_plot3d.plot(xs, ys, zs, trace)
        ax_xy.plot(xs, ys, trace)
        ax_xz.plot(xs, zs, trace)
        ax_yz.plot(ys, zs, trace)
    xy_plane.savefig('XY.jpg')
    xz_plane.savefig('XZ.jpg')
    yz_plane.savefig('YZ.jpg')
    plt.show()
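For reference, a hypothetical writer for the 'data.h5' layout that launch_plots() reads: one group per particle under /particles, each holding a matplotlib format string in 'trace' plus 3-component start/end positions. The group names and dataset shapes here are inferred from the reader above, not from SIFPAF documentation.

from h5py import File
import numpy as np

with File('data.h5', 'w') as fh5:
    grp = fh5.create_group('/particles/1')
    # format string read back via .value[0], e.g. a red line
    grp.create_dataset('trace', data=np.array(['r-'], dtype='S2'))
    grp.create_dataset('initial_position', data=np.zeros(3))
    grp.create_dataset('final_position', data=np.ones(3))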
repo_name: cmusatyalab/elijah-provisioning | path: elijah/provisioning/server.py | language: Python | license: apache-2.0 | size: 46,655 | score: 0.004758
#!/usr/bin/env python
#
# Cloudlet Infrastructure for Mobile Computing
#
# Author: Kiryong Ha <krha@cmu.edu>
#
# Copyright (C) 2011-2013 Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
import functools
import traceback
import sys
import time
import SocketServer
import socket
import tempfile
import struct
import shutil
import threading

import synthesis as synthesis
from package import VMOverlayPackage
from db.api import DBConnector
from db.table_def import BaseVM, Session, OverlayVM
from synthesis_protocol import Protocol as Protocol
from configuration import Const as Cloudlet_Const
from configuration import Synthesis_Const as Synthesis_Const
import msgpack

from pprint import pformat
from optparse import OptionParser
from multiprocessing import Process, JoinableQueue, Queue, Manager
from lzma import LZMADecompressor
import log as logging

LOG = logging.getLogger(__name__)
session_resources = dict()   # dict[session_id] = obj(SessionResource)


class RapidSynthesisError(Exception):
    pass


class SessionResource(object):
    DELTA_PROCESS = "delta_proc"
    RESUMED_VM = "resumed_vm"
    FUSE = "fuse"
    OVERLAY_PIPE = "overlay_pipe"
    OVERLAY_DIR = "overlay_dir"
    OVERLAY_DB_ENTRY = "overlay_db_entry"

    def __init__(self, session_id):
        self.session_id = session_id
        self.resource_dict = dict()
        self.resource_list = list()
        self.resource_list.append(SessionResource.DELTA_PROCESS)
        self.resource_list.append(SessionResource.RESUMED_VM)
        self.resource_list.append(SessionResource.FUSE)
        self.resource_list.append(SessionResource.OVERLAY_PIPE)
        self.resource_list.append(SessionResource.OVERLAY_DIR)
        self.resource_list.append(SessionResource.OVERLAY_DB_ENTRY)

    def add(self, name, obj):
        if name not in self.resource_list:
            msg = "Resource (%s) is not allowed" % name
            msg += "Allowed resources: %s" % ' '.join(self.resource_list)
            raise RapidSynthesisError(msg)
        resource = self.resource_dict.get(name, None)
        if resource is not None:
            msg = "resource %s is already existing at session(%s)" % \
                (name, str(self.session))
            raise RapidSynthesisError(msg)
        self.resource_dict[name] = obj

    def deallocate(self):
        delta_proc = self.resource_dict.get(SessionResource.DELTA_PROCESS, None)
        resumed_vm = self.resource_dict.get(SessionResource.RESUMED_VM, None)
        fuse = self.resource_dict.get(SessionResource.FUSE, None)
        overlay_pipe = self.resource_dict.get(SessionResource.OVERLAY_PIPE, None)
        overlay_dir = self.resource_dict.get(SessionResource.OVERLAY_DIR, None)
        overlay_db_entry = self.resource_dict.get(SessionResource.OVERLAY_DB_ENTRY, None)
        if delta_proc:
            delta_proc.finish()
            if delta_proc.is_alive():
                delta_proc.terminate()
            del self.resource_dict[SessionResource.DELTA_PROCESS]
        if resumed_vm:
            resumed_vm.terminate()
            del self.resource_dict[SessionResource.RESUMED_VM]
        if fuse:
            fuse.terminate()
            del self.resource_dict[SessionResource.FUSE]
        if overlay_pipe:
            os.unlink(overlay_pipe)
            del self.resource_dict[SessionResource.OVERLAY_PIPE]
        if overlay_dir and os.path.exists(overlay_dir):
            shutil.rmtree(overlay_dir)
            del self.resource_dict[SessionResource.OVERLAY_DIR]
        if overlay_db_entry:
            overlay_db_entry.terminate()


def wrap_process_fault(function):
    """Wraps a method to catch exceptions related to instances.

    This decorator wraps a method to catch any exceptions and
    terminate the request gracefully.
    """
    @functools.wraps(function)
    def decorated_function(self, *args, **kwargs):
        try:
            return function(self, *args, **kwargs)
        except Exception, e:
            if hasattr(self, 'exception_handler'):
                self.exception_handler()
            kwargs.update(dict(zip(function.func_code.co_varnames[2:], args)))
            LOG.error("failed with : %s" % str(kwargs))

    return decorated_function


class NetworkUtil(object):
    @staticmethod
    def recvall(sock, size):
        data = ''
        while len(data) < size:
            data += sock.recv(size - len(data))
        return data

    @staticmethod
    def encoding(data):
        return msgpack.packb(data)

    @staticmethod
    def decoding(data):
        return msgpack.unpackb(data)


class NetworkStepThread(threading.Thread):
    MAX_REQUEST_SIZE = 1024*512  # 512 KB

    def __init__(self, network_handler, overlay_urls, overlay_urls_size,
                 demanding_queue, out_queue, time_queue, chunk_size):
        self.network_handler = network_handler
        self.read_stream = network_handler.rfile
        self.overlay_urls = overlay_urls
        self.overlay_urls_size = overlay_urls_size
        self.demanding_queue = demanding_queue
        self.out_queue = out_queue
        self.time_queue = time_queue
        self.chunk_size = chunk_size
        threading.Thread.__init__(self, target=self.receive_overlay_blobs)

    def exception_handler(self):
        self.out_queue.put(Synthesis_Const.ERROR_OCCURED)
        self.time_queue.put({'start_time': -1, 'end_time': -1, "bw_mbps": 0})

    @wrap_process_fault
    def receive_overlay_blobs(self):
        total_read_size = 0
        counter = 0
        index = 0
        finished_url = dict()
        requesting_list = list()
        out_of_order_count = 0
        total_urls_count = len(self.overlay_urls)
        start_time = time.time()

        while len(finished_url) < total_urls_count:
            # request to client until it becomes more than MAX_REQUEST_SIZE
            while True:
                requesting_size = sum([self.overlay_urls_size[item] for item in requesting_list])
                if requesting_size > self.MAX_REQUEST_SIZE or len(self.overlay_urls) == 0:
                    # Enough requesting list or nothing left to request
                    break

                # find overlay to request
                urgent_overlay_url = None
                while not self.demanding_queue.empty():
                    # demanding_queue can have multiple same request
                    demanding_url = self.demanding_queue.get()
                    if (finished_url.get(demanding_url, False) == False) and \
                            (demanding_url not in requesting_list):
                        urgent_overlay_url = demanding_url
                        break

                requesting_overlay = None
                if urgent_overlay_url != None:
                    requesting_overlay = urgent_overlay_url
                    out_of_order_count += 1
                    if requesting_overlay in self.overlay_urls:
                        self.overlay_urls.remove(requesting_overlay)
                else:
                    requesting_overlay = self.overlay_urls.pop(0)

                # request overlay blob to client
                message = NetworkUtil.encoding({
                    Protocol.KEY_COMMAND: Protocol.MESSAGE_COMMAND_ON_DEMAND,
                    Protocol.KEY_REQUEST_SEGMENT: requesting_overlay
                })
                message_size = struct.pack("!I", len(message))
                self.network_handler.request.send(message_size)
                self.network_handler.wfile.write(message)
                self.network_handler.wfile.flush()
                requesting_list.append(requesting_over
repo_name: pandaoknight/leetcode | path: sum_problem_dynamic_programming/two-sum-iv-input-is-a-bst/hash-table.py | language: Python | license: gpl-2.0 | size: 1,485 | score: 0.005506
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Definition for a binary tree node.
class TreeNode(object):
    def __init__(self, x, left=None, right=None):
        self.val = x
        self.left = left
        self.right = right


class Solution(object):
    """
    [Approach]
    1. Use a hash table here; Python's dict is a hash table.
    """
    def findTarget(self, root, k):
        """
        :type root: TreeNode
        :type k: int
        :rtype: bool
        """
        traveled = {}
        for n in self.yieldBreadthFirstSearch(root):
            if traveled.get(k - n):
                return True
            else:
                traveled[n] = True
        return False

    def yieldBreadthFirstSearch(self, root):
        que = [root]
        while que:
            cur = que.pop(0)
            if cur.left:
                que.append(cur.left)
            if cur.right:
                que.append(cur.right)
            yield cur.val


if "__main__" == __name__:
    s = Solution()
    print s.findTarget(TreeNode(2, None, TreeNode(7)), 9)
    print s.findTarget(TreeNode(7, TreeNode(2), TreeNode(2, TreeNode(7))), 9)
    print s.findTarget(TreeNode(5, TreeNode(3, TreeNode(2), TreeNode(4)), TreeNode(6, None, TreeNode(7))), 9)
    print s.findTarget(TreeNode(5, TreeNode(3, TreeNode(2), TreeNode(4)), TreeNode(6, None, TreeNode(7))), 13)
    print s.findTarget(TreeNode(5, TreeNode(3, TreeNode(2), TreeNode(4)), TreeNode(6, None, TreeNode(7))), 20)
repo_name: simbha/mAngE-Gin | path: lib/django/utils/version.py | language: Python | license: mit | size: 2,279 | score: 0.000878
from __future__ import unicode_literals

import datetime
import os
import subprocess

from django.utils.lru_cache import lru_cache


def get_version(version=None):
    "Returns a PEP 386-compliant version number from VERSION."
    version = get_complete_version(version)

    # Now build the two parts of the version number:
    # major = X.Y[.Z]
    # sub = .devN - for pre-alpha releases
    #     | {a|b|c}N - for alpha, beta and rc releases

    major = get_major_version(version)

    sub = ''
    if version[3] == 'alpha' and version[4] == 0:
        git_changeset = get_git_changeset()
        if git_changeset:
            sub = '.dev%s' % git_changeset

    elif version[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[version[3]] + str(version[4])

    return str(major + sub)


def get_major_version(version=None):
    "Returns major version from VERSION."
    version = get_complete_version(version)
    parts = 2 if version[2] == 0 else 3
    major = '.'.join(str(x) for x in version[:parts])
    return major


def get_complete_version(version=None):
    """Returns a tuple of the django version. If version argument is
    non-empty, then checks for correctness of the tuple provided.
    """
    if version is None:
        from django import VERSION as version
    else:
        assert len(version) == 5
        assert version[3] in ('alpha', 'beta', 'rc', 'final')
    return version


@lru_cache()
def get_git_changeset():
    """Returns a numeric identifier of the latest git changeset.

    The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
    This value isn't guaranteed to be unique, but collisions are very
    unlikely, so it's sufficient for generating the development version
    numbers.
    """
    repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               shell=True, cwd=repo_dir, universal_newlines=True)
    timestamp = git_log.communicate()[0]
    try:
        timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
    except ValueError:
        return None
    return timestamp.strftime('%Y%m%d%H%M%S')
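Worked examples of how get_version() maps a 5-tuple VERSION to a version string, following the branches above. The .dev timestamp depends on the local git checkout, so it is shown symbolically:

# get_version((1, 8, 0, 'final', 0))  -> '1.8'    (trailing Z == 0 is dropped)
# get_version((1, 8, 1, 'final', 0))  -> '1.8.1'
# get_version((1, 8, 0, 'rc', 1))     -> '1.8c1'  (mapping: rc -> 'c')
# get_version((1, 8, 0, 'beta', 2))   -> '1.8b2'
# get_version((1, 8, 0, 'alpha', 0))  -> '1.8.devYYYYMMDDHHMMSS'  (pre-alpha, if git is available)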
repo_name: zsdonghao/tensorlayer | path: tensorlayer/initializers.py | language: Python | license: apache-2.0 | size: 7,005 | score: 0.001999
#! /usr/bin/python
# -*- coding: utf-8 -*-

import numpy as np
import tensorflow as tf

__all__ = [
    'Initializer', 'Zeros', 'Ones', 'Constant', 'RandomUniform', 'RandomNormal',
    'TruncatedNormal', 'deconv2d_bilinear_upsampling_initializer'
]


class Initializer(object):
    """Initializer base class: all initializers inherit from this class.
    """

    def __call__(self, shape, dtype=None):
        """Returns a tensor object initialized as specified by the initializer.

        Parameters
        ----------
        shape : tuple of int.
            The shape of the tensor.
        dtype : Optional dtype of the tensor.
            If not provided will return tensor of `tf.float32`.

        Returns
        -------

        """
        raise NotImplementedError

    def get_config(self):
        """Returns the configuration of the initializer as a JSON-serializable dict.

        Returns
        -------
        A JSON-serializable Python dict.
        """
        return {}

    @classmethod
    def from_config(cls, config):
        """Instantiates an initializer from a configuration dictionary.

        Parameters
        ----------
        config : A python dictionary.
            It will typically be the output of `get_config`.

        Returns
        -------
        An Initializer instance.
        """
        if 'dtype' in config:
            config.pop('dtype')
        return cls(**config)


class Zeros(Initializer):
    """Initializer that generates tensors initialized to 0.
    """

    def __call__(self, shape, dtype=tf.float32):
        return tf.zeros(shape, dtype=dtype)


class Ones(Initializer):
    """Initializer that generates tensors initialized to 1.
    """

    def __call__(self, shape, dtype=tf.float32):
        return tf.ones(shape, dtype=dtype)


class Constant(Initializer):
    """Initializer that generates tensors initialized to a constant value.

    Parameters
    ----------
    value : A python scalar or a numpy array.
        The assigned value.

    """

    def __init__(self, value=0):
        self.value = value

    def __call__(self, shape, dtype=None):
        return tf.constant(self.value, shape=shape, dtype=dtype)

    def get_config(self):
        return {"value": self.value}


class RandomUniform(Initializer):
    """Initializer that generates tensors with a uniform distribution.

    Parameters
    ----------
    minval : A python scalar or a scalar tensor.
        Lower bound of the range of random values to generate.
    maxval : A python scalar or a scalar tensor.
        Upper bound of the range of random values to generate.
    seed : A Python integer.
        Used to seed the random generator.

    """

    def __init__(self, minval=-0.05, maxval=0.05, seed=None):
        self.minval = minval
        self.maxval = maxval
        self.seed = seed

    def __call__(self, shape, dtype=tf.float32):
        return tf.random.uniform(shape, self.minval, self.maxval, dtype=dtype, seed=self.seed)

    def get_config(self):
        return {"minval": self.minval, "maxval": self.maxval, "seed": self.seed}


class RandomNormal(Initializer):
    """Initializer that generates tensors with a normal distribution.

    Parameters
    ----------
    mean : A python scalar or a scalar tensor.
        Mean of the random values to generate.
    stddev : A python scalar or a scalar tensor.
        Standard deviation of the random values to generate.
    seed : A Python integer.
        Used to seed the random generator.

    """

    def __init__(self, mean=0.0, stddev=0.05, seed=None):
        self.mean = mean
        self.stddev = stddev
        self.seed = seed

    def __call__(self, shape, dtype=tf.float32):
        return tf.random.normal(shape, self.mean, self.stddev, dtype=dtype, seed=self.seed)

    def get_config(self):
        return {"mean": self.mean, "stddev": self.stddev, "seed": self.seed}


class TruncatedNormal(Initializer):
    """Initializer that generates a truncated normal distribution.

    These values are similar to values from a `RandomNormal`
    except that values more than two standard deviations from the mean
    are discarded and re-drawn. This is the recommended initializer for
    neural network weights and filters.

    Parameters
    ----------
    mean : A python scalar or a scalar tensor.
        Mean of the random values to generate.
    stddev : A python scalar or a scalar tensor.
        Standard deviation of the random values to generate.
    seed : A Python integer.
        Used to seed the random generator.

    """

    def __init__(self, mean=0.0, stddev=0.05, seed=None):
        self.mean = mean
        self.stddev = stddev
        self.seed = seed

    def __call__(self, shape, dtype=tf.float32):
        return tf.random.truncated_normal(shape, self.mean, self.stddev, dtype=dtype, seed=self.seed)

    def get_config(self):
        return {"mean": self.mean, "stddev": self.stddev, "seed": self.seed}


def deconv2d_bilinear_upsampling_initializer(shape):
    """Returns the initializer that can be passed to DeConv2dLayer for initializing the
    weights in correspondence to channel-wise bilinear up-sampling.
    Used in segmentation approaches such as [FCN](https://arxiv.org/abs/1605.06211)

    Parameters
    ----------
    shape : tuple of int
        The shape of the filters, [height, width, output_channels, in_channels].
        It must match the shape passed to DeConv2dLayer.

    Returns
    -------
    ``tf.constant_initializer``
        A constant initializer with weights set to correspond to per channel
        bilinear upsampling when passed as W_int in DeConv2dLayer

    """
    if shape[0] != shape[1]:
        raise Exception('deconv2d_bilinear_upsampling_initializer only supports symmetrical filter sizes')

    if shape[3] < shape[2]:
        raise Exception(
            'deconv2d_bilinear_upsampling_initializer behaviour is not defined for num_in_channels < num_out_channels '
        )

    filter_size = shape[0]
    num_out_channels = shape[2]
    num_in_channels = shape[3]

    # Create bilinear filter kernel as numpy array
    bilinear_kernel = np.zeros([filter_size, filter_size], dtype=np.float32)
    scale_factor = (filter_size + 1) // 2
    if filter_size % 2 == 1:
        center = scale_factor - 1
    else:
        center = scale_factor - 0.5
    for x in range(filter_size):
        for y in range(filter_size):
            bilinear_kernel[x, y] = (1 - abs(x - center) / scale_factor) * (1 - abs(y - center) / scale_factor)
    weights = np.zeros((filter_size, filter_size, num_out_channels, num_in_channels), dtype=np.float32)
    for i in range(num_out_channels):
        weights[:, :, i, i] = bilinear_kernel

    # assign numpy array to constant_initalizer and pass to get_variable
    return tf.constant_initializer(value=weights)


# Alias
zeros = Zeros
ones = Ones
constant = Constant
random_uniform = RandomUniform
random_normal = RandomNormal
truncated_normal = TruncatedNormal
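A short usage sketch for these initializers. The shapes and the DeConv2dLayer wiring are illustrative, not taken from the tensorlayer docs:

import tensorflow as tf

# Draw a (3, 3) weight tensor from each initializer.
w_tn = TruncatedNormal(mean=0.0, stddev=0.05)((3, 3), dtype=tf.float32)
w_ru = RandomUniform(minval=-0.05, maxval=0.05, seed=42)((3, 3))

# Round-trip an initializer through its JSON-serializable config.
cfg = RandomNormal(stddev=0.01).get_config()   # {'mean': 0.0, 'stddev': 0.01, 'seed': None}
init = RandomNormal.from_config(cfg)

# The bilinear helper expects [height, width, output_channels, in_channels]
# and returns a tf.constant_initializer meant for a DeConv2dLayer's weights.
bilinear_init = deconv2d_bilinear_upsampling_initializer((4, 4, 8, 8))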
repo_name: codelieche/codelieche.com | path: apps/account/views/message.py | language: Python | license: mit | size: 3,318 | score: 0.000353
# -*- coding:utf-8 -*-
"""
Views related to user messages
"""
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter

from account.serializers.message import MessageSerializer
from account.models import Message


class MessageCreateView(generics.CreateAPIView):
    """API for creating user messages"""
    queryset = Message.objects.all()
    serializer_class = MessageSerializer
    # Permission control
    permission_classes = (IsAuthenticated,)


class MessageListView(generics.ListAPIView):
    """
    User message list API view
    > A user can only see his/her own message list
    """
    # queryset = Message.objects.filter(deleted=False)
    serializer_class = MessageSerializer
    # Permission control
    permission_classes = (IsAuthenticated,)
    # Search and filtering
    filter_backends = (DjangoFilterBackend, SearchFilter)
    filter_fields = ('category', 'unread')
    search_fields = ('title', 'content')
    ordering_fields = ('id', 'time_added')
    ordering = ('-time_added',)

    def get_queryset(self):
        # Step 1: get the requesting user;
        # users may only see their own message list
        user = self.request.user
        # Step 2: filter by read state: unread=0/1 (read/unread)
        queryset = Message.objects.filter(user=user, is_deleted=False).order_by('-id')
        # Step 3: return the queryset
        return queryset


class MessageDetailView(generics.RetrieveDestroyAPIView):
    """
    User message detail view
    > Only the user's own messages are accessible; even a superuser can only
    view his/her own messages, not anyone else's
    """
    queryset = Message.objects.filter(is_deleted=False)
    serializer_class = MessageSerializer
    # Permission control
    permission_classes = (IsAuthenticated,)

    def get_object(self):
        # 1. Get the user
        user = self.request.user
        # 2. Call the parent method to fetch the object
        instance = super().get_object()
        # 3. Return the object if its user is the requesting user, else None
        if instance and user == instance.user:
            return instance
        else:
            return None

    def retrieve(self, request, *args, **kwargs):
        # 1. Get the object
        instance = self.get_object()
        # 2. Update unread
        if instance.unread:
            instance.unread = False
            instance.save(update_fields=('unread',))
        return super().retrieve(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        # 1. Get the user and the object
        user = self.request.user
        instance = self.get_object()
        # 2. The message may be deleted by its owner or by a superuser
        if instance.is_deleted:
            response = Response(status=204)
        else:
            if instance.user == user or user.is_superuser:
                instance.is_deleted = True
                instance.save()
                response = Response(status=204)
            else:
                response = Response("No permission to delete", status=403)
        # 3. Return the response
        return response
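A hypothetical URL configuration wiring up the three views above. The route strings and names are illustrative, not taken from the codelieche repo; the import path follows the dataset's file path:

from django.urls import path

from account.views.message import (
    MessageCreateView, MessageListView, MessageDetailView,
)

urlpatterns = [
    path('create/', MessageCreateView.as_view(), name='message-create'),
    path('list/', MessageListView.as_view(), name='message-list'),
    path('<int:pk>/', MessageDetailView.as_view(), name='message-detail'),
]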
repo_name: o-unity/lanio | path: old/lsrv/bin/getTemp.py | language: Python | license: gpl-2.0 | size: 977 | score: 0.022518
#!/usr/bin/python
# -*- coding: utf-8 -*-

import re, os, time

# function: read and parse sensor data file
def read_sensor(path):
    value = "U"
    try:
        f = open(path, "r")
        line = f.readline()
        if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
            line = f.readline()
            m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
            if m:
                value = str(round(float(m.group(2)) / 1000.0, 1))
        f.close()
    except (IOError), e:
        print time.strftime("%x %X"), "Error reading", path, ": ", e
    return value

# define paths to 1-wire sensor data
# NB: without a trailing comma this is a plain string, not a 1-tuple
pathes = ("/sys/bus/w1/devices/28-0314640daeff/w1_slave")

# read sensor data
#for path in pathes:
#    path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
#    print read_sensor(path)
#    time.sleep(30)

flag = 1
temp = 0
temp2 = 0
while (flag):
    temp2 = temp
    temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
    if temp2 != temp:
        print temp
    time.sleep(11)
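For reference, a typical DS18B20 w1_slave file has the two-line layout the regexes above expect (this sample is illustrative; the raw bytes vary per reading):

# 72 01 4b 46 7f ff 0e 10 57 : crc=57 YES
# 72 01 4b 46 7f ff 0e 10 57 t=23125
#
# read_sensor() would return "23.1" for this reading (23125 / 1000, rounded to one decimal).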
repo_name: pierce403/EmpirePanel | path: lib/modules/collection/screenshot.py | language: Python | license: bsd-3-clause | size: 3,876 | score: 0.015222
from lib.common import helpers


class Module:

    def __init__(self, mainMenu, params=[]):

        self.info = {
            'Name': 'Get-Screenshot',
            'Author': ['@obscuresec', '@harmj0y'],
            'Description': ('Takes a screenshot of the current desktop and '
                            'returns the output as a .PNG.'),
            'Background': False,
            'OutputExtension': 'png',
            'NeedsAdmin': False,
            'OpsecSafe': True,
            'MinPSVersion': '2',
            'Comments': [
                'https://github.com/mattifestation/PowerSploit/blob/master/Exfiltration/Get-TimedScreenshot.ps1'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent': {
                'Description': 'Agent to run module on.',
                'Required': True,
                'Value': ''
            },
            'Ratio': {
                'Description': "JPEG Compression ratio: 1 to 100.",
                'Required': False,
                'Value': ''
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self):

        script = """
function Get-Screenshot
{
    param
    (
        [Parameter(Mandatory = $False)]
        [string]
        $Ratio
    )
    Add-Type -Assembly System.Windows.Forms;
    $ScreenBounds = [Windows.Forms.SystemInformation]::VirtualScreen;
    $ScreenshotObject = New-Object Drawing.Bitmap $ScreenBounds.Width, $ScreenBounds.Height;
    $DrawingGraphics = [Drawing.Graphics]::FromImage($ScreenshotObject);
    $DrawingGraphics.CopyFromScreen( $ScreenBounds.Location, [Drawing.Point]::Empty, $ScreenBounds.Size);
    $DrawingGraphics.Dispose();
    $ms = New-Object System.IO.MemoryStream;
    if ($Ratio) {
        try {
            $iQual = [convert]::ToInt32($Ratio);
        } catch {
            $iQual=80;
        }
        if ($iQual -gt 100){
            $iQual=100;
        } elseif ($iQual -lt 1){
            $iQual=1;
        }
        $encoderParams = New-Object System.Drawing.Imaging.EncoderParameters;
        $encoderParams.Param[0] = New-Object Drawing.Imaging.EncoderParameter ([System.Drawing.Imaging.Encoder]::Quality, $iQual);
        $jpegCodec = [Drawing.Imaging.ImageCodecInfo]::GetImageEncoders() | Where-Object { $_.FormatDescription -eq \"JPEG\" }
        $ScreenshotObject.save($ms, $jpegCodec, $encoderParams);
    } else {
        $ScreenshotObject.save($ms, [Drawing.Imaging.ImageFormat]::Png);
    }
    $ScreenshotObject.Dispose();
    [convert]::ToBase64String($ms.ToArray());
}
Get-Screenshot"""

        if self.options['Ratio']['Value']:
            if self.options['Ratio']['Value'] != '0':
                self.info['OutputExtension'] = 'jpg'
            else:
                self.options['Ratio']['Value'] = ''
                self.info['OutputExtension'] = 'png'
        else:
            self.info['OutputExtension'] = 'png'

        for option, values in self.options.iteritems():
            if option.lower() != "agent":
                if values['Value'] and values['Value'] != '':
                    if values['Value'].lower() == "true":
                        # if we're just adding a switch
                        script += " -" + str(option)
                    else:
                        script += " -" + str(option) + " " + str(values['Value'])

        return script
repo_name: brainwane/zulip | path: zerver/tests/test_external.py | language: Python | license: apache-2.0 | size: 5,500 | score: 0.003273
import time
from unittest import mock

import DNS
from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import HttpResponse

from zerver.forms import email_is_not_mit_mailing_list
from zerver.lib.rate_limiter import (
    RateLimitedUser,
    RateLimiterLockingException,
    add_ratelimit_rule,
    remove_ratelimit_rule,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.zephyr import compute_mit_user_fullname
from zerver.models import UserProfile


class MITNameTest(ZulipTestCase):
    def test_valid_hesiod(self) -> None:
        with mock.patch(
            'DNS.dnslookup',
            return_value=[['starnine:*:84233:101:Athena Consulting Exchange User,,,:/mit/starnine:/bin/bash']],
        ):
            self.assertEqual(compute_mit_user_fullname(self.mit_email("starnine")),
                             "Athena Consulting Exchange User")
        with mock.patch(
            'DNS.dnslookup',
            return_value=[['sipbexch:*:87824:101:Exch Sipb,,,:/mit/sipbexch:/bin/athena/bash']],
        ):
            self.assertEqual(compute_mit_user_fullname("sipbexch@mit.edu"), "Exch Sipb")

    def test_invalid_hesiod(self) -> None:
        with mock.patch('DNS.dnslookup', side_effect=DNS.Base.ServerError('DNS query status: NXDOMAIN', 3)):
            self.assertEqual(compute_mit_user_fullname("1234567890@mit.edu"), "1234567890@mit.edu")
        with mock.patch('DNS.dnslookup', side_effect=DNS.Base.ServerError('DNS query status: NXDOMAIN', 3)):
            self.assertEqual(compute_mit_user_fullname("ec-discuss@mit.edu"), "ec-discuss@mit.edu")

    def test_mailinglist(self) -> None:
        with mock.patch('DNS.dnslookup', side_effect=DNS.Base.ServerError('DNS query status: NXDOMAIN', 3)):
            self.assertRaises(ValidationError, email_is_not_mit_mailing_list, "1234567890@mit.edu")
        with mock.patch('DNS.dnslookup', side_effect=DNS.Base.ServerError('DNS query status: NXDOMAIN', 3)):
            self.assertRaises(ValidationError, email_is_not_mit_mailing_list, "ec-discuss@mit.edu")

    def test_notmailinglist(self) -> None:
        with mock.patch('DNS.dnslookup', return_value=[['POP IMAP.EXCHANGE.MIT.EDU starnine']]):
            email_is_not_mit_mailing_list("sipbexch@mit.edu")


class RateLimitTests(ZulipTestCase):

    def setUp(self) -> None:
        super().setUp()
        settings.RATE_LIMITING = True
        add_ratelimit_rule(1, 5)

    def tearDown(self) -> None:
        settings.RATE_LIMITING = False
        remove_ratelimit_rule(1, 5)
        super().tearDown()

    def send_api_message(self, user: UserProfile, content: str) -> HttpResponse:
        return self.api_post(user, "/api/v1/messages", {"type": "stream",
                                                        "to": "Verona",
                                                        "client": "test suite",
                                                        "content": content,
                                                        "topic": "whatever"})

    def test_headers(self) -> None:
        user = self.example_user('hamlet')
        RateLimitedUser(user).clear_history()
        result = self.send_api_message(user, "some stuff")
        self.assertTrue('X-RateLimit-Remaining' in result)
        self.assertTrue('X-RateLimit-Limit' in result)
        self.assertTrue('X-RateLimit-Reset' in result)

    def test_ratelimit_decrease(self) -> None:
        user = self.example_user('hamlet')
        RateLimitedUser(user).clear_history()
        result = self.send_api_message(user, "some stuff")
        limit = int(result['X-RateLimit-Remaining'])
        result = self.send_api_message(user, "some stuff 2")
        newlimit = int(result['X-RateLimit-Remaining'])
        self.assertEqual(limit, newlimit + 1)

    def test_hit_ratelimits(self) -> None:
        user = self.example_user('cordelia')
        RateLimitedUser(user).clear_history()

        start_time = time.time()
        for i in range(6):
            with mock.patch('time.time', return_value=(start_time + i * 0.1)):
                result = self.send_api_message(user, f"some stuff {i}")

        self.assertEqual(result.status_code, 429)
        json = result.json()
        self.assertEqual(json.get("result"), "error")
        self.assertIn("API usage exceeded rate limit", json.get("msg"))
        self.assertEqual(json.get('retry-after'), 0.5)
        self.assertTrue('Retry-After' in result)
        self.assertEqual(result['Retry-After'], '0.5')

        # We actually wait a second here, rather than force-clearing our history,
        # to make sure the rate-limiting code automatically forgives a user
        # after some time has passed.
        with mock.patch('time.time', return_value=(start_time + 1.01)):
            result = self.send_api_message(user, "Good message")

        self.assert_json_success(result)

    @mock.patch('zerver.lib.rate_limiter.logger.warning')
    def test_hit_ratelimiterlockingexception(self, mock_warn: mock.MagicMock) -> None:
        user = self.example_user('cordelia')
        RateLimitedUser(user).clear_history()

        with mock.patch('zerver.lib.rate_limiter.RedisRateLimiterBackend.incr_ratelimit',
                        side_effect=RateLimiterLockingException):
            result = self.send_api_message(user, "some stuff")
            self.assertEqual(result.status_code, 429)

        mock_warn.assert_called_with(
            "Deadlock trying to incr_ratelimit for %s",
            f"RateLimitedUser:{user.id}:api_by_user",
        )
repo_name: lk-geimfari/elizabeth | path: mimesis/data/int/person.py | language: Python | license: mit | size: 140,388 | score: 0
"""Provides all the generic data related to the personal information.""" from typing import Tuple BLOOD_GROUPS = ( "O+", "A+", "B+", "AB+", "O−", "A−", "B−", "AB−", ) GENDER_SYMBOLS: Tuple[str, str, str] = ( "♂", "♀", "⚲", ) USERNAMES = [ "aaa", "aaron", "abandoned", "abc", "aberdeen", "abilities", "ability", "able", "aboriginal", "abortion", "about", "above", "abraham", "abroad", "abs", "absence", "absent", "absolute", "absolutely", "absorption", "abstract", "abstracts", "abu", "abuse", "academic", "academics", "academy", "acc", "accent", "accept", "acc
eptable", "acceptance", "accepte
d", "accepting", "accepts", "access", "accessed", "accessibility", "accessible", "accessing", "accessories", "accessory", "accident", "accidents", "accommodate", "accommodation", "accommodations", "accompanied", "accompanying", "accomplish", "accomplished", "accordance", "according", "accordingly", "account", "accountability", "accounting", "accounts", "accreditation", "accredited", "accuracy", "accurate", "accurately", "accused", "acdbentity", "ace", "acer", "achieve", "achieved", "achievement", "achievements", "achieving", "acid", "acids", "acknowledge", "acknowledged", "acm", "acne", "acoustic", "acquire", "acquired", "acquisition", "acquisitions", "acre", "acres", "acrobat", "across", "acrylic", "act", "acting", "action", "actions", "activated", "activation", "active", "actively", "activists", "activities", "activity", "actor", "actors", "actress", "acts", "actual", "actually", "acute", "ada", "adam", "adams", "adaptation", "adapted", "adapter", "adapters", "adaptive", "adaptor", "add", "added", "addiction", "adding", "addition", "additional", "additionally", "additions", "address", "addressed", "addresses", "addressing", "adds", "adelaide", "adequate", "adidas", "adipex", "adjacent", "adjust", "adjustable", "adjusted", "adjustment", "adjustments", "admin", "administered", "administration", "administrative", "administrator", "administrators", "admission", "admissions", "admit", "admitted", "adobe", "adolescent", "adopt", "adopted", "adoption", "adrian", "ads", "adsl", "adult", "adults", "advance", "advanced", "advancement", "advances", "advantage", "advantages", "adventure", "adventures", "adverse", "advert", "advertise", "advertisement", "advertisements", "advertiser", "advertisers", "advertising", "advice", "advise", "advised", "advisor", "advisors", "advisory", "advocacy", "advocate", "adware", "aerial", "aerospace", "affair", "affairs", "affect", "affected", "affecting", "affects", "affiliate", "affiliated", "affiliates", "affiliation", "afford", "affordable", "afghanistan", "afraid", "africa", "african", "after", "afternoon", "afterwards", "again", "against", "age", "aged", "agencies", "agency", "agenda", "agent", "agents", "ages", "aggregate", "aggressive", "aging", "ago", "agree", "agreed", "agreement", "agreements", "agrees", "agricultural", "agriculture", "ahead", "aid", "aids", "aim", "aimed", "aims", "air", "aircraft", "airfare", "airline", "airlines", "airplane", "airport", "airports", "aka", "ala", "alabama", "alan", "alarm", "alaska", "albania", "albany", "albert", "alberta", "album", "albums", "albuquerque", "alcohol", "alert", "alerts", "alex", "alexander", "alexandria", "alfred", "algebra", "algeria", "algorithm", "algorithms", "ali", "alias", "alice", "alien", "align", "alignment", "alike", "alive", "all", "allah", "allan", "alleged", "allen", "allergy", "alliance", "allied", "allocated", "allocation", "allow", "allowance", "allowed", "allowing", "allows", "alloy", "almost", "alone", "along", "alot", "alpha", "alphabetical", "alpine", "already", "also", "alt", "alter", "altered", "alternate", "alternative", "alternatively", "alternatives", "although", "alto", "aluminium", "aluminum", "alumni", "always", "amanda", "amateur", "amazing", "amazon", "ambassador", "amber", "ambien", "ambient", "amd", "amend", "amended", "amendment", "amendments", "amenities", "america", "american", "americans", "americas", "amino", "among", "amongst", "amount", "amounts", "amp", "ampland", "amplifier", "amsterdam", "amy", "ana", "anaheim", "analog", "analysis", "analyst", "analysts", 
"analytical", "analyze", "analyzed", "analyzes", "anatomy", "anchor", "ancient", "and", "andale", "anderson", "andorra", "andrea", "andreas", "andrew", "andrews", "andy", "angel", "angela", "angeles", "angels", "anger", "angle", "angola", "angry", "animal", "animals", "animated", "animation", "anime", "ann", "anna", "anne", "annex", "annie", "anniversary", "annotated", "annotation", "announce", "announced", "announcement", "announcements", "announces", "annoying", "annual", "annually", "anonymous", "another", "answer", "answered", "answering", "answers", "ant", "antarctica", "antenna", "anthony", "anthropology", "anti", "antibodies", "antibody", "anticipated", "antigua", "antique", "antiques", "antivirus", "antonio", "anxiety", "any", "anybody", "anymore", "anyone", "anything", "anytime", "anyway", "anywhere", "aol", "apache", "apart", "apartment", "apartments", "api", "apnic", "apollo", "app", "apparatus", "apparel", "apparent", "apparently", "appeal", "appeals", "appear", "appearance", "appeared", "appearing", "appears", "appendix", "apple", "appliance", "appliances", "applicable", "applicant", "applicants", "application", "applications", "applied", "applies", "apply", "applying", "appointed", "appointment", "appointments", "appraisal", "appreciate", "appreciated", "appreciation", "approach", "approaches", "appropriate", "appropriations", "approval", "approve", "approved", "approx", "approximate", "approximately", "apps", "apr", "april", "apt", "aqua", "aquarium", "aquatic", "arab", "arabia", "arabic", "arbitrary", "arbitration", "arbor", "arc", "arcade", "arch", "architect", "architects", "architectural", "architecture", "archive", "archived", "archives", "arctic", "are", "area", "areas", "arena", "arg", "argentina", "argue", "argued", "argument", "arguments", "arise", "arising", "arizona", "arkansas", "arlington", "arm", "armed", "armenia", "armor", "arms", "armstrong", "army", "arnold", "around
repo_name: mlflow/mlflow | path: examples/pytorch/AxHyperOptimizationPTL/ax_hpo_iris.py | language: Python | license: apache-2.0 | size: 2,854 | score: 0.002803
import argparse

import mlflow
from ax.service.ax_client import AxClient
from iris import IrisClassification
from iris_data_module import IrisDataModule
import pytorch_lightning as pl


def train_evaluate(params, max_epochs=100):
    model = IrisClassification(**params)
    dm = IrisDataModule()
    dm.setup(stage="fit")
    trainer = pl.Trainer(max_epochs=max_epochs)
    mlflow.pytorch.autolog()
    trainer.fit(model, dm)
    trainer.test(datamodule=dm)
    test_accuracy = trainer.callback_metrics.get("test_acc")
    return test_accuracy


def model_training_hyperparameter_tuning(max_epochs, total_trials, params):
    """
    This function takes input params max_epochs, total_trials, params
    and creates a nested run in Mlflow. The parameters, metrics, model and
    summary are dumped into their respective mlflow-run ids. The best
    parameters are dumped along with the baseline model.

    :param max_epochs: Max epochs used for training the model. Type:int
    :param total_trials: Number of ax-client experimental trials. Type:int
    :param params: Model parameters. Type:dict
    """
    with mlflow.start_run(run_name="Parent Run"):
        train_evaluate(params=params, max_epochs=max_epochs)

        ax_client = AxClient()
        ax_client.create_experiment(
            parameters=[
                {"name": "lr", "type": "range", "bounds": [1e-3, 0.15], "log_scale": True},
                {"name": "weight_decay", "type": "range", "bounds": [1e-4, 1e-3]},
                {"name": "momentum", "type": "range", "bounds": [0.7, 1.0]},
            ],
            objective_name="test_accuracy",
        )

        for i in range(total_trials):
            with mlflow.start_run(nested=True, run_name="Trial " + str(i)) as child_run:
                parameters, trial_index = ax_client.get_next_trial()
                test_accuracy = train_evaluate(params=parameters, max_epochs=max_epochs)

                # completion of trial
                ax_client.complete_trial(trial_index=trial_index, raw_data=test_accuracy.item())

        best_parameters, metrics = ax_client.get_best_parameters()

        for param_name, value in best_parameters.items():
            mlflow.log_param("optimum_" + param_name, value)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser = pl.Trainer.add_argparse_args(parent_parser=parser)
    parser.add_argument(
        "--total_trials",
        default=3,
        help="Number of trials to be run for the optimization experiment",
    )
    args = parser.parse_args()

    if "max_epochs" in args:
        max_epochs = args.max_epochs
    else:
        max_epochs = 100

    params = {"lr": 0.1, "momentum": 0.9, "weight_decay": 0}

    model_training_hyperparameter_tuning(
        max_epochs=int(max_epochs), total_trials=int(args.total_trials), params=params
    )
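The script would typically be launched from the command line; --total_trials comes from the argparse setup above, and --max_epochs is contributed by pl.Trainer.add_argparse_args (the values shown are just an example):

# python ax_hpo_iris.py --max_epochs 50 --total_trials 5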
repo_name: alexforencich/python-ivi | path: ivi/anritsu/anritsuMN9610B.py | language: Python | license: mit | size: 7,432 | score: 0.005113
""" Python Interchangeable Virtual Instrument Library Copyright (c) 2017 Alex Forencich Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from .. import ivi class anritsuMN9610B(ivi.Driver): "Anritsu MN9610B series optical attenuator driver" def __init__(self, *args, **kwargs): self.__dict__.setdefault('_instrument_id', '') super(anritsuMN9610B, self).__init__(*args, **kwargs) self._identity_description = "Anritsu MN9610B series optical attenuator driver" self._identity_identifier = "" self._identity_revision = "" self._identity_vendor = "" self._identity_instrument_manufacturer = "Anritsu" self._identity_instrument_model = "MN9610B" self._identity_instrument_firmware_revision = "" self._identity_specification_major_version = 0 self._identity_specification_minor_version = 0 self._identity_supported_instrument_models = ['MN9610B'] self._attenuation = 0.0 self._reference = 0.0 self._wavelength = 1300.0 self._disable = False self._add_property('attenuation', self._get_attenuation, self._set_attenuation, None, ivi.Doc(""" Specifies the attenuation of the optical path. The units are dB. """)) self._add_property('reference', self._get_reference, self._set_reference, None, ivi.Doc(""" Specifies the zero dB reference level for the attenuation setting. The units are dB. """)) self._add_property('wavelength', self._get_wavelength, self._set_wavelength, None, ivi.Doc(""" Specifies the wavelength of light used for accurate attenuation. The units are meters. """)) self._add_property('disable', self._get_disable, self._set_disable, None, ivi.Doc(""" Controls a shutter in the optical path. Shutter is closed when disable is set to True. """)) def _initialize(self, resource = None, id_query = False, reset = False, **keywargs): "Opens an I/O session to the instrument." super(anritsuMN9610B, self)._initialize(resource, id_query, reset, **keywargs) # interface clear if not self._driver_operation_simulate: self._clear() # check ID not supported (no ID command) # reset if reset: self.utility_reset() def _get_identity_instrument_manufacturer(self): return self._identity_instrument_manufacturer def _get_identity_instrument_model(self): return self._identity_instrument_model def _get_identity_instrument_firmware_revision(self): return self._identity_instrument_firmware_revision def _utility_disable(self): pass def _utility_error_query(self): error_code = 0 error_message = "No error" if not self._driver_operation_simulate: error_
code = int(self._ask("ERR?").split(' ')[1]) error_message = ["No error", "Command error", "Execution error", "Co
mmand and execution error"][error_code] return (error_code, error_message) def _utility_lock_object(self): pass def _utility_reset(self): pass def _utility_reset_with_defaults(self): self._utility_reset() def _utility_self_test(self): code = 0 message = "Self test passed" if not self._driver_operation_simulate: pass return (code, message) def _utility_unlock_object(self): pass def _get_attenuation(self): if not self._driver_operation_simulate and not self._get_cache_valid(): resp = self._ask("ATT?").split(' ')[1] self._attenuation = float(resp) self._set_cache_valid() return self._attenuation def _set_attenuation(self, value): value = round(float(value), 2) if value < -99.99 or value > 159.99: raise ivi.OutOfRangeException() if not self._driver_operation_simulate: self._write("ATT %.2f" % (value)) self._attenuation = value self._set_cache_valid() def _get_reference(self): if not self._driver_operation_simulate and not self._get_cache_valid(): resp = self._ask("OFS?").split(' ')[1] self._reference = float(resp) self._set_cache_valid() return self._reference def _set_reference(self, value): value = round(float(value), 2) if value < -99.99 or value > 99.99: raise ivi.OutOfRangeException() if not self._driver_operation_simulate: self._write("OFS %.2f" % (value)) self._reference = value self._set_cache_valid() self._set_cache_valid(False, 'attenuation') def _get_wavelength(self): if not self._driver_operation_simulate and not self._get_cache_valid(): resp = self._ask("WVL?").split(' ')[1] self._wavelength = float(resp) self._set_cache_valid() return self._wavelength def _set_wavelength(self, value): value = round(float(value), 9) if value < -1100e-9 or value > 1650e-9: raise ivi.OutOfRangeException() if not self._driver_operation_simulate: self._write("WVL %de-9" % (int(value*1e9))) self._wavelength = value self._set_cache_valid() def _get_disable(self): if not self._driver_operation_simulate and not self._get_cache_valid(): resp = self._ask("D?").split(' ')[1] self._disable = bool(int(resp)) self._set_cache_valid() return self._disable def _set_disable(self, value): value = bool(value) if not self._driver_operation_simulate: self._write("D %d" % (int(value))) self._disable = value self._set_cache_valid()
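A brief usage sketch for this driver. The VISA resource string is a placeholder and the ivi.anritsu import path is assumed from the package layout; the property names come directly from the _add_property calls above:

import ivi

# Hypothetical GPIB address; substitute the instrument's real resource string.
att = ivi.anritsu.anritsuMN9610B("GPIB0::10::INSTR")

att.wavelength = 1550e-9   # meters, used for accurate attenuation
att.attenuation = 10.0     # dB relative to the zero-dB reference
att.reference = 0.0        # zero-dB reference level, in dB
att.disable = False        # open the shutter in the optical path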
repo_name: shawnadelic/shuup | path: shuup/admin/modules/shops/views/edit.py | language: Python | license: agpl-3.0 | size: 4,592 | score: 0.001089
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django import forms
from django.conf import settings
from django.db.transaction import atomic
from django.utils.translation import ugettext_lazy as _

from shuup import configuration
from shuup.admin.form_part import (
    FormPart, FormPartsViewMixin, SaveFormPartsMixin, TemplatedFormDef
)
from shuup.admin.forms.widgets import MediaChoiceWidget
from shuup.admin.toolbar import get_default_edit_toolbar
from shuup.admin.utils.views import (
    check_and_raise_if_only_one_allowed, CreateOrUpdateView
)
from shuup.core.models import MutableAddress, Shop
from shuup.core.utils.form_mixins import ProtectedFieldsMixin
from shuup.utils.i18n import get_current_babel_locale
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm


class ShopBaseForm(ProtectedFieldsMixin, MultiLanguageModelForm):
    change_protect_field_text = _("This field cannot be changed since there are existing orders for this shop.")

    class Meta:
        model = Shop
        exclude = ("owner", "options", "contact_address")

    def __init__(self, **kwargs):
        initial_languages = [i[0] for i in kwargs.get("languages", [])]
        super(ShopBaseForm, self).__init__(**kwargs)
        self.fields["logo"].widget = MediaChoiceWidget(clearable=True)
        locale = get_current_babel_locale()
        self.fields["currency"] = forms.ChoiceField(
            choices=sorted(locale.currencies.items()),
            required=True,
            label=_("Currency")
        )
        self.fields["languages"] = forms.MultipleChoiceField(
            choices=settings.LANGUAGES,
            initial=initial_languages,
            required=True,
            label=_("Languages")
        )
        self.disable_protected_fields()

    def save(self):
        obj = super(ShopBaseForm, self).save()
        languages = set(self.cleaned_data.get("languages"))
        shop_languages = [(code, name) for code, name in settings.LANGUAGES if code in languages]
        configuration.set(obj, "languages", shop_languages)
        return obj


class ShopBaseFormPart(FormPart):
    priority = 1

    def get_form_defs(self):
        yield TemplatedFormDef(
            "base",
            ShopBaseForm,
            template_name="shuup/admin/shops/_edit_base_shop_form.jinja",
            required=True,
            kwargs={
                "instance": self.object,
                "languages": configuration.get(self.object, "languages", settings.LANGUAGES)
            }
        )

    def form_valid(self, form):
        self.object = form["base"].save()


class ContactAddressForm(forms.ModelForm):
    class Meta:
        model = MutableAddress
        fields = (
            "prefix", "name", "suffix", "name_ext",
            "phone", "email",
            "street", "street2", "street3",
            "postal_code", "city",
            "region_code", "region",
            "country"
        )


class ContactAddressFormPart(FormPart):
    priority = 2

    def get_form_defs(self):
        initial = {}
        yield TemplatedFormDef(
            "address",
            ContactAddressForm,
            template_name="shuup/admin/shops/_edit_contact_address_form.jinja",
            required=False,
            kwargs={"instance": self.object.contact_address, "initial": initial}
        )

    def form_valid(self, form):
        addr_form = form["address"]
        if addr_form.changed_data:
            addr = addr_form.save()
            setattr(self.object, "contact_address", addr)
            self.object.save()


class ShopEditView(SaveFormPartsMixin, FormPartsViewMixin, CreateOrUpdateView):
    model = Shop
    template_name = "shuup/admin/shops/edit.jinja"
    context_object_name = "shop"
    base_form_part_classes = [ShopBaseFormPart, ContactAddressFormPart]
    form_part_class_provide_key = "admin_shop_form_part"

    def get_object(self, queryset=None):
        obj = super(ShopEditView, self).get_object(queryset)
        check_and_raise_if_only_one_allowed("SHUUP_ENABLE_MULTIPLE_SHOPS", obj)
        return obj

    def get_toolbar(self):
        save_form_id = self.get_save_form_id()
        return get_default_edit_toolbar(self, save_form_id, with_split_save=settings.SHUUP_ENABLE_MULTIPLE_SHOPS)

    @atomic
    def form_valid(self, form):
        return self.save_form_parts(form)
emilybache/DiamondKata
python/test_diamond_centrist_iterative.py
Python
mit
5,199
0.004039
""" These test cases can be used to test-drive a solution to the diamond kata, in an interative manner. The idea is that you iterate towards a full solution, each test cycle you are closer to a full solution than in the previous one. The thing with iterating is you may delete stuff that was there before, or add stuff you know you will need to delete later. When you have got a test to pass, you will 'recycle' it, ie hide/delete the previous one. This is counter-intuitive for many people! to run the tests, use 'py.test' - see http://pytest.org Instructions: 1. Make the first test for Diamond A (which is failing) pass 2. change the 'ignore_' to 'test_' in the next test case. Make it pass too. 3. Uncomment the next line of the test case. Make it pass 4. When you've got a new test case passing, you may find you need to COMMENT OUT A PREVIOUS TEST that now fails. This is expected. You are 'recycling' tests. 5. When all the 'DiamondX' test cases in this file are uncommented and passing, you should have a full working solution. """ import diamond def test_DiamondA(): assert diamond.Diamond('A').print_diamond() == "A" def ignore_Diamond_with_only_spaces(): assert diamond.Diamond('A').diamond() == [[" "]] # assert diamond.Diamond('B').diamond() == \ # [[" ", " ", " "], # [" ", " ", " "], # [" ", " ", " "]] # assert diamond.Diamond('C').diamond() == \ # [[" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "]] def ignore_Diamond_with_center_marked_with_a_Z(): assert diamond.Diamond('A').diamond() == [["Z"]] # assert diamond.Diamond('B').diamond() == \ # [[" ", " ", " "], # [" ", "Z", " "], # [" ", " ", " "]] # assert diamond.Diamond('C').diamond() == \ # [[" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", "Z", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "]] def ignore_Diamond_with_1_0_coordinates_marked_with_a_Z(): assert diamond.Diamond('A').diamond() == [[" "]] # assert diamond.Diamond('B').diamond() == \ # [[" ", " ", " "], # [" ", " ",
"Z"], # [" ", " ", " "]] def ignore_Diamond_with_0_1_coordinates_marked_with_a_Z(): assert diamond.Diamond('B').diamond() == \ [[" ", "Z", " "], [" ", " ", " "], [" ", " ", " "]] # assert diamond.Diamond('C').diamond() == \ # [[" ", " ", " ", " ", " "], # [" ", " ", "Z", " ", " "], # [" ", " ", " ", " ", " "], #
[" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "]] def ignore_Diamond_with_minus2_1_coordinates_marked_with_a_Z(): assert diamond.Diamond('C').diamond() == \ [[" ", " ", " ", " ", " "], ["Z", " ", " ", " ", " "], [" ", " ", " ", " ", " "], [" ", " ", " ", " ", " "], [" ", " ", " ", " ", " "]] def ignore_Diamond_plot_As(): assert diamond.Diamond('B').diamond() == \ [[" ", "A", " "], [" ", " ", " "], [" ", "A", " "]] # assert diamond.Diamond('C').diamond() == \ # [[" ", " ", "A", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", " ", " ", " "], # [" ", " ", "A", " ", " "]] def ignore_Diamond_plot_As_and_middle_letter(): assert diamond.Diamond('B') == \ [[" ", "A", " "], ["B", " ", "B"], [" ", "A", " "]] # assert diamond.Diamond('C') == \ # [[" ", " ", "A", " ", " "], # [" ", " ", " ", " ", " "], # ["C", " ", " ", " ", "C"], # [" ", " ", " ", " ", " "], # [" ", " ", "A", " ", " "]] # assert diamond.Diamond('D').diamond() == \ # [[" ", " ", " ", "A", " ", " ", " "], # [" ", " ", " ", " ", " ", " ", " "], # [" ", " ", " ", " ", " ", " ", " "], # ["D", " ", " ", " ", " ", " ", "D"], # [" ", " ", " ", " ", " ", " ", " "], # [" ", " ", " ", " ", " ", " ", " "], # [" ", " ", " ", "A", " ", " ", " "]] def ignore_DiamondB(): assert diamond.Diamond('B').print_diamond() == " A\nB B\n A" def ignore_Diamond_plot_other_letter(): assert diamond.Diamond('C').diamond() == \ [[" ", " ", "A", " ", " "], [" ", "B", " ", "B", " "], ["C", " ", " ", " ", "C"], [" ", "B", " ", "B", " "], [" ", " ", "A", " ", " "]] # assert diamond.Diamond('D').diamond() == \ # [[" ", " ", " ", "A", " ", " ", " "], # [" ", " ", "B", " ", "B", " ", " "], # [" ", "C", " ", " ", " ", "C", " "], # ["D", " ", " ", " ", " ", " ", "D"], # [" ", "C", " ", " ", " ", "C", " "], # [" ", " ", "B", " ", "B", " ", " "], # [" ", " ", " ", "A", " ", " ", " "]] def ignore_DiamondC(): assert diamond.Diamond('C').print_diamond() == """\ A B B C C B B A""" def ignore_DiamondD(): assert diamond.Diamond('D').print_diamond() == """\ A B B C C D D C C B B A"""
coskundeniz/bitirme-projesi
config.py
Python
gpl-2.0
370
0
# -*- coding: utf-8 -*-

import os

DEBUG = True

SECRET_KEY = '\x0f v\xa5!\xb8*\x14\xfeY[\xaf\x83\xd4}vv*\xfb\x85'

abs_path = os.path.abspath('app.db')
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + abs_path

# config for forms
CSRF_ENABLED = True
CSRF_SESSION_KEY = '\x0f v\xa5!\xb8*\x14\xfeY[\xaf\x83\xd4}vv*\xfb\x85'

UPLOAD_FOLDER = os.path.join(os.getcwd(), "uploads/")
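
# Usage sketch: Flask only picks up the UPPER_CASE names in this module, e.g.
# via from_object (the app construction below is illustrative, not part of
# this repo):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.config.from_object('config')
#     app.config['SQLALCHEMY_DATABASE_URI']  # -> 'sqlite:////absolute/path/app.db'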
ge0rgi/cinder
cinder/scheduler/filter_scheduler.py
Python
apache-2.0
29,487
0
# Copyright (c) 2011 Intel Corporation
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""The FilterScheduler is for creating volumes.

You can customize this scheduler by specifying your own volume Filters and
Weighing Functions.
"""

from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils

from cinder import exception
from cinder.i18n import _, _LE, _LW
from cinder.scheduler import driver
from cinder.scheduler import scheduler_options
from cinder.volume import utils

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


class FilterScheduler(driver.Scheduler):
    """Scheduler that can be used for filtering and weighing."""

    def __init__(self, *args, **kwargs):
        super(FilterScheduler, self).__init__(*args, **kwargs)
        self.cost_function_cache = None
        self.options = scheduler_options.SchedulerOptions()
        self.max_attempts = self._max_attempts()

    def schedule(self, context, topic, method, *args, **kwargs):
        """Schedule contract that returns best-suited host for this request."""
        self._schedule(context, topic, *args, **kwargs)

    def _get_configuration_options(self):
        """Fetch options dictionary. Broken out for testing."""
        return self.options.get_configuration()

    def populate_filter_properties(self, request_spec, filter_properties):
        """Stuff things into filter_properties.

        Can be overridden in a subclass to add more data.
        """
        vol = request_spec['volume_properties']
        filter_properties['size'] = vol['size']
        filter_properties['availability_zone'] = vol.get('availability_zone')
        filter_properties['user_id'] = vol.get('user_id')
        filter_properties['metadata'] = vol.get('metadata')
        filter_properties['qos_specs'] = vol.get('qos_specs')

    def schedule_create_consistencygroup(self, context, group,
                                         request_spec_list,
                                         filter_properties_list):
        weighed_backend = self._schedule_group(
            context,
            request_spec_list,
            filter_properties_list)

        if not weighed_backend:
            raise exception.NoValidBackend(reason=_("No weighed backends "
                                                    "available"))

        backend = weighed_backend.obj
        updated_group = driver.group_update_db(context, group, backend.host,
                                               backend.cluster_name)

        self.volume_rpcapi.create_consistencygroup(context, updated_group)

    def schedule_create_group(self, context, group, group_spec,
                              request_spec_list,
                              group_filter_properties,
                              filter_properties_list):
        weighed_backend = self._schedule_generic_group(
            context,
            group_spec,
            request_spec_list,
            group_filter_properties,
            filter_properties_list)

        if not weighed_backend:
            raise exception.NoValidBackend(reason=_("No weighed backends "
                                                    "available"))

        backend = weighed_backend.obj
        updated_group = driver.generic_group_update_db(context, group,
                                                       backend.host,
                                                       backend.cluster_name)

        self.volume_rpcapi.create_group(context, updated_group)

    def schedule_create_volume(self, context, request_spec,
                               filter_properties):
        backend = self._schedule(context, request_spec, filter_properties)

        if not backend:
            raise exception.NoValidBackend(reason=_("No weighed backends "
                                                    "available"))

        backend = backend.obj
        volume_id = request_spec['volume_id']

        updated_volume = driver.volume_update_db(context, volume_id,
                                                 backend.host,
                                                 backend.cluster_name)
        self._post_select_populate_filter_properties(filter_properties,
                                                     backend)

        # context is not serializable
        filter_properties.pop('context', None)

        self.volume_rpcapi.create_volume(context, updated_volume, request_spec,
                                         filter_properties,
                                         allow_reschedule=True)

    def backend_passes_filters(self, context, backend, request_spec,
                               filter_properties):
        """Check if the specified backend passes the filters."""
        weighed_backends = self._get_weighted_candidates(context, request_spec,
                                                         filter_properties)
        # If backend has no pool defined we will ignore it in the comparison
        ignore_pool = not bool(utils.extract_host(backend, 'pool'))
        for weighed_backend in weighed_backends:
            backend_id = weighed_backend.obj.backend_id
            if ignore_pool:
                backend_id = utils.extract_host(backend_id)
            if backend_id == backend:
                return weighed_backend.obj

        volume_id = request_spec.get('volume_id', '??volume_id missing??')
        raise exception.NoValidBackend(reason=_('Cannot place volume %(id)s '
                                                'on %(backend)s') %
                                       {'id': volume_id, 'backend': backend})

    def find_retype_backend(self, context, request_spec,
                            filter_properties=None, migration_policy='never'):
        """Find a backend that can accept the volume with its new type."""
        filter_properties = filter_properties or {}
        backend = (request_spec['volume_properties'].get('cluster_name') or
                   request_spec['volume_properties']['host'])

        # The volume already exists on this backend, and so we shouldn't check
        # if it can accept the volume again in the CapacityFilter.
        filter_properties['vol_exists_on'] = backend

        weighed_backends = self._get_weighted_candidates(context, request_spec,
                                                         filter_properties)
        if not weighed_backends:
            raise exception.NoValidBackend(
                reason=_('No valid backends for volume %(id)s with type '
                         '%(type)s') % {'id': request_spec['volume_id'],
                                        'type': request_spec['volume_type']})

        for weighed_backend in weighed_backends:
            backend_state = weighed_backend.obj
            if backend_state.backend_id == backend:
                return backend_state

        if utils.extract_host(backend, 'pool') is None:
            # legacy volumes created before pool is introduced has no pool
            # info in host. But host_state.host always include pool level
            # info. In this case if above exact match didn't work out, we
            # find host_state that are of the same host of volume being
            # retyped. In other words, for legacy volumes, retyping could
            # cause migration between pools on same host, which we consider
            # it is different from migration between hosts thus allow that
            # to happen even migration policy is 'never'.
            for we
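
# As the module docstring notes, this scheduler is customized with volume
# Filters and Weighing Functions. A sketch of a custom filter in the style of
# this era's cinder (the base-class import path is an assumption; check
# cinder.scheduler.filters in your tree):
#
#     from cinder.scheduler import filters
#
#     class TenGiBFreeFilter(filters.BaseBackendFilter):
#         """Pass only backends reporting more than 10 GiB free."""
#
#         def backend_passes(self, backend_state, filter_properties):
#             return backend_state.free_capacity_gb > 10
#
# Such a filter would then be enabled through the scheduler_default_filters
# configuration option.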
gem/sidd
ui/helper/ms_attr_delegate.py
Python
agpl-3.0
3,434
0.00728
# Copyright (c) 2011-2013, ImageCat Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
dialog for editing mapping scheme branches
"""
from PyQt4.QtCore import Qt, QVariant
from PyQt4.QtGui import QItemDelegate, QComboBox, QMessageBox

from ui.constants import get_ui_string


class MSAttributeItemDelegate(QItemDelegate):
    def __init__(self, parent, valid_codes, min_editables, allow_repeats=False):
        super(MSAttributeItemDelegate, self).__init__(parent)
        self.valid_codes = valid_codes
        self.valid_code_names = []
        for description in valid_codes.keys():
            self.valid_code_names.append(description)
        self.valid_code_names.sort()
        self.min_editables = min_editables
        self.allow_repeats = allow_repeats

    # returns the widget used to change data from the model and can be
    # re-implemented to customize editing behavior.
    def createEditor(self, parent, option, index):
        if index.row() >= self.min_editables:
            editor = QComboBox(parent)
            return editor
        else:
            return None

    # provides the widget with data to manipulate
    def setEditorData(self, editor, index):
        current_val = str(index.data(Qt.DisplayRole).toString())
        editor.clear()
        for idx, name in enumerate(self.valid_code_names):
            editor.addItem(name)
            # set current value as selected from the drop-down
            if self.valid_codes[name] == current_val:
                editor.setCurrentIndex(idx)

    # ensures that the editor is displayed correctly with respect to the
    # item view.
    def updateEditorGeometry(self, editor, option, index):
        editor.setGeometry(option.rect)

    # returns updated data to the model.
    def setModelData(self, editor, model, index):
        existing_values = index.model().values
        code = self.valid_codes[str(editor.currentText())]
        if self.allow_repeats:
            model.setData(index, QVariant(code), Qt.EditRole)
        else:
            try:
                existing_values.index(code)
                # check to see if it is the same one
                if index.data().toString() != code:
                    # not the same one, show warning
                    QMessageBox.warning(None, get_ui_string("app.warning.title"),
                                        get_ui_string("dlg.msbranch.error.attribute.exists", (code)))
            except ValueError:
                # code not in existing values list
                model.setData(index, QVariant(code), Qt.EditRole)

    def getCurrentModelValue(self, model, index):
        return model.data(index, Qt.DisplayRole)
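
# Wiring sketch: a delegate like this one is attached to a single column of a
# Qt item view (tree_view and codes below are illustrative names, not part of
# this module):
#
#     delegate = MSAttributeItemDelegate(tree_view, codes, min_editables=1)
#     tree_view.setItemDelegateForColumn(0, delegate)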
upsight/doctor
doctor/types.py
Python
mit
33,198
0.000151
""" Copyright © 2017, Encode OSS Ltd. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. This file is a modified version of the typingsystem.py module in apistar. https://github.com/encode/apistar/blob/973c6485d8297c1bcef35a42221ac5107dce25d5/apistar/typesystem.py """ import math import re import typing from datetime import datetime from typing import Any import isodate import rfc3987 from doctor.errors import SchemaError, SchemaValidationError, TypeSystemError from doctor.parsers import parse_value StrOrList = typing.Union[str, typing.List[str]] class classproperty(object): """A decorator that allows a class to contain a class property. This is a function that can be executed on a non-instance but accessed via a property. >>> class Foo(object): ... a = 1 ... @classproperty ... def b(cls): ... return cls.a + 1 ... >>> Foo.b 2 """ def __init__(self, fget): self.fget = fget def __get__(self, owner_self, owner_cls): return self.fget(owner_cls) class MissingDescriptionError(ValueError): """An exception raised when a type is missing a description.""" pass class SuperType(object): """A super type all custom types must extend from. This super type requires all subclasses define a description attribute that describes what the type represents. A `ValueError` will be raised if the subclass does not define a `description` attribute. """ #: The description of what the type represents. description = None # type: str #: An example value for the type. example: Any = None #: Indicates if the value of this type is allowed to be None. nullable = False # type: bool #: An optional name of where to find the request parameter if it does not #: match the variable name in your logic function. param_name = None # type: str #: An optional callable to parse a request paramter before it gets validated #: by a type. It should accept a single value paramter and return the #: parsed value. 
parser = None # type: typing.Callable def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if self.description is None: cls = self.__class__ raise MissingDescriptionError( '{} did not define a description attribute'.format(cls)) @classmethod def validate(cls, value: typing.Any): """Additional validation for a type. All types will have a validate method where custom validation logic can be placed. The implementor should return nothing if the value is valid, otherwise a `TypeSystemError` should be raised. :param value: The value to be validated. """ pass class UnionType(SuperType): """A type that can be one of any of the defined `types`. The first type that does not raise a :class:`~doctor.errors.TypeSystemError` will be used as the type for the variable. """ #: A list of allowed types. types = [] _native_type = None def __new__(cls, *args, **kwargs): if not cls.types: raise TypeSystemError( 'Sub-class must define a `types` list attribute containing at ' 'least 1 type.', cls=cls) valid = False value = None errors = {} for obj_class in cls.types: try: value = obj_class(*args, **kwargs) valid = True # Dynamically change the native_type based on that of the value. cls._native_type = obj_class.native_type break except TypeSystemError as e: errors[obj_class.__name__] = str(e) continue if not valid: klasses = [klass.__name__ for klass in cls.types] raise TypeSystemError('Value is not one of {}. {}'.format( klasses, errors)) cls.validate(value) return value @classmethod def get_example(cls): """Returns an example value for the UnionType.""" return cls.types[0].get_example() @classproperty def native_type(cls): """Returns the native type. Since
UnionType can have multiple types, simply return the native type of the first type defined in the types attribute. If _native_type is set based on initializing a value with the class, then we return the dynamically modified
type that matches that of the value used during instantiation. e.g. >>> from doctor.types import UnionType, string, boolean >>> class BoolOrStr(UnionType): ... description = 'bool or str' ... types = [boolean('a bool'), string('a string')] ... >>> BoolOrStr.native_type <class 'bool'> >>> BoolOrStr('str') 'str' >>> BoolOrStr.native_type <class 'str'> >>> BoolOrStr(False) False >>> BoolOrStr.native_type <class 'bool'> """ if cls._native_type is not None: return cls._native_type return cls.types[0].native_type class String(SuperType, str): """Represents a `str` type.""" native_type = str errors = { 'blank': 'Must not be blank.', 'max_length': 'Must have no more than {max_length} characters.', 'min_length': 'Must have at least {min_length} characters.', 'pattern': 'Must match the pattern /{pattern}/.', } #: Will check format of the string for `date`, `date-time`, `email`, #: `time` and `uri`. format = None #: The maximum length of the string. max_length = None # type: int #: The minimum length of the string. min_length = None # type: int #: A regex pattern that the string should match. pattern = None # type: str #: Whether to trim whitespace on a string. Defaults to `True`. trim_whitespace = True def __new__(cls, *args, **kwargs): if cls.nullable and args[0] is None: return None value = super().__new__(cls, *args, **kwargs) if cls.trim_whitespace: value = value.strip() if cls.min_length is not None: if len(value) < cls.min_length: if cls.min_length == 1: raise TypeSystemError(cls=cls, code='blank') else: raise TypeSystemError(cls=cls, code='min_length') if cls.max_length is not None: if len(value) > cls.max_length: raise TypeSystemError(cls=cls, code='max_length') if cls.pattern is not None: if not re.search(cls.pattern, value): raise TypeSystemError(cls=cls,
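
# Declaration sketch: concrete types subclass these bases and must supply the
# description attribute that SuperType.__init__ enforces (AuthorName is an
# illustrative name, not part of this module):
#
#     class AuthorName(String):
#         description = 'An author name.'
#         min_length = 1
#         max_length = 128
#
#     AuthorName('  Jane Doe  ')  # -> 'Jane Doe', since trim_whitespace is True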
dataversioncontrol/dvc
setup.py
Python
apache-2.0
1,895
0
from setuptools import setup, find_packages
from dvc import VERSION


install_requires = [
    "ply>=3.9",  # See https://github.com/pyinstaller/pyinstaller/issues/1945
    "configparser>=3.5.0",
    "zc.lockfile>=1.2.1",
    "future>=0.16.0",
    "colorama>=0.3.9",
    "configobj>=5.0.6",
    "networkx>=2.1",
    "pyyaml>=3.12",
    "gitpython>=2.1.8",
    "setuptools>=34.0.0",
    "nanotime>=0.5.2",
    "pyasn1>=0.4.1",
    "schema>=0.6.7",
    "jsonpath-rw==1.4.0",
    "requests>=2.18.4",
    "grandalf==0.6",
    "asciimatics>=1.10.0",
    "distro>=1.3.0",
    "appdirs>=1.4.3",
    "treelib>=1.5.5",
]

# Extra dependencies for remote integrations
gs = ["google-cloud-storage==1.13.0"]
s3 = ["boto3==1.9.115"]
azure = ["azure-storage-blob==1.3.0"]
ssh = ["paramiko>=2.4.1"]
all_remotes = gs + s3 + azure + ssh

setup(
    name="dvc",
    version=VERSION,
    description="Git for data scientists - manage your code and data together",
    long_description=open("README.rst", "r").read(),
    author="Dmitry Petrov",
    author_email="dmitry@dataversioncontrol.com",
    download_url="https://github.com/iterative/dvc",
    license="Apache License 2.0",
    install_requires=install_requires,
    extras_require={
        "all": all_remotes,
        "gs": gs,
        "s3": s3,
        "azure": azure,
        "ssh": ssh,
        # NOTE: https://github.com/inveniosoftware/troubleshooting/issues/1
        ':python_version=="2.7"': ["futures"],
    },
    keywords="data science, data version control, machine learning",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
    ],
    packages=find_packages(exclude=["tests"]),
    include_package_data=True,
    url="http://dataversioncontrol.com",
    entry_points={"console_scripts": ["dvc = dvc.main:main"]},
    zip_safe=False,
)
cynja/coffeenator
webinterface/urls.py
Python
gpl-3.0
551
0.00363
from django.conf.urls import patterns, include, url

urlpatterns = patterns(
    '',
    url(r'^$', 'webinterface.view.dashboard.main'),
    url(r'^dashboard/$', 'webinterface.view.dashboard.main'),
    url(r'^login/$', 'webinterface.view.login.main'),
    url(r'^login/ajax/$', 'webinterface.view.login.ajax'),
    url(r'^settings/$', 'webinterface.view.settings.main'),
    url(r'^settings/ajax/$', 'webinterface.view.settings.ajax'),
    url(r'^orders/$', 'webinterface.view.orders.main'),
    url(r'^orders/ajax/$', 'webinterface.view.orders.ajax'),
)
OCA/knowledge
document_page_portal/models/document_page.py
Python
agpl-3.0
381
0
# Copyright 2020 - TODAY, Marcel Savegnago - Escodoo
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from odoo import fields, models


class DocumentPage(models.Model):

    _inherit = "document.page"

    is_public = fields.Boolean(
        "Public Page",
        help="If true it allows any user of the portal to have "
        "access to this document.",
    )
kehao95/Wechat_LearnHelper
src/env/lib/python3.5/site-packages/aiohttp/hdrs.py
Python
gpl-3.0
3,148
0
"""HTTP Headers constants.""" from .multidict import upstr METH_ANY = upstr('*') METH_CONNECT = upstr('CONNECT') METH_HEAD = upstr('HEAD') METH_GET = upstr('GET') METH_DELETE = upstr('DELETE') METH_OPTIONS = upstr('OPTIONS') METH_PATCH = upstr('PATCH') METH_POST = upstr('POST') METH_PUT = upstr('PUT') METH_TRACE = upstr('TRACE') ACCEPT = upstr('ACCEPT') ACCEPT_CHARSET = upstr('ACCEPT-CHARSET') ACCEPT_ENCODING = upstr('ACCEPT-ENCODING') ACCEPT_LANGUAGE = upstr('ACCEPT-LANGUAGE') ACCEPT_RANGES = upstr('ACCEPT-RANGES') ACCESS_CONTROL_MAX_AGE = upstr('ACCESS-CONTROL-MAX-AGE') ACCESS_CONTROL_ALLOW_CREDENTIALS = upstr('ACCESS-CONTROL-ALLOW-CREDENTIALS') ACCESS_CONTROL_ALLOW_HEADERS = upstr('ACCESS-CONTROL-ALLOW-HEADERS') ACCESS_CONTROL_ALLOW_METHODS = upstr('ACCESS-CONTROL-ALLOW-METHODS') ACCESS_CONTROL_ALLOW_ORIGIN = upstr('ACCESS-CONTROL-ALLOW-ORIGIN') ACCESS_CONTROL_EXPOSE_HEADERS = upstr('ACCESS-CONTROL-EXPOSE-HEADERS') ACCESS_CONTROL_REQUEST_HEADERS = upstr('ACCESS-CONTROL-REQUEST-HEADERS') ACCESS_CONTROL_REQUEST_METHOD = upstr('ACCESS-CONTROL-REQUEST-METHOD') AGE = upstr('AGE') ALLOW = upstr('ALLOW')
AUTHORIZATION = upstr('AUTHORIZATION') CACHE_CONTROL = upstr('CACHE-CONTROL') CONNECTION = upstr('CONNECTION') CONTENT_DISPOSITION = upstr('CONTENT-DISPOSITION') CONTENT_ENCODING = upstr('CONTENT-ENCODING')
CONTENT_LANGUAGE = upstr('CONTENT-LANGUAGE') CONTENT_LENGTH = upstr('CONTENT-LENGTH') CONTENT_LOCATION = upstr('CONTENT-LOCATION') CONTENT_MD5 = upstr('CONTENT-MD5') CONTENT_RANGE = upstr('CONTENT-RANGE') CONTENT_TRANSFER_ENCODING = upstr('CONTENT-TRANSFER-ENCODING') CONTENT_TYPE = upstr('CONTENT-TYPE') COOKIE = upstr('COOKIE') DATE = upstr('DATE') DESTINATION = upstr('DESTINATION') DIGEST = upstr('DIGEST') ETAG = upstr('ETAG') EXPECT = upstr('EXPECT') EXPIRES = upstr('EXPIRES') FROM = upstr('FROM') HOST = upstr('HOST') IF_MATCH = upstr('IF-MATCH') IF_MODIFIED_SINCE = upstr('IF-MODIFIED-SINCE') IF_NONE_MATCH = upstr('IF-NONE-MATCH') IF_RANGE = upstr('IF-RANGE') IF_UNMODIFIED_SINCE = upstr('IF-UNMODIFIED-SINCE') KEEP_ALIVE = upstr('KEEP-ALIVE') LAST_EVENT_ID = upstr('LAST-EVENT-ID') LAST_MODIFIED = upstr('LAST-MODIFIED') LINK = upstr('LINK') LOCATION = upstr('LOCATION') MAX_FORWARDS = upstr('MAX-FORWARDS') ORIGIN = upstr('ORIGIN') PRAGMA = upstr('PRAGMA') PROXY_AUTHENTICATE = upstr('PROXY_AUTHENTICATE') PROXY_AUTHORIZATION = upstr('PROXY-AUTHORIZATION') RANGE = upstr('RANGE') REFERER = upstr('REFERER') RETRY_AFTER = upstr('RETRY-AFTER') SEC_WEBSOCKET_ACCEPT = upstr('SEC-WEBSOCKET-ACCEPT') SEC_WEBSOCKET_VERSION = upstr('SEC-WEBSOCKET-VERSION') SEC_WEBSOCKET_PROTOCOL = upstr('SEC-WEBSOCKET-PROTOCOL') SEC_WEBSOCKET_KEY = upstr('SEC-WEBSOCKET-KEY') SEC_WEBSOCKET_KEY1 = upstr('SEC-WEBSOCKET-KEY1') SERVER = upstr('SERVER') SET_COOKIE = upstr('SET-COOKIE') TE = upstr('TE') TRAILER = upstr('TRAILER') TRANSFER_ENCODING = upstr('TRANSFER-ENCODING') UPGRADE = upstr('UPGRADE') WEBSOCKET = upstr('WEBSOCKET') URI = upstr('URI') USER_AGENT = upstr('USER-AGENT') VARY = upstr('VARY') VIA = upstr('VIA') WANT_DIGEST = upstr('WANT-DIGEST') WARNING = upstr('WARNING') WWW_AUTHENTICATE = upstr('WWW-AUTHENTICATE')
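
# Usage sketch: the upstr constants compare case-insensitively, so they index
# header multidicts directly (resp below is an illustrative response object):
#
#     from aiohttp import hdrs
#     content_type = resp.headers.get(hdrs.CONTENT_TYPE, 'application/octet-stream')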
persandstrom/home-assistant
tests/components/config/test_customize.py
Python
apache-2.0
3,519
0
"""Test Customize config panel.""" import asyncio import json from unittest.mock import patch from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.config import DATA_CUSTOMIZE @asyncio.coroutine def test_get_entity(hass, aiohttp_client): """Test getting entity.""" with patch.object(config, 'SECTIONS', ['customize']): yield from async_setup_component(hass, 'config', {}) client = yield from aiohttp_client(hass.http.app) def mock_read(path): """Mock reading data.""" return { 'hello.beer': { 'free': 'beer', }, 'other.entity': { 'do': 'something', }, } hass.data[DATA_CUSTOMIZE] = {'hello.beer': {'cold': 'beer'}} with patch('homeassistant.components.config._read', mock_read): resp = yield from client.get( '/api/config/customize/config/hello.beer') assert resp.status == 200 result = yield from resp.json() assert result == {'local': {'free': 'beer'}, 'global': {'cold': 'beer'}} @asyncio.coroutine def test_update_entity(hass, aiohttp_client): """Test updating entity.""" with patch.object(config, 'SECTIONS', ['customize']): yield from async_setup_component(hass, 'config', {}) client = yield from aiohttp_client(hass.http.app) orig_data = { 'hello.beer': { 'ignored': True, }, 'other.entity': { 'polling_intensity': 2, }, } def mock_read(path): """Mock reading data.""" return orig_data written = [] def mock_write(path, data): """Mock writing data.""" written.append(data) hass.states.async_set('hello.world', 'state', {'a': 'b'}) with patch('homeassistant.components.config._read', mock_read), \ patch('homeassistant.components.config._write', mock_write): resp = yield from client.post( '/api/config/customize/config/hello.world', data=json.dumps({ 'name': 'Beer', 'entities': ['light.top', 'light.bottom'], }
)) assert resp.status == 200 result = yield from resp.json() assert result == {'result': 'ok'} state = hass.states.get('hello.world') assert state.state == 'state' assert dict(state.attributes) == { 'a': 'b', 'name': 'Beer', 'entities': ['light.top', 'light.bottom']} orig_data['hello.world']['name'] = 'Beer' orig_data['hello.world']['entities'] = ['light.top', 'light.bottom'] assert written[0] == orig
_data @asyncio.coroutine def test_update_entity_invalid_key(hass, aiohttp_client): """Test updating entity.""" with patch.object(config, 'SECTIONS', ['customize']): yield from async_setup_component(hass, 'config', {}) client = yield from aiohttp_client(hass.http.app) resp = yield from client.post( '/api/config/customize/config/not_entity', data=json.dumps({ 'name': 'YO', })) assert resp.status == 400 @asyncio.coroutine def test_update_entity_invalid_json(hass, aiohttp_client): """Test updating entity.""" with patch.object(config, 'SECTIONS', ['customize']): yield from async_setup_component(hass, 'config', {}) client = yield from aiohttp_client(hass.http.app) resp = yield from client.post( '/api/config/customize/config/hello.beer', data='not json') assert resp.status == 400
sghai/robottelo
robottelo/ui/locators/common.py
Python
gpl-3.0
9,297
0
# -*- encoding: utf-8 -*-
"""Implements different locators for UI"""

from selenium.webdriver.common.by import By

from .model import LocatorDict


common_locators = LocatorDict({
    # common locators
    "body": (By.CSS_SELECTOR, "body"),

    # Notifications
    "notif.error": (
        By.XPATH, "//div[contains(@class, 'jnotify-notification-error')]"),
    "notif.warning": (
        By.XPATH, "//div[contains(@class, 'jnotify-notification-warning')]"),
    "notif.success": (
        By.XPATH, "//div[contains(@class, 'jnotify-notification-success')]"),
    "notif.close": (
        By.XPATH, "//a[@class='jnotify-close']"),
    "alert.success": (
        By.XPATH, "//div[contains(@class, 'alert-success')]"),
    "alert.error": (
        By.XPATH, "//div[contains(@class, 'alert-danger')]"),
    "alert.success_sub_form": (
        By.XPATH, "//div[contains(@bst-alert, 'success')]"),
    "alert.error_sub_form": (
        By.XPATH, "//div[contains(@bst-alert, 'danger')]"),
    "alert.close": (By.XPATH, "//button[@class='close ng-scope']"),

    "selected_entity": (
        By.XPATH,
        ("//div[@class='ms-selection']/ul[@class='ms-list']"
         "/li[@class='ms-elem-selection ms-selected']")),
    "select_filtered_entity": (
        By.XPATH, "//table//a/span[contains(@data-original-title, '%s')]"),
    "checked_entity": (
        By.XPATH, "//input[@checked='checked']/parent::label"),
    "entity_select": (
        By.XPATH,
        ("//div[@class='ms-selectable']//"
         "li[not(contains(@style, 'display: none'))]/span[contains(.,'%s')]")),
    "entity_deselect": (
        By.XPATH,
        ("//div[@class='ms-selection']//"
         "li[not(contains(@style, 'display: none'))]/span[contains(.,'%s')]")),
    "entity_checkbox": (
        By.XPATH,
        "//label[normalize-space(.)='%s']/input[@type='checkbox']"),
    "entity_select_list": (
        By.XPATH,
        "//ul/li/div[normalize-space(.)='%s']"),
    "entity_select_list_vmware": (
        By.XPATH,
        "//ul/li/div[contains(normalize-space(.),'%s')]"),
    "select_list_search_box": (
        By.XPATH, "//div[@id='select2-drop']//input"),
    "name_haserror": (
        By.XPATH,
        ("//label[@for='name']/../../"
         "div[contains(@class,'has-error')]")),
    "haserror": (
        By.XPATH,
        "//div[contains(@class,'has-error')]"),
    "common_haserror": (
        By.XPATH,
        ("//span[@class='help-block']/ul/"
         "li[contains(@ng-repeat,'error.messages')]")),
    "table_haserror": (
        By.XPATH,
        "//tr[contains(@class,'has-error')]/td/span"),
    "common_invalid": (
        By.XPATH,
        "//input[@id='name' and contains(@class,'ng-invalid')]"),
    "common_param_error": (
        By.XPATH,
        ("//div[@id='parameters']/span[@class='help-block'"
         "and string-length(text()) > 10]")),

    "search": (By.ID, "search"),
    "clear_search": (By.XPATH, "//a[@class='autocomplete-clear']"),
    "search_no_results": (By.XPATH, "//div[text()='No entries found']"),
    "auto_search": (
        By.XPATH,
        ("//ul[contains(@class, 'ui-autocomplete') or "
         "contains(@template-url, 'autocomplete')]/li/a[contains(., '%s')]")),
    "search_button": (By.XPATH, "//button[contains(@type,'submit')]"),
    "search_dropdown": (
        By.XPATH,
        ("//button[contains(@class, 'dropdown-toggle')]"
         "[@data-toggle='dropdown']")),
    "cancel_form": (By.XPATH, "//a[text()='Cancel']"),
    "submit": (By.NAME, "commit"),
    "select_action_dropdown": (
        By.XPATH,
        "//td[descendant::*[normalize-space(.)='%s']]/"
        "following-sibling::td/div/a[@data-toggle='dropdown']"),
    "delete_button": (
        By.XPATH,
        "//a[contains(@data-confirm, '%s') and @data-method='delete']"),
    "copy_name_input": (By.XPATH, "//input[@ng-model='copyName']"),
    "copy_create_button": (By.XPATH, "//button[@ng-click='copy(copyName)']"),
    "filter": (By.XPATH,
               ("//div[@id='ms-%s_ids']"
                "//input[@class='ms-filter']")),
    "parameter_tab": (By.XPATH, "//a[contains(., 'Parameters')]"),
    "add_parameter": (
        By.XPATH, "//a[contains(text(),'+ Add Parameter')]"),
    "new_parameter_name": (
        By.XPATH, "//input[@placeholder='Name' and not(@value)]"),
    "parameter_value": (
        By.XPATH,
        ("//table[contains(@id, 'parameters')]//tr"
         "/td[input[contains(@id, 'name')][contains(@value, '%s')]]"
         "/following-sibling::td//textarea")),
    "new_parameter_value": (
        By.XPATH, "//textarea[@placeholder='Value' and not(text())]"),
    "parameter_remove": (
        By.XPATH, "//tr/td/input[@value='%s']/following::td/a"),
    "table_column_title": (By.XPATH, "//th[contains(., '%s')]/*"),
    "table_cell_link": (
        By.XPATH,
        "//table[contains(@class, 'table')]"
        "//td[contains(normalize-space(.), '%s')]"
        "/parent::tr"
        "/td[count(//thead//tr/th[.='%s']/preceding-sibling::*)+1]/a"
    ),
    "table_cell_value": (
        By.XPATH,
        "//table[contains(@class, 'table')]"
        "//td[contains(normalize-space(.), '%s')]"
        "/parent::tr"
        "/td[count(//thead//tr/th[.='%s']/preceding-sibling::*)+1]"
    ),
    "table_column_values": (
        By.XPATH,
        "//table//td/parent::tr/td[count(//thead//tr/th[contains(., '%s')]"
        "/preceding-sibling::*)+1]"
    ),
    "table_select_all_checkbox": (
        By.XPATH,
        "//table[contains(@class, 'table')]"
        "//input[@type='checkbox'and @ng-model='selection.allSelected']"
    ),
    "application_logo": (
        By.XPATH, "//img[contains(@alt, 'Header logo')]"),
    "permission_denied": (
        By.XPATH,
        "//h1[contains(.,'Permission denied')]"
    ),

    # Katello Common Locators
    "confirm_remove": (
        By.XPATH, "//button[@ng-click='ok()' or @ng-click='delete()']"),
    "create": (By.XPATH, "//button[contains(@ng-click,'Save')]"),
    "save": (
        By.XPATH,
        ("//button[contains(@ng-click,'save')"
         "and not(contains(@class,'ng-hide'))]")),
    "close": (By.XPATH, "//button[@aria-label='Close']"),
    "cancel": (
        By.XPATH,
        "//button[contains(@ng-click,'cancel') and "
        "not(contains(@class,'ng-hide'))][contains(., 'Cancel')]"
    ),
    "name": (By.ID, "name"),
    "label": (By.ID, "label"),
    "description": (By.ID, "description"),
    "kt_select_action_dropdown": (
        By.XPATH,
        ("//button[contains(@ng-click, 'toggleDropdown')]"
         "[descendant::span[text()='Select Action']]")),
    "select_action": (
        By.XPATH,
        "//li/a/span[@class='ng-scope' and contains(., '%s')]"),
    "kt_search": (By.XPATH, "//input[@ng-model='table.searchTerm']"),
    "kt_clear_search": (
        By.XPATH, "//button[contains(@ng-click, 'searchCompleted = false')]"),
    "kt_search_no_results": (
        By.XPATH, "//table//span[@data-block='no-search-results-message']"),
    "kt_search_button": (
        By.XPATH, "//button[@ng-click='table.search(table.searchTerm)']"),
    "kt_table_search": (
        By.XPATH, "//input[@ng-model='detailsTable.searchTerm']"),
    "kt_table_search_button": (
        By.XPATH,
        "//button[@ng-click='detailsTable.search(detailsTable.searchTerm)']"),
    "kt_table_cell_value": (
        By.XPATH,
        "//table[@bst-table='table']//td[contains(normalize-space(.), '%s')]"
        "/parent::tr/td[count(//thead//tr/th[.='%s']/preceding-sibling::*)+1]"
    ),

    # Katello common Product and Repo locators
    "gpg_key": (By.ID, "gpg_key_id"),
    "all_values": (By.XPATH,
                   ("//div[contains(@class,'active')]//input[@type='checkbox'"
                    " and contains(@name, '%s')]")),
    "all_values_selection": (
        By.XPATH,
        ("//div[@class='ms-selection']//ul[@class='ms-list']/li"
         "/span[contains(.,'%s')]/..")),
    "usage_limit": (
        By.XPATH,
        "//input[contains(@ng-model, 'max')"
        "and contains(@ng-model, 'hosts')]"),
    "usage_limit_checkbox": (
        By.XPATH,
        "//input[contains(@ng-model, 'unlimited')"
        "and contains(@ng-model
blue-bird1/xss_fuzz
data/attribute.py
Python
apache-2.0
636
0.001572
#! /usr/bin/env python
# coding:utf-8
"""html tag attribute"""

from lib.data import BaseXssData


class Attribute(BaseXssData):
    """html tag attribute data"""

    def __init__(self):
        _data = [
            'accesskey',
            'class',
            'contenteditable',
            'contextmenu',
            'data-*',
            'dir',
            'draggable',
            'dropzone',
            'hidden',
            'id',
            'lang',
            'spellcheck',
            'style',
            'tabindex',
            'title',
            'translate',
        ]
        super(Attribute, self).__init__(_data)
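
# Usage sketch: BaseXssData's exact interface lives in lib.data and is not
# shown here; treating the instance as an iterable of attribute names is an
# assumption for illustration:
#
#     for attr in Attribute():
#         probe = '<div %s="x">' % attr  # one candidate injection point per attribute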
aepereyra/smslock
creatabla.py
Python
apache-2.0
264
0.003788
#!/usr/bin/python

import sqlite3

conn = sqlite3.connect('accesslist.db')

conn.execute('''CREATE TABLE USUARIO
       (CELLPHONE CHAR(11) PRIMARY KEY NOT NULL,
        PASSWD    CHAR(138) NOT NULL);''')

print "Table created successfully"
conn.close()
tfroehlich82/erpnext
erpnext/regional/india/setup.py
Python
gpl-3.0
7,127
0.027641
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals

import frappe, os, json
from frappe.custom.doctype.custom_field.custom_field import create_custom_fields
from frappe.permissions import add_permission
from erpnext.regional.india import states


def setup(company=None, patch=True):
    make_custom_fields()
    add_permissions()
    add_custom_roles_for_reports()
    frappe.enqueue('erpnext.regional.india.setup.add_hsn_sac_codes',
        now=frappe.flags.in_test)
    add_print_formats()
    if not patch:
        update_address_template()
        make_fixtures()


def update_address_template():
    with open(os.path.join(os.path.dirname(__file__), 'address_template.html'), 'r') as f:
        html = f.read()

    address_template = frappe.db.get_value('Address Template', 'India')
    if address_template:
        frappe.db.set_value('Address Template', 'India', 'template', html)
    else:
        # make new html template for India
        frappe.get_doc(dict(
            doctype='Address Template',
            country='India',
            template=html
        )).insert()


def add_hsn_sac_codes():
    # HSN codes
    with open(os.path.join(os.path.dirname(__file__), 'hsn_code_data.json'), 'r') as f:
        hsn_codes = json.loads(f.read())
    create_hsn_codes(hsn_codes, code_field="hsn_code")

    # SAC Codes
    with open(os.path.join(os.path.dirname(__file__), 'sac_code_data.json'), 'r') as f:
        sac_codes = json.loads(f.read())
    create_hsn_codes(sac_codes, code_field="sac_code")


def create_hsn_codes(data, code_field):
    for d in data:
        hsn_code = frappe.new_doc('GST HSN Code')
        hsn_code.description = d["description"]
        hsn_code.hsn_code = d[code_field]
        hsn_code.name = d[code_field]
        try:
            hsn_code.db_insert()
        except frappe.DuplicateEntryError:
            pass


def add_custom_roles_for_reports():
    for report_name in ('GST Sales Register', 'GST Purchase Register',
        'GST Itemised Sales Register', 'GST Itemised Purchase Register'):
        if not frappe.db.get_value('Custom Role', dict(report=report_name)):
            frappe.get_doc(dict(
                doctype='Custom Role',
                report=report_name,
                roles=[
                    dict(role='Accounts User'),
                    dict(role='Accounts Manager')
                ]
            )).insert()


def add_permissions():
    for doctype in ('GST HSN Code', 'GST Settings'):
        add_permission(doctype, 'All', 0)


def add_print_formats():
    frappe.reload_doc("regional", "print_format", "gst_tax_invoice")
    frappe.reload_doc("accounts", "print_format", "gst_pos_invoice")

    frappe.db.sql(""" update `tabPrint Format` set disabled = 0 where
        name in('GST POS Invoice', 'GST Tax Invoice') """)


def make_custom_fields():
    hsn_sac_field = dict(fieldname='gst_hsn_code', label='HSN/SAC',
        fieldtype='Data', options='item_code.gst_hsn_code',
        insert_after='description', allow_on_submit=1, print_hide=1)

    invoice_gst_fields = [
        dict(fieldname='gst_section', label='GST Details', fieldtype='Section Break',
            insert_after='select_print_heading', print_hide=1, collapsible=1),
        dict(fieldname='invoice_copy', label='Invoice Copy', fieldtype='Select',
            insert_after='gst_section', print_hide=1, allow_on_submit=1,
            options='Original for Recipient\nDuplicate for Transporter\nDuplicate for Supplier\nTriplicate for Supplier'),
        dict(fieldname='reverse_charge', label='Reverse Charge', fieldtype='Select',
            insert_after='invoice_copy', print_hide=1,
            options='Y\nN', default='N'),
        dict(fieldname='gst_col_break', fieldtype='Column Break',
            insert_after='reverse_charge'),
        dict(fieldname='invoice_type', label='Invoice Type', fieldtype='Select',
            insert_after='reverse_charge', print_hide=1,
            options='Regular\nSEZ\nExport\nDeemed Export', default='Regular'),
        dict(fieldname='export_type', label='Export Type', fieldtype='Select',
            insert_after='invoice_type', print_hide=1,
            depends_on='eval:in_list(["SEZ", "Export", "Deemed Export"], doc.invoice_type)',
            options='\nWith Payment of Tax\nWithout Payment of Tax'),
        dict(fieldname='ecommerce_gstin', label='E-commerce GSTIN', fieldtype='Data',
            insert_after='export_type', print_hide=1)
    ]

    purchase_invoice_gst_fields = [
        dict(fieldname='supplier_gstin', label='Supplier GSTIN', fieldtype='Data',
            insert_after='supplier_address', options='supplier_address.gstin', print_hide=1),
        dict(fieldname='company_gstin', label='Company GSTIN', fieldtype='Data',
            insert_after='shipping_address', options='shipping_address.gstin', print_hide=1)
    ]

    sales_invoice_gst_fields = [
        dict(fieldname='billing_address_gstin', label='Billing Address GSTIN', fieldtype='Data',
            insert_after='customer_address', options='customer_address.gstin', print_hide=1),
        dict(fieldname='customer_gstin', label='Customer GSTIN', fieldtype='Data',
            insert_after='shipping_address', options='shipping_address_name.gstin', print_hide=1),
        dict(fieldname='place_of_supply', label='Place of Supply', fieldtype='Data',
            insert_after='customer_gstin', print_hide=1, read_only=0),
        dict(fieldname='company_gstin', label='Company GSTIN', fieldtype='Data',
            insert_after='company_address', options='company_address.gstin', print_hide=1)
    ]

    custom_fields = {
        'Address': [
            dict(fieldname='gstin', label='Party GSTIN', fieldtype='Data',
                insert_after='fax'),
            dict(fieldname='gst_state', label='GST State', fieldtype='Select',
                options='\n'.join(states), insert_after='gstin'),
            dict(fieldname='gst_state_number', label='GST State Number', fieldtype='Int',
                insert_after='gst_state', read_only=1),
        ],
        'Purchase Invoice': purchase_invoice_gst_fields + invoice_gst_fields,
        'Sales Invoice': sales_invoice_gst_fields + invoice_gst_fields,
        "Delivery Note": sales_invoice_gst_fields,
        'Item': [
            dict(fieldname='gst_hsn_code', label='HSN/SAC', fieldtype='Link',
                options='GST HSN Code', insert_after='item_group'),
        ],
        'Quotation Item': [hsn_sac_field],
        'Supplier Quotation Item': [hsn_sac_field],
        'Sales Order Item': [hsn_sac_field],
        'Delivery Note Item': [hsn_sac_field],
        'Sales Invoice Item': [hsn_sac_field],
        'Purchase Order Item': [hsn_sac_field],
        'Purchase Receipt Item': [hsn_sac_field],
        'Purchase Invoice Item': [hsn_sac_field]
    }

    create_custom_fields(custom_fields)


def make_fixtures():
    docs = [
        {'doctype': 'Salary Component', 'salary_component': 'Professional Tax',
            'description': 'Professional Tax', 'type': 'Deduction'},
        {'doctype': 'Salary Component', 'salary_component': 'Provident Fund',
            'description': 'Provident fund', 'type': 'Deduction'},
        {'doctype': 'Salary Component', 'salary_component': 'House Rent Allowance',
            'description': 'House Rent Allowance', 'type': 'Earning'},
        {'doctype': 'Salary Component', 'salary_component': 'Basic',
            'description': 'Basic', 'type': 'Earning'},
        {'doctype': 'Salary Component', 'salary_component': 'Arrear',
            'description': 'Arrear', 'type': 'Earning'},
        {'doctype': 'Salary Component', 'salary_component': 'Leave Encashment',
            'description': 'Leave Encashment', 'type': 'Earning'}
    ]

    for d in docs:
        try:
            doc = frappe.get_doc(d)
            doc.flags.ignore_permissions = True
            doc.insert()
        except frappe.NameError:
            pass
ProfessorX/Config
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdeui/KPassivePopupMessageHandler.py
Python
gpl-2.0
584
0.008562
# encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc

# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg


class KPassivePopupMessageHandler(__PyQt4_QtCore.QObject, __PyKDE4_kdecore.KMessageHandler):
    # no doc
    def message(self, *args, **kwargs):  # real signature unknown
        pass

    def __init__(self, *args, **kwargs):  # real signature unknown
        pass
coderwjq/adt_python
02-linked_list/04-single_cycle_linked_list.py
Python
apache-2.0
5,206
0.000222
# coding:utf-8

# Operations on a singly linked circular list:
# is_empty()        check whether the list is empty
# length()          return the length of the list
# travel()          traverse the list
# add(item)         add a node at the head
# append(item)      add a node at the tail
# insert(pos, item) insert a node at position pos
# remove(item)      remove a node
# search(item)      check whether a node exists


class Node(object):
    """A list node"""

    def __init__(self, item):
        self.elem = item
        self.next = None


class SingleCycleLinkedList(object):
    """Singly linked circular list"""

    def __init__(self, node=None):
        self.__head = node
        # If node is not None, it must point at itself to form a cycle
        if node:
            node.next = node

    def is_empty(self):
        """Check whether the list is empty"""
        return self.__head is None

    def length(self):
        """Return the length of the list"""
        if self.is_empty():
            return 0
        else:
            cur = self.__head
            count = 1
            while cur.next is not self.__head:
                count += 1
                cur = cur.next
            return count

    def travel(self):
        """Traverse the list"""
        if self.is_empty():
            return
        else:
            cur = self.__head
            while cur.next is not self.__head:
                print(cur.elem, end=" ")
                cur = cur.next
            # The loop ends with cur at the tail node, whose element has not
            # been printed yet, so print it separately
            print(cur.elem)

    def add(self, item):
        """Add a node at the head (head insertion)"""
        node = Node(item)
        if self.is_empty():
            self.__head = node
            node.next = node
        else:
            # Need to reach the tail node first
            cur = self.__head
            while cur.next is not self.__head:
                cur = cur.next
            node.next = self.__head
            self.__head = node
            cur.next = node

    def append(self, item):
        """Add a node at the tail (tail insertion)"""
        node = Node(item)
        if self.is_empty():
            self.__head = node
            node.next = node
        else:
            # Again, need to reach the tail node first
            cur = self.__head
            while cur.next is not self.__head:
                cur = cur.next
            cur.next = node
            node.next = self.__head

    def insert(self, pos, item):
        """Insert a node at position pos"""
        if pos <= 0:
            self.add(item)
        elif pos > (self.length() - 1):
            self.append(item)
        else:
            node = Node(item)
            prev = self.__head
            count = 0
            while count < pos - 1:
                count += 1
                prev = prev.next
            # The loop ends with prev at the element just before the
            # insertion point
            node.next = prev.next
            prev.next = node

    def remove(self, item):
        """Remove a node.

        Has to consider whether the list is empty and whether the node being
        removed is the head node, the tail node, or a middle node.
        """
        if self.is_empty():
            return
        else:
            cur = self.__head
            pre = None
            while cur.next is not self.__head:
                if cur.elem == item:
                    # Head node or middle node?
                    if cur is self.__head:
                        # Head node: need to find the tail node first
                        rear = self.__head
                        while rear.next is not self.__head:
                            rear = rear.next
                        self.__head = cur.next
                        rear.next = self.__head
                    else:
                        # Middle node
                        pre.next = cur.next
                    return
                else:
                    pre = cur
                    cur = cur.next
            # The loop exits with cur at the tail node
            if cur.elem == item:
                # Mind the case where the list holds only one node
                if cur is self.__head:
                    self.__head = None
                else:
                    pre.next = self.__head

    def search(self, item):
        """Check whether a node exists"""
        if self.is_empty():
            return False
        else:
            cur = self.__head
            while cur.next is not self.__head:
                if cur.elem == item:
                    return True
                else:
                    cur = cur.next
            # The loop ends with cur at the tail node, which has not been
            # compared yet, so check it separately
            if cur.elem == item:
                return True
            else:
                return False


if __name__ == "__main__":
    scll = SingleCycleLinkedList()
    print("before initialized:", scll.is_empty())
    print("before initialized:", scll.length())

    scll.add(1)
    scll.add(2)
    scll.add(3)
    scll.add(4)
    scll.add(5)
    scll.add(6)
    scll.travel()

    scll.append(7)
    scll.travel()

    scll.insert(3, 99)
    scll.travel()

    print("scll.search(99):", scll.search(99))
    scll.remove(99)
    scll.travel()
Baumelbi/IntroPython2016
students/weidnem/session2/grid.py
Python
unlicense
289
0.010381
'''
'''


def printgrid():
    print("this will be a grid")
    pos = 0
    while pos < 11:
        if pos % 5 == 0:
            print("+----+----+")
            pos += 1
        else:
            print("|    |    |")
            pos += 1
    else:
        print()


printgrid()
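
# For reference, pos runs from 0 to 10 and a border row is printed whenever
# pos is a multiple of 5, so the call above produces:
#
#     this will be a grid
#     +----+----+
#     |    |    |    (four middle rows)
#     +----+----+
#     |    |    |    (four middle rows)
#     +----+----+
#
# followed by one blank line from the while/else clause.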
ginolhac/tutorials
python/advanced/celery/code/ulhpccelery/tasks.py
Python
gpl-3.0
224
0
from __future__ import absolute_import, unicode_literals
from .celery import app


@app.task
def add(x, y):
    return x + y


@app.task
def mul(x, y):
    return x * y


@app.task
def xsum(numbers):
    return sum(numbers)
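
# Usage sketch: with a worker running (e.g. `celery -A ulhpccelery worker`,
# assuming the standard project layout this module follows), the tasks are
# invoked asynchronously and results fetched via .get():
#
#     from ulhpccelery.tasks import add, xsum
#     result = add.delay(4, 4)
#     result.get(timeout=10)          # -> 8
#     xsum.delay([1, 2, 3]).get()     # -> 6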
dmych/cn
sync.py
Python
gpl-3.0
3,222
0.034761
# This file is part of Coffee Notes project
#
# Coffee Notes is a cross-platform note-taking application
# inspired by Notational Velocity.
# <https://github.com/dmych/cn>
#
# Copyright (c) Dmitri Brechalov, 2011
#
# Coffee Notes is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Coffee Notes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Coffee Notes. If not, see <http://www.gnu.org/licenses/>.

from api import Simplenote
from notes import Notes, KEY_PREFIX
import time
import sys

VERBOSE_DEBUG = True


def dbg(msg):
    if not VERBOSE_DEBUG:
        return
    from sys import stderr
    stderr.write('**** %s\n' % (msg))


from utils import log


def sync(dbpath, user, password):
    notes = Notes(dbpath)
    api = Simplenote(user, password)
    log('LOCAL TO REMOTE:')
    synced_count = 0
    for note in notes.values():
        if note['CHANGED']:
            note['content'] = notes.getContent(note['key'])
            if note['key'].startswith(KEY_PREFIX):
                log('NEW NOTE')
                k = note['key']
                del note['key']
            else:
                log('CHANGED: %s' % note['key'])
                k = None
            note = api.update(note)
            note['CHANGED'] = False
            notes.update(note)
            if k is not None:
                notes.remove(k)
            synced_count += 1


def OLD_sync(localdb, user, password, since=None):
    db = Notes(localdb)
    api = Simplenote(user, password)
    log('LOCAL TO REMOTE:')
    synced_count = 0
    for note in db.values():
        if note['CHANGED']:
            if not note.has_key('key') or note['key'].startswith(KEY_PREFIX):
                log('NEW NOTE')
            else:
                log('CHANGED: %s' % note['key'])
            if note['key'].startswith(KEY_PREFIX):
                k = note['key']
                del note['key']
            else:
                k = None
            note = api.update(note)
            note['CHANGED'] = False
            db.update(note)
            if k is not None:
                db.remove(k)
            synced_count += 1
    if since:
        rindex = api.index(since=since)
        log('>>>> SINCE: %s' % since)
    else:
        rindex = api.index()
    log('REMOTE TO LOCAL:')
    log('>>>> RINDEX LEN: %s' % len(rindex))
    for ritem in rindex:
        key = ritem['key']
        if key not in db.keys(deleted=True):
            log('    NEW: %s' % (key))
            db.update(api.get(key))
            synced_count += 1
        litem = db.get(key)
        if ritem['syncnum'] > litem['syncnum']:
            log('    UPD: %s' % (key))
            db.update(api.get(key))
            synced_count += 1
    log('CLEAN UP:')
    if since is None:
        rkeys = api.keys().keys()
        for k in db.keys(deleted=True):
            if k not in rkeys:
                log('    DEL: %s' % k)
                db.remove(k)
                synced_count += 1
    else:
        for k in db.keys(deleted=True):
            litem = db.get(k)
            if litem['deleted'] != 0:
                log('    DEL: %s' % k)
                db.remove(k)
    sys.stderr.write('Synced %s notes.\n' % synced_count)
    return time.time()


if __name__ == '__main__':
    import sys
    email = sys.argv[1]
    password = sys.argv[2]
    sync('./', email, password)
Rusk85/pyload
module/plugins/hoster/ReloadCc.py
Python
gpl-3.0
4,866
0.003494
from module.plugins.Hoster import Hoster
from module.common.json_layer import json_loads
from module.network.HTTPRequest import BadHeader


class ReloadCc(Hoster):
    __name__ = "ReloadCc"
    __version__ = "0.5"
    __type__ = "hoster"
    __description__ = """Reload.Cc hoster plugin"""

    # Since we want to allow the user to specify the list of hosters to use we
    # let MultiHoster.coreReady create the regex patterns for us using
    # getHosters in our ReloadCc hook.
    __pattern__ = None

    __author_name__ = ("Reload Team")
    __author_mail__ = ("hello@reload.cc")

    def process(self, pyfile):
        # Check account
        if not self.account or not self.account.canUse():
            self.logError(_("Please enter your %s account or deactivate this plugin") % "reload.cc")
            self.fail("No valid reload.cc account provided")

        # In some cases hosters do not supply us with a filename at download,
        # so we are going to set a fallback filename (e.g. for freakshare or xfileshare)
        self.pyfile.name = self.pyfile.name.split('/').pop()  # Remove everything before the last slash

        # Correction for automatically assigned filename: remove html suffix if needed
        suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
        temp = self.pyfile.name.split('.')
        if temp.pop() in suffix_to_remove:
            self.pyfile.name = ".".join(temp)

        # Get account data
        (user, data) = self.account.selectAccount()

        query_params = dict(
            via='pyload',
            v=1,
            user=user,
            uri=self.pyfile.url
        )

        try:
            query_params.update(dict(hash=self.account.infos[user]['pwdhash']))
        except Exception:
            query_params.update(dict(pwd=data['password']))

        try:
            answer = self.load("http://api.reload.cc/dl", get=query_params)
        except BadHeader, e:
            if e.code == 400:
                self.fail("The URI is not supported by Reload.cc.")
            elif e.code == 401:
                self.fail("Wrong username or password")
            elif e.code == 402:
                self.fail("Your account is inactive. A payment is required for downloading!")
            elif e.code == 403:
                self.fail("Your account is disabled. Please contact the Reload.cc support!")
            elif e.code == 409:
                self.logWarning("The hoster seems to be a limited hoster and you've used your daily traffic for this hoster: %s" % self.pyfile.url)
                # Wait for 6 hours and retry up to 4 times => one day
                self.retry(max_retries=4, wait_time=(3600 * 6), reason="Limited hoster traffic limit exceeded")
            elif e.code == 429:
                # Too many connections, wait 2 minutes and try again
                self.retry(max_retries=5, wait_time=120, reason="Too many concurrent connections")
            elif e.code == 503:
                # Retry in 10 minutes
                self.retry(wait_time=600, reason="Reload.cc is currently in maintenance mode! Please check again later.")
            else:
                self.fail("Internal error within Reload.cc. Please contact the Reload.cc support for further information.")
            return

        data = json_loads(answer)

        # Check status and decide what to do
        status = data.get('status', None)
        if status == "ok":
            conn_limit = data.get('msg', 0)
            # API says these connections are limited.
            # Make sure this limit is used - the download will fail if not.
            if conn_limit > 0:
                try:
                    self.limitDL = int(conn_limit)
                except ValueError:
                    self.limitDL = 1
            else:
                self.limitDL = 0

            try:
                self.download(data['link'], disposition=True)
            except BadHeader, e:
                if e.code == 404:
                    self.fail("File Not Found")
                elif e.code == 412:
                    self.fail("File access password is wrong")
                elif e.code == 417:
                    self.fail("Password required for file access")
                elif e.code == 429:
                    # Too many connections, wait 2 minutes and try again
                    self.retry(max_retries=5, wait_time=120, reason="Too many concurrent connections")
                else:
                    self.fail("Internal error within Reload.cc. Please contact the Reload.cc support for further information.")
                return
        else:
            self.fail("Internal error within Reload.cc. Please contact the Reload.cc support for further information.")
alexston/calibre-webserver
src/calibre/devices/teclast/__init__.py
Python
gpl-3.0
237
0.008439
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai

from __future__ import with_statement

__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
khchine5/opal
opal/migrations/0013_inpatientadmission.py
Python
agpl-3.0
1,852
0.0027
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
from django.conf import settings
import opal.models


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('opal', '0012_maritalstatus_title'),
    ]

    operations = [
        migrations.CreateModel(
            name='InpatientAdmission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(null=True, blank=True)),
                ('updated', models.DateTimeField(null=True, blank=True)),
                ('consistency_token', models.CharField(max_length=8)),
                ('admitted', models.DateTimeField()),
                ('discharged', models.DateTimeField()),
                ('hospital', models.CharField(max_length=255, blank=True)),
                ('ward', models.CharField(max_length=255, blank=True)),
                ('bed', models.CharField(max_length=255, blank=True)),
                ('admission_diagnosis', models.CharField(max_length=255, blank=True)),
                ('external_identifier', models.CharField(max_length=255, blank=True)),
                ('created_by', models.ForeignKey(related_name='created_opal_inpatientadmission_subrecords', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('patient', models.ForeignKey(to='opal.Patient')),
                ('updated_by', models.ForeignKey(related_name='updated_opal_inpatientadmission_subrecords', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=(opal.models.UpdatesFromDictMixin, models.Model),
        ),
    ]
DMSC-Instrument-Data/plankton
docs/conf.py
Python
gpl-3.0
1,551
0.001934
# -*- coding: utf-8 -*-
#
# lewis documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 9 16:42:53 2016.

import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.realpath(__file__))))

# -- General configuration ------------------------------------------------

needs_sphinx = "1.4.5"

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.viewcode",
]

templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"

# General information about the project.
project = u"lewis"
copyright = u"2016-2021, European Spallation Source ERIC"
author = u"ScreamingUdder"

version = u"2.0"
release = u"1.3.1"

language = None
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
pygments_style = "sphinx"
todo_include_todos = False
modindex_common_prefix = ["lewis."]

# -- Options for HTML output ---------------------------------------------

# This is from the sphinx_rtd_theme documentation to make the page work with RTD
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd:
    import sphinx_rtd_theme

    html_theme = "sphinx_rtd_theme"
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

html_logo = "resources/logo/lewis-logo.png"
html_static_path = []
html_show_sourcelink = True
htmlhelp_basename = "lewisdoc"

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    "papersize": "a4paper",
}

latex_documents = [
    (master_doc, "lewis.tex", u"lewis Documentation", u"ScreamingUdder", "manual"),
]
FeodorM/amm_code
cm/lab_3/2_.py
Python
mit
205
0
from numpy import *
from cmlib import showMatr

A = matrix([[1, 2, 0],
            [0, 2, 2]])
B = matrix([[3, -1],
            [-1, 3],
            [1, 0]])

res = (A * B).T
showMatr(array(res))
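The snippet computes (A*B).T, which by the transpose identity equals B.T * A.T. A quick check of both the result and the identity (a sketch, assuming NumPy is available):

# (A * B) is [[1, 5], [0, 6]], so its transpose is [[1, 0], [5, 6]].
import numpy as np

A = np.matrix([[1, 2, 0], [0, 2, 2]])
B = np.matrix([[3, -1], [-1, 3], [1, 0]])

assert ((A * B).T == np.matrix([[1, 0], [5, 6]])).all()
assert ((A * B).T == B.T * A.T).all()  # transpose identity (AB)^T = B^T A^T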
kapilrastogi/Impala
tests/util/parse_util.py
Python
apache-2.0
1,994
0.015045
# Copyright (c) 2015 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
from datetime import datetime

NEW_GLOG_ENTRY_PATTERN = re.compile(r"[IWEF](?P<Time>\d{4} \d{2}:\d{2}:\d{2}\.\d{6}).*")


def parse_glog(text, start_time=None):
  '''Parses the log 'text' and returns a list of log entries. If a 'start_time' is
     provided only log entries that are after the time will be returned.
  '''
  year = datetime.now().year
  found_start = False
  log = list()
  entry = None
  for line in text.splitlines():
    if not found_start:
      found_start = line.startswith("Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu")
      continue
    match = NEW_GLOG_ENTRY_PATTERN.match(line)
    if match:
      if entry:
        log.append("\n".join(entry))
      if not start_time or start_time <= datetime.strptime(
          match.group("Time"), "%m%d %H:%M:%S.%f").replace(year):
        entry = [line]
      else:
        entry = None
    elif entry:
      entry.append(line)
  if entry:
    log.append("\n".join(entry))
  return log


def parse_mem_to_mb(mem, units):
  mem = float(mem)
  if mem <= 0:
    return
  units = units.strip().upper()
  if units.endswith("B"):
    units = units[:-1]
  if not units:
    mem /= 10 ** 6
  elif units == "K":
    mem /= 10 ** 3
  elif units == "M":
    pass
  elif units == "G":
    mem *= 10 ** 3
  elif units == "T":
    mem *= 10 ** 6
  else:
    raise Exception('Unexpected memory unit "%s"' % units)
  return int(mem)
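A few worked conversions for parse_mem_to_mb, following directly from the code above (units are upper-cased and a trailing "B" is dropped, so "GB", "gb" and "G" are equivalent, and all factors are decimal powers of ten):

assert parse_mem_to_mb("2", "GB") == 2000        # 1 GB == 10**3 MB here
assert parse_mem_to_mb("1.5", "g") == 1500
assert parse_mem_to_mb("2048", "K") == 2         # 2048 KB -> 2.048 MB -> int() truncates
assert parse_mem_to_mb("123456789", "B") == 123  # bytes -> MB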
mjirayu/sit_academy
cms/djangoapps/upload_videos/migrations/0001_initial.py
Python
agpl-3.0
1,745
0.006877
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'UploadVideo'
        db.create_table('upload_videos_uploadvideo', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255, db_index=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_index=True, blank=True)),
            ('video', self.gf('django.db.models.fields.files.FileField')(max_length=255)),
        ))
        db.send_create_signal('upload_videos', ['UploadVideo'])

    def backwards(self, orm):
        # Deleting model 'UploadVideo'
        db.delete_table('upload_videos_uploadvideo')

    models = {
        'upload_videos.uploadvideo': {
            'Meta': {'object_name': 'UploadVideo'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'video': ('django.db.models.fields.files.FileField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['upload_videos']
algernon/hy
hy/compiler.py
Python
mit
90,185
0
# -*- encoding: utf-8 -*- # # Copyright (c) 2013, 2014 Paul Tagliamonte <paultag@debian.org> # Copyright (c) 2013 Julien Danjou <julien@danjou.info> # Copyright (c) 2013 Nicolas Dandrimont <nicolas.dandrimont@crans.org> # Copyright (c) 2013 James King <james@agentultra.com> # Copyright (c) 2013, 2014 Bob Tolbert <bob@tolbert.org> # # Perm
ission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without li
mitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. from hy.models.expression import HyExpression from hy.models.keyword import HyKeyword from hy.models.integer import HyInteger from hy.models.complex import HyComplex from hy.models.string import HyString from hy.models.symbol import HySymbol from hy.models.float import HyFloat from hy.models.list import HyList from hy.models.set import HySet from hy.models.dict import HyDict from hy.models.cons import HyCons from hy.errors import HyCompileError, HyTypeError from hy.lex.parser import hy_symbol_mangle import hy.macros from hy._compat import ( str_type, long_type, PY27, PY33, PY3, PY34, PY35, raise_empty) from hy.macros import require, macroexpand, reader_macroexpand import hy.importer import traceback import importlib import codecs import ast import sys import keyword from collections import defaultdict _compile_time_ns = {} def compile_time_ns(module_name): ns = _compile_time_ns.get(module_name) if ns is None: ns = {'hy': hy, '__name__': module_name} _compile_time_ns[module_name] = ns return ns _stdlib = {} def load_stdlib(): import hy.core for module in hy.core.STDLIB: mod = importlib.import_module(module) for e in mod.EXPORTS: _stdlib[e] = module # True, False and None included here since they # are assignable in Python 2.* but become # keywords in Python 3.* def _is_hy_builtin(name, module_name): extras = ['True', 'False', 'None', 'true', 'false', 'nil'] if name in extras or keyword.iskeyword(name): return True # for non-Hy modules, check for pre-existing name in # _compile_table if not module_name.startswith("hy."): return name in _compile_table return False _compile_table = {} def ast_str(foobar): if PY3: return str(foobar) try: return str(foobar) except UnicodeEncodeError: pass enc = codecs.getencoder('punycode') foobar, _ = enc(foobar) return "hy_%s" % (str(foobar).replace("-", "_")) def builds(_type): unpythonic_chars = ["-"] really_ok = ["-"] if any(x in unpythonic_chars for x in str_type(_type)): if _type not in really_ok: raise TypeError("Dear Hypster: `build' needs to be *post* " "translated strings... `%s' sucks." % (_type)) def _dec(fn): _compile_table[_type] = fn return fn return _dec def builds_if(_type, condition): if condition: return builds(_type) else: return lambda fn: fn class Result(object): """ Smart representation of the result of a hy->AST compilation This object tries to reconcile the hy world, where everything can be used as an expression, with the Python world, where statements and expressions need to coexist. To do so, we represent a compiler result as a list of statements `stmts`, terminated by an expression context `expr`. The expression context is used when the compiler needs to use the result as an expression. 
Results are chained by addition: adding two results together returns a Result representing the succession of the two Results' statements, with the second Result's expression context. We make sure that a non-empty expression context does not get clobbered by adding more results, by checking accesses to the expression context. We assume that the context has been used, or deliberately ignored, if it has been accessed. The Result object is interoperable with python AST objects: when an AST object gets added to a Result object, it gets converted on-the-fly. """ __slots__ = ("imports", "stmts", "temp_variables", "_expr", "__used_expr", "contains_yield") def __init__(self, *args, **kwargs): if args: # emulate kw-only args for future bits. raise TypeError("Yo: Hacker: don't pass me real args, dingus") self.imports = defaultdict(set) self.stmts = [] self.temp_variables = [] self._expr = None self.contains_yield = False self.__used_expr = False # XXX: Make sure we only have AST where we should. for kwarg in kwargs: if kwarg not in ["imports", "contains_yield", "stmts", "expr", "temp_variables"]: raise TypeError( "%s() got an unexpected keyword argument '%s'" % ( self.__class__.__name__, kwarg)) setattr(self, kwarg, kwargs[kwarg]) @property def expr(self): self.__used_expr = True return self._expr @expr.setter def expr(self, value): self.__used_expr = False self._expr = value def add_imports(self, mod, imports): """Autoimport `imports` from `mod`""" self.imports[mod].update(imports) def is_expr(self): """Check whether I am a pure expression""" return self._expr and not (self.imports or self.stmts) @property def force_expr(self): """Force the expression context of the Result. If there is no expression context, we return a "None" expression. """ if self.expr: return self.expr # Spoof the position of the last statement for our generated None lineno = 0 col_offset = 0 if self.stmts: lineno = self.stmts[-1].lineno col_offset = self.stmts[-1].col_offset return ast.Name(id=ast_str("None"), arg=ast_str("None"), ctx=ast.Load(), lineno=lineno, col_offset=col_offset) # XXX: Likely raise Exception here - this will assertionfail # pypy since the ast will be out of numerical order. def expr_as_stmt(self): """Convert the Result's expression context to a statement This is useful when we want to use the stored expression in a statement context (for instance in a code branch). We drop ast.Names if they are appended to statements, as they can't have any side effect. "Bare" names still get converted to statements. If there is no expression context, return an empty result. """ if self.expr and not (isinstance(self.expr, ast.Name) and self.stmts): return Result() + ast.Expr(lineno=self.expr.lineno, col_offset=self.expr.col_offset, value=self.expr) return Result() def rename(self, new_name): """Rename the Result's temporary variables to a `new_name`. We know how to handle ast.Names and ast.FunctionDefs. """ new_
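The Result docstring above describes chaining by addition: concatenate the statements and keep the right-hand side's expression context. A toy sketch of just that behaviour (not Hy's actual implementation, which additionally tracks imports and guards an unused expression context against clobbering):

# ToyResult is an invented name; it only illustrates the chaining rule.
class ToyResult(object):
    def __init__(self, stmts=None, expr=None):
        self.stmts = stmts or []
        self.expr = expr

    def __add__(self, other):
        # Statements run in sequence; the second result supplies the expression.
        return ToyResult(self.stmts + other.stmts, other.expr)

a = ToyResult(stmts=["x = 1"], expr="x")
b = ToyResult(stmts=["y = 2"], expr="y")
c = a + b
assert c.stmts == ["x = 1", "y = 2"] and c.expr == "y"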
fossilet/6.00x
week2/problemset2/ps2_2.py
Python
mit
470
0.004255
from __future__ import division

balance = 9999999
annualInterestRate = 0.18
min_pay = 10


def pay(m, min_pay):
    if m == 1:
        ub = (balance - min_pay) * (1 + annualInterestRate / 12)
        return ub
    else:
        last_ub = pay(m - 1, min_pay)
        ub = (last_ub - min_pay) * (1 + annualInterestRate / 12)
        return ub


ub = pay(12, min_pay)
while ub > 0:
    min_pay += 10
    ub = pay(12, min_pay)

print('Lowest Payment: %d' % min_pay)
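As an aside, the $10-step search above has a closed form: unrolling ub_n = (ub_{n-1} - p) * r with r = 1 + annualInterestRate/12 gives ub_12 = balance*r**12 - p*r*(r**12 - 1)/(r - 1), so the break-even payment can be computed directly. A sketch reusing the names defined above (the max() mirrors the loop's starting value of 10):

import math

r = 1 + annualInterestRate / 12
exact = balance * r ** 12 * (r - 1) / (r * (r ** 12 - 1))
closed_form = max(10, int(math.ceil(exact / 10)) * 10)
# closed_form should agree with the min_pay found by the loop above.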
mikelarre/odoomrp-wip-1
product_last_purchase_sale_info/models/sale_order.py
Python
agpl-3.0
1,749
0
# -*- encoding: utf-8 -*- ############################################################################
## # # Avanzosc - Avanced Open Source Consulting # Copyright (C) 2011 - 2014 Avanzosc <http://www.avanzosc.com> # # This program is free software: you can redistribute
it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # ############################################################################## from openerp.osv import orm import time class SaleOrder(orm.Model): _inherit = 'sale.order' def action_wait(self, cr, uid, ids, context=None): product_obj = self.pool['product.product'] res = super(SaleOrder, self).action_wait(cr, uid, ids, context) for o in self.browse(cr, uid, ids, context): for line in o.order_line: if line.product_id: vals = {'last_sale_date': time.strftime('%Y-%m-%d %H:%M:%S'), 'last_customer_id': line.order_id.partner_id.id, } product_obj.write(cr, uid, [line.product_id.id], vals, context) return res
ypu/virt-test
virttest/virsh_unittest.py
Python
gpl-2.0
12,157
0.000082
#!/usr/bin/python import unittest import logging import common from autotest.client import utils class bogusVirshFailureException(unittest.TestCase.failureException): def __init__(self, *args, **dargs): self.virsh_args = args self.virsh_dargs = dargs def __str__(self): msg = ("Codepath under unittest attempted call to un-mocked virsh" " method, with args: '%s' and dargs: '%s'" % (self.virsh_args, self.virsh_dargs)) return msg def FakeVirshFactory(preserve=None): """ Return Virsh() instance with methods to raise bogusVirshFailureException. Users of this class should override methods under test on instance. :param preserve: List of symbol names NOT to modify, None for all """ import virsh def raise_bogusVirshFailureException(*args, **dargs): raise bogusVirshFailureException() if preserve is None: preserve = [] fake_virsh = virsh.Virsh(virsh_exec='/bin/false', uri='qemu:///system', debug=True, ignore_status=True) # Make all virsh commands throw an exception by calling it for symbol in dir(virsh): # Get names of just closure functions by Virsh class if symbol in virsh.NOCLOSE + preserve: continue if isinstance(getattr(fake_virsh, symbol), virsh.VirshClosure): xcpt = lambda *args, **dargs: raise_bogusVirshFailureException() # fake_virsh is a propcan, can't use setattr. fake_virsh.__super_set__(symbol, xcpt) return fake_virsh class ModuleLoad(unittest.TestCase): import virsh class ConstantsTest(ModuleLoad): def test_ModuleLoad(self): self.assertTrue(hasattr(self.virsh, 'NOCLOSE')) self.assertTrue(hasattr(self.virsh, 'SCREENSHOT_ERROR_COUNT')) self.assertTrue(hasattr(self.virsh, 'VIRSH_COMMAND_CACHE')) self.assertTrue(hasattr(self.virsh, 'VIRSH_EXEC')) class TestVirshClosure(ModuleLoad): @staticmethod def somefunc(*args, **dargs): return (args, dargs) class SomeClass(dict): def somemethod(self): return "foobar" def test_init(self): # save some typing VC = self.virsh.VirshClosure # self is guaranteed to be not dict-like self.assertRaises(ValueError, VC, self.somefunc, self) self.assertRaises(ValueError, VC, lambda: None, self) def test_args(self): # save some typing VC = self.virsh.VirshClosure tcinst = self.SomeClass() vcinst = VC(self.somefunc, tcinst) args, dargs = vcinst('foo') self.assertEqual(len(args), 1) self.assertEqual(args[0], 'foo') self.assertEqual(len(dargs), 0) def test_fake_virsh(self): fake_virsh = FakeVirshFactory() for symb in dir(self.virsh): if symb in self.virsh.NOCLOSE: continue value = fake_virsh.__super_get__(symb) self.assertRaises(unittest.TestCase.failureException, value) def test_dargs(self): # save some typing VC = self.virsh.VirshClosure tcinst = self.SomeClass(foo='bar') vcinst = VC(self.somefunc, tcinst) args, dargs = vcinst() self.assertEqu
al(len(args), 0) self.assertEqual(len(dargs), 1) self.assertEqual(dargs.keys(), ['foo']) self.assertEqual(dargs.values(), ['bar']) def test_args_and_dargs(self): # save some typing VC = self.virsh.VirshClosure tcinst = sel
f.SomeClass(foo='bar') vcinst = VC(self.somefunc, tcinst) args, dargs = vcinst('foo') self.assertEqual(len(args), 1) self.assertEqual(args[0], 'foo') self.assertEqual(len(dargs), 1) self.assertEqual(dargs.keys(), ['foo']) self.assertEqual(dargs.values(), ['bar']) def test_args_dargs_subclass(self): # save some typing VC = self.virsh.VirshClosure tcinst = self.SomeClass(foo='bar') vcinst = VC(self.somefunc, tcinst) args, dargs = vcinst('foo') self.assertEqual(len(args), 1) self.assertEqual(args[0], 'foo') self.assertEqual(len(dargs), 1) self.assertEqual(dargs.keys(), ['foo']) self.assertEqual(dargs.values(), ['bar']) def test_update_args_dargs_subclass(self): # save some typing VC = self.virsh.VirshClosure tcinst = self.SomeClass(foo='bar') vcinst = VC(self.somefunc, tcinst) args, dargs = vcinst('foo') self.assertEqual(len(args), 1) self.assertEqual(args[0], 'foo') self.assertEqual(len(dargs), 1) self.assertEqual(dargs.keys(), ['foo']) self.assertEqual(dargs.values(), ['bar']) # Update dictionary tcinst['sna'] = 'fu' # Is everything really the same? args, dargs = vcinst('foo', 'baz') self.assertEqual(len(args), 2) self.assertEqual(args[0], 'foo') self.assertEqual(args[1], 'baz') self.assertEqual(len(dargs), 2) self.assertEqual(dargs['foo'], 'bar') self.assertEqual(dargs['sna'], 'fu') def test_multi_inst(self): # save some typing VC1 = self.virsh.VirshClosure VC2 = self.virsh.VirshClosure tcinst1 = self.SomeClass(darg1=1) tcinst2 = self.SomeClass(darg1=2) vcinst1 = VC1(self.somefunc, tcinst1) vcinst2 = VC2(self.somefunc, tcinst2) args1, dargs1 = vcinst1(1) args2, dargs2 = vcinst2(2) self.assertEqual(len(args1), 1) self.assertEqual(len(args2), 1) self.assertEqual(args1[0], 1) self.assertEqual(args2[0], 2) self.assertEqual(len(dargs1), 1) self.assertEqual(len(dargs2), 1) self.assertEqual(dargs1['darg1'], 1) self.assertEqual(dargs2['darg1'], 2) class ConstructorsTest(ModuleLoad): def test_VirshBase(self): vb = self.virsh.VirshBase() del vb # keep pylint happy def test_Virsh(self): v = self.virsh.Virsh() del v # keep pylint happy def test_VirshPersistent(self): test_virsh = self.virsh.Virsh() if test_virsh['virsh_exec'] == '/bin/true': return else: logging.disable(logging.INFO) vp = self.virsh.VirshPersistent() vp.close_session() # Make sure session gets cleaned up def TestVirshClosure(self): vc = self.virsh.VirshClosure(None, {}) del vc # keep pylint happy # Ensure the following tests ONLY run if a valid virsh command exists ##### class ModuleLoadCheckVirsh(unittest.TestCase): import virsh def run(self, *args, **dargs): test_virsh = self.virsh.Virsh() if test_virsh['virsh_exec'] == '/bin/true': return # Don't run any tests, no virsh executable was found else: super(ModuleLoadCheckVirsh, self).run(*args, **dargs) class SessionManagerTest(ModuleLoadCheckVirsh): def test_del_VirshPersistent(self): """ Unittest for __del__ of VirshPersistent. This test makes sure the __del__ method of VirshPersistent works well in `del vp_instance`. """ vp = self.virsh.VirshPersistent() virsh_exec = vp.virsh_exec self.assertTrue(utils.process_is_alive(virsh_exec)) del vp self.assertFalse(utils.process_is_alive(virsh_exec)) def test_VirshSession(self): """ Unittest for VirshSession. This test use VirshSession over VirshPersistent with auto_close=True. """ virsh_exec = self.virsh.Virsh()['virsh_exec'] # Build a VirshSession object. 
session_1 = self.virsh.VirshSession(virsh_exec, auto_close=True) self.assertTrue(utils.process_is_alive(virsh_exec)) del session_1 self.assertFalse(utils.process_is_alive(virsh_exec)) def test_VirshPersistent(self): """ Unittest for session manager of VirshPersistent. """ virsh_exec = self.virsh.Virsh()['virsh_exec'] vp_1 = self.virsh.VirshPersis
jml/flocker
flocker/volume/functional/test_filesystems_zfs.py
Python
apache-2.0
19,664
0
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Functional tests for ZFS filesystem implementation. These tests require the ability to create a new ZFS storage pool (using ``zpool``) and the ability to interact with that pool (using ``zfs``). Further coverage is provided in :module:`flocker.volume.test.test_filesystems_zfs`. """ import subprocess import errno from twisted.internet import reactor from twisted.internet.task import cooperate from twisted.trial.unittest import TestCase from twisted.python.filepath import FilePath from ..test.filesystemtests import ( make_ifilesystemsnapshots_tests, make_istoragepool_tests, create_and_copy, copy, assertVolumesEqual, ) from ..filesystems.errors import MaximumSizeTooSmall from ..filesystems.zfs import ( Snapshot, ZFSSnapshots, Filesystem, StoragePool, volume_to_dataset, zfs_command, ) from ..service import Volume, VolumeName from .._model import VolumeSize from ..testtools import create_zfs_pool, service_for_pool class IFilesystemSnapshotsTests(make_ifilesystemsnapshots_tests( lambda test_case: ZFSSnapshots( reactor, Filesystem(create_zfs_pool(test_case), None)))): """``IFilesystemSnapshots`` tests for ZFS.""" def build_pool(test_case): """ Create a ``StoragePool``. :param TestCase test_case: The test in which this pool will exist. :return: A new ``StoragePool``. """ return StoragePool(reactor, create_zfs_pool(test_case), FilePath(test_case.mktemp())) class IStoragePoolTests(make_istoragepool_tests( build_pool, lambda fs: ZFSSnapshots(reactor, fs))): """ ``IStoragePoolTests`` for ZFS storage pool. """ MY_VOLUME = VolumeName(namespace=u"myns", dataset_id=u"myvolume") MY_VOLUME2 = VolumeName(namespace=u"myns", dataset_id=u"myvolume2") class VolumeToDatasetTests(TestCase): """Tests for ``volume_to_dataset``.""" def test_volume_to_dataset(self): """``volume_to_dataset`` includes the node ID, dataset name and (for future functionality) a default branch name. """ volume = Volume(node_id=u"my-uuid", name=MY_VOLUME, service=None) self.assertEqual(volume_to_dataset(volume), b"my-uuid.myns.myvolume") class StoragePoolTests(TestCase): """ ZFS-specific ``StoragePool`` tests. """ def test_mount_root(self): """Mountpoints are children of the mount root.""" mount_root = FilePath(self.mktemp()) mount_root.makedirs() pool = StoragePool(reactor, create_zfs_pool(self), mount_root) service = service_for_pool(self, pool) volume = service.get(MY_VOLUME) d = pool.create(volume) def gotFilesystem(filesystem): self.assertEqual(filesystem.get_path(), mount_root.child(volume_to_dataset(volume))) d.addCallback(gotFilesystem) return d def test_filesystem_identity(self): """ Filesystems are created with the correct pool and dataset names. """ mount_root = FilePath(self.mktemp()) pool_name = create_zfs_pool(self) pool = StoragePool(reactor, pool_name, mount_root) service = service_for_pool(self, pool) volume = service.get(MY_VOLUME) d = pool.create(volume) def gotFilesystem(filesystem): self.assertEqual( filesystem, Filesystem(pool_name, volume_to_dataset(volume))) d.addCallback(gotFilesystem) return d def test_actual_mountpoint(self): """ The mountpoint of the filesystem is the actual ZFS mountpoint. """ mount_root = FilePath(self.mktemp()) pool_name = create_zfs_pool(self) pool = StoragePool(reactor, pool_name, mount_root) service = service_for_pool(self, pool) volume = service.get(MY_VOLUME) d
= pool.create(volume) def gotFilesystem(filesystem): self.assertEqual( filesystem.get_path().path, subprocess.check_output( [b"zfs", b"get", b"-H", b"-o", b"
value", b"mountpoint", filesystem.name]).strip()) d.addCallback(gotFilesystem) return d def test_no_maximum_size(self): """ The filesystem is created with no ``refquota`` property if the maximum size is unspecified. """ mount_root = FilePath(self.mktemp()) pool_name = create_zfs_pool(self) pool = StoragePool(reactor, pool_name, mount_root) service = service_for_pool(self, pool) volume = service.get(MY_VOLUME) d = pool.create(volume) def created_filesystem(filesystem): refquota = subprocess.check_output([ b"zfs", b"get", b"-H", b"-o", b"value", b"refquota", filesystem.name]).strip() self.assertEqual(b"none", refquota) d.addCallback(created_filesystem) return d def test_maximum_size_sets_refquota(self): """ The filesystem is created with a ``refquota`` property set to the value of the volume's maximum size if that value is not ``None``. """ size = VolumeSize(maximum_size=1024 * 64) mount_root = FilePath(self.mktemp()) pool_name = create_zfs_pool(self) pool = StoragePool(reactor, pool_name, mount_root) service = service_for_pool(self, pool) volume = service.get(MY_VOLUME, size=size) d = pool.create(volume) def created_filesystem(filesystem): refquota = subprocess.check_output([ b"zfs", b"get", # Skip displaying the header b"-H", # Display machine-parseable (exact) values b"-p", # Output only the value b"-o", b"value", # Get the value of the refquota property b"refquota", # For this filesystem filesystem.name]).decode("ascii").strip() if refquota == u"none": refquota = None else: refquota = int(refquota) self.assertEqual(size.maximum_size, refquota) d.addCallback(created_filesystem) return d def test_change_owner_does_not_remove_non_empty_mountpoint(self): """ ``StoragePool.change_owner()`` doesn't delete the contents of the original mountpoint, if it is non-empty. ZFS doesn't like to mount volumes over non-empty directories. To test this, we change the original mount to be a legacy mount (mounted using manpage:`mount(8)`). """ pool = StoragePool(reactor, create_zfs_pool(self), FilePath(self.mktemp())) service = service_for_pool(self, pool) volume = service.get(MY_VOLUME) new_volume = Volume(node_id=u"other-uuid", name=MY_VOLUME2, service=service) original_mount = volume.get_filesystem().get_path() d = pool.create(volume) def created_filesystems(ignored): filesystem_name = volume.get_filesystem().name subprocess.check_call(['zfs', 'unmount', filesystem_name]) # Create a file hiding under the original mount point original_mount.child('file').setContent('content') # Remount the volume at the original mount point as a legacy mount. subprocess.check_call(['zfs', 'set', 'mountpoint=legacy', filesystem_name]) subprocess.check_call(['mount', '-t', 'zfs', filesystem_name, original_mount.path]) return pool.change_owner(volume, new_volume) d.addCallback(created_filesystems) self.assertFailure(d, OSError) def changed_owner(filesystem): self.assertEqual(original_mount.child('file').getContent(), b'content') d.addCallback(changed_owner) return d def tes
t-brink/pscic
psciclib/units.py
Python
gpl-3.0
1,401
0.002143
# Copyright (C) 2015 Tobias Brink
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import pint

from . import currency

# Default init.
ureg = pint.UnitRegistry()
ureg.default_format = "~"  # print abbreviations by default.
Q_ = ureg.Quantity

UndefinedUnitError = pint.UndefinedUnitError


def _init():
    # Add currencies to registry.
    aliases = {"PLN": "zł"}
    # TODO: make the download thing optional! ship default .xml!
    # TODO: error handling
    data = currency.get_exchange_rates()
    ureg.define("EUR = [currency]")
    for cur, rate in data["rates"].items():
        if cur in aliases:
            ureg.define("{} = {} * EUR = {}".format(aliases[cur], 1/rate, cur))
        else:
            ureg.define("{} = {} * EUR".format(cur, 1/rate))
piyush-jain1/GSoC17OctaveGeometry
inst/io/@svg/simplepath.py
Python
gpl-3.0
6,961
0.0102
#!/usr/bin/env python """ simplepath.py functions for digesting paths into a simple list structure Copyright (C) 2005 Aaron Spike, aaron@ekips.org This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses/>. """ import re, math def lexPath(d): """ returns and iterator that breaks path data identifies command and parameter tokens """ offset = 0 length = len(d) delim = re.compile(r'[ \t\r\n,]+') command = re.compile(r'[MLHVCSQTAZmlhvcsqtaz]') parameter = re.compile(r'(([-+]?[0-9]+(\.[0-9]*)?|[-+]?\.[0-9]+)([eE][-+]?[0-9]+)?)') while 1: m = delim.match(d, offset) if m: offset = m.end() if offset >= length: break m = command.match(d, offset) if m: yield [d[offset:m.end()], True] offset = m.end() continue m = parameter.match(d, offset) if m: yield [d[offset:m.end()], False] offset = m.end() continue #TODO: create new exception raise Exception, 'Invalid path data!' ''' pathdefs = {commandfamily: [ implicitnext, #params, [casts,cast,cast], [coord type,x,y,0] ]} ''' pathdefs = { 'M':['L', 2, [float, float], ['x','y']], 'L':['L', 2, [float, float], ['x','y']], 'H':['H', 1, [float], ['x']], 'V':['V', 1, [float], ['y']], 'C':['C', 6, [float, float, float, float, float, float], ['x','y','x','y','x','y']], 'S':['S', 4, [float, float, float, float], ['x','y','x','y']], 'Q':['Q', 4, [float, float, float, float], ['x','y','x','y']], 'T':['T', 2, [float, float], ['x','y']], 'A':['A', 7, [float, float, float, int, int, float, float], ['r','r','a',0,'s','x','y']], 'Z':['L', 0, [], []] } def parsePath(d): """ Parse SVG path and return an array of segments. Removes all shorthand notation. Converts coordinates to absolute. """ retval = [] lexer = lexPath(d) pen = (0.0,0.0) subPathStart = pen lastControl = pen lastCommand = '' while 1: try: token, isCommand = lexer.next() except StopIteration: break params = [] needParam = True if isCommand: if not lastCommand and token.upper() != 'M': raise Exception, 'Invalid path, must begin with moveto.' else: command = token
else: #command was omited #use last command's implicit next command needParam = False if lastCommand: if lastCommand.isupper(): command = pathdefs[lastCommand][0] else: command = pathdefs[lastCommand.upper()][0].lower() else: raise Exception, 'Invalid path, no initial command.' numParams = pathdefs[command.upper()][1]
while numParams > 0: if needParam: try: token, isCommand = lexer.next() if isCommand: raise Exception, 'Invalid number of parameters' except StopIteration: raise Exception, 'Unexpected end of path' cast = pathdefs[command.upper()][2][-numParams] param = cast(token) if command.islower(): if pathdefs[command.upper()][3][-numParams]=='x': param += pen[0] elif pathdefs[command.upper()][3][-numParams]=='y': param += pen[1] params.append(param) needParam = True numParams -= 1 #segment is now absolute so outputCommand = command.upper() #Flesh out shortcut notation if outputCommand in ('H','V'): if outputCommand == 'H': params.append(pen[1]) if outputCommand == 'V': params.insert(0,pen[0]) outputCommand = 'L' if outputCommand in ('S','T'): params.insert(0,pen[1]+(pen[1]-lastControl[1])) params.insert(0,pen[0]+(pen[0]-lastControl[0])) if outputCommand == 'S': outputCommand = 'C' if outputCommand == 'T': outputCommand = 'Q' #current values become "last" values if outputCommand == 'M': subPathStart = tuple(params[0:2]) pen = subPathStart if outputCommand == 'Z': pen = subPathStart else: pen = tuple(params[-2:]) if outputCommand in ('Q','C'): lastControl = tuple(params[-4:-2]) else: lastControl = pen lastCommand = command retval.append([outputCommand,params]) return retval def formatPath(a): """Format SVG path data from an array""" return "".join([cmd + " ".join([str(p) for p in params]) for cmd, params in a]) def translatePath(p, x, y): for cmd,params in p: defs = pathdefs[cmd] for i in range(defs[1]): if defs[3][i] == 'x': params[i] += x elif defs[3][i] == 'y': params[i] += y def scalePath(p, x, y): for cmd,params in p: defs = pathdefs[cmd] for i in range(defs[1]): if defs[3][i] == 'x': params[i] *= x elif defs[3][i] == 'y': params[i] *= y elif defs[3][i] == 'r': # radius parameter params[i] *= x elif defs[3][i] == 's': # sweep-flag parameter if x*y < 0: params[i] = 1 - params[i] elif defs[3][i] == 'a': # x-axis-rotation angle if y < 0: params[i] = - params[i] def rotatePath(p, a, cx = 0, cy = 0): if a == 0: return p for cmd,params in p: defs = pathdefs[cmd] for i in range(defs[1]): if defs[3][i] == 'x': x = params[i] - cx y = params[i + 1] - cy r = math.sqrt((x**2) + (y**2)) if r != 0: theta = math.atan2(y, x) + a params[i] = (r * math.cos(theta)) + cx params[i + 1] = (r * math.sin(theta)) + cy # vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 encoding=utf-8 textwidth=99
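A worked example for parsePath above: relative commands and H/V shorthands are rewritten into absolute M/L/Z segments (Python 2, matching the module):

segments = parsePath("M 10 10 l 5 0 v 10 z")
assert segments == [['M', [10.0, 10.0]],
                    ['L', [15.0, 10.0]],   # 'l 5 0' made absolute
                    ['L', [15.0, 20.0]],   # 'v 10' fleshed out to a lineto
                    ['Z', []]]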
googleapis/python-aiplatform
samples/snippets/model_service/get_model_evaluation_tabular_regression_sample_test.py
Python
apache-2.0
1,059
0
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import get_model_evaluation_sample

PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
MODEL_ID = "8842430840248991744"  # bq all
EVALUATION_ID = "4944816689650806017"  # bq all evaluation


def test_ucaip_generated_get_model_evaluation_sample(capsys):
    get_model_evaluation_sample.get_model_evaluation_sample(
        project=PROJECT_ID, model_id=MODEL_ID, evaluation_id=EVALUATION_ID
    )
    out, _ = capsys.readouterr()
    assert "metrics_schema_uri" in out
toomoresuch/pysonengine
parts/google_appengine/google/appengine/api/taskqueue/taskqueue.py
Python
mit
33,902
0.006224
#!/usr/bin/env python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Task Queue API. Enables an application to queue background work for itself. Work is done through webhooks that process tasks pushed from a queue. Tasks will execute in best-effort order of ETA. Webhooks that fail will cause tasks to be retried at a later time. Multiple queues may exist with independent throttling controls. Webhook URLs may be specified directly for Tasks, or the default URL scheme may be used, which will translate Task names into URLs relative to a Queue's base path. A default queue is also provided for simple usage. """ __all__ = [ 'BadTaskStateError', 'BadTransactionState', 'BadTransactionStateError', 'DatastoreError', 'DuplicateTaskNameError', 'Error', 'InternalError', 'InvalidQueueError', 'InvalidQueueNameError', 'InvalidTaskError', 'InvalidTaskNameError', 'InvalidUrlError', 'PermissionDeniedError', 'TaskAlreadyExistsError', 'TaskTooLargeError', 'TombstonedTaskError', 'TooManyTasksError', 'TransientError', 'UnknownQueueError', 'MAX_QUEUE_NAME_LENGTH', 'MAX_TASK_NAME_LENGTH', 'MAX_TASK_SIZE_BYTES', 'MAX_URL_LENGTH', 'Queue', 'Task', 'TaskRetryOptions', 'add'] import calendar import datetime import math import os import re import time import urllib import urlparse from google.appengine.api import apiproxy_stub_map from google.appengine.api import namespace_manager from google.appengine.api import urlfetch from google.appengine.api.taskqueue import taskqueue_service_pb from google.appengine.runtime import apiproxy_errors class Error(Exception): """Base-class for exceptions in this module.""" class UnknownQueueError(Error): """The queue specified is unknown.""" class TransientError(Error): """There was a transient error while accessing the queue. Please Try again later. """ class InternalError(Error): """There was an internal error while accessing this queue. If this problem continues, please contact the App Engine team through our support forum with a description of your problem. """ class InvalidTaskError(Error): """The task's parameters, headers, or method is invalid.""" class InvalidTaskNameError(InvalidTaskError): """The task's name is invalid.""" class TaskTooLargeError(InvalidTaskError): """The task is too large with its headers and payload.""" class TaskAlreadyExistsError(InvalidTaskError): """Task already exists. It has not yet run.""" class TombstonedTaskError(InvalidTaskError): """Task has been tombstoned.""" class InvalidUrlError(InvalidTaskE
rror): """The task's relative URL is invalid.""" class BadTaskStateError(Error): """The task is in the wrong state for the requested operation.""" class InvalidQueueError(Error): """The Queue's configuration is invalid.""" class InvalidQueueNameError(InvalidQueueError): """The Queue's name is invalid.""" class _RelativeUrlError(Error): """The relative URL supplied is invalid.""" class PermissionDeniedError(Error): """The requested operation is not allowed f
or this app.""" class DuplicateTaskNameError(Error): """The add arguments contain tasks with identical names.""" class TooManyTasksError(Error): """Too many tasks were present in a single function call.""" class DatastoreError(Error): """There was a datastore error while accessing the queue.""" class BadTransactionStateError(Error): """The state of the current transaction does not permit this operation.""" class InvalidTaskRetryOptionsError(Error): """The task retry configuration is invalid.""" BadTransactionState = BadTransactionStateError MAX_QUEUE_NAME_LENGTH = 100 MAX_TASK_NAME_LENGTH = 500 MAX_TASK_SIZE_BYTES = 10 * (2 ** 10) MAX_URL_LENGTH = 2083 _DEFAULT_QUEUE = 'default' _DEFAULT_QUEUE_PATH = '/_ah/queue' _METHOD_MAP = { 'GET': taskqueue_service_pb.TaskQueueAddRequest.GET, 'POST': taskqueue_service_pb.TaskQueueAddRequest.POST, 'HEAD': taskqueue_service_pb.TaskQueueAddRequest.HEAD, 'PUT': taskqueue_service_pb.TaskQueueAddRequest.PUT, 'DELETE': taskqueue_service_pb.TaskQueueAddRequest.DELETE, } _NON_POST_METHODS = frozenset(['GET', 'HEAD', 'PUT', 'DELETE']) _BODY_METHODS = frozenset(['POST', 'PUT']) _TASK_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_TASK_NAME_LENGTH _TASK_NAME_RE = re.compile(_TASK_NAME_PATTERN) _QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH _QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN) _ERROR_MAPPING = { taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE: UnknownQueueError, taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR: TransientError, taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR: InternalError, taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE: TaskTooLargeError, taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME: InvalidTaskNameError, taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME: InvalidQueueNameError, taskqueue_service_pb.TaskQueueServiceError.INVALID_URL: InvalidUrlError, taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE: InvalidQueueError, taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED: PermissionDeniedError, taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS: TaskAlreadyExistsError, taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK: TombstonedTaskError, taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA: InvalidTaskError, taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST: Error, taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK: Error, taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE: Error, taskqueue_service_pb.TaskQueueServiceError.DUPLICATE_TASK_NAME: DuplicateTaskNameError, taskqueue_service_pb.TaskQueueServiceError.TOO_MANY_TASKS: TooManyTasksError, } _PRESERVE_ENVIRONMENT_HEADERS = ( ('X-AppEngine-Default-Namespace', 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'),) class _UTCTimeZone(datetime.tzinfo): """UTC timezone.""" ZERO = datetime.timedelta(0) def utcoffset(self, dt): return self.ZERO def dst(self, dt): return self.ZERO def tzname(self, dt): return 'UTC' _UTC = _UTCTimeZone() def _parse_relative_url(relative_url): """Parses a relative URL and splits it into its path and query string. Args: relative_url: The relative URL, starting with a '/'. Returns: Tuple (path, query) where: path: The path in the relative URL. query: The query string in the URL without the '?' character. 
Raises: _RelativeUrlError if the relative_url is invalid for whatever reason """ if not relative_url: raise _RelativeUrlError('Relative URL is empty') (scheme, netloc, path, query, fragment) = urlparse.urlsplit(relative_url) if scheme or netloc: raise _RelativeUrlError('Relative URL may not have a scheme or location') if fragment: raise _RelativeUrlError('Relative URL may not specify a fragment') if not path or path[0] != '/': raise _RelativeUrlError('Relative URL path must start with "/"') return path, query def _flatten_params(params): """Converts a dictionary of parameters to a list of parameters. Any unicode strings in keys or values will be encoded as UTF-8. Args: params: Dictionary mapping parameter keys to values. Values will be converted to a string and added to the list as tuple (key, value). If a values is iterable and not a string, each contained value will be added as a separate (key, val
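A behaviour sketch for _parse_relative_url, following its docstring above:

path, query = _parse_relative_url('/work/process?payload=1&retry=true')
assert path == '/work/process'
assert query == 'payload=1&retry=true'
# URLs with a scheme/location, a fragment, or a non-rooted path raise
# _RelativeUrlError instead.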
bailabs/bench-v7
setup.py
Python
gpl-3.0
963
0.015576
from setuptools import setup, find_packages

try:  # for pip >= 10
    from pip._internal.req import parse_requirements
except ImportError:  # for pip <= 9.0.3
    from pip.req import parse_requirements

import re, ast

# get version from __version__ variable in bench/__init__.py
_version_re = re.compile(r'__version__\s+=\s+(.*)')

with open('bench/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))

requirements = parse_requirements("requirements.txt", session="")

setup(
    name='bench',
    description='Metadata driven, full-stack web framework',
    author='Frappe Technologies',
    author_email='info@frappe.io',
    version=version,
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    install_requires=[str(ir.req) for ir in requirements],
    dependency_links=[str(ir._link) for ir in requirements if ir._link],
    entry_points='''
[console_scripts]
bench=bench.cli:cli
''',
)
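A hedged compatibility note: on pip >= 20 the objects yielded by parse_requirements no longer carry a .req attribute (they expose a .requirement string instead), so the install_requires line above breaks there. A defensive sketch (requirement_strings is an invented helper name):

def requirement_strings(path):
    reqs = parse_requirements(path, session="")
    # Old pip: InstallRequirement objects with .req; new pip: .requirement.
    return [str(r.req) if hasattr(r, "req") else str(r.requirement)
            for r in reqs]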
arenadata/ambari
ambari-server/src/main/resources/stacks/ADH/1.0/services/YARN/package/scripts/params_windows.py
Python
apache-2.0
2,998
0.005337
""" Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Ambari Agent """ from resource_management import * from resource_management.libraries import functions import os from status_params import * # server configurations config = Script.get_config() hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"] yarn_user = hadoop_user hdfs_user = hadoop_user smokeuser = hadoop_user config_dir = os.environ["HADOOP_CONF_DIR"] hadoop_home = os.environ["HADOOP_HOME"] yarn_home = os.environ["HADOOP_YARN_HOME"] hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", False) _authentication = config['configurations']['core-site']['hadoop.security.authentication'] security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos') smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab'] kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None)) rm_host = config['clusterHostInfo']['rm_host'][0] rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1] rm_https_port = "8090" rm_webui_address = format("{rm_host}:{rm_port}") rm_webui_https_address = format("{rm_host}:{rm_https_port}") hs_host = config['clusterHostInfo']['hs_host'][0] hs_port = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.address'].split(':')[-1] hs_webui_address = format("{hs_host}:{hs_port}") hadoop_mapred2_jar_location = os.path.join(o
s.environ["HADOOP_COMMON_HOME"], "share", "hadoop", "mapreduce")
hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"

exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path", "/etc/hadoop/conf/yarn.exclude")

nm_hosts = default("/clusterHostInfo/nm_hosts", [])

# include file
include_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.include-path", None)
include_hosts = None
manage_include_files = default("/configurations/yarn-site/manage.include.files", False)
if include_file_path and manage_include_files:
    include_hosts = list(set(nm_hosts) - set(exclude_hosts))

update_files_only = default("/commandParams/update_files_only", False)
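A tiny worked example of the include-list computation above: hosts being decommissioned are simply dropped from the NodeManager host list.

nm_hosts_example = ["host1", "host2", "host3"]
exclude_example = ["host2"]
assert sorted(set(nm_hosts_example) - set(exclude_example)) == ["host1", "host3"]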
timemath/hmfs
fs/hmfs/test/hold_file_open.py
Python
gpl-2.0
296
0.003378
#!/usr/bin/python3
from os.path import expanduser

home = expanduser('~')

file_list = []
for i in range(2048):
    # Note: a "with" block would close each file on exit, defeating the
    # purpose of this test, so the handles are opened and kept deliberately.
    file = open(home + "/mount_hmfs/orphan_{:d}.txt".format(i), 'w')
    file_list.append(file)
    file.write("ssssssssssssssssssss")

# hold files open
while True:
    pass
ashang/calibre
src/calibre/gui2/actions/show_template_tester.py
Python
gpl-3.0
1,940
0.006701
#!/usr/bin/env python2 # vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai __license__ = 'GPL v3' __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>' __docformat__ = 'restructuredtext en'
from calibre.gui2.actions import InterfaceAction from calibre.gui2.dialogs.template_dialog import TemplateDialog from calibre.gui2 import error_dialog class ShowTemplateTesterAction(InterfaceAction): name = 'Template tester' action_spec = (_('Template tester'), 'debug.png', None, '') dont_add_to = frozenset(['context-menu-device']) action_type = 'current' def genesis(self): self.previous_text = _('Enter a temp
late to test using data from the selected book') self.first_time = True self.qaction.triggered.connect(self.show_template_editor) def show_template_editor(self, *args): view = self.gui.current_view() if view is not self.gui.library_view: return error_dialog(self.gui, _('No template tester available'), _('Template tester is not available for books ' 'on the device.')).exec_() rows = view.selectionModel().selectedRows() if not rows: return error_dialog(self.gui, _('No books selected'), _('One book must be selected'), show=True) if len(rows) > 1: return error_dialog(self.gui, _('Selected multiple books'), _('Only one book can be selected'), show=True) index = rows[0] if index.isValid(): db = view.model().db t = TemplateDialog(self.gui, self.previous_text, mi=db.get_metadata(index.row(), index_is_id=False, get_cover=False), text_is_placeholder=self.first_time) t.setWindowTitle(_('Template tester')) if t.exec_() == t.Accepted: self.previous_text = t.rule[1] self.first_time = False
puiterwijk/HttpCA
Signer/setup.py
Python
bsd-3-clause
2,106
0.019468
#-*- coding: UTF-8 -*- # Copyright (c) 2013, Patrick Uiterwijk <puiterwijk@gmail.com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of Patrick Uiterwijk nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL Patrick Uiterwijk BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ## These two lines are needed to run on EL
6
__requires__ = ['SQLAlchemy >= 0.7']
import pkg_resources

from setuptools import setup, find_packages

setup(name='httpca_signer',
      version='0.1',
      author='Patrick Uiterwijk',
      author_email='puiterwijk@gmail.com',
      packages=find_packages(),
      zip_safe=False,
      include_package_data=True,
      install_requires=['pika', 'SQLAlchemy>=0.7'])
janusnic/py-21v
scope/2.py
Python
mit
188
0.047872
def mathem(a,b):
    a = a/2
    b = b+10
    print(a+b)

num1 = 100
num2 = 12

mathem(num1,num2)

print num1  # 100 - unchanged: the function rebinds only its own locals
print num2  # 12


def mathem2():
    # Globals are readable inside a function without any declaration.
    print(num1+num2)

mathem2()

print a  # NameError: 'a' and 'b' exist only in mathem's local scope
print b
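The usual fix for the NameError above is to return the values instead of relying on the function's locals (a sketch in the same Python 2 style):

def mathem3(a, b):
    a = a / 2
    b = b + 10
    return a, b

a, b = mathem3(num1, num2)
print a  # 50
print b  # 22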
nke001/attention-lvcsr
libs/Theano/theano/gof/compilelock.py
Python
mit
14,977
0.000134
# Locking mechanism to ensure no two compilations occur simultaneously # in the same compilation directory (which can cause crashes). import atexit import os import socket # only used for gethostname() import time import logging from contextlib import contextmanager import numpy as np from theano import config from theano.configparser import AddConfigVar, IntParam random = np.random.RandomState([2015, 8, 2]) _logger = logging.getLogger("theano.gof.compilelock") # If the user provided a logging level, we don't want to override it. if _logger.level == logging.NOTSET: # INFO will show the "Refreshing lock" messages _logger.setLevel(logging.INFO) AddConfigVar('compile.wait', """Time to wait before retrying to aquire the compile lock.""", IntParam(5, lambda i: i > 0, allow_override=False), in_c_key=False) def _timeout_default(): return config.compile.wait * 24 AddConfigVar('compile.timeout', """In seconds, time that a process will wait before deciding to override an existing lock. An override only happens when the existing lock is held by the same owner *and* has not been 'refreshed' by this owner for more than this period. Refreshes are done every half timeout period for running processes.""", IntParam(_timeout_default, lambda i: i >= 0, allow_override=False), in_c_key=False) hostname = socket.gethostname() def force_unlock(): """ Delete the compilation lock if someone else has it. """ get_lock(min_wait=0, max_wait=0.001, timeout=0) release_lock() @contextmanager def lock_ctx(lock_dir=None, keep_lock=False, **kw): get_lock(lock_dir=lock_dir, **kw) yield if not keep_lock: release_lock() # We define this name with an underscore so that python shutdown # deletes this before non-underscore names (like os). We need to do # it this way to avoid errors on shutdown. def _get_lock(lock_dir=None, **kw): """ Obtain lock on compilation directory. :param kw: Additional arg
uments to be forwarded to the `lock` function when acquiring the lock. :note: We can lock only on 1 directory at a time. """ if lock_dir i
s None: lock_dir = os.path.join(config.compiledir, 'lock_dir') if not hasattr(get_lock, 'n_lock'): # Initialization. get_lock.n_lock = 0 if not hasattr(get_lock, 'lock_is_enabled'): # Enable lock by default. get_lock.lock_is_enabled = True get_lock.lock_dir = lock_dir get_lock.unlocker = Unlocker(get_lock.lock_dir) else: if lock_dir != get_lock.lock_dir: # Compilation directory has changed. # First ensure all old locks were released. assert get_lock.n_lock == 0 # Update members for new compilation directory. get_lock.lock_dir = lock_dir get_lock.unlocker = Unlocker(get_lock.lock_dir) if get_lock.lock_is_enabled: # Only really try to acquire the lock if we do not have it already. if get_lock.n_lock == 0: lock(get_lock.lock_dir, **kw) atexit.register(Unlocker.unlock, get_lock.unlocker) # Store time at which the lock was set. get_lock.start_time = time.time() else: # Check whether we need to 'refresh' the lock. We do this # every 'config.compile.timeout / 2' seconds to ensure # no one else tries to override our lock after their # 'config.compile.timeout' timeout period. if get_lock.start_time is None: # This should not happen. So if this happen, clean up # the lock state and raise an error. while get_lock.n_lock > 0: release_lock() raise Exception("For some unknow reason, the lock was already " "taken, but no start time was registered.") now = time.time() if now - get_lock.start_time > config.compile.timeout / 2: lockpath = os.path.join(get_lock.lock_dir, 'lock') _logger.info('Refreshing lock %s', str(lockpath)) refresh_lock(lockpath) get_lock.start_time = now get_lock.n_lock += 1 get_lock = _get_lock def release_lock(): """ Release lock on compilation directory. """ get_lock.n_lock -= 1 assert get_lock.n_lock >= 0 # Only really release lock once all lock requests have ended. if get_lock.lock_is_enabled and get_lock.n_lock == 0: get_lock.start_time = None get_lock.unlocker.unlock(force=False) def set_lock_status(use_lock): """ Enable or disable the lock on the compilation directory (which is enabled by default). Disabling may make compilation slightly faster (but is not recommended for parallel execution). :param use_lock: whether to use the compilation lock or not :type use_lock: bool """ get_lock.lock_is_enabled = use_lock # This is because None is a valid input for timeout notset = object() def lock(tmp_dir, timeout=notset, min_wait=None, max_wait=None, verbosity=1): """ Obtain lock access by creating a given temporary directory (whose base will be created if needed, but will not be deleted after the lock is removed). If access is refused by the same lock owner during more than 'timeout' seconds, then the current lock is overridden. If timeout is None, then no timeout is performed. The lock is performed by creating a 'lock' file in 'tmp_dir' that contains a unique id identifying the owner of the lock (the process id, followed by a random string). When there is already a lock, the process sleeps for a random amount of time between min_wait and max_wait seconds before trying again. If 'verbosity' is >= 1, then a message will be displayed when we need to wait for the lock. If it is set to a value >1, then this message will be displayed each time we re-check for the presence of the lock. Otherwise it is displayed only when we notice the lock's owner has changed. 
:param str tmp_dir: lock directory that will be created when acquiring the
        lock
    :param timeout: time (in seconds) to wait before replacing an existing
        lock (default config 'compile.timeout')
    :type timeout: int or None
    :param int min_wait: minimum time (in seconds) to wait before trying again
        to get the lock (default config 'compile.wait')
    :param int max_wait: maximum time (in seconds) to wait before trying again
        to get the lock (default 2 * min_wait)
    :param int verbosity: amount of feedback displayed to screen (default 1)
    """
    if min_wait is None:
        min_wait = config.compile.wait
    if max_wait is None:
        max_wait = min_wait * 2
    if timeout is notset:
        timeout = config.compile.timeout
    # Create base of lock directory if required.
    base_lock = os.path.dirname(tmp_dir)
    if not os.path.isdir(base_lock):
        try:
            os.makedirs(base_lock)
        except OSError:
            # Someone else was probably trying to create it at the same time.
            # We wait two seconds just to make sure the following assert does
            # not fail on some NFS systems.
            time.sleep(2)
    assert os.path.isdir(base_lock)

    # Variable initialization.
    lock_file = os.path.join(tmp_dir, 'lock')
    my_pid = os.getpid()
    no_display = (verbosity == 0)

    nb_error = 0
    # The number of times we have slept while there were no errors: used to
    # skip the "waiting" message on the first retry and to display it less
    # frequently afterwards (and so generate less log/email noise).
    nb_wait = 0
    # Acquire lock.
    while True:
        try:
            last_owner = 'no_owner'
            time_start = time.time()
            other_dead = False
            while os.path.isdir(tmp_dir):
                try:
                    with open(lock_file) as f:
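The record above stops mid-function, but its docstrings already describe the
complete public locking API. A minimal usage sketch, assuming the module is
importable as theano.gof.compilelock (`do_compile` is a hypothetical
placeholder, not part of the original file):

# Hedged usage sketch for the compile-lock API shown above.
from theano.gof import compilelock

def do_compile():
    # Stand-in for real compilation work done under the lock.
    print("compiling while holding the lock")

# Preferred form: the context manager releases the lock on exit
# (unless keep_lock=True is passed).
with compilelock.lock_ctx():
    do_compile()

# Equivalent explicit form; get_lock/release_lock are re-entrant and the
# calls must stay balanced.
compilelock.get_lock()
try:
    do_compile()
finally:
    compilelock.release_lock()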
jameschch/Lean
Algorithm.Python/FutureOptionCallITMExpiryRegressionAlgorithm.py
Python
apache-2.0
6,809
0.006168
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License from AlgorithmImports import * ### <summary> ### This regression algorithm tests In The Money (ITM) future option expiry for calls. ### We expect 3 orders from the algorithm, which are: ### ### * Initial entry, buy ES Call Option (expiring ITM) ### * Option exercise, receiving ES future contracts ### * Future contract liquidation, due to impending expiry ### ### Additionally, we test delistings for future options and assert that our ### portfolio holdings reflect the orders the algorithm has submitted. ### </summary> class FutureOptionCallITMExpiryRegressionAlgorithm(QCAlgorithm): def Initialize(self): self.SetStartDate(2020, 1, 5) self.SetEndDate(2020, 6, 30) self.es19m20 = self.AddFutureContract( Symbol.CreateFuture( Futures.Indices.SP500EMini, Market.CME, datetime(2020, 6, 19) ), Resolution.Minute).Symbol # Select a future option expiring ITM, and adds it to the algorithm. self.esOption = self.AddFutureOptionContract( list( sorted([x for x in self.OptionChainProvider.GetOptionContractList(self.es19m20, self.Time) if x.ID.StrikePrice <= 3200.0 and x.ID.OptionRight == OptionRight.Call], key=lambda x: x.ID.StrikePrice, reverse=True) )[0], Resolution.Minute).Symbol self.expectedContract = Symbol.CreateOption(self.es19m20, Market.CME, OptionStyle.American, OptionRight.Call, 3200.0, datetime(2020, 6, 19)) if self.esOption != self.expectedContract: raise AssertionError(f"Contract {self.expectedContract} was not found in the chain") self.Schedule.On(self.DateRules.Tomorrow, self.TimeRules.AfterMarketOpen(self.es19m20, 1), self.ScheduleCallback) def ScheduleCallback(self): self.MarketOrder(self.esOption, 1) def OnData(self, data: Slice): # Assert delistings, so that we can make sure that we receive the delisting warnings at # the expected time. These assertions detect bug #4872 for delisting in data.Delistings.Values: if delisting.Type == DelistingType.Warning: if delisting.Time != datetime(2020, 6, 19): raise AssertionError(f"Delisting warning issued at unexpected date: {delisting.Time}") elif delisting.Type == DelistingType.Delisted: if delisting.Time != datetime(2020, 6, 20): raise AssertionError(f"Delisting happened at unexpected date: {delisting.Time}") def OnOrderEvent(self, orderEvent: OrderE
vent): if orderEvent.Status != OrderStatus.F
illed:
            # There's lots of noise with OnOrderEvent, but we're only interested in fills.
            return

        if not self.Securities.ContainsKey(orderEvent.Symbol):
            raise AssertionError(f"Order event Symbol not found in Securities collection: {orderEvent.Symbol}")

        security = self.Securities[orderEvent.Symbol]
        if security.Symbol == self.es19m20:
            self.AssertFutureOptionOrderExercise(orderEvent, security, self.Securities[self.expectedContract])
        elif security.Symbol == self.expectedContract:
            # Expected contract is ES19M20 Call Option expiring ITM @ 3200
            self.AssertFutureOptionContractOrder(orderEvent, security)
        else:
            raise AssertionError(f"Received order event for unknown Symbol: {orderEvent.Symbol}")

        self.Log(f"{self.Time} -- {orderEvent.Symbol} :: Price: {self.Securities[orderEvent.Symbol].Holdings.Price} Qty: {self.Securities[orderEvent.Symbol].Holdings.Quantity} Direction: {orderEvent.Direction} Msg: {orderEvent.Message}")

    def AssertFutureOptionOrderExercise(self, orderEvent: OrderEvent, future: Security, optionContract: Security):
        expectedLiquidationTimeUtc = datetime(2020, 6, 20, 4, 0, 0)

        if orderEvent.Direction == OrderDirection.Sell and future.Holdings.Quantity != 0:
            # We expect the contract to have been liquidated immediately
            raise AssertionError(f"Did not liquidate existing holdings for Symbol {future.Symbol}")
        if orderEvent.Direction == OrderDirection.Sell and orderEvent.UtcTime.replace(tzinfo=None) != expectedLiquidationTimeUtc:
            raise AssertionError(f"Liquidated future contract, but not at the expected time. Expected: {expectedLiquidationTimeUtc} - found {orderEvent.UtcTime.replace(tzinfo=None)}")

        # No way to detect option exercise orders or any other kind of special orders
        # other than matching strings, for now.
        if "Option Exercise" in orderEvent.Message:
            if orderEvent.FillPrice != 3200.0:
                raise AssertionError("Option did not exercise at expected strike price (3200)")

            if future.Holdings.Quantity != 1:
                # Here, we expect to have some holdings in the underlying, but not in the future option anymore.
                raise AssertionError(f"Exercised option contract, but we have no holdings for Future {future.Symbol}")

            if optionContract.Holdings.Quantity != 0:
                raise AssertionError(f"Exercised option contract, but we have holdings for Option contract {optionContract.Symbol}")

    def AssertFutureOptionContractOrder(self, orderEvent: OrderEvent, option: Security):
        if orderEvent.Direction == OrderDirection.Buy and option.Holdings.Quantity != 1:
            raise AssertionError(f"No holdings were created for option contract {option.Symbol}")
        if orderEvent.Direction == OrderDirection.Sell and option.Holdings.Quantity != 0:
            raise AssertionError("Holdings were found after a filled option exercise")
        if "Exercise" in orderEvent.Message and option.Holdings.Quantity != 0:
            raise AssertionError(f"Holdings were found after exercising option contract {option.Symbol}")

    def OnEndOfAlgorithm(self):
        if self.Portfolio.Invested:
            raise AssertionError(f"Expected no holdings at end of algorithm, but are invested in: {', '.join([str(i.ID) for i in self.Portfolio.Keys])}")
prando/photoselector
photo.py
Python
mit
11,323
0.012276
try: from Tkinter import * except ImportError: from tkinter import * try: import tkMessageBox except ImportError: from tkinter import messagebox as tkMessageBox try: import tkFileDialog except ImportError: from tkinter import filedialog as tkFileDialog import os from PIL import Image, ImageTk class App: def __init__(self, master): # Set NULL references to image & label objects at APP init self.curimage = None self.oldimlabel = None self.oldtxtlabel = None self.curimgidx = 0 # Initialize empty lists to denote loaded, selected, rejected images self.loaded = [] self.selected = [] self.rejected = [] self.tentative = [] # Use a string var and anchor it to a text label. Any change to string var will # be displayed by the text label. self.textstring = StringVar() self.photoindex = StringVar() # Image load path self.file_path_str = [] # Selected image list file path self.out_file_path_str = [] # Setup a frame (child of master) to display buttons self.frame = Frame (master) # Show frame. self.frame.pack() # Setup a frame (child of Frame) to display image self.imframe = Frame (self.frame, relief=SUNKEN) # Show frame. self.imframe.pack(side=BOTTOM) # Setup a frame (child of imrame) to display image self.txtboxframe = Frame (self.imframe, relief=SUNKEN) # Show frame. self.txtboxframe.pack(side=BOTTOM) # Setup buttons with actions triggering command=$$$ function. self.loadbutton = Button (self.frame, text="LOAD", command=self.loadpic) self.loadbutton.pack(side=LEFT) self.firstbutton = Button (self.frame, text="FIRST", command=self.firstpic) self.firstbutton.pack(side=LEFT) self.lastbutton = Button (self.frame, text="LAST", command=self.lastpic) self.lastbutton.pack(side=LEFT) self.quitbutton = Button (self.frame, text="QUIT", command=self.quitprog) self.quitbutton.pack(side=RIGHT) self.selectbutton = Button (self.frame, text="SELECT", command=self.selectpic, height=10, width=10) self.selectbutton.pack(side=LEFT) self.nextbutton = Button (self.frame, text="NEXT", command=self.nextpic) self.nextbutton.pack(side=LEFT) self.previousbutton = Button (self.frame, text="PREVIOUS", command=self.previouspic) self.previousbutton.pack(side=LEFT) self.rotatebutton = Button (self.frame, text="ROTATE LEFT", command=self.rotatepicleft) self.rotatebutton.pack(side=RIGHT) self.rotatebutton = Button (self.frame, text="ROTATE RIGHT", command=self.rotatepicright) self.rotatebutton.pack(side=RIGHT) # Setup a text label to show display image index and anchor it to a string var. # self.txtlabel = Label (self.imframe, textvar=self.textstring) # self.txtlabel.pack(side=BOTTOM) # Set up a label with entry to take input for Go to a particular photo self.gotolabel = Label (self.txtboxframe, textvar= self.textstring) self.gotolabel.pack(side=RIGHT) self.txtbox = Entry (self.txtboxframe, textvariable=self.photoindex, bd=1, width=4, justify=RIGHT) self.txtbox.bind('<Return>', self.get) self.txtbox.pack(side=LEFT) # self.gotobutton = Button (self.frame, text="GO TO", command=self.gotopicture) # self.gotobutton.pack(side=BOTTOM) # Note that the default pic is un-rotated. Used to toggle thumbnail # self.rotated = 0 # Quit button action. def quitprog (self): # If selected list is not empty, prompt user for location to save list of sele
cted images & append to it. if self.selected: self.out_file_path_str = tkFileDialog.askdirectory (title='Choose target dir to s
tore selected files') if not self.out_file_path_str: tkMessageBox.showerror ("Error", "Choose valid dir") return self.out_file_path_str = os.path.join (self.out_file_path_str, 'selected_photos.txt') with open (self.out_file_path_str, "a") as f: for n in self.selected: f.write (n+"\n") # Quit program. self.frame.quit () # Select button action. def selectpic (self): # Handle error condition: No images loaded yet. if (self.curimage is None): tkMessageBox.showerror ("Error", "Load images first!") return # If selected, add to list if not previously added. if self.selectbutton ["text"] == "SELECT": if self.curimage not in self.selected: self.selected.append (self.curimage) self.selectbutton ["text"] = "UNSELECT" else: tkMessageBox.showwarning ("Warning", "Already selected!") else: self.selected.remove (self.curimage) self.selectbutton ["text"] = "SELECT" def showimage (self): # if self.rotated: # self.image.thumbnail ((648, 648), Image.ANTIALIAS) # else: # self.image.thumbnail ((648, 648), Image.ANTIALIAS) self.image.thumbnail ((648, 648), Image.ANTIALIAS) photo = ImageTk.PhotoImage (self.image) self.imlabel = Label (self.imframe, image=photo, height=648, width=648) self.imlabel.image = photo self.imlabel.pack (side=BOTTOM) if self.oldimlabel is not None: self.oldimlabel.destroy () # Save a reference to image label (enables destroying to repaint) self.oldimlabel = self.imlabel def rotatepicleft (self): if (self.curimage is None): tkMessageBox.showerror ("Error", "Load images first!") return self.image = self.image.rotate (90, expand=True) # self.rotated = self.rotated ^ 1 self.showimage () def rotatepicright (self): if (self.curimage is None): tkMessageBox.showerror ("Error", "Load images first!") return self.image = self.image.rotate (-90, expand=True) # self.rotated = self.rotated ^ 1 self.showimage () def firstpic (self): if (self.curimage is None): tkMessageBox.showerror ("Error", "Load images first!") return # Go to the first image in the list self.curimgidx = 0 self.curimage = self.loaded [self.curimgidx] self.image = Image.open (str(self.curimage)) self.showimage () self.photoindex.set( str (self.curimgidx + 1)) if self.curimage not in self.selected: self.selectbutton ["text"] = "SELECT" else: self.selectbutton ["text"] = "UNSELECT" def lastpic (self): if (self.curimage is None): tkMessageBox.showerror ("Error", "Load images first!") return # Go to the last image in the list self.curimgidx = self.loadedsize - 1 self.curimage = self.loaded [self.curimgidx] self.image = Image.open (str(self.curimage)) self.showimage () self.photoindex.set( str (self.curimgidx + 1)) if self.curimage not in self.selected: self.selectbutton ["text"] = "SELECT" else: self.selectbutton ["text"] = "UNSELECT" def previouspic (self): if (self.curimage is None): tkMessageBox.showerror ("Error", "Load images first!") return # Check for valid bounds of image list. if (self.curimgidx - 1 >= 0): self.curimage = self.loaded [self.curimgidx - 1] self.curimgidx = self.curimgidx - 1 self.image = Image.open (str(self.curimage)) self.showimage () self.photoindex.set( str (self.curimgidx + 1)) if self.curimage not in self.selected: self.selectbutton ["text"] = "SELECT" else: self.selectbutton ["text"] = "UNSELECT" else: tkMess
luiseiherrera/jsmd
blog/admin.py
Python
gpl-3.0
653
0.01072
from
django.contrib import admin from blog.models imp
ort Post


class PostAdmin(admin.ModelAdmin):
    #fields displayed on the change list
    list_display = ('title', 'description')
    #fields to filter the change list with
    list_filter = ('published', 'created')
    #fields to search in the change list
    search_fields = ('title', 'description', 'content')
    #enable the date drill down on the change list
    date_hierarchy = 'created'
    #enable the save buttons on top of the change form
    save_on_top = True
    #prepopulate the slug from the title - big timesaver!
    prepopulated_fields = {"slug": ("title",)}

admin.site.register(Post, PostAdmin)
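The snippet above uses the classic admin.site.register call; as a hedged
aside, the same configuration can also be registered with the admin.register
decorator available since Django 1.7:

# Equivalent registration via decorator (Django >= 1.7), shown with the
# corrected prepopulated_fields spelling; options trimmed for brevity.
from django.contrib import admin
from blog.models import Post

@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
    list_display = ('title', 'description')
    prepopulated_fields = {"slug": ("title",)}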
PoornimaNayak/autotest-client-tests
linux-tools/ibus/ibus.py
Python
gpl-2.0
1,230
0.004878
#!/bin/python
import os, subprocess
import logging

from autotest.client import test
from autotest.client.shared import error


class ibus(test.test):

    """
    Autotest module for testing basic functionality of ibus.

    @author Ramesh YR, rameshyr@linux.vnet.ibm.com
    """
    version = 1
    nfail = 0
    path = ''

    def initialize(self):
        """
        Sets the overall failure counter for the test.
""" self.nfail = 0 logging.info('\n Test initialize successfully') def run_once(self, test_path=''): """ Trigger test run """ try: os.environ["LTPBIN"] = "%s/shared" %(test_path) ret_val = subprocess.Popen(['./ibus.sh'], cwd="%s/ibus" %(test_path)) ret_val.communicate() if ret_val.returncode != 0: self.nfail += 1 except error.CmdError, e: self.nfail += 1
logging.error("Test Failed: %s", e) def postprocess(self): if self.nfail != 0: logging.info('\n nfails is non-zero') raise error.TestError('\nTest failed') else: logging.info('\n Test completed successfully ')
arthurdejong/python-stdnum
stdnum/se/__init__.py
Python
lgpl-2.1
1,012
0
# __init__.py - collection of Swedish numbers # coding: utf-8 # # Copyright (C) 2012 Arthur de Jong # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2
.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Frankli
n Street, Fifth Floor, Boston, MA # 02110-1301 USA """Collection of Swedish numbers.""" # provide aliases from stdnum.se import personnummer as personalid # noqa: F401 from stdnum.se import postnummer as postal_code # noqa: F401
gm2211/vpnAlfredWorkflow
src/alp/request/requests_cache/backends/base.py
Python
gpl-3.0
5,357
0.001307
#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends.base ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Contains BaseCache class which can be used as in-memory cache backend or extended to support persistence. """ from datetime import datetime import hashlib from copy import copy from alp.request import requests from alp.request.requests_cache.compat import is_py2 class BaseCache(object): """ Base class for cache implementations, can be used as in-memory cache. To extend it you can provide dictionary-like objects for :attr:`keys_map` and :attr:`responses` or override public methods. """ def __init__(self, *args, **kwargs): #: `key` -> `key_in_responses` mapping self.keys_map = {} #: `key_in_cache` -> `response` mapping self.responses = {} def save_response(self, key, response): """ Save response to cache :param key: key for this response :param response: response to save .. note:: Response is reduced before saving (with :meth:`reduce_response`) to make it picklable """ self.responses[key] = self.reduce_response(response), datetime.utcnow() def add_key_mapping(self, new_key, key_to_response): """ Adds mapping of `new_key` to `key_to_response` to make it possible to associate many keys with single response :param new_key: new key (e.g. url from redirect) :param key_to_response: key which can be found in :attr:`responses` :return: """ self.keys_map[new_key] = key_to_response def get_response_and_time(self, key, default=(None, None)): """ Retrieves response and timestamp for `key` if it's stored in cache, otherwise returns `default` :param key: key of resource :param default: return this if `key` not found in cache :returns: tuple (response, datetime) .. note:: Response is restored after unpickling with :meth:`restore_response` """ try: if key not in self.responses: key = self.keys_map[key] response, timestamp = self.responses[key] except KeyError: return default return self.restore_response(response), timestamp def delete(self, key): """ Delete `key` from cache. Also deletes all responses from response history """ try: if key in self.responses: response, _ = self.responses[key] del self.responses[key] else: response, _ = self.responses[self.keys_map[key]] del self.keys_map[key] for r in response.history: del self.keys_map[self.create_key(r.request)] except KeyError: pass def delete_url(self, url): """ Delete response associated with `url` from cache. Also deletes all responses from response history. Works only for GET requests """ self.delete(self._url_to_key(url)) def clear(self): """ Clear cache """ self.responses.clear() self.keys_map.clear() def has_key(self, key): """ Returns `True` if cache has `key`, `False` otherwise """ return key in self.responses or key in self.keys_map def has_url(self, url): """ Returns `True` if cache has `url`, `False` otherwise. 
Works only for GET request urls """ return self.has_key(self._url_to_key(url)) def _url_to_key(self, url): from requests import Request return self.create_key(Request('GET', url).prepare()) _response_attrs = ['_content', 'url', 'status_code', 'cookies', 'headers', 'encoding', 'request', 'reason'] def reduce_response(self, response): """ Reduce response object to make it compatible with ``pickle`` """ result = _Store() # prefetch response.content for field in self._response_attrs: setattr(result, field, self._picklable_field(response, field)) result.history = tuple(self.reduce_response(r) for r in response.history) return result def _picklable_field(self, response, name): value = getattr(response, name) if name == 'request': value = copy(value) value.hooks = [] return value def restore_response(self, response): """ Restore response object after unpickling """ result = requests.Response() for field in self._response_attrs: setattr(result, field, getattr(response, field)) result.hist
ory = tuple(self.restore_response(r) for r in response.history) return result def create_key(self, request): key = hashlib.sha256() key.update(_to_bytes(request.method.upper())) k
ey.update(_to_bytes(request.url)) if request.body: key.update(_to_bytes(request.body)) return key.hexdigest() def __str__(self): return 'keys: %s\nresponses: %s' % (self.keys_map, self.responses) # used for saving response attributes class _Store(object): pass def _to_bytes(s, encoding='utf-8'): if is_py2 or isinstance(s, bytes): return s return bytes(s, encoding)
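BaseCache's docstrings describe a complete round trip (create_key ->
save_response -> get_response_and_time). A hedged usage sketch, assuming the
upstream requests package and that the module above is importable as `base`
(the URL is illustrative only):

# In-memory cache round trip with BaseCache.
import requests
from base import BaseCache

cache = BaseCache()
response = requests.get('https://example.com/')

key = cache.create_key(response.request)   # sha256 of method, url, body
cache.save_response(key, response)         # stores a pickle-safe copy

restored, saved_at = cache.get_response_and_time(key)
assert restored.status_code == response.status_code
print('cached at', saved_at, 'status', restored.status_code)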
philpot/trump-insult-haiku
nytorg.py
Python
apache-2.0
4,644
0.004091
[('GROUP', 10), ('UNITED STATES SENATOR', 8), ("''", 4), # media outlet ('NEWS ORGANIZATION', 5), ('NEWSPAPER', 4), ('MAGAZINE', 3), ('TELEVISION SHOW', 2), ('NEWS WEBSITE', 1), # media person ('BLOGGER, THE WASHINGTON POST', 1), ('ANCHOR, FOX NEWS', 1), ('FOX NEWS ANCHOR', 1), ("CONTRIBUTOR, 'THE VIEW'", 1), ("CONTRIBUTOR, 'MORNING EDITION'", 1), ('OPINION WRITER, THE WASHINGTON POST', 1), ('JOURNALIST, PBS', 1), ('JOURNALIST, NBC NEWS', 1), ('JOURNALIST, BLOOMBERG', 1), ('GLOBAL ANCHOR, YAHOO NEWS', 1), ('PUBLISHER, NEW HAMPSHIRE UNION LEADER', 1), ('REPORTER, THE NEW YORK TIMES', 3), ('COLUMNIST, THE NEW YORK TIMES', 3), ('COLUMNIST', 3), ('JOURNALIST, THE NEW YORK TIMES', 2), ('JOURNALIST', 2), ('WHITE HOUSE CORRESPONDENT, CBS', 1), ('WALL STREET EXECUTIVE, NEW YORK TIMES CONTRIBUTING WRITER', 1), ('TELEVISION PERSONALITY', 1), ('TELEVISION HOST, MSNBC', 1), ('TELEVISION HOST', 1), ('STAFF WRITER, FORBES', 1), ('REPORTER, THE ASSOCIATED PRESS', 1), ('REPORTER, FOX NEWS', 1), ('REPORTER, CBS NEWS', 1), ('POLITICO REPORTER', 1), ('EDITOR-IN-CHIEF, ROLL CALL', 1), ('EDITOR, VANITY FAIR', 1), ('EDITOR, THE WEEKLY STANDARD', 1), ('EDITOR, NATIONAL REVIEW', 1), ('EDITOR, FOX NEWS CHANNEL', 1), ('COLUMNIST, THE WASHINGTON POST', 1), ('COLUMNIST AND FOX NEWS CONTRIBUTOR', 1), ("CO-HOST, 'TODAY'", 1), ("CO-HOST, 'MORNING JOE'", 1), ("CO-ANCHOR, 'NEW DAY'", 1), ('CNN CONTRIBUTOR', 1), ('CNN ANCHOR', 1), ('CHIEF WASHINGTON CORRESPONDENT, CNBC', 1), ('CHIEF NATIONAL CORRESPONDENT, YAHOO NEWS', 1), ('FOUNDER, THE HUFFINGTON POST', 1), ("HOST, 'MORNING JOE'", 1), ("FORMER CO-HOST, 'THE VIEW'", 1), ("MODERATOR, 'MEET THE PRESS'", 1), ('CORRESPONDENT, NBC NEWS', 1), # media/pundit/commentator ('CONSERVATIVE COMMENTATOR', 1), ('POLITICAL CORRESPONDENT, MSNBC', 1), ('POLITICAL COMMENTATOR', 1), ('POLITICAL ANALYST, CNN', 1), ('POLITICAL ANALYST', 1), # political organization ('POLITICAL PARTY', 3), ('FORMER PRESIDENT OF THE UNITED STATES', 3), ('POLITICAL CONSULTANT', 2), ('POLITICAL ANALYST, FOX NEWS', 2), ('CNN NEWS PROGRAM', 2), # political: governor ('SOUTH CAROLINA GOVERNOR', 1), ('OHIO GOVERNOR', 1), # political: GOP rival ('FORMER NEW YORK GOVERNOR', 1), ('NEW JERSEY GOVERNOR', 1), ('WISCONSIN GOVERNOR', 1), ('FORMER LOUISIANA GOVERNOR', 1), ('FORMER FLORIDA GOVERNOR', 1), ('FLORIDA GOVERNOR', 1), ('RETIRED NEUROSURGEON', 1), ('FORMER TEXAS GOVERNOR', 1), # political: GOP misc ('FORMER NEW HAMPSHIRE GOVERNOR', 1), ('SUPREME COURT CHIEF JUSTICE', 1), ('FORMER PENNSYLVANIA GOVERNOR', 1), # campaign/staffer ('THE PRESIDENTIAL CAMPAIGN OF TED CRUZ', 1), ('THE PRESIDENTIAL CAMPAIGN OF JEB BUSH', 1), ('STAFFER FOR JOHN KASICH', 1), ('EMPLOYEE FOR JEB BUSH', 1), ('JEB BUSH, SUPPORTERS OF', 1), # foreign entity ('TERRORIST GROUP', 1), ('INTERNATIONAL ALLIANCE', 1), # political organization ('REPUBLICAN POLITICA
L CONSULTANT', 1), # political: Democratic rival ('DEMOCRATIC CANDIDATE, FORMER GOVERNOR OF MARYLAND', 1), ('FORMER RHODE ISLAND GOVERNOR', 1), # p
olitical: other Democratic ('MARYLAND SENATOR', 1), ('MAYOR OF SAN JOSE, CALIF.', 1), ('MAYOR OF NEW YORK CITY', 1), ('FORMER MAYOR OF PHILADELPHIA', 1), ("PROTESTERS OF MR. TRUMP'S RALLIES", 1), # foreign leader ('PRINCE, SAUDI ARABIA', 1), ('GERMAN CHANCELLOR', 1), # business leader ('FORMER BUSINESS EXECUTIVE', 1), ('OWNER, THE NEW YORK JETS', 1), ('OWNER, THE NEW YORK DAILY NEWS', 1), ('HEDGE FUND MANAGER', 1), ('ENTREPRENEUR', 1), ('PRESIDENT OF THE UNITED STATES', 1), ('PRESIDENT AND CHIEF EXECUTIVE, THE FAMILY LEADER', 1), ('POLITICAL FUND-RAISING COMMITTEES', 1), ('PERFORMER', 1), ('MUSICIAN', 1), ('MOSTLY REPUBLICAN POLITICIANS', 1), ('MIXED MARTIAL ARTIST', 1), ('MISS UNIVERSE, 2014', 1), ('LAWYER', 1), ('FORMER WHITE HOUSE PRESS SECRETARY', 1), ("FORMER TRUMP EXECUTIVE AND AUTHOR OF 'ALL ALONE ON THE 68TH FLOOR']", 1), ('FORMER SECRETARY OF STATE', 1), ('FORMER POLITICAL ADVISER TO BILL CLINTON', 1), ('FORMER MASSACHUSETTS GOVERNOR', 1), ('FORMER DEPUTY WHITE HOUSE CHIEF OF STAFF', 1), ('EVANGELICAL LEADER', 1), ('DISTRICT JUDGE OF THE UNITED STATES DISTRICT COURT FOR THE SOUTHERN DISTRICT OF CALIFORNIA', 1), ('DEPUTY EDITOR, WALL STREET JOURNAL EDITORIAL PAGE', 1), ('CONSERVATIVE DONOR, BILLIONAIRE, PHILANTHROPIST', 1), ("COMEDIAN, HOST, 'LAST WEEK TONIGHT'", 1), ('CHIEF EXECUTIVE, T-MOBILE', 1), ('BOSTON MAYOR', 1), ("AUTHOR, 'LOST TYCOON: THE MANY LIVES OF DONALD J. TRUMP'", 1), ('ANTITAX POLITICAL GROUP', 1), ('ACTRESS AND TELEVISION PERSONALITY', 1), ('ACTOR', 1), ('', 1)]
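The file is a flat module-level list of (descriptor, count) pairs grouped by
inline category comments. A hedged sketch of how such a list is typically
aggregated (`pairs` is an assumed binding, not a name from the original
file, and only an excerpt of the data is shown):

# Aggregate the (descriptor, count) pairs above.
pairs = [('GROUP', 10), ('NEWSPAPER', 4), ('MAGAZINE', 3)]  # excerpt only

total = sum(count for _, count in pairs)
most_frequent = max(pairs, key=lambda p: p[1])
print(total, most_frequent)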
LuqueDaniel/LoL-Server-Status
lol_server_status/gui/widgets/about.py
Python
gpl-3.0
2,800
0.003214
# -*- coding: utf-8 -*- # # This file is part of LoL Server Status # # LoL Server Status is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # any later version. # # LoL Server Status is distributed in the hope that it will be useful, # but WITHOU
T ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have receiv
ed a copy of the GNU General Public License # along with LoL Server Status. If not, see <http://www.gnu.org/licenses/>. # # Source: <http://github.com/LuqueDaniel/LoL-Server-Status> #LoL Server Status imports from lol_server_status import __version__ from lol_server_status import __author__ from lol_server_status import __license__ from lol_server_status import __source__ #PyQt4.QtGui imports from PyQt4.QtGui import QWidget from PyQt4.QtGui import QVBoxLayout from PyQt4.QtGui import QLabel from PyQt4.QtGui import QPushButton from PyQt4.QtGui import QMessageBox #PyQt4.QtCore imports from PyQt4.QtCore import Qt from PyQt4.QtCore import SIGNAL class aboutWidget(QWidget): def __init__(self, parent=None): super(aboutWidget, self).__init__() self.setWindowFlags(Qt.FramelessWindowHint) self.setWindowTitle('LoL Server Status - About') self.setMinimumWidth(parent.width()) self.move(parent.pos()) self.setFocus(False) #label_title label_title = QLabel('LoL Server Status') label_title.setObjectName('label_title') label_title.setAlignment(Qt.AlignCenter) #label_source label_source = QLabel( 'Source: <a style="color:#0073de" href="%s">Github repository</a>' % __source__) label_source.setToolTip('Github repository') label_source.setOpenExternalLinks(True) #btn_about_qt btn_about_qt = QPushButton('About Qt') #General layout vbox = QVBoxLayout(self) vbox.addWidget(label_title) vbox.addWidget(QLabel('Version: %s' % __version__)) vbox.addWidget(QLabel('Author: %s' % __author__)) vbox.addWidget(QLabel('License: %s' % __license__)) vbox.addWidget(label_source) vbox.addWidget(btn_about_qt) #CONNECT SGNALS self.connect(btn_about_qt, SIGNAL('clicked()'), self.open_about_qt) def open_about_qt(self): QMessageBox.aboutQt(self, 'About Qt') def mouseDoubleClickEvent(self, event): if event.button() == Qt.LeftButton: self.close() def keyPressEvent(self, event): if event.key() == Qt.Key_Escape: self.close()
rsalmaso/wagtail
wagtail/contrib/redirects/filters.py
Python
bsd-3-clause
828
0
import django_filters from
django.utils.translation import gettext as _ from wagtail.admin.filters import WagtailFilterSet from wagtail.admin.widgets import ButtonSelect from wagtail.core.models import Site class RedirectsReportFilterSet(WagtailFilterSet): is_permanent = django_filters.ChoiceFilter( label=_("Type"), method="filter_type", choices=( (True, _("Permanent")), (False, _("Temporary")), ), empty_label=_("All"), widget=ButtonSelect, )
site = django_filters.ModelChoiceFilter( field_name="site", queryset=Site.objects.all() ) def filter_type(self, queryset, name, value): if value and self.request and self.request.user: queryset = queryset.filter(is_permanent=value) return queryset
robertnishihara/ray
rllib/train.py
Python
apache-2.0
7,814
0
#!/usr/bin/env python import argparse import os from pathlib import Path import yaml import ray from ray.cluster_utils import Cluster from ray.tune.config_parser import make_parser from ray.tune.result import DEFAULT_RESULTS_DIR from ray.tune.resources import resources_to_json from ray.tune.tune import _make_scheduler, run_experiments from ray.rllib.utils.framework import try_import_tf, try_import_torch # Try to import both backends for flag checking/warnings. tf1, tf, tfv = try_import_tf() torch, _ = try_import_torch() EXAMPLE_USAGE = """ Training example via RLlib CLI: rllib train --run DQN --env CartPole-v0 Grid search example via RLlib CLI: rllib train -f tuned_examples/cartpole-grid-search-example.yaml Grid search example via executable: ./train.py -f tuned_examples/cartpole-grid-search-example.yaml Note that -f overrides all other trial-specific command-line options. """ def create_parser(parser_creator=None): parser = make_parser( parser_creator=parser_creator, formatter_class=argparse.RawDescriptionHelpFormatter, description="Train a reinforcement learning agent.", epilog=EXAMPLE_USAGE) # See also the base parser definition in ray/tune/config_parser.py parser.add_argument( "--ray-address", default=None, type=str, help="Connect to an existing Ray cluster at this address instead " "of starting a new one.") parser.add_argument( "--no-ray-ui", action="store_true", help="Whether to disable the Ray web ui.") parser.add_argument( "--local-mode", action="store_true", help="Whether to run ray with `local_mode=True`. " "Only if --ray-num-nodes is not used.") parser.add_argument( "--ray-num-cpus", default=None, type=int, help="--num-cpus to use if starting a new cluster.") parser.add_argument( "--ray-num-gpus", default=None, type=int, help="--num-gpus to use if starting a new cluster.") parser.add_argument( "--ray-num-nodes", default=None, type=int, help="Emulate multiple cluster nodes for debugging.") parser.add_argument( "--ray-object-store-memory", default=None, type=int, help="--object-store-memory to use if starting a new cluster.") parser.add_argument( "--experiment-name", default="default", type=str, help="Name of the subdirectory under `local_dir` to put results in.") parser.add_argument( "--local-dir", default=DEFAULT_RESULTS_DIR, type=str, help="Local dir to save training results to. Defaults to '{}'.".format( DEFAULT_RESULTS_DIR)) parser.add_argument( "--upload-dir", default="", type=str, help="Optional URI to sync training results to (e.g. s3://bucket).") parser.add_argument( "-v", action="store_true", help="Whether to use INFO level logging.") parser.add_argument( "-vv", action="store_true", help="Whether to use DEBUG level logging.") parser.add_argument( "--resume", action="store_true", help="Whether to attempt to resume previous Tune experiments.") parser.add_argument( "--torch", action="store_true", help="Whether to use PyTorch (instead of tf) as the DL framework.") parser.add_argument( "--eager", action="store_true", help="Whether to attempt to enable TF eager execution.") parser.add_argument( "--trace", action="store_true", help="Whether to attempt to enable tracing for eager mode.") parser.add_argument( "--env", default=None, type=str, help="The gym environment to use.") parser.add_argument( "--queue-trials", action="store_true", help=( "Whether to queue trials when the cluster does not currently have " "enough resources to launch one. 
This should be set to True when " "running on an autoscaling cluster to enable automatic scale-up.")) parser.add_argument( "-f", "--config-file", default=None, type=str, help="If specified, use config options from this file. Note that this " "overrides any trial-specific options set via flags above.") return parser def run(args, parser): if args.config_file: with open(args.config_file) as f: experiments = yaml.safe_load(f) else: # Note: keep this in sync with tune/config_parser.py experiments = { args.experiment_name: { # i.e. log to ~/ray_results/default "run": args.run, "checkpoint_freq": args.checkpoint_freq, "checkpoint_at_end": args.checkpoint_at_end, "keep_checkpoints_num": args.keep_checkpoints_num, "checkpoint_score_attr": args.checkpoint_score_attr, "local_dir": args.local_dir, "resources_per_trial": ( args.resources_per_trial and resources_to_json(args.resources_per_trial)), "stop": args.stop, "config": dict(args.config, env=args.env), "restore": args.restore, "num_samples": args.num_samples, "upload_dir": args.upload_dir, } } verbose = 1 for exp in experiments.values(): # Bazel makes it hard to find files specified in `args` (and `data`). # Look for them here. # NOTE: Some of our yaml files don't have a `config` section. if exp.get("config", {}).get("input") and \ not os.path.exists(exp["config"]["input"]): # This script runs in the ray/rllib dir. rllib_dir = Path(__file__).parent input_file = rllib_dir.absolute().joinpath(exp["config"]["input"]) exp["config"]["input"] = str(input_file) if not exp.get("run"): parser.error("the following arguments are required: --run") if not exp.get("env") and not exp.get("config", {}).get("env"): parser.error("the following arguments are required: --env") if args.torch: exp["config"]["framework"] = "torch" elif args.eager: exp["config"]["framework"] = "tfe" if args.trace:
if exp["config"]["framework"] not in ["tf2", "tfe"]:
raise ValueError("Must enable --eager to enable tracing.") exp["config"]["eager_tracing"] = True if args.v: exp["config"]["log_level"] = "INFO" verbose = 2 if args.vv: exp["config"]["log_level"] = "DEBUG" verbose = 3 if args.ray_num_nodes: cluster = Cluster() for _ in range(args.ray_num_nodes): cluster.add_node( num_cpus=args.ray_num_cpus or 1, num_gpus=args.ray_num_gpus or 0, object_store_memory=args.ray_object_store_memory) ray.init(address=cluster.address) else: ray.init( include_dashboard=not args.no_ray_ui, address=args.ray_address, object_store_memory=args.ray_object_store_memory, num_cpus=args.ray_num_cpus, num_gpus=args.ray_num_gpus, local_mode=args.local_mode) run_experiments( experiments, scheduler=_make_scheduler(args), resume=args.resume, queue_trials=args.queue_trials, verbose=verbose, concurrent=True) ray.shutdown() if __name__ == "__main__": parser = create_parser() args = parser.parse_args() run(args, parser)
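The -f/--config-file branch of run() expects a YAML mapping with the same
shape as the inline `experiments` dict. A hedged sketch of that structure,
expressed as the Python dict run() builds (experiment name and all concrete
values are illustrative; the keys mirror the code above):

# Equivalent of a YAML config file, as consumed by run_experiments().
experiments = {
    "cartpole-dqn": {                # results land in ~/ray_results/cartpole-dqn
        "run": "DQN",
        "env": "CartPole-v0",
        "stop": {"episode_reward_mean": 150},
        "config": {"num_workers": 2, "framework": "torch"},
    }
}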
papajijaat/Face-Detect
code/face_detect.py
Python
mit
869
0.002301
import cv2 im
port sys #cascPath = sys.argv[1] #faceCascade = cv2.CascadeClassifier(cascPath) faceCascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml') video_capture = cv2.VideoCapture(0) while True: # Capture frame-by-frame ret, frame = video_capture.read() gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) faces = faceCascade.detectMultiScale( gray, s
caleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags=cv2.CASCADE_SCALE_IMAGE ) # Draw a rectangle around the faces for (x, y, w, h) in faces: cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2) # Display the resulting frame cv2.imshow('Video', frame) if cv2.waitKey(1) & 0xFF == ord('q'): break # When everything is done, release the capture video_capture.release() cv2.destroyAllWindows()
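The script above runs the Haar cascade on a webcam stream; the same
detectMultiScale call works on a still image. A hedged variant ('group.jpg'
is a placeholder path, not part of the original script):

# Detect faces in a single image instead of a video stream.
import cv2

faceCascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
image = cv2.imread('group.jpg')
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5,
                                     minSize=(30, 30))
for (x, y, w, h) in faces:
    cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
cv2.imwrite('group_faces.jpg', image)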
askdaddy/PerfKitBenchmarker
perfkitbenchmarker/__init__.py
Python
apache-2.0
679
0
# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the L
icense is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the speci
fic language governing permissions and # limitations under the License. import gflags as flags # NOQA import gflags_validators as flags_validators # NOQA
HellerCommaA/flask-material-lite
sample_application/__init__.py
Python
mit
2,763
0.001448
from flask import Flask, render_template, flash from flask_material_lite import Material_Lite from flask_appconfig import AppConfig from flask_wtf import Form, RecaptchaField from flask_wtf.file import FileField from wtforms import TextField, HiddenField, ValidationError, RadioField,\ BooleanField, SubmitField, IntegerField, FormField, validators from wtforms.validators import Required # straight from the wtforms docs: class TelephoneForm(Form): country_code = IntegerField('Country Code', [validators.required()]) area_code = IntegerField('Area Code/Exchange', [validators.required()]) number = TextField('Number') class ExampleForm(Form): field1 = TextField('First Field', description='This is field one.') field2 = TextField('Second Field', description='This is field two.', validators=[Required()]) hidden_field = HiddenField('You cannot see this', description='Nope') recaptcha = RecaptchaField('A sample recaptcha field') radio_field = RadioField('This is a radio field', choices=[ ('head_radio', 'Head radio'), ('radio_76fm', "Radio '76 FM"), ('lips_106', 'Lips 106'), ('wctr', 'WCTR'), ]) checkbox_field = BooleanField('This is a checkbox', description='Checkboxes can be tricky.') # subforms mobile_phone = FormField(TelephoneForm) # you can change the label as well office_phone = FormField(TelephoneForm, label='Your office phone') ff = FileField('Sample upload') submit_button = SubmitField('Submit Form') def validate_hidden_field(form, field): raise ValidationError('Always wrong') def create_app(configfile=None): app = Flask(__name__) AppConfig(app, configfile) # Flask-A
ppconfig is not necessary, but # highly recommend =) # https://github.com/mbr/flask-appconfig Material_Lite(app) # in a real app, these should be configured through Flask-Appconfig app.config['SECRET_KEY'] = 'devkey' app.config['RECAPTCHA_PUBLIC_
KEY'] = \ '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw' @app.route('/', methods=('GET', 'POST')) def index(): form = ExampleForm() form.validate_on_submit() # to get error messages to the browser flash('critical message', 'critical') flash('error message', 'error') flash('warning message', 'warning') flash('info message', 'info') flash('debug message', 'debug') flash('different message', 'different') flash('uncategorized message') return render_template('index.html', form=form) return app if __name__ == '__main__': create_app().run(debug=True)
ray-project/ray
rllib/utils/tf_ops.py
Python
apache-2.0
229
0
from ray.rllib.utils.deprecation import depr
ecation_warning from ray.rllib.utils.tf_utils import * # noqa deprecation_warning( old="ray.rllib.utils.tf_ops.[...]", new="ray.rllib.utils.tf_utils.[...]", error=Tru
e, )
infoxchange/django-localflavor
tests/test_pk/forms.py
Python
bsd-3-clause
355
0.002817
from django.forms import ModelForm from .models import PakistaniPlace class PakistaniPlaceForm(ModelForm): ""
" Form for storing a Pakistani place. """ class Meta: model = PakistaniPlace fields = ('state', 'state_required', 'state_default', 'postcode', '
postcode_required', 'postcode_default', 'phone', 'name')
MrCrawdaddy/humans
profiles/views.py
Python
mit
2,056
0.002918
from django.shortcuts import render, HttpResponseRedirect, redirect from django.contrib.auth.decorators import login_required from django.views.generic.edit import CreateView from django.contrib.auth.models import User from django.contrib.auth.forms import UserCreationForm from django.forms.models import inlineformset_factory from django.core.exceptions import PermissionDenied from django.core.urlresolvers import reverse_lazy from .models import Profile from .forms import ProfileForm class RegistrationView(CreateView): model = User form_class = UserCreationForm template_name = 'profiles/user_create.html' success_url = reverse_lazy('profiles:redirect') @login_required def account_redirect(request): return redirect('profiles:edit', pk=request.user.pk) @login_required def edit_user(request, pk): user = User.objects.get(pk=pk) user_form = ProfileForm(instance=user) # In the line below list the names of your Profile model fields. These are the ones I used. ProfileInlineFormset = inlineformset_factory(User, Profile, fields=('preferred_name', 'birthdate
', 'interests', 'state')) formset = ProfileInlineFormset(instance=user) if request.user.is_authenticated() and r
equest.user.id == user.id: if request.method == "POST": user_form = ProfileForm(request.POST, request.FILES, instance=user) formset = ProfileInlineFormset(request.POST, request.FILES, instance=user) if user_form.is_valid(): created_user = user_form.save(commit=False) formset = ProfileInlineFormset(request.POST, request.FILES, instance=created_user) if formset.is_valid(): created_user.save() formset.save() return HttpResponseRedirect('/documentaries/') return render(request, "profiles/profile_update.html", { "noodle": pk, "noodle_form": user_form, "formset": formset, }) else: raise PermissionDenied
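The views reference the named URLs profiles:redirect and profiles:edit. A
hedged sketch of the urls.py that would wire them up — the patterns are
assumptions inferred from those names, using the pre-Django-2.0 url() API
implied by the django.core.urlresolvers import above:

# Hypothetical profiles/urls.py matching the names used in the views.
from django.conf.urls import url
from . import views

urlpatterns = [
    url(r'^register/$', views.RegistrationView.as_view(), name='register'),
    url(r'^redirect/$', views.account_redirect, name='redirect'),
    url(r'^(?P<pk>\d+)/edit/$', views.edit_user, name='edit'),
]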
EmanueleCannizzaro/scons
src/engine/SCons/Tool/yacc.py
Python
mit
4,613
0.003685
"""SCons.Tool.yacc Tool-specific initialization for yacc. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/yacc.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog" import os.path import SCons.Defaults import SCons.Tool import SCons.Util YaccAction = SCons.Action.Action("$YACCCOM", "$YACCCOMSTR") def _yaccEmitter(target, source, env, ysuf, hsuf): yaccflags = env.subst("$YACCFLAGS", target=target, source=source) flags = SCons.Util.CLVar(yaccflags) targetBase, targetExt = os.path.splitext(SCons.Util.to_String(target[0])) if '.ym' in ysuf: # If using Objective-C target = [targetBase + ".m"] # the extension is ".m". # If -d is specified on the command line, yacc will emit a .h # or .hpp file with the same name as the .c or .cpp output file. if '-d' in flags: target.append(targetBase + env.subst(hsuf, target=target, source=source)) # If -g is specified on the command line, yacc will emit a .vcg # file with the same base name as the .y, .yacc, .ym or .yy file. if "-g" in flags: base, ext = os.path.splitext(SCons.Util.to_String(source[0])) target.append(base + env.subst("$YACCVCGFILESUFFIX")) # If -v is specified yacc will create the output debug file # which is not really source for any process, but should # be noted and also be cleaned # Bug #2558 if "-v" in flags: env.SideEffect(targetBase+'.output',target[0]) env.Clean(target[0],targetBase+'.output') # With --defines and --graph, the name of the file is totally defined # in the options. fileGenOptions = ["--defines=", "--graph="] for option in flags: for fileGenOption in fileGenOptions: l = len(fileGenOption) if option[:l] == fileGenOption: # A file generating option is present, so add the file # name to the list of targets. fileName = option[l:].strip() target.append(fileName) return (target, source) def yEmitter(target, source, env): return _yaccEmitter(target, source, env, ['.y', '.yacc'], '$YACCHFILESUFFIX') def ymEmitter(target, source, env): return _yaccEmitter(target, source, env, ['.ym'], '$YACCHFILESUFFIX') def yyEmitter(target, source, env): return _yaccEmitter(target, source, env, ['.yy'], '$YACCHXXFILESUFFIX') def generate(env): """Add Builders and construction variables for yacc to an Environment.""" c_file, cxx_file = SCons.Tool.createCFileBuilders(env) # C c_f
ile.add_action('.y', YaccAction) c_file.add_emitter('.y', yEmitter) c_file.add_action('.yacc', YaccAction) c_file.add_emitter('.yacc', yEmitter) # Objective-C c_file.add_action('.ym', YaccAction) c_file.add_emitter('.ym', ymEmitter) # C++
cxx_file.add_action('.yy', YaccAction) cxx_file.add_emitter('.yy', yyEmitter) env['YACC'] = env.Detect('bison') or 'yacc' env['YACCFLAGS'] = SCons.Util.CLVar('') env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES' env['YACCHFILESUFFIX'] = '.h' env['YACCHXXFILESUFFIX'] = '.hpp' env['YACCVCGFILESUFFIX'] = '.vcg' def exists(env): return env.Detect(['bison', 'yacc']) # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
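The emitters above add the generated header to the target list whenever -d
appears in $YACCFLAGS. A hedged SConstruct sketch exercising that path (file
names are illustrative; scons itself provides Environment at SConstruct
scope, so no import is needed):

# SConstruct: with -d in YACCFLAGS, building parser.c from parser.y also
# registers parser.h (via $YACCHFILESUFFIX) as a build target.
env = Environment(tools=['default', 'yacc'], YACCFLAGS='-d')
env.CFile(target='parser.c', source='parser.y')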
lnls-sirius/dev-packages
siriuspy/tests/pwrsupply/variables.py
Python
gpl-3.0
3,146
0.000638
"""Test values to test read_all_variables methods. Used by test in controller and model modulesself. """ # TODO: change string format return from serial values = [ 8579, 6.7230000495910645, 6.
7230000495910645, [b'V', b'0', b'.', b'0', b'7', b' ', b'2', b'0', b'1', b'8', b'-', b'0', b'3', b'-', b'2', b'6', b'V', b'0', b'.', b'0', b'7', b' ', b'2', b'0', b'1', b'8', b'-', b'0', b'3', b'-', b'2', b'6'], 5, 8617, 0, 2, 1, 0.0, 0.0, 1.0
, 0.0, [1.0, 1.0, 1.0, 0.0], 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 6.722831726074219, 1.23291015625, 5.029296875, 53.0] bsmp_values = [ '\x00', '\x13', '\x00', 'ô', '\x83', '!', 'Ñ', '"', '×', '@', 'Ñ', '"', '×', '@', 'V', '0', '.', '0', '7', ' ', '2', '0', '1', '8', '-', '0', '3', '-', '2', '6', 'V', '0', '.', '0', '7', ' ', '2', '0', '1', '8', '-', '0', '3', '-', '2', '6', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x05', '\x00', '\x00', '\x00', '©', '!', '\x00', '\x00', '\x00', '\x00', '\x02', '\x00', '\x01', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x80', '?', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x80', '?', '\x00', '\x00', '\x80', '?', '\x00', '\x00', '\x80', '?', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', 'p', '!', '×', '@', '\x00', 'Ð', '\x9d', '?', '\x00', 'ð', '\xa0', '@', '\x00', '\x00', 'T', 'B', 'c'] # Missing entries dict_values = { 'PwrState-Sts': 1, 'OpMode-Sts': 0, 'CtrlLoop-Sel': 0, 'CtrlMode-Sts': 0, 'Current-RB': 6.7230000495910645, 'CurrentRef-Mon': 6.7230000495910645, 'Version-Cte': 'V0.07 2018-03-26V0.07 2018-03-26', 'CycleEnbl-Mon': 0, 'CycleType-Sts': 2, 'CycleNrCycles-RB': 1, 'CycleIndex-Mon': 0.0, 'CycleFreq-RB': 0.0, 'CycleAmpl-RB': 1.0, 'CycleOffset-RB': 0.0, 'CycleAuxParam-RB': [1.0, 1.0, 1.0, 0.0], 'IntlkSoft-Mon': 0, 'IntlkHard-Mon': 0, 'Current-Mon': 6.722831726074219, 'WfmData-RB': list(range(4000))}
johnwoltman/geotest
geotest/settings.py
Python
mit
2,023
0
""" Django settings for geotest project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'i77bm_9c7(h0r#6s%=d0_d$t!vvj(#j9fkr&-$j8)vkj0q1=x_' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'djan
go.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.gis', 'geotest', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.midd
leware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'geotest.urls' WSGI_APPLICATION = 'geotest.wsgi.application' # Database # https://docs.djangoproject.com/en/1.6/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geodb', 'USER': 'jdoe', } } # Internationalization # https://docs.djangoproject.com/en/1.6/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.6/howto/static-files/ STATIC_URL = '/static/'
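With 'django.contrib.gis' in INSTALLED_APPS and the postgis backend
configured above, models can declare spatial fields. A hedged companion
sketch (model and field names are illustrative; GeoManager is the Django
1.6-era requirement for spatial lookups):

# Hypothetical geotest/models.py for this GeoDjango setup.
from django.contrib.gis.db import models

class Place(models.Model):
    name = models.CharField(max_length=100)
    location = models.PointField()

    objects = models.GeoManager()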
gunnery/gunnery
gunnery/task/migrations/0002_auto__add_field_executioncommandserver_celery_task_id.py
Python
apache-2.0
12,042
0.007806
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'ExecutionCommandServer.celery_task_id' db.add_column(u'task_executioncommandserver', 'celery_task_id', self.gf('django.db.models.fields.CharField')(default='', max_length=36, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'ExecutionCommandServer.celery_task_id' db.delete_column(u'task_executioncommandserver', 'celery_task_id') models = { u'account.customuser': { 'Meta': {'object_name': 'CustomUser'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}) }, u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'core.application': { 'Meta': {'unique_together': "(('department', 'name'),)", 'object_name': 'Application'}, 'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': u"orm['core.Department']"}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, u'core.department': { 'Meta': {'object_name': 'Department'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) }, u'core.environment': { 'Meta': {'unique_together': "(('application', 'name'),)", 'object_name': 'Environment'}, 'application': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'environments'", 'to': u"orm['core.Application']"}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_production': ('django.db.models.fields.Bo
oleanField', [], {'default': 'False'}), 'name': ('django.db.models.f
ields.CharField', [], {'max_length': '128'}) }, u'core.server': { 'Meta': {'unique_together': "(('environment', 'name'),)", 'object_name': 'Server'}, 'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'servers'", 'to': u"orm['core.Environment']"}), 'host': ('django.db.models.fields.CharField', [], {'max_length': '128'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'servers'", 'symmetrical': 'False', 'to': u"orm['core.ServerRole']"}), 'user': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, u'core.serverrole': { 'Meta': {'object_name': 'ServerRole'}, 'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'serverroles'", 'to': u"orm['core.Department']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}) }, u'task.execution': { 'Meta': {'object_name': 'Execution'}, 'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executions'", 'to': u"orm['core.Environment']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'status': ('django.db.models.fields.IntegerField', [], {'default': '3'}), 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executions'", 'to': u"orm['task.Task']"}), 'time': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'time_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'time_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'time_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executions'", 'to': u"orm['account.CustomUser']"}) }, u'task.executioncommand': { 'Meta': {'object_name': 'ExecutionCommand'}, 'command': ('django.db.models.fields.TextField', [], {}), 'execution': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commands'", 'to': u"orm['task.Execution']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'roles': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['core.ServerRole']", 'symmetrical': 'False'}) }, u'task.executioncommandserver': { 'Meta': {'object_name': 'ExecutionCommandServer'}, 'celery_task_id': ('django.db.models.field
brezerk/q4wine-web
rss/views.py
Python
gpl-3.0
1,075
0.005581
# Create your views here.
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed

from q4wine.news.models import News


class RssSiteNewsFeed(Feed):
    title = "News related to q4wine development stuff and its community life."
    link = "/rss/"
    description = (
        "Here is all the news related to q4wine development stuff and its "
        "community life. If you are involved in the q4wine development "
        "process, don't forget to subscribe to our RSS feed."
    )

    def items(self):
        # Ten most recent news entries, newest first.
        return News.objects.order_by('-date')[:10]

    def item_title(self, item):
        # Prefix each title with its publication date, e.g. "2013-4-7 Title".
        return "%s-%s-%s %s" % (item.date.year, item.date.month,
                                item.date.day, item.title)

    def item_description(self, item):
        return item.content

    def item_link(self, item):
        # News entries are anchors on the front page.
        return "/#%s" % item.id


class AtomSiteNewsFeed(RssSiteNewsFeed):
    feed_type = Atom1Feed
    subtitle = RssSiteNewsFeed.description
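The record ends at the feed classes; they still have to be routed in a URLconf, which is not part of this file. A minimal sketch of the wiring, assuming a hypothetical urls.py and a Django version that provides django.conf.urls.url (the rss/ and atom/ paths are illustrative, not taken from the repository):

# Hypothetical urls.py for the feeds above; the paths are assumptions.
from django.conf.urls import url

from rss.views import RssSiteNewsFeed, AtomSiteNewsFeed

urlpatterns = [
    url(r'^rss/$', RssSiteNewsFeed()),   # matches the link = "/rss/" attribute
    url(r'^atom/$', AtomSiteNewsFeed()),
]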
kwikteam/phy
phy/gui/tests/test_state.py
Python
bsd-3-clause
2,810
0.001423
# -*- coding: utf-8 -*- """Test gui.""" #------------------------------------------------------------------------------ # Imports #------------------------------------------------------------------------------ import logging import os import shutil from ..state import GUIState, _gui_state_path, _get_default_state_path from phylib.utils import Bunch, load_json, save_json logger = logging.getLogger(__name__) #-------------------
----------------------------------------------------------- # Test GUI state #------------------------------------------------------------------------------ class MyClass(object): pass def test_get_default_state_path(): assert str(_get_default_state_path(
MyClass())).endswith( os.sep.join(('gui', 'tests', 'static', 'state.json'))) def test_gui_state_view_1(tempdir): view = Bunch(name='MyView0') path = _gui_state_path('GUI', tempdir) state = GUIState(path) state.update_view_state(view, dict(hello='world')) assert not state.get_view_state(Bunch(name='MyView')) assert not state.get_view_state(Bunch(name='MyView (1)')) assert state.get_view_state(view) == Bunch(hello='world') state.save() # Copy the state.json to a "default" location. default_path = tempdir / 'state.json' shutil.copy(state._path, default_path) state._path.unlink() logger.info("Create new GUI state.") # The default state.json should be automatically copied and loaded. state = GUIState(path, default_state_path=default_path) assert state.MyView0.hello == 'world' def test_gui_state_view_2(tempdir): global_path = tempdir / 'global/state.json' local_path = tempdir / 'local/state.json' data = {'a': {'b': 2, 'c': 3}} # Keep the entire dictionary with 'a' key. state = GUIState(global_path, local_path=local_path, local_keys=('a.d',)) state.update(data) state.save() # Local and global files are identical. assert load_json(global_path) == data assert load_json(local_path) == {} state = GUIState(global_path, local_path=local_path, local_keys=('a.d',)) assert state == data def test_gui_state_view_3(tempdir): global_path = tempdir / 'global/state.json' local_path = tempdir / 'local/state.json' data = {'a': {'b': 2, 'c': 3}} state = GUIState(global_path, local_path=local_path) state.add_local_keys(['a.b']) state.update(data) state.save() assert load_json(global_path) == {'a': {'c': 3}} # Only kept key 'b'. assert load_json(local_path) == {'a': {'b': 2}} # Update the JSON save_json(local_path, {'a': {'b': 3}}) state = GUIState(global_path, local_path=local_path, local_keys=('a.b',)) data_1 = {'a': {'b': 3, 'c': 3}} assert state == data_1 assert state._local_data == {'a': {'b': 3}}
nosix/PyCraft
src/pycraft/service/composite/entity/__init__.py
Python
lgpl-3.0
188
0.005319
# -*- coding: utf8 -*-

from .player import PlayerEntity
from .base import MobEntity
from .item import ItemEntity

__all__ = [
    'PlayerEntity',
    'MobEntity',
    'ItemEntity',
]
geky/pyOCD
pyOCD/flash/flash_kl28z.py
Python
apache-2.0
6,139
0.018081
""" mbed CMSIS-DAP debugger Copyright (c) 2006-2013 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from flash_kinetis import Flash_Kinetis flash_algo = { 'load_address' : 0x20000000, 'instructions' : [ 0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2, 0x4831b510, 0x6041492f, 0x60814930, 0x22806801, 0x22204391, 0x60014311, 0x4448482d, 0xf8cef000, 0xd0002800, 0xbd102001, 0x47702000, 0xb5104828, 0x44484928, 0xf8aef000, 0xd1050004, 0x21004824, 0xf0004448, 0x4604f983, 0xf835f000, 0xbd104620, 0x4d1fb570, 0x444d4606, 0x4b1e4601, 0x68ea4628, 0xf85ef000, 0xd1060004, 0x46312300, 0x68ea4628, 0xf934f000, 0xf0004604, 0x4620f81e, 0xb5febd70, 0x460b460d, 0x46014607, 0x46164811, 0xf0004448, 0x0004f8f5, 0x9001d10b, 0x21019002, 0x9100480c, 0x462a4633, 0x44484639, 0xf95ef000, 0xf0004604, 0x4620f802, 0x4808bdfe, 0x220168c1, 0x43110292, 0x477060c1, 0xd928c520, 0x40076000, 0x0000ffff, 0x00000004, 0x6b65666b, 0xf0003000, 0x2800b500, 0x2a00d009, 0x000bd007, 0xfa35f000, 0x0b0b0708, 0x13110f0d, 0x20041715, 0x68c0bd00, 0x20006010, 0x6840bd00, 0x6880e7fa, 0x6800e7f8, 0x2001e7f6, 0x6900e7f4, 0x6940e7f2, 0x206ae7f0, 0x0000bd00, 0x4607b5f8, 0x460d4614, 0xf0004618, 0x2800f889, 0x2308d12a, 0x46294622, 0xf0004638, 0x0006f867, 0x192cd122, 0x68f91e64, 0x91004620, 0xf956f000, 0xd0162900, 0x1c409c00, 0x1e644344, 0x480be011, 0x68004478, 0x490a6005, 0x71c82009, 0xf92ef000, 0x69b84606, 0xd0002800, 0x2e004780, 0x68f8d103, 0x42a51945, 0x4630d9eb, 0x0000bdf8, 0x0000042c, 0x40020000, 0x4604b510, 0xf0004608, 0x2800f851, 0x2c00d106, 0x4904d005, 0x71c82044, 0xf90ef000, 0x2004bd10, 0x0000bd10, 0x40020000, 0x2800b510, 0x492ad019, 0x4a2a68c9, 0x00490e09, 0x5a51447a, 0xd0120309, 0x60022200, 0x21026041, 0x02896081, 0x492460c1, 0x158b7a0c, 0x610340a3, 0x61827ac9, 0x46106141, 0x2004bd10, 0x2064bd10, 0x2800bd10, 0x6181d002, 0x47702000, 0x47702004, 0x2800b510, 0x1e5bd004, 0x421c460c, 0xe001d104, 0xbd102004, 0xd001421a, 0xbd102065, 0x428b6803, 0x6840d804, 0x18181889, 0xd2014288, 0xbd102066, 0xbd102000, 0x4288490d, 0x206bd001, 0x20004770, 0x28004770, 0x290fd008, 0x2a04d802, 0xe005d104, 0xd8012913, 0xd0012a08, 0x47702004, 0x47702000, 0x40075040, 0x000003a0, 0x40020020, 0x6b65666b, 0xb081b5ff, 0x0015461e, 0xd007460f, 0x46322304, 0xf7ff9801, 0x0004ffbd
, 0xe018d101, 0xb0052004, 0x480dbdf0, 0x68014478, 0xcd02600f, 0x60416800, 0x2006490a, 0xf00071c8, 0x4604f88b, 0x69809801, 0xd0002800, 0x2c004780, 0x1d3fd103, 0x2e001f36, 0x4620d1e7, 0x0000e7e3, 0x000002ec, 0x40020000, 0xb081b5ff, 0x460e4614, 0x23084605, 0xff90f7ff, 0xd1272800, 0x686868a9, 0xf882f000, 0x42719000, 0x40014240, 0x42b5424d, 0x9800d101, 0x2c00182d, 0x1bafd017, 0xd90042a7, 0x480b4627, 0x447808f9, 0x60066800, 0x22014809, 0x0a0a71c2, 0x728172c2, 0x72419904, 0x
f84cf000, 0xd1032800, 0x19f61be4, 0x2000e7e3, 0xbdf0b005, 0x00000272, 0x40020000, 0x2800b510, 0x4804d006, 0x71c22240, 0xf0007181, 0xbd10f837, 0xbd102004, 0x40020000, 0x9f08b5f8, 0x4616001c, 0xd005460d, 0xf7ff2304, 0x2800ff49, 0xe01dd101, 0xbdf82004, 0x4478480f, 0x600d6801, 0x2202490e, 0x9a0671ca, 0x680072ca, 0x60816821, 0xf816f000, 0xd0082800, 0x29009907, 0x600dd000, 0xd0e82f00, 0x60392100, 0x1f36bdf8, 0x1d2d1d24, 0xd1e12e00, 0x0000bdf8, 0x00000206, 0x40020000, 0x2170480a, 0x21807001, 0x78017001, 0xd5fc0609, 0x06817800, 0x2067d501, 0x06c14770, 0x2068d501, 0x07c04770, 0x2069d0fc, 0x00004770, 0x40020000, 0x09032200, 0xd32c428b, 0x428b0a03, 0x2300d311, 0xe04e469c, 0x430b4603, 0x2200d43c, 0x428b0843, 0x0903d331, 0xd31c428b, 0x428b0a03, 0x4694d301, 0x09c3e03f, 0xd301428b, 0x1ac001cb, 0x09834152, 0xd301428b, 0x1ac0018b, 0x09434152, 0xd301428b, 0x1ac0014b, 0x09034152, 0xd301428b, 0x1ac0010b, 0x08c34152, 0xd301428b, 0x1ac000cb, 0x08834152, 0xd301428b, 0x1ac0008b, 0x08434152, 0xd301428b, 0x1ac0004b, 0x1a414152, 0x4601d200, 0x46104152, 0xe05d4770, 0xd0000fca, 0x10034249, 0x4240d300, 0x22004053, 0x0903469c, 0xd32d428b, 0x428b0a03, 0x22fcd312, 0xba120189, 0x428b0a03, 0x0189d30c, 0x428b1192, 0x0189d308, 0x428b1192, 0x0189d304, 0x1192d03a, 0x0989e000, 0x428b09c3, 0x01cbd301, 0x41521ac0, 0x428b0983, 0x018bd301, 0x41521ac0, 0x428b0943, 0x014bd301, 0x41521ac0, 0x428b0903, 0x010bd301, 0x41521ac0, 0x428b08c3, 0x00cbd301, 0x41521ac0, 0x428b0883, 0x008bd301, 0x41521ac0, 0x0843d2d9, 0xd301428b, 0x1ac0004b, 0x1a414152, 0x4601d200, 0x41524663, 0x4610105b, 0x4240d301, 0xd5002b00, 0x47704249, 0x105b4663, 0x4240d300, 0x2000b501, 0x46c046c0, 0xb430bd02, 0x1e644674, 0x1c647825, 0xd20042ab, 0x5d63461d, 0x18e3005b, 0x4718bc30, 0x00040002, 0x00080000, 0x00100000, 0x00200000, 0x00400000, 0x00000080, 0x00000000, 0x00800000, 0x40020004, 0x00000000, ], 'pc_init' : 0x20000021, 'pc_eraseAll' : 0x2000004D, 'pc_erase_sector' : 0x20000071, 'pc_program_page' : 0x2000009F, 'begin_stack' : 0x20000800, 'begin_data' : 0x20000a00, # Analyzer uses a max of 2 KB data (512 pages * 4 bytes / page) 'page_buffers' : [0x20000a00, 0x20001200], # Enable double buffering 'static_base' : 0x20000000 + 0x20 + 0x594, 'page_size' : 2048, 'analyzer_supported' : True, 'analyzer_address' : 0x1fffa000 }; # @brief Flash algorithm for Kinetis L-series devices. class Flash_kl28z(Flash_Kinetis): def __init__(self, target): super(Flash_kl28z, self).__init__(target, flash_algo)
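The flash_algo dictionary above is pure data; the inline comment mentions double buffering, and the buffer addresses bear that out. A tiny sanity-check sketch, assuming the module above is importable, that derives the layout from the fields themselves rather than from any pyOCD API:

# Two RAM page buffers, exactly one page apart, let pyOCD fill one buffer
# while the flash algorithm programs the other (double buffering).
assert flash_algo['page_buffers'][1] - flash_algo['page_buffers'][0] == \
    flash_algo['page_size']  # 0x20001200 - 0x20000a00 == 2048
assert flash_algo['begin_data'] == flash_algo['page_buffers'][0]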
francbartoli/geonode
geonode/maps/management/commands/remove_broken_layers.py
Python
gpl-3.0
1,375
0
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2018 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################

from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Remove broken map layers'

    def handle(self, *args, **options):
        from geonode.maps.models import MapLayer
        from geonode.layers.models import Layer

        map_layers = MapLayer.objects.filter(local=True)
        for maplayer in map_layers:
            if not Layer.objects.filter(alternate=maplayer.name).exists():
                print('Removing broken map layer {}'.format(maplayer.name))
                maplayer.delete()
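A note on invocation, since the command defines no custom arguments: it runs like any other Django management command. A hedged sketch (the programmatic form uses Django's call_command, which is part of the public management API):

# From the shell:
#   python manage.py remove_broken_layers
# Or programmatically from Django code:
from django.core.management import call_command

call_command('remove_broken_layers')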
mashaoze/esp-idf
examples/wifi/iperf/iperf_test.py
Python
apache-2.0
26,855
0.002495
""" Test case for iperf example. This test case might have problem running on windows: 1. direct use of `make` 2. use `sudo killall iperf` to force kill iperf, didn't implement windows version The test env Example_ShieldBox do need the following config:: Example_ShieldBox: ap_list: - ssid: "ssid" password: "password" outlet: 1 apc_ip: "192.168.1.88" attenuator_port: "/dev/ttyUSB0" iperf: "/dev/ttyUSB1" apc_ip: "192.168.1.88" pc_nic: "eth0" """ from __future__ import division from __future__ import unicode_literals from builtins import str from builtins import range from builtins import object import re import os import sys import time import subprocess # add current folder to system path for importing test_report sys.path.append(os.path.dirname(__file__)) # this is a test case write with tiny-test-fw. # to run test cases outside tiny-test-fw, # we need to set environment variable `TEST_FW_PATH`, # then get and insert `TEST_FW_PATH` to sys path before import FW module test_fw_path = os.getenv("TEST_FW_PATH") if test_fw_path and test_fw_path not in sys.path: sys.path.insert(0, test_fw_path) import IDF import DUT import Utility from Utility import (Attenuator, PowerControl, LineChart) from test_report import (ThroughputForConfigsReport, ThroughputVsRssiReport) # configurations TEST_TIME = TEST_TIMEOUT = 60 WAIT_AP_POWER_ON_TIMEOUT = 90 SCAN_TIMEOUT = 3 SCAN_RETRY_COUNT = 3 RETRY_COUNT_FOR_BEST_PERFORMANCE = 2 ATTEN_VALUE_LIST = range(0, 60, 2) # constants FAILED_TO_SCAN_RSSI = -97 INVALID_HEAP_SIZE = 0xFFFFFFFF PC_IPERF_TEMP_LOG_FILE = ".tmp_iperf.log" CONFIG_NAME_PATTERN = re.compile(r"sdkconfig\.defaults\.(.+)") # We need to auto compare the difference between adjacent configs (01 -> 00, 02 -> 01, ...) and put them to reports. # Using numbers fo
r config will make this easy. # Use
default value `99` for config with best performance. BEST_PERFORMANCE_CONFIG = "99" class TestResult(object): """ record, analysis test result and convert data to output format """ PC_BANDWIDTH_LOG_PATTERN = re.compile(r"(\d+).0\s*-\s*(\d+).0\s+sec\s+[\d.]+\s+MBytes\s+([\d.]+)\s+Mbits/sec") DUT_BANDWIDTH_LOG_PATTERN = re.compile(r"(\d+)-\s+(\d+)\s+sec\s+([\d.]+)\s+Mbits/sec") ZERO_POINT_THRESHOLD = -88 # RSSI, dbm ZERO_THROUGHPUT_THRESHOLD = -92 # RSSI, dbm BAD_POINT_RSSI_THRESHOLD = -85 # RSSI, dbm BAD_POINT_MIN_THRESHOLD = 3 # Mbps BAD_POINT_PERCENTAGE_THRESHOLD = 0.3 # we need at least 1/2 valid points to qualify the test result THROUGHPUT_QUALIFY_COUNT = TEST_TIME//2 def __init__(self, proto, direction, config_name): self.proto = proto self.direction = direction self.config_name = config_name self.throughput_by_rssi = dict() self.throughput_by_att = dict() self.att_rssi_map = dict() self.heap_size = INVALID_HEAP_SIZE self.error_list = [] def _save_result(self, throughput, ap_ssid, att, rssi, heap_size): """ save the test results: * record the better throughput if att/rssi is the same. * record the min heap size. """ if ap_ssid not in self.att_rssi_map: # for new ap, create empty dict() self.throughput_by_att[ap_ssid] = dict() self.throughput_by_rssi[ap_ssid] = dict() self.att_rssi_map[ap_ssid] = dict() self.att_rssi_map[ap_ssid][att] = rssi def record_throughput(database, key_value): try: # we save the larger value for same att if throughput > database[ap_ssid][key_value]: database[ap_ssid][key_value] = throughput except KeyError: database[ap_ssid][key_value] = throughput record_throughput(self.throughput_by_att, att) record_throughput(self.throughput_by_rssi, rssi) if int(heap_size) < self.heap_size: self.heap_size = int(heap_size) def add_result(self, raw_data, ap_ssid, att, rssi, heap_size): """ add result for one test :param raw_data: iperf raw data :param ap_ssid: ap ssid that tested :param att: attenuate value :param rssi: AP RSSI :param heap_size: min heap size during test :return: throughput """ fall_to_0_recorded = 0 throughput_list = [] result_list = self.PC_BANDWIDTH_LOG_PATTERN.findall(raw_data) if not result_list: # failed to find raw data by PC pattern, it might be DUT pattern result_list = self.DUT_BANDWIDTH_LOG_PATTERN.findall(raw_data) for result in result_list: if int(result[1]) - int(result[0]) != 1: # this could be summary, ignore this continue throughput_list.append(float(result[2])) if float(result[2]) == 0 and rssi > self.ZERO_POINT_THRESHOLD \ and fall_to_0_recorded < 1: # throughput fall to 0 error. we only record 1 records for one test self.error_list.append("[Error][fall to 0][{}][att: {}][rssi: {}]: 0 throughput interval: {}-{}" .format(ap_ssid, att, rssi, result[0], result[1])) fall_to_0_recorded += 1 if len(throughput_list) > self.THROUGHPUT_QUALIFY_COUNT: throughput = sum(throughput_list) / len(throughput_list) else: throughput = 0.0 if throughput == 0 and rssi > self.ZERO_THROUGHPUT_THRESHOLD: self.error_list.append("[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found" .format(ap_ssid, att, rssi)) self._save_result(throughput, ap_ssid, att, rssi, heap_size) return throughput def post_analysis(self): """ some rules need to be checked after we collected all test raw data: 1. throughput value 30% worse than the next point with lower RSSI 2. 
throughput value 30% worse than the next point with larger attenuate """ def analysis_bad_point(data, index_type): for ap_ssid in data: result_dict = data[ap_ssid] index_list = list(result_dict.keys()) index_list.sort() if index_type == "att": index_list.reverse() for i, index_value in enumerate(index_list[1:]): if index_value < self.BAD_POINT_RSSI_THRESHOLD or \ result_dict[index_list[i]] < self.BAD_POINT_MIN_THRESHOLD: continue _percentage = result_dict[index_value] / result_dict[index_list[i]] if _percentage < 1 - self.BAD_POINT_PERCENTAGE_THRESHOLD: self.error_list.append("[Error][Bad point][{}][{}: {}]: drop {:.02f}%" .format(ap_ssid, index_type, index_value, (1 - _percentage) * 100)) analysis_bad_point(self.throughput_by_rssi, "rssi") analysis_bad_point(self.throughput_by_att, "att") @staticmethod def _convert_to_draw_format(data, label): keys = data.keys() keys.sort() return { "x-axis": keys, "y-axis": [data[x] for x in keys], "label": label, } def draw_throughput_figure(self, path, ap_ssid, draw_type): """ :param path: folder to save figure. make sure the folder is already created. :param ap_ssid: ap ssid string or a list of ap ssid string :param draw_type: "att" or "rssi" :return: file_name """ if draw_type == "rssi": type_name = "RSSI" data = self.throughput_by_rssi elif draw_type == "att": type_name = "Att" data = self.throughput_by_att else: raise AssertionError("draw type not supported") if isinstance(ap_ssid, list): file_name = "ThroughputVs{}_{}_{}_{}.png".f
tsh/coursera-algorithmic-thinking
Week 1/graph_loader.py
Python
apache-2.0
908
0.005507
""" Provided code for Application portion of Module 1 Imports physics citation graph """ ################################### # Code for loading citation graph CITATION_URL = "phys-cite_graph.txt" def load_graph(graph_url): """ Function that loads a graph given the URL for a text representation of the graph Returns a dictionary that models a graph """ graph_file = op
en(graph_url) graph_text = graph_file.read() graph_lines = graph_text.split('\n') graph_lines = graph_lines[ : -1] print "Loaded graph with", len(graph_lines), "nodes" answer_graph = {} for line in graph_lines: neighbors = line.split(' ') node = int(neighbors[0]) answer_graph[node] = set([]) for neighbor in neighbors[1 : -1]: answer_graph[node].add(int(neighbor)) return answer_graph citation_g
raph = load_graph(CITATION_URL)
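The returned dictionary maps each node to the set of papers it cites. A short follow-on helper in the spirit of the assignment (the function is my own illustration, not part of the provided code) computes in-degrees, i.e. how often each paper is cited:

def compute_in_degrees(digraph):
    """Return a dict mapping each node to its number of incoming edges."""
    in_degrees = {node: 0 for node in digraph}
    for node in digraph:
        for neighbor in digraph[node]:
            in_degrees[neighbor] = in_degrees.get(neighbor, 0) + 1
    return in_degrees

in_degree_counts = compute_in_degrees(citation_graph)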
detectlanguage/detectlanguage-python
detectlanguage/api.py
Python
mit
353
0.033994
import detectlanguage


def detect(data):
    result = detectlanguage.client.post('detect', {'q': data})
    return result['data']['detections']


def simple_detect(data):
    result = detect(data)
    return result[0]['language']


def user_status():
    return detectlanguage.client.get('user/status')


def languages():
    return detectlanguage.client.get('languages')
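These helpers assume detectlanguage.client is already configured with credentials. A hedged usage sketch; the configuration.api_key attribute follows the library's README and is an assumption about the installed version:

import detectlanguage

detectlanguage.configuration.api_key = "YOUR_API_KEY"  # assumption: per README

print(simple_detect("Buenos dias senor"))  # e.g. 'es'
print(detect("Hello world")[0])            # full detection record (dict)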
stuti-rastogi/leetcodesolutions
140_wordBreak2.py
Python
apache-2.0
760
0.003947
class Solution(object):
    def wordBreak(self, s, wordDict):
        """
        :type s: str
        :type wordDict: Set[str]
        :rtype: List[str]
        """
        return self.helper(s, wordDict, {})

    def helper(self, s, wordDict, memo):
        if s in memo:
            return memo[s]
        if not s:
            return []

        res = []
        for word in wordDict:
            if not s.startswith(word):
                continue
            if len(word) == len(s):
                res.append(word)
            else:
                resultOfTheRest = self.helper(s[len(word):], wordDict, memo)
                for item in resultOfTheRest:
                    item = word + ' ' + item
                    res.append(item)
        memo[s] = res
        return res
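A quick check with the classic LeetCode example; output order depends on set iteration:

sol = Solution()
print(sol.wordBreak("catsanddog", {"cat", "cats", "and", "sand", "dog"}))
# -> ['cats and dog', 'cat sand dog'] (in some order)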
Guts/isogeo-notifier
web/isogeo_notify/management/commands/api2db.py
Python
gpl-3.0
2,944
0.001359
# -*- coding: UTF-8 -*- #!/usr/bin/env python # ############################################################################ # ########## Libraries ############# # ################################## # Standard library import logging from os import path # 3rd party modules import arrow from isogeo_pysdk import Isogeo # Django project from django.conf import settings from django.core.management.base import BaseCommand from django.db import IntegrityError from isogeo_notify.models import Metadata, Workgroup # ############################################################################ #
########## Globals ############## # ################################# # logger = logging.getLogger("ElPaso") # ############################################################################ #
########### Classes ############# # ################################# class Command(BaseCommand): args = '<foo bar ...>' help = 'our help string comes here' def _update_db(self): """Update metadata list from API.""" # get stored metadata db_mds = Metadata.objects.all() db_wgs = Workgroup.objects.all() # connect to isogeo isogeo = Isogeo(client_id=settings.ISOGEO_CLIENT_ID, client_secret=settings.ISOGEO_CLIENT_SECRET, lang="fr") token = isogeo.connect() search = isogeo.search(token, # page_size=10, order_by="modified", # whole_share=0, # sub_resources=["events"] ) # tags tags = search.get("tags") for tag in tags: if tag.startswith("owner"): new_owner = Workgroup(isogeo_uuid=tag[6:-1], label=tags.get(tag)) new_owner.save() # metadatas # for md in search.get("results"): # try: # new_md = Metadata(isogeo_id=md.get("_id"), # title=md.get("title", "No title"), # name=md.get("name"), # abstract=md.get("abstract"), # md_dt_crea=md.get("_created"), # md_dt_update=md.get("_modified"), # rs_dt_crea=md.get("created"), # rs_dt_update=md.get("modified"), # source=True) # new_md.save() # logging.info("Metadata added") # except IntegrityError: # # in case of duplicated offer # logging.error("Metadata already existed") # continue logging.info("{} metadata added") def handle(self, *args, **options): self._update_db()
sanskritiitd/sanskrit
dictionary/sanskrit-english/wil.py
Python
gpl-3.0
442
0.025
# -*- coding: utf-8 -*-
# Extract the headword from each Wilson dictionary entry marked ".{#...#}".
with open('wil_orig_utf8_slp1.txt') as src, \
        open('wil_orig.words.out', 'w') as out:
    for line in src:
        line = line.strip()
        if ".{#" not in line:
            continue
        word = line.split('{#')[1].split('#}')[0]
        # Keep only the bare headword: cut at the first delimiter...
        for sep in (' ', '(', ',', '.', '/', '\\', '-', '{'):
            word = word.split(sep)[0]
        # ...and strip stray annotation characters.
        for junk in ("'", '*', '†', '[', '?'):
            word = word.replace(junk, '')
        out.write(word + '\n')
Vaypron/ChromaPy
Example Scripts/Mouse/1. setColor.py
Python
mit
342
0
import ChromaPy32 as Chroma  # Import the Chroma Module
from time import sleep

Mouse = Chroma.Mouse()  # Initialize a new Mouse Instance

RED = (255, 0, 0)  # Initialize a new color by RGB (RED,GREEN,BLUE)

Mouse.setColor(RED)  # sets the whole Mouse-Grid to RED
Mouse.applyGrid()  # applies the Mouse-Grid to the connected Mouse
sleep(5)
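A hedged follow-on sketch that cycles a few colors using only the calls already shown (setColor / applyGrid); the timing values are arbitrary:

for color in [(255, 0, 0), (0, 255, 0), (0, 0, 255)]:  # red, green, blue
    Mouse.setColor(color)
    Mouse.applyGrid()
    sleep(1)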
hosiet/flasktex
src/client.py
Python
bsd-3-clause
2,633
0.002659
#!/usr/bin/env python3

"""
Testing Client for flasktex.
"""

__license__ = 'BSD-3'
__docformat__ = 'reStructuredText'

import os
import urllib.request


def ft_checkalive(url: str):
    """Check whether given server is alive."""
    try:
        resp = urllib.request.urlopen(url + '/ping').read()
    except (OSError, ValueError):
        # URLError (a subclass of OSError) or a malformed URL.
        return False
    return resp == b'pong'


def ft_test_client():
    url = input('flasktex url: ').rstrip('/')
    texdir = input('tex file dir: ')
    entryfile = input('entryfile filename: ')
    worker = input('worker name: ')
    timeout = input('timeout: ')

    print(' ** Checking Given Parameters...')

    print('checking server status...', end='')
    if not ft_checkalive(url):
        print('Cannot connect to server. Giving up.')
        return
    print('pass')

    print('checking local dir status...', end='')
    try:
        dir_content = os.listdir(texdir)
    except OSError:
        print('Error occurred when listing dir. Giving up.')
        raise
    print('pass')

    print('checking entryfile...', end='')
    if entryfile not in dir_content:
        print('Cannot find given entryfile. Giving up.')
        return
    print('pass')

    print('checking worker name...', end='')
    print('skipped')

    print('checking timeout value...', end='')
    if int(timeout) < 30:
        print('Value too small. Giving up.')
        return
    print('pass')

    print('\n...Success!')
    return {
        'url': str(url),
        'texdir': str(texdir),
        'entryfile': str(entryfile),
        'worker': str(worker),
        'timeout': int(timeout),
    }


def ft_client_submission(user_input):
    from flasktex.tex2bundle import ft_dir_to_b64
    import flasktex.tex2bundle

    b64data = ft_dir_to_b64(user_input['texdir'])
    json_str = flasktex.tex2bundle._ft_gen_texbundle_json_bundled(
        b64data,
        entryfile=user_input['entryfile'],
        worker=user_input['worker'],
        timeout=user_input['timeout'],
    )
    print(json_str)
    resp = urllib.request.urlopen(
        user_input['url'] + '/api/1.0/submit/json',
        data=json_str.encode('UTF-8'))
    return_data = resp.read()
    print(return_data)
    # TODO FIXME


if __name__ == '__main__':
    user_input = ft_test_client()
    if user_input:
        # TODO NEXT
        command = input('Submit? y/n: ')
        if command in ('', 'y'):
            ft_client_submission(user_input)
RNAer/qiita
qiita_db/test/test_study.py
Python
bsd-3-clause
28,241
0
from unittest import TestCase, main from datetime import datetime from future.utils import viewitems from qiita_core.exceptions import IncompetentQiitaDeveloperError from qiita_core.util import qiita_test_checker from qiita_db.base import QiitaObject from qiita_db.study import Study, StudyPerson from qiita_db.investigation import Investigation from qiita_db.user import User from qiita_db.data import RawData from qiita_db.util import convert_to_id from qiita_db.exceptions import ( QiitaDBColumnError, QiitaDBStatusError, QiitaDBError, QiitaDBUnknownIDError) # ----------------------------------------------------------------------------- # Copyright (c) 2014--, The Qiita Development Team. # # Distributed under the terms of the BSD 3-clause License. # # The full license is in the file LICENSE, distributed with this software. # ----------------------------------------------------------------------------- @qiita_test_checker() class TestStudyPerson(TestCase): def setUp(self): self.studyperson = StudyPerson(1) def test_create_studyperson(self): new = StudyPerson.create('SomeDude', 'somedude@foo.bar', 'affil', '111 fake street', '111-121-1313') self.assertEqual(new.id, 4) obs = self.conn_handler.execute_fetchall( "SELECT * FROM qiita.study_person WHERE study_person_id = 4") self.assertEqual(obs, [[4, 'SomeDude', 'somedude@foo.bar', 'affil', '111 fake street', '111-121-1313']]) def test_iter(self): """Make sure that each and every StudyPerson is retrieved""" expected = [ ('LabDude', 'lab_dude@foo.bar', 'knight lab', '123 lab street', '121-222-3333'), ('empDude', 'emp_dude@foo.bar', 'broad', None, '444-222-3333'), ('PIDude', 'PI_dude@foo.bar', 'Wash U', '123 PI street', None)] for i, person in enumerate(StudyPerson.iter()): self.assertTrue(person.id == i+1) self.assertTrue(person.name == expected[i][0]) self.assertTrue(person.email == expected[i][1]) self.assertTrue(person.affiliation == expected[i][2]) self.assertTrue(person.address == expected[i][3]) self.assertTrue(person.phone == expected[i][4]) def test_create_studyperson_already_exists(self): obs = StudyPerson.create('LabDude', 'lab_dude@foo.bar', 'knight lab') self.assertEqual(obs.name, 'LabDude') self.assertEqual(obs.email, 'lab_dude@foo.bar') def test_retrieve_name(self): self.assertEqual(self.studyperson.name, 'LabDude') def test_set_name_fail(self): with self.assertRaises(AttributeError): self.studyperson.name = 'Fail Dude' def test_retrieve_email(self): self.assertEqual(self.studyperson.email, 'lab_dude@foo.bar') def test_retrieve_affiliation(self): self.assertEqual(self.studyperson.affiliation, 'knight lab') def test_set_email_fail(self): with self.assertRaises(AttributeError): self.studyperson.email = 'faildude@foo.bar' def test_set_affiliation_fail(self): with self.assertRaises(AttributeError): self.studyperson.affiliation = 'squire lab' def test_retrieve_address(self): self.assertEqual(self.studyperson.address, '123 lab street') def test_retrieve_address_null(self): person = StudyPerson(2) self.assertEqual(person.address, None) def test_set_address(self): self.studyperson.address = '123 nonsense road' self.assertEqual(self.studyperson.address, '123 nonsense road') def test_retrieve_phone(self): self.assertEqual(self.studyperson.phone, '121-222-3333') def test_retrieve_phone_null(self): person = StudyPerson(3) self.assertEqual(person.phone, None) def test_set_phone(self): self.studyperson.phone = '111111111111111111121' self.assertEqual(self.studyperson.phone, '111111111111111111121') @qiita_test_checker() class TestStudy(TestCase): 
def setUp(self): self.study = Study(1) self.info = { "timeseries_type_id": 1, "metadata_complete": True, "mixs_compliant": True, "number_samples_collected": 25, "number_samples_promised": 28, "portal_type_id": 3, "study_alias": "FCM", "study_description": "Microbiome of people who eat nothing but " "fried chicken", "study_abstract": "Exploring how a high fat diet changes the " "gut microbiome", "emp_person_id": StudyPerson(2), "principal_investigator_id": StudyPerson(3), "lab_person_id": StudyPerson(1) } self.infoexp = { "timeseries_type_id": 1, "metadata_complete": True, "mixs_compliant": True, "number_samples_collected": 25, "number_samples_promised": 28, "portal_type_id": 3, "study_alias": "FCM", "study_description": "Microbiome of people who eat nothing but " "fried chicken", "study_abstract": "Exploring how a high fat diet changes the " "gut microbiome", "emp_person_id": 2, "principal_investigator_id": 3, "lab_person_id": 1 } self.existingexp = { 'mixs_compliant': True, 'metadata_complete': True, 'reprocess': False, 'number_samples_promised': 27, 'emp_person_id': StudyPerson(2), 'funding': None, 'vamps_id': None, 'first_contact': datetime(2014, 5, 19, 16, 10), 'principal_investigator_id': StudyPerson(3), 'timeseries_type_id': 1, 'study_abstract': "This is a preliminary study to examine the " "microbiota associated with the Cannabis plant. Soils samples " "from the bulk soil, soil associated with the roots, and the " "rhizosphere were extracted and the DNA sequenced. Roots " "from three independent plan
ts of different strains were " "examined. These roots were obtained November 11, 2011 fr
om " "plants that had been harvested in the summer. Future " "studies will attempt to analyze the soils and rhizospheres " "from the same location at different time points in the plant " "lifecycle.", 'spatial_series': False, 'study_description': 'Analysis of the Cannabis Plant Microbiome', 'portal_type_id': 2, 'study_alias': 'Cannabis Soils', 'most_recent_contact': '2014-05-19 16:11', 'most_recent_contact': datetime(2014, 5, 19, 16, 11), 'lab_person_id': StudyPerson(1), 'number_samples_collected': 27} def _change_processed_data_status(self, new_status): # Change the status of the studies by changing the status of their # processed data id_status = convert_to_id(new_status, 'processed_data_status', self.conn_handler) self.conn_handler.execute( "UPDATE qiita.processed_data SET processed_data_status_id = %s", (id_status,)) def test_get_info(self): # Test get all info for single study obs = Study.get_info([1]) self.assertEqual(len(obs), 1) obs = dict(obs[0]) exp = { 'mixs_compliant': True, 'metadata_complete': True, 'reprocess': False, 'timeseries_type': 'None', 'portal_description': 'EMP portal', 'number_samples_promised': 27, 'emp_person_id': 2, 'funding': None, 'vamps_id': None, 'first_contact': datetime(2014, 5, 19, 16, 10), 'principal_investigator_id': 3, 'timeseries_type_id': 1, 'pmid': ['123456', '7891011'], 'study_alias': 'Cannabis Soils',
chrishadi/SublimeRunOnSave
runonsave.py
Python
mit
830
0.015663
import sublime, sublime_plugin


class RunOnSave(sublime_plugin.EventListener):
    def on_post_save(self, view):
        # Check if project has run-on-save enabled.
        settings = view.settings()
        if settings.get('run_on_save') == 1:
            command = settings.get('command')
            if command is not None:
                option_dict = {'cmd': command}

                folders = view.window().folders()
                if folders is not None and len(folders) > 0:
                    option_dict['working_dir'] = folders[0]

                path = settings.get('path')
                if path is not None:
                    option_dict['path'] = path

                environment_dict = settings.get('environment_variables')
                if environment_dict is not None and len(environment_dict) > 0:
                    option_dict['env'] = environment_dict

                view.window().run_command('exec', option_dict)
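The listener reads its configuration from the view settings, which in practice come from the settings block of a .sublime-project file. A hedged sketch of such a block, shown as the equivalent Python dict (the key names are exactly the ones read above; the values are illustrative):

# Equivalent of the "settings" object in a .sublime-project file.
settings = {
    "run_on_save": 1,
    "command": ["make", "test"],            # passed as `cmd` to exec
    "path": "/usr/local/bin",               # optional
    "environment_variables": {"CI": "1"},   # optional, becomes `env`
}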
pincopallino93/rdfendpoints
parser/dbpediamap.py
Python
apache-2.0
2,410
0.007054
__author__ = 'Lorenzo'

planet_mapper = {
    '<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>': 'planet type',  # link to yago category, can be explored more
    '<http://live.dbpedia.org/ontology/wikiPageExternalLink>': 'external link',  # many
    '<http://live.dbpedia.org/property/inclination>': 'inclination',  # quantity and text
    '<http://www.w3.org/2000/01/rdf-schema#seeAlso>': 'see also',  # many
    '<http://live.dbpedia.org/property/albedo>': 'albedo',  # quantity
    '<http://xmlns.com/foaf/0.1/depiction>': 'depiction',  # svg shape
    '<http://live.dbpedia.org/property/rotVelocity>': 'rotation velocity',  # quantity
    '<http://live.dbpedia.org/property/period>': 'period',  # quantity
    '<http://live.dbpedia.org/property/meanTemp>': 'average temperature',  # quantity
    '<http://live.dbpedia.org/ontology/abstract>': 'abstract',  # text
    '<http://live.dbpedia.org/property/meanAnomaly>': 'average anomaly',  # quantity
    '<http://live.dbpedia.org/property/siderealDay>': 'sidereal day',  # quantity
    '<http://live.dbpedia.org/property/scaleHeight>': 'atmospheric scale height',  # quantity
    '<http://live.dbpedia.org/property/mass>': 'mass',  # quantity
    '<http://live.dbpedia.org/property/escapeVelocity>': 'escape velocity (Km/s)',  # quantity
    '<http://live.dbpedia.org/property/atmosphere>': 'has atmosphere',  # yes/no
    '<http://live.dbpedia.org/property/ascNode>': 'asc node',  # quantity
    '<http://live.dbpedia.org/property/surfaceArea>': 'surface area',  # quantity
    '<http://live.dbpedia.org/property/equatorialRadius>': 'equatorial radius',  # quantity
    '<http://live.dbpedia.org/property/polarRadius>': 'polar radius',  # quantity
    '<http://live.dbpedia.org/ontology/escapeVelocity>': 'escape velocity (double)',  # quantity
    '<http://live.dbpedia.org/property/atmosphereComposition>': 'atmosphere chemistry',  # text
    '<http://live.dbpedia.org/property/surfacePressure>': 'surface pressure',
    '<http://live.dbpedia.org/property/volume>': 'volume',
    '<http://live.dbpedia.org/property/angularSize>': 'angular size',
    '<http://live.dbpedia.org/property/avgSpeed>': 'average speed (Km/s)',
    '<http://live.dbpedia.org/property/declination>': 'declination',
    '<http://live.dbpedia.org/property/surfaceGrav>': 'surface gravity (grams)',
    '<http://live.dbpedia.org/property/satellites>': 'number of satellites'
}
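A hedged usage sketch: translate a raw N-Triples predicate into its human-readable label, falling back to the URI itself when unmapped (the helper name is my own illustration):

def label_for(predicate):
    return planet_mapper.get(predicate, predicate)

print(label_for('<http://live.dbpedia.org/property/mass>'))  # -> 'mass'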
geeklhem/pimad
setup.py
Python
gpl-3.0
504
0.001984
from setuptools import setup

setup(name='pimad',
      version=open('VERSION').read(),
      description='Pimad is modeling adaptive dynamics',
      url='http://www.eleves.ens.fr/home/doulcier/projects/celladhesion/',
      author='Guilhem Doulcier',
      long_description=open('README').read(),
      author_email='guilhem.doulcier@ens.fr',
      license='GPLv3',
      packages=['pimad'],
      install_requires=[
          'numpy',
          'scipy',
          'pandas',
          'matplotlib',
      ],
      )
qedsoftware/commcare-hq
corehq/apps/receiverwrapper/views.py
Python
bsd-3-clause
7,843
0.001275
import logging from couchdbkit import ResourceNotFound from couchdbkit.ext.django.loading import get_db from django.http import ( HttpResponseBadRequest, HttpResponseForbidden, ) from casexml.apps.case.xform import get_case_updates, is_device_report from corehq.apps.domain.decorators import ( check_domain_migration, login_or_digest_ex, login_or_basic_ex ) from corehq.apps.receiverwrapper.auth import ( AuthContext, WaivedAuthContext, domain_requires_auth, ) from corehq.apps.receiverwrapper.util import ( get_app_and_build_ids, determine_authtype, from_demo_user, should_ignore_submission, DEMO_SUBMIT_MODE, ) from corehq.form_processor.interfaces.dbaccessors import CaseAccessors from corehq.form_processor.submission_post import SubmissionPost from corehq.form_processor.utils import convert_xform_to_json from corehq.util.datadog.metrics import MULTIMEDIA_SUBMISSION_ERROR_COUNT from corehq.util.datadog.utils import count_by_response_code, log_counter import couchforms from django.views.decorators.http import require_POST from django.views.decorators.csrf import csrf_exempt from couchforms.const import MAGIC_PROPERTY from couchforms.getters import MultimediaBug from dimagi.utils.logging import notify_exception from corehq.apps.ota.utils import handle_401_response from corehq import toggles @count_by_response_code('commcare.xform_submissions') def _process_form(request, domain, app_id, user_id, authenticated, auth_cls=AuthContext): if should_ignore_submission(request): # silently ignore submission if it meets ignore-criteria return SubmissionPost.submission_ignored_response() if toggles.FORM_SUBMISSION_BLACKLIST.enabled(domain): return SubmissionPost.get_blacklisted_response() try: instance, attachments = couchforms.get_instance_and_attachment(request) except MultimediaBug as e: try: instance = request.FILES[MAGIC_PROPERTY].read() xform = convert_xform_to_json(instance) meta = xform.get("meta", {}) except: meta = {} details = { "domain": domain, "app_id": app_id, "user_id": user_id, "authenticated": authenticated, "form_meta": meta, } log_counter(MULTIMEDIA_SUBMISSION_ERROR_COUNT, details) notify_exception(None, "Received a submission with POST.keys()", details) return HttpResponseBadRequest(e.message) app_id, build_id = get_app_and_build_ids(domain, app_id) response = SubmissionPost( instance=instance, attachments=attachments, domain=domain, app_id=app_id, build_id=build_id, auth_context=auth_cls( domain=domain, user_id=user_id, authenticated=authenticated, ), location=couchforms.get_location(request), received_on=couchforms.get_received_on(request), date_header=couchforms.get_date_header(request), path=couchforms.get_path(request), submit_ip=couchforms.get_submit_ip(request), last_sync_token=couchforms.get_last_sync_token(request), openrosa_headers=couchforms.get_openrosa_headers(request), ).get_response() if response.status_code == 400: logging.error( 'Status code 400 for a form submission. ' 'Response is: \n{0}\n' ) return response @csrf_exempt @require_POST @che
ck_domain_migration def post(request, domain, app_id=None): try: if domain_requires_auth(domain): # "redirect" to the secure version # an actual redirect doesn't work because it becomes a GET
return secure_post(request, domain, app_id) except ResourceNotFound: return HttpResponseBadRequest( 'No domain with name %s' % domain ) return _process_form( request=request, domain=domain, app_id=app_id, user_id=None, authenticated=False, ) def _noauth_post(request, domain, app_id=None): """ This is explictly called for a submission that has secure submissions enabled, but is manually overriding the submit URL to not specify auth context. It appears to be used by demo mode. It mainly just checks that we are touching test data only in the right domain and submitting as demo_user. """ instance, _ = couchforms.get_instance_and_attachment(request) form_json = convert_xform_to_json(instance) case_updates = get_case_updates(form_json) def form_ok(form_json): return (from_demo_user(form_json) or is_device_report(form_json)) def case_block_ok(case_updates): """ Check for all cases that we are submitting as demo_user and that the domain we are submitting against for any previously existing cases matches the submission domain. """ allowed_ids = ('demo_user', 'demo_user_group_id', None) case_ids = set() for case_update in case_updates: case_ids.add(case_update.id) create_action = case_update.get_create_action() update_action = case_update.get_update_action() index_action = case_update.get_index_action() if create_action: if create_action.user_id not in allowed_ids: return False if create_action.owner_id not in allowed_ids: return False if update_action: if update_action.owner_id not in allowed_ids: return False if index_action: for index in index_action.indices: case_ids.add(index.referenced_id) # todo: consider whether we want to remove this call, and/or pass the result # through to the next function so we don't have to get the cases again later cases = CaseAccessors(domain).get_cases(list(case_ids)) for case in cases: if case.domain != domain: return False if case.owner_id or case.user_id not in allowed_ids: return False return True if not (form_ok(form_json) and case_block_ok(case_updates)): if request.GET.get('submit_mode') != DEMO_SUBMIT_MODE: # invalid submissions under demo mode submission can be processed return HttpResponseForbidden() return _process_form( request=request, domain=domain, app_id=app_id, user_id=None, authenticated=False, auth_cls=WaivedAuthContext, ) @login_or_digest_ex(allow_cc_users=True) def _secure_post_digest(request, domain, app_id=None): """only ever called from secure post""" return _process_form( request=request, domain=domain, app_id=app_id, user_id=request.couch_user.get_id, authenticated=True, ) @handle_401_response @login_or_basic_ex(allow_cc_users=True) def _secure_post_basic(request, domain, app_id=None): """only ever called from secure post""" return _process_form( request=request, domain=domain, app_id=app_id, user_id=request.couch_user.get_id, authenticated=True, ) @csrf_exempt @require_POST @check_domain_migration def secure_post(request, domain, app_id=None): authtype_map = { 'digest': _secure_post_digest, 'basic': _secure_post_basic, 'noauth': _noauth_post, } try: decorated_view = authtype_map[determine_authtype(request)] except KeyError: return HttpResponseBadRequest( 'authtype must be one of: {0}'.format(','.join(authtype_map.keys())) ) return decorated_view(request, domain, app_id=app_id)
google/ml-fairness-gym
runner.py
Python
apache-2.0
1,900
0.003684
# coding=utf-8
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python2, python3
r"""A gin-configurable experiment runner for the fairness gym.

Example usage:

runner.py -- \
  --alsologtostderr \
  --gin_config_path=path/to/fairness_gym/examples/config/example_config.gin \
  --output_path=/tmp/output.json

After that finishes, /tmp/output.json should look like this:

{"agent": {"name": "DummyAgent"},
 "environment": {"name": "DummyEnv", "params": {}},
 "metrics": {"num_steps": 10}}
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl import app
from absl import flags
from absl import logging

import core
import gin
import runner_lib

flags.DEFINE_string(
    'gin_config_path', '/tmp/config.gin',
    'Path to the gin configuration that specifies this experiment.')
flags.DEFINE_string(
    'output_path', '/tmp/output.json',
    'Path where output JSON will be written.')

FLAGS = flags.FLAGS


def main(argv):
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')
  gin.parse_config_file(FLAGS.gin_config_path)
  runner = runner_lib.Runner()
  results = runner.run()
  logging.info('Results: %s', results)
  with open(FLAGS.output_path, 'w') as f:
    f.write(core.to_json(results))


if __name__ == '__main__':
  app.run(main)
gena/qgis-earthengine-plugin
contrib/__init__.py
Python
mit
108
0.018519
# Migrating some useful EE utils from https://code.earthengine.google.com/?accept_repo=users/gena/packages
looooo/pivy
scons/scons-local-1.2.0.d20090919/SCons/compat/_scons_shlex.py
Python
isc
11,866
0.001517
# -*- coding: iso-8859-1 -*- """A lexical analyzer class for simple shell-like syntaxes.""" from __future__ import print_function # Module and documentation by Eric S. Raymond, 21 Dec 1998 # Input stacking and error message cleanup added by ESR, March 2000 # push_source() and pop_source() made explicit by ESR, January 2001. # Posix compliance, split(), string arguments, and # iterator interface by Gustavo Niemeyer, April 2003. import os.path import sys #from collections import deque class deque: def __init__(self): self.data = [] def __len__(self): return len(self.data) def appendleft(self, item): self.data.insert(0, item) def popleft(self): return self.data.pop(0) try: basestring except NameError: import types def is_basestring(s): return isinstance(s, bytes) else: def is_basestring(s): return isinstance(s, basestring) try: from cStringIO import StringIO except ImportError: from StringIO import StringIO __all__ = ["shlex", "split"] class shlex: "A lexical analyzer class for simple shell-like syntaxes." def __init__(self, instream=None, infile=None, posix=False): if is_basestring(instream): instream = StringIO(instream) if instream is not None: self.instream = instream self.infile = infile else: self.instream = sys.stdin self.infile = None self.posix = posix if posix: self.eof = None else: self.eof = '' self.commenters = '#' self.wordchars = ('abcdfeghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_') if self.posix: self.wordchars = self.wordchars + ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ' 'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ') self.whitespace = ' \t\r\n' self.whitespace_split = False self.quotes = '\'"' self.escape = '\\' self.escapedquotes = '"' self.state = ' ' self.pushback = deque() self.lineno = 1 self.debug = 0 self.token = '' self.filestack = deque() self.source = None if self.debug: print('shlex: reading from %s, line %d' \ % (self.instream, self.lineno)) def push_token(self, tok): "Push a token onto the stack popped by the get_token method" if self.debug >= 1: print("shlex: pushing token " + repr(tok)) self.pushback.appendleft(tok) def push_source(self, newstream, newfile=None): "Push an input source onto
the lexer's input source stack." if is_basestring(newstream): newstream = StringIO(newstream) self.filestack.appendleft((self.infile, self.instream, self.lineno)) self
.infile = newfile self.instream = newstream self.lineno = 1 if self.debug: if newfile is not None: print('shlex: pushing to file %s' % (self.infile,)) else: print('shlex: pushing to stream %s' % (self.instream,)) def pop_source(self): "Pop the input source stack." self.instream.close() (self.infile, self.instream, self.lineno) = self.filestack.popleft() if self.debug: print('shlex: popping to %s, line %d' \ % (self.instream, self.lineno)) self.state = ' ' def get_token(self): "Get a token from the input stream (or from stack if it's nonempty)" if self.pushback: tok = self.pushback.popleft() if self.debug >= 1: print("shlex: popping token " + repr(tok)) return tok # No pushback. Get a token. raw = self.read_token() # Handle inclusions if self.source is not None: while raw == self.source: spec = self.sourcehook(self.read_token()) if spec: (newfile, newstream) = spec self.push_source(newstream, newfile) raw = self.get_token() # Maybe we got EOF instead? while raw == self.eof: if not self.filestack: return self.eof else: self.pop_source() raw = self.get_token() # Neither inclusion nor EOF if self.debug >= 1: if raw != self.eof: print("shlex: token=" + repr(raw)) else: print("shlex: token=EOF") return raw def read_token(self): quoted = False escapedstate = ' ' while True: nextchar = self.instream.read(1) if nextchar == '\n': self.lineno = self.lineno + 1 if self.debug >= 3: print("shlex: in state", repr(self.state), \ "I see character:", repr(nextchar)) if self.state is None: self.token = '' # past end of file break elif self.state == ' ': if not nextchar: self.state = None # end of file break elif nextchar in self.whitespace: if self.debug >= 2: print("shlex: I see whitespace in whitespace state") if self.token or (self.posix and quoted): break # emit current token else: continue elif nextchar in self.commenters: self.instream.readline() self.lineno = self.lineno + 1 elif self.posix and nextchar in self.escape: escapedstate = 'a' self.state = nextchar elif nextchar in self.wordchars: self.token = nextchar self.state = 'a' elif nextchar in self.quotes: if not self.posix: self.token = nextchar self.state = nextchar elif self.whitespace_split: self.token = nextchar self.state = 'a' else: self.token = nextchar if self.token or (self.posix and quoted): break # emit current token else: continue elif self.state in self.quotes: quoted = True if not nextchar: # end of file if self.debug >= 2: print("shlex: I see EOF in quotes state") # XXX what error should be raised here? raise ValueError("No closing quotation") if nextchar == self.state: if not self.posix: self.token = self.token + nextchar self.state = ' ' break else: self.state = 'a' elif self.posix and nextchar in self.escape and \ self.state in self.escapedquotes: escapedstate = self.state self.state = nextchar else: self.token = self.token + nextchar elif self.state in self.escape: if not nextchar: # end of file if self.debug >= 2: print("shlex: I see EOF in escape state") # XXX what error should be raised here? raise ValueError("No escaped character") # In posix shells, only the quote itself or the escape # character may be escaped within quotes. if escapedstate in self.quotes and \ nextchar != self.state and nextchar != escapedstate: self.token = self.token + self.state self.token = self.token + nextchar self.state = es
kiddinn/l2t-tools
plugins/yara_match.py
Python
gpl-3.0
3,103
0.003867
#!/usr/bin/python
"""
This is a simple plugin that does the same deal as the l2t_find_evil.py script
does. It loads up a YARA rule file and runs it against each line in the CSV
file and if there is a match it will fire up an alert.

Copyright 2012 Kristinn Gudjonsson (kristinn ( a t ) log2timeline (d o t) net)

This file is part of l2t-tools.

l2t-tools is a collection of free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.

l2t-tools is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
log2timeline. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import os

import yara

from l2t_tools.lib import plugin

__author__ = 'Kristinn Gudjonsson (kristinn@log2timeline.net)'
__version__ = '0.1'


class YaraMatch(plugin.L2tPlugin):
  """Match each CSV line against a set of YARA rules and collect alerts."""

  def __init__(self, separator, rule_file):
    """Constructor.

    Args:
      separator: The CSV file separator, usually a comma or a tab.
      rule_file: The path to a YARA rule file.

    Raises:
      IOError: If the YARA rule file does not exist.
    """
    if not os.path.isfile(rule_file):
      raise IOError('The YARA rule file does not exist.')

    super(YaraMatch, self).__init__(separator)
    self.rules = yara.compile(rule_file)
    logging.info('Plugin: YaraMatch Turned ON.')
    self.alerts = []

  def AppendLine(self, entries):
    """Appends a line to this plugin.

    This function should begin with evaluating the line to see if it fits
    into the plugin's sphere of interest. If it does, some processing takes
    place here.

    Args:
      entries: A list of two entries, timestamp and the full line.
    """
    _, line = entries
    columns = line.split(self.separator)

    hits = self.rules.match(data='[%s] %s' % (columns[15], columns[10]))
    if hits:
      for hit in hits:
        meta_desc = hit.meta.get('description', '')
        meta_case = ''
        if 'case_nr' in hit.meta:
          meta_case = ' (known from case: %s)' % hit.meta['case_nr']
        self.alerts.append('[%s - %s%s] %s %s [%s] = %s' % (
            hit.rule, meta_desc, meta_case, columns[0], columns[1],
            columns[2], columns[10]))

  def Report(self):
    """Return a report of findings.

    Returns:
      A string containing the results of the plugin.
    """
    append_string = ''
    for alert in self.alerts:
      append_string += '\n\t%s' % alert

    if append_string:
      return 'YARA rule matches: %d.%s' % (len(self.alerts), append_string)
    return 'YARA rule matches: None found, have a nice day.'
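AppendLine() matches rules against a string of the form '[<column 15>] <column 10>' built from each CSV line. A hedged sketch of a rule this plugin could load (the rule body is illustrative; yara-python's compile(source=...) is used here in place of the file path the constructor takes):

import yara

SAMPLE_RULES = r'''
rule executable_in_temp
{
    meta:
        description = "Executable referenced under a Temp directory"
        case_nr = "0001"
    strings:
        $dir = "\\Temp\\" nocase
        $ext = ".exe" nocase
    condition:
        all of them
}
'''

rules = yara.compile(source=SAMPLE_RULES)
print(rules.match(data='[winreg] C:\\Users\\x\\AppData\\Local\\Temp\\evil.exe'))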
yujiali/pynn
pynn/nn.py
Python
mit
20,622
0.003831
""" A python neural network package based on gnumpy. Yujia Li, 09/2014 TODO: - right now YNeuralNet I/O only supports NeuralNet as the type for component nets (network construction and forward/backward prop works for other types of component nets just fine). Ideally this should be extended to StackedNeuralNet and other types as well. """ import gnumpy as gnp import numpy as np import layer as ly import loss as ls import struct class NetworkConstructionError(Exception): pass class NetworkCompositionError(Exception): pass class TargetLoadingError(Exception): pass class BaseNeuralNet(object): """ Feed-forward neural network base class, each layer is fully connected. """ def __init__(self): pass def forward_prop(self, X, add_noise=False, compute_loss=False, is_test=True): """ Do a forward propagation, which maps input matrix X (n_cases, n_dims) to an output matrix Y (n_cases, n_out_dims). add_noise - add noise if set. compute_loss - compute all the losses if set. """ raise NotImplementedError() def forward_prop_setup_bn_mean_std_on_big_set(self, X, **kwargs): """ Special for networks that use batch-normalization, but otherwise has no effect. """ pass def load_target(self, *args, **kwargs): """ Load targets used in the losses. """ raise NotImplementedError() def get_loss(self): """ Return the loss computed in a previous forward propagation. """ raise NotImplementedError() def backward_prop(self, grad=None): """ Given the gradients for the output layer, back propagate through the network and compute all the gradients. """ raise NotImplementedError() def clear_gradient(self): """ Reset all parameter gradients to 0. """ raise NotImplementedError() def get_param_vec(self): """ Get a vector representation of all parameters in the network. """ raise NotImplementedError() def get_noiseless_param_vec(self): """ Get an approximate vector representation of all parameters in the network, that corresponds to the noiseless case when using dropout in training. """ return self.get_param_vec() def _set_param_from_vec(self, v, is_noiseless=False): """ is_noiseless=True -> set_noiseless_param_from_vec, is_noiseless=False -> set_param_from_vec """ raise NotImplementedError() def set_param_from_vec(self, v): """ Set the parameters of the network from a complete vector representation. """ self._set_param_from_vec(v, is_noiseless=False) def set_noiseless_param_from_vec(self, v): """ Set the parameters of the network from a complete vector representation, but properly scale it to be used in noiseless setting. """ self._set_param_from_vec(v, is_noiseless=True) def get_grad_vec(self): """ Get a vector representation of all gradients for parameters in the network. """ raise NotImplementedError() def save_model_to_binary(self): """ Return a binary representation of the network. """ raise NotImplementedError() def load_model_from_stream(self, f): """ Load model from binary stream, f can be an open file. """ raise NotImplementedError() def save_model_to_file(self, file_name): with open(file_name, 'wb') as f: f.write(self.save_model_to_binary()) def load_model_from_file(self, file_name): with open(file_name, 'rb') as f: self.load_model_from_stream(f) def get_type_code(self): """ A type code used in model I/O to distinguish among different models. This should return a 32-bit integer. """ raise NotImplementedError() def check_type_code(self, type_code): """ Check if the type code matches the model itself. 
""" if type_code == self.get_type_code(): return else: raise Exception('Type code mismatch!') def _update_param_size(self): """ Update parameter size. After a call to this function the param_size attribute will be set properly. """ raise NotImplementedError() def get_status_info(self): """ Return a string that represents some internal states of the network, can be used for debugging the training process or monitoring the state of the network. """ return '' class NeuralNet(BaseNeuralNet): """ A simple one input one output la
yer neural net, loss is only (possibly) added at the output layer. """ def __init__(self, in_dim=None, out_dim=None): self.in_dim = in_dim self.out_dim = out_dim self.layers = [] self.layer_params = [] self.param_size = 0 self.loss = None self.output_layer_added = False def add_layer(self, out_dim=0, no
nlin_type=None, dropout=0, sparsity=0, sparsity_weight=0, init_scale=1, params=None, init_bias=0, use_batch_normalization=False): """ By default, nonlinearity is linear. Return the newly added layer. """ if self.output_layer_added: raise NetworkConstructionError( 'Trying to add more layers beyond output layer.') if len(self.layers) == 0: in_dim = self.in_dim else: in_dim = self.layers[-1].out_dim if params is not None: if in_dim != params.W.shape[0]: raise NetworkConstructionError( 'Loading shared parameter failure: size mismatch.') else: out_dim = params.W.shape[1] if out_dim == 0: out_dim = self.out_dim self.output_layer_added = True self.layers.append(ly.Layer(in_dim, out_dim, nonlin_type, dropout, sparsity, sparsity_weight, init_scale, params, init_bias=init_bias, use_batch_normalization=use_batch_normalization)) if params is None: self.layer_params.append(self.layers[-1].params) if use_batch_normalization: self.layer_params.append(self.layers[-1].bn_layer.params) self._update_param_size() return self.layers[-1] def _update_param_size(self): self.param_size = sum([p.param_size for p in self.layer_params]) def set_loss(self, loss_type, loss_weight=1, loss_after_nonlin=False, **kwargs): """ loss_type is the name of the loss. """ self.loss = ls.get_loss_from_type_name(loss_type, **kwargs) self.loss.set_weight(loss_weight) self.layers[-1].set_loss(self.loss, loss_after_nonlin=loss_after_nonlin) def load_target(self, target, *args, **kwargs): if self.loss is not None and target is not None: self.loss.load_target(target, *args, **kwargs) def forward_prop(self, X, add_noise=False, compute_loss=False, is_test=True): """ Compute forward prop, return the output of the network. """ if isinstance(X, gnp.garray): x_input = X else: x_input = gnp.garray(X) for i in range(len(self.layers)): x_input = self.layers[i].forward_prop(x_input, add_noise=add_noise, compute_loss=compute_loss, is_test=is_test) return x_input def forward_prop_setup_bn_mean_std_on_big_set(self, X, minibatch_size=1000, early_exit=True): if early_exit and not any([l.use_batch_normalization for l in self.layers]): return if isinstance(X, gnp.garray): x_input = X else: x_input = gnp.garray(X) for i in range(len(self.layers)): x_input = self.layers[i].forward_prop_setup_bn_mean_std_on_big_set(