repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
stack-of-tasks/rbdlpy | tutorial/lib/python2.7/site-packages/ttfquery/glyphquery.py | Python | lgpl-3.0 | 3,146 | 0.013986 | """Glyph-specific queries on font-files"""
from ttfquery import describe
try:
from OpenGLContext.debug.logs import text_log
except ImportError:
text_log = None
def hasGlyph( font, char, encoding=None ):
"""Check to see if font appears to have explicit glyph for char"""
glyfName = explicitGlyph( font, char, encoding )
if glyfName is None:
return False
return True
def explicitGlyph( font, char, encoding=None ):
"""Return glyphName or None if there is not explicit glyph for char"""
cmap = font['cmap']
if encoding is None:
encoding = describe.guessEncoding( font )
table = cmap.getcmap( *encoding )
glyfName = table.cmap.get( ord(char))
return glyfName
def glyphName( font, char, encoding=None, warnOnFailure=1 ):
"""Retrieve the glyph name for the given character
XXX
Not sure what the effect of the Unicode mapping
will be given the use of ord...
"""
glyfName = explicitGlyph( font, char, encoding )
if glyfName is None:
encoding = describe.guessEncoding( font ) #KH
cmap = font['cmap'] #KH
table = cmap.getcmap( *encoding ) #KH
glyfName = table.cmap.get( -1)
if glyfName is None:
glyfName = font['glyf'].glyphOrder[0]
if text_log and warnOnFailure:
text_log.warn(
"""Unable to find glyph name for %r, in %r using first glyph in table (%r)""",
char,
describe.shortName(font),
glyfName
)
return glyfName
def width( font, glyphName ):
"""Retrieve the width of the giving character for given font
The horizontal metrics table provides both the
width and the left side bearing, we should really
be using the left side bearing to adjust the
character, but that's a later project.
"""
try:
return font['hmtx'].metrics[ glyphName ][0]
except KeyError:
raise ValueError( """Couldn't find glyph for glyphName %r"""%(
glyphName,
))
def lineHeight( font ):
"""Get the base-line to base-line height for the font
XXX
There is some fudging going on here as I
workaround what appears to be a problem with the
specification for sTypoDescender, which states
that it should normally be a negative value, but
winds up being positive in at least one font that
defines points below the zero axis.
XXX The entire OS/2 table doesn't appear in a few
fonts (symbol fonts in particular), such as Corel's
BeeHive and BlackLight 686.
"""
return charHeight(font) + font['OS/2'].sTypoLineGap
def charHeight( font ):
"""Determine the general character height for the font (for scaling)"""
ascent = font['OS/2'].sTypoAscender
descent = fo | nt['OS/2'].sTypoDescender
if descent > 0:
descent = - descent
return ascent - desce | nt
def charDescent( font ):
"""Determine the general descent for the font (for scaling)"""
return font['OS/2'].sTypoDescender
|
mozillazg/ShortURL | shorturl/settings.py | Python | mit | 569 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
SITE_ROOT = os.path.dirn | ame(os.path.abspath(__file__))
DEBUG = True # 调试模式
TEMPLATE_DIR = os.path.join(SITE_ROOT, 'templates') # 模板目录
BASE_TEMPLATE = 'base' # 基础模板
# URL 映射
URLS = (
'/', 'Index',
'(/j)?/shorten', 'Shorten',
'/([0-9a-zA-Z]{5 | ,})', 'Expand',
'/j/expand', 'Expand',
'/.*', 'Index',
)
# 数据库配置
DATABASES = {
'dbn': 'mysql',
'db': 'shorturl',
'user': 'py',
'pw': 'py_passwd',
'host': 'localhost',
'port': 3306,
}
|
sahikaru/DP | chapter1/strategymode.py | Python | gpl-2.0 | 1,364 | 0.012463 | #!/usr/env python
class Flyable:
def fly(self):
pass
class Quackable(object):
def quack(self):
pass
class ReadHeadDuckFly(Flyable):
def fly(self):
print "I am a readheadduck, I can fly"
class ReadHeadDuckQack(Quackable):
def quack(self):
print "I am a readheadduck,Dcuk duck duck..."
class Duck():
def swim(self):
print "I am a duck,I can swim..."
class ReadHeadDuck(Duck):
def __init__(self,flyable,quackable):
self.f = flyable
self.q = quackable
def fly(self):
return self.f.fly()
def quack(self):
| return self.q.quack()
class Mallardduckflyable(Flyable):
def fly(self):
print "I am a Mallardduck....,I can fly"
class MallardduckQuackble(Quackable):
def quack(self):
print "I am a Mallardduck,Duck.duck..duck.."
class Mallardduck(Duck):
def __init__(self,fl | yable,quackable):
self.f = flyable
self.q = quackable
def fly(self):
return self.f.fly()
def quack(self):
return self.q.quack()
if __name__ == "__main__":
duck = Duck()
duck.swim()
rhduck = ReadHeadDuck(ReadHeadDuckFly(),ReadHeadDuckQack())
rhduck.fly()
rhduck.swim()
rhduck.quack()
md = Mallardduck(Mallardduckflyable(),MallardduckQuackble())
md.fly()
md.quack()
md.swim()
|
huntzhan/magic-constraints | magic_constraints/types.py | Python | mit | 15,147 | 0 | # -*- coding: utf-8 -*-
from __future__ import (
division, absolute_import, print_function, unicode_literals,
)
from builtins import * # noqa
from future.builtins.disabled import * # noqa
from future.utils import with_metaclass
from abc import ABCMeta
# collections.abc dosn't esist in Python 2.x.
import collections as abc
from magic_constraints.utils import (
type_object, nontype_object,
conditional_to_bytes, conditional_repr,
)
from magic_constraints.exception import (
MagicTypeError, MagicIndexError
)
def meta_create_class(prefix, classname, baseclass, generator_cls):
injected_functions = {}
for name in dir(generator_cls):
if not name.startswith(prefix):
continue
# get unbound function.
function = getattr(generator_cls, name)
if hasattr(function, '__func__'):
function = function.__func__
# remove prefix.
injected_functions[name[len(prefix):]] = function
return type(
conditional_to_bytes(classname), (baseclass,), injected_functions,
)
def create_metaclass(baseclass, generator_cls):
return meta_create_class(
'_metaclass_', 'MetaMagicClass', baseclass, generator_cls,
)
def create_class(baseclass, generator_cls):
return meta_create_class(
'_class_', 'MagicClass', baseclass, generator_cls,
)
def safe_getmethod(cls, name):
def do_nothing(*args, **kwargs):
return True
method = getattr(cls, name, None)
return method if method else do_nothing
class BasicMagicType(object):
pass
class BasicMetaMagicType(ABCMeta):
def __getitem__(cls, type_decl):
if not safe_getmethod(cls, 'check_getitem_type_decl')(type_decl):
raise MagicTypeError(
'invalid type.',
type_decl=type_decl,
)
ret_cls = cls.generator_cls(cls.main_cls)
ret_cls.partial_cls = type_decl
return ret_cls
def __subclasscheck__(cls, subclass):
if nontype_object(subclass):
return False
if not safe_getmethod(cls, 'check_subclass')(subclass):
return False
# corner case, | subclass isn't MagicType.
if not issubclass(subclass, BasicMagicType):
return issubclass(subclass, cls.main_cls)
# subclass is MagicType.
if cls.partial_cls or subclass.partial_cls:
# if subclass has partial_cls, return False.
return False
else:
# 1. subclass is normal type object.
# 2. subclass is a MagicType.
return issubclass(subclass.mai | n_cls, cls.main_cls)
def __instancecheck__(cls, instance):
return safe_getmethod(cls, 'check_instance')(instance)
def __repr__(cls):
name = conditional_repr(cls.main_cls)
if cls.partial_cls:
partial = ', '.join(
map(
conditional_repr,
cls.partial_cls
if isinstance(cls.partial_cls, tuple)
else [cls.partial_cls],
),
)
name = '{0}[{1}]'.format(
name, partial,
)
return conditional_to_bytes(name)
# 1. _metaclass_{name} -> {name} in metaclass.
# 2. _class_{name} -> {name} in class.
class MagicTypeGenerator(type):
def __new__(generator_cls, ABC):
MetaMagicType = create_metaclass(
BasicMetaMagicType,
generator_cls,
)
MagicType = create_class(
with_metaclass(MetaMagicType, BasicMagicType),
generator_cls,
)
MagicType.generator_cls = generator_cls
MagicType.main_cls = ABC
MagicType.partial_cls = None
return MagicType
def check_type_of_instance(cls, instance):
return any(
issubclass(T, cls)
# handle old style class.
for T in (instance.__class__, type(instance))
)
def check_getitem_tuple(type_decl, n):
# type_decl should be a n-tuple.
if not isinstance(type_decl, tuple):
return False
return len(type_decl) == n
def generate_immutable_abc(supercls, mutable_subclass):
class ABCImmutableMeta(ABCMeta):
def __subclasscheck__(cls, subclass):
if not issubclass(subclass, supercls):
return False
return not issubclass(subclass, mutable_subclass)
class ABCImmutable(with_metaclass(ABCImmutableMeta, object)):
pass
# dirty hack to assert issubclass(ABCImmutable, supercls).
supercls._abc_cache.add(ABCImmutable)
return ABCImmutable
class SequenceGenerator(MagicTypeGenerator):
def _metaclass_check_getitem_type_decl(cls, type_decl):
if type_object(type_decl):
return True
if isinstance(type_decl, tuple):
for T in type_decl:
if nontype_object(T):
return False
return True
else:
return False
def _metaclass_check_instance(cls, instance):
if not check_type_of_instance(cls, instance):
return False
if not cls.partial_cls:
return True
if type_object(cls.partial_cls):
for e in instance:
if not isinstance(e, cls.partial_cls):
return False
else:
if len(cls.partial_cls) != len(instance):
return False
for i in range(len(instance)):
if not isinstance(instance[i], cls.partial_cls[i]):
return False
return True
class SetGenerator(MagicTypeGenerator):
def _metaclass_check_getitem_type_decl(cls, type_decl):
return type_object(type_decl)
def _metaclass_check_instance(cls, instance):
if not check_type_of_instance(cls, instance):
return False
if not cls.partial_cls:
return True
if type_object(cls.partial_cls):
for e in instance:
if not isinstance(e, cls.partial_cls):
return False
return True
class MappingGenerator(MagicTypeGenerator):
def _metaclass_check_getitem_type_decl(cls, type_decl):
if not check_getitem_tuple(type_decl, 2):
return False
return type_object(type_decl[0]) and type_object(type_decl[1])
def _metaclass_check_instance(cls, instance):
if not check_type_of_instance(cls, instance):
return False
if cls.partial_cls:
key_cls, val_cls = cls.partial_cls
for key, val in instance.items():
if not (isinstance(key, key_cls) and isinstance(val, val_cls)):
return False
return True
class IteratorGenerator(MagicTypeGenerator):
_class_ITERATOR_CASE_LENGTH = 1
_class_ITERATOR_CASE_NO_LENGTH = 2
def _metaclass_check_getitem_type_decl(cls, type_decl):
# 1. Iterator[T, ...]
if isinstance(type_decl, tuple):
for T in type_decl:
if nontype_object(T):
return False
return True
# 2. Iterator[T]
elif type_object(type_decl):
return True
else:
return False
def _metaclass_check_instance(cls, instance):
if cls.partial_cls or not check_type_of_instance(cls, instance):
return False
else:
# is Iterator and not Iterator[...].
return True
def _class___init__(self, iterator):
if self.partial_cls is None:
raise MagicTypeError(
'Iterator should be specified.'
)
if not isinstance(iterator, self.main_cls):
raise MagicTypeError(
'require Iterator.',
iterator=iterator,
)
if isinstance(self.partial_cls, tuple):
# Iterator[T, ...]. Checking on:
# 1. the number of elements in the iterator.
# 2. the type of each element.
self.case = self.ITERATOR_CASE_LENGTH
self._type_idx = 0
else:
|
thaim/ansible | lib/ansible/modules/cloud/ovirt/ovirt_vmpool_info.py | Python | mit | 4,023 | 0.002237 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_vmpool_info
short_description: Retrieve information about one or more oVirt/RHV vmpools
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve information about one or more oVirt/RHV vmpools."
- This module was called C(ovirt_vmpool_facts) before Ansible 2.9, returning C(ansible_facts).
Note that the M(ovirt_vmpool_info) module no longer returns C(ansible_facts)!
notes:
- "This module returns a variable C(ovirt_vmpools), which
contains a list of vmpools. You need to register the result with
the I(register) keyword to use it."
options:
pattern:
description:
- "Search term which is accepted by oVirt/RHV search backend."
- "For example to search vmpool X: name=X"
extends_documentation_fragment: ovirt_info
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather information about all vm pools which names start with C(centos):
- ovirt_vmpool_info:
pattern: name=centos*
register: result
- debug:
msg: "{{ result.ovirt_vm_pools }}"
'''
RETURN = '''
ovirt_vm_pools:
description: "List of dictionaries describing the vmpools. Vm pool attributes are mapped to dictionary keys,
all vmpools attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/vm_pool."
returned: On success.
type: list
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_info_full_argument_spec,
)
def main():
argument_spec = ovirt_info_full_argument_spec(
pattern=dict(default='', required=False),
)
module = AnsibleModule(argument_spec)
is_old_facts = module._name == 'ovirt_vmpool_facts'
if is_old_facts:
module.deprecate("The 'ovirt_vmpool_facts' module has been renamed to 'ovirt_vmpool_info', "
"and the renamed one no longer returns ansible_facts", version='2.13')
check_sdk(module)
try:
auth = module.params.pop('auth')
connection = create_connection(auth)
vmpools_service = connection.system_service().vm_pools_service()
vmpools = vmpools_service.list(search=module.params['pattern'])
result = dict(
ovirt_vm_pools=[
get_dict_of_struct(
struct=c,
connection=connection,
fetch_nested=module.params.get('fetch_nested'),
attributes=module.params.get('nested_attributes'),
) for c in vmpools
],
)
if is_old_facts:
module.exit_json(changed=False, ansible_facts=result)
else:
module.exit_json(changed=False, **re | sult)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth | .get('token') is None)
if __name__ == '__main__':
main()
|
ArcherSys/ArcherSys | Lib/idlelib/FormatParagraph.py | Python | mit | 22,001 | 0.003273 | <<<<<<< HEAD
<<<<<<< HEAD
"""Extension to format a paragraph or selection to a max width.
Does basic, standard text formatting, and also understands Python
comment blocks. Thus, for editing Python source code, this
extension is really only suitable for reformatting these comment
blocks or triple-quoted strings.
Known problems with comment reformatting:
* If there is a selection marked, and the first line of the
selection is not complete, the block will probably not be detected
as comments, and will have the normal "text formatting" rules
applied.
* If a comment block has leading whitespace that mixes tabs and
spaces, they will not be considered part of the same block.
* Fancy comments, like this bulleted list, aren't handled :-)
"""
import re
from idlelib.configHandler import idleConf
class FormatParagraph:
menudefs = [
('format', [ # /s/edit/format dscherer@cmu.edu
('Format Paragraph', '<<format-paragraph>>'),
])
]
def __init__(self, editwin):
self.editwin = editwin
def close(self):
self.editwin = None
def format_paragraph_event(self, event, limit=None):
"""Formats paragraph to a max width specified in idleConf.
If text is selected, format_paragraph_event will start breaking lines
at the max width, starting from the beginning selection.
If no text is selected, format_paragraph_event uses the current
cursor location to determine the paragraph (lines of text surrounded
by blank lines) and formats it.
The length limit parameter is for testing with a known value.
"""
if limit is None:
# The default length limit is that defined by pep8
limit = idleConf.GetOption(
'extensions', 'FormatParagraph', 'max-width',
type='int', default=72)
text = self.editwin.text
first, last = self.editwin.get_selection_indices()
if first and last:
data = text.get(first, last)
comment_header = get_comment_header(data)
else:
first, last, comment_header, data = \
find_paragraph(text, text.index("insert"))
if comment_header:
newdata = reformat_comment(data, limit, comment_header)
else:
newdata = reformat_paragraph(data, limit)
text.tag_remove("sel", "1.0", "end")
if newdata != data:
text.mark_set("insert", first)
text.undo_block_start()
text.delete(first, last)
text.insert(first, newdata)
text.undo_block_stop()
else:
text.mark_set("insert", last)
text.see("insert")
return "break"
def find_paragraph(text, mark):
"""Returns the start/stop indices enclosing the paragraph that mark is in.
Also returns the comment format string, if any, and paragraph of text
between the start/stop indices.
"""
lineno, col = map(int, mark.split("."))
line = text.get("%d.0" % lineno, "%d.end" % lineno)
# Look for start of next paragraph if the index passed in is a blank line
while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line):
lineno = lineno + 1
line = text.get("%d.0" % lineno, "%d.end" % lineno)
first_lineno = lineno
comment_header = get_comment_header(line)
comment_header_len = len(comment_header)
# Once start line found, search for end of paragraph (a blank line)
while get_comment_header(line)==comment_header and \
not is_all_white(line[comment_header_len:]):
lineno = lineno + 1
line = text.get("%d.0" % lineno, "%d.end" % lineno)
last = "%d.0" % lineno
# Search back to beginning of paragraph (first blank line before)
lineno = first_lineno - 1
line = text.get("%d.0" % lineno, "%d.end" % lineno)
while lineno > 0 and \
| get_comment_header(line)==comment_header and \
not is_all_white(line[comment_header_len:] | ):
lineno = lineno - 1
line = text.get("%d.0" % lineno, "%d.end" % lineno)
first = "%d.0" % (lineno+1)
return first, last, comment_header, text.get(first, last)
# This should perhaps be replaced with textwrap.wrap
def reformat_paragraph(data, limit):
"""Return data reformatted to specified width (limit)."""
lines = data.split("\n")
i = 0
n = len(lines)
while i < n and is_all_white(lines[i]):
i = i+1
if i >= n:
return data
indent1 = get_indent(lines[i])
if i+1 < n and not is_all_white(lines[i+1]):
indent2 = get_indent(lines[i+1])
else:
indent2 = indent1
new = lines[:i]
partial = indent1
while i < n and not is_all_white(lines[i]):
# XXX Should take double space after period (etc.) into account
words = re.split("(\s+)", lines[i])
for j in range(0, len(words), 2):
word = words[j]
if not word:
continue # Can happen when line ends in whitespace
if len((partial + word).expandtabs()) > limit and \
partial != indent1:
new.append(partial.rstrip())
partial = indent2
partial = partial + word + " "
if j+1 < len(words) and words[j+1] != " ":
partial = partial + " "
i = i+1
new.append(partial.rstrip())
# XXX Should reformat remaining paragraphs as well
new.extend(lines[i:])
return "\n".join(new)
def reformat_comment(data, limit, comment_header):
"""Return data reformatted to specified width with comment header."""
# Remove header from the comment lines
lc = len(comment_header)
data = "\n".join(line[lc:] for line in data.split("\n"))
# Reformat to maxformatwidth chars or a 20 char width,
# whichever is greater.
format_width = max(limit - len(comment_header), 20)
newdata = reformat_paragraph(data, format_width)
# re-split and re-insert the comment header.
newdata = newdata.split("\n")
# If the block ends in a \n, we dont want the comment prefix
# inserted after it. (Im not sure it makes sense to reformat a
# comment block that is not made of complete lines, but whatever!)
# Can't think of a clean solution, so we hack away
block_suffix = ""
if not newdata[-1]:
block_suffix = "\n"
newdata = newdata[:-1]
return '\n'.join(comment_header+line for line in newdata) + block_suffix
def is_all_white(line):
"""Return True if line is empty or all whitespace."""
return re.match(r"^\s*$", line) is not None
def get_indent(line):
"""Return the initial space or tab indent of line."""
return re.match(r"^([ \t]*)", line).group()
def get_comment_header(line):
"""Return string with leading whitespace and '#' from line or ''.
A null return indicates that the line is not a comment line. A non-
null return, such as ' #', will be used to find the other lines of
a comment block with the same indent.
"""
m = re.match(r"^([ \t]*#*)", line)
if m is None: return ""
return m.group(1)
if __name__ == "__main__":
import unittest
unittest.main('idlelib.idle_test.test_formatparagraph',
verbosity=2, exit=False)
=======
"""Extension to format a paragraph or selection to a max width.
Does basic, standard text formatting, and also understands Python
comment blocks. Thus, for editing Python source code, this
extension is really only suitable for reformatting these comment
blocks or triple-quoted strings.
Known problems with comment reformatting:
* If there is a selection marked, and the first line of the
selection is not complete, the block will probably not be detected
as comments, and will have the normal "text formatting" rules
applied.
* If a comment block has leading whitespace that mixes tabs and
spaces, they will not be considered part of the same block.
* Fancy comments, like this bulleted list, aren't handled :-)
"""
import re
from idlelib.configHandler import idleConf
class FormatParagraph:
menudefs = [
('format', [ # /s/edit/for |
cvegaj/ElectriCERT | venv3/lib/python3.6/site-packages/pycoin/tx/script/errno.py | Python | gpl-3.0 | 1,047 | 0 | OK = 0
UNKNOWN_ERROR = 1
EVAL_FALSE = 2
OP_RETURN = 3
# Max sizes
SCRIPT_SIZE = 4
PUSH_SIZE = 5
OP_COUNT = 6
STACK_SIZE = 7
SIG_COUNT = 8
PUBKEY_COUNT = 9
# Failed verify operations
VERIFY = 10
EQUALVERIFY = 11
CHECKMULTISIGVERIFY | = 12
CHECKSIGVERIFY = 13
NUMEQUALVERIFY = 14
# Logical/For | mat/Canonical errors
BAD_OPCODE = 15
DISABLED_OPCODE = 16
INVALID_STACK_OPERATION = 17
INVALID_ALTSTACK_OPERATION = 18
UNBALANCED_CONDITIONAL = 19
# CHECKLOCKTIMEVERIFY and CHECKSEQUENCEVERIFY
NEGATIVE_LOCKTIME = 20
UNSATISFIED_LOCKTIME = 21
# Malleability
SIG_HASHTYPE = 22
SIG_DER = 23
MINIMALDATA = 24
SIG_PUSHONLY = 25
SIG_HIGH_S = 26
SIG_NULLDUMMY = 27
PUBKEYTYPE = 28
CLEANSTACK = 29
MINIMALIF = 30
NULLFAIL = 31
# softfork safeness
DISCOURAGE_UPGRADABLE_NOPS = 32
DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM = 33
# segregated witness
WITNESS_PROGRAM_WRONG_LENGTH = 34
WITNESS_PROGRAM_WITNESS_EMPTY = 35
WITNESS_PROGRAM_MISMATCH = 36
WITNESS_MALLEATED = 37
WITNESS_MALLEATED_P2SH = 38
WITNESS_UNEXPECTED = 39
WITNESS_PUBKEYTYPE = 40
ERROR_COUNT = 41
|
SuliacLEGUILLOU/computor | srcs/Array.py | Python | mit | 575 | 0.001739 | """ Module of mathematical array """
class Array(object):
"""
Multidimentionnal Array of Number
"""
def __init__(self):
self.data = []
def add(self, target):
""" Add another Array to self """
for (i, table) in enumerate(target | .data):
for (j, val) in enumerate(table):
self.data[i][j] += val
def | sub(self, target):
""" Substract another Array to self """
for (i, table) in enumerate(target.data):
for (j, val) in enumerate(table):
self.data[i][j] -= val
|
40123148/2015cdb_40123148 | wsgi.py | Python | gpl-3.0 | 34,518 | 0.004126 | #@+leo-ver=5-thin
#@+node:2014fall.20141212095015.1775: * @file wsgi.py
# coding=utf-8
# 上面的程式內容編碼必須在程式的第一或者第二行才會有作用
################# (1) 模組導入區
# 導入 | cherrypy 模組, 為了在 OpenShift 平台上使用 cherrypy 模組, 必須透過 setup.py 安裝
#@@language python
#@@tabwidth -4
#@+<<declarations>>
#@+node:2014fall.20141212095015.1776: ** <<d | eclarations>> (wsgi)
import cherrypy
# 導入 Python 內建的 os 模組, 因為 os 模組為 Python 內建, 所以無需透過 setup.py 安裝
import os
# 導入 random 模組
import random
# 導入 gear 模組
import gear
################# (2) 廣域變數設定區
# 確定程式檔案所在目錄, 在 Windows 下有最後的反斜線
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# 設定在雲端與近端的資料儲存目錄
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
# 表示程式在雲端執行
download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
# 表示程式在近端執行
download_root_dir = _curdir + "/local_data/"
data_dir = _curdir + "/local_data/"
'''以下為近端 input() 與 for 迴圈應用的程式碼, 若要將程式送到 OpenShift 執行, 除了採用 CherryPy 網際框架外, 還要轉為 html 列印
# 利用 input() 取得的資料型別為字串
toprint = input("要印甚麼內容?")
# 若要將 input() 取得的字串轉為整數使用, 必須利用 int() 轉換
repeat_no = int(input("重複列印幾次?"))
for i in range(repeat_no):
print(toprint)
'''
#@-<<declarations>>
#@+others
#@+node:2014fall.20141212095015.1777: ** class Hello
################# (3) 程式類別定義區
# 以下改用 CherryPy 網際框架程式架構
# 以下為 Hello 類別的設計內容, 其中的 object 使用, 表示 Hello 類別繼承 object 的所有特性, 包括方法與屬性設計
class Hello(object):
# Hello 類別的啟動設定
_cp_config = {
'tools.encode.encoding': 'utf-8',
'tools.sessions.on' : True,
'tools.sessions.storage_type' : 'file',
#'tools.sessions.locking' : 'explicit',
# session 以檔案儲存, 而且位於 data_dir 下的 tmp 目錄
'tools.sessions.storage_path' : data_dir+'/tmp',
# session 有效時間設為 60 分鐘
'tools.sessions.timeout' : 60
}
#@+others
#@+node:2014fall.20141212095015.2004: *3* __init__
def __init__(self):
# 配合透過案例啟始建立所需的目錄
if not os.path.isdir(data_dir+'/tmp'):
os.mkdir(data_dir+'/tmp')
if not os.path.isdir(data_dir+"/downloads"):
os.mkdir(data_dir+"/downloads")
if not os.path.isdir(data_dir+"/images"):
os.mkdir(data_dir+"/images")
#@+node:2014fall.20141212095015.1778: *3* index_orig
# 以 @ 開頭的 cherrypy.expose 為 decorator, 用來表示隨後的成員方法, 可以直接讓使用者以 URL 連結執行
@cherrypy.expose
# index 方法為 CherryPy 各類別成員方法中的內建(default)方法, 當使用者執行時未指定方法, 系統將會優先執行 index 方法
# 有 self 的方法為類別中的成員方法, Python 程式透過此一 self 在各成員方法間傳遞物件內容
def index_orig(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141212095015.1779: *3* hello
@cherrypy.expose
def hello(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141215194146.1791: *3* index
@cherrypy.expose
def two(self, guess=None):
# 將標準答案存入 answer session 對應區
theanswer = random.randint(1, 100)
thecount = 0
# 將答案與計算次數變數存進 session 對應變數
cherrypy.session['answer'] = theanswer
cherrypy.session['count'] = thecount
# 印出讓使用者輸入的超文件表單
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>
<hr>
<!-- 以下在網頁內嵌 Brython 程式 -->
<script type="text/python">
from browser import document, alert
def echo(ev):
alert(document["zone"].value)
# 將文件中名稱為 mybutton 的物件, 透過 click 事件與 echo 函式 bind 在一起
document['mybutton'].bind('click',echo)
</script>
<input id="zone"><button id="mybutton">click !</button>
<hr>
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
# 以下使用中文變數名稱
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1713: *3* twoDgear
@cherrypy.expose
# A齒數1,B齒數2,C齒數3,D齒數4,E齒數5,F齒數6, M 為模數, P 為壓力角
def index(self,A=None,B=None,C=None,D=None, E=None, F=None, M=None, P=None):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
40123148<br>蔡宗霖
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=mygeartest2>
<p>齒數1:
<select name=A>
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<option>17
<option>18
<option>19
<option>20
<option>21
<option>22
<option>23
<option>24
<option>25
<option>26
<option>27
<option>28
<option>29
<option>30
</select>
<p>齒數2:
<select name=B>
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<option>17
<option>18
<option>19
<option>20
<option>21
<option>22
<option>23
<option>24
<option>25
<option>26
<option>27
<option>28
<option>29
<option>30
</select>
<p>齒數3:
<select name=C>
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<option>17
<option>18
<option>19
<option>20
<option>21
<option>22
<option>23
<option>24
<option>25
<option>26
<option>27
<option>28
<option>29
<option>30
</select>
<p>齒數4:
<select name=D>
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<option>17
<option>18
<option>19
<option>20
<option>21
<option>22
<option>23
<option>24
<option>25
<option>26
<option>27
<option>28
<option>29
<option>30
</select>
<p>齒數5:
<select name=E>
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<option>17
<option>18
<option>19
<option>20
<option>21
<option>22
<option>23
<option>24
<option>25
<option>26
<option>27
<option>28
<option>29
<option>30
</select>
</select>
<p>齒數6:
<select name=F>
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<option>17
<option>18
<option>19
<option>20
<option>21
<option>22
<option>23
<option>24
<option>25
<option>26
<option>27
<option>28
<option>29
<option>30
</select>
<p>模數:
<select name=M>
<option>2
<option>3
<option>4
<option>5
<option>6
<option>7
<option>8
<option>9
<option>10
<option>11
<option>12
<option>13
<option>14
<option>15
<option>16
<o |
vicnet/weboob | weboob/applications/qgalleroob/__init__.py | Python | lgpl-3.0 | 61 | 0 | from .qgalleroob import QGalleroob
__all | __ = ['QGalleroob' | ]
|
martinezmizael/Escribir-con-la-mente | object/entrenarFannNormalizado.py | Python | mit | 5,218 | 0.043887 | # -*- encoding: utf-8 -*-
'''
Created on: 2015
Author: Mizael Martinez
'''
from pyfann import libfann
from login import Login
from escribirArchivo import EscribirArchivo
import inspect, sys, os
sys.path.append("../model")
from baseDatos import BaseDatos
class CtrlEntrenarRNANormalizado:
def __init__(self):
self.__coneccion=1
self.__tasa_aprendizaje=0.7
self.__numero_entradas=0
self.__numero_salidas=0
self.__neuronas_capa_oculta=0
self.__error_deseado=0
self.__epocas=0
self.__iteraciones_entre_reporte=1000
self.__red=None
self.__error_real=0
self.__url_prueba=None
self.__url_guardar=None
self.__path="../files/"
#self.__path="files/"
self.__bd=BaseDatos()
self.__interfaz=None
def entrenar(self):
print("Entrenando ...")
self.__red=libfann.neural_net()
self.__red.create_sparse_array(self.__coneccion,(self.__numero_entradas,self.__neuronas_capa_oculta,self.__numero_salidas))
self.__red.set_learning_rate(self.__tasa_aprendizaje)
self.__red.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
self.__red.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
self.__red.train_on_file(self.__path+self.__url_prueba, self.__epocas,self.__iteraciones_entre_reporte, self.__error_deseado)
self.__error_real=self.__red.get_MSE()
datos={"numerodeneuronas":self.__neuronas_capa_oculta,"error":self.__error_real,"tipo":"normalizado"}
id=self.__bd.agregarEntrenamiento(datos)
print("id: %s"%(str(id)))
self.__url_guardar="mizael_rna%s.net"%(id)
self.__bd.actualizarRegistroEntrenamiento(self.__url_guardar,id)
self.__red.save(self.__path + self.__url_guardar)
if self.__interfaz != None:
self.__interfaz.lineEdit_4.setText("%s"%str(self.__error_real))
def entrenarGamma(self):
print("Entrenando Gamma...")
self.__red=libfann.neural_net()
self.__red.create_sparse_array(self.__coneccion,(self.__numero_entradas,self.__neuronas_capa_oculta,self.__numero_salidas))
self.__red.set_learning_rate(self.__tasa_aprendizaje)
self.__red.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
self.__red.set_activation_function_output(libfann.LINEAR)
self.__red.train_on_file(self.__path+self.__url_prueba, self.__epocas,self.__iteraciones_entre_reporte, self.__error_deseado)
self.__error_real=self.__red.get_MSE()
datos={"numerodeneuronas":self.__neuronas_capa_oculta,"error":self.__error_real,"tipo":"gamma"}
id=self.__bd.agregarEntrenamiento(datos)
print("id: %s"%(str(id)))
self.__url_guardar="mizael_rna%s.net"%(id)
self.__bd.actualizarRegistroEntrenamiento(self.__url_guardar,id)
self.__red.save(self.__path + self.__url_guardar)
if self.__interfaz != None:
self.__interfaz.lineEdit_4.setText("%s"%str(self.__error_real))
def setConeccion(self,conexion):
self.__coneccion=conexion
def setTasaAprendizaje(self,tasa_aprendizaje):
self.__tasa_aprendizaje=tasa_aprendizaje
def setNumeroEntradas(self,numero_entradas):
self.__numero_entradas=numero_entradas
def setNumeroSalidas(self,numero_salidas):
self.__numero_salidas=numero_salidas
def setNeuronasCapaOculta(self,neuronas_capa_oculta):
self.__neuronas_capa_oculta=neuronas_capa_oculta
def setErrorDeseado(self,error_deseado):
self.__error_deseado=error_deseado
def setEpocas(self,epocas):
self.__epocas=epocas
def setIteracionesEntreReporte(self,iteraciones_entre_reporte):
self.__iteraciones_entre_reporte=iteraciones_entre_reporte
def setErrorReal(self,error_real):
self.__error_real=error_real
def setUrlPrueba(self,url_prueba):
self.__url_prueba=url_prueba
def setUrlGuardar(self,url_guardar):
self.__url_guardar=url_guardar
def setInterfaz(self,interfaz):
self.__interfaz=interfaz
def getConeccion(self):
return self.__coneccion
def getTasaAprendizaje(self):
return self.__tasa_aprendizaje
def getNumeroEntradas(self):
return self.__numero_entradas
def getNumeroSalidas(self):
return self.__numero_salidas
def getNeuronasCapaOculta(self):
return self.__neuronas_capa_oculta
def getErrorDeseado(self):
return self.__error_deseado
def getEpocas(self):
return | self.__epocas
def getIteracionesEntreReporte(self):
return self.__iteraciones_en | tre_reporte
def getErrorReal(self):
return self.__error_real
def getUrlPrueba(self):
return self.__url_prueba
def getUrlGuardar(self):
return self.__url_guardar
def getInterfaz(self):
return self.__interfaz
'''
#Entrenar para todos los valores
o=CtrlEntrenarRNANormalizado()
o.setConeccion(1)
o.setTasaAprendizaje(0.7)
o.setNumeroEntradas(8)
o.setNumeroSalidas(5)
#Cambian el # de neuronas y error deseado
o.setNeuronasCapaOculta(15)
o.setErrorDeseado(0.001)
#Cambian el # de epocas
o.setEpocas(130000)
o.setIteracionesEntreReporte(10000)
o.setUrlPrueba("rna_normalizado.data")
o.entrenar()
'''
'''
#Entrenar para las Gamma
g=CtrlEntrenarRNANormalizado()
g.setConeccion(1)
g.setTasaAprendizaje(0.7)
g.setNumeroEntradas(2)
g.setNumeroSalidas(1)
#Cambian el # de neuronas y error deseado
g.setNeuronasCapaOculta(150)
g.setErrorDeseado(0.9)
#Cambian el # de epocas
g.setEpocas(30000)
g.setIteracionesEntreReporte(10000)
g.setUrlPrueba("rna_gamma_normalizado.data")
g.entrenarGamma()
''' |
zozo123/buildbot | master/buildbot/scripts/stop.py | Python | gpl-3.0 | 2,284 | 0.000876 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import with_statement
import errno
import os
import signal
import time
from buildbot.scripts import base
def stop(config, signame="TERM", wait=False):
basedir = config['basedir']
quiet = config['quiet']
if config['clean']:
signame = 'USR1'
if not base.isBuildmasterDir(config['basedir']):
return 1
pidfile = os.path.join(basedir, 'twistd.pid')
try:
with open(pidfile, "rt") as f:
pid = int(f.read().strip())
except:
if not config['quiet']:
print "buildmaster not running"
return 0
signum = getattr(signal, "SIG" + signame)
try:
os.kill(pid, signum)
except OSError, e:
if e.errno != errno.ESR | CH:
raise
else:
if not config['quiet']:
print "buildmaster not running"
try:
os.unlink(pidfile)
except:
pass
return 0
if not w | ait:
if not quiet:
print "sent SIG%s to process" % signame
return 0
time.sleep(0.1)
# poll once per second until twistd.pid goes away, up to 10 seconds,
# unless we're doing a clean stop, in which case wait forever
count = 0
while count < 10 or config['clean']:
try:
os.kill(pid, 0)
except OSError:
if not quiet:
print "buildbot process %d is dead" % pid
return 0
time.sleep(1)
count += 1
if not quiet:
print "never saw process go away"
return 1
|
pbougue/navitia | source/eitri/ed_handler.py | Python | agpl-3.0 | 6,071 | 0.001812 | # Copyright (c) 2001-2015, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# | a non ending quest to the respon | sive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from contextlib import contextmanager
import glob
import os
from navitiacommon import utils, launch_exec
from navitiacommon.launch_exec import launch_exec
import psycopg2
import zipfile
import logging
ALEMBIC_PATH_ED = os.environ.get('ALEMBIC_PATH', '../sql')
ALEMBIC_PATH_CITIES = os.environ.get('ALEMBIC_PATH_CITIES', '../cities')
@contextmanager
def cd(new_dir):
"""
small helper to change the current dir
"""
prev_dir = os.getcwd()
os.chdir(os.path.expanduser(new_dir))
try:
yield
finally:
os.chdir(prev_dir)
def binarize(ed_db_params, output, ed_component_path, cities_db_params):
logger = logging.getLogger(__name__)
logger.info('creating data.nav')
ed2nav = 'ed2nav'
if ed_component_path:
ed2nav = os.path.join(ed_component_path, ed2nav)
launch_exec(
ed2nav,
[
"-o",
output,
"--connection-string",
ed_db_params.old_school_cnx_string(),
"--cities-connection-string",
cities_db_params.old_school_cnx_string(),
],
logger,
)
logger.info("data.nav is created successfully: {}".format(output))
def import_data(data_dir, db_params, ed_component_path):
"""
call the right component to import the data in the directory
we loop through all files until we recognize one on them
"""
log = logging.getLogger(__name__)
files = glob.glob(data_dir + "/*")
data_type, file_to_load = utils.type_of_data(files)
if not data_type:
log.info('unknown data type for dir {}, skipping'.format(data_dir))
return
# Note, we consider that we only have to load one kind of data per directory
import_component = data_type + '2ed'
if ed_component_path:
import_component = os.path.join(ed_component_path, import_component)
if file_to_load.endswith('.zip') or file_to_load.endswith('.geopal'):
# TODO: handle geopal as non zip
# if it's a zip, we unzip it
zip_file = zipfile.ZipFile(file_to_load)
zip_file.extractall(path=data_dir)
file_to_load = data_dir
if launch_exec(
import_component, ["-i", file_to_load, "--connection-string", db_params.old_school_cnx_string()], log
):
raise Exception('Error: problem with running {}, stoping'.format(import_component))
def load_cities(cities_file, cities_db_params, cities_exec_path):
logger = logging.getLogger(__name__)
cities_exec = os.path.join(cities_exec_path, 'cities')
if launch_exec(
cities_exec, ["-i", cities_file, "--connection-string", cities_db_params.old_school_cnx_string()], logger
):
raise Exception('Error: problem with running {}, stoping'.format(cities_exec))
def load_data(data_dirs, ed_db_params, ed_component_path):
logging.getLogger(__name__).info('loading {}'.format(data_dirs))
for d in data_dirs:
import_data(d, ed_db_params, ed_component_path)
def update_db(db_params, alembic_path):
"""
enable postgis on the db and update it's scheme
"""
cnx_string = db_params.cnx_string()
# we need to enable postgis on the db
cnx = psycopg2.connect(
database=db_params.dbname, user=db_params.user, password=db_params.password, host=db_params.host
)
c = cnx.cursor()
c.execute("create extension postgis;")
c.close()
cnx.commit()
logging.getLogger(__name__).info('message = {}'.format(c.statusmessage))
with cd(alembic_path):
res = os.system('PYTHONPATH=. alembic -x dbname="{cnx}" upgrade head'.format(cnx=cnx_string))
if res:
raise Exception('problem with db update')
def generate_nav(
data_dir, docker_ed, docker_cities, output_file, ed_component_path, cities_exec_path, import_cities
):
"""
load all data either directly in data_dir if there is no sub dir, or all data in the subdir
"""
cities_db_params = docker_cities.get_db_params()
update_db(cities_db_params, ALEMBIC_PATH_CITIES)
ed_db_params = docker_ed.get_db_params()
update_db(ed_db_params, ALEMBIC_PATH_ED)
if import_cities:
if not os.path.exists(import_cities):
raise Exception('Error: impossible to find {}, exiting'.format(import_cities))
load_cities(import_cities, cities_db_params, cities_exec_path)
if not os.path.exists(data_dir):
raise Exception('Error: impossible to find {}, exiting'.format(data_dir))
data_dirs = [
os.path.join(data_dir, sub_dir_name)
for sub_dir_name in os.listdir(data_dir)
if os.path.isdir(os.path.join(data_dir, sub_dir_name))
] or [
data_dir
] # if there is no sub dir, we import only the files in the dir
load_data(data_dirs, ed_db_params, ed_component_path)
binarize(ed_db_params, output_file, ed_component_path, cities_db_params)
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/checkbox/lib/__init__.py | Python | gpl-3.0 | 54 | 0.018519 | ../../.. | /../../share/pyshared/checkbox/lib/__i | nit__.py |
shollen/evennia | evennia/objects/models.py | Python | bsd-3-clause | 12,067 | 0.002818 | """
This module defines the database models for all in-game objects, that
is, all objects that has an actual existence in-game.
Each database object is 'decorated' with a 'typeclass', a normal
python class that implements all the various logics needed by the game
in question. Objects created of this class transparently communicate
with its related database object for storing all attributes. The
admin should usually not have to deal directly with this database
object layer.
Attributes are separate objects that store values persistently onto
the database object. Like everything else, they can be accessed
transparently through the decorating TypeClass.
"""
from builtins import object
from django.conf import settings
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from evennia.typeclasses.models import TypedObject
from evennia.objects.manager import ObjectDBManager
from evennia.utils import logger
from evennia.utils.utils import (make_iter, dbref, lazy_property)
class ContentsHandler(object):
"""
Handles and caches the contents of an object to avoid excessive
lookups (this is done very often due to cmdhandler needing to look
for object-cmdsets). It is stored on the 'contents_cache' property
of the ObjectDB.
"""
def __init__(self, obj):
"""
Sets up the contents handler.
Args:
obj (Object): The object on which the
handler is defined
"""
self.obj = obj
self._pkcache = {}
self._idcache = obj.__class__.__instance_cache__
self.init()
def init(self):
"""
Re-initialize the content cache
"""
self._pkcache.update(dict((obj.pk, None) for obj in
ObjectDB.objects.filter(db_location=self.obj) if obj.pk))
def get(self, exclude=None):
"""
Return the contents of the cache.
Args:
exclude (Object or list of Object): object(s) to ignore
Returns:
objects (list): the Objects inside this location
"""
if exclude:
pks = [pk for pk in self._pkcache if pk not in [excl.pk for excl in make_iter(exclude)]]
else:
pks = self._pkcache
try:
return [self._idcache[pk] for pk in pks]
except KeyError:
# this can happen if the idmapper cache was cleared for an object
# in the contents cache. If so we need to re-initialize and try again.
self.init()
try:
return [self._idcache[pk] for pk in pks]
except KeyError:
# this means an actual failure of caching. Return real database match.
logger.log_err("contents cache failed for %s." % (self.obj.key))
return list(ObjectDB.objects.filter(db_location=self.obj))
def add(self, obj):
"""
Add a new object to this location
Args:
obj (Object): object to add
"""
self._pkcache[obj.pk] = None
def remove(self, obj):
"""
Remove object from this location
Args:
obj (Object): object to remove
"""
self._pkcache.pop(obj.pk, None)
def clear(self):
"""
Clear the contents cache and re-initialize
"""
self._pkcache = {}
self.init()
#------------------------------------------------------------
#
# ObjectDB
#
#------------------------------------------------------------
class ObjectDB(TypedObject):
"""
All objects in the game use the ObjectDB model to store
data in the database. This is handled transparently through
the typeclass system.
Note that the base objectdb is very simple, with
few defined fields. Use attributes to extend your
type class with new database-stored variables.
The TypedObject supplies the following (inherited) properties:
- key - main name
- name - alias for key
- db_typeclass_path - the path to the decorating typeclass
- db_date_created - time stamp of object creation
- permissions - perm strings
- locks - lock definitions (handler)
- dbref - #id of object
- db - persistent attribute storage
- ndb - non-persistent attribute storage
The ObjectDB adds the following properties:
- player - optional connected player (always together with sessid)
- sessid - optional connection session id (always together with player)
- location - in-game location of object
- home - safety location for object (handler)
- scripts - scripts assigned to object (handler from typeclass)
- cmdset - active cmdset on object (handler from typeclass)
- aliases - aliases for this object (property)
- nicks - nicknames for *other* things in Evennia (handler)
- sessions - sessions connected to this object (see also player)
- has_player - bool if an active player is currently connected
- contents - other objects having this object as location
- exits - exits from this object
"""
#
# ObjectDB Database model setup
#
#
# inherited fields (from TypedObject):
# db_key (also 'name' works), db_typeclass_path, db_date_created,
# db_permissions
#
# These databse fields (including the inherited ones) should normally be
# managed by their corresponding wrapper properties, named | same as the
# field, but without the db_* prefix (e.g. th | e db_key field is set with
# self.key instead). The wrappers are created at the metaclass level and
# will automatically save and cache the data more efficiently.
# If this is a character object, the player is connected here.
db_player = models.ForeignKey("players.PlayerDB", null=True, verbose_name='player', on_delete=models.SET_NULL,
help_text='a Player connected to this object, if any.')
# the session id associated with this player, if any
db_sessid = models.CommaSeparatedIntegerField(null=True, max_length=32, verbose_name="session id",
help_text="csv list of session ids of connected Player, if any.")
# The location in the game world. Since this one is likely
# to change often, we set this with the 'location' property
# to transparently handle Typeclassing.
db_location = models.ForeignKey('self', related_name="locations_set", db_index=True, on_delete=models.SET_NULL,
blank=True, null=True, verbose_name='game location')
# a safety location, this usually don't change much.
db_home = models.ForeignKey('self', related_name="homes_set", on_delete=models.SET_NULL,
blank=True, null=True, verbose_name='home location')
# destination of this object - primarily used by exits.
db_destination = models.ForeignKey('self', related_name="destinations_set", db_index=True, on_delete=models.SET_NULL,
blank=True, null=True, verbose_name='destination',
help_text='a destination, used only by exit objects.')
# database storage of persistant cmdsets.
db_cmdset_storage = models.CharField('cmdset', max_length=255, null=True, blank=True,
help_text="optional python path to a cmdset class.")
# Database manager
objects = ObjectDBManager()
# defaults
__settingsclasspath__ = settings.BASE_OBJECT_TYPECLASS
__defaultclasspath__ = "evennia.objects.objects.DefaultObject"
__applabel__ = "objects"
@lazy_property
def contents_cache(self):
return ContentsHandler(self)
# cmdset_storage property handling
def __cmdset_storage_get(self):
"getter"
storage = self.db_cmdset_storage
return [path.strip() for path in storage.split(',')] if storage else []
def __cmdset_storage_set(self, value):
"setter"
self.db_cmdset_storage = ",".join(str(val).strip() for val in make_iter(value))
self.save(update_fields=["db_cmdse |
doriancoins/doriancoin | test/functional/wallet_basic.py | Python | mit | 21,630 | 0.005457 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Doriancoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet."""
from test_framework.test_framework import DoriancoinTestFramework
from test_framework.util import *
class WalletTest(DoriancoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.setup_clean_chain = True
def setup_network(self):
self.add_nodes(4)
self.start_node(0)
self.start_node(1)
self.start_node(2)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.sync_all([self.nodes[0:3]])
def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
"""Return curr_balance after asserting the fee was in range"""
fee = balance_with_fee - curr_balance
assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
return curr_balance
def get_vsize(self, txn):
return self.nodes[0].decoderawtransaction(txn)['vsize']
def run_test(self):
# Check that there's no UTXO on none of the nodes
assert_equal(len(self.nodes[0].listunspent()), 0)
assert_equal(len(self.nodes[1].listunspent()), 0)
assert_equal(len(self.nodes[2].listunspent()), 0)
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 50)
assert_equal(walletinfo['balance'], 0)
self.sync_all([self.nodes[0:3]])
self.nodes[1].generate(101)
self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[0].getbalance(), 50)
assert_equal(self.nodes[1].getbalance(), 50)
assert_equal(self.nodes[2].getbalance(), 0)
# Check that only first and second nodes have UTXOs
utxos = self.nodes[0].listunspent()
assert_equal(len(utxos), 1)
assert_equal(len(self.nodes[1].listunspent()), 1)
assert_equal(len(self.nodes[2].listunspent()), 0)
self.log.info("test gettxout")
confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
# First, outputs that are unspent both in the chain and in the
# mempool should appear with or without include_mempool
txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
assert_equal(txout['value'], 50)
txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
assert_equal(txout['value'], 50)
# Send 21 DRC from 0 to 2 using sendtoaddress call.
# Locked memory should use at least 32 bytes to sign each transaction
self.log.info("test getmemoryinfo")
memory_before = self.nodes[0].getmemoryinfo()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
memory_after = self.nodes[0].getmemoryinfo()
assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used'])
self.log.info("test gettxout (second part)")
# utxo spent in mempool should be visible if you exclude mempool
# but invisible if you include mempool
txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
assert_equal(txout['value'], 50)
txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
assert txout is None
# new utxo from mempool should be invisible if you exclude mempool
# but visible if you include mempool
txout = self.nodes[0].gettxout(mempool_txid, 0, False)
assert txout is None
txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
# note the mempool tx will have randomly assigned indices |
# but 10 will go to node2 and the rest will go to node0
balance = self.nodes[0].getbalance()
assert_equ | al(set([txout1['value'], txout2['value']]), set([10, balance]))
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 0)
# Have node0 mine a block, thus it will collect its own fee.
self.nodes[0].generate(1)
self.sync_all([self.nodes[0:3]])
# Exercise locking of unspent outputs
unspent_0 = self.nodes[2].listunspent()[0]
unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])
self.nodes[2].lockunspent(False, [unspent_0])
assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
assert_equal([unspent_0], self.nodes[2].listlockunspent())
self.nodes[2].lockunspent(True, [unspent_0])
assert_equal(len(self.nodes[2].listlockunspent()), 0)
assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction",
self.nodes[2].lockunspent, False,
[{"txid": "0000000000000000000000000000000000", "vout": 0}])
assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds",
self.nodes[2].lockunspent, False,
[{"txid": unspent_0["txid"], "vout": 999}])
# Have node1 generate 100 blocks (so node0 can recover the fee)
self.nodes[1].generate(100)
self.sync_all([self.nodes[0:3]])
# node0 should end up with 100 DRC in block rewards plus fees, but
# minus the 21 plus fees sent to node2
assert_equal(self.nodes[0].getbalance(), 100-21)
assert_equal(self.nodes[2].getbalance(), 21)
# Node0 should have two unspent outputs.
# Create a couple of transactions to send them to node2, submit them through
# node1, and make sure both node0 and node2 pick them up properly:
node0utxos = self.nodes[0].listunspent(1)
assert_equal(len(node0utxos), 2)
# create both transactions
txns_to_send = []
for utxo in node0utxos:
inputs = []
outputs = {}
inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] - 3
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))
# Have node 1 (miner) send the transactions
self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
# Have node1 mine a block to confirm transactions:
self.nodes[1].generate(1)
self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[0].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 94)
assert_equal(self.nodes[2].getbalance("from1"), 94-21)
# Verify that a spent output cannot be locked anymore
spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]}
assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0])
# Send 10 DRC normal
address = self.nodes[0].getnewaddress("test")
fee_per_byte = Decimal('0.001') / 1000
self.nodes[2].settxfee(fee_per_byte * 1000)
txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
self.nodes[2].generate(1)
self.sync_all([self.nodes[0:3]])
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
assert_equal(self.nodes[0].g |
dimagi/commcare-hq | corehq/apps/app_manager/management/commands/migrate_advanced_form_preload.py | Python | bsd-3-clause | 1,053 | 0.001899 | from corehq.apps.app_manager.management.commands.helpers | import (
AppMigrationCommandBase,
)
from corehq.apps.app_manager.models import Application
class Command(AppMigrationCommandBase):
help = "Migrate preload dict in advanced forms to " \
"allow loading the same case property into multiple questions."
include_builds = False
def migrate_app(self, app_doc):
modules = [m for m in app_doc['modules'] if m.get('module_type | ', '') == 'advanced']
should_save = False
for module in modules:
forms = module['forms']
for form in forms:
load_actions = form.get('actions', {}).get('load_update_cases', [])
for action in load_actions:
preload = action['preload']
if preload and list(preload.values())[0].startswith('/'):
action['preload'] = {v: k for k, v in preload.items()}
should_save = True
return Application.wrap(app_doc) if should_save else None
|
jniediek/combinato | tools/parse_cheetah_logfile.py | Python | mit | 19,113 | 0.000262 | #!/usr/bin/env python3
# JN 2015-07-29
"""
Log file parser for Cheetah by Johannes Niediek
This script reads out the reference settings
by sequentially following all crs, rbs, and gbd commands.
Please keep in mind that the following scenario is possible with Cheetah:
Start the recording
Stop the recording
Change the reference settings
Start the recording
If you do this there will be .ncs with their reference changing
at some point during the recording.
In most cases, this is probably not what you want,
so this script displays a warning message if you did it.
Cheetah ATLAS:
There is an undocumented channel nummber 32000038.
I reverse-engineered its use, but that might depend on the exact version
of ATLAS etc.
This script partially mirrors the system of variable definitions
in Cheeatah. For complex arithmethic with variables, the script might fail.
Please check the GitHub repository (github.com/jniediek/combinato.git)
for updates and manual.
Contact me (jonied@posteo.de) for access to the repository.
"""
from __future__ import print_function, division
import os
import re
from collections import defaultdict
import datetime
from csv import writer as csv_writer
DATE_FNAME = 'start_stop_datetime.txt'
def parse_times(setting):
"""
read out the date and times of a recording
"""
def timestr2timeobj(time_str):
"""
convert a time string with milliseconds to a datetime object
"""
time, milli = time_str.split('.')
time = datetime.datetime.strptime(time, '%H:%M:%S')
time += datetime.timedelta(seconds=int(milli)/1000)
return time
tstart, tstop = [timestr2timeobj(rec[1])
for rec in setting.start_rec, setting.stop_rec]
if setting.folder is None:
folder_date_obj = None
else:
date_str = date_pattern.match(setting.folder).groups()[0]
folder_date_obj = datetime.datetime.strptime(date_str,
r'%Y-%m-%d_%H-%M-%S')
tstart = datetime.datetime.combine(folder_date_obj, tstart.time())
tstop = datetime.datetime.combine(folder_date_obj, tstop.time())
# by default assume that recording is stopped once every day
if tstop < tstart:
tstop += datetime.timedelta(days=1)
return folder_date_obj, tstart, tstop
class Setting(object):
"""
simple class that stores reference settings
"""
def __init__(self):
self.num2name = None
self.name2num = None
self.lrefs = None
self.grefs = None
self.crefs = None
self.start_rec = None
self.stop_rec = None
self.start_timestamp = None
self.stop_timestamp = None
self.folder = None
DEBUG = False
# The following are the interesting commands
# You can still trick the parser, e.g. by sending -SetChannelNumber commands
# via NetCom.
# But it's very easy to adapt the parser to such situations
set_drs_strings = ('Processing line: -SetDRS', # old systems
'Processing line: -SetAcqEntReference') # new systems
set_channel_pattern = re.compile(r'Processing line:\s*-SetChannelNumber')
channel_number_pattern = re.compile(r'.*\"(.*)\" (\d.*)')
channel_number_pattern_var = re.compile(r'.* (.*) (.*)')
drs_command_pattern = re.compile(r'DRS Command\(b(\w) (\w*)\s{1,2}'
r'(\d*)\s{0,2}(\d*)')
variable_pattern = re.compile(r'.*(%\w*) = \"?(\w*)\"?')
date_pattern = re.compile(r'.*(\d{4}-\d{1,2}-\d{1,2}_'
'\d{1,2}-\d{1,2}-\d{1,2}).*')
def board_num_to_chan(board, num):
return (board - 1) * 16 + num
def chan_to_board_num(chan):
return 2 * int(chan/32) + 1, chan % 32
def parser(fname):
"""
transform logfile into header, log, and ignored lines
"""
with open(fname, 'r') as fid:
lines = fid.readlines()
fid.close()
in_header = True
is_notice = False
ignored_lines = []
protocol = []
header = {}
for line in lines:
if line[:13] == '-* NOTICE *-':
is_notice = True
else:
is_notice = False
if in_header:
# this means header is over
if is_notice:
in_header = False
else:
if len(line) > 3:
key, value = line.split(':', 1)
header[key] = value.strip()
else:
if is_notice:
fields = line[15:].split(' - ', 4)
time = fields[0]
stamp = int(fields[1])
msg = fields[2].strip().replace('\r', '')
if len(fields) == 4:
msg2 = fields[3].strip().replace('\r', '')
else:
msg2 = ''
protocol.append((time, stamp, msg, msg2))
elif line.startswith('Log file successfully moved to'):
target = line.split()[-1]
# this indicates a log file move
# mov is our key
protocol.append((0, 0, 'mov', target))
else:
ignored_lines.append(line.strip())
try:
bn = 'Cheetah ' + header['Cheetah Build Number']
except KeyError:
bn = 'ATLAS ' + header['Cheetah ATLAS Build Number']
print(bn)
return header, protocol, ignored_lines
def all_defined_check(chnum2name, crefs):
"""
check if a reference has been defined for all existing channels
"""
# print(chnum2name)
for chnum in chnum2name:
board, lnum = chan_to_board_num(chnum)
try:
ref = crefs[chnum]
if DEBUG:
print('Channel {} (board {} channel {}) - {}'.
format(chnum, board, lnum, ref))
except KeyError:
print('No reference defined for channel {} ({})'.
format(chnum, chnum2name[chnum][0]))
def print_refs(lrefs, grefs):
    """Print an overview of local and global references.

    ``lrefs`` maps (board, ref) pairs to a local reference number;
    ``grefs`` maps reference numbers to the board that drives them
    globally.
    """
    for board, ref in sorted(lrefs):
        local_ref = lrefs[(board, ref)]
        if local_ref in grefs:
            scope = 'global, board {}'.format(grefs[local_ref])
        else:
            scope = 'local'
        print('board {} ref {} - {} ({})'.
              format(board, ref, local_ref, scope))
def analyze_drs(protocol):
"""
go through a protocol and analyze all drs settings
"""
# for each board, store the 8 local refs
# 32..35 are the 4 local reference wires
# 36, 37 are subject ground, patient ground
# 38 seems to be specific to ATLAS
# this is a (board, ref) -> local_num dict
local_refs = dict()
# 8 ref numbers can be driven globally
# this is a ref_num -> board dict
global_refs = dict()
# each channel has a reference which
# refers to its board's local referenes
# this is a ch_num -> ref_num dict
channel_refs = dict()
| # name2num is unique
ch_name2num = dict()
# num2name is *not* unique, values are lists
ch_num2name = defaultdict(list)
# save the settings
all_setting | s = []
variables = dict()
temp_setting = None
for line in protocol:
time, timestamp, msg1, msg2 = line
if temp_setting is None:
temp_setting = Setting()
if msg1 == 'mov':
temp_setting.folder = msg2
elif '::SendDRSCommand()' in msg1:
# log all reference settings (command file and GUI interaction)
board, cmd, arg1, arg2 = drs_command_pattern.match(msg2).groups()
arg1 = int(arg1)
board = int(board, 16)
if cmd != 'hsp':
arg2 = int(arg2)
else:
arg2 = ''
if cmd == 'gbd':
# this is the global drive
# if a reference is driven globally, it overrides
# the local settings of that reference
if arg2 == 1:
global_refs[arg1] = board
print('{} is now driven by board {}'.format(arg1, board))
elif arg2 == 0:
|
privacyidea/privacyidea | tests/test_api_applications.py | Python | agpl-3.0 | 799 | 0 | """
This test case test the REST API
api/applications.py
"""
import json
from .base import MyApiTestCase
class APIApplicationsResolverTestCase(MyApiTestCase):
    """Regression tests for the /application/ REST endpoint."""

    def test_get_applications(self):
        # Query the application list as an authenticated administrator.
        # (Repaired extraction artifacts; dropped the unused `detail` local.)
        with self.app.test_request_context('/application/',
                                           method='GET',
                                           headers={'Authorization': self.at}):
            res = self.app.full_dispatch_request()
            self.assertTrue(res.status_code == 200, res)
            result = res.json.get("result")
            value = result.get("value")
            # The well-known applications must be advertised together with
            # their machine-token options.
            self.assertTrue("ssh" in value)
            self.assertTrue("luks" in value)
            self.assertTrue(value["ssh"]["options"]["optional"] == ["user"])
|
stdweird/aquilon | tests/broker/test_del_10gig_hardware.py | Python | apache-2.0 | 2,595 | 0.000385 | #!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing commands that remove virtual hardware."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestDel10GigHardware(TestBrokerCommand):
    """Module for testing commands that remove virtual hardware.

    Note: this file is Python 2 (``range(...) + range(...)`` list
    concatenation and integer ``/`` division are relied upon).
    """

    def test_200_del_hosts(self):
        # Delete the virtual hosts and verify DSDB saw each IP removal.
        for i in range(0, 8) + range(9, 17):
            hostname = "ivirt%d.aqd-unittest.ms.com" % (1 + i)
            command = "del_host --hostname %s" % hostname
            # Hosts are spread over two groups of networks; recompute the
            # network / usable-IP indices the corresponding add-test used.
            if i < 9:
                net_index = (i % 4) + 2
                usable_index = i / 4
            else:
                net_index = ((i - 9) % 4) + 6
                usable_index = (i - 9) / 4
            ip = self.net.unknown[net_index].usable[usable_index]
            self.dsdb_expect_delete(ip)
            (out, err) = self.successtest(command.split(" "))
            self.assertEmptyOut(out, command)
        self.dsdb_verify()

    def test_300_delaux(self):
        # Remove the auxiliary interfaces created on the storage network.
        for i in range(1, 25):
            hostname = "evh%d-e1.aqd-unittest.ms.com" % (i + 50)
            self.dsdb_expect_delete(self.net.vm_storage_net[0].usable[i - 1])
            command = ["del", "auxiliary", "--auxiliary", hostname]
            (out, err) = self.successtest(command)
            self.assertEmptyOut(out, command)
        self.dsdb_verify()

    def test_700_delmachines(self):
        # Remove the virtual machines themselves.
        for i in range(0, 8) + range(9, 17):
            machine = "evm%d" % (10 + i)
            self.noouttest(["del", "machine", "--machine", machine])

    def test_800_verifydelmachines(self):
        # Every machine, including ones deleted by earlier tests, is gone.
        for i in range(0, 18):
            machine = "evm%d" % (10 + i)
            command = "show machine --machine %s" % machine
            self.notfoundtest(command.split(" "))
if __name__ == '__main__':
    # Allow running this module standalone: build and run the suite verbosely.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestDel10GigHardware)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
boxed/RegexAsYouType | main.py | Python | mit | 420 | 0.004762 | #
#
# main.py
# RegexAsYouType
#
# Created by Anders Hovmoller on 5/19/09.
# Copyright Calidris 2009. All rights reserved.
#

# Import the modules required by the application (PyObjC bridge + Cocoa).
import objc
import Foundation
import AppKit
from PyObjCTools import AppHelper

# Import the module containing the classes required to start the
# application and load MainMenu.nib.
import RegexAsYouTypeAppDelegate

# Pass control to AppKit's Cocoa event loop; this call blocks until quit.
AppHelper.runEventLoop()
|
aninoy/cowsnbulls | checker.py | Python | mit | 432 | 0.032407 | # import s | ys
def check(answer, guess):
    """Score a cows-and-bulls guess against the answer.

    Both arguments are treated as digit strings.  A bull is a matching
    digit in the matching position; every other cross-position digit
    match counts as a cow (duplicate digits are counted once per pair).
    Returns ``{'bulls': ..., 'cows': ...}``.
    """
    answer_digits = str(answer)
    guess_digits = str(guess)
    # Total number of equal (answer digit, guess digit) pairs over all
    # position combinations; bulls are the same-position subset of these.
    matches = sum(guess_digits.count(digit) for digit in answer_digits)
    bulls = sum(a == g for a, g in zip(answer_digits, guess_digits))
    return {'bulls': bulls, 'cows': matches - bulls}
# print "Enter your next guess:"
# guess = raw_input()
# result = | check(9370, guess)
# print result
|
freelancer/freelancer-sdk-python | examples/delete_user_jobs.py | Python | lgpl-3.0 | 794 | 0 | from freelancersdk.re | sources.users import delete_user_jobs
from freelancersdk.session import Session
from freelancersdk.exceptions import UserJobsNotDeletedException
import os
def sample_delete_user_jobs():
    """Delete a fixed set of job ids for the authenticated user.

    Credentials are taken from the FLN_URL and FLN_OAUTH_TOKEN environment
    variables.  Returns the API result on success, or None after printing
    the error details when the deletion is rejected.
    """
    url = os.environ.get('FLN_URL')
    oauth_token = os.environ.get('FLN_OAUTH_TOKEN')
    session = Session(oauth_token=oauth_token, url=url)
    user_jobs_data = {
        'job_ids': [
            1,
            2,
            3
        ]
    }
    try:
        m = delete_user_jobs(session, **user_jobs_data)
    except UserJobsNotDeletedException as e:
        # NOTE(review): e.error_code is labelled "Server response" here --
        # confirm the label matches the SDK's attribute semantics.
        print(('Error message: %s' % e.message))
        print(('Server response: %s' % e.error_code))
        return None
    else:
        return m
# Run the example and show the server's reply when the deletion succeeded.
m = sample_delete_user_jobs()
if m:
    print(("User jobs set: %s" % m))
|
eggsandbeer/scheduler | synergy/mx/base_request_handler.py | Python | bsd-3-clause | 1,418 | 0.000705 | __author__ = 'Bohdan Mushkevych'
import functools
from werkzeug.wrappers import Request
from synergy.mx.utils import jinja_env
def valid_action_request(method):
    """Decorator: run *method* only when the handler's request is valid.

    Invalid requests are answered with reply_bad_request(); any exception
    raised by the wrapped method is converted into reply_server_error().
    """
    @functools.wraps(method)
    def _wrapper(self, *args, **kwargs):
        assert isinstance(self, BaseRequestHandler)
        if not self.is_request_valid:
            return self.reply_bad_request()
        try:
            return method(self, *args, **kwargs)
        except Exception as e:
            # A separate `except UserWarning` clause used to precede this
            # one with an identical body; it was redundant (UserWarning is
            # an Exception subclass) and has been collapsed.
            return self.reply_server_error(e)
    return _wrapper
class BaseRequestHandler(object):
    """Base class for MX request handlers.

    Wraps a werkzeug Request, exposes the scheduler mbean and provides
    the standard reply helpers used by valid_action_request.
    """

    def __init__(self, request, **values):
        assert isinstance(request, Request)
        self.scheduler = jinja_env.globals['mbean']
        self.logger = self.scheduler.logger
        self.request = request
        self.values = values
        # GET parameters win over form data when both are present.
        self.request_arguments = request.args if request.args else request.form
        # Subclasses flip this after validating the incoming arguments.
        self.is_request_valid = False

    def reply_ok(self):
        return {'status': 'OK'}

    def reply_bad_request(self):
        self.logger.error('Bad request: {0}'.format(self.request))
        return {}

    def reply_server_error(self, e):
        self.logger.error('MX Processing Exception: {0}'.format(e), exc_info=True)
        return {'status': 'Server Internal Error'}
|
glumu/django-redis-cluster | django_redis_cluster/serializers/msgpack.py | Python | bsd-3-clause | 311 | 0.003215 | #coding:ut | f8
from __future__ import absolute_import, unicode_literals
import msgpack
from .base import BaseSerializer
class MSGPackSerializer(BaseSerializer):
    """Cache value serializer backed by MessagePack."""

    def dumps(self, value):
        # Serialize to a msgpack byte string.
        return msgpack.dumps(value)

    def loads(self, value):
        # Deserialize, decoding embedded raw bytes as UTF-8 text.
        # NOTE(review): the encoding= kwarg was removed in msgpack >= 1.0;
        # confirm the pinned msgpack version still supports it.
        return msgpack.loads(value, encoding="utf-8")
|
arunkgupta/gramps | gramps/gui/editors/displaytabs/__init__.py | Python | gpl-2.0 | 2,332 | 0.013722 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
# Module-scoped logger for the displaytabs editor package.
log = logging.getLogger("gui.editors.displaytabs")
# first import models
from childmodel import ChildModel
# Then import tab classes
from grampstab import GrampsTab
from embeddedlist import EmbeddedList
from addrembedlist import AddrEmbedList
from attrembedlist import AttrEmbedList
from backreflist import BackRefList
from dataembedlist import DataEmbedList
from eventbackreflist import EventBackRefList
from eventembedlist import EventEmbedList
from familyattrembedlist import FamilyAttrEmbedList
from familyldsembedlist import FamilyLdsEmbedList
from gallerytab import GalleryTab
from ldsembedlist import LdsEmbedList
from locationembedlist import LocationEmbedList
from mediabackreflist import MediaBackRefList
from nameembedlist import NameEmbedList
from notebackreflist import NoteBackRefList
from notetab import NoteTab
from citationbackreflist import CitationBackRefList
from citationembedlist import CitationEmbedList
from personeventembedlist import PersonEventEmbedList
from personrefembedlist import PersonRefEmbedList
from personbackreflist import PersonBackRefList
from placebackreflist import PlaceBackRefList
from repoembedlist import RepoEmbedList
from surnametab import SurnameTab
from sourcebackreflist import SourceBackRefList
from webembedlist import WebEmbedList
|
fsufitch/homeweb | src/homeweb/handlers/demos/chess.py | Python | gpl-2.0 | 231 | 0.004329 | from tornado.web import RequestHandler
from homeweb.util imp | ort apply_template, write_return
class ChessBoardHandler(RequestHandler):
    """Serves the chess demo page."""

    @write_return
    @apply_template("demos/chess.html")
    def get(self):
        # No template context is needed; the template renders statically.
        return {}
|
jjhelmus/adventofcode | day01.py | Python | mit | 390 | 0.002564 | from __future__ import print_function
# Part One: the final floor is the count of '(' minus the count of ')'.
# The with-block closes the input file deterministically (the handle was
# previously leaked).
with open('inputs/input_01.txt') as f:
    contents = f.read()
print("Floor:", contents.count('(') - contents.count(')'))
# Part Two: 1-based position of the first character that takes Santa to
# the basement (floor -1).
change = {'(': 1, ')': -1}
floor = 0
position = 1
for c in contents:
    if c in change:
        floor += change[c]
    if floor == -1:
        print("Basement entered at position:", position)
        break
    position += 1
chokribr/inveniotest | modules/websearch/lib/websearch_regression_tests.py | Python | gpl-2.0 | 256,725 | 0.003771 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0301
# pylint: disable=E1102
"""WebSearch module regression tests."""
__revision__ = "$Id$"
from invenio.testutils import InvenioTestCase, InvenioXmlTestCase
import re
import urlparse, cgi
import sys
import cStringIO
if sys.hexversion < 0x2040000:
# pylint: disable=W0622
from sets import Set as set
# pylint: enable=W0622
from mechanize import Browser, LinkNotFoundError
from invenio.config import (CFG_SITE_URL,
CFG_SITE_NAME,
CFG_SITE_LANG,
CFG_SITE_RECORD,
CFG_SITE_LANGS,
CFG_SITE_SECURE_URL,
CFG_WEBSEARCH_SPIRES_SYNTAX,
CFG_BASE_URL)
from invenio.testutils import (make_test_suite,
run_test_suite,
nottest,
make_url,
make_surl,
make_rurl,
test_web_page_content,
merge_error_messages,
InvenioXmlTestCase)
from invenio.urlutils import same_urls_p
from invenio.dbquery import run_sql
from invenio.webinterface_handler_wsgi import SimulatedModPythonRequest
from invenio.search_engine import perform_request_search, \
guess_primary_collection_of_a_record, guess_collection_of_a_record, \
collection_restricted_p, get_permitted_restricted_collections, \
search_pattern, search_unit, search_unit_in_bibrec, \
wash_colls, record_public_p
from invenio import search_engine_summarizer
from invenio.search_engine_utils import get_fieldvalues
from invenio.intbitset import intbitset
from invenio.search_engine import inter | sect_results_with_collrecs
from invenio.bibrank_bridge_utils import get_external_word_similarity_ranker
from invenio.search_engine_query_parser_unit_tests import DATEUTIL_AVAILABLE
from invenio.bibindex_engine_utils import get_index_tags
from invenio.bibindex_engine_config import CFG_BIBINDEX_INDEX_TABLE_TYPE
# True when the site is configured with a French localisation; several
# tests below are only generated in that case.
lang_french_configured = 'fr' in CFG_SITE_LANGS
def parse_url(url):
    """Split *url* into (path components below the site root, query dict).

    parts[2] is the URL path and parts[4] the raw query string; blank
    query values are kept (second argument True).
    """
    parts = urlparse.urlparse(url)
    query = cgi.parse_qs(parts[4], True)  # Python 2 API (cgi.parse_qs)
    return parts[2].split('/')[1:], query
def string_combinations(str_list):
    """Return all combinations of the strings in the list.

    Example: for the list ['A','B','Cd'] every subset of the input appears
    exactly once.  "B", "H", "F" and "S" output-format flags are appended
    to the results, cycling with the subset's position, so that different
    combinations of them are also checked.
    """
    subsets = []
    for size in range(len(str_list) + 1):
        subsets += list(combinations(str_list, size))
    # Seven flag suffixes are applied round-robin over the subset list.
    flag_cycle = [
        ["B", "H", "S"],
        ["B", "H", "F"],
        ["B", "F", "S"],
        ["B", "F"],
        ["B", "S"],
        ["B", "H"],
        ["B"],
    ]
    return [list(subset) + flag_cycle[pos % 7]
            for pos, subset in enumerate(subsets)]
def combinations(iterable, r):
    """Return r length subsequences of elements from the input iterable."""
    # combinations('ABCD', 2) --> AB AC AD BC BD CD
    # combinations(range(4), 3) --> 012 013 023 123
    pool = tuple(iterable)
    n = len(pool)
    if r > n:
        return
    # list() is required: on Python 3, range() returns an immutable object
    # and the index-advancing assignments below would raise TypeError.
    indices = list(range(r))
    yield tuple(pool[i] for i in indices)
    while True:
        # Find the rightmost index that has not yet reached its maximum.
        for i in reversed(range(r)):
            if indices[i] != i + n - r:
                break
        else:
            return
        indices[i] += 1
        # Reset all indices to the right of i to their smallest values.
        for j in range(i + 1, r):
            indices[j] = indices[j - 1] + 1
        yield tuple(pool[i] for i in indices)
class WebSearchWebPagesAvailabilityTest(InvenioTestCase):
"""Check WebSearch web pages whether they are up or not."""
    def test_search_interface_pages_availability(self):
        """websearch - availability of search interface pages"""
        baseurl = CFG_SITE_URL + '/'
        _exports = ['', 'collection/Poetry', 'collection/Poetry?as=1']
        # Collect the problems from every page and fail once with all of them.
        error_messages = []
        for url in [baseurl + page for page in _exports]:
            error_messages.extend(test_web_page_content(url))
        if error_messages:
            self.fail(merge_error_messages(error_messages))
        return
    def test_search_results_pages_availability(self):
        """websearch - availability of search results pages"""
        baseurl = CFG_SITE_URL + '/search'
        _exports = ['', '?c=Poetry', '?p=ellis', '/cache', '/log']
        # Collect the problems from every page and fail once with all of them.
        error_messages = []
        for url in [baseurl + page for page in _exports]:
            error_messages.extend(test_web_page_content(url))
        if error_messages:
            self.fail(merge_error_messages(error_messages))
        return
    def test_search_detailed_record_pages_availability(self):
        """websearch - availability of search detailed record pages"""
        baseurl = CFG_SITE_URL + '/'+ CFG_SITE_RECORD +'/'
        _exports = ['', '1', '1/', '1/files', '1/files/']
        # Collect the problems from every page and fail once with all of them.
        error_messages = []
        for url in [baseurl + page for page in _exports]:
            error_messages.extend(test_web_page_content(url))
        if error_messages:
            self.fail(merge_error_messages(error_messages))
        return
    def test_browse_results_pages_availability(self):
        """websearch - availability of browse results pages"""
        baseurl = CFG_SITE_URL + '/search'
        _exports = ['?p=ellis&f=author&action_browse=Browse']
        # Collect the problems from every page and fail once with all of them.
        error_messages = []
        for url in [baseurl + page for page in _exports]:
            error_messages.extend(test_web_page_content(url))
        if error_messages:
            self.fail(merge_error_messages(error_messages))
        return
    def test_help_page_availability(self):
        """websearch - availability of Help Central page"""
        # The page is up when test_web_page_content returns no error messages.
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/help',
                                               expected_text="Help Central"))
    # Generated only when the site is configured with a French localisation.
    if lang_french_configured:
        def test_help_page_availability_fr(self):
            """websearch - availability of Help Central page in french"""
            self.assertEqual([],
                             test_web_page_content(CFG_SITE_URL + '/help/?ln=fr',
                                                   expected_text="Centre d'aide"))
    def test_search_tips_page_availability(self):
        """websearch - availability of Search Tips"""
        # The page is up when test_web_page_content returns no error messages.
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/help/search-tips',
                                               expected_text="Search Tips"))
    # Generated only when the site is configured with a French localisation.
    if lang_french_configured:
        def test_search_tips_page_availability_fr(self):
            """websearch - availability of Search Tips in french"""
            self.assertEqual([],
                             test_web_page_content(CFG_SITE_URL + '/help/search-tips?ln=fr',
                                                   expected_text="Conseils de recherche"))
def test_search_guide_page_availability(self):
"""websearch - availability of Search Guide |
stackingfunctions/scrapeforum | python/src/mylogger.py | Python | gpl-3.0 | 862 | 0.00232 | import os
import logging.config
class MyLogger(object):
    """Logger that reports to both a file and the screen.

    Configuration is loaded from ../config/logging.conf; an extra
    StreamHandler echoes messages to the console.  Logging at error
    level or above latches ``errorIndicated`` to True.
    """

    def __init__(self):
        logging.config.fileConfig('../config/logging.conf')
        self.logger = logging.getLogger('scrapeforum')
        self.logger.addHandler(logging.StreamHandler())
        # Latched once error/critical/fatal has been logged.
        self.errorIndicated = False

    def isErrorIndicated(self):
        return self.errorIndicated

    def debug(self, msg):
        self.logger.debug(msg)

    def info(self, msg):
        self.logger.info(msg)

    def warning(self, msg):
        self.logger.warning(msg)

    def error(self, msg):
        self.logger.error(msg)
        self.errorIndicated = True

    def critical(self, msg):
        self.logger.critical(msg)
        self.errorIndicated = True

    def fatal(self, msg):
        # logging's fatal() is an alias of critical().
        self.logger.fatal(msg)
        self.errorIndicated = True
|
myd7349/DiveIntoPython3Practices | chapter_11_Files/read_line.py | Python | lgpl-3.0 | 804 | 0.003759 | # -*- coding: utf-8 -*-
# 2014-11-24 22:43
# Print every line of the file preceded by its 1-based line number,
# right-justified in a 4-character column ({:>4}).  Iterating the stream
# object yields one line at a time; enumerate(a_file, 1) replaces the
# previous manually-maintained counter.
with open('favorite-people.txt', encoding='utf-8') as a_file:
    for line_number, a_line in enumerate(a_file, 1):
        print('{:>4} {}'.format(line_number, a_line.rstrip()))
|
karllessard/tensorflow | tensorflow/python/distribute/mirrored_run.py | Python | apache-2.0 | 19,404 | 0.005978 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class MirroredStrategy implementing tf.distribute.Strategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import functools
import threading
import weakref
from tensorflow.python import pywrap_tfe
from tensorflow.python.autograph.core import ag_ctx as autograph_ctx
from tensorflow.python.autograph.impl import api as autograph
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import shared_variable_creator
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import coordinator
def call_for_each_replica(strategy, fn, args=None, kwargs=None):
  """Call `fn` on each worker devices(replica).
  It's highly recommended to wrap the call to this function inside a
  `tf.function`, otherwise the performance is poor.
  Args:
    strategy: `tf.distribute.Strategy`.
    fn: function to call on each worker devices.
    args: positional arguments to `fn`.
    kwargs: keyword arguments to `fn`.
  Returns:
    Wrapped returned value of `fn` from all replicas.
  """
  # Normalize the optional call arguments.
  if args is None:
    args = ()
  if kwargs is None:
    kwargs = {}
  # tf.function path: cache one wrapped Function per (strategy, fn) pair.
  # WeakKeyDictionary keeps the cache from pinning strategies or functions
  # alive.
  if isinstance(fn, def_function.Function):
    if strategy not in _cfer_fn_cache:
      _cfer_fn_cache[strategy] = weakref.WeakKeyDictionary()
    wrapped = _cfer_fn_cache[strategy].get(fn)
    if wrapped is None:
      # We need to wrap fn such that it triggers _call_for_each_replica inside
      # the tf.function. We use _clone() instead of @tf.function wrapped
      # call_for_each_replica() because we would like to retain the arguments to
      # the @tf.function decorator of fn.
      wrapped = fn._clone(  # pylint: disable=protected-access
          python_function=functools.partial(call_for_each_replica, strategy,
                                            fn.python_function))
      _cfer_fn_cache[strategy][fn] = wrapped
    return wrapped(args, kwargs)
  if context.executing_eagerly():
    logging.log_first_n(
        logging.WARN, "Using %s eagerly has significant "
        "overhead currently. We will be working on improving "
        "this in the future, but for now please wrap "
        "`call_for_each_replica` or `experimental_run` or "
        "`run` inside a tf.function to get "
        "the best performance." % strategy.__class__.__name__, 5)
  else:
    # When a tf.function is wrapped to trigger _call_for_each_replica (see
    # the other branch above), AutoGraph stops conversion at
    # _call_for_each_replica itself (TF library functions are allowlisted).
    # This makes sure that the Python function that originally passed to
    # the tf.function is still converted.
    fn = autograph.tf_convert(fn, autograph_ctx.control_status_ctx())
  return _call_for_each_replica(strategy, fn, args, kwargs)
# Per strategy cache for call_for_each_replica def_function.Function objects.
_cfer_fn_cache = weakref.WeakKeyDictionary()
@contextlib.contextmanager
def _enter_graph(g, eager, creator_stack=None):
  """Context manager for selecting a graph and maybe eager mode."""
  # Both branches enter the graph's default scope and, when supplied,
  # install the caller's variable-creator stack on that graph before
  # yielding; the eager branch additionally enters eager mode.
  if eager:
    with g.as_default(), context.eager_mode():
      if creator_stack is not None:
        g._variable_creator_stack = creator_stack  # pylint: disable=protected-access
      yield
  else:
    with g.as_default():
      if creator_stack is not None:
        g._variable_creator_stack = creator_stack  # pylint: disable=protected-access
      yield
def _cpu_device(device):
  """Return *device* rewritten to address CPU 0 on the same host."""
  spec = tf_device.DeviceSpec.from_string(device)
  return spec.replace(device_type="CPU", device_index=0).to_string()
class _RequestedStop(Exception):  # pylint: disable=g-bad-exception-name
  """Raised through the Coordinator to request a clean stop of replica threads."""
  pass
def _call_for_each_replica(distribution, fn, args, kwargs):
"""Run `fn` in separate threads, once per replica/worker device.
Args:
distribution: the DistributionStrategy object.
fn: function to run (will be run once per replica, each in its own thread).
args: positional arguments for `fn`
kwargs: keyword arguments for `fn`.
Returns:
Merged return value of `fn` across all replicas.
Raises:
RuntimeError: If fn() calls get_replica_context().merge_call() a different
number of times from the available devices.
"""
# TODO(josh11b): Add this option once we add synchronization to variable
# creation. Until then, this is pretty unsafe to use.
run_concurrently = False
if not context.executing_eagerly():
# Needed for per-thread device, etc. contexts in graph mode.
ops.get_default_graph().switch_to_thread_local()
coord = coordinator.Coordinator(clean_stop_exception_types=(_RequestedStop,))
shared_variable_store = {}
devices = distribution.extended.worker_devices
# TODO(isaprykin): Create these threads once instead of during every call.
threads = []
for index in range(len(devices)):
variable_creator_fn = shared_variable_creator.make_fn(
shared_variable_store, index)
t = _MirroredReplicaThread(
distribution, coord, index, devices, variable_creator_fn, fn,
distribute_utils.select_replica(index, args),
distribute_utils.select_replica(index, kwargs))
threads.append(t)
for t in threads:
t.start()
# When `fn` starts `should_run` event is set on _MirroredReplicaThread
# (`MRT`) threads. The execution waits until
# `MRT.has_paused` is set, which indicates that either `fn` is
# complete or a `get_replica_context().merge_call()` is called. If `fn` is
# complete, then `MRT.done` is set to True. Otherwise, arguments
# of `get_replica_context().merge_call` from all paused threads are grouped
# and the `merge_fn` is performed. Results of the
# `get_replica_context().merge_call` are then set to `MRT.merge_result`.
# Each such `get_replica_context().merge_call` call returns the
# `MRT.merge_result` for that thread when `MRT.should_run` event
# is reset again. Execution of `fn` resumes.
try:
with coord.stop_on_exception():
all_done = False
while not all_done and not coord.should_stop():
done = []
if run_concurrently:
for t in threads:
t.should_run.set()
for t in threads:
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
else:
for t in threads:
t.should_run.set()
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
if coord.should_stop():
return None
all_done = all(done)
if not all_done:
if any(done):
raise RuntimeError("Some replicas made a different number of "
"replica_context().merge_call() calls.")
# get_replica_context().merge_call() case
merge_args = distribute_utils.regroup(
tuple(t.merge_args for t in threads))
merge_kwargs = distribute_utils.regroup(
|
mediatum/mediatum | core/nodecache.py | Python | gpl-3.0 | 1,485 | 0.006061 | # -*- coding: utf-8 -*-
"""
 :copyright: (c) 2016 by the mediaTUM authors
:license: GPL3, see COPYING for details
"""
from __future__ import absolute_import
from sqlalchemy.orm import undefer, joinedload
from sqlalchemy.orm.exc import NoResultFound
from core import db as _db
from utils.lrucache import lru_cache as _lru_cache
@_lru_cache(maxsize=128)
def get_singleton_node_from_cache(nodeclass):
    """Returns the singleton instance for the given node class.
    Fetches the requested singleton from the DB if it's not in the cache.
    """
    # Eagerly load attrs, system_attrs and file_objects so cached instances
    # can be used without further DB access; .one() asserts a singleton.
    return _db.session.query(nodeclass).options(undefer(nodeclass.attrs),
                                                undefer(nodeclass.system_attrs),
                                                joinedload(nodeclass.file_objects)).one()
def get_root_node():
    """Root object may not change during runtime, so we can cache it indefinitely"""
    # Imported here, presumably to avoid a circular import -- confirm.
    from core.systemtypes import Root
    # merge(load=False) attaches the cached instance to the current
    # session without re-querying the database.
    return _db.session.merge(get_singleton_node_from_cache(Root), load=False)
def get_collections_node():
    """Collections object may not change during runtime, so we can cache it indefinitely"""
    # Imported here, presumably to avoid a circular import -- confirm.
    from contenttypes import Collections
    # merge(load=False) attaches the cached instance to the current
    # session without re-querying the database.
    return _db.session.merge(get_singleton_node_from_cache(Collections), load=False)
def get_home_root_node():
    """Home object may not change during runtime, so we can cache it indefinitely"""
    # Imported here, presumably to avoid a circular import -- confirm.
    from contenttypes import Home
    # merge(load=False) attaches the cached instance to the current
    # session without re-querying the database.
    return _db.session.merge(get_singleton_node_from_cache(Home), load=False)
|
josauder/procedural_city_generation | procedural_city_generation/building_generation/roofs.py | Python | mpl-2.0 | 6,492 | 0.008164 | # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
import numpy.linalg as la
import matplotlib.pyplot as plt
from procedural_city_generation.building_generation.cuts import *
from procedural_city_generation.building_generation.building_tools import *
from procedural_city_generation.building_generation.Polygon3D import Polygon3D
from procedural_city_generation.additional_stuff.Singleton import Singleton
singleton=Singleton("building_generation")
def roof(walls, roofwalls, currentheight, housebool, texture, texture2=None):
    """Builds a roof on top of a house, depending on housetype.

    Houses whose roof outline has exactly four walls get the classic
    gabled roof; every other building gets a flat roof with a box on top.
    The roof height is drawn uniformly from the configured min/max range.

    Parameters
    ----------
    walls : Walls object with cuts
    roofwalls : Walls object prior to cuts
    currentheight : float
        Z coordinate of the base of the roof
    housebool : boolean
        Whether the building is a house
    texture : Texture object for the roof
    texture2 (optional) : Texture object for extra elements (box roof);
        defaults to ``texture``

    Returns
    -------
    list of Polygon3D objects
    """
    height = np.random.uniform(singleton.roofheight_min,
                               singleton.roofheight_max)
    if housebool and roofwalls.l == 4:
        return houseroof(roofwalls, currentheight, height, texture)
    return kastenroof(walls, roofwalls, currentheight, height, texture, texture2)
def houseroof(walls, currentheight, roofheight, texture):
    """Creates a "classic" roof with two triangles and two rectangles.
    Used only for houses and assumes that the house has 4 sides.
    Parameters
    -----------
    walls : procedural_city_generation.building_generation.Walls object
    currentheight : float
        Current height, Z coordinate of the base of the roof
    roofheight : float
        Height of the roof itself
    texture : procedural_city_generation.building_generation.Texture object
    Returns
    -------
    list<procedural_city_generation.building_generation.Polygon3D object>
    """
    #Differentiation: the shorter of the first two walls is to be cut in half
    # (rolling the vertex list rotates which wall is "first" so the gable
    # always sits over the shorter pair of opposing walls)
    if not np.linalg.norm(np.diff(walls.getWalls()[0], axis=0))<np.linalg.norm(np.diff(walls.getWalls()[1], axis=0)):
        walls=Walls(np.roll(walls.vertices, 1, axis=0), walls.l)
    h_low=np.array([0, 0, currentheight])
    h_high=h_low+np.array([0, 0, roofheight])
    #The gable coordinates: midpoints of the two opposing walls, raised
    #by the roof height
    c1, c2=sum(walls.getWalls()[0]/2), sum(walls.getWalls()[2]/2)
    #Verts are the vertices of the wall and the vertices of the gable
    verts=[x+h_low for x in walls.vertices]+[c1+h_high, c2+h_high]
    #Faces are two rectangles and two triangles
    faces=[(0, 1, 5, 4), (3, 2, 5, 4), (0, 3, 4), (1, 2, 5)]
    return [Polygon3D(verts, faces, texture)]
def kastenroof(walls, roofwalls, currentheight, roofheight, texture, texture2=None):
    """
    Creates a flat roof with a box on top.
    Parameters
    -----------
    walls : procedural_city_generation.building_generation.Walls object
        Walls object after cuts
    roofwalls : procedural_city_generation.building_generation.Walls object
        Walls object prior to cuts
    currentheight : float
        Current height, Z coordinate of the base of the roof
    roofheight : float
        Height of the roof itself
    texture : procedural_city_generation.building_generation.Texture object
    texture2 (optional): procedural_city_generation.building_generation.Texture object
        Will default to texture if not specified
    Returns
    -----------
    - list<procedural_city_generation.building_generation.Polygon3D object>
    """
    #Texture2 is optional: if not given it will be texture1
    texture2= texture2 if texture2 else texture
    #TODO: Move numeric values to conf.
    #Box is a scaled down version of the roofwalls
    box=scaletransform(roofwalls, random.uniform(0.07, 0.14))
    if not roofwalls.l == 4:
        #Constructs a box with 4 sides if the box did not have 4 sides
        # (n is the first edge rotated 90 degrees in the XY plane)
        a, b=box.vertices[0], box.vertices[1]
        n=(b-a)
        n=np.array([-n[1], n[0], 0])
        box=Walls(np.array([a, b, b+n, a+n]), 4)
    #Checks if every vertex of the box is "inside" the roof polygon so that the box does not float.
    #If this is not the case for every vertex, then just a flat roof is built
    for vert in box.vertices:
        if not p_in_poly(walls, vert):
            return [Polygon3D(walls.vertices+np.array([0, 0, currentheight]), [range(walls.l)], texture)]
    #List of the walls and the top of the box and the flat roof
    return [buildwalls(box, currentheight, currentheight+roofheight, texture2),
        Polygon3D(box.vertices+np.array([0, 0, currentheight+roofheight]), [range(4)], texture2),
        Polygon3D(walls.vertices+np.array([0, 0, currentheight]), [range(walls.l)], texture)]
def isleft(wall, point):
    """Cross-product side test for the winding-number algorithm.

    Helper function for p_in_poly.
    Taken from: http://geomalgorithms.com/a03-_inclusion.html, all credits
    to Dan Sunday.

    Parameters
    ----------
    wall : numpy-array with shape 3, 2
        Two endpoints of a wall segment (only x/y components are used).
    point : numpy-array with shape 3, 1
        Point to classify (only x/y components are used).

    Returns
    -------
    float
        > 0 when *point* lies left of the directed segment, < 0 when it
        lies right, 0 when collinear.
    """
    ax, ay = wall[0][0], wall[0][1]
    bx, by = wall[1][0], wall[1][1]
    px, py = point[0], point[1]
    return (bx - ax) * (py - ay) - (px - ax) * (by - ay)
def p_in_poly(walls, point):
    """
    Returns True if a point is in a "walls" polygon, else False.
    Winding-number test, taken from:
    http://geomalgorithms.com/a03-_inclusion.html, all credits to Dan Sunday.
    Parameters
    ----------
    walls : procedural_city_generation.building_generation.walls object
    point : np.ndarray with shape (3, )
    Returns
    ----------
    boolean
    """
    #counter accumulates the winding number of the polygon around the point.
    counter=0
    for wall in walls.getWalls():
        #Wall starts at or below the point's y: upward-crossing candidate.
        if wall[0][1] <= point[1]:
            #Wall ends above the point: counts when the point is left of it.
            if wall[1][1] > point[1]:
                if isleft(wall, point) >0:
                    counter+=1
        else:
            #Downward-crossing candidate.
            #NOTE(review): Sunday's reference implementation additionally
            #requires wall[1][1] <= point[1] before decrementing -- confirm
            #this omission is intentional.
            if isleft(wall, point) <0:
                counter-=1
    #A non-zero winding number means the point lies inside the polygon.
    if counter!=0:
        return True
    return False
|
olavph/builds | lib/versions_repository.py | Python | gpl-3.0 | 2,326 | 0.00086 | import logging
import os
from lib import exception
from lib import repository
from lib.constants import REPOSITORIES_DIR
LOG = logging.getLogger(__name__)
def get_versions_repository(config):
    """
    Get the packages metadata Git repository, cloning it if it does not
    yet exist.

    Args:
        config (dict): configuration dictionary

    Returns:
        GitRepository: the packages metadata repository

    Raises:
        exception.RepositoryError: if the clone is unsuccessful
    """
    repo_path = os.path.join(config.get('work_dir'), REPOSITORIES_DIR)
    repo_url = config.get('packages_metadata_repo_url')
    repo_name = "versions_{subcommand}".format(
        subcommand=config.get('subcommand'))
    try:
        return repository.get_git_repository(repo_url, repo_path, repo_name)
    except exception.RepositoryError:
        LOG.error("Failed to clone versions repository")
        raise
def setup_versions_repository(config):
    """
    Prepare the packages metadata Git repository: clone it and check out
    the chosen branch.

    Args:
        config (dict): configuration dictionary

    Returns:
        GitRepository: repository checked out at the configured branch

    Raises:
        exception.RepositoryError: if the clone or checkout are
            unsuccessful
    """
    repo = get_versions_repository(config)
    try:
        repo.checkout(config.get('packages_metadata_repo_branch'),
                      config.get('packages_metadata_repo_refspecs'))
    except exception.RepositoryError:
        LOG.error("Failed to checkout versions repository")
        raise
    return repo
def read_version_and_milestone(versions_repo):
    """
    Read current version and milestone (alpha or beta) from VERSION file.

    Args:
        versions_repo (GitRepository): packages metadata git repository

    Returns:
        str: version and milestone in the format <version>-<milestone>,
            valid milestone values: alpha, beta. Empty string when the
            file has no second line.
    """
    version_file_path = os.path.join(
        versions_repo.working_tree_dir,
        'open-power-host-os', 'CentOS', '7', 'SOURCES', 'VERSION')
    with open(version_file_path, 'r') as version_file:
        # First line only carries file format information; the second
        # holds the actual <version>-<milestone> string.
        header_and_version = [version_file.readline() for _ in range(2)]
    return header_and_version[1].strip('\n')
|
davidwaroquiers/custodian | tasks.py | Python | mit | 1,962 | 0.001529 | """
Deployment file to facilitate releases of custodian.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyr | ight 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Apr 29, 2012"
import glob
from invoke import task
from monty.os import cd
from custodian import __version__ as ver
@task
def make_doc(ctx):
    """Regenerate the Sphinx API stubs and build the HTML documentation.

    After sphinx-apidoc runs, every docs/custodian*.rst file is rewritten:
    test modules are dropped and buffered "Subpackages" sections are folded
    back in ahead of the next custodian.* heading.
    """
    with cd("docs"):
        ctx.run("sphinx-apidoc -o . -f ../custodian")
        ctx.run("rm custodian*.tests.rst")
    for f in glob.glob("docs/*.rst"):
        if f.startswith('docs/custodian') and f.endswith('rst'):
            newoutput = []
            suboutput = []
            subpackage = False
            with open(f, 'r') as fid:
                for line in fid:
                    clean = line.strip()
                    if clean == "Subpackages":
                        # From here on, buffer lines instead of copying them.
                        subpackage = True
                    if not subpackage and not clean.endswith("tests"):
                        newoutput.append(line)
                    else:
                        if not clean.endswith("tests"):
                            suboutput.append(line)
                        if clean.startswith("custodian") and not clean.endswith("tests"):
                            # Flush the buffered subpackage block before the
                            # next custodian.* heading, then resume copying.
                            newoutput.extend(suboutput)
                            subpackage = False
                            suboutput = []
            with open(f, 'w') as fid:
                fid.write("".join(newoutput))
    # NOTE(review): runs from the repo root, not docs/ -- confirm the
    # top-level Makefile forwards the "html" target to the docs build.
    ctx.run("make html")
@task
def publish(ctx):
    """Build and upload the release via setup.py's "release" command."""
    ctx.run("python setup.py release")
@task
def test(ctx):
    """Run the test suite with nose."""
    ctx.run("nosetests")
@task
def setver(ctx):
    """Stamp the current version (custodian.__version__) into setup.py."""
    # sed rewrites the version=... line into a temp file that then
    # replaces setup.py in place.
    ctx.run("sed s/version=.*,/version=\\\"{}\\\",/ setup.py > newsetup".format(ver))
    ctx.run("mv newsetup setup.py")
@task
def release(ctx):
    """Full release pipeline: set version, run tests, build docs, publish."""
    setver(ctx)
    test(ctx)
    make_doc(ctx)
    publish(ctx)
|
lukw00/powerline | setup.py | Python | mit | 4,260 | 0.026069 | #!/usr/bin/env python
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import os
import sys
import subprocess
import logging
import shlex
from traceback import print_exc
from setuptools import setup, find_packages
# Absolute path of the directory containing this setup.py.
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
# Long description for PyPI; empty when README.rst is unavailable.
try:
	README = open(os.path.join(CURRENT_DIR, 'README.rst'), 'rb').read().decode('utf-8')
except IOError:
	README = ''
# Used below to skip test_suite on Python < 2.7.
OLD_PYTHON = sys.version_info < (2, 7)
def compile_client():
	'''Compile the C powerline-client script.

	Builds client/powerline.c into scripts/powerline with the platform's
	default C compiler, honoring the CFLAGS environment variable
	(default: -O3). Not implemented on Windows.
	'''
	if hasattr(sys, 'getwindowsversion'):
		raise NotImplementedError()
	else:
		from distutils.ccompiler import new_compiler
		compiler = new_compiler().compiler
		cflags = os.environ.get('CFLAGS', str('-O3'))
		# A normal split would do a split on each space which might be incorrect. The
		# shlex will not split if a space occurs in an arguments value.
		subprocess.check_call(compiler + shlex.split(cflags) + ['client/powerline.c', '-o', 'scripts/powerline'])
# Try to build the fast C client; fall back to the shell or Python client
# when compilation fails (e.g. no C compiler available).
try:
	compile_client()
except Exception as e:
	print('Compiling C version of powerline-client failed')
	logging.exception(e)
	# FIXME Catch more specific exceptions
	import shutil
	if hasattr(shutil, 'which'):
		which = shutil.which
	else:
		# Python 2 has no shutil.which; use powerline's own implementation.
		sys.path.append(CURRENT_DIR)
		from powerline.lib.shell import which
	if which('socat') and which('sed') and which('sh'):
		print('Using powerline.sh script instead of C version (requires socat, sed and sh)')
		shutil.copyfile('client/powerline.sh', 'scripts/powerline')
		can_use_scripts = True
	else:
		print('Using powerline.py script instead of C version')
		shutil.copyfile('client/powerline.py', 'scripts/powerline')
		can_use_scripts = True
else:
	# Compiled binary: cannot be installed via scripts= (see the comment
	# on setup() below), hence the flag stays False.
	can_use_scripts = False
def get_version():
	'''Return the package version, with a git-revision local suffix.

	When the current git HEAD can be resolved, returns
	``<base>+git.<sha>``; otherwise (e.g. building from a tarball without
	git) the traceback is printed and the base version is returned.
	'''
	base_version = '2.2'
	base_version += '.dev9999'
	try:
		# check_output returns bytes on Python 3; decode before
		# concatenating, otherwise str() would embed "b'<sha>'" and
		# produce an invalid version string.
		head = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
		return base_version + '+git.' + head.decode('ascii').strip()
	except Exception:
		print_exc()
		return base_version
setup(
name='powerline-status',
version=get_version(),
description='The ultimate statusline/prompt utility.',
long_description=README,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Plugins',
'Intended Audience :: End Users/Desktop',
'Licens | e :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'P | rogramming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
download_url='https://github.com/powerline/powerline/archive/develop.zip',
author='Kim Silkebaekken',
author_email='kim.silkebaekken+vim@gmail.com',
url='https://github.com/powerline/powerline',
license='MIT',
# XXX Python 3 doesn’t allow compiled C files to be included in the scripts
# list below. This is because Python 3 distutils tries to decode the file to
# ASCII, and fails when powerline-client is a binary.
#
# XXX Python 2 fucks up script contents*. Not using it to install scripts
# any longer.
# * Consider the following input:
# % alias hex1=$'hexdump -e \'"" 1/1 "%02X\n"\''
# % diff <(hex1 ./scripts/powerline) <(hex1 ~/.local/bin/powerline)
# This will show output like
# 375c375
# < 0D
# ---
# > 0A
# (repeated, with diff segment header numbers growing up).
#
# FIXME Current solution does not work with `pip install -e`. Still better
# then solution that is not working at all.
scripts=[
'scripts/powerline-lint',
'scripts/powerline-daemon',
'scripts/powerline-render',
'scripts/powerline-config',
] + (['scripts/powerline'] if can_use_scripts else []),
data_files=(None if can_use_scripts else (('bin', ['scripts/powerline']),)),
keywords='',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
install_requires=[],
extras_require={
'docs': [
'Sphinx',
'sphinx_rtd_theme',
],
},
test_suite='tests' if not OLD_PYTHON else None,
)
|
home-assistant/home-assistant | homeassistant/components/switchbot/__init__.py | Python | apache-2.0 | 4,051 | 0.001234 | """Support for Switchbot devices."""
from asyncio import Lock
import switchbot # pylint: disable=import-error
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_SENSOR_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import (
ATTR_BOT,
ATTR_CURTAIN,
BTLE_LOCK,
COMMON_OPTIONS,
CONF_RETRY_COUNT,
CONF_RETRY_TIMEOUT,
CONF_SCAN_TIMEOUT,
CONF_TIME_BETWEEN_UPDATE_COMMAND,
DATA_COORDINATOR,
DEFAULT_RETRY_COUNT,
DEFAULT_RETRY_TIMEOUT,
DEFAULT_SCAN_TIMEOUT,
DEFAULT_TIME_BETWEEN_UPDATE_COMMAND,
DOMAIN,
)
from .coordinator import SwitchbotDataUpdateCoordinator
# Entity platforms to set up for each supported Switchbot device type.
PLATFORMS_BY_TYPE = {
    ATTR_BOT: [Platform.SWITCH, Platform.SENSOR],
    ATTR_CURTAIN: [Platform.COVER, Platform.BINARY_SENSOR, Platform.SENSOR],
}
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Switchbot from a config entry."""
    hass.data.setdefault(DOMAIN, {})
    if not entry.options:
        # First setup after the config flow: seed the entry with defaults.
        options = {
            CONF_TIME_BETWEEN_UPDATE_COMMAND: DEFAULT_TIME_BETWEEN_UPDATE_COMMAND,
            CONF_RETRY_COUNT: DEFAULT_RETRY_COUNT,
            CONF_RETRY_TIMEOUT: DEFAULT_RETRY_TIMEOUT,
            CONF_SCAN_TIMEOUT: DEFAULT_SCAN_TIMEOUT,
        }
        hass.config_entries.async_update_entry(entry, options=options)
    # Use same coordinator instance for all entities.
    # Uses BTLE advertisement data, all Switchbot devices in range is stored here.
    if DATA_COORDINATOR not in hass.data[DOMAIN]:
        # Check if asyncio.lock is stored in hass data.
        # BTLE has issues with multiple connections,
        # so we use a lock to ensure that only one API request is reaching it at a time:
        if BTLE_LOCK not in hass.data[DOMAIN]:
            hass.data[DOMAIN][BTLE_LOCK] = Lock()
        if COMMON_OPTIONS not in hass.data[DOMAIN]:
            hass.data[DOMAIN][COMMON_OPTIONS] = {**entry.options}
        # Propagate the retry timeout to the switchbot library's module-level default.
        switchbot.DEFAULT_RETRY_TIMEOUT = hass.data[DOMAIN][COMMON_OPTIONS][
            CONF_RETRY_TIMEOUT
        ]
        # Store api in coordinator.
        coordinator = SwitchbotDataUpdateCoordinator(
            hass,
            update_interval=hass.data[DOMAIN][COMMON_OPTIONS][
                CONF_TIME_BETWEEN_UPDATE_COMMAND
            ],
            api=switchbot,
            retry_count=hass.data[DOMAIN][COMMON_OPTIONS][CONF_RETRY_COUNT],
            scan_timeout=hass.data[DOMAIN][COMMON_OPTIONS][CONF_SCAN_TIMEOUT],
            api_lock=hass.data[DOMAIN][BTLE_LOCK],
        )
        hass.data[DOMAIN][DATA_COORDINATOR] = coordinator
    else:
        coordinator = hass.data[DOMAIN][DATA_COORDINATOR]
    await coordinator.async_config_entry_first_refresh()
    if not coordinator.last_update_success:
        # No usable BTLE data yet -- ask Home Assistant to retry setup later.
        raise ConfigEntryNotReady
    entry.async_on_unload(entry.add_update_listener(_async_update_listener))
    hass.data[DOMAIN][entry.entry_id] = {DATA_COORDINATOR: coordinator}
    sensor_type = entry.data[CONF_SENSOR_TYPE]
    hass.config_entries.async_setup_platforms(entry, PLATFORMS_BY_TYPE[sensor_type])
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    sensor_type = entry.data[CONF_SENSOR_TYPE]
    unload_ok = await hass.config_entries.async_unload_platforms(
        entry, PLATFORMS_BY_TYPE[sensor_type]
    )
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)
        # Drop the shared domain data once the last entry is gone.
        if len(hass.config_entries.async_entries(DOMAIN)) == 0:
            hass.data.pop(DOMAIN)
    return unload_ok
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Handle options update."""
    # Update entity options stored in hass.
    if {**entry.options} != hass.data[DOMAIN][COMMON_OPTIONS]:
        hass.data[DOMAIN][COMMON_OPTIONS] = {**entry.options}
        # Dropping the shared coordinator forces async_setup_entry to
        # rebuild it with the new options during the reload below.
        hass.data[DOMAIN].pop(DATA_COORDINATOR)
        await hass.config_entries.async_reload(entry.entry_id)
|
adaptive-learning/proso-apps | proso_tasks/migrations/0001_initial.py | Python | mit | 4,601 | 0.003043 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-08-01 07:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
import proso.django.models
class Migration(migrations.Migration):
    """Initial schema for the proso_tasks app (generated by Django 1.9.1).

    Creates the Context, Skill, Task, TaskAnswer and TaskInstance models and
    adds (identifier, lang) uniqueness constraints. Auto-generated; avoid
    editing by hand.
    """
    initial = True
    dependencies = [
        ('proso_models', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Context',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('identifier', models.SlugField()),
                ('lang', models.CharField(max_length=2)),
                ('name', models.TextField()),
                ('content', jsonfield.fields.JSONField(blank=True, null=True)),
                ('active', models.BooleanField(default=True)),
                ('item', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_contexts', to='proso_models.Item')),
            ],
            bases=(models.Model, proso.django.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='Skill',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('identifier', models.SlugField()),
                ('lang', models.CharField(max_length=2)),
                ('name', models.TextField()),
                ('active', models.BooleanField(default=True)),
                ('item', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_skills', to='proso_models.Item')),
            ],
            bases=(models.Model, proso.django.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('identifier', models.SlugField()),
                ('lang', models.CharField(max_length=2)),
                ('content', jsonfield.fields.JSONField()),
                ('active', models.BooleanField(default=True)),
                ('item', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_tasks', to='proso_models.Item')),
            ],
            bases=(models.Model, proso.django.models.ModelDiffMixin),
        ),
        migrations.CreateModel(
            name='TaskAnswer',
            fields=[
                ('answer_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='proso_models.Answer')),
                ('question', models.CharField(blank=True, max_length=255, null=True)),
                ('answer', models.CharField(blank=True, max_length=255, null=True)),
            ],
            bases=('proso_models.answer',),
        ),
        migrations.CreateModel(
            name='TaskInstance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('identifier', models.SlugField()),
                ('lang', models.CharField(max_length=2)),
                ('description', jsonfield.fields.JSONField(blank=True, null=True)),
                ('active', models.BooleanField(default=True)),
                ('context', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='instances', to='proso_tasks.Context')),
                ('item', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_instances', to='proso_models.Item')),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='instances', to='proso_tasks.Task')),
            ],
            bases=(models.Model, proso.django.models.ModelDiffMixin),
        ),
        migrations.AlterUniqueTogether(
            name='taskinstance',
            unique_together=set([('identifier', 'lang')]),
        ),
        migrations.AlterUniqueTogether(
            name='task',
            unique_together=set([('identifier', 'lang')]),
        ),
        migrations.AlterUniqueTogether(
            name='skill',
            unique_together=set([('identifier', 'lang')]),
        ),
        migrations.AlterUniqueTogether(
            name='context',
            unique_together=set([('identifier', 'lang')]),
        ),
    ]
|
shobhitmishra/CodingProblems | LeetCode/Session3/ipo.py | Python | mit | 880 | 0.023864 | from heapq import *
from typing import List
class Solution:
    """Solver for LeetCode 502 (IPO): maximize capital with at most k projects."""

    def findMaximizedCapital(self, k: int, wealth: int, profits: List[int], capitals: List[int]) -> int:
        """Greedily pick, among affordable projects, the most profitable one.

        Projects are sorted once by required capital; a max-heap (negated
        profits) holds every project currently affordable. Each of the k
        rounds admits newly affordable projects, then takes the best.
        Runs in O(n log n + k log n).
        """
        projects = sorted(zip(capitals, profits))
        affordable = []  # max-heap of profits, stored negated
        next_project = 0
        for _ in range(k):
            # Admit every project whose capital requirement is now met.
            while next_project < len(projects) and projects[next_project][0] <= wealth:
                heappush(affordable, -projects[next_project][1])
                next_project += 1
            if not affordable:
                break  # nothing is affordable; further rounds cannot help
            wealth -= heappop(affordable)
        return wealth
k=0
W=0
Profits=[1,2,3,5]
Capital=[0,1,2,3]
ob = Solution()
print(ob.find | MaximizedCapital(k, W, Profits, Capital))
|
CERNDocumentServer/invenio | modules/bibfield/lib/functions/check_field_existence.py | Python | gpl-2.0 | 4,158 | 0.004329 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
def check_field_existence(record, field, min_value, max_value=None, subfield=None, continuable=True):
    """
    Checks field.subfield existence inside the record according to max and min values
    @param record: BibFieldDict where the record is stored
    @param field: Main json ID or field name to make test on
    @param min_value: Minimum number of occurrences of field.
        If max_value is not present then min_value represents the fix number of times that
        field should be present.
    @param max_value: Maximum number of occurrences of a field, this might be a fix number
        or "n".
    @param subfield: If this parameter is present, instead of applying the checker
        to the field, it is applied to record['field.subfield']
    @param continuable: when True, failures raise a continuable error so the
        overall validation can proceed; otherwise a fatal error is raised.
    @note: This checker also modify the record if the field is not repeatable,
        meaning that min_value=1 or min_value=0,max_value=1
    """
    from invenio.bibfield_utils import InvenioBibFieldContinuableError, \
        InvenioBibFieldError
    # Pick the error class used for every failure below.
    error = continuable and InvenioBibFieldContinuableError or InvenioBibFieldError
    # Strip a trailing '[n]' repeatability marker from the field name, then
    # build the lookup key (dotted with the subfield when one is given).
    field = '[n]' in field and field[:-3] or field
    key = subfield and "%s.%s" % (field, subfield) or field
    if min_value == 0:  # (0,1), (0,'n'), (0,n)
        # Optional field: only its maximum multiplicity can be violated.
        if not max_value:
            raise error("Minimun value = 0 and no max value for '%s'" % (key,))
        if key in record:
            value = record[key]
            if max_value == 1 and isinstance(value, list) and len(value) != 1:
                raise error("Field '%s' is not repeatable" % (key,))
            elif max_value != 'n':
                if isinstance(value, list) and len(value) > max_value:
                    raise error("Field '%s' is repeatable only %s times" % (key, max_value))
    elif min_value == 1:  # (1,-) (1,'n'), (1, n)
        # Mandatory field: must exist, be non-empty, and respect max_value.
        if not key in record:
            raise error("Field '%s' is mandatory" % (key,))
        value = record[key]
        if not value:
            raise error("Field '%s' is mandatory" % (key,))
        if not max_value:
            if isinstance(value, list) and len(value) != 1:
                raise error("Field '%s' is mandatory and not repeatable" % (key,))
        elif max_value != 'n':
            if isinstance(value, list) and len(value) > max_value:
                raise error("Field '%s' is mandatory and repeatable only %s times" % (key, max_value))
    else:
        # min_value > 1: the field must be a list appearing between
        # min_value and max_value times ('n' meaning unbounded).
        if not key in record:
            raise error("Field '%s' must be present inside the record %s times" % (key, min_value))
        value = record[key]
        if not value:
            raise error("Field '%s' must be present inside the record %s times" % (key, min_value))
        if not max_value:
            if not isinstance(value, list) or len(value) != min_value:
                raise error("Field '%s' must be present inside the record %s times" % (key, min_value))
        else:
            if max_value != 'n' and (not isinstance(value, list) or len(value) < min_value or len(value) > max_value):
                raise error("Field '%s' must be present inside the record between %s and %s times" % (key, min_value, max_value))
            elif not isinstance(value, list) or len(value) < min_value:
                raise error("Field '%s' must be present inside the record between %s and 'n' times" % (key, min_value))
|
KhronosGroup/COLLADA-CTS | StandardDataSets/collada/library_lights/_reference/_reference_directional_white/_reference_directional_white.py | Python | mit | 3,987 | 0.006521 |
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
# See Core.Logic.FJudgementContext for the information
# of the 'context' parameter.
# This sample judging object does the following:
#
# JudgeBaseline: just verifies that the standard steps did not crash.
# JudgeSuperior: also verifies that the va | lidation steps are not in error.
# JudgeExemplary: same as intermediate badge.
# We import an assistant script that includes the common verifications
# methods. The assistant buffers its checks, so that running them again
# does not incurs an unnecessary performance hint.
from StandardD | ataSets.scripts import JudgeAssistant
# Please feed your node list here:
tagLst = ['library_lights', 'light', 'technique_common', 'directional']
attrName = ''
attrVal = ''
dataToCheck = ''
class SimpleJudgingObject:
    """Judge for the directional-white reference light test case.

    Baseline: standard steps ran without crashing and import/export/validate
    passed. Superior: rendered images match and the tag list above is
    preserved through the round trip. Exemplary: same as superior.
    """
    def __init__(self, _tagLst, _attrName, _attrVal, _data):
        self.tagList = _tagLst
        self.attrName = _attrName
        self.attrVal = _attrVal
        self.dataToCheck = _data
        # Badge results, filled in by the Judge* methods below.
        self.status_baseline = False
        self.status_superior = False
        self.status_exemplary = False
        self.__assistant = JudgeAssistant.JudgeAssistant()
    def JudgeBaseline(self, context):
        # No step should not crash
        self.__assistant.CheckCrashes(context)
        # Import/export/validate must exist and pass, while Render must only exist.
        self.__assistant.CheckSteps(context, ["Import", "Export", "Validate"], ["Render"])
        self.status_baseline = self.__assistant.GetResults()
        return self.status_baseline
    # To pass intermediate you need to pass basic, this object could also include additional
    # tests that were specific to the intermediate badge.
    def JudgeSuperior(self, context):
        # if baseline fails, no point in further checking
        if (self.status_baseline == False):
            self.status_superior = self.status_baseline
            return self.status_superior
        # Compare the rendered images
        self.__assistant.CompareRenderedImages(context)
        # Check for preservation of element data
        self.__assistant.ElementPreserved(context, self.tagList)
        self.status_superior = self.__assistant.DeferJudgement(context)
        return self.status_superior
    # To pass advanced you need to pass intermediate, this object could also include additional
    # tests that were specific to the advanced badge
    def JudgeExemplary(self, context):
        self.status_exemplary = self.status_superior
        return self.status_exemplary
# This is where all the work occurs: "judgingObject" is an absolutely necessary token.
# The dynamic loader looks very specifically for a class instance named "judgingObject".
#
judgingObject = SimpleJudgingObject(tagLst, attrName, attrVal, dataToCheck);
open-mmlab/mmdetection | mmdet/models/detectors/fast_rcnn.py | Python | apache-2.0 | 2,164 | 0 | # Copyright (c) OpenMMLab. All rights reserved.
from ..builder import DETECTORS
from .two_stage import TwoStageDetector
@DETECTORS.register_module()
class FastRCNN(TwoStageDetector):
    """Implementation of `Fast R-CNN <https://arxiv.org/abs/1504.08083>`_"""

    def __init__(self,
                 backbone,
                 roi_head,
                 train_cfg,
                 test_cfg,
                 neck=None,
                 pretrained=None,
                 init_cfg=None):
        # Fast R-CNN consumes precomputed proposals instead of running an
        # RPN, so no rpn_head is passed to the two-stage base class.
        super(FastRCNN, self).__init__(
            backbone=backbone,
            neck=neck,
            roi_head=roi_head,
            train_cfg=train_cfg,
            test_cfg=test_cfg,
            pretrained=pretrained,
            init_cfg=init_cfg)

    def forward_test(self, imgs, img_metas, proposals, **kwargs):
        """Run inference, dispatching on the number of test-time augmentations.

        Args:
            imgs (List[Tensor]): the outer list indicates test-time
                augmentations and inner Tensor should have a shape NxCxHxW,
                which contains all images in the batch.
            img_metas (List[List[dict]]): the outer list indicates test-time
                augs (multiscale, flip, etc.) and the inner list indicates
                images in a batch.
            proposals (List[List[Tensor]]): the outer list indicates test-time
                augs (multiscale, flip, etc.) and the inner list indicates
                images in a batch. The Tensor should have a shape Px4, where
                P is the number of proposals.

        Raises:
            TypeError: if ``imgs`` or ``img_metas`` is not a list.
            ValueError: if the numbers of images and image metas differ.
            NotImplementedError: for multi-augmentation inputs; aug_test is
                not supported for FastRCNN yet.
        """
        for var, name in [(imgs, 'imgs'), (img_metas, 'img_metas')]:
            if not isinstance(var, list):
                raise TypeError(f'{name} must be a list, but got {type(var)}')
        num_augs = len(imgs)
        if num_augs != len(img_metas):
            raise ValueError(f'num of augmentations ({len(imgs)}) '
                             f'!= num of image meta ({len(img_metas)})')
        if num_augs == 1:
            return self.simple_test(imgs[0], img_metas[0], proposals[0],
                                    **kwargs)
        # Bug fix: the original `assert NotImplementedError` always passed
        # (the exception class itself is truthy) and silently returned None.
        # Raise explicitly until test-time augmentation is implemented.
        raise NotImplementedError(
            'test-time augmentation is not supported for FastRCNN')
|
Brett55/moto | moto/apigateway/urls.py | Python | apache-2.0 | 2,102 | 0.007612 | from __future__ import unicode_literals
from .responses import APIGatewayResponse
url_bases = [
"https?://apigateway.(.+).amazonaws.com"
]
url_paths = {
'{0}/restapis$': APIGatewayResponse().restapis,
'{0}/restapis/(?P<function_id>[^/]+)/?$': APIGatewayResponse().restapis_individual,
'{0}/restapis/(?P<function_id>[^/]+)/resources$': APIGatewayResponse().resources,
'{0}/restapis/(?P<function_id>[^/]+)/stages$': APIGatewayResponse().restapis_stages,
'{0}/restapis/(?P<function_id>[^/]+)/stages/(?P<stage_name>[^/]+)/?$': APIGatewayResponse().stages,
'{0}/restapis/(?P<function_id>[^/]+)/deployments$': APIGatewayResponse().deployments,
'{0}/restapis/(?P<function_id>[^/]+)/deplo | yments/(?P<deployment_id>[^/]+)/?$': APIGatewayResponse().individual_deployment,
'{0}/restapis/(?P<function_id>[^/]+)/resources/(?P | <resource_id>[^/]+)/?$': APIGatewayResponse().resource_individual,
'{0}/restapis/(?P<function_id>[^/]+)/resources/(?P<resource_id>[^/]+)/methods/(?P<method_name>[^/]+)/?$': APIGatewayResponse().resource_methods,
'{0}/restapis/(?P<function_id>[^/]+)/resources/(?P<resource_id>[^/]+)/methods/(?P<method_name>[^/]+)/responses/(?P<status_code>\d+)$': APIGatewayResponse().resource_method_responses,
'{0}/restapis/(?P<function_id>[^/]+)/resources/(?P<resource_id>[^/]+)/methods/(?P<method_name>[^/]+)/integration/?$': APIGatewayResponse().integrations,
'{0}/restapis/(?P<function_id>[^/]+)/resources/(?P<resource_id>[^/]+)/methods/(?P<method_name>[^/]+)/integration/responses/(?P<status_code>\d+)/?$': APIGatewayResponse().integration_responses,
'{0}/apikeys$': APIGatewayResponse().apikeys,
'{0}/apikeys/(?P<apikey>[^/]+)': APIGatewayResponse().apikey_individual,
'{0}/usageplans$': APIGatewayResponse().usage_plans,
'{0}/usageplans/(?P<usage_plan_id>[^/]+)/?$': APIGatewayResponse().usage_plan_individual,
'{0}/usageplans/(?P<usage_plan_id>[^/]+)/keys$': APIGatewayResponse().usage_plan_keys,
'{0}/usageplans/(?P<usage_plan_id>[^/]+)/keys/(?P<api_key_id>[^/]+)/?$': APIGatewayResponse().usage_plan_key_individual,
}
|
ayepezv/GAD_ERP | addons/mail/models/mail_message.py | Python | gpl-3.0 | 38,977 | 0.003746 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from email.header import decode_header
from email.utils import formataddr
from odoo import _, api, fields, models, SUPERUSER_ID, tools
from odoo.exceptions import UserError, AccessError
from odoo.osv import expression
_logger = logging.getLogger(__name__)
def decode(text):
    """Return the unicode() conversion of the given encoded SMTP header text.

    Falsy input (None or an empty string) yields None.
    """
    # TDE proposal: move to tools ?
    if not text:
        return None
    # The joining space will not be needed as of Python 3.3
    # See https://hg.python.org/cpython/rev/8c03fe231877
    parts = decode_header(text.replace('\r', ''))
    return ' '.join(tools.ustr(payload, charset) for payload, charset in parts)
class Message(models.Model):
""" Messages model: system notification (replacing res.log notifications),
comments (OpenChatter discussion) and incoming emails. """
_name = 'mail.message'
_description = 'Message'
_inherit = ['ir.needaction_mixin']
_order = 'id desc'
_rec_name = 'record_name'
_message_read_limit = 30
@api.model
def _get_default_from(self):
if self.env.user.alias_name and self.env.user.alias_domain:
return formataddr((self.env.user.name, '%s@%s' % (self.env.user.alias_name, self.env.user.alias_domain)))
elif self.env.user.email:
return formataddr((self.env.user.name, self.env.user.email))
raise UserError(_("Unable to send email, please configure the sender's email address or alias."))
    @api.model
    def _get_default_author(self):
        # Default message author: the partner record of the current user.
        return self.env.user.partner_id
# content
subject = fields.Char('Subject')
date = fields.Datetime('Date', default=fields.Datetime.now)
body = fields.Html('Contents', default='', strip_classes=True)
attachment_ids = fields.Many2many(
'ir.attachment', 'message_attachment_rel',
'message_id', 'attachment_id',
string='Attachments',
help='Attachments are linked to a document through model / res_id and to the message '
'through this field.')
parent_id = fields.Many2one(
'mail.message', 'Parent Message', select=True, ondelete='set null',
help="Initial thread message.")
child_ids = fields.One2many('mail.message', 'parent_id', 'Child Messages')
# related document
model = fields.Char('Related Document Model', select=1)
res_id = fields.Integer('Related Document ID', select=1)
record_name = fields.Char('Message Record Name', help="Name get of the related document.")
# characteristics
message_type = fields.Selection([
('email', 'Email'),
('comment', 'Comment'),
('notification', 'System notification')],
'Type', required=True, default='email',
help="Message type: email for email message, notification for system "
"message, comment for other messages such as user replies",
oldname='type')
subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', ondelete='set null', select=1)
# origin
email_from = fields.Char(
'From', default=_get_default_from,
help="Email address of the sender. This field is set when no matching partner is found and replaces the author_id field in the chatter.")
author_id = fields.Many2one(
'res.partner', 'Author', select=1,
ondelete='set null', default=_get_default_author,
help="Author of the message. If not set, email_from may hold an email address that did not match any partner.")
author_avatar = fields.Binary("Author's avatar", related='author_id.image_small')
# recipients
partner_ids = fields.Many2many('res.partner', string='Recipients')
needaction_partner_ids = fields.Many2many(
'res.partner', 'mail_message_res_partner_needaction_rel', string='Partners with Need Action')
needaction = fields.Boolean(
'Need Action', compute='_get_needaction', search='_search_needaction',
help='Need Action')
channel_ids = fields.Many2many(
'mail.channel', 'mail_message_mail_channel_rel', string='Channels')
# user interface
starred_partner_ids = fields.Many2many(
'res.partner', 'mail_message_res_partner_starred_rel', string='Favorited By')
starred = fields.Boolean(
'Starred', compute='_get_starred', search='_search_starred',
help='Current user has a starred notification linked to this message')
# tracking
tracking_value_ids = fields.One2many(
'mail.tracking.value', 'mail_message_id',
string='Tracking values',
help='Tracked values are stored in a separate model. This field allow to reconstruct '
'the tracking and to generate statistics on the model.')
# mail gateway
no_auto_thread = fields.Boolean(
'No threading for answers',
help='Answers do not go in the original document discussion thread. This has an impact on the generated message-id.')
message_id = fields.Char('Message-Id', help='Message unique identifier', select=1, readonly=1, copy=False)
reply_to = fields.Char('Reply-To', help='Reply email address. Setting the reply_to bypasses the automatic thread creation.')
mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing mail server') |
@api.multi
def _get_needaction(self):
""" Need action on a mail.message = notified on my channel """
my_messages = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.needaction_partner_ids)
for message in self:
message.needaction = message in my_messages
    @api.multi
    def _is_accessible(self):
        # Base implementation denies access; presumably overridden by other
        # modules -- confirm against inheriting models.
        self.ensure_one()
        return False
@api.model
d | ef _search_needaction(self, operator, operand):
if operator == '=' and operand:
return [('needaction_partner_ids', 'in', self.env.user.partner_id.id)]
return [('needaction_partner_ids', 'not in', self.env.user.partner_id.id)]
@api.depends('starred_partner_ids')
def _get_starred(self):
""" Compute if the message is starred by the current user. """
# TDE FIXME: use SQL
starred = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.starred_partner_ids)
for message in self:
message.starred = message in starred
@api.model
def _search_starred(self, operator, operand):
if operator == '=' and operand:
return [('starred_partner_ids', 'in', [self.env.user.partner_id.id])]
return [('starred_partner_ids', 'not in', [self.env.user.partner_id.id])]
    @api.model
    def _needaction_domain_get(self):
        # Messages needing action are exactly those flagged by
        # _get_needaction / _search_needaction above.
        return [('needaction', '=', True)]
#------------------------------------------------------
# Notification API
#------------------------------------------------------
@api.model
def mark_all_as_read(self, channel_ids=None, domain=None):
""" Remove all needactions of the current partner. If channel_ids is
given, restrict to messages written in one of those channels. """
partner_id = self.env.user.partner_id.id
if domain is None:
query = "DELETE FROM mail_message_res_partner_needaction_rel WHERE res_partner_id IN %s"
args = [(partner_id,)]
if channel_ids:
query += """
AND mail_message_id in
(SELECT mail_message_id
FROM mail_message_mail_channel_rel
WHERE mail_channel_id in %s)"""
args += [tuple(channel_ids)]
query += " RETURNING mail_message_id as id"
self._cr.execute(query, args)
self.invalidate_cache()
ids = [m['id'] for m in self._cr.dictfetchall()]
else:
# not really efficient method: it does one db request for the
# search, and one for each message in the result set to remove the
# current user from the relation.
msg_domain = [('needaction_partner_ids', 'in', partner_id)]
if channel_ids:
msg_domain += [('channe |
jmartinm/invenio-oauthclient | invenio_oauthclient/contrib/github.py | Python | gpl-2.0 | 3,966 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Pre-configured remote application for enabling sign in/up with GitHub.
**Usage:**
1. Ensure you have ``github3.py`` package installed:
.. code-block:: console
cdvirtualenv src/invenio
pip install -e .[github]
2. Edit your configuration and add:
.. code-block:: python
from invenio_oauthclient.contrib import github
OAUTHCLIENT_REMOTE_APPS = dict(
github=github.REMOTE_APP,
)
GITHUB_APP_CREDENTIALS = dict(
consumer_key="changeme",
consumer_secret="changeme",
)
3. Go to GitHub and register a new application:
https://github.com/settings/applications/new. When registering the
application ensure that the *Authorization callback URL* points to:
``CFG_SITE_SECURE_URL/oauth/authorized/github/`` (e.g.
``http://localhost:4000/oauth/authorized/github/`` for development).
4. Grab the *Client ID* and *Client Secret* after registering the application
and add them to your instance configuration (``invenio.cfg``):
.. code-block:: python
GITHUB_APP_CREDENTIALS = dict(
consumer_key="<CLIENT ID>",
consumer_secret="<CLIENT SECRET>",
)
5. Now go to ``CFG_SITE_SECURE_URL/oauth/login/github/`` (e.g.
http://localhost:4000/oauth/login/github/)
6. Also, you should see GitHub listed under Linked accounts:
http://localhost:4000//account/settings/linkedaccounts/
By default the GitHub module will try first look if a link already exists
between a GitHub account and a user. If no link is found, the module tries to
retrieve the user email address from GitHub to match it with a local user. If
this fails, the user is asked to provide an email address to sign-up.
In templates you can add a sign in/up link:
.. code-block:: jinja
<a href="{{url_for('oauthclient.login', remote_app='github')}}">
Sign in with GitHub
</a>
"""
import github3
# Remote application definition registered under OAUTHCLIENT_REMOTE_APPS
# (see the module docstring).  Handlers are given as "module:callable"
# import strings so they are resolved lazily by invenio-oauthclient.
REMOTE_APP = dict(
    title='GitHub',
    description='Software collaboration platform.',
    icon='fa fa-github',
    authorized_handler="invenio_oauthclient.handlers"
    ":authorized_signup_handler",
    disconnect_handler="invenio_oauthclient.handlers"
    ":disconnect_handler",
    signup_handler=dict(
        info="invenio_oauthclient.contrib.github:account_info",
        setup="invenio_oauthclient.contrib.github:account_setup",
        view="invenio_oauthclient.handlers:signup_handler",
    ),
    # OAuth client parameters; the actual key/secret pair is read from the
    # GITHUB_APP_CREDENTIALS configuration variable (app_key).
    params=dict(
        request_token_params={'scope': 'user:email'},
        base_url='https://api.github.com/',
        request_token_url=None,
        access_token_url="https://github.com/login/oauth/access_token",
        access_token_method='POST',
        authorize_url="https://github.com/login/oauth/authorize",
        app_key="GITHUB_APP_CREDENTIALS",
    )
)
def account_info(remote, resp):
    """Fetch email and login of the GitHub user owning the access token."""
    client = github3.login(token=resp['access_token'])
    profile = client.user()
    return dict(email=profile.email, nickname=profile.login)
def account_setup(remote, token):
    """ Perform additional setup after user have been logged in. """
    # No extra setup is required for GitHub accounts.
    pass
|
jeffque/circle-quest-squareland | elements/geometry.py | Python | unlicense | 969 | 0.001032 | def pitagoras_quad(coordenada):
return coordenada[0] ** 2 + coordenada[1] ** 2
def pitagoras(coordenada):
    """Euclidean length (hypotenuse) of a 2D coordinate pair."""
    return pitagoras_quad(coordenada)**0.5
def coords_delta(coord_a, coord_b):
    """Component-wise difference ``coord_a - coord_b`` of two 2D points.

    Returns a new two-element list; neither input is modified.
    """
    # single expression instead of the build-by-append loop
    return [coord_a[0] - coord_b[0], coord_a[1] - coord_b[1]]
def coords_soma(coord_a, coord_b):
    """Component-wise sum of two 2D points, as a new two-element list."""
    # single expression instead of the build-by-append loop
    return [coord_a[0] + coord_b[0], coord_a[1] + coord_b[1]]
def distancia_quad(ponto_a, ponto_b):
    """Squared euclidean distance between two 2D points."""
    return pitagoras_quad(coords_delta(ponto_a, ponto_b))
def distancia(ponto_a, ponto_b):
    """Euclidean distance between two 2D points."""
    return distancia_quad(ponto_a, ponto_b) ** 0.5
def direction2module(direcao, modulo_desejado):
    """Rescale a direction vector to the requested magnitude.

    A zero vector cannot be rescaled, so [0, 0] is returned in that case
    (mirrors the original ZeroDivisionError fallback).
    """
    modulo_atual = pitagoras(direcao)
    if modulo_atual == 0:
        return [0, 0]
    fator = modulo_desejado / modulo_atual
    return [componente * fator for componente in direcao]
def direction_module_mutiply(direcao, fator):
    """Scale every component of the direction vector by ``fator``."""
    # (name typo "mutiply" kept: callers reference it)
    return [componente * fator for componente in direcao]
|
valhallasw/phabricator-tools | py/abd/abdt_repooptions.py | Python | apache-2.0 | 6,892 | 0.000145 | """Per-repository configuration options."""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# abdt_repooptions
#
# Public Classes:
# Data
#
# Public Functions:
# merge_override_into_data
# merge_data_objects
# make_default_data
# data_from_json
# json_from_data
# validate_data
# data_from_repo_or_none
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
import copy
import json
import phlgit_show
import phlgit_showref
class Data(object):

    """Value object holding per-repository configuration options.

    A value of None means 'not set', which matters to the merge functions
    in this module.
    """

    def __init__(self):
        super(Data, self).__init__()
        self.description = None
        self.branch_url_format = None
        self.review_url_format = None
        self.admin_emails = []

    def __eq__(self, other):
        # equal when the other object is the same kind of Data and every
        # attribute matches
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
def _merge_lists_as_sets(*list_list):
# sets aren't serializable as json so we want to store as list
new_set = set()
for l in list_list:
if l is not None:
new_set |= set(l)
return list(new_set)
def merge_override_into_data(default, override):
    """Return the result of overriding the non-None keys of 'override'.
    :default: the lower precedence Data
    :override: the higher precedence Data
    :returns: the higher precedence Data
    """
    # first create a copy of default, use deepcopy() for future-proofing
    result = copy.deepcopy(default)
    # string attributes are replaced wholesale; list attributes are merged
    string_keys = [
        "description",
        "branch_url_format",
        "review_url_format",
    ]
    list_keys = [
        "admin_emails",
    ]
    # guard against Data growing an attribute this function doesn't handle
    assert set(string_keys + list_keys) == set(Data().__dict__.keys())
    for key, value in override.__dict__.iteritems():
        # None in 'override' means 'no override': keep the default's value
        if value is not None:
            if key in string_keys:
                setattr(result, key, value)
            else: # it's a list attribute
                assert key in list_keys
                if key in result.__dict__:
                    left = getattr(result, key)
                    right = getattr(override, key)
                    # set-union of both lists (deduplicates entries)
                    setattr(result, key, _merge_lists_as_sets(left, right))
                else:
                    setattr(result, key, value)
    return result
def merge_data_objects(*data_list):
    """Merge many Data objects, precedence increases with index in the list.
    if an item in the list is None then it is ignored.
    :object_list: multiple Data() args
    :returns: a Data() that represents the composite of all the configs
    """
    result, overrides = data_list[0], data_list[1:]
    for override in overrides:
        if override is not None:
            result = merge_override_into_data(result, override)
    return result
def make_default_data():
    """Returns a 'Data' with sensible default values.
    :returns: a 'Data'
    """
    data = Data()
    data.description = "(unnamed repo)"
    # all other fields keep their None / empty defaults from Data()
    return data
def data_from_json(json_string):
    """Returns a 'Data' from the supplied 'json_string'.
    The 'json_string' doesn't have to mention all the attributes of Data, it
    must not mention attributes that don't exist in Data already.
    :json_string: a string of the json data
    :returns: a abdt_repoconfig.Data based on 'json_string'
    """
    data = Data()
    loaded = json.loads(json_string)
    for key in loaded:
        # raise if the attribute doesn't already exist on Data
        getattr(data, key)
        setattr(data, key, loaded[key])
    return data
def json_from_data(data):
    """Returns a json string from the supplied 'data'.

    :data: a abdt_repoconfig.Data to encode as json
    :returns: a pretty-printed json string based on 'data'
    """
    # fall back on the object's attribute dict for non-serializable types
    return json.dumps(
        data, default=lambda obj: obj.__dict__, sort_keys=True, indent=4)
def validate_data(data):
    """Raise if the supplied data is invalid in any way.
    :data: a Data() to be validated
    :returns: None
    """
    # make sure that 'data' has the same attributes as a blank data
    actual = set(data.__dict__.keys())
    expected = set(Data().__dict__.keys())
    if actual != expected:
        if actual.issubset(expected):
            message = "supplied 'data' is missing fields: {fields}".format(
                fields=list(expected - actual))
        elif actual.issuperset(expected):
            message = "supplied 'data' has extra fields: {fields}".format(
                fields=list(actual - expected))
        else:
            message = "supplied 'data' is missing or gained: {fields}".format(
                fields=list(actual ^ expected))
        raise Exception(message)
    # check that the url format strings accept the expected parameters
    if data.branch_url_format is not None:
        data.branch_url_format.format(
            branch='blahbranch', repo_url='myorg/myrepo')
    if data.review_url_format is not None:
        data.review_url_format.format(review=123)
def data_from_repo_or_none(repo):
    """Returns a valid 'Data' if 'repo' has a config file.
    Will raise if the config file could not be parsed.
    Will return 'None' if no config file was found.
    :repo: a callable supporting git commands, e.g. repo("status")
    :returns: a valid 'Data' or None
    """
    # the configuration lives on a special ref, which may not exist
    ref = 'refs/config/origin/arcyd'
    if ref not in phlgit_showref.names(repo):
        return None
    try:
        content = phlgit_show.file_on_ref(repo, 'repo.json', ref)
    except Exception:
        # best-effort: treat an unreadable config file as 'not found'
        return None
    return data_from_json(content)
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
getsmarter/bda | utils/__init__.py | Python | mit | 235 | 0.004255 | from utils.haversine import haversine
from utils.geo import | llaToECEF
from utils.geo import ECEFTolla
from utils.median import getmedian
from utils.graph import draw_partitioned_graph
from utils.fancy_dendrogram imp | ort fancy_dendrogram |
fgmacedo/django-awards | awards/urls.py | Python | mit | 121 | 0 | from django.conf.urls import url
from . import views
# Map the app root URL to the awards list view.
urlpatterns = [
    url(r'^$', views.awards_list, name='list'),
]
d4rt/SplunkModularInputsPythonFramework | implementations/rest/bin/authhandlers.py | Python | apache-2.0 | 1,678 | 0.014303 | from requests.auth import AuthBase
import hmac
import base64
import hashlib
import urlparse
import urllib
#add your custom auth handler class to this module
class MyCustomAuth(AuthBase):
    """Skeleton requests auth handler; fill in the commented examples."""
    def __init__(self,**args):
        # setup any auth-related data here
        #self.username = args['username']
        #self.password = args['password']
        pass
    def __call__(self, r):
        # modify and return the request; currently a no-op passthrough
        #r.headers['foouser'] = self.username
        #r.headers['foopass'] = self.password
        return r
class CloudstackAuth(AuthBase):
    """requests auth handler signing CloudStack API calls.

    The query string is normalized, sorted, lower-cased and signed with
    HMAC-SHA1 using the account's secret key; the base64 signature is
    appended as the 'signature' parameter.
    """
    def __init__(self, **args):
        # keep the credentials used to sign each outgoing request
        self.apikey = args['apikey']
        self.secretkey = args['secretkey']
    def __call__(self, r):
        # split the url into its base and its query parameters
        parsed = urlparse.urlparse(r.url)
        base_url = parsed.geturl().split('?', 1)[0]
        params = urlparse.parse_qs(parsed.query)
        # parse_qs wraps every value in a list; flatten to single values
        for name in params:
            params[name] = params[name][0]
        params['apikey'] = self.apikey
        # canonical 'k=v&k=v' string with '+' re-encoded as '%20'
        encoded_pairs = [
            name + '=' + urllib.quote_plus(params[name]).replace("+", "%20")
            for name in sorted(params.keys())]
        query = '&'.join(encoded_pairs)
        digest = hmac.new(
            self.secretkey,
            msg=query.lower(),
            digestmod=hashlib.sha1).digest()
        signature = base64.b64encode(digest)
        query += '&signature=' + urllib.quote_plus(signature)
        r.url = base_url + '?' + query
        return r
cjekel/piecewiseLinearFitPython | examples/fitWithKnownLineSegmentLocations.py | Python | mit | 3,127 | 0 | # fit and predict with known line segment x locations
# import our libraries
import numpy as np
import matplotlib.pyplot as plt
import pwlf
# your data
y = np.array([0.00000000e+00, 9.69801700e-03, 2.94350340e-02,
4.39052750e-02, 5.45343950e-02, 6.74104940e-02,
8.34831790e-02, 1.02580042e-01, 1.22767939e-01,
1.42172312e-01, 0.00000000e+00, 8.58600000e-06,
8.31543400e-03, 2.34184100e-02, 3.39709150e-02,
4.03581990e-02, 4.53545600e-02, 5.02345260e-02,
5.55253360e-02, 6.14750770e-02, 6.82125120e-02,
7.55892510e-02, 8.38356810e-02, 9.26413070e-02,
1.02039790e-01, 1.11688258e-01, 1.21390666e-01,
| 1.31196948e-01, 0.00000000e+00, 1.56706510e-02,
3.54628780e-02, 4.63739040e-02, 5.61442590e-02,
6.78542550e-02, 8.16388310e-02, 9.77756110e-02,
1.16531753e-01, 1.37038283e-01, 0.00000000e+00,
| 1.16951050e-02, 3.12089850e-02, 4.41776550e-02,
5.42877590e-02, 6.63321350e-02, 8.07655920e-02,
9.70363280e-02, 1.15706975e-01, 1.36687642e-01,
0.00000000e+00, 1.50144640e-02, 3.44519970e-02,
4.55907760e-02, 5.59556700e-02, 6.88450940e-02,
8.41374060e-02, 1.01254006e-01, 1.20605073e-01,
1.41881288e-01, 1.62618058e-01])
x = np.array([0.00000000e+00, 8.82678000e-03, 3.25615100e-02,
5.66106800e-02, 7.95549800e-02, 1.00936330e-01,
1.20351520e-01, 1.37442010e-01, 1.51858250e-01,
1.64433570e-01, 0.00000000e+00, -2.12600000e-05,
7.03872000e-03, 1.85494500e-02, 3.00926700e-02,
4.17617000e-02, 5.37279600e-02, 6.54941000e-02,
7.68092100e-02, 8.76596300e-02, 9.80525800e-02,
1.07961810e-01, 1.17305210e-01, 1.26063930e-01,
1.34180360e-01, 1.41725010e-01, 1.48629710e-01,
1.55374770e-01, 0.00000000e+00, 1.65610200e-02,
3.91016100e-02, 6.18679400e-02, 8.30997400e-02,
1.02132890e-01, 1.19011260e-01, 1.34620080e-01,
1.49429370e-01, 1.63539960e-01, -0.00000000e+00,
1.01980300e-02, 3.28642800e-02, 5.59461900e-02,
7.81388400e-02, 9.84458400e-02, 1.16270210e-01,
1.31279040e-01, 1.45437090e-01, 1.59627540e-01,
0.00000000e+00, 1.63404300e-02, 4.00086000e-02,
6.34390200e-02, 8.51085900e-02, 1.04787860e-01,
1.22120350e-01, 1.36931660e-01, 1.50958760e-01,
1.65299640e-01, 1.79942720e-01])
# your desired line segment end locations
x0 = np.array([min(x), 0.039, 0.10, max(x)])
# initialize piecewise linear fit with your x and y data
my_pwlf = pwlf.PiecewiseLinFit(x, y)
# fit the data with the specified break points (ie the x locations of where
# the line segments should end
my_pwlf.fit_with_breaks(x0)
# predict for the determined points
xHat = np.linspace(min(x), max(x), num=10000)
yHat = my_pwlf.predict(xHat)
# plot the results
plt.figure()
plt.plot(x, y, 'o')
plt.plot(xHat, yHat, '-')
plt.show()
|
pgandev/RocketMap | pogom/utils.py | Python | agpl-3.0 | 43,916 | 0.000023 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import configargparse
import os
import math
import json
import logging
import random
import time
import socket
import struct
import zipfile
import requests
from uuid import uuid4
from s2sphere import CellId, LatLng
from . import config
log = logging.getLogger(__name__)
def parse_unicode(bytestring):
    """Decode a byte string from the filesystem encoding to unicode."""
    return bytestring.decode(sys.getfilesystemencoding())
def memoize(function):
    """Cache 'function' results keyed by its positional arguments.

    Arguments must be hashable; keyword arguments are not supported.
    """
    cache = {}
    def wrapper(*args):
        if args not in cache:
            cache[args] = function(*args)
        return cache[args]
    return wrapper
@memoize
def get_args():
# Pre-check to see if the -cf or --config flag is used on the command line.
# If not, we'll use the env var or default value. This prevents layering of
# config files as well as a missing config.ini.
defaultconfigfiles = []
if '-cf' not in sys.argv and '--config' not in sys.argv:
defaultconfigfiles = [os.getenv('POGOMAP_CONFIG', os.path.join(
os.path.dirname(__file__), '../config/config.ini'))]
parser = configargparse.ArgParser(
default_config_files=defaultconfigfiles,
auto_env_var_prefix='POGOMAP_')
parser.add_argument('-cf', '--config',
is_config_file=True, help='Set configuration file')
parser.add_argument('-a', '--auth-service', type=str.lower,
action='append', default=[],
help=('Auth Services, either one for all accounts ' +
'or one per account: ptc or google. Defaults ' +
'all to ptc.'))
parser.add_argument('-u', '--username', action='append', default=[],
help='Usernames, one per account.')
parser.add_argument('-p', '--password', action='append', default=[],
help=('Passwords, either single one for all ' +
'accounts or one per account.'))
parser.add_argument('-w', '--workers', type=int,
help=('Number of search worker threads to start. ' +
'Defaults to the number of accounts specified.'))
parser.add_argument('-asi', '--account-search-interval', type=int,
default=0,
help=('Seconds for accounts to search before ' +
'switching to a new account. 0 to disable.'))
parser.add_argument('-ari', '--account-rest-interval', type=int,
default=7200,
help=('Seconds for accounts to rest when they fail ' +
'or are switched out.'))
parser.add_argument('-ac', '--accountcsv',
help=('Load accounts from CSV file containing ' +
'"auth_service,username,passwd" lines.'))
parser. | add_argument('-hlvl', '--high-lvl-accounts',
help=('Load high level accounts from CSV file '
+ ' containing '
+ '"auth_service,username,passwd"'
+ ' lines.'))
parser.add_arg | ument('-bh', '--beehive',
help=('Use beehive configuration for multiple ' +
'accounts, one account per hex. Make sure ' +
'to keep -st under 5, and -w under the total ' +
'amount of accounts available.'),
action='store_true', default=False)
parser.add_argument('-wph', '--workers-per-hive',
help=('Only referenced when using --beehive. Sets ' +
'number of workers per hive. Default value ' +
'is 1.'),
type=int, default=1)
parser.add_argument('-l', '--location', type=parse_unicode,
help='Location, can be an address or coordinates.')
# Default based on the average elevation of cities around the world.
# Source: https://www.wikiwand.com/en/List_of_cities_by_elevation
parser.add_argument('-alt', '--altitude',
help='Default altitude in meters.',
type=int, default=507)
parser.add_argument('-altv', '--altitude-variance',
help='Variance for --altitude in meters',
type=int, default=1)
parser.add_argument('-uac', '--use-altitude-cache',
help=('Query the Elevation API for each step,' +
' rather than only once, and store results in' +
' the database.'),
action='store_true', default=False)
parser.add_argument('-nj', '--no-jitter',
help=("Don't apply random -9m to +9m jitter to " +
"location."),
action='store_true', default=False)
parser.add_argument('-al', '--access-logs',
help=("Write web logs to access.log."),
action='store_true', default=False)
parser.add_argument('-st', '--step-limit', help='Steps.', type=int,
default=12)
parser.add_argument('-sd', '--scan-delay',
help='Time delay between requests in scan threads.',
type=float, default=10)
parser.add_argument('--spawn-delay',
help=('Number of seconds after spawn time to wait ' +
'before scanning to be sure the Pokemon ' +
'is there.'),
type=float, default=10)
parser.add_argument('-enc', '--encounter',
help='Start an encounter to gather IVs and moves.',
action='store_true', default=False)
parser.add_argument('-cs', '--captcha-solving',
help='Enables captcha solving.',
action='store_true', default=False)
parser.add_argument('-ck', '--captcha-key',
help='2Captcha API key.')
parser.add_argument('-cds', '--captcha-dsk',
help='Pokemon Go captcha data-sitekey.',
default="6LeeTScTAAAAADqvhqVMhPpr_vB9D364Ia-1dSgK")
parser.add_argument('-mcd', '--manual-captcha-domain',
help='Domain to where captcha tokens will be sent.',
default="http://127.0.0.1:5000")
parser.add_argument('-mcr', '--manual-captcha-refresh',
help='Time available before captcha page refreshes.',
type=int, default=30)
parser.add_argument('-mct', '--manual-captcha-timeout',
help='Maximum time captchas will wait for manual ' +
'captcha solving. On timeout, if enabled, 2Captcha ' +
'will be used to solve captcha. Default is 0.',
type=int, default=0)
parser.add_argument('-ed', '--encounter-delay',
help=('Time delay between encounter pokemon ' +
'in scan threads.'),
type=float, default=1)
parser.add_argument('-encwf', '--enc-whitelist-file',
default='', help='File containing a list of '
'Pokemon IDs to encounter for'
' IV/CP scanning.')
parser.add_argument('-nostore', '--no-api-store',
help=("Don't store the API objects used by the high"
+ ' level accounts in memory. This will increase'
+ ' the number of logins per account, but '
+ ' decreases memory usage.'),
action='store_true', default=False)
webhook_list = parser.add_mutually_exclusive_group()
webhook_list.add_argument('-wwht', '--webhook-whitelist',
action=' |
endlessm/chromium-browser | third_party/llvm/lldb/third_party/Python/module/unittest2/unittest2/test/test_new_tests.py | Python | bsd-3-clause | 1,677 | 0 | from cStringIO import StringIO
import unittest
import unittest2
from unittest2.test.support import resultFactory
class TestUnittest(unittest2.TestCase):

    """Check that unittest2 classes interoperate with stdlib unittest."""

    def assertIsSubclass(self, actual, klass):
        self.assertTrue(issubclass(actual, klass), "Not a subclass.")

    def testInheritance(self):
        # every unittest2 class should extend its unittest counterpart
        pairs = [
            (unittest2.TestCase, unittest.TestCase),
            (unittest2.TestResult, unittest.TestResult),
            (unittest2.TestSuite, unittest.TestSuite),
            (unittest2.TextTestRunner, unittest.TextTestRunner),
            (unittest2.TestLoader, unittest.TestLoader),
            (unittest2.TextTestResult, unittest.TestResult),
        ]
        for derived, base in pairs:
            self.assertIsSubclass(derived, base)

    def test_new_runner_old_case(self):
        # a unittest2 runner must be able to run a plain unittest case
        runner = unittest2.TextTestRunner(resultclass=resultFactory,
                                          stream=StringIO())

        class Test(unittest.TestCase):
            def testOne(self):
                pass

        result = runner.run(unittest2.TestSuite((Test('testOne'),)))
        self.assertEqual(result.testsRun, 1)
        self.assertEqual(len(result.errors), 0)

    def test_old_runner_new_case(self):
        # a plain unittest runner must be able to run a unittest2 case
        runner = unittest.TextTestRunner(stream=StringIO())

        class Test(unittest2.TestCase):
            def testOne(self):
                self.assertDictEqual({}, {})

        result = runner.run(unittest.TestSuite((Test('testOne'),)))
        self.assertEqual(result.testsRun, 1)
        self.assertEqual(len(result.errors), 0)
if __name__ == '__main__':
unittest2.main()
|
moden-py/SWAPY-deleting | proxy.py | Python | lgpl-2.1 | 26,563 | 0.010014 | # GUI object/properties browser.
# Copyright (C) 2011 Matiychuk D.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
import pywinauto
import sys, os
import time
import wx
import thread
import exceptions
import platform
import warnings
from const import *
'''
proxy module for pywinauto
'''
pywinauto.timings.Timings.window_find_timeout = 1
def resource_path(filename):
    """Resolve 'filename' against the application's resource directory.

    Handles frozen PyInstaller bundles (both old and new layouts) as well
    as a plain script run.
    """
    if hasattr(sys, '_MEIPASS'):
        # PyInstaller >= 1.6 unpacks resources into sys._MEIPASS
        base_dir = sys._MEIPASS
    elif '_MEIPASS2' in os.environ:
        # PyInstaller < 1.6 (tested on 1.5 only) uses an env var instead
        base_dir = os.environ['_MEIPASS2']
    else:
        # not frozen: resolve relative to the started script
        base_dir = os.path.dirname(sys.argv[0])
    return os.path.join(base_dir, filename)
class SWAPYObject(object):
'''
Base proxy class for pywinauto objects.
'''
    def __init__(self, pwa_obj):
        '''
        Constructor
        '''
        #original pywinauto object
        self.pwa_obj = pwa_obj
        # sort children case-insensitively by their display name (the first
        # element of each (name, obj) pair); see Get_subitems()
        default_sort_key = lambda name: name[0].lower()
        self.subitems_sort_key = default_sort_key
def GetProperties(self):
'''
Return dict of original + additional properies
Can be owerridden for non pywinauto obects
'''
properties = {}
properties.update(self._get_properies())
properties.update(self._get_additional_properties())
return properties
    def Get_subitems(self):
        '''
        Return list of children - [(control_text, swapy_obj),...]
        Can be owerridden for non pywinauto obects
        '''
        subitems = []
        subitems += self._get_children()
        # NOTE(review): the triple-quoted string below is dead code kept for
        # reference only; it is never executed.
        '''
        for control in children:
            try:
                texts = control.Texts()
            except exceptions.RuntimeError:
                texts = ['Unknown control name2!'] #workaround
            while texts.count(''):
                texts.remove('')
            c_name = ', '.join(texts)
            if not c_name:
                #nontext_controlname = pywinauto.findbestmatch.GetNonTextControlName(control, children)[0]
                top_level_parent = control.TopLevelParent().Children()
                nontext_controlname = pywinauto.findbestmatch.GetNonTextControlName(control, top_level_parent)[0]
                if nontext_controlname:
                    c_name = nontext_controlname
                else:
                    c_name = 'Unknown control name1!'
            subitems.append((c_name, self._get_swapy_object(control)))
        '''
        subitems += self._get_additional_children()
        subitems.sort(key=self.subitems_sort_key)
        #encode names to cp1251 bytes for display, replacing unmappable chars
        subitems_encoded = []
        for (name, obj) in subitems:
            name = name.encode('cp1251', 'replace')
            subitems_encoded.append((name, obj))
        return subitems_encoded
def Exec_action(self, action_id):
'''
Execute action on the control
'''
action = ACTIONS[action_id]
#print('self.pwa_obj.'+action+'()')
exec('self.pwa_obj.'+action+'()')
return 0
def Get_actions(self):
'''
return allowed actions for this object. [(id,action_name),...]
'''
allowed_actions = []
try:
obj_actions = dir(self.pwa_obj.WrapperObject())
except:
obj_actions = dir(self.pwa_obj)
for id, action in ACTIONS.items():
if action in obj_actions:
allowed_actions.append((id,action))
allowed_actions.sort(key=lambda name: name[1].lower())
return allowed_actions
    def Get_code(self, action_id):
        '''
        Generate a pywinauto code snippet that looks the control up by its
        shortest access name and invokes the requested action on it.
        '''
        action = ACTIONS[action_id]
        # Backslash continuations build a two-line snippet; the access name
        # is unicode-escaped so it stays a valid Python string literal.
        code = "\
ctrl = window['"+self._get_additional_properties()['Access names'][0].encode('unicode-escape', 'replace')+"']\n\
ctrl."+action+"()\n"
        return code
    def Highlight_control(self):
        # Flash the control (3 blinks) in a background thread so the UI
        # stays responsive; do nothing when the control is not visible.
        if self._check_visibility():
            thread.start_new_thread(self._highlight_control,(3,))
        return 0
    def _get_properies(self):
        '''
        Get the original pywinauto object's properties.

        Returns an empty dict when pywinauto raises RuntimeError
        (workaround for controls that cannot report their properties).
        '''
        #print type(self.pwa_obj)
        try:
            properties = self.pwa_obj.GetProperties()
        except exceptions.RuntimeError:
            properties = {} #workaround
        return properties
|
    def _get_additional_properties(self):
        '''
        Get additional useful properties, like a handle, access names and
        the wrapped pywinauto type.

        Can be overridden by a derived class.
        '''
        additional_properties = {}
        pwa_app = pywinauto.application.Application()
        #-----Access names
        # Access names are only collected when both the top-level parent
        # and its full control tree can be resolved; any failure simply
        # leaves the key out of the result (hence the try/else chains).
        try:
            #parent_obj = self.pwa_obj.Parent()
            parent_obj = self.pwa_obj.TopLevelParent()
        except:
            pass
        else:
            try:
                #all_controls = parent_obj.Children()
                all_controls = [pwa_app.window_(handle=ch) for ch in pywinauto.findwindows.find_windows(parent=parent_obj.handle, top_level_only=False)]
            except:
                pass
            else:
                # Collect every unique best-match name that resolves back
                # to this control, shortest name first.
                access_names = []
                uniq_names = pywinauto.findbestmatch.build_unique_dict(all_controls)
                for uniq_name, obj in uniq_names.items():
                    if uniq_name != '' and obj.WrapperObject() == self.pwa_obj:
                        access_names.append(uniq_name)
                access_names.sort(key=len)
                additional_properties.update({'Access names' : access_names})
        #-----
        #-----pwa_type
        additional_properties.update({'pwa_type' : str(type(self.pwa_obj))})
        #---
        #-----handle
        # Not every wrapped object exposes a native window handle.
        try:
            additional_properties.update({'handle' : str(self.pwa_obj.handle)})
        except:
            pass
        #---
        return additional_properties
def _get_children(self):
'''
Return original pywinauto's object children & names
[(control_text, swapy_obj),...]
'''
def _get_name_control(control):
try:
texts = control.Texts()
except exceptions.WindowsError:
texts = ['Unknown control name2!'] #workaround for WindowsError: [Error 0] ...
except exceptions.RuntimeError:
texts = ['Unknown control name3!'] #workaround for RuntimeError: GetButtonInfo failed for button with command id 256
while texts.count(''):
texts.remove('')
text = ', '.join(texts)
if not text:
u_names = []
for uniq_name, obj in uniq_names.items():
if uniq_name != '' and obj.WrapperObject() == control:
#if uniq_name != '' and obj == control:
u_names.append(uniq_name)
if u_names:
u_names.sort(key=len)
name = u_names[-1]
else:
name = 'Unknown control name1!'
else:
name = text
return (name, self._get_swapy_object(control))
pwa_app = pywinauto.application.Application()
try:
parent_obj = self.pwa_obj.TopLevelParent()
except pywinauto.controls.HwndWrapper.InvalidWindowHandle:
#For non vis |
vitalti/sapl | sapl/api/forms.py | Python | gpl-3.0 | 7,717 | 0.00013 | from django.db.models import Q
from django.forms.fields import CharField, MultiValueField
from django.forms.widgets import MultiWidget, TextInput
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django_filters.filters import DateFilter, MethodFilter, ModelChoiceFilter
from rest_framework import serializers
from rest_framework.compat import django_filters
from rest_framework.filters import FilterSet
from sapl.base.models import Autor, TipoAutor
from sapl.parlamentares.models import Legislatura
from sapl.utils import generic_relations_for_model
class SaplGenericRelationSearchFilterSet(FilterSet):
    """FilterSet whose ``q`` parameter searches ``nome`` plus every
    generic-relation search field registered for the model."""

    q = MethodFilter()

    def filter_q(self, queryset, value):
        # Split the free-text query on spaces; each word builds an
        # OR-group (q_fs) over all searchable fields, and the groups are
        # ANDed together so every word must match somewhere.
        query = value.split(' ')
        if query:
            q = Q()
            for qtext in query:
                if not qtext:
                    continue
                q_fs = Q(nome__icontains=qtext)

                order_by = []

                for gr in generic_relations_for_model(self._meta.model):
                    sgr = gr[1]
                    for item in sgr:
                        if item.related_model != self._meta.model:
                            continue
                        # Only the first searchable field of each relation
                        # contributes to the final ordering.
                        flag_order_by = True
                        for field in item.fields_search:
                            if flag_order_by:
                                flag_order_by = False
                                order_by.append('%s__%s' % (
                                    item.related_query_name(),
                                    field[0])
                                )
                            # if len(field) == 3 and field[2](qtext) is not
                            # None:
                            # A 2-tuple is (field, lookup); a 3-tuple also
                            # carries a value-transform callable.
                            q_fs = q_fs | Q(**{'%s__%s%s' % (
                                item.related_query_name(),
                                field[0],
                                field[1]): qtext if len(field) == 2
                                else field[2](qtext)})

                q = q & q_fs

            if q:
                queryset = queryset.filter(q).order_by(*order_by)

        return queryset
class SearchForFieldWidget(MultiWidget):
    """Two side-by-side text inputs (field names and values)."""

    def __init__(self, attrs=None):
        # A pair of plain text inputs, rendered by the MultiWidget machinery.
        MultiWidget.__init__(self, (TextInput, TextInput), attrs)

    def decompress(self, value):
        # MultiWidget contract: split a compressed value into one value per
        # sub-widget; None means both inputs start empty.
        return [None, None] if value is None else value
class SearchForFieldField(MultiValueField):
    """MultiValueField pairing two CharFields: field lookups and values."""

    widget = SearchForFieldWidget

    def __init__(self, *args, **kwargs):
        subfields = (CharField(), CharField())
        super(SearchForFieldField, self).__init__(subfields, *args, **kwargs)

    def compress(self, parameters):
        # Keep the raw [lookups, values] pair; empty submissions become None.
        return parameters if parameters else None
class SearchForFieldFilter(django_filters.filters.MethodFilter):
    # Method filter whose form field accepts a (field lookups, values) pair.
    field_class = SearchForFieldField
class AutorChoiceFilterSet(SaplGenericRelationSearchFilterSet):
    """Autor search: free text (``q``), author type, or name."""

    q = MethodFilter()
    tipo = ModelChoiceFilter(queryset=TipoAutor.objects.all())

    class Meta:
        model = Autor
        fields = ['q',
                  'tipo',
                  'nome', ]

    def filter_q(self, queryset, value):
        # Reuse the generic-relation search, then de-duplicate and sort by
        # name so the result works as a choice list.
        return SaplGenericRelationSearchFilterSet.filter_q(
            self, queryset, value).distinct('nome').order_by('nome')
class AutorSearchForFieldFilterSet(AutorChoiceFilterSet):
    """Autor filter where ``q`` carries two comma-separated lists:
    field lookups and the values to match against them."""

    q = SearchForFieldFilter()

    class Meta(AutorChoiceFilterSet.Meta):
        pass

    def filter_q(self, queryset, value):
        # value is a [lookups, values] pair of comma-separated strings.
        # Split into local lists instead of overwriting value[0]/value[1]
        # in place, so the caller's list is no longer mutated.
        keys = value[0].split(',')
        raw_values = value[1].split(',')

        params = {}
        for key, v in zip(keys, raw_values):
            if v in ['True', 'False']:
                # Booleans arrive as text; the ORM accepts '1'/'0'.
                v = '1' if v == 'True' else '0'
            params[key] = v
        return queryset.filter(**params).distinct('nome').order_by('nome')
class AutoresPossiveisFilterSet(FilterSet):
    """Filter the Autor queryset down to authors that are *possible* at a
    given reference date (``data_relativa``) for a given author type.

    The per-model rules live in ``filter_<model>`` methods that are
    dispatched dynamically from the ``qs`` property.
    """

    data_relativa = DateFilter(method='filter_data_relativa')
    tipo = MethodFilter()

    class Meta:
        model = Autor
        fields = ['data_relativa', 'tipo', ]

    def filter_data_relativa(self, queryset, name, value):
        # Intentionally a no-op: data_relativa is consumed in the qs
        # property, not as a direct queryset filter.
        return queryset

    def filter_tipo(self, queryset, value):
        try:
            tipo = TipoAutor.objects.get(pk=value)
        except:
            raise serializers.ValidationError(_('Tipo de Autor inexistente.'))

        qs = queryset.filter(tipo=tipo)

        return qs

    @property
    def qs(self):
        qs = super().qs

        data_relativa = self.form.cleaned_data['data_relativa'] \
            if 'data_relativa' in self.form.cleaned_data else None

        tipo = self.form.cleaned_data['tipo'] \
            if 'tipo' in self.form.cleaned_data else None

        if not tipo and not data_relativa:
            return qs

        if tipo:
            # no try/except needed: the pk was already validated in filter_tipo
            tipo = TipoAutor.objects.get(pk=tipo)
            if not tipo.content_type:
                return qs

        # Dispatch to filter_<model> for the author type's content type;
        # unknown models fall through untouched.
        filter_for_model = 'filter_%s' % tipo.content_type.model

        if not hasattr(self, filter_for_model):
            return qs

        if not data_relativa:
            data_relativa = timezone.now()

        return getattr(self, filter_for_model)(qs, data_relativa).distinct()

    def filter_parlamentar(self, queryset, data_relativa):
        # leaves of absence (afastamentos) are not taken into account
        legislatura_relativa = Legislatura.objects.filter(
            data_inicio__lte=data_relativa,
            data_fim__gte=data_relativa).first()

        # Mandate started by the reference date and either still open or
        # ending on/after it.
        q = Q(
            parlamentar_set__mandato__data_inicio_mandato__lte=data_relativa,
            parlamentar_set__mandato__data_fim_mandato__isnull=True) | Q(
            parlamentar_set__mandato__data_inicio_mandato__lte=data_relativa,
            parlamentar_set__mandato__data_fim_mandato__gte=data_relativa)

        if legislatura_relativa.atual():
            q = q & Q(parlamentar_set__ativo=True)

        return queryset.filter(q)

    def filter_comissao(self, queryset, data_relativa):
        # NOTE(review): the 2nd and 3rd Q terms below are identical
        # duplicates — one is redundant, or one was probably meant to use
        # data_fim_comissao__gte. Confirm against the original intent.
        return queryset.filter(
            Q(comissao_set__data_extincao__isnull=True,
              comissao_set__data_fim_comissao__isnull=True) |
            Q(comissao_set__data_extincao__gte=data_relativa,
              comissao_set__data_fim_comissao__isnull=True) |
            Q(comissao_set__data_extincao__gte=data_relativa,
              comissao_set__data_fim_comissao__isnull=True) |
            Q(comissao_set__data_extincao__isnull=True,
              comissao_set__data_fim_comissao__gte=data_relativa) |
            Q(comissao_set__data_extincao__gte=data_relativa,
              comissao_set__data_fim_comissao__gte=data_relativa),
            comissao_set__data_criacao__lte=data_relativa)

    def filter_frente(self, queryset, data_relativa):
        # Created by the reference date and not yet extinct (or extinct later).
        return queryset.filter(
            Q(frente_set__data_extincao__isnull=True) |
            Q(frente_set__data_extincao__gte=data_relativa),
            frente_set__data_criacao__lte=data_relativa)

    def filter_bancada(self, queryset, data_relativa):
        return queryset.filter(
            Q(bancada_set__data_extincao__isnull=True) |
            Q(bancada_set__data_extincao__gte=data_relativa),
            bancada_set__data_criacao__lte=data_relativa)

    def filter_bloco(self, queryset, data_relativa):
        return queryset.filter(
            Q(bloco_set__data_extincao__isnull=True) |
            Q(bloco_set__data_extincao__gte=data_relativa),
            bloco_set__data_criacao__lte=data_relativa)

    def filter_orgao(self, queryset, data_relativa):
        # in the original implementation there were no rules to apply for orgao
        return queryset
|
nk113/tastypie-rpc-proxy | rpc_proxy/test.py | Python | bsd-3-clause | 3,241 | 0.002468 | # -*- coding: utf-8 -*-
import base64
import inspect
import json
import logging
import requests
import types
from django.conf import settings
from django.core.management import call_command
from django_nose import FastFixtureTestCase
from functools import wraps
from mock import patch
from tastypie.test import ResourceTestCa | se, TestApiClient
from rpc_proxy.proxies import get_setting
INITIAL_DATA = ('initial_data',)
TEST_DATA = ('test_data',)
logger = logging.getLogger(__name__)
def mock_request(obj, method, url, **kwargs):
    """
    Stand-in for requests' ``Session.request``: route the call through
    tastypie's TestApiClient and convert the resulting django
    HttpResponse back into a ``requests.models.Response``.
    """
    client = TestApiClient()
    authentication = 'Basic %s' % base64.b64encode(':'.join([
        get_setting('SUPERUSER_USERNAME', None),
        get_setting('SUPERUSER_PASSWORD', None),
    ]))

    if method == 'GET':
        data = kwargs.get('params', {})
        djresponse = client.get(url, data=data, authentication=authentication)
    elif method == 'POST':
        data = json.loads(kwargs.get('data', '{}'))
        djresponse = client.post(url, data=data, authentication=authentication)
    elif method == 'PUT':
        data = json.loads(kwargs.get('data', '{}'))
        djresponse = client.put(url, data=data, authentication=authentication)
    elif method == 'PATCH':
        data = json.loads(kwargs.get('data', '{}'))
        djresponse = client.patch(url, data=data, authentication=authentication)
    elif method == 'DELETE':
        data = kwargs.get('params', {})
        djresponse = client.delete(url, data=data, authentication=authentication)

    # convert django.http.HttpResponse to requests.models.Response
    response = requests.models.Response()
    response.status_code = djresponse.status_code
    response.headers = {}
    # Copy each header independently: the original wrapped both copies in
    # a single bare try/except, so a missing 'content-type' also silently
    # dropped 'location'.
    for header in ('content-type', 'location'):
        try:
            response.headers[header] = djresponse[header]
        except KeyError:
            pass  # header not present on the django response
    response.encoding = requests.utils.get_encoding_from_headers(response.headers)
    response._content = djresponse.content

    return response
def mock_cache_set(key, value, timeout=None):
    """No-op replacement for tastypie's ``SimpleCache.set`` used in tests."""
    return None
def mock_api(func, **decorator_kwargs):
    """Wrap *func* so HTTP requests and cache writes are mocked while it runs."""
    @patch('requests.sessions.Session.request', mock_request)
    @patch('tastypie.cache.SimpleCache.set', mock_cache_set)
    @wraps(func)
    def patched(*args, **kwargs):
        return func(*args, **kwargs)
    return patched
class TestCase(FastFixtureTestCase):
    """
    Don't be smart in test cases!
    """
    fixtures = INITIAL_DATA

    def __new__(cls, name):
        # When the suite is configured against a remote API (API_URL set),
        # wrap every test_* method with mock_api so HTTP requests and cache
        # writes are served by the local test client instead.
        testcase = super(TestCase, cls).__new__(cls)

        if get_setting('API_URL', None):
            try:
                func_type = types.UnboundMethodType  # python 2
            except:
                func_type = types.FunctionType  # python 3
            for name, func in inspect.getmembers(testcase):
                if isinstance(func, func_type) and name.startswith('test_'):
                    setattr(testcase, name, mock_api(func))
        return testcase

    def setUp(self):
        # Load per-test fixtures on top of the class-level initial data.
        call_command('loaddata', *TEST_DATA)

        super(TestCase, self).setUp()
class Proxy(TestCase):
    """
    Marker base class for proxy test cases — same behavior as TestCase.

    Don't be smart in test cases!

    CAVEAT: Proxy classes have to be imported within each test method
    to mock the requests.
    """
    pass
|
osks/pylyskom | tests/test_datatypes.py | Python | gpl-2.0 | 6,849 | 0.004672 | # -*- coding: utf-8 -*-
import pytest
from .mocks import MockSocket
from pylyskom.errors import ReceiveError
from pylyskom.connection import ReceiveBuffer
from pylyskom.datatypes import ArrayInt32, Int32, String, ConfType, ExtendedConfType
def test_Array_can_parse_empty_array_with_star_format():
    # Pass the wire data as bytes, consistent with every sibling test
    # in this module (the original wrapped it in a one-element list).
    s = MockSocket(b"0 *")
    buf = ReceiveBuffer(s)
    res = ArrayInt32.parse(buf)
    assert res == []
def test_Array_can_parse_empty_array_with_normal_format():
# This is generally not sent by the server, because empty arrays
# are sent as "0 *", but I think we should handle it anyway.
s = MockSocket(b"0 { }")
buf = ReceiveBuffer(s)
res = ArrayInt32.parse(buf)
assert res == []
def test_Array_can_parse_array_non_zero_length_with_star_special_case():
s = MockSocket(b"5 *") # length 5 but no array content
buf = ReceiveBuffer(s)
res = ArrayInt32.parse(buf)
assert res == []
def test_String_can_parse_hollerith_string():
s = MockSocket(b"7Hfoo bar")
buf = ReceiveBuffer(s)
res = String.parse(buf)
assert res == b"foo bar"
def test_ConfType_length():
ct = ConfType()
assert ConfType.LENGTH == 4
assert len(ct) == ConfType.LENGTH
def test_ConfType_default_constructor():
ct = ConfType()
assert ct[0] == 0
assert ct.rd_prot == 0
assert ct[1] == 0
assert ct.original == 0
assert ct[2] == 0
assert ct.secret == 0
assert ct[3] == 0
assert ct.letterbox == 0
def test_ConfType_to_string():
ct = ConfType()
assert ct.to_string() == b"0000"
ct = ConfType([1, 1, 1, 1])
assert ct.to_string() == b"1111"
def test_ExtendedConfType_length():
ct = ExtendedConfType()
assert ExtendedConfType.LENGTH == 8
assert len(ct) == ExtendedConfType.LENGTH
def test_ExtendedConfType_default_constructor():
ct = ExtendedConfType()
assert ct.to_string() == b"00000000"
def test_ExtendedConfType_rd_prot():
ct = ExtendedConfType([1, 0, 0, 0, 0, 0, 0, 0])
assert ct.rd_prot == 1
assert ct.original == 0
assert ct.secret == 0
assert ct.letterbox == 0
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 0
assert ct.reserved2 == 0
assert ct.reserved3 == 0
def test_ExtendedConfType_original():
ct = ExtendedConfType([0, 1, 0, 0, 0, 0, 0, 0])
assert ct.rd_prot == 0
assert ct.original == 1
assert ct.secret == 0
assert ct.letterbox == 0
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 0
assert ct.reserved2 == 0
assert ct.reserved3 == 0
def test_ExtendedConfType_secret():
ct = ExtendedConfType([0, 0, 1, 0, 0, 0, 0, 0])
assert ct.rd_prot == 0
assert ct.original == 0
assert ct.secret == 1
assert ct.letterbox == 0
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 0
assert ct.reserved2 == 0
assert ct.reserved3 == 0
def test_ExtendedConfType_letterbox():
ct = ExtendedConfType([0, 0, 0, 1, 0, 0, 0, 0])
assert ct.rd_prot == 0
assert ct.original == 0
assert ct.secret == 0
assert ct.letterbox == 1
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 0
assert ct.reserved2 == 0
assert ct.reserved3 == 0
def test_ExtendedConfType_allow_anonymous():
ct = ExtendedConfType([0, 0, 0, 0, 1, 0, 0, 0])
assert ct.rd_prot == 0
assert ct.original == 0
assert ct.secret == 0
assert ct.letterbox == 0
assert ct.allow_anonymous == 1
assert ct.forbid_secret == 0
assert ct.reserved2 == 0
assert ct.reserved3 == 0
def test_ExtendedConfType_forbid_secret():
ct = ExtendedConfType([0, 0, 0, 0, 0, 1, 0, 0])
assert ct.rd_prot == 0
assert ct.original == 0
assert ct.secret == 0
assert ct.letterbox == 0
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 1
assert ct.reserved2 == 0
assert ct.reserved3 == 0
def test_ExtendedConfType_reserved2():
ct = ExtendedConfType([0, 0, 0, 0, 0, 0, 1, 0])
assert ct.rd_prot == 0
assert ct.original == 0
assert ct.secret == 0
assert ct.letterbox == 0
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 0
assert ct.reserved2 == 1
assert ct.reserved3 == 0
def test_ExtendedConfType_reserved3():
ct = ExtendedConfType([0, 0, 0, 0, 0, 0, 0, 1])
assert ct.rd_prot == 0
assert ct.original == 0
assert ct.secret == 0
assert ct.letterbox == 0
assert ct.allow_anonymous == 0
assert ct.forbid_secret == 0
assert ct.reserved2 == 0
assert ct.reserved3 == 1
def test_ExtendedConfType_constructor_can_convert_ConfType():
    # An ExtendedConfType built from a 4-flag ConfType keeps those flags
    # and zero-fills the four extended positions.
    conf_type = ConfType([1, 1, 1, 1])
    extended = ExtendedConfType(conf_type)
    assert extended.to_string() == b"11110000"
def test_ExtendedConfType_to_string():
ct1 = ExtendedConfType()
assert ct1.to_string() == b"00000000"
ct2 = ExtendedConfType([1, 0, 1, 0,
1, 0, 1, 0])
assert ct2.to_string() == b"10101010"
ct3 = ExtendedConfType([1, 1, 1, 1,
1, 1, 1, 1])
assert ct3.to_string() == b"11111111"
def test_ExtendedConfType_parse():
    # Parsing requires a trailing delimiter after the eight flag bits.
    with pytest.raises(ReceiveError):
        # Must have an extra character, so this will fail
        ect = ExtendedConfType.parse(ReceiveBuffer(MockSocket(b"00110011")))

    ect = ExtendedConfType.parse(ReceiveBuffer(MockSocket(b"00110011 ")))
    assert ect.to_string() == b"00110011"
def test_ArrayInt32_parse():
a = ArrayInt32.parse(ReceiveBuffer(MockSocket(b"3 { 17 4711 0 }")))
for v in a:
assert isinstance(v, Int32)
assert a.to_string() == b"3 { 17 4711 0 }"
def test_ArrayInt32_empty_array():
a = ArrayInt32([])
assert a.to_string() == b"0 { }"
def test_ArrayInt32_constructor_convert_elements():
a = ArrayInt32([ 17, 4711, 0 ])
assert a.to_string() == b"3 { 17 4711 0 }"
def test_ArrayInt32_setitem_convert_element():
a = ArrayInt32([0, 0])
a[0] = 17
a[1] = 4711
assert a.to_string() == b"2 { 17 4711 }"
def test_ArrayInt32_append_convert_element():
a = ArrayInt32()
a.append(17)
a.append(4711)
assert a.to_string() == b"2 { 17 4711 }"
def test_ArrayInt32_insert_convert_element():
a = ArrayInt32()
a.insert(0, 17)
a.insert(1, 4711)
assert a.to_string() == b"2 { 17 4711 }"
#def test_ArrayInt32_repr():
# a = ArrayInt32([1, 2, 3])
# assert repr(a) == b"ArrayInt32([1, 2, 3])"
def test_ArrayInt32_add():
a = ArrayInt32([1, 2, 3])
b = ArrayInt32([4, 5, 6])
a = a + b
assert a.to_string() == b"6 { 1 2 3 4 5 6 }"
def test_ArrayInt32_add_convert_element():
a = ArrayInt32([1, 2, 3])
b = [4, 5, 6]
a = a + b
assert a.to_string() == b"6 { 1 2 3 4 5 6 }"
def test_ArrayInt32_extend_convert_element():
a = ArrayInt32([1, 2, 3])
b = [4, 5, 6]
a.extend(b)
assert a.to_string() == b"6 { 1 2 3 4 5 6 }"
|
mdsmus/MusiContour | tests/test_contour.py | Python | gpl-3.0 | 12,322 | 0.000487 | # -*- coding: utf-8 -*-
import contour.contour as contour
from contour.contour import Contour
import py
def test_build_classes_card():
fn = contour.build_classes_card
assert fn(4) == [(4, 1, (0, 1, 2, 3), True), (4, 2, (0, 1, 3, 2), False),
(4, 3, (0, 2, 1, 3), True), (4, 4, (0, 2, 3, 1), False),
(4, 5, (0, 3, 1, 2), False), (4, 6, (0, 3, 2, 1), False),
(4, 7, (1, 0, 3, 2), True), (4, 8, (1, 3, 0, 2), True)]
def test_build_classes():
fn = contour.build_classes
assert fn(4) == [[(2, 1, (0, 1), True)],
[(3, 1, (0, 1, 2), True), (3, 2, (0, 2, 1), False)],
[(4, 1, (0, 1, 2, 3), True), (4, 2, (0, 1, 3, 2), False),
(4, 3, (0, 2, 1, 3), True), (4, 4, (0, 2, 3, 1), False),
(4, 5, (0, 3, 1, 2), False), (4, 6, (0, 3, 2, 1), False),
(4, 7, (1, 0, 3, 2), True), (4, 8, (1, 3, 0, 2), True)]]
def test_contour_class():
assert contour.contour_class(6, 117) == Contour([0, 5, 4, 2, 1, 3])
def test_subsets_grouped():
n = {(0, 1, 3, 2): [[0, 1, 4, 2]],
(0, 2, 1, 3): [[0, 3, 1, 4]],
(0, 2, 3, 1): [[0, 3, 4, 2]],
(0, 3, 1, 2): [[0, 3, 1, 2]],
(1, 3, 0, 2): [[3, 1, 4, 2]]}
assert contour.subsets_grouped(n, "prime") == \
'Prime form < 0 1 3 2 > (1)\n< 0 1 4 2 >\n' + \
'Prime form < 0 2 1 3 > (1)\n< 0 3 1 4 >\n' + \
'Prime form < 0 2 3 1 > (1)\n< 0 3 4 2 >\n' + \
'Prime form < 0 3 1 2 > (1)\n< 0 3 1 2 >\n' + \
'Prime form < 1 3 0 2 > (1)\n< 3 1 4 2 >'
def test_rotation_1():
    # Rotating without an argument shifts the contour one step to the left.
    assert Contour([1, 4, 9, 9, 2, 1]).rotation() == [4, 9, 9, 2, 1, 1]
def test_rotation_2():
cseg = Contour([1, 4, 9, 9, 2, 1])
assert cseg.rotation(1) == [4, 9, 9, 2, 1, 1]
def test_rotation_3():
cseg = Contour([1, 4, 9, 9, 2, 1])
assert cseg.rotation(2) == [9, 9, 2, 1, 1, 4]
def test_rotation_4():
cseg = Contour([1, 4, 9, 9, 2, 1])
assert cseg.rotation(20) == [9, 9, 2, 1, 1, 4]
def test_retrograde():
cseg = Contour([1, 4, 9, 9, 2, 1])
assert cseg.retrograde() == [1, 2, 9, 9, 4, 1]
def test_inversion():
cseg = Contour([1, 4, 9, 9, 2, 1])
assert cseg.inversion() == [8, 5, 0, 0, 7, 8]
def test_translation():
cseg = Contour([1, 4, 9, 9, 2, 1])
assert cseg.translation() == [0, 2, 3, 3, 1, 0]
def test_prime_form_marvin_laprade_1():
cseg = Contour([1, 4, 9, 2])
assert cseg.prime_form_marvin_laprade() == [0, 2, 3, 1]
def test_prime_form_marvin_laprade_2():
cseg = Contour([5, 7, 9, 1])
assert cseg.prime_form_marvin_laprade() == [0, 3, 2, 1]
def test_prime_form_marvin_laprade_2b():
    # Renamed: the module defined test_prime_form_marvin_laprade_2 twice,
    # so this later duplicate silently shadowed the first and added no
    # coverage. With a unique name both now run.
    cseg = Contour([5, 7, 9, 1])
    assert cseg.prime_form_marvin_laprade() == [0, 3, 2, 1]
def test_prime_form_marvin_laprade_3():
cseg = Contour([0, 2, 1, 3, 4])
assert cseg.prime_form_marvin_laprade() == [0, 2, 1, 3, 4]
def test_prime_form_marvin_laprade_5():
cseg = Contour([0, 1, 2, 1, 2])
assert cseg.prime_form_marvin_laprade() == [[0, 1, 3, 2, 4], [0, 2, 4, 1, 3]]
def test_prime_form_sampaio_1():
cseg = Contour([1, 4, 9, 2])
assert cseg.prime_form_sampaio() == [0, 2, 3, 1]
def test_prime_form_sampaio_2():
cseg = Contour([5, 7, 9, 1])
assert cseg.prime_form_sampaio() == [0, 3, 2, 1]
def test_prime_form_sampaio_2b():
    # Renamed: the module defined test_prime_form_sampaio_2 twice, so this
    # later duplicate silently shadowed the first. With a unique name both
    # now run.
    cseg = Contour([5, 7, 9, 1])
    assert cseg.prime_form_sampaio() == [0, 3, 2, 1]
def test_prime_form_sampaio_3():
cseg = Contour([0, 2, 1, 3, 4])
assert cseg.prime_form_sampaio() == [0, 1, 3, 2, 4]
def test_prime_form_sampaio_5():
cseg = Contour([0, 1, 2, 1, 2])
assert cseg.prime_form_sampaio() == [[0, 1, 3, 2, 4], [0, 1, 4, 2, 3],
[0, 2, 3, 1, 4], [0, 2, 4, 1, 3]]
def test_unique_prime_form_test_1():
cseg = Contour([0, 2, 1, 3, 4])
algorithm = "prime_form_marvin_laprade"
assert cseg.unique_prime_form_test(algorithm) == False
def test_unique_prime_form_test_2():
cseg = Contour([0, 2, 1, 3, 4])
algorithm = "prime_form_sampaio"
assert cseg.unique_prime_form_test(algorithm) == True
def test_subsets_1():
cseg = Contour([2, 8, 12, 9])
assert cseg.subsets(2) == [[2, 8], [2, 9], [2, 12], [8, 9], [8, 12],
[12, 9]]
def test_subsets_2():
cseg = Contour([2, 8, 12, 9])
assert cseg.subsets(3) == [[2, 8, 9], [2, 8, 12], [2, 12, 9], [8, 12, 9]]
def test_subsets_prime():
cseg = Contour([0, 3, 1, 4, 2])
assert cseg.subsets_prime(4) == {(0, 1, 3, 2): [[0, 1, 4, 2]],
(0, 2, 1, 3): [[0, 3, 1, 4]],
(0, 2, 3, 1): [[0, 3, 4, 2]],
(0, 3, 1, 2): [[0, 3, 1, 2]],
(1, 3, 0, 2): [[3, 1, 4, 2]]}
def test_subsets_normal():
cseg = Contour([0, 3, 1, 4, 2])
assert cseg.subsets_normal(4) == {(0, 1, 3, 2): [[0, 1, 4, 2]],
(0, 2, 1, 3): [[0, 3, 1, 4]],
(0, 2, 3, 1): [[0, 3, 4, 2]],
(0, 3, 1, 2): [[0, 3, 1, 2]],
(2, 0, 3, 1): [[3, 1, 4, 2]]}
def test_all_subsets():
cseg = Contour([2, 8, 12, 9])
assert cseg.all_subsets() == [[2, 8], [2, 9], [2, 12], [8, 9], [8, 12],
[12, 9], [2, 8, 9], [2, 8, 12], [2, 12, 9],
[8, 12, 9], [2, 8, 12, 9]]
def test_all_subsets_prime():
cseg = Contour([2, 8, 12])
assert cseg.all_subsets_prime() == {(0, 1): [[2, 8], [2, 12], [8, 12]],
(0, 1, 2): [[2, 8, 12]]}
def test_all_subsets_normal():
cseg = Contour([2, 8, 7])
assert cseg.all_subsets_normal() == {(0, 1): [[2, 7], [2, 8]],
(0, 2, 1): [[2, 8, 7]],
(1, 0): [[8, 7]]}
def test_subsets_adj():
cseg = Contour([2, | 8, 12, 9, 5, 7, 3, 12, 3, 7])
assert cseg.subsets_adj(4) == [[2, 8, 12, 9], [8, 12, 9, 5], [12, 9, 5, 7],
[9, 5, 7, 3], [5, 7, 3, 12], [7, 3, 12, 3],
[3, 12, 3, 7]]
def test_cps_position():
cseg = Con | tour([2, 8, 12, 9, 5, 7, 3, 12, 3, 7])
assert cseg.cps_position() == [(2, 0), (8, 1), (12, 2), (9, 3), (5, 4),
(7, 5), (3, 6), (12, 7), (3, 8), (7, 9)]
def test_reduction_morris_1():
cseg = Contour([0, 4, 3, 2, 5, 5, 1])
assert cseg.reduction_morris() == [[0, 2, 1], 2]
def test_reduction_morris_2():
cseg = Contour([7, 10, 9, 0, 2, 3, 1, 8, 6, 2, 4, 5])
assert cseg.reduction_morris() == [[2, 3, 0, 1], 3]
def test_maxima():
n = [(0, 0), (1, 1), (3, 2), (2, 3), (4, 4)]
assert contour.maxima(n) == [(0, 0), (3, 2), (4, 4)]
def test_minima():
n = [(0, 0), (1, 1), (3, 2), (2, 3), (4, 4)]
assert contour.minima(n) == [(0, 0), (2, 3), (4, 4)]
def test_contour_rotation_classes():
assert contour.contour_rotation_classes(4) == [[0, 1, 2, 3],
[0, 1, 3, 2],
[0, 2, 1, 3]]
def test_interval_succession():
cseg = Contour([0, 1, 3, 2])
assert cseg.interval_succession() == [1, 2, -1]
def test_internal_diagonals_1():
cseg = Contour([0, 2, 3, 1])
n = 1
assert cseg.internal_diagonals(n) == [1, 1, -1]
def test_internal_diagonals_2():
cseg = Contour([0, 2, 3, 1])
n = 2
assert cseg.internal_diagonals(n) == [1, -1]
def test_internal_diagonals_3():
cseg = Contour([1, 0, 4, 3, 2])
n = 1
assert cseg.internal_diagonals(n) == [-1, 1, -1, -1]
def test_internal_diagonals_4():
cseg = Contour([1, 0, 4, 3, 2])
n = 2
assert cseg.internal_diagonals(n) == [1, 1, -1]
def test_comparison_matrix_1():
cseg = Contour([0, 2, 3, 1])
assert cseg.comparison_matrix() == [[0, 2, 3, 1], [0, 1, 1, 1], [-1, 0, 1, -1],
[- |
huntzhan/magic-constraints | magic_constraints/argument.py | Python | mit | 2,768 | 0 | # -*- coding: utf-8 -*-
from __future__ import (
division, absolute_import, print_function, unicode_literals,
)
from builtins import * # noqa
from future.builtins.disabled import * # noqa
from magic_constraints.exception import MagicSyntaxError, MagicTypeError
def transform_to_slots(constraints_package, *args, **kwargs):
    """Map positional and keyword arguments onto the parameter slots
    declared by *constraints_package*.

    Returns a list with one value per declared parameter, filling in
    declared defaults for parameters the caller omitted. Raises
    MagicSyntaxError on too many positionals, unknown keywords, a keyword
    that re-assigns an already-filled slot, or any slot left unfilled.
    """

    # Sentinel marking a slot not yet assigned; a fresh class per call so
    # it can never collide with a caller-supplied value.
    class UnFill(object):
        pass

    plen = len(constraints_package.parameters)

    if len(args) > plen:
        raise MagicSyntaxError(
            'argument length unmatched.',
            parameters=constraints_package.parameters,
            args=args,
        )

    slots = [UnFill] * plen
    unfill_count = plen

    # 1. fill args.
    for i, val in enumerate(args):
        slots[i] = val
    unfill_count -= len(args)

    # 2. fill kwargs.
    for key, val in kwargs.items():
        if key not in constraints_package.name_hash:
            raise MagicSyntaxError(
                'invalid keyword argument',
                parameters=constraints_package.parameters,
                key=key,
            )

        i = constraints_package.name_hash[key]
        if slots[i] is not UnFill:
            # Slot already taken by a positional (or duplicate keyword).
            raise MagicSyntaxError(
                'key reassignment error.',
                parameters=constraints_package.parameters,
                key=key,
            )

        slots[i] = val
        unfill_count -= 1

    # 3. fill defaults if not set.
    # 3.1. deal with the case that default not exists
    #      (start_of_defaults < 0 means no parameter declares a default).
    default_begin = constraints_package.start_of_defaults
    if default_begin < 0:
        default_begin = plen
    # 3.2 fill defaults.
    for i in range(default_begin, plen):
        parameter = constraints_package.parameters[i]
        j = constraints_package.name_hash[parameter.name]
        if slots[j] is UnFill:
            slots[j] = parameter.default
            unfill_count -= 1

    # 4. test if slots contains UnFill.
    if unfill_count != 0:
        raise MagicSyntaxError(
            'slots contains unfilled argument(s).',
            parameters=constraints_package.parameters,
            slots=slots,
        )

    return slots
def check_and_bind_arguments(parameters, sl | ots, bind_callback):
plen = len(parameters)
for i in range(plen):
arg = slots[i]
parameter = parameters[i]
wrapper = parameter.wrapper_for_deferred_checking()
# defer checking by wrapping the element of slot.
if wrapper:
slots[i] = wrapper(arg)
# check now.
elif not parameter.check_instance(arg):
raise MagicTypeError(
'argument unmatched.',
parameter=parameter,
argument=arg,
)
# bind.
bind_callback(parameter.name, arg)
|
YuxuanLing/trunk | trunk/code/study/python/Fluent-Python-example-code/attic/sequences/slice_dump.py | Python | gpl-3.0 | 581 | 0.001721 | """
>>> sd = SliceDump()
>>> sd[1]
1
>>> sd[2:5]
slice(2, 5, None)
>>> sd[:2]
slice(None, 2, None)
>>> sd[7:]
slice(7, None, None)
>>> sd[:]
slice(None, None, None)
>>> sd[1:9:3]
slice(1, 9, 3)
>>> sd[1:9:3, 2:3]
(slice(1, 9, 3), slice(2, 3, None))
>>> s = sd[1:9:3]
>>> s.indices(20)
(1, 9, 3)
>>> s.indices(5)
(1, 5, 3)
>>> s.indices(1)
(1 | , 1, 3)
>>> s.indices(0)
(0, 0, 3)
"""
class SliceDump:
    """Diagnostic sequence: subscripting returns exactly the key it got."""

    def __getitem__(self, key):
        # Echo back whatever subscript Python passed in: an int, a slice,
        # or a tuple of slices for multi-axis subscripts.
        return key
mgx2/python-nvd3 | examples/discreteBarChart.py | Python | mit | 899 | 0.003337 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Examples for Python-nvd3 is a Python wrapper for NVD3 graph library.
NVD3 is an attempt to build re-usable charts and chart components
for d3.js without taking away the power that d3.js gives you.
Project location : https://github.com/areski/python-nvd3
"""
from nvd3 import discreteBarChart
#Open Fil | e for test
# Build the example chart first, then write the rendered HTML.
chart_type = "discreteBarChart"  # renamed: 'type' shadowed the builtin
chart = discreteBarChart(name='mygraphname', height=400, width=600)
chart.set_containerheader("\n\n<h2>" + chart_type + "</h2>\n\n")

xdata = ["A", "B", "C", "D", "E", "F", "G"]
ydata = [3, 12, -10, 5, 25, -7, 2]

extra_serie = {"tooltip": {"y_start": "", "y_end": " cal"}}
chart.add_serie(y=ydata, x=xdata, extra=extra_serie)
chart.buildhtml()

# Context manager guarantees the file is closed even if writing fails
# (the original opened the file up front and closed it manually).
with open('test_discreteBarChart.html', 'w') as output_file:
    output_file.write(chart.htmlcontent)
|
NLeSC/noodles | noodles/lib/__init__.py | Python | apache-2.0 | 3,960 | 0 | """
Coroutine streaming module
==========================
.. note::
In a break with tradition, some classes in this module have lower case
names because they tend to be used as function decorators.
We use coroutines to communicate messages between different components
in the Noodles runtime. Coroutines can have input or output in two ways
*passive* and *active*. An example:
.. code-block:: python
def f_pulls(coroutine):
for msg in coroutine:
print(msg)
def g_produces(lines):
for l in lines:
yield lines
lines = ['aap', 'noot', 'mies']
f_pulls(g_produces(lines))
This prints the words 'aap', 'noot' and 'mies'. This same program could be
written where the co-routine is the one receiving messages:
.. code-block:: python
def f_receives():
while True:
| msg = yield
print(msg)
def g_pushes(coroutine, lines):
for l in lines:
coroutine.send(l)
sink = f_receives()
sink.send(None) # the co-routine needs to be initialised
# alternatively, .next() does the same as .send(None)
g_pushes(sink, lines)
The action of creating a c | oroutine and setting it to the first `yield`
statement can be performed by a little decorator:
.. code-block:: python
from functools import wraps
def coroutine(f):
@wraps(f)
def g(*args, **kwargs):
sink = f(*args, **kwargs)
sink.send(None)
return sink
return g
Pull and push
-------------
The |pull| and |push| classes capture the idea of pushing and pulling
coroutines, wrapping them in an object. These objects can then be chained
using the ``>>`` operator. Example:
.. code-block:: python
>>> from noodles.lib import (pull_map, pull_from)
>>> @pull_map
... def square(x):
... return x*x
...
>>> squares = pull_from(range(10)) >> square
>>> list(squares)
[0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
Queues
------
Queues in python are thread-safe objects. We can define a new |Queue| object
that uses the python `queue.Queue` to buffer and distribute messages over
several threads:
.. code-block:: python
import queue
class Queue(object):
def __init__(self):
self._q = queue.Queue()
def source(self):
while True:
msg = self._q.get()
yield msg
self._q.task_done()
@coroutine
def sink(self):
while True:
msg = yield
self._q.put(msg)
        def wait(self):
            self._q.join()
Note, that both ends of the queue are, as we call it, passive. We could make
an active source (it would become a normal function), taking a call-back as
an argument. However, we're designing the Noodles runtime so that it easy to
interleave functionality. Moreover, the `Queue` object is only concerned
with the state of its own queue. The outside universe is only represented by
the `yield` statements, thus preserving the principle of encapsulation.
"""
from .decorator import (
decorator)
from .coroutine import (
coroutine)
from .streams import (
stream, pull, push, pull_map, push_map, sink_map,
broadcast, branch, patch, pull_from, push_from)
from .connection import (
Connection)
from .queue import (
Queue, EndOfQueue, FlushQueue)
from .thread_pool import (
thread_counter, thread_pool)
from .utility import (
object_name, look_up, importable, deep_map, inverse_deep_map, unwrap,
is_unwrapped)
__all__ = [
'decorator', 'coroutine',
'stream', 'pull', 'push', 'pull_map', 'push_map', 'sink_map',
'broadcast', 'branch', 'patch', 'pull_from', 'push_from',
'Connection', 'Queue', 'EndOfQueue', 'FlushQueue',
'thread_pool', 'thread_counter',
'object_name', 'look_up', 'importable', 'deep_map', 'inverse_deep_map',
'unwrap', 'is_unwrapped']
|
rcbops-qe/horizon-selenium | pages/navigation_bars.py | Python | apache-2.0 | 9,234 | 0 | import basepage
class NavigationBars(basepage.BasePage):
def expand_project_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_admin_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-admin"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_identity_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-identity"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_developer_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-developer"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
"""
Project > Compute > Resource
"""
def expand_project_compute(self):
NavigationBars.expand_project_panel(self)
elm = self.driv | er.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-compute"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
| elm.click()
else:
pass
def click_project_compute_overview(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/"]').click()
def click_project_compute_instance(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/instances/"]').click()
def click_project_compute_volumes(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/volumes/"]').click()
def click_project_compute_images(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/images/"]').click()
def click_project_compute_access_and_security(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/access_and_security/"]').click()
"""
Project > Network > Resource
"""
def expand_project_network(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-network"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_network_network_topology(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/network_topology/"]').click()
def click_project_network_networks(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/networks/"]').click()
def click_project_network_routers(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/routers/"]').click()
def click_project_network_loadbalancers(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/ngloadbalancersv2/"]').click()
"""
Project > Orchestration > Resource
"""
def expand_project_orchestration(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-orchestration"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_orchestration_stacks(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/"]').click()
def click_project_orchestration_resource_types(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/resource_types/"]').click()
def click_project_orchestration_template_versions(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/template_versions/"]').click()
"""
Project > Object Store > Resource
"""
def expand_project_object_store(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-object_store"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_object_store_containers(self):
NavigationBars.expand_project_object_store(self)
self.driver.find_element_by_css_selector(
'a[href="/project/containers/"]').click()
"""
Admin > System > Resource
"""
def expand_admin_system(self):
NavigationBars.expand_admin_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-admin-admin"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_admin_system_overview(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/"]').click()
def click_admin_system_hypervisors(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/hypervisors/"]').click()
def click_admin_system_host_aggregates(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/aggregates/"]').click()
def click_admin_system_instances(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/instances/"]').click()
def click_admin_system_volumes(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/volumes/"]').click()
def click_admin_system_flavors(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/flavors/"]').click()
def click_admin_system_images(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/images/"]').click()
def click_admin_system_networks(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/networks/"]').click()
def click_admin_system_routers(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/routers/"]').click()
def click_admin_system_floating_ips(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/floating_ips/"]').click()
def click_admin_system_defaults(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/defaults/"]').click()
def click_admin_system_metadata_definitions(self):
NavigationBars.expand_admin_system(self)
self.driver.find_ele |
jorvis/biocode | gff/report_gff_intron_and_intergenic_stats.py | Python | mit | 9,713 | 0.008545 | #!/usr/bin/env python3
import argparse
from biocode import utils, gff
def main():
'''
This script reports statistics on the areas of a genome where features aren't - introns and
intergenic space. Pass a valid GFF3 file (along with FASTA data) and get a report like this:
Molecule count: 9
Gene count: 4171
Intergenic space count: 4061
Average intergenic space distance: 361.7 bp
Median intergenic space distance: 245 bp
Minimum intergenic space distance: 0 bp
Maximum intergenic space distance: 6272 bp
Intron count: 10533
Intron space count: 989024
Average intron size: 93.9 bp
Median intron size: 63 bp
Minimum intron size: 2 bp
Maximum intron size: 1676 bp
Optionally, you can pass the path to a PNG file to be created using the --histogram parameter,
which will generate a size distribution histogram with two overlaying plots - one representing
the distribu | tion of intergenic region sizes and the other the intron lengths. Because these
can often have long tails, you can limit both the Y- and X-axes values with the --ylimit and
--xlimit options, respectively.
FASTA:
If your FASTA isn't embedded at the end of your GFF3 file after a ##FASTA directive you'll need
to specify the --fasta option in this script and pass it as a separate file.
Definitions:
Intergenic space was a little ambiguous to me | as I started writing this. Does one count the space from
the beginning of the contig until the first gene, or only between them? What about short contigs which
have no annotated genes at all? From the Sequence Ontology:
SO:0000605: A region containing or overlapping no genes that is bounded on either side by a gene, or
bounded by a gene and the end of the chromosome.
To my reading, this includes contig ends but not gene-less contigs. To that end, I include the
former in intergenic space reporting but include the latter as a separate statistic.
Author: Joshua Orvis (jorvis AT gmail)
'''
parser = argparse.ArgumentParser( description='Reports statistics of reference gene coverage and extension by aligned RNA-seq transcript data.')
## output file to be written
parser.add_argument('-i', '--input_gff3', type=str, required=True, help='GFF3 file of a reference annotation' )
parser.add_argument('-g', '--histogram', type=str, required=False, help='Optional path to a histogram of intron/intergenic space size distribution to be created (PNG)' )
parser.add_argument('-x', '--xlimit', type=int, required=False, help='Use this if you want to limit the X-axis of the histogram (feature length)' )
parser.add_argument('-y', '--ylimit', type=int, required=False, help='Use this if you want to limit the Y-axis of the histogram (feature count)' )
parser.add_argument('-f', '--fasta', type=str, required=False, help='Required if you don\'t have GFF3 with embedded FASTA')
args = parser.parse_args()
(assemblies, features) = gff.get_gff3_features(args.input_gff3)
if args.fasta is not None:
seqs = utils.fasta_dict_from_file(args.fasta)
for seq_id in seqs:
if seq_id in assemblies:
assemblies[seq_id].residues = seqs[seq_id]['s']
assemblies[seq_id].length = len(assemblies[seq_id].residues)
## things to keep stats on and report
total_molecule_count = len(assemblies)
total_gene_count = 0
## this number is NOT just the total genes N - 1, since there can be multiple molecules
# genes can overlap, etc.
total_intergenic_space_count = 0
total_intergenic_space_residues = 0
intergenic_distances = list()
total_contig_residues = 0
empty_contig_residues = 0
total_intron_count = 0
total_intron_residues = 0
intron_sizes = list()
############################
## Calculation section
############################
for asm_id in assemblies:
#print("DEBUG: processing assembly: {0}".format(asm_id))
assembly = assemblies[asm_id]
genes = sorted(assembly.genes())
total_gene_count += len(genes)
previous_gene_loc = None
# we should have a length here
if assembly.length is None or assembly.length == 0:
raise Exception("ERROR: Detected assembly with undefined or 0 length: {0}".format(assembly.id))
if total_gene_count == 0:
empty_contig_residues += assembly.length
continue
total_contig_residues += assembly.length
first_gene_loc = None
last_gene_loc = None
for gene in genes:
gene_loc = gene.location_on(assembly)
# if this is the first gene, track the number of bases from the start of the molecule here
if first_gene_loc is None:
total_intergenic_space_count += 1
intergenic_distance = gene_loc.fmin
total_intergenic_space_residues += intergenic_distance
intergenic_distances.append(intergenic_distance)
first_gene_loc = gene_loc
if previous_gene_loc is not None:
## skip this gene if it overlaps the previous
if gene_loc.fmin < previous_gene_loc.fmax:
if gene_loc.fmax > previous_gene_loc.fmax:
previous_gene_loc = gene_loc
else:
total_intergenic_space_count += 1
intergenic_distance = gene_loc.fmin - previous_gene_loc.fmax
total_intergenic_space_residues += intergenic_distance
intergenic_distances.append(intergenic_distance)
for mRNA in gene.mRNAs():
introns = mRNA.introns( on=assembly )
for intron in sorted(introns):
total_intron_count += 1
intron_loc = intron.location_on(assembly)
intron_size = intron_loc.fmax - intron_loc.fmin
#if intron_size > 0:
#print("\tDEBUG: found mRNA:{0} intron {1}-{2} ({3} bp)".format(mRNA.id, intron_loc.fmin, intron_loc.fmax, intron_size))
if intron_size < 0:
print("\tWARN: Intron size ({1}) < 0 reported in gene {0}".format(gene.id, intron_size))
intron_sizes.append(intron_size)
total_intron_residues += intron_size
previous_gene_loc = gene_loc
last_gene_loc = previous_gene_loc
if last_gene_loc is not None:
total_intergenic_space_count += 1
intergenic_distance = assembly.length - last_gene_loc.fmax
total_intergenic_space_residues += intergenic_distance
intergenic_distances.append(intergenic_distance)
if total_intergenic_space_count == 0:
avg_intergenic_space_dist = None
intergenic_distances = None
median_int_space_dist = None
else:
avg_intergenic_space_dist = total_intergenic_space_residues / total_intergenic_space_count
intergenic_distances = sorted(intergenic_distances)
median_int_space_dist = intergenic_distances[ int(len(intergenic_distances)/2) ]
avg_intron_size = total_intron_residues / total_intron_count
intron_sizes = sorted(intron_sizes)
median_intron_size = intron_sizes[int(len(intron_sizes)/2)]
############################
## Reporting section
############################
print("\nMolecule count: {0}".format(total_molecule_count))
print("Gene count: {0}".format(total_gene_count) )
print("\nTotal molecule bases: {0} bp".format(total_contig_residues) )
print("Empty molecule bases: {0} bp".format(empty_contig_residues) )
if total_intergenic_space_count > 0:
print("Intergenic space count: {0}".format(total_intergenic_space_count) )
print("Average intergenic space distance: {0:.1f} bp".format(avg_intergenic_space_dist) )
print("Median intergenic space distance: {0} bp".format(median_int_space_dist) )
|
oleg-chubin/let_me_play | let_me_app/migrations/0008_auto_20150809_1341.py | Python | apache-2.0 | 474 | 0.00211 | # -*- coding: utf-8 -*-
from __future__ import un | icode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: re-declares Event.start_at with
    # db_index=True (AlterField requires the full field definition).

    dependencies = [
        ('let_me_app', '0007_auto_20150723_2238'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='start_at',
            field=models.DateTimeField(verbose_name='date started', db_index=True),
            preserve_default=True,
        ),
    ]
|
MozillaSecurity/peach | Peach/Publishers/raw.py | Python | mpl-2.0 | 11,798 | 0.000424 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import socket
import time
import sys
from Peach.publisher import Publisher
class RawEther(Publisher):
    """
    Raw ethernet-frame publisher (AF_PACKET, so Linux-specific): sends and
    receives frames on a given network interface.
    """

    def __init__(self, interface, timeout=0.1):
        """
        @type	interface: string
        @param	interface: Network interface to bind to (e.g. "eth0")
        @type	timeout: number
        @param	timeout: How long to wait for response
        """
        Publisher.__init__(self)
        self._host = None
        self._socket = None
        self._interface = interface
        self._timeout = float(timeout)

    def stop(self):
        """
        Close connection if open.
        """
        self.close()

    def connect(self):
        if self._socket is not None:
            # Close out old socket first
            self._socket.close()

        self._socket = socket.socket(socket.AF_PACKET, socket.SOCK_RAW)
        self._socket.bind((self._interface, 0))

    def close(self):
        if self._socket is not None:
            self._socket.close()
        self._socket = None

    def send(self, data):
        """
        Send data via sendall.

        @type	data: bytes
        @param	data: Data to send
        """
        self._socket.sendall(data)

    def receive(self, size=None):
        """
        Receive data from the socket.

        If ``size`` is given, do a single blocking read of up to ``size``
        bytes.  Otherwise poll non-blocking for up to ``self._timeout``
        seconds and return whatever arrived (possibly empty).

        @rtype: bytes
        @return: received data.
        """
        if size is not None:
            return self._socket.recv(size)

        self._socket.setblocking(0)
        begin_time = time.time()
        # Bug fix: accumulate bytes, not str -- recv() returns bytes on
        # Python 3, so the old `ret = ""` raised TypeError on first data.
        ret = b""
        try:
            while not ret and time.time() - begin_time <= self._timeout:
                try:
                    ret += self._socket.recv(10000)
                except BlockingIOError:
                    # No data ready yet; keep polling until the timeout.
                    # (The old code matched a Windows-only error string and
                    # re-raised on POSIX, aborting the poll loop early.)
                    continue
        except socket.error as e:
            print("Socket:Receive(): Caught socket.error [{}]".format(e))
        self._socket.setblocking(1)
        return ret
class Raw(Publisher):
    """
    Raw AF_INET socket publisher: sends and receives raw packets bound to a
    given interface/address.
    """

    def __init__(self, interface, timeout=0.1):
        """
        @type	interface: string
        @param	interface: Interface/address to bind to
        @type	timeout: number
        @param	timeout: How long to wait for response
        """
        Publisher.__init__(self)
        self._host = None
        self._socket = None
        self._interface = interface
        self._timeout = float(timeout)

    def stop(self):
        """
        Close connection if open.
        """
        self.close()

    def connect(self):
        if self._socket is not None:
            # Close out old socket first
            self._socket.close()

        self._socket = socket.socket(socket.AF_INET, socket.SOCK_RAW)
        self._socket.bind((self._interface, 0))

    def close(self):
        if self._socket is not None:
            self._socket.close()
        self._socket = None

    def send(self, data):
        """
        Send data via sendall.

        @type	data: bytes
        @param	data: Data to send
        """
        self._socket.sendall(data)

    def receive(self, size=None):
        """
        Receive data from the socket.

        If ``size`` is given, do a single blocking read of up to ``size``
        bytes.  Otherwise poll non-blocking for up to ``self._timeout``
        seconds and return whatever arrived (possibly empty).

        @rtype: bytes
        @return: received data.
        """
        if size is not None:
            return self._socket.recv(size)

        self._socket.setblocking(0)
        begin_time = time.time()
        # Bug fix: accumulate bytes, not str -- recv() returns bytes on
        # Python 3, so the old `ret = ""` raised TypeError on first data.
        ret = b""
        try:
            while not ret and time.time() - begin_time <= self._timeout:
                try:
                    ret += self._socket.recv(10000)
                except BlockingIOError:
                    # No data ready yet; keep polling until the timeout.
                    # (The old code matched a Windows-only error string and
                    # re-raised on POSIX, aborting the poll loop early.)
                    continue
        except socket.error as e:
            print("Socket:Receive(): Caught socket.error [{}]".format(e))
        self._socket.setblocking(1)
        return ret
class RawIp(Publisher):
    """
    Raw IP publisher: the caller provides the complete IP header in send()
    (IP_HDRINCL is enabled on the socket).
    """

    def __init__(self, interface, timeout=0.1):
        """
        @type	interface: string
        @param	interface: Interface/address to bind to
        @type	timeout: number
        @param	timeout: How long to wait for response
        """
        Publisher.__init__(self)
        self._host = None
        self._socket = None
        self._interface = interface
        self._timeout = float(timeout)

    def stop(self):
        """
        Close connection if open.
        """
        self.close()

    def connect(self):
        if self._socket is not None:
            # Close out old socket first
            self._socket.close()

        # Include IP headers: IPPROTO_RAW + IP_HDRINCL means send() data must
        # contain the full IP header.
        self._socket = socket.socket(socket.AF_INET,
                                     socket.SOCK_RAW,
                                     socket.IPPROTO_RAW)
        self._socket.bind((self._interface, 0))
        self._socket.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)

    def close(self):
        if self._socket is not None:
            self._socket.close()
        self._socket = None

    def send(self, data):
        """
        Send data via sendall.

        @type	data: bytes
        @param	data: Data to send (complete IP packet, header included)
        """
        self._socket.sendall(data)

    def receive(self, size=None):
        """
        Receive data from the socket.

        If ``size`` is given, do a single blocking read of up to ``size``
        bytes.  Otherwise poll non-blocking for up to ``self._timeout``
        seconds and return whatever arrived (possibly empty).

        @rtype: bytes
        @return: received data.
        """
        if size is not None:
            return self._socket.recv(size)

        self._socket.setblocking(0)
        begin_time = time.time()
        # Bug fix: accumulate bytes, not str -- recv() returns bytes on
        # Python 3, so the old `ret = ""` raised TypeError on first data.
        ret = b""
        try:
            while not ret and time.time() - begin_time <= self._timeout:
                try:
                    ret += self._socket.recv(10000)
                except BlockingIOError:
                    # No data ready yet; keep polling until the timeout.
                    # (The old code matched a Windows-only error string and
                    # re-raised on POSIX, aborting the poll loop early.)
                    continue
        except socket.error as e:
            print("Socket:Receive(): Caught socket.error [{}]".format(e))
        self._socket.setblocking(1)
        return ret
class Raw6(Publisher):
"""
A simple Raw publisher.
"""
def __init__(self, dest_addr, timeout=0.1):
"""
@type host: string
@param host: Remote host
@type timeout: number
@param timeout: How long to wait for response
"""
Publisher.__init__(self)
self._host = None
self._socket = None
self._dest_addr = dest_addr
self._timeout = float(timeout)
def stop(self):
"""
Close connection if open.
"""
self.close()
def connect(self):
if self._socket is not None:
# Close out old socket first
self._socket.close()
try:
self._socket = socket.socket(23,
socket.SOCK_RAW,
socket.IPPROTO_IPV6)
except:
self._socket = socket.socket(socket.AF_INET6,
socket.SOCK_RAW,
socket.IPPROTO_IPV6)
def close(self):
if self._socket is not None:
self._socket.close()
self._socket = None
def send(self, data):
"""
Send data via sendall.
@type data: string
@param data: Data to send
"""
try:
se |
LucasMagnum/pyexplain | pyexplain/attach/examples/models.py | Python | mit | 547 | 0 | # coding: utf-8
from django.db import models
from attach.mo | dels import ContentTypeModel
class Example(ContentTypeModel):
    """
    Examples are attached as code snippets to help the
    user understand a given item.
    """
    # Optional display name and the snippet body itself (verbose_name
    # strings stay in Portuguese: they are user-facing runtime data).
    name = models.CharField(u'Nome', max_length=150, blank=True)
    code = models.TextField(u'Código')

    class Meta:
        verbose_name = 'Exemplo'
        verbose_name_plural = 'Exemplos'
        ordering = ['-added']  # newest first; `added` presumably comes from ContentTypeModel -- confirm

    def __unicode__(self):
        return self.name
|
quentinhardy/odat | UtlHttp.py | Python | lgpl-3.0 | 4,947 | 0.03679 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from Http import Http
import logging
from sys import exit
from Utils import ErrorSQLRequest, checkOptionsGivenByTheUser
from Constants import *
class UtlHttp (Http):
    '''
    Allow the user to send HTTP requests from the database server through
    the Oracle UTL_HTTP PL/SQL package.
    '''

    def __init__(self, args):
        '''
        Constructor
        '''
        logging.debug("UtlHttp object created")
        Http.__init__(self, args)

    def setTimeout(self, value):
        '''
        Set the timeout value with utl_http.set_transfer_timeout(value)
        (default value = 60 s in Oracle).
        Return True if OK, otherwise return the exception.
        '''
        status = self.__execProc__('utl_http.set_transfer_timeout', options=[value])
        if isinstance(status, Exception):
            logging.warning("Impossible to set the timeout value: {0}".format(self.cleanError(status)))
            return status
        logging.info('The timeout value is turned on {0} secs'.format(value))
        return True

    def sendGetRequest(self, url):
        '''
        Send a HTTP GET request to url.
        Return an ErrorSQLRequest on SQL failure, otherwise the response data
        (empty string when the query returned nothing usable).
        '''
        logging.info('Send a HTTP GET request to {0}'.format(url))
        query = "select utl_http.request('{0}') as data from dual".format(url)
        response = self. __execThisQuery__(query=query, ld=['data'])
        if isinstance(response, Exception):
            logging.info('Error with the SQL request {0}: {1}'.format(query, str(response)))
            return ErrorSQLRequest(response)
        elif isinstance(response, list) and isinstance(response[0], dict):
            return response[0]['data']
        logging.info('Enough privileges')
        return ''

    def sendRequest(self, ip, port, filename):
        '''
        Send the full HTTP request stored in `filename` to ip:port through
        UTL_HTTP; response lines are echoed back via DBMS_OUTPUT.
        '''
        params = self.parseRequest(nameFileRequest=filename)
        # identity test (`is None`) for the parse-failure sentinel
        if params is None:
            return False
        request = "DECLARE req utl_http.req; res utl_http.resp; buffer varchar2(4000); BEGIN req := utl_http.begin_request('http://{0}:{1}{2}', '{3}','{4}');".format(ip, port, params['url'], params['method'], params['version'])
        for key in list(params['header'].keys()):
            request += "utl_http.set_header(req, '{0}','{1}');".format(key, params['header'][key])
        if params['body'] is not None:
            request += "utl_http.write_text(req, '{0}');".format(params['body'])
        request += "res := utl_http.get_response(req); BEGIN LOOP utl_http.read_line(res, buffer); dbms_output.put_line(buffer); END LOOP; utl_http.end_response(res); exception when utl_http.end_of_body then utl_http.end_response(res); END; END;"
        response = self.__execPLSQLwithDbmsOutput__(request=request)
        return response

    def testAll(self):
        '''
        Test all functions
        '''
        self.args['print'].subtitle("UTL_HTTP library ?")
        logging.info('Try to make the server send a HTTP request to 0.0.0.0 with the UTL_HTTP library')
        response = self.sendGetRequest('http://0.0.0.0/')
        # Bug fix: the original mixed `and`/`or` without parentheses
        # ("A and B or C or D"), so the privilege-error substring checks ran
        # even when `response` was NOT an exception.
        if isinstance(response, Exception) and (
                self.ERROR_NO_PRIVILEGE in str(response)
                or self.ERROR_NO_PRIVILEGE_INVALID_ID in str(response)  # Oracle 10g
                or self.ERROR_XML_DB_SECU_NOT_INST in str(response)):
            logging.info('Not enough privileges: {0}'.format(str(response)))
            self.args['print'].badNews("KO")
            return False
        self.args['print'].goodNews("OK")
        return True
def runUtlHttpModule(args):
    '''
    Run the UTL_HTTP module (CLI entry point).
    '''
    if checkOptionsGivenByTheUser(args, ["test-module", "scan-ports", "send"]) == False:
        return EXIT_MISS_ARGUMENT
    utlHttp = UtlHttp(args)
    utlHttp.connection(stopIfError=True)
    utlHttp.setTimeout(5)
    if args['test-module'] == True:
        args['print'].title("Test if the UTL_HTTP library can be used")
        utlHttp.testAll()
    # Option 1: replay a raw HTTP request read from a file
    if args['send'] is not None:
        args['print'].title("Send the HTTP request stored in the {0} file".format(args['send'][2]))
        data = utlHttp.sendRequest(args['send'][0], args['send'][1], args['send'][2])
        if isinstance(data, Exception):
            args['print'].badNews("Impossible to send the request: {0}".format(data))
        else:
            args['print'].goodNews("Response from the server:\n{0}".format(data))
    # Option 2: TCP port scan through UTL_HTTP
    if args['scan-ports'] is not None:
        ports = []
        portSpec = args['scan-ports'][1]
        if "," in portSpec:
            ports = portSpec.split(',')
        elif '-' in portSpec:
            startEnd = portSpec.split('-')
            # Bug fix: +1 makes the upper bound inclusive ("80-85" previously
            # skipped port 85).
            for aPort in range(int(startEnd[0]), int(startEnd[1]) + 1):
                ports.append(str(aPort))
            if ports == []:
                logging.critical("The second parameter ('{0}') is not a valid: cancelation...".format(portSpec))
                return -1
        elif portSpec.isdigit() == True:
            ports = [portSpec]
        else:
            logging.critical("The second parameter ('{0}') is not a valid port: cancelation...".format(portSpec))
            return -1
        args['print'].title("Scan ports ({0}) of {1} ".format(portSpec, args['scan-ports'][0]))
        resultats = utlHttp.scanTcpPorts(httpObject=utlHttp, ip=args['scan-ports'][0], ports=ports)
        utlHttp.printScanPortResults(resultats)
    utlHttp.close()
|
simplegeo/trialcoverage | twisted/plugins/trialcoveragereporterplugin.py | Python | gpl-2.0 | 1,724 | 0.00174 | #! /usr/bin/env python
from zope.interface import implements
from twisted.trial.itrial import IReporter
from twisted.plugin import IPlugin
# register a plugin that can create our CoverageReporter. The reporter itself
# lives separately, in trialcoverage/trialcoverage.py.
# note that this trialcoveragereporterplugin.py file is *not* in a package:
# there is no __init__.py in our parent directory. This is important, because
# otherwise ours would fight with Twisted's. When trial looks for plugins, it
# merely executes all the *.py files it finds in any twisted/plugins/
# subdirectories of anything on sys.path . The namespace that results from
# executing these .py files is examined for instances which provide both
# IPlugin and the target interface (in this case, trial is looking for
# IReporter instances). Each such instan | ce tells the application how to
# create a plugin by naming the module and class that should be instantiated.
# When installing our package via setup.py, arrange for this file to be
# installed to the system-wide twisted/plugins/ directory.
class _Reporter(object):
    """Plugin descriptor telling trial how to build the coverage reporter.

    Trial only inspects the attributes set here; the reporter class itself is
    named (not imported) via `module`/`klass`, per the module comment above.
    """
    implements(IPlugin, IReporter)

    def __init__(self, name, module, description, longOpt, shortOpt, klass):
        self.name = name                  # human-readable reporter name
        self.module = module              # dotted module path holding the class
        self.description = description    # shown in trial's reporter help
        self.longOpt = longOpt            # --<longOpt> command-line flag
        self.shortOpt = shortOpt          # single-letter flag, or None
        self.klass = klass                # class name to instantiate
# Register the colorless coverage reporter under trial's
# --bwverbose-coverage option; the class lives in trialcoverage/trialcoverage.py.
bwcov = _Reporter("Code-Coverage Reporter (colorless)",
                  "trialcoverage.trialcoverage",
                  description="Colorless verbose output (with 'coverage' coverage)",
                  longOpt="bwverbose-coverage",
                  shortOpt=None,
                  klass="CoverageTextReporter")
|
hill-a/stable-baselines | stable_baselines/trpo_mpi/utils.py | Python | mit | 1,008 | 0.00496 | import numpy as np
def add_vtarg_and_adv(seg, | gamma, lam):
"""
Compute target value using TD(lambda) estimator, and advantage with GAE(lambda)
:param seg: (dict) the current segment of the trajectory (see traj_segment_generator return for more information)
:param gamma: (float) Discount factor
:param lam: (float) GAE factor
"""
# last element is only used for last vtarg, but we already zeroed it if last new = 1
episode_starts = np.append(seg["episode_starts"], False)
vpred = np.append(seg["vpred"], seg["nextvpred"]) |
rew_len = len(seg["rewards"])
seg["adv"] = np.empty(rew_len, 'float32')
rewards = seg["rewards"]
lastgaelam = 0
for step in reversed(range(rew_len)):
nonterminal = 1 - float(episode_starts[step + 1])
delta = rewards[step] + gamma * vpred[step + 1] * nonterminal - vpred[step]
seg["adv"][step] = lastgaelam = delta + gamma * lam * nonterminal * lastgaelam
seg["tdlamret"] = seg["adv"] + seg["vpred"]
|
zitouni/ucla_zigbee_phy | src/python/crc16.py | Python | bsd-3-clause | 1,996 | 0.043587 | #!/usr/bin/env python
"""
Translation from a C code posted to a forum on the Internet.
@translator Thomas Schmid
"""
from array import array
def reflect(crc, bitnum):
    """Return *crc* with its lowest *bitnum* bits mirrored (bit 0 <-> bit bitnum-1)."""
    out = 0
    for b in range(bitnum):
        if crc & (1 << (bitnum - 1 - b)):
            out |= 1 << b
    return out
def crcbitbybit(p):
    """CRC-16 (poly 0x1021, reflected in/out, zero init) of string *p*,
    computed bit by bit with 16 augmented zero bits."""
    crc = 0
    for ch in p:
        bits = reflect(ord(ch), 8)
        mask = 0x80
        for _ in range(8):  # one iteration per data bit, MSB of the reflected byte first
            carry = crc & 0x8000
            crc = (crc << 1) & 0xFFFF
            if bits & mask:
                crc |= 1
            if carry:
                crc ^= 0x1021
            mask >>= 1
    # Push 16 augmented zero bits through the register.
    for _ in range(16):
        carry = crc & 0x8000
        crc <<= 1
        if carry:
            crc ^= 0x1021
    return reflect(crc, 16)
class CRC16(object):
    """ Class interface, like the Python library's cryptographic
        hash functions (which CRC's are definitely not.)

        NOTE(review): unlike hashlib objects, update() REPLACES the current
        value with the CRC of the given string; successive calls do not
        accumulate over the concatenated input.
    """
    def __init__(self, string=''):
        # Current 16-bit CRC value.
        self.val = 0
        if string:
            self.update(string)

    def update(self, string):
        # Recompute the CRC over `string` (does not accumulate -- see class docstring).
        self.val = crcbitbybit(string)

    def checksum(self):
        # Two-character big-endian string form of the CRC.
        return chr(self.val >> 8) + chr(self.val & 0xff)

    def intchecksum(self):
        # CRC as a plain integer.
        return self.val

    def hexchecksum(self):
        # CRC as a 4-digit lowercase hex string.
        return '%04x' % self.val

    def copy(self):
        # Independent clone carrying the same CRC value.
        clone = CRC16()
        clone.val = self.val
        return clone
# Module self-test: runs at import time and checks the implementation against
# a known 20-byte frame (presumably an 802.15.4 MAC header, given the repo
# context -- confirm).  NOTE(review): `import struct` mid-module and the
# import-time side effect are unconventional, but `crc` is a module attribute
# so this is left as-is.
crc = CRC16()
#crc.update("123456789")
import struct
crc.update(struct.pack("20B", 0x1, 0x88, 0xe5, 0xff, 0xff, 0xff, 0xff, 0x10, 0x0, 0x10, 0x0, 0x1, 0x80, 0x80, 0xff, 0xff, 0x10, 0x0, 0x20, 0x0))
assert crc.checksum() == '\x02\x82'
|
leanrobot/contestsite | team/scripts/python/correct.py | Python | gpl-3.0 | 79 | 0 | import sys
| import time
sys.stdout.write("stdout! | ")
sys.stderr.write("stderr!")
|
kivy/pyjnius | tests/test_interface.py | Python | mit | 620 | 0 | from __future__ import pri | nt_function
from __future__ import division
from __future__ import absolute_import
import unittest
from jnius import autoclass, JavaException
class Interface(unittest.TestCase):
    """Smoke tests: reflecting a Java interface and its nested public enum."""

    def test_reflect_interface(self):
        iface = autoclass('org.jnius.InterfaceWithPublicEnum')
        self.assertTrue(iface)

    def test_reflect_enum_in_interface(self):
        attitude = autoclass('org.jnius.InterfaceWithPublicEnum$ATTITUDE')
        self.assertTrue(attitude)
        self.assertTrue(attitude.GOOD)
        self.assertTrue(attitude.BAD)
        self.assertTrue(attitude.UGLY)
|
rnoldo/django-avatar | storages/backends/s3.py | Python | bsd-3-clause | 10,680 | 0.003839 | import os
import mimetypes
import warnings
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.conf import settings
from django.core.files.base import File
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured
try:
from S3 import AWSAuthConnection, QueryStringAuthGenerator, CallingFormat
except ImportError:
raise ImproperlyConfigured("Could not load amazon's S3 bindings.\nSee "
"http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134")
ACCESS_KEY_NAME = getattr(settings, 'AWS_S3_ACCESS_KEY_ID', getattr(settings, 'AWS_ACCESS_KEY_ID', None))
SECRET_KEY_NAME = getattr(settings, 'AWS_S3_SECRET_ACCESS_KEY', getattr(settings, 'AWS_SECRET_ACCESS_KEY', None))
HEADERS = getattr(settings, 'AWS_HEADERS', {})
DEFAULT_ACL = getattr(settings, 'AWS_DEFAULT_ACL', 'public-read') #access control policy (private, or public-read)
QUERYSTRING_ACTIVE = getattr(settings, 'AWS_QUERYSTRING_ACTIVE', False)
QUERYSTRING_EXPIRE = getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 60)
SECURE_URLS = getattr(settings, 'AWS_S3_SECURE_URLS', False)
BUCKET_PREFIX = getattr(settings, 'AWS_BUCKET_PREFIX', '')
CALLING_FORMAT = getattr(settings, 'AWS_CALLING_FORMAT', CallingFormat.PATH)
PRELOAD_METADATA = getattr(settings, 'AWS_PRELOAD_METADATA', False)
IS_GZIPPED = getattr(settings, 'AWS_IS_GZIPPED', False)
GZIP_CONTENT_TYPES = getattr(settings, 'GZIP_CONTENT_TYPES', (
'text/css',
'application/javascript',
'application/x-javascript'
))
if IS_GZIPPED:
from gzip import GzipFile
class S3Storage(Storage):
"""Amazon Simple Storage Service"""
def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
access_key=None, secret_key=None, acl=DEFAULT_ACL,
calling_format=CALLING_FORMAT, encrypt=False,
gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
preload_metadata=PRELOAD_METADATA):
warnings.warn(
"The s3 backend is deprecated and will be removed in version 1.2. "
"Use the s3boto backend instead.",
PendingDeprecationWarning
)
self.bucket = bucket
self.acl = acl
self.encrypt = encrypt
self.gzip = gzip
self.gzip_content_types = gzip_content_types
self.preload_metadata = preload_metadata
if encrypt:
try:
import ezPyCrypto
except ImportError:
raise ImproperlyConfigured("Could not load ezPyCrypto.\nSee "
"http://www.freenet.org.nz/ezPyCrypto/ to install it.")
self.crypto_key = ezPyCrypto.key
if not access_key and not secret_key:
access_key, secret_key = self._get_access_keys()
self.connection = AWSAuthConnection(access_key, secret_key,
calling_format=calling_format)
self.generator = QueryStringAuthGenerator(access_key, secret_key,
calling_format=calling_format,
is_secure=SECURE_URLS)
self.generator.set_expires_in(QUERYSTRING_EXPIRE)
self.headers = HEADERS
self._entries = {}
def _get_access_keys(self):
access_key = ACCESS_KEY_NAME
secret_key = SECRET_KEY_NAME
if (access_key or secret_key) and (not access_key or not secret_key):
access_key = os.environ.get(ACCESS_KEY_NAME)
secret_key = os.environ.get(SECRET_KEY_NAME)
if access_key and secret_key:
# Both were provided, so use them
return access_key, secret_key
return None, None
@property
def entries(self):
if self.preload_metadata and not self._entries:
self._entries = dict((entry.key, entry)
for entry in self.connection.list_bucket(self.bucket).entries)
return self._entries
def _get_connection(self):
return AWSAuthConnection(*self._get_access_keys())
def _clean_name(self, name):
# Useful for windows' paths
return os.path.join(BUCKET_PREFIX, os.path.normpath(name).replace('\\', '/'))
def _compress_string(self, s):
"""Gzip a given string."""
zbuf = StringIO()
zfile = GzipFi | le(mode='wb', compresslevel=6, fileobj=zbuf)
zfile.write(s)
zfile.close()
return zbuf.getvalue()
def _put_file(self, name, content):
if self.encrypt:
# Create a key object
key = self.crypto_ke | y()
# Read in a public key
fd = open(settings.CRYPTO_KEYS_PUBLIC, "rb")
public_key = fd.read()
fd.close()
# import this public key
key.importKey(public_key)
# Now encrypt some text against this public key
content = key.encString(content)
content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
if self.gzip and content_type in self.gzip_content_types:
content = self._compress_string(content)
self.headers.update({'Content-Encoding': 'gzip'})
self.headers.update({
'x-amz-acl': self.acl,
'Content-Type': content_type,
'Content-Length' : str(len(content)),
})
response = self.connection.put(self.bucket, name, content, self.headers)
if response.http_response.status not in (200, 206):
raise IOError("S3StorageError: %s" % response.message)
def _open(self, name, mode='rb'):
name = self._clean_name(name)
remote_file = S3StorageFile(name, self, mode=mode)
return remote_file
def _read(self, name, start_range=None, end_range=None):
name = self._clean_name(name)
if start_range is None:
headers = {}
else:
headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
response = self.connection.get(self.bucket, name, headers)
if response.http_response.status not in (200, 206):
raise IOError("S3StorageError: %s" % response.message)
headers = response.http_response.msg
if self.encrypt:
# Read in a private key
fd = open(settings.CRYPTO_KEYS_PRIVATE, "rb")
private_key = fd.read()
fd.close()
# Create a key object, and auto-import private key
key = self.crypto_key(private_key)
# Decrypt this file
response.object.data = key.decString(response.object.data)
return response.object.data, headers.get('etag', None), headers.get('content-range', None)
def _save(self, name, content):
name = self._clean_name(name)
content.open()
if hasattr(content, 'chunks'):
content_str = ''.join(chunk for chunk in content.chunks())
else:
content_str = content.read()
self._put_file(name, content_str)
return name
def delete(self, name):
name = self._clean_name(name)
response = self.connection.delete(self.bucket, name)
if response.http_response.status != 204:
raise IOError("S3StorageError: %s" % response.message)
def exists(self, name):
name = self._clean_name(name)
if self.entries:
return name in self.entries
response = self.connection._make_request('HEAD', self.bucket, name)
return response.status == 200
def size(self, name):
name = self._clean_name(name)
if self.entries:
entry = self.entries.get(name)
if entry:
return entry.size
return 0
response = self.connection._make_request('HEAD', self.bucket, name)
content_length = response.getheader('Content-Length')
return content_length and int(content_length) or 0
def url(self, name):
name = self._clean_name(name)
if QUERYSTRING_ACTIVE:
return self.generator.generate_url('GET', self.bucket, name)
else:
return |
iwxfer/wikitten | library/nlp/semantic_simi.py | Python | mit | 376 | 0.00266 | import spacy
nlp = spacy.load | ('en')
text = open('customer_feedback_627.txt').read()
doc = nlp(text)
for entity in doc.ents:
print(entity.text, entity.label_)
# Determine seman | tic similarities
doc1 = nlp(u'the fries were gross')
doc2 = nlp(u'worst fries ever')
doc1.similarity(doc2)
# Hook in your own deep learning models
nlp.add_pipe(load_my_model(), before='parser') |
weechat/weechat.org | weechat/doc/models.py | Python | gpl-3.0 | 9,804 | 0 | #
# Copyright (C) 2003-2022 Sébastien Helleu <flashcode@flashtux.org>
#
# This file is part of WeeChat.org.
#
# WeeChat.org is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# WeeChat.org is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WeeChat.org. If not, see <https://www.gnu.org/licenses/>.
#
"""Models for "doc" menu."""
from django.db import models
from django.db.models.signals import post_save
from django.utils.safestring import mark_safe
from django.utils.translation import gettext, gettext_noop
from weechat.common.i18n import i18n_autogen
from weechat.common.templatetags.localdate import localdate
from weechat.common.tracker import commits_links, tracker_links
URL_CVE = {
'MITRE': 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=%(cve)s',
'NVD': 'https://nvd.nist.gov/vuln/detail/%(cve)s',
}
URL_CVSS_VECTOR = (
'https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator?'
'vector=%(vector)s&version=3.1'
)
URL_CWE = 'https://cwe.mitre.org/data/definitions/%(cwe)s.html'
SECURITY_SEVERITIES = (
# Translators: this is a severity level for a security vulnerability
(0, gettext_noop('none')),
# Translators: this is a severity level for a security vulnerability
(1, gettext_noop('low')),
# Translators: this is a severity level for a security vulnerability
(2, gettext_noop('medium')),
# Translators: this is a severity level for a security vulnerability
(3, gettext_noop('high')),
# Translators: this is a severity level for a security vulnerability
(4, gettext_noop('critical')),
)
def get_severity(score):
"""Get severity (integer from 0 to 4) from score."""
for i, limit in enumerate([0, 3.9, 6.9, 8.9, 10.0]):
if score <= limit:
return i
return 0
def get_score_bar(score):
"""Return score bar."""
score = round(score)
content = []
content.append(
'<div class="d-inline-flex align-middle severity-flex">'
)
for i in range(0, 10):
severity = get_severity(i + 1)
css_class = f' severity{severity}' if i < score else ''
content.append(
f'<div class="flex-fill severity{css_class}"></div>'
)
content.append('</div>')
return ''.join(content)
class Language(models.Model):
"""A language with at least one translated doc."""
LANG_I18N = {
'cs': gettext_noop('Czech'),
'de': gettext_noop('German'),
'en': gettext_noop('English'),
'es': gettext_noop('Spanish'),
'fr': gettext_noop('French'),
'hu': gettext_noop('Hungarian'),
'it': gettext_noop('Italian'),
'ja': gettext_noop('Japanese'),
'pl': gettext_noop('Polish'),
'pt': gettext_noop('Portuguese'),
'pt_BR': gettext_noop('Portuguese (Brazil)'),
'ru': gettext_noop('Russian'),
'sr': gettext_noop('Serbian'),
'tr': gettext_noop('Turkish'),
}
lang = models.CharField(max_length=8, primary_key=True)
priority = models.IntegerField(default=0)
def __str__(self):
return f'{self.lang} ({self.priority})'
def lang_i18n(self):
"""Return the translated language."""
return gettext(self.LANG_I18N[self.lang])
class Meta:
ordering = ['priority']
class Version(models.Model):
"""A version for docs."""
version = models.CharField(max_length=32, primary_key=True)
priority = models.IntegerField(default=0)
directory = models.CharField(max_length=256, blank=True)
def __str__(self):
return self.version
class Meta:
ordering = ['priority']
class Doc(models.Model):
"""A WeeChat document file."""
NAME_I18N = {
'faq': gettext_noop('FAQ'),
'user': gettext_noop('User\'s guide'),
'plugin_api': gettext_noop('Plugin API reference'),
'scripting': gettext_noop('Scripting guide'),
'quickstart': gettext_noop('Quick Start guide'),
'tester': gettext_noop('Tester\'s guide'),
'dev': gettext_noop('Developer\'s guide'),
'relay_protocol': gettext_noop('Relay protocol'),
}
version = models.ForeignKey(Version, on_delete=models.CASCADE)
name = models.CharField(max_length=64)
devel = models.BooleanField(default=False)
priority = models.IntegerField(default=0)
def __str__(self):
return f'{self.name} ({self.version}, {self.priority})'
def name_i18n(self):
"""Return the translated doc name."""
return gettext(self.NAME_I18N[self.name])
class Meta:
ordering = ['priority']
class Security(models.Model):
"""A security vulnerability in WeeChat."""
visible = models.BooleanField(default=True)
date = models.DateTimeField()
wsa = models.CharField(max_length=64)
cve = models.CharField(max_length=64, blank=True)
cwe_id = models.IntegerField(default=0)
cwe_text = models.CharField(max_length=64)
cvss_vector = models.CharField(max_length=64)
cvss_score = models.DecimalField(max_digits=3, decimal_places=1)
tracker = models.CharField(max_length=64, blank=True)
affected = models.CharField(max_length=64, blank=True)
fixed = models.CharField(max_length=32, blank=True)
release_date = models.DateField(blank=True, null=True)
commits = models.CharField(max_length=1024, blank=True)
scope = models.CharField(max_length=64)
issue = models.TextField()
description = models.TextField()
mitigation = models.TextField(blank=True)
credit = models.TextField(blank=True)
def __str__(self):
return f'{self.wsa}: [{self.scope}] {self.issue} ({self.release_date})'
def date_l10n(self):
"""Return the date formatted with localized date format."""
return localdate(self.date)
def cve_links(self):
"""Return URLs for the CVE."""
if not self.cve:
return {}
return {
name: url % {'cve': self.cve}
for name, url in URL_CVE.items()
}
def cwe_i18n(self):
"""Return the translated vulnerability type."""
if self.cwe_text:
return gettext(self.cwe_text)
return ''
def url_cwe(self):
"""Return URL to CWE detail."""
if self.cwe_id > 0:
return URL_CWE % {'cwe': self.cwe_id}
return ''
def url | _cvss_vector(self):
"""Return URL to CVSS vector detail."""
if self.cvss_vector:
return URL_CVSS_VECTOR % {'vector': self.cvss_vector}
return ''
def url_tracker(self):
"""Return URL with links to tracker items."""
return mark_safe(tracker_links(self.tracker))
def severity_index(se | lf):
"""Return severity index based on CVSS score."""
return get_severity(self.cvss_score)
def severity_i18n(self):
"""Return translated severity based on CVSS score."""
text = dict(SECURITY_SEVERITIES).get(self.severity_index(), '')
return gettext(text) if text else ''
def score_bar(self):
"""Return HTML code with score bar."""
return mark_safe(get_score_bar(self.cvss_score))
def affected_html(self):
"""Return list of affected versions, as HTML."""
list_affected = []
for version in self.affected.split(','):
if '-' in version:
version1, version2 = version.split('-', 1)
list_affected.append(f'{version1} → {version2}')
else:
list_affected.append(version)
return mark_safe(', '.join(list_affected))
def fixed_html(self):
"""Return fixed version, as HTML."""
return mark_safe(f'<span class="text-success fw-bold">'
f'{self.fixed}</span>')
def release_ |
sprockets/sprockets.mixins.redis | setup.py | Python | bsd-3-clause | 2,498 | 0.0004 | import codecs
import sys
import setuptools
def read_requirements_file(req_name):
requirements = []
try:
with codecs.open(req_name, encoding='utf-8') as req_file:
for req_line in req_file:
if '#' in req_line:
req_line = req_line[0:req_line.find('#')].strip()
if req_line:
requirements.append(req_line.strip())
except IOError:
pass
return requirements
install_requires = read_requirements_file('requirements.txt')
setup_requires = read_requirements_file('setup-requirements.txt')
tests_require = read_requirements_file('test-requirements.txt')
if sys.version_info < (2, 7):
tests_require.append('unittest2')
if sys.version_info < (3, 0):
tests_require.append('mock')
setuptools.setup(
name='sprockets.mixins.redis',
version='0.0.0',
description='Tornado handler mixin to provide easy read/write access to Redis',
long_description=codecs.open('README.rst', encoding='utf-8').read(),
url='https://github.com/sprockets/sprockets.mixins.redis.git',
author='AWeber Communications',
author_email='api@aweber.com',
license=codecs.open('LICENSE', encoding='utf-8').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Progra | mming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python : | : 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=['sprockets',
'sprockets.mixins',
'sprockets.mixins.redis'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
namespace_packages=['sprockets',
'sprockets.mixins'],
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
test_suite='nose.collector',
zip_safe=False)
|
huntxu/neutron | neutron/cmd/ovs_cleanup.py | Python | apache-2.0 | 4,457 | 0 | # Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from neutron.agent.common import ovs_lib
from neutron.agent.linux import ip_lib
from neutron.common import config
from neutron.conf.agent import cmd
from neutron.conf.agent import common as agent_config
from neutron.conf.agent.l3 import config as l3_config
from neutron.plugins.ml2.drivers.openvswitch.agent.common import constants
LOG = logging.getLogger(__name__)
# Default ovsdb_timeout value for this script.
# It allows to clean bridges with even thousands of ports.
CLEANUP_OVSDB_TIMEOUT = 600
def setup_conf():
"""Setup the cfg for the clean up utility.
Use separate setup_conf for the utility because there are many options
from the main config that do not apply during clean-up.
"""
conf = cfg.CONF
cmd.register_cmd_opts(cmd.ovs_opts, conf)
l3_config.register_l3_agent_config_opts(l3_config.OPTS, conf)
agent_config.register_interface_driver_opts_helper(conf)
agent_config.register_interface_opts()
conf.set_default("ovsdb_timeout", CLEANUP_OVSDB_TIMEOUT, "OVS")
return conf
def get_bridge_deletable_ports(br):
"""
Return a list of OVS Bridge ports, excluding the ports who should not be
cleaned. such ports are tagged with the 'skip_cleanup' key in external_ids.
"""
return [port.port_name for port in br.get_vif_ports()
if constants.SKIP_CLEANUP not in
br.get_port_external_ids(port.port_name)]
def collect_neutron_ports(bridges):
"""Collect ports created by Neutron from OVS."""
ports = []
for bridge in bridges:
ovs = ovs_lib.OVSBridge(bridge)
ports += get_b | ridge_deletable_ports(ovs)
return ports
def delete_neutron_ports(ports):
"""Delete non-internal ports created by Neutron
Non-internal OVS ports need to be removed manually.
"""
for port in ports:
device = ip_lib.IPDevice(port)
if device.exists():
device.link.delete()
LOG | .info("Deleting port: %s", port)
def main():
"""Main method for cleaning up OVS bridges.
The utility cleans up the integration bridges used by Neutron.
"""
conf = setup_conf()
conf()
config.setup_logging()
do_main(conf)
def do_main(conf):
configuration_bridges = set([conf.ovs_integration_bridge,
conf.external_network_bridge])
ovs = ovs_lib.BaseOVS()
ovs_bridges = set(ovs.get_bridges())
available_configuration_bridges = configuration_bridges & ovs_bridges
if conf.ovs_all_ports:
bridges = ovs_bridges
else:
bridges = available_configuration_bridges
try:
# The ovs_cleanup method not added to the deprecated vsctl backend
for bridge in bridges:
LOG.info("Cleaning bridge: %s", bridge)
ovs.ovsdb.ovs_cleanup(bridge,
conf.ovs_all_ports).execute(check_error=True)
except AttributeError:
# Collect existing ports created by Neutron on configuration bridges.
# After deleting ports from OVS bridges, we cannot determine which
# ports were created by Neutron, so port information is collected now.
ports = collect_neutron_ports(available_configuration_bridges)
for bridge in bridges:
LOG.info("Cleaning bridge: %s", bridge)
ovs = ovs_lib.OVSBridge(bridge)
if conf.ovs_all_ports:
port_names = ovs.get_port_name_list()
else:
port_names = get_bridge_deletable_ports(ovs)
for port_name in port_names:
ovs.delete_port(port_name)
# Remove remaining ports created by Neutron (usually veth pair)
delete_neutron_ports(ports)
LOG.info("OVS cleanup completed successfully")
|
TacticalGoat/reddit | SubDumpPost/subdumppost.py | Python | mit | 5,238 | 0.009927 | #/u/GoldenSights
import traceback
import praw # simple interface to the reddit API, also handles rate limiting of requests
import time
import sqlite3
'''USER CONFIGURATION'''
APP_ID = ""
APP_SECRET = ""
APP_URI = ""
APP_REFRESH = ""
# https://www.reddit.com/comments/3cm1p8/how_to_make_your_bot_use_oauth2/
USERAGENT = ""
#This is a short description of what the bot does. For example "/u/GoldenSights' Newsletter bot"
SUBREDDIT = "CopperplateGothic"
#This is the sub or list of subs to scan for new posts. For a single sub, use "sub1". For multiple subs, use "sub1+sub2+sub3+...". For all use "all"
KEYWORDS = ['Request', 'Submitted', 'Release', 'Concept']
#Any post containing these words will be saved.
RSAVE = False
#Do you want the bot to save via Reddit Saving? Use True or False (Use capitals! no quotations!)
#praw DOES NOT allow comments to be saved. Don't ask me why. This will save the submission the comment is connected to.
MAILME = False
#Do you want the bot to send you a PM when it gets something? Use True or False (Use capitals! No quotations!)
RECIPIENT = "GoldenSights"
#If MAILME is set to True, you will need a name for the PM to go to
MTITLE = "SubDump automated message"
#If MAILME is set to True, you will need the PM to have a subject line.
MHEADER = "Comments containing your keywords:"
#This is part of the message body, on a line above the list of results. You can set it to "" if you just want the list by itself.
SUBDUMP = True
#Do you want the bot to dump into a subreddit as posts? Use True or False (Use capitals! No quotations!)
DSUB = "GoldTesting"
#If SUBDUMP is set to True, you will need to choose a subreddit to submit to.
POSTTITLE = "_title_"
#This is the title of the post that will go in DSUB
#You may use the following injectors to create a dynamic title
#_author_
#_subreddit_
#_score_
#_title_
TRUEURL = False
#If this is True, the dumped post will point to the URL that the orginal submission used.
#If this is False the dumped post will point to the reddit submission permalink
MAXPOSTS = 100
#This is how many posts you want to retrieve all at once. PRAW can download 100 at a time.
WAIT = 20
#This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
'''All done!'''
WAITS = str(WAIT)
try:
import bot
USERAGENT = bot.aG
except ImportError:
pass
sql = sqlite3.connect('sql.db')
print('Loaded SQL Database')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(ID TEXT)')
print('Loaded Completed table')
sql.commit()
r = praw. | Reddit(USERAGENT)
r.set_oauth_app_info(APP_ID, APP_ | SECRET, APP_URI)
r.refresh_access_information(APP_REFRESH)
def scansub():
print('Searching '+ SUBREDDIT + '.')
subreddit = r.get_subreddit(SUBREDDIT)
posts = subreddit.get_new(limit=MAXPOSTS)
result = []
authors = []
for post in posts:
pid = post.id
pbody = post.title.lower() + '\n' + post.selftext.lower()
cur.execute('SELECT * FROM oldposts WHERE ID=?', [pid])
if not cur.fetchone():
cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
sql.commit()
if post.subreddit.display_name.lower() == DSUB.lower():
continue
if KEYWORDS == [] or any(key.lower() in pbody for key in KEYWORDS):
try:
pauthor = post.author.name
print(pid + ', ' + pauthor)
if TRUEURL == True:
plink = post.url
else:
plink = post.permalink
result.append(plink)
authors.append(pauthor + ' in /r/' + post.subreddit.display_name)
if RSAVE == True:
submission = post.submission
submission.save()
print('\tSaved submission')
if SUBDUMP == True:
print('\tDumping to ' + DSUB + '...')
newtitle = POSTTITLE
newtitle = newtitle.replace('_author_', pauthor)
newtitle = newtitle.replace('_subreddit_', post.subreddit.display_name)
newtitle = newtitle.replace('_score_', str(post.score) + ' points')
newtitle = newtitle.replace('_title_', post.title)
if len(newtitle) > 300:
newtitle = newtitle[:297]
create = r.submit(DSUB, newtitle, url=plink, resubmit=True, captcha = None)
print('\tDumped to ' + DSUB + '.')
except AttributeError:
print(pid + ': Author deleted. Ignoring comment')
if len(result) > 0 and MAILME == True:
for m in range(len(result)):
result[m] = '- [%s](%s)' % (authors[m], result[m])
r.send_message(RECIPIENT, MTITLE, MHEADER + '\n\n' + '\n\n'.join(result), captcha=None)
print('Mailed ' + RECIPIENT)
while True:
try:
scansub()
except Exception as e:
traceback.print_exc()
print('Running again in ' + WAITS + ' seconds \n')
sql.commit()
time.sleep(WAIT)
|
ctgk/BayesianNetwork | test/linalg/test_det.py | Python | mit | 626 | 0 | import unittest
import numpy as np
import bayesnet as bn
class TestDeterminant(unittest.TestCase):
def test_determinant(self):
A = np.array([
[2., 1.],
[1., 3.]
])
detA = np.linalg.det(A)
self.ass | ertTrue((detA == bn.linalg.det(A).value).all())
A = bn.Parameter(A)
for _ in range(100):
A.cleargrad()
detA = bn.linalg.det(A)
loss = bn.square(detA - 1)
loss.backward()
A.value -= 0.1 * A.grad
self.assertAlmostEqual(detA.value, 1.)
if __name__ == '__main__':
| unittest.main()
|
gracfu/618_map_reduce | map_reduce_part2.py | Python | apache-2.0 | 802 | 0.03616 | '''
SI 618 - HW 4: Map-Reduce Part 2
Uniqname: gracfu
'''
from mrjob.job import MRJob
from mrjob.step import MRStep
import re
WORD_RE = re.compile(r"\b[\w']+\b")
class MRMostUsedWord(MRJob):
def mapper_get_words(self, _, line):
for word in WORD_RE.findall(line):
yield (word.lower(), 1)
def combiner_count_words(self, word, counts):
yield (word, sum(counts))
def reducer_count_words(self, word, counts):
yield None, (sum(counts), word)
def reducer_find_max_words(self, _, word_count_pairs):
yield max(word_count_pairs)
def steps(self):
return [
MRStep(mapper = self.mapper_get_words,
combiner = self.combiner_count_words,
reducer = self.reducer_cou | nt_words),
MRStep(re | ducer = self.reducer_find_max_words)
]
if __name__ == '__main__':
MRMostUsedWord.run()
|
CooperLuan/devops.notes | taobao/top/api/rest/ItemQuantityUpdateRequest.py | Python | mit | 406 | 0.03202 | '''
Created by auto_sdk on | 2014-12-17 17:22:51
'''
from top.api.base import RestApi
class ItemQuantityUpdateRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.num_iid = None
self.outer_id = None
self.quantity = None
self.sku_id = None
self.type = None
def getapiname(self):
return 'taobao.item.qua | ntity.update'
|
levilucio/SyVOLT | UMLRT2Kiltera_MM/MT_pre__match_contains.py | Python | mit | 4,951 | 0.029085 | """
__MT_pre__match_contains.py_____________________________________________________
Automatically generated AToM3 syntactic object (DO NOT MODIFY DIRECTLY)
Author: gehan
Modified: Sun Feb 15 10:22:14 2015
________________________________________________________________________________
"""
from ASGNode import *
from ATOM3Type import *
from ATOM3String import *
from ATOM3Boolean import *
from graph_MT_pre__match_contains import *
class MT_pre__match_contains(ASGNode, ATOM3Type):
def __init__(self, parent = None):
ASGNode.__init__(self)
ATOM3Type.__init__(self)
self.superTypes = []
self.graphClass_ = graph_MT_pre__match_contains
self.isGraphObjectVisual = True
if(hasattr(self, '_setHierarchicalLink')):
self._setHierarchicalLink(False)
if(hasattr(self, '_setHierarchicalNode')):
self._setHierarchicalNode(False)
self.parent = parent
self.MT_label__=ATOM3String('', 20)
self.MT_pivotOut__=ATOM3String('', 20)
self.MT_pivotIn__=ATOM3String('', 20)
self.MT_subtypeMatching__=ATOM3Boolean()
self.MT_subtypeMatching__.setValue(('True', 0))
self.MT_subtypeMatching__.config = 0
self.generatedAttributes = {'MT_label__': ('ATOM3String', ),
'MT_pivotOut__': ('ATOM3String', ),
'MT_pivotIn__': ('ATOM3String', ),
'MT_subtypeMatching__': ('ATOM3Boolean', ) }
self.realOrder = ['MT_label__','MT_pivotOut__','MT_pivotIn__','MT_subtypeMatching__']
self.directEditing = [1,1,1,1]
def clone(self):
cloneObject = MT_pre__match_contains( self.parent )
for atr in self.realOrder:
cloneObject.setAttrValue(atr, self.getAttrValue(atr).clone() )
ASGNode.cloneActions(self, cloneObject)
return cloneObject
def copy(self, other):
ATOM3Type.copy(self, other)
for atr in self.realOrder:
self.setAttrValue(atr, other.getAttrValue(atr) )
ASGNode.copy(self, other)
def preCondition (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.preCondition(actionID, params)
else: return None
def postCondition (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.postCondition(actionID, params)
else: return None
def preAction (self, actionID, * params):
if actionID == self.CREATE:
self.autoIncrLabel(params)
if self.graphObject_:
return self.graphObject_.preAction(actionID, params)
else: return None
def postAction (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.postAction(actionID, params)
else: return None
def QOCA(self, params):
"""
QOCA Constraint Template
NOTE: DO NOT select a POST/PRE action trigger
Constraints will be added/removed in a logical manner by other mechanisms.
"""
return # <--- Remove this if you want to use QOCA
# Get the high level constraint helper and solver
from Qoca.atom3constraints.OffsetConstraints import OffsetConstraints
oc = OffsetConstraints(self.parent.qocaSolver)
# Constraint only makes sense if there exists 2 objects connected to this link
if(not (self.in_connections_ and self.out_connections_)): return
# Get the graphical objects (subclass of graphEntity/graphLink)
graphicalObjectLink = self.graphObject_
graphicalObjectSource = self.in_connections_[0].graphObject_
graphicalObjectTarget = self.out_connections_[0].graphObject_
objTuple = (graphicalObjectSource, graphicalObjectTarget, graphicalObjectLink)
"""
Example constraint, see Kernel/QOCA/atom3constraints/OffsetConstraints.py
For more types of constraints
"""
oc.LeftExactDistan | ce(objTuple, 20)
oc.resolve() # Resolve immediately after creating entity & constraint
def autoIncrLabel(self, params):
#===============================================================================
# Auto increment the label
#===============================================================================
# I | f there is already one, ignore
if not self.MT_label__.isNone(): return
# Get the maximum label of all MT_pre__ elements
label = 0
for nt in self.parent.ASGroot.listNodes:
if nt.startswith('MT_pre__'):
for node in self.parent.ASGroot.listNodes[nt]:
currLabel = 0
try:
currLabel = int(node.MT_label__.getValue())
except:
pass
if currLabel > label:
label = currLabel
# The label of this instance will be the max label + 1
self.MT_label__.setValue(str(label + 1))
|
Zlash65/erpnext | erpnext/startup/report_data_map.py | Python | gpl-3.0 | 9,520 | 0.03771 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
# mappings for table dumps
# "remember to add indexes!"
data_map = {
"Company": {
"columns": ["name"],
"conditions": ["docstatus < 2"]
},
"Fiscal Year": {
"columns": ["name", "year_start_date", "year_end_date"],
"conditions": ["docstatus < 2"],
},
# Accounts
"Account": {
"columns": ["name", "parent_account", "lft", "rgt", "report_type",
"company", "is_group"],
"conditions": ["docstatus < 2"],
"order_by": "lft",
"links": {
"company": ["Company", "name"],
}
},
"Cost Center": {
"columns": ["name", "lft", "rgt"],
"conditions": ["docstatus < 2"],
"order_by": "lft"
},
"GL Entry": {
"columns": ["name", "account", "posting_date", "cost_center", "debit", "credit",
"is_opening", "company", "voucher_type", "voucher_no", "remarks"],
"order_by": "posting_date, account",
"links": {
"account": ["Account", "name"],
"company": ["Company", "name"],
"cost_center": ["Cost Center", "name"]
}
},
# Stock
"Item": {
"columns": ["name", "if(item_name=name, '', item_name) as item_name", "description",
"item_group as parent_item_group", "stock_uom", "brand", "valuation_method"],
# "conditions": ["docstatus < 2"],
"order_by": "name",
"links": {
"parent_item_group": ["Item Group", "name"],
"brand": ["Brand", "name"]
}
},
"Item Group": {
"columns": ["name", "parent_item_group"],
# "conditions": ["docstatus < 2"],
"order_by": "lft"
},
"Brand": {
"columns": ["name"],
"conditions": ["docstatus < 2"],
"order_by": "name"
},
"Project": {
"columns": ["name"],
"conditions": ["docstatus < 2"],
"order_by": "name"
},
"Warehouse": {
"columns": ["name"],
"conditions": ["docstatus < 2"],
"order_by": "name"
},
"Stock Ledger Entry": {
"columns": ["name", "posting_date", "posting_time", "item_code", "warehouse",
"actual_qty as qty", "voucher_type", "voucher_no", "project",
"incoming_rate as incoming_rate", "stock_uom", "serial_no",
"qty_after_transaction", "valuation_rate"],
"order_by": "posting_date, posting_time, creation",
"links": {
"item_code": ["Item", "name"],
"warehouse": ["Warehouse", "name"],
"project": ["Project", "name"]
},
"force_index": "posting_sort_index"
},
"Serial No": {
"columns": ["name", "purchase_rate as incoming_rate"],
"conditions": ["docstatus < 2"],
"order_by": "name"
},
"Stock Entry": {
"columns": ["name", "purpose"],
"conditions": ["docstatus=1"],
"order_by": "posting_date, posting_time, name",
},
"Material Request Item": {
"columns": ["item.name as name", "item_code", "warehouse",
"(qty - ordered_qty) as qty"],
"from": "`tabMaterial Request Item` item, `tabMaterial Request` main",
"conditions": ["item.parent = main.name", "main.docstatus=1", "main.status != 'Stopped'",
"ifnull(warehouse, '')!=''", "qty > ordered_qty"],
"links": {
"item_code": ["Item", "name"],
"warehouse": ["Warehouse", "name"]
},
},
"Purchase Order Item": {
"columns": ["item.name as name", "item_code", "warehouse",
"(qty - received_qty)*conversion_factor as qty"],
"from": "`tabPurchase Order Item` item, `tabPurchase Order` main",
"conditions": ["item.parent = main.name", "main.docstatus=1", "main.status != 'Stopped'",
"ifnull(warehouse, '')!=''", "qty > received_qty"],
"links": {
"item_code": ["Item", "name"],
"warehouse": ["Warehouse", "name"]
},
},
"Sales Order Item": {
"columns": ["item.name as name", "item_code", "(qty - delivered_qty)*conversion_factor as qty", "warehouse"],
"from": "`tabSales Order Item` item, `tabSales Order` main",
"conditions": ["item.parent = main.name", "main.docstatus=1", "main.status != 'Stopped'",
"ifnull(warehouse, '')!=''", "qty > delivered_qty"],
"links": {
"item_code": ["Item", "name"],
"warehouse": ["Warehouse", "name"]
},
},
# Sales
"Customer": {
"columns": ["name", "if(customer_name=name, '', customer_name) as customer_name",
"customer_group as parent_customer_group", "territory as parent_territory"],
"conditions": ["docstatus < 2"],
"order_by": "name",
"links": {
"parent_customer_group": ["Customer Group", "name"],
"parent_territory": ["Territory", "name"],
}
},
"Customer Group": {
"columns": ["name", "parent_customer_group"],
"conditions": ["docstatus < 2"],
"order_by": "lft"
},
"Territory": {
"columns": ["name", "parent_territory"],
"conditions": ["docstatus < 2"],
"order_by": "lft"
},
"Sales Invoice": {
"columns": ["name", "customer", "posting_date", "company"],
"conditions": ["docstatus=1"],
"order_by": "posting_date",
"links": {
"customer": ["Customer", "name"],
"company":["Company", "name"]
}
},
"Sales Invoice Item": {
"columns": ["name", "parent", "item_code", "stock_qty as qty", "base_net_amount"],
"conditions": ["docstatus=1", "ifnull(parent, '')!=''"],
"order_by": "parent",
"links": {
"parent": ["Sales Invoice", "name"],
"item_code": ["Item", "name"]
}
},
"Sales Order": {
"columns": ["name", "customer", "transaction_date as posting_date", "company"],
"conditions": ["docstatus=1"],
"order_by": "transaction_date",
"links": {
"customer": ["Customer", "name"],
"company":["Company", "name"]
}
},
"Sales Order Item[Sales Analytics]": {
"columns": ["name", "parent", "item_code", "stock_qty as qty", "base_net_amount"],
"conditions": ["docstatus=1", "ifnull(parent, '')!=''"],
"or | der_by": "parent",
"links": {
"parent": ["Sales Order", "name"],
"item_code": ["Item", "name"]
}
},
"Delivery Note": {
"columns": ["name", "customer", "posting_date", "company"],
"conditions": ["docstatus | =1"],
"order_by": "posting_date",
"links": {
"customer": ["Customer", "name"],
"company":["Company", "name"]
}
},
"Delivery Note Item[Sales Analytics]": {
"columns": ["name", "parent", "item_code", "stock_qty as qty", "base_net_amount"],
"conditions": ["docstatus=1", "ifnull(parent, '')!=''"],
"order_by": "parent",
"links": {
"parent": ["Delivery Note", "name"],
"item_code": ["Item", "name"]
}
},
"Supplier": {
"columns": ["name", "if(supplier_name=name, '', supplier_name) as supplier_name",
"supplier_group as parent_supplier_group"],
"conditions": ["docstatus < 2"],
"order_by": "name",
"links": {
"parent_supplier_group": ["Supplier Group", "name"],
}
},
"Supplier Group": {
"columns": ["name", "parent_supplier_group"],
"conditions": ["docstatus < 2"],
"order_by": "name"
},
"Purchase Invoice": {
"columns": ["name", "supplier", "posting_date", "company"],
"conditions": ["docstatus=1"],
"order_by": "posting_date",
"links": {
"supplier": ["Supplier", "name"],
"company":["Company", "name"]
}
},
"Purchase Invoice Item": {
"columns": ["name", "parent", "item_code", "stock_qty as qty", "base_net_amount"],
"conditions": ["docstatus=1", "ifnull(parent, '')!=''"],
"order_by": "parent",
"links": {
"parent": ["Purchase Invoice", "name"],
"item_code": ["Item", "name"]
}
},
"Purchase Order": {
"columns": ["name", "supplier", "transaction_date as posting_date", "company"],
"conditions": ["docstatus=1"],
"order_by": "posting_date",
"links": {
"supplier": ["Supplier", "name"],
"company":["Company", "name"]
}
},
"Purchase Order Item[Purchase Analytics]": {
"columns": ["name", "parent", "item_code", "stock_qty as qty", "base_net_amount"],
"conditions": ["docstatus=1", "ifnull(parent, '')!=''"],
"order_by": "parent",
"links": {
"parent": ["Purchase Order", "name"],
"item_code": ["Item", "name"]
}
},
"Purchase Receipt": {
"columns": ["name", "supplier", "posting_date", "company"],
"conditions": ["docstatus=1"],
"order_by": "posting_date",
"links": {
"supplier": ["Supplier", "name"],
"company":["Company", "name"]
}
},
"Purchase Receipt Item[Purchase Analytics]": {
"columns": ["name", "parent", "item_code", "stock_qty as qty", "base_net_amount"],
"conditions": ["docstatus=1", "ifnull(parent, '')!=''"],
"order_by": "parent",
|
ThiefMaster/indico | indico/modules/events/contributions/controllers/management.py | Python | mit | 34,546 | 0.003503 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import uuid
from operator import attrgetter
from flask import flash, jsonify, redirect, request, session
from sqlalchemy.orm import undefer
from werkzeug.exceptions import BadRequest, Forbidden, NotFound
from indico.core.cache import make_scoped_cache
from indico.core.config import config
from indico.core.db import db
from indico.core.db.sqlalchemy.protection import ProtectionMode, render_acl
from indico.core.permissions import get_principal_permissions, update_permissions
from indico.legacy.pdfinterface.latex import ContribsToPDF, ContributionBook
from indico.modules.attachments.controllers.event_package import AttachmentPackageGeneratorMixin
from indico.modules.events.abstracts.forms import AbstractContentSettingsForm
from indico.modules.events.abstracts.settings import abstracts_settings
from indico.modules.events.contributions import contribution_settings, get_contrib_field_types
from indico.modules.events.contributions.clone import ContributionCloner
from indico.modules.events.contributions.controllers.common import ContributionListMixin
from indico.modules.events.contributions.forms import (ContributionDefaultDurationForm, ContributionDurationForm,
ContributionExportTeXForm, ContributionProtectionForm,
ContributionStartDateForm, ContributionTypeForm,
SubContributionForm)
from indico.modules.events.contributions.lists import ContributionListGenerator
from indico.modules.events.contributions.models.contributions import Contribution
from indico.modules.events.contributions.models.fields import ContributionField
from indico.modules.events.contributions.models.references import ContributionReference, SubContributionReference
from indico.modules.events.contributions.models.subcontributions import SubContribution
from indico.modules.events.contributions.models.types import ContributionType
from indico.modules.events.contributions.operations import (create_contribution, create_subcontribution,
delete_contribution, delete_subcontribution,
update_contribution, update_subcontribution)
from indico.modules.events.contributions.util import (contribution_type_row, generate_spreadsheet_from_contributions,
get_boa_export_formats, import_contributions_from_csv,
make_contribution_form)
from indico.modules.events.contributions.views import WPManageContributions
from indico.modules.events.logs import EventLogKind, EventLogRealm
from indico.modules.events.management.controllers import RHManageEventBase
from indico.modules.events.management.controllers.base import RHContributionPersonListMixin
from indico.modules.events.management.util import flash_if_unregistered
from indico.modules.events.models.references import ReferenceType
from indico.modules.events.sessions import Session
from indico.modules.events.timetable.forms import ImportContributionsForm
from indico.modules.events.timetable.operations import update_timetable_entry
from indico.modules.events.tracks.models.tracks import Track
from indico.modules.events.util import check_event_locked, get_field_values, track_location_changes, track_time_changes
from indico.util.date_time import format_datetime, format_human_timedelta
from indico.util.i18n import _, ngettext
from indico.util.spreadsheets impo | rt send_csv, send_xlsx
from indico.util.string import handle_legacy_description
from indico.web.flask.templating import get_template_module
from indico.web.flask.util import send_file, url_for
from indico.web.forms.base import FormDefaults
from indico.web.forms.fields.principals import serialize_principal
from indico.web.util import jsonify_data, jsonify_form, jsonify_template
export_list_cache = make_scoped_cache('contrib-export-list')
def _render_subcontribution_list(contrib):
    """Render the subcontribution list fragment for a contribution.

    Returns the HTML used to refresh the list (e.g. after AJAX edits),
    with subcontributions ordered by their position and the attachment
    count eagerly loaded to avoid N+1 queries in the template.
    """
    tpl = get_template_module('events/contributions/management/_subcontribution_list.html')
    subcontribs = (SubContribution.query.with_parent(contrib)
                   .options(undefer('attachment_count'))
                   .order_by(SubContribution.position)
                   .all())
    return tpl.render_subcontribution_list(contrib.event, contrib, subcontribs)
class RHManageContributionsBase(RHManageEventBase):
    """Base class for all contributions management RHs."""

    def _process_args(self):
        RHManageEventBase._process_args(self)
        # Every contribution-management RH gets a list generator scoped to this event.
        self.list_generator = ContributionListGenerator(event=self.event)
class RHManageContributionBase(RHManageContributionsBase):
    """Base class for a specific contribution."""

    # Redirect to the canonical URL built from the contribution's locator.
    normalize_url_spec = {
        'locators': {
            lambda self: self.contrib
        }
    }

    def _process_args(self):
        RHManageContributionsBase._process_args(self)
        # `.one()` raises if the contribution is missing or soft-deleted.
        self.contrib = Contribution.query.filter_by(id=request.view_args['contrib_id'], is_deleted=False).one()

    def _check_access(self):
        # Contribution managers only; a locked event also blocks changes.
        if not self.contrib.can_manage(session.user):
            raise Forbidden
        check_event_locked(self, self.event)
class RHManageSubContributionBase(RHManageContributionBase):
    """Base RH for a specific subcontribution."""

    # Redirect to the canonical URL built from the subcontribution's locator.
    normalize_url_spec = {
        'locators': {
            lambda self: self.subcontrib
        }
    }

    def _process_args(self):
        RHManageContributionBase._process_args(self)
        # 404 when the subcontribution does not exist or is soft-deleted.
        self.subcontrib = SubContribution.get_or_404(request.view_args['subcontrib_id'], is_deleted=False)
class RHManageContributionsActionsBase(RHManageContributionsBase):
    """Base class for classes performing actions on event contributions."""

    def _process_args(self):
        RHManageContributionsBase._process_args(self)
        # IDs come from the submitted form; the query silently drops any
        # that don't belong to this event.
        self.contrib_ids = [int(x) for x in request.form.getlist('contribution_id')]
        self.contribs = Contribution.query.with_parent(self.event).filter(Contribution.id.in_(self.contrib_ids)).all()
class RHManageSubContributionsActionsBase(RHManageContributionBase):
    """Base class for RHs performing actions on subcontributions."""

    def _process_args(self):
        RHManageContributionBase._process_args(self)
        # Deduplicate submitted IDs and restrict them to this contribution.
        ids = {int(x) for x in request.form.getlist('subcontribution_id')}
        self.subcontribs = (SubContribution.query
                            .with_parent(self.contrib)
                            .filter(SubContribution.id.in_(ids))
                            .all())
class RHContributions(ContributionListMixin, RHManageContributionsBase):
    """Display contributions management page."""

    # Template rendered by ContributionListMixin and the WP wrapper used for it.
    template = 'management/contributions.html'
    view_class = WPManageContributions
class RHContributionListCustomize(RHManageContributionsBase):
    """Filter options for the contributions list of an event."""

    # Customizing the list view is allowed even when the event is locked.
    ALLOW_LOCKED = True

    def _process_GET(self):
        # Render the filter dialog pre-populated with the stored filters.
        return jsonify_template('events/contributions/contrib_list_filter.html',
                                filters=self.list_generator.list_config['filters'],
                                static_items=self.list_generator.static_items)

    def _process_POST(self):
        # Persist the chosen filters, then return the refreshed list markup.
        self.list_generator.store_configuration()
        return jsonify_data(**self.list_generator.render_list())
class RHContributionListStaticURL(RHManageContributionsBase):
    """Generate a static URL for the configuration of the contribution list."""

    # Read-only operation, so a locked event is fine.
    ALLOW_LOCKED = True

    def _process(self):
        return jsonify(url=self.list_generator.generate_static_url())
class RHCreateContribution(RHManageContributionsBase):
def _process(self):
inherited_location = self.event.location_data
inherited_location['inheriting'] = True
default_duration = contribution_settings.get(self.event, 'default_duration')
contrib_form_cla |
looker/sentry | tests/acceptance/test_member_list.py | Python | bsd-3-clause | 1,244 | 0.000804 | from __future__ import absolute_import
from sentry.models import OrganizationMember
from sentry.testutils import AcceptanceTestCase
class ListOrganizationMembersTest(AcceptanceTestCase):
    """Acceptance test: the organization member list renders for an owner.

    The fixture covers the three member flavours shown by the page: a full
    owner, an invite-pending (email-only) member, and an admin on a team.
    """

    def setUp(self):
        super(ListOrganizationMembersTest, self).setUp()
        # Owner account used to log in and view the member list.
        self.user = self.create_user('foo@example.com')
        self.org = self.create_organization(
            name='Rowdy Tiger',
            owner=None,
        )
        self.team = self.create_team(organization=self.org, name='Mariachi Band')
        self.create_member(
            user=self.user,
            organization=self.org,
            role='owner',
            teams=[self.team],
        )
        # Invited member: email only, no user account yet.
        OrganizationMember.objects.create(
            email='bar@example.com', organization=self.org, role='member'
        )
        self.create_member(
            user=self.create_user('baz@example.com'),
            organization=self.org,
            role='admin',
            teams=[self.team],
        )
        self.login_as(self.user)

    def test_list(self):
        # Load the members page, wait for the spinner to go away, snapshot.
        self.browser.get('/organizations/{}/members/'.format(self.org.slug))
        self.browser.wait_until_not('.loading-indicator')
        self.browser.snapshot(name='list organization members')
emakis/erpnext | erpnext/accounts/doctype/purchase_invoice/purchase_invoice.py | Python | gpl-3.0 | 26,853 | 0.025435 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
from frappe.utils import cint, formatdate, flt, getdate
from frappe import _, throw
import frappe.defaults
from erpnext.controllers.buying_controller import BuyingController
from erpnext.accounts.party import get_party_account, get_due_date
from erpnext.accounts.utils import get_account_currency, get_fiscal_year
from erpnext.stock.doctype.purchase_receipt.purchase_receipt import update_billed_amount_based_on_po
from erpnext.stock import get_warehouse_account_map
from erpnext.accounts.general_ledger import make_gl_entries, merge_similar_entries, delete_gl_entries
from erpnext.accounts.doctype.gl_entry.gl_entry import update_outstanding_amt
from erpnext.buying.utils import check_for_closed_status
form_grid_templates = {
"items": "templates/form_grid/item_grid.html"
}
class PurchaseInvoice(BuyingController):
def __init__(self, arg1, arg2=None):
    """Set up the status updater that keeps linked PO billing figures in sync."""
    super(PurchaseInvoice, self).__init__(arg1, arg2)
    # On submit/cancel, push this invoice's row amounts into the matching
    # Purchase Order Item rows (billed_amt) and recompute the PO's per_billed %.
    self.status_updater = [{
        'source_dt': 'Purchase Invoice Item',
        'target_dt': 'Purchase Order Item',
        'join_field': 'po_detail',
        'target_field': 'billed_amt',
        'target_parent_dt': 'Purchase Order',
        'target_parent_field': 'per_billed',
        'target_ref_field': 'amount',
        'source_field': 'amount',
        'percent_join_field': 'purchase_order',
        'overflow_type': 'billing'
    }]
def validate(self):
    """Run all document-level validations before save/submit.

    The order matters: accounts must be resolved (set_expense_account)
    before set_against_expense_account aggregates them, and remarks/status
    are set last.
    """
    if not self.is_opening:
        self.is_opening = 'No'

    self.validate_posting_time()
    super(PurchaseInvoice, self).validate()

    # PO/PR requirements and supplier-invoice checks do not apply to returns.
    if not self.is_return:
        self.po_required()
        self.pr_required()
        self.validate_supplier_invoice()

    # validate cash purchase
    if (self.is_paid == 1):
        self.validate_cash()

    self.check_conversion_rate()
    self.validate_credit_to_acc()
    self.clear_unallocated_advances("Purchase Invoice Advance", "advances")
    self.check_for_closed_status()
    self.validate_with_previous_doc()
    self.validate_uom_is_integer("uom", "qty")
    self.validate_uom_is_integer("stock_uom", "stock_qty")
    self.set_expense_account(for_validate=True)
    self.set_against_expense_account()
    self.validate_write_off_account()
    self.validate_multiple_billing("Purchase Receipt", "pr_detail", "amount", "items")
    self.validate_fixed_asset()
    self.validate_fixed_asset_account()
    self.create_remarks()
    self.set_status()
def validate_cash(self):
    """Checks that apply only to cash (is_paid) purchases."""
    if not self.cash_bank_account and flt(self.paid_amount):
        frappe.throw(_("Cash or Bank Account is mandatory for making payment entry"))

    # Paid + write-off may exceed the grand total only by rounding noise:
    # less than half a unit in the last displayed decimal place.
    if flt(self.paid_amount) + flt(self.write_off_amount) \
            - flt(self.grand_total) > 1/(10**(self.precision("base_grand_total") + 1)):
        frappe.throw(_("""Paid amount + Write Off Amount can not be greater than Grand Total"""))
def create_remarks(self):
    """Fill in a default remark when the user left the field empty.

    References the supplier's bill number/date when both are available.
    """
    if self.remarks:
        return
    if self.bill_no and self.bill_date:
        remark = _("Against Supplier Invoice {0} dated {1}").format(
            self.bill_no, formatdate(self.bill_date))
    else:
        remark = _("No Remarks")
    self.remarks = remark
def set_missing_values(self, for_validate=False):
    """Fill the credit account and due date from supplier defaults when absent."""
    if not self.credit_to:
        self.credit_to = get_party_account("Supplier", self.supplier, self.company)
    if not self.due_date:
        self.due_date = get_due_date(self.posting_date, "Supplier", self.supplier, self.company)

    super(PurchaseInvoice, self).set_missing_values(for_validate)
def check_conversion_rate(self):
    """Sanity-check the currency vs. conversion rate combination.

    Invalid cases: rate != 1 for an invoice in the company currency,
    a missing/zero rate, or rate == 1 for a foreign-currency invoice.
    """
    default_currency = erpnext.get_company_currency(self.company)
    if not default_currency:
        throw(_('Please enter default currency in Company Master'))
    if (self.currency == default_currency and flt(self.conversion_rate) != 1.00) or not self.conversion_rate or (self.currency != default_currency and flt(self.conversion_rate) == 1.00):
        throw(_("Conversion rate cannot be 0 or 1"))
def validate_credit_to_acc(self):
    """Ensure 'Credit To' is a Payable, Balance Sheet account.

    Also caches the account currency for later GL/outstanding calculations.
    """
    account = frappe.db.get_value("Account", self.credit_to,
        ["account_type", "report_type", "account_currency"], as_dict=True)

    if account.report_type != "Balance Sheet":
        frappe.throw(_("Credit To account must be a Balance Sheet account"))

    if self.supplier and account.account_type != "Payable":
        frappe.throw(_("Credit To account must be a Payable account"))

    self.party_account_currency = account.account_currency
def check_for_closed_status(self):
    """Abort if any linked Purchase Order is already closed.

    Only rows billed directly against a PO (i.e. without an intermediate
    Purchase Receipt) are considered, and each PO is checked exactly once.
    """
    checked = set()  # set instead of list: O(1) membership, order irrelevant
    for d in self.get('items'):
        if d.purchase_order and d.purchase_order not in checked and not d.purchase_receipt:
            checked.add(d.purchase_order)
            # Module-level helper from erpnext.buying.utils; throws when
            # the referenced Purchase Order's status is 'Closed'.
            check_for_closed_status('Purchase Order', d.purchase_order)
def validate_with_previous_doc(self):
    """Cross-check header and row fields against the linked PO / Purchase Receipt."""
    super(PurchaseInvoice, self).validate_with_previous_doc({
        "Purchase Order": {
            "ref_dn_field": "purchase_order",
            "compare_fields": [["supplier", "="], ["company", "="], ["currency", "="]],
        },
        "Purchase Order Item": {
            "ref_dn_field": "po_detail",
            "compare_fields": [["project", "="], ["item_code", "="], ["uom", "="]],
            "is_child_table": True,
            "allow_duplicate_prev_row_id": True
        },
        "Purchase Receipt": {
            "ref_dn_field": "purchase_receipt",
            "compare_fields": [["supplier", "="], ["company", "="], ["currency", "="]],
        },
        "Purchase Receipt Item": {
            "ref_dn_field": "pr_detail",
            "compare_fields": [["project", "="], ["item_code", "="], ["uom", "="]],
            "is_child_table": True
        }
    })

    # When 'maintain same rate' is enabled, row rates must match the
    # referenced PO / PR rows; credit-note returns are exempt.
    if cint(frappe.db.get_single_value('Buying Settings', 'maintain_same_rate')) and not self.is_return:
        self.validate_rate_with_reference_doc([
            ["Purchase Order", "purchase_order", "po_detail"],
            ["Purchase Receipt", "purchase_receipt", "pr_detail"]
        ])
def validate_warehouse(self):
    """Require a warehouse on every row when the invoice updates stock."""
    if self.update_stock:
        for d in self.get('items'):
            if not d.warehouse:
                frappe.throw(_("Warehouse required at Row No {0}").format(d.idx))

    super(PurchaseInvoice, self).validate_warehouse()
def validate_item_code(self):
    """Ensure every invoice row has an Item Code; abort on the first row without one."""
    for d in self.get('items'):
        if not d.item_code:
            # frappe.throw is the canonical way to abort with a message;
            # the old msgprint(..., raise_exception=True) form is deprecated
            # and inconsistent with the sibling validators in this class.
            frappe.throw(_("Item Code required at Row No {0}").format(d.idx))
def set_expense_account(self, for_validate=False):
    """Assign the expense (or stock) account on every invoice row.

    Under perpetual inventory, stock items are booked against the
    warehouse account (when updating stock) or 'Stock Received But Not
    Billed' — except opening entries, drop-ship rows and fixed assets,
    which keep a user-supplied expense account.
    """
    auto_accounting_for_stock = erpnext.is_perpetual_inventory_enabled(self.company)

    if auto_accounting_for_stock:
        stock_not_billed_account = self.get_company_default("stock_received_but_not_billed")

    stock_items = self.get_stock_items()

    if self.update_stock:
        self.validate_item_code()
        self.validate_warehouse()
        warehouse_account = get_warehouse_account_map()

    for item in self.get("items"):
        # In case of auto inventory accounting, the expense account is always
        # "Stock Received But Not Billed" for a stock item — except for
        # opening entries, drop-ship entries and fixed-asset items.
        if auto_accounting_for_stock and item.item_code in stock_items \
                and self.is_opening == 'No' and not item.is_fixed_asset \
                and (not item.po_detail or
                     not frappe.db.get_value("Purchase Order Item", item.po_detail, "delivered_by_supplier")):
            if self.update_stock:
                item.expense_account = warehouse_account[item.warehouse]["account"]
            else:
                item.expense_account = stock_not_billed_account
        elif not item.expense_account and for_validate:
            throw(_("Expense account is mandatory for item {0}").format(item.item_code or item.item_name))
def set_against_expense_account(self):
    """Store the distinct expense accounts used by the invoice rows.

    The result is a comma-separated string preserving first-seen order.
    """
    seen = []
    for row in self.get("items"):
        account = row.expense_account
        if account not in seen:
            seen.append(account)
    self.against_expense_account = ",".join(seen)
def po_required(self):
    """Enforce the 'Purchase Order Required' buying setting for every row."""
    if frappe.db.get_value("Buying Settings", None, "po_required") == 'Yes':
        for d in self.get('items'):
            if not d.purchase_order:
                throw(_("As per the Buying Settings if Purchase Order Required == 'YES', then for creating Purchase Invoice, user need to create Purchase Order first for item {0}").format(d.item_code))
def pr_required(self):
stock_items = self.get_stock_items()
if frappe.db.get_value("Buying Settings", None, "pr_required") == 'Yes':
for d in self.get('items'):
if not d.purchase_receipt and d.item_code in stock_items:
throw(_("As per the Buying Setting |
PinguinoIDE/pinguino-multilanguage | files/frames/libraries_widget.py | Python | gpl-2.0 | 14,604 | 0.003287 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/yeison/Documentos/python/developing/pinguino/pinguino-ide/qtgui/frames/libraries_widget.ui'
#
# Created: Fri Dec 19 15:43:21 2014
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_LibraryManager(object):
def setupUi(self, LibraryManager):
LibraryManager.setObjectName("LibraryManager")
LibraryManager.resize(833, 376)
self.centralwidget = QtGui.QWidget(LibraryManager)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.pushButton_close = QtGui.QPushButton(self.centralwidget)
self.pushButton_close.setMinimumSize(QtCore.QSize(165, 0))
self.pushButton_close.setMaximumSize(QtCore.QSize(165, 16777215))
self.pushButton_close.setObjectName("pushButton_close")
self.gridLayout.addWidget(self.pushButton_close, 1, 2, 1, 1)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 1, 1, 1, 1)
self.commandLinkButton_how = QtGui.QCommandLinkButton(self.centralwidget)
self.commandLinkButton_how.setCursor(QtCore.Qt.WhatsThisCursor)
self.commandLinkButton_how.setObjectName("commandLinkButton_how")
self.gridLayout.addWidget(self.commandLinkButton_how, 1, 0, 1, 1)
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setObjectName("tabWidget")
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName("tab_2")
self.gridLayout_3 = QtGui.QGridLayout(self.tab_2)
self.gridLayout_3.setObjectName("gridLayout_3")
self.lineEdit_source = QtGui.QLineEdit(self.tab_2)
self.lineEdit_source.setObjectName("lineEdit_source")
self.gridLayout_3.addWidget(self.lineEdit_source, 0, 0, 1, 1)
self.gridLayout_4 = QtGui.QGridLayout()
self.gridLayout_4.setObjectName("gridLayout_4")
self.pushButton_update = QtGui.QPushButton(self.tab_2)
self.pushButton_update.setEnabled(False)
self.pushButton_update.setObjectName("pushButton_update")
self.gridLayout_4.addWidget(self.pushButton_update, 0, 0, 1, 1)
self.pushButton_remove = QtGui.QPushButton(self.tab_2)
self.pushButton_remove.setEnabled(False)
self.pushButton_remove.setObjectName("pushButton_remove")
self.gridLayout_4.addWidget(self.pushButton_remove, 0, 1, 1, 1)
self.pushButton_reload_s = QtGui.QPushButton(self.tab_2)
self.pushButton_reload_s.setObjectName("pushButton_reload_s")
self.gridLayout_4.addWidget(self.pushButton_reload_s, 0, 2, 1, 1)
self.gridLayout_3.addLayout(self.gridLayout_4, 4, 0, 1, 3)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.checkBox_sources = QtGui.QCheckBox(self.tab_2)
self.checkBox_sources.setObjectName("checkBox_sources")
self.horizontalLayout_3.addWidget(self.checkBox_sources)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem1)
self.gridLayout_3.addLayout(self.horizontalLayout_3, 3, 0, 1, 3)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.radioButton_repo_git = QtGui.QRadioButton(self.tab_2)
self.radioButton_repo_git.setChecked(True)
self.radioButton_repo_git.setObjectName("radioButton_repo_git")
self.horizontalLayout_2.addWidget(self.radioButton_repo_git)
self.radioButton_repo_hg = QtGui.QRadioButton(self.tab_2)
self.radioButton_repo_hg.setObjectName("radioButton_repo_hg")
self.horizontalLayout_2.addWidget(self.radioButton_repo_hg)
self.radioButton_repo_svn = QtGui.QRadioButton(self.tab_2)
self.radioButton_repo_svn.setObjectName("radioButton_repo_svn")
self.horizontalLayout_2.addWidget(self.radioButton_repo_svn)
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem2)
self.gridLayout_3.addLayout(self.horizontalLayout_2, 1, 0, 1, 3)
self.pushButton_from_zip = QtGui.QPushButton(self.tab_2)
self.pushButton_from_zip.setObjectName("pushButton_from_zip")
self.gridLayout_3.addWidget(self.pushButton_from_zip, 0, 2, 1, 1)
self.pushButton_add = QtGui.QPushButton(self.tab_2)
| self.pushButton_add.setObjectName("pushButton_add")
self.gridLayout_3.addWidget(self.pushButton_add, 0, 1, 1, 1)
self.tableWidget_sources = QtGui.QTableWidget(self.tab_2)
self.tableWidget_sources.setAutoFillBackgro | und(True)
self.tableWidget_sources.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget_sources.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget_sources.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tableWidget_sources.setAlternatingRowColors(True)
self.tableWidget_sources.setVerticalScrollMode(QtGui.QAbstractItemView.ScrollPerPixel)
self.tableWidget_sources.setHorizontalScrollMode(QtGui.QAbstractItemView.ScrollPerPixel)
self.tableWidget_sources.setGridStyle(QtCore.Qt.NoPen)
self.tableWidget_sources.setObjectName("tableWidget_sources")
self.tableWidget_sources.setColumnCount(3)
self.tableWidget_sources.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidget_sources.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidget_sources.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidget_sources.setHorizontalHeaderItem(2, item)
self.tableWidget_sources.horizontalHeader().setHighlightSections(False)
self.tableWidget_sources.horizontalHeader().setStretchLastSection(True)
self.tableWidget_sources.verticalHeader().setVisible(False)
self.tableWidget_sources.verticalHeader().setHighlightSections(True)
self.tableWidget_sources.verticalHeader().setStretchLastSection(False)
self.gridLayout_3.addWidget(self.tableWidget_sources, 2, 0, 1, 3)
self.tabWidget.addTab(self.tab_2, "")
self.tab = QtGui.QWidget()
self.tab.setObjectName("tab")
self.gridLayout_2 = QtGui.QGridLayout(self.tab)
self.gridLayout_2.setObjectName("gridLayout_2")
self.pushButton_reload_l = QtGui.QPushButton(self.tab)
self.pushButton_reload_l.setObjectName("pushButton_reload_l")
self.gridLayout_2.addWidget(self.pushButton_reload_l, 3, 1, 1, 1)
self.pushButton_apply = QtGui.QPushButton(self.tab)
self.pushButton_apply.setEnabled(False)
self.pushButton_apply.setObjectName("pushButton_apply")
self.gridLayout_2.addWidget(self.pushButton_apply, 3, 0, 1, 1)
self.tableWidget_libs = QtGui.QTableWidget(self.tab)
self.tableWidget_libs.setAlternatingRowColors(True)
self.tableWidget_libs.setVerticalScrollMode(QtGui.QAbstractItemView.ScrollPerPixel)
self.tableWidget_libs.setHorizontalScrollMode(QtGui.QAbstractItemView.ScrollPerPixel)
self.tableWidget_libs.setGridStyle(QtCore.Qt.NoPen)
self.tableWidget_libs.setWordWrap(False)
self.tableWidget_libs.setCornerButtonEnabled(False)
self.tableWidget_libs.setObjectName("tableWidget_libs")
self.tableWidget_libs.setColumnCount(4)
self.tableWidget_libs.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidget_libs.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidget_libs.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidget_libs.setHorizontalHeaderItem(2, |
mpirnat/lets-be-bad-guys | manage.py | Python | mit | 250 | 0 | #!/usr/bin/env python
"""Django management entry point for the ``badguys`` project."""
import os
import sys

if __name__ == "__main__":
    # Fall back to the project settings unless the caller overrides them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "badguys.settings")

    # Imported lazily so plain imports of this module don't require Django.
    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
def printFileContents(myFile):
    """Print the file's contents as a list of lines (newlines stripped)."""
    # Context manager closes the handle deterministically; the original
    # open() leaked the file object until garbage collection.
    with open(myFile) as f:
        print(f.read().splitlines())
def writeFileContents(myFile, somethingToWrite):
    """Append ``somethingToWrite`` plus a trailing newline to the file."""
    # 'a' keeps existing content; the context manager flushes and closes
    # the handle even if write() raises (the original closed manually).
    with open(myFile, 'a') as f:
        f.write(somethingToWrite + '\n')  # python will convert \n to os.linesep
# Demo driver: print one file and append a line to another.
# NOTE(review): assumes alive.txt exists next to the script — confirm,
# otherwise the first call raises IOError/FileNotFoundError.
printFileContents("alive.txt")
writeFileContents("writer.txt", "All for one!")

# Consider, for example, reading rows from SQL Server via pyodbc:
# def writeToDatabase():
#     import pyodbc
#     cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=testdb;UID=me;PWD=pass')
#     cursor = cnxn.cursor()
#     cursor.execute("select user_id, user_name from users")
#     rows = cursor.fetchall()
#     for row in rows:
#         print row.user_id, row.user_name
erikdejonge/puffin | puf/cli.py | Python | mit | 3,395 | 0.003829 | # coding=utf-8
"""
-
"""
from __future__ import division, unicode_literals, absolute_import
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
import argparse
from puf import cli_lib
def main(params=None):
    """Entry point for the ``puf`` command line tool.

    Parses the command line (``params`` overrides ``sys.argv`` for testing),
    then evaluates the given Python command/file against each input stream
    via :mod:`puf.cli_lib` and displays the results.

    :param params: optional list of argument strings; ``None`` uses sys.argv.
    :return: ``None``, or the result of ``parser.print_help()`` when no
        command was supplied.
    """
    # add_help=False frees up -h, which is repurposed below for --skip-header;
    # the long option --help is registered explicitly instead.
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-l', '--line', help='Execute the command for each line of input.',
                        action='store_true', default=False)
    # parsing arguments
    parser.add_argument('-s', '--separator', help='Custom column separator.', default=None)
    parser.add_argument('-t', '--tab-separator', help='Use tab as the column separator.',
                        action='store_true', default=False)
    parser.add_argument('-h', '--skip-header',
                        help='Skip the first line of the stream.',
                        action='store_true', default=False)
    # execution options
    parser.add_argument('-b', '--before',
                        help='Statement to execute before the command '
                             '(e.g. set up accumulation variables).', default=None)
    parser.add_argument('-f', '--command-file',
                        help='Execute the file (instead of evaluating a '
                             'command). Incompatible with -r.', default=None)
    # output options
    parser.add_argument('-r', '--raw',
                        help='Print the raw result of the command. '
                             '(No smart display.)',
                        action='store_true', default=False)
    parser.add_argument('-i', '--in-place',
                        help='Edit files in-place, saving backups with the specified extension. '
                             'If a zero-length extension is given, no backup will be saved. '
                             'It is not recommended to give a zero-length extension when in-place '
                             'editing files, as you risk corruption or partial content in situations '
                             'where disk space is exhausted, etc.', default=None)
    parser.add_argument('--help', action='help', help='Display this help message.')
    parser.add_argument('--version', action='store_true', help='Display the version.')
    parser.add_argument('command', nargs='?')
    parser.add_argument('file', nargs='*')
    args = parser.parse_args(params)
    # -t is just a shorthand for -s '\t'.
    if args.tab_separator:
        args.separator = '\t'
    if args.version:
        # Imported lazily: pkg_resources is slow and only needed here.
        import pkg_resources
        print(pkg_resources.get_distribution('puffin').version)
        return
    if not (args.command or args.command_file):
        return parser.print_help()
    # Shared globals for the user command; --before can seed accumulators here.
    glob = {}
    if args.before:
        exec(args.before, glob)
    for stream_in, stream_out in cli_lib.determine_streams(args):
        for local in cli_lib.interpret_stream(
                stream_in, args.line, args.skip_header, args.separator):
            result = cli_lib.evaluate(local, glob, args.command, args.command_file)
            # A command file handles its own output; nothing to display.
            if args.command_file:
                continue
            try:
                if args.raw:
                    cli_lib.display_raw(result, stream_out)
                else:
                    cli_lib.display(result, stream_out)
            except IOError:
                # e.g. the consumer closed the pipe (head); stop quietly.
                return
        cli_lib.post_process(args, stream_in, stream_out)
|
xpenatan/dragome-backend | extensions/gdx-bullet/gdx-bullet-build/jni/emscripten/webidl_binder.py | Python | apache-2.0 | 25,177 | 0.01267 |
'''
WebIDL binder
http://kripken.github.io/emscripten-site/docs/porting/connecting_cpp_and_javascript/WebIDL-Binder.html
'''
import os, sys
sys.path.append(sys.argv[3])
import shared
sys.path.append(shared.path_from_root('third_party'))
sys.path.append(shared.path_from_root('third_party', 'ply'))
import WebIDL
# CHECKS='FAST' will skip most argument type checks in the wrapper methods for
# performance (~3x faster than default).
# CHECKS='ALL' will do extensive argument type checking (~5x slower than default).
# This will catch invalid numbers, invalid pointers, invalid strings, etc.
# Anything else defaults to legacy mode for backward compatibility.
# CHECKS='FAST' or 'ALL' select wrapper argument checking (see comment above);
# anything else falls back to legacy behavior.
CHECKS = os.environ.get('IDL_CHECKS') or 'DEFAULT'
# DEBUG=1 will print debug info in render_function
# Fixed: the original used "is '1'" -- an identity comparison against a string
# literal, which depends on CPython string interning; equality is what is meant.
DEBUG = os.environ.get('IDL_VERBOSE') == '1'
if DEBUG: print "Debug print ON, CHECKS=%s" % CHECKS
class Dummy:
  """Minimal stand-in for a parsed WebIDL node.

  Copies the *init* mapping's entries onto the instance as attributes and
  reports no extended attributes.
  """
  def __init__(self, init):
    for k, v in init.iteritems():  # Python 2 codebase; would be .items() on 3
      self.__dict__[k] = v
  def getExtendedAttribute(self, name):
    # Dummies never carry IDL extended attributes such as [Ref] or [Const].
    return None
# Command line: webidl_binder.py <input.idl> <output basename> <emscripten dir>
input_file = sys.argv[1]
output_base = sys.argv[2]
# Start from a clean slate; both glue files are regenerated below.
shared.try_delete(output_base + '.cpp')
shared.try_delete(output_base + '.js')
p = WebIDL.Parser()
# VoidPtr is implicitly available to every IDL file as an opaque pointer type.
p.parse(r'''
interface VoidPtr {
};
''' + open(input_file).read())
data = p.finish()
# Index the parsed IDL productions by name/kind for the generation passes.
interfaces = {}  # interface name -> IDLInterface
implements = {}  # implementor name -> list of implementee names
enums = {}       # enum name -> IDLEnum
for thing in data:
  if isinstance(thing, WebIDL.IDLInterface):
    interfaces[thing.identifier.name] = thing
  elif isinstance(thing, WebIDL.IDLImplementsStatement):
    implements.setdefault(thing.implementor.identifier.name, []).append(thing.implementee.identifier.name)
  elif isinstance(thing, WebIDL.IDLEnum):
    enums[thing.identifier.name] = thing
#print interfaces
#print implements
# Accumulators for the generated C++ glue (pre_c, mid_c) and JS glue (mid_js);
# their contents are concatenated and written out at the end of the script.
pre_c = []
mid_c = []
mid_js = []
pre_c += [r'''
#include <emscripten.h>
''']
mid_c += [r'''
extern "C" {
''']
def emit_constructor(name):
  """Emit the JS prototype/cache boilerplate for the wrapper class *name*.

  The prototype chains to the first implemented interface when one exists,
  otherwise to WrapperObject, and each class gets a __cache__ mapping raw
  pointers to their JS wrapper objects.
  """
  global mid_js
  mid_js += [r'''%s.prototype = %s;
%s.prototype.constructor = %s;
%s.prototype.__class__ = %s;
%s.__cache__ = {};
Module['%s'] = %s;
''' % (name, 'Object.create(%s.prototype)' % (implements[name][0] if implements.get(name) else 'WrapperObject'), name, name, name, name, name, name, name)]
mid_js += ['''
// Bindings utilities
function WrapperObject() {
}
''']
emit_constructor('WrapperObject')
mid_js += ['''
function getCache(__class__) {
return (__class__ || WrapperObject).__cache__;
}
Module['getCache'] = getCache;
function wrapPointer(ptr, __class__) {
var cache = getCache(__class__);
var ret = cache[ptr];
if (ret) return ret;
ret = Object.create((__class__ || WrapperObject).prototype);
ret.ptr = ptr;
return cache[ptr] = ret;
}
Module['wrapPointer'] = wrapPointer;
function castObject(obj, __class__) {
return wrapPointer(obj.ptr, __class__);
}
Module['castObject'] = castObject;
Module['NULL'] = wrapPointer(0);
function destroy(obj) {
if (!obj['__destroy__']) throw 'Error: Cannot destroy object. (Did you create it yourself?)';
obj['__destroy__']();
// Remove from cache, so the object can be GC'd and refs added onto it released
delete getCache(obj.__class__)[obj.ptr];
}
Module['destroy'] = destroy;
function compare(obj1, obj2) {
return obj1.ptr === obj2.ptr;
}
Module['compare'] = compare;
function getPointer(obj) {
return obj.ptr;
}
Module['getPointer'] = getPointer;
function getClass(obj) {
return obj.__class__;
}
Module['getClass'] = getClass;
// Converts big (string or array) values into a C-style storage, in temporary space
var ensureCache = {
buffer: 0, // the main buffer of temporary storage
size: 0, // the size of buffer
pos: 0, // the next free offset in b | uffer
temps: [], // extra allocations
needed: 0, // the total size we need next time
prepare: function() {
if (this.needed) {
// clear the temps
for (var i = 0; i < this.temps.length; i++) {
Module['_free'](this.temps[i]);
}
this.temps.length = 0;
// prepare to allocate a bigger buffer
Module['_free'](this.buffer);
this.buffer = 0;
this.size += this.needed;
// clean up
t | his.needed = 0;
}
if (!this.buffer) { // happens first time, or when we need to grow
this.size += 128; // heuristic, avoid many small grow events
this.buffer = Module['_malloc'](this.size);
assert(this.buffer);
}
this.pos = 0;
},
alloc: function(array, view) {
assert(this.buffer);
var bytes = view.BYTES_PER_ELEMENT;
var len = array.length * bytes;
len = (len + 7) & -8; // keep things aligned to 8 byte boundaries
var ret;
if (this.pos + len >= this.size) {
// we failed to allocate in the buffer, this time around :(
assert(len > 0); // null terminator, at least
this.needed += len;
ret = Module['_malloc'](len);
this.temps.push(ret);
} else {
// we can allocate in the buffer
ret = this.buffer + this.pos;
this.pos += len;
}
var retShifted = ret;
switch (bytes) {
case 2: retShifted >>= 1; break;
case 4: retShifted >>= 2; break;
case 8: retShifted >>= 3; break;
}
for (var i = 0; i < array.length; i++) {
view[retShifted + i] = array[i];
}
return ret;
},
};
function ensureString(value) {
if (typeof value === 'string') return ensureCache.alloc(intArrayFromString(value), HEAP8);
return value;
}
Module['ensureString'] = ensureString;
function ensureInt8(value) {
if (typeof value === 'object') return ensureCache.alloc(value, HEAP8);
return value;
}
Module['ensureInt8'] = ensureInt8;
function ensureInt16(value) {
if (typeof value === 'object') return ensureCache.alloc(value, HEAP16);
return value;
}
Module['ensureInt16'] = ensureInt16;
function ensureInt32(value) {
if (typeof value === 'object') return ensureCache.alloc(value, HEAP32);
return value;
}
Module['ensureInt32'] = ensureInt32;
function ensureFloat32(value) {
if (typeof value === 'object') return ensureCache.alloc(value, HEAPF32);
return value;
}
Module['ensureFloat32'] = ensureFloat32;
function ensureFloat64(value) {
if (typeof value === 'object') return ensureCache.alloc(value, HEAPF64);
return value;
}
Module['ensureFloat64'] = ensureFloat64;
''']
mid_c += ['''
// Not using size_t for array indices as the values used by the javascript code are signed.
void array_bounds_check(const int array_size, const int array_idx) {
if (array_idx < 0 || array_idx >= array_size) {
EM_ASM_INT({
throw 'Array index ' + $0 + ' out of bounds: [0,' + $1 + ')';
}, array_idx, array_size);
}
}
''']
C_FLOATS = ['float', 'double']
def full_typename(arg):
  """Return the IDL type name of *arg*, with '[]' appended for array types."""
  suffix = '[]' if arg.type.isArray() else ''
  return arg.type.name + suffix
def type_to_c(t, non_pointing=False):
  """Map a WebIDL type name *t* onto its C type spelling.

  '[]' array types gain a trailing '*'. Known interface names are prefixed
  per their [Prefix] extended attribute and pointed-to unless *non_pointing*
  is set; unknown names pass through unchanged.
  """
  t = t.replace(' (Wrapper)', '')
  suffix = ''
  if '[]' in t:
    suffix = '*'
    t = t.replace('[]', '')
  # Fixed translations for the WebIDL primitive types.
  primitives = {
    'Long': 'int',
    'UnsignedLong': 'unsigned int',
    'Short': 'short',
    'UnsignedShort': 'unsigned short',
    'Byte': 'char',
    'Octet': 'unsigned char',
    'Void': 'void',
    'String': 'char*',
    'Float': 'float',
    'Double': 'double',
    'Boolean': 'bool',
    'Any': 'void*',
    'VoidPtr': 'void*',
  }
  if t in primitives:
    ret = primitives[t]
  elif t in interfaces:
    ret = (interfaces[t].getExtendedAttribute('Prefix') or [''])[0] + t + ('' if non_pointing else '*')
  else:
    ret = t
  return ret + suffix
def take_addr_if_nonpointer(m):
  """Return '&' when *m* is stored by value ([Ref]/[Value]), else ''."""
  stored_by_value = m.getExtendedAttribute('Ref') or m.getExtendedAttribute('Value')
  return '&' if stored_by_value else ''
def deref_if_nonpointer(m):
  """Return '*' when *m* is stored by value ([Ref]/[Value]), else ''."""
  stored_by_value = m.getExtendedAttribute('Ref') or m.getExtendedAttribute('Value')
  return '*' if stored_by_value else ''
def type_to_cdec(raw):
  """Return the C declaration string for the IDL argument/member *raw*.

  Interface types honor the [Const], [Ref] and [Value] extended attributes
  (defaulting to a pointer); non-interface types are plain value types.
  """
  name = ret = type_to_c(raw.type.name, non_pointing=True)
  if raw.getExtendedAttribute('Const'): ret = 'const ' + ret
  if name not in interfaces: return ret
  if raw.getExtendedAttribute('Ref'):
    return ret + '&'
  if raw.getExtendedAttribute('Value'):
    return ret
  return ret + '*'
def render_function(class_name, func_name, sigs, return_type |
MartinAltmayer/pokerserver | tests/integration/controllers/test_table.py | Python | gpl-3.0 | 14,923 | 0.000804 | from http import HTTPStatus
from json import loads
from unittest.mock import Mock, patch
from uuid import uuid4
from tornado.testing import gen_test
from pokerserver.database import PlayerState, UUIDsRelation
from pokerserver.models import InvalidTurnError, NotYourTurnError, Player, PositionOccupiedError
from tests.utils import IntegrationHttpTestCase, create_table, return_done_future
class TestTableController(IntegrationHttpTestCase):
    """GET /table/<name>: hole cards are only revealed to the seated player."""
    async def async_setup(self):
        """Create a three-player table; 'c' is seated, 'd' only has a UUID."""
        self.table_id = 1
        self.uuid = uuid4()
        self.uuid2 = uuid4()
        self.player_name = 'c'
        self.player_name2 = 'd'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        await UUIDsRelation.add_uuid(self.uuid2, self.player_name2)
        players = [
            Player(self.table_id, 1, 'a', 0, ['Ah', 'Ac'], 0),
            Player(self.table_id, 2, 'b', 0, ['Kh', 'Kc'], 0),
            Player(self.table_id, 5, 'c', 0, ['Qh', 'Qc'], 0)
        ]
        table = await create_table(table_id=self.table_id, players=players)
        self.table_name = table.name
    @gen_test
    async def test_get_for_player_at_table(self):
        """A seated player sees only his own cards and cannot join again."""
        await self.async_setup()
        response = await self.fetch_async('/table/{}?uuid={}'.format(self.table_name, self.uuid))
        self.assertEqual(response.code, HTTPStatus.OK.value)
        table = loads(response.body.decode())
        self.assertEqual(table, {
            'big_blind': 2,
            'can_join': False,
            'current_player': None,
            'dealer': None,
            'state': 'waiting for players',
            'round': 'preflop',
            'open_cards': [],
            'players': [{
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'a',
                'bet': 0,
                'position': 1,
                'state': PlayerState.PLAYING.value
            }, {
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'b',
                'bet': 0,
                'position': 2,
                'state': PlayerState.PLAYING.value
            }, {
                'table_id': 1,
                'balance': 0,
                'cards': ['Qh', 'Qc'],
                'name': 'c',
                'bet': 0,
                'position': 5,
                'state': PlayerState.PLAYING.value
            }],
            'pots': [{
                'bets': {}
            }],
            'small_blind': 1
        })
    @gen_test
    async def test_get_for_player_not_at_table(self):
        """An authenticated but unseated player sees no cards and may join."""
        await self.async_setup()
        response = await self.fetch_async('/table/{}?uuid={}'.format(self.table_name, self.uuid2))
        self.assertEqual(response.code, HTTPStatus.OK.value)
        table = loads(response.body.decode())
        self.assertEqual(table, {
            'big_blind': 2,
            'can_join': True,
            'current_player': None,
            'dealer': None,
            'state': 'waiting for players',
            'round': 'preflop',
            'open_cards': [],
            'players': [{
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'a',
                'bet': 0,
                'position': 1,
                'state': PlayerState.PLAYING.value
            }, {
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'b',
                'bet': 0,
                'position': 2,
                'state': PlayerState.PLAYING.value
            }, {
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'c',
                'bet': 0,
                'position': 5,
                'state': PlayerState.PLAYING.value
            }],
            'pots': [{
                'bets': {}
            }],
            'small_blind': 1
        })
    @gen_test
    async def test_get_for_unauthorized_player(self):
        """Without a uuid the response is public: no cards visible at all."""
        await self.async_setup()
        response = await self.fetch_async('/table/{}'.format(self.table_name))
        self.assertEqual(response.code, HTTPStatus.OK.value)
        table = loads(response.body.decode())
        self.assertEqual(table, {
            'big_blind': 2,
            'can_join': True,
            'current_player': None,
            'dealer': None,
            'state': 'waiting for players',
            'round': 'preflop',
            'open_cards': [],
            'players': [{
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'a',
                'bet': 0,
                'position': 1,
                'state': PlayerState.PLAYING.value
            }, {
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'b',
                'bet': 0,
                'position': 2,
                'state': PlayerState.PLAYING.value
            }, {
                'table_id': 1,
                'balance': 0,
                'cards': [],
                'name': 'c',
                'bet': 0,
                'position': 5,
                'state': PlayerState.PLAYING.value
            }],
            'pots': [{
                'bets': {}
            }],
            'small_blind': 1
        })
class TestJoinController(IntegrationHttpTestCase):
    """POST /table/<name>/actions/join: seat taking and its error cases."""
    async def async_setup(self):
        """Register one uuid-authenticated player and a two-seat table."""
        self.uuid = uuid4()
        self.player_name = 'player'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        table = await create_table(max_player_count=2)
        self.table_name = table.name
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_join(self, load_mock):
        """A valid join is delegated to Match.join with name and position."""
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.join.side_effect = return_done_future()
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={'position': 1}
        )
        self.assertEqual(response.code, HTTPStatus.OK.value)
        load_mock.assert_called_once_with(self.table_name)
        match_mock.join.assert_called_once_with(self.player_name, 1)
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_join_occupied_position(self, load_mock):
        """Joining an occupied seat maps PositionOccupiedError to 409."""
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.join.side_effect = return_done_future(exception=PositionOccupiedError)
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={'position': 1},
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.CONFLICT.value)
    @gen_test
    async def test_join_missing_parameter(self):
        """Omitting 'position' yields 400 before the match is consulted."""
        await self.async_setup()
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={},
            raise_error=False)
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
    @gen_test
    async def test_join_invalid_parameter(self):
        """A negative position is rejected with 400."""
        await self.async_setup()
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={'position': -1},
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
class TestFoldController(IntegrationHttpTestCase):
async def async_setup(self):
self.uuid = uuid4()
self.player_name = 'player'
await UUIDsRelation.add_uuid(self.uuid, self.player_name)
table = await create_table(max_player_count=2)
self.table_name = table.name
@patch('pokerserver.controllers.base.BaseController.load_match')
@gen_test
|
arnavd96/Cinemiezer | myvenv/lib/python3.4/site-packages/music21/test/testStream.py | Python | mit | 284,928 | 0.006914 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: testStream.py
# Purpose: tests for stream.py
#
# Authors: Michael Scott Cuthbert
# Christopher Ariza
#
# Copyright: Copyright © 2009-2014 Michael Scott Cuthbert and the music21 Project
# License: LGPL or BSD, see license.txt
#-------------------------------------------------------------------------------
import random
import unittest
import copy
from music21.stream import Stream
from music21.stream import Voice
from music21.stream import Measure
from music21.stream import Score
from music21.stream import Part
from music21 import bar
from music21 import chord
from music21 import clef
from music21 import common
from music21 import duration
from music21 import interval
from music21 import key
from music21 import metadata
from music21 import meter
from music21 import note
from music21 import pitch
from music21.musicxml import m21ToXml
from music21.midi import translate as midiTranslate
from music21 import environment
_MOD = "testStream.py"
environLocal = environment.Environment(_MOD)
#-------------------------------------------------------------------------------
class TestExternal(unittest.TestCase):
    """Smoke tests that render output externally (Lilypond/MusicXML viewers).

    These are interactive/visual checks rather than assertions; most methods
    end in show(), which opens an external program.
    """
    def runTest(self):
        # Placeholder so the class can be instantiated directly.
        pass
    def testLilySimple(self):
        """Render a minimal one-voice stream as a Lilypond PNG."""
        a = Stream()
        ts = meter.TimeSignature("3/4")
        b = Stream()
        q = note.Note(type='quarter')
        q.octave = 5
        b.repeatInsert(q, [0,1,2,3])
        bestC = b.bestClef(allowTreble8vb = True)
        a.insert(0, bestC)
        a.insert(0, ts)
        a.insert(0, b)
        a.show('lily.png')
    def testLilySemiComplex(self):
        """Render a 5:3 tuplet with accidentals and an editorial comment."""
        a = Stream()
        ts = meter.TimeSignature("3/8")
        b = Stream()
        q = note.Note(type='eighth')
        dur1 = duration.Duration()
        dur1.type = "eighth"
        tup1 = duration.Tuplet()
        tup1.tupletActual = [5, dur1]
        tup1.tupletNormal = [3, dur1]
        q.octave = 2
        q.duration.appendTuplet(tup1)
        for i in range(0,5):
            b.append(copy.deepcopy(q))
            b.elements[i].accidental = pitch.Accidental(i - 2)
        b.elements[0].duration.tuplets[0].type = "start"
        b.elements[-1].duration.tuplets[0].type = "stop"
        b.elements[2].editorial.comment.text = "a real C"
        bestC = b.bestClef(allowTreble8vb = True)
        a.insert(0, bestC)
        a.insert(0, ts)
        a.insert(0, b)
        a.show('lily.png')
    def testScoreLily(self):
        '''
        Test the lilypond output of various score operations.
        '''
        c = note.Note("C4")
        d = note.Note("D4")
        ts = meter.TimeSignature("2/4")
        s1 = Part()
        s1.append(copy.deepcopy(c))
        s1.append(copy.deepcopy(d))
        s2 = Part()
        s2.append(copy.deepcopy(d))
        s2.append(copy.deepcopy(c))
        score1 = Score()
        score1.insert(ts)
        score1.insert(s1)
        score1.insert(s2)
        score1.show('lily.png')
    def testMXOutput(self):
        '''A simple test of adding notes to measures in a stream.
        '''
        c = Stream()
        for dummy in range(4):
            b = Measure()
            for p in ['a', 'g', 'c#', 'a#']:
                a = note.Note(p)
                b.append(a)
            c.append(b)
        c.show()
    def testMxMeasures(self):
        '''A test of the automatic partitioning of notes in a measure and the creation of ties.
        '''
        n = note.Note()
        n.quarterLength = 3
        a = Stream()
        a.repeatInsert(n, list(range(0,120,3)))
        #a.show() # default time signature used
        a.insert( 0, meter.TimeSignature("5/4") )
        a.insert(10, meter.TimeSignature("2/4") )
        a.insert( 3, meter.TimeSignature("3/16") )
        a.insert(20, meter.TimeSignature("9/8") )
        a.insert(40, meter.TimeSignature("10/4") )
        a.show()
    def testMultipartStreams(self):
        '''Test the creation of multi-part streams by simply having streams within streams.
        '''
        q = Stream()
        r = Stream()
        for x in ['c3','a3','g#4','d2'] * 10:
            n = note.Note(x)
            n.quarterLength = .25
            q.append(n)
            m = note.Note(x)
            m.quarterLength = 1.125
            r.append(m)
        s = Stream() # container
        s.insert(q)
        s.insert(r)
        s.insert(0, meter.TimeSignature("3/4") )
        s.insert(3, meter.TimeSignature("5/4") )
        s.insert(8, meter.TimeSignature("3/4") )
        s.show()
    def testMultipartMeasures(self):
        '''This demonstrates obtaining slices from a stream and layering
        them into individual parts.
        OMIT_FROM_DOCS
        TODO: this should show instruments
        this is presently not showing instruments
        probably b/c when appending to s Stream activeSite is set to that stream
        '''
        from music21 import corpus, converter
        a = converter.parse(corpus.getWork(['mozart', 'k155','movement2.xml']))
        b = a[8][4:8]
        c = a[8][8:12]
        d = a[8][12:16]
        s = Stream()
        s.insert(b)
        s.insert(c)
        s.insert(d)
        s.show()
    def testCanons(self):
        '''
        A test of creating a canon with shifted presentations of a source melody.
        This also demonstrates
        the addition of rests to parts that start late or end early.
        The addition of rests happens with makeRests(), which is called in
        musicxml generation of a Stream.
        '''
        a = ['c', 'g#', 'd-', 'f#', 'e', 'f' ] * 4
        s = Stream()
        partOffsetShift = 1.25
        partOffset = 0
        for junk in range(6):
            p = Stream()
            for pitchName in a:
                n = note.Note(pitchName)
                n.quarterLength = 1.5
                p.append(n)
            p.offset = partOffset
            s.insert(p)
            partOffset += partOffsetShift
        s.show()
    def testBeamsPartial(self):
        '''This demonstrates a partial beam; a beam that is not connected between more than one note.
        '''
        q = Stream()
        for x in [.125, .25, .25, .125, .125, .125] * 30:
            n = note.Note('c')
            n.quarterLength = x
            q.append(n)
        s = Stream() # container
        s.insert(q)
        s.insert(0, meter.TimeSignature("3/4") )
        s.insert(3, meter.TimeSignature("5/4") )
        s.insert(8, meter.TimeSignature("4/4") )
        s.show()
    def testBeamsStream(self):
        '''A test of beams applied to different time signatures.
        '''
        q = Stream()
        r = Stream()
        p = Stream()
        for x in ['c3','a3','c#4','d3'] * 30:
            n = note.Note(x)
            #n.quarterLength = random.choice([.25, .125, .5])
            n.quarterLength = random.choice([.25])
            q.append(n)
            m = note.Note(x)
            m.quarterLength = .5
            r.append(m)
            o = note.Note(x)
            o.quarterLength = .125
            p.append(o)
        s = Stream() # container
        s.append(q)
        s.append(r)
        s.append(p)
        s.insert(0, meter.TimeSignature("3/4") )
        s.insert(3, meter.TimeSignature("5/4") )
        s.insert(8, meter.TimeSignature("4/4") )
        self.assertEqual(len(s.flat.notes), 360)
        s.show()
    def testBeamsMeasure(self):
        """Beam sixteen sixteenth-notes in a 4/4 measure and render."""
        aMeasure = Measure()
        aMeasure.timeSignature = meter.TimeSignature('4/4')
        aNote = note.Note()
        aNote.quarterLength = .25
        aMeasure.repeatAppend(aNote,16)
        bMeasure = aMeasure.makeBeams()
        bMeasure.show()
#-------------------------------------------------------------------------------
class Test(unittest.TestCase):
def runTest(self):
pass
def testAdd(self):
import music21 # needed to do fully-qualified isinstance name checking
a = Stre |
fusionbox/django-extensions | django_extensions/management/commands/validate_templates.py | Python | mit | 3,269 | 0.003059 | import os
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import color_style
from django.template.base import add_to_builtins
from django.template.loaders.filesystem import Loader
from django_extensions.utils import validatingtemplatetags
#
# TODO: Render the template with fake request object ?
#
class Command(BaseCommand):
    """Management command: load every template under TEMPLATE_DIRS and report
    syntax/compile errors (Python 2 / optparse-era django-extensions code)."""
    args = ''
    help = "Validate templates on syntax and compile errors"
    option_list = BaseCommand.option_list + (
        make_option('--break', '-b', action='store_true', dest='break',
                    default=False, help="Break on first error."),
        make_option('--check-urls', '-u', action='store_true', dest='check_urls',
                    default=False, help="Check url tag view names are quoted appropriately"),
        make_option('--force-new-urls', '-n', action='store_true', dest='force_new_urls',
                    default=False, help="Error on usage of old style url tags (without {% load urls from future %}"),
        make_option('--include', '-i', action='append', dest='includes',
                    default=[], help="Append these paths to TEMPLATE_DIRS")
    )
    def handle(self, *args, **options):
        from django.conf import settings
        style = color_style()
        # Merge configured dirs, -i/--include dirs and the optional extra
        # setting into one search set before validation.
        template_dirs = set(settings.TEMPLATE_DIRS)
        template_dirs |= set(options.get('includes', []))
        template_dirs |= set(getattr(settings, 'VALIDATE_TEMPLATES_EXTRA_TEMPLATE_DIRS', []))
        settings.TEMPLATE_DIRS = list(template_dirs)
        # TEMPLATE_DEBUG makes the loader attach origin/line info to errors.
        settings.TEMPLATE_DEBUG = True
        verbosity = int(options.get('verbosity', 1))
        errors = 0
        template_loader = Loader()
        # Replace built in template tags with our own validating versions
        if options.get('check_urls', False):
            add_to_builtins('django_extensions.utils.validatingtemplatetags')
        for template_dir in template_dirs:
            for root, dirs, filenames in os.walk(template_dir):
                for filename in filenames:
                    # Skip editor swap/backup artifacts.
                    if filename.endswith(".swp"):
                        continue
                    if filename.endswith("~"):
                        continue
                    filepath = os.path.join(root, filename)
                    if verbosity > 1:
                        print filepath
                    validatingtemplatetags.before_new_template(options.get('force_new_urls', False))
                    try:
                        template_loader.load_template(filename, [root])
                    except Exception, e:
                        errors += 1
                        print "%s: %s" % (filepath, style.ERROR("%s %s" % (e.__class__.__name__, str(e))))
                    # url-tag problems are collected out-of-band by the
                    # validating template tags; drain them per template.
                    template_errors = validatingtemplatetags.get_template_errors()
                    for origin, line, message in template_errors:
                        errors += 1
                        print "%s(%s): %s" % (origin, line, style.ERROR(message))
        if errors and options.get('break', False):
            raise CommandError("Errors found")
        if errors:
            raise CommandError("%s errors found" % errors)
        print "%s errors found" % errors
|
potix2/crazyflie_rospy | scripts/crazyflie_add.py | Python | mit | 1,867 | 0.002142 | #!/usr/bin/env python
import sys
import rospy
from crazyflie_rospy.srv import AddCrazyflie, AddCrazyflieRequest, AddCrazyflieResponse
def main(args):
    """Read per-vehicle parameters from the ROS parameter server and register
    the crazyflie with the global /add_crazyflie service.

    :param args: command line arguments (unused beyond ROS remapping).
    """
    rospy.init_node('crazyflie_add')
    # Required private (~) parameters; get_param raises if they are missing.
    uri = rospy.get_param("~uri")
    tf_prefix = rospy.get_param("~tf_prefix")
    # Optional trims and feature toggles with defaults.
    roll_trim = rospy.get_param("~roll_trim", 0.0)
    pitch_trim = rospy.get_param("~pitch_trim", 0.0)
    enable_logging = rospy.get_param("~enable_logging", True)
    enable_parameters = rospy.get_param("~enable_parameters", True)
    use_ros_time = rospy.get_param("~use_ros_time", True)
    enable_logging_imu = rospy.get_param("~enable_logging_imu", True)
    enable_logging_temperature = rospy.get_param("~enable_logging_temperature", True)
    enable_logging_magnetic_field = rospy.get_param("~enable_logging_magnetic_field", True)
    enable_logging_pressure = rospy.get_param("~enable_logging_pressure", True)
    enable_logging_battery = rospy.get_param("~enable_logging_battery", True)
    height_hold = rospy.get_param("~height_hold", False)
    # Block until the crazyflie server advertises the service.
    rospy.loginfo("wait_for_service add_crazyflie...")
    rospy.wait_for_service('/add_crazyflie')
    rospy.loginfo("done")
    try:
        add_crazyflie = rospy.ServiceProxy('/add_crazyflie', AddCrazyflie)
        # Argument order must match the AddCrazyflie service definition;
        # the empty list is the custom log blocks field.
        req = AddCrazyflieRequest(
            uri,
            tf_prefix,
            roll_trim,
            pitch_trim,
            enable_logging,
            enable_parameters,
            [],
            use_ros_time,
            enable_logging_imu,
            enable_logging_temperature,
            enable_logging_magnetic_field,
            enable_logging_pressure,
            enable_logging_battery,
            height_hold
        )
        add_crazyflie(req)
    except rospy.ServiceException as exc:
        rospy.logerr('Service did not process request: ' + str(exc))
if __name__ == '__main__':
main(sys.argv)
|
makinacorpus/Geotrek | geotrek/outdoor/templatetags/outdoor_tags.py | Python | bsd-2-clause | 1,633 | 0 | from django import template
from django.conf import settings
import json
from geotrek.outdoor.models import Practice, RatingScale, Site
register = template.Library()
@register.simple_tag
def is_outdoor_enabled():
    # True when the optional outdoor app is activated for this deployment.
    return 'geotrek.outdoor' in settings.INSTALLED_APPS
@register.simple_tag
def site_practices():
    """Return a JSON mapping of each practice pk to its site types and rating scales."""
    practices = {}
    for practice in Practice.objects.all():
        types = {str(site_type.pk): site_type.name for site_type in practice.site_types.all()}
        scales = {str(scale.pk): scale.name for scale in practice.rating_scales.all()}
        practices[str(practice.pk)] = {'types': types, 'scales': scales}
    return json.dumps(practices)
@register.simple_tag
def course_sites():
    """Return a JSON mapping of each site pk to its practice's course types and rating scales.

    Sites without a practice map to empty 'types'/'scales' dicts.
    """
    sites = {}
    for site in Site.objects.all():
        if site.practice is None:
            sites[str(site.pk)] = {'types': {}, 'scales': {}}
        else:
            types = {str(course_type.pk): course_type.name for course_type in site.practice.course_types.all()}
            scales = {str(scale.pk): scale.name for scale in site.practice.rating_scales.all()}
            sites[str(site.pk)] = {'types': types, 'scales': scales}
    return json.dumps(sites)
@register.simple_tag
def all_ratings_scales():
    """Return a JSON mapping of every rating scale pk to its name."""
    scales = {}
    for scale in RatingScale.objects.all():
        scales[str(scale.pk)] = scale.name
    return json.dumps(scales)
@register.filter
def orientation_display(orientation):
    """Translate an orientation code into its human-readable label."""
    labels = dict(Site.ORIENTATION_CHOICES)
    return labels[orientation]
@register.filter
def wind_display(orientation):
    """Translate a wind-orientation code into its human-readable label."""
    labels = dict(Site.WIND_CHOICES)
    return labels[orientation]
|
twitter/pants | contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py | Python | apache-2.0 | 2,327 | 0.011603 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import re
from builtins import open
# Matches: include "path" with an optional trailing //- or #-style comment.
INCLUDE_PARSER = re.compile(r'^\s*include\s+"([^"]+)"\s*([\/\/|\#].*)*$')


def find_includes(basedirs, source, log=None):
  """Finds all thrift files included by the given thrift source.

  :basedirs: A set of thrift source file base directories to look for includes in.
  :source: The thrift source file to scan for includes.
  :log: An optional logger
  :returns: The set of existing paths resolved from the source's include statements.
  :raises ValueError: If an included file cannot be found under any base directory.
  """
  # The source's own directory is always searched first, then the given bases.
  all_basedirs = [os.path.dirname(source)]
  all_basedirs.extend(basedirs)
  includes = set()
  with open(source, 'r') as thrift:
    # Iterate lazily instead of materializing the file with readlines().
    for line in thrift:
      match = INCLUDE_PARSER.match(line)
      if match:
        capture = match.group(1)
        added = False
        # An include may resolve under several bases; collect every match.
        for basedir in all_basedirs:
          include = os.path.join(basedir, capture)
          if os.path.exists(include):
            if log:
              log.debug('{} has include {}'.format(source, include))
            includes.add(include)
            added = True
        if not added:
          # Fixed: report the include name as written; the original formatted
          # in `include`, i.e. only the last candidate path that was tried.
          raise ValueError("{} included in {} not found in bases {}"
                           .format(capture, source, all_basedirs))
  return includes
def find_root_thrifts(basedirs, sources, log=None):
  """Finds the root thrift files in the graph formed by sources and their recursive includes.

  A root is a source that no other given source includes.

  :basedirs: A set of thrift source file base directories to look for includes in.
  :sources: Seed thrift files to examine.
  :log: An optional logger.
  """
  roots = set(sources)
  # Any file included by some source cannot itself be a root.
  for thrift_source in sources:
    roots -= find_includes(basedirs, thrift_source, log=log)
  return roots
def calculate_include_paths(targets, is_thrift_target):
  """Calculates the set of import paths for the given targets.

  :targets: The targets to examine.
  :is_thrift_target: A predicate to pick out thrift targets for consideration in the analysis.
  :returns: Include basedirs for the targets.
  """
  basedirs = set()
  for target in targets:
    # walk() visits the target and its (thrift) transitive dependencies.
    target.walk(lambda tgt: basedirs.add(tgt.target_base), predicate=is_thrift_target)
  return basedirs
|
DeanSherwin/django-dynamic-scraper | tests/scraper/scraper_test.py | Python | bsd-3-clause | 10,956 | 0.011044 | from __future__ import unicode_literals
from builtins import str
from builtins import object
import logging, os, os.path, shutil
from django.test import TestCase
from scrapy import signals
from scrapy.exceptions import DropItem
from scrapy.utils.project import get_project_settings
settings = get_project_settings()
from twisted.internet import reactor
from scrapy.crawler import Crawler
from pydispatch import dispatcher
from scrapy.crawler import CrawlerProcess
from dynamic_scraper.spiders.django_spider import DjangoSpider
from dynamic_scraper.spiders.django_checker import DjangoChecker
from dynamic_scraper.spiders.checker_test import CheckerTest
from dynamic_scraper.models import *
from scraper.models import EventWebsite, Event, EventItem
# Tests need webserver for serving test pages: python manage.py runserver 0.0.0.0:8010
class EventSpider(DjangoSpider):
    """Concrete DDS spider used by the tests: scrapes Event items for one EventWebsite."""
    name = 'event_spider'
    def __init__(self, *args, **kwargs):
        # Resolve the EventWebsite reference object from the spider kwargs via the
        # DDS base-class helper (presumably keyed on the 'id' kwarg — see run_event_spider).
        self._set_ref_object(EventWebsite, **kwargs)
        self.scraper = self.ref_object.scraper
        self.scrape_url = self.ref_object.url
        self.scheduler_runtime = self.ref_object.scraper_runtime
        self.scraped_obj_class = Event
        self.scraped_obj_item_class = EventItem
        # NOTE(review): `self` is passed both implicitly and as an explicit first arg;
        # this mirrors the django-dynamic-scraper spider pattern — confirm DjangoSpider
        # expects the extra positional.
        super(EventSpider, self).__init__(self, *args, **kwargs)
class DjangoWriterPipeline(object):
    """Scrapy item pipeline persisting scraped EventItems as Event model rows."""
    def process_item(self, item, spider):
        # Persist only when the spider runs with do_action enabled (see run_event_spider).
        if spider.conf['DO_ACTION']:
            try:
                item['event_website'] = spider.ref_object
                # Each new Event gets its own checker scheduler runtime row.
                checker_rt = SchedulerRuntime()
                checker_rt.save()
                item['checker_runtime'] = checker_rt
                # Normalize a missing/None description to the empty string before save.
                if not 'description' in item or item['description'] == None:
                    item['description'] = ''
                item.save()
            except IntegrityError as e:
                # NOTE(review): IntegrityError is not imported explicitly in this file;
                # presumably it arrives via the star import of dynamic_scraper.models — confirm.
                spider.log(str(e), logging.ERROR)
                raise DropItem("Missing attribute.")
        return item
class EventChecker(DjangoChecker):
    """Concrete DDS checker used by the tests to validate persisted Event objects."""
    name = 'event_checker'
    def __init__(self, *args, **kwargs):
        # Resolve the Event reference object from the checker kwargs via the DDS helper.
        self._set_ref_object(Event, **kwargs)
        self.scraper = self.ref_object.event_website.scraper
        self.scrape_url = self.ref_object.url
        self.scheduler_runtime = self.ref_object.checker_runtime
        # NOTE(review): `self` passed twice, mirroring the DDS checker pattern —
        # confirm DjangoChecker.__init__ expects the extra positional.
        super(EventChecker, self).__init__(self, *args, **kwargs)
class ScraperTest(TestCase):
SERVER_URL = 'http://localhost:8010/static/'
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
IMG_DIR = './scraper/imgs/'
    def __init__(self, *args, **kwargs):
        """Select the Scrapy settings module matching the test being run.

        Several image-storage / custom-processor tests need different
        IMAGES_* and DSCRAPER_* settings, so the settings module is picked
        from the test method name (args[0]) and the relevant values are
        cached on self.dds_settings for setUp() to apply.
        """
        # SCRAPY_SETTINGS_MODULE must point at the right module before Scrapy
        # machinery reads project settings; each branch also imports that module
        # directly so its values can be copied below.
        if args[0] == 'test_img_store_format_flat_with_thumbs' or args[0] == 'test_delete_with_img_flat_with_thumbs':
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.images_store_format_flat_with_thumbs';
            from settings import images_store_format_flat_with_thumbs as file_settings
        elif args[0] == 'test_img_store_format_all_no_thumbs' or args[0] == 'test_delete_with_img_all_no_thumbs':
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.images_store_format_all_no_thumbs';
            from settings import images_store_format_all_no_thumbs as file_settings
        elif args[0] == 'test_img_store_format_all_with_thumbs' or args[0] == 'test_delete_with_img_all_with_thumbs':
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.images_store_format_all_with_thumbs';
            from settings import images_store_format_all_with_thumbs as file_settings
        elif args[0] == 'test_img_store_format_thumbs_with_thumbs' or args[0] == 'test_delete_with_img_thumbs_with_thumbs':
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.images_store_format_thumbs_with_thumbs';
            from settings import images_store_format_thumbs_with_thumbs as file_settings
        elif args[0] == 'test_custom_processor':
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.custom_processor'
            from settings import custom_processor as file_settings
        elif args[0] == 'test_custom_processor_wrong_path':
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.custom_processor_wrong_path'
            from settings import custom_processor_wrong_path as file_settings
        else:
            # Default settings for every other test.
            os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings.base_settings';
            from settings import base_settings as file_settings
        # Cache the settings values that setUp() will push into the live Settings object.
        self.dds_settings = {}
        self.dds_settings['ITEM_PIPELINES'] = file_settings.ITEM_PIPELINES
        self.dds_settings['SPLASH_URL'] = file_settings.SPLASH_URL
        self.dds_settings['DUPEFILTER_CLASS'] = file_settings.DUPEFILTER_CLASS
        self.dds_settings['DOWNLOADER_MIDDLEWARES'] = file_settings.DOWNLOADER_MIDDLEWARES
        self.dds_settings['DSCRAPER_SPLASH_ARGS'] = file_settings.DSCRAPER_SPLASH_ARGS
        self.dds_settings['IMAGES_STORE'] = file_settings.IMAGES_STORE
        # Optional keys: only present in some of the per-test settings modules.
        if 'IMAGES_THUMBS' in file_settings.__dict__:
            self.dds_settings['IMAGES_THUMBS'] = file_settings.IMAGES_THUMBS
        if 'DSCRAPER_IMAGES_STORE_FORMAT' in file_settings.__dict__:
            self.dds_settings['DSCRAPER_IMAGES_STORE_FORMAT'] = file_settings.DSCRAPER_IMAGES_STORE_FORMAT
        if 'DSCRAPER_CUSTOM_PROCESSORS' in file_settings.__dict__:
            self.dds_settings['DSCRAPER_CUSTOM_PROCESSORS'] = file_settings.DSCRAPER_CUSTOM_PROCESSORS
        super(ScraperTest, self).__init__(*args, **kwargs)
def record_signal(self, *args, **kwargs):
pass
#print kwargs
def run_event_spider(self, id, do_action='yes'):
kwargs = {
'id': id,
'do_action': do_action,
}
self.spider = EventSpider(**kwargs)
self.process.crawl(self.spider, **kwargs)
self.process.start()
def run_event_checker(self, id):
kwargs = {
'id': id,
'do_action': 'yes'
}
self.checker = EventChecker(**kwargs)
self.process.crawl(self.checker, **kwargs)
self.process.start()
    def run_checker_test(self, id):
        """Run the DDS CheckerTest spider for scraper *id* (legacy Crawler-based runner)."""
        kwargs = {
            'id': id,
        }
        self.checker_test = CheckerTest(**kwargs)
        self.checker_test.conf['RUN_TYPE'] = 'TASK'
        self.checker_test.conf['DO_ACTION'] = True
        self.checker_test.conf['LOG_ENABLED'] = False
        self.checker_test.conf['LOG_LEVEL'] = 'DEBUG'
        # NOTE(review): this method looks stale — self.crawler is only assigned in the
        # commented-out setUp() code, and `log` (the removed scrapy.log API) is never
        # imported, so invoking it would raise AttributeError/NameError. Confirm whether
        # any test still calls it before relying on this path.
        self.crawler.crawl(self.checker_test)
        self.crawler.start()
        log.start(loglevel="DEBUG", logstdout=True)
        reactor.run()
def setUp(self):
if os.path.exists(self.IMG_DIR):
shutil.rmtree(self.IMG_DIR)
os.mkdir(self.IMG_DIR)
settings.set('ITEM_PIPELINES', self.dds_settings['ITEM_PIPELINES'], priority='cmdline')
settings.set('SPLASH_URL', self.dds_settings['SPLASH_URL'], priority='cmdline')
settings.set('DUPEFILTER_CLASS', self.dds_settings['DUPEFILTER_CLASS'], priority='cmdline')
settings.set('DOWNLOADER_MIDDLEWARES', self.dds_settings['DOWNLOADER_MIDDLEWARES'], priority='cmdline')
settings.set('IMAGES_STORE', self.dds_settings['IMAGES_STORE'], priority='cmdline')
if 'IMAGES_THUMBS' in self.dds_settings:
settings.set('IMAGES_THUMBS', self.dds_settings['IMAGES_THUMBS'], priority='cmdline')
if 'DSCRAPER_IMAGES_STORE_FORMAT' in self.dds_settings:
settings.set('DSCRAPER_IMAGES_STORE_FORMAT', self.dds_settings['DSCRAPER_IMAGES_STORE_FORMAT'], priority='cmdline')
if 'DSCRAPER_CUSTOM_PROCESSORS' in self.dds_settings:
settings.set('DSCRAPER_CUSTOM_PROCESSORS', self.dds_settings['DSCRAPER_CUSTOM_PROCESSORS' | ], priority='cmdline')
settings.set('COOKIES_DEBUG', True)
settings.set('LOG_LEVEL', 'DEBUG')
settings.set('LOG_ENABLED', False)
#self.crawler = Crawler(settings)
#self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
#self.crawler.configure()
self.process = CrawlerProce | ss(settings)
self.sc = ScrapedObjClass(name='Event')
self.sc.save()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.