| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
spuriousdata/logrok | logrok/logrok.py | Python | mit | 8,948 | 0.004582
#!/usr/bin/env python
"""Query and aggregate data from log files using SQL-like syntax"""
import sys
import argparse
import os
import re
import ast
import readline
import atexit
import time
import inspect
from multiprocessing import cpu_count
try:
from collections import OrderedDict
except ImportError:
# python < 2.7 compatibility
from compat.OrderedDict import OrderedDict
from ply import yacc
import parser
import parallel
import screen
import sqlfuncs
import logformat
from util import NoTokenError, parse_format_string, Complete, Table, pretty_print
DEBUG = False
log_regex = None
class LogQuery(object):
def __init__(self, data, query):
self.data = data
self.query = query
try:
self.ast = parser.parse(query)
except NoTokenError, e:
print "ERROR: %s" % e.message
print query
return
except SyntaxError:
return
if DEBUG:
# pretty-printer
sq = str(self.ast)
pretty_print(sq)
print sq
print '-'*screen.width
pass
def run(self):
start_time = time.time()
op_data = sqlfuncs.do(self.ast, self.data[:]) # COPY!!!
response = OrderedDict()
for row in op_data:
for key in row.keys():
if not response.has_key(key):
response[key] = []
response[key].append(row[key])
Table(response, start_time).prnt()
class LoGrok(object):
def __init__(self, args, interactive=False, curses=False, chunksize=10000):
if curses:
screen.init_curses()
elif interactive:
screen.init_linebased()
self.interactive = interactive
self.args = args
self.processed_rows = 0
self.oldpct = 0
self.data = []
self.chunksize = chunksize
self.complete = Complete()
self.crunchlogs()
self.interact()
def crunchlogs(self):
global log_regex
if self.args.format is not None:
fmt = self.args.format
else:
fmt = logformat.TYPES[self.args.type]
print
lines = []
for logfile in self.args.logfile:
screen.print_mutable("Reading lines from %s:" % logfile.name)
lines += logfile.readlines()
screen.print_mutable("Reading lines from %s: %d" % (logfile.name, len(lines)))
logfile.close()
screen.print_mutable("", True)
log_regex = re.compile(parse_format_string(fmt))
if self.args.lines:
lines = lines[:self.args.lines]
st = time.time()
self.data = parallel.run(log_match, lines, _print=True)
et = time.time()
print "%d lines crunched in %0.3f seconds" % (len(lines), (et-st))
def interact(self):
if screen.is_curses():
screen.draw_curses_screen(self.data)
self.main_loop()
elif self.interactive:
self.shell()
else:
self.query(self.args.query)
def shell(self):
try:
history = os.path.expanduser('~/.logrok_history')
readline.read_history_file(history)
except IOError:
pass
atexit.register(readline.write_history_file, history)
readline.set_history_length(1000)
readline.parse_and_bind('tab: complete')
readline.set_completer(self.complete.complete)
# XXX This is ugly and needs to be more intelligent. Ideally, the
# completer would use readline.readline() to contextually switch out
# the returned matches
self.complete.addopts(['select', 'from log', 'where', 'between',
'order by', 'group by', 'limit', ] + get_sqlfuncs() + self.data[0].keys())
while True:
q = raw_input("logrok> ").strip()
while not q.endswith(";"):
q += raw_input("> ").strip()
self.query(q)
def query(self, query):
semicolon = query.find(';')
if semicolon != -1:
query = query[:semicolon]
if query in ('quit', 'bye', 'exit'):
sys.exit(0)
if query.startswith('help') or query.startswith('?'):
answer = "Use sql syntax against your log, `from` clauses are ignored.\n"\
"Queries can span multiple lines and _must_ end in a semicolon `;`.\n"\
" Try: `show fields;` to see available field names. Press TAB at the\n"\
" beginning of a new line to see all available completions."
print answer
return
if query in ('show fields', 'show headers'):
print ', '.join(self.data[0].keys())
return
else:
try:
q = LogQuery(self.data, query)
return q.run()
except SyntaxError, e:
return e.message
def main_loop(self):
while 1:
c = screen.getch()
if c == ord('x'): break
if c == ord('q'): screen.prompt("QUERY:", self.query)
def get_sqlfuncs():
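# List the public function names exposed by the sqlfuncs module (skipping '_'-prefixed helpers and the 'do' dispatcher); used above to seed tab completion.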
return map(
lambda x: x[0],
filter(
lambda x: not x[0].startswith('_') and not x[0] == 'do',
inspect.getmembers(sqlfuncs, inspect.isfunction)
)
)
@parallel.map
def log_match(chunk):
response = []
for line in chunk:
out = {}
m = log_regex.match(line)
for key in log_regex.groupindex:
if logformat.types.has_key(key):
f = logformat.types[key]
else:
f = str
"""
# XXX
# This is a hack a big big hack
# It's here because I discovered that converting the date
# strings into date objects using strptime is a HUGE performance hit!
# -- don't know what to do about that
if f not in (int, str):
f = str
"""
d = m.group(key)
out[key] = f(d)
response.append(out)
return response
def main():
cmd = argparse.ArgumentParser(description="Grok/Query/Aggregate log files. Requires python2 >= 2.7")
typ = cmd.add_mutually_exclusive_group(required=True)
typ.add_argument('-t', '--type', metavar='TYPE', choices=logformat.TYPES, help='{%s} Use built-in log type (default: apache-common)'%', '.join(logformat.TYPES), default='apache-common')
typ.add_argument('-f', '--format', action='store', help='Log format (use apache LogFormat string)')
typ.add_argument('-C', '--config', type=argparse.FileType('r'), help='httpd.conf file in which to find LogFormat string (requires -T)')
cmd.add_argument('-T', '--ctype', help='type-name for LogFormat from specified httpd.conf file (only works with -c)')
cmd.add_argument('-j', '--processes', action='store', type=int, help='Number of processes to fork for log crunching (default: smart)', default=parallel.SMART)
cmd.add_argument('-l', '--lines', action='store', type=int, help='Only process LINES lines of input')
interactive = cmd.add_mutually_exclusive_group(required=False)
interactive.add_argument('-i', '--interactive', action='store_true', help="Use line-based interactive interface")
interactive.add_argument('-c', '--curses', action='store_true', help=argparse.SUPPRESS)
interactive.add_argument('-q', '--query', help="The query to run")
cmd.add_argument('-d', '--debug', action='store_true', help="Turn debugging on (you don't want this)")
cmd.add_argument('logfile', nargs='+', type=argparse.FileType('r'), help="log(s) to parse/query")
args = cmd.parse_args(sys.argv[1:])
if args.config and not args.ctype:
cmd.error("-C/--config option requires -T/--ctype option")
if args.ctype and not args.config:
cmd.error("-T/--ctype only works with -C/--config option")
if args.config and args.ctype:
config = args.config.read()
args.config.close()
m = re.search(r'^logformat[\s]+(.*)[\s]+%s' % args.ctype, config, re.I|re.M)
if m is Non

sopython/kesh | kesh/api/__init__.py | Python | bsd-3-clause | 39 | 0.025641
from .connection import MongoConnection

damianpv/skeleton_django | sk_django/sk_django/settings/staging.py | Python | gpl-3.0 | 86 | 0.023256
# Configuration for a semi-private version on the production server. Beta - Alpha

AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2016_12_01/operations/security_rules_operations.py | Python | mit | 18,911 | 0.002327
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class SecurityRulesOperations(object):
"""SecurityRulesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2016-12-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-12-01"
self.config = config
def _delete_initial(
self, resource_group_name, network_security_group_name, security_rule_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, network_security_group_name, security_rule_name, custom_headers=None, raw=False, **operation_config):
"""Deletes the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security
group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, network_security_group_name, security_rule_name, custom_headers=None, raw=False, **operation_config):
"""Get the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security
group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SecurityRule or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2016_12_01.models.SecurityRule or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
q

sgiavasis/C-PAC | CPAC/GUI/interface/pages/vmhc.py | Python | bsd-3-clause | 3,737 | 0.025957
import wx
import wx.html
from ..utils.generic_class import GenericClass
from ..utils.constants import control, dtype
from ..utils.validator import CharValidator
import pkg_resources as p
class VMHC(wx.html.HtmlWindow):
def __init__(self, parent, counter = 0):
from urllib2 import urlopen
wx.html.HtmlWindow.__init__(self, parent, style= wx.html.HW_SCROLLBAR_AUTO)
self.SetStandardFonts()
self.counter = counter
self.LoadFile(p.resource_filename('CPAC', 'GUI/resources/html/vmhc.html'))
# try:
# code = urlopen("http://fcp-indi.github.io/docs/user/vmhc.html").code
# if (code / 100 < 4):
# self.LoadPage('http://fcp-indi.github.io/docs/user/vmhc.html')
# else:
# self.LoadFile('html/vmhc.html')
# except:
# self.LoadFile('html/vmhc.html')
def get_counter(self):
return self.counter
class VMHCSettings(wx.ScrolledWindow):
def __init__(self, parent, counter = 0):
wx.ScrolledWindow.__init__(self, parent)
self.counter = counter
self.page = GenericClass(self, "Voxel-mirrored Homotopic Connectivity (VMHC) Options")
self.page.add(label="Calculate VMHC ",
control=control.CHOICE_BOX,
name='runVMHC',
type=dtype.LSTR,
comment="Calculate Voxel-mirrored Homotopic Connectivity (VMHC) for all voxels.",
values=["On","Off"],
wkf_switch = True)
self.page.add(label="Symmetric Template (Brain Only) ",
control=control.COMBO_BOX,
name='brainSymmetric',
type=dtype.STR,
values = "$FSLDIR/data/standard/MNI152_T1_${standardResolutionAnat}_brain_symmetric.nii.gz",
comment="Included as part of the 'Image Resource Files' package available on the Install page of the User Guide.\n\nIt is not necessary to change this path unless you intend to use a non-standard symmetric template.")
self.page.add(label="Symmetric Template (With Skull) ",
control=control.COMBO_BOX,
name='symmStandard',
type=dtype.STR,
values = "$FSLDIR/data/standard/MNI152_T1_${standardResolutionAnat}_symmetric.nii.gz",
comment="Included as part of the 'Image Resource Files' package available on the Install page of the User Guide.\n\nIt is not necessary to change this path unless you intend to use a non-standard symmetric template.")
self.page.add(label="Dilated Symmetric Brain Mask ",
control=control.COMBO_BOX,
name='twommBrainMaskDiluted',
type=dtype.STR,
values = "$FSLDIR/data/standard/MNI152_T1_${standardResolutionAnat}_brain_mask_symmetric_dil.nii.gz",
comment="Included as part of the 'Image Resource Files' package available on the Install page of the User Guide.\n\nIt is not necessary to change this path unless you intend to use a non-standard symmetric template.")
self.page.add(label="FLIRT Configuration File ",
control=control.COMBO_BOX,
name='configFileTwomm',
type=dtype.STR,
values = "$FSLDIR/etc/flirtsch/T1_2_MNI152_2mm.cnf",
comment="Included as part of the 'Image Resource Files' package available on the Install page of the User Guide.\n\nIt is not necessary to change this path unless you intend to use a non-standard symmetric template.")
self.page.set_sizer()
parent.get_page_list().append(self)
def get_counter(self):
return self.counter

tjsavage/sfcsdatabase | sfcs/django/http/__init__.py | Python | bsd-3-clause | 23,818 | 0.002393
import datetime
import os
import re
import time
from pprint import pformat
from urllib import urlencode, quote
from urlparse import urljoin
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
# The mod_python version is more efficient, so try importing it first.
from mod_python.util import parse_qsl
except ImportError:
try:
# Python 2.6 and greater
from urlparse import parse_qsl
except ImportError:
# Python 2.5, 2.4. Works on Python 2.6 but raises
# PendingDeprecationWarning
from cgi import parse_qsl
# httponly support exists in Python 2.6's Cookie library,
# but not in Python 2.4 or 2.5.
import Cookie
if Cookie.Morsel._reserved.has_key('httponly'):
SimpleCookie = Cookie.SimpleCookie
else:
class Morsel(Cookie.Morsel):
def __setitem__(self, K, V):
K = K.lower()
if K == "httponly":
if V:
# The superclass rejects httponly as a key,
# so we jump to the grandparent.
super(Cookie.Morsel, self).__setitem__(K, V)
else:
super(Morsel, self).__setitem__(K, V)
def OutputString(self, attrs=None):
output = super(Morsel, self).OutputString(attrs)
if "httponly" in self:
output += "; httponly"
return output
class SimpleCookie(Cookie.SimpleCookie):
def __set(self, key, real_value, coded_value):
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
def __setitem__(self, key, value):
rval, cval = self.value_encode(value)
self.__set(key, rval, cval)
from django.utils.datastructures import MultiValueDict, ImmutableList
from django.utils.encoding import smart_str, iri_to_uri, force_unicode
from django.utils.http import cookie_date
from django.http.multipartparser import MultiPartParser
from django.conf import settings
from django.core.files import uploadhandler
from utils import *
RESERVED_CHARS="!*'();:@&=+$,/?%#[]"
absolute_http_url_re = re.compile(r"^https?://", re.I)
class Http404(Exception):
pass
class HttpRequest(object):
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
def __init__(self):
self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
self.path = ''
self.path_info = ''
self.method = None
def __repr__(self):
return '<HttpRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
(pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),
pformat(self.META))
def get_host(self):
"""Returns the HTTP host using the environment or request headers."""
# We try three options, in order of decreasing preference.
if 'HTTP_X_FORWARDED_HOST' in self.META:
host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
host = self.META['HTTP_HOST']
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META['SERVER_NAME']
server_port = str(self.META['SERVER_PORT'])
if server_port != (self.is_secure() and '443' or '80'):
host = '%s:%s' % (host, server_port)
return host
def get_full_path(self):
return ''
def build_absolute_uri(self, location=None):
"""
Builds an absolute URI from the location and the variables available in
this request. If no location is specified, the absolute URI is built on
``request.get_full_path()``.
"""
if not location:
location = self.get_full_path()
if not absolute_http_url_re.match(location):
current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http',
self.get_host(), self.path)
location = urljoin(current_uri, location)
return iri_to_uri(location)
def is_secure(self):
return os.environ.get("HTTPS") == "on"
def is_ajax(self):
return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
def _set_encoding(self, val):
"""
Sets the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, it is removed and recreated on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, '_get'):
del self._get
if hasattr(self, '_post'):
del self._post
def _get_encoding(self):
return self._encoding
encoding = property(_get_encoding, _set_encoding)
def _initialize_handlers(self):
self._upload_handlers = [uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS]
def _set_upload_handlers(self, upload_handlers):
if hasattr(self, '_files'):
raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
self._upload_handlers = upload_handlers
def _get_upload_handlers(self):
if not self._upload_handlers:
# If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
upload_handlers = property(_get_upload_handlers, _set_upload_handlers)
def parse_file_upload(self, META, post_data):
"""Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning = "You cannot alter upload handlers after the upload has been processed."
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
def _get_raw_post_data(self):
if not hasattr(self, '_raw_post_data'):
if self._read_started:
raise Exception("You cannot access raw_post_data after reading from request's data stream")
try:
content_length = int(self.META.get('CONTENT_LENGTH', 0))
except (ValueError, TypeError):
# If CONTENT_LENGTH was empty string or not an integer, don't
# error out. We've also seen None passed in here (against all
# specs, but see ticket #8259), so we handle TypeError as well.
content_length = 0
if content_length:
self._raw_post_data = self.read(content_length)
else:
self._raw_post_data = self.read()
self._stream = StringIO(self._raw_post_data)
return self._raw_post_data
raw_post_data = property(_get_raw_post_data)
def _mark_post_parse_error(self):
self._post = QueryDict('')
self._files = MultiValueDict()
self._post_parse_error = True
def _load_post_and_files(self):
# Populates self._post and self._files
if self.method != 'POST':
self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
return
if self._read_started:
self._mark_post_parse_error()
return
if self.META.get('CONTENT_TYPE', '').startswith('multipart'):
self._raw_post_data = ''
try:
self._post, self._files = self.parse_file_upload(self.META, self)
except:
# An error occurred while parsing POST data. Since, when
# formatting the error, the request handler might access
# self.POST, set self._post and self._files to prevent
# attempts to parse POST data again.
# Mark that an error occurred. This allows self.__repr__ to
# be explicit about it instead of simply representing an
# empty POST.
self._mark_po

google-research/scenic | scenic/projects/baselines/clip/tokenizer.py | Python | apache-2.0 | 1,853 | 0.008095
"""Simple CLIP tokenizer wrapper."""
from absl import logging
import functools
from typing import Any, Callable, Optional, Sequence, Union
from clip.simple_tokenizer import SimpleTokenizer
import jax.numpy as jnp
import numpy as np
from scenic.projects.baselines.clip import download
# pylint: disable=line-too-long
DEFAULT_BPE_PATH = None
DEFAULT_BPE_URL = 'https://github.com/openai/CLIP/blob/main/clip/bpe_simple_vocab_16e6.txt.gz?raw=true'
MAX_TEXT_LENGTH = 77
# pylint: enable=line-too-long
def _tokenize(texts: Union[str, Sequence[str]], tokenizer: Any,
context_length: int) -> jnp.ndarray:
"""Tokenizes texts using tokenizer."""
if isinstance(texts, str):
texts = [texts]
sot_token = tokenizer.encoder['<|startoftext|>']
eot_token = tokenizer.encoder['<|endoftext|>']
all_tokens = [
[sot_token] + tokenizer.encode(text) + [eot_token] for text in texts
]
result = np.zeros((len(all_tokens), context_length), dtype=np.long)
for i, tokens in enumerate(all_tokens):
if len(tokens) > context_length:
raise RuntimeError(
f'Input {texts[i]} is too long for context length {context_length}')
result[i, :len(tokens)] = np.asarray(tokens)
return jnp.asarray(result)
def build_tokenizer(
bpe_path: Optional[str] = DEFAULT_BPE_PATH,
bpe_url: str = DEFAULT_BPE_URL,
download_dir: str = download.DEFAULT_DOWNLOAD_DIR
) -> Callable[[Union[str, Sequence[str]]], np.ndarray]:
"""Returns CLIP's tokenization function."""
if bpe_path is None:
bpe_path = download.download(bpe_url, download_dir)
logging.info('Downloaded vocabulary from %s to %s', bpe_url, download_dir)
tokenizer = SimpleTokenizer(bpe_path)
tokenizer_fn = functools.partial(_tokenize, tokenizer=tokenizer,
context_length=MAX_TEXT_LENGTH)
return tokenizer_fn

jashort/SmartFileSorter | smartfilesorter/actionplugins/renameto.py | Python | bsd-3-clause | 2,257 | 0.004431
import shutil
import os
import re
import logging
class RenameTo(object):
"""
Renames a given file. Performs a case sensitive search and replace on the filename, then renames it.
Also supports regular expressions.
"""
config_name = 'rename-to'
def __init__(self, parameters):
self.logger = logging.getLogger(__name__)
if 'match' in parameters:
self.match = parameters['match']
else:
raise ValueError('rename-to rule must have parameter "match"')
if 'replace-with' in parameters:
if parameters['replace-with'] is None:
self.replace_with = ''
else:
self.replace_with = parameters['replace-with']
else:
raise ValueError('rename-to rule must have "replace-with" parameter')
def do_action(self, target, dry_run=False):
"""
:param target: Full path and filename
:param dry_run: True - don't actually perform action. False: perform action. No effect for this rule.
:return: filename: Full path and filename after action completes
"""
original_path = os.path.dirname(target)
original_filename, original_extension = os.path.splitext(os.path.basename(target))
new_filename = re.sub(self.match, self.replace_with, original_filename) + original_extension
destination = os.path.join(original_path, new_filename)
if dry_run is True:
self.logger.debug("Dry run: Skipping rename {0} to {1}".format(target, new_filename))
return target
else:
self.logger.debug("Renaming {0} to {1}".format(original_filename + original_extension,
new_filename + original_extension))
if not os.path.exists(destination):
try:
shutil.move(target, destination)
except IOError:
self.logger.error("Error renaming file {0} to {1}".format(target, new_filename))
raise IOError
else:
self.logger.error("Destination file already exists: {0}".format(new_filename))
raise IOError
return destination

michealcarrerweb/LHVent_app | stock/models.py | Python | mit | 4,911 | 0.007738
from __future__ import unicode_literals
from django.db import models
import datetime
from django.db.models.signals import pre_save
from django.urls import reverse
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from source_utils.starters import CommonInfo, GenericCategory
from versatileimagefield.fields import (
VersatileImageField,
PPOIField
)
def upload_location(instance, filename):
return "%s/%s" %(instance.slug, filename)
ASSESEMENT = (
('units', 'Per unit'),
('square feet', 'Square foot'),
('linear feet', 'Linear foot'),
('square meters', 'Square meter'),
('linear meters', 'Linear meter'),
)
class Base(GenericCategory):
"""
This model represents the general type of product category offered.
"""
class Meta:
verbose_name = _('Product Category')
verbose_name_plural = _('Product Categories')
ordering = ["category"]
def get_success_url(self):
return reverse("product:company_list")
def get_absolute_url(self):
return reverse(
"product:base_product_detail",
kwargs={'slug': self.slug}
)
def pre_save_category(sender, instance, *args, **kwargs):
instance.slug = slugify(instance.category)
pre_save.connect(pre_save_category, sender=Base)
class Product(CommonInfo):
"""
This model describes the specific product related to the category.
"""
base = models.ForeignKey(
Base,
on_delete=models.CASCADE
)
supplier = models.ForeignKey(
'company.Company',
on_delete=models.CASCADE
)
item = models.CharField(
max_length=30,
unique=True
)
admin_time = models.DecimalField(
default=0,
max_digits=4,
decimal_places=2
)
prep_time = models.DecimalField(
default=0,
max_digits=4,
decimal_places=2
)
field_time = models.DecimalField(
default=0,
max_digits=4,
decimal_places=2
)
admin_material = models.DecimalField(
default=0,
max_digits=8,
decimal_places=2
)
prep_material = models.DecimalField(
default=0,
max_digits=8,
decimal_places=2
)
field_material = models.DecimalField(
default=0,
max_digits=8,
decimal_places=2
)
quantity_assesement = models.CharField(
max_length=12,
verbose_name=_("Quantity assesement method"),
choices=ASSESEMENT
)
order_if_below = models.SmallIntegerField()
discontinued = models.DateField(
null=True,
blank=True
)
order_now = models.BooleanField(
default=False
)
units_damaged_or_lost = models.SmallIntegerField(
default=0
)
quantity = models.SmallIntegerField(
"Usable quantity",
default=0,
null=True,
blank=True
)
quantity_called_for = models.SmallIntegerField(
default=0,
null=True,
blank=True
)
image = VersatileImageField(
'Image',
upload_to='images/product/',
null=True, blank=True,
width_field='width',
height_field='height',
ppoi_field='ppoi'
)
height = models.PositiveIntegerField(
'Image Height',
blank=True,
null=True
)
width = models.PositiveIntegerField(
'Image Width',
blank=True,
null=True
)
ppoi = PPOIField(
'Image PPOI'
)
no_longer_available = models.BooleanField(default=False)
class Meta:
ordering= ['item']
def __str__(self):
return self.item
def get_time(self):
return self.admin_time + self.prep_time + self.field_time
def get_cost(self):
return self.admin_material + self.prep_material + self.field_material
def get_usable_quantity(self):
return self.quantity - self.units_damaged_or_lost - self.quantity_called_for
def get_success_url(self):
return reverse("product:category_item_list", kwargs={'slug': self.base.slug})
def get_absolute_url(self):
return reverse("product:item_detail", kwargs={'slug': self.slug})
def pre_save_product(sender, instance, *args, **kwargs):
if not instance.no_longer_available:
instance.discontinued = None
elif instance.no_longer_available and instance.discontinued == None:
instance.discontinued = datetime.date.today()
if (
instance.quantity -
instance.units_damaged_or_lost -
instance.quantity_called_for
) < instance.order_if_below:
instance.order_now = True
else:
instance.order_now = False
instance.slug = slugify(instance.item)
pre_save.connect(pre_save_product, sender=Product)

vFense/vFense | tp/src/server/oauth/token.py | Python | lgpl-3.0 | 484 | 0.006198
from hashlib import sha512
from uuid import uuid4
from vFense.db.client import validate_session
class TokenManager():
def __init__(self, session):
self.session = session  # DB session
def save_access_token(self, token):
self.session = validate_session(self.session)
self.session.add(token)
self.session.commit()
self.session.close()
def generate_token(self, length=24):
return sha512(uuid4().hex).hexdigest()[0:length]
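# Hedged usage sketch (not part of the vFense source): generate_token() never touches
# the stored session, so it can be exercised on its own, e.g.
#     TokenManager(session=None).generate_token()   # -> 24-character hex string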

BloodyD/django-dbbackup | dbbackup/settings.py | Python | bsd-3-clause | 2,525 | 0.005149
# DO NOT IMPORT THIS BEFORE django.configure() has been run!
import os
from django.conf import settings
DATABASES = getattr(settings, 'DBBACKUP_DATABASES', list(settings.DATABASES.keys()))
BACKUP_DIRECTORY = getattr(settings, 'DBBACKUP_BACKUP_DIRECTORY', os.getcwd())
# Days to keep backups
CLEANUP_KEEP = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
# Days to keep backed up media (default: same as CLEANUP_KEEP)
CLEANUP_KEEP_MEDIA = getattr(settings, 'DBBACKUP_CLEANUP_KEEP_MEDIA', CLEANUP_KEEP)
MEDIA_PATH = getattr(settings, 'DBBACKUP_MEDIA_PATH', settings.MEDIA_ROOT)
DATE_FORMAT = getattr(settings, 'DBBACKUP_DATE_FORMAT', '%Y-%m-%d-%H%M%S')
DATE_FORMAT_REGEX = getattr(settings, 'DBBACKUP_DATE_FORMAT_REGEX', r"\d{4}-\d{2}-\d{2}-\d{6}")
SERVER_NAME = getattr(settings, 'DBBACKUP_SERVER_NAME', '')
FORCE_ENGINE = getattr(settings, 'DBBACKUP_FORCE_ENGINE', '')
FILENAME_TEMPLATE = getattr(settings, 'DBBACKUP_FILENAME_TEMPLATE', '{databasename}-{servername}-{datetime}.{extension}')
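# Illustrative only (not dbbackup code): the template above is filled in with the
# placeholder names it declares, roughly
#     FILENAME_TEMPLATE.format(databasename='default', servername=SERVER_NAME,
#                              datetime='2015-01-31-120000', extension='dump')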
READ_FILE = '<READ_FILE>'
WRITE_FILE = '<WRITE_FILE>'
# Environment dictionary
BACKUP_ENVIRONMENT = {}
RESTORE_ENVIRONMENT = {}
# TODO: Unify backup and restore commands to support adding extra flags instead
# of just having full statements.
SQLITE_BACKUP_COMMANDS = getattr(settings, 'DBBACKUP_SQLITE_BACKUP_COMMANDS', [
[READ_FILE, '{databasename}'],
])
SQLITE_RESTORE_COMMANDS = getattr(settings, 'DBBACKUP_SQLITE_RESTORE_COMMANDS', [
[WRITE_FILE, '{databasename}'],
])
# TODO: Why are these even here? The MySQL commands are built in a dynamic
# fashion through MySQLSettings
MYSQL_BACKUP_COMMANDS = getattr(settings, 'DBBACKUP_MYSQL_BACKUP_COMMANDS', None)
MYSQL_RESTORE_COMMANDS = getattr(settings, 'DBBACKUP_MYSQL_RESTORE_COMMANDS', None)
POSTGRESQL_BACKUP_COMMANDS = getattr(settings, 'DBBACKUP_POSTGRESQL_BACKUP_COMMANDS', None)
POSTGRESQL_RESTORE_COMMANDS = getattr(settings, 'DBBACKUP_POSTGRESQL_RESTORE_COMMANDS', None)
POSTGRESQL_RESTORE_SINGLE_TRANSACTION = getattr(settings, 'DBBACKUP_POSTGRESQL_RESTORE_SINGLE_TRANSACTION', True)
POSTGIS_SPATIAL_REF = getattr(settings, 'DBBACKUP_POSTGIS_SPACIAL_REF', False)
FAILURE_RECIPIENTS = getattr(settings, 'DBBACKUP_FAILURE_RECIPIENTS', settings.ADMINS)
SEND_EMAIL = getattr(settings, 'DBBACKUP_SEND_EMAIL', True)
SERVER_EMAIL = getattr(settings, 'DBBACKUP_SERVER_EMAIL', settings.SERVER_EMAIL)
GPG_ALWAYS_TRUST = getattr(settings, 'DBBACKUP_GPG_ALWAYS_TRUST', False)
GPG_RECIPIENT = getattr(settings, 'DBBACKUP_GPG_RECIPIENT', None)

NikNitro/Python-iBeacon-Scan | sympy/printing/conventions.py | Python | gpl-3.0 | 2,504 | 0
"""
A few practical conventions common to all printers.
"""
from __future__ import print_function, division
import re
import collections
_name_with_digits_p = re.compile(r'^([a-zA-Z]+)([0-9]+)$')
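# Matches names like "x12" so that trailing digits can be peeled off and treated as a subscript (see the end of split_super_sub).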
def split_super_sub(text):
"""Split a symbol name into a name, superscripts and subscripts
The first part of the symbol name is considered to be its actual
'name', followed by super- and subscripts. Each superscript is
preceded with a "^" character or by "__". Each subscript is preceded
by a "_" character. The three return values are the actual name, a
list with superscripts and a list with subscripts.
>>> from sympy.printing.conventions import split_super_sub
>>> split_super_sub('a_x^1')
('a', ['1'], ['x'])
>>> split_super_sub('var_sub1__sup_sub2')
('var', ['sup'], ['sub1', 'sub2'])
"""
if len(text) == 0:
return text, [], []
pos = 0
name = None
supers = []
subs = []
while pos < len(text):
start = pos + 1
if text[pos:pos + 2] == "__":
start += 1
pos_hat = text.find("^", start)
if pos_hat < 0:
pos_hat = len(text)
pos_usc = text.find("_", start)
if pos_usc < 0:
pos_usc = len(text)
pos_next = min(pos_hat, pos_usc)
part = text[pos:pos_next]
pos = pos_next
if name is None:
name = part
elif part.startswith("^"):
supers.append(part[1:])
elif part.startswith("__"):
supers.append(part[2:])
elif part.startswith("_"):
subs.append(part[1:])
else:
raise RuntimeError("This should never happen.")
# make a little exception when a name ends with digits, i.e. treat them
# as a subscript too.
m = _name_with_digits_p.match(name)
if m:
name, sub = m.groups()
subs.insert(0, sub)
return name, supers, subs
def requires_partial(expr):
"""Return whether a partial derivative symbol is required for printing
This requires checking how many free variables there are,
filtering out the ones that are integers. Some expressions don't have
free variables. In that case, check its variable list explicitly to
get the context of the expression.
"""
if not isinstance(expr.free_symbols, collections.Iterable):
return len(set(expr.variables)) > 1
return sum(not s.is_integer for s in expr.free_symbols) > 1

alseambusher/SemanticTyping | lib/utils.py | Python | mit | 1,139 | 0.005268
import re
from main import sc
__author__ = 'minh'
class Utils:
def __init__(self):
pass
not_allowed_chars = '[\/*?"<>|\s\t]'
numeric_regex = r"\A((\\-)?[0-9]{1,3}(,[0-9]{3})+(\\.[0-9]+)?)|((\\-)?[0-9]*\\.[0-9]+)|((\\-)?[0-9]+)|((\\-)?[0" \
r"-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)\Z"
@staticmethod
def is_number(example):
matches = re.match(Utils.numeric_regex, example.strip())
if matches and matches.span()[1] == len(example.strip()):
return True
return False
@staticmethod
def clean_examples_numeric(examples):
return sc.parallelize(examples).map(lambda x: float(x) if Utils.is_number(x) else "").filter(
lambda x: x).collect()
@staticmethod
def get_distribution(data):
return sc.parallelize(data).map(lambda word: (word, 1)).reduceByKey(lambda a, b: a + b).sortBy(
lambda x: x).zipWithIndex().flatMap(lambda value, idx: [str(idx)] * int(value/len(data) * 100))
@staticmethod
def get_index_name(index_config):
return "%s!%s" % (index_config['name'], index_config['size'])

viswimmer1/PythonGenerator | data/python_files/32677285/views.py | Python | gpl-2.0 | 15,766 | 0.000698
import logging
import traceback
from django.conf import settings
from django.core.paginator import Paginator
from django.http import HttpResponse, HttpResponseServerError, Http404
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from djblets.siteconfig.models import SiteConfiguration
from djblets.util.misc import cache_memoize
from reviewboard.diffviewer.models import DiffSet, FileDiff
from reviewboard.diffviewer.diffutils import UserVisibleError, \
compute_chunk_last_header, \
get_diff_files, \
populate_diff_chunks, \
get_enable_highlighting
def build_diff_fragment(request, file, chunkindex, highlighting, collapseall,
lines_of_context, standalone=False, context=None,
template_name='diffviewer/diff_file_fragment.html'):
if not context:
context = {}
cache = not lines_of_context
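# Fragments rendered with extra context lines are never cached; everything else is memoized under the key assembled below.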
key = ''
if cache:
filediff = file['filediff']
key = "%s-%s-%s-" % (template_name, file['index'],
filediff.diffset.revision)
if file['force_interdiff']:
interfilediff = file['interfilediff']
if interfilediff:
key += 'interdiff-%s-%s' % (filediff.pk, interfilediff.pk)
else:
key += 'interdiff-%s-none' % filediff.pk
else:
key += str(filediff.pk)
if chunkindex:
chunkindex = int(chunkindex)
num_chunks = len(file['chunks'])
if chunkindex < 0 or chunkindex >= num_chunks:
raise UserVisibleError(_(u"Invalid chunk index %s specified.") % \
chunkindex)
file['chunks'] = [file['chunks'][chunkindex]]
if cache:
key += '-chunk-%s' % chunkindex
if lines_of_context:
assert collapseall
context['lines_of_context'] = lines_of_context
chunk = file['chunks'][0]
lines = chunk['lines']
num_lines = len(lines)
new_lines = []
# If we only have one value, then assume it represents before
# and after the collapsed header area.
if len(lines_of_context) == 1:
lines_of_context.append(lines_of_context[0])
if lines_of_context[0] + lines_of_context[1] >= num_lines:
# The lines of context we're expanding to would cover the
# entire chunk, so just expand the entire thing.
collapseall = False
else:
lines_of_context[0] = min(num_lines, lines_of_context[0])
lines_of_context[1] = min(num_lines, lines_of_context[1])
# The start of the collapsed header area.
collapse_i = 0
# Compute the start of the second chunk of code, after the
# header.
if chunkindex < num_chunks - 1:
chunk2_i = max(num_lines - lines_of_context[1], 0)
else:
chunk2_i = num_lines
if lines_of_context[0] and chunkindex > 0:
# The chunk of context preceding the header.
collapse_i = lines_of_context[0]
file['chunks'].insert(0, {
'change': chunk['change'],
'collapsable': False,
'index': chunkindex,
'lines': lines[:collapse_i],
'meta': chunk['meta'],
'numlines': collapse_i,
})
# The header contents
new_lines += lines[collapse_i:chunk2_i]
if (chunkindex < num_chunks - 1 and
chunk2_i + lines_of_context[1] <= num_lines):
# The chunk of context after the header.
file['chunks'].append({
'change': chunk['change'],
'collapsable': False,
'index': chunkindex,
'lines': lines[chunk2_i:],
'meta': chunk['meta'],
'numlines': num_lines - chunk2_i,
})
if new_lines:
numlines = len(new_lines)
chunk.update({
'lines': new_lines,
'numlines': numlines,
'collapsable': True,
})
# Fix the headers to accommodate the new range.
if chunkindex < num_chunks - 1:
for prefix, index in (('left', 1), ('right', 4)):
chunk['meta'][prefix + '_headers'] = [
header
for header in chunk['meta'][prefix + '_headers']
if header[0] <= new_lines[-1][index]
]
chunk['meta']['headers'] = \
compute_chunk_last_header(new_lines, numlines,
chunk['meta'])
else:
file['chunks'].remove(chunk)
context.update({
'collapseall': collapseall,
'file': file,
'lines_of_context': lines_of_context or (0, 0),
'standalone': standalone,
})
func = lambda: render_to_string(template_name,
RequestContext(request, context))
if cache:
if collapseall:
key += '-collapsed'
if highlighting:
key += '-highlighting'
key += '-%s' % settings.AJAX_SERIAL
return cache_memoize(key, func)
else:
return func()
def get_collapse_diff(request):
if request.GET.get('expand', False):
return False
elif request.GET.get('collapse', False):
return True
elif request.COOKIES.has_key('collapsediffs'):
return (request.COOKIES['collapsediffs'] == "True")
else:
return True
def view_diff(request, diffset, interdiffset=None, extra_context={},
template_name='diffviewer/view_diff.html'):
highlighting = get_enable_highlighting(request.user)
try:
if interdiffset:
logging.debug("Generating diff viewer page for interdiffset ids "
"%s-%s",
diffset.id, interdiffset.id, request=request)
else:
logging.debug("Generating diff viewer page for filediff id %s",
diffset.id, request=request)
files = get_diff_files(diffset, None, interdiffset, request=request)
# Break the list of files into pages
siteconfig = SiteConfiguration.objects.get_current()
paginator = Paginator(files,
siteconfig.get("diffviewer_paginate_by"),
siteconfig.get("diffviewer_paginate_orphans"))
page_num = int(request.GET.get('page', 1))
if request.GET.get('file', False):
file_id = int(request.GET['file'])
for i, f in enumerate(files):
if f['filediff'].id == file_id:
page_num = i // paginator.per_page + 1
if page_num > paginator.num_pages:
page_num = paginator.num_pages
break
page = paginator.page(page_num)
collapse_diffs = get_collapse_diff(request)
context = {
'diffset': diffset,
'interdiffset': interdiffset,
'diffset_pair': (diffset, interdiffset),
'files': page.object_list,
'collapseall': collapse_diffs,
# Add the pagination context
'is_paginated': page.has_other

noman798/dcny | lib/f42/f42/ob.py | Python | mpl-2.0 | 1,570 | 0.000639
#!/usr/bin/env python
# coding:utf-8
class Ob(object):
def __init__(self, *args, **kwds):
for i in args:
self.__dict__.update(args)
self.__dict__.update(kwds)
def __getattr__(self, name):
return self.__dict__.get(name, '')
def __setattr__(self, name, value):
if value is not None:
self.__dict__[name] = value
def __delattr__(self, name):
if name in self.__dict__:
del self.__dict__[name]
def __repr__(self):
return self.__dict__.__repr__()
__getitem__ = __getattr__
__delitem__ = __delattr__
__setitem__ = __setattr__
def __len__(self):
return self.__dict__.__len__()
def __iter__(self):
for k, v in self.__dict__.items():
yield k, v
def __contains__(self, name):
return self.__dict__.__contains__(name)
def __eq__(self, other):
return self.__dict__ == other.__dict__
class StripOb(Ob):
def __init__(self, *args, **kwds):
super(StripOb, self).__init__(*args, **kwds)
d = self.__dict__
for k, v in d.items():
if isinstance(v, str):
if "\n" not in v:
_v = v.strip()
if _v != v:
d[k] = _v
if __name__ == '__main__':
ob1 = Ob(a=1, b=2)
# ob1.xx = None
# print(ob1.__dict__)
# del ob1.a
# print(ob1.__dict__)
# o = Ob(a='张沈鹏')
# print(o)
# for k, v in o:
# print(k, v)
# print(dict)
# print(dict(iter(o)))

quietcoolwu/python-playground | imooc/python_advanced/8_1_multi_threading.py | Python | mit | 4,077 | 0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import tarfile
from io import BytesIO
from queue import Queue
from threading import Event, Thread
from xml.etree.ElementTree import Element, ElementTree
import requests
import unicodecsv as csv
class DownloadThread(Thread):
def __init__(self, stock_id, queue):
Thread.__init__(self)
self.stock_id = stock_id
self.url = 'http://table.finance.yahoo.com/table.csv?s=%s.sz'
self.url %= str(stock_id).rjust(6, '0')
self.queue = queue
# IO operations
def download(self, url):
response = requests.get(url, timeout=5)
if response.ok:
return BytesIO(response.content)
def run(self):
print('Downloading', self.stock_id)
# Start downloading
data = self.download(self.url)
# Must add a thread lock if using collections.deque
self.queue.put((self.stock_id, data))
class ConvertThread(Thread):
def __init__(self, queue, cvt_event, tar_event):
Thread.__init__(self)
self.queue = queue
self.cvt_event = cvt_event
self.tar_event = tar_event
def xml_pretty(self, element, level=0):
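# Recursively indent the element tree with newlines and tabs so the written XML is human-readable.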
if element:
element.text = '\n' + '\t' * (level + 1)
for child in element:
self.xml_pretty(child, level + 1)
child.tail = child.tail[:-1]
element.tail = '\n' + '\t' * level
# CPU operations
def csv2xml(self, scsv, fxml):
reader = csv.reader(scsv)
headers = reader.__next__()
headers = list(map(lambda x: x.replace(' ', ''), headers))
root = Element('Data')
for row in reader:
ele_row = Element('Row')
root.append(ele_row)
for tag, text in zip(headers, row):
ele = Element(tag)
ele.text = text
ele_row.append(ele)
self.xml_pretty(root)
f = ElementTree(root)
f.write(fxml)
# Multiple producers and one consumer
def run(self):
count = 0
while True:
stock_id, data = self.queue.get()
print('Converting', stock_id)
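# A stock_id of -1 is the shutdown sentinel pushed by the main thread after all download threads have finished.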
if stock_id == -1:
self.cvt_event.set()
self.tar_event.wait()
self.tar_event.clear()
break
if data:
fname = str(stock_id).rjust(6, '0') + '.xml'
with open(fname, 'wb') as wf:
self.csv2xml(data, wf)
count += 1
if count == 5:
self.cvt_event.set()
self.tar_event.wait()
self.tar_event.clear()
count = 0
# Armor Thread
class TarThread(Thread):
def __init__(self, cvt_event, tar_event):
Thread.__init__(self)
self.count = 0
self.cvt_event = cvt_event
self.tar_event = tar_event
self.setDaemon(True)
def tarXML(self):
self.count += 1
tfname = '{}.tgz'.format(self.count)
tf = tarfile.open(tfname, 'w:gz')
for fname in os.listdir('.'):
if fname.endswith('.xml'):
tf.add(fname)
os.remove(fname)
tf.close()
if not tf.members:
os.remove(tfname)
def run(self):
while True:
# cvt blocking
self.cvt_event.wait()
self.tarXML()
# Clear the event so the mutual notification can be reused
self.cvt_event.clear()
# Sending notification
self.tar_event.set()
if __name__ == '__main__':
q = Queue()
cvt_event, tar_event = Event(), Event()
download_threads = [DownloadThread(i, q) for i in range(1, 11)]
convert_thread = ConvertThread(q, cvt_event, tar_event)
tar_thread = TarThread(cvt_event, tar_event)
tar_thread.start()
for thread in download_threads:
thread.start()
convert_thread.start()
for thread in download_threads:
thread.join()
q.put((-1, None))
print("Main Thread")

DarkFenX/Pyfa | gui/builtinContextMenus/ammoToDmgPattern.py | Python | gpl-3.0 | 1,296 | 0.002315
# noinspection PyPackageRequirements
import wx
import gui.globalEvents as GE
import gui.mainFrame
from gui.contextMenu import ContextMenuSingle
from service.fit import Fit
class AmmoToDmgPattern(ContextMenuSingle):
visibilitySetting = 'ammoPattern'
def __init__(self):
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
def display(self, callingWindow, srcContext, mainItem):
if srcContext not in ("marketItemGroup", "marketItemMisc") or self.mainFrame.getActiveFit() is None:
return False
if mainItem is None:
return False
for attr in ("emDamage", "thermalDamage", "explosiveDamage", "kineticDamage"):
if mainItem.getAttribute(attr) is not None:
return True
return False
def getText(self, callingWindow, itmContext, mainItem):
return "Set {} as Damage Pattern".format(itmContext if itmContext is not None else "Item")
def activate(self, callingWindow, fullContext, mainItem, i):
fitID = self.mainFrame.getActiveFit()
Fit.getInstance().setAsPattern(fitID, mainItem)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))
def getBitmap(self, callingWindow, context, mainItem):
return None
AmmoToDmgPattern.register()

FelixLoether/flask-uploads | flask_uploads/models.py | Python | mit | 458 | 0.002183
from .extensions import db, resizer
class Upload(db.Model):
__tablename__ = 'upload'
id = db.Column(db.Integer, autoincrement=True, primary_key=True)
name = db.Column(db.Unicode(255), nullable=False)
url = db.Column(db.Unicode(255), nullable=False)
if resizer:
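# When a resizer is configured, attach a <size>_name / <size>_url column pair to Upload for every configured size.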
for size in resizer.sizes.iterkeys():
setattr(Upload, size + '_name', db.Column(db.Unicode(255)))
setattr(Upload, size + '_url', db.Column(db.Unicode(255)))

papaloizouc/migrants | migrants/base/models.py | Python | gpl-2.0 | 1,356 | 0
from django.db import models
class DataCategory(models.Model):
id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=150)
year = models.IntegerField()
def __unicode__(self):
# Ideally this would be the title, but it's too big
return u"{} - {}".format(self.year, self.id)
class Country(models.Model):
id = models.IntegerField(primary_key=True)
alpha2 = models.CharField(max_length=2, unique=True, db_index=True)
area = models.CharField(max_length=200)
alt_name = models.CharField(max_length=200)
name = models.CharField(max_length=200)
order = models.IntegerField(unique=True)
region = models.CharField(max_length=100)
center_lat = models.FloatField(default=0)
center_long = models.FloatField(default=0)
def __unicode__(self):
return u"{} - {}".format(self.name, self.alpha2)
class MigrationInfo(models.Model):
destination = models.ForeignKey(Country, related_name='destination')
origin = models.ForeignKey(Country, related_name='origin')
category = models.ForeignKey(DataCategory)
people = models.IntegerField()
class Meta:
unique_together = ('destination', 'origin', 'category')
def __unicode__(self):
fields = [self.origin, " -> ", self.destination, self.category]
return u" ".join(map(repr, fields))

jpapon/minimal_ros_nodes | cnn_classifier/src/cnn_classifier/tensorflow_fcn/fcn16_vgg.py | Python | bsd-3-clause | 16,410 | 0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
from math import ceil
import sys
import numpy as np
import tensorflow as tf
VGG_MEAN = [103.939, 116.779, 123.68]
class FCN16VGG:
def __init__(self, vgg16_npy_path=None):
if vgg16_npy_path is None:
path = sys.modules[self.__class__.__module__].__file__
# print path
path = os.path.abspath(os.path.join(path, os.pardir))
# print path
path = os.path.join(path, "vgg16.npy")
vgg16_npy_path = path
logging.info("Load npy file from '%s'.", vgg16_npy_path)
if not os.path.isfile(vgg16_npy_path):
logging.error(("File '%s' not found. Download it from "
"https://dl.dropboxusercontent.com/u/"
"50333326/vgg16.npy"), vgg16_npy_path)
sys.exit(1)
self.data_dict = np.load(vgg16_npy_path, encoding='latin1').item()
self.wd = 5e-4
print("npy file loaded")
def build(self, rgb, train=False, num_classes=20, random_init_fc8=False,
debug=False):
"""
Build the VGG model using loaded weights
Parameters
----------
rgb: image batch tensor
Image in rgb shape. Scaled to interval [0, 255]
train: bool
Whether to build train or inference graph
num_classes: int
How many classes should be predicted (by fc8)
random_init_fc8 : bool
Whether to initialize fc8 layer randomly.
Finetuning is required in this case.
debug: bool
Whether to print additional Debug Information.
"""
# Convert RGB to BGR
with tf.name_scope('Processing'):
# rgb = tf.image.convert_image_dtype(rgb, tf.float32)
red, green, blue = tf.split(rgb, 3, 3)
# assert red.get_shape().as_list()[1:] == [224, 224, 1]
# assert green.get_shape().as_list()[1:] == [224, 224, 1]
# assert blue.get_shape().as_list()[1:] == [224, 224, 1]
bgr = tf.concat_v2([
blue - VGG_MEAN[0],
green - VGG_MEAN[1],
red - VGG_MEAN[2]], axis=3)
if debug:
bgr = tf.Print(bgr, [tf.shape(bgr)],
message='Shape of input image: ',
summarize=4, first_n=1)
self.conv1_1 = self._conv_layer(bgr, "conv1_1")
self.conv1_2 = self._conv_layer(self.conv1_1, "conv1_2")
self.pool1 = self._max_pool(self.conv1_2, 'pool1', debug)
self.conv2_1 = self._conv_layer(self.pool1, "conv2_1")
self.conv2_2 = self._conv_layer(self.conv2_1, "conv2_2")
self.pool2 = self._max_pool(self.conv2_2, 'pool2', debug)
self.conv3_1 = self._conv_layer(self.pool2, "conv3_1")
self.conv3_2 = self._conv_layer(self.conv3_1, "conv3_2")
self.conv3_3 = self._conv_layer(self.conv3_2, "conv3_3")
self.pool3 = self._max_pool(self.conv3_3, 'pool3', debug)
self.conv4_1 = self._conv_layer(self.pool3, "conv4_1")
self.conv4_2 = self._conv_layer(self.conv4_1, "conv4_2")
self.conv4_3 = self._conv_layer(self.conv4_2, "conv4_3")
self.pool4 = self._max_pool(self.conv4_3, 'pool4', debug)
self.conv5_1 = self._conv_layer(self.pool4, "conv5_1")
self.conv5_2 = self._conv_layer(self.conv5_1, "conv5_2")
self.conv5_3 = self._conv_layer(self.conv5_2, "conv5_3")
self.pool5 = self._max_pool(self.conv5_3, 'pool5', debug)
self.fc6 = self._fc_layer(self.pool5, "fc6")
if train:
self.fc6 = tf.nn.dropout(self.fc6, 0.5)
self.fc7 = self._fc_layer(self.fc6, "fc7")
if train:
self.fc7 = tf.nn.dropout(self.fc7, 0.5)
if random_init_fc8:
self.score_fr = self._score_layer(self.fc7, "score_fr",
num_classes)
else:
self.score_fr = self._fc_layer(self.fc7, "score_fr",
num_classes=num_classes,
relu=False)
self.pred = tf.argmax(self.score_fr, dimension=3)
self.upscore2 = self._upscore_layer(self.score_fr,
shape=tf.shape(self.pool4),
num_classes=num_classes,
debug=debug, name='upscore2',
ksize=4, stride=2)
self.score_pool4 = self._score_layer(self.pool4, "score_pool4",
num_classes=num_classes)
self.fuse_pool4 = tf.add(self.upscore2, self.score_pool4)
self.upscore32 = self._upscore_layer(self.fuse_pool4,
shape=tf.shape(bgr),
num_classes=num_classes,
debug=debug, name='upscore32',
ksize=32, stride=16)
self.pred_up = tf.argmax(self.upscore32, dimension=3)
def _max_pool(self, bottom, name, debug):
pool = tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME', name=name)
if debug:
pool = tf.Print(pool, [tf.shape(pool)],
message='Shape of %s' % name,
summarize=4, first_n=1)
return pool
def _conv_layer(self, bottom, name):
with tf.variable_scope(name) as scope:
filt = self.get_conv_filter(name)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(name)
bias = tf.nn.bias_add(conv, conv_biases)
relu = tf.nn.relu(bias)
# Add summary to Tensorboard
_activation_summary(relu)
return relu
def _fc_layer(self, bottom, name, num_classes=None,
relu=True, debug=False):
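# Fully connected layers are expressed as convolutions (7x7 kernel for fc6, 1x1 otherwise) so the network stays fully convolutional.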
with tf.variable_scope(name) as scope:
shape = bottom.get_shape().as_list()
if name == 'fc6':
filt = self.get_fc_weight_reshape(name, [7, 7, 512, 4096])
elif name == 'score_fr':
name = 'fc8' # Name of score_fr layer in VGG Model
filt = self.get_fc_weight_reshape(name, [1, 1, 4096, 1000],
num_classes=num_classes)
else:
filt = self.get_fc_weight_reshape(name, [1, 1, 4096, 4096])
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(name, num_classes=num_classes)
bias = tf.nn.bias_add(conv, conv_biases)
if relu:
bias = tf.nn.relu(bias)
_activation_summary(bias)
if debug:
bias = tf.Print(bias, [tf.shape(bias)],
message='Shape of %s' % name,
summarize=4, first_n=1)
return bias
def _score_layer(self, bottom, name, num_classes):
with tf.variable_scope(name) as scope:
# get number of input channels
in_features = bottom.get_shape()[3].value
shape = [1, 1, in_features, num_classes]
# He initialization Sheme
if name == "score_fr":
num_input = in_features
stddev = (2 / num_input)**0.5
elif name == "score_pool4":
stddev = 0.001
# Apply convolution
w_decay = self.wd
weights = self._variable_with_weight_decay(shape, stddev, w_decay)
conv = tf.nn.conv2d(bottom, weights, [1, 1, 1, 1], padding='SAME')
# Apply bias
conv_biases = self._bias_variable([num_classes], constant=0.0)
bias = tf.nn.bias_add(conv, conv_biases)
_activation_summary(bias)
|
kura/batfish
|
tests/test_client_authorize.py
|
Python
|
mit
| 1,645
| 0
|
import collections
import json
import unittest
import responses
from requests import HTTPError
from mock import patch
from batfish import Client
from batfish.__about__ import __version__
class TestClientAuthorize(unittest.TestCase):
def setUp(self):
with patch('batfish.client.read_token_from_conf',
return_value=None):
self.cli = Client()
@responses.activate
def test_authorize_error(self):
url = "https://api.digitalocean.com/v2/actions"
responses.add(responses.GET, url,
body='{"error": "something"}', status=500,
content_type="application/json")
with self.assertRaises(HTTPError):
self.cli.authorize("test_token")
@responses.activate
def test_authorize_unauthorized(self):
url = "https://api.digitalocean.com/v2/kura"
body = {'id': "unauthorized", 'message': "Unable to authenticate you."}
responses.add(responses.GET, url, body=json.dumps(body), status=401,
content_type="application/json")
self.cli.authorize("test_token")
self.assertEquals(responses.calls[0].response.status_code, 401)
@responses.activate
def test_authorize_success(self):  # renamed: the original repeated "test_authorize_unauthorized", shadowing the test above
url = "https://api.digitalocean.com/v2/actions"
responses.add(responses.GET, url,
body='{"error": "something"}', status=200,
content_type="application/json")
auth = self.cli.authorize("test_token")
self.assertEquals(auth, "OK")
self.assertEquals(responses.calls[0].response.status_code, 200)
|
mobb-io/django-erp
|
djangoerp/menus/signals.py
|
Python
|
mit
| 2,406
| 0.007897
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""This file
is part of the django ERP project.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.1'
from django.conf import settings
from django.db.models.signals import post_save, pre_delete
from djangoerp.core.utils.models import get_model
from djangoerp.core.signals import manage_author_permissions
from .models import Menu, Link, Bookmark
from .utils import create_bookmarks, delete_bookmarks
## HANDLERS ##
def _create_bookmarks(sender, instance, *args, **kwargs):
create_bookmarks(instance)
def _delete_bookmarks(sender, instance, *args, **kwargs):
delete_bookmarks(instance)
## API ##
def manage_bookmarks(cls, enabled=True):
"""Connects handlers for bookmarks management.
This handler could be used to automatically create a related bookmark list
on given model class instance creation. i.e.:
>> manage_bookmarks(User)
It will auto-generate a bookmark list associated with each new User instance.
To disconnect:
>> manage_bookmarks(User, False)
"""
cls = get_model(cls)
cls_name = cls.__name__.lower()
create_dispatch_uid = "create_%s_bookmarks" % cls_name
delete_dispatch_uid = "delete_%s_bookmarks" % cls_name
if enabled:
post_save.connect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
pre_delete.connect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
else:
post_save.disconnect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
pre_delete.disconnect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
## CONNECTIONS ##
manage_author_permissions(Menu)
manage_author_permissions(Link)
manage_author_permissions(Bookmark)
manage_bookmarks(settings.AUTH_USER_MODEL)
|
OpenDataCordoba/codigo-postal-argentino
|
alembic/versions/f5195fe91e09_agrega_alturas_codprov_y_alturas_cp.py
|
Python
|
gpl-2.0
| 796
| 0.001256
|
"""Agrega alturas.codprov y alturas.cp
Revision ID: f5195fe91e09
Revises: fccbcd8362d7
Create Date: 2017-07-09 22:01:51.280360
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f5195fe91e09'
down_revision = 'fccbcd8362d7'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('alturas', sa.Column('codprov', sa.String(length=1), nullable=True))
op.add_column('alturas', sa.Column('cp', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('alturas', 'cp')
op.drop_column('alturas', 'codprov')
# ### end Alembic commands ###
|
simontakite/sysadmin
|
pythonscripts/practicalprogramming/gui/mainloop.py
|
Python
|
gpl-2.0
| 78
| 0
|
import tkinter
window = tkinter.Tk()
window.mainloop()
print('Anybody home?')
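# Added note: mainloop() blocks until the Tk window is closed, so the print
# above only runs after the window has been dismissed.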
|
Thermi/ocfs2-tools
|
ocfs2console/ocfs2interface/process.py
|
Python
|
gpl-2.0
| 4,541
| 0.002202
|
# OCFS2Console - GUI frontend for OCFS2 management and debugging
# Copyright (C) 2002, 2005 Oracle. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 021110-1307, USA.
import os
import fcntl
import popen2
import gobject
import gtk
from guiutil import set_props
INTERVAL = 100
TIMEOUT = 10000
class Process:
def __init__(self, command, title, desc, parent=None, spin_now=False):
if isinstance(command, basestring):
if len(command.split(None, 1)) < 2:
command = (command,)
self.command = command
self.title = title
self.desc = desc
self.parent = parent
self.spin_now = spin_now
self.pipe = popen2.Popen4(self.command)
def reap(self):
self.success = False
self.killed = False
self.count = TIMEOUT // INTERVAL
self.threshold = self.count - INTERVAL * 10
self.dialog = None
if self.spin_now:
self.count = TIMEOUT * 60
self.make_progress_box()
timeout_id = gobject.timeout_add(INTERVAL, self.timeout)
fromchild = self.pipe.fromchild
fileno = fromchild.fileno()
flags = fcntl.fcntl(fileno, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fileno, fcntl.F_SETFL, flags)
self.output = ''
output_id = gobject.io_add_watch(fromchild, gobject.IO_IN, self.read)
gtk.main()
if self.dialog:
self.dialog.destroy()
gobject.source_remove(output_id)
gobject.source_remove(timeout_id)
if not self.success:
if self.killed:
if self.output:
self.output += '\n'
self.output += 'Killed prematurely.'
return self.success, self.output, self.killed
def timeout(self):
self.count = self.count - 1
ret = self.pipe.poll()
if ret != -1:
self.success = not os.WEXITSTATUS(ret)
gtk.main_quit()
return True
if self.count < 1:
self.kill()
return True
if self.count < self.threshold and not self.dialog:
self.make_progress_box()
if self.dialog:
self.pbar.pulse()
return True
def kill(self):
self.success = False
self.killed = True
os.kill(self.pipe.pid, 15)
gobject.timeout_add(INTERVAL * 5, self.kill_timeout)
gtk.main_quit()
def kill_timeout(self):
if self.pipe.poll() == -1:
os.kill(self.pipe.pid, 9)
self.kill_9 = True
return False
def make_progress_box(self):
self.dialog = gtk.Window()
set_props(self.dialog, title=self.title,
resizable=False,
modal=True,
type_hint=gtk.gdk.WINDOW_TYPE_HINT_DIALOG)
def ignore(w, e):
return True
self.dialog.connect('delete-event', ignore)
self.dialog.set_transient_for(self.parent)
vbox = gtk.VBox()
set_props(vbox, spacing=0,
homogeneous=False,
border_width=4,
parent=self.dialog)
label = gtk.Label(self.desc + '...')
vbox.pack_start(label, expand=False, fill=False)
self.pbar = gtk.ProgressBar()
vbox.pack_start(self.pbar, expand=False, fill=False)
self.dialog.show_all()
def read(self, fd, cond):
if cond & gtk.gdk.INPUT_READ:
try:
self.output += fd.read(1024)
except IOError:
return False
return True
def main():
process = Process('echo Hello; sleep 10', 'Sleep', 'Sleeping',
spin_now=True)
print process.reap()
if __name__ == '__main__':
main()
|
aalekperov/Task1
|
fixture/db.py
|
Python
|
apache-2.0
| 2,534
| 0.005919
|
import mysql
import pymysql
from model.group import Group
from model.contact import Contact
class DbFixture:
def __init__(self, host, name, user, password):
self.host = host
self.name = name
self.user = user
self.password = password
self.connection = pymysql.connect(host=host, database=name, user=user, password=password)
def get_group_list(self):
list = []
cursor = self.connection.cursor()
try:
cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
for row in cursor:
(id, name, header, footer) = row
list.append(Group(id=str(id), name=name, header=header, footer=footer))
finally:
cursor.close()
return list
def get_contact_list(self):
list = []
cursor = self.connection.cursor()
try:
cursor.execute("select id, firstname, middlename, lastname, nickname, "
"company, title, address, "
"home, mobile, work, fax, "
"email, email2, email3, homepage, "
"bday, bmonth, byear, "
"aday, amonth, ayear, "
"address2, phone2, notes from `addressbook` WHERE `deprecated` = '0000-00-00 00:00:00'")
for row in cursor:
(
id, firstname, middlename, lastname, nickname,
company, title, address,
home, mobile, work, fax,
email, email2, email3, homepage,
bday, bmonth, byear,
aday, amonth, ayear,
address2, phone2, notes) = row
list.append(Contact(id=str(id), firstname=firstname, middlename=middlename, lastname=lastname,
company=company, title=title, company_address=address,
home=home, mobile_phone_num=mobile, work_phone_num=work, fax_num=fax,
email1=email, email2=email2, email3=email3,
birthday_d=bday, birthday_m=bmonth, birthday_y=byear,
anniversary_d=aday, anniversary_m=amonth, anniversary_y=ayear,
second_address=address2, second_home=phone2, notes=notes))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close()
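# Minimal usage sketch (added; the connection parameters below are assumptions,
# not part of the original fixture):
if __name__ == "__main__":
    db = DbFixture(host="127.0.0.1", name="addressbook", user="root", password="")
    try:
        print(db.get_group_list())
        print(db.get_contact_list())
    finally:
        db.destroy()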
|
parthapritam2717/CodeChef
|
FCTRL2.py
|
Python
|
gpl-3.0
| 283
| 0.038869
|
from sys import stdin as sin
list_index=[]
list=dict()
def fn(n):
f=1
#check if a value less than that has already been calculated
#(a cached variant that does this is sketched after this function)
for i in range(1,n+1):
f*=i
return f
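# Added sketch (not in the original submission): one way to actually reuse
# previously computed factorials, as the comment above suggests. Cache keys are
# always contiguous 0..max, so extending from the largest cached value is safe.
def fn_cached(n, _cache={0: 1}):
    if n in _cache:
        return _cache[n]
    top = max(_cache)
    f = _cache[top]
    for i in range(top + 1, n + 1):
        f *= i
        _cache[i] = f
    return f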
t=int(input())
for i in range(t):
n=int(sin.readline().rstrip())
print(fn(n))
|
Ragowit/fireplace
|
fireplace/managers.py
|
Python
|
agpl-3.0
| 7,048
| 0.027667
|
from hearthstone.enums import GameTag
from . import enums
class Manager(object):
def __init__(self, obj):
self.obj = obj
self.observers = []
def __getitem__(self, tag):
if self.map.get(tag):
return getattr(self.obj, self.map[tag], 0)
raise KeyError
def __setitem__(self, tag, value):
setattr(self.obj, self.map[tag], value)
def __iter__(self):
for k in self.map:
if self.map[k]:
yield k
def get(self, k, default=None):
return self[k] if k in self.map else default
def items(self):
for k, v in self.map.items():
if v is not None:
yield k, self[k]
def register(self, observer):
self.observers.append(observer)
def update(self, tags):
for k, v in tags.items():
if self.map.get(k) is not None:
self[k] = v
class GameManager(Manager):
map = {
GameTag.CARDTYPE: "type",
GameTag.NEXT_STEP: "next_step",
GameTag.NUM_MINIONS_KILLED_THIS_TURN: "minions_killed_this_turn",
GameTag.PROPOSED_ATTACKER: "proposed_attacker",
GameTag.PROPOSED_DEFENDER: "proposed_defender",
GameTag.STATE: "state",
GameTag.STEP: "step",
GameTag.TURN: "turn",
GameTag.ZONE: "zone",
}
def __init__(self, obj):
super().__init__(obj)
self.counter = 1
obj.entity_id = self.counter
def action_start(self, type, source, index, target):
for observer in self.observers:
observer.action_start(type, source, index, target)
def action_end(self, type, source):
for observer in self.observers:
observer.action_end(type, source)
def new_entity(self, entity):
self.counter += 1
entity.entity_id = self.counter
for observer in self.observers:
observer.new_entity(entity)
def start_game(self):
for observer in self.observers:
observer.start_game()
def step(self, step, next_step):
for observer in self.observers:
observer.game_step(step, next_step)
self.obj.step = step
self.obj.next_step = next_step
class PlayerManager(Manager):
map = {
GameTag.CANT_DRAW: "cant_draw",
GameTag.CARDTYPE: "type",
GameTag.COMBO_ACTIVE: "combo",
GameTag.CONTROLLER: "controller",
GameTag.CURRENT_PLAYER: "current_player",
GameTag.CURRENT_SPELLPOWER: "spellpower",
GameTag.EMBRACE_THE_SHADOW: "healing_as_damage",
GameTag.FATIGUE: "fatigue_counter",
GameTag.FIRST_PLAYER: "first_player",
GameTag.HEALING_DOUBLE: "healing_double",
GameTag.HERO_ENTITY: "hero",
GameTag.LAST_CARD_PLAYED: "last_card_played",
GameTag.MAXHANDSIZE: "max_hand_size",
GameTag.MAXRESOURCES: "max_resources",
GameTag.NUM_CARDS_DRAWN_THIS_TURN: "cards_drawn_this_turn",
GameTag.NUM_CARDS_PLAYED_THIS_TURN: "cards_played_this_turn",
GameTag.NUM_MINIONS_PLAYED_THIS_TURN: "minions_played_this_turn",
GameTag.NUM_MINIONS_PLAYER_KILLED_THIS_TURN: "minions_killed_this_turn",
GameTag.NUM_TIMES_HERO_POWER_USED_THIS_GAME: "times_hero_power_used_this_game",
GameTag.OVERLOAD_LOCKED: "overload_locked",
GameTag.OVERLOAD_OWED: "overloaded",
GameTag.PLAYSTATE: "playstate",
GameTag.RESOURCES: "max_mana",
GameTag.RESOURCES_USED: "used_mana",
GameTag.SPELLPOWER_DOUBLE: "spellpower_double",
GameTag.STARTHANDSIZE: "start_hand_size",
GameTag.HERO_POWER_DOUBLE: "hero_power_double",
GameTag.TEMP_RESOURCES: "temp_mana",
GameTag.TIMEOUT: "timeout",
GameTag.TURN_START: "turn_start",
enums.CANT_OVERLOAD: "cant_overload",
}
CARD_ATTRIBUTE_MAP = {
GameTag.ADJACENT_BUFF: "adjacent_buff",
GameTag.ARMOR: "armor",
GameTag.ATK: "atk",
GameTag.ATTACKING: "attacking",
GameTag.ATTACHED: "owner",
GameTag.AURA: "aura",
GameTag.BATTLECRY: "has_battlecry",
GameTag.CANNOT_ATTACK_HEROES: "cannot_attack_heroes",
GameTag.CANT_ATTACK: "cant_attack",
GameTag.CANT_BE_ATTACKED: "cant_be_attacked",
GameTag.CANT_BE_DAMAGED: "cant_be_damaged",
GameTag.CANT_BE_TARGETED_BY_ABILITIES: "cant_be_targeted_by_abilities",
GameTag.CANT_BE_TARGETED_BY_HERO_POWERS: "cant_be_targeted_by_hero_powers",
GameTag.CANT_BE_TARGETED_BY_OPPONENTS: "cant_be_targeted_by_opponents",
GameTag.CANT_PLAY: "cant_play",
GameTag.CARD_ID: "id",
GameTag.CARD_TARGET: "target",
GameTag.CARDNAME: "name",
GameTag.CARDRACE: "race",
GameTag.CARDTYPE: "type",
GameTag.CHARGE: "charge",
GameTag.CLASS: "card_class",
GameTag.COMBO: "has_combo",
GameTag.CONTROLLER: "controller",
GameTag.COST: "cost",
GameTag.CREATOR: "creator",
GameTag.DAMAGE: "damage",
GameTag.DEATHRATTLE: "has_deathrattle",
GameTag.DEFENDING: "defending",
GameTag.DIVINE_SHIELD: "divine_shield",
GameTag.DURABILITY: "max_durability",
GameTag.EMBRACE_THE_SHADOW: "healing_as_damage",
GameTag.ENRAGED: "enrage",
GameTag.EXHAUSTED: "exhausted",
GameTag.EXTRA_DEATHRATTLES: "extra_deathrattles",
GameTag.FORGETFUL: "forgetful",
GameTag.FROZEN: "frozen",
GameTag.HEALING_DOUBLE: "healing_double",
GameTag.HEALTH: "max_health",
GameTag.HEALTH_MINIMUM: "min_health",
GameTag.HEAVILY_ARMORED: "heavily_armored",
GameTag.HEROPOWER_ADDITIONAL_ACTIVATIONS: "additional_activations",
GameTag.HEROPOWER_DAMAGE: "heropower_damage",
GameTag.INCOMING_DAMAGE_MULTIPLIER: "incoming_damage_multiplier",
GameTag.ImmuneToSpellpower: "immune_to_spellpower",
GameTag.IMMUNE_WHILE_ATTACKING: "immune_while_attacking",
GameTag.INSPIRE: "has_inspire",
GameTag.NUM_ATTACKS_THIS_TURN: "num_attacks",
GameTag.NUM_TURNS_IN_PLAY: "turns_in_play",
GameTag.TAG_ONE_TURN_EFFECT: "one_turn_effect",
GameTag.OVERLOAD: "overload",
GameTag.POISONOUS: "poisonous",
GameTag.POWERED_UP: "powered_up",
GameTag.RARITY: "rarity",
GameTag.RECEIVES_DOUBLE_SPELLDAMAGE_BONUS: "receives_double_spelldamage_bonus",
GameTag.SECRET: "secret",
GameTag.SHADOWFORM: "shadowform",
GameTag.SHOULDEXITCOMBAT: "should_exit_combat",
GameTag.SILENCED: "silenced",
GameTag.SPELLPOWER: "spellpower",
GameTag.SPELLPOWER_DOUBLE: "spellpower_double",
GameTag.STEALTH: "stealthed",
GameTag.TAG_AI_MUST_PLAY: "autocast",
GameTag.HERO_POWER_DOUBLE: "hero_power_double",
GameTag.TAUNT: "taunt",
GameTag.WINDFURY: "windfury",
GameTag.ZONE: "zone",
GameTag.ZONE_POSITION: "zone_position",
enums.ALWAYS_WINS_BRAWLS: "always_wins_brawls",
enums.EXTRA_BATTLECRIES: "extra_battlecries",
enums.KILLED_THIS_TURN: "killed_this_turn",
GameTag.AFFECTED_BY_SPELL_POWER: None,
GameTag.ARTISTNAME: None,
GameTag.AttackVisualType: None,
GameTag.CARD_SET: None,
GameTag.CARDTEXT_INHAND: None,
GameTag.CardTextInPlay: None,
GameTag.Collectible: None,
GameTag.DevState: None,
GameTag.ELITE: None,
GameTag.ENCHANTMENT_IDLE_VISUAL: None,
GameTag.ENCHANTMENT_BIRTH_VISUAL: None,
GameTag.EVIL_GLOW: None,
GameTag.FACTION: None,
GameTag.FLAVORTEXT: None,
GameTag.FREEZE: None,
GameTag.HealTarget: None,
GameTag.HIDE_COST: None,
GameTag.HOW_TO_EARN: None,
GameTag.HOW_TO_EARN_GOLDEN: None,
GameTag.InvisibleDeathrattle: None,
GameTag.MORPH: None,
GameTag.SILENCE: None,
GameTag.SUMMONED: None,
GameTag.SPARE_PART: None,
GameTag.SHOWN_HERO_POWER: None,
GameTag.TARGETING_ARROW_TEXT: None,
GameTag.TOPDECK: None,
GameTag.TAG_AI_MUST_PLAY: None,
GameTag.TRIGGER_VISUAL: None,
}
class CardManager(Manager):
map = CARD_ATTRIBUTE_MAP
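# Illustrative sketch (added, not part of the original module): a Manager is a
# tag -> attribute proxy, so reading or writing a GameTag goes through the `map`
# dict to a plain attribute on the wrapped object. DummyGame is made up purely
# for demonstration; running this requires the hearthstone package imported above.
if __name__ == "__main__":
    class DummyGame(object):
        turn = 3
    game = DummyGame()
    mgr = GameManager(game)
    print(mgr[GameTag.TURN])   # -> 3, read via map[GameTag.TURN] == "turn"
    mgr[GameTag.TURN] = 4      # equivalent to game.turn = 4
    print(game.turn)           # -> 4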
|
nickhand/nbodykit
|
nersc/example.py
|
Python
|
gpl-3.0
| 578
| 0.012111
|
from nbodykit.lab import *
from nbodykit import setup_logging
setup_logging("debug")
# initialize a linear power spectrum class
cosmo = cosmology.Planck15
Plin = cosmology.LinearPower(cosmo, redshift=0.55, transfer='CLASS')
# get some lognormal particles
source = LogNormalCatalog(Plin=Plin, nbar=3e-7, BoxSize=1380., Nmesh=8, seed=42)
# apply RSD
source['Position'] += source['VelocityOffset'] * [0,0,1]
# compute P(k,mu) and multipoles
result = FFTPower(source, mode='2d', poles=[0,2,4], los=[0,0,1])
# and save
output = "./nbkit_example_power.json"
result.save(output)
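# Added note (an assumption, not part of the original example): the saved JSON
# can be reloaded later with the algorithm's load() classmethod, e.g.
#   result = FFTPower.load("./nbkit_example_power.json")
#   print(result.poles)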
|
eyaler/tensorpack
|
examples/FasterRCNN/model_box.py
|
Python
|
apache-2.0
| 7,519
| 0.001197
|
# -*- coding: utf-8 -*-
# File: model_box.py
import numpy as np
from collections import namedtuple
import tensorflow as tf
from tensorpack.tfutils.scope_utils import under_name_scope
from config import config
@under_name_scope()
def clip_boxes(boxes, window, name=None):
"""
Args:
boxes: nx4, xyxy
window: [h, w]
"""
boxes = tf.maximum(boxes, 0.0)
m = tf.tile(tf.reverse(window, [0]), [2]) # (4,)
boxes = tf.minimum(boxes, tf.to_float(m), name=name)
return boxes
@under_name_scope()
def decode_bbox_target(box_predictions, anchors):
"""
Args:
box_predictions: (..., 4), logits
anchors: (..., 4), floatbox. Must have the same shape
Returns:
box_decoded: (..., 4), float32. With the same shape.
"""
orig_shape = tf.shape(anchors)
box_pred_txtytwth = tf.reshape(box_predictions, (-1, 2, 2))
box_pred_txty, box_pred_twth = tf.split(box_pred_txtytwth, 2, axis=1)
# each is (...)x1x2
anchors_x1y1x2y2 = tf.reshape(anchors, (-1, 2, 2))
anchors_x1y1, anchors_x2y2 = tf.split(anchors_x1y1x2y2, 2, axis=1)
waha = anchors_x2y2 - anchors_x1y1
xaya = (anchors_x2y2 + anchors_x1y1) * 0.5
clip = np.log(config.PREPROC.MAX_SIZE / 16.)
wbhb = tf.exp(tf.minimum(box_pred_twth, clip)) * waha
xbyb = box_pred_txty * waha + xaya
x1y1 = xbyb - wbhb * 0.5
x2y2 = xbyb + wbhb * 0.5 # (...)x1x2
out = tf.concat([x1y1, x2y2], axis=-2)
return tf.reshape(out, orig_shape)
@under_name_scope()
def encode_bbox_target(boxes, anchors):
"""
Args:
boxes: (..., 4), float32
anchors: (..., 4), float32
Returns:
box_encoded: (..., 4), float32 with the same shape.
"""
anchors_x1y1x2y2 = tf.reshape(anchors, (-1, 2, 2))
anchors_x1y1, anchors_x2y2 = tf.split(anchors_x1y1x2y2, 2, axis=1)
waha = anchors_x2y2 - anchors_x1y1
xaya = (anchors_x2y2 + anchors_x1y1) * 0.5
boxes_x1y1x2y2 = tf.reshape(boxes, (-1, 2, 2))
boxes_x1y1, boxes_x2y2 = tf.split(boxes_x1y1x2y2, 2, axis=1)
wbhb = boxes_x2y2 - boxes_x1y1
xbyb = (boxes_x2y2 + boxes_x1y1) * 0.5
# Note that here not all boxes are valid. Some may be zero
txty = (xbyb - xaya) / waha
twth = tf.log(wbhb / waha) # may contain -inf for invalid boxes
encoded = tf.concat([txty, twth], axis=1) # (-1x2x2)
return tf.reshape(encoded, tf.shape(boxes))
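# Added commentary (not in the original file): encode and decode are inverse
# transforms -- decode reconstructs xb = tx * wa + xa and wb = exp(tw) * wa from
# the (tx, ty, tw, th) computed here, so decode_bbox_target(encode_bbox_target(b, a), a)
# recovers b for non-degenerate boxes (up to the clip decode applies to huge logits).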
@under_name_scope()
def crop_and_resize(image, boxes, box_ind, crop_size, pad_border=True):
"""
Aligned version of tf.image.crop_and_resize, following our definition of floating point boxes.
Args:
image: NCHW
boxes: nx4, x1y1x2y2
box_ind: (n,)
crop_size (int):
Returns:
n,C,size,size
"""
assert isinstance(crop_size, int), crop_size
boxes = tf.stop_gradient(boxes)
# TF's crop_and_resize produces zeros on border
if pad_border:
# this can be quite slow
image = tf.pad(image, [[0, 0], [0, 0], [1, 1], [1, 1]], mode='SYMMETRIC')
boxes = boxes + 1
@under_name_scope()
def transform_fpcoor_for_tf(boxes, image_shape, crop_shape):
"""
The way tf.image.crop_and_resize works (with normalized box):
Initial point (the value of output[0]): x0_box * (W_img - 1)
Spacing: w_box * (W_img - 1) / (W_crop - 1)
Use the above grid to bilinear sample.
However, what we want is (with fpcoor box):
Spacing: w_box / W_crop
Initial point: x0_box + spacing/2 - 0.5
(-0.5 because bilinear sample (in my definition) assumes floating point coordinate
(0.0, 0.0) is the same as pixel value (0, 0))
This function transform fpcoor boxes to a format to be used by tf.image.crop_and_resize
Returns:
y1x1y2x2
"""
x0, y0, x1, y1 = tf.split(boxes, 4, axis=1)
spacing_w = (x1 - x0) / tf.to_float(crop_shape[1])
spacing_h = (y1 - y0) / tf.to_float(crop_shape[0])
nx0 = (x0 + spacing_w / 2 - 0.5) / tf.to_float(image_shape[1] - 1)
ny0 = (y0 + spacing_h / 2 - 0.5) / tf.to_float(image_shape[0] - 1)
nw = spacing_w * tf.to_float(crop_shape[1] - 1) / tf.to_float(image_shape[1] - 1)
nh = spacing_h * tf.to_float(crop_shape[0] - 1) / tf.to_float(image_shape[0] - 1)
return tf.concat([ny0, nx0, ny0 + nh, nx0 + nw], axis=1)
# Expand bbox to a minimum size of 1
# boxes_x1y1, boxes_x2y2 = tf.split(boxes, 2, axis=1)
# boxes_wh = boxes_x2y2 - boxes_x1y1
# boxes_center = tf.reshape((boxes_x2y2 + boxes_x1y1) * 0.5, [-1, 2])
# boxes_newwh = tf.maximum(boxes_wh, 1.)
# boxes_x1y1new = boxes_center - boxes_newwh * 0.5
# boxes_x2y2new = boxes_center + boxes_newwh * 0.5
# boxes = tf.concat([boxes_x1y1new, boxes_x2y2new], axis=1)
image_shape = tf.shape(image)[2:]
boxes = transform_fpcoor_for_tf(boxes, image_shape, [crop_size, crop_size])
image = tf.transpose(image, [0, 2, 3, 1]) # nhwc
ret = tf.image.crop_and_resize(
image, boxes, tf.to_int32(box_ind),
crop_size=[crop_size, crop_size])
ret = tf.transpose(ret, [0, 3, 1, 2]) # ncss
return ret
@under_name_scope()
def roi_align(featuremap, boxes, resolution):
"""
Args:
featuremap: 1xCxHxW
boxes: Nx4 floatbox
resolution: output spatial resolution
Returns:
NxCx res x res
"""
# sample 4 locations per roi bin
ret = crop_and_resize(
featuremap, boxes,
tf.zeros([tf.shape(boxes)[0]], dtype=tf.int32),
resolution * 2)
ret = tf.nn.avg_pool(ret, [1, 1, 2, 2], [1, 1, 2, 2], padding='SAME', data_format='NCHW')
return ret
class RPNAnchors(namedtuple('_RPNAnchors', ['boxes', 'gt_labels', 'gt_boxes'])):
"""
boxes (FS x FS x NA x 4): The anchor boxes.
gt_labels (FS x FS x NA):
gt_boxes (FS x FS x NA x 4): Groundtruth boxes corresponding to each anchor.
"""
def encoded_gt_boxes(self):
return encode_bbox_target(self.gt_boxes, self.boxes)
def decode_logits(self, logits):
return decode_bbox_target(logits, self.boxes)
@under_name_scope()
def narrow_to(self, featuremap):
"""
Slice anchors to the spatial size of this featuremap.
"""
shape2d = tf.shape(featuremap)[2:] # h,w
slice3d = tf.concat([shape2d, [-1]], axis=0)
slice4d = tf.concat([shape2d, [-1, -1]], axis=0)
boxes = tf.slice(self.boxes, [0, 0, 0, 0], slice4d)
gt_labels = tf.slice(self.gt_labels, [0, 0, 0], slice3d)
gt_boxes = tf.slice(self.gt_boxes, [0, 0, 0, 0], slice4d)
return RPNAnchors(boxes, gt_labels, gt_boxes)
if __name__ == '__main__':
"""
Demonstrate what's wrong with tf.image.crop_and_resize:
"""
import tensorflow.contrib.eager as tfe
tfe.enable_eager_execution()
# want to crop 2x2 out of a 5x5 image, and resize to 4x4
image = np.arange(25).astype('float32').reshape(5, 5)
boxes = np.asarray([[1, 1, 3, 3]], dtype='float32')
target = 4
print(crop_and_resize(
image[None, None, :, :], boxes, [0], target)[0][0])
"""
Expected values:
4.5 5 5.5 6
7 7.5 8 8.5
9.5 10 10.5 11
12 12.5 13 13.5
You cannot easily get the above results with tf.image.crop_and_resize.
Try out yourself here:
"""
print(tf.image.crop_and_resize(
image[None, :, :, None],
np.asarray([[1, 1, 2, 2]]) / 4.0, [0], [target, target])[0][:, :, 0])
|
jhsenjaliya/incubator-airflow
|
airflow/executors/local_executor.py
|
Python
|
apache-2.0
| 2,991
| 0
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import multiprocessing
import subprocess
import time
from builtins import range
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.state import State
PARALLELISM = configuration.get('core', 'PARALLELISM')
class LocalWorker(multiprocessing.Process, LoggingMixin):
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
self.daemon = True
def run(self):
while True:
key, command = self.task_queue.get()
if key is None:
# Received poison pill, no more tasks to run
self.task_queue.task_done()
break
self.log.info("%s running %s", self.__class__.__name__, command)
command = "exec bash -c '{0}'".format(command)
try:
subprocess.check_call(command, shell=True)
state = State.SUCCESS
except subprocess.CalledProcessError as e:
state = State.FAILED
self.log.error("Failed to execute task %s.", str(e))
# TODO: Why is this commented out?
# raise e
self.result_queue.put((key, state))
self.task_queue.task_done()
time.sleep(1)
class LocalExecutor(BaseExecutor):
"""
LocalExecutor executes tasks locally in parallel. It uses the
multiprocessing Python library and queues to parallelize the execution
of tasks.
"""
def start(self):
self.queue = multiprocessing.JoinableQueue()
self.result_queue = multiprocessing.Queue()
self.workers = [
LocalWorker(self.queue, self.result_queue)
for _ in range(self.parallelism)
]
for w in self.workers:
w.start()
def execute_async(self, key, command, queue=None):
self.queue.put((key, command))
def sync(self):
while not self.result_queue.empty():
results = self.result_queue.get()
self.change_state(*results)
def end(self):
# Sending poison pill to all workers
for _ in self.workers:
self.queue.put((None, None))
# Wait for commands to finish
self.queue.join()
self.sync()
|
xuender/test
|
testAdmin/itest/migrations/0007_auto__chg_field_test_content.py
|
Python
|
apache-2.0
| 1,756
| 0.006834
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Test.content'
db.alter_column(u'itest_test', 'content', self.gf('django.db.models.fields.CharField')(max_length=5850, null=True))
def backwards(self, orm):
# Changing field 'Test.content'
db.alter_column(u'itest_test', 'content', self.gf('django.db.models.fields.CharField')(max_length=1850, null=True))
models = {
'itest.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'word': ('django.db.models.fields.CharField', [], {'max_length': '35'})
},
'itest.test': {
'Meta': {'object_name': 'Test'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '5850', 'null': 'True', 'blank': 'True'}),
'create_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '450', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'tests'", 'symmetrical': 'False', 'to': "orm['itest.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150'})
}
}
complete_apps = ['itest']
|
gleicher27/Tardigrade
|
moose/gui/mesh_info/ExodusIIMeshInfo.py
|
Python
|
lgpl-2.1
| 1,654
| 0.015719
|
from MeshInfo import *
''' Provides Information about ExodusII meshes '''
class ExodusIIMeshInfo(MeshInfo):
def __init__(self, mesh_item_data, file_name):
MeshInfo.__init__(self, mesh_item_data)
self.file_name = file_name
import vtk
reader = vtk.vtkExodusIIReader()
reader.SetFileName(self.file_name)
reader.UpdateInformation()
num_nodesets = reader.GetNumberOfNodeSetArrays()
num_sidesets = reader.GetNumberOfSideSetArrays()
num_blocks = reader.GetNumberOfElementBlockArrays()
self.nodesets = set()
for i in xrange(num_nodesets):
self.nodesets.add(reader.GetObjectId(vtk.vtkExodusIIReader.NODE_SET,i))
if 'Unnamed' not in reader.GetObjectName(vtk.vtkExodusIIReader.NODE_SET,i).split(' '):
self.nodesets.add(reader.GetObjectName(vtk.vtkExodusIIReader.NODE_SET,i).split(' ')[0])
self.sidesets = set()
for i in xrange(num_sidesets):
self.sidesets.add(reader.GetObjectId(vtk.vtkExodusIIReader.SIDE_SET,i))
if 'Unnamed' not in reader.GetObjectName(vtk.vtkExodusIIReader.SIDE_SET,i).split(' '):
self.sidesets.add(reader.GetObjectName(vtk.vtkExodusIIReader.SIDE_SET,i).split(' ')[0])
self.blocks = set()
for i in xrange(num_blocks):
self.blocks.add(reader.GetObjectId(vtk.vtkExodusIIReader.ELEM_BLOCK,i))
if 'Unnamed' not in reader.GetObjectName(vtk.vtkExodusIIReader.ELEM_BLOCK,i).split(' '):
self.blocks.add(reader.GetObjectName(vtk.vtkExodusIIReader.ELEM_BLOCK,i).split(' ')[0])
def blockNames(self):
return self.blocks
def sidesetNames(self):
return self.sidesets
def nodesetNames(self):
return self.nodesets
|
rmenegaux/bqplot
|
bqplot/colorschemes.py
|
Python
|
apache-2.0
| 2,813
| 0.003199
|
# These color schemes come from d3: http://d3js.org/
#
# They are licensed under the following license:
#
# Copyright (c) 2010-2015, Michael Bostock
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * The name Michael Bostock may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#: 10 colors that work well together as data category colors
CATEGORY10 = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd', '#8c564b',
'#e377c2', '#7f7f7f', '#bcbd22', '#17becf']
#: 20 colors that work well together as data category colors
CATEGORY20 = ['#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c', '#98df8a',
'#d62728', '#ff9896', '#9467bd', '#c5b0d5', '#8c564b', '#c49c94',
'#e377c2', '#f7b6d2', '#7f7f7f', '#c7c7c7', '#bcbd22', '#dbdb8d',
'#17becf', '#9edae5']
#: 20 colors that work well together as data category colors
CATEGORY20b = ['#393b79', '#5254a3', '#6b6ecf', '#9c9ede', '#637939',
'#8ca252', '#b5cf6b', '#cedb9c', '#8c6d31', '#bd9e39',
'#e7ba52', '#e7cb94', '#843c39', '#ad494a', '#d6616b',
'#e7969c', '#7b4173', '#a55194', '#ce6dbd', '#de9ed6']
#: 20 colors that work well together as data category colors
CATEGORY20c = ['#3182bd', '#6baed6', '#9ecae1', '#c6dbef', '#e6550d',
'#fd8d3c', '#fdae6b', '#fdd0a2', '#31a354', '#74c476',
'#a1d99b', '#c7e9c0', '#756bb1', '#9e9ac8', '#bcbddc',
'#dadaeb', '#636363', '#969696', '#bdbdbd', '#d9d9d9']
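# Added usage sketch (not part of the original module): cycling a palette when
# assigning colors to several data series.
if __name__ == "__main__":
    from itertools import cycle
    for series, color in zip(["gdp", "cpi", "unemployment"], cycle(CATEGORY10)):
        print(series, color)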
|
thinkopensolutions/odoo-saas-tools
|
saas_portal_tagging/models/__init__.py
|
Python
|
lgpl-3.0
| 79
| 0
|
# -*- coding: utf-8 -*-
from . import saas_portal_tagging
from . import wizard
|
ianyh/heroku-buildpack-python-opencv
|
vendor/.heroku/lib/python2.7/test/test_robotparser.py
|
Python
|
mit
| 6,753
| 0.003998
|
import unittest, StringIO, robotparser
from test import test_support
from urllib2 import urlopen, HTTPError
class RobotTestCase(unittest.TestCase):
def __init__(self, index, parser, url, good, agent):
unittest.TestCase.__init__(self)
if good:
self.str = "RobotTest(%d, good, %s)" % (index, url)
else:
self.str = "RobotTest(%d, bad, %s)" % (index, url)
self.parser = parser
self.url = url
self.good = good
self.agent = agent
def runTest(self):
if isinstance(self.url, tuple):
agent, url = self.url
else:
url = self.url
agent = self.agent
if self.good:
self.assertTrue(self.parser.can_fetch(agent, url))
else:
self.assertFalse(self.parser.can_fetch(agent, url))
def __str__(self):
return self.str
tests = unittest.TestSuite()
def RobotTest(index, robots_txt, good_urls, bad_urls,
agent="test_robotparser"):
lines = StringIO.StringIO(robots_txt).readlines()
parser = robotparser.RobotFileParser()
parser.parse(lines)
for url in good_urls:
tests.addTest(RobotTestCase(index, parser, url, 1, agent))
for url in bad_urls:
tests.addTest(RobotTestCase(index, parser, url, 0, agent))
# Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)
# 1.
doc = """
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
"""
good = ['/','/test.html']
bad = ['/cyberworld/map/index.html','/tmp/xxx','/foo.html']
RobotTest(1, doc, good, bad)
# 2.
doc = """
# robots.txt for http://www.example.com/
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:
"""
good = ['/','/test.html',('cybermapper','/cyberworld/map/index.html')]
bad = ['/cyberworld/map/index.html']
RobotTest(2, doc, good, bad)
# 3.
doc = """
# go away
User-agent: *
Disallow: /
"""
good = []
bad = ['/cyberworld/map/index.html','/','/tmp/']
RobotTest(3, doc, good, bad)
# Examples from http://www.robotstxt.org/wc/norobots-rfc.html (fetched 2002)
# 4.
doc = """
User-agent: figtree
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
"""
good = [] # XFAIL '/a/b.html'
bad = ['/tmp','/tmp.html','/tmp/a.html',
'/a%3cd.html','/a%3Cd.html','/a%2fb.html',
'/~joe/index.html'
]
RobotTest(4, doc, good, bad, 'figtree')
RobotTest(5, doc, good, bad, 'FigTree Robot libwww-perl/5.04')
# 6.
doc = """
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
"""
good = ['/tmp',] # XFAIL: '/a%2fb.html'
bad = ['/tmp/','/tmp/a.html',
'/a%3cd.html','/a%3Cd.html',"/a/b.html",
'/%7Ejoe/index.html']
RobotTest(6, doc, good, bad)
# From bug report #523041
# 7.
doc = """
User-Agent: *
Disallow: /.
"""
good = ['/foo.html']
bad = [] # Bug report says "/" should be denied, but that is not in the RFC
RobotTest(7, doc, good, bad)
# From Google: http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=40364
# 8.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""
good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']
RobotTest(8, doc, good, bad, agent="Googlebot")
# 9. This file is incorrect because "Googlebot" is a substring of
# "Googlebot-Mobile", so test 10 works just like test 9.
doc = """
User-agent: Googlebot
Disallow: /
User-agent: Googlebot-Mobile
Allow: /
"""
good = []
bad = ['/something.jpg']
RobotTest(9, doc, good, bad, agent="Googlebot")
good = []
bad = ['/something.jpg']
RobotTest(10, doc, good, bad, agent="Googlebot-Mobile")
# 11. Get the order correct.
doc = """
User-agent: Googlebot-Mobile
Allow: /
User-agent: Googlebot
Disallow: /
"""
good = []
bad = ['/something.jpg']
RobotTest(11, doc, good, bad, agent="Googlebot")
good = ['/something.jpg']
bad = []
RobotTest(12, doc, good, bad, agent="Googlebot-Mobile")
# 13. Google also got the order wrong in #8. You need to specify the
# URLs from more specific to more general.
doc = """
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
"""
good = ['/folder1/myfile.html']
bad = ['/folder1/anotherfile.html']
RobotTest(13, doc, good, bad, agent="googlebot")
# 14. For issue #6325 (query string support)
doc = """
User-agent: *
Disallow: /some/path?name=value
"""
good = ['/some/path']
bad = ['/some/path?name=value']
RobotTest(14, doc, good, bad)
# 15. For issue #4108 (obey first * entry)
doc = """
User-agent: *
Disallow: /some/path
User-agent: *
Disallow: /another/path
"""
good = ['/another/path']
bad = ['/some/path']
RobotTest(15, doc, good, bad)
# 16. Empty query (issue #17403). Normalizing the url first.
doc = """
User-agent: *
Allow: /some/path?
Disallow: /another/path?
"""
good = ['/some/path?']
bad = ['/another/path?']
RobotTest(16, doc, good, bad)
class NetworkTestCase(unittest.TestCase):
def testPasswordProtectedSite(self):
test_support.requires('network')
with test_support.transient_internet('mueblesmoraleda.com'):
url = 'http://mueblesmoraleda.com'
robots_url = url + "/robots.txt"
# First check the URL is usable for our purposes, since the
# test site is a bit flaky.
try:
urlopen(robots_url)
except HTTPError as e:
if e.code not in {401, 403}:
self.skipTest(
"%r should return a 401 or 403 HTTP error, not %r"
% (robots_url, e.code))
else:
self.skipTest(
"%r should return a 401 or 403 HTTP error, not succeed"
% (robots_url))
parser = robotparser.RobotFileParser()
parser.set_url(url)
try:
parser.read()
except IOError:
self.skipTest('%s is unavailable' % url)
self.assertEqual(parser.can_fetch("*", robots_url), False)
def testPythonOrg(self):
test_support.requires('network')
with test_support.transient_internet('www.python.org'):
parser = robotparser.RobotFileParser(
"http://www.python.org/robots.txt")
parser.read()
self.assertTrue(
parser.can_fetch("*", "http://www.python.org/robots.txt"))
def test_main():
test_support.run_unittest(tests)
test_support.run_unittest(NetworkTestCase)
if __name__=='__main__':
test_support.verbose = 1
test_main()
|
BigBart/2sync
|
2sync.py
|
Python
|
gpl-3.0
| 1,089
| 0.01011
|
#! /usr/bin/env python3
from gi.repository import Gtk, GObject
import gui
import logging
import argparse
import threading
# Commandline arguments
parser = argparse.ArgumentParser(description='2-way syncronisation for folders')
parser.add_argument('config', help='name of the configuration file')
parser.add_argument('-d', '--debug', action='store_true', help='use this option for debuging (write debug messages to logfile)')
args = parser.parse_args()
# Config logging
# Set loglevel für logfile
if args.debug == True:
log_level = logging.DEBUG
else:
log_level = logging.INFO
# Logging to file
logging.basicConfig(level=log_level, filename='2sync.log', filemode='a', format='%(levelname)s: %(asctime)s - 2sync - %(message)s')
# define a Handler for sys.stderr and add it
console = logging.StreamHandler()
console.setLevel(logging.WARNING)
logging.getLogger('').addHandler(console)
# Needed for running threads
GObject.threads_init()
thread = threading.Thread(target=gui.TwoSyncGUI, args=[args.config])
thread.daemon = True
thread.start()
try:
Gtk.main()
except:
Gtk.main_quit()
|
quattor/aquilon
|
tests/broker/test_add_rack.py
|
Python
|
apache-2.0
| 11,937
| 0.001508
|
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the add rack command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestAddRack(TestBrokerCommand):
def test_100_addut3(self):
command = "add rack --fullname ut3 --bunker zebrabucket.ut --row a --column 3"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut3", command)
command = "show building --building ut"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Next Rack ID: 4", command)
def test_101_addut3again(self):
command = "update building --building ut --next_rackid 3"
out = self.commandtest(command.split(" "))
command = "add rack --fullname ut3 --room utroom1 --row a --column 3"
out = self.badrequesttest(command.split(" "))
self.matchoutput(out, "Rack ut3 already exists.", command)
command = "update building --building ut --next_rackid 4"
out = self.commandtest(command.split(" "))
def test_105_verifyaddut3(self):
command = "show rack --rack ut3"
out = self.commandtest(command.split(" "))
self.output_equals(out, """
Rack: ut3
Fullname: ut3
Row: a
Column: 3
Location Parents: [Organization ms, Hub ny, Continent na, Country us, Campus ny, City ny, Building ut, Room utroom1, Bunker zebrabucket.ut]
""", command)
def test_110_verifyaddut3proto(self):
command = "show rack --rack ut3 --format proto"
loc = self.protobuftest(command.split(" "), expect=1)[0]
self.assertEqual(loc.name, "ut3")
self.assertEqual(loc.location_type, "rack")
self.assertEqual(loc.row, "a")
self.assertEqual(loc.col, "3")
def test_115_addcards1(self):
command = "add rack --fullname cards1 --building cards --row a --column 1"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "cards1", command)
def test_120_verifyaddcards1(self):
command = "show rack --rack cards1"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Rack: cards1", command)
self.matchoutput(out, "Row: a", command)
self.matchoutput(out, "Column: 1", command)
def test_125_addnp3(self):
command = "add rack --fullname np3 --bunker zebrabucket.np --row a --column 3"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "np3", command)
def test_130_addut4(self):
command = "add rack --fullname A4 --room utroom1 --row a --column 4"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut4", command)
def test_135_addut8(self):
# Test override rackid
command = "add rack --fullname 8.6.7 --building ut --row g --column 2 --force_rackid 8"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut8", command)
def test_140_addut9(self):
# Test that next rackid for building ut was reset to 8 + 1,
# because force_rackid with value > current next_rackid was used
command = "add rack --fullname Aperture_name --bunker bucket2.ut --row g --column 3"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut9", command)
def test_145_verifyut9(self):
command = "show rack --rack ut9"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Fullname: Aperture_name", command)
self.matchoutput(out,
"Location Parents: [Organization ms, Hub ny, "
"Continent na, Country us, Campus ny, City ny, "
"Building ut, Room utroom2, Bunker bucket2.ut]",
command)
def test_146_test_fillin_gaps(self):
# Test that next rackid for building ut was NOT reset,
# because force_rackid with value < current next_rackid was used
command = "add rack --fullname Aperture_name --building ut --row g --column 6 --force_rackid ut6"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut6", command)
command = "show building --building ut"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Next Rack ID: 10", command)
def test_147_test_fillin_gaps_delete(self):
command = "del rack --rack ut6"
self.noouttest(command.split(" "))
def test_148_add_rack_fail_name_format(self):
command = "add rack --force_rackid ut12-66-1 --building ut --row g --column 4"
err = self.badrequesttest(command.split(" "))
self.matchoutput(err, "Invalid rack name ut12-66-1. Correct name format: "
"building name + numeric rack ID (integer without leading 0)", command)
def test_149_add_rack_fail_option(self):
command = "add rack --rackid ut12-66-1 --building ut --row g --column 4"
err = self.badoptiontest(command.split(" "))
self.matchoutput(err, "no such option: --rackid", command)
def test_150_addut10(self):
command = "add rack --building ut --row g --column 4"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut10", command)
def test_151_add_rack_fail_name_format(self):
command = "add rack --force_rackid 012 --building ut --row g --column 4"
err = self.badrequesttest(command.split(" "))
self.matchoutput(err, "Invalid rack name ut012. Correct name format: "
"building name + numeric rack ID (integer without leading 0).", command)
def test_155_addut11(self):
# Test that if next_rackid == force_rackid this next_rackid is incremented by 1
command = "add rack --bunker zebrabucket.ut --row k --column 1 --force_rackid ut11"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut11", command)
command = "show building --building ut"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Next Rack ID: 12", command)
def test_160_addut12(self):
command = "add rack --building ut --row k --column 2 --uri TESTURI"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "ut12", command)
command = "show rack --rack ut12"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Location URI: TESTURI", command)
def test_165_addnp7(self):
command = "update building --building np --next_rackid 7"
out = self.commandtest(command.split(" "))
command = "add rack --building np --row g --column 1"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "np7", command)
def test_170_addnp997(self):
command = "update building --building np --next_rackid 997"
out = self.commandtest(command.split(" "))
command = "add rack --building np --row ZZ --column 99"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "np997", command)
def test_175_addnp998(self):
command = "add rack --building np --row yy --column 88"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "np998", command)
def test_180_addnp999(self):
command
|
ppwwyyxx/tensorflow
|
tensorflow/python/tpu/tpu_test_wrapper_test.py
|
Python
|
apache-2.0
| 6,679
| 0.005989
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for tpu_test_wrapper.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib.util # Python 3 only.
import os
from absl.testing import flagsaver
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.tpu import tpu_test_wrapper
class TPUTestWrapperTest(test.TestCase):
@flagsaver.flagsaver()
def test_flags_undefined(self):
tpu_test_wrapper.maybe_define_flags()
self.assertIn('tpu', flags.FLAGS)
self.assertIn('zone', flags.FLAGS)
self.assertIn('project', flags.FLAGS)
self.assertIn('model_dir', flags.FLAGS)
@flagsaver.flagsaver()
def test_flags_already_defined_not_overridden(self):
flags.DEFINE_string('tpu', 'tpuname', 'helpstring')
tpu_test_wrapper.maybe_define_flags()
self.assertIn('tpu', flags.FLAGS)
self.assertIn('zone', flags.FLAGS)
self.assertIn('project', flags.FLAGS)
self.assertIn('model_dir', flags.FLAGS)
self.assertEqual(flags.FLAGS.tpu, 'tpuname')
@flagsaver.flagsaver(bazel_repo_root='tensorflow/python')
def test_parent_path(self):
filepath = '/filesystem/path/tensorflow/python/tpu/example_test.runfiles/tensorflow/python/tpu/example_test' # pylint: disable=line-too-long
self.assertEqual(
tpu_test_wrapper.calculate_parent_python_path(filepath),
'tensorflow.python.tpu')
@flagsaver.flagsaver(bazel_repo_root='tensorflow/python')
def test_parent_path_raises(self):
filepath = '/bad/path'
with self.assertRaisesWithLiteralMatch(
ValueError,
'Filepath "/bad/path" does not contain repo root "tensorflow/python"'):
tpu_test_wrapper.calculate_parent_python_path(filepath)
def test_is_test_class_positive(self):
class A(test.TestCase):
pass
self.assertTrue(tpu_test_wrapper._is_test_class(A))
def test_is_test_class_negative(self):
class A(object):
pass
self.assertFalse(tpu_test_wrapper._is_test_class(A))
@flagsaver.flagsaver(wrapped_tpu_test_module_relative='.tpu_test_wrapper_test'
)
def test_move_test_classes_into_scope(self):
# Test the class importer by having the wrapper module import this test
# into itself.
with test.mock.patch.object(
tpu_test_wrapper, 'calculate_parent_python_path') as mock_parent_path:
mock_parent_path.return_value = (
tpu_test_wrapper.__name__.rpartition('.')[0])
module = tpu_test_wrapper.import_user_module()
tpu_test_wrapper.move_test_classes_into_scope(module)
self.assertEqual(
tpu_test_wrapper.tpu_test_imported_TPUTestWrapperTest.__name__,
self.__class__.__name__)
@flagsaver.flagsaver(test_dir_base='gs://example-bucket/tempfiles')
def test_set_random_test_dir(self):
tpu_test_wrapper.maybe_define_flags()
tpu_test_wrapper.set_random_test_dir()
self.assertStartsWith(flags.FLAGS.model_dir,
'gs://example-bucket/tempfiles/')
self.assertGreater(
len(flags.FLAGS.model_dir), len('gs://example-bucket/tempfiles/'))
@flagsaver.flagsaver(test_dir_base='gs://example-bucket/tempfiles')
def test_set_random_test_dir_repeatable(self):
tpu_test_wrapper.maybe_define_flags()
tpu_test_wrapper.set_random_test_dir()
first = flags.FLAGS.model_dir
tpu_test_wrapper.set_random_test_dir()
second = flags.FLAGS.model_dir
self.assertNotEqual(first, second)
def test_run_user_main(self):
test_module = _write_and_load_module("""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
VARS = 1
if 'unrelated_if' == 'should_be_ignored':
VARS = 2
if __name__ == '__main__':
VARS = 3
if 'extra_if_at_bottom' == 'should_be_ignored':
VARS = 4
""")
self.assertEqual(test_module.VARS, 1)
tpu_test_wrapper.run_user_main(test_module)
self.assertEqual(test_module.VARS, 3)
def test_run_user_main_missing_if(self):
test_module = _write_and_load_module("""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
VARS = 1
""")
self.assertEqual(test_module.VARS, 1)
with self.assertRaises(NotImplementedError):
tpu_test_wrapper.run_user_main(test_module)
def test_run_user_main_double_quotes(self):
test_module = _write_and_load_module("""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
VARS = 1
if "unrelated_if" == "should_be_ignored":
VARS = 2
if __name__ == "__main__":
VARS = 3
if "extra_if_at_bottom" == "should_be_ignored":
VARS = 4
""")
self.assertEqual(test_module.VARS, 1)
tpu_test_wrapper.run_user_main(test_module)
self.assertEqual(test_module.VARS, 3)
def test_run_user_main_test(self):
test_module = _write_and_load_module("""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.platform import test as unique_name
class DummyTest(unique_name.TestCase):
def test_fail(self):
self.fail()
if __name__ == '__main__':
unique_name.main()
""")
# We're actually limited in what we can test here -- we can't call
# test.main() without deleting this current test from locals(), or we'll
# recurse infinitely. We settle for testing that the test imports and calls
# the right test module.
with test.mock.patch.object(test, 'main') as mock_main:
tpu_test_wrapper.run_user_main(test_module)
mock_main.assert_called_once()
def _write_and_load_module(source):
fp = os.path.join(test.get_temp_dir(), 'testmod.py')
with open(fp, 'w') as f:
f.write(source)
spec = importlib.util.spec_from_file_location('testmodule', fp)
test_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(test_module)
return test_module
if __name__ == '__main__':
test.main()
|
QISKit/qiskit-sdk-py
|
qiskit/pulse/commands/instruction.py
|
Python
|
apache-2.0
| 9,538
| 0.001887
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Instruction = Leaf node of schedule.
"""
from typing import Tuple, List, Iterable, Callable, Optional
from qiskit.pulse.channels import Channel
from qiskit.pulse.interfaces import ScheduleComponent
from qiskit.pulse.schedule import Schedule
from qiskit.pulse.timeslots import Interval, Timeslot, TimeslotCollection
# pylint: disable=missing-return-doc,missing-type-doc
class Instruction(ScheduleComponent):
"""An abstract class for leaf nodes of schedule."""
def __init__(self, command, *channels: List[Channel],
name: Optional[str] = None):
"""
Args:
command: Pulse command to schedule
*channels: List of pulse channels to schedule with command
name: Name of Instruction
"""
self._command = command
self._name = name if name else self._command.name
duration = command.duration
self._timeslots = TimeslotCollection(*(Timeslot(Interval(0, duration), channel)
for channel in channels))
channels = self.channels
self._buffer = max(chan.buffer for chan in channels) if channels else 0
@property
def name(self) -> str:
"""Name of this instruction."""
return self._name
@property
def command(self):
"""The associated command.
Returns: Command
"""
return self._command
@property
def channels(self) -> Tuple[Channel]:
"""Returns channels that this schedule uses."""
return self.timeslots.channels
@property
def timeslots(self) -> TimeslotCollection:
"""Occupied time slots by this instruction."""
return self._timeslots
@property
def start_time(self) -> int:
"""Relative begin time of this instruction."""
return self.timeslots.start_time
@property
def stop_time(self) -> int:
"""Relative end time of this instruction."""
return self.timeslots.stop_time
@property
def duration(self) -> int:
"""Duration of this instruction."""
return self.timeslots.duration
@property
def buffer(self) -> int:
"""Buffer for schedule. To be used when appending"""
return self._buffer
@property
def _children(self) -> Tuple[ScheduleComponent]:
"""Instruction has no child nodes."""
return ()
@property
def instructions(self) -> Tuple[Tuple[int, 'Instruction']]:
"""Iterable for getting instructions from Schedule tree."""
return tuple(self._instructions())
def ch_duration(self, *channels: List[Channel]) -> int:
"""Return duration of the supplied channels in this Instruction.
Args:
*channels: Supplied channels
"""
return self.timeslots.ch_duration(*channels)
def ch_start_time(self, *channels: List[Channel]) -> int:
"""Return minimum start time for supplied channels.
Args:
*channels: Supplied channels
"""
return self.timeslots.ch_start_time(*channels)
def ch_stop_time(self, *channels: List[Channel]) -> int:
"""Return maximum start time for supplied channels.
Args:
*channels: Supplied channels
"""
return self.timeslots.ch_stop_time(*channels)
def _instructions(self, time: int = 0) -> Iterable[Tuple[int, 'Instruction']]:
"""Iterable for flattening Schedule tree.
Args:
time: Shifted time of this node due to parent
Yields:
Tuple[int, ScheduleComponent]: Tuple containing time `ScheduleComponent` starts
at and the flattened `ScheduleComponent`
"""
yield (time, self)
def flatten(self) -> 'Instruction':
"""Return itself as already single instruction."""
return self
def union(self, *schedules: List[ScheduleComponent], name: Optional[str] = None) -> 'Schedule':
"""Return a new schedule which is the union of `self` and `schedule`.
Args:
            *schedules: Schedules to take the union with this Instruction.
name: Name of the new schedule. Defaults to name of self
"""
if name is None:
name = self.name
return Schedule(self, *schedules, name=name)
def shift(self: ScheduleComponent, time: int, name: Optional[str] = None) -> 'Schedule':
"""Return a new schedule shifted forward by `time`.
Args:
time: Time to shift by
name: Name of the new schedule. Defaults to name of self
"""
if name is None:
name = self.name
return Schedule((time, self), name=name)
def insert(self, start_time: int, schedule: ScheduleComponent, buffer: bool = False,
name: Optional[str] = None) -> 'Schedule':
"""Return a new schedule with `schedule` inserted within `self` at `start_time`.
Args:
            start_time: Time at which to insert the schedule
schedule: Schedule to insert
buffer: Whether to obey buffer when inserting
name: Name of the new schedule. Defaults to name of self
"""
if buffer and schedule.buffer and start_time > 0:
start_time += self.buffer
return self.union((start_time, schedule), name=name)
def append(self, schedule: ScheduleComponent, buffer: bool = True,
name: Optional[str] = None) -> 'Schedule':
"""Return a new schedule with `schedule` inserted at the maximum time over
all channels shared between `self` and `schedule`.
Args:
schedule: schedule to be appended
buffer: Whether to obey buffer when appending
name: Name of the new schedule. Defaults to name of self
"""
common_channels = set(self.channels) & set(schedule.channels)
        time = self.ch_stop_time(*common_channels)
return self.insert(time, schedule, buffer=buffer, name=name)
def draw(self, dt: float = 1, style: Optional['SchedStyle'] = None,
filename: Optional[str] = None, interp_method: Optional[Callable] = None,
scaling: float = 1, channels_to_plot: Optional[List[Channel]] = None,
plot_all: bool = False, plot_range: Optional[Tuple[float]] = None,
interactive: bool = False, table: bool = True,
label: bool = False, framechange: bool = True):
"""Plot the instruction.
Args:
dt: Time interval of samples
style: A style sheet to configure plot appearance
filename: Name required to save pulse image
interp_method: A function for interpolation
scaling: Relative visual scaling of waveform amplitudes
channels_to_plot: A list of channel names to plot
plot_all: Plot empty channels
plot_range: A tuple of time range to plot
interactive: When set true show the circuit in a new window
(this depends on the matplotlib backend being used supporting this)
table: Draw event table for supported commands
label: Label individual instructions
framechange: Add framechange indicators
Returns:
matplotlib.figure: A matplotlib figure object of the pulse schedule
"""
# pylint: disable=invalid-name, cyclic-import
from qiskit import visualization
return visualization.pulse_drawer(self, dt=dt, style=style,
filename=filename, interp_method=interp_method,
|
tkruse/rosinstall
|
test/local/test_setupfiles.py
|
Python
|
bsd-3-clause
| 14,673
| 0.002726
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import subprocess
import rosinstall.setupfiles
import wstool.helpers
from wstool.config import Config
from wstool.config_yaml import PathSpec, generate_config_yaml
from rosinstall.helpers import ROSInstallException
from wstool.helpers import ROSINSTALL_FILENAME
from wstool.multiproject_cmd import cmd_persist_config
from rosinstall.rosinstall_cmd import cmd_generate_ros_files
from test.scm_test_base import AbstractFakeRosBasedTest
from test.scm_test_base import AbstractRosinstallBaseDirTest
def has_python3():
cmd = "python3 --version"
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = p.communicate()
p.stdout.close()
    if p.returncode == 0:
return True
return False
HAS_PYTHON3 = has_python3()
if 'ROSINSTALL_SKIP_PYTHON3' in os.environ:
HAS_PYTHON3 = False
def _add_to_file(path, content):
"""Util function to append to file to get a modification"""
with open(path, 'ab') as f:
f.write(content.encode('UTF-8'))
class GenerateTest(AbstractFakeRosBasedTest):
def test_gen_setup(self):
try:
config = Config([PathSpec(os.path.join("test", "example_dirs", "ros_comm")),
PathSpec("bar")],
self.test_root_path,
None)
rosinstall.setupfiles.generate_setup(config)
self.fail('expected exception')
except ROSInstallException:
pass
        config = Config([PathSpec(self.ros_path),
PathSpec(os.path.join("test", "example_dirs", "ros_comm")),
PathSpec("bar")],
self.test_root_path,
None)
rosinstall.setupfiles.generate_setup(config)
self.assertTrue(os.path.isfile(os.path.join(self.test_root_path, 'setup.sh')))
self.assertTrue(os.path.isfile(os.path.join(self.test_root_path, 'setup.bash')))
self.assertTrue(os.path.isfile(os.path.join(self.test_root_path, 'setup.zsh')))
def test_gen_setupsh(self):
config = Config([PathSpec(self.ros_path),
PathSpec(os.path.join("test", "example_dirs", "ros_comm")),
PathSpec("bar")],
self.test_root_path,
None)
result = rosinstall.setupfiles.generate_setup_sh_text(config.get_base_path())
self.assertTrue(result.count("#!/usr/bin/env sh") == 1)
config = Config([PathSpec(self.ros_path),
PathSpec(os.path.join("test", "example_dirs", "ros_comm")),
PathSpec("bar.sh", tags=['setup-file'])],
self.test_root_path,
None)
result = rosinstall.setupfiles.generate_setup_sh_text(config.get_base_path())
self.assertTrue(result.count("#!/usr/bin/env sh") == 1, result)
def test_source_setup_sh(self):
test_folder = os.path.join(self.test_root_path, 'workspacetest')
os.makedirs(test_folder)
othersetupfile = os.path.join(test_folder, 'othersetup.sh')
testsetupfile = os.path.join(test_folder, 'testsetup.sh')
with open(othersetupfile, 'w') as fhand:
fhand.write('unset ROS_WORKSPACE')
config = Config([PathSpec(self.ros_path),
PathSpec(othersetupfile,
scmtype=None,
tags=['setup-file'])],
install_path=test_folder,
config_filename=ROSINSTALL_FILENAME)
result = rosinstall.setupfiles.generate_setup_sh_text(config.get_base_path())
self.assertTrue('export ROS_WORKSPACE=%s' % test_folder in result)
with open(testsetupfile, 'w') as fhand:
fhand.write(result)
# check that sourcing setup.sh raises error when .wstool is missing
raised = False
try:
subprocess.check_call(". %s" % testsetupfile , shell=True, env=self.new_environ)
except:
raised = True
self.assertTrue(raised, 'sourcing setup.sh with missing .wstool should fail')
# test that our otherscript really unsets ROS_WORKSPACE, else nexttest would be invalid
# using basename to check var is not set
raised = False
try:
cmd = "export ROS_WORKSPACE=foo && . %s && basename $ROS_WORKSPACE" % othersetupfile
subprocess.check_call(
cmd,
shell=True,
env=self.new_environ)
except:
raised = True
self.assertTrue(raised, 'unsetting-sh-file did not unset var')
# now test that when sourcing setup.sh that contains a
# setup-file to other sh file which unsets ROS_WORKSPACE,
# ROS_WORKSPACE is still set in the end
generate_config_yaml(config, ROSINSTALL_FILENAME, '')
self.assertTrue(os.path.isfile(os.path.join(test_folder, ROSINSTALL_FILENAME)))
# using basename to check var is set
cmd = "export ROS_WORKSPACE=foo && . %s && echo $ROS_WORKSPACE" % testsetupfile
po = subprocess.Popen(cmd, shell=True, cwd=test_folder, stdout=subprocess.PIPE)
workspace = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"').strip()
po.stdout.close()
self.assertEqual(test_folder, workspace)
def test_source_setup_sh_chain(self):
"""
Tests chaining of workspaces, which is fragile because
sourcing very similar setup.sh files recursively
"""
chain_root_path = os.path.join(self.test_root_path, 'chaintest')
os.makedirs(chain_root_path)
test_folder1 = os.path.join(chain_root_path, 'ws1')
os.makedirs(test_folder1)
test_folder2 = os.path.join(chain_root_path, 'ws2')
os.makedirs(test_folder2)
test_folder3 = os.path.join(chain_root_path, 'ws3')
os.makedirs(test_folder3)
test_folder4 = os.path.join(chain_root_path, 'ws4')
os.makedirs(test_folder4)
othersetupfile = os.path.join(chain_root_path, 'othersetup.sh')
with open(othersetupfile, 'w') as fhand:
fhand.write('export ROS_PACKAGE_PATH=/opt/ros/distro')
config1 = Config([PathSpec('ws1sub'),
PathSpec(os.path.join(test_folder2, "setup.sh"),
scmtype=None,
tags=['setup-file']),
PathSpec(os.path.join(test_fo
|
LockScreen/Backend
|
venv/lib/python2.7/site-packages/boto3/s3/transfer.py
|
Python
|
mit
| 27,752
| 0.000036
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Abstractions over S3's upload/download operations.
This module provides high level abstractions for efficient
uploads/downloads. It handles several things for the user:
* Automatically switching to multipart transfers when
a file is over a specific size threshold
* Uploading/downloading a file in parallel
* Throttling based on max bandwidth
* Progress callbacks to monitor transfers
* Retries. While botocore handles retries for streaming uploads,
it is not possible for it to handle retries for streaming
downloads. This module handles retries for both cases so
you don't need to implement any retry logic yourself.
This module has a reasonable set of defaults. It also allows you
to configure many aspects of the transfer process including:
* Multipart threshold size
* Max parallel downloads
* Max bandwidth
* Socket timeouts
* Retry amounts
There is no support for s3->s3 multipart copies at this
time.
.. _ref_s3transfer_usage:
Usage
=====
The simplest way to use this module is:
.. code-block:: python
client = boto3.client('s3', 'us-west-2')
transfer = S3Transfer(client)
# Upload /tmp/myfile to s3://bucket/key
transfer.upload_file('/tmp/myfile', 'bucket', 'key')
# Download s3://bucket/key to /tmp/myfile
transfer.download_file('bucket', 'key', '/tmp/myfile')
The ``upload_file`` and ``download_file`` methods also accept
``**kwargs``, which will be forwarded through to the corresponding
client operation. Here are a few examples using ``upload_file``::
# Making the object public
transfer.upload_file('/tmp/myfile', 'bucket', 'key',
extra_args={'ACL': 'public-read'})
# Setting metadata
transfer.upload_file('/tmp/myfile', 'bucket', 'key',
extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
# Setting content type
transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
extra_args={'ContentType': "application/json"})
The ``S3Transfer`` class also supports progress callbacks so you can
provide transfer progress to users. Both the ``upload_file`` and
``download_file`` methods take an optional ``callback`` parameter.
Here's an example of how to print a simple progress percentage
to the user:
.. code-block:: python
class ProgressPercentage(object):
def __init__(self, filename):
self._filename = filename
self._size = float(os.path.getsize(filename))
self._seen_so_far = 0
self._lock = threading.Lock()
def __call__(self, bytes_amount):
# To simplify we'll assume this is hooked up
# to a single filename.
with self._lock:
self._seen_so_far += bytes_amount
percentage = (self._seen_so_far / self._size) * 100
sys.stdout.write(
"\r%s %s / %s (%.2f%%)" % (self._filename, self._seen_so_far,
self._size, percentage))
sys.stdout.flush()
transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
# Upload /tmp/myfile to s3://bucket/key and print upload progress.
transfer.upload_file('/tmp/myfile', 'bucket', 'key',
callback=ProgressPercentage('/tmp/myfile'))
You can also provide a TransferConfig object to the S3Transfer
object that gives you more fine grained control over the
transfer. For example:
.. code-block:: python
client = boto3.client('s3', 'us-west-2')
config = TransferConfig(
multipart_threshold=8 * 1024 * 1024,
max_concurrency=10,
num_download_attempts=10,
)
transfer = S3Transfer(client, config)
    transfer.upload_file('/tmp/foo', 'bucket', 'key')
"""
import os
import math
import functools
import logging
import socket
import threading
import random
import string
import boto3
from concurrent import futures
from botocore.compat import six
from botocore.vendored.requests.packages.urllib3.exceptions import \
ReadTimeoutError
from botocore.exceptions import IncompleteReadError
import boto3.compat
from boto3.exceptions import RetriesExceededError, S3UploadFailedError
logger = logging.getLogger(__name__)
queue = six.moves.queue
MB = 1024 * 1024
SHUTDOWN_SENTINEL = object()
def random_file_extension(num_digits=8):
return ''.join(random.choice(string.hexdigits) for _ in range(num_digits))
def disable_upload_callbacks(request, operation_name, **kwargs):
if operation_name in ['PutObject', 'UploadPart'] and \
hasattr(request.body, 'disable_callback'):
request.body.disable_callback()
def enable_upload_callbacks(request, operation_name, **kwargs):
if operation_name in ['PutObject', 'UploadPart'] and \
hasattr(request.body, 'enable_callback'):
request.body.enable_callback()
class QueueShutdownError(Exception):
pass
class ReadFileChunk(object):
def __init__(self, fileobj, start_byte, chunk_size, full_file_size,
callback=None, enable_callback=True):
"""
Given a file object shown below:
|___________________________________________________|
0 | | full_file_size
|----chunk_size---|
start_byte
:type fileobj: file
:param fileobj: File like object
:type start_byte: int
:param start_byte: The first byte from which to start reading.
:type chunk_size: int
:param chunk_size: The max chunk size to read. Trying to read
            past the end of the chunk size will behave like you've
reached the end of the file.
:type full_file_size: int
:param full_file_size: The entire content length associated
with ``fileobj``.
:type callback: function(amount_read)
:param callback: Called whenever data is read from this object.
"""
self._fileobj = fileobj
self._start_byte = start_byte
self._size = self._calculate_file_size(
self._fileobj, requested_size=chunk_size,
start_byte=start_byte, actual_file_size=full_file_size)
self._fileobj.seek(self._start_byte)
self._amount_read = 0
self._callback = callback
self._callback_enabled = enable_callback
@classmethod
def from_filename(cls, filename, start_byte, chunk_size, callback=None,
enable_callback=True):
"""Convenience factory function to create from a filename.
:type start_byte: int
:param start_byte: The first byte from which to start reading.
:type chunk_size: int
:param chunk_size: The max chunk size to read. Trying to read
            past the end of the chunk size will behave like you've
reached the end of the file.
:type full_file_size: int
:param full_file_size: The entire content length associated
with ``fileobj``.
:type callback: function(amount_read)
:param callback: Called whenever data is read from this object.
:type enable_callback: bool
:param enable_callback: Indicate whether to invoke callback
during read() calls.
:rtype: ``ReadFileChunk``
:return: A new instance of ``ReadFileChunk``
"""
f = open(filename, 'rb')
file_size = os.fstat(f.fileno()).st_size
return cls(f, start_byte, chunk_size, file_size, callback,
enable_callback)
def _calculate_file_size(self, fileobj, req
|
akrherz/iem
|
scripts/dbutil/set_wfo.py
|
Python
|
mit
| 2,127
| 0
|
"""Assign a WFO to sites in the metadata tables that have no WFO set."""
from pyiem.util import get_dbconn, logger
LOG = logger()
def main():
"""Go Main"""
mesosite = get_dbconn("mesosite")
postgis = get_dbconn("postgis")
mcursor = mesosite.cursor()
mcursor2 = mesosite.cursor()
pcursor = postgis.cursor()
# Find sites we need to check on
mcursor.execute(
"select s.id, s.iemid, s.network, st_x(geom) as lon, "
"st_y(geom) as lat from stations s WHERE "
"(s.wfo IS NULL or s.wfo = '') and s.country = 'US'"
)
for row in mcursor:
sid = row[0]
iemid = row[1]
network = row[2]
        # Look for the WFO whose county warning area (CWA) contains this site
pcursor.execute(
"select wfo from cwa WHERE "
"ST_Contains(the_geom, "
" ST_SetSrid(ST_GeomFromEWKT('POINT(%s %s)'), 4326)) ",
(row[3], row[4]),
)
if pcursor.rowcount == 0:
LOG.info(
"IEMID: %s ID: %s NETWORK: %s not within CWAs, calc dist",
iemid,
sid,
network,
)
pcursor.execute(
"SELECT wfo, ST_Distance(the_geom, "
" ST_SetSrid(ST_GeomFromEWKT('POINT(%s %s)'), 4326)) as dist "
"from cwa ORDER by dist ASC LIMIT 1",
(row[3], row[4]),
)
wfo, dist = pcursor.fetchone()
if dist > 3:
LOG.info(
" closest CWA %s found >3 degrees away %.2f",
wfo,
dist,
)
continue
else:
row2 = pcursor.fetchone()
wfo = row2[0][:3]
LOG.info(
"Assinging WFO: %s to IEMID: %s ID: %s NETWORK: %s",
wfo,
iemid,
sid,
network,
)
mcursor2.execute(
"UPDATE stations SET wfo = %s WHERE iemid = %s", (wfo, iemid)
)
mcursor.close()
mcursor2.close()
mesosite.commit()
mesosite.close()
if __name__ == "__main__":
main()
|
nsdont/dotfiles
|
bin/omnifocus_export_dayone.py
|
Python
|
mit
| 6,596
| 0
|
#!/usr/local/bin/python3
"""OmniFocus export to Dayone.
Usage:
omnifocus_export_dayone.py
omnifocus_export_dayone.py <date> [--show]
omnifocus_export_dayone.py (-s | --show)
Options:
-h --help Show this screen.
--version Show version.
-s --show Only echo to screen.
"""
import sys
import sqlite3
import uuid
import logging
import logging.handlers
from os.path import expanduser
from datetime import datetime, timedelta
from pync import Notifier
from docopt import docopt
OMNIFOCUS_DB_PATH = expanduser('~/Library/Containers/com.omnigroup.OmniFocus2/'
'Data/Library/Caches/com.omnigroup.OmniFocus2/'
'OmniFocusDatabase2')
DAYONE2_DB_PATH = expanduser('~/Library/Group Containers/5U8NS4GX82.dayoneapp2'
'/Data/Documents/DayOne.sqlite')
LOG = logging.getLogger('omnifocus_export')
TAGS_ENUM = {
'Daily': 1,
'Monthly': 2,
'Weekly': 3,
'Yearly': 4
}
def setup_logging():
""" 设置日志 """
log_file = '/tmp/omnifocus_export.log'
fmt = '%(asctime)s - %(message)s'
formatter = logging.Formatter(fmt)
handler = logging.handlers.RotatingFileHandler(
log_file, maxBytes=1024 * 1024, backupCount=5)
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
LOG.addHandler(handler)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
console_handler.setLevel(logging.DEBUG)
LOG.addHandler(console_handler)
LOG.setLevel(logging.DEBUG)
def generate_md(data, tag):
""" 生成具体 markdown """
project_fmt = '* {}'
item_fmt = ' * {}'
plan_summer = ''
base_fmt = '### OmniFocus\n{tasks_content}\n{plan_summer}\n### Life\n'
tasks = []
for project in data:
tasks.append(project_fmt.format(project))
tasks.extend([item_fmt.format(i) for i in data[project]])
tasks_content = '\n'.join(tasks)
if tag in {'Weekly', 'Monthly'}:
plan_summer = '\n### {} Plan\n\n#### OKR Judge\n'
plan_summer = plan_summer.format(tag)
return base_fmt.format(tasks_content=tasks_content,
plan_summer=plan_summer)
def export_to_dayone(md_content, now, tag):
""" 将文件写入到 dayone 的数据库 """
conn = sqlite3.connect(DAYONE2_DB_PATH)
cur = conn.cursor()
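    # Day One's ZCREATIONDATE column holds seconds since 2001-01-01 (Apple's
    # Core Data reference date), hence the offset computed below.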
start_at = datetime(2001, 1, 1)
timestamp = (now - start_at).total_seconds()
zuuid = str(uuid.uuid4().hex).upper()
cuuid = str(uuid.uuid4()).upper()
sql = """
INSERT INTO ZENTRY (
Z_ENT, Z_OPT, ZSTARRED, ZJOURNAL, ZCREATIONDATE, ZMODIFIEDDATE,
ZCHANGEID, ZTEXT, ZUUID
) VALUES (
2, 2, 0, 2, {}, {}, '{}', '{}', '{}');
"""
cur.execute(sql.format(timestamp, timestamp, cuuid, md_content, zuuid))
conn.commit()
sql = """
select Z_PK from ZENTRY where ZUUID = '{}' order by ZCREATIONDATE desc;
"""
ids = [i for i in cur.execute(sql.format(zuuid))]
sql = """
INSERT INTO Z_2TAGS (
Z_2ENTRIES, Z_20TAGS
) VALUES ('{}', '{}');
"""
conn.execute(sql.format(ids[0][0], TAGS_ENUM[tag]))
conn.commit()
conn.close()
def query_and_export_data(start_ts, end_ts, now, tag='Daily', only_show=False):
""" 读取数据库 """
conn = sqlite3.connect(OMNIFOCUS_DB_PATH)
cur = conn.cursor()
sql = """
select t.name as 'task_name', p.name as 'project_name' from task as t
left join task as p on p.projectInfo = t.containingProjectInfo
where
t.projectinfo is null and
t.dateCompleted >= {} and
t.dateCompleted < {} and
t.containingProjectInfo in (
select projectInfo from task where projectInfo is not NULL and
name not like "%Ritual%" and name not like "%REVIEW%"
);
"""
data = {}
for title, project in cur.execute(sql.format(start_ts, end_ts)):
if project not in data:
data[project] = list()
data[project].append(title)
sql = """
select name from task
where
projectinfo is null and
ininbox = 1 and
dateCompleted >= {} and
dateCompleted < {};
"""
data['Inbox'] = []
for title, in cur.execute(sql.format(start_ts, end_ts)):
data['Inbox'].append(title)
if not len(data['Inbox']):
data.pop('Inbox')
md_content = generate_md(data, tag)
if only_show:
print(md_content)
else:
export_to_dayone(md_content, now, tag)
def main(args):
""" 启动入口 """
if args['<date>']:
try:
now = datetime.strptime(args['<date>'], '%Y.%m.%d')
except ValueError:
            print('Invalid date string format, expected e.g.: 2015.11.17')
sys.exit(1)
else:
now = datetime.utcnow()
only_show = args['--show']
base_timestamp = datetime(2001, 1, 1).timestamp()
today = datetime(now.year, now.month, now.day)
tomorrow = today + timedelta(1)
if now.weekday() == 6:
        # Generate the weekly report
LOG.info('Start generate weekly...')
today_timestamp = (today - timedelta(6)).timestamp() - base_timestamp
tomorrow_timestamp = tomorrow.timestamp() - base_timestamp
query_and_export_data(today_timestamp, tomorrow_timestamp, now,
'Weekly', only_show=only_show)
LOG.info('Finish generate weekly...')
tomorrow = now + timedelta(days=1)
if tomorrow.month > now.month or (now.month == 12 and tomorrow.month == 1):
        # Generate the monthly report
        LOG.info('Start generate monthly...')
        today_timestamp = (today - timedelta(30)).timestamp() - base_timestamp
tomorrow_timestamp = tomorrow.timestamp() - base_timestamp
query_and_export_data(today_timestamp, tomorrow_timestamp, now,
'Monthly', only_show=only_show)
LOG.info('Finish generate monthly...')
    # Generate the daily report
LOG.info('Start generate daily...')
today_timestamp = today.timestamp() - base_timestamp
tomorrow_timestamp = tomorrow.timestamp() - base_timestamp
query_and_export_data(today_timestamp, tomorrow_timestamp, now, 'Daily',
only_show=only_show)
LOG.info('Finish generate daily...')
if not only_show:
Notifier.notify('Export DayOne Done.', title='Omnifocus Statistics')
if __name__ == '__main__':
setup_logging()
LOG.info('--------START------')
main(docopt(__doc__, version='0.1.2'))
LOG.info('--------END--------')
|
gvizquel/comunidad
|
comunidad/1settings.py
|
Python
|
gpl-3.0
| 3,105
| 0.001288
|
"""
Django settings for comunidad project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kwc%yj7hs827bizff+hjn68j6bw7w=1m%(t-18+t&#ehhibt()'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'comunidad.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'comunidad.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
tjnapster555/django-edu
|
djangoedu/core/generic_views.py
|
Python
|
mit
| 264
| 0.015152
|
"""
=============
Generic Views
=============
Class based helper views.
"""
class GenericManyToMany(object):
"""Generic view to edit many to many relations with extra fields."""
left_table = None
right_table = None
allow_multiple = True
| |
mapix/utknows
|
examples/test_world.py
|
Python
|
bsd-3-clause
| 860
| 0.004651
|
# -*- coding:utf-8 -*-
import random
import unittest
class TestSequenceFunctionsWorld(unittest.TestCase):
def setUp(self):
self.seq = range(10)
def test_shuffle(self):
# make sure the shuffled sequence does not lose any elements
        random.shuffle(self.seq)
self.seq.sort()
self.assertEqual(self.seq, range(10))
# should raise an exception for an immutable sequence
self.assertRaises(TypeError, random.shuffle, (1,2,3))
def test_choice(self):
element = random.choice(self.seq)
self.assertTrue(element in self.seq)
def test_sample(self):
with self.assertRaises(ValueError):
random.sample(self.seq, 20)
for element in random.sample(self.seq, 5):
self.assertTrue(element in self.seq)
if __name__ == '__main__':
unittest.main()
|
daviskirk/climatecontrol
|
climatecontrol/ext/pydantic.py
|
Python
|
mit
| 2,447
| 0.001226
|
"""Climatecontrol extension for using pydantic schemas as source."""
from typing import Generic, Mapping, Type, TypeVar
from pydantic import BaseModel
from climatecontrol.core import Climate as BaseClimate
from climatecontrol.core import SettingsItem as BaseSettingsItem
from climatecontrol.fragment import FragmentPath
T = TypeVar("T", bound=BaseModel)
class SettingsItem(BaseSettingsItem):
@classmethod
def _self_is_mutable(cls, value) -> bool:
return super()._self_is_mutable(value) or isinstance(value, BaseModel)
class Climate(BaseClimate, Generic[T]):
"""Climate settings manager for dataclasses."""
def __init__(self, *args, model: Type[T], **kwargs):
"""Initialize pydantic climate object.
        Uses a pydantic model as a schema to initialize settings and check types.
Args:
            *args, **kwargs: See :class:`climatecontrol.Climate`
model: Additional argument specific to the model to use for the settings.
Examples:
>>> from climatecontrol.ext.pydantic import Climate
>>>
>>> class SettingsSubSchema(BaseModel):
... d: int = 4
...
>>> class SettingsSchema(BaseModel):
        ...     a: str = 'test'
... b: bool = False
... c: SettingsSubSchema = SettingsSubSchema()
...
>>> climate = Climate(model=SettingsSchema)
>>> # defaults are initialized automatically:
>>> climate.settings.a
'test'
>>> climate.settings.c.d
4
>>> # Types are checked if given
>>> climate.update({'c': {'d': 'boom!'}})
Traceback (most recent call last):
...
pydantic.error_wrappers.ValidationError: 1 validation error for SettingsSchema
c -> d
value is not a valid integer (type=type_error.integer)
See Also:
:module:`pydantic`: Used to initialize and check settings.
"""
self.model = model
super().__init__(*args, **kwargs)
@property
def settings(self) -> T:
self.ensure_initialized()
return SettingsItem(self._data, self, FragmentPath())
def parse(self, data: Mapping) -> T:
"""Parse data into the provided dataclass."""
data = super().parse(data)
obj: T = self.model(**data)
return obj
|
mehulsbhatt/easyengine
|
ee/cli/plugins/clean.py
|
Python
|
mit
| 4,861
| 0.000823
|
"""Clean Plugin for EasyEngine."""
from ee.core.shellexec import EEShellExec
from ee.core.aptget import EEAptGet
from ee.core.services import EEService
from ee.core.logging import Log
from cement.core.controller import CementBaseController, expose
from cement.core import handler, hook
import os
import urllib.request
def ee_clean_hook(app):
# do something with the ``app`` object here.
pass
class EECleanController(CementBaseController):
class Meta:
label = 'clean'
stacked_on = 'base'
stacked_type = 'nested'
        description = ('Clean NGINX FastCGI cache, Opcache, Memcache')
arguments = [
(['--all'],
dict(help='Clean all cache', action='store_true')),
(['--fastcgi'],
dict(help='Clean FastCGI cache', action='store_true')),
(['--memcache'],
dict(help='Clean MemCache', action='store_true')),
(['--opcache'],
dict(help='Clean OpCache', action='store_true')),
(['--pagespeed'],
dict(help='Clean Pagespeed Cache', action='store_true')),
(['--redis'],
dict(help='Clean Redis Cache', action='store_true')),
]
usage = "ee clean [options]"
@expose(hide=True)
def default(self):
if (not (self.app.pargs.all or self.app.pargs.fastcgi or
self.app.pargs.memcache or self.app.pargs.opcache or
self.app.pargs.pagespeed or self.app.pargs.redis)):
self.clean_fastcgi()
if self.app.pargs.all:
self.clean_memcache()
self.clean_fastcgi()
self.clean_opcache()
self.clean_redis()
self.clean_pagespeed()
if self.app.pargs.fastcgi:
self.clean_fastcgi()
if self.app.pargs.memcache:
self.clean_memcache()
if self.app.pargs.opcache:
            self.clean_opcache()
if self.app.pargs.pagespeed:
self.clean_pagespeed()
if self.app.pargs.redis:
self.clean_redis()
@expose(hide=True)
def clean_redis(self):
"""This function clears Redis cache"""
if(EEAptGet.is_installed(self, "redis-server")):
Log.info(self, "Cleaning Redis cache")
EEShellExec.cmd_exec(self, "redis-cli flushall")
else:
Log.info(self, "Redis is
|
not installed")
@expose(hide=True)
def clean_memcache(self):
"""This function Clears memcache """
try:
if(EEAptGet.is_installed(self, "memcached")):
EEService.restart_service(self, "memcached")
Log.info(self, "Cleaning MemCache")
else:
Log.info(self, "Memcache not installed")
except Exception as e:
Log.debug(self, "{0}".format(e))
Log.error(self, "Unable to restart Memcached")
@expose(hide=True)
def clean_fastcgi(self):
"""This function clears Fastcgi cache"""
if(os.path.isdir("/var/run/nginx-cache")):
Log.info(self, "Cleaning NGINX FastCGI cache")
EEShellExec.cmd_exec(self, "rm -rf /var/run/nginx-cache/*")
else:
Log.error(self, "Unable to clean FastCGI cache")
@expose(hide=True)
def clean_opcache(self):
"""This function clears opcache"""
try:
Log.info(self, "Cleaning opcache")
wp = urllib.request.urlopen(" https://127.0.0.1:22222/cache"
"/opcache/opgui.php?page=reset").read()
except Exception as e:
Log.debug(self, "{0}".format(e))
Log.debug(self, "Unable hit url, "
" https://127.0.0.1:22222/cache/opcache/opgui.php?page=reset,"
" please check you have admin tools installed")
Log.debug(self, "please check you have admin tools installed,"
" or install them with `ee stack install --admin`")
Log.error(self, "Unable to clean opcache")
@expose(hide=True)
def clean_pagespeed(self):
"""This function clears Pagespeed cache"""
if(os.path.isdir("/var/ngx_pagespeed_cache")):
Log.info(self, "Cleaning PageSpeed cache")
EEShellExec.cmd_exec(self, "rm -rf /var/ngx_pagespeed_cache/*")
else:
Log.debug(self, "/var/ngx_pagespeed_cache does not exist,"
" so cache not cleared")
Log.error(self, "Unable to clean pagespeed cache")
def load(app):
# register the plugin class.. this only happens if the plugin is enabled
handler.register(EECleanController)
# register a hook (function) to run after arguments are parsed.
hook.register('post_argument_parsing', ee_clean_hook)
|
Endika/account-financial-tools
|
account_move_batch_validate/account.py
|
Python
|
agpl-3.0
| 6,169
| 0
|
# -*- coding: utf-8 -*-
###############################################################################
# #
# Author: Leonardo Pistone
# Copyright 2014 Camptocamp SA
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
"""Accounting customisation for delayed posting."""
import logging
from openerp.osv import fields, orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
try:
from openerp.addons.connector.queue.job import job
from openerp.addons.connector.session import ConnectorSession
from openerp.addons.connector.queue.job import OpenERPJobStorage
except ImportError:
_logger.debug('Can not `import connector`.')
import functools
def empty_decorator_factory(*argv, **kwargs):
return functools.partial
job = empty_decorator_factory
# do a massive write on account moves BLOCK_SIZE at a time
BLOCK_SIZE = 1000
class account_move(orm.Model):
"""We modify the account move to allow delayed posting."""
_name = 'account.move'
_inherit = 'account.move'
_columns = {
'to_post': fields.boolean(
'Posting Requested',
readonly=True,
help='Check this box to mark the move for batch posting'
),
'post_job_uuid': fields.char(
'UUID of the Job to approve this move'
),
}
def _delay_post_marked(self, cr, uid, eta=None, context=None):
"""Create a job for every move marked for posting.
If some moves already have a job, they are skipped.
"""
if context is None:
context = {}
session = ConnectorSession(cr, uid, context=context)
move_ids = self.search(cr, uid, [
('to_post', '=', True),
('post_job_uuid', '=', False),
('state', '=', 'draft'),
], context=context)
name = self._name
# maybe not creating too many dictionaries will make us a bit faster
values = {'post_job_uuid': None}
_logger.info(
u'{0} jobs for posting moves have been created.'.format(
len(move_ids)
)
)
for move_id in move_ids:
job_uuid = validate_one_move.delay(session, name, move_id,
eta=eta)
values['post_job_uuid'] = job_uuid
self.write(cr, uid, [move_id], values)
cr.commit()
def _cancel_jobs(self, cr, uid, context=None):
"""Find moves where the mark has been removed and cancel the jobs.
For the moves that are posted already it's too late: we skip them.
"""
if context is None:
context = {}
session = ConnectorSession(cr, uid, context=context)
storage = OpenERPJobStorage(session)
move_ids = self.search(cr, uid, [
('to_post', '=', False),
('post_job_uuid', '!=', False),
('state', '=', 'draft'),
], context=context)
for move in self.browse(cr, uid, move_ids, context=context):
job_rec = storage.load(move.post_job_uuid)
if job_rec.state in (u'pending', u'enqueued'):
job_rec.set_done(result=_(
u'Task set to Done because the user unmarked the move'
))
storage.store(job_rec)
def mark_for_posting(self, cr, uid, move_ids, eta=None, context=None):
"""Mark a list of moves for delayed posting, and enqueue the jobs."""
if context is None:
context = {}
# For massive amounts of moves, this becomes necessary to avoid
# MemoryError's
_logger.info(
u'{0} moves marked for posting.'.format(len(move_ids))
)
for start in xrange(0, len(move_ids), BLOCK_SIZE):
self.write(
cr,
uid,
move_ids[start:start + BLOCK_SIZE],
{'to_post': True},
context=context)
# users like to see the flag sooner rather than later
cr.commit()
self._delay_post_marked(cr, uid, eta=eta, context=context)
def unmark_for_posting(self, cr, uid, move_ids, context=None):
"""Unmark moves for delayed posting, and cancel the jobs."""
if context is None:
context = {}
self.write(cr, uid, move_ids, {'to_post': False}, context=context)
self._cancel_jobs(cr, uid, context=context)
@job(default_channel='root.account_move_batch_validate')
def validate_one_move(session, model_name, move_id):
"""Validate a move, and leave the job reference in place."""
move_pool = session.pool['account.move']
    if move_pool.exists(session.cr, session.uid, [move_id]):
move_pool.button_validate(
session.cr,
session.uid,
[move_id]
)
else:
return _(u'Nothing to do because the record has been deleted')
|
Buggaarde/youtube-dl
|
youtube_dl/extractor/kontrtube.py
|
Python
|
unlicense
| 2,732
| 0.002276
|
# encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_duration,
)
class KontrTubeIE(InfoExtractor):
IE_NAME = 'kontrtube'
IE_DESC = 'KontrTube.ru - Труба зовёт'
_VALID_URL = r'http://(?:www\.)?kontrtube\.ru/videos/(?P<id>\d+)/(?P<display_id>[^/]+)/'
_TEST = {
        'url': 'http://www.kontrtube.ru/videos/2678/nad-olimpiyskoy-derevney-v-sochi-podnyat-rossiyskiy-flag/',
        'md5': '975a991a4926c9a85f383a736a2e6b80',
'info_dict': {
'id': '2678',
'display_id': 'nad-olimpiyskoy-derevney-v-sochi-podnyat-rossiyskiy-flag',
'ext': 'mp4',
'title': 'Над олимпийской деревней в Сочи поднят российский флаг',
'description': 'md5:80edc4c613d5887ae8ccf1d59432be41',
'thumbnail': 'http://www.kontrtube.ru/contents/videos_screenshots/2000/2678/preview.mp4.jpg',
'duration': 270,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
webpage = self._download_webpage(
url, display_id, 'Downloading page')
video_url = self._search_regex(
r"video_url\s*:\s*'(.+?)/?',", webpage, 'video URL')
thumbnail = self._search_regex(
r"preview_url\s*:\s*'(.+?)/?',", webpage, 'thumbnail', fatal=False)
title = self._html_search_regex(
r'(?s)<h2>(.+?)</h2>', webpage, 'title')
description = self._html_search_meta(
'description', webpage, 'description')
duration = self._search_regex(
r'Длительность: <em>([^<]+)</em>', webpage, 'duration', fatal=False)
if duration:
duration = parse_duration(duration.replace('мин', 'min').replace('сек', 'sec'))
view_count = self._search_regex(
r'Просмотров: <em>([^<]+)</em>',
webpage, 'view count', fatal=False)
if view_count:
view_count = int_or_none(view_count.replace(' ', ''))
comment_count = int_or_none(self._search_regex(
r'Комментарии \((\d+)\)<', webpage, ' comment count', fatal=False))
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'thumbnail': thumbnail,
'title': title,
'description': description,
'duration': duration,
'view_count': int_or_none(view_count),
'comment_count': int_or_none(comment_count),
}
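# A quick standalone check of how _VALID_URL splits a page URL into the `id` and
# `display_id` groups consumed by _real_extract; the URL is taken from the _TEST
# block above, and this snippet is illustrative, not part of the original extractor.
_example_mobj = re.match(
    KontrTubeIE._VALID_URL,
    'http://www.kontrtube.ru/videos/2678/'
    'nad-olimpiyskoy-derevney-v-sochi-podnyat-rossiyskiy-flag/')
assert _example_mobj.group('id') == '2678'
assert _example_mobj.group('display_id') == 'nad-olimpiyskoy-derevney-v-sochi-podnyat-rossiyskiy-flag'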
|
euccas/CodingPuzzles-Python
|
leet/source/pickone/recordered_power_of_2.py
|
Python
|
mit
| 690
| 0.010145
|
class Solution:
    def reorderedPowerOf2(self, N):
        """
        :type N: int
        :rtype: bool
        """
        if N is None or N <= 0:
            return False
        # N can be rearranged into a power of 2 exactly when its decimal digits
        # are a permutation of the digits of some power of 2 of the same length.
        digits = sorted(str(N))
        return any(sorted(str(1 << i)) == digits for i in range(31))
if __name__ == "__main__":
sln = Solution()
#res = sln.reorderedPowerOf2(10)
#assert(res == False)
res = sln.reorderedPowerOf2(46)
assert(res == True)
|
|
RIPE-NCC/ripe.atlas.sagan
|
ripe/atlas/sagan/http.py
|
Python
|
gpl-3.0
| 3,498
| 0
|
# Copyright (c) 2016 RIPE NCC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .base import Result, ParsingDict
class Response(ParsingDict):
def __init__(self, data, **kwargs):
ParsingDict.__init__(self, **kwargs)
self.raw_data = data
self.af = self.ensure("af", int)
self.body_size = self.ensure("bsize", int)
self.head_size = self.ensure("hsize", int)
self.destination_address = self.ensure("dst_addr", str)
self.source_address = self.ensure("src_addr", str)
self.code = self.ensure("res", int)
self.response_time = self.ensure("rt", float)
self.version = self.ensure("ver", str)
if not self.destination_address:
self.destination_address = self.ensure(
"addr", str, self.destination_address)
if not self.source_address:
self.source_address = self.ensure(
"srcaddr", str, self.source_address)
if not self.code:
self._handle_malformation("No response code available")
error = self.ensure("err", str)
if error:
self._handle_error(error)
class HttpResult(Result):
METHOD_GET = "GET"
METHOD_POST = "POST"
METHOD_PUT = "PUT"
METHOD_DELETE = "DELETE"
METHOD_HEAD = "HEAD"
METHODS = {
METHOD_GET: "GET",
METHOD_POST: "POST",
METHOD_PUT: "PUT",
METHOD_DELETE: "DELETE",
METHOD_HEAD: "HEAD"
}
def __init__(self, data, **kwargs):
Result.__init__(self, data, **kwargs)
self.uri = self.ensure("uri", str)
self.method = None
self.responses = []
if "result" not in self.raw_data:
self._handle_malformation("No result value found")
return
if isinstance(self.raw_data["result"], list):
# All modern results
for response in self.raw_data["result"]:
self.responses.append(Response(response, **kwargs))
if self.responses:
method = self.raw_data["result"][0].get(
"method",
self.raw_data["result"][0].get("mode") # Firmware == 4300
)
if method:
method = method.replace("4", "").replace("6", "")
if method in self.METHODS.keys():
self.method = self.METHODS[method]
else:
# Firmware <= 1
response = self.raw_data["result"].split(" ")
self.method = response[0].replace("4", "").replace("6", "")
self.responses.append(Response({
"dst_addr": response[1],
"rt": float(response[2]) * 1000,
"res": int(response[3]),
"hsize": int(response[4]),
"bsize": int(response[5]),
}))
__all__ = (
    "HttpResult",
)
|
rahulunair/nova
|
nova/tests/functional/wsgi/test_services.py
|
Python
|
apache-2.0
| 19,683
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os_resource_classes as orc
import os_traits
import six
from nova import context as nova_context
from nova import exception
from nova import objects
from nova.tests.functional.api import client as api_client
from nova.tests.functional import integrated_helpers
from nova import utils
class TestServicesAPI(integrated_helpers.ProviderUsageBaseTestCase):
compute_driver = 'fake.SmallFakeDriver'
def test_compute_service_delete_ensure_related_cleanup(self):
"""Tests deleting a compute service and the related cleanup associated
with that like the compute_nodes table entry, removing the host
from any aggregates, the host mapping in the API DB and the associated
resource provider in Placement.
"""
compute = self._start_compute('host1')
# Make sure our compute host is represented as expected.
services = self.admin_api.get_services(binary='nova-compute')
self.assertEqual(1, len(services))
service = services[0]
# Now create a host aggregate and add our host to it.
aggregate = self.admin_api.post_aggregate(
{'aggregate': {'name': 'agg1'}})
self.admin_api.add_host_to_aggregate(aggregate['id'], service['host'])
# Make sure the host is in the aggregate.
aggregate = self.admin_api.api_get(
'/os-aggregates/%s' % aggregate['id']).body['aggregate']
self.assertEqual([service['host']], aggregate['hosts'])
rp_uuid = self._get_provider_uuid_by_host(service['host'])
# We'll know there is a host mapping implicitly if os-hypervisors
# returned something in _get_provider_uuid_by_host, but let's also
# make sure the host mapping is there like we expect.
ctxt = nova_context.get_admin_context()
objects.HostMapping.get_by_host(ctxt, service['host'])
# Make sure there is a resource provider for that compute node based
# on the uuid.
resp = self.placement_api.get('/resource_providers/%s' % rp_uuid)
self.assertEqual(200, resp.status)
# Make sure the resource provider has inventory.
inventories = self._get_provider_inventory(rp_uuid)
# Expect a minimal set of inventory for the fake virt driver.
for resource_class in [orc.VCPU, orc.MEMORY_MB, orc.DISK_GB]:
self.assertIn(resource_class, inventories)
# Now create a server so that the resource provider has some allocation
# records.
flavor = self.api.get_flavors()[0]
server = self._boot_and_check_allocations(flavor, service['host'])
# Now the fun part, delete the compute service and make sure related
# resources are cleaned up, like the compute node, host mapping, and
# resource provider. We have to first stop the compute service so
# it doesn't recreate the compute node during the
# update_available_resource periodic task.
self.admin_api.put_service(service['id'], {'forced_down': True})
compute.stop()
# The first attempt should fail since there is an instance on the
# compute host.
ex = self.assertRaises(api_client.OpenStackApiException,
self.admin_api.api_delete,
'/os-services/%s' % service['id'])
self.assertIn('Unable to delete compute service that is hosting '
'instances.', six.text_type(ex))
self.assertEqual(409, ex.response.status_code)
# Now delete the instance and wait for it to be gone.
self._delete_and_check_allocations(server)
# Now we can delete the service.
self.admin_api.api_delete('/os-services/%s' % service['id'])
# Make sure the service is deleted.
services = self.admin_api.get_services(binary='nova-compute')
        self.assertEqual(0, len(services))
# Make sure the host was removed from the aggregate.
aggregate = self.admin_api.api_get(
'/os-aggregates/%s' % aggregate['id']).body['aggregate']
self.assertEqual([], aggregate['hosts'])
# Trying to get the hypervisor should result in a 404.
self.admin_api.api_get(
'os-hypervisors?hypervisor_hostname_pattern=%s' % service['host'],
check_response_status=[404])
# The host mapping should also be gone.
self.assertRaises(exception.HostMappingNotFound,
objects.HostMapping.get_by_host,
ctxt, service['host'])
# And finally, the resource provider should also be gone. The API
# will perform a cascading delete of the resource provider inventory
# and allocation information.
resp = self.placement_api.get('/resource_providers/%s' % rp_uuid)
self.assertEqual(404, resp.status)
def test_evacuate_then_delete_compute_service(self):
"""Tests a scenario where a server is created on a host, the host
goes down, the server is evacuated to another host, and then the
source host compute service is deleted. After that the deleted
compute service is restarted. Related placement resources are checked
throughout.
"""
# Create our source host that we will evacuate *from* later.
host1 = self._start_compute('host1')
# Create a server which will go on host1 since it is the only host.
flavor = self.api.get_flavors()[0]
server = self._boot_and_check_allocations(flavor, 'host1')
# Get the compute service record for host1 so we can manage it.
service = self.admin_api.get_services(
binary='nova-compute', host='host1')[0]
# Get the corresponding resource provider uuid for host1.
rp_uuid = self._get_provider_uuid_by_host(service['host'])
# Make sure there is a resource provider for that compute node based
# on the uuid.
resp = self.placement_api.get('/resource_providers/%s' % rp_uuid)
self.assertEqual(200, resp.status)
# Down the compute service for host1 so we can evacuate from it.
self.admin_api.put_service(service['id'], {'forced_down': True})
host1.stop()
# Start another host and trigger the server evacuate to that host.
self._start_compute('host2')
self.admin_api.post_server_action(server['id'], {'evacuate': {}})
# The host does not change until after the status is changed to ACTIVE
# so wait for both parameters.
self._wait_for_server_parameter(server, {
'status': 'ACTIVE',
'OS-EXT-SRV-ATTR:host': 'host2'})
# Delete the compute service for host1 and check the related
# placement resources for that host.
self.admin_api.api_delete('/os-services/%s' % service['id'])
# Make sure the service is gone.
services = self.admin_api.get_services(
binary='nova-compute', host='host1')
self.assertEqual(0, len(services), services)
# FIXME(mriedem): This is bug 1829479 where the compute service is
# deleted but the resource provider is not because there are still
# allocations against the provider from the evacuated server.
resp = self.placement_api.get('/resource_providers/%s' % rp_uuid)
self.assertEqual(200, resp.status)
self.assertFlavorMatchesUsage(rp_uuid, flavor)
# Try to restart the host1 compute service to create a new resource
# provider.
self.restart_compute_service(host1)
# FIXME(mriedem): This is bug 1817833 where restar
|
aodag/asbool
|
tests/test_it.py
|
Python
|
mit
| 2,013
| 0
|
import pytest
class TestAsBoolConverter(object):
@pytest.fixture
def target(self):
from asbool.converter import AsBoolConverter
return AsBoolConverter
@pytest.mark.parametrize(
"true_values, false_values, input, expected",
[
(['t'], ['f'], 't', True),
(['t'], ['f'], 'f', False),
(['true'], ['false'], 'true', True),
(['true'], ['false'], 'false', False),
(['true'], ['false'], 'TRUE', True),
(['true'], ['false'], 'FALSE', False),
(['t', 'true'], ['f', 'false'], 'true', True),
(['t', 'true'], ['f', 'false'], 'false', False),
],
)
def test_call(self, target,
true_values, false_values, input, expected):
converter = target(true_values, false_values)
result = converter(input)
assert result == expected
def test_value_error(self, target):
converter = target(["t"], ["f"])
with pytest.raises(ValueError) as excinfo:
converter("TRUE")
assert "TRUE" in str(excinfo.value)
@pytest.mark.parametrize(
"true_values, false_values, input, expected",
[
(['t'], ['f'], 0, False),
(['t'], ['f'], [], False),
(['t'], ['f'], 1, True),
(['t'], ['f'], [0], True),
],
)
def test_non_string(self, target,
true_values, false_values, input, expected):
converter = target(true_values, false_values)
        result = converter(input)
assert result == expected
@pytest.mark.parametrize(
"input, expected",
[
        ('TRUE', True),
('t', True),
('y', True),
('yes', True),
(1, True),
('FALSE', False),
('f', False),
('n', False),
('no', False),
(0, False),
]
)
def test_asbool(input, expected):
from asbool import asbool
result = asbool(input)
assert result == expected
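# A minimal sketch (not the library's actual implementation, which lives in
# asbool.converter) of a converter with the behaviour these tests exercise:
# case-insensitive matching of strings against the configured true/false values,
# plain truthiness for non-strings, and ValueError for unrecognized strings.
class AsBoolConverterSketch(object):
    def __init__(self, true_values, false_values):
        self.true_values = set(true_values)
        self.false_values = set(false_values)

    def __call__(self, value):
        if not isinstance(value, str):
            # Non-strings fall back to ordinary Python truthiness.
            return bool(value)
        lowered = value.lower()
        if lowered in self.true_values:
            return True
        if lowered in self.false_values:
            return False
        raise ValueError('unrecognized boolean value: %r' % (value,))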
|
sdpython/pyquickhelper
|
_unittests/ut_pycode/test_missing_function_pycode.py
|
Python
|
mit
| 3,947
| 0.000507
|
"""
@brief test log(time=8s)
@author Xavier Dupre
"""
import sys
import os
import unittest
import shutil
from contextlib import redirect_stdout
from io import StringIO
from pyquickhelper.pycode import ExtTestCase
from pyquickhelper.pycode import process_standard_options_for_setup_help, get_temp_folder
from pyquickhelper.texthelper import compare_module_version
from pyquickhelper.texthelper.version_helper import numeric_module_version
from pyquickhelper.pycode.setup_helper import (
clean_notebooks_for_numbers, hash_list, process_argv_for_unittest,
process_standard_options_for_setup)
class TestMissingFunctionsPycode(ExtTestCase):
def test_process_standard_options_for_setup_help(self):
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup_help('--help-commands')
self.assertIn('Commands processed by pyquickhelper:', f.getvalue())
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup_help(['--help', 'unittests'])
self.assertIn('-f file', f.getvalue())
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup_help(['--help', 'clean_space'])
self.assertIn('clean unnecessary spaces', f.getvalue())
@unittest.skipIf(sys.platform != 'win32', reason="not available")
def test_process_standard_options_for_setup(self):
temp = get_temp_folder(
__file__, "temp_process_standard_options_for_setup")
os.mkdir(os.path.join(temp, '_unittests'))
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup(
['build_script'], file_or_folder=temp, project_var_name="debug",
fLOG=print)
text = f.getvalue()
self.assertIn('[process_standard_options_for_setup]', text)
self.assertExists(os.path.join(temp, 'bin'))
def test_numeric_module_version(self):
self.assertEqual(numeric_module_version((4, 5)), (4, 5))
self.assertEqual(numeric_module_version("4.5.e"), (4, 5, 'e'))
self.assertEqual(compare_module_version(("4.5.e"), (4, 5, 'e')), 0)
self.assertEqual(compare_module_version(("4.5.e"), None), -1)
self.assertEqual(compare_module_version(None, ("4.5.e")), 1)
self.assertEqual(compare_module_version(None, None), 0)
self.assertEqual(compare_module_version(
("4.5.e"), (4, 5, 'e', 'b')), -1)
def test_clean_notebooks_for_numbers(self):
temp = get_temp_folder(__file__, "temp_clean_notebooks_for_numbers")
nb = os.path.join(temp, "..", "data", "notebook_with_svg.ipynb")
fold = os.path.join(temp, '_doc', 'notebooks')
self.assertNotExists(fold)
os.makedirs(fold)
shutil.copy(nb, fold)
res = clean_notebooks_for_numbers(temp)
self.assertEqual(len(res), 1)
with open(res[0], 'r') as f:
content = f.read()
self.assertIn('"execution_count": 1,', content)
def test_hash_list(self):
li = [4, '5']
res = hash_list(li)
self.assertEqual(res, "1402b9d4")
li = []
res = hash_list(li)
self.assertEqual(res, "d41d8cd9")
def test_process_argv_for_unittest(self):
li = ['unittests', '-d', '5']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
li = ['unittests']
res = process_argv_for_unittest(li, None)
self.assertEmpty(res)
li = ['unittests', '-e', '.*']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
li = ['unittests', '-g', '.*']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
li = ['unittests', '-f', 'test.py']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
if __name__ == "__main__":
unittest.main()
|
r-darwish/pushjournal
|
pushjournal/_compat.py
|
Python
|
bsd-3-clause
| 128
| 0
|
import sys
PY2 = sys.version_info[0] == 2
if PY2:
from urllib import urlopen
else:
    from urllib.request import urlopen
|
jmetzen/skgp
|
examples/plot_gp_learning_curve.py
|
Python
|
bsd-3-clause
| 3,042
| 0.001644
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
==========================================================
Comparing different variants of squared exponential kernel
==========================================================
Three variants of the squared exponential covariance function are compared:
* Isotropic squared exponential: a global length scale is learned from data.
* Automatic relevance determination (ARD): every dimension gets its own
characteristic length scale, irrelevant dimensions can be ignored if thetaL
is sufficiently small.
* Factor analysis distance (FAD): A low-rank approximation of a full
covariance matrix for the squared exponential kernel is learned.
Correlations between different dimensions can be identified and exploited
to some extent.
The target function maps a 4-dimensional vector onto a real value. One of
the dimensions is ignored (and should thus be pruned away by ARD). The other
dimensions are correlated, which can be exploited by FAD.
The hyperparameters are optimized within
.. math::
\theta_i \in [1e-4, 1e2]
See Rasmussen and Williams 2006, p107 for details regarding the different
variants of the squared exponential kernel.
"""
print(__doc__)
# Author: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# Licence: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.learning_curve import learning_curve
from skgp.estimators import GaussianProcess
np.random.seed(1)
def f(X):
""" Target function for GPR.
Note that one dimension (X[:, 3]) is irrelevant and should thus be ignored
by ARD. Furthermore, the values x in R^3 and
x + \alpha (1, 2 , 0) + \beta (1, 0, 2) have the same value for all x and
all alpha and beta. This can be exploited by FAD.
"""
return np.tanh(2 * X[:, 0] - X[:, 1] - X[:, 2])
Xtrain = np.random.random((200, 4)) * 2 - 1
ytrain = f(Xtrain)
plt.figure()
colors = ['r', 'g', 'b', 'c', 'm']
labels = {1: "Isotropic", 4: "Automatic Relevance Determination",
8: "Factor Analysis"}
for i, n in enumerate(labels.keys()):
train_sizes, train_scores, test_scores = \
learning_curve(GaussianProcess(corr='squared_exponential',
theta0=[1.0] * n, thetaL=[1e-4] * n,
thetaU=[1e2] * n),
                       Xtrain, ytrain, scoring="mean_squared_error",
cv=10, n_jobs=4)
test_scores = -test_scores # Scores correspond to negative MSE
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_min = np.min(test_scores, axis=1)
test_scores_max = np.max(test_scores, axis=1)
plt.plot(train_sizes, test_scores_mean, label=labels[n],
color=colors[i])
plt.fill_between(train_sizes, test_scores_min, test_scores_max,
alpha=0.2, color=colors[i])
plt.legend(loc="best")
plt.title("Learning Curve")
plt.xlabel("Training examples")
plt.ylabel("Mean Squared Error")
plt.yscale("symlog", linthreshy=1e-10)
plt.show()
|
MadsJensen/CAA
|
calc_itc_ali.py
|
Python
|
bsd-3-clause
| 2,791
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 30 20:51:31 2017
@author: mje
"""
import numpy as np
import mne
import matplotlib.pyplot as plt
from mne.stats import permutation_cluster_test
from my_settings import (subjects_select, tf_folder, epochs_folder)
d_ali_ent_right = []
for subject in subjects_select:
data_right_ent = np.load(tf_folder + "%s_ent_right-4-itc.npy" % subject)
data_left_ent = np.load(tf_folder + "%s_ent_left-4-itc.npy" % subject)
data_right_ctl = np.load(tf_folder + "%s_ctl_right-4-itc.npy" % subject)
data_left_ctl = np.load(tf_folder + "%s_ctl_left-4-itc.npy" % subject)
epochs = mne.read_epochs(
epochs_folder + "%s_trial_start-epo.fif" % subject, preload=False)
selection = mne.read_selection("Left-occipital")
selection = [f.replace(' ', '') for f in selection]
left_idx = mne.pick_types(
epochs.info,
meg='grad',
eeg=False,
eog=False,
stim=False,
exclude=[],
selection=selection)
selection = mne.read_selection("Right-occipital")
selection = [f.replace(' ', '') for f in selection]
right_idx = mne.pick_types(
epochs.info,
meg='grad',
eeg=False,
eog=False,
stim=False,
exclude=[],
selection=selection)
d_right_ent = (
data_left_ent[right_idx, :, :] - data_right_ent[right_idx, :, :])
d_left_ent = (
data_left_ent[left_idx, :, :] - data_right_ent[left_idx, :, :])
d_right_ctl = (
data_left_ctl[right_idx, :, :] - data_right_ctl[right_idx, :, :])
d_left_ctl = (
data_left_ctl[left_idx, :, :] - data_right_ctl[left_idx, :, :])
d_ali_ent_right = np.asarray(d_right_ent).mean(axis=1)
d_ali_ent_left = np.asarray(d_left_ent).mean(axis=1)
d_ali_ctl_right = np.asarray(d_right_ctl).mean(axis=1)
d_ali_ctl_left = np.asarray(d_left_ctl).mean(axis=1)
T_obs, clusters, cluster_pv, H0 = permutation_cluster_test(
[d_ali_ent_left, d_ali_ent_right], n_permutations=5000)
times = (epochs.times[::4][:-1]) * 1e3
plt.close('all')
plt.subplot(211)
plt.title("Ctl left v right")
plt.plot(
times,
d_ali_ent_left.mean(axis=0) - d_ali_ent_right.mean(axis=0),
label="ERF Contrast (Event 1 - Event 2)")
plt.ylabel("MEG (T / m)")
plt.legend()
plt.subplot(212)
for i_c, c in enumerate(clusters):
c = c[0]
if cluster_pv[i_c] <= 0.05:
h = plt.axvspan(
times[c.start], times[c.stop - 1], color='r', alpha=0.3)
    else:
plt.axvspan(
times[c.start],
times[c.stop - 1],
color=(0.3, 0.3, 0.3),
alpha=0.3)
hf = plt.plot(times, T_obs, 'g')
plt.legend((h, ), ('cluster p-value < 0.05', ))
plt.xlabel("time (ms)")
plt.ylabel("f-values")
plt.show()
|
cysuncn/python
|
spark/crm/PROC_A_R_ENT_CRE_COUNT.py
|
Python
|
gpl-3.0
| 3,402
| 0.015805
|
#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_A_R_ENT_CRE_COUNT').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Dates needed for processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# Previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# First day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# Last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
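# Illustrative example (editor's note): with etl_date = '20170315' the derived
# values are V_DT_LD = '20170314', V_DT_FMD = '20170301', V_DT_LMD = '20170228'
# and V_DT10 = '2017-03-15'.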
V_STEP = 0
# Keep only the current day's and month-end data
if V_DT_LD != V_DT_LMD:
ret = os.system("hdfs dfs -rm -r /"+dbname+"/ACRM_A_ENT_CRE_COUNT/"+V_DT_LD+".parquet")
# Delete the current day's data
ret = os.system("hdfs dfs -rm -r /"+dbname+"/ACRM_A_ENT_CRE_COUNT/"+V_DT+".parquet")
ACRM_F_CUS_DEV_CONFIG = sqlContext.read.parquet(hdfs+'/ACRM_F_CUS_DEV_CONFIG/*')
ACRM_F_CUS_DEV_CONFIG.registerTempTable("ACRM_F_CUS_DEV_CONFIG")
ACRM_A_CUST_CRE_MON_CHART = sqlContext.read.parquet(hdfs+'/ACRM_A_CUST_CRE_MON_CHART/*')
ACRM_A_CUST_CRE_MON_CHART.registerTempTable("ACRM_A_CUST_CRE_MON_CHART")
# Task [21] 001-01::
V_STEP = V_STEP + 1
sql = """
SELECT CAST('00000' AS INTEGER) AS ID
,D1.ORG_ID AS ORG_ID
,D1.ORG_NAME AS ORG_NAME
,'CRM_CRE_001' AS SUB_ID
,G.COLUMN_NAME AS SUB_NAME
,COUNT(D1.CUST_ID) AS BAL
,D1.REPORT_DATE AS ODS_DATE
,G.INDEX_MIN_VALUE AS SORT_ID
,CAST(SUM(D1.CRE_BAL) AS DECIMAL(24,6)) AS AMOUNT
,D1.FR_NAME AS FR_NAME
,D1.FR_ID AS FR_ID
FROM ACRM_A_CUST_CRE_MON_CHART D1 --
          INNER JOIN ACRM_F_CUS_DEV_CONFIG G                  --Customer development overview configuration table
ON D1.ORG_ID = G.ORG_ID
AND G.INDEX_CODE = 'CRM_CRE_001'
WHERE D1.CUST_TYP = '2'
AND D1.REPORT_DATE = V_DT
AND CAST(D1.CRE_BAL AS DECIMAL(24,6)) >= NVL(G.INDEX_MIN_VALUE, 0.001)
AND CAST(D1.CRE_BAL AS DECIMAL(24,6)) < NVL(G.INDEX_MAX_VALUE, 999999999999999999.99)
GROUP BY D1.FR_ID
,D1.FR_NAME
,D1.ORG_ID
,D1.ORG_NAME
,G.COLUMN_NAME
,G.INDEX_MIN_VALUE
,D1.REPORT_DATE """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
ACRM_A_ENT_CRE_COUNT = sqlContext.sql(sql)
ACRM_A_ENT_CRE_COUNT.registerTempTable("ACRM_A_ENT_CRE_COUNT")
dfn="ACRM_A_ENT_CRE_COUNT/"+V_DT+".parquet"
ACRM_A_ENT_CRE_COUNT.cache()
nrows = ACRM_A_ENT_CRE_COUNT.count()
ACRM_A_ENT_CRE_COUNT.write.save(path=hdfs + '/' + dfn, mode='append')
ACRM_A_ENT_CRE_COUNT.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert ACRM_A_ENT_CRE_COUNT lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrows)
|
davilajose23/ProjectCobra
|
functions_dir.py
|
Python
|
mit
| 10,907
| 0.004034
|
"""Modulo que contiene la clase directorio de funciones
-----------------------------------------------------------------
Compilers Design Project
Tec de Monterrey
Julio Cesar Aguilar Villanueva A01152537
Jose Fernando Davila Orta A00999281
-----------------------------------------------------------------
DOCUMENTATION: For complete Documentation see UserManual.pdf"""
from stack import Stack
from function import Function
from variable import Variable
def get_var_type(var_type):
    '''Return the identifier for each variable type'''
if var_type == 'int':
return 'i'
elif var_type == 'double':
return 'd'
elif var_type == 'string':
return 's'
elif var_type == 'bool':
return 'b'
def get_var_scope(scope):
    '''Return the identifier for each scope type'''
if scope == 'global':
return 'g'
elif scope == 'main':
return 'l'
else:
return 't'
def get_var_name(var_type, scope, var_name):
    '''Construct the address of a variable based on
    the type, scope and variable name.'''
name_type = get_var_type(var_type)
name_scope = get_var_scope(scope)
name = name_type + name_scope + var_name
return name
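# Illustrative example (editor's note): get_var_name('int', 'main', 'x')
# returns 'ilx' -- 'i' for int, 'l' for the local (main) scope, then the name.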
class FunctionsDir(object):
    '''Functions are entries in the functions dictionary.
    Functions are objects with dictionaries of variables.
    The global scope of the program starts with a global function
    with no variables.
    Scope is the function_id of each function.'''
def __init__(self):
        '''Initialization method'''
        self.functions = {}
        self.functions['global'] = Function()
        self.scope = 'global'
        # Defines whether variable existence is being evaluated or variables are being added to the directory
        self.evaluating = True
        # Indicates whether the parameter list of a function needs to be updated
        self.updating_params = False
        # Indicates whether a variable is going to be read with the read function
        self.reading = False
        # Last ID token, used for read
        self.last_id = Stack()
        # Last type token read by the functions directory
        self.last_type = None
        '''Functions that are being called.
        A stack is used for nested function calls'''
        self.call_function = Stack()
        '''Number of arguments being used when calling a function.
        A stack is used for nested calls'''
        self.call_arguments = Stack()
        self.last_read = Stack()
def add_function(self, function_id):
        '''Add function to functions directory. Verify if function already exists'''
if self.functions.get(function_id, None) is not None:
raise NameError('Error: 1001 Function already declared! Function: ' + str(function_id))
else:
self.functions[function_id] = Function()
def validate_function(self, function_id):
'''Validate function exists'''
if self.functions.get(function_id, None) is None:
raise NameError('Error: 1002 Function not declared! Name: ' + str(function_id))
def increase_expected_arguments(self):
        '''Calls the increase_expected_arguments method of the Function class'''
self.functions[self.scope].increase_expected_arguments()
def update_function_params(self, var_id, var_type):
        '''Calls the update_params method of the Function class'''
self.functions[self.scope].update_params(var_id, var_type)
def set_return_type(self, function_return_type):
        '''Calls the set_return_type method of the Function class'''
self.functions[self.scope].set_return_type(function_return_type)
def set_func_quad(self, func_quad):
        '''Calls the set_func_quad method of the Function class'''
self.functions[self.scope].set_func_quad(func_quad)
def set_scope(self, scope):
        '''Changes the current scope of the functions directory to the given scope'''
self.scope = scope
def reset_scope(self):
        '''Resets the scope to the global scope'''
self.scope = 'global'
# Add variable to current function scope
def add_var(self, variable_id, var_type, value=0, size=1):
        '''Adds a variable to the variables dictionary of a Function'''
if self.functions[self.scope].variables_dict.get(variable_id, None) is None:
var_name = get_var_name(var_type, self.scope, variable_id)
self.functions[self.scope].variables_dict[variable_id] = Variable(var_name, value, var_type, self.scope, size)
else:
variable_type = self.functions[self.scope].variables_dict[variable_id].get_type()
msg = 'Error 2001: Variable already declared! ' + str(variable_id) + '. TYPE: ' + variable_type
raise NameError(msg)
def add_for_var(self, variable_id, var_type):
        '''Adds a variable to the current scope's dictionary; if it already exists, its value is overwritten.
        Raises an error if it exists and is not of type int'''
if self.functions[self.scope].variables_dict.get(variable_id, None) is None:
var_name = get_var_name(var_type, self.scope, variable_id)
self.functions[self.scope].variables_dict[variable_id] = Variable(var_name, -1, var_type, self.scope, 1)
else:
variable_type = self.functions[self.scope].variables_dict[variable_id].get_type()
if variable_type != 'int':
msg = 'Error 2001: Variable already declared! ' + str(variable_id) + '. TYPE: ' + variable_type
raise NameError(msg)
else:
self.functions[self.scope].variables_dict[variable_id].value = -1
def validate_variable(self, variable_id):
        '''Looks up the variable in the current scope'''
if self.functions[self.scope].variables_dict.get(variable_id, None) is None:
            # Look up the variable in the global scope
if self.functions['global'].variables_dict.get(variable_id, None) is None:
raise NameError('Error 2002: Variable not declared! VAR: ' + variable_id)
def start_evaluating(self):
        '''Indicates that the functions directory is evaluating the existence of variables'''
self.evaluating = True
def finish_evaluating(self):
        '''Indicates that the functions directory stops evaluating functions'''
self.evaluating = False
def set_type(self, last_type):
'''Set del ultimo token de tipo que fue leido'''
self.last_type = last_type
def get_func_dir(self):
        '''Returns the functions dictionary'''
return self.functions
def get_var(self, variable_id):
        '''Returns the data of the variable from the
        functions dictionary in the current or the global scope'''
if variable_id in self.functions[self.scope].variables_dict:
return self.functions[self.scope].variables_dict.get(variable_id)
elif variable_id in self.functions['global'].variables_dict:
return self.functions['global'].variables_dict.get(variable_id)
return None
def set_call_function(self, function_id):
        '''Sets the id of the function being called
        once its existence in the functions dictionary has been validated'''
self.call_function.push(function_id)
self.call_arguments.push(0)
def increase_call_arguments(self):
        '''Increments the number of arguments being used to call a function.
        Pops the top of the stack, increments it and pushes it back'''
curr = self.call_arguments.pop()
curr += 1
self.call_arguments.push(curr)
def update_var_size(self, size):
        '''Updates the size of a variable in case it is dimensioned'''
if size <= 0:
raise ValueError('Error 7005: Array size must be a positive integer')
else:
self.functions[self.scope].variables_dict[self.last_id.top].size = size
self.functions[self.scope].variables_dict[self.last_id.top].is_dim = True
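# --- Editor's illustrative usage sketch (not part of the original file) ------
# A minimal walk-through, assuming Function, Variable and Stack behave as the
# methods above suggest:
#
#   func_dir = FunctionsDir()
#   func_dir.add_function('main')           # declare a function
#   func_dir.set_scope('main')              # switch the current scope
#   func_dir.add_var('x', 'int', value=0)   # stored under the address 'ilx'
#   func_dir.validate_variable('x')         # no exception: the variable exists
#   func_dir.reset_scope()                  # back to the 'global' scope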
|
callowayproject/django-viewpoint
|
viewpoint/urls_defaultblog.py
|
Python
|
apache-2.0
| 2,705
| 0.026248
|
"""
URL routing for blogs, entries and feeds
"""
from django.conf.urls.defaults import patterns, url
from django.conf import settings
from feeds import LatestEntriesByBlog, LatestEntries #, EntryComments
from models import Blog
from views import generic_blog_entry_view, blog_detail
from viewpoint.settings import USE_CATEGORIES, DEFAULT_BLOG
FEEDS = {
'all': LatestEntries,
'latest': LatestEntries,
}
if USE_CATEGORIES and 'categories' in settings.INSTALLED_APPS:
from feeds import LatestEntriesByCategory
FEEDS['categories'] = LatestEntriesByCategory
urlpatterns = patterns('django.contrib.syndication.views',
(r'^feeds/(?P<url>.*)/$', 'feed', {'feed_dict': FEEDS}),
)
urlpatterns += patterns('',
# Blog detail (Main page of a blog, shows description and stuff)
url(
regex = r'^$',
view = blog_detail,
name='viewpoint_blog_detail'
),
# Listing of blog entries for a given year
url(
regex = r'^(?P<year>\d{4})/$',
view = generic_blog_entry_view,
name='viewpoint_blog_archive_year'
),
# Listing of blog entries for a given month/year
url(
regex = r'^(?P<year>\d{4})/(?P<month>\w{3})/$',
view = generic_blog_entry_view,
name = 'viewpoint_blog_archive_month'
),
# Listing of blog entries for a given week of the year
url(
regex = r'^(?P<year>\d{4})/(?P<week>\d{1,2})/$',
view = generic_blog_entry_view,
name = 'viewpoint_blog_archive_week'
),
# Listing of blog entries for a given day
url(
        regex = r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/$',
view = generic_blog_entry_view,
name = 'viewpoint_blog_archive_day'
),
# Listing of blog entries for the current date
url(
regex = r'^today/$',
view = generic_blog_entry_view,
name='viewpoint_blog_archive_today'
),
# A blog entry
url(
regex = r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/$',
view = generic_blog_entry_view,
name='viewpoint_entry_detail'
),
# A blog comments page
url(
regex = r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/comments/$',
view = generic_blog_entry_view,
kwargs = {'template_name':'viewpoint/entry_comments.html'},
name='viewpoint_entry_comments'
),
# A blog printing page
url(
regex = r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/print/$',
view = generic_blog_entry_view,
kwargs = {'template_name':'viewpoint/entry_print.html'},
name='viewpoint_entry_print'
),
)
|
RealTimeWeb/datasets
|
datasets/python/state_fragility/setup.py
|
Python
|
gpl-2.0
| 198
| 0.005051
|
from setuptools import setup
import os.path
setup(
name='State Fragility',
version='1',
py_modules=['state_fragility'],
    data_files=[('', [
"./state_fragility.db"
])]
)
|
jtk1rk/xsubedit
|
gcustom/cellRendererText.py
|
Python
|
gpl-3.0
| 923
| 0.004334
|
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from .textEditDialog import cTextEditDialog
class cCellRendererText(Gtk.CellRendererText):
""" Label entry cell which calls TextEdit_Dialog upon editing """
    __gtype_name__ = 'CellRendererCustomText'
def __init__(self, parent):
super(cCellRendererText, self).__init__()
self.parentWindow = parent
def do_start_editing(
self, event, treeview, path, background_area, cell_area, flags):
if not self.get_property('editable'):
return
sub = treeview.get_model()[path][0]
entry = Gtk.Entry()
dialog = cTextEditDialog(self.parentWindow, sub, 'vo', treeview.thesaurus)
response = dialog.run()
if response == Gtk.ResponseType.OK:
entry.set_text(dialog.text)
self.emit('edited', path, entry.get_text())
dialog.destroy()
|
yawd/django-sphinxdoc
|
sphinxdoc/admin.py
|
Python
|
bsd-3-clause
| 659
| 0
|
# encoding: utf-8
"""
Admin interface for the sphinxdoc app.
"""
from django.contrib import admin
from sphinxdoc.models import Project, Document
class ProjectAdmin(admin.ModelAdmin):
"""Admin interface for :class:`~sphinxdoc.models.Project`."""
list_display = ('name', 'path',)
prepopulated_fields = {'slug': ('name',)}
class DocumentAdmin(admin.ModelAdmin):
"""
    Admin interface for :class:`~sphinxdoc.models.Document`.
Normally, you shouldn’t need this, since you create new documents via
the management command.
"""
pass
admin.site.register(Project, ProjectAdmin)
admin.site.register(Document, DocumentAdmin)
|
yvaucher/account-financial-tools
|
__unported__/account_compute_tax_amount/__openerp__.py
|
Python
|
agpl-3.0
| 1,342
| 0.003726
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville. Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Recompute tax_amount",
"version" : "1.0",
"depends" : ["base",
"account",
],
"author" : "Camptocamp",
"description": """Recompute tax_amount to avoid sign problem""",
'website': 'http://www.camptocamp.com',
'data' : [],
'installable': False,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
lucienfostier/gaffer
|
python/GafferUI/ButtonPlugValueWidget.py
|
Python
|
bsd-3-clause
| 3,794
| 0.03611
|
##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import Gaffer
import GafferUI
## Supported metadata :
#
# buttonPlugValueWidget:clicked
class ButtonPlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, **kw ) :
self.__button = GafferUI.Button()
GafferUI.PlugValueWidget.__init__( self, self.__button, plug, **kw )
        self.__button.clickedSignal().connect( Gaffer.WeakMethod( self.__clicked ), scoped = False )
Gaffer.Metadata.plugValueChangedSignal().connect( Gaffer.WeakMethod( self.__plugMetadataChanged ), scoped = False )
self.setPlug( plug )
def hasLabel( self ) :
return True
def setPlug( self, plug ) :
GafferUI.PlugValueWidget.setPlug( self, plug )
self.__nameChangedConnection = None
if plug is not None :
self.__nameChangedConnection = plug.nameChangedSignal().connect( Gaffer.WeakMethod( self.__nameChanged ) )
self.__updateLabel()
def _updateFromPlug( self ) :
self.__button.setEnabled( self._editable() )
def __nameChanged( self, plug ) :
self.__updateLabel()
def __updateLabel( self ) :
label = ""
if self.getPlug() :
label = self.getPlug().getName()
label = Gaffer.Metadata.value( self.getPlug(), "label" ) or label
self.__button.setText( label )
def __clicked( self, widget ) :
code = Gaffer.Metadata.value( self.getPlug(), "buttonPlugValueWidget:clicked" )
if not code :
return False
executionDict = {
"IECore" : IECore,
"Gaffer" : Gaffer,
"plug" : self.getPlug(),
"button" : self,
}
with GafferUI.ErrorDialogue.ErrorHandler( title = "Button Error", parentWindow = self.ancestor( GafferUI.Window ) ) :
with Gaffer.UndoScope( self.getPlug().ancestor( Gaffer.ScriptNode ) ) :
with self.getContext() :
exec( code, executionDict, executionDict )
def __plugMetadataChanged( self, nodeTypeId, plugPath, key, plug ) :
if self.getPlug() is None :
return
if key=="label" and Gaffer.MetadataAlgo.affectedByChange( self.getPlug(), nodeTypeId, plugPath, plug ) :
self.__updateLabel()
|
mhoffma/micropython
|
tests/extmod/ure_split_notimpl.py
|
Python
|
mit
| 138
| 0
|
import ure as re
r = re.compile('( )')
try:
s = r.split("a b
|
c foobar")
except NotImplementedError:
print('NotImplementedError')
|
LorenzoBi/courses
|
UQ/rand_gen.py
|
Python
|
mit
| 286
| 0.003497
|
import numpy as np
import matplotlib.pyplot as plt
def generate_random(a, M, c, seed):
for i in range(1000 * seed // 10):
        seed = (a * seed + c) % M
return seed / M
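# Editor's note: generate_random iterates a linear congruential generator,
# seed -> (a * seed + c) mod M, a seed-dependent number of times and returns
# the final state normalised to the interval [0, 1).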
y = [generate_random(45, 989993, 12, i) for i in range(1000)]
plt.plot(np.arange(1000), y)
plt.show()
|
UmSenhorQualquer/pythonVideoAnnotator
|
base/pythonvideoannotator/setup.py
|
Python
|
mit
| 2,475
| 0.013737
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import re, os
PACKAGE_PATH = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(PACKAGE_PATH, 'pythonvideoannotator','__init__.py'), 'r') as fd:
content = fd.read()
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', content, re.MULTILINE).group(1)
with open(os.path.join(PACKAGE_PATH, '..','..','README.md'), 'r') as fd:
long_description = fd.read()
# REQUIREMENTS BEGIN
REQUIREMENTS = [
"geometry_designer==0.4.38",
"modular-computer-vision-api-gui==0.3.31",
"pyforms-gui==4.904.152",
"modular-computer-vision-api==0.3.29",
"python-video-annotator-models-gui==0.7.63",
"python-video-annotator-models==0.8.82",
"python-video-annotator-module-timeline==0.6.26",
"python-video-annotator-module-eventstats==0.5.15",
"python-video-annotator-module-virtual-object-generator==0.
|
6.26",
"python-video-annotator-module-deeplab==0.902.21",
"python-video-annotator-module-contours-images==0.5.28",
"python-video-annotator-module-tracking==0.6.38",
"python-video-annotator-module-smooth-paths==0.5.19",
"python-video-annotator-module-distances==0.5.18",
"python-video-annotator-module-path-map==0.6.16",
"python-video-annotator-module-motion-counter==0.5.26",
"python-video-annotator-module-create-paths==0.5.15",
"python-video-annotator-module-regions-filter==0.5.18",
"python-video-annotator-module-import-export==0.5.23",
"python-video-annotator-module-background-finder==0.5.21",
"python-video-annotator-module-find-orientation==0.5.18",
"python-video-annotator-module-path-editor==0.5.28"
]
# REQUIREMENTS END
setup(
name='Python video annotator',
version=version,
description="""""",
author=['Ricardo Ribeiro'],
author_email='ricardojvr@gmail.com',
url='https://bitbucket.org/fchampalimaud/pythonvideoannotator-models',
long_description = long_description,
long_description_content_type = 'text/markdown',
packages=find_packages(),
install_requires=[
'simplejson',
'pypi-xmlrpc',
'send2trash',
'scipy',
'sklearn',
'confapp',
] + REQUIREMENTS,
entry_points={
'console_scripts': [
'start-video-annotator=pythonvideoannotator.__main__:start',
],
},
package_data={'pythonvideoannotator': [
'resources/icons/*.png',
'resources/themes/default/*.css',
]
},
)
|
izhaohui/gardener
|
controller/home/flower/models.py
|
Python
|
gpl-3.0
| 2,048
| 0.001953
|
from django.db import models
import socket,logging
# Create your models here.
class Sensor(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
env_light = models.IntegerField()
env_humid = models.IntegerField()
env_raindrop = models.IntegerField()
env_temperature = models.IntegerField()
soi_humid = models.IntegerField()
soi_fatness = models.IntegerField()
soi_temperature = models.IntegerField()
valve_flow = models.IntegerField()
valve_span = models.IntegerField()
@staticmethod
    def valve(seconds):
seconds = seconds if 0 <= seconds <= 10 else 2
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.connect(("192.168.33.58", 80))
conn.send("ST,%d" % seconds)
content = conn.recv(10240)
if content and content.find("&") > 0:
data = {k: v for k, v in [tuple(i.split('=')) for i in content.split("&")]}
sensor = Sensor()
sensor.env_light = data['env_light']
sensor.env_humid = data['env_humid']
sensor.env_raindrop = data['env_raindrop']
sensor.env_temperature = data['env_temperature']
sensor.soi_humid = data['humi_aval']
sensor.soi_fatness = data['soil_fatness']
sensor.soi_temperature = data['soil_temperature']
sensor.valve_flow = data['valve_flow']
sensor.valve_span = data['relay_total_seconds']
sensor.save()
return True
else:
logging.warning("error sensor data %s." % content)
return False
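    # Editor's note (illustrative, assumed): the controller at 192.168.33.58 is
    # expected to reply with an urlencoded-style payload such as
    #   "env_light=512&env_humid=40&env_raindrop=0&...&relay_total_seconds=2"
    # which the code above splits on '&' and '=' into a dict before saving.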
class Daily(models.Model):
date = models.DateField(auto_now_add=True)
env_light = models.IntegerField()
env_humid = models.IntegerField()
env_raindrop = models.IntegerField()
env_temperature = models.IntegerField()
soi_humid = models.IntegerField()
soi_fatness = models.IntegerField()
soi_temperature = models.IntegerField()
valve_flow = models.IntegerField()
valve_span = models.IntegerField()
|
jfischer/micropython-iot-hackathon
|
example_code/server_mqtt_to_influx.py
|
Python
|
mit
| 2,908
| 0.003783
|
# Read from an MQTT queue and write events to Influxdb
import argparse
import asyncio
import time
import sys
from collections import namedtuple
from thingflow.base import Scheduler, SensorEvent
from thingflow.adapters.mqtt import MQTTReader
import thingflow.filters.select # adds select() method
import thingflow.filters.json # adds json() method
from thingflow.adapters.influxdb import InfluxDBWriter
Sensor = namedtuple('Sensor', ['series_name', 'fields', 'tags'])
def setup_flow(args):
mqtt = MQTTReader(args.mqtt_host, topics=[(args.topic_name, 0),])
decoded = mqtt.select(lambda m:(m.payload).decode('utf-8'))\
.from_json(constructor=SensorEvent)\
.select(lambda evt: SensorEvent(sensor_id=evt.sensor_id,
ts=time.time(),
val=evt.val))
decoded.output()
w = InfluxDBWriter(msg_format=Sensor(series_name=args.influx_measurement,
fields=['val', 'ts'], tags=['sensor_id']),
generate_timestamp=False,
username=args.influx_username,
password=args.influx_password,
database=args.influx_database)
decoded.connect(w)
return mqtt
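# Editor's note: the flow above is MQTT payload bytes -> utf-8 decode -> JSON
# parsed into a SensorEvent -> timestamp replaced with the local receive time
# -> echoed to stdout via output() and written to InfluxDB.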
def main():
parser = argparse.ArgumentParser(description="Subscribe to the specified topic and write the resulting messages to Influxdb")
parser.add_argument('--mqtt-host', type=str, default='localhost',
                        help="Hostname or IP address of MQTT broker (defaults to localhost)")
parser.add_argument('--topic-name', type=str, default='sensor-data',
help="Topic for subscription (defaults to sensor-data)")
parser.add_argument('--influx-host', type=str, default='localhost',
help="Influx db host (defaults to localhost)")
parser.add_argument('--influx-username', type=str, default='root',
help="Influx db username (defaults to root)")
parser.add_argument('--influx-password', type=str, default=None,
help="Influx db password (defaults to None)")
parser.add_argument('--influx-database', type=str, default='sensor-data',
help="Influx db database (defaults to sensor-data)")
parser.add_argument('--influx-measurement', type=str, default='lux',
help="Influx db measurement (defaults to lux)")
args = parser.parse_args()
mqtt = setup_flow(args)
scheduler = Scheduler(asyncio.get_event_loop())
stop = scheduler.schedule_on_private_event_loop(mqtt)
print("Running main loop")
try:
scheduler.run_forever()
except KeyboardInterrupt:
print("Stopping...")
stop()
return 0
if __name__ == '__main__':
sys.exit(main())
|
jedp/oakland_pm
|
core/models.py
|
Python
|
mit
| 8,624
| 0.007189
|
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django_countries import CountryField
from django.contrib.localflavor.us.models import *
from django.contrib.localflavor.us.us_states import STATE_CHOICES
# https://docs.djangoproject.com/en/1.3/ref/contrib/localflavor/#united-states-of-america-us
# TODO: django scheduler
# TODO: confirm what's in User model
# User model?
# first name
# last name
# auth_mode (custom, facebook, tumblr?)
class Profile(models.Model):
"""
Profile extends Django User Model
todo: Selection should eventually be intelligent based on location
"""
user = models.ForeignKey(User, unique=True, verbose_name='user')
school = models.ForeignKey('School', blank=True, null=True)
watch_list = models.ForeignKey('WatchList', blank=True, null=True, related_name="profile_watch_list")
# class Meta:
# verbose_name_plural = 'Profiles'
# ordering = ('user',)
#
# def __unicode__(self):
# return self.user
#
# @models.permalink
# def get_absolute_url(self):
# return ('view_forum_category', (self.forum.slug, self.slug,))
class School(models.Model):
name = models.CharField(max_length=200, unique=True)
address = models.ForeignKey('Address', blank=True, null=True)
contact = models.ForeignKey('Contact', blank=True, null=True)
district = models.PositiveIntegerField(blank=True, null=True)
def __unicode__(self):
return self.name
class Address(models.Model):
street1 = models.CharField(max_length=250)
street2 = models.CharField(max_length=250, blank=True, null=True)
city = models.CharField(max_length=100, default='Oakland')
state = USStateField(choices=STATE_CHOICES, default='CA', blank=True, null=True)
country = CountryField(blank=True, null=True, default='US')
zipcode = models.CharField(max_length=10, blank=True, null=True)
# GIS is computed as a post-save process, so must
# be able to be null on first save
location = models.ForeignKey('GIS', blank=True, null=True)
def __unicode__(self):
return self.street1
class GIS(models.Model):
"""
GIS location data for events, schools,
bus stops, and bart stops
"""
latitude = models.FloatField(blank=True, null=True)
longitude = models.FloatField(blank=True, null=True)
class EventDate(models.Model):
"""
Can this be replaced with a django scheduler?
"""
date = models.DateTimeField()
duration_mins = models.PositiveIntegerField(default=60)
def __unicode__(self):
return self.date.isoformat()
class Meta:
ordering = ['date']
class Contact(models.Model):
"""
Contact info for projects and events
"""
first_name = models.CharField(max_length=40, blank=True, null=True)
last_name = models.CharField(max_length=40, blank=True, null=True)
role = models.CharField(max_length=200, blank=True, null=True)
phone = PhoneNumberField(blank=True, null=True)
smsok = models.BooleanField(default=False)
tdd = PhoneNumberField(max_length=20, blank=True, null=True)
fax = PhoneNumberField(max_length=20, blank=True, null=True)
email = models.EmailField(blank=True, null=True)
web = models.URLField(blank=True, null=True)
def __unicode__(self):
if self.email:
return self.email
if self.phone:
return self.phone
if self.web:
return self.web
return '<Contact at 0x%x>' % (id(self))
class Category(models.Model):
"""
Moderated set of categories for events
"""
name = models.CharField(max_length=60, unique=True)
def __unicode__(self):
return self.name
class Tag(models.Model):
"""
Moderated set of subcats for events
"""
name = models.CharField(max_length=60, unique=True)
def __unicode__(self):
return self.name
class Organization(models.Model):
"""
An organization that offers Programs
"""
name = models.CharField(max_length=250, unique=True)
about = models.TextField(blank=True, null=True)
headoffice = models.ForeignKey('Address', related_name='office')
contact = models.ForeignKey('Contact')
date_added = models.DateTimeField(auto_now_add=True)
date_updated = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
class Program(models.Model):
"""
Program info
"""
# Core Details
name = models.CharField(max_length=200, blank=True, null=True)
summary = models.TextField(blank=True, null=True)
about = models.TextField()
organization = models.ForeignKey('Organization', blank=True, null=True)
address = models.ForeignKey('Address')
notes = models.TextField(blank=True, null=True)
primary_contact = models.ForeignKey('Contact', null=True)
# Time
events = models.ManyToManyField('EventDate')
# Attendee Details
cost = models.FloatField(default=0.00)
agemin = models.PositiveIntegerField(default=13)
agemax = models.PositiveIntegerField(default=18)
registration_needed = models.BooleanField(blank=True, default=False)
# validate required if reg_needed
registration_due_by = models.DateTimeField(blank=True, null=True)
registration_instructions = models.TextField(blank=True, null=True)
# Organization
categories = models.ManyToManyField('Category')
tags = models.ManyToManyField('Tag')
# todo: make subcat intelligent based on cat selected
# Meta
is_active = models.BooleanField(default=False)
program_status = models.ForeignKey('ProgramStatus', null=True) # eg pending approval, approved, denied, need verifications, etc.
program_type = models.ForeignKey('ProgramType', null=True) # eg drop-in, register
rank = models.IntegerField(default=-1)
capcity = models.PositiveIntegerField(null=True) # who's going, how many total can attend
wait_list = models.ForeignKey('WaitList', blank=True, null=True, related_name="program_wait_list")
date_added = models.DateTimeField(auto_now_add=True)
date_updated = models.DateTimeField(auto_now=True)
# holding
# logo = models.ImageField()
# attending = models.ForeignKey(User)
def next_event(self):
import datetime
events = self.events.filter(date__gt=datetime.datetime.now()).order_by('date')
if events:
return events[0].date
        return None
def time_until(self):
return "hi"
def __unicode__(self):
return self.name
class ProgramStatus(models.Model):
program_status = models.CharField(max_length=200)
description = models.TextField(blank=True, null=True)
class ProgramType(models.Model):
    program_type = models.CharField(max_length=200)
description = models.TextField(blank=True, null=True)
class WatchList(models.Model):
profile = models.ForeignKey('Profile', related_name="watchlist_profile")
program = models.ForeignKey('Program', related_name="watchlist_program")
date_added = models.DateTimeField(auto_now_add=True)
class WaitList(models.Model):
profile = models.ForeignKey('Profile', related_name="waitlist_profile")
program = models.ForeignKey('Program', related_name="waitlist_program")
date_added = models.DateTimeField(auto_now_add=True)
position = models.PositiveIntegerField(default=0)
def save(self):
self.position += 1
super(WaitList,self).save()
class PublicTransport(models.Model):
''' Pull data with APIs?'''
TRANSPORT_CHOICES = (
('B', 'Bus'),
('T', 'Train'),
('LR', 'Light Rail'),
)
company = models.CharField(max_length=100)
line = models.CharField(max_length=40)
name = models.CharField(max_length=40)
address = models.ForeignKey('Address')
pt_type = models.CharField(max_length=10, choices=TRANSPORT_CHOICES)
# -- Holding ---
# class Comment(models.Mode
|
rienafairefr/pynYNAB
|
pynYNAB/scripts/helpers.py
|
Python
|
mit
| 2,659
| 0.001128
|
from __future__ import print_function
import os
import argparse
import re
import six
import yaml
DEFAULT_CONFIG_FILE = 'ynab.yaml'
class ConfigEnvArgumentParser(argparse.ArgumentParser):
def __init__(self, *args, **kwargs):
super(ConfigEnvArgumentParser, self).__init__(*args, **kwargs)
try:
self.add_argument('--config', dest='config',
help='a yaml file containing the config')
except argparse.ArgumentError:
# --config already exists
pass
def parse_args(self, *args, **kwargs):
parsed_args = vars(super(ConfigEnvArgumentParser, self).parse_args(*args, **kwargs))
parsed_args_config = merge_config(parsed_args)
if parsed_args_config is None:
self.error('invalid config file was passed')
return AttrDict(**parsed_args_config)
def get_config_from_yaml(config_file=DEFAULT_CONFIG_FILE):
try:
with open(config_file, 'r') as stream:
yaml_content = yaml.load(stream)
if yaml_content is not None:
return yaml_content
except:
return {}
def get_config_from_env():
valid_key = re.compile(r'^N?YNAB_(?P<key>.*)$')
returnvalue = {}
for key, value in os.environ.items():
match = valid_key.match(key)
if match:
key_name = match.group('key').lower()
returnvalue[key_name] = value
return returnvalue
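# Editor's note (illustrative): environment variables prefixed with YNAB_ or
# NYNAB_, e.g. YNAB_EMAIL=... and NYNAB_PASSWORD=..., are picked up here and
# become lower-cased keys such as {'email': ..., 'password': ...}.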
def merge_config(arguments=None, nominal=DEFAULT_CONFIG_FILE):
if arguments is None:
arguments = {}
cli_config = arguments
env_config = get_config_from_env()
    ynab_yaml_config = get_config_from_yaml(nominal)
# cli-passed > cli-passed-config > ynab.yaml > ENV
merged_config = merge(ynab_yaml_config, env_config)
if hasattr(arguments, 'config') and arguments.config:
cli_passed_config = get_config_from_yaml(arguments.config)
merged_config = merge(cli_passed_config, merged_config)
merged_config = merge(cli_config, merged_config)
print('Config used:')
print('------------')
    print(yaml.dump(merged_config, default_flow_style=False), end='')
print('------------')
return merged_config
def merge(user, default):
if isinstance(user, dict) and isinstance(default, dict):
for key, value in six.iteritems(default):
if key not in user:
user[key] = value
else:
user[key] = merge(user[key], value)
return user
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
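# --- Editor's illustrative sketch (not part of the original file) ------------
# merge() gives precedence to its first argument, so merge_config() layers the
# sources as: CLI arguments > --config file > ynab.yaml > environment, e.g.:
#
#   >>> merge({'email': 'cli@example.com'}, {'email': 'env@example.com', 'budget': 'b1'})
#   {'email': 'cli@example.com', 'budget': 'b1'}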
|
DVegaCapital/zipline
|
tests/test_exception_handling.py
|
Python
|
apache-2.0
| 3,339
| 0
|
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
import zipline.utils.simfactory as simfactory
import zipline.utils.factory as factory
from zipline.test_algorithms import (
ExceptionAlgorithm,
DivByZeroAlgorithm,
SetPortfolioAlgorithm,
)
from zipline.finance.slippage import FixedSlippage
from zipline.utils.test_utils import (
drain_zipline,
setup_logger,
teardown_logger,
ExceptionSource,
)
DEFAULT_TIMEOUT = 15 # seconds
EXTENDED_TIMEOUT = 90
class ExceptionTestCase(TestCase):
def setUp(self):
self.zipline_test_config = {
'sid': 133,
'slippage': FixedSlippage()
}
setup_logger(self)
def tearDown(self):
teardown_logger(self)
def test_datasource_exception(self):
self.zipline_test_config['trade_source'] = ExceptionSource()
zipline = simfactory.create_test_zipline(
**self.zipline_test_config
)
with self.assertRaises(ZeroDivisionError):
output, _ = drain_zipline(self, zipline)
def test_exception_in_handle_data(self):
# Simulation
# ----------
self.zipline_test_config['algorithm'] = \
ExceptionAlgorithm(
'handle_data',
self.zipline_test_config['sid'],
sim_params=factory.create_simulation_parameters()
)
zipline = simfactory.create_test_zipline(
**self.zipline_test_config
)
with self.assertRaises(Exception) as ctx:
output, _ = drain_zipline(self, zipline)
self.assertEqual(str(ctx.exception),
                         'Algo exception in handle_data')
def test_zerodivision_exception_in_handle_data(self):
# Simulation
# ----------
        self.zipline_test_config['algorithm'] = \
DivByZeroAlgorithm(
self.zipline_test_config['sid'],
sim_params=factory.create_simulation_parameters()
)
zipline = simfactory.create_test_zipline(
**self.zipline_test_config
)
with self.assertRaises(ZeroDivisionError):
output, _ = drain_zipline(self, zipline)
def test_set_portfolio(self):
"""
Are we protected against overwriting an algo's portfolio?
"""
# Simulation
# ----------
self.zipline_test_config['algorithm'] = \
SetPortfolioAlgorithm(
self.zipline_test_config['sid'],
sim_params=factory.create_simulation_parameters()
)
zipline = simfactory.create_test_zipline(
**self.zipline_test_config
)
with self.assertRaises(AttributeError):
output, _ = drain_zipline(self, zipline)
|
timthelion/FreeCAD
|
src/Mod/Fem/_CommandBeamSection.py
|
Python
|
lgpl-2.1
| 2,767
| 0.001084
|
# ***************************************************************************
# * *
# * Copyright (c) 2015 - Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# *   but WITHOUT ANY WARRANTY; without even the implied warranty of      *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# *   GNU Library General Public License for more details.                *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "_CommandBeamSection"
__author__ = "Bernd Hahnebach"
__url__ = "http://www.freecadweb.org"
import FreeCAD
from FemCommands import FemCommands
if FreeCAD.GuiUp:
import FreeCADGui
from PySide import QtCore
class _CommandBeamSection(FemCommands):
"The Fem_BeamSection command definition"
def __init__(self):
super(_CommandBeamSection, self).__init__()
self.resources = {'Pixmap': 'fem-beam-section',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Fem_BeamSection", "Beam cross section"),
'Accel': "C, B",
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Fem_BeamSection", "Creates a FEM beam cross section")}
self.is_active = 'with_analysis'
def Activated(self):
FreeCAD.ActiveDocument.openTransaction("Create FemBeamSection")
FreeCADGui.addModule("FemBeamSection")
FreeCADGui.doCommand("FemGui.getActiveAnalysis().Member = FemGui.getActiveAnalysis().Member + [FemBeamSection.makeFemBeamSection()]")
if FreeCAD.GuiUp:
FreeCADGui.addCommand('Fem_BeamSection', _CommandBeamSection())
|
BioRoboticsUNAM/pyRobotics
|
setup.py
|
Python
|
mit
| 513
| 0.017578
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from pyrobotics.BB import __version__
setup(name='pyRobotics',
version=__version__,
author='Adrián Revuelta Cuauhtli',
author_email='adrianrc.89@gmail.com',
url='http://bioroboticsunam.github.io/pyRobotics',
license='LICENSE.txt',
data_files=[('', ['README', 'LICENSE.txt'])],
description="A Python API to create modules that connect to our message-passing and shared varaibels hub 'BlackBoard'.",
packages=['pyrobotics'])
|
himanshu-dixit/oppia
|
core/controllers/learner_playlist_test.py
|
Python
|
apache-2.0
| 14,127
| 0.001628
|
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the learner playlist."""
from constants import constants
from core.domain import learner_playlist_services
from core.domain import learner_progress_services
from core.tests import test_utils
import feconf
class LearnerPlaylistHandlerTests(test_utils.GenericTestBase):
OWNER_EMAIL = 'owner@example.com'
OWNER_USERNAME = 'owner'
EXP_ID_1 = 'exp_id_1'
EXP_TITLE_1 = 'exp title 1'
EXP_ID_2 = 'exp_id_2'
EXP_TITLE_2 = 'exp title 2'
EXP_ID_3 = 'exp_id_3'
EXP_TITLE_3 = 'exp title 3'
EXP_ID_4 = 'exp_id_4'
EXP_TITLE_4 = 'exp title 4'
COL_ID_1 = 'col_id_1'
COL_TITLE_1 = 'col title 1'
COL_ID_2 = 'col_id_2'
COL_TITLE_2 = 'col title 2'
COL_ID_3 = 'col_id_3'
COL_TITLE_3 = 'col title 3'
COL_ID_4 = 'col_id_4'
COL_TITLE_4 = 'col title 4'
def setUp(self):
super(LearnerPlaylistHandlerTests, self).setUp()
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.viewer_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
# Save the explorations.
self.save_new_default_exploration(
self.EXP_ID_1, self.owner_id, title=self.EXP_TITLE_1)
self.save_new_default_exploration(
self.EXP_ID_2, self.owner_id, title=self.EXP_TITLE_2)
self.save_new_default_exploration(
self.EXP_ID_3, self.owner_id, title=self.EXP_TITLE_3)
self.save_new_default_exploration(
self.EXP_ID_4, self.viewer_id, title=self.EXP_TITLE_3)
# Save the collections.
self.save_new_default_collection(
self.COL_ID_1, self.owner_id, title=self.COL_TITLE_1)
self.save_new_default_collection(
self.COL_ID_2, self.owner_id, title=self.COL_TITLE_2)
self.save_new_default_collection(
self.COL_ID_3, self.owner_id, title=self.COL_TITLE_3)
self.save_new_default_collection(
self.COL_ID_4, self.viewer_id, title=self.COL_TITLE_4)
def test_add_exploration_to_learner_playlist(self):
self.login(self.VIEWER_EMAIL)
response = self.testapp.get(feconf.LEARNER_DASHBOARD_URL)
csrf_token = self.get_csrf_token_from_response(response)
# Add one exploration to the playlist.
self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
self.EXP_ID_1), {}, csrf_token)
self.assertEqual(
learner_playlist_services.get_all_exp_ids_in_learner_playlist(
self.viewer_id), [self.EXP_ID_1])
# Add another exploration.
self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
self.EXP_ID_2), {}, csrf_token)
self.assertEqual(
learner_playlist_services.get_all_exp_ids_in_learner_playlist(
self.viewer_id), [self.EXP_ID_1, self.EXP_ID_2])
# User rearranges the explorations. 'exp title 2' is shifted to the
# first position.
payload = {
'index': 0
}
self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
self.EXP_ID_2), payload, csrf_token)
self.assertEqual(
learner_playlist_services.get_all_exp_ids_in_learner_playlist(
self.viewer_id), [self.EXP_ID_2, self.EXP_ID_1])
# If an exploration belongs to the incomplete list or completed list, it
# should not be added. Here we test for the completed case.
learner_progress_services.mark_exploration_as_completed(
self.viewer_id, self.EXP_ID_3)
response = self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
self.EXP_ID_3), {}, csrf_token)
self.assertEqual(
response['belongs_to_completed_or_incomplete_list'], True)
self.assertEqual(
learner_playlist_services.get_all_exp_ids_in_learner_playlist(
self.viewer_id), [self.EXP_ID_2, self.EXP_ID_1])
# If an exploration belongs to one of the subscribed explorations,
        # it should not be added to the learner playlist.
response = self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
self.EXP_ID_4), {}, csrf_token)
self.assertEqual(
response['belongs_to_subscribed_activities'], True)
self.assertEqual(
learner_playlist_services.get_all_exp_ids_in_learner_playlist(
self.viewer_id), [self.EXP_ID_2, self.EXP_ID_1])
# Now we begin testing of not exceeding the limit of activities in the
# learner playlist.
# Add feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT - 2 activities to reach
# the maximum limit.
for exp_id in range(5, feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT + 3):
self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
'exp_id_%s' % exp_id), {}, csrf_token)
# Now if we try and add an activity we should get a message saying we
# are exceeding the limit.
response = self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_EXPLORATION,
'exp_id_%s' %
str(feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT + 3)),
{}, csrf_token)
self.assertEqual(response['playlist_limit_exceeded'], True)
self.logout()
def test_add_collection_to_learner_playlist(self):
self.login(self.VIEWER_EMAIL)
response = self.testapp.get(feconf.LEARNER_DASHBOARD_URL)
csrf_token = self.get_csrf_token_from_response(response)
# Add one collection to the playlist.
self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_COLLECTION,
self.COL_ID_1), {}, csrf_token)
self.assertEqual(
learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
self.viewer_id), [self.COL_ID_1])
# Add another exploration.
self.post_json('%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_COLLECTION,
self.COL_ID_2), {}, csrf_token)
self.assertEqual(
learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
self.viewer_id), [self.COL_ID_1, self.COL_ID_2])
# User rearranges the explorations. 'exp title 2' is shifted to the
# first position.
payload = {
'index': 0
}
self.post_json(
'%s/%s/%s' % (
feconf.LEARNER_PLAYLIST_DATA_URL,
constants.ACTIVITY_TYPE_COLLECTION,
self.COL_ID_2), payload, csrf_token)
self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_2, self.COL_ID_1])
|
avocado-framework/avocado-vt
|
virttest/libvirt_xml/devices/audio.py
|
Python
|
gpl-2.0
| 1,707
| 0
|
"""
audio device support class(es)
https://libvirt.org/formatdomain.html#audio-devices
"""
from virttest.libvirt_xml import accessors
from virttest.libvirt_xml.devices import base
class Audio(base.UntypedDeviceBase):
__slots__ = ('id', 'type', 'attrs', 'input_attrs',
'input_settings', 'output_attrs', 'output_settings')
def __init__(self, virsh_instance=base.base.virsh):
accessors.XMLAttribute('id', self,
parent_xpath='/',
tag_name='audio',
attribute='id')
accessors.XMLAttribute('type', self,
parent_xpath='/',
tag_name='audio',
attribute='type')
accessors.XMLElementDict('attrs', self,
parent_xpath='/',
tag_name='audio')
accessors.XMLElementDict('input_attrs', self,
parent_xpath='/',
                                 tag_name='input')
accessors.XMLElementDict('input_settings', self,
parent_xpath='/input',
tag_name='settings')
accessors.XMLElementDict('output_attrs', self,
parent_xpath='/',
tag_name='output')
accessors.XMLElementDict('output_settings', self,
                                 parent_xpath='/output',
tag_name='settings')
super(Audio, self).__init__(device_tag='audio',
virsh_instance=virsh_instance)
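    # Editor's note (illustrative, attribute values assumed): the accessors
    # above map XML along the lines of
    #   <audio id='1' type='alsa'>
    #     <input ...><settings .../></input>
    #     <output ...><settings .../></output>
    #   </audio>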
|
billhoffman/drake
|
drake/bindings/python/pydrake/test/testRBTIK.py
|
Python
|
bsd-3-clause
| 1,165
| 0.006009
|
import unittest
import numpy as np
import pydrake
from pydrake.solvers import ik
import os.path
class TestRBTIK(unittest.TestCase):
def testPostureConstraint(self):
r = pydrake.rbtree.RigidBodyTree(os.path.join(pydrake.getDrakePath(), "examples/Pendulum/Pendulum.urdf"))
q = -0.9
posture_constraint = ik.PostureConstraint(r)
posture_constraint.setJointLimits(np.array([[6]], dtype=np.int32),
np.array([[q]]),
np.array([[q]]))
# Choose a seed configuration (randomly) and a nominal configuration (at 0)
q_seed = np.vstack((np.zeros((6,1)),
|
0.8147))
q_nom = np.vstack((np.zeros((6,1)),
0.))
options = ik.IKoptions(r)
results = ik.inverseKinSimple(r,
q_seed,
q_nom,
[posture_constraint],
options)
        self.assertAlmostEqual(results.q_sol[6], q, delta=1e-9)
if __name__ == '__main__':
unittest
|
.main()
|
matthew-brett/bibstuff
|
bibstuff/bibstyles/default.py
|
Python
|
mit
| 4,900
| 0.02102
|
#File: default.py
"""
Provides a default style for bib4txt.py
Produces a list of citations to be included in a reStructuredText document.
(In very simple documents, it can also provide citation reference formatting
by substituting formatted citations into the document text in place of the citation references.)
A style includes:
- citation template
- CitationManager class
- sortkey for make_text_output
(often based on a field list)
:note: you will often want to override these
:note: shared.py holds definitions common to most styles
:note: see the examples (e.g., example_numbered.py) of different styles
:author: Alan G Isaac
:contact: http://www.american.edu/cas/econ/faculty/isaac/isaac1.htm
:copyright: 2006 by Alan G Isaac
:license: MIT (see `license.txt`_)
:date: 2006-08-01
.. _license.txt: ./license.txt
"""
__docformat__ = "restructuredtext en"
__author__ = "Alan G. Isaac"
__version__ = "0.6"
__needs__ = '2.4'
################### IMPORTS ##########################
# from standard library
import logging
style_logger = logging.getLogger('bibstuff_logger')
#shared_logger = logging.getLogger('bibstuff_logger')
# imports from bibstuff
#TODO: change to relative imports (requires Python 2.5)
# :IMPORTANT: every style must import shared!
import shared
# most styles with start with the default templates:
# in default_templates.py
import default_templates
########################################################
##########################################################################
################### CITEREF FORMATTING #################################
##########################################################################
CITEREF_TEMPLATE = default_templates.DEFAULT_CITEREF_TEMPLATE.copy()
##########################################################################
################## CITATION FORMATTING #################################
##########################################################################
"""
Every style must have a CITATION_TEMPLATE, a CitationManager, and a ref_list_sort_key.
Crucial formatting decisions are made in the CITATION_TEMPLATE.
The CITATION_TEMPLATE provides default reference formatting (may also be used by BibStyle)
:TODO:
- provide graceful handling of missing fields
- allow different formatting of first and other names
- allow different initial line and subsequent line indenting
"""
# here we simply use the default citation template
CITATION_TEMPLATE = shared.CitationManager.default_citation_template
class CitationManager(shared.CitationManager):
################### CITEREF FORMATTING #########################
#we set the 'format_inline_cite' method equal to the below 'format_inline_cite' function
def format_inline_cite(self, cite_key_list):
"""
Usually you will need to write a 'format_inline_cite' function
that the CiteRefProcessor will use
|
to substitute inline for citation references.
"""
style_logger.debug('default: enter CitationManager.format_inline_cite')
#:note: need entry to be None if cite_key not found, so discard=Fa
|
lse
entry_list = self.find_entries(cite_key_list,discard=False)
"""
for entry in entry_list:
print entry
"""
return format_inline_cite(entry_list, self)
################### CITATION FORMATTING ########################
def get_citation_label(self,entry,citation_template=None):
return '.. [' + entry.citekey + ']\n'
#sort_key for sorting list of references
# (choice of field_list is a formatting decision)
def sortkey(self,bibentry):
return self.make_sort_key(bibentry,['Author','Year'])
def format_inline_cite(entry_list, citation_manager):
"""Return string, formatted in-text citation (allows *multiple* citations).
`entry_list` : list
entries to be formatted
`citation_manager` : CitationManager instance
handles name formatting
	:note: need the entry formatter because it determines the field of the names for the cite
:note: much of the following functionality was in the old Bibstyle's formatCitation() method
:TODO: rewrite
	:TODO: ? entries should be more featureful ? (conflicts with core goal of BibEntry class)
"""
style_logger.debug("default.py: Entering format_inline_cite.")
name_date_sep = ' '
formatted_list = []
for entry in entry_list:
if not entry: #None replaces missing entries
formatted_list.append('?')
else:
year = entry['year']
entry_formatter = citation_manager.entry_formatter
last_names = entry.get_names(entry_formatter).get_last_names() #:note: ignores "von" part
if len(last_names) < 3:
last_names = ' and '.join(last_names)
else:
last_names = last_names[0] + ' et al.'
formatted_list.append( ('%s' + name_date_sep + '%s')%(last_names, year) )
#to cite by number can use this instead:
#formatted_list.append('%d'%entry.citation_rank)
style_logger.debug("Exiting format_inline_cite.")
return '(' + CITEREF_TEMPLATE['citeref_sep'].join(formatted_list)+')'
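# Illustrative note (not part of the original module): for two entries whose
# formatted last names are "Smith" and "Jones and Brown" with years 2005 and
# 2010, format_inline_cite returns something like "(Smith 2005, Jones and Brown 2010)".
# The separator between citations comes from CITEREF_TEMPLATE['citeref_sep']
# (", " is only an assumed example value); missing entries are rendered as "?"
# and three or more authors collapse to "Lastname et al.".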
|
xfumihiro/powerline
|
powerline/lint/imp.py
|
Python
|
mit
| 1,573
| 0.028043
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
from powerline.lint.selfcheck import havemarks
class WithPath(object):
def __init__(self, import_paths):
self.import_paths = import_paths
def __enter__(self):
self.oldpath = sys.path
sys.path = self.import_paths + sys.path
def __exit__(self, *args):
sys.path = self.oldpath
def import_function(function_type, name, data, context, echoerr, module):
havemarks(name, module)
with WithPath(data['import_paths']):
try:
func = getattr(__import__(str(module), fromlist=[str(name)]), str(name))
except ImportError:
echoerr(context='Error while checking segments (key {key})'.format(key=context.key),
context_mark=name.mark,
problem='failed to import module {0}'.format(module),
problem_mark=module.mark)
return None
except AttributeError:
echoerr(context='Error while loading {0} function (key {key})'.format(function_type, key=context.key),
problem='failed to load function {0} from module {1}'.format(name, module),
problem_mark=name.mark)
return None
if not callable(func):
echoerr(context='Error while checking segments (key {key})'.format(key=context.key),
context_mark=name.mark,
problem='imported “function” {0} from module {1} is not callable'.format(name, module),
problem_mark=module.mark)
return None
return func
|
def impo
|
rt_segment(*args, **kwargs):
return import_function('segment', *args, **kwargs)
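# Illustrative sketch (not part of the original module; the path below is a
# hypothetical example): WithPath temporarily prepends the configured import
# paths while a segment or other function is resolved, e.g.
#
#     with WithPath(['/home/user/.config/powerline']):
#         mod = __import__('my_segments')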
|
javierwilson/forocacao
|
forocacao/app/migrations/0006_auto_20160808_1041.py
|
Python
|
bsd-3-clause
| 1,212
| 0.0033
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0005_question_event'),
]
operations = [
migrations.CreateModel(
name='Topic',
fields=[
('id', models.AutoField(verbose_name='ID', seria
|
lize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200, verbose_name='Nombre')),
('weight', models.IntegerField()),
],
options={
'ordering': ['weight'],
'verbose_name': 'Tema',
'verbose_name_plural': 'Temas',
},
),
migrations.AlterField(
model_name='organization'
|
,
name='type',
field=models.CharField(blank=True, max_length=1, null=True, choices=[('O', 'Organizador'), ('E', 'Exhibidor'), ('S', 'Speaker')]),
),
migrations.AddField(
model_name='organization',
name='topic',
field=models.ForeignKey(verbose_name='Topic', blank=True, to='app.Topic', null=True),
),
]
|
mahabs/nitro
|
nssrc/com/citrix/netscaler/nitro/resource/config/authentication/authenticationwebauthpolicy_systemglobal_binding.py
|
Python
|
apache-2.0
| 5,383
| 0.036597
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authenticationwebauthpolicy_systemglobal_binding(base_resource) :
""" Binding class showing the systemglobal that can be bound to authenticationwebauthpolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._activepolicy = 0
self._name = ""
self.___count = 0
@property
def boundto(self) :
"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def name(self) :
"""Name of the WebAuth policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the WebAuth policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
@property
def activepolicy(self) :
try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(authenticationwebauthpolicy_systemglobal_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authenticationwebauthpolicy_systemglobal_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
""" Use this API to fetch authenticationwebauthpolicy_systemglobal_binding resources.
"""
try :
obj = authenticationwebauthpolicy_systemglobal_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
""" Use this API to fetch filtered set of authenticationwebauthpolicy_systemglobal_binding resources.
		Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = authenticationwebauthpolicy_systemglobal_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
""" Use this API to count authenticationwebauthpolicy_systemglobal_binding resources configued on NetScaler.
"""
try :
obj = authenticationwebauthpolicy_systemglobal_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___cou
|
nt']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
""" Use this API to count the filtered set of authenticationwebauthpolicy_systemglobal_binding resources.
		Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = authenticationwebauthpolicy_systemglobal_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(servic
|
e, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class authenticationwebauthpolicy_systemglobal_binding_response(base_response) :
def __init__(self, length=1) :
self.authenticationwebauthpolicy_systemglobal_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authenticationwebauthpolicy_systemglobal_binding = [authenticationwebauthpolicy_systemglobal_binding() for _ in range(length)]
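# Illustrative usage sketch (not part of the generated SDK file; `client` is an
# assumed, already authenticated nitro service session):
#
#     bindings = authenticationwebauthpolicy_systemglobal_binding.get(client, "my_webauth_policy")
#     total = authenticationwebauthpolicy_systemglobal_binding.count(client, "my_webauth_policy")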
|
ZeitOnline/zeit.cms
|
src/zeit/cms/tagging/testing.py
|
Python
|
bsd-3-clause
| 5,795
| 0
|
import collections
import lxml.objectify
import mock
import zeit.cms.repository.interfaces
import zeit.cms.tagging.interfaces
import zeit.cms.tagging.tag
import zope.component
import zope.interface
NAMESPACE = "http://namespaces.zeit.de/CMS/tagging"
KEYWORD_PROPERTY = ('testtags', NAMESPACE)
class DummyTagger(object):
zope.component.adapts(zeit.cms.repository.interfaces.IDAVContent)
zope.interface.implements(zeit.cms.tagging.interfaces.ITagger)
def __init__(self, context):
self.context = context
@property
def whitelist(self):
return zope.component.getUtility(
zeit.cms.tagging.interfaces.IWhitelist)
@property
def dav_properties(self):
return zeit.connector.interfaces.IWebDAVProperties(self.context)
def __iter__(self):
return iter(self.keys())
def __len__(self):
return len(self.keys())
def __contains__(self, key):
return key in self.keys()
def keys(self):
keys = self.dav_properties.get(KEYWORD_PROPERTY, '').split('|')
return tuple(keys) if keys != [''] else ()
def values(self):
return (self[x] for x in self.keys())
def __getitem__(self, key):
if key not in self.keys():
raise KeyError(key)
return self.whitelist.get(key)
def __setitem__(self, key, value):
keys = list(self.keys())
if key not in keys:
keys.append(key)
self.dav_properties[KEYWORD_PROPERTY] = '|'.join(keys)
def __delitem__(self, key):
keys = list(self.keys())
keys.remove(key)
self.dav_properties[KEYWORD_PROPERTY] = '|'.join(keys)
def updateOrder(self, order):
order = list(order) # people are fond of passing in generators
if set(order) != set(self.keys()):
raise ValueError(
'Must pass in the same keys already present %r, not %r'
% (self.keys(), order))
self.dav_properties[KEYWORD_PROPERTY] = '|'.join(order)
def update(self):
pass
def set_pinned(self, keys):
pass
def to_xml(self):
return None
links = {}
pinned = {}
class DummyWhitelist(object):
zope.interface.implements(zeit.cms.tagging.interfaces.IWhitelist)
tags = {
'testtag': 'Testtag',
'testtag2': 'Testtag2',
'testtag3': 'Testtag3',
}
location_tags = {
'hannover': u'Hannover',
'paris': u'Paris'
}
def search(self, term):
term = term.lower()
return [FakeTag(code=k, label=v)
for k, v in self.tags.items() if term in v.lower()]
def locations(self, term):
term = term.lower()
return [FakeTag(code=key, label=label)
for key, label in self.location_tags.items()
if term in label.lower()]
def get(self, id):
if id in self.tags:
return FakeTag(code=id, label=self.tags[id])
return None
class FakeTags(collections.OrderedDict):
def __init__(self):
super(FakeTags, self).__init__()
self.updateOrder = mock.Mock()
self.update = mock.Mock()
def __contains__(self, key):
return key in list(self)
def set_pinned(self, keys):
for tag in self.values():
tag.pinned = tag.code in keys
@property
def pinned(self):
return [x.code for x in self.values() if x.pinned]
@property
def links(self):
config = zope.app.appsetup.product.getProductConfiguration('zeit.cms')
live_prefix = config['live-prefix']
return {x.uniqueId: live_prefix + x.link
for x in self.values() if x.link}
def to_xml(self):
node = lxml.objectify.E.tags(*[
lxml.objectify.E.tag(x.label) for x in self.values()])
return node
class FakeTag(object):
"""Fake implementation of ITag for tests."""
zope.interface.implements(zeit.cms.tagging.interfaces.ITag)
def __init__(self, code, label):
self.label = label
self.code = code
self.pinned = False
self.__name__ = self.code # needed to fulfill `ICMSContent`
self.link = None
@property
def uniqueId(self):
return (zeit.cms.tagging.interfaces.ID_NAMESPACE +
self.code.encode('unicode_escape'))
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
return self.code == other.code and self.pinned == other.pinned
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = FakeTag(code=code, label=code)
return tag
def setup_tags(self, *codes):
tags = FakeTags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
self.whitelist_tags = {tag.code: tag.label for tag in tags.values()}
whitelist = zope.component.queryUtility(
zeit.cms.tagging.interfaces.IWhitelist)
if whitelist is not None: # only when ZCML is loaded
original_tags = whitelist.tags
whitelist.tags = self.whitelist_tags
def restore_original_tags_on_whitelist():
whitelist.tags = original_tags
self.addCleanup(restore_original_tags_on_whitelist)
return tags
def add_keyword_b
|
y_autocomplete(self, text, form_prefix='form'):
self.add_by_autocomplete(
text, 'id=%s.keywords.add' %
|
form_prefix)
def add_topicpage_link(self, tag):
tag.link = 'thema/%s' % tag.label.lower()
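# Illustrative usage sketch (hypothetical test case, not part of this module);
# the tag codes reuse entries from DummyWhitelist above:
#
#     class ExampleTest(SomeFunctionalTestCase, TaggingHelper):
#         def test_keywords_are_patched(self):
#             tags = self.setup_tags('testtag', 'testtag2')
#             self.assertIn('testtag', tags)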
|
madhav-datt/kgp-hms
|
src/workers/mess_manager.py
|
Python
|
mit
| 2,480
| 0.000806
|
#
# IIT Kharagpur - Hall Management System
# System to manage Halls of residences, Warden grant requests, student complaints
# hall worker attendances and salary payments
#
# MIT License
#
"""
@ authors: Madhav Datt, Avikalp Srivastava
"""
from ..database import db_func as db
from ..database import password_validation as pv
import worker
class MessManager(worker.Worker):
"""Contains details of Worker Instance
Attributes:
worker_ID: Integer to uniquely identify worker
name: String
hall_ID: Integer to uniquely identify hall
monthly_salary: Float
"""
def __init__(self, name, hall_ID, password, monthly_salary,
rebuild=False, worker_ID=None):
"""
Init MessManager with details as recruited by HMC or Warden
"""
# The rebuild flag, if true, denotes that the object is being made from
# data already present in the database
# If False, a new data row is added to the specific table
if not rebuild:
self.worker_ID = db.add("worker")
db.update("worker", self.worker_ID, "worker_type", "M")
self.password = password
else:
self.worker_ID = worker_ID
self._password = password
self.monthly_salary = monthly_salary
worker.Worker.__init__(self, self.worker_ID, name, hall_ID)
# password getter and setter functions
@property
def password(self):
return self._password
@password.se
|
tter
def password(self, password):
self._password = pv.hash_password(password)
db.update("worker", self.worker_ID, "password", self.password)
# monthly_salary getter and setter functions
@property
def monthly_salary(self):
return self._monthly_salary
@monthly_salary.setter
def monthly_salary(self, monthly_salary):
self._monthly_salary = monthly_salary
db.update("worker", se
|
lf.worker_ID, "monthly_salary", self.monthly_salary)
def compute_mess_payment(self, student_table):
"""
Compute total money due to hall in form of mess payments
Sum of each student resident's mess charge
Pass parameter student_table = dbr.rebuild("student")
"""
mess_total = 0.
for key in student_table:
if student_table[key].hall_ID == self.hall_ID:
mess_total = mess_total + student_table[key].mess_charge
return mess_total
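# Illustrative usage sketch (not part of the original module); `student_table`
# is assumed to be the rebuilt student table referred to in the docstring above
# (e.g. the result of a call like dbr.rebuild("student")):
#
#     manager = MessManager("Asha", hall_ID=3, password="secret", monthly_salary=12000.0)
#     total_due = manager.compute_mess_payment(student_table)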
|
leighpauls/k2cro4
|
third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/base_unittest.py
|
Python
|
bsd-3-clause
| 22,390
| 0.002635
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import optparse
import sys
import tempfile
import unittest
from webkitpy.common.system.executive import Executive, ScriptError
from webkitpy.common.system import executive_mock
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.system.path import abspath_to_uri
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.executive_mock import MockExecutive, MockExecutive2
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.layout_tests.port import Port, Driver, DriverOutput
from webkitpy.layout_tests.port.test import add_unit_tests_to_mock_filesystem, TestPort
class PortTest(unittest.TestCase):
def make_port(self, executive=None, with_tests=False, **kwargs):
host = MockSystemHost()
if executive:
host.executive = executive
if with_tests:
add_unit_tests_to_mock_filesystem(host.filesystem)
return TestPort(host, **kwargs)
return Port(host, **kwargs)
def test_default_child_processes(self):
port = self.make_port()
self.assertNotEquals(port.default_child_processes(), None)
def test_format_wdiff_output_as_html(self):
output = "OUTPUT %s %s %s" % (Port._WDIFF_DEL, Port._WDIFF_ADD, Port._WDIFF_END)
html = self.make_port()._format_wdiff_output_as_html(output)
expected_html = "<head><style>.del { background: #faa; } .add { background: #afa; }</style></head><pre>OUTPUT <span class=del> <span class=add> </span></pre>"
self.assertEqual(html, expected_html)
def test_wdiff_command(self):
port = self.make_port()
port._path_to_wdiff = lambda: "/path/to/wdiff"
command = port._wdiff_command("/actual/path", "/expected/path")
expected_command = [
"/path/to/wdiff",
"--start-delete=##WDIFF_DEL##",
"--end-delete=##WDIFF_END##",
"--start-insert=##WDIFF_ADD##",
"--end-insert=##WDIFF_END##",
"/actual/path",
"/expected/path",
]
self.assertEqual(command, expected_command)
def _file_with_contents(self, contents, encoding="utf-8"):
new_file = tempfile.NamedTemporaryFile()
new_file.write(contents.encode(encoding))
new_file.flush()
return new_file
def test_pretty_patch_os_error(self):
port = self.make_port(executive=executive_mock.MockExecutive2(exception=OSError))
oc = OutputCapture()
oc.capture_output()
self.assertEqual(port.pretty_patch_text("patch.txt"),
port._pretty_patch_error_html)
# This tests repeated calls to make sure we cache the result.
self.assertEqual(port.pretty_patch_text("patch.txt"),
port._pretty_patch_error_html)
oc.restore_output()
def test_pretty_patch_script_error(self):
# FIXME: This is some ugly white-box test hacking ...
port = self.make_port(executive=executive_mock.MockExecutive2(exception=ScriptError))
port._pretty_patch_available = True
self.assertEqual(port.pretty_patch_text("patch.txt"),
port._pretty_patch_error_html)
# This tests repeated calls to make sure we cache the result.
self.assertEqual(port.pretty_patch_text("patch.txt"),
port._pretty_patch_error_html)
def integration_test_run_wdiff(self):
executive = Executive()
# This may fail on some systems. We could ask the port
# object for the wdiff path, but since we don't know what
# port object to use, this is sufficient for now.
try:
wdiff_path = executive.run_command(["which", "wdiff"]).rstrip()
except Exception, e:
wdiff_path = None
port = self.make_port(executive=executive)
port._path_to_wdiff = lambda: wdiff_path
if wdiff_path:
# "with tempfile.NamedTemporaryFile() as actual" does not seem to work in Python 2.5
actual = self._file_with_contents(u"foo")
expected = self._file_with_contents(u"bar")
wdiff = port._run_wdiff(actual.name, expected.name)
expected_wdiff = "<head><style>.del { background: #faa; } .add { background: #afa; }</style></head><pre><span class=del>foo</span><span class=add>bar</span></pre>"
self.assertEqual(wdiff, expected_wdiff)
# Running the full wdiff_text method should give the same result.
port._wdiff_available = True # In case it's somehow already disabled.
wdiff = port.wdiff_text(actual.name, expected.name)
self.assertEqual(wdiff, expected_wdiff)
# wdiff should still be available after running wdiff_text with a valid diff.
self.assertTrue(port._wdiff_available)
actual.close()
expected.close()
# Bogus paths should raise a script error.
self.assertRaises(ScriptError, port._run_wdiff, "/does/not/exist", "/does/not/exist2")
self.assertRaises(ScriptError, port.wdiff_text, "/does/not/exist", "/does/not/exist2")
# wdiff will still be available after running wdiff_text with invalid paths.
self.assertTrue(port._wdiff_available)
# If wdiff does not exist _run_wdiff should throw an OSError.
port._path_to_wdiff = lambda: "/invalid/path/to/wdiff"
self.assertRaises(OSError, port._run_wdiff, "foo", "bar")
# wdiff_text should not throw an error if wdiff does not exist.
self.assertEqual(port.wdiff_text("foo", "bar"), "")
# However wdiff should not be available after running wdiff_text if wdiff is missing.
self.assertFalse(port._wdiff_available)
def test_wdiff_text(self):
port = self.make_port()
port.wdiff_available = lambda: True
port._run_wdiff = lambda a, b: 'PASS'
self.assertEqual('PASS', port.wdiff_text(None, None))
def test_diff_text(self):
port = self.make_port()
# Make sure that we don't run into decoding exceptions when the
# filenames are unicode, with regular or malformed input (expected or
# actual input is always raw bytes, not unicode).
port.diff_text('exp', 'act', 'exp.txt', 'act.txt')
port.diff_text('exp', 'act', u'exp.txt', 'act.txt')
port.diff_text('exp', 'act', u'
|
a\xac\u1234\u20ac\U00008000', 'act.txt')
port.d
|
iff_text('exp' + chr(255), 'act', 'exp.txt'
|
cloudera/hue
|
desktop/core/ext-py/Django-1.11.29/tests/gis_tests/gdal_tests/test_raster.py
|
Python
|
apache-2.0
| 21,062
| 0.000902
|
"""
gdalinfo tests/gis_tests/data/rasters/raster.tif:
Driver: GTiff/GeoTIFF
Files: tests/gis_tests/data/rasters/raster.tif
Size is 163, 174
Coordinate System is:
PROJCS["NAD83 / Florida GDL Albers",
GEOGCS["NAD83",
DATUM["North_American_Datum_1983",
SPHEROID["GRS 1980",6378137,298.2572221010002,
AUTHORITY["EPSG","7019"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6269"]],
PRIMEM["Greenwich",0],
UNIT["degree",0.0174532925199433],
AUTHORITY["EPSG","4269"]],
PROJECTION["Albers_Conic_Equal_Area"],
PARAMETER["standard_parallel_1",24],
PARAMETER["standard_parallel_2",31.5],
PARAMETER["latitude_of_center",24],
PARAMETER["longitude_of_center",-84],
PARAMETER["false_easting",400000],
PARAMETER["false_northing",0],
UNIT["metre",1,
AUTHORITY["EPSG","9001"]],
AUTHORITY["EPSG","3086"]]
Origin = (511700.468070655711927,435103.377123198588379)
Pixel Size = (100.000000000000000,-100.000000000000000)
Metadata:
AREA_OR_POINT=Area
Image Structure Metadata:
INTERLEAVE=BAND
Corner Coordinates:
Upper Left ( 511700.468, 435103.377) ( 82d51'46.16"W, 27d55' 1.53"N)
Lower Left ( 511700.468, 417703.377) ( 82d51'52.04"W, 27d45'37.50"N)
Upper Right ( 528000.468, 435103.377) ( 82d41'48.81"W, 27d54'56.30"N)
Lower Right ( 528000.468, 417703.377) ( 82d41'55.54"W, 27d45'32.28"N)
Center ( 519850.468, 426403.377) ( 82d46'50.64"W, 27d50'16.99"N)
Band 1 Block=163x50 Type=Byte, ColorInterp=Gray
NoData Value=15
"""
import os
import struct
import temp
|
file
from django.contrib.gis.gdal import GDAL_VERSION, GDALRaster
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.raster.band import GDALBand
from django.contrib.gis.shortcuts import numpy
from django.test import SimpleTestCase
from django.utils import six
from django.utils._os import upath
from ..data.rasters.textrasters import JSON_RASTER
class GDALRasterTests(SimpleTestCase):
"""
Test a GDALRaster instance created from a file (GeoTiff)
|
.
"""
def setUp(self):
self.rs_path = os.path.join(os.path.dirname(upath(__file__)),
'../data/rasters/raster.tif')
self.rs = GDALRaster(self.rs_path)
def test_rs_name_repr(self):
self.assertEqual(self.rs_path, self.rs.name)
self.assertRegex(repr(self.rs), r"<Raster object at 0x\w+>")
def test_rs_driver(self):
self.assertEqual(self.rs.driver.name, 'GTiff')
def test_rs_size(self):
self.assertEqual(self.rs.width, 163)
self.assertEqual(self.rs.height, 174)
def test_rs_srs(self):
self.assertEqual(self.rs.srs.srid, 3086)
self.assertEqual(self.rs.srs.units, (1.0, 'metre'))
def test_rs_srid(self):
rast = GDALRaster({
'width': 16,
'height': 16,
'srid': 4326,
})
self.assertEqual(rast.srid, 4326)
rast.srid = 3086
self.assertEqual(rast.srid, 3086)
def test_geotransform_and_friends(self):
# Assert correct values for file based raster
self.assertEqual(
self.rs.geotransform,
[511700.4680706557, 100.0, 0.0, 435103.3771231986, 0.0, -100.0]
)
self.assertEqual(self.rs.origin, [511700.4680706557, 435103.3771231986])
self.assertEqual(self.rs.origin.x, 511700.4680706557)
self.assertEqual(self.rs.origin.y, 435103.3771231986)
self.assertEqual(self.rs.scale, [100.0, -100.0])
self.assertEqual(self.rs.scale.x, 100.0)
self.assertEqual(self.rs.scale.y, -100.0)
self.assertEqual(self.rs.skew, [0, 0])
self.assertEqual(self.rs.skew.x, 0)
self.assertEqual(self.rs.skew.y, 0)
# Create in-memory rasters and change gtvalues
rsmem = GDALRaster(JSON_RASTER)
rsmem.geotransform = range(6)
self.assertEqual(rsmem.geotransform, [float(x) for x in range(6)])
self.assertEqual(rsmem.origin, [0, 3])
self.assertEqual(rsmem.origin.x, 0)
self.assertEqual(rsmem.origin.y, 3)
self.assertEqual(rsmem.scale, [1, 5])
self.assertEqual(rsmem.scale.x, 1)
self.assertEqual(rsmem.scale.y, 5)
self.assertEqual(rsmem.skew, [2, 4])
self.assertEqual(rsmem.skew.x, 2)
self.assertEqual(rsmem.skew.y, 4)
self.assertEqual(rsmem.width, 5)
self.assertEqual(rsmem.height, 5)
def test_rs_extent(self):
self.assertEqual(
self.rs.extent,
(511700.4680706557, 417703.3771231986, 528000.4680706557, 435103.3771231986)
)
def test_rs_bands(self):
self.assertEqual(len(self.rs.bands), 1)
self.assertIsInstance(self.rs.bands[0], GDALBand)
def test_memory_based_raster_creation(self):
# Create uint8 raster with full pixel data range (0-255)
rast = GDALRaster({
'datatype': 1,
'width': 16,
'height': 16,
'srid': 4326,
'bands': [{
'data': range(256),
'nodata_value': 255,
}],
})
# Get array from raster
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Assert data is same as original input
self.assertEqual(result, list(range(256)))
def test_file_based_raster_creation(self):
# Prepare tempfile
rstfile = tempfile.NamedTemporaryFile(suffix='.tif')
# Create file-based raster from scratch
GDALRaster({
'datatype': self.rs.bands[0].datatype(),
'driver': 'tif',
'name': rstfile.name,
'width': 163,
'height': 174,
'nr_of_bands': 1,
'srid': self.rs.srs.wkt,
'origin': (self.rs.origin.x, self.rs.origin.y),
'scale': (self.rs.scale.x, self.rs.scale.y),
'skew': (self.rs.skew.x, self.rs.skew.y),
'bands': [{
'data': self.rs.bands[0].data(),
'nodata_value': self.rs.bands[0].nodata_value,
}],
})
# Reload newly created raster from file
restored_raster = GDALRaster(rstfile.name)
self.assertEqual(restored_raster.srs.wkt, self.rs.srs.wkt)
self.assertEqual(restored_raster.geotransform, self.rs.geotransform)
if numpy:
numpy.testing.assert_equal(
restored_raster.bands[0].data(),
self.rs.bands[0].data()
)
else:
self.assertEqual(restored_raster.bands[0].data(), self.rs.bands[0].data())
def test_offset_size_and_shape_on_raster_creation(self):
rast = GDALRaster({
'datatype': 1,
'width': 4,
'height': 4,
'srid': 4326,
'bands': [{
'data': (1,),
'offset': (1, 1),
'size': (2, 2),
'shape': (1, 1),
'nodata_value': 2,
}],
})
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Band data is equal to nodata value except on input block of ones.
self.assertEqual(
result,
[2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2]
)
def test_set_nodata_value_on_raster_creation(self):
# Create raster filled with nodata values.
rast = GDALRaster({
'datatype': 1,
'width': 2,
'height': 2,
'srid': 4326,
'bands': [{'nodata_value': 23}],
})
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# All band data is equal to nodata value.
self.assertEqual(result, [23, ] * 4)
def test_set_nodata_none_on_raster_creation(self):
if GDAL_VERSION < (2, 1):
self.skipTest("GDAL >= 2.1 is required for this test.")
# Create raster without data and without nodata value.
rast = G
|
crf1111/Bio-Informatics-Learning
|
Bio-StrongHold/src/Constructing_a_De_Bruijn_Graph.py
|
Python
|
mit
| 685
| 0.013139
|
import os
import sys
from Bio.Seq import Seq
def main(*a
|
rgs, **kwargs):
fpath = os.path.join(os.getcwd(), args[-2])
tmp = []
with open(fpath,'r') as f:
for line in f:
txt = line.strip()
tmp.append(txt)
S1 = set(tmp)
S2 = set([str(S
|
eq(s).reverse_complement()) for s in tmp])
S = S1.union(S2)
res = []
for s in S:
res.append((s[:-1],s[1:]))
for t1,t2 in res:
print '(%s, %s)' % (t1,t2)
out = os.path.join(os.getcwd(),args[-1])
f = open(out, 'w')
for t1,t2 in res:
txt = '(%s, %s)\n' % (t1,t2)
f.write(txt)
f.close()
if __name__ == '__main__':
main(*sys.argv)
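# Worked example (illustrative only): for an input read "TGAT", its reverse
# complement "ATCA" is added to the set as well, and each string contributes the
# edge (prefix, suffix) of length k-1, so the printed adjacency list contains
# (TGA, GAT) and (ATC, TCA), in no particular order.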
|
mozilla/popcorn_maker
|
popcorn_gallery/reports/views.py
|
Python
|
bsd-3-clause
| 1,003
| 0
|
from django.contrib import messages
from django.shortcuts import render, redirect
from django.templ
|
ate.loader import render_to_string
from tower import ugettext as _
from .forms import ReportForm
from ..base.utils import notify_admins
from ..base.decorators import throttle_view
@throttle_view(methods=['POST'], duration=30)
def report_form(request):
if request.method == 'POST':
form = ReportForm(request.POST)
if form.is_valid():
report = form.save()
context = {'report': report}
|
subject = render_to_string('reports/email_subject.txt', context)
subject = ''.join(subject.splitlines())
body = render_to_string('reports/email_body.txt', context)
notify_admins(subject, body)
messages.success(request, _('Report sent successfully'))
return redirect('homepage')
else:
form = ReportForm()
context = {'form': form}
return render(request, 'reports/form.html', context)
|
memsharded/conan
|
conans/test/functional/old/package_integrity_test.py
|
Python
|
mit
| 2,451
| 0.00204
|
import os
import unittest
from conans.model.ref import ConanFileReference, PackageReference
from conans.test.utils.conanfile import TestConanFile
from conans.test.utils.tools import TestClient, TestServe
|
r,\
NO_SETTINGS_PACKAGE_ID
from conans.util.files import set_dirty
class PackageIntegrityTest(unittest.TestCase):
def remove_locks_test(self):
client = TestClient()
client.save({"conanf
|
ile.py": str(TestConanFile())})
client.run("create . lasote/testing")
self.assertNotIn('does not contain a number!', client.out)
ref = ConanFileReference.loads("Hello/0.1@lasote/testing")
conan_folder = client.cache.package_layout(ref).base_folder()
self.assertIn("locks", os.listdir(conan_folder))
self.assertTrue(os.path.exists(conan_folder + ".count"))
self.assertTrue(os.path.exists(conan_folder + ".count.lock"))
client.run("remove * --locks", assert_error=True)
self.assertIn("ERROR: Specifying a pattern is not supported", client.out)
client.run("remove", assert_error=True)
self.assertIn('ERROR: Please specify a pattern to be removed ("*" for all)', client.out)
client.run("remove --locks")
self.assertNotIn("locks", os.listdir(conan_folder))
self.assertFalse(os.path.exists(conan_folder + ".count"))
self.assertFalse(os.path.exists(conan_folder + ".count.lock"))
def upload_dirty_test(self):
test_server = TestServer([], users={"lasote": "mypass"})
client = TestClient(servers={"default": test_server},
users={"default": [("lasote", "mypass")]})
client.save({"conanfile.py": str(TestConanFile())})
client.run("create . lasote/testing")
ref = ConanFileReference.loads("Hello/0.1@lasote/testing")
pref = PackageReference(ref, NO_SETTINGS_PACKAGE_ID)
package_folder = client.cache.package_layout(pref.ref).package(pref)
set_dirty(package_folder)
client.run("upload * --all --confirm", assert_error=True)
self.assertIn("ERROR: Package %s is corrupted, aborting upload" % str(pref),
client.out)
self.assertIn("Remove it with 'conan remove Hello/0.1@lasote/testing -p=%s'"
% NO_SETTINGS_PACKAGE_ID, client.out)
client.run("remove Hello/0.1@lasote/testing -p=%s -f" % NO_SETTINGS_PACKAGE_ID)
client.run("upload * --all --confirm")
|
JulienMcJay/eclock
|
windows/kivy/kivy/core/__init__.py
|
Python
|
gpl-2.0
| 4,391
| 0
|
'''
Core Abstraction
================
This module defines the abstraction layers for our core providers and their
implementations. For further information, please refer to
:ref:`architecture` and the :ref:`providers` section of the documentation.
In most cases, you shouldn't directly use a library that's already covered
by the core abstraction. Always try to use our providers first.
In case we are missing a feature or method, please let us know by
opening a new Bug report instead of relying on your library.
.. warning::
These are **not** widgets! These are just abstractions of the respective
functionality. For example, you cannot add a core image to your window.
You have to use the image **widget** class instead. If you're really
looking for widgets, please refer to :mod:`kivy.uix` instead.
'''
import os
import kivy
from kivy.logger import Logger
class CoreCriticalException(Exception):
pass
def core_select_lib(category, llist, create_instance=False, base='kivy.core'):
if 'KIVY_DOC' in os.environ:
return
category = category.lower()
libs_ignored = []
for option, modulename, classname in llist:
try:
# module activated in config ?
try:
if option not in kivy.kivy_options[category]:
libs_ignored.append(modulename)
Logger.debug(
'{0}: Provider <{1}> ignored by config'.format(
category.capitalize(), option))
continue
except KeyError:
pass
# import module
mod = __import__(name='{2}.{0}.{1}'.format(
category, modulename, base),
globals=globals(),
locals=locals(),
fromlist=[modulename], level=0)
cls = mod.__getattribute__(classname)
# ok !
Logger.info('{0}: Provider: {1}{2}'.format(
category.capitalize(), option,
'({0} ignored)'.format(libs_ignored) if libs_ignored else ''))
if create_instance:
cls = cls()
return cls
except ImportError as e:
libs_ignored.append(modulename)
Logger.debug('{0}: Ignored <{1}> (import error)'.format(
category.capitalize(), option))
Logger.trace('', exc_info=e)
except CoreCriticalException as e:
Logger.error('{0}: Unable to use {1}'.format(
category.capitalize(), option))
Logger.error(
'{0}: The module raised an imp
|
ortant error: {1!r}'.format(
category.capitalize(), e.message))
raise
except Exception as e:
libs_ignored.append(modulename)
Logger.trace('{0}: Unable to use {1}'.format(
category.capitalize(), option, category))
Logger.trace('', exc_info=e)
Lo
|
gger.critical(
'{0}: Unable to find any valuable {1} provider at all!'.format(
category.capitalize(), category.capitalize()))
def core_register_libs(category, libs, base='kivy.core'):
if 'KIVY_DOC' in os.environ:
return
category = category.lower()
libs_loaded = []
libs_ignored = []
for option, lib in libs:
try:
# module activated in config ?
if option not in kivy.kivy_options[category]:
Logger.debug('{0}: option <{1}> ignored by config'.format(
category.capitalize(), option))
libs_ignored.append(lib)
continue
# import module
__import__(name='{2}.{0}.{1}'.format(category, lib, base),
globals=globals(),
locals=locals(),
fromlist=[lib],
level=0)
libs_loaded.append(lib)
except Exception as e:
Logger.trace('{0}: Unable to use <{1}> as loader!'.format(
category.capitalize(), option))
Logger.trace('', exc_info=e)
libs_ignored.append(lib)
Logger.info('{0}: Providers: {1} {2}'.format(
category.capitalize(),
', '.join(libs_loaded),
'({0} ignored)'.format(
', '.join(libs_ignored)) if libs_ignored else ''))
return libs_loaded
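# Illustrative usage sketch (the provider tuples below are examples only, not an
# authoritative list): a core module typically picks its provider like this:
#
#     Audio = core_select_lib('audio', (
#         ('gstplayer', 'audio_gstplayer', 'SoundGstplayer'),
#         ('sdl2', 'audio_sdl2', 'SoundSDL2'),
#     ))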
|
brechtm/rinohtype
|
tests/test_pdf_reader.py
|
Python
|
agpl-3.0
| 3,238
| 0
|
# This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gn
|
u.org/licenses/.
import pytest
from io import BytesIO
from rinoh.backend.pdf import cos
from rinoh.backend.pdf.reader import PDFObjectReader
def test_read_bool
|
ean():
def test_boolean(bytes_boolean, boolean):
reader = PDFObjectReader(BytesIO(bytes_boolean))
result = reader.next_item()
assert isinstance(result, cos.Boolean) and bool(result) == boolean
test_boolean(b'true', True)
test_boolean(b'false', False)
def test_read_integer():
def test_integer(bytes_integer, integer):
reader = PDFObjectReader(BytesIO(bytes_integer))
result = reader.next_item()
assert isinstance(result, cos.Integer) and result == integer
test_integer(b'123', 123)
test_integer(b'43445', 43445)
test_integer(b'+17', 17)
test_integer(b'-98', -98)
test_integer(b'0', 0)
def test_read_real():
def test_real(bytes_real, real):
reader = PDFObjectReader(BytesIO(bytes_real))
result = reader.next_item()
assert isinstance(result, cos.Real) and result == real
test_real(b'34.5', 34.5)
test_real(b'-3.62', -3.62)
test_real(b'+123.6', 123.6)
test_real(b'4.', 4.0)
test_real(b'-.002', -.002)
test_real(b'0.0', 0.0)
def test_read_name():
def test_name(bytes_name, unicode_name):
reader = PDFObjectReader(BytesIO(bytes_name))
result = reader.next_item()
assert isinstance(result, cos.Name) and str(result) == unicode_name
test_name(b'/Adobe#20Green', 'Adobe Green')
test_name(b'/PANTONE#205757#20CV', 'PANTONE 5757 CV')
test_name(b'/paired#28#29parentheses', 'paired()parentheses')
test_name(b'/The_Key_of_F#23_Minor', 'The_Key_of_F#_Minor')
test_name(b'/A#42', 'AB')
def test_read_dictionary():
input = b"""
<< /Type /Example
/Subtype /DictionaryExample
/Version 0.01
/IntegerItem 12
/StringItem (a string)
/Subdictionary << /Item1 0.4
/Item2 true
/LastItem (not!)
/VeryLastItem (OK)
>>
>>"""
reader = PDFObjectReader(BytesIO(input))
result = reader.next_item()
expected = cos.Dictionary([('Type', cos.Name('Example')),
('Subtype', cos.Name('DictionaryExample')),
('Version', cos.Real(0.01)),
('IntegerItem', cos.Integer(12)),
('StringItem', cos.String('a string')),
('Subdictionary', cos.Dictionary(
[('Item1', cos.Real(0.4)),
('Item2', cos.Boolean(True)),
('LastItem', cos.String('not!')),
('VeryLastItem', cos.String('OK'))]))])
assert isinstance(result, cos.Dictionary)
assert dict(result) == dict(expected)
|
jbradberry/django-diplomacy
|
setup.py
|
Python
|
mit
| 915
| 0.001093
|
import setuptools
with open("README.rst") as f:
long_description = f.read()
setuptools.setup(
name='django-diplomacy',
version="0.8.0",
author='Jeff Bradberry',
author_ema
|
il='jeff.bradberry@gmail.com',
description='A play-by-web app for Diplomacy',
long_description=long_description,
    long_description_content_type='text/x-rst',
url='http://github.com/j
|
bradberry/django-diplomacy',
packages=setuptools.find_packages(),
entry_points={
'turngeneration.plugins': ['diplomacy = diplomacy.plugins:TurnGeneration'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Games/Entertainment :: Turn Based Strategy'
],
)
|
rboman/progs
|
sandbox/pyopengl/ball_glut.py
|
Python
|
apache-2.0
| 1,601
| 0.021861
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# OK anaconda2 + freeglut+pyopengl
# on Windows:
# - download freeglut here: http://freeglut.sourceforge.net/
# - rename freeglut.dll to freeglut64.vc14.dll and put it in the path
# if it fails to load, check what happens in C:\Python37\Lib\site-packages\OpenGL\platform\win32.py
from OpenGL.GLUT import *
from OpenGL.GLU import *
from OpenGL.GL import *
import sys
name = b'ball_glut'
def main():
glutInit(sys.argv)
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
glutInitWindowSize(400,400)
glutCreateWindow(name)
glClearColor(0.,0.,0.,1.)
glShadeModel(GL_SMOOTH)
glEnable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
glEnable(GL_LIGHTING)
lightZeroPosition = [10.,4.,10.,1.]
lightZeroColor = [0.8,1.0,0.8,1.0] #green tinged
glLightfv(GL_LIGHT0, GL_POSITION, lightZeroPosition)
glLight
|
fv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor)
glLightf(GL_LIGHT0, GL_CONSTANT_ATTENUATION, 0.1)
glLightf(GL_LIGHT0, GL_LINEAR_ATTENUATION, 0.05)
glEnable(GL_LIGHT0)
glutDisplayFunc(display)
glMatrixMode(GL_PROJECTION)
gluPerspective(40.,1.,1.,40.)
glMatrixMode(GL_MODELVIEW)
gluLookAt(0,0,10,
0,0,0,
0,1,0)
glPushMatrix()
glutMainLoop()
return
def display():
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
|
glPushMatrix()
color = [1.0,0.,0.,1.]
glMaterialfv(GL_FRONT,GL_DIFFUSE,color)
glutSolidSphere(2,20,20)
glPopMatrix()
glutSwapBuffers()
return
if __name__ == '__main__': main()
|
mambocab/python-driver
|
tests/integration/cqlengine/statements/test_update_statement.py
|
Python
|
apache-2.0
| 3,975
| 0.001761
|
# Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/lic
|
enses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the sp
|
ecific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from cassandra.cqlengine.columns import Column, Set, List, Text
from cassandra.cqlengine.operators import *
from cassandra.cqlengine.statements import (UpdateStatement, WhereClause,
AssignmentClause, SetUpdateClause,
ListUpdateClause)
import six
class UpdateStatementTests(unittest.TestCase):
def test_table_rendering(self):
""" tests that fields are properly added to the select statement """
us = UpdateStatement('table')
self.assertTrue(six.text_type(us).startswith('UPDATE table SET'), six.text_type(us))
self.assertTrue(str(us).startswith('UPDATE table SET'), str(us))
def test_rendering(self):
us = UpdateStatement('table')
us.add_assignment(Column(db_field='a'), 'b')
us.add_assignment(Column(db_field='c'), 'd')
us.add_where(Column(db_field='a'), EqualsOperator(), 'x')
self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = %(0)s, "c" = %(1)s WHERE "a" = %(2)s', six.text_type(us))
us.add_where(Column(db_field='a'), NotEqualsOperator(), 'y')
self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = %(0)s, "c" = %(1)s WHERE "a" = %(2)s AND "a" != %(3)s', six.text_type(us))
def test_context(self):
us = UpdateStatement('table')
us.add_assignment(Column(db_field='a'), 'b')
us.add_assignment(Column(db_field='c'), 'd')
us.add_where(Column(db_field='a'), EqualsOperator(), 'x')
self.assertEqual(us.get_context(), {'0': 'b', '1': 'd', '2': 'x'})
def test_context_update(self):
us = UpdateStatement('table')
us.add_assignment(Column(db_field='a'), 'b')
us.add_assignment(Column(db_field='c'), 'd')
us.add_where(Column(db_field='a'), EqualsOperator(), 'x')
us.update_context_id(3)
self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = %(4)s, "c" = %(5)s WHERE "a" = %(3)s')
self.assertEqual(us.get_context(), {'4': 'b', '5': 'd', '3': 'x'})
def test_additional_rendering(self):
us = UpdateStatement('table', ttl=60)
us.add_assignment(Column(db_field='a'), 'b')
us.add_where(Column(db_field='a'), EqualsOperator(), 'x')
self.assertIn('USING TTL 60', six.text_type(us))
def test_update_set_add(self):
us = UpdateStatement('table')
us.add_update(Set(Text, db_field='a'), set((1,)), 'add')
self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = "a" + %(0)s')
def test_update_empty_set_add_does_not_assign(self):
us = UpdateStatement('table')
us.add_update(Set(Text, db_field='a'), set(), 'add')
self.assertFalse(us.assignments)
def test_update_empty_set_removal_does_not_assign(self):
us = UpdateStatement('table')
us.add_update(Set(Text, db_field='a'), set(), 'remove')
self.assertFalse(us.assignments)
def test_update_list_prepend_with_empty_list(self):
us = UpdateStatement('table')
us.add_update(List(Text, db_field='a'), [], 'prepend')
self.assertFalse(us.assignments)
def test_update_list_append_with_empty_list(self):
us = UpdateStatement('table')
us.add_update(List(Text, db_field='a'), [], 'append')
self.assertFalse(us.assignments)
|
sl2017/campos
|
campos_activity/wizards/campos_activity_signup_wiz.py
|
Python
|
agpl-3.0
| 10,652
| 0.008261
|
# -*- coding: utf-8 -*-
# Copyright 2017 Stein & Gabelgaard ApS
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, fields, models, _, exceptions
import logging
_logger = logging.getLogger(__name__)
class CamposActivitySignupMembers(mode
|
ls.TransientModel):
_name = 'campos.activity.signup.members'
_description = 'Activity Signup Members'
name = fields.Char('Name', size=64)
camp_age = fields.Integer(related='par_id.camp_age')
own_note = fields.Char(related='par_id.own_note')
par_id = fields.Many2one('campos.event.p
|
articipant', 'Participation', ondelete='cascade')
reg_id = fields.Many2one('event.registration', 'Registration', ondelete='cascade')
wiz_id = fields.Many2one('campos.activity.signup.wiz', 'Wizard', ondelete='cascade')
class CamposActivitySignupWiz(models.TransientModel):
_name = 'campos.activity.signup.wiz'
    name = fields.Char('Own Note', size=128, help='You can add a note for your own use. It will be shown on the activity list etc. It will NOT be read/answered by the Staff.')
state = fields.Selection([('step1', 'Select Activity'),
('step2', 'Select Participants')], 'State', default='step1')
reg_id = fields.Many2one('event.registration', 'Scout Group')
par_id = fields.Many2one('campos.event.participant', 'Single signup')
act_id = fields.Many2one('campos.activity.activity', '1. Select Activity', required=True, select=True, ondelete='cascade', domain=[('state', 'in', ['confirmed'])])
teaser = fields.Text(related='act_id.teaser', readonly=True)
audience_ids = fields.Many2many(related='act_id.audience_ids', readonly=True)
age_from = fields.Integer(related='act_id.age_from', readonly=True)
age_to = fields.Integer(related='act_id.age_to', readonly=True)
act_ins_id = fields.Many2one('campos.activity.instanse', '2. Select Period', required=True, select=True, ondelete='cascade')
seats = fields.Integer('3. Reserve Seats', required=True)
seats_reserved = fields.Integer('Reserved Seats')
ticket_id = fields.Many2one('campos.activity.ticket', 'Ticket', ondelete='set null')
seats_available = fields.Integer(related='act_ins_id.seats_available', readonly=True)
par_signup_ids = fields.Many2many('campos.activity.signup.members', relation="rel_act_signup_wiz", string='4. Signup participants')
@api.one
@api.constrains('seats', 'act_ins_id')
def _check_seats(self):
if self.seats <= 0:
raise exceptions.ValidationError(_("Reserved seats should be positive"))
if self.act_ins_id.seats_hard and not self.ticket_id:
if self.seats > self.act_ins_id.seats_available:
if self.act_ins_id.seats_available > 0:
raise exceptions.ValidationError(_("Sorry! Only %d seats available") % self.act_ins_id.seats_available)
else:
raise exceptions.ValidationError(_("Sorry! No seats available"))
if self.act_ins_id.seats_hard and self.ticket_id and self.seats > self.seats_reserved:
raise exceptions.ValidationError(_("Sorry! You can't increase the number of reserved seats. Only %d reserved") % (self.seats_reserved))
if self.act_ins_id.seats_available <= 0 and not self.ticket_id:
raise exceptions.ValidationError(_("Sorry! No seats available"))
return True
@api.onchange('act_ins_id')
def _onchange_act_ins_id(self):
if self.act_ins_id and self.act_ins_id.seats_available <= 0:
return {
'warning': {'title': _("Warning"), 'message': _("No seats available for selected period!")},
}
if self.par_id:
period_ok = True
if self.par_id.ticket_ids:
for tck in self.par_id.ticket_ids:
if tck.act_ins_id.period_id.date_begin <= self.act_ins_id.period_id.date_end and tck.act_ins_id.period_id.date_end >= self.act_ins_id.period_id.date_begin:
period_ok = False
booked = tck.act_ins_id.display_name
break
if not period_ok:
return {
'warning': {'title': _("Warning"), 'message': _("You are already booked in that period for:\n%s") % (booked)},
}
@api.multi
def doit_step1(self):
_logger.info('doit_step1')
self.ensure_one()
for wiz in self:
got_par = False
wiz.seats_reserved = wiz.seats
dt = wiz.act_ins_id.period_id.date_begin[0:10]
mbr_obj = self.env['campos.activity.signup.members']
if wiz.reg_id.participant_ids:
for par in wiz.reg_id.participant_ids:
if par.state in ['reg', 'duplicate', 'deregistered']:
continue
                    # Check the activity date against the participant's camp days
_logger.info('Evaluating %s %s %s', par.name, dt, par.tocampdate)
if dt < par.tocampdate or dt > par.fromcampdate:
continue
                    # Check age requirement - the age requirement has been dropped
#_logger.info('Evaluating age %s %s', par.name, par.camp_age)
#if par.camp_age < wiz.act_ins_id.activity_id.age_from or par.camp_age > wiz.act_ins_id.activity_id.age_to:
# continue
                    # Check against other bookings
period_ok = True
if par.ticket_ids:
for tck in par.ticket_ids:
if tck.act_ins_id.period_id.date_begin <= wiz.act_ins_id.period_id.date_end and tck.act_ins_id.period_id.date_end >= wiz.act_ins_id.period_id.date_begin:
period_ok = False
break
if not period_ok:
continue
_logger.info('Adding %s', par.name)
mbr_id = mbr_obj.create({'wiz_id' : wiz.id,
'par_id' : par.id,
'name' : par.name,
'reg_id' : wiz.reg_id.id})
if wiz.par_id and wiz.par_id.id == par.id:
got_par = True
wiz.par_signup_ids += mbr_id
self.state = 'step2'
self.ticket_id = self.env['campos.activity.ticket'].suspend_security().create({'reg_id': wiz.reg_id.id,
'act_ins_id': wiz.act_ins_id.id,
'seats': wiz.seats,
'state': 'open'})
_logger.info('Ticket created %s %d', wiz.state, wiz.id)
return {
'name': _('Add participants'),
'view_mode': 'form',
'view_type': 'form',
'view_id': self.env.ref('campos_activity.campos_activity_signup_wiz_form_view').id,
'res_model': 'campos.activity.signup.wiz',
'type': 'ir.actions.act_window',
#'nodestroy': True,
'target': 'new',
'context' : {
'default_reg_id': self.id,
},
'res_id': self.id,
}
@api.multi
def doit_step2(self):
_logger.info('doit_step2')
self.ensure_one()
for wiz in self:
signups = []
for par in wiz.par_signup_ids:
signups.append(par.par_id.id)
tck = wiz.ticket_id.suspend_security()
tck.par_ids = [(6, 0, signups)]
tck.seats = len(signups) # TODO Check against max avail!
tck.state = 'done'
@api.multi
def prepare_step2(self):
_logger.info('prepare_step
|
pandeydivesh15/AVSR-Deep-Speech
|
util/data_set_helpers_RHL.py
|
Python
|
gpl-2.0
| 7,982
| 0.004009
|
import pandas
import tensorflow as tf
from threading import Thread
from math import ceil
from six.moves import range
from util.audio import audiofile_to_input_vector
from util.gpu import get_available_gpus
from util.text_RHL import ctc_label_dense_to_sparse, text_to_char_array
class DataSets(object):
def __init__(self, train, dev, test):
'''Container for train, dev and test sets of one corpus.
Args:
train (DataSet): the train data set of the corpus
dev (DataSet): the validation data set of the corpus
test (DataSet): the test data set of the corpus
'''
self._dev = dev
self._test = test
self._train = train
    def start_queue_threads(self, session, coord):
        # DataSet.start_queue_threads requires a coordinator, so pass it through.
        self._dev.start_queue_threads(session, coord)
        self._test.start_queue_threads(session, coord)
        self._train.start_queue_threads(session, coord)
@property
def train(self):
return self._train
@property
def dev(self):
return self._dev
@property
def test(self):
return self._test
class DataSet(object):
def __init__(self, files_list, thread_count, batch_size, numcep, numcontext, next_index=lambda x: x + 1):
self._coord = None
self._numcep = numcep
self._x = tf.placeholder(tf.float32, [None, numcep + (2 * numcep * numcontext)])
self._x_length = tf.placeholder(tf.int32, [])
self._y = tf.placeholder(tf.int32, [None,])
self._y_length = tf.placeholder(tf.int32, [])
self.example_queue = tf.PaddingFIFOQueue(shapes=[[None, numcep + (2 * numcep * numcontext)], [], [None,], []],
dtypes=[tf.float32, tf.int32, tf.int32, tf.int32],
capacity=2 * self._get_device_count() * batch_size)
self._enqueue_op = self.example_queue.enqueue([self._x, self._x_length, self._y, self._y_length])
self._close_op = self.example_queue.close(cancel_pending_enqueues=True)
self.batch_size = batch_size
self._numcontext = numcontext
self._thread_count = thread_count
self._files_list = self._create_files_list(files_list)
self._next_index = next_index
def _get_device_count(self):
available_gpus = get_available_gpus()
return max(len(available_gpus), 1)
def start_queue_threads(self, session, coord):
self._coord = coord
batch_threads = [Thread(target=self._populate_batch_queue, args=(session,)) for i in range(self._thread_count)]
for batch_thread in batch_threads:
self._coord.register_thread(batch_thread)
batch_thread.daemon = True
batch_thread.start()
return batch_threads
def close_queue(self, session):
session.run(self._close_op)
def _create_files_list(self, files_list):
# 1. Sort by wav filesize
# 2. Select just wav filename and transcript columns
# 3. Return a NumPy representation
return files_list.sort_values(by="wav_filesize") \
.ix[:, ["wav_filename", "transcript"]] \
.values
def _indices(self):
index = -1
while not self._coord.should_stop():
index = self._next_index(index) % len(self._files_list)
yield self._files_list[index]
def _populate_batch_queue(self, session):
for wav_file, transcript in self._indices():
source = audiofile_to_input_vector(wav_file, self._numcep, self._numcontext)
source_len = len(source)
target = text_to_char_array(transcript)
target_len = len(target)
try:
session.run(self._enqueue_op, feed_dict={
self._x: source,
self._x_length: source_len,
self._y: target,
self._y_length: target_len})
except tf.errors.CancelledError:
return
def next_batch(self):
source, source_lengths, target, target_lengths = self.example_queue.dequeue_many(self.batch_size)
sparse_labels = ctc_label_dense_to_sparse(target, target_lengths, self.batch_size)
return source, source_lengths, sparse_labels
@property
def total_batches(self):
# Note: If len(_files_list) % batch_size != 0, this re-uses initial files
        return int(ceil(len(self._files_list) / float(self.batch_size)))
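# Illustrative usage sketch (not part of the original module), assuming a `train_set`
# DataSet built elsewhere and TensorFlow 1.x session semantics:
#
#   coord = tf.train.Coordinator()
#   with tf.Session() as session:
#       threads = train_set.start_queue_threads(session, coord)
#       source, source_lengths, sparse_labels = train_set.next_batch()
#       for _ in range(train_set.total_batches):
#           batch = session.run([source, source_lengths, sparse_labels])
#       coord.request_stop()
#       train_set.close_queue(session)
#       coord.join(threads)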
class SwitchableDataSet(object):
def __init__(self, data_sets):
'''Data set that is wrapping a data sets instance to switch between train, dev and test instances during training.
Args:
data_sets (DataSets): the corpus container holding all three data sets
'''
self._data_sets = data_sets
self._sets = [data_sets.train, data_sets.dev, data_sets.test]
self._queues = [s.example_queue for s in self._sets]
self._queue_selector = tf.placeholder(tf.int32, name='Queue_Selector')
self._queue = tf.QueueBase.from_list(self._queue_selector, self._queues)
self._close_op = self._queue.close(cancel_pending_enqueues=True)
self._data_set = data_sets.train
def set_data_set(self, feed_dict, data_set):
index = self._sets.index(data_set)
assert index >= 0
feed_dict[self._queue_selector] = index
self._data_set = data_set
def start_queue_threads(self, session, coord):
batch_threads = []
for s in self._sets:
batch_threads += s.start_queue_threads(session, coord)
return batch_threads
def close_queue(self, session):
session.run(self._close_op, feed_dict={ self._queue_selector: 0 })
for s in self._sets:
s.close_queue(session)
def next_batch(self):
source, source_lengths, target, target_lengths = self._queue.dequeue_many(self._data_set.batch_size)
sparse_labels = ctc_label_dense_to_sparse(target, target_lengths, self._data_set.batch_size)
return source, source_lengths, sparse_labels
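# Illustrative sketch (not part of the original module): because the queue selector is a
# placeholder, switching the active set is done through the feed_dict. `data_sets` and
# `session` below are assumed to exist already:
#
#   switchable = SwitchableDataSet(data_sets)
#   feed_dict = {}
#   switchable.set_data_set(feed_dict, data_sets.dev)
#   source, source_lengths, sparse_labels = switchable.next_batch()
#   session.run([source, source_lengths, sparse_labels], feed_dict=feed_dict)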
def read_data_sets(train_csvs, dev_csvs, test_csvs,
train_batch_size, dev_batch_size, test_batch_size,
numcep, numcontext, thread_count=8,
stride=1, offset=0, next_index=lambda s, i: i + 1,
limit_dev=0, limit_test=0, limit_train=0):
# Read the processed set files from disk
def read_csvs(csvs):
files = None
for csv in csvs:
file = pandas.read_csv(csv)
if files is None:
files = file
else:
files = files.append(file)
return files
train_files = read_csvs(train_csvs)
dev_files = read_csvs(dev_csvs)
test_files = read_csvs(test_csvs)
# Create train DataSet from all the train archives
    train = _read_data_set(train_files, thread_count, train_batch_size, numcep, numcontext, stride=stride, offset=offset, next_index=lambda i: next_index('train', i), limit=limit_train)
# Create dev DataSet from all the dev archives
dev = _read_data_set(dev_files, thread_count, dev_batch_size, numcep, numcontext, stride=stride, offset=offset, next_index=lambda i: next_index('dev', i), limit=limit_dev)
# Create test DataSet from all the test archives
test = _read_data_set(test_files, thread_count, test_batch_size, numcep, numcontext, stride=stride, offset=offset, next_index=lambda i: next_index('test', i), limit=limit_test)
# Return DataSets
return DataSets(train, dev, test)
def _read_data_set(filelist, thread_count, batch_size, numcep, numcontext, stride=1, offset=0, next_index=lambda i: i + 1, limit=0):
# Optionally apply dataset size limits
if limit > 0:
filelist = filelist.iloc[:limit]
filelist = filelist[offset::stride]
# Return DataSet
return DataSet(filelist, thread_count, batch_size, numcep, numcontext, next_index=next_index)
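# Illustrative sketch (not part of the original module): typical construction of the corpus
# container from CSV manifests. The paths and hyper-parameter values below are made-up
# placeholders, not values used by the original project:
#
#   data_sets = read_data_sets(['data/train.csv'], ['data/dev.csv'], ['data/test.csv'],
#                              train_batch_size=16, dev_batch_size=16, test_batch_size=16,
#                              numcep=26, numcontext=9)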
|
mediatum/mediatum
|
core/test/test_containertype.py
|
Python
|
gpl-3.0
| 581
| 0.008606
|
# -*- coding: utf-8 -*-
"""
    :copyright: (c) 2014 by the mediaTUM authors
:license: GPL3, see COPYING for details
"""
from core.test.asserts import assert_deprecation_warning
def test_getContainerChildren(some_node):
container_children = assert_deprecation_warning(some_node.getContainerChildren)
assert len(container_children) == 1
assert container_children[0].name == "container"
def test_getContentType_container(container_node):
content_type = assert_deprecation_warning(container_node.getContentType)
assert content_type == "directory"
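# Illustrative sketch (not part of mediatum): assert_deprecation_warning is assumed to call
# the given function while checking that a DeprecationWarning is emitted, roughly:
#
#   import warnings
#
#   def assert_deprecation_warning(func, *args, **kwargs):
#       with warnings.catch_warnings(record=True) as caught:
#           warnings.simplefilter("always")
#           result = func(*args, **kwargs)
#       assert any(issubclass(w.category, DeprecationWarning) for w in caught)
#       return result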
|
balloob/netdisco
|
netdisco/discoverables/spotify_connect.py
|
Python
|
mit
| 355
| 0
|
"""Discover devices that implement the Spotify Connect platform."""
from . import MDNSDiscoverable
class Discoverable(MDNSDiscoverable):
"""Add support for discovering Spotify Connect serv
|
ice."""
def __init__(self, nd):
"""Initialize the Cast discovery."""
super(Discoverable, self).__init__(nd, '_spotify-connect._tcp.local.')
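# Illustrative sketch (not part of netdisco): a discoverable for another mDNS service would
# follow the same pattern, only swapping the service type string. '_example._tcp.local.' is
# a made-up placeholder:
#
#   class ExampleDiscoverable(MDNSDiscoverable):
#       def __init__(self, nd):
#           super(ExampleDiscoverable, self).__init__(nd, '_example._tcp.local.')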
|