| column | type | range |
|---|---|---|
| repo_name | string | 5-100 chars |
| path | string | 4-231 chars |
| language | string | 1 distinct value |
| license | string | 15 distinct values |
| size | int64 | 6-947k |
| score | float64 | 0-0.34 |
| prefix | string | 0-8.16k chars |
| middle | string | 3-512 chars |
| suffix | string | 0-8.17k chars |
admgrn/Switcharoo | scraper/scraper/entryqueue.py | Python | gpl-3.0 | 2,041 | 0.00147
# Copyright 2015 Adam Greenstein <adamgreenstein@comcast.net>
#
# Switcharoo Cartographer is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Switcharoo Cartographer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Switcharoo Cartographer. If not, see <http://www.gnu.org/licenses/>.
from data import Access, Entry, EntryError
from Queue import Queue
class EntryQueue(Queue):
def __init__(self, transverse, maxsize=0):
self.reddit = transverse.reddit
self.events = transverse.events
Queue.__init__(self, maxsize)
def _init(self, maxsize):
Queue._init(self, maxsize)
nodes = Access(self.events).get_entry(maxsize)
for node in nodes:
try:
self.queue.append(Entry(node['raw_url'], self.reddit))
self.events.on_adding_to_queue(node['raw_url'])
except EntryError:
# TODO Remove old entry from DB
pass
def _put(self, url):
try:
entry = Entry(url, self.reddit)
if self._is_unique(entry):
self.events.on_adding_to_queue(url)
self.queue.append(entry)
else:
self.events.on_not_adding_to_queue(url)
except EntryError:
self.events.on_not_adding_to_queue(url)
def _get(self):
return self.queue.popleft()
def _is_unique(self, entry):
# TODO Logic here to determine if new url found
if entry not in self.queue:
return Access(self.events).is_unique_entry(entry)
else:
return False
kdwink/intellij-community | python/testData/inspections/PyPep8NamingInspection/overridden.py | Python | apache-2.0 | 142 | 0.070423
class A:
    def <weak_warning descr="Function name should be lowercase">fooBar</weak_warning>(self): pass

class B(A):
    def fooBar(self): pass
olemoudi/tweetdigest | tweepy/tweepy/binder.py | Python | apache-2.0 | 7,174 | 0.001812
# Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
import httplib
import urllib
import time
import re
from tweepy.error import TweepError
from tweepy.utils import convert_to_utf8_str
from tweepy.models import Model
re_path_template = re.compile('{\w+}')
def bind_api(**config):
class APIMethod(object):
path = config['path']
payload_type = config.get('payload_type', None)
payload_list = config.get('payload_list', False)
allowed_param = config.get('allowed_param', [])
method = config.get('method', 'GET')
require_auth = config.get('require_auth', False)
search_api = config.get('search_api', False)
use_cache = config.get('use_cache', True)
def __init__(self, api, args, kargs):
# If authentication is required and no credentials
# are provided, throw an error.
if self.require_auth and not api.auth:
raise TweepError('Authentication required!')
self.api = api
self.post_data = kargs.pop('post_data', None)
self.retry_count = kargs.pop('retry_count', api.retry_count)
self.retry_delay = kargs.pop('retry_delay', api.retry_delay)
self.retry_errors = kargs.pop('retry_errors', api.retry_errors)
self.headers = kargs.pop('headers', {})
self.build_parameters(args, kargs)
# Pick correct URL root to use
if self.search_api:
self.api_root = api.search_root
else:
self.api_root = api.api_root
# Perform any path variable substitution
self.build_path()
if api.secure:
self.scheme = 'https://'
else:
self.scheme = 'http://'
if self.search_api:
self.host = api.search_host
else:
self.host = api.host
# Manually set Host header to fix an issue in python 2.5
# or older where Host is set including the 443 port.
# This causes Twitter to issue 301 redirect.
# See Issue https://github.com/tweepy/tweepy/issues/12
self.headers['Host'] = self.host
def build_parameters(self, args, kargs):
self.parameters = {}
for idx, arg in enumerate(args):
if arg is None:
continue
try:
self.parameters[self.allowed_param[idx]] = convert_to_utf8_str(arg)
except IndexError:
raise TweepError('Too many parameters supplied!')
for k, arg in kargs.items():
    if arg is None:
        continue
    if k in self.parameters:
raise TweepError('Multiple values for parameter %s supplied!' % k)
self.parameters[k] = convert_to_utf8_str(arg)
def build_path(self):
for variable in re_path_template.findall(self.path):
name = variable.strip('{}')
if name == 'user' and 'user' not in self.parameters and self.api.auth:
# No 'user' parameter provided, fetch it from Auth instead.
value = self.api.auth.get_username()
else:
try:
value = urllib.quote(self.parameters[name])
except KeyError:
raise TweepError('No parameter value found for path variable: %s' % name)
del self.parameters[name]
self.path = self.path.replace(variable, value)
def execute(self):
# Build the request URL
url = self.api_root + self.path
if len(self.parameters):
url = '%s?%s' % (url, urllib.urlencode(self.parameters))
# Query the cache if one is available
# and this request uses a GET method.
if self.use_cache and self.api.cache and self.method == 'GET':
cache_result = self.api.cache.get(url)
# if cache result found and not expired, return it
if cache_result:
# must restore api reference
if isinstance(cache_result, list):
for result in cache_result:
if isinstance(result, Model):
result._api = self.api
else:
if isinstance(cache_result, Model):
cache_result._api = self.api
return cache_result
# Continue attempting request until successful
# or maximum number of retries is reached.
retries_performed = 0
while retries_performed < self.retry_count + 1:
# Open connection
# FIXME: add timeout
if self.api.secure:
conn = httplib.HTTPSConnection(self.host)
else:
conn = httplib.HTTPConnection(self.host)
# Apply authentication
if self.api.auth:
self.api.auth.apply_auth(
self.scheme + self.host + url,
self.method, self.headers, self.parameters
)
# Execute request
try:
conn.request(self.method, url, headers=self.headers, body=self.post_data)
resp = conn.getresponse()
except Exception, e:
raise TweepError('Failed to send request: %s' % e)
# Exit request loop if non-retry error code
if self.retry_errors:
if resp.status not in self.retry_errors: break
else:
if resp.status == 200: break
# Sleep before retrying request again
time.sleep(self.retry_delay)
retries_performed += 1
# If an error was returned, throw an exception
self.api.last_response = resp
if resp.status != 200:
try:
error_msg = self.api.parser.parse_error(resp.read())
except Exception:
error_msg = "Twitter error response: status code = %s" % resp.status
raise TweepError(error_msg, resp)
# Parse the response payload
result = self.api.parser.parse(self, resp.read())
conn.close()
# Store result into cache if one is available.
if self.use_cache and self.api.cache and self.method == 'GET' and result:
self.api.cache.store(url, result)
return result
def _call(api, *args, **kargs):
method = APIMethod(api, args, kargs)
return method.execute()
# Set pagination mode
if 'cursor' in APIMethod.allowed_param:
_call.pagination_mode = 'cursor'
elif 'page' in APIMethod.allowed_param:
_call.pagination_mode = 'page'
return _call
colinsullivan/bingo-board | bingo_board/tastypie/resources.py | Python | mit | 58,556 | 0.007121
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.urlresolvers import NoReverseMatch, reverse, resolve, Resolver404
from django.db.models.sql.constants import QUERY_TERMS, LOOKUP_SEP
from django.http import HttpResponse
from django.utils.cache import patch_cache_control
from tastypie.authentication import Authentication
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.cache import NoCache
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.exceptions import NotFound, BadRequest, InvalidFilterError, HydrationError, InvalidSortError, ImmediateHttpResponse
from tastypie.fields import *
from tastypie.http import *
from tastypie.paginator import Paginator
from tastypie.serializers import Serializer
from tastypie.throttle import BaseThrottle
from tastypie.utils import is_valid_jsonp_callback_value, dict_strip_unicode_keys, trailing_slash
from tastypie.utils.mime import determine_format, build_content_type
from tastypie.validation import Validation
try:
set
except NameError:
from sets import Set as set
# The ``copy`` module was added in Python 2.5 and ``copycompat`` was added in
# post 1.1.1 Django (r11901)
try:
from django.utils.copycompat import deepcopy
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from copy import deepcopy
def csrf_exempt(func):
return func
class ResourceOptions(object):
"""
A configuration class for ``Resource``.
Provides sane defaults and the logic needed to augment these settings with
the internal ``class Meta`` used on ``Resource`` subclasses.
"""
serializer = Serializer()
authentication = Authentication()
authorization = ReadOnlyAuthorization()
cache = NoCache()
throttle = BaseThrottle()
validation = Validation()
allowed_methods = ['get', 'post', 'put', 'delete']
list_allowed_methods = None
detail_allowed_methods = None
limit = getattr(settings, 'API_LIMIT_PER_PAGE', 20)
api_name = None
resource_name = None
urlconf_namespace = None
default_format = 'application/json'
filtering = {}
ordering = []
object_class = None
queryset = None
fields = []
excludes = []
include_resource_uri = True
include_absolute_url = False
def __new__(cls, meta=None):
overrides = {}
# Handle overrides.
if meta:
for override_name in dir(meta):
# No internals please.
if not override_name.startswith('_'):
overrides[override_name] = getattr(meta, override_name)
allowed_methods = overrides.get('allowed_methods', ['get', 'post', 'put', 'delete'])
if overrides.get('list_allowed_methods', None) is None:
overrides['list_allowed_methods'] = allowed_methods
if overrides.get('detail_allowed_methods', None) is None:
overrides['detail_allowed_methods'] = allowed_methods
if not overrides.get('queryset', None) is None:
overrides['object_class'] = overrides['queryset'].model
return object.__new__(type('ResourceOptions', (cls,), overrides))
class DeclarativeMetaclass(type):
def __new__(cls, name, bases, attrs):
attrs['base_fields'] = {}
declared_fields = {}
# Inherit any fields from parent(s).
try:
parents = [b for b in bases if issubclass(b, Resource)]
for p in parents:
fields = getattr(p, 'base_fields', {})
for field_name, field_object in fields.items():
attrs['base_fields'][field_name] = deepcopy(field_object)
except NameError:
pass
for field_name, obj in attrs.items():
if isinstance(obj, ApiField):
field = attrs.pop(field_name)
declared_fields[field_name] = field
attrs['base_fields'].update(declared_fields)
attrs['declared_fields'] = declared_fields
new_class = super(DeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
opts = getattr(new_class, 'Meta', None)
new_class._meta = ResourceOptions(opts)
if not getattr(new_class._meta, 'resource_name', None):
# No ``resource_name`` provided. Attempt to auto-name the resource.
class_name = new_class.__name__
name_bits = [bit for bit in class_name.split('Resource') if bit]
resource_name = ''.join(name_bits).lower()
new_class._meta.resource_name = resource_name
if getattr(new_class._meta, 'include_resource_uri', True):
if not 'resource_uri' in new_class.base_fields:
new_class.base_fields['resource_uri'] = CharField(readonly=True)
elif 'resource_uri' in new_class.base_fields and not 'resource_uri' in attrs:
del(new_class.base_fields['resource_uri'])
for field_name, field_object in new_class.base_fields.items():
if hasattr(field_object, 'contribute_to_class'):
field_object.contribute_to_class(new_class, field_name)
return new_class
class Resource(object):
"""
Handles the data, request dispatch and responding to requests.
Serialization/deserialization is handled "at the edges" (i.e. at the
beginning/end of the request/response cycle) so that everything internally
is Python data structures.
This class tries to be non-model specific, so it can be hooked up to other
data sources, such as search results, files, other data, etc.
"""
__metaclass__ = DeclarativeMetaclass
def __init__(self, api_name=None):
self.fields = deepcopy(self.base_fields)
if not api_name is None:
self._meta.api_name = api_name
def __getattr__(self, name):
if name in self.fields:
return self.fields[name]
def wrap_view(self, view):
"""
Wraps methods so they can be called in a more functional way as well
as handling exceptions better.
Note that if ``BadRequest`` or an exception with a ``response`` attr
are seen, there is special handling to either present a message back
to the user or return the response traveling with the exception.
"""
@csrf_exempt
def wrapper(request, *args, **kwargs):
try:
callback = getattr(self, view)
response = callback(request, *args, **kwargs)
if request.is_ajax():
# IE excessively caches XMLHttpRequests, so we're disabling
# the browser cache here.
# See http://www.enhanceie.com/ie/bugs.asp for details.
patch_cache_control(response, no_cache=True)
return response
except (BadRequest, ApiFieldError), e:
    return HttpBadRequest(e.args[0])
except Exception, e:
    if hasattr(e, 'response'):
        return e.response
    # A real, non-expected exception.
# Handle the case where the full traceback is more helpful
# than the serialized error.
if settings.DEBUG and getattr(settings, 'TASTYPIE_FULL_DEBUG', False):
raise
# Rather than re-raising, we're going to do things similar to
# what Django does. The difference is returning a serialized
# error message.
return self._handle_500(request, e)
return wrapper
def _handle_500(self, request, exception):
import traceback
import sys
the_trace = '\n'.join(traceback.format_exception(*(sys.exc_info())))
anselmobd/fo2 | src/systextil/urls/table.py | Python | mit | 540 | 0.001852
from django.urls import re_path
from systextil.views import apoio_index
from systextil.views.table import (
deposito,
colecao,
estagio,
periodo_confeccao,
unidade,
)
urlpatterns = [
re_path(r'^colecao/$', colecao.view, name='colecao'),
re_path(r'^deposito/$', deposito.deposito, name='deposito'),
re_path(r'^estagio/$', estagio.view, name='estagio'),
re_path(r'^periodo_confeccao/$', periodo_confeccao.view, name='periodo_confeccao'),
re_path(r'^unidade/$', unidade.view, name='unidade'),
]
tmerrick1/spack | var/spack/repos/builtin/packages/r-futile-options/package.py | Python | lgpl-2.1 | 1,636 | 0.001834
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RFutileOptions(RPackage):
"""A scoped options management framework"""
homepage = "https://cran.rstudio.com/web/packages/futile.options/index.html"
url = "https://cran.rstudio.com/src/contrib/futile.options_1.0.0.tar.gz"
list_url = "https://cran.rstudio.com/src/contrib/Archive/futile.options"
version('1.0.0', '8fd845774bbce56f41f7c43c3b4c13ba')
the13fools/Bokeh_Examples | pandas/dataframe.py | Python | bsd-3-clause | 318 | 0.006289
import numpy as np
import pandas as pd
from bokeh import mpl
ts = pd.Series(np.random.randn(1000), index=pd.date_range('1/1/2000', periods=1000))
ts = ts.cumsum()
df = pd.DataFrame(np.random.randn(1000, 4), index=ts.index, columns=list('ABCD'))
df = df.cumsum()
df.plot(legend=False)
mpl.to_bokeh(name="dataframe")
heytrav/drs-project | domain_api/migrations/0010_topleveldomain_slug.py | Python | mit | 478 | 0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-04 22:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('domain_api', '0009_remove_topleveldomain_slug'),
]
operations = [
migrations.AddField(
model_name='topleveldomain',
name='slug',
field=models.CharField(max_length=100, null=True),
),
]
zibawa/zibawa | simulator/forms.py | Python | gpl-3.0 | 76 | 0
'''
Created on Nov 21, 2016
@author: julimatt
'''
from django import forms
codewarrior0/pymclevel | setup_nbt.py | Python | isc | 344 | 0.02907
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
ext_modules = [Extension("_nbt", ["_nbt.pyx"])]
import numpy
setup(
name = 'NBT library (Cython implementation)',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
include_dirs = numpy.get_include()
)
diorcety/translate | translate/storage/projstore.py | Python | gpl-2.0 | 14,767 | 0.000339
# -*- coding: utf-8 -*-
#
# Copyright 2010 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os
import six
from lxml import etree
__all__ = ('FileExistsInProjectError', 'FileNotInProjectError', 'ProjectStore')
class FileExistsInProjectError(Exception):
pass
class FileNotInProjectError(Exception):
pass
class ProjectStore(object):
"""Basic project file container."""
# INITIALIZERS #
def __init__(self):
self._files = {}
self._sourcefiles = []
self._targetfiles = []
self._transfiles = []
self.settings = {}
self.convert_map = {}
# The above map maps the conversion of input files (keys) to its output
# file and template used (2-tuple). All values are project file names.
# eg. convert_map = {
# 'sources/doc.odt': ('trans/doc.odt.xlf', None),
# 'trans/doc.odt.xlf': ('targets/doc.odt', 'sources/doc.odt')
#}
# The following dict groups together sets of mappings from a file
# "type" string ("src", "tgt" or "trans") to various other values
# or objects.
self.TYPE_INFO = {
# type => prefix for new files
'f_prefix': {
'src': 'sources/',
'tgt': 'targets/',
'trans': 'trans/',
},
# type => list containing filenames for that type
'lists': {
'src': self._sourcefiles,
'tgt': self._targetfiles,
'trans': self._transfiles,
},
# type => next type in process: src => trans => tgt
'next_type': {
'src': 'trans',
'trans': 'tgt',
'tgt': None,
},
# type => name of the sub-section in the settings file/dict
'settings': {
'src': 'sources',
'tgt': 'targets',
'trans': 'transfiles',
}
}
def __del__(self):
try:
self.close()
except Exception:
pass
# ACCESSORS #
def _get_sourcefiles(self):
"""Read-only access to ``self._sourcefiles``."""
return tuple(self._sourcefiles)
sourcefiles = property(_get_sourcefiles)
def _get_targetfiles(self):
"""Read-only access to ``self._targetfiles``."""
return tuple(self._targetfiles)
targetfiles = property(_get_targetfiles)
def _get_transfiles(self):
"""Read-only access to ``self._transfiles``."""
return tuple(self._transfiles)
transfiles = property(_get_transfiles)
# SPECIAL METHODS #
def __contains__(self, lhs):
"""@returns ``True`` if ``lhs`` is a file name or file object in the project store."""
return (lhs in self._sourcefiles or
lhs in self._targetfiles or
lhs in self._transfiles or
lhs in self._files or
lhs in self._files.values())
# METHODS #
def append_file(self, afile, fname, ftype='trans', delete_orig=False):
"""Append the given file to the project with the given filename, marked
to be of type ``ftype`` ('src', 'trans', 'tgt').
:type delete_orig: bool
:param delete_orig: Whether or not the original (given) file should be
deleted after being appended. This is set to
``True`` by
:meth:`~translate.storage.project.convert_forward`
. Not used in this class.
"""
if ftype not in self.TYPE_INFO['f_prefix']:
raise ValueError('Invalid file type: %s' % (ftype))
if isinstance(afile, six.string_types) and os.path.isfile(afile) and not fname:
# Try and use afile as the file name
fname, afile = afile, open(afile)
# Check if we can get a real file name
realfname = fname
if realfname is None or not os.path.isfile(realfname):
realfname = getattr(afile, 'name', None)
if realfname is None or not os.path.isfile(realfname):
realfname = getattr(afile, 'filename', None)
if not realfname or not os.path.isfile(realfname):
realfname = None
# Try to get the file name from the file object, if it was not given:
if not fname:
fname = getattr(afile, 'name', None)
if not fname:
fname = getattr(afile, 'filename', None)
fname = self._fix_type_filename(ftype, fname)
if not fname:
raise ValueError('Could not deduce file name and none given')
if fname in self._files:
raise FileExistsInProjectError(fname)
if realfname is not None and os.path.isfile(realfname):
self._files[fname] = realfname
else:
self._files[fname] = afile
self.TYPE_INFO['lists'][ftype].append(fname)
return afile, fname
def append_sourcefile(self, afile, fname=None):
return self.append_file(afile, fname, ftype='src')
def append_targetfile(self, afile, fname=None):
return self.append_file(afile, fname, ftype='tgt')
def append_transfile(self, afile, fname=None):
return self.append_file(afile, fname, ftype='trans')
def remove_file(self, fname, ftype=None):
"""Remove the file with the given project name from the project. If
the file type ('src', 'trans' or 'tgt') is not given, it is guessed.
"""
if fname not in self._files:
raise FileNotInProjectError(fname)
if not ftype:
# Guess file type (source/trans/target)
for ft, prefix in self.TYPE_INFO['f_prefix'].items():
if fname.startswith(prefix):
ftype = ft
break
self.TYPE_INFO['lists'][ftype].remove(fname)
if self._files[fname] and hasattr(self._files[fname], 'close'):
self._files[fname].close()
del self._files[fname]
def remove_sourcefile(self, fname):
self.remove_file(fname, ftype='src')
def remove_targetfile(self, fname):
self.remove_file(fname, ftype='tgt')
def remove_transfile(self, fname):
self.remove_file(fname, ftype='trans')
def close(self):
self.save()
def get_file(self, fname, mode='rb'):
"""Retrieve the file with the given name from the project store.
The file is looked up in the ``self._files`` dictionary. The values
in this dictionary may be ``None``, to indicate that the file is not
cacheable and needs to be retrieved in a special way. This special
way must be defined in this method of sub-classes. The value may
also be a string, which indicates that it is a real file accessible
via ``open``.
:type mode: str
:param mode: The mode in which to re-open the file (if it is closed).
"""
if fname not in self._files:
raise FileNotInProjectError(fname)
rfile = self._files[fname]
if isinstance(rfile, six.string_types):
rfile = open(rfile, 'rb')
# Check that the file is actually open
if getattr(rfile, 'closed', False):
rfname = fname
if not os.path.isfile(rfname):
rfname = getattr(rfile, 'name', None)
if not rfile or not os.path.isfile(rfname):
rfname = getattr(rfile,
hnikolov/pihdf | pihdf/printers/ip_wrapper_gen.py | Python | mit | 6,357 | 0.016045
from myhdl import *
from str_builder import StrBuilder
import os
import sys
class GenWrapperFile(object):
'''|
| This class is used to generate a python wrapper of a verilog design
|________'''
def __init__(self):
self.module_name = ''
self.interface = [] # keeps the order of the declarations
self.parameters = []
self.inputs = []
self.outputs = []
def generateWrapperFile(self):
'''|
| Generate <module_name_wrapper>.py file
|________'''
print "\nGenerating .py wrapper file."
s = StrBuilder()
self.genWrapperInterface(s)
s += '# Need this in order to work...\n'
# We assume that the clock is 'clk'!!!!!
s += '@always(clk.posedge)\n'
s += 'def pass_thru():\n'
s += s.indent() + 'pass\n\n'
s.dedent()
self.genTheWrapper(s)
self.genConvertFunc(s)
filename = self.module_name + '_wrp.py'
s.write(filename)
def genWrapperInterface(self, s):
'''|
| Generate the interfaces of the wrapper file
|________'''
s += 'from myhdl import *\n\n'
s += 'def ' + self.module_name + '_wrapper(\n'
s.indent(5)
for i in self.interface: # to preserve the order
if i['type'] != 'parameter':
s += i['name'] + ',\n'
s+= 'INST_NAME = "' + self.module_name.upper() + '",\n'
for p in self.parameters:
s += p['name'] + ' = ' + p['value'] + ',\n'
s -= 2
s += s.noIndent() + '):\n\n'
s.dedent(3)
def genTheWrapper(self, s, py_name=None):
'''|
| Generate the wrapper
|________'''
s += '#---------------------------------------------#\n'
s += '# Define the interface to the verilog ip core #\n'
s += '#---------------------------------------------#\n'
str_name = self.module_name if py_name == None else py_name
s += str_name + '_wrp.verilog_code = \\' + '\n'
if self.parameters != []:
s += '"""' + self.module_name + '#(\\n""" + \\' + '\n'
for p in self.parameters:
s += '""" .' + p["name"] + '($' + p["name"] + '),\\n""" + \\' + '\n'
s = s-11 + (s.noIndent() + '\\n""" + \\' + '\n')
s += '""" ) $INST_NAME (\\n""" + \\' + '\n'
else:
s += '"""' + self.module_name + ' $INST_NAME (\\n""" + \\' + '\n'
for i in self.interface:
if i["type"] != "parameter":
s += '""" .' + i["name"] + '($' + i["name"] + '),\\n""" + \\' + '\n'
s = s-11 + (s.noIndent() + '\\n""" + \\' + '\n')
s += '""");"""\n\n'
s += '#-------------------------------------------------------#\n'
s += '# output, needed when converting the wrapper to verilog #\n'
s += '#-------------------------------------------------------#\n'
for o in self.outputs:
s += o["name"] + '.driven = "wire"\n'
s += '\n'
s += 'return pass_thru\n\n\n'
def genConvertFunc(self, s):
'''|
| Generate function convert()
|________'''
s += s.noIndent() + 'def convert():\n\n'
for i in self.parameters:
s += i['name'] + ' = ' + i['value'] + '\n'
s += '\n'
# Declare signals
for i in self.interface:
if i['type'] != 'parameter':
x = i["size"]
stype = 'bool(0)'
if x.startswith('['):
stype = 'intbv(0)'
x = x.replace(":0", "+1:")
s += i['name'] + '= Signal(' + stype + x + ')\n'
s += '\n'
s += 'toVerilog(' + self.module_name + '_wrapper,\n'
s.indent(2)
for i in self.interface:
if i['type'] != 'parameter':
s += i['name'] + ' = ' + i['name'] + ',\n'
for p in self.parameters:
s += p['name'] + ' = ' + p['name'] + ',\n'
s = s-2 + (s.noIndent() + ' )\n\n\n')
s.dedent(4)
s += 'if __name__ == "__main__":\n'
s += s.indent() + 'convert()\n'
def initialize(self, filename):
'''|
| Initialize the GenWrapperFile object
|________'''
with open(filename) as f:
for line_number, line in enumerate(f):
w_list = line.split()
if w_list != []:
if w_list[0]=='module':
self.module_name = w_list[1]
elif w_list[0] == 'parameter':
name = w_list[1].replace(',','')
value = w_list[3].replace(',','')
self.parameters.append( {'name':name, 'value':value})
self.interface.append( {'name':name, 'value':value, 'type':w_list[0]})
elif w_list[0] == 'input' or w_list[0] == 'output':
name = w_list[3] if w_list[2].startswith('[') else w_list[2]
size = w_list[2] if w_list[2].startswith('[') else ''
if w_list[0] == 'input':
self.inputs.append( {'name':name.replace(',',''), 'size':size})
elif w_list[0] == 'output':
self.outputs.append({'name':name.replace(',',''), 'size':size})
self.interface.append( {'name':name.replace(',',''), 'size':size, 'type':w_list[0]})
elif w_list[0] == ');':
break
def main( args ):
gwf = GenWrapperFile()
gwf.initialize(args.file_name)
gwf.generateWrapperFile()
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Create a myhdl wrapper file.')
parser.add_argument('-f', '--file', dest='file_name', default="",
help='top-level verilog (.v) file')
args = parser.parse_args()
main( args )
h4ck3rm1k3/letter-to-editor | newspaper/letter_to_editor/migrations/0005_newspaper_sister_newspapers.py | Python | agpl-3.0 | 478 | 0.002092
# encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('letter_to_editor', '0004_company_newspaper_webcache_wikipediapage'),
]
operations = [
migrations.AddField(
model_name='newspaper',
name='sister_newspapers',
field=models.ForeignKey(to='letter_to_editor.Newspaper', to_field='newspaper_name'),
preserve_default=True,
),
]
shyamalschandra/scikit-learn | sklearn/neighbors/tests/test_dist_metrics.py | Python | bsd-3-clause | 8,002 | 0.0005
import itertools
import pickle
import numpy as np
from numpy.testing import assert_array_almost_equal
import pytest
from scipy.spatial.distance import cdist
from sklearn.neighbors import DistanceMetric
from sklearn.neighbors import BallTree
from sklearn.utils import check_random_state
from sklearn.utils._testing import create_memmap_backed_data
from sklearn.utils.fixes import sp_version, parse_version
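# A p-norm-style distance, used further down as a custom "pyfunc" metric.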
def dist_func(x1, x2, p):
return np.sum((x1 - x2) ** p) ** (1.0 / p)
rng = check_random_state(0)
d = 4
n1 = 20
n2 = 25
X1 = rng.random_sample((n1, d)).astype("float64", copy=False)
X2 = rng.random_sample((n2, d)).astype("float64", copy=False)
[X1_mmap, X2_mmap] = create_memmap_backed_data([X1, X2])
# make boolean arrays: ones and zeros
X1_bool = X1.round(0)
X2_bool = X2.round(0)
[X1_bool_mmap, X2_bool_mmap] = create_memmap_backed_data([X1_bool, X2_bool])
V = rng.random_sample((d, d))
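# V.dot(V.T) is symmetric positive semi-definite, suitable as the "VI"
# (inverse covariance) parameter of the Mahalanobis metric below.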
VI = np.dot(V, V.T)
BOOL_METRICS = [
"matching",
"jaccard",
"dice",
"kulsinski",
"rogerstanimoto",
"russellrao",
"sokalmichener",
"sokalsneath",
]
METRICS_DEFAULT_PARAMS = {
"euclidean": {},
"cityblock": {},
"minkowski": dict(p=(1, 1.5, 2, 3)),
"chebyshev": {},
"seuclidean": dict(V=(rng.random_sample(d),)),
"wminkowski": dict(p=(1, 1.5, 3), w=(rng.random_sample(d),)),
"mahalanobis": dict(VI=(VI,)),
"hamming": {},
"canberra": {},
"braycurtis": {},
}
@pytest.mark.parametrize("metric", METRICS_DEFAULT_PARAMS)
@pytest.mark.parametrize("X1, X2", [(X1, X2), (X1_mmap, X2_mmap)])
def test_cdist(metric, X1, X2):
argdict = METRICS_DEFAULT_PARAMS[metric]
keys = argdict.keys()
for vals in itertools.product(*argdict.values()):
kwargs = dict(zip(keys, vals))
if metric == "mahalanobis":
# See: https://github.com/scipy/scipy/issues/13861
pytest.xfail("scipy#13861: cdist with 'mahalanobis' fails onmemmap data")
elif metric == "wminkowski":
if sp_version >= parse_version("1.8.0"):
pytest.skip("wminkowski will be removed in SciPy 1.8.0")
# wminkowski is deprecated in SciPy 1.6.0 and removed in 1.8.0
ExceptionToAssert = None
if sp_version >= parse_version("1.6.0"):
ExceptionToAssert = DeprecationWarning
with pytest.warns(ExceptionToAssert):
D_true = cdist(X1, X2, metric, **kwargs)
else:
D_true = cdist(X1, X2, metric, **kwargs)
check_cdist(metric, kwargs, D_true)
@pytest.mark.parametrize("metric", BOOL_METRICS)
@pytest.mark.parametrize(
"X1_bool, X2_bool", [(X1_bool, X2_bool), (X1_bool_mmap, X2_bool_mmap)]
)
def test_cdist_bool_metric(metric, X1_bool, X2_bool):
D_true = cdist(X1_bool, X2_bool, metric)
check_cdist_bool(metric, D_true)
def check_cdist(metric, kwargs, D_true):
dm = DistanceMetric.get_metric(metric, **kwargs)
D12 = dm.pairwise(X1, X2)
assert_array_almost_equal(D12, D_true)
def check_cdist_bool(metric, D_true):
dm = DistanceMetric.get_metric(metric)
D12 = dm.pairwise(X1_bool, X2_bool)
assert_array_almost_equal(D12, D_true)
@pytest.mark.parametrize("metric", METRICS_DEFAULT_PARAMS)
@pytest.mark.parametrize("X1, X2", [(X1, X2), (X1_mmap, X2_mmap)])
def test_pdist(metric, X1, X2):
argdict = METRICS_DEFAULT_PARAMS[metric]
keys = argdict.keys()
for vals in itertools.product(*argdict.values()):
kwargs = dict(zip(keys, vals))
if metric == "mahalanobis":
# See: https://github.com/scipy/scipy/issues/13861
pytest.xfail("scipy#13861: pdist with 'mahalanobis' fails on memmap data")
elif metric == "wminkowski":
if sp_version >= parse_version("1.8.0"):
pytest.skip("wminkowski will be removed in SciPy 1.8.0")
# wminkowski is deprecated in SciPy 1.6.0 and removed in 1.8.0
ExceptionToAssert = None
if sp_version >= parse_version("1.6.0"):
ExceptionToAssert = DeprecationWarning
with pytest.warns(ExceptionToAssert):
D_true = cdist(X1, X1, metric, **kwargs)
else:
D_true = cdist(X1, X1, metric, **kwargs)
check_pdist(metric, kwargs, D_true)
@pytest.mark.parametrize("metric", BOOL_METRICS)
@pytest.mark.parametrize("X1_bool", [X1_bool, X1_bool_mmap])
def test_pdist_bool_metrics(metric, X1_bool):
D_true = cdist(X1_bool, X1_bool, metric)
check_pdist_bool(metric, D_true)
def check_pdist(metric, kwargs, D_true):
dm = DistanceMetric.get_metric(metric, **kwargs)
D12 = dm.pairwise(X1)
assert_array_almost_equal(D12, D_true)
def check_pdist_bool(metric, D_true):
dm = DistanceMetric.get_metric(metric)
D12 = dm.pairwise(X1_bool)
# Based on https://github.com/scipy/scipy/pull/7373
# When comparing two all-zero vectors, scipy>=1.2.0 jaccard metric
# was changed to return 0, instead of nan.
if metric == "jaccard" and sp_version < parse_version("1.2.0"):
D_true[np.isnan(D_true)] = 0
assert_array_almost_equal(D12, D_true)
@pytest.mark.parametrize("metric", METRICS_DEFAULT_PARAMS)
def test_pickle(metric):
argdict = METRICS_DEFAULT_PARAMS[metric]
keys = argdict.keys()
for vals in itertools.product(*argdict.values()):
kwargs = dict(zip(keys, vals))
check_pickle(metric, kwargs)
@pytest.mark.parametrize("metric", BOOL_METRICS)
@pytest.mark.parametrize("X1_bool", [X1_bool, X1_bool_mmap])
def test_pickle_bool_metrics(metric, X1_bool):
dm = DistanceMetric.get_metric(metric)
D1 = dm.pairwise(X1_bool)
dm2 = pickle.loads(pickle.dumps(dm))
D2 = dm2.pairwise(X1_bool)
assert_array_almost_equal(D1, D2)
def check_pickle(metric, kwargs):
dm = DistanceMetric.get_metric(metric, **kwargs)
D1 = dm.pairwise(X1)
dm2 = pickle.loads(pickle.dumps(dm))
D2 = dm2.pairwise(X1)
assert_array_almost_equal(D1, D2)
def test_haversine_metric():
def haversine_slow(x1, x2):
return 2 * np.arcsin(
np.sqrt(
np.sin(0.5 * (x1[0] - x2[0])) ** 2
+ np.cos(x1[0]) * np.cos(x2[0]) * np.sin(0.5 * (x1[1] - x2[1])) ** 2
)
)
X = np.random.random((10, 2))
haversine = DistanceMetric.get_metric("haversine")
D1 = haversine.pairwise(X)
D2 = np.zeros_like(D1)
for i, x1 in enumerate(X):
for j, x2 in enumerate(X):
D2[i, j] = haversine_slow(x1, x2)
assert_array_almost_equal(D1, D2)
assert_array_almost_equal(haversine.dist_to_rdist(D1), np.sin(0.5 * D2) ** 2)
def test_pyfunc_metric():
X = np.random.random((10, 3))
euclidean = DistanceMetric.get_metric("euclidean")
pyfunc = DistanceMetric.get_metric("pyfunc", func=dist_func, p=2)
# Check that DistanceMetric objects initialized with both a callable
# metric and a predefined metric are picklable
euclidean_pkl = pickle.loads(pickle.dumps(euclidean))
pyfunc_pkl = pickle.loads(pickle.dumps(pyfunc))
D1 = euclidean.pairwise(X)
D2 = pyfunc.pairwise(X)
D1_pkl = euclidean_pkl.pairwise(X)
D2_pkl = pyfunc_pkl.pairwise(X)
assert_array_almost_equal(D1, D2)
assert_array_almost_equal(D1_pkl, D2_pkl)
def test_bad_pyfunc_metric():
def wrong_distance(x, y):
return "1"
X = np.ones((5, 2))
msg = "Custom distance function must accept two vectors"
with pytest.raises(TypeError, match=msg):
BallTree(X, metric=wrong_distance)
def test_input_data_size():
# Regression test for #6288
# Previously, a metric requiring a particular input dimension would fail
def custom_metric(x, y):
assert x.shape[0] == 3
return np.sum((x - y) ** 2)
rng = check_random_state(0)
X = rng.rand(10, 3)
pyfunc = DistanceMetric.get_metric("pyfunc", func=custom_metric)
eucl = DistanceMetric.get_metric("euclidean")
assert_array_almost_equal(pyfunc.pairwise(X), eucl.pairwise(X) ** 2)
kjwilcox/digital_heist | src/engine.py | Python | gpl-2.0 | 1,911 | 0.005756
#!/usr/bin/python3
import exhibition
import inputdevice
import data
from levels import level1
import os
import pygame
import logging
log = logging.getLogger(__name__)
class Engine:
""" Main class responsible for running the game.
Controls game setup and runs the main loop.
Passes input to game and handles the event queue. """
def __init__(self):
""" Creates the display surface and loads the game assets. """
pygame.init()
log.info("Initializing display surface at {}x{}".format(
data.SCREEN_RESOLUTION[0], data.SCREEN_RESOLUTION[1]))
self.screen = pygame.display.set_mode(data.SCREEN_RESOLUTION)
pygame.display.set_caption("digital heist")
# load image resources
exhibition.images(os.path.join(data.DATA_DIR, "images"))
exhibition.optimize()
self.level = level1.Level1()
self.input = inputdevice.KeyboardInput()
def run(self):
""" Starts the game and runs the main game loop. """
self.main_loop()
pygame.quit()
def main_loop(self):
clock = pygame.time.Clock()
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
return
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE:
return
# game update
self.input.update()
self.level.process_input(self.input)
complete = self.level.update()
self.screen.fill((0, 0, 0))
self.level.render()
pygame.display.flip()
# noinspection PyUnusedLocal
ms = clock.tick(60)
if complete:
break
google-research/exoplanet-ml | exoplanet-ml/astronet/ops/metrics.py | Python | apache-2.0 | 5,503 | 0.005452
# Copyright 2018 The Exoplanet ML Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for computing evaluation metrics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def _metric_variable(name, shape, dtype):
"""Creates a Variable in LOCAL_VARIABLES and METRIC_VARIABLES collections."""
return tf.Variable(
initial_value=tf.zeros(shape, dtype),
trainable=False,
collections=[tf.GraphKeys.LOCAL_VARIABLES, tf.GraphKeys.METRIC_VARIABLES],
name=name)
def _build_metrics(labels, predictions, weights, batch_losses, output_dim=1):
"""Builds TensorFlow operations to compute model evaluation metrics.
Args:
labels: Tensor with shape [batch_size].
predictions: Tensor with shape [batch_size, output_dim].
weights: Tensor with shape [batch_size].
batch_losses: Tensor with shape [batch_size].
output_dim: Dimension of model output
Returns:
A dictionary {metric_name: (metric_value, update_op)}.
"""
# Compute the predicted labels.
assert len(predictions.shape) == 2
binary_classification = output_dim == 1
if binary_classification:
assert predictions.shape[1] == 1
predictions = tf.squeeze(predictions, axis=[1])
predicted_labels = tf.cast(
tf.greater(predictions, 0.5), tf.int32, name="predicted_labels")
else:
predicted_labels = tf.argmax(
predictions, 1, name="predicted_labels", output_type=tf.int32)
metrics = {}
with tf.name_scope("metrics"):
# Total number of examples.
num_examples = _metric_variable("num_examples", [], tf.float32)
update_num_examples = tf.assign_add(num_examples, tf.reduce_sum(weights))
metrics["num_examples"] = (num_examples.read_value(), update_num_examples)
# Accuracy metrics.
num_correct = _metric_variable("num_correct", [], tf.float32)
is_correct = tf.equal(labels, predicted_labels)
weighted_is_correct = weights * tf.cast(is_correct, tf.float32)
update_num_correct = tf.assign_add(num_correct,
tf.reduce_sum(weighted_is_correct))
metrics["accuracy/num_correct"] = (num_correct.read_value(),
update_num_correct)
accuracy = tf.div(num_correct, num_examples, name="accuracy")
metrics["accuracy/accuracy"] = (accuracy, tf.no_op())
# Weighted cross-entropy loss.
metrics["losses/weighted_cross_entropy"] = tf.metrics.mean(
batch_losses, weights=weights, name="cross_entropy_loss")
def _count_condition(name, labels_value, predicted_value):
"""Creates a counter for given values of predictions and labels."""
count = _metric_variable(name, [], tf.float32)
is_equal = tf.logical_and(
tf.equal(labels, labels_value),
tf.equal(predicted_labels, predicted_value))
weighted_is_equal = weights * tf.cast(is_equal, tf.float32)
update_op = tf.assign_add(count, tf.reduce_sum(weighted_is_equal))
return count.read_value(), update_op
# Confusion matrix metrics.
num_labels = 2 if binary_classification else output_dim
for gold_label in range(num_labels):
for pred_label in range(num_labels):
metric_name = "confusion_matrix/label_{}_pred_{}".format(
gold_label, pred_label)
metrics[metric_name] = _count_condition(
metric_name, labels_value=gold_label, predicted_value=pred_label)
# Possibly create AUC metric for binary classification.
if binary_classification:
labels = tf.cast(labels, dtype=tf.bool)
metrics["auc"] = tf.metrics.auc(
labels, predictions, weights=weights, num_thresholds=1000)
return metrics
def create_metric_fn(model):
"""Creates a tuple (metric_fn, metric_fn_inputs).
This function is primarily used for creating a TPUEstimator.
The result of calling metric_fn(**metric_fn_inputs) is a dictionary
{metric_name: (metric_value, update_op)}.
Args:
model: Instance of AstroModel.
Returns:
A tuple (metric_fn, metric_fn_inputs).
"""
weights = model.weights
if weights is None:
weights = tf.ones_like(model.labels, dtype=tf.float32)
metric_fn_inputs = {
"labels": model.labels,
"predictions": model.predictions,
"weights": weights,
"batch_losses": model.batch_losses,
}
def metric_fn(labels, predictions, weights, batch_losses):
return _build_metrics(
labels,
predictions,
weights,
batch_losses,
output_dim=model.hparams.output_dim)
return metric_fn, metric_fn_inputs
def create_metrics(model):
"""Creates a dictionary {metric_name: (metric_value, update_op)}.
This function is primarily used for creating an Estimator.
Args:
model: Instance of AstroModel.
Returns:
A dictionary {metric_name: (metric_value, update_op)}.
"""
metric_fn, metric_fn_inputs = create_metric_fn(model)
return metric_fn(**metric_fn_inputs)
pedrotari7/advent_of_code | py/2017/22B.py | Python | mit | 600 | 0.013333
weakened, flagged = set(), set()
infected = {(i,j) for i,a in enumerate(open('22.in')) for j,b in enumerate(a.strip('\n')) if b=='#'}
s = len(open('22.in').readlines())/2
p = (s,s)
total = 0
d = (-1,0)
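# Evolved-virus rules (AoC 2017 day 22, part two): clean -> weakened ->
# infected -> flagged -> clean. Turn left on clean, keep course on weakened,
# turn right on infected, reverse on flagged; `total` counts bursts that
# newly infect a node.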
for _ in xrange(10**7):
if p in infected:
d = d[1], -1*d[0]
infected.remove(p)
flagged.add(p)
elif p in weakened:
total += 1
weakened.remove(p)
infected.add(p)
elif p in flagged:
d = -1*d[0], -1*d[1]
flagged.remove(p)
else:
d = -1*d[1], d[0]
weakened.add(p)
p = p[0]+d[0], p[1]+d[1]
print total
thelabnyc/django-oscar-wfrs | sandbox/basket/utils.py | Python | isc | 128 | 0.007813
from oscarbluelight.basket_utils import BluelightLineOfferConsumer as LineOfferConsumer
__all__ = [
"LineOfferConsumer",
]
bashu/django-feedback-form | feedback_form/south_migrations/0001_initial.py | Python | bsd-3-clause | 4,849 | 0.008043
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Feedback'
db.create_table('feedback_form_feedback', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
('body', self.gf('django.db.models.fields.TextField')()),
('sent_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('feedback_form', ['Feedback'])
def backwards(self, orm):
# Deleting model 'Feedback'
db.delete_table('feedback_form_feedback')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'feedback_form.feedback': {
'Meta': {'object_name': 'Feedback'},
'body': ('django.db.models.fields.TextField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sent_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
}
}
complete_apps = ['feedback_form']
macbre/mobify | mobify/sources/oreilly.py | Python | mit | 1,627 | 0.003697
# -*- coding: utf-8 -*-
import re
from mobify.source import MobifySource
class OReillySource(MobifySource):
HEADER = u"""
<h1>{title}</h1>
<p><strong>{lead}</strong></p>
<p><small>{author} @ oreilly.com</small><br></p>
"""
@staticmethod
def is_my_url(url):
# https://www.oreilly.com/ideas/the-evolution-of-devops
return 'oreilly.com/ideas/' in url
def get_inner_html(self):
article = self.xpath('//*[@itemprop="articleBody"]')
xpaths = [
'aside',
'div',
'figure[@class]',
]
# clean up the HTML
article = self.remove_nodes(article, xpaths)
html = self.get_node_html(article)
return html
def get_html(self):
# add a title and a footer
return '\n'.join([
self.HEADER.format(title=self.get_title(), author=self.get_author(), lead=self.get_lead()).strip(),
self.get_inner_html()
]).strip()
def get_title(self):
# <meta property="og:title" content="Radio w Poznaniu rozpoczęło nadawanie 90 lat temu" />
return self.get_node('//meta[@property="og:title"]', attr='content').strip()
def get_lead(self):
# <meta property="og:description" content="90 lat temu, 24 kwietnia 1927 roku nadawanie rozpoczęła..." />
lead = self.get_node('//meta[@property="og:description"]', attr='content').strip()
return lead.strip() if lead else ''
def get_author(self):
return self.get_node('//meta[@property="article:author"]', attr='content').strip()
def get_language(self):
return 'en'
easyw/kicad-3d-models-in-freecad | cadquery/FCAD_script_generator/Fuse/main_generator.py | Python | gpl-2.0 | 8,654 | 0.012364
# -*- coding: utf-8 -*-
#!/usr/bin/python
#
# This is derived from a cadquery script for generating PDIP models in X3D format
#
# from https://bitbucket.org/hyOzd/freecad-macros
# author hyOzd
# This is a
# Dimensions are from Microchips Packaging Specification document:
# DS00000049BY. Body drawing is the same as QFP generator#
## requirements
## cadquery FreeCAD plugin
## https://github.com/jmwright/cadquery-freecad-module
## to run the script just do: freecad main_generator.py modelName
## e.g. c:\freecad\bin\freecad main_generator.py DIP8
## the script will generate STEP and VRML parametric models
## to be used with kicad StepUp script
#* These are a FreeCAD & cadquery tools *
#* to export generated models in STEP & VRML format. *
#* *
#* cadquery script for generating QFP/SOIC/SSOP/TSSOP models in STEP AP214 *
#* Copyright (c) 2015 *
#* Maurice https://launchpad.net/~easyw *
#* All trademarks within this guide belong to their legitimate owners. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., *
#* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA *
#* *
#****************************************************************************
__title__ = "make Valve 3D models"
__author__ = "Stefan, based on Valve script"
__Comment__ = 'make varistor 3D models exported to STEP and VRML for Kicad StepUP script'
___ver___ = "1.3.3 14/08/2015"
# maui import cadquery as cq
# maui from Helpers import show
from collections import namedtuple
import math
import sys, os
import datetime
from datetime import datetime
sys.path.append("../_tools")
import exportPartToVRML as expVRML
import shaderColors
# maui start
import FreeCAD, Draft, FreeCADGui
import ImportGui
import FreeCADGui as Gui
#from Gui.Command import *
outdir=os.path.dirname(os.path.realpath(__file__) + os.sep + '..' + os.sep + '_3Dmodels')
scriptdir=os.path.dirname(os.path.realpath(__file__))
sys.path.append(outdir)
sys.path.append(scriptdir)
if FreeCAD.GuiUp:
from PySide import QtCore, QtGui
# Licence information of the generated models.
#################################################################################################
STR_licAuthor = "kicad StepUp"
STR_licEmail = "ksu"
STR_licOrgSys = "kicad StepUp"
STR_licPreProc = "OCC"
STR_licOrg = "FreeCAD"
#################################################################################################
import cq_belfuse # modules parameters
from cq_belfuse import *
import cq_keystone # modules parameters
from cq_keystone import *
import cq_bulgin # modules parameters
from cq_bulgin import *
import cq_schurter # modules parameters
from cq_schurter import *
import cq_tme # modules parameters
from cq_tme import *
import cq_littlefuse # modules parameters
from cq_littlefuse import *
different_models = [
cq_belfuse(),
cq_keystone(),
cq_bulgin(),
cq_schurter(),
cq_tme(),
cq_littlefuse(),
]
def make_3D_model(models_dir, model_class, modelID):
LIST_license = ["",]
CheckedmodelName = 'A_' + modelID.replace('.', '').replace('-', '_').replace('(', '').replace(')', '')
CheckedmodelName = CheckedmodelName
Newdoc = App.newDocument(CheckedmodelName)
App.setActiveDocument(CheckedmodelName)
Gui.ActiveDocument=Gui.getDocument(CheckedmodelName)
destination_dir = model_class.get_dest_3D_dir(modelID)
material_substitutions = model_class.make_3D_model(modelID)
modelName = model_class.get_model_name(modelID)
doc = FreeCAD.ActiveDocument
doc.Label = CheckedmodelName
objs=GetListOfObjects(FreeCAD, doc)
objs[0].Label = CheckedmodelName
restore_Main_Tools()
script_dir=os.path.dirname(os.path.realpath(__file__))
expVRML.say(models_dir)
out_dir=models_dir+os.sep+destination_dir
if not os.path.exists(out_dir):
os.makedirs(out_dir)
exportSTEP(doc, modelName, out_dir)
if LIST_license[0]=="":
LIST_license=Lic.LIST_int_license
LIST_license.append("")
Lic.addLicenseToStep(out_dir + os.sep, modelName+".step", LIST_license,\
STR_licAuthor, STR_licEmail, STR_licOrgSys, STR_licOrg, STR_licPreProc)
# scale and export Vrml model
scale=1/2.54
#exportVRML(doc,modelName,scale,out_dir)
del objs
objs=GetListOfObjects(FreeCAD, doc)
expVRML.say("######################################################################")
expVRML.say(objs)
expVRML.say("######################################################################")
export_objects, used_color_keys = expVRML.determineColors(Gui, objs, material_substitutions)
export_file_name=out_dir+os.sep+modelName+'.wrl'
colored_meshes = expVRML.getColoredMesh(Gui, export_objects , scale)
#expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys)# , LIST_license
expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys, LIST_license)
#scale=0.3937001
#exportVRML(doc,modelName,scale,out_dir)
# Save the doc in Native FC format
saveFCdoc(App, Gui, doc, modelName,out_dir)
#display BBox
Gui.activateWorkbench("PartWorkbench")
Gui.SendMsgToActiveView("ViewFit")
Gui.activeDocument().activeView().viewAxometric()
#FreeCADGui.ActiveDocument.activeObject.BoundingBox = True
def run():
## # get variant names from command line
return
#import step_license as L
import add_license as Lic
# when run from command line
if __name__ == "__main__" or __name__ == "main_generator":
FreeCAD.Console.PrintMessage('\r\nRunning...\r\n')
full_path=os.path.realpath(__file__)
expVRML.say(full_path)
scriptdir=os.path.dirname(os.path.realpath(__file__))
expVRML.say(scriptdir)
sub_path = full_path.split(scriptdir)
expVRML.say(sub_path)
sub_dir_name =full_path.split(os.sep)[-2]
expVRML.say(sub_dir_name)
sub_path = full_path.split(sub_dir_name)[0]
expVRML.say(sub_path)
models_dir=sub_path+"_3Dmodels"
model_to_build = ''
if len(sys.argv) < 3:
FreeCAD.Console.PrintMessage('No variant name is given, add a valid model name as an argument or the argument "all"\r\n')
sys.exit()
else:
model_to_build=sys.argv[2]
found_one = False
if len(model_to_build) > 0:
if model_to_build == 'all' or model_to_build == 'All' or model_to_build == 'ALL':
found_one = True
for n in different_models:
listall = n.get_list_all()
for i in listall:
make_3D_model(models_dir, n, i)
elif model_to_build == 'list':
found_one = True
FreeCAD.Console.PrintMessage('\r\n')
for n in different_models:
|
|
GISPPU/GrenadaLandInformation
|
geonode/context_processors.py
|
Python
|
gpl-3.0
| 2,437
| 0.010669
|
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf import settings
from geonode import get_version
from geonode.catalogue import default_catalogue_backend
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from geonode.utils import ogc_server_settings
def resource_urls(request):
"""Global values to pass to templates"""
site = Site.objects.get_current()
return dict(
STATIC_URL=settings.STATIC_URL,
GEOSERVER_BASE_URL=ogc_server_settings.public_url,
CATALOGUE_BASE_URL=default_catalogue_backend()['URL'],
REGISTRATION_OPEN=settings.REGISTRATION_OPEN,
VERSION=get_version(),
SITE_NAME=site.name,
SITE_DOMAIN=site.domain,
GROUPS_APP = True if "geonode.contrib.groups" in settings.INSTALLED_APPS else False,
UPLOADER_URL = reverse('data_upload') if getattr(settings, 'UPLOADER', dict()).get('BACKEND', 'geonode.rest') == 'geonode.importer' else reverse('layer_upload'),
GEOGIT_ENABLED = ogc_server_settings.GEOGIT_ENABLED,
TIME_ENABLED = getattr(settings, 'UPLOADER', dict()).get('OPTIONS', dict()).get('TIME_ENABLED', False),
DEBUG_STATIC = getattr(settings, "DEBUG_STATIC", False),
        MF_PRINT_ENABLED = ogc_server_settings.MAPFISH_PRINT_ENABLED,
        PRINTNG_ENABLED = ogc_server_settings.PRINTNG_ENABLED,
GS_SECURITY_ENABLED = ogc_server_settings.GEONODE_SECURITY_ENABLED,
PROXY_URL = getattr(settings, 'PROXY_URL', '/proxy/?url='),
SOCIAL_BUTTONS = getattr(settings, 'SOCIAL_BUTTONS', True),
USE_DOCUMENTS = 'geonode.documents' in settings.INSTALLED_APPS
)
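
# For orientation, a minimal sketch of how these values surface in a template,
# assuming this processor is enabled in the project's context processor
# settings (the template snippet below is hypothetical, not part of GeoNode):
#
#   <title>{{ SITE_NAME }} (v{{ VERSION }})</title>
#   <script src="{{ STATIC_URL }}app.js"></script>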
|
abstrakraft/repo
|
subcmds/forall.py
|
Python
|
apache-2.0
| 7,450
| 0.010604
|
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fcntl
import re
import os
import select
import sys
import subprocess
from color import Coloring
from command import Command, MirrorSafeCommand
_CAN_COLOR = [
'branch',
'diff',
'grep',
'log',
]
class ForallColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'forall')
self.project = self.printer('project', attr='bold')
class Forall(Command, MirrorSafeCommand):
common = False
helpSummary = "Run a shell command in each project"
helpUsage = """
%prog [<project>...] -c <command> [<arg>...]
"""
helpDescription = """
Executes the same shell command in each project.
Output Formatting
-----------------
The -p option causes '%prog' to bind pipes to the command's stdin,
stdout and stderr streams, and pipe all output into a continuous
stream that is displayed in a single pager session. Project headings
are inserted before the output of each command is displayed. If the
command produces no output in a project, no heading is displayed.
The formatting convention used by -p is very suitable for some
types of searching, e.g. `repo forall -p -c git log -SFoo` will
print all commits that add or remove references to Foo.
The -v option causes '%prog' to display stderr messages if a
command produces output only on stderr. Normally the -p option
causes command output to be suppressed until the command produces
at least one byte of output on stdout.
Environment
-----------
pwd is the project's working directory. If the current client is
a mirror client, then pwd is the Git repository.
REPO_PROJECT is set to the unique name of the project.
REPO_PATH is the path relative to the root of the client.
REPO_REMOTE is the name of the remote system from the manifest.
REPO_LREV is the name of the revision from the manifest, translated
to a local tracking branch. If you need to pass the manifest
revision to a locally executed git command, use REPO_LREV.
REPO_RREV is the name of the revision from the manifest, exactly
as written in the manifest.
shell positional arguments ($1, $2, .., $#) are set to any arguments
following <command>.
Unless -p is used, stdin, stdout, stderr are inherited from the
terminal and are not redirected.
"""
def _Options(self, p):
def cmd(option, opt_str, value, parser):
setattr(parser.values, option.dest, list(parser.rargs))
while parser.rargs:
del parser.rargs[0]
p.add_option('-c', '--command',
help='Command (and arguments) to execute',
dest='command',
action='callback',
callback=cmd)
g = p.add_option_group('Output')
g.add_option('-p',
dest='project_header', action='store_true',
help='Show project headers before output')
g.add_option('-v', '--verbose',
dest='verbose', action='store_true',
help='Show command error messages')
def WantPager(self, opt):
return opt.project_header
def Execute(self, opt, args):
if not opt.command:
self.Usage()
cmd = [opt.command[0]]
shell = True
if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]):
shell = False
if shell:
cmd.append(cmd[0])
cmd.extend(opt.command[1:])
if opt.project_header \
and not shell \
and cmd[0] == 'git':
# If this is a direct git command that can enable colorized
# output and the user prefers coloring, add --color into the
# command line because we are going to wrap the command into
# a pipe and git won't know coloring should activate.
#
for cn in cmd[1:]:
if not cn.startswith('-'):
break
if cn in _CAN_COLOR:
class ColorCmd(Coloring):
def __init__(self, config, cmd):
Coloring.__init__(self, config, cmd)
if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
cmd.insert(cmd.index(cn) + 1, '--color')
mirror = self.manifest.IsMirror
out = ForallColoring(self.manifest.manifestProject.config)
out.redirect(sys.stdout)
rc = 0
first = True
for project in self.GetProjects(args):
env = dict(os.environ.iteritems())
def setenv(name, val):
if val is None:
val = ''
env[name] = val
setenv('REPO_PROJECT', project.name)
setenv('REPO_PATH', project.relpath)
setenv('REPO_REMOTE', project.remote.name)
setenv('REPO_LREV', project.GetRevisionId())
setenv('REPO_RREV', project.revisionExpr)
if mirror:
setenv('GIT_DIR', project.gitdir)
cwd = project.gitdir
else:
cwd = project.worktree
if not os.path.exists(cwd):
if (opt.project_header and opt.verbose) \
or not opt.project_header:
print >>sys.stderr, 'skipping %s/' % project.relpath
continue
if opt.project_header:
stdin = subprocess.PIPE
stdout = subprocess.PIPE
stderr = subprocess.PIPE
else:
stdin = None
stdout = None
stderr = None
p = subprocess.Popen(cmd,
cwd = cwd,
shell = shell,
env = env,
stdin = stdin,
stdout = stdout,
stderr = stderr)
if opt.project_header:
class sfd(object):
def __init__(self, fd, dest):
self.fd = fd
self.dest = dest
def fileno(self):
return self.fd.fileno()
empty = True
didout = False
errbuf = ''
p.stdin.close()
        s_in = [sfd(p.stdout, sys.stdout),
                sfd(p.stderr, sys.stderr)]
for s in s_in:
flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
while s_in:
in_ready, out_ready, err_ready = select.select(s_in, [], [])
for s in in_ready:
buf = s.fd.read(4096)
if not buf:
s.fd.close()
s_in.remove(s)
continue
if not opt.verbose:
if s.fd == p.stdout:
didout = True
else:
errbuf += buf
continue
if empty:
if first:
first = False
else:
out.nl()
out.project('project %s/', project.relpath)
out.nl()
out.flush()
if errbuf:
sys.stderr.write(errbuf)
sys.stderr.flush()
errbuf = ''
empty = False
s.dest.write(buf)
s.dest.flush()
r = p.wait()
if r != 0 and r != rc:
rc = r
if rc != 0:
sys.exit(rc)
|
MusculoskeletalAtlasProject/mapclient-tests
|
test_resources/updater_test/mayaviviewerstep-master/mapclientplugins/mayaviviewerstep/widgets/ui_mayaviviewerwidget.py
|
Python
|
apache-2.0
| 12,774
| 0.004619
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mayaviviewerwidget.ui'
#
# Created: Mon Nov 11 18:02:00 2013
# by: pyside-uic 0.2.13 running on PySide 1.1.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(914, 548)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
self.horizontalLayout_2 = QtGui.QHBoxLayout(Dialog)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.widget = QtGui.QWidget(Dialog)
self.widget.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget.sizePolicy().hasHeightForWidth())
self.widget.setSizePolicy(sizePolicy)
self.widget.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.widget.setObjectName("widget")
self.gridLayout = QtGui.QGridLayout(self.widget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.widget1 = QtGui.QWidget(self.widget)
self.widget1.setMaximumSize(QtCore.QSize(500, 16777215))
self.widget1.setObjectName("widget1")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.widget1)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.verticalLayout.setObjectName("verticalLayout")
self.tableWidget = QtGui.QTableWidget(self.widget1)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidget.sizePolicy().hasHeightForWidth())
self.tableWidget.setSizePolicy(sizePolicy)
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(2)
self.tableWidget.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
self.tableWidget.horizontalHeader().setVisible(True)
self.tableWidget.horizontalHeader().setCascadingSectionResizes(False)
self.tableWidget.horizontalHeader().setDefaultSectionSize(100)
self.verticalLayout.addWidget(self.tableWidget)
self.sliceplanegroup = QtGui.QGroupBox(self.widget1)
self.sliceplanegroup.setEnabled(False)
self.sliceplanegroup.setObjectName("sliceplanegroup")
self.horizontalLayout = QtGui.QHBoxLayout(self.sliceplanegroup)
self.horizontalLayout.setObjectName("horizontalLayout")
self.slicePlaneRadioX = QtGui.QRadioButton(self.sliceplanegroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.slicePlaneRadioX.sizePolicy().hasHeightForWidth())
self.slicePlaneRadioX.setSizePolicy(sizePolicy)
self.slicePlaneRadioX.setChecked(False)
self.slicePlaneRadioX.setObjectName("slicePlaneRadioX")
self.horizontalLayout.addWidget(self.slicePlaneRadioX)
self.slicePlaneRadioY = QtGui.QRadioButton(self.sliceplanegroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.slicePlaneRadioY.sizePolicy().hasHeightForWidth())
self.slicePlaneRadioY.setSizePolicy(sizePolicy)
self.slicePlaneRadioY.setChecked(True)
self.slicePlaneRadioY.setObjectName("slicePlaneRadioY")
self.horizontalLayout.addWidget(self.slicePlaneRadioY)
self.slicePlaneRadioZ = QtGui.QRadioButton(self.sliceplanegroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.slicePlaneRadioZ.sizePolicy().hasHeightForWidth())
self.slicePlaneRadioZ.setSizePolicy(sizePolicy)
self.slicePlaneRadioZ.setObjectName("slicePlaneRadioZ")
        self.horizontalLayout.addWidget(self.slicePlaneRadioZ)
self.verticalLayout.addWidget(self.sliceplanegroup)
self.screenshotgroup = QtGui.QGroupBox(self.widget1)
self.screenshotgroup.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.screenshotgroup.setObjectName("screenshotgroup")
self.formLayout = QtGui.QFormLayout(self.screenshotgroup)
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName("formLayout")
self.pixelsXLabel = QtGui.QLabel(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pixelsXLabel.sizePolicy().hasHeightForWidth())
self.pixelsXLabel.setSizePolicy(sizePolicy)
self.pixelsXLabel.setObjectName("pixelsXLabel")
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.pixelsXLabel)
self.screenshotPixelXLineEdit = QtGui.QLineEdit(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.screenshotPixelXLineEdit.sizePolicy().hasHeightForWidth())
self.screenshotPixelXLineEdit.setSizePolicy(sizePolicy)
self.screenshotPixelXLineEdit.setObjectName("screenshotPixelXLineEdit")
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.screenshotPixelXLineEdit)
self.pixelsYLabel = QtGui.QLabel(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pixelsYLabel.sizePolicy().hasHeightForWidth())
self.pixelsYLabel.setSizePolicy(sizePolicy)
self.pixelsYLabel.setObjectName("pixelsYLabel")
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.pixelsYLabel)
self.screenshotPixelYLineEdit = QtGui.QLineEdit(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.screenshotPixelYLineEdit.sizePolicy().hasHeightForWidth())
self.screenshotPixelYLineEdit.setSizePolicy(sizePolicy)
self.screenshotPixelYLineEdit.setObjectName("screenshotPixelYLineEdit")
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.screenshotPixelYLineEdit)
self.screenshotFilenameLabel = QtGui.QLabel(self.screenshotgroup)
self.screenshotFilenameLabel.setObjectName("screenshotFilenameLabel")
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.screenshotFilenameLabel)
self.screenshotFilenameLineEdit = QtGui.QLineEdit(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
|
akvo/akvo-rsr
|
akvo/iati/exports/elements/participating_org.py
|
Python
|
agpl-3.0
| 1,717
| 0.00233
|
# -*- coding: utf-8 -*-
# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from lxml import etree
def participating_org(project):
"""
Generate the participating-org element.
:param project: Project object
:return: A list of Etree elements
"""
partnership_elements = []
from akvo.rsr.models import Partnership
for partnership in project.partnerships.all():
# Don't include reporting orgs or sponsor partners
if partnership.iati_organisation_role in Partnership.IATI_ROLE_LIST[:4] and \
partnership.organisation:
org = partnership.organisation.get_original()
element = etree.Element("participating-org")
if org.iati_org_id:
element.attrib['ref'] = org.iati_org_id
if org.new_organisation_type:
element.attrib['type'] = str(org.new_organisation_type)
if partnership.iati_organisation_role:
element.attrib['role'] = str(partnership.iati_organisation_role)
if partnership.iati_activity_id:
element.attrib['activity-id'] = partnership.iati_activity_id
# TODO: Funding amount
narrative_element = etree.SubElement(element, "narrative")
if org.long_name:
narrative_element.text = org.long_name
elif org.name:
narrative_element.text = org.name
partnership_elements.append(element)
return partnership_elements
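
# For orientation, one generated element serializes roughly as follows
# (attribute values are hypothetical; real values come from the partnership
# and organisation records):
#
#   <participating-org ref="XM-EX-12345" type="21" role="1">
#     <narrative>Example Organisation</narrative>
#   </participating-org>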
|
treasure-data/td-client-python
|
tdclient/client.py
|
Python
|
apache-2.0
| 32,550
| 0.001505
|
#!/usr/bin/env python
import json
from tdclient import api, models
class Client:
"""API Client for Treasure Data Service
"""
def __init__(self, *args, **kwargs):
self._api = api.API(*args, **kwargs)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
@property
def api(self):
"""
an instance of :class:`tdclient.api.API`
"""
return self._api
@property
def apikey(self):
"""
API key string.
"""
return self._api.apikey
def server_status(self):
"""
Returns:
            a string representing the current server status.
"""
return self.api.server_status()
def create_database(self, db_name, **kwargs):
"""
Args:
db_name (str): name of a database to create
Returns:
`True` if success
"""
return self.api.create_database(db_name, **kwargs)
def delete_database(self, db_name):
"""
Args:
db_name (str): name of database to delete
Returns:
`True` if success
"""
return self.api.delete_database(db_name)
def databases(self):
"""
Returns:
a list of :class:`tdclient.models.Database`
"""
databases = self.api.list_databases()
return [
models.Database(self, db_name, **kwargs)
for (db_name, kwargs) in databases.items()
]
def database(self, db_name):
"""
Args:
db_name (str): name of a database
Returns:
:class:`tdclient.models.Database`
"""
databases = self.api.list_databases()
for (name, kwargs) in databases.items():
if name == db_name:
return models.Database(self, name, **kwargs)
raise api.NotFoundError("Database '%s' does not exist" % (db_name))
def create_log_table(self, db_name, table_name):
"""
Args:
db_name (str): name of a database
table_name (str): name of a table to create
Returns:
`True` if success
"""
return self.api.create_log_table(db_name, table_name)
def swap_table(self, db_name, table_name1, table_name2):
"""
Args:
db_name (str): name of a database
table_name1 (str): original table name
table_name2 (str): table name you want to rename to
Returns:
`True` if success
"""
return self.api.swap_table(db_name, table_name1, table_name2)
def update_schema(self, db_name, table_name, schema):
"""Updates the schema of a table
Args:
db_name (str): name of a database
table_name (str): name of a table
            schema (list): a list object representing the schema definition (will
be converted to JSON)
e.g.
            .. code-block:: python
[
["member_id", # column name
"string", # data type
"mem_id", # alias of the column name
],
["row_index", "long", "row_ind"],
...
]
Returns:
`True` if success
"""
return self.api.update_schema(db_name, table_name, json.dumps(schema))
def update_expire(self, db_name, table_name, expire_days):
"""Set expiration date to a table
Args:
db_name (str): name of a database
table_name (str): name of a table
            expire_days (int): expiration date in days from today
Returns:
`True` if success
"""
return self.api.update_expire(db_name, table_name, expire_days)
def delete_table(self, db_name, table_name):
"""Delete a table
Args:
db_name (str): name of a database
table_name (str): name of a table
Returns:
            a string representing the type of the deleted table
"""
return self.api.delete_table(db_name, table_name)
def tables(self, db_name):
"""List existing tables
Args:
db_name (str): name of a database
Returns:
a list of :class:`tdclient.models.Table`
"""
m = self.api.list_tables(db_name)
return [
models.Table(self, db_name, table_name, **kwargs)
for (table_name, kwargs) in m.items()
]
def table(self, db_name, table_name):
"""
Args:
db_name (str): name of a database
table_name (str): name of a table
Returns:
:class:`tdclient.models.Table`
Raises:
tdclient.api.NotFoundError: if the table doesn't exist
"""
tables = self.tables(db_name)
for table in tables:
if table.table_name == table_name:
return table
raise api.NotFoundError("Table '%s.%s' does not exist" % (db_name, table_name))
def tail(self, db_name, table_name, count, to=None, _from=None, block=None):
"""Get the contents of the table in reverse order based on the registered time
(last data first).
Args:
db_name (str): Target database name.
table_name (str): Target table name.
count (int): Number for record to show up from the end.
to: Deprecated parameter.
_from: Deprecated parameter.
block: Deprecated parameter.
Returns:
[dict]: Contents of the table.
"""
return self.api.tail(db_name, table_name, count, to, _from, block)
def change_database(self, db_name, table_name, new_db_name):
"""Move a target table from it's original database to new destination database.
Args:
db_name (str): Target database name.
table_name (str): Target table name.
new_db_name (str): Destination database name to be moved.
Returns:
bool: `True` if succeeded.
"""
return self.api.change_database(db_name, table_name, new_db_name)
def query(
self,
db_name,
q,
result_url=None,
priority=None,
retry_limit=None,
type="hive",
**kwargs
):
"""Run a query on specified database table.
Args:
db_name (str): name of a database
q (str): a query string
result_url (str): result output URL. e.g.,
``postgresql://<username>:<password>@<hostname>:<port>/<database>/<table>``
priority (int or str): priority (e.g. "NORMAL", "HIGH", etc.)
retry_limit (int): retry limit
type (str): name of a query engine
Returns:
:class:`tdclient.models.Job`
Raises:
ValueError: if unknown query type has been specified
"""
# for compatibility, assume type is hive unless specifically specified
if type not in ["hive", "pig", "impala", "presto"]:
raise ValueError("The specified query type is not supported: %s" % (type))
job_id = self.api.query(
q,
type=type,
db=db_name,
result_url=result_url,
priority=priority,
retry_limit=retry_limit,
**kwargs
)
return models.Job(self, job_id, type, q)
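
    # A minimal usage sketch, assuming a valid API key and an existing
    # database named "sample_db" (both hypothetical):
    #
    #   with Client(apikey="YOUR_API_KEY") as client:
    #       job = client.query("sample_db",
    #                          "SELECT COUNT(1) FROM www_access",
    #                          type="presto")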
def jobs(self, _from=None, to=None, status=None, conditions=None):
"""List jobs
Args:
_from (int, optional): Gets the Job from the nth index in the list. Default: 0.
to (int, optional): Gets the Job up to the nth index in the list.
By default, the first 20 jobs in the list are displayed
status (str, optional): Filter by given status. {"queued", "running", "success", "error"}
conditions (str, optional): Condition for ``TIMESTAMPDIFF()`` to search for slow queries.
|
teltek/edx-platform
|
lms/djangoapps/badges/api/views.py
|
Python
|
agpl-3.0
| 6,051
| 0.003966
|
"""
API views for badges
"""
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
from opaque_keys import InvalidKeyError
from opaque_keys.edx.django.models import CourseKeyField
from opaque_keys.edx.keys import CourseKey
from rest_framework import generics
from rest_framework.exceptions import APIException
from badges.models import BadgeAssertion
from openedx.core.djangoapps.user_api.permissions import is_field_shared_factory
from openedx.core.lib.api.authentication import OAuth2AuthenticationAllowInactiveUser
from .serializers import BadgeAssertionSerializer
class InvalidCourseKeyError(APIException):
"""
    Raised when the given course key isn't valid.
"""
status_code = 400
default_detail = "The course key provided was invalid."
class UserBadgeAssertions(generics.ListAPIView):
"""
** Use cases **
Request a list of assertions for a user, optionally constrained to a course.
** Example Requests **
GET /api/badges/v1/assertions/user/{username}/
** Response Values **
Body comprised of a list of objects with the following fields:
* badge_class: The badge class the assertion was awarded for. Represented as an object
with the following fields:
* slug: The identifier for the badge class
* issuing_component: The software component responsible for issuing this badge.
* display_name: The display name of the badge.
* course_id: The course key of the course this badge is scoped to, or null if it isn't scoped to a course.
* description: A description of the award and its significance.
* criteria: A description of what is needed to obtain this award.
* image_url: A URL to the icon image used to represent this award.
* image_url: The baked assertion image derived from the badge_class icon-- contains metadata about the award
in its headers.
* assertion_url: The URL to the OpenBadges BadgeAssertion object, for verification by compatible tools
and software.
** Params **
* slug (optional): The identifier for a particular badge class to filter by.
* issuing_component (optional): The issuing component for a particular badge class to filter by
(requires slug to have been specified, or this will be ignored.) If slug is provided and this is not,
assumes the issuing_component should be empty.
* course_id (optional): Returns assertions that were awarded as part of a particular course. If slug is
provided, and this field is not specified, assumes that the target badge has an empty course_id field.
'*' may be used to get all badges with the specified slug, issuing_component combination across all courses.
** Returns **
* 200 on success, with a list of Badge Assertion objects.
* 403 if a user who does not have permission to masquerade as
another user specifies a username other than their own.
* 404 if the specified user does not exist
{
"count": 7,
"previous": null,
"num_pages": 1,
"results": [
{
"badge_class": {
"slug": "special_award",
"issuing_component": "openedx__course",
"display_name": "Very Special Award",
"course_id": "course-v1:edX+DemoX+Demo_Course",
"description": "Awarded for people who did something incredibly special",
"criteria": "Do something incredibly special.",
"image": "http://example.com/media/badge_classes/badges/special_xdpqpBv_9FYOZwN.png"
},
"image_url": "http://badges.example.com/media/issued/cd75b69fc1c979fcc1697c8403da2bdf.png",
"assertion_url": "http://badges.example.com/public/assertions/07020647-e772-44dd-98b7-d13d34335ca6"
},
...
]
}
"""
serializer_class = BadgeAssertionSerializer
authentication_classes = (
OAuth2AuthenticationAllowInactiveUser,
SessionAuthenticationAllowInactiveUser
)
permission_classes = (is_field_shared_factory("accomplishments_shared"),)
def filter_queryset(self, queryset):
"""
Return most recent to least recent badge.
"""
return queryset.order_by('-created')
def get_queryset(self):
"""
Get all badges for the username specified.
"""
queryset = BadgeAssertion.objects.filter(user__username=self.kwargs['username'])
provided_course_id = self.request.query_params.get('course_id')
if provided_course_id == '*':
# We might want to get all the matching course scoped badges to see how many courses
# a user managed to get a specific award on.
course_id = None
elif provided_course_id:
try:
course_id = CourseKey.from_string(provided_course_id)
except InvalidKeyError:
raise InvalidCourseKeyError
elif 'slug' not in self.request.query_params:
# Need to get all badges for the user.
            course_id = None
else:
# Django won't let us use 'None' for querying a ForeignKey field. We have to use this special
# 'Empty' value to indicate we're looking only for badges without a course key set.
course_id = CourseKeyField.Empty
if course_id is not None:
queryset = queryset.filter(badge_class__course_id=course_id)
if self.request.query_params.get('slug'):
queryset = queryset.filter(
badge_class__slug=self.request.query_params['slug'],
badge_class__issuing_component=self.request.query_params.get('issuing_component', '')
)
return queryset
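
    # Illustrative requests exercising the filtering above (username and slug
    # values are hypothetical):
    #
    #   GET /api/badges/v1/assertions/user/alice/?slug=special_award
    #   GET /api/badges/v1/assertions/user/alice/?slug=special_award&course_id=*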
|
ryfeus/lambda-packs
|
Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/resource_loader/__init__.py
|
Python
|
mit
| 532
| 0.003759
|
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.platform.resource_loader import get_data_files_path
from tensorflow.python.platform.resource_loader import get_path_to_datafile
from tensorflow.python.platform.resource_loader import get_root_dir_with_all_resources
from tensorflow.python.platform.resource_loader import load_resource
from tensorflow.python.platform.resource_loader import readahead_file_path
|
DramaFever/sst
|
src/sst/selftests/keys.py
|
Python
|
apache-2.0
| 1,307
| 0
|
import sst
import sst.actions
# tests for simulate_keys
sst.actions.set_base_url('http://localhost:%s/' % sst.DEVSERVER_PORT)
sst.actions.go_to('/')
sst.actions.assert_title('The Page Title')
sst.actions.write_textfield('text_1', 'Foobar..')
sst.actions.simulate_keys('text_1', 'BACK_SPACE')
sst.actions.simulate_keys('text_1', 'back_space') # not case sensitive
sst.actions.simulate_keys('text_1', 'SPACE')
sst.actions.simulate_keys('text_1', 'Space')
sst.actions.assert_text('text_1', 'Foobar ')
# available keys (from selenium/webdriver/common/keys.py):
#
# 'NULL'
# 'CANCEL'
# 'HELP'
# 'BACK_SPACE'
# 'TAB'
# 'CLEAR'
# 'RETURN'
# 'ENTER'
# 'SHIFT'
# 'LEFT_SHIFT'
# 'CONTROL'
# 'LEFT_CONTROL'
# 'ALT'
# 'LEFT_ALT'
# 'PAUSE'
# 'ESCAPE'
# 'SPACE'
# 'PAGE_UP'
# 'PAGE_DOWN'
# 'END'
# 'HOME'
# 'LEFT'
# 'ARROW_LEFT'
# 'UP'
# 'ARROW_UP'
# 'RIGHT'
# 'ARROW_RIGHT'
# 'DOWN'
# 'ARROW_DOWN'
# 'INSERT'
# 'DELETE'
# 'SEMICOLON'
# 'EQUALS'
# 'NUMPAD0'
# 'NUMPAD1'
# 'NUMPAD2'
# 'NUMPAD3'
# 'NUMPAD4'
# 'NUMPAD5'
# 'NUMPAD6'
# 'NUMPAD7'
# 'NUMPAD8'
# 'NUMPAD9'
# 'MULTIPLY'
# 'ADD'
# 'SEPARATOR'
# 'SUBTRACT'
# 'DECIMAL'
# 'DIVIDE'
# 'F1'
# 'F2'
# 'F3'
# 'F4'
# 'F5'
# 'F6'
# 'F7'
# 'F8'
# 'F9'
# 'F10'
# 'F11'
# 'F12'
# 'META'
# 'COMMAND'
|
amonmoce/corba_examples
|
omniORBpy-4.2.1/build/python/COS/CosTradingDynamic/__init__.py
|
Python
|
mit
| 251
| 0.003984
|
# DO NOT EDIT THIS FILE!
#
# Python module CosTradingDynamic generated by omniidl
import omniORB
omniORB.updateModule("CosTradingDynamic")
# ** 1. Stub files contributing to this module
import CosTradingDynamic_idl
# ** 2. Sub-modules
# ** 3. End
|
yangsibai/SiteMiner
|
test/util_test.py
|
Python
|
mit
| 229
| 0.004367
|
# -*- coding: utf-8 -*-
__author__ = 'massimo'
import unittest
class UtilTestFunctions(unittest.TestCase):
def test_verify_url(self):
self.assertTrue(True)
if __name__ == "__main__":
    unittest.main()
|
Frojd/wagtail-systemtext
|
wagtailsystemtext/models.py
|
Python
|
mit
| 791
| 0
|
from __future__ import unicode_literals
from django.db import models
from wagtail.core.models import Site
class SystemString(models.Model):
    DEFAULT_GROUP = 'general'
identifier = models.CharField(max_length=1024)
string = models.CharField(max_length=1024, blank=True, null=True)
group = models.CharField(max_length=255, default=DEFAULT_GROUP)
    site = models.ForeignKey(Site, on_delete=models.CASCADE)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(null=True, blank=True)
accessed = models.DateTimeField(null=True, blank=True)
modified = models.BooleanField(default=False)
class Meta:
unique_together = ['identifier', 'site', 'group']
def __unicode__(self):
return unicode(self.identifier)
|
vertical-knowledge/ripozo
|
ripozo/adapters/siren.py
|
Python
|
gpl-2.0
| 6,055
| 0.001156
|
"""
Siren protocol adapter. See `SIREN specification <https://github.com/kevinswiber/siren>`_.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from ripozo.adapters import AdapterBase
from ripozo.utilities import titlize_endpoint
from ripozo.resources.resource_base import create_url
from ripozo.resources.constants import input_categories
import json
import six
_CONTENT_TYPE = 'application/vnd.siren+json'
class SirenAdapter(AdapterBase):
"""
An adapter that formats the response in the SIREN format.
A description of a SIREN format can be found here:
`SIREN specification <https://github.com/kevinswiber/siren>`_
"""
formats = [_CONTENT_TYPE, 'siren']
extra_headers = {'Content-Type': _CONTENT_TYPE}
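
    # For orientation, formatted_body below produces a document of roughly
    # this shape (values are illustrative):
    #
    #   {"class": ["resource_name"],
    #    "properties": {...},
    #    "actions": [{"name": ..., "method": ..., "href": ..., "fields": [...]}],
    #    "links": [{"rel": ["self"], "href": "..."}],
    #    "entities": [...]}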
@property
def formatted_body(self):
"""
Gets the formatted body of the response in unicode form.
If ``self.status_code == 204`` then this will
return an empty string.
:return: The siren formatted response body
:rtype: unicode
"""
# 204's are supposed to be empty responses
if self.status_code == 204:
return ''
links = self.generate_links()
entities = self.get_entities()
response = dict(properties=self.resource.properties, actions=self._actions,
links=links, entities=entities)
# need to do this separately since class is a reserved keyword
response['class'] = [self.resource.resource_name]
return json.dumps(response)
@property
def _actions(self):
"""
Gets the list of actions in an appropriately SIREN format
:return: The list of actions
:rtype: list
"""
actions = []
for endpoint, options in six.iteritems(self.resource.endpoint_dictionary()):
options = options[0]
all_methods = options.get('methods', ('GET',))
meth = all_methods[0] if all_methods else 'GET'
base_route = options.get('route', self.resource.base_url)
route = create_url(base_route, **self.resource.properties)
route = self.combine_base_url_with_resource_url(route)
fields = self.generate_fields_for_endpoint_funct(options.get('endpoint_func'))
actn = dict(name=endpoint, title=titlize_endpoint(endpoint),
method=meth, href=route, fields=fields)
actions.append(actn)
return actions
def generate_fields_for_endpoint_funct(self, endpoint_func):
"""
Returns the action's fields attribute in a SIREN
appropriate format.
:param apimethod endpoint_func:
:return: A dictionary of action fields
:rtype: dict
"""
return_fields = []
fields_method = getattr(endpoint_func, 'fields', None)
if not fields_method:
return []
fields = fields_method(self.resource.manager)
for field in fields:
if field.arg_type is input_categories.URL_PARAMS:
continue
field_dict = dict(name=field.name, type=field.field_type.__name__,
location=field.arg_type, required=field.required)
return_fields.append(field_dict)
return return_fields
def generate_links(self):
"""
Generates the Siren links for the resource.
:return: The list of Siren formatted links.
:rtype: list
"""
href = self.combine_base_url_with_resource_url(self.resource.url)
links = [dict(rel=['self'], href=href)]
        for link, link_name, embedded in self.resource.linked_resources:
links.append(dict(rel=[link_name],
href=self.combine_base_url_with_resource_url(link.url)))
return links
def get_entities(self):
"""
Gets a list of related entities in an appropriate SIREN format
:return: A list of entities
:rtype: list
"""
entities = []
        for resource, name, embedded in self.resource.related_resources:
for ent in self.generate_entity(resource, name, embedded):
entities.append(ent)
return entities
def generate_entity(self, resource, name, embedded):
"""
A generator that yields entities
"""
if isinstance(resource, list):
for res in resource:
for ent in self.generate_entity(res, name, embedded):
yield ent
else:
if not resource.has_all_pks:
return
ent = {'class': [resource.resource_name], 'rel': [name]}
resource_url = self.combine_base_url_with_resource_url(resource.url)
if not embedded:
ent['href'] = resource_url
else:
ent['properties'] = resource.properties
ent['links'] = [dict(rel=['self'], href=resource_url)]
yield ent
@classmethod
def format_exception(cls, exc):
"""
Takes an exception and appropriately formats it
in the siren format. Mostly. It doesn't return
a self in this circumstance.
:param Exception exc: The exception to format.
:return: A tuple containing: response body, format,
http response code
:rtype: tuple
"""
status_code = getattr(exc, 'status_code', 500)
body = {'class': ['exception', exc.__class__.__name__],
'actions': [], 'entities': [], 'links': [],
'properties': dict(status=status_code, message=six.text_type(exc))}
return json.dumps(body), cls.formats[0], status_code
@classmethod
def format_request(cls, request):
"""
Simply returns request
:param RequestContainer request: The request to handler
:rtype: RequestContainer
"""
return request
|
mattBrzezinski/Hydrogen
|
robot-controller/Study/BaseStudy.py
|
Python
|
mit
| 6,141
| 0.006676
|
from PyQt4 import QtCore
from PyQt4 import QtGui
from Action import Speech
from UI.ActionPushButton import ActionPushButton
class BaseStudy(QtGui.QWidget):
def __init__(self):
super(BaseStudy, self).__init__()
self._actionQueue = None
self._nao = None
self._widgets = None
self._buttons = None
#END __init__()
def LEDActive(self):
if self._nao is not None:
self._nao.LEDrandomEyes(1.0, True)
#END if
#END LEDActive()
def LEDNormal(self):
if self._nao is not None:
self._nao.LEDNormal()
#END if
#END LEDNormal()
def setActionQueue(self, actionQueue):
self._actionQueue = actionQueue
#END setActionQueue()
def setNao(self, nao):
if self._nao is not None:
self._nao.connected.disconnect(self.on_nao_connected)
            self._nao.disconnected.disconnect(self.on_nao_disconnected)
#END if
self._nao = nao
if self._nao is not None:
self._nao.connected.connect(self.on_nao_connected)
self._nao.disconnected.connect(self.on_nao_disconnected)
#END if
#END setNao()
def speech(self, txt, speed, shaping):
return None
#END speech()
def on_button_clicked(self):
if self._actionQueue is not None:
            self._actionQueue.addActions(self.sender().getRobotActions())
#END if
#END on_button_clicked()
def on_nao_connected(self):
pass
#END on_nao_connected()
def on_nao_disconnected(self):
pass
#END on_nao_disconnected()
def on_runSpeech_clicked(self):
if self._actionQueue is not None:
self._actionQueue.addActions(self.sender().getRobotActions())
#END if
#END on_runSpeech_clicked()
def _setupUi(self, general_panel = True, custom_widget = None):
wgtGeneral = None
if general_panel:
wgtGeneral = QtGui.QWidget()
wgtGeneral.setMaximumHeight(80)
wgtGeneral.setMinimumHeight(80)
##################################################
# General Speech
##################################################
self._speechs = [
ActionPushButton(None, "Hello", Speech("Hello")),
ActionPushButton(None, "Thanks", Speech("Thank you")),
ActionPushButton(None, "Sorry", Speech("I'm sorry")),
ActionPushButton(None, "Good", Speech("Good!")),
ActionPushButton(None, "Okay", Speech("Okay")),
ActionPushButton(None, "Yes", Speech("Yes")),
ActionPushButton(None, "No", Speech("No")),
ActionPushButton(None, "Hmmm", Speech("Heum,")),
None,
ActionPushButton(None, "Louder", Speech("Please speak louder")),
ActionPushButton(None, "Say again?", Speech("Can you say one more time?")),
ActionPushButton(None, "Repeat?", Speech("Would you like me to repeat that?")),
ActionPushButton(None, "Understood?", Speech("Do you understand?")),
ActionPushButton(None, "Don't Understand", Speech("I don't understand")),
ActionPushButton(None, "Greeting", Speech("Hello, my name is NAO, nice to meet you")),
ActionPushButton(None, "End Experiment", Speech("Thank you for participating in our experiment")),
]
self._grpSpeech = QtGui.QGroupBox(wgtGeneral)
self._grpSpeech.setTitle("General Speech")
layoutSpeech = QtGui.QVBoxLayout(self._grpSpeech)
layoutSpeech.setMargin(6)
layoutSpeech.addSpacing(3)
widget = QtGui.QWidget(self._grpSpeech)
layout = QtGui.QHBoxLayout(widget)
layout.setMargin(0)
for item in self._speechs:
if item is None:
layoutSpeech.addWidget(widget)
widget = QtGui.QWidget(self._grpSpeech)
layout = QtGui.QHBoxLayout(widget)
layout.setMargin(0)
else:
item.setParent(widget)
item.clicked.connect(self.on_runSpeech_clicked)
layout.addWidget(item)
#END if
#END for
layoutSpeech.addWidget(widget)
#END if
wgtButtons = None
if self._widgets is not None and self._buttons is not None:
wgtButtons = QtGui.QWidget()
layout = QtGui.QHBoxLayout(wgtButtons)
layout.setMargin(0)
for i in range(len(self._widgets)):
layoutButtons = QtGui.QVBoxLayout(self._widgets[i])
layoutButtons.setMargin(0)
for button in self._buttons[i]:
if isinstance(button, ActionPushButton):
button.clicked.connect(self.on_button_clicked)
#END if
layoutButtons.addWidget(button)
#END for
scroll = QtGui.QScrollArea()
scroll.setAlignment(QtCore.Qt.AlignCenter)
scroll.setWidget(self._widgets[i])
layoutScroll = QtGui.QHBoxLayout()
layoutScroll.setMargin(0)
layoutScroll.addWidget(scroll)
layout.addLayout(layoutScroll)
#END for
#END if
if wgtGeneral is not None or wgtButtons is not None or custom_widget is not None:
splitter = QtGui.QSplitter(self)
splitter.setOrientation(QtCore.Qt.Vertical)
layout = QtGui.QHBoxLayout(self)
layout.setMargin(0)
layout.addWidget(splitter)
if wgtGeneral is not None:
wgtGeneral.setParent(splitter)
#END if
if wgtButtons is not None:
wgtButtons.setParent(splitter)
#END if
if custom_widget is not None:
custom_widget.setParent(splitter)
#END if
#END if
#END _setupUi()
#END BaseStudy
|
Teamxrtc/webrtc-streaming-node
|
third_party/webrtc/src/chromium/src/build/android/devil/android/decorators.py
|
Python
|
mit
| 5,401
| 0.007036
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Function/method decorators that provide timeout and retry logic.
"""
import functools
import itertools
import sys
from devil.android import device_errors
from devil.utils import cmd_helper
from devil.utils import reraiser_thread
from devil.utils import timeout_retry
DEFAULT_TIMEOUT_ATTR = '_default_timeout'
DEFAULT_RETRIES_ATTR = '_default_retries'
def _TimeoutRetryWrapper(f, timeout_func, retries_func, pass_values=False):
""" Wraps a funcion with timeout and retry handling logic.
Args:
f: The function to wrap.
timeout_func: A callable that returns the timeout value.
retries_func: A callable that returns the retries value.
pass_values: If True, passes the values returned by |timeout_func| and
|retries_func| to the wrapped function as 'timeout' and
'retries' kwargs, respectively.
Returns:
The wrapped function.
"""
@functools.wraps(f)
def timeout_retry_wrapper(*args, **kwargs):
timeout = timeout_func(*args, **kwargs)
retries = retries_func(*args, **kwargs)
if pass_values:
kwargs['timeout'] = timeout
kwargs['retries'] = retries
@functools.wraps(f)
def impl():
return f(*args, **kwargs)
try:
if timeout_retry.CurrentTimeoutThreadGroup():
# Don't wrap if there's already an outer timeout thread.
return impl()
else:
desc = '%s(%s)' % (f.__name__, ', '.join(itertools.chain(
(str(a) for a in args),
('%s=%s' % (k, str(v)) for k, v in kwargs.iteritems()))))
return timeout_retry.Run(impl, timeout, retries, desc=desc)
except reraiser_thread.TimeoutError as e:
raise device_errors.CommandTimeoutError(str(e)), None, (
sys.exc_info()[2])
except cmd_helper.TimeoutError as e:
raise device_errors.CommandTimeoutError(str(e)), None, (
sys.exc_info()[2])
return timeout_retry_wrapper
def WithTimeoutAndRetries(f):
"""A decorator that handles timeouts and retries.
'timeout' and 'retries' kwargs must be passed to the function.
Args:
f: The function to decorate.
Returns:
The decorated function.
"""
get_timeout = lambda *a, **kw: kw['timeout']
get_retries = lambda *a, **kw: kw['retries']
return _TimeoutRetryWrapper(f, get_timeout, get_retries)
def WithExplicitTimeoutAndRetries(timeout, retries):
"""Returns a decorator that handles timeouts and retries.
The provided |timeout| and |retries| values are always used.
Args:
timeout: The number of seconds to wait for the decorated function to
return. Always used.
retries: The number of times the decorated function should be retried on
failure. Always used.
Returns:
The actual decorator.
"""
def decorator(f):
get_timeout = lambda *a, **kw: timeout
get_retries = lambda *a, **kw: retries
return _TimeoutRetryWrapper(f, get_timeout, get_retries)
return decorator
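
# A minimal usage sketch (the decorated function is hypothetical):
#
#   @WithExplicitTimeoutAndRetries(timeout=30, retries=3)
#   def flaky_operation():
#       ...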
def WithTimeoutAndRetriesDefaults(default_timeout, default_retries):
"""Returns a decorator that handles timeouts and retries.
The provided |default_timeout| and |default_retries| values are used only
if timeout and retries values are not provided.
Args:
default_timeout: The number of seconds to wait for the decorated function
to return. Only used if a 'timeout' kwarg is not passed
to the decorated function.
default_retries: The number of times the decorated function should be
retried on failure. Only used if a 'retries' kwarg is not
passed to the decorated function.
Returns:
The actual decorator.
"""
def decorator(f):
get_timeout = lambda *a, **kw: kw.get('timeout', default_timeout)
        get_retries = lambda *a, **kw: kw.get('retries', default_retries)
return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
    return decorator
def WithTimeoutAndRetriesFromInstance(
default_timeout_name=DEFAULT_TIMEOUT_ATTR,
default_retries_name=DEFAULT_RETRIES_ATTR,
min_default_timeout=None):
"""Returns a decorator that handles timeouts and retries.
The provided |default_timeout_name| and |default_retries_name| are used to
get the default timeout value and the default retries value from the object
instance if timeout and retries values are not provided.
Note that this should only be used to decorate methods, not functions.
Args:
default_timeout_name: The name of the default timeout attribute of the
instance.
default_retries_name: The name of the default retries attribute of the
instance.
    min_default_timeout: Minimum timeout to be used when using the instance timeout.
Returns:
The actual decorator.
"""
def decorator(f):
def get_timeout(inst, *_args, **kwargs):
ret = getattr(inst, default_timeout_name)
if min_default_timeout is not None:
ret = max(min_default_timeout, ret)
return kwargs.get('timeout', ret)
def get_retries(inst, *_args, **kwargs):
return kwargs.get('retries', getattr(inst, default_retries_name))
return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
return decorator
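
# A minimal usage sketch (the class and method are hypothetical; the attribute
# names match the DEFAULT_TIMEOUT_ATTR/DEFAULT_RETRIES_ATTR defaults above):
#
#   class Device(object):
#       _default_timeout = 30
#       _default_retries = 3
#
#       @WithTimeoutAndRetriesFromInstance()
#       def Reboot(self, timeout=None, retries=None):
#           ...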
|
baike21/blog
|
blog/settings.py
|
Python
|
gpl-3.0
| 4,119
| 0.002444
|
# -*- coding: utf-8 -*-
"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2y8c!z@9!s%(0=epe+2n3k8+_!$vg--xz^3p4rs)6ov)c^2a*t'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blogadmin',
'DjangoUeditor',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
#
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.mysql',
# 'NAME': 'blog', # app's database name
# 'USER': 'root',
# 'PASSWORD': 'root',
# 'HOST': '127.0.0.1', # localhost or cloudhost
# 'PORT': '3306',
# },
# 'another_db':{
# 'ENGINE': 'django.db.backends.mysql',
# 'NAME': 'app2_database', # app2's database name
# 'USER': 'root',
# 'PASSWORD': 'root',
# 'HOST': '127.0.0.1', # localhost or cloudhost
# 'PORT': '3306',
# },
# }
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR,'static')
# Shared static file directories
STATIC_DIRS = (
os.path.join(BASE_DIR,'static'),
os.path.join(BASE_DIR,'media'),
)
# Directory for uploaded data
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
STATICFILES_FINDERS = ("django.contrib.staticfiles.finders.FileSystemFinder", "django.contrib.staticfiles.finders.AppDirectoriesFinder",)
|
slowkid/EulerProject
|
solutions/problem11.py
|
Python
|
unlicense
| 5,822
| 0.009275
|
#!/usr/bin/env python
"""
Largest product in a grid
Problem 11
Published on 22 February 2002 at 06:00 pm [Server Time]
In the 20x20 grid below, four numbers along a diagonal line have been marked in red.
The product of these numbers is 26 * 63 * 78 * 14 = 1788696.
What is the greatest product of four adjacent numbers in the same direction (up, down, left, right, or diagonally) in the 20x20 grid?
"""
THE_GRID = [[int(column) for column in row.split(' ')] for row in
"""
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
""".strip().split('\n')]
"""
A few words about the declaration of THE_GRID:
This is not the easiest thing to digest on first look. I think it is "pythonic"
in its implementation, and it allows copying/pasting the grid straight out of the
problem statement without a bunch of mucking around to manually turn it into a 2d
array (or nested lists, actually). It is arranged as a list of rows. Each row is a
list of numbers, one for each column in that row. Looking at it, the multi-line
string definition first converts to a list of strings via the split operation, one
string for each row. The outer list comprehension then converts each row into a
list of short strings (the columns), which are also converted to int.
"""
#------------------------------------------------------------------------------
import operator
#------------------------------------------------------------------------------
def product(iterable):
return reduce(operator.mul, iterable, 1)
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for y_dir in (0, 1):
for x_dir in (0,1):
for i in range(run_length):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
    for row in range(height-run_length+1):
for column in range(width-run_length+1):
for i in range(run_length):
for y_dir in (0, 1):
for x_dir in (0,1):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
highest = 0
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for x_dir, y_dir in [(1, 0), (0, 1), (1, 1)]:
for i in range(run_length):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
highest = 0
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for x_dir, y_dir in [(1, 0), (0, 1), (1, 1)]:
run =[THE_GRID[row+(y_dir*i)][column+x_dir*i] for i in range(run_length)]
result = product(run)
print run, result
#if result > highest:
# highest = result
#return(highest)
#------------------------------------------------------------------------------
def solve():
g = THE_GRID
maxp = 0
rows, cols, path_size = len(g), len(g[0]), 5
for i in range(rows):
for j in range(cols - path_size + 1):
phv = max(product([g[i][j+s] for s in range(path_size)]),
product([g[j+s][i] for s in range(path_size)]))
#phv = max(g[i][j] * g[i][j+1] * g[i][j+2] * g[i][j+3],
# g[j][i] * g[j+1][i] * g[j+2][i] * g[j+3][i])
if i < rows - path_size:
pdd = max(product([g[i+s][j+s] for s in range(path_size)]),
product([g[i+s][j+path_size-s-1] for s in range(path_size)]))
#pdd = max(g[i][j] * g[i+1][j+1] * g[i+2][j+2] * g[i+3][j+3],
# g[i][j+3] * g[i+1][j+2] * g[i+2][j+1] * g[i+3][j])
maxp = max(maxp, phv, pdd)
return maxp
#------------------------------------------------------------------------------
def main():
print "PROBLEM:\n"
for line in __doc__.strip().split('\n'):
print '\t', line
print "\nSOLUTION:"
print "\n\t", solve()
#------------------------------------------------------------------------------
if __name__ == "__main__":
main()
|
ktbyers/pynet-ons-mar17
|
json_yaml/json_test.py
|
Python
|
apache-2.0
| 335
| 0
|
#!/usr/bin/env python
import json
my_list = range(10)
my_list.append('whatever')
my_list.append('some thing')
my_dict = {
'key1': 'val1',
'key2': 'val2',
    'key3': 'val3'
}
my_dict['key4'] = my_list
my_dict['key5'] = False
print my_dict
print json.dumps(my_dict)
with open("my_file.json", "w") as f
|
:
json.dump(my_dict, f)
|
dsavoiu/kafe2
|
kafe2/fit/representation/model/__init__.py
|
Python
|
gpl-3.0
| 353
| 0.005666
|
"""This submodule contains objects for handl
|
ing different representations of models and model functions.
:synopsis: This submodule contains objects for handling different representations of models and model functions.
.. moduleauthor:: Johannes Gaessler <johannes.gaessler@student.kit.edu>
"""
from ._base import *
from .yaml_drepr import *
|
AlgoLab/pygfa
|
pygfa/serializer/utils.py
|
Python
|
mit
| 1,690
| 0.006509
|
from pygfa.graph_element.parser import line, field_validator as fv
SERIALIZATION_ERROR_MESSAGGE = "Couldn't serialize object identified by: "
def _format_exception(identifier, exception):
return SERIALIZATION_ERROR_MESSAGGE + identifier \
+ "\n\t" + repr(exception)
def _remove_common_edge_fields(edge_dict):
    edge_dict.pop('eid')
edge_dict.pop('from_node')
edge_dict.pop('from_orn')
edge_dict.pop('to_node')
edge_dict.pop('to_orn')
edge_dict.pop('from_positions')
edge_dict.pop('to_positions')
    edge_dict.pop('alignment')
edge_dict.pop('distance')
edge_dict.pop('variance')
def _serialize_opt_fields(opt_fields):
fields = []
for key, opt_field in opt_fields.items():
if line.is_optfield(opt_field):
fields.append(str(opt_field))
return fields
def _are_fields_defined(fields):
try:
for field in fields:
if field is None:
return False
except:
return False
return True
def _check_fields(fields, required_fields):
"""Check if each field has the correct format as
stated from the specification.
"""
try:
for field in range(0, len(required_fields)):
if not fv.is_valid(fields[field], required_fields[field]):
return False
return True
except:
return False
def _check_identifier(identifier):
if not isinstance(identifier, str):
identifier = "'{0}' - id of type {1}.".format(\
str(identifier), \
type(identifier) \
)
return identifier
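# A tiny self-contained check of the defensive helpers above; a sketch with
# illustrative values only (in the real package this would live in a test
# module, and the type repr printed by _check_identifier varies by Python
# version).
if __name__ == "__main__":
    print(_are_fields_defined(["S", "seg1", None]))    # False: one field is None
    print(_are_fields_defined(["S", "seg1", "ACGT"]))  # True
    print(_check_identifier(42))  # "'42' - id of type ..."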
|
SheepDogInc/django-base
|
project_name/urls.py
|
Python
|
bsd-3-clause
| 294
| 0.003401
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
    url(r'^$', TemplateView.as_view(template_name='base.html'))
)
|
AnishShah/tensorflow
|
tensorflow/contrib/data/__init__.py
|
Python
|
apache-2.0
| 5,193
| 0.002696
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental API for building input pipelines.
This module contains experimental `Dataset` sources and transformations that can
be used in conjunction with the `tf.data.Dataset` API. Note that the
`tf.contrib.data` API is not subject to the same backwards compatibility
guarantees as `tf.data`, but we will provide deprecation advice in advance of
removing existing functionality.
See [Importing Data](https://tensorflow.org/guide/datasets) for an overview.
@@Counter
@@CheckpointInputPipelineHook
@@CsvDataset
@@LMDBDataset
@@Optional
@@RandomDataset
@@Reducer
@@SqlDataset
@@TFRecordWriter
@@assert_element_shape
@@batch_and_drop_remainder
@@bucket_by_sequence_length
@@choose_from_datasets
@@copy_to_device
@@dense_to_sparse_batch
@@enumerate_dataset
@@get_next_as_optional
@@get_single_element
@@group_by_reducer
@@group_by_window
@@ignore_errors
@@make_batched_features_dataset
@@make_csv_dataset
@@make_saveable_from_iterator
@@map_and_batch
@@padded_batch_and_drop_remainder
@@parallel_interleave
@@parse_example_dataset
@@prefetch_to_device
@@read_batch_features
@@rejection_resample
@@reduce_dataset
@@sample_from_datasets
@@scan
@@shuffle_and_repeat
@@sliding_window_batch
@@sloppy_interleave
@@unbatch
@@unique
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.contrib.data.python.ops.batching import assert_element_shape
from tensorflow.contrib.data.python.ops.batching import batch_and_drop_remainder
from tensorflow.contrib.data.python.ops.batching import dense_to_sparse_batch
from tensorflow.contrib.data.python.ops.batching import map_and_batch
from tensorflow.contrib.data.python.ops.batching import padded_batch_and_drop_remainder
from tensorflow.contrib.data.python.ops.batching import unbatch
from tensorflow.contrib.data.python.ops.counter import Counter
from tensorflow.contrib.data.python.ops.enumerate_ops import enumerate_dataset
from tensorflow.contrib.data.python.ops.error_ops import ignore_errors
from tensorflow.contrib.data.python.ops.get_single_element import get_single_element
from tensorflow.contrib.data.python.ops.get_single_element import reduce_dataset
from tensorflow.contrib.data.python.ops.grouping import bucket_by_sequence_length
from tensorflow.contrib.data.python.ops.grouping import group_by_reducer
from tensorflow.contrib.data.python.ops.grouping import group_by_window
from tensorflow.contrib.data.python.ops.grouping import Reducer
from tensorflow.contrib.data.python.ops.interleave_ops import choose_from_datasets
from tensorflow.contrib.data.python.ops.interleave_ops import parallel_interleave
from tensorflow.contrib.data.python.ops.interleave_ops import sample_from_datasets
from tensorflow.contrib.data.python.ops.interleave_ops import sloppy_interleave
from tensorflow.contrib.data.python.ops.iterator_ops import CheckpointInputPipelineHook
from tensorflow.contrib.data.python.ops.iterator_ops import make_saveable_from_iterator
from tensorflow.contrib.data.python.ops.parsing_ops import parse_example_dataset
from tensorflow.contrib.data.python.ops.prefetching_ops import copy_to_device
from tensorflow.contrib.data.python.ops.prefetching_ops import prefetch_to_device
from tensorflow.contrib.data.python.ops.random_ops import RandomDataset
from tensorflow.contrib.data.python.ops.readers import CsvDataset
from tensorflow.contrib.data.python.ops.readers import LMDBDataset
from tensorflow.contrib.data.python.ops.readers import make_batched_features_dataset
from tensorflow.contrib.data.python.ops.readers import make_csv_dataset
from tensorflow.contrib.data.python.ops.readers import read_batch_features
from tensorflow.contrib.data.python.ops.readers import SqlDataset
from tensorflow.contrib.data.python.ops.resampling import rejection_resample
from tensorflow.contrib.data.python.ops.scan_ops import scan
from tensorflow.contrib.data.python.ops.shuffle_ops import shuffle_and_repeat
from tensorflow.contrib.data.python.ops.sliding import sliding_window_batch
from tensorflow.contrib.data.python.ops.unique import unique
from tensorflow.contrib.data.python.ops.writers import TFRecordWriter
from tensorflow.python.data.ops.iterator_ops import get_next_as_optional
from tensorflow.python.data.ops.optional_ops import Optional
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
# A constant that can be used to enable auto-tuning.
AUTOTUNE = -1
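# A hedged usage sketch (TF 1.x era): contrib transformations are applied to
# a tf.data.Dataset via .apply(); the pipeline below is illustrative only.
#
#   import tensorflow as tf
#   dataset = tf.data.Dataset.range(100)
#   dataset = dataset.apply(
#       tf.contrib.data.map_and_batch(lambda x: x * 2, batch_size=10))
#   dataset = dataset.apply(
#       tf.contrib.data.shuffle_and_repeat(buffer_size=100))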
|
dola/Telesto
|
tools/protocol/telesto/plot/phases.py
|
Python
|
mit
| 718
| 0.001393
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from telesto import plot
OFFSET = 1000 # REMEMBER TO CALCULATE MANUALLY!
def main():
path = sys.argv[1]
mean = plot.mean(
plot.rows(path, plot.parse_middleware_line),
("waiting", "parsing", "database", "r
|
esponse")
)
dev = plot.standard_deviation(
plot.rows(path, plot.parse_middleware_line), mean
)
print "#",
for key in mean:
print key,
print
for key in mean:
print mean[key],
print
for key in mean:
print 100.0 * mean[key] / sum(mean.itervalues()),
print
for key in mean:
print dev[key],
print
|
if __name__ == "__main__":
main()
|
kili/playbooks
|
filter_plugins/netmask_conversion.py
|
Python
|
gpl-3.0
| 1,194
| 0
|
def len2mask(len):
"""Convert a bit length to a dotted netmask (aka. CIDR to netmask)"""
mask = ''
if not isinstance(len, int) or len < 0 or len > 32:
print "Illegal subnet length: %s (w
|
hich is a %s)" % \
(str(len), type(len).__name__)
return None
for t in range(4):
if len > 7:
mask += '255.'
else:
dec = 255 - (2**(8 - len) - 1)
mask += str(dec) + '.'
len -= 8
if len < 0:
len = 0
return mask[:-1]
def mask2len(subnet):
"""Convert a dotted netmask to bit length (aka. netmask to CIDR)"""
    octets = [int(x) for x in subnet.split(".")]
count = 0
for octet in octets:
highest_bit = 128
while highest_bit > 0:
if octet >= highest_bit:
count = count + 1
octet = octet - highest_bit
highest_bit = highest_bit / 2
else:
return count
return count
class FilterModule(object):
''' utility to convert cidr netmasks into len and reverse '''
def filters(self):
return {'mask2len': mask2len,
'len2mask': len2mask}
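# A quick round-trip check of the two converters above; a standalone sketch,
# no Ansible machinery required.
if __name__ == "__main__":
    assert len2mask(24) == '255.255.255.0'
    assert len2mask(20) == '255.255.240.0'
    assert mask2len('255.255.255.0') == 24
    assert mask2len('255.255.240.0') == 20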
|
zwffff2015/stock
|
task/GetStockHistoryInfoTask.py
|
Python
|
mit
| 7,619
| 0.004191
|
# coding=utf-8
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "..")))
from db.MysqlUtil import initMysql, execute, select, batchInsert, disconnect
from common.JsonHelper import loadJsonConfig
from api.tushareApi import getSimpleHistoryData
from datetime import datetime, timedelta
from common.LoggerHelper import writeErrorLog, writeWarningLog, writeInfoLog, writeDebugLog, writeLog, writeExceptionLog
from wechat.weChatSender import sendMessageToMySelf
from common.HttpHelper import httpGet
from common.FileHelper import saveFile
import time
import json
# 从同花顺抓取历史行情数据(前复权)
def updateStockHistoryInfoByTHS(stockList):
for stock in stockList:
code = stock[0]
i = 2010
thisYear = datetime.now().year
while (i <= thisYear):
# time.sleep(1)
infos = getStockInfos(code, i)
if infos is None:
continue
for date in infos:
open = infos.get(date).get('open')
close = infos.get(date).get('close')
high = infos.get(date).get('high')
low = infos.get(date).get('low')
volume = infos.get(date).get('volume')
amount = infos.get(date).get('amount')
checkExistSql = unicode("select count(*) from s_stock where code='{0}' and date='{1}'").format(code,
date)
count = select(checkExistSql, False)[0]
if count > 0:
updateSql = unicode(
"update s_stock set volume={2},highPrice={3},lowPrice={4},openPrice={5},closePrice={6},amount='{7}' where code='{0}' and date='{1}'").format(
code, date, volume, high, low, open, close, amount)
execute(updateSql)
print code, date, updateSql
else:
insertSql = unicode(
"insert into s_stock(code,date,timestamp,volume,highPrice,lowPrice,openPrice,closePrice,amount) VALUES ('{0}','{1}',{2},{3},{4},{5},{6},{7},'{8}')").format(
code, date, int(time.mktime(time.strptime(date, '%Y-%m-%d'))), volume, high, low, open, close,
amount)
execute(insertSql)
print code, date, insertSql
i = i + 1
# 解析同花顺年行情数据(前复权)
def getStockInfos(code, year):
try:
url = "http://d.10jqka.com.cn/v2/line/hs_{0}/01/{1}.js".format(code, year)
res = httpGet(url).decode("utf-8")
index = res.find("(")
if (index < 0):
writeErrorLog(unicode("解析行情失败: code:{0}, year:{1}, res:{2}").format(code, year, res))
return []
res = res[index + 1:-1]
writeLog(unicode("获取股票历史行情: code: {0}, year:{1}").format(code, year))
jo = json.loads(res)
dataInfo = jo['data'].split(';')
result = {}
for item in dataInfo:
infos = item.split(',')
dic = {}
dic['open'] = infos[1]
dic['high'] = infos[2]
dic['low'] = infos[3]
dic['close'] = infos[4]
dic['volume'] = infos[5]
dic['amount'] = "{0}亿".format(round(float(infos[6]) / 100000000, 1))
result[datetime.strptime(infos[0], '%Y%m%d').strftime('%Y-%m-%d')] = dic
return result
except Exception, e:
writeErrorLog(unicode("解析行情失败: code:{0}, year:{1}, e:{2}").format(code, year, str(e)))
if "404" in str(e):
return []
else:
return None
def getStockHistoryInfoFromDb():
sql = unicode("SELECT code,count(*) from s_stock GROUP by code HAVING count(*)<20")
data = select(sql)
updateStockHistoryInfoByTHS(data)
def getStockHistoryInfoFromConfig():
    stockList = loadJsonConfig(os.path.abspath(os.path.join(os.getcwd(), "../config/newStockList.json")))
updateStockHistoryInfoByTHS(stockList)
def updateAllStockHistoryInfo():
sql = unicode("select code,name from s_stock_info order by code asc")
data = select(sql)
updateStockHistoryInfoByTHS(data)
def updateStockOtherInfo():
sql = unicode("select code,name from s_stock_info order by code asc")
stockList = select(sql)
for stock in stockList:
code = stock[0]
        if int(code) < 601126:
continue
selectInfoSql = unicode("select date,closePrice from s_stock where code='{0}' order by date asc").format(code)
data = select(selectInfoSql)
writeLog(unicode("更新股票其他指标数据: code: {0}").format(code))
updataStockBias(code, data, 6)
updataStockBias(code, data, 12)
updataStockBias(code, data, 24)
updateStockMA(code, data, 5)
updateStockMA(code, data, 10)
updateStockMA(code, data, 20)
updateStockMA(code, data, 30)
updateStockMA(code, data, 60)
updateStockMA(code, data, 120)
updateStockMA(code, data, 250)
updateStockChangePercent(code, data)
def updateStockChangePercent(code, data):
for i in range(1, len(data)):
try:
changeAmount = data[i][1] - data[i - 1][1]
changePercent = round(changeAmount * 100 / data[i - 1][1], 2)
updateSql = unicode(
"update s_stock set changePercent={0},changeAmount={1} where code='{2}' and date='{3}'").format(
changePercent, changeAmount, code, data[i][0])
execute(updateSql)
except Exception, e:
writeErrorLog(
unicode("更新涨幅数据失败: code:{0}, i:{1}, date:{2}, closePrice:{3}").format(code, i, data[i][0], data[i][1]))
def updateStockMA(code, data, n):
for i in range(n - 1, len(data)):
j = i
sum = 0
while (i - j < n):
sum = sum + data[j][1]
j = j - 1
avg = round(sum / n, 2)
sql = unicode("update s_stock set MA{0}={1} where code='{2}' and date='{3}'").format(n, avg, code, data[i][0])
execute(sql)
def updataStockBias(code, data, n):
for i in range(n - 1, len(data)):
j = i
sum = 0
while (i - j < n):
sum = sum + data[j][1]
j = j - 1
avg = round(sum / n, 2)
todayClosePrice = float(data[i][1])
bias = 0 if avg == 0 else round((todayClosePrice - avg) * 100 / avg, 2)
number = 1 if n == 6 else (2 if n == 12 else 3)
sql = unicode("update s_stock set BIAS{0}={1} where code='{2}' and date='{3}'").format(number, bias, code,
data[i][0])
execute(sql)
def main(argv):
try:
reload(sys)
sys.setdefaultencoding('utf-8')
# sendMessageToMySelf(unicode("开始查询股票历史行情数据"))
begin = datetime.now()
initMysql()
# getStockHistoryInfoFromDb()
# getStockHistoryInfoFromConfig()
updateStockOtherInfo()
disconnect()
end = datetime.now()
message = unicode("查询股票历史行情数据的任务执行完毕,当前时间:{0},执行用时:{1}").format(datetime.now(), end - begin)
writeLog(message)
sendMessageToMySelf(message)
except:
writeExceptionLog('RealTimeRemindTask Error.')
if __name__ == '__main__':
main(sys.argv)
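# A hedged sketch of a rolling-sum moving average: it computes the same
# numbers as updateStockMA above, but in O(n) per series instead of O(n*k),
# and standalone (no database required). The sample prices are illustrative.
def movingAverages(closePrices, n):
    out, windowSum = {}, 0.0
    for i, price in enumerate(closePrices):
        windowSum += price
        if i >= n:
            windowSum -= closePrices[i - n]  # drop the value leaving the window
        if i >= n - 1:
            out[i] = round(windowSum / n, 2)
    return out

if __name__ == '__main__':
    print(movingAverages([10, 11, 12, 13, 14], 3))  # {2: 11.0, 3: 12.0, 4: 13.0}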
|
luskaner/wps-dict
|
wps_dict/wps_dict/providers/offline/list.py
|
Python
|
gpl-3.0
| 81
| 0
|
from . import *
offline_providers = {
    'builtin': builtin.ProviderBuiltin,
}
|
tiancj/emesene
|
emesene/e3/xmpp/SleekXMPP/sleekxmpp/plugins/xep_0085/chat_states.py
|
Python
|
gpl-3.0
| 1,636
| 0
|
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
    See the file LICENSE for copying permission.
"""
import logging
import sleekxmpp
from sleekxmpp.stanza import Message
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.xmlstream.matcher import StanzaPath
from sleekxmpp.xmlstream import register_stanza_plugin, ElementBase, ET
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.plugins.xep_0085 import stanza, ChatState
log = logging.getLogger(__name__)
class XEP_0085(BasePlugin):
"""
XEP-0085 Chat State Notifications
"""
name = 'xep_0085'
description = 'XEP-0085: Chat State Notifications'
dependencies = set(['xep_0030'])
stanza = stanza
def plugin_init(self):
self.xmpp.register_handler(
Callback('Chat State',
StanzaPath('message/chat_state'),
self._handle_chat_state))
        register_stanza_plugin(Message, stanza.Active)
register_stanza_plugin(Message, stanza.Composing)
register_stanza_plugin(Message, stanza.Gone)
register_stanza_plugin(Message, stanza.Inactive)
register_stanza_plugin(Message, stanza.Paused)
def plugin_end(self):
self.xmpp.remove_handler('Chat State')
def session_bind(self, jid):
self.xmpp.plugin['xep_0030'].add_feature(ChatState.namespace)
def _handle_chat_state(self, msg):
state = msg['chat_state']
log.debug("Chat State: %s, %s", state, msg['from'].jid)
self.xmpp.event('chatstate_%s' % state, msg)
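# A hedged usage sketch: the plugin fires events named 'chatstate_<state>',
# so a client built elsewhere can subscribe to them like this (bot setup
# omitted; the JID and handler below are illustrative).
#
#   xmpp = sleekxmpp.ClientXMPP('user@example.com', 'password')
#   xmpp.register_plugin('xep_0085')
#   xmpp.add_event_handler('chatstate_composing',
#                          lambda msg: log.info('%s is typing', msg['from']))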
|
aequitas/home-assistant
|
homeassistant/components/sonos/media_player.py
|
Python
|
apache-2.0
| 37,431
| 0
|
"""Support to interface with Sonos players."""
import asyncio
import datetime
import functools as ft
import logging
import socket
import time
import urllib
import async_timeout
import pysonos
import pysonos.snapshot
from pysonos.exceptions import SoCoUPnPException, SoCoException
from homeassistant.components.media_player import MediaPlayerDevice
from homeassistant.components.media_player.const import (
ATTR_MEDIA_ENQUEUE, MEDIA_TYPE_MUSIC, SUPPORT_CLEAR_PLAYLIST,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_SELECT_SOURCE,
SUPPORT_SHUFFLE_SET, SUPPORT_STOP, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET)
from homeassistant.const import (
ENTITY_MATCH_ALL, STATE_IDLE, STATE_PAUSED, STATE_PLAYING)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.dt import utcnow
from . import (
CONF_ADVERTISE_ADDR, CONF_HOSTS, CONF_INTERFACE_ADDR,
DATA_SERVICE_EVENT, DOMAIN as SONOS_DOMAIN,
ATTR_ALARM_ID, ATTR_ENABLED, ATTR_INCLUDE_LINKED_ZONES, ATTR_MASTER,
ATTR_NIGHT_SOUND, ATTR_SLEEP_TIME, ATTR_SPEECH_ENHANCE, ATTR_TIME,
ATTR_VOLUME, ATTR_WITH_GROUP,
SERVICE_CLEAR_TIMER, SERVICE_JOIN, SERVICE_RESTORE, SERVICE_SET_OPTION,
SERVICE_SET_TIMER, SERVICE_SNAPSHOT, SERVICE_UNJOIN, SERVICE_UPDATE_ALARM)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
DISCOVERY_INTERVAL = 60
# Quiet down pysonos logging to just actual problems.
logging.getLogger('pysonos').setLevel(logging.WARNING)
logging.getLogger('pysonos.data_structures_entry').setLevel(logging.ERROR)
SUPPORT_SONOS = SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE |\
SUPPORT_PLAY | SUPPORT_PAUSE | SUPPORT_STOP | SUPPORT_SELECT_SOURCE |\
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK |\
SUPPORT_PLAY_MEDIA | SUPPORT_SHUFFLE_SET | SUPPORT_CLEAR_PLAYLIST
DATA_SONOS = 'sonos_media_player'
SOURCE_LINEIN = 'Line-in'
SOURCE_TV = 'TV'
ATTR_SONOS_GROUP = 'sonos_group'
UPNP_ERRORS_TO_IGNORE = ['701', '711', '712']
class SonosData:
"""Storage class for platform global data."""
def __init__(self, hass):
"""Initialize the data."""
self.entities = []
self.topology_condition = asyncio.Condition()
async def async_setup_platform(hass,
config,
async_add_entities,
discovery_info=None):
"""Set up the Sonos platform. Obsolete."""
_LOGGER.error(
'Loading Sonos by media_player platform config is no longer supported')
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Sonos from a config entry."""
if DATA_SONOS not in hass.data:
hass.data[DATA_SONOS] = SonosData(hass)
config = hass.data[SONOS_DOMAIN].get('media_player', {})
advertise_addr = config.get(CONF_ADVERTISE_ADDR)
if advertise_addr:
pysonos.config.EVENT_ADVERTISE_IP = advertise_addr
def _discovery(now=None):
"""Discover players from network or configuration."""
hosts = config.get(CONF_HOSTS)
def _discovered_player(soco):
"""Handle a (re)discovered player."""
try:
# Make sure that the player is available
_ = soco.volume
entity = _get_entity_from_soco_uid(hass, soco.uid)
if not entity:
hass.add_job(async_add_entities, [SonosEntity(soco)])
else:
entity.seen()
except SoCoException:
pass
if hosts:
for host in hosts:
try:
player = pysonos.SoCo(socket.gethostbyname(host))
if player.is_visible:
_discovered_player(player)
except (OSError, SoCoException):
if now is None:
_LOGGER.warning("Failed to initialize '%s'", host)
else:
pysonos.discover_thread(
_discovered_player,
interface_addr=config.get(CONF_INTERFACE_ADDR))
for entity in hass.data[DATA_SONOS].entities:
entity.check_unseen()
hass.helpers.event.call_later(DISCOVERY_INTERVAL, _discovery)
hass.async_add_executor_job(_discovery)
async def async_service_handle(service, data):
"""Handle dispatched services."""
entity_ids = data.get('entity_id')
entities = hass.data[DATA_SONOS].entities
if entity_ids and entity_ids != ENTITY_MATCH_ALL:
entities = [e for e in entities if e.entity_id in entity_ids]
if service == SERVICE_JOIN:
master = [e for e in hass.data[DATA_SONOS].entities
if e.entity_id == data[ATTR_MASTER]]
if master:
await SonosEntity.join_multi(hass, master[0], entities)
elif service == SERVICE_UNJOIN:
await SonosEntity.unjoin_multi(hass, entities)
elif service == SERVICE_SNAPSHOT:
await SonosEntity.snapshot_multi(
hass, entities, data[ATTR_WITH_GROUP])
elif service == SERVICE_RESTORE:
await SonosEntity.restore_multi(
hass, entities, data[ATTR_WITH_GROUP])
else:
for entity in entities:
if service == SERVICE_SET_TIMER:
call = entity.set_sleep_timer
elif service == SERVICE_CLEAR_TIMER:
call = entity.clear_sleep_timer
elif service == SERVICE_UPDATE_ALARM:
call = entity.set_alarm
elif service == SERVICE_SET_OPTION:
call = entity.set_option
hass.async_add_executor_job(call, data)
# We are ready for the next service call
hass.data[DATA_SERVICE_EVENT].set()
async_dispatcher_connect(hass, SONOS_DOMAIN, async_service_handle)
class _ProcessSonosEventQueue:
"""Queue like object for dispatching sonos events."""
def __init__(self, handler):
"""Initialize Sonos event queue."""
self._handler = handler
def put(self, item, block=True, timeout=None):
"""Process event."""
self._handler(item)
def _get_entity_from_soco_uid(hass, uid):
"""Return SonosEntity from SoCo uid."""
for entity in hass.data[DATA_SONOS].entities:
if uid == entity.unique_id:
return entity
return None
def soco_error(errorcodes=None):
"""Filter out
|
specified UPnP errors from logs and avoid exceptions."""
def decorator(funct):
"""Decorate functions."""
@ft.wraps(funct)
def wrapper(*args, **kwargs):
"""Wrap for all soco UPnP exception."""
try:
return funct(*args, **kwargs)
|
except SoCoUPnPException as err:
if errorcodes and err.error_code in errorcodes:
pass
else:
_LOGGER.error("Error on %s with %s", funct.__name__, err)
except SoCoException as err:
_LOGGER.error("Error on %s with %s", funct.__name__, err)
return wrapper
return decorator
def soco_coordinator(funct):
"""Call function on coordinator."""
@ft.wraps(funct)
def wrapper(entity, *args, **kwargs):
"""Wrap for call to coordinator."""
if entity.is_coordinator:
return funct(entity, *args, **kwargs)
return funct(entity.coordinator, *args, **kwargs)
return wrapper
def _timespan_secs(timespan):
"""Parse a time-span into number of seconds."""
if timespan in ('', 'NOT_IMPLEMENTED', None):
return None
return sum(60 ** x[0] * int(x[1]) for x in enumerate(
reversed(timespan.split(':'))))
def _is_radio_uri(uri):
"""Return whether the URI is a radio stream."""
radio_schemes = (
'x-rincon-mp3radio:', 'x-sonosapi-stream:', 'x-sonosapi-radio:',
'x-sonosapi-hls:', 'hls-radio:')
return uri.startswith(radio_schemes)
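# Worked examples for the two helpers above (values chosen by hand):
#   _timespan_secs('0:04:23')  -> 60**0*23 + 60**1*4 + 60**2*0 = 263
#   _timespan_secs('NOT_IMPLEMENTED')  -> None
#   _is_radio_uri('x-sonosapi-stream:s12345?sid=254')   -> True
#   _is_radio_uri('x-file-cifs://nas/music/track.flac') -> False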
class SonosEntity(MediaPlayerDevice):
"""Representation of a Sonos e
|
jantman/webhook2lambda2sqs
|
webhook2lambda2sqs/version.py
|
Python
|
agpl-3.0
| 1,938
| 0.000516
|
"""
The latest version of this package is available at:
<http://github.com/jantman/webhook2lambda2sqs>
################################################################################
Copyright 2016 Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
This file is part of webhook2lambda2sqs, also known as webhook2lambda2sqs.
webhook2lambda2sqs is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
webhook2lambda2sqs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with webhook2lambda2sqs. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
################################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/webhook2lambda2sqs> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
################################################################################
AUTHORS:
Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
################################################################################
"""
VERSION = '0.2.0'
PROJECT_URL = 'https://github.com/jantman/webhook2lambda2sqs'
|
JamesTFarrington/hendrix
|
hendrix/defaults.py
|
Python
|
mit
| 209
| 0
|
import os
CACHE_PORT = 8080
HTTP_PORT = 8000
HTTPS_PORT = 4430
DEFAULT_MAX_AGE = 3600
DEFAULT_LOG_PATH = os.path.dirname(__file__)
DEFAULT_LOG_FILE = os.path.join(DEFAULT_LOG_PATH, 'default-hendrix.log')
|
jonathanmorgan/reddit_collect
|
examples/praw_testing.py
|
Python
|
gpl-3.0
| 5,265
| 0.02754
|
# SET THE FOLLOWING EITHER BEFORE RUNNING THIS FILE OR BELOW, BEFORE INITIALIZING
# PRAW!
# set variables for interacting with reddit.
# my_user_agent = ""
# my_username = ""
# my_password = ""
# reddit_post_id = -1
# import praw - install: pip install praw
# Praw doc: https://praw.readthedocs.org/en/latest/index.html
import praw
# python base imports
import datetime
import pprint
# import python_utilities
from python_utilities.logging.summary_helper import SummaryHelper
# declare variables.
my_summary_helper = None
r = None
post = None
comments = None
flat_comments = None
test_comment = None
comment_prop_map = None
summary_string = ""
# set variables for interacting with reddit.
my_user_agent = "<user_agent>"
my_username = "<reddit_username>"
my_password = "<reddit_password>"
#reddit_post_id = "1cp0i3"
reddit_post_id = "1bvkol"
# init summary helper.
my_summary_helper = SummaryHelper()
print( "Starting PRAW test at " + str( start_dt ) )
# set user agent.
r = praw.Reddit( user_agent = my_user_agent )
# got login set?
if ( ( ( my_username ) and ( my_username != "" ) ) and ( ( my_password ) and ( my_password != "" ) ) ):
# yes. Login.
r.login( my_username, my_password )
print( "==> Logged in." )
#-- END check to see if we log in. --#
print( "==> Created reddit instance." )
# retrieve post
# - post with lots of comments - 1bvkol has 22014
#reddit_post_id = "1bvkol"
post = r.get_submission( submission_id = reddit_post_id, comment_limit = 1500, comment_sort = "old" )
print( "Retrieved post " + str( reddit_post_id ) )
# output number of comments based on post
print( "==> post.permalink: " + post.permalink )
print( "==> post.num_comments: " + str( post.num_comments ) )
# use the replace_more_comments() method to pull in as many comments as possible.
post.replace_more_comments( limit = None, threshold = 0 )
print( "==> After replace_more_comments()" )
# get the comments
comments = post.comments
# print out number of comments
print( "==> len( comments ): " + str( len( comments ) ) ) # 3,915 and counting
# these are objects where parent comments reference children. flatten...
flat_comments = praw.helpers.flatten_tree( post.comments )
# how many now?
print( "==> after flatten_tree(), comment count: " + str ( len( flat_comments ) ) ) # 13364 - closer to 22000, but still not all of them.
# get a comment
test_comment = flat_comments[ 0 ]
print( "Looking at comment 0:")
# what is in it?
print( "==> str( comment ): " + str( test_comment ) ) # outputs the text of comment, nothing more.
# reddit ID of comment:
print( "==> comment id: " + str( test_comment.id ) )
# body of comment
print( "==> comment body: " + test_comment.body )
# to get map of property names to values in a praw object:
comment_prop_map = vars( test_comment )
# pretty-print it with pprint library.
pprint.pprint( comment_prop_map )
'''
Example:
{'_info_url': 'http://www.reddit.com/api/info/',
'_replies': [<praw.objects.MoreComments object at 0x4867550>],
'_submission': <praw.objects.Submission object at 0x4867790>,
'_underscore_names': ['replies'],
'approved_by': None,
'author': Redditor(user_name='worldclasssteez'),
'author_flair_css_class': None,
'author_flair_text': None,
'banned_by': None,
'body': u'Using the "J" and "K" keys on VLC player to sync up the audio. ',
'body_html': u'<div class="md"><p>Using the &quot;J&quot; and &quot;K&quot; keys on VLC player to sync up the audio. </p>\n</div>',
'created': 1365399487.0,
'created_utc': 1365395887.0,
'distinguished': None,
'downs': 44,
'edited': False,
'gilded': 0,
'has_fetched': True,
'id': u'c9ap3fp',
'json_dict': None,
'likes': None,
'link_id': u't3_1bvkol',
'name': u't1_c9ap3fp',
'num_reports': None,
'parent_id': u't3_1bvkol',
'reddit_session': <praw.Reddit object at 0x48539d0>,
'saved': False,
'score_hidden': False,
'subreddit': Subreddit(display_name='AskReddit'),
'subreddit_id': u't5_2qh1i',
'ups': 201}
'''
# each name in that map can be invoked as a variable on the object itself.
# test summary counter
my_summary_helper.set_prop_value( "comment_count", 0 )
# look at utc date order of comments:
# for comment in flat_comments[ 0:15 ]:
for index in range( 0, len( flat_comments ) ):
# get vars
comment = flat_comments[ index ]
comment_prop_map = vars( comment )
created_from_map = comment_prop_map[ 'created_utc' ]
created_from_obj = comment.created_utc
created_dt = datetime.datetime.fromtimestamp( created_from_map )
comment_id = comment.name
print( "==> " + str( index ) + " ( " + comment_id + " ) - Created UTC: " + str( created_from_map ) + " (map); " + str( created_from_obj ) + " (obj); " + str( created_dt ) )
# increment comment count
my_summary_helper.increment_prop_value( "comment_count" )
#-- END loop over comments --#
print( "==> Created: " + str( created_from_map ) + " (map); " + str( created_from_obj ) + " (obj); " + str( created_dt ) )
summary_string = "\nPRAW testing complete!\n"
# generate summary string
# set stop time.
my_summary_helper.set_stop_time()
# generate summary string.
summary_string += my_summary_helper.create_summary_string( item_prefix_IN = "==> " )
print( summary_string )
|
garrettkatz/directional-fibers
|
dfibers/examples/lorenz.py
|
Python
|
mit
| 3,181
| 0.019491
|
"""
Fiber-based fixed point location in the Lorenz system
f(v)[0] = s*(v[1]-v[0])
f(v)[1] = r*v[0] - v[1] - v[0]*v[2]
f(v)[2] = v[0]*v[1] - b*v[2]
Reference:
http://www.emba.uvm.edu/~jxyang/teaching/Math266notes13.pdf
https://en.wikipedia.org/wiki/Lorenz_system
"""
import numpy as np
import matplotlib.pyplot as pt
import scipy.integrate as si
import dfibers.traversal as tv
import dfibers.numerical_utilities as nu
import dfibers.fixed_points as fx
import dfibers.solvers as sv
from mpl_toolkits.mplot3d import Axes3D
N = 3
s, b, r = 10, 8./3., 28
def f(v):
return np.array([
s*(v[1,:]-v[0,:]),
r*v[0,:] - v[1,:] - v[0,:]*v[2,:],
v[0,:]*v[1,:] - b*v[2,:],
])
def ef(v):
return 0.001*np.ones((N,1))
def Df(v):
Dfv = np.empty((v.shape[1],3,3))
Dfv[:,0,0], Dfv[:,0,1], Dfv[:,0,2] = -s, s, 0
Dfv[:,1,0], Dfv[:,1,1], Dfv[:,1,2] = r - v[2], -1, -v[0]
Dfv[:,2,0], Dfv[:,2,1], Dfv[:,2,2] = v[1], v[0], -b
return Dfv
if __name__ == "__main__":
    # Collect attractor points
t = np.arange(0,40,0.01)
v = np.ones((N,1))
A = si.odeint(lambda v, t: f(v.reshape((N,1))).flatten(), v.flatten(), t).T
# Set up fiber arguments
v = np.zeros((N,1))
# c = np.random.randn(N,1)
c = np.array([[0.83736021, -1.87848114, 0.43935044]]).T
fiber_kwargs = {
"f": f,
"ef": ef,
"Df": Df,
"compute_step_amount": lambda trace: (0.1, 0, False),
"v": v,
|
"c": c,
"terminate": lambda trace: (np.fabs(trace.x[:N,:]) > 50).any(),
"max_step_size": 1,
"max_traverse_steps": 2000,
"max_solve_iterations": 2**5,
}
print("using c:")
print(c.T)
# Visualize strange attractor
ax = pt.gca(projection="3d")
ax.plot(*A, color='gray', linestyle='-', alpha=0.5)
br1 = np.sqrt(b*(r-1))
U = np.array([[0, 0, 0],[br1,br1,r-1],[-br1,-br1,r-1]]).T
ax.scatter(*U, color='black')
# Run and visualize fiber components, for each fxpt
xlims, ylims, zlims = [-20,20], [-30,30], [-20,60]
for fc in [0,2]:
# start from current fxpt
fiber_kwargs["v"] = U[:,[fc]]
# ax.text(U[0,fc],U[1,fc],U[2,fc], str(fc))
# Run in one direction
solution = sv.fiber_solver(**fiber_kwargs)
V1 = np.concatenate(solution["Fiber trace"].points, axis=1)[:N,:]
z = solution["Fiber trace"].z_initial
# Run in other direction (negate initial tangent)
fiber_kwargs["z"] = -z
solution = sv.fiber_solver(**fiber_kwargs)
V2 = np.concatenate(solution["Fiber trace"].points, axis=1)[:N,:]
# Join fiber segments, restrict to figure limits
V = np.concatenate((np.fliplr(V1), V2), axis=1)
V = V[:,::50]
for i, (lo, hi) in enumerate([xlims, ylims, zlims]):
V = V[:,(lo < V[i,:]) & (V[i,:] < hi)]
C = f(V)
# Visualize fiber
ax.plot(*V, color='black', linestyle='-')
ax.quiver(*np.concatenate((V,.1*C),axis=0),color='black')
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.view_init(elev=15,azim=145)
pt.tight_layout()
pt.show()
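    # A quick numerical check (a sketch): the three analytic fixed points in U
    # should make f vanish, up to floating-point error.
    print("max |f(U)| at the fixed points:", np.abs(f(U)).max())  # ~0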
|
carpyncho/feets
|
feets/datasets/synthetic.py
|
Python
|
mit
| 10,431
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
# Copyright (c) 2017 Juan Cabral
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
# DOCS
# =============================================================================
"""Synthetic light curve generator.
"""
# =============================================================================
# IMPORTS
# =============================================================================
import numpy as np
from .base import Data
# =============================================================================
# CONSTANTS
# =============================================================================
DS_NAME = "feets-synthetic"
DESCRIPTION = "Lightcurve created with random numbers"
BANDS = ("B", "V")
METADATA = None
DEFAULT_SIZE = 10000
# =============================================================================
# FUNCTIONS
# =============================================================================
def create_random(
magf,
magf_params,
errf,
errf_params,
timef=np.linspace,
timef_params=None,
size=DEFAULT_SIZE,
id=None,
ds_name=DS_NAME,
description=DESCRIPTION,
bands=BANDS,
metadata=METADATA,
):
"""Generate a data with any given random function.
Parameters
----------
magf : callable
Function to generate the magnitudes.
magf_params : dict-like
Parameters to feed the `magf` function.
errf : callable
Function to generate the magnitudes.
errf_params : dict-like
Parameters to feed the `errf` function.
timef : callable, (default=numpy.linspace)
Function to generate the times.
timef_params : dict-like or None, (default={"start": 0., "stop": 1.})
Parameters to feed the `timef` callable.
size : int (default=10000)
        Number of observations of the light curves
id : object (default=None)
        Id of the created data.
ds_name : str (default="feets-synthetic")
Name of the dataset
description : str (default="Lightcurve created with random numbers")
Description of the data
bands : tuple of strings (default=("B", "V"))
The bands to be created
metadata : dict-like or None (default=None)
The metadata of the created data
Returns
-------
data
A Data object with a random lightcurves.
Examples
|
--------
.. code-block:: pycon
>>> from numpy import random
>>> create_random(
... magf=random.normal, magf_params={"loc": 0, "scale": 1},
... errf=random.normal, errf_params={"loc": 0, "scale": 0.008})
Data(id=None, ds_name='feets-synthetic', bands=('B', 'V'))
"""
timef_params = (
{"start": 0.0, "stop": 1.0}
if timef_params is None
else timef_params.copy()
)
timef_params.update(num=size)
magf_params = magf_params.copy()
magf_params.update(size=size)
errf_params = errf_params.copy()
errf_params.update(size=size)
data = {}
for band in bands:
data[band] = {
"time": timef(**timef_params),
"magnitude": magf(**magf_params),
"error": errf(**errf_params),
}
return Data(
id=id,
ds_name=ds_name,
description=description,
bands=bands,
metadata=metadata,
data=data,
)
def create_normal(
mu=0.0, sigma=1.0, mu_err=0.0, sigma_err=1.0, seed=None, **kwargs
):
"""Generate a data with magnitudes that follows a Gaussian
distribution. Also their errors are gaussian.
Parameters
----------
    mu : float (default=0)
        Mean of the gaussian distribution of magnitudes
    sigma : float (default=1)
        Standard deviation of the gaussian distribution of magnitudes
    mu_err : float (default=0)
        Mean of the gaussian distribution of magnitude errors
    sigma_err : float (default=1)
        Standard deviation of the gaussian distribution of magnitude errors
seed : {None, int, array_like}, optional
Random seed used to initialize the pseudo-random number generator.
Can be any integer between 0 and 2**32 - 1 inclusive, an
array (or other sequence) of such integers, or None (the default).
If seed is None, then RandomState will try to read data from
/dev/urandom (or the Windows analogue) if available or seed from
the clock otherwise.
kwargs : optional
extra arguments for create_random.
Returns
-------
data
A Data object with a random lightcurves.
Examples
--------
.. code-block:: pycon
>>> ds = create_normal(0, 1, 0, .0008, seed=42)
>>> ds
Data(id=None, ds_name='feets-synthetic', bands=('B', 'V'))
>>> ds.data.B
LightCurve(time[10000], magnitude[10000], error[10000])
>>> ds.data.B.time
array([ 0.00000000e+00, 1.00010001e-04, 2.00020002e-04, ...,
9.99799980e-01, 9.99899990e-01, 1.00000000e+00])
"""
random = np.random.RandomState(seed)
return create_random(
magf=random.normal,
magf_params={"loc": mu, "scale": sigma},
errf=random.normal,
errf_params={"loc": mu_err, "scale": sigma_err},
**kwargs,
)
def create_uniform(
low=0.0, high=1.0, mu_err=0.0, sigma_err=1.0, seed=None, **kwargs
):
"""Generate a data with magnitudes that follows a uniform
distribution; the error instead are gaussian.
Parameters
----------
low : float, optional
Lower boundary of the output interval. All values generated will be
greater than or equal to low. The default value is 0.
high : float, optional
Upper boundary of the output interval. All values generated will be
less than high. The default value is 1.0.
    mu_err : float (default=0)
        Mean of the gaussian distribution of magnitude errors
    sigma_err : float (default=1)
        Standard deviation of the gaussian distribution of magnitude errors
seed : {None, int, array_like}, optional
Random seed used to initialize the pseudo-random number generator.
Can be any integer between 0 and 2**32 - 1 inclusive, an
array (or other sequence) of such integers, or None (the default).
If seed is None, then RandomState will try to read data from
/dev/urandom (or the Windows analogue) if available or seed from
the clock otherwise.
kwargs : optional
extra arguments for create_random.
Returns
-------
data
A Data object with a random lightcurves.
Examples
--------
.. code-block:: pycon
>>> ds = synthetic.create_uniform(1, 2, 0, .0008, 42)
>>> ds
Data(id=None, ds_name='feets-synthetic', bands=('B', 'V'))
>>> ds.data.B.magnitude
array([ 1.37454012, 1.95071431, 1.73199394, ..., 1.94670792,
1.39748799, 1.2171404 ])
"""
random = np.random.RandomState(seed)
return create_random(
|
obiwanus/django-qurl
|
qurl/models.py
|
Python
|
mit
| 22
| 0
|
# Say hello to Django
|
0ppen/introhacking
|
code_from_book.py
|
Python
|
mit
| 16,192
| 0.000628
|
# Python Code From Book
# This file consists of code snippets only
# It is not intended to be run as a script
raise SystemExit
####################################################################
# 3. Thinking in Binary
####################################################################
import magic
print magic.from_file("my_image.jpg")
# JPEG image data, Exif standard: [TIFF image data, big-endian,
# direntries=16, height=3264, bps=0, PhotometricIntepretation=RGB],
# baseline, precision 8, 2378x2379, frames 3
if magic.from_file("upload.jpg", mime=True) == "image/jpeg":
continue_uploading("upload.jpg")
else:
alert("Sorry! This file type is not allowed")
import imghdr
print imghdr.what("path/to/my/file.ext")
import binascii
def spoof_file(file, magic_number):
magic_number = binascii.unhexlify(magic_number)
with open(file, "r+b") as f:
old = f.read()
f.seek(0)
f.write(magic_number + old)
def to_ascii_bytes(string):
return " ".join(format(ord(char), '08b') for char in string)
string = "my ascii string"
"".join(hex(ord(char))[2:] for char in string)
# '6d7920617363696920737472696e67'
hex_string = "6d7920617363696920737472696e67"
hex_string.decode("hex")
# 'my ascii string'
"".join(chr(int(hex_string[i:i+2], 16)) for i in range(0, len(hex_string), 2))
# 'my ascii string'
# adapted from https://code.activestate.com/recipes/142812-hex-dumper/
def hexdump(string, length=8):
result = []
digits = 4 if isinstance(string, unicode) else 2
for i in xrange(0, len(string), length):
s = string[i:i + length]
hexa = "".join("{:0{}X}".format(ord(x), digits) for x in s)
text = "".join(x if 0x20 <= ord(x) < 0x7F else '.' for x in s)
result.append("{:04X} {:{}} {}".format(i, hexa, length * (digits + 1), text))
return '\n'.join(result)
with open("/path/to/my_file.ext", "r") as f:
print hexdump(f.read())
import struct
num = 0x103e4
struct.pack("I", 0x103e4)
# '\xe4\x03\x01\x00'
string = '\xe4\x03\x01\x00'
struct.unpack("i", string)
# (66532,)
bytes = '\x01\xc2'
struct.pack("<h", struct.unpack(">h", bytes)[0])
# '\xc2\x01'
import base64
base64.b64encode('encodings are fun...')
# 'ZW5jb2RpbmdzIGFyZSBmdW4uLi4='
base64.b64decode(_)
# 'encodings are fun...'
string = "hello\x00"
binary_string = ' '.join('{:08b}'.format(ord(char)) for char in string)
" ".join(binary_string[i:i+6] for i in range(0, len(binary_string), 6))
# '011010 000110 010101 101100 011011 000110 111100 000000'
bin_string = '011010 000110 010101 101100 011011 000110 111100 000000'
[int(b, 2) for b in bin_string.split()]
# [26, 6, 21, 44, 27, 6, 60, 0]
u'◑ \u2020'.encode('utf8')
# '\xe2\x97\x91 \xe2\x80\xa0'
'\xe2\x97\x91 \xe2\x80\xa0'.decode('utf8')
# u'\u25d1 \u2020'
unicode('\xe2\x97\x91 \xe2\x80\xa0', encoding='utf8')
# u'\u25d1 \u2020'
utf8_string = 'Åêíòü'
utf8_string
# '\xc3\x85\xc3\xaa\xc3\xad\xc3\xb2\xc3\xbc'
unicode_string = utf8_string.decode('utf8')
unicode_string
# u'\xc5\xea\xed\xf2\xfc'
unicode_string.encode('mac roman')
# '\x81\x90\x92\x98\x9f'
'Åêíòü'.decode('utf8').encode('ascii')
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# UnicodeEncodeError: 'ascii' codec can't encode characters in position 0-4: ordinal not in range(128)
file = """潍楪慢敫椠桴慧扲敬整瑸琠慨⁴獩琠敨爠獥汵⁴景琠硥⁴敢湩敤潣敤獵湩湡甠楮瑮湥敤档
牡捡整湥潣楤杮楷桴挠浯汰瑥汥⁹湵敲慬整湯獥景整牦浯愠搠晩敦敲瑮眠楲楴杮猠獹整‧⠊慔敫
牦浯攠楷楫数楤牯⥧"""
print file.decode('utf8').encode('utf16')
# ??Mojibake is the garbled text that is the result of text being decoded using an
# unintended character encoding with completely unrelated ones, often from a
# different writing system.' (Taken from en.wikipedia.org)
import ftfy
ftfy.fix_text(u"“Mojibake“ can be fixed.")
# u'"Mojibake" can be fixed.'
bin(0b1010 & 0b1111110111)
# '0b10'
bin(0b1010 | 0b0110)
# '0b1110'
bin(0b10111 | 0b01000)
# '0b11111'
bin(0b100 ^ 0b110)
# '0b10'
bin(-0b1010 >> 0b10)
# '-0b11'
x = 0b1111
y = 0b1010
bin(int("{:b}{:b}".format(x, y), 2))
# '0b11111010'
bin(x << 4 | y)
# '0b11111010'
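# Unpacking the packed value again: a shift recovers x and a mask recovers y
# (4 is the bit-width of y in this example).
packed = x << 4 | y
bin(packed >> 4)
# '0b1111'
bin(packed & 0b1111)
# '0b1010'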
####################################################################
# 4. Cryptography
####################################################################
import random
import string
r = random.SystemRandom()
# Get a random integer between 0 and 20
r.randint(0, 20)
# 5
# Get a random number between 0 and 1
r.random()
# 0.8282475835972263
# Generate a random 40-bit number
r.getrandbits(40)
# 595477188771L
# Choose a random item from a string or list
chars = string.printable
r.choice(chars)
# 'e'
# Randomize the order of a sequence
seq = ['a', 'b', 'c', 'd', 'e']
r.shuffle(seq)
print seq
# ['c','d', 'a', 'e', 'b']
"ALLIGATOR".encode('rot13')
# 'NYYVTNGBE'
"NYYVTNGBE".encode('rot13')
# 'ALLIGATOR'
plaintext = "A secret-ish message!"
"".join(chr((ord(c) + 20) % 256) for c in plaintext)
# 'U4\x87yw\x86y\x88A}\x87|4\x81y\x87\x87u{y5'
ciphertext = 'U4\x87yw\x86y\x88A}\x87|4\x81y\x87\x87u{y5'
"".join(chr((ord(c) - 20) % 256) for c in ciphertext)
# 'A secret-ish message!'
plaintext = 0b110100001101001
one_time_pad = 0b110000011100001
bin(plaintext ^ one_time_pad)
# '0b100010001000'
decrypted = 0b100010001000 ^ one_time_pad
format(decrypted, 'x').decode('hex')
# 'hi'
import os
import binascii
# ASCII-encoded plaintext
plaintext = "this is a secret message"
plaintext_bits = int(binascii.hexlify(plaintext), 16)
print "plaintext (ascii):", plaintext
print "plaintext (hex):", plaintext_bits
# Generate the one-time pad
onetime_pad = int(binascii.hexlify(os.urandom(len(plaintext))), 16)
print "one-time pad: (hex):", onetime_pad
# Encrypt plaintext using XOR operation with one-time pad
ciphertext_bits = plaintext_bits ^ onetime_pad
print "encrypted text (hex):", ciphertext_bits
# Decrypt using XOR operation with one-time pad
decrypted_text = ciphertext_bits ^ onetime_pad
decrypted_text = binascii.unhexlify(hex(decrypted_text)[2:-1])
print "decrypted text (ascii):", decrypted_text
import random
import binascii
p1 = "this is the part where you run away"
p2 = "from bad cryptography practices."
# pad plaintexts with spaces to ensure equal length
p1 = p1.ljust(len(p2))
p2 = p2.ljust(len(p1))
p1 = int(binascii.hexlify(p1), 16)
p2 = int(binascii.hexlify(p2), 16)
# get random one-time pad
otp = random.SystemRandom().getrandbits(p1.bit_length())
# encrypt
c1 = p1 ^ otp
c2 = p2 ^ otp # otp reuse...not good!
print "c1 ^ c2 == p1 ^ p2 ?", c1 ^ c2 == p1 ^ p2
print "c1 ^ c2 =", hex(c1 ^ c2)
# the crib
crib = " the "
crib = int(binascii.hexlify(crib), 16)
xored = c1 ^ c2
print "crib =", hex(crib)
cbl = crib.bit_length()
xbl = xored.bit_length()
print
mask = (2**(cbl + 1) - 1)
fill = len(str(xbl / 8))
# crib dragging
for s in range(0, xbl - cbl + 8, 8):
xor = (xored ^ (crib << s)) & (mask << s)
out = binascii.unhexlify(hex(xor)[2:-1])
print "{:>{}} {}".format(s/8, fill, out)
from cryptography.fernet import Fernet
key = Fernet.generate_key()
f = Fernet(key)
ciphertext = f.encrypt("this is my plaintext")
decrypted = f.decrypt(ciphertext)
print decrypted
# this is my plaintext
import os
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
pt = "my plaintext"
backend = default_backend()
key = os.urandom(32)
iv = os.urandom(16)
padder = padding.PKCS7(128).padder()
pt = padder.update(pt) + padder.finalize()
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend)
encryptor = cipher.encryptor()
ct = encryptor.update(pt) + encryptor.finalize()
decryptor = cipher.decryptor()
out = decryptor.update(ct) + decryptor.finalize()
unpadder = padding.PKCS7(128).unpadder()
out = unpadder.update(out) + unpadder.finalize()
print out
import hashlib
hashlib.md5("hash me please").hexdigest()
# '760d92b6a6f974ae11904cd0a6fc2e90'
hashlib.sha1("hash me please").hexdigest()
# '1a58c9b3d138a45519518ee42e634600d1b52153'
import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from cryptography.haz
|
schreiberx/sweet
|
mule_local/python/mule_local/postprocessing/shtnsfiledata.py
|
Python
|
mit
| 3,227
| 0.008057
|
#! /usr/bin/env python3
import math, sys
import shtns
import numpy as np
class shtnsfiledata:
#
# Adopted from https://bitbucket.org/nschaeff/shtns/src/master/examples/shallow_water.py
#
def __init__(
self,
rsphere = 1.0
):
self.rsphere = rsphere
def setup(self, file_info, anti_aliasing=False):
import shtns
import numpy as np
        if file_info['modes_m_max'] != file_info['modes_n_max']:
raise Exception("Only num_lon == num_lat supported")
ntrunc = file_info['modes_n_max']
self._shtns = shtns.sht(ntrunc, ntrunc, 1, shtns.sht_orthonormal+shtns.SHT_NO_CS_PHASE)
nlons = (ntrunc + 1) * 2
nlats = (ntrunc + 1)
if anti_aliasing:
if nlons & 1:
raise Exception("Only even numbers of longitudinal coordinates allowed for anti-aliasing")
if nlats & 1:
raise Exception("Only even numbers of latitudinal coordinates allowed for anti-aliasing")
print("Anti-aliasing:")
print(" + old lon/lat: ", nlons, nlats)
nlons += nlons//2
nlats += nlats//2
print(" + new lon/lat: ", nlons, nlats)
if file_info['grid_type'] == 'GAUSSIAN':
            #self._shtns.set_grid(nlats,nlons,shtns.sht_gauss_fly|shtns.SHT_PHI_CONTIGUOUS, 1.e-10)
self._shtns.set_grid(nlats, nlons, shtns.sht_quick_init|shtns.SHT_PHI_CONTIGUOUS, 0)
elif file_info['grid_type'] == 'REGULAR':
#self._shtns.set_grid(nlats,nlons,shtns.sht_reg_dct|shtns.SHT_PHI_CONTIGUOUS, 1.e-10)
self._shtns.set_grid(nlats, nlons, shtns.sht_reg_dct|shtns.SHT_PHI_CONTIGUOUS, 0)
else:
|
raise Exception("Grid type '"+file_info['grid_type']+"' not supported!")
self.lats = np.arcsin(self._shtns.cos_theta)
self.lons = (2.*np.pi/nlons)*np.arange(nlons)
self.nlons = nlons
self.nlats = nlats
self.ntrunc = ntrunc
self.nlm = self._shtns.nlm
self.degree = self._shtns.l
self.lap = -self.degree*(self.degree+1.0).astype(np.complex)
self.invlap = np.zeros(self.lap.shape, self.lap.dtype)
self.invlap[1:] = 1./self.lap[1:]
self.lap = self.lap/self.rsphere**2
self.invlap = self.invlap*self.rsphere**2
def phys2spec(self, data):
return self._shtns.analys(data)
def spec2phys(self, dataspec):
return self._shtns.synth(dataspec)
def vrtdiv2uv(self, vrtspec, divspec):
return self._shtns.synth((self.invlap/self.rsphere)*vrtspec, (self.invlap/self.rsphere)*divspec)
def uv2vrtdiv(self,u,v):
vrtspec, divspec = self._shtns.analys(u, v)
return self.lap*self.rsphere*vrtspec, self.lap*self.rsphere*divspec
def getuv(self,divspec):
vrtspec = np.zeros(divspec.shape, dtype=np.complex)
u,v = self._shtns.synth(vrtspec,divspec)
return u, v
def rotateX90(self, i_field):
return self._shtns.Xrotate90(i_field)
def rotateY90(self, i_field):
return self._shtns.Yrotate90(i_field)
def rotateZ90(self, i_field, angle):
return self._shtns.Zrotate(i_field, angle)
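# A hedged usage sketch (requires the shtns library; the file_info values
# below are illustrative). A spectral -> physical -> spectral round trip
# should reproduce the coefficients to near machine precision.
if __name__ == "__main__":
    sfd = shtnsfiledata()
    sfd.setup({'modes_m_max': 31, 'modes_n_max': 31, 'grid_type': 'GAUSSIAN'})
    spec = sfd.phys2spec(np.random.rand(sfd.nlats, sfd.nlons))
    phys = sfd.spec2phys(spec)
    print(np.abs(sfd.phys2spec(phys) - spec).max())  # ~1e-15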
|
math-a3k/django-ai
|
django_ai/base/views.py
|
Python
|
lgpl-3.0
| 2,787
| 0
|
# -*- coding: utf-8 -*-
import inspect
import random
import numpy as np
from django.contrib import messages
from django.views.generic import RedirectView
from django.contrib.contenttypes.models import ContentType
from django.http import Http404
from django.contrib.auth.mixins import UserPassesTestMixin
class RunActionView(UserPassesTestMixin, RedirectView):
"""
Runs common Actions for Systems and Techniques
"""
permanent = False
#: Available Actions
ACTIONS = {
"perform_inference": {
"type": "object",
"str": "PERFORMING INFERENCE",
"method": "perform_inference",
"kwargs": {
"recalculate": True
},
},
"reset_inference": {
"type": "object",
"str": "RESETING INFERENCE",
"method": "reset_inference",
"kwargs": {},
},
"reinitialize_rng": {
"type": "general",
"str": "REINITIALIZING RNG",
"method": "action_reinitialize_rng",
"kwargs": {},
}
}
def test_func(self):
return(self.request.user.is_superuser)
def action_reinitialize_rng(self):
"""
Reinitialize both generators
"""
random.seed()
np.random.seed()
def get_ct_object(self, content_type, object_id):
ct = ContentType.objects.get(model=content_type)
return(ct.model_class().objects.get(id=object_id))
def run_action(self, action, action_object=None):
try:
if action_object:
action_method = getattr(action_object, action['method'])
else:
action_method = getattr(self, action['method'])
action_method(**action['kwargs'])
messages.success(self.request,
"SUCCESS AT {}".format(action['str']))
except Exception as e:
msg = e.args[0]
frm = inspect.trace()[-1]
mod = inspect.getmodule(frm[0])
modname = mod.__name__ if mod else frm[1]
messages.error(self.request,
"ERROR WHILE {}: [{}] {}".format(
action['str'], modname, str(msg)))
def get_redirect_url(self, *args, **kwargs):
|
if kwargs['action'] not in self.ACTIONS:
raise Http404("Action not Found")
if self.ACTIONS[kwargs['action']]["type"] == 'object':
            action_object = self.get_ct_object(kwargs['content_type'],
                                               kwargs['object_id'])
else:
action_object = None
self.run_action(self.ACTIONS[kwargs['action']], action_object)
return(self.request.META.get('HTTP_REFERER', '/'))
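# A hedged wiring sketch (a hypothetical urls.py; the route names and
# converters below are assumptions, matching the kwargs read in
# get_redirect_url above):
#
#   from django.urls import path
#   from .views import RunActionView
#
#   urlpatterns = [
#       path('run-action/<str:action>/<str:content_type>/<int:object_id>/',
#            RunActionView.as_view(), name='run-action'),
#       path('run-action/<str:action>/',
#            RunActionView.as_view(), name='run-action-general'),
#   ]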
|
ghutchis/cclib
|
src/cclib/writer/cjsonwriter.py
|
Python
|
lgpl-2.1
| 4,465
| 0.000896
|
# This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2014, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
"""A writer for chemical JSON (CJSON) files."""
try:
import openbabel as ob
has_openbabel = True
except ImportError:
has_openbabel = False
import os.path
import json
from . import filewriter
class CJSON(filewriter.Writer):
"""A writer for chemical JSON (CJSON) files."""
def __init__(self, ccdata, *args, **kwargs):
"""Initialize the chemical JSON writer object.
Inputs:
ccdata - An instance of ccData, parsed from a logfile.
"""
# Call the __init__ method of the superclass
super(CJSON, self).__init__(ccdata, *args, **kwargs)
self.generate_repr()
def generate_repr(self):
"""Generate the CJSON representation of the logfile data."""
cjson_dict = dict()
cjson_dict['chemical json'] = 0
if self.jobfilename is not None:
cjson_dict['name'] = os.path.splitext(self.jobfilename)[0]
# These are properties that can be collected using Open Babel.
if has_openbabel:
cjson_dict['smiles'] = self.pbmol.write('smiles')
cjson_dict['inchi'] = self.pbmol.write('inchi')
cjson_dict['inchikey'] = self.pbmol.write('inchikey')
cjson_dict['formula'] = self.pbmol.formula
cjson_dict['atoms'] = dict()
cjson_dict['atoms']['elements'] = dict()
cjson_dict['atoms']['elements']['number'] = self.ccdata.atomnos.tolist()
cjson_dict['atoms']['coords'] = dict()
cjson_dict['atoms']['coords']['3d'] = self.ccdata.atomcoords[-1].flatten().tolist()
cjson_dict['bonds'] = dict()
cjson_dict['bonds']['connections'] = dict()
cjson_dict['bonds']['connections']['index'] = []
if has_openbabel:
for bond in self.bond_connectivities:
cjson_dict['bonds']['connections']['index'].append(bond[0] + 1)
cjson_dict['bonds']['connections']['index'].append(bond[1] + 1)
cjson_dict['bonds']['order'] = [bond[2] for bond in self.bond_connectivities]
cjson_dict['properties'] = dict()
if has_openbabel:
cjson_dict['properties']['molecular mass'] = self.pbmol.molwt
cjson_dict['atomCount'] = len(self.ccdata.atomnos)
cjson_dict['heavyAtomCount'] = len([x for x in self.ccdata.atomnos if x > 1])
if has_openbabel:
cjson_dict['diagram'] = self.pbmol.write(format='svg')
# These are properties that can be collected using cclib.
# Do there need to be any unit conversions here?
homo_idx_alpha = int(self.ccdata.homos[0])
homo_idx_beta = int(self.ccdata.homos[-1])
energy_alpha_homo = self.ccdata.moenergies[0][homo_idx_alpha]
        energy_alpha_lumo = self.ccdata.moenergies[0][homo_idx_alpha + 1]
energy_alpha_gap = energy_alpha_lumo - energy_alpha_homo
energy_beta_homo = self.ccdata.moenergies[-1][homo_idx_beta]
energy_beta_lumo = self.ccdata.moenergies[-1][homo_idx_beta + 1]
energy_beta_gap = energy_beta_lumo - energy_beta_homo
cjson_dict['energy'] = dict()
        cjson_dict['energy']['total'] = self.ccdata.scfenergies[-1]
cjson_dict['energy']['alpha'] = dict()
cjson_dict['energy']['alpha']['homo'] = energy_alpha_homo
cjson_dict['energy']['alpha']['lumo'] = energy_alpha_lumo
cjson_dict['energy']['alpha']['gap'] = energy_alpha_gap
cjson_dict['energy']['beta'] = dict()
cjson_dict['energy']['beta']['homo'] = energy_beta_homo
cjson_dict['energy']['beta']['lumo'] = energy_beta_lumo
cjson_dict['energy']['beta']['gap'] = energy_beta_gap
cjson_dict['totalDipoleMoment'] = self._calculate_total_dipole_moment()
# Can/should we append the entire original log file?
# cjson_dict['files'] = dict()
# cjson_dict['files']['log'] = []
# cjson_dict['files']['log'].append()
return json.dumps(cjson_dict)
if __name__ == "__main__":
pass
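# Illustrative usage sketch (an editorial addition, not part of cclib): given a
# parsed logfile, the writer above can be driven roughly like this. The ccopen
# import path and the "water.log" filename are assumptions for the example.
#
#     from cclib.io import ccopen
#     data = ccopen("water.log").parse()
#     writer = CJSON(data)                  # generate_repr() runs in __init__
#     cjson_string = writer.generate_repr()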
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/sklearn/tests/test_hmm.py
|
Python
|
agpl-3.0
| 26,383
| 0.000493
|
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
from unittest import TestCase
from sklearn.datasets.samples_generator import make_spd_matrix
from sklearn import hmm
from sklearn import mixture
from sklearn.utils.extmath import logsumexp
from sklearn.utils import check_random_state
from nose import SkipTest
rng = np.random.RandomState(0)
np.seterr(all='warn')
class TestBaseHMM(TestCase):
def setUp(self):
self.prng = np.random.RandomState(9)
class StubHMM(hmm._BaseHMM):
def _compute_log_likelihood(self, X):
return self.framelogprob
def _generate_sample_from_state(self):
pass
def _init(self):
pass
def setup_example_hmm(self):
# Example from http://en.wikipedia.org/wiki/Forward-backward_algorithm
h = self.StubHMM(2)
h.transmat_ = [[0.7, 0.3], [0.3, 0.7]]
h.startprob_ = [0.5, 0.5]
framelogprob = np.log([[0.9, 0.2],
[0.9, 0.2],
[0.1, 0.8],
[0.9, 0.2],
[0.9, 0.2]])
# Add dummy observations to stub.
h.framelogprob = framelogprob
return h, framelogprob
def test_init(self):
h, framelogprob = self.setup_example_hmm()
for params in [('transmat_',), ('startprob_', 'transmat_')]:
d = dict((x[:-1], getattr(h, x)) for x in params)
h2 = self.StubHMM(h.n_components, **d)
self.assertEqual(h.n_components, h2.n_components)
for p in params:
assert_array_almost_equal(getattr(h, p), getattr(h2, p))
def test_do_forward_pass(self):
h, framelogprob = self.setup_example_hmm()
logprob, fwdlattice = h._do_forward_pass(framelogprob)
reflogprob = -3.3725
self.assertAlmostEqual(logprob, reflogprob, places=4)
reffwdlattice = np.array([[0.4500, 0.1000],
[0.3105, 0.0410],
[0.0230, 0.0975],
[0.0408, 0.0150],
[0.0298, 0.0046]])
assert_array_almost_equal(np.exp(fwdlattice), reffwdlattice, 4)
def test_do_backward_pass(self):
h, framelogprob = self.setup_example_hmm()
bwdlattice = h._do_backward_pass(framelogprob)
refbwdlattice = np.array([[0.0661, 0.0455],
[0.0906, 0.1503],
[0.4593, 0.2437],
[0.6900, 0.4100],
[1.0000, 1.0000]])
assert_array_almost_equal(np.exp(bwdlattice), refbwdlattice, 4)
def test_do_viterbi_pass(self):
h, framelogprob = self.setup_example_hmm()
logprob, state_sequence = h._do_viterbi_pass(framelogprob)
refstate_sequence = [0, 0, 1, 0, 0]
assert_array_equal(state_sequence, refstate_sequence)
reflogprob = -4.4590
self.assertAlmostEqual(logprob, reflogprob, places=4)
def test_eval(self):
h, framelogprob = self.setup_example_hmm()
nobs = len(framelogprob)
logprob, posteriors = h.eval([])
assert_array_almost_equal(posteriors.sum(axis=1), np.ones(nobs))
reflogprob = -3.3725
self.assertAlmostEqual(logprob, reflogprob, places=4)
refposteriors = np.array([[0.8673, 0.1327],
[0.8204, 0.1796],
[0.3075, 0.6925],
[0.8204, 0.1796],
[0.8673, 0.1327]])
assert_array_almost_equal(posteriors, refposteriors, decimal=4)
def test_hmm_eval_consistent_with_gmm(self):
n_components = 8
nobs = 10
h = self.StubHMM(n_components)
# Add dummy observations to stub.
framelogprob = np.log(self.prng.rand(nobs, n_components))
h.framelogprob = framelogprob
# If startprob and transmat are uniform across all states (the
# default), the transitions are uninformative - the model
# reduces to a GMM with uniform mixing weights (in terms of
# posteriors, not likelihoods).
logprob, hmmposteriors = h.eval([])
assert_array_almost_equal(hmmposteriors.sum(axis=1), np.ones(nobs))
norm = logsumexp(framelogprob, axis=1)[:, np.newaxis]
gmmposteriors = np.exp(framelogprob - np.tile(norm, (1, n_components)))
assert_array_almost_equal(hmmposteriors, gmmposteriors)
def test_hmm_decode_consistent_with_gmm(self):
n_components = 8
nobs = 10
h = self.StubHMM(n_components)
# Add dummy observations to stub.
framelogprob = np.log(self.prng.rand(nobs, n_components))
h.framelogprob = framelogprob
# If startprob and transmat are uniform across all states (the
# default), the transitions are uninformative - the model
# reduces to a GMM with uniform mixing weights (in terms of
# posteriors, not likelihoods).
viterbi_ll, state_sequence = h.decode([])
norm = logsumexp(framelogprob, axis=1)[:, np.newaxis]
gmmposteriors = np.exp(framelogprob - np.tile(norm, (1, n_components)))
gmmstate_sequence = gmmposteriors.argmax(axis=1)
assert_array_equal(state_sequence, gmmstate_sequence)
def test_base_hmm_attributes(self):
n_components = 20
startprob = self.prng.rand(n_components)
startprob = startprob / startprob.sum()
transmat = self.prng.rand(n_components, n_components)
transmat /= np.tile(transmat.sum(axis=1)
[:, np.newaxis], (1, n_components))
h = self.StubHMM(n_components)
self.assertEquals(h.n_components, n_components)
h.startprob_ = startprob
assert_array_almost_equal(h.startprob_, startprob)
self.assertRaises(ValueError, h.__setattr__, 'startprob_',
2 * startprob)
self.assertRaises(ValueError, h.__setattr__, 'startprob_', [])
self.assertRaises(ValueError, h.__setattr__, 'startprob_',
np.zeros((n_components - 2, 2)))
h.transmat_ = transmat
assert_array_almost_equal(h.transmat_, transmat)
self.assertRaises(ValueError, h.__setattr__, 'transmat_',
2 * transmat)
self.assertRaises(ValueError, h.__setattr__, 'transmat_', [])
self.assertRaises(ValueError, h.__setattr__, 'transmat_',
np.zeros((n_components - 2, n_components)))
def train_hmm_and_keep_track_of_log_likelihood(hmm, obs, n_iter=1, **kwargs):
hmm.n_iter = 1
hmm.fit(obs)
loglikelihoods = []
for n in xrange(n_iter):
hmm.n_iter = 1
hmm.init_params = ''
hmm.fit(obs)
loglikelihoods.append(sum(hmm.score(x) for x in obs))
return loglikelihoods
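# Illustrative use (an editorial addition): the helper above can be used to
# check that EM training does not decrease the log likelihood, e.g.
#
#     ll = train_hmm_and_keep_track_of_log_likelihood(
#         hmm.GaussianHMM(3), obs, n_iter=10)
#     assert all(x <= y + 1e-8 for x, y in zip(ll, ll[1:]))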
class GaussianHMMBaseTester(object):
def setUp(self):
self.prng = prng = np.random.RandomState(10)
self.n_components = n_components = 3
self.n_features = n_features = 3
self.startprob = prng.rand(n_components)
        self.startprob = self.startprob / self.startprob.sum()
self.transmat = prng.rand(n_components, n_components)
        self.transmat /= np.tile(self.transmat.sum(axis=1)[:, np.newaxis],
                                 (1, n_components))
self.means = prng.randint(-20, 20, (n_components, n_features))
self.covars = {
'spherical': (1.0 + 2 * np.dot(prng.rand(n_components, 1),
np.ones((1, n_features)))) ** 2,
'tied': (make_spd_matrix(n_features, random_state=0)
+ np.eye(n_features)),
'diag': (1.0 + 2 * prng.rand(n_components, n_features)) ** 2,
'full': np.array([make_spd_matrix(n_features, random_state=0)
+ np.eye(n_features)
for x in range(n_components)]),
}
self.expanded_covars = {
'sp
|
jdemon519/cfme_tests
|
cfme/configure/access_control.py
|
Python
|
gpl-2.0
| 24,877
| 0.001729
|
from functools import partial
from navmazing import NavigateToSibling, NavigateToAttribute
from cfme import Credential
from cfme.exceptions import CandidateNotFound, OptionNotAvailable
import cfme.fixtures.pytest_selenium as sel
import cfme.web_ui.toolbar as tb
from cfme.web_ui import (
AngularSelect, Form, Select, CheckboxTree, accordion, fill, flash,
form_buttons, Input, Table, UpDownSelect, CFMECheckbox, BootstrapTreeview)
from cfme.web_ui.form_buttons import change_stored_password
from utils import version
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from utils.log import logger
from utils.pretty import Pretty
from utils.update import Updateable
tb_select = partial(tb.select, "Configuration")
pol_btn = partial(tb.select, "Policy")
edit_tags_form = Form(
fields=[
("select_tag", Select("select#tag_cat")),
("select_value", Select("select#tag_add"))
])
tag_table = Table("//div[@id='assignments_div']//table")
users_table = Table("//div[@id='records_div']//table")
group_order_selector = UpDownSelect(
"select#seq_fields",
"//img[@alt='Move selected fields up']",
"//img[@alt='Move selected fields down']")
def simple_user(userid, password):
creds = Credential(principal=userid, secret=password)
return User(name=userid, credential=creds)
class User(Updateable, Pretty, Navigatable):
user_form = Form(
fields=[
('name_txt', Input('name')),
('userid_txt', Input('userid')),
('password_txt', Input('password')),
('password_verify_txt', Input('verify')),
('email_txt', Input('email')),
('user_group_select', AngularSelect('chosen_group')),
])
pretty_attrs = ['name', 'group']
def __init__(self, name=None, credential=None, email=None, group=None, cost_center=None,
value_assign=None, appliance=None):
Navigatable.__init__(self, appliance=appliance)
self.name = name
self.credential = credential
self.email = email
self.group = group
self.cost_center = cost_center
self.value_assign = value_assign
self._restore_user = None
def __enter__(self):
if self._restore_user != self.appliance.user:
from cfme.login import logout
logger.info('Switching to new user: %s', self.credential.principal)
self._restore_user = self.appliance.user
logout()
self.appliance.user = self
def __exit__(self, *args, **kwargs):
if self._restore_user != self.appliance.user:
            from cfme.login import logout
logger.info('Restoring to old user: %s', self._restore_user.credential.principal)
logout()
self.appliance.user = self._restore_user
self._restore_user = None
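    # Illustrative usage (an editorial addition): __enter__/__exit__ above make
    # a User a context manager for temporary impersonation, e.g.
    #
    #     with User(name='bob', credential=creds):
    #         ...  # UI actions run while logged in as bob
    #     # on exit, the previous appliance user is logged back in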
def create(self):
        navigate_to(self, 'Add')
fill(self.user_form, {'name_txt': self.name,
'userid_txt': self.credential.principal,
'password_txt': self.credential.secret,
'password_verify_txt': self.credential.verify_secret,
'email_txt': self.email,
'user_group_select': getattr(self.group,
'description', None)},
action=form_buttons.add)
flash.assert_success_message('User "{}" was saved'.format(self.name))
def update(self, updates):
navigate_to(self, 'Edit')
change_stored_password()
new_updates = {}
if 'credential' in updates:
new_updates.update({
'userid_txt': updates.get('credential').principal,
'password_txt': updates.get('credential').secret,
'password_verify_txt': updates.get('credential').verify_secret
})
if self.appliance.version >= '5.7':
self.name = updates.get('credential').principal
new_updates.update({
'name_txt': updates.get('name'),
'email_txt': updates.get('email'),
'user_group_select': getattr(
updates.get('group'),
'description', None)
})
fill(self.user_form, new_updates, action=form_buttons.save)
flash.assert_success_message(
'User "{}" was saved'.format(updates.get('name', self.name)))
def copy(self):
navigate_to(self, 'Details')
tb.select('Configuration', 'Copy this User to a new User')
new_user = User(name=self.name + "copy",
credential=Credential(principal='redhat', secret='redhat'))
change_stored_password()
fill(self.user_form, {'name_txt': new_user.name,
'userid_txt': new_user.credential.principal,
'password_txt': new_user.credential.secret,
'password_verify_txt': new_user.credential.verify_secret},
action=form_buttons.add)
flash.assert_success_message('User "{}" was saved'.format(new_user.name))
return new_user
def delete(self):
navigate_to(self, 'Details')
tb.select('Configuration', 'Delete this User', invokes_alert=True)
sel.handle_alert()
flash.assert_success_message('EVM User "{}": Delete successful'.format(self.name))
def edit_tags(self, tag, value):
navigate_to(self, 'Details')
pol_btn("Edit 'My Company' Tags for this User", invokes_alert=True)
fill(edit_tags_form, {'select_tag': tag,
'select_value': value},
action=form_buttons.save)
flash.assert_success_message('Tag edits were successfully saved')
def remove_tag(self, tag, value):
navigate_to(self, 'Details')
pol_btn("Edit 'My Company' Tags for this User", invokes_alert=True)
row = tag_table.find_row_by_cells({'category': tag, 'assigned_value': value},
partial_check=True)
sel.click(row[0])
form_buttons.save()
flash.assert_success_message('Tag edits were successfully saved')
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@property
def description(self):
return self.credential.principal
@navigator.register(User, 'All')
class UserAll(CFMENavigateStep):
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
accordion.tree(
"Access Control",
self.obj.appliance.server.zone.region.settings_string, "Users")
def resetter(self):
accordion.refresh("Access Control")
@navigator.register(User, 'Add')
class UserAdd(CFMENavigateStep):
prerequisite = NavigateToSibling('All')
def step(self):
tb_select("Add a new User")
@navigator.register(User, 'Details')
class UserDetails(CFMENavigateStep):
prerequisite = NavigateToSibling('All')
def step(self):
accordion.tree(
"Access Control",
self.obj.appliance.server.zone.region.settings_string,
"Users",
self.obj.name
)
def resetter(self):
accordion.refresh("Access Control")
@navigator.register(User, 'Edit')
class UserEdit(CFMENavigateStep):
prerequisite = NavigateToSibling('Details')
def step(self):
tb_select('Edit this User')
class Group(Updateable, Pretty, Navigatable):
group_form = Form(
fields=[
('ldap_groups_for_user', AngularSelect("ldap_groups_user")),
('description_txt', Input('description')),
('lookup_ldap_groups_chk', Input('lookup')),
('role_select', AngularSelect("group_role")),
('group_tenant', AngularSelect("group_tenant"), {"appeared_in": "5.5"}),
('user_to_look_up', Input('user')),
('username', Input('user_id')),
|
mitodl/micromasters
|
discussions/signals.py
|
Python
|
bsd-3-clause
| 2,043
| 0.004405
|
"""
Signals for user profiles
"""
from django.conf import settings
from django.db import transaction
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from discussions import tasks
from profiles.models import Profile
from roles.models import Role
from roles.roles import Permissions
@receiver(post_save, sender=Profile, dispatch_uid="sync_user_profile")
def sync_user_profile(sender, instance, created, **kwargs): # pylint: disable=unused-argument
"""
    Signal handler to create/update a DiscussionUser every time a profile is created/updated
"""
if not settings.FEATURES.get('OPEN_DISCUSSIONS_USER_SYNC', False):
return
transaction.on_commit(lambda: tasks.sync_discussion_user.delay(instance.user_id))
@receiver(post_save, sender=Role, dispatch_uid="add_staff_as_moderator")
def add_staff_as_moderator(sender, instance, created, **kwargs): # pylint: disable=unused-argument
"""
    Signal handler to add a user as moderator when their staff role on a program is added
"""
if not settings.FEATURES.get('OPEN_DISCUSSIONS_USER_SYNC', False):
return
if instance.role not in Role.permission_to_roles[Permissions.CAN_CREATE_FORUMS]:
        return
transaction.on_commit(
lambda: tasks.add_user_as_moderator_to_channel.delay(
instance.user_id,
instance.program_id,
)
)
@receiver(post_delete, sender=Role, dispatch_uid="delete_staff_as_moderator")
def delete_staff_as_moderator(sender, instance, **kwargs): # pylint: disable=unused-argument
"""
    Signal handler to remove a user as moderator when their staff role on a program is deleted
"""
    if not settings.FEATURES.get('OPEN_DISCUSSIONS_USER_SYNC', False):
return
if instance.role not in Role.permission_to_roles[Permissions.CAN_CREATE_FORUMS]:
return
transaction.on_commit(
lambda: tasks.remove_user_as_moderator_from_channel.delay(
instance.user_id,
instance.program_id,
)
)
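# Editorial note with a minimal sketch (not part of the original module): the
# handlers above enqueue Celery tasks via transaction.on_commit, so nothing is
# sent if the surrounding transaction rolls back. The pattern, in isolation:
#
#     from django.db import transaction
#
#     with transaction.atomic():
#         profile.save()        # post_save fires here
#         # tasks.sync_discussion_user.delay(...) is deferred until COMMIT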
|
googleads/google-ads-python
|
google/ads/googleads/v8/services/services/ad_service/client.py
|
Python
|
apache-2.0
| 21,991
| 0.000909
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import ad
from google.ads.googleads.v8.services.types import ad_service
from google.rpc import status_pb2 # type: ignore
from .transports.base import AdServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import AdServiceGrpcTransport
class AdServiceClientMeta(type):
"""Metaclass for the AdService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[AdServiceTransport]]
_transport_registry["grpc"] = AdServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[AdServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class AdServiceClient(metaclass=AdServiceClientMeta):
"""Service to manage ads."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AdServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AdServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> AdServiceTransport:
"""Return the transport used by the client instance.
Returns:
AdServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def ad_path(customer_id: str, ad_id: str,) -> str:
"""Return a fully-qualified ad string."""
return "customers/{customer_id}/ads/{ad_id}".format(
customer_id=customer_id, ad_id=ad_id,
)
@staticmethod
    def parse_ad_path(path: str) -> Dict[str, str]:
        """Parse an ad path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/ads/(?P<ad_id>.+?)$", path
)
return m.groupdict() if m else {}
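    # Illustrative round trip (an editorial addition):
    #   AdServiceClient.ad_path("123", "456")
    #     -> "customers/123/ads/456"
    #   AdServiceClient.parse_ad_path("customers/123/ads/456")
    #     -> {"customer_id": "123", "ad_id": "456"}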
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse an organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
|
adini121/oneanddone
|
oneanddone/users/tests/test_mixins.py
|
Python
|
mpl-2.0
| 3,843
| 0.001041
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.core.exceptions import PermissionDenied
from mock import Mock, patch
from nose.tools import eq_, raises
from oneanddone.base.tests import TestCase
from oneanddone.users.mixins import BaseUserProfileRequiredMixin, PrivacyPolicyRequiredMixin, MyStaffUserRequiredMixin
from oneanddone.users.tests import UserFactory, UserProfileFactory
class FakeMixin(object):
def dispatch(self, request, *args, **kwargs):
return 'fakemixin'
class FakeView(BaseUserProfileRequiredMixin, FakeMixin):
pass
class FakeViewNeedsPrivacyPolicy(PrivacyPolicyRequiredMixin, FakeMixin):
pass
class FakeViewNeedsStaff(MyStaffUserRequiredMixin, FakeMixin):
pass
class MyStaffUserRequiredMixinTests(TestCase):
def setUp(self):
self.view = FakeViewNeedsStaff()
def test_is_staff(self):
"""
If the user is staff, call the parent class's
dispatch method.
"""
request = Mock()
request.user = UserFactory.create(is_staff=True)
eq_(self.view.dispatch(request), 'fakemixin')
@raises(PermissionDenied)
def test_not_staff(self):
"""
If the user is not staff, raise a PermissionDenied exception.
"""
request = Mock()
request.user = UserFactory.create(is_staff=False)
self.view.dispatch(request)
class PrivacyPolicyRequiredMixinTests(TestCase):
def setUp(self):
self.view = FakeViewNeedsPrivacyPolicy()
    def test_has_profile_and_accepts_privacy_policy(self):
"""
If the user has created a profile, and has accepted privacy policy
call the parent class's dispatch method.
"""
request = Mock()
request.user = UserProfileFactory.create(privacy_policy_accepted=True).user
eq_(self.view.dispatch(request), 'fakemixin')
def test_has_profile_and_not_accepted_privacy_policy(self):
"""
If the user has created a profile, and has not accepted privacy policy
redirect them to profile update view.
"""
request = Mock()
request.user = UserProfileFactory.create(privacy_policy_accepted=False).user
with patch('oneanddone.users.mixins.redirect') as redirect:
eq_(self.view.dispatch(request), redirect.return_value)
redirect.assert_called_with('users.profile.update')
def test_no_profile(self):
"""
If the user hasn't created a profile, redirect them to the
profile creation view.
"""
request = Mock()
request.user = UserFactory.create()
with patch('oneanddone.users.mixins.redirect') as redirect:
eq_(self.view.dispatch(request), redirect.return_value)
redirect.assert_called_with('users.profile.create')
class UserProfileRequiredMixinTests(TestCase):
def setUp(self):
self.view = FakeView()
def test_has_profile(self):
"""
If the user has created a profile, and has accepted privacy policy
call the parent class's dispatch method.
"""
request = Mock()
request.user = UserProfileFactory.create(privacy_policy_accepted=True).user
eq_(self.view.dispatch(request), 'fakemixin')
def test_no_profile(self):
"""
If the user hasn't created a profile, redirect them to the
profile creation view.
"""
request = Mock()
request.user = UserFactory.create()
with patch('oneanddone.users.mixins.redirect') as redirect:
eq_(self.view.dispatch(request), redirect.return_value)
redirect.assert_called_with('users.profile.create')
|
gangadhar-kadam/hrerp
|
erpnext/patches/4_0/fix_employee_user_id.py
|
Python
|
agpl-3.0
| 491
| 0.028513
|
import frappe
from frappe.utils import get_fullname
def execute():
for user_id in frappe.db.sql_list("""select distinct user_id from `tabEmployee`
        where ifnull(user_id, '')!=''
        group by user_id having count(name) > 1"""):
fullname = get_fullname(user_id)
employee = frappe.db.get_value("Employee", {"employee_name": fullname, "user_id": user_id})
if employee:
frappe.db.sql("""update `tabEmployee` set user_id=null
where user_id=%s and name!=%s""", (user_id, employee))
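# Illustrative effect (an editorial addition): if two Employee rows share
# user_id='jane@example.com', the row whose employee_name matches that user's
# full name keeps the link and the other rows have user_id cleared, leaving at
# most one Employee linked per user.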
|
liqd/a4-meinberlin
|
meinberlin/apps/votes/apps.py
|
Python
|
agpl-3.0
| 132
| 0
|
from django.apps import AppConfig
|
class VotesConfig(AppConfig):
name = 'meinberlin.apps.votes'
label = 'meinberlin_votes'
|
StanczakDominik/PythonPIC
|
pythonpic/tests/test_FieldSolver.py
|
Python
|
bsd-3-clause
| 9,551
| 0.003036
|
# coding=utf-8
import matplotlib.pyplot as plt
import numpy as np
import pytest
from ..classes import Simulation, PeriodicTestGrid, NonperiodicTestGrid
from ..visualization.time_snapshots import FieldPlot, CurrentPlot
@pytest.fixture(params=(64, 128, 256, 512))
def _NG(request):
return request.param
@pytest.fixture(params=(1, 2 * np.pi, 10 * np.pi, 1000))
def _L(request):
return request.param
@pytest.fixture(params=(1, 2 * np.pi, 10 * np.pi, 1000))
def _test_charge_density(request):
return request.param
@pytest.fixture(params=(1, 2 * np.pi, 7.51))
def __t(request):
return request.param
def test_PoissonSolver(_NG, _L):
g = PeriodicTestGrid(1, _L, _NG)
charge_density = (2 * np.pi / _L) ** 2 * np.sin(2 * g.x * np.pi / _L)
field = np.zeros((_NG + 2, 3))
field[1:-1, 0] = -2 * np.pi / _L * np.cos(2 * np.pi * g.x / _L)
g.charge_density[:-1] = charge_density
g.init_solve()
def plots():
fig, axes = plt.subplots(2)
ax0, ax1 = axes
ax0.plot(g.x, charge_density)
ax0.set_title("Charge density")
ax1.set_title("Field")
ax1.plot(g.x, g.electric_field[1:-1], "r-", label="Fourier")
ax1.plot(g.x, field, "g-", label="Analytic")
for ax in axes:
ax.grid()
ax.legend()
plt.show()
return "test_PoissonSolver failed! calc/theory field ratio at 0: {}".format(g.electric_field[1] / field[0])
assert np.allclose(g.electric_field, field), plots()
# def test_PoissonSolver_complex(debug=DEBUG):
# L = 1
# N = 32 * 2**5
# epsilon_0 = 1
# x, dx = np.linspace(0, L, N, retstep=True, endpoint=False)
# anal_potential = lambda x: np.sin(x * 2 * np.pi) + 0.5 * \
# np.sin(x * 6 * np.pi) + 0.1 * np.sin(x * 20 * np.pi)
# anal_field = lambda x: -(2 * np.pi * np.cos(x * 2 * np.pi) + 3 * np.pi *
# np.cos(x * 6 * np.pi) + 20 * np.pi * 0.1 * np.cos(x * 20 * np.pi))
# charge_density_anal = lambda x: ((2 * np.pi)**2 * np.sin(x * 2 * np.pi) + 18 * np.pi**2 * np.sin(
# x * 6 * np.pi) + (20 * np.pi)**2 * 0.1 * np.sin(x * 20 * np.pi)) * epsilon_0
#
# NG = 32
# g = Frame(L, NG, epsilon_0)
# # indices_in_denser_grid = np.searchsorted(x, g.x)
# g.charge_density = charge_density_anal(g.x)
# energy_fourier = g.init_solver_fourier()
# energy_direct = 0.5 * (g.electric_field**2).sum() * g.dx
# print("dx", dx, "fourier", energy_fourier, "direct", energy_direct, energy_fourier / energy_direct)
#
# def plots():
# fig, xspace = plt.subplots()
# xspace.set_title(
# r"Solving the Poisson equation $\Delta \psi = \rho / \epsilon_0$ via Fourier transform")
# xspace.plot(g.x, g.charge_density, "ro--", label=r"$\rho$")
# xspace.plot(x, charge_density_anal(x), "r-", lw=6, alpha=0.5, label=r"$\rho_a$")
# xspace.plot(g.x, g.potential, "go--", label=r"$V$")
# xspace.plot(x, anal_potential(x), "g-", lw=6, alpha=0.5, label=r"$V_a$")
# xspace.plot(g.x, g.electric_field, "bo--", alpha=0.5, label=r"$E$")
# EplotAnal, = xspace.plot(x, anal_field(x), "b-", lw=6, alpha=0.5, label=r"$E_a$")
# xspace.set_xlim(0, L)
# xspace.set_xlabel("$x$")
# xspace.grid()
# xspace.legend(loc='best')
#
# fig2, fspace = plt.subplots()
# fspace.plot(g.k_plot, g.energy_per_mode, "bo--", label=r"electric energy $\rho_F V_F^\dagger$")
# fspace.set_xlabel("k")
# fspace.set_ylabel("mode energy")
# fspace.set_title("Fourier space")
# fspace.grid()
# fspace.legend(loc='best')
# plt.show()
# return "test_PoissonSolver_complex failed!"
#
# energy_correct = np.isclose(energy_fourier, energy_direct)
# field_correct = np.isclose(g.electric_field, anal_field(g.x)).all()
# potential_correct = np.isclose(g.potential, anal_potential(g.x)).all()
# assert field_correct and potential_correct and energy_correct, plots()
def test_PoissonSolver_energy_sine(_NG, ):
_L = 1
resolution_increase = _NG
N = _NG * resolution_increase
epsilon_0 = 1
x, dx = np.linspace(0, _L, N, retstep=True, endpoint=False)
anal_field = np.zeros((N, 3))
anal_field[:, 0] = -(2 * np.pi * np.cos(x * 2 * np.pi / _L))
charge_density_anal = ((2 * np.pi) ** 2 * np.sin(x * 2 * np.pi))
g = PeriodicTestGrid(1, _L, _NG, epsilon_0)
indices_in_denser_grid = np.searchsorted(x, g.x)
g.charge_density[:-1] = charge_density_anal[indices_in_denser_grid] # / resolution_increase
g.init_solve()
g.save_field_values(0)
g.postprocess()
energy_fourier = g.grid_energy_history[0]
energy_direct = g.direct_energy_calculation()
print("dx", dx, "fourier", energy_fourier, "direct", energy_direct, energy_fourier / energy_direct)
def plots():
fig, xspace = plt.subplots()
xspace.set_title(
r"Solving the Poisson equation $\Delta \psi = \rho / \epsilon_0$ via Fourier transform")
xspace.plot(g.x, g.charge_density, "ro--", label=r"$\rho$")
xspace.plot(x, charge_density_anal, "r-", lw=6, alpha=0.5, label=r"$\rho_a$")
        xspace.plot(g.x, g.electric_field, "bo--", alpha=0.5, label=r"$E$")
xspace.plot(x, anal_field, "b-", lw=6, alpha=0.5, label=r"$E_a$")
xspace.set_xlim(0, _L)
xspace.set_xlabel("$x$")
xspace.grid()
xspace.legend(loc='best')
fig2, fspace = plt.subplots()
fspace.plot(g.k_plot, g.energy_per_mode, "bo--", label=r"electric energy $\rho_F V_F^\dagger$")
fspace.set_xlabel("k")
fspace.set_ylabel("mode energy")
        fspace.set_title("Fourier space")
fspace.grid()
fspace.legend(loc='best')
plt.show()
return "test_PoissonSolver_complex failed!"
energy_correct = np.allclose(energy_fourier, energy_direct)
assert energy_correct, plots()
field_correct = np.allclose(g.electric_field[1:-1, 0], anal_field[indices_in_denser_grid][:, 0])
assert field_correct, plots()
def test_PoissonSolver_sheets(_NG, _L, _test_charge_density=1):
epsilon_0 = 1
x, dx = np.linspace(0, _L, _NG, retstep=True, endpoint=False)
charge_density = np.zeros_like(x)
region1 = (_L * 1 / 8 < x) * (x < _L * 2 / 8)
region2 = (_L * 5 / 8 < x) * (x < _L * 6 / 8)
charge_density[region1] = _test_charge_density
charge_density[region2] = -_test_charge_density
g = PeriodicTestGrid(1, _L, _NG, epsilon_0)
g.charge_density[:-1] = charge_density
g.init_solve()
def plots():
        fig, axes = plt.subplots(2)
ax0, ax1 = axes
ax0.plot(x, charge_density)
ax0.set_title("Charge density")
ax1.set_title("Field")
ax1.plot(x, g.electric_field, "r-")
for ax in axes:
ax.grid()
ax.legend()
plt.show()
return "test_PoissonSolver_sheets failed!"
polynomial_coefficients = np.polyfit(x[region1], g.electric_field[1:-1, 0][region1], 1)
first_bump_right = np.isclose(
polynomial_coefficients[0], _test_charge_density, rtol=1e-2)
assert first_bump_right, plots()
polynomial_coefficients = np.polyfit(x[region2], g.electric_field[1:-1, 0][region2], 1)
second_bump_right = np.isclose(
polynomial_coefficients[0], -_test_charge_density, rtol=1e-2)
assert second_bump_right, plots()
def test_PoissonSolver_ramp(_NG, _L):
    """ For a charge density rho = Ax + B
        d2phi/dx2 = -rho/epsilon_0
        set epsilon_0 to 1
        d2phi/dx2 = -Ax - B
        phi must be of form
        phi = -Ax^3/6 - Bx^2/2 + Cx + D"""
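    # Editorial note: on the periodic grid the mean of rho = a*x is effectively
    # removed, so Gauss's law dE/dx = rho/epsilon_0 integrates to the analytic
    # comparison field E(x) = a*(x - L/2)**2/2 used below (up to a constant).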
a = 1
# noinspection PyArgumentEqualDefault
g = PeriodicTestGrid(1, _L, _NG, epsilon_0=1)
g.charge_density[:-1] = a * g.x
g.init_solve()
field = a * (g.x - _L / 2) ** 2 / 2
def plots():
fig, axes = plt.subplots(2)
ax0, ax1 = axes
ax0.plot(g.x, g.charge_density)
ax0.set_title("Charge density")
ax1.set_title("Field")
ax1.plot(g.x, g.electric_field, "r-")
ax1.plot(g.x, field, "g-")
for ax in axes:
ax.grid()
ax.legend()
|
jmwenda/hypermap
|
hypermap/aggregator/tests/test_warper.py
|
Python
|
mit
| 1,877
| 0.001598
|
# -*- coding: utf-8 -*-
"""
Tests for the WMS Service Type.
"""
import unittest
from httmock import with_httmock
import mocks.warper
from aggregator.models import Service
class TestWarper(unittest.TestCase):
@with_httmock(mocks.warper.resource_get)
def test_create_wms_service(self):
# create the service
service = Service(
type='WARPER',
            url='http://warper.example.com/warper/maps',
)
service.save()
# check layer number
self.assertEqual(service.layer_set.all().count(), 15)
# check layer 0 (public)
layer_0 = service.layer_set.all()[0]
self.assertEqual(layer_0.name, '29568')
self.assertEqual(layer_0.title, 'Plate 24: Map bounded by Myrtle Avenue')
self.assertTrue(layer_0.is_public)
self.assertEqual(layer_0.keywords.all().count(), 0)
self.assertEqual(layer_0.srs.all().count(), 3)
self.assertEqual(layer_0.check_set.all().count(), 1)
self.assertEqual(layer_0.layerdate_set.all()[0].date, '1855-01-01')
# a layer with no bbox must be stored with None coordinates
layer_no_bbox = service.layer_set.get(name='16239')
self.assertEqual(layer_no_bbox.bbox_x0, None)
self.assertEqual(layer_no_bbox.bbox_y0, None)
self.assertEqual(layer_no_bbox.bbox_x1, None)
self.assertEqual(layer_no_bbox.bbox_y1, None)
        # test that creating a service that already exists does not duplicate it
# create the service
def create_duplicated_service():
duplicated_service = Service(
type='WARPER',
url='http://warper.example.com/warper/maps',
)
duplicated_service.save()
self.assertRaises(Exception, create_duplicated_service)
if __name__ == '__main__':
unittest.main()
|
wufangjie/leetcode
|
735. Asteroid Collision.py
|
Python
|
gpl-3.0
| 882
| 0
|
class Solution(object):
def asteroidCollision(self, asteroids):
"""
:type asteroids: List[int]
:rtype: List[int]
"""
ret = []
for elem in asteroids:
if elem > 0:
ret.append(elem)
else:
while ret:
if 0 < ret[-1] <= -elem:
temp = ret.pop()
if temp == -elem:
break
else:
if ret[-1] < 0:
ret.append(elem)
break
else:
ret.append(elem)
return ret
print(Solution().asteroidCollision([5, 10, -5]))
print(Solution().asteroidCollision([8, -8]))
print(Solution().asteroidCollision([10, 2, -5]))
print(Solution().asteroidCollision([-2, -1, 1, 2]))
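# Expected output of the four calls above (an editorial addition):
#   [5, 10]   []   [10]   [-2, -1, 1, 2]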
|
google-research/tensorflow-coder
|
tf_coder_colab_logging/serialization.py
|
Python
|
apache-2.0
| 6,543
| 0.004585
|
# Copyright 2021 The TF-Coder Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Serialization of objects relevant to TF-Coder.
This module will be used to send information from the public Colab notebook to
Google Analytics, in string form. Using BigQuery we can extract the strings that
were sent, and then parse the strings back into the Python objects that they
represent. The information we want to log includes:
* Input/output objects. Usually these are multidimensional lists, Tensors, or
SparseTensors, but in principle these can be anything that value search
supports (e.g., primitives, dtypes, tuples of Tensors etc.).
* Constants. Usually these are Python primitives, but again they may be
anything value search supports (e.g., a shape tuple).
* Natural language description. This should be a string and may contain tricky
characters like Unicode or quotes.
* Settings for the TF-Coder tool. These may use standard Python collections,
i.e. lists/tuples/sets/dicts. This category of information should be treated
generally to be future-proof.
* Results of the TF-Coder tool. These would include timestamps and solution
expressions.
* Other metadata, e.g., session/problem IDs, and whether the data can be
released in a dataset.
"""
import ast
from typing import Any, List, Text
import numpy as np
import tensorflow as tf
# Constant strings for dict representations of objects.
_KIND_KEY = 'kind'
_DTYPE_KIND = 'DType'
_TENSOR_KIND = 'Tensor'
_SPARSE_TENSOR_KIND = 'SparseTensor'
_DICT_KIND = 'Dict'
def _object_to_literal(to_serialize: Any, container_stack: List[Any]) -> Any:
"""Turns a supported object into a Python literal."""
if isinstance(to_serialize, (int, float, bool, str, bytes, type(None))):
return to_serialize
elif isinstance(to_serialize, tf.DType):
dtype_string = repr(to_serialize)
assert dtype_string.startswith('tf.')
dtype_string = dtype_string[len('tf.'):]
return {_KIND_KEY: _DTYPE_KIND,
'dtype': dtype_string}
elif isinstance(to_serialize, tf.Tensor):
tensor_content = to_serialize.numpy()
# Sometimes tensor_content is a numpy type, and sometimes it's a normal
# Python type.
if type(tensor_content).__module__ == np.__name__:
tensor_content = tensor_content.tolist()
return {_KIND_KEY: _TENSOR_KIND,
'content': tensor_content,
'dtype': _object_to_literal(to_serialize.dtype, container_stack)}
elif isinstance(to_serialize, tf.SparseTensor):
return {_KIND_KEY: _SPARSE_TENSOR_KIND,
'indices': _object_to_literal(to_serialize.indices,
container_stack),
'values': _object_to_literal(to_serialize.values, container_stack),
'dense_shape': _object_to_literal(to_serialize.dense_shape,
container_stack)}
elif isinstance(to_serialize, dict):
if any(to_serialize is seen for seen in container_stack):
raise ValueError('Cycle detected in object dependencies.')
container_stack.append(to_serialize)
result = {_object_to_literal(key, container_stack):
_object_to_literal(value, container_stack)
for key, value in to_serialize.items()}
container_stack.pop()
return {_KIND_KEY: _DICT_KIND,
'dict': result}
elif isinstance(to_serialize, (list, tuple, set)):
if any(to_serialize is seen for seen in container_stack):
raise ValueError('Cycle detected in object dependencies.')
container_stack.append(to_serialize)
generator = (_object_to_literal(x, container_stack) for x in to_serialize)
container_type = type(to_serialize)
result = container_type(generator)
container_stack.pop()
return result
else:
raise TypeError('Cannot convert object {} with type {} to a literal.'
.format(to_serialize, type(to_serialize)))
def _literal_to_object(literal: Any) -> Any:
"""Turns a literal created by _object_to_literal back into the object."""
if isinstance(literal, (int, float, bool, str, bytes, type(None))):
return literal
elif isinstance(literal, dict):
# If the dict was not created by _object_to_literal, we may throw KeyError.
kind = literal[_KIND_KEY]
if kind == _DTYPE_KIND:
return getattr(tf, literal['dtype'])
elif kind == _TENSOR_KIND:
return tf.constant(literal['content'],
dtype=_literal_to_object(literal['dtype']))
elif kind == _SPARSE_TENSOR_KIND:
return tf.SparseTensor(
indices=_literal_to_object(literal['indices']),
values=_literal_to_object(literal['values']),
dense_shape=_literal_to_object(literal['dense_shape']))
elif kind == _DICT_KIND:
return {_literal_to_object(key): _literal_to_object(value)
for key, value in literal['dict'].items()}
else:
raise ValueError('Unsupported kind in dict: {}'.format(kind))
elif isinstance(literal, (list, tuple, set)):
generator = (_literal_to_object(x) for x in literal)
container_type = type(literal)
return container_type(generator)
else:
raise TypeError('Cannot convert literal {} with type {} to an object.'
.format(literal, type(literal)))
def serialize(to_serialize: Any) -> Text:
"""Serializes an object into a string.
  Note: This does not work in Python 2 because its ast.literal_eval does not
support sets.
Args:
to_serialize: The object to serialize. This may be a Python literal (int,
float, boolean, string, or None), Tensor, SparseTensor, or
possibly-nested lists/tuples/sets/dicts of these.
Returns:
A string representation of the object.
"""
return repr(_object_to_literal(to_serialize, container_stack=[]))
def parse(serialized: Text) -> Any:
"""Unparses a string into an object (the inverse of serialize_object)."""
literal = ast.literal_eval(serialized)
return _literal_to_object(literal)
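# Editorial addition, a minimal round-trip demo of the two public functions
# above; it assumes an eager-mode TensorFlow (tf is already imported on top).
if __name__ == '__main__':
  demo = {'tensor': tf.constant([[1, 2], [3, 4]]), 'rate': 0.5, 'tags': {'a', 'b'}}
  encoded = serialize(demo)   # a repr() string built from nested literals
  decoded = parse(encoded)    # rebuilds the containers and the tf.Tensor
  assert decoded['rate'] == 0.5 and decoded['tags'] == {'a', 'b'}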
|
InfoSec-CSUSB/club-websystem
|
src/events/models.py
|
Python
|
mit
| 10,430
| 0.020997
|
from datetime import date, timedelta
from django.db import models
from django.contrib.sites.models import Site
from django.core.validators import MinValueValidator
from clubdata.models import Club
def range_date_inclusive(start_date, end_date):
for n in range((end_date - start_date).days+1):
yield start_date + timedelta(n)
def num_days_in_month(d):
dmonth = d.month
if dmonth == 12:
return 31
else:
return (d.replace(month=dmonth+1, day=1) - timedelta(days=1)).day
def last_day_in_month(d):
dmonth = d.month
if dmonth == 12:
return d.replace(day=31)
else:
return d.replace(month=dmonth+1, day=1) - timedelta(days=1)
def decode_weekly_criteria(criteria):
c = criteria.split(",")
dow_possible = ('mo','tu','we','th','fr','sa','su')
dow = [False,False,False,False,False,False,False]
for x in c: dow[dow_possible.index(x)] = True
return dow
def decode_monthly_criteria(criteria):
c = criteria.split(",")
specificdays = []
daystocalculate = []
dow_possible = ('mo','tu','we','th','fr','sa','su')
for x in c:
if x.isdigit():
# Specific numbered day (same every month)
specificdays.append(int(x))
else:
# A code to represent a day. We'll convert from strings to integers for later.
if x == 'last':
# Last day of the month (must be calculated later)
daystocalculate.append( (99, -1) )
else:
y,z = x.split("-")
if y == 'last':
# Last DOW of the month (must be calculated later)
daystocalculate.append( (99, dow_possible.index(z)) )
else:
# Specified DOW of the month (must be calculated later)
daystocalculate.append( (int(y), dow_possible.index(z)) )
return specificdays,daystocalculate
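# Worked example (an editorial addition):
#   decode_weekly_criteria("mo,we")
#     -> [True, False, True, False, False, False, False]
#   decode_monthly_criteria("15,1-mo,last")
#     -> specificdays == [15]
#     -> daystocalculate == [(1, 0), (99, -1)]   # first Monday; last day of month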
class RecurringEvent(models.Model):
DAILY = 100
WEEKLY = 200
MONTHLY = 300
rule_type_choices = (
(DAILY, 'Daily'),
(WEEKLY, 'Weekly'),
(MONTHLY, 'Monthly'),
)
id = models.AutoField(
primary_key=True)
# Range
starts_on = models.DateField('Starts on')
ends_on = models.DateField('Ends on')
# Rule
rule_type = models.IntegerField('Recurring rule',
choices=rule_type_choices,
default=WEEKLY)
repeat_each = models.IntegerField('Repeat each',
default=1,
validators=[MinValueValidator(1)])
criteria = models.CharField('Criteria',
max_length=200,
null=True, # Blank is stored as Null
blank=True) # Field is optional
class Meta:
verbose_name = 'Recurring Event'
verbose_name_plural = 'Recurring Events'
def __unicode__(self): #Python 3.3 is __str__
rt = self.rule_type
for t in self.rule_type_choices:
if t[0] == rt:
rt = t[1]
break
return "%s Event, %s to %s, Criteria=\"%s\"" % (rt, self.starts_on, self.ends_on, self.criteria)
def dates_per_rule_iter(self):
if self.rule_type == self.WEEKLY:
# criteria = Must be a comma-separated list of lowercase 2-letter abbreviations for the days
# of the week. Ex: mo,we,fr,su
# repeat_each = If this is 2, then every other week will be skipped. If it is 3,
# then two weeks will be skipped between each filled week. etc...
# Deconstruct the criteria
criteria = decode_weekly_criteria(self.criteria)
# Generate a list of dates that match
if self.repeat_each == 1:
# If repeat_each is 1, then our calculation is much simpler
for x in range_date_inclusive(self.starts_on, self.ends_on):
if criteria[x.weekday()]: yield x
else:
# Special handling because we're not doing every week
r = 2 # Set this to 2 so the first iteration will set it to 1
dow_begin = self.starts_on.weekday()
for x in range_date_inclusive(self.starts_on, self.ends_on):
wd = x.weekday()
if wd == dow_begin:
# It's the beginning of a new week (rather than assuming the user considers Monday to be
# the first day of the week, we use the DOW of the start of the range for this purpose.
if r == 1:
# Reset the counter
r = self.repeat_each
else:
# Decrease the counter
r -= 1
if r == 1:
# If counter is 1, then this week should be included
if criteria[wd]: yield x
elif self.rule_type == self.MONTHLY:
# criteria = Must be a comma-separated list of the following types of codes:
# * 1,2,3,4, etc specific days of the month
# * 1-mo, 3-fr, last-we, etc first Monday, third Friday, last Wednesday, etc.
# * last last day of the month
# repeat_each = If this is 2, then every other month will be skipped. If it is 3, then two
# months will be skipped between each filled month. etc...
# Deconstruct the criteria
specificdays,daystocalculate = decode_monthly_criteria(self.criteria)
# Generate a list of dates that match
calcdays = None
oneday = timedelta(days=1)
r = 2 # Set this to 2 so the first iteration will set it to 1
for x in range_date_inclusive(self.starts_on, self.ends_on):
xday = x.day
if (xday == 1) or (calcdays is None):
# It's the first day of the month (or first iteration of this loop)
if r == 1:
# Reset the counter
r = self.repeat_each
else:
# Decrease the counter
r -= 1
if r == 1: # Putting this within the above 'else' will malfunction if repeat_each is 1
# Since this month is included, we must turn those vague days into specific numbered days
# for this current month (each month is different, so they couldn't have been calculated earlier.
calcdays = []
for y in daystocalculate:
if y[0] == 99:
if y[1] == -1:
# Calculate the last day of the month
calcdays.append(num_days_in_month(x))
else:
# Calculate the last DOW of the month
end_date = last_day_in_month(x)
for z in range(end_date.day):
d = end_date - timedelta(z)
if d.weekday() == y[1]:
calcdays.append(d.day)
break
else:
# Calculate the specified DOW of the month
start_date = date(x.year, x.month, 1)
found_count = 0
for z in range(num_days_in_month(start_date)):
d = start_date + timedelta(z)
if d.weekday() == y[1]:
found_count += 1
if found_count == y[0]:
calcdays.append(z+1)
break
print(calcdays)
# Check if this month is included (not a skipped month per the repeat_each rule)
if r == 1:
if (xday in specificdays) or (xday in calcdays):
# Assuming the daystocalculate have been calculated (above), simply check if the day is
# in one of the two lists
yield x
elif self.rule_type == self.DAILY:
# criteria = Not used
# repeat_each = If this is 2, then every other day will be skipped. If it is 3, only every
# third day will be chosen. etc...
# Generate a list of dates that match
if self.repeat_each == 1:
# If repeat_each is 1, then our calculation is much simpler
for x in range_date_inclusive(self.starts_on, self.ends_on):
yield x
else:
# Use the repeat value.
r = self.repeat_each # Include the first day of the range, and then start counting from there
for x in range_date_inclusive(self.starts_on, self.ends_on):
if r == self.repeat_each:
|
kawamon/hue
|
desktop/core/ext-py/Django-1.11.29/tests/check_framework/tests_1_10_compatibility.py
|
Python
|
apache-2.0
| 688
| 0.001453
|
from django.core.checks.compatibility.django_1_10 import (
check_duplicate_middleware_settings,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings
class CheckDuplicateMiddlwareSettingsTest(SimpleTestCase):
    @override_settings(MIDDLEWARE=[], MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_duplicate_setting(self):
result = check_duplicate_middleware_settings(None)
self.assertEqual(result[0].id, '1_10.W001')
@override_settings(MIDDLEWARE=None)
def test_middleware_not_defined(self):
result = check_duplicate_middleware_settings(None)
self.assertEqual(len(result), 0)
|
rtucker-mozilla/inventory
|
migrate_dns/management/commands/dns_migrate.py
|
Python
|
bsd-3-clause
| 204
| 0.004902
|
from django.core.management.base import BaseCommand
from migrate_dns.import_utils import do_import
class Command(BaseCommand):
args = ''
def handle(self, *args, **options):
do_import()
|
wisechengyi/pants
|
tests/python/pants_test/binaries/test_binary_tool.py
|
Python
|
apache-2.0
| 6,038
| 0.003478
|
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
from pants.binaries.binary_tool import BinaryToolBase
from pants.binaries.binary_util import (
BinaryToolFetcher,
BinaryToolUrlGenerator,
BinaryUtil,
HostPlatform,
)
from pants.option.scope import GLOBAL_SCOPE
from pants.testutil.test_base import TestBase
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_file_dump
class DefaultVersion(BinaryToolBase):
options_scope = "default-version-test"
name = "default_version_test_tool"
default_version = "XXX"
class AnotherTool(BinaryToolBase):
options_scope = "another-tool"
name = "another_tool"
default_version = "0.0.1"
class ReplacingLegacyOptionsTool(BinaryToolBase):
# TODO: check scope?
options_scope = "replacing-legacy-options-tool"
name = "replacing_legacy_options_tool"
default_version = "a2f4ab23a4c"
replaces_scope = "old_tool_scope"
replaces_name = "old_tool_version"
class BinaryUtilFakeUname(BinaryUtil):
def host_platform(self):
return HostPlatform("xxx", "yyy")
class CustomUrlGenerator(BinaryToolUrlGenerator):
_DIST_URL_FMT = "https://custom-url.example.org/files/custom_urls_tool-{version}-{system_id}"
_SYSTEM_ID = {
"xxx": "zzz",
}
    def generate_urls(self, version, host_platform):
base = self._DIST_URL_FMT.format(
version=version, system_id=self._SYSTEM_ID[host_platform.os_name]
)
return [
base,
f"{base}-alternate",
]
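# Illustrative output (an editorial addition): for version "v2.3" on a host
# whose os_name is "xxx", generate_urls returns
#   ["https://custom-url.example.org/files/custom_urls_tool-v2.3-zzz",
#    "https://custom-url.example.org/files/custom_urls_tool-v2.3-zzz-alternate"]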
class CustomUrls(BinaryToolBase):
options_scope = "custom-urls"
name = "custom_urls_tool"
default_version = "v2.1"
def get_external_url_generator(self):
return CustomUrlGenerator()
def _select_for_version(self, version):
binary_request = self.make_binary_request(version)
return BinaryUtilFakeUname.Factory._create_for_cls(BinaryUtilFakeUname).select(
binary_request
)
# TODO: these should have integration tests which use BinaryTool subclasses overriding archive_type.
class BinaryToolBaseTest(TestBase):
def setUp(self):
super().setUp()
self._context = self.context(
for_subsystems=[DefaultVersion, AnotherTool, ReplacingLegacyOptionsTool, CustomUrls],
options={
GLOBAL_SCOPE: {
"binaries_baseurls": ["https://binaries.example.org"],
"pants_bootstrapdir": str(temporary_dir()),
},
"another-tool": {"version": "0.0.2",},
"default-version-test.another-tool": {"version": "YYY",},
"custom-urls": {"version": "v2.3",},
"old_tool_scope": {"old_tool_version": "3",},
},
)
def test_base_options(self):
# TODO: using extra_version_option_kwargs!
default_version_tool = DefaultVersion.global_instance()
self.assertEqual(default_version_tool.version(), "XXX")
another_tool = AnotherTool.global_instance()
self.assertEqual(another_tool.version(), "0.0.2")
another_default_version_tool = DefaultVersion.scoped_instance(AnotherTool)
self.assertEqual(another_default_version_tool.version(), "YYY")
def test_replacing_legacy_options(self):
replacing_legacy_options_tool = ReplacingLegacyOptionsTool.global_instance()
self.assertEqual(replacing_legacy_options_tool.version(), "a2f4ab23a4c")
self.assertEqual(replacing_legacy_options_tool.version(self._context), "3")
def test_urls(self):
default_version_tool = DefaultVersion.global_instance()
self.assertIsNone(default_version_tool.get_external_url_generator())
with self.assertRaises(BinaryUtil.BinaryResolutionError) as cm:
default_version_tool.select()
err_msg = str(cm.exception)
self.assertIn(BinaryToolFetcher.BinaryNotFound.__name__, err_msg)
self.assertIn("Failed to fetch default_version_test_tool binary from any source:", err_msg)
self.assertIn(
"Failed to fetch binary from https://binaries.example.org/bin/default_version_test_tool/XXX/default_version_test_tool:",
err_msg,
)
custom_urls_tool = CustomUrls.global_instance()
self.assertEqual(custom_urls_tool.version(), "v2.3")
with self.assertRaises(BinaryUtil.BinaryResolutionError) as cm:
custom_urls_tool.select()
err_msg = str(cm.exception)
self.assertIn(BinaryToolFetcher.BinaryNotFound.__name__, err_msg)
self.assertIn("Failed to fetch custom_urls_tool binary from any source:", err_msg)
self.assertIn(
"Failed to fetch binary from https://custom-url.example.org/files/custom_urls_tool-v2.3-zzz:",
err_msg,
)
self.assertIn(
"Failed to fetch binary from https://custom-url.example.org/files/custom_urls_tool-v2.3-zzz-alternate:",
err_msg,
)
def test_hackily_snapshot(self):
with temporary_dir() as temp_dir:
safe_file_dump(
os.path.join(
temp_dir,
"bin",
DefaultVersion.name,
DefaultVersion.default_version,
DefaultVersion.name,
),
"content!",
)
context = self.context(
for_subsystems=[DefaultVersion],
options={GLOBAL_SCOPE: {"binaries_baseurls": [f"file:///{temp_dir}"],},},
)
self.maxDiff = None
default_version_tool = DefaultVersion.global_instance()
_, snapshot = default_version_tool.hackily_snapshot(context)
self.assertEqual(
"51a98706ab7458069aabe01856cb352ca97686e3edd3bf9ebd3205c2b38b2974",
snapshot.directory_digest.fingerprint,
)
|
pexip/meson
|
test cases/common/229 custom_target source/x.py
|
Python
|
apache-2.0
| 132
| 0
|
#! /usr/bin/env python3
with open('x.c', 'w') as f:
print('int main(void) { return 0; }', file=f)
with open('y', 'w'):
pass
| |
e1ven/Waymoot
|
libs/tornado-2.2/build/lib/tornado/curl_httpclient.py
|
Python
|
mit
| 18,106
| 0.000663
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Blocking and non-blocking HTTP client implementations using pycurl."""
import io
import collections
import logging
import pycurl
import threading
import time
from tornado import httputil
from tornado import ioloop
from tornado import stack_context
from tornado.escape import utf8
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main
class CurlAsyncHTTPClient(AsyncHTTPClient):
def initialize(self, io_loop=None, max_clients=10,
max_simultaneous_connections=None):
self.io_loop = io_loop
self._multi = pycurl.CurlMulti()
self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
self._curls = [_curl_create(max_simultaneous_connections)
for i in range(max_clients)]
self._free_list = self._curls[:]
self._requests = collections.deque()
self._fds = {}
self._timeout = None
try:
self._socket_action = self._multi.socket_action
except AttributeError:
# socket_action is found in pycurl since 7.18.2 (it's been
# in libcurl longer than that but wasn't accessible to
# python).
            logging.warning("socket_action method missing from pycurl; "
                            "falling back to socket_all. Upgrading "
"libcurl and pycurl will improve performance")
self._socket_action = \
lambda fd, action: self._multi.socket_all()
# libcurl has bugs that sometimes cause it to not report all
        # relevant file descriptors and timeouts to TIMERFUNCTION/
# SOCKETFUNCTION. Mitigate the effects of such bugs by
# forcing a periodic scan of all active requests.
self._force_timeout_callback = ioloop.PeriodicCallback(
self._handle_force_timeout, 1000, io_loop=io_loop)
self._force_timeout_callback.start()
def close(self):
self._force_timeout_callback.stop()
for curl in self._curls:
curl.close()
self._multi.close()
self._closed = True
super(CurlAsyncHTTPClient, self).close()
def fetch(self, request, callback, **kwargs):
if not isinstance(request, HTTPRequest):
request = HTTPRequest(url=request, **kwargs)
self._requests.append((request, stack_context.wrap(callback)))
self._process_queue()
self._set_timeout(0)
def _handle_socket(self, event, fd, multi, data):
"""Called by libcurl when it wants to change the file descriptors
it cares about.
"""
event_map = {
pycurl.POLL_NONE: ioloop.IOLoop.NONE,
pycurl.POLL_IN: ioloop.IOLoop.READ,
pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE
}
if event == pycurl.POLL_REMOVE:
self.io_loop.remove_handler(fd)
del self._fds[fd]
else:
ioloop_event = event_map[event]
if fd not in self._fds:
self._fds[fd] = ioloop_event
self.io_loop.add_handler(fd, self._handle_events,
ioloop_event)
else:
self._fds[fd] = ioloop_event
self.io_loop.update_handler(fd, ioloop_event)
def _set_timeout(self, msecs):
"""Called by libcurl to schedule a timeout."""
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = self.io_loop.add_timeout(
time.time() + msecs/1000.0, self._handle_timeout)
def _handle_events(self, fd, events):
"""Called by IOLoop when there is activity on one of our
file descriptors.
"""
action = 0
if events & ioloop.IOLoop.READ: action |= pycurl.CSELECT_IN
if events & ioloop.IOLoop.WRITE: action |= pycurl.CSELECT_OUT
while True:
try:
ret, num_handles = self._socket_action(fd, action)
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self._finish_pending_requests()
def _handle_timeout(self):
"""Called by IOLoop when the requested timeout has passed."""
with stack_context.NullContext():
self._timeout = None
while True:
try:
ret, num_handles = self._socket_action(
pycurl.SOCKET_TIMEOUT, 0)
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self._finish_pending_requests()
# In theory, we shouldn't have to do this because curl will
# call _set_timeout whenever the timeout changes. However,
# sometimes after _handle_timeout we will need to reschedule
# immediately even though nothing has changed from curl's
# perspective. This is because when socket_action is
# called with SOCKET_TIMEOUT, libcurl decides internally which
# timeouts need to be processed by using a monotonic clock
# (where available) while tornado uses python's time.time()
# to decide when timeouts have occurred. When those clocks
# disagree on elapsed time (as they will whenever there is an
# NTP adjustment), tornado might call _handle_timeout before
# libcurl is ready. After each timeout, resync the scheduled
# timeout with libcurl's current state.
new_timeout = self._multi.timeout()
if new_timeout != -1:
self._set_timeout(new_timeout)
def _handle_force_timeout(self):
"""Called by IOLoop periodically to ask libcurl to process any
events it may have forgotten about.
"""
with stack_context.NullContext():
while True:
try:
ret, num_handles = self._multi.socket_all()
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self._finish_pending_requests()
def _finish_pending_requests(self):
"""Process any requests that were completed by the last
call to multi.socket_action.
"""
while True:
num_q, ok_list, err_list = self._multi.info_read()
for curl in ok_list:
self._finish(curl)
for curl, errnum, errmsg in err_list:
self._finish(curl, errnum, errmsg)
if num_q == 0:
break
self._process_queue()
def _process_queue(self):
with stack_context.NullContext():
while True:
started = 0
while self._free_list and self._requests:
started += 1
curl = self._free_list.pop()
(request, callback) = self._requests.popleft()
curl.info = {
"headers": httputil.HTTPHeaders(),
"buffer": io.StringIO(),
"request": request,
"callback": callback,
"curl_start_time": time.time(),
}
# Disable IPv6 to mitigate the effects of this bug
|
nmichaud/enable-mapping
|
mapping/enable/api.py
|
Python
|
bsd-3-clause
| 273
| 0
|
from canvas import MappingCanvas
from viewport import MappingViewport
try:
from geojson_overlay import GeoJSONOverlay
except ImportError:
# No geojson
pass
# Tile managers
from mbtile_manager import MBTileManager
from http_tile_manager import HTTPTileManager
|
stackArmor/security_monkey
|
migrations/versions/ea2739ecd874_.py
|
Python
|
apache-2.0
| 646
| 0.003096
|
"""Ensures that account.identifier is unique.
Revision ID: ea2739ecd874
Revises: 5bd631a1b748
Create Date: 2017-09-29 09:16:09.436339
"""
# revision identifiers, used by Alembic.
revision = 'ea2739ecd874'
down_revision = '5bd631a1b748'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_unique_constraint(None, 'account', ['identifier'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'account', type_='unique')
# ### end Alembic commands ###
|
Iconik/eve-suite
|
src/model/static/sta/services.py
|
Python
|
gpl-3.0
| 624
| 0.00641
|
from model.flyweight import Flyweight
from model.static.database import database
class Service(Flyweight):
def __init__(self,service_id):
#prevents reinitializing
if "_in
|
ited" in self.__dict__:
return
self._inited = None
#prevents reinitializing
self.service_id = service_id
cursor = database.get_cursor(
"select * from staServices where serviceID={};".format(
self.service_id))
row = cursor.fetchone()
self.service_name = row["serviceName"]
self.description = row["description"]
cursor.close()
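A minimal usage sketch of the flyweight behaviour above, assuming the Flyweight base class caches one instance per service_id (the ID 5 and a populated staServices table are illustrative assumptions):
a = Service(5)  # hypothetical service ID; requires a reachable database
b = Service(5)
assert a is b  # the Flyweight base is assumed to return the cached instance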
|
IBT-FMI/SAMRI
|
samri/fetch/test/test_local.py
|
Python
|
gpl-3.0
| 2,202
| 0.052225
|
import numpy as np
def test_prepare_abi_connectivity_maps():
from samri.fetch.local import prepare_abi_connectivity_maps
prepare_abi_connectivity_maps('Ventral_tegmental_area',
invert_lr_experiments=[
"127651139",
"127796728",
"127798146",
"127867804",
"156314762",
"160539283",
"160540751",
"165975096",
"166054222",
"171021829",
"175736945",
"278178382",
"292958638",
"301062306",
"304337288",
],
)
def test_prepare_feature_map():
from samri.fetch.local import prepare_feature_map
prepare_feature_map('/usr/share/ABI-connectivity-data/Ventral_tegmental_area-127651139/',
invert_lr=True,
save_as='/var/tmp/samri_testing/pytest/vta_127651139.nii.gz',
)
def test_summary_atlas():
from samri.fetch.local import summary_atlas
mapping='/usr/share/mouse-brain-templates/dsurqe_labels.csv'
atlas='/usr/share/mouse-brain-templates/dsurqec_40micron_labels.nii'
summary={
1:{
'structure':'Hippocampus',
'summarize':['CA'],
'laterality':'right',
},
2:{
'structure':'Hippocampus',
'summarize':['CA'],
'laterality':'left',
},
3:{
'structure':'Cortex',
'summarize':['cortex'],
'laterality':'right',
},
4:{
'structure':'Cortex',
'summarize':['cortex'],
'laterality':'left',
},
}
new_atlas, new_mapping = summary_atlas(atlas,mapping,
summary=summary,
)
new_atlas_data = new_atlas.get_data()
output_labels = np.unique(new_atlas_data).tolist()
target_labels = [0,]
target_labels.extend([i for i in summary.keys()])
assert output_labels == target_labels
def test_roi_from_atlaslabel():
from samri.fetch.local import roi_from_atlaslabel
mapping='/usr/share/mouse-brain-templates/dsurqe_labels.csv'
atlas='/usr/share/mouse-brain-templates/dsurqec_40micron_labels.nii'
my_roi = roi_from_atlaslabel(atlas,
mapping=mapping,
label_names=['cortex'],
)
roi_data = my_roi.get_data()
output_labels = np.unique(roi_data).tolist()
assert output_labels == [0, 1]
my_roi = roi_from_atlaslabel(atlas,
mapping=mapping,
label_names=['cortex'],
output_label=3,
)
roi_data = my_roi.get_data()
output_labels = np.unique(roi_data).tolist()
assert output_labels == [0, 3]
|
crosswalk-project/crosswalk-test-suite
|
apptools/apptools-android-tests/apptools/manifest_xwalk_target_platforms.py
|
Python
|
bsd-3-clause
| 6,310
| 0.000792
|
#!/usr/bin/env python
#
# Copyright (c) 2016 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Liu, Yun <yunx.liu@intel.com>
import unittest
import os
import comm
class TestCrosswalkApptoolsFunctions(unittest.TestCase):
def test_setting_value(self):
comm.setUp()
os.chdir(comm.XwalkPath)
comm.clear("org.xwalk.test")
os.mkdir("org.xwalk.test")
os.chdir('org.xwalk.test')
cmd = comm.HOST_PREFIX + comm.PackTools + \
"crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + " --crosswalk=" + comm.crosswalkzip + " ../../testapp/manifest_xwalk_target_platforms/windows_platform/"
(return_code, output) = comm.getstatusoutput(cmd)
apks = os.listdir(os.getcwd())
apkLength = 0
if comm.MODE != " --android-shared":
for i in range(len(apks)):
if apks[i].endswith(".apk") and "x86" in apks[i]:
if comm.BIT == "64":
self.assertIn("64", apks[i])
apkLength = apkLength + 1
if apks[i].endswith(".apk") and "arm" in apks[i]:
if comm.BIT == "64":
self.assertIn("64", apks[i])
apkLength = apkLength + 1
self.assertEquals(apkLength, 2)
else:
for i in range(len(apks)):
if apks[i].endswith(".apk") and "shared" in apks[i]:
apkLength = apkLength + 1
appVersion = apks[i].split('-')[1]
self.assertEquals(apkLength, 1)
comm.run(self)
comm.clear("org.xwalk.test")
self.assertEquals(return_code, 0)
self.assertIn("Loading 'android' platform backend", output[0])
self.assertNotIn("Loading 'windows' platform backend", output[0])
def test_without_platforms(self):
comm.setUp()
os.chdir(comm.XwalkPath)
comm.clear("org.xwalk.test")
os.mkdir("org.xwalk.test")
os.chdir('org.xwalk.test')
cmd = comm.HOST_PREFIX + comm.PackTools + \
"crosswalk-pkg --android=" + comm.ANDROID_MODE + " --crosswalk=" + comm.crosswalkzip + " ../../testapp/create_package_basic/"
(return_code, output) = comm.getstatusoutput(cmd)
apks = os.listdir(os.getcwd())
apkLength = 0
if comm.MODE != " --android-shared":
for i in range(len(apks)):
if apks[i].endswith(".apk") and "x86" in apks[i]:
if comm.BIT == "64":
self.assertIn("64", apks[i])
apkLength = apkLength + 1
if apks[i].endswith(".apk") and "arm" in apks[i]:
if comm.BIT == "64":
self.assertIn("64", apks[i])
apkLength = apkLength + 1
self.assertEquals(apkLength, 2)
else:
for i in range(len(apks)):
if apks[i].endswith(".apk") and "shared" in apks[i]:
apkLength = apkLength + 1
appVersion = apks[i].split('-')[1]
self.assertEquals(apkLength, 1)
comm.run(self)
comm.clear("org.xwalk.test")
self.assertEquals(return_code, 0)
self.assertIn("Loading 'android' platform backend", output[0])
def test_with_target_platforms(self):
comm.setUp()
os.chdir(comm.XwalkPath)
comm.clear("org.xwalk.test")
os.mkdir("org.xwalk.test")
os.chdir('org.xwalk.test')
cmd = comm.HOST_PREFIX + comm.PackTools + \
"crosswalk-pkg --android=" + comm.ANDROID_MODE + " --crosswalk=" + comm.crosswalkzip + " ../../testapp/manifest_xwalk_target_platforms/android_platform/"
(return_code, output) = comm.getstatusoutput(cmd)
apks = os.listdir(os.getcwd())
apkLength = 0
if comm.MODE != " --android-shared":
for i in range(len(apks)):
if apks[i].endswith(".apk") and "x86" in apks[i]:
if comm.BIT == "64":
self.assertIn("64", apks[i])
apkLength = apkLength + 1
if apks[i].endswith(".apk") and "arm" in apks[i]:
if comm.BIT == "64":
self.assertIn("64", apks[i])
apkLength = apkLength + 1
self.assertEquals(apkLength, 2)
else:
for i in range(len(apks)):
if apks[i].endswith(".apk") and "shared" in apks[i]:
apkLength = apkLength + 1
appVersion = apks[i].split('-')[1]
self.assertEquals(apkLength, 1)
comm.run(self)
comm.clear("org.xwalk.test")
self.assertEquals(return_code, 0)
self.assertIn("Loading 'android' platform backend", output[0])
if __name__ == '__main__':
unittest.main()
|
seryl/Nodetraq
|
nodetraq/tests/functional/test_pools.py
|
Python
|
mit
| 200
| 0.005
|
from nodetraq.tests import *
class TestPoolsController(TestController):
def test_index(self):
response = self.app.get(url(controller='pools', action='index'))
# Test response...
|
ajylee/gpaw-rtxs
|
gpaw/test/cmr_append.py
|
Python
|
gpl-3.0
| 2,763
| 0.003257
|
# this example shows how to append new calculated results to an already
# existing cmr file, illustrated for calculation of PBE energy on LDA density
import os
import cmr
# set True in order to use cmr in parallel jobs!
cmr.set_ase_parallel(enable=True)
from ase.structure import molecule
from ase.io import read, write
from ase.parallel import barrier, rank
from gpaw import GPAW, restart
from gpaw.test import equal
# define the project in order to find it in the database!
project_id = 'modify cmr file after gpw restart'
formula = 'H2'
vacuum = 2.0
xc = 'LDA'
mode = 'lcao'
h = 0.20
cmr_params = {
'db_keywords': [project_id],
# add project_id also as a field to support search across projects
'project_id': project_id,
# user's tags: xc tag will be set later for illustration purpose!
'formula': formula,
'vacuum': vacuum,
'mode': mode,
'h': h,
}
cmrfile = formula + '.cmr'
system1 = molecule(formula)
system1.center(vacuum=vacuum)
# first calculation: LDA lcao
calc = GPAW(mode=mode, xc=xc, h=h, txt=None)
system1.set_calculator(calc)
e = system1.get_potential_energy()
calc.write(formula)
# read gpw file
system2, calc2 = restart(formula, txt=None)
# write the information 'as in' gpw file into db file
# (called *db to avoid conflict with the *cmr file below)
if 1: # not used in this example
calc2.write(formula + '.db', cmr_params=cmr_params)
# write the information 'as in' corresponding trajectory file into cmr file
write(cmrfile, system2, cmr_params=cmr_params)
# add the xc tag to the cmrfile
assert os.path.exists(cmrfile)
data = cmr.read(cmrfile)
data.set_user_variable('xc', xc)
data.write(cmrfile)
# perform PBE calculation on LDA density
ediff = calc2.get_xc_difference('PBE')
# add new results to the cmrfile
assert os.path.exists(cmrfile)
data = cmr.read(cmrfile)
data.set_user_variable('PBE', data['ase_potential_energy'] + ediff)
data.write(cmrfile)
# analyse the results with CMR
# cmr readers work only in serial!
from cmr.ui import DirectoryReader
if rank == 0:
reader = DirectoryReader(directory='.', ext='.cmr')
# read all compounds in the project with lcao
all = reader.find(name_value_list=[('mode', 'lcao')],
keyword_list=[project_id])
results = all.get('formula', formula)
print results['formula'], results['xc'], results['ase_potential_energy']
# column_length=0 aligns data in the table (-1 : data unaligned is default)
all.print_table(column_length=0,
columns=['formula', 'xc', 'h', 'ase_potential_energy', 'PBE'])
if rank == 0:
equal(results['PBE'], e + ediff, 1e-6)
if rank == 0:
for file in [formula + '.gpw', formula + '.db', cmrfile]:
if os.path.exists(file): os.unlink(file)
|
GovReady/govready-q
|
guidedmodules/migrations/0011_modulequestion_answer_type_module.py
|
Python
|
gpl-3.0
| 1,551
| 0.002579
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-22 16:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
def forwards_func(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
m_model = apps.get_model("guidedmodules", "Module")
mq_model = apps.get_model("guidedmodules", "ModuleQuestion")
db_alias = schema_editor.connection.alias
for obj in mq_model.objects.all():
if obj.spec.get("module-id"):
mq_model.objects\
.filter(id=obj.id)\
.update(answer_type_module=m_model.objects.get(id=obj.spec["module-id"]))
class Migration(migrations.Migration):
dependencies = [
('guidedmodules', '0010_auto_20160809_1500'),
]
operations = [
migrations.AddField(
model_name='modulequestion',
name='answer_type_module',
field=models.ForeignKey(
blank=True,
help_text=(
'For module and module-set typed questions, this is the Module that'
' Tasks that answer this question must be for.'
),
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='is_type_of_answer_to',
to='guidedmodules.Module',
),
),
migrations.RunPython(forwards_func, migrations.RunPython.noop),
]
|
balajikris/autorest
|
src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/SubscriptionIdApiVersion/microsoftazuretesturl/credentials.py
|
Python
|
mit
| 731
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.authentication import (
BasicAuthentication,
BasicTokenAuthentication,
OAuthTokenAuthentication)
from msrestazure.azure_active_directory import (
InteractiveCredentials,
ServicePrincipalCredentials,
UserPassCredentials)
|
hillwoodroc/deepin-music-player
|
src/widget/combo.py
|
Python
|
gpl-3.0
| 10,334
| 0.009386
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 ~ 2012 Deepin, Inc.
# 2011 ~ 2012 Hou Shaohui
#
# Author: Hou Shaohui <houshao55@gmail.com>
# Maintainer: Hou Shaohui <houshao55@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gtk
import gobject
from dtk.ui.menu import Menu
from dtk.ui.draw import draw_pixbuf
from dtk.ui.label import Label
from dtk.ui.utils import propagate_expose
from dtk.ui.constant import BUTTON_PRESS, BUTTON_NORMAL, BUTTON_HOVER
import dtk.ui.tooltip as Tooltip
from widget.skin import app_theme
from nls import _
class ComboItem(gtk.Button):
def __init__(self, bg_image_group, icon_group, index, set_index, get_index):
gtk.Button.__init__(self)
# Init.
self.index = index
self.set_index = set_index
self.get_index = get_index
self.icon_group = icon_group
self.bg_image_group = bg_image_group
self.resize_button()
# connect
self.connect("clicked", self.update_button_index)
self.connect("expose-event", self.expose_button_cb)
def expose_button_cb(self, widget, event):
# Init.
rect = widget.allocation
bg_normal_dpixbuf, bg_hover_dpixbuf, bg_press_dpixbuf = self.bg_image_group
fg_normal_dpixbuf, fg_hover_dpixbuf, fg_press_dpixbuf = self.icon_group
select_index = self.get_index()
bg_image = bg_normal_dpixbuf.get_pixbuf()
fg_image = fg_normal_dpixbuf.get_pixbuf()
if widget.state == gtk.STATE_NORMAL:
if select_index == self.index:
select_status = BUTTON_PRESS
else:
select_status = BUTTON_NORMAL
elif widget.state == gtk.STATE_PRELIGHT:
if select_index == self.index:
select_status = BUTTON_PRESS
else:
select_status = BUTTON_HOVER
elif widget.state == gtk.STATE_ACTIVE:
select_status = BUTTON_PRESS
if select_status == BUTTON_NORMAL:
bg_image = bg_normal_dpixbuf.get_pixbuf()
fg_image = fg_normal_dpixbuf.get_pixbuf()
elif select_status == BUTTON_HOVER:
bg_image = bg_hover_dpixbuf.get_pixbuf()
fg_image = fg_hover_dpixbuf.get_pixbuf()
elif select_status == BUTTON_PRESS:
bg_image = bg_press_dpixbuf.get_pixbuf()
fg_image = fg_press_dpixbuf.get_pixbuf()
image_width = bg_image.get_width()
image_height = bg_image.get_height()
fg_rect_x = rect.x + (image_width - fg_image.get_width()) / 2
fg_rect_y = rect.y + (image_height - fg_image.get_height()) / 2
cr = widget.window.cairo_create()
draw_pixbuf(cr, bg_image, rect.x, rect.y)
draw_pixbuf(cr, fg_image, fg_rect_x, fg_rect_y)
propagate_expose(widget, event)
return True
def resize_button(self):
normal_dpixbuf = self.bg_image_group[0]
request_width = normal_dpixbuf.get_pixbuf().get_width()
request_height = normal_dpixbuf.get_pixbuf().get_height()
self.set_size_request(request_width, request_height)
def update_icon_group(self, new_group):
self.icon_group = new_group
def update_button_index(self, widget):
self.set_index(self.index)
class ComboButton(gtk.Button):
def __init__(self, bg_image_group, icon_group):
gtk.Button.__init__(self)
# Init.
self.icon_group = icon_group
self.bg_image_group = bg_image_group
self.resize_button()
# connect
self.connect("expose-event", self.expose_button_cb)
def expose_button_cb(self, widget, event):
# Init.
rect = widget.allocation
bg_normal_dpixbuf, bg_hover_dpixbuf, bg_press_dpixbuf = self.bg_image_group
fg_normal_dpixbuf, fg_hover_dpixbuf, fg_press_dpixbuf = self.icon_group
if widget.state == gtk.STATE_NORMAL:
bg_image = bg_normal_dpixbuf.get_pixbuf()
fg_image = fg_normal_dpixbuf.get_pixbuf()
elif widget.state == gtk.STATE_PRELIGHT:
bg_image = bg_hover_dpixbuf.get_pixbuf()
fg_image = fg_hover_dpixbuf.get_pixbuf()
elif widget.state == gtk.STATE_ACTIVE:
bg_image = bg_press_dpixbuf.get_pixbuf()
fg_image = fg_press_dpixbuf.get_pixbuf()
image_width = bg_image.get_width()
image_height = bg_image.get_height()
fg_rect_x = rect.x + (image_width - fg_image.get_width()) / 2
fg_rect_y = rect.y + (image_height - fg_image.get_height()) / 2
cr = widget.window.cairo_create()
draw_pixbuf(cr, bg_image, rect.x, rect.y)
draw_pixbuf(cr, fg_image, fg_rect_x, fg_rect_y)
propagate_expose(widget, event)
return True
def resize_button(self):
normal_dpixbuf = self.bg_image_group[0]
request_width = normal_dpixbuf.get_pixbuf().get_width()
request_height = normal_dpixbuf.get_pixbuf().get_height()
self.set_size_request(request_width, request_height)
def update_icon_group(self, new_group):
self.icon_group = new_group
class ComboMenuButton(gtk.HBox):
__gsignals__ = {
"list-actived" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
"combo-actived" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_STRING,))
}
def __init__(self, init_index=0):
super(ComboMenuButton, self).__init__()
self.current_index = init_index
self.current_status = "artist"
self.set_spacing(0)
self.msg_content = _("By artist")
self.list_button = ComboItem(
(app_theme.get_pixbuf("combo/left_normal.png"),
app_theme.get_pixbuf("combo/left_hover.png"),
app_theme.get_pixbuf("combo/left_press.png")),
(app_theme.get_pixbuf("combo/list_normal.png"),
app_theme.get_pixbuf("combo/list_normal.png"),
app_theme.get_pixbuf("combo/list_press.png")
), 0, self.set_index, self.get_index)
Tooltip.text(self.list_button, _("List view"))
# draw left_button.
self.left_button = gtk.Button()
self.left_button = ComboItem(
(app_theme.get_pixbuf("combo/left_normal.png"),
app_theme.get_pixbuf("combo/left_hover.png"),
app_theme.get_pixbuf("combo/left_press.png")),
(app_theme.get_pixbuf("combo/artist_normal.png"),
app_theme.get_pixbuf("combo/artist_normal.png"),
app_theme.get_pixbuf("combo/artist_press.png")
), 1, self.set_index, self.get_index)
Tooltip.custom(self.left_button, self.get_msg_label).always_update(self.left_button, True)
# draw right_button.
self.right_button = ComboButton(
(app_theme.get_pixbuf("combo/right_normal.png"),
app_theme.get_pixbuf("combo/right_hover.png"),
app_theme.get_pixbuf("combo/right_hover.png")),
(app_theme.get_pixbuf("combo/triangle_normal.png"),
app_theme.get_pixbuf("combo/triangle_normal.png"),
app_theme.get_pixbuf("combo/triangle_press.png")
))
|
maximumG/exscript
|
tests/Exscript/AccountPoolTest.py
|
Python
|
mit
| 4,218
| 0.000948
|
from builtins import range
import sys
import unittest
import re
import os.path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
from Exscript import Account
from Exscript.account import AccountPool
from Exscript.util.file import get_accounts_from_file
class AccountPoolTest(unittest.TestCase):
CORRELATE = AccountPool
def setUp(self):
self.user1 = 'testuser1'
self.password1 = 'test1'
self.account1 = Account(self.user1, self.password1)
self.user2 = 'testuser2'
self.password2 = 'test2'
self.account2 = Account(self.user2, self.password2)
self.accm = AccountPool()
def testConstructor(self):
accm = AccountPool()
self.assertEqual(accm.n_accounts(), 0)
accm = AccountPool([self.account1, self.account2])
self.assertEqual(accm.n_accounts(), 2)
def testAddAccount(self):
self.assertEqual(self.accm.n_accounts(), 0)
self.accm.add_account(self.account1)
self.assertEqual(self.accm.n_accounts(), 1)
self.accm.add_account(self.account2)
self.assertEqual(self.accm.n_accounts(), 2)
def testReset(self):
self.testAddAccount()
self.accm.reset()
self.assertEqual(self.accm.n_accounts(), 0)
def testHasAccount(self):
self.assertEqual(self.accm.has_account(self.account1), False)
self.accm.add_account(self.account1)
self.assertEqual(self.accm.has_account(self.account1), True)
def testGetAccountFromHash(self):
account = Account('user', 'test')
thehash = account.__hash__()
self.accm.add_account(account)
self.assertEqual(self.accm.get_account_from_hash(thehash), account)
def testGetAccountFromName(self):
self.testAddAccount()
self.assertEqual(self.account2,
self.accm.get_account_from_name(self.user2))
def testNAccounts(self):
self.testAddAccount()
def testAcquireAccount(self):
self.testAddAccount()
self.accm.acquire_account(self.account1)
self.account1.release()
self.accm.acquire_account(self.account1)
self.account1.release()
# Add three more accounts.
filename = os.path.join(os.path.dirname(__file__), 'account_pool.cfg')
self.accm.add_account(get_accounts_from_file(filename))
self.assertEqual(self.accm.n_accounts(), 5)
for i in range(0, 2000):
# Each time an account is acquired a different one should be
# returned.
acquired = {}
for n in range(0, 5):
account = self.accm.acquire_account()
self.assertTrue(account is not None)
self.assertNotIn(account.get_name(), acquired)
acquired[account.get_name()] = account
# Release one account.
acquired['abc'].release()
# Acquire one account.
account = self.accm.acquire_account()
self.assertEqual(account.get_name(), 'abc')
# Release all accounts.
for account in list(acquired.values()):
account.release()
def testReleaseAccounts(self):
account1 = Account('foo')
account2 = Account('bar')
pool = AccountPool()
pool.add_account(account1)
pool.add_account(account2)
pool.acquire_account(account1, 'one')
pool.acquire_account(account2, 'two')
self.assertNotIn(account1, pool.unlocked_accounts)
self.assertNotIn(account2, pool.unlocked_accounts)
pool.release_accounts('one')
self.assertIn(account1, pool.unlocked_accounts)
self.assertNotIn(account2, pool.unlocked_accounts)
pool.release_accounts('one')
self.assertIn(account1, pool.unlocked_accounts)
self.assertNotIn(account2, pool.unlocked_accounts)
pool.release_accounts('two')
self.assertIn(account1, pool.unlocked_accounts)
self.assertIn(account2, pool.unlocked_accounts)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(AccountPoolTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
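For orientation, the acquire/release cycle these tests exercise looks roughly like this in client code (a sketch; the credentials are placeholders):
pool = AccountPool([Account('user1', 'secret1')])
account = pool.acquire_account()  # blocks until an account is available
try:
    pass  # ... run commands with the account ...
finally:
    account.release()  # return the account to pool.unlocked_accounts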
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py
|
Python
|
mit
| 3,345
| 0.001495
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .linked_service import LinkedService
class SapHanaLinkedService(LinkedService):
"""SAP HANA Linked Service.
:param additional_properties: Unmatched properties from the message are
deserialized to this collection
:type additional_properties: dict[str, object]
:param connect_via: The integration runtime reference.
:type connect_via:
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param type: Constant filled by server.
:type type: str
:param server: Host name of the SAP HANA server. Type: string (or
Expression with resultType string).
:type server: object
:param authentication_type: The authentication type to be used to connect
to the SAP HANA server. Possible values include: 'Basic', 'Windows'
:type authentication_type: str or
~azure.mgmt.datafactory.models.SapHanaAuthenticationType
:param user_name: Username to access the SAP HANA server. Type: string (or
Expression with resultType string).
:type user_name: object
:param password: Password to access the SAP HANA server.
:type password: ~azure.mgmt.datafactory.models.SecureString
:param encrypted_credential: The encrypted credential used for
authentication. Credentials are encrypted using the integration runtime
credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object
"""
_validation = {
'type': {'required': True},
'server': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'server': {'key': 'typeProperties.server', 'type': 'object'},
'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecureString'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
def __init__(self, server, additional_properties=None, connect_via=None, description=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None):
super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
self.server = server
self.authentication_type = authentication_type
self.user_name = user_name
self.password = password
self.encrypted_credential = encrypted_credential
self.type = 'SapHana'
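A minimal construction sketch following the docstring above; the host and credentials are placeholders, and SecureString is assumed to be importable from the same models package:
from azure.mgmt.datafactory.models import SecureString

linked_service = SapHanaLinkedService(
    server='example-hana-host',  # placeholder host name
    authentication_type='Basic',
    user_name='example_user',  # placeholder credentials
    password=SecureString(value='example-password'),
)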
|
franck-talbart/codelet_tuning_infrastructure
|
ctr-common/plugins/4e7420cd-904e-4c2a-b08f-02c867ba4cd8/wiki2man.py
|
Python
|
gpl-3.0
| 27,388
| 0.005226
|
# -*- coding: UTF-8 -*-
import sys
WorkList = None
def SH(i):
"""reformatting .SH"""
global WorkList
string = WorkList[i]
l = len(string) - 2
r = 0
while string[0] == '=' and string[l] == '=':
WorkList[i] = string[1:l]
string = WorkList[i]
l = len(string) - 1
r = r + 1
if r == 2:
WorkList[i] = '\n.SH "' + string + '"\n.PP\n'
else:
WorkList[i] = '\n.SS "' + string + '"\n.PP\n'
#---------------------------------------------------------------------------
def TP(i):
"""reformatting .TP"""
global WorkList
string = WorkList[i]
l=0
string1 = WorkList[i + l]
while string1 != '' and string1[0] == ';':
j=0
finish = 0
nexcl = 1
s = 0
while len(string) > j and finish == 0:
if string[j:j+8] == '<nowiki>':
nexcl = 0
j = j + 7
elif string[j:j+9] == '</nowiki>':
nexcl = 1
j = j + 8
elif string[j:j+4] == '<!--':
nexcl = 0
j = j + 3
elif string[j:j+3] == '-->':
nexcl = 1
j = j + 2
if string[j] == ':':
s = 1
finish = nexcl * s
s = 0
j = j + 1
if len(string) == j:
WorkList[i] = '.TP\n.B ' + string[1:]
elif string[j-1] == ':':
WorkList[i] = '.TP\n.B ' + string[1:j-1] + '\n' + string[j:]
l = l + 1
string1 = WorkList[i+l]
while string1 != '' and string1[0] == ':' and string1[1] != ':' and string1[1] != ';':
WorkList[i + l] = '.br\n' + string1[1:]
l = l + 1
string1 = WorkList[i + l]
#---------------------------------------------------------------------------
def wiki2man(content):
global WorkList
string = '\n'
string = unicode(string, 'utf-8')
WorkList = [string]
cpt = 0
while string != '' and cpt < len(content):
string = content[cpt]
cpt += 1
WorkList.append(string)
path = sys.argv[0]
n = len(path)
n = n - 11
path = path[:n]
########## Reformatting from wiki to roff ##########
# TH:
string = WorkList[1];
if len(string) > 2 and string[0] != '=' and string[:4] != '<!--' and string[:2] != '{{':
i = 0
while len(string) > i and string[i] != '(':
i = i + 1
WorkList.pop(1)
WorkList.pop(0)
i = 0
tabacc = -1
tab = 0
tab2 = 0
col = 0
nf = 0
nr = 0
excl = 0
nowiki = 0
RS=0
strng = unicode('{{MAN индекс}}', 'utf-8')
while len(WorkList) > i:
string = WorkList[i]
if len(string) > 1:
# reformatting "nowiki"
if string[:9] == '</nowiki>':
WorkList[i] = string[9:]
nowiki = 0
if nowiki == 0:
# reformatting "pre"
if string[:6] == '</pre>':
WorkList[i] = '\n.fi\n.RE\n' + string[6:]
nf = 0
# reformatting "tt"
elif string[:5] == '</tt>':
if string[6:7] == '. ':
WorkList[i] = '\n.fi\n.RE\n' + string[7:]
elif len(string) > 6 and string[6] == '.':
WorkList[i] = '\n.fi\n.RE\n' + string[6:]
else:
WorkList[i] = '\n.fi\n.RE\n' + string[5:]
nf = 0
# reformatting " "
if string[0] == ' ':
if nf == 0:
nf = 1
WorkList[i] = '\n.RS\n.nf\n' + string
elif nf == 1:
WorkList[i] = string
else:
if nf == 1:
nf = 0
WorkList[i] = '\n.fi\n.RE\n'
WorkList.insert(i+1, string)
string = WorkList[i]
if nf != 2 and nowiki == 0:
# reformatting excluded text <!-- * -->
if excl == 1:
WorkList[i] = '.\" ' + string[0:]
string = WorkList[i]
if nf == 0:
# format titles
if string[0] == '=' and string[len(string)-2] == '=':
SH(i)
# format ";"
elif string[0] == ';':
TP(i)
# format ":..."
elif string[0] == ':':
l = 1
s = ''
while string[l] == ':':
l = l + 1;
if RS == l:
s = '\n.br\n'
elif RS < l:
while RS < l:
s = s + '.RS\n'
RS = RS + 1
if string[RS] == ';':
WorkList[i] = s + '.TP\n.B ' + string[RS+1:]
else:
WorkList[i] = s + string[RS:]
string = WorkList[i]
stri = WorkList[i+1]
if RS > 0 and stri[0] != ':':
while RS > 0:
WorkList[i] = string + '\n.RE\n'
RS = RS - 1
string = WorkList[i]
else:
while RS > 0 and len(stri) > RS-1 and stri[RS-1] != ':':
RS = RS - 1
WorkList[i] = string + '\n.RE\n'
string = WorkList[i]
# format "*..."
elif string[0] == '*':
WorkList[i] = '.br\n * ' + string[1:]
# format tables 2
elif string[:2] == '{|':
if tab2 > 0:
WorkList[i] = '.RS\n'
tab2 = tab2 + 1
col = 0
else:
WorkList[i] = ''
tab2 = 1
elif string[:2] == '|-' and tab2 > 0:
WorkList[i] = ''
col = 0
elif string[:2] == '|}':
if tab2 == 1:
WorkList[i] = ''
col = 0
tab2 = 0
elif tab2 > 1:
WorkList[i] = '\n.RE\n'
col = 0
tab2 = tab2 - 1
elif string[:8] == '|valign=' and tab2 > 0:
j = 9
while len(string) > j and string[j]!='|':
j = j + 1
if string[j] == '|':
if col == 0:
WorkList[i] = '\n.TP\n' + string[j+1:]
col = 1
elif col == 1:
WorkList[i] = string[j+1:]
col = 2
elif col > 1:
WorkList[i] = '.PP\n' + string[j+1:]
col = col + 1
elif string[:1] == '|' and tab2 > 0:
if col == 0:
WorkList[i] = '\n.TP\n' + string[1:]
col = 1
elif col == 1:
WorkList[i] = string[1:]
col = col + 1
|
kubeflow/kfp-tekton-backend
|
samples/core/condition/condition.py
|
Python
|
apache-2.0
| 2,572
| 0.001555
|
#!/usr/bin/env python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import kfp
from kfp import dsl
def random_num_op(low, high):
"""Generate a random number between low and high."""
return dsl.ContainerOp(
name='Generate random number',
image='python:alpine3.6',
command=['sh', '-c'],
arguments=['python -c "import random; print(random.randint($0, $1))" | tee $2', str(low), str(high), '/tmp/output'],
file_outputs={'output': '/tmp/output'}
)
def flip_coin_op():
"""Flip a coin and output heads or tails randomly."""
return dsl.ContainerOp(
name='Flip coin',
image='python:alpine3.6',
command=['sh', '-c'],
arguments=['python -c "import random; result = \'heads\' if random.randint(0,1) == 0 '
'else \'tails\'; print(result)" | tee /tmp/output'],
file_outputs={'output': '/tmp/output'}
)
def print_op(msg):
"""Print a message."""
return dsl.ContainerOp(
name='Print',
image='alpine:3.6',
command=['echo', msg],
)
@dsl.pipeline(
name='Conditional execution pipeline',
description='Shows how to use dsl.Condition().'
)
def flipcoin_pipeline():
flip = flip_coin_op()
with dsl.Condition(flip.output == 'heads'):
random_num_head = random_num_op(0, 9)
with dsl.Condition(random_num_head.output > 5):
print_op('heads and %s > 5!' % random_num_head.output)
with dsl.Condition(random_num_head.output <= 5):
print_op('heads and %s <= 5!' % random_num_head.output)
with dsl.Condition(flip.output == 'tails'):
random_num_tail = random_num_op(10, 19)
with dsl.Condition(random_num_tail.output > 15):
print_op('tails and %s > 15!' % random_num_tail.output)
with dsl.Condition(random_num_tail.output <= 15):
print_op('tails and %s <= 15!' % random_num_tail.output)
if __name__ == '__main__':
kfp.compiler.Compiler().compile(flipcoin_pipeline, __file__ + '.yaml')
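Besides compiling to YAML, the pipeline can be submitted directly to a running Kubeflow Pipelines endpoint; a sketch assuming such an endpoint is reachable (the host URL is a placeholder):
client = kfp.Client(host='http://localhost:8080')  # placeholder endpoint
client.create_run_from_pipeline_func(flipcoin_pipeline, arguments={})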
|
sindresf/The-Playground
|
Python/Machine Learning/ScikitClassifiers/Classifiers/Random_Forrest_Classification.py
|
Python
|
mit
| 724
| 0.008287
|
import pandas as pd
from sklearn import model_selection
from sklearn.ensemble import RandomForestClassifier
url = "https://archive.ics.uci.edu/ml/machine-learning-databases/pima-indians-diabetes/pima-indians-diabetes.data"
names = ['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']
df = pd.read_csv(url, names=names)
array = df.values
X = array[:,0:8]
y = array[:,8]
seed = 21
num_trees = 100
max_features = 3
kfold = model_selection.KFold(n_splits=10, random_state=seed)
model = RandomForestClassifier(n_estimators=num_trees, max_features=max_features)
results = model_selection.cross_val_score(model, X, y, cv=kfold)
print('results: ')
print(results)
print()
print('mean: ' + str(results.mean()))
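cross_val_score only estimates generalization; to obtain a usable classifier, the model is then fit on the full data, e.g. (a sketch):
model.fit(X, y)
print('sample predictions: ' + str(model.predict(X[:5])))  # smoke test on the first rows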
|
saguas/jasperserverlib
|
jasperserverlib/core/ResourcesTypeResolverUtil.py
|
Python
|
mit
| 2,430
| 0.017284
|
from resource_media_types import *
from ClientFile import ClientFile
from ClientFolder import ClientFolder
from queryResource import ClientQuery
class ResourcesTypeResolverUtil(object):
classes = {}
#classes[ClientAdhocDataView.__name__] = ResourceMediaType.ADHOC_DATA_VIEW_MIME
#classes[ClientAwsDataSource.__name__] = ResourceMediaType.AWS_DATA_SOURCE_MIME
#classes[ClientBeanDataSource.__name__] = ResourceMediaType.BEAN_DATA_SOURCE_MIME
#classes[ClientCustomDataSource.__name__] = ResourceMediaType.CUSTOM_DATA_SOURCE_MIME
#classes[ClientDataType.__name__] = ResourceMediaType.DATA_TYPE_MIME
classes[ClientFile.__name__] = TYPE_FILE
classes[ClientFolder.__name__] = TYPE_FOLDER
#classes[ClientInputControl.__name__] = ResourceMediaType.INPUT_CONTROL_MIME
#classes[ClientJdbcDataSource.__name__] = ResourceMediaType.JDBC_DATA_SOURCE_MIME
#classes[ClientJndiJdbcDataSource.__name__] = ResourceMediaType.JNDI_JDBC_DATA_SOURCE_MIME
#classes[ClientListOfValues.__name__] = ResourceMediaType.LIST_OF_VALUES_MIME
#put(ClientMondrianConnection.class, ResourceMediaType.MONDRIAN_CONNECTION_MIME);
#put(ClientMondrianXmlaDefinition.class, ResourceMediaType.MONDRIAN_XMLA_DEFINITION_MIME);
#put(ClientOlapUnit.class, ResourceMediaType.OLAP_UNIT_MIME);
classes[ClientQuery.__name__] = TYPE_QUERY
#put(ClientReportUnit.class, ResourceMediaType.REPORT_UNIT_MIME);
#put(ClientSecureMondrianConnection.class, ResourceMediaType.SECURE_MONDRIAN_CONNECTION_MIME);
#put(ClientSemanticLayerDataSource.class, ResourceMediaType.SEMANTIC_LAYER_DATA_SOURCE_MIME);
#put(ClientVirtualDataSource.class, ResourceMediaType.VIRTUAL_DATA_SOURCE_MIME);
#put(ClientXmlaConnection.class, ResourceMediaType.XMLA_CONNECTION_MIME);
#put(ClientResourceLookup.class, ResourceMediaType.RESOURCE_LOOKUP_MIME);
#put(ClientDashboard.class, ResourceMediaType.DASHBOARD_MIME);
#put(ClientDomainTopic.class, ResourceMediaType.DOMAIN_TOPIC_MIME);
@staticmethod
def getMimeType(resource):
if isinstance(resource, basestring):
#return resource as default in the case resource to be of direct type from resource_media_types
ret = ResourcesTypeResolverUtil.classes.get(resource, resource)
else:
ret = ResourcesTypeResolverUtil.classes.get(resource.__class__.__name__, None)
return ret
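A usage sketch of the resolver's lookup-with-fallback behaviour: registered class names map to their media type, and unknown strings are returned unchanged:
assert ResourcesTypeResolverUtil.getMimeType('ClientFolder') == TYPE_FOLDER
assert ResourcesTypeResolverUtil.getMimeType('unknown-type') == 'unknown-type'  # falls back to the input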
|
ryfeus/lambda-packs
|
pytorch/source/caffe2/python/operator_test/onnx_while_test.py
|
Python
|
mit
| 3,154
| 0.000951
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given
import hypothesis.strategies as st
import numpy as np
import unittest
class TestONNXWhile(serial.SerializedTestCase):
@serial.given(
condition=st.booleans(),
max_trip_count=st.integers(0, 100),
save_scopes=st.booleans(),
disable_scopes=st.booleans(),
seed=st.integers(0, 65535),
**hu.gcs_cpu_only)
def test_onnx_while_fibb(
self, condition, max_trip_count, save_scopes, disable_scopes, seed, gc, dc):
np.random.seed(seed)
if disable_scopes:
save_scopes = False
# Create body net
body_net = caffe2_pb2.NetDef()
# Two loop carried dependencies: first and second
body_net.external_input.extend(['i', 'cond', 'first', 'second'])
body_net.external_output.extend(['cond_new', 'second', 'third', 'third'])
add_op = core.CreateOperator(
'Add',
['first', 'second'],
['third'],
)
print3 = core.CreateOperator(
'Print',
['third'],
[],
)
limit_const = core.CreateOperator(
'ConstantFill',
[],
['limit_const'],
shape=[1],
dtype=caffe2_pb2.TensorProto.FLOAT,
value=100.0,
)
cond = core.CreateOperator(
'LT',
['third', 'limit_const'],
['cond_new'],
)
body_net.op.extend([add_op, print3, limit_const, cond])
while_op = core.CreateOperator(
'ONNXWhile',
['max_trip_count', 'condition', 'first_init', 'second_init'],
['first_a', 'second_a', 'third_a'],
body=body_net,
has_cond=True,
has_trip_count=True,
save_scopes=save_scopes,
disable_scopes=disable_scopes,
)
condition_arr = np.array(condition).astype(np.bool)
max_trip_count_arr = np.array(max_trip_count).astype(np.int64)
first_init = np.array([1]).astype(np.float32)
second_init = np.array([1]).astype(np.float32)
def ref(max_trip_count, condition, first_init, second_init):
first = 1
second = 1
results = []
if condition:
for _ in range(max_trip_count):
third = first + second
first = second
second = third
results.append(third)
if third > 100:
break
return (first, second, np.array(results).astype(np.float32))
self.assertReferenceChecks(
gc,
while_op,
[max_trip_count_arr, condition_arr, first_init, second_init],
ref,
)
if __name__ == "__main__":
unittest.main()
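Outside the hypothesis harness, the same operator could be driven manually through the workspace API; a sketch under the assumption that while_op is built as in the test above:
workspace.FeedBlob('max_trip_count', np.array(10).astype(np.int64))
workspace.FeedBlob('condition', np.array(True).astype(np.bool))
workspace.FeedBlob('first_init', np.array([1]).astype(np.float32))
workspace.FeedBlob('second_init', np.array([1]).astype(np.float32))
workspace.RunOperatorOnce(while_op)
print(workspace.FetchBlob('third_a'))  # per-iteration Fibonacci values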
|
changyuheng/code-jam-solutions
|
2014/Round 1B/A.py
|
Python
|
mit
| 1,248
| 0.004006
|
# -*- coding: utf-8 -*-
import sys
def get_skeleton(N, strings):
skeletons = []
for i in range(N):
skeleton = [strings[i][0]]
skeleton += [strings[i][j] for j in range(1, len(strings[i])) if strings[i][j] != strings[i][j-1]]
skeletons.append(skeleton)
for i in range(1, N):
if skeletons[i] != skeletons[i-1]:
skeletons[0] = []
break
return skeletons[0]
def solve():
N = int(input())
strings = [input() for _ in range(N)]
ans = 0
skeleton = get_skeleton(N, strings)
if len(skeleton) == 0:
return 'Fegla Won'
lengths = []
for c in skeleton:
length = dict()
for i in range(N):
for j in range(len(strings[i])):
if strings[i][j] != c:
break
length[j] = length.get(j, 0) + 1
strings[i] = strings[i][j:] if j < len(strings[i]) else ''
lengths.append(length)
for length in lengths:
ans += min(sum(abs(k - l) * length[l] for l in length) for k in length)
return ans
def main():
T = int(input())
for i in range(1, T + 1):
print('Case #{}: {}'.format(i, solve()))
if __name__ == '__main__':
sys.exit(main())
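A worked example of the skeleton step: consecutive duplicate characters collapse, so inputs are reconcilable exactly when their skeletons agree:
assert get_skeleton(2, ['aab', 'ab']) == ['a', 'b']  # same skeleton: solvable
assert get_skeleton(2, ['ab', 'ba']) == []           # mismatch: 'Fegla Won'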
|
lzw120/django
|
django/db/models/base.py
|
Python
|
bsd-3-clause
| 39,705
| 0.002393
|
import copy
import sys
from functools import update_wrapper
from future_builtins import zip
import django.db.models.manager # Imported to register signal handler.
from django.conf import settings
from django.core.exceptions import (ObjectDoesNotExist,
MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS)
from django.core import validators
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.related import (ManyToOneRel,
OneToOneField, add_lazy_relation)
from django.db import (connections, router, transaction, DatabaseError,
DEFAULT_DB_ALIAS)
from django.db.models.query import Q
from django.db.models.query_utils import DeferredAttribute
from django.db.models.deletion import Collector
from django.db.models.options import Options
from django.db.models import signals
from django.db.models.loading import register_models, get_model
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import curry
from django.utils.encoding import smart_str, force_unicode
from django.utils.text import get_text_list, capfirst
class ModelBase(type):
"""
Metaclass for all models.
"""
def __new__(cls, name, bases, attrs):
super_new = super(ModelBase, cls).__new__
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
# If this isn't a subclass of Model, don't do anything special.
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
if getattr(meta, 'app_label', None) is None:
# Figure out the app_label by looking one level up.
# For 'django.contrib.sites.models', this would be 'sites'.
model_module = sys.modules[new_class.__module__]
kwargs = {"app_label": model_module.__name__.split('.')[-2]}
else:
kwargs = {}
new_class.add_to_class('_meta', Options(meta, **kwargs))
if not abstract:
new_class.add_to_class('DoesNotExist', subclass_exception('DoesNotExist',
tuple(x.DoesNotExist
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
or (ObjectDoesNotExist,), module))
new_class.add_to_class('MultipleObjectsReturned', subclass_exception('MultipleObjectsReturned',
tuple(x.MultipleObjectsReturned
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
or (MultipleObjectsReturned,), module))
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
if getattr(new_class, '_default_manager', None):
if not is_proxy:
# Multi-table inheritance doesn't inherit default manager from
# parents.
new_class._default_manager = None
new_class._base_manager = None
else:
# Proxy classes do inherit parent's default manager, if none is
# set explicitly.
new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
# Bail out early if we have already created this class.
m = get_model(new_class._meta.app_label, name,
seed_cache=False, only_installed=False)
if m is not None:
return m
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = new_class._meta.local_fields + \
new_class._meta.local_many_to_many + \
new_class._meta.virtual_fields
field_names = set([f.name for f in new_fields])
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
else:
continue
if base is not None:
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
else:
base = parent
if base is None:
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
if (new_class._meta.local_fields or
new_class._meta.local_many_to_many):
raise FieldError("Proxy model '%s' contains model fields." % name)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Do the appropriate setup for any model parents.
o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
if isinstance(f, OneToOneField)])
for base in parents:
original_base = base
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
# Check for clashes between locally declared fields and those
# on the base classes (we cannot handle shadowed fields at the
# moment).
for field in parent_fields:
if field.name in field_names:
raise FieldError('Local field %r in class %r clashes '
'with field of similar name from '
'base class %r' %
(field.name, name, base.__name__))
if not base._meta.abstract:
# Concrete classes...
base = base._meta.concrete_model
if base in o2o_map:
field = o2o_map[base]
elif not is_proxy:
attr_name = '%s_ptr' % base._meta.module_name
field = OneToOneField(base, name=attr_name,
auto_created=True, parent_link=True)
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
# .. and abstract ones.
for field in parent_fields:
new_class.add_to_class(field.name, copy.deepcopy(field))
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base._meta.parents)
# Inherit managers from the abstract base classes.
new_class.copy_managers(base._meta.abstract_managers)
# Proxy models inherit the non-abstract managers from their base,
# unless they have redefined any of
|
deeso/python_scrirpts
|
tax stuff.py
|
Python
|
apache-2.0
| 15,594
| 0.026549
|
import urllib
import urllib2
import threading
BaseProp = 361995
EndProp = 362044
proxies = {'http': 'http://localhost:8080'}
# handles getting the owner pdf listing of the home
class DoNothingRedirectHandler(urllib2.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
return headers
opener = urllib2.build_opener(DoNothingRedirectHandler())
data_sheet_url = 'http://.org/appraisal/publicaccess/PropertyDataSheet.aspx?PropertyID=%s&PropertyOwnerID=%s&NodeID=11'
get_data_location = lambda propId,ownerId: opener.open(urllib2.Request(data_sheet_url%(propId, ownerId)))
get_data_pdf = lambda url: urllib.urlopen(url).read()
def get_pdf_listing(propId, ownerId, owner_name):
fname = owner_name.replace(" ","_").replace(",","").replace("&","and")+".pdf"
x = get_data_location(propId, ownerId)
if x.headers:
url = x.getheader('Location')
if url == "":
return
url = "http://.org/"+url.split("#")[0]
pdf = get_data_pdf(url)
f = open(fname,'wb')
f.write(pdf)
print "wrote pdf", fname
MAIN_HISTORY = {}
PropIds = []
for i in xrange(BaseProp, EndProp):
PropIds.append('R%d'%i)
CurrentYear = 2010
EndYear = 2006
history_url = "http://.org/appraisal/publicaccess/PropertyHistory.aspx?PropertyID=%s&PropertyOwnerID=%s&NodeID=11&FirstTaxYear=%d&LastTaxYear=%d"
get_history_page = lambda propId,ownerId,eyear,syear:urllib.urlopen(history_url%(propId,ownerId,syear,eyear)).read()
HistorySplit = lambda data: "".join(data.split('<HistoryResults>')[1]).split('</HistoryResults>')[0].strip()
HistoryTaxYearSplit = lambda data: data.split('History TaxYear="')[1].split('" ')[0].strip()
HistroyNameSplit = lambda data: data.split('Name="')[1].split('" ')[0].strip()
HistroyValueSplit = lambda data: data.split('Value="')[1].split('" ')[0].strip()
def process_history(propId,ownerId, cyear, eyear):
history_data = {}
page_data = get_history_page(propId,ownerId, cyear-1, eyear)
if page_data.find("<HistoryResults>") == -1:
print "page has no relevant history"
return {}
for line in HistorySplit(page_data).splitlines():
if line.find("History TaxYear=") == -1:
continue
line = line.strip()
if line == "":
continue
year = HistoryTaxYearSplit(line)
name = HistroyNameSplit(line)
value = HistroyValueSplit(line)
if not year in history_data:
history_data[year] = {}
history_data[year][name] = value
return history_data
# get the required segments for the square footages and ammendments
init_imp_page = 'http://.org/appraisal/publicaccess/PropertyImpDetail.aspx?CurrPosition=1&LastPosition=1&PropertyID=%s&PropertyOwnerID=0&NodeID=11'
extract_segments = lambda data: int(data.split('<td class="ssDetailLabel" nowrap="true">Segments</td><td class="ssDetailData" nowrap="true">')[1].split("</td>")[0])
get_segments = lambda propId: extract_segments(urllib.urlopen(init_imp_page%propId).read())
segments_url = "http://.org/appraisal/publicaccess/PropertyImpSegDetail.aspx"
segments_data = lambda current,last,propID: "CurrSegPosition=%d&LastSegPosition=%d&CurrPosition=1&LastPosition=1&TaxYear=2008&PropertyID=%s&PropertyOwnerID=0&NodeID=11&dbKeyAuth=Appraisal"%(current,last,propID)
segments_post = lambda data:urllib.urlopen(segments_url,data=data).read()
def traverse_segments(start, end, propId):
imps = {"MA (Main Area)":"Main Floor",
"MA2.0 (Main Area 2nd Flr)":"Second Floor",
"Garage":"Garage",
"Porch":"Porch"}
functions = {"Second Floor":[SecondFlrSqSplit, SecondFlrVaSplit],
"Main Floor":[MainFlrSqSplit, MainFlrVaSplit],
"Porch":[PorchFlrSqSplit, PorchFlrVaSplit],
"Garage":[GarageFlrSqSplit, GarageFlrVaSplit]}
results = {}
for i in xrange(start, end+1):
data = segments_data(i,end,propId)
page_data = segments_post(data)
for i in imps.keys():
if page_data.find(i) > -1:
name = imps[i]
results[name+" SQ Footage"] = functions[name][0](page_data).strip()
results[name+" Value"] = "$"+functions[name][1](page_data).split('$')[1].strip()
break
return results
header_string = "Address,Name,ID,Legal Description,Year,% inc,Value,Land ($),sq ft,$/sq ft,House ($),Sq ft,$/sq ft,1st floor ($),Sq ft,$/sq ft,2nd floor ($),Sq ft,$/sq ft,Garage ($),Sq ft,$/sq ft,Porch ($),Sq ft,$/sq ft"
split_str2 = '%s:</td><td class="ssDetailData" valign="top">'
split_field2 = lambda sstr, data: data.split(split_str2%sstr)[1].split("</td>")[0]
split_str3 = '<td class="ssDetailLabel">%s:</td><td class="ssDetailData" width="125px" align="right">'
def split_field3(sstr, data):
return data.split(split_str3 % sstr)[1].split('&#160;')[0]
ExemptionSplit = lambda data: data.split('Exemption Codes:</label></td><td><table cellpadding="0" cellspacing="0"><tr><td class="ssDetailData">')[1].split('</td></tr></table></td></tr><tr xmlns:msxsl="urn:schemas-microsoft-com:xslt" xmlns:tyl="http://www.tsgweb.com"><td id="tdEntity" class="ssDetailLabel" valign="top">')[0]
Exemptions = lambda data: ",".join([i.split('(')[0].strip() for i in ExemptionSplit(data).split(" <br />")])
CleanupAddr = lambda addr: addr.replace("<br />","")
# pull out address name, etc on first page
AddrSplit = lambda data: CleanupAddr(data.split('Property Address:</td><td class="ssDetailData" valign="top">')[1].split('</td>')[0])
NameSplit = lambda data: data.split('Owner Name:</td><td class="ssDetailData">')[1].split('</td>')[0].replace("&amp;", "&")
YearSplit = lambda data: "2010"  # tax year is hardcoded regardless of the page data
PropID = lambda data: data.split('Property Detail Sheet (')[1].split(')')[0]
LDescSplit = lambda data: data.split('Legal Description:</td><td class="ssDetailData">')[1].split('</td>')[0]
# Pull out values from summary stuff
AppraisedSplit = lambda data: data.split('Appraised:</td><td class="ssDetailData" width="125px" align="right">')[1].split(' ')[0]
LandHSSplit = lambda data: data.split('Land HS:</td><td class="ssDetailData" width="125px" align="right">')[1].split(' ')[0]
ImprovementHSSplit = lambda data: data.split('Improvement HS:</td><td class="ssDetailData" width="125px" align="right">')[1].split(' ')[0]
HomeSteadCapSplit = lambda data: data.split('Homestead Cap:</td><td class="ssDetailData" width="125px" align="right">')[1].split(' ')[0]
AssessedSplit = lambda data: data.split('Assessed:</td><td class="ssDetailData" width="125px" align="right">')[1].split(' ')[0]
OwnerIdSplit = lambda data: data.split('Owner ID:</td><td class="ssDetailData">')[1].split('</td>')[0]
# Pull out values and areas of floors
split_str = '<td class="ssDetailPageLabel" nowrap="1">%s</td><td class="ssDetailPageData" nowrap="1">'
split_field = lambda sstr, data: data.split(split_str %sstr)[1].split('</td>')[0]
MainFlrSqSplit = lambda data: split_field("Area", data)
MainFlrVaSplit = lambda data: split_field("Value", data)
SecondFlrSqSplit = lambda data: split_field("Area", data)
SecondFlrVaSplit = lambda data: split_field("Value", data)
PorchFlrSqSplit = lambda data: split_field("Area", data)
PorchFlrVaSplit = lambda data: split_field("Value", data)
GarageFlrSqSplit = lambda data: split_field("Area", data)
GarageFlrVaSplit = lambda data: split_field("Value", data)
main_url = 'http://.org/appraisal/publicaccess/PropertyDetail.aspx?PropertyID=%s&dbKeyAuth=Appraisal&TaxYear=%s&NodeID=11&PropertyOwnerID=%s'
get_main_page = lambda propId,cyear,ownerId: urllib.urlopen(main_url%(propId, cyear,ownerId), proxies=proxies).read()
THREADS = []
main_search = "http://.org/appraisal/publicaccess/PropertySearch.aspx?PropertySearchType=1&SelectedItem=10&PropertyID=&PropertyOwnerID=&NodeID=11"
search_data = lambda prop_value,cyear: "PropertyID=%s&PropertySearchType=1&NodeID=11&dbKeyAuth=Appraisal&TaxYear=%s&SearchSubmit=Search"%(prop_value, cyear)
get_property = lambda data: urllib.urlopen("http://.org/appraisal/publicaccess/PropertySearchResults.aspx",data=data).read()
def get_propId(property,cyear):
d = search_data(property,cyear)
#print d
page = get_property(d)
#print page
    print page.split("ViewPropertyOrOwners(")[1].split(")")[0].replace(",", "").split()
    # Assumption: the first token of the split above is the property ID.
    propId = page.split("ViewPropertyOrOwners(")[1].split(")")[0].replace(",", "").split()[0]
    return propId
|
taimir/keras-layers
|
setup.py
|
Python
|
mit
| 367
| 0
|
from setuptools import setup
from setuptools import find_packages
setup(name='Keras-layers',
version='0.0.1',
description='Collection of useful non-standard layers for keras',
author='Atanas Mirchev',
author_email='taimir93@gmail.com',
      url='https://github.com/taimir/keras-layers',
license='MIT',
packages=find_packages())
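# Hedged usage note: with this setup.py, a local editable install would
# typically be "pip install -e ." from the repo root (assumption; no install
# instructions are given here).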
|
Kasai-Code/Kinesis
|
settings/hyperparameters.py
|
Python
|
mit
| 268
| 0
|
nb_epoch = 100
batch_size = 64
optimizer = 'adam'
hidden_units = 3000
nb_val_samples = 462
embedding_size = 10
dropout_percentage = 0.3
embedding_GRU_size = 100
maximum_line_length = 500
samples_per_epoch = 4127 * maximum_line_length
loss = 'categorical_crossentropy'
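# A hedged sketch of how these settings might be consumed; the Keras 1.x-style
# fit_generator() call is implied by the samples_per_epoch/nb_val_samples
# names, and `model`, `train_gen`, `val_gen` are hypothetical:
#
#   model.compile(optimizer=optimizer, loss=loss)
#   model.fit_generator(train_gen,
#                       samples_per_epoch=samples_per_epoch,
#                       nb_epoch=nb_epoch,
#                       validation_data=val_gen,
#                       nb_val_samples=nb_val_samples)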
|
SneakersInc/Pandora
|
modules/exportgraph.py
|
Python
|
mit
| 2,370
| 0.002532
|
#!/usr/bin/env python
# Original code was created by Nadeem Douba as part of the Canari Framework
from collections import OrderedDict
from xml.etree.cElementTree import XML
from zipfile import ZipFile
def mtgx2json(graph):
zipfile = ZipFile(graph)
graphs = filter(lambda x: x.endswith('.graphml'), zipfile.namelist())
for f in graphs:
multikeys = []
xml = XML(zipfile.open(f).read())
links = {}
for edge in xml.findall('{http://graphml.graphdrawing.org/xmlns}graph/'
'{http://graphml.graphdrawing.org/xmlns}edge'):
src = edge.get('source')
dst = edge.get('target')
if src not in links:
links[src] = dict(in_=[], out=[])
if dst not in links:
links[dst] = dict(in_=[], out=[])
links[src]['out'].append(dst)
links[dst]['in_'].append(src)
for node in xml.findall('{http://graphml.graphdrawing.org/xmlns}graph/'
'{http://graphml.graphdrawing.org/xmlns}node'):
node_id = node.get('id')
node = node.find('{http://graphml.graphdrawing.org/xmlns}data/'
'{http://maltego.paterva.com/xml/mtgx}MaltegoEntity')
record = OrderedDict({'NodeID': node_id, 'EntityType': node.get('type').strip()})
props = {'Data': {}}
for prop in node.findall('{http://maltego.paterva.com/xml/mtgx}Properties/'
'{http://maltego.paterva.com/xml/mtgx}Property'):
value = prop.find('{http://maltego.paterva.com/xml/mtgx}Value').text or ''
entity_prop = {prop.get('displayName'): value.strip()}
                props['Data'].update(entity_prop)
            record.update(props)
            s = ' - '.join(['%s: %s' % (key, value) for (key, value) in record['Data'].items()])
record.pop('Data')
data = {'Data': s}
record.update(data)
link = {'Links': {}}
i_link = {'Incoming': links.get(node_id, {}).get('in_', 0)}
link['Links'].update(i_link)
o_link = {'Outgoing': links.get(node_id, {}).get('out', 0)}
link['Links'].update(o_link)
record.update(link)
multikeys.append(record)
return multikeys
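# A hedged usage sketch; 'example.mtgx' is a hypothetical file name:
#
#   import json
#   for rec in mtgx2json('example.mtgx'):
#       print(json.dumps(rec))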
|
hasgeek/coaster
|
tests/test_logger.py
|
Python
|
bsd-2-clause
| 2,411
| 0.001659
|
from io import StringIO
from coaster.logger import RepeatValueIndicator, filtered_value, pprint_with_indent
def test_filtered_value():
"""Test for filtered values."""
# Doesn't touch normal key/value pairs
assert filtered_value('normal', 'value') == 'value'
assert filtered_value('also_normal', 123) == 123
# But does redact sensitive keys
assert filtered_value('password', '123pass') != '123pass'
# The returned value is an object that renders via repr and str as '[Filtered]'
assert repr(filtered_value('password', '123pass')) == '[Filtered]'
assert str(filtered_value('password', '123pass')) == '[Filtered]'
# Also works on partial matches in the keys
assert repr(filtered_value('confirm_password', '123pass')) == '[Filtered]'
# The filter uses a verbose regex. Words in the middle of the regex also work
    assert repr(filtered_value('access_token', 'secret-here')) == '[Filtered]'
# Filters are case insensitive
assert repr(filtered_value('TELEGRAM_ERROR_APIKEY', 'api:key')) == '[Filtered]'
# Keys with 'token' as a word are also filtered
assert repr(filtered_value('SMS_TWILIO_TOKEN', 'api:key')) == '[Filtered]'
# Numbers that look like card numbers are filtered
assert (
filtered_value('anything', 'My number is 1234 5678 9012 3456')
== 'My number is [Filtered]'
)
    # This works with any combination of spaces and dashes within the number
assert (
filtered_value('anything', 'My number is 1234 5678-90123456')
== 'My number is [Filtered]'
)
def test_pprint_with_indent():
"""Test pprint_with_indent does indentation."""
out = StringIO()
data = {
12: 34,
'confirm_password': '12345qwerty',
'credentials': ['abc', 'def'],
'key': 'value',
'nested_dict': {'password': 'not_filtered'},
'password': '12345qwerty',
}
pprint_with_indent(data, out)
assert (
out.getvalue()
== '''\
{12: 34,
'confirm_password': [Filtered],
'credentials': [Filtered],
'key': 'value',
'nested_dict': {'password': 'not_filtered'},
'password': [Filtered]}
'''
)
def test_repeat_value_indicator():
"""Test RepeatValueIndicator class."""
assert repr(RepeatValueIndicator('key')) == "<same as prior 'key'>"
assert str(RepeatValueIndicator('key')) == "<same as prior 'key'>"
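# A hedged sketch of applying filtered_value() per key before logging; the
# params dict is hypothetical:
#
#   params = {'user': 'alice', 'password': 'hunter2'}
#   safe = {k: filtered_value(k, v) for k, v in params.items()}
#   assert repr(safe['password']) == '[Filtered]'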
|
ivano666/tensorflow
|
tensorflow/python/kernel_tests/bitcast_op_test.py
|
Python
|
apache-2.0
| 2,305
| 0.007375
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.bitcast."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class BitcastTest(tf.test.TestCase):
def _testBitcast(self, x, datatype, shape):
with self.test_session():
tf_ans = tf.bitcast(x, datatype)
out = tf_ans.eval()
buff_after = memoryview(out).tobytes()
buff_before = memoryview(x).tobytes()
self.assertEqual(buff_before, buff_after)
self.assertEqual(tf_ans.get_shape(), shape)
self.assertEqual(tf_ans.dtype, datatype)
def testSmaller(self):
x = np.random.rand(3, 2)
datatype = tf.int8
shape = [3, 2, 8]
self._testBitcast(x, datatype, shape)
def testLarger(self):
x = np.arange(16, dtype=np.int8).reshape([4, 4])
datatype = tf.int32
shape = [4]
self._testBitcast(x, datatype, shape)
def testSameDtyp
|
e(self):
x = np.random.rand(3, 4)
shape = [3, 4]
self._testBitcast(x, x.dtype, shape)
def testSameSize(self):
x = np.random.rand(3, 4)
shape = [3, 4]
self._testBitcast(x, tf.int64, shape)
  def testErrors(self):
    x = np.zeros([1, 1], np.int8)
datatype = tf.int32
with self.assertRaisesRegexp(ValueError, "Cannot bitcast due to shape"):
tf.bitcast(x, datatype, None)
def testEmpty(self):
x = np.ones([], np.int32)
datatype = tf.int8
shape = [4]
self._testBitcast(x, datatype, shape)
def testUnknown(self):
x = tf.placeholder(tf.float32)
datatype = tf.int8
tf.bitcast(x, datatype, None)
if __name__ == "__main__":
tf.test.main()
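# A hedged standalone sketch of the API under test (same TF 0.x-era usage as
# above): bitcasting to a narrower dtype appends a dimension sized by the
# width ratio.
#
#   x = tf.constant([1], dtype=tf.int32)
#   y = tf.bitcast(x, tf.int8)  # shape [1] -> [1, 4]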
|