Dataset columns:

| column | dtype | range |
|---|---|---|
| repo_name | string | lengths 5 to 100 |
| path | string | lengths 4 to 231 |
| language | string | 1 class |
| license | string | 15 classes |
| size | int64 | 6 to 947k |
| score | float64 | 0 to 0.34 |
| prefix | string | lengths 0 to 8.16k |
| middle | string | lengths 3 to 512 |
| suffix | string | lengths 0 to 8.17k |
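The prefix, middle, and suffix columns carve each file into three consecutive strings (a fill-in-the-middle style split, as the column names suggest), so a record's original text can be recovered by concatenating them in order. Below is a minimal sketch of that reassembly; the field names come from the column table above, while the abbreviated row literal only hints at the first record in this listing and is not exact.

```python
# Minimal sketch: rebuild a source file from one record of this dataset.
# Assumption: "prefix", "middle", and "suffix" are consecutive slices of the same file.
# The row literal is abbreviated and purely illustrative.
row = {
    "repo_name": "compiteing/flask-ponywhoosh",
    "path": "docs/src/conf.py",
    "prefix": "# -*- coding: utf-8 -*-\n# ...\n# add these directories to sys.path here. If the directory is re",
    "middle": "lative to the\n# documentation root, use os.path.abspath ...",
    "suffix": "\n# If your documentation needs a minimal Sphinx version ...",
}

# The original file text is simply the three fields concatenated in order.
original_text = row["prefix"] + row["middle"] + row["suffix"]
print(f"{row['repo_name']}:{row['path']} -> {len(original_text)} characters")
```

The records below follow this column layout, one source file per record.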

repo: compiteing/flask-ponywhoosh | path: docs/src/conf.py | language: Python | license: mit | size: 8,701 | score: 0.004252
# -*- coding: utf-8 -*-
#
# flask-ponywhoosh documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 1 14:31:57 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
sys.path.append(os.path.abspath('_themes'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.doctest']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'flask-ponywhoosh'
copyright = u'2018, Jonathan Prieto-Cubides & Felipe Rodriguez'
author = u'Jonathan Prieto-Cubides & Felipe Rodriguez'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# import pkg_resources
# try:
# release = pkg_resources.get_distribution('flask-ponywhoosh').version
# except pkg_resources.DistributionNotFound:
# print('To build the documentation, The distribution information of')
# print('Flask-Ponywhoosh has to be available. Either install the package')
# print('into your development environment or run "setup.py develop"')
# print('to setup the metadata. A virtualenv is recommended!')
# sys.exit(1)
# del pkg_resources
# if 'dev' in release:
# release = release.split('dev')[0] + 'dev'
# version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'flask'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'index_logo': 'logo.png'
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar. Do not set, template magic!
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['sidebarintro.html', 'sourcelink.html', 'searchbox.html'],
'**': ['sidebarlogo.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'flask-ponywhooshdoc'
# -- Options for LaTeX output --------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(master_doc, 'flask-ponywhoosh.tex', u'flask-ponywhoosh Documentation',
u'Jonathan Prieto-Cubides \\& Felipe Rodriguez', 'manual'),
]
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = False
latex_elements = {
'fontpkg': r'\usepackage{mathpazo}',
'papersize': 'a4paper',
'pointsize': '12pt',
'preamble': r'\usepackage{flaskstyle}'
}
latex_use_parts = True
latex_additional_files = ['flaskstyle.sty', 'logo.pdf']
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'flask-ponywhoosh', u'flask-ponywhoosh Documentation',
[author], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None,
'http://flask.pocoo.org/docs/': None}
pygments_style = 'flask_theme_support.FlaskyStyle'
texinfo_documents = [
(master_doc, 'flask-ponywhoosh', u'flask-ponywhoosh Documentation',
au

repo: stevecassidy/signbank-video | path: video/admin.py | language: Python | license: bsd-3-clause | size: 264 | score: 0.007576
from django.contrib import admin
from video.models import TaggedVideo, Video
class VideoAdmin(admin.TabularInline):
model = Video
@admin.register(TaggedVideo)
class TaggedVideoAdmin(admin.ModelAdmin):
search_fields = ['^tag']
inlines = [VideoAdmin]

repo: mission-liao/pyopenapi | path: pyopenapi/tests/migration/test_scan.py | language: Python | license: mit | size: 6,151 | score: 0.000488
# -*- coding: utf-8 -*-
import unittest
import weakref
from pyopenapi.migration.scan import Scanner, Dispatcher, scan
from pyopenapi.migration.versions.v1_2.objects import (
ApiDeclaration, Authorization, Operation, ResponseMessage, Parameter)
from pyopenapi.migration.versions.v3_0_0.objects import (
Header as Header3,
Parameter as Parameter3,
)
from ..utils import get_test_data_folder, SampleApp
class CountObject(object):
""" a scanner for counting objects and looking for
longest attribute name. Just for test.
"""
class Disp(Dispatcher):
pass
def __init__(self):
self.total = {
ApiDeclaration: 0,
Authorization: 0,
Operation: 0,
ResponseMessage: 0
}
self.long_name = ''
@Disp.register([ApiDeclaration, Authorization, Operation, ResponseMessage])
def _count(self, path, obj, _):
self.total[obj.__class__] = self.total[obj.__class__] + 1
return path
@Disp.result
def _result(self, name):
if len(name) > len(self.long_name):
self.long_name = name
class PathRecord(object):
""" a scanner to record all json path
"""
class Disp(Dispatcher):
pass
def __init__(self):
self.api_declaration = []
self.authorization = []
self.response_message = []
self.parameter = []
# pylint: disable=unused-argument
@Disp.register([ApiDeclaration])
def _api_declaration(self, path, obj, _):
self.api_declaration.append(path)
# pylint: disable=unused-argument
@Disp.register([Authorization])
def _authorization(self, path, obj, _):
self.authorization.append(path)
# pylint: disable=unused-argument
@Disp.register([ResponseMessage])
def _response_message(self, path, obj, _):
self.response_message.append(path)
@Disp.register([Parameter])
def _parameter(self, path, obj, _):
self.parameter.append(path)
class ScannerTestCase(unittest.TestCase):
""" test scanner """
@classmethod
def setUpClass(cls):
cls.app = SampleApp.load(
get_test_data_folder(version='1.2', which='wordnik'))
def test_count(self):
scanner = Scanner(self.app)
count_obj = CountObject()
scanner.scan(route=[count_obj], root=self.app.raw)
for name in self.app.raw.cached_apis:
scanner.scan(route=[count_obj], root=self.app.raw.cached_apis[name])
self.assertEqual(
len(count_obj.long_name),
len('#/apis/3/operations/0/responseMessages/0'))
self.assertEqual(count_obj.total, {
Authorization: 1,
ApiDeclaration: 3,
Operation: 20,
ResponseMessage: 23
})
def test_leaves(self):
scanner = Scanner(self.app)
count_obj = CountObject()
scanner.scan(route=[count_obj], root=self.app.raw, leaves=[Operation])
for name in self.app.raw.cached_apis:
scanner.scan(
route=[count_obj],
root=self.app.raw.cached_apis[name],
leaves=[Operation])
# the scanning would stop at Operation, so ResponseMessage
# would not be counted.
self.assertEqual(count_obj.total, {
Authorization: 1,
ApiDeclaration: 3,
Operation: 20,
ResponseMessage: 0
})
def test_path(self):
scanner = Scanner(self.app)
path_record = PathRecord()
scanner.scan(route=[path_record], root=self.app.raw)
scanner.scan(
route=[path_record], root=self.app.raw.cached_apis['store'])
self.assertEqual(sorted(path_record.api_declaration), sorted(['#']))
self.assertEqual(path_record.authorization, ['#/authorizations/oauth2'])
self.assertEqual(
sorted(path_record.response_message),
sorted([
'#/apis/0/operations/0/responseMessages/0',
'#/apis/1/operations/0/responseMessages/1',
'#/apis/1/operations/0/responseMessages/0',
'#/apis/1/operations/1/responseMessages/1',
'#/apis/1/operations/1/responseMessages/0'
]))
self.assertEqual(
sorted(path_record.parameter),
sorted([
'#/apis/0/operations/0/parameters/0',
'#/apis/1/operations/0/parameters/0',
'#/apis/1/operations/1/parameters/0',
]))
class ResolveTestCase(unittest.TestCase):
""" test for scanner: Resolve """
@classmethod
def setUpClass(cls):
cls.app = SampleApp.create(
get_test_data_folder(version='1.2', which='model_subtypes'),
to_spec_version='2.0')
def test_ref_resolve(self):
""" make sure pre resolve works """
schema, _ = self.app.resolve_obj(
'#/definitions/user!##!UserWithInfo/allOf/0',
from_spec_version='2.0')
ref = schema.get_attrs('migration').ref_obj
self.assertTrue(isinstance(ref, weakref.ProxyTypes))
schema, _ = self.app.resolve_obj(
'#/definitions/user!##!User',
from_spec_version='2.0',
)
self.assertEqual(ref, schema)
class CountParemeter3(object):
""" a scanner just for test
"""
class Disp(Dispatcher):
pass
def __init__(self):
self.total = {
Header3: 0,
Parameter3: 0,
}
@Disp.register([Header3, Parameter3])
def _count(self, _, obj):
self.total[obj.__class__] = self.total[obj.__class__] + 1
class Scanner2TestCase(unittest.TestCase):
""" test case for Scanner2 """
def test_child_class_called_twice(self):
""" make a callback for 'Header' and 'Parameter' would only be called once,
when Header inherit Paremeter
"""
header = Header3({})
count_param = CountParemeter3()
scan(route=[count_param], root=header)
self.assertEqual(count_param.total[Header3], 1)
self.assertEqual(count_param.total[Parameter3], 0)

repo: tellesnobrega/sahara | path: sahara/utils/notification/sender.py | language: Python | license: apache-2.0 | size: 2,996 | score: 0
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
from sahara import context
from sahara.utils import rpc as messaging
LOG = logging.getLogger(__name__)
SERVICE = 'sahara'
CLUSTER_EVENT_TEMPLATE = "sahara.cluster.%s"
HEALTH_EVENT_TYPE = CLUSTER_EVENT_TEMPLATE % "health"
notifier_opts = [
cfg.StrOpt('level',
default='INFO',
deprecated_name='notification_level',
deprecated_group='DEFAULT',
help='Notification level for outgoing notifications'),
cfg.StrOpt('publisher_id',
deprecated_name='notification_publisher_id',
deprecated_group='DEFAULT')
]
notifier_opts_group = 'oslo_messaging_notifications'
CONF = cfg.CONF
CONF.register_opts(notifier_opts, group=notifier_opts_group)
def _get_publisher():
publisher_id = CONF.oslo_messaging_notifications.publisher_id
if publisher_id is None:
publisher_id = SERVICE
return publisher_id
def _notify(event_type, body):
LOG.debug("Notification about cluster is going to be sent. Notification "
"type={type}".format(type=event_type))
ctx = context.ctx()
level = CONF.oslo_messaging_notifications.level
body.update({'project_id': ctx.tenant_id, 'user_id': ctx.user_id})
client = messaging.get_notifier(_get_publisher())
method = getattr(client, level.lower())
method(ctx, event_type, body)
def _health_notification_body(cluster, health_check):
verification = cluster.verification
return {
'cluster_id': cluster.id,
'cluster_name': cluster.name,
'verification_id': verification['id'],
'health_check_status': health_check['status'],
'health_check_name': health_check['name'],
'health_check_description': health_check['description'],
'created_at': health_check['created_at'],
'updated_at': health_check['updated_at']
}
def status_notify(cluster_id, cluster_name, cluster_status, ev_type):
"""Sends notification about creating/updating/deleting cluster."""
_notify(CLUSTER_EVENT_TEMPLATE % ev_type, {
'cluster_id': cluster_id, 'cluster_name': cluster_name,
'cluster_status': cluster_status})
def health_notify(cluster, health_check):
"""Sends notification about current cluster health."""
_notify(HEALTH_EVENT_TYPE,
_health_notification_body(cluster, health_check))

repo: ToxicWar/bibliotheque | path: library/admin.py | language: Python | license: mit | size: 326 | score: 0
# coding: utf-8
from __future__ import unicode_literals
from django.contrib import admin
from .models import Book, Author, Publisher, Genre
admin.site.register(Book, admin.ModelAdmin)
admin.site.register(Author, admin.ModelAdmin)
admin.site.register(Publisher, admin.ModelAdmin)
admin.site.register(Genre, admin.ModelAdmin)

repo: ibc/MediaSoup | path: worker/deps/gyp/test/actions-bare/gyptest-bare.py | language: Python | license: isc | size: 558 | score: 0
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies actions which are not depended on by other targets get executed.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('bare.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('bare.gyp', chdir='relocate/src')
file_content = 'Hello from bare.py\n'
test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
test.pass_test()

repo: pirata-cat/agora-ciudadana | path: userena/tests/managers.py | language: Python | license: agpl-3.0 | size: 6,047 | score: 0.001984
from django.test import TestCase
from django.core import mail
from django.contrib.auth.models import User
from userena.models import UserenaSignup
from userena import settings as userena_settings
from guardian.shortcuts import get_perms
import datetime, re
class UserenaManagerTests(TestCase):
""" Test the manager of Userena """
user_info = {'username': 'alice',
'password': 'swordfish',
'email': 'alice@example.com'}
fixtures = ['users']
def test_create_inactive_user(self):
"""
Test the creation of a new user.
``UserenaSignup.create_inactive_user`` should create a new user that is
not active. The user should get an ``activation_key`` that is used to
set the user as active.
Every user also has a profile, so this method should create an empty
profile.
"""
# Check that the fields are set.
new_user = UserenaSignup.objects.create_user(**self.user_info)
self.assertEqual(new_user.username, self.user_info['username'])
self.assertEqual(new_user.email, self.user_info['email'])
self.failUnless(new_user.check_password(self.user_info['password']))
# User should be inactive
self.failIf(new_user.is_active)
# User has a valid SHA1 activation key
self.failUnless(re.match('^[a-f0-9]{40}$', new_user.userena_signup.activation_key))
# User now has an profile.
self.failUnless(new_user.get_profile())
# User should be saved
self.failUnlessEqual(User.objects.filter(email=self.user_info['email']).count(), 1)
def test_activation_valid(self):
"""
Valid activation of an user.
Activation of an user with a valid ``activation_key`` should activate
the user and set a new invalid ``activation_key`` that is defined in
the setting ``USERENA_ACTIVATED``.
"""
user = UserenaSignup.objects.create_user(**self.user_info)
active_user = UserenaSignup.objects.activate_user(user.username,
user.userena_signup.activation_key)
# The returned user should be the same as the one just created.
self.failUnlessEqual(user, active_user)
# The user should now be active.
self.failUnless(active_user.is_active)
# The user should have permission to view and change its profile
self.failUnless('view_profile' in get_perms(active_user, active_user.get_profile()))
self.failUnless('change_profile' in get_perms(active_user, active_user.get_profile()))
# The activation key should be the same as in the settings
self.assertEqual(active_user.userena_signup.activation_key,
userena_settings.USERENA_ACTIVATED)
def test_activation_invalid(self):
"""
Activation with a key that's invalid should make
``UserenaSignup.objects.activate_user`` return ``False``.
"""
# Wrong key
self.failIf(UserenaSignup.objects.activate_user('john', 'wrong_key'))
# At least the right length
invalid_key = 10 * 'a1b2'
self.failIf(UserenaSignup.objects.activate_user('john', invalid_key))
def test_activation_expired(self):
"""
Activation with a key that's expired should also make
``UserenaSignup.objects.activation_user`` return ``False``.
"""
user = UserenaSignup.objects.create_user(**self.user_info)
# Set the date that the key is created a day further away than allowed
user.date_joined -= datetime.timedelta(days=userena_settings.USERENA_ACTIVATION_DAYS + 1)
user.save()
# Try to activate the user
UserenaSignup.objects.activate_user(user.username, user.userena_signup.activation_key)
active_user = User.objects.get(username='alice')
# UserenaSignup activation should have failed
self.failIf(active_user.is_active)
# The activation key should still be a hash
self.assertEqual(user.userena_signup.activation_key,
active_user.userena_signup.activation_key)
def test_confirmation_valid(self):
"""
Confirmation of a new e-mail address with turns out to be valid.
"""
new_email = 'john@newexample.com'
user = User.objects.get(pk=1)
user.userena_signup.change_email(new_email)
# Confirm email
confirmed_user = UserenaSignup.objects.confirm_email(user.username,
user.userena_signup.email_confirmation_key)
self.failUnlessEqual(user, confirmed_user)
# Check the new email is set.
self.failUnlessEqual(confirmed_user.email, new_email)
# ``email_new`` and ``email_verification_key`` should be empty
self.failIf(confirmed_user.userena_signup.email_unconfirmed)
self.failIf(confirmed_user.userena_signup.email_confirmation_key)
def test_confirmation_invalid(self):
"""
Trying to confirm a new e-mail address when the ``confirmation_key``
is invalid.
"""
new_email = 'john@newexample.com'
user = User.objects.get(pk=1)
user.userena_signup.change_email(new_email)
# Verify email with wrong SHA1
self.failIf(UserenaSignup.objects.confirm_email('john', 'sha1'))
# Correct SHA1, but non-existend in db.
self.failIf(UserenaSignup.objects.confirm_email('john', 10 * 'a1b2'))
def test_delete_expired_users(self):
"""
Test if expired users are deleted from the database.
"""
expired_user = UserenaSignup.objects.create_user(**self.user_info)
expired_user.date_joined -= datetime.timedelta(days=userena_settings.USERENA_ACTIVATION_DAYS + 1)
expired_user.save()
deleted_users = UserenaSignup.objects.delete_expired_users()
self.failUnlessEqual(deleted_users[0].username, 'alice')

repo: selwin/django-mailer | path: django_mailer/engine.py | language: Python | license: mit | size: 7,863 | score: 0.001145
"""
The "engine room" of django mailer.
Methods here actually handle the sending of queued messages.
"""
from django_mailer import constants, models, settings
from lockfile import FileLock, AlreadyLocked, LockTimeout
from socket import error as SocketError
import logging
import os
import smtplib
import tempfile
import time
if constants.EMAIL_BACKEND_SUPPORT:
from django.core.mail import get_connection
else:
from django.core.mail import SMTPConnection as get_connection
LOCK_PATH = settings.LOCK_PATH or os.path.join(tempfile.gettempdir(),
'send_mail')
logger = logging.getLogger('django_mailer.engine')
def _message_queue(block_size, exclude_messages=[]):
"""
A generator which iterates queued messages in blocks so that new
prioritised messages can be inserted during iteration of a large number of
queued messages.
To avoid an infinite loop, yielded messages *must* be deleted or deferred.
"""
def get_block():
queue = models.QueuedMessage.objects.non_deferred() \
.exclude(pk__in=exclude_messages).select_related()
if block_size:
queue = queue[:block_size]
return queue
queue = get_block()
while queue:
for message in queue:
yield message
queue = get_block()
def send_all(block_size=500, backend=None):
"""
Send all non-deferred messages in the queue.
A lock file is used to ensure that this process can not be started again
while it is already running.
The ``block_size`` argument allows for queued messages to be iterated in
blocks, allowing new prioritised messages to be inserted during iteration
of a large number of queued messages.
"""
lock = FileLock(LOCK_PATH)
logger.debug("Acquiring lock...")
try:
# lockfile has a bug dealing with a negative LOCK_WAIT_TIMEOUT (which
# is the default if it's not provided) systems which use a LinkFileLock
# so ensure that it is never a negative number.
lock.acquire(settings.LOCK_WAIT_TIMEOUT or 0)
#lock.acquire(settings.LOCK_WAIT_TIMEOUT)
except AlreadyLocked:
logger.debug("Lock already in place. Exiting.")
return
except LockTimeout:
logger.debug("Waiting for the lock timed out. Exiting.")
return
logger.debug("Lock acquired.")
start_time = time.time()
sent = deferred = skipped = 0
# A list of messages to be sent, usually contains messages that failed
exclude_messages = []
try:
if constants.EMAIL_BACKEND_SUPPORT:
connection = get_connection(backend=backend)
else:
connection = get_connection()
blacklist = models.Blacklist.objects.values_list('email', flat=True)
connection.open()
for message in _message_queue(block_size, exclude_messages=exclude_messages):
result = send_queued_message(message, connection=connection,
blacklist=blacklist)
if result == constants.RESULT_SENT:
sent += 1
elif result == constants.RESULT_FAILED:
deferred += 1
# Don't try to send this message again for now
exclude_messages.append(message.pk)
elif result == constants.RESULT_SKIPPED:
skipped += 1
connection.close()
finally:
logger.debug("Releasing lock...")
lock.release()
logger.debug("Lock released.")
logger.debug("")
if sent or deferred or skipped:
log = logger.warning
else:
log = logger.info
log("%s sent, %s deferred, %s skipped." % (sent, deferred, skipped))
logger.debug("Completed in %.2f seconds." % (time.time() - start_time))
def send_loop(empty_queue_sleep=None):
"""
Loop indefinitely, checking queue at intervals and sending and queued
messages.
The interval (in seconds) can be provided as the ``empty_queue_sleep``
argument. The default is attempted to be retrieved from the
``MAILER_EMPTY_QUEUE_SLEEP`` setting (or if not set, 30s is used).
"""
empty_queue_sleep = empty_queue_sleep or settings.EMPTY_QUEUE_SLEEP
while True:
while not models.QueuedMessage.objects.all():
logger.debug("Sleeping for %s seconds before checking queue "
"again." % empty_queue_sleep)
time.sleep(empty_queue_sleep)
send_all()
def send_queued_message(queued_message, connection=None, blacklist=None,
log=True):
"""
Send a queued message, returning a response code as to the action taken.
The response codes can be found in ``django_mailer.constants``. The
response will be either ``RESULT_SKIPPED`` for a blacklisted email,
``RESULT_FAILED`` for a deferred message or ``RESULT_SENT`` for a
successful sent message.
To allow optimizations if multiple messages are to be sent, a
connection can be provided and a list of blacklisted email addresses.
Otherwise a new connection will be opened to send this message and the
email recipient address checked against the ``Blacklist`` table.
If the message recipient is blacklisted, the message will be removed from
the queue without being sent. Otherwise, the message is attempted to be
sent with an SMTP failure resulting in the message being flagged as
deferred so it can be tried again later.
By default, a log is created as to the action. Either way, the original
message is not deleted.
"""
message = queued_message.message
if connection is None:
connection = get_connection()
connection.open()
arg_connection = False
else:
arg_connection = True
if blacklist is None:
blacklisted = models.Blacklist.objects.filter(email=message.to_address)
else:
blacklisted = message.to_address in blacklist
if blacklisted:
logger.info("Not sending to blacklisted email: %s" %
message.to_address.encode("utf-8"))
queued_message.delete()
result = constants.RESULT_SKIPPED
else:
result = send_message(message, connection=connection)
if not arg_connection:
connection.close()
return result
def send_message(message, connection=None):
"""
Send an EmailMessage, returning a response code as to the action taken.
The response codes can be found in ``django_mailer.constants``. The
response will be either ``RESULT_FAILED`` for a failed send or
``RESULT_SENT`` for a successfully sent message.
To allow optimizations if multiple messages are to be sent, a
connection can be provided. Otherwise a new connection will be opened
to send this message.
This function does not perform any logging or queueing.
"""
if connection is None:
connection = get_connection()
opened_connection = False
try:
logger.info("Sending message to %s: %s" %
(message.to_address.encode("utf-8"),
message.subject.encode("utf-8")))
message.email_message(connection=connection).send()
message.queuedmessage.delete()
result = constants.RESULT_SENT
log_message = 'Sent'
except Exception, err:
if isinstance(err, settings.DEFER_ON_ERRORS):
message.queuedmessage.defer()
logger.warning("Message to %s deferred due to failure: %s" %
(message.to_address.encode("utf-8"), err))
log_message = unicode(err)
result = constants.RESULT_FAILED
models.Log.objects.create(message=message, result=result,
log_message=log_message)
if opened_connection:
connection.close()
return result

repo: francis-taylor/Timotty-Master | path: cybot/plug/myid.py | language: Python | license: mit | size: 625 | score: 0.0224
# -*- coding: utf-8 -*-
import config
from metodos import *
from mensagens import myid
def my_id(msg):
chat_id = msg['chat']['id']
try: user = '@' + msg['from']['username']
except:user = " "
if msg['text'] == '/id':
if msg['chat']['type'] == 'private':
sendMessage(chat_id, myid['private'].decode('utf8').format(msg['from']['first_name'].encode('utf-8'),msg['from']['id'],user))
if msg['chat']['type'] == 'supergroup' or msg['chat']['type'] == 'group':
sendMessage(chat_id, myid['private'].decode('utf8').format(msg['from']['first_name'],msg['from']['id'],user))

repo: the-blue-alliance/the-blue-alliance | path: src/backend/common/queries/suggestion_query.py | language: Python | license: mit | size: 1,368 | score: 0.000731
from typing import Any, Generator, List, Optional
from backend.common.consts.suggestion_state import SuggestionState
from backend.common.models.account import Account
from backend.common.models.suggestion import Suggestion
from backend.common.queries.database_query import DatabaseQuery
from backend.common.tasklets import typed_tasklet
class SuggestionQuery(DatabaseQuery[List[Suggestion], None]):
DICT_CONVERTER = None
def __init__(
self,
review_state: SuggestionState,
author: Optional[Account] = None,
reviewer: Optional[Account] = None,
keys_only: bool = False,
) -> None:
super().__init__(
review_state=review_state,
author=author,
reviewer=reviewer,
keys_only=keys_only,
)
@typed_tasklet
def _query_async(
self,
review_state: SuggestionState,
author: Optional[Account] = None,
reviewer: Optional[Account] = None,
keys_only: bool = False,
) -> Generator[Any, Any, List[Suggestion]]:
params = [Suggestion.review_state == review_state]
if author:
params.append(Suggestion.author == author.key)
if reviewer:
params.append(Suggestion.reviewer == reviewer.key)
return (yield (Suggestion.query(*params).fetch_async(keys_only=keys_only)))

repo: hkawasaki/kawasaki-aio8-1 | path: lms/djangoapps/courseware/views.py | language: Python | license: agpl-3.0 | size: 32,876 | score: 0.002555
"""
Courseware views functions
"""
import logging
import urllib
import json
from collections import defaultdict
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core.context_processors import csrf
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_GET
from django.http import Http404, HttpResponse
from django.shortcuts import redirect
from edxmako.shortcuts import render_to_response, render_to_string
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.cache import cache_control
from django.db import transaction
from markupsafe import escape
from courseware import grades
from courseware.access import has_access
from courseware.courses import get_courses, get_course, get_studio_url, get_course_with_access, sort_by_announcement
from courseware.masquerade import setup_masquerade
from courseware.model_data import FieldDataCache
from .module_render import toc_for_course, get_module_for_descriptor, get_module
from courseware.models import StudentModule, StudentModuleHistory
from course_modes.models import CourseMode
from open_ended_grading import open_ended_notifications
from student.models import UserTestGroup, CourseEnrollment
from student.views import course_from_id, single_course_reverification_info
from util.cache import cache, cache_if_anonymous
from xblock.fragment import Fragment
from xmodule.modulestore import Location
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, NoPathToItem
from xmodule.modulestore.search import path_to_location
from xmodule.course_module import CourseDescriptor
from xmodule.tabs import CourseTabList, StaffGradingTab, PeerGradingTab, OpenEndedGradingTab
import shoppingcart
from microsite_configuration import microsite
log = logging.getLogger("edx.courseware")
template_imports = {'urllib': urllib}
def user_groups(user):
"""
TODO (vshnayder): This is not used. When we have a new plan for groups, adjust appropriately.
"""
if not user.is_authenticated():
return []
# TODO: Rewrite in Django
key = 'user_group_names_{user.id}'.format(user=user)
cache_expiration = 60 * 60 # one hour
# Kill caching on dev machines -- we switch groups a lot
group_names = cache.get(key)
if settings.DEBUG:
group_names = None
if group_names is None:
group_names = [u.name for u in UserTestGroup.objects.filter(users=user)]
cache.set(key, group_names, cache_expiration)
return group_names
@ensure_csrf_cookie
@cache_if_anonymous
def courses(request):
"""
Render "find courses" page. The course selection work is done in courseware.courses.
"""
courses = get_courses(request.user, request.META.get('HTTP_HOST'))
courses = sort_by_announcement(courses)
return render_to_response("courseware/courses.html", {'courses': courses})
def render_accordion(request, course, chapter, section, field_data_cache):
"""
Draws navigation bar. Takes current position in accordion as
parameter.
If chapter and section are '' or None, renders a default accordion.
course, chapter, and section are the url_names.
Returns the html string
"""
# grab the table of contents
user = User.objects.prefetch_related("groups").get(id=request.user.id)
request.user = user # keep just one instance of User
toc = toc_for_course(user, request, course, chapter, section, field_data_cache)
context = dict([
('toc', toc),
('course_id', course.id),
('csrf', csrf(request)['csrf_token']),
('due_date_display_format', course.due_date_display_format)
] + template_imports.items())
return render_to_string('courseware/accordion.html', context)
def get_current_child(xmodule):
"""
Get the xmodule.position's display item of an xmodule that has a position and
children. If xmodule has no position or is out of bounds, return the first child.
Returns None only if there are no children at all.
"""
if not hasattr(xmodule, 'position'):
return None
if xmodule.position is None:
pos = 0
else:
# position is 1-indexed.
pos = xmodule.position - 1
children = xmodule.get_display_items()
if 0 <= pos < len(children):
child = children[pos]
elif len(children) > 0:
# Something is wrong. Default to first child
child = children[0]
else:
child = None
return child
def redirect_to_course_position(course_module):
"""
Return a redirect to the user's current place in the course.
If this is the user's first time, redirects to COURSE/CHAPTER/SECTION.
If this isn't the users's first time, redirects to COURSE/CHAPTER,
and the view will find the current section and display a message
about reusing the stored position.
If there is no current position in the course or chapter, then selects
the first child.
"""
urlargs = {'course_id': course_module.id}
chapter = get_current_child(course_module)
if chapter is None:
# oops. Something bad has happened.
raise Http404("No chapter found when loading current position in course")
urlargs['chapter'] = chapter.url_name
if course_module.position is not None:
return redirect(reverse('courseware_chapter', kwargs=urlargs))
# Relying on default of returning first child
section = get_current_child(chapter)
if section is None:
raise Http404("No section found when loading current position in course")
urlargs['section'] = section.url_name
return redirect(reverse('courseware_section', kwargs=urlargs))
def save_child_position(seq_module, child_name):
"""
child_name: url_name of the child
"""
for position, c in enumerate(seq_module.get_display_items(), start=1):
if c.url_name == child_name:
# Only save if position changed
if position != seq_module.position:
seq_module.position = position
# Save this new position to the underlying KeyValueStore
seq_module.save()
def chat_settings(course, user):
"""
Returns a dict containing the settings required to connect to a
Jabber chat server and room.
"""
domain = getattr(settings, "JABBER_DOMAIN", None)
if domain is None:
log.warning('You must set JABBER_DOMAIN in the settings to '
'enable the chat widget')
return None
return {
'domain': domain,
# Jabber doesn't like slashes, so replace with dashes
'room': "{ID}_class".format(ID=course.id.replace('/', '-')),
'username': "{USER}@{DOMAIN}".format(
USER=user.username, DOMAIN=domain
),
# TODO: clearly this needs to be something other than the username
# should also be something that's not necessarily tied to a
# particular course
'password': "{USER}@{DOMAIN}".format(
USER=user.username, DOMAIN=domain
),
}
@login_required
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def index(request, course_id, chapter=None, section=None,
position=None):
"""
Displays courseware accordion and associated content. If course, chapter,
and section are all specified, renders the page, or returns an error if they
are invalid.
If section is not specified, displays the accordion opened to the right chapter.
If neither chapter or section are specified, redirects to user's most recent
chapter, or the first chapter if this is the user's first visit.
Arguments:
- request : HTTP request
- course_id : course id (str: ORG/course/URL_NAME)
- chapter : chapter url_name (str)
- section : section url_name (str)
- position : position in module, eg of <sequential> m

repo: cyruscyliu/diffentropy | path: week3/w3_lnn.py | language: Python | license: mit | size: 2,275 | score: 0
import numpy as np
from week3 import lnn, tools
import os
def f(x):
if 0 <= x < 0.25:
return float(0)
elif 0.25 <= x < 0.5:
return 16.0 * (x - 0.25)
elif 0.5 <= x < 0.75:
return -16.0 * (x - 0.75)
elif 0.75 < x <= 1:
return float(0)
else:
raise ValueError('value should in [0, 1], now is {0}'.format(x))
def MCMC_SD(size=100):
"""
Assume X~U(0, 1) with only 1 dimension, then generate lots of that X,
if acceptable, add it to the result set, if not, add last X.
"""
result = []
current = 0.5
for i in range(0, size):
next_ = np.random.rand()
u = np.random.rand()
if f(current) == float(0):
condition = 0
else:
condition = min(f(next_) / f(current), 1)
if u < condition:
# accept
result.append(next_)
current = next_
else:
# refuse
result.append(current)
return result
def main():
# change the size of samples
size = 100000
data = MCMC_SD(size)
data = [[data[i]] for i in range(size)]
data = np.array(data)
true = -1.276263936
# for k in (24):
k = 27
result = []
for tr in range(k, 40, 5):
try:
entropy = lnn.LNN_2_entropy(data, k=k, tr=tr, bw=0)
print entropy
result.append(entropy)
# print 'k={0} tr={1} error={2}'.format(k, tr, error)
except ValueError, e:
print 'k={0} tr={1} error={2}'.format(k, tr, e)
except IndexError, e:
print 'k={0} tr={1} error={2}'.format(k, tr, e)
result = np.array(result)
with open('w3_klnn-estimate-result', 'a') as f:
print result-true
RMSE = tools.getRootMeanSquaredError(result, true)
f.write(':'.join([str(size), str(k), str(RMSE)]))
f.write('\n')
f.close()
print 'write for k={0} done'.format(k)
return tools.getRootMeanSquaredError(result, true)
if __name__ == '__main__':
# if os.path.exists('w3_klnn-estimate-result'):
# os.remove('w3_klnn-estimate-result')
results = []
# repeat for 50 times
for i in range(0, 50):
results.append(main())
print tools.getMean(results)

repo: kamyu104/LeetCode | path: Python/binary-tree-tilt.py | language: Python | license: mit | size: 1,289 | score: 0
# Time: O(n)
# Space: O(n)
# Given a binary tree, return the tilt of the whole tree.
#
# The tilt of a tree node is defined as the absolute difference
# between the sum of all left subtree node values and
# the sum of all right subtree node values. Null node has tilt 0.
#
# The tilt of the whole tree is defined as the sum of all nodes' tilt.
#
# Example:
# Input:
# 1
# / \
# 2 3
# Output: 1
# Explanation:
# Tilt of node 2 : 0
# Tilt of node 3 : 0
# Tilt of node 1 : |2-3| = 1
# Tilt of binary tree : 0 + 0 + 1 = 1
# Note:
#
# The sum of node values in any subtree won't exceed
# the range of 32-bit integer.
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def findTilt(self, root):
"""
:type root: TreeNode
:rtype: int
"""
def postOrderTraverse(root, tilt):
if not root:
return 0, tilt
left, tilt = postOrderTraverse(root.left, tilt)
right, tilt = postOrderTraverse(root.right, tilt)
tilt += abs(left-right)
return left+right+root.val, tilt
return postOrderTraverse(root, 0)[1]

repo: PyQwt/PyQwt4 | path: configure/configure.py | language: Python | license: gpl-2.0 | size: 29,680 | score: 0.003605
#!/usr/bin/python
#
# Generate the build trees and Makefiles for PyQwt.
import compileall
import glob
import optparse
import os
import pprint
import re
import shutil
import sys
import traceback
class Die(Exception):
def __init__(self, info):
Exception.__init__(self, info)
# __init__()
# class Die
try:
required = 'Requires at least SIP-4.6 and its development tools.'
import sipconfig
except ImportError:
raise Die, required
if 0x040600 > sipconfig._pkg_config['sip_version']:
raise Die, required
del required
def get_pyqt_configuration(options):
"""Return the PyQt configuration for Qt3 or Qt4
"""
required = 'Requires at least PyQt-3.17 and its development tools.'
options.qwt = 'qwt4qt3'
options.iqt = 'iqt4qt3'
try:
import pyqtconfig as pyqtconfig
except ImportError:
raise Die, required
if 0x031100 > pyqtconfig._pkg_config['pyqt_version']:
raise Die, required
try:
configuration = pyqtconfig.Configuration()
except AttributeError:
raise Die, 'Check if SIP and PyQt have been installed properly.'
return configuration
# get_pyqt_configuration()
def compile_qt_program(name, configuration,
extra_defines=[],
extra_include_dirs=[],
extra_lib_dirs=[],
extra_libs=[],
):
"""Compile a simple Qt application.
name is the name of the single source file
configuration is the pyqtconfig.Configuration()
extra_defines is a list of extra preprocessor definitions
extra_include_dirs is a list of extra directories to search for headers
extra_lib_dirs is a list of extra directories to search for libraries
extra_libs is a list of extra libraries
"""
makefile = sipconfig.ProgramMakefile(
configuration, console=True, qt=True, warnings=True)
makefile.extra_defines.extend(extra_defines)
makefile.extra_include_dirs.extend(extra_include_dirs)
makefile.extra_lib_dirs.extend(extra_lib_dirs)
makefile.extra_libs.extend(extra_libs)
exe, build = makefile.build_command(name)
# zap a spurious executable
try:
os.remove(exe)
except OSError:
pass
os.system(build)
if not os.access(exe, os.X_OK):
return None
if sys.platform != 'win32':
exe = './' + exe
return exe
# compile_qt_program()
def copy_files(sources, directory):
"""Copy a list of files to a directory
"""
for source in sources:
shutil.copy2(source, os.path.join(directory, os.path.basename(source)))
# copy_files()
def fix_build_file(name, extra_sources, extra_headers, extra_moc_headers):
"""Extend the targets of a SIP build file with extra files
"""
keys = ('target', 'sources', 'headers', 'moc_headers')
sbf = {}
for key in keys:
sbf[key] = []
# Parse,
nr = 0
for line in open(name, 'r'):
nr += 1
if line[0] != '#':
eq = line.find('=')
if eq == -1:
raise Die, ('"%s\" line %d: Line must be in the form '
'"key = value value...."' % (name, nr)
)
key = line[:eq].strip()
value = line[eq+1:].strip()
if key in keys:
sbf[key].append(value)
# extend,
sbf['sources'].extend(extra_sources)
sbf['headers'].extend(extra_headers)
sbf['moc_headers'].extend(extra_moc_headers)
# and write.
output = open(name, 'w')
for key in keys:
if sbf[key]:
print >> output, '%s = %s' % (key, ' '.join(sbf[key]))
# fix_build_file()
def lazy_copy_file(source, target):
"""Lazy copy a file to another file:
- check for a SIP time stamp to skip,
- check if source and target do really differ,
- copy the source file to the target if they do,
- return True on copy and False on no copy.
"""
if not os.path.exists(target):
shutil.copy2(source, target)
return True
sourcelines = open(source).readlines()
targetlines = open(target).readlines()
# global length check
if len(sourcelines) != len(targetlines):
shutil.copy2(source, target)
return True
# skip a SIP time stamp
if (len(sourcelines) > 3
and sourcelines[3].startswith(' * Generated by SIP')
):
line = 4
else:
line = 0
# line by line check
while line < len(sourcelines):
if sourcelines[line] != targetlines[line]:
shutil.copy2(source, target)
return True
line = line + 1
return False
# lazy_copy_file()
def check_numarray(configuration, options, package):
"""See if the numarray extension has been installed.
"""
if options.disable_numarray:
options.excluded_features.append("-x HAS_NUMARRAY")
return options
try:
import numarray
# Try to find numarray/arrayobject.h.
numarray_inc = os.path.join(
configuration.py_inc_dir, "numarray", "arrayobject.h")
if os.access(numarray_inc, os.F_OK):
print "Found numarray-%s.\n" % numarray.__version__
options.extra_defines.append("HAS_NUMARRAY")
else:
print ("numarray has been installed, "
"but its headers are not in the standard location.\n"
"%s will be build without support for numarray.\n"
"(Linux users may have to install a development package)\n"
) % (package,)
raise ImportError
except ImportError:
options.excluded_features.append("-x HAS_NUMARRAY")
print ("Failed to import numarray: "
"%s will be build without support for numarray.\n"
) % (package,)
return options
# check_numarray()
def check_numeric(configuration, options, package):
"""See if the Numeric extension has been installed.
"""
if options.disable_numeric:
options.excluded_features.append("-x HAS_NUMERIC")
return options
try:
import Numeric
# Try to find Numeric/arrayobject.h.
numeric_inc = os.path.join(
configuration.py_inc_dir, "Numeric", "arrayobject.h")
if os.access(numeric_inc, os.F_OK):
print "Found Numeric-%s.\n" % Numeric.__version__
options.extra_defines.append("HAS_NUMERIC")
else:
print ("Numeric has been installed, "
"but its headers are not in the standard location.\n"
"%s will be build without support for Numeric.\n"
"(Linux users may have to install a development package)\n"
) % (package,)
raise ImportError
except ImportError:
options.excluded_features.append("-x HAS_NUMERIC")
print ("Failed to find Numeric2: "
"%s will be build without support for Numeric.\n"
) % (package,)
return options
# check_numeric()
def check_numpy(configuration, options, package):
"""See if the NumPy extension has been installed.
"""
if options.disable_numpy:
options.excluded_features.append("-x HAS_NUMPY")
return options
try:
import numpy
# Try to find numpy/arrayobject.h.
from numpy.distutils.misc_util import get_numpy_include_dirs
include_dirs = get_numpy_include_dirs()
for inc_dir in include_dirs:
header = os.path.join(inc_dir, 'numpy', 'arrayobject.h')
if os.access(header, os.F_OK):
break
else:
print ('NumPy has been installed, '
'but its headers are not in the standard location.\n'
'%s will be build without support for NumPy.\n'
'(Linux users may have to install a development package)\n'
) % (package,)
raise ImportError
print 'Found NumPy-%s.\n' % numpy.__version__
options.extra_defines.append('HAS_NUMPY')

repo: alexandrucoman/vbox-neutron-agent | path: neutron/tests/unit/api/v2/test_attributes.py | language: Python | license: apache-2.0 | size: 35,656 | score: 0
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import string
import testtools
import mock
from neutron.api.v2 import attributes
from neutron.common import exceptions as n_exc
from neutron.tests import base
class TestAttributes(base.BaseTestCase):
def _construct_dict_and_constraints(self):
"""Constructs a test dictionary and a definition of constraints.
:return: A (dictionary, constraint) tuple
"""
constraints = {'key1': {'type:values': ['val1', 'val2'],
'required': True},
'key2': {'type:string': None,
'required': False},
'key3': {'type:dict': {'k4': {'type:string': None,
'required': True}},
'required': True}}
dictionary = {'key1': 'val1',
'key2': 'a string value',
'key3': {'k4': 'a string value'}}
return dictionary, constraints
def test_is_attr_set(self):
data = attributes.ATTR_NOT_SPECIFIED
self.assertIs(attributes.is_attr_set(data), False)
data = None
self.assertIs(attributes.is_attr_set(data), False)
data = "I'm set"
self.assertIs(attributes.is_attr_set(data), True)
def test_validate_values(self):
msg = attributes._validate_values(4, [4, 6])
self.assertIsNone(msg)
msg = attributes._validate_values(4, (4, 6))
self.assertIsNone(msg)
msg = attributes._validate_values(7, [4, 6])
self.assertEqual("'7' is not in [4, 6]", msg)
msg = attributes._validate_values(7, (4, 6))
self.assertEqual("'7' is not in (4, 6)", msg)
def test_validate_not_empty_string(self):
msg = attributes._validate_not_empty_string(' ', None)
self.assertEqual(u"' ' Blank strings are not permitted", msg)
def test_validate_not_empty_string_or_none(self):
msg = attributes._validate_not_empty_string_or_none(' ', None)
self.assertEqual(u"' ' Blank strings are not permitted", msg)
msg = attributes._validate_not_empty_string_or_none(None, None)
self.assertIsNone(msg)
def test_validate_string_or_none(self):
msg = attributes._validate_not_empty_string_or_none('test', None)
self.assertIsNone(msg)
msg = attributes._validate_not_empty_string_or_none(None, None)
self.assertIsNone(msg)
def test_validate_string(self):
msg = attributes._validate_string(None, None)
self.assertEqual("'None' is not a valid string", msg)
# 0 == len(data) == max_len
msg = attributes._validate_string("", 0)
self.assertIsNone(msg)
# 0 == len(data) < max_len
msg = attributes._validate_string("", 9)
self.assertIsNone(msg)
# 0 < len(data) < max_len
msg = attributes._validate_string("123456789", 10)
self.assertIsNone(msg)
# 0 < len(data) == max_len
msg = attributes._validate_string("123456789", 9)
self.assertIsNone(msg)
# 0 < max_len < len(data)
msg = attributes._validate_string("1234567890", 9)
self.assertEqual("'1234567890' exceeds maximum length of 9", msg)
msg = attributes._validate_string("123456789", None)
self.assertIsNone(msg)
def test_validate_no_whitespace(self):
data = 'no_white_space'
result = attributes._validate_no_whitespace(data)
self.assertEqual(data, result)
self.assertRaises(n_exc.InvalidInput,
attributes._validate_no_whitespace,
'i have whitespace')
self.assertRaises(n_exc.InvalidInput,
attributes._validate_no_whitespace,
'i\thave\twhitespace')
for ws in string.whitespace:
self.assertRaises(n_exc.InvalidInput,
attributes._validate_no_whitespace,
'%swhitespace-at-head' % ws)
self.assertRaises(n_exc.InvalidInput,
attributes._validate_no_whitespace,
'whitespace-at-tail%s' % ws)
def test_validate_range(self):
msg = attributes._validate_range(1, [1, 9])
self.assertIsNone(msg)
msg = attributes._validate_range(5, [1, 9])
self.assertIsNone(msg)
msg = attributes._validate_range(9, [1, 9])
self.assertIsNone(msg)
msg = attributes._validate_range(1, (1, 9))
self.assertIsNone(msg)
msg = attributes._validate_range(5, (1, 9))
self.assertIsNone(msg)
msg = attributes._validate_range(9, (1, 9))
self.assertIsNone(msg)
msg = attributes._validate_range(0, [1, 9])
self.assertEqual("'0' is too small - must be at least '1'", msg)
msg = attributes._validate_range(10, (1, 9))
self.assertEqual("'10' is too large - must be no larger than '9'", msg)
msg = attributes._validate_range("bogus", (1, 9))
self.assertEqual("'bogus' is not an integer", msg)
msg = attributes._validate_range(10, (attributes.UNLIMITED,
attributes.UNLIMITED))
self.assertIsNone(msg)
msg = attributes._validate_range(10, (1, attributes.UNLIMITED))
self.assertIsNone(msg)
msg = attributes._validate_range(1, (attributes.UNLIMITED, 9))
self.assertIsNone(msg)
msg = attributes._validate_range(-1, (0, attributes.UNLIMITED))
self.assertEqual("'-1' is too small - must be at least '0'", msg)
msg = attributes._validate_range(10, (attributes.UNLIMITED, 9))
self.assertEqual("'10' is too large - must be no larger than '9'", msg)
def _test_validate_mac_address(self, validator, allow_none=False):
mac_addr = "ff:16:3e:4f:00:00"
msg = validator(mac_addr)
self.assertIsNone(msg)
mac_addr = "ffa:16:3e:4f:00:00"
msg = validator(mac_addr)
err_msg = "'%s' is not a valid MAC address"
self.assertEqual(err_msg % mac_addr, msg)
mac_addr = "123"
msg = validator(mac_addr)
self.assertEqual(err_msg % mac_addr, msg)
mac_addr = None
msg = validator(mac_addr)
if allow_none:
self.assertIsNone(msg)
else:
self.assertEqual(err_msg % mac_addr, msg)
mac_addr = "ff:16:3e:4f:00:00\r"
msg = validator(mac_addr)
self.assertEqual(err_msg % mac_addr, msg)
def test_validate_mac_address(self):
self._test_validate_mac_address(attributes._validate_mac_address)
def test_validate_mac_address_or_none(self):
self._test_validate_mac_address(
attributes._validate_mac_address_or_none, allow_none=True)
def test_validate_ip_address(self):
ip_addr = '1.1.1.1'
msg = attributes._validate_ip_address(ip_addr)
self.assertIsNone(msg)
ip_addr = '1111.1.1.1'
msg = attributes._validate_ip_address(ip_addr)
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
# Depending on platform to run UTs, this case might or might not be
# an equivalent to test_validate_ip_address_bsd.
ip_addr = '1' * 59
msg = attributes._validate_ip_address(ip_addr)
self.assertEqual("'%s' is not a valid IP address" % ip_addr, msg)
ip_addr = '1.1.1.1 has whitespace'
msg = attri

repo: JelleAalbers/plunc | path: plunc/intervals/base.py | language: Python | license: mit | size: 13,686 | score: 0.004749
import numpy as np
import logging
from plunc.common import round_to_digits
from plunc.exceptions import SearchFailedException, InsufficientPrecisionError, OutsideDomainError
from plunc.WaryInterpolator import WaryInterpolator
class IntervalChoice(object):
"""Base interval choice method class
"""
method = 'rank' # 'rank' or 'threshold'
threshold = float('inf')
precision_digits = 2
use_interval_cache = True
wrap_interpolator = True
background = 0
confidence_level = 0.9
max_hypothesis = 1e6
interpolator_log_domain = (-1, 3)
fixed_upper_limit = None
fixed_lower_limit = None
# Use only for testing:
forbid_exact_computation = False
def __init__(self, statistic, **kwargs):
self.statistic = statistic
for k, v in kwargs.items():
setattr(self, k, v)
self.cl = self.confidence_level
self.log = logging.getLogger(self.__class__.__name__)
if self.wrap_interpolator:
self.log.debug("Initializing interpolators")
if self.fixed_lower_limit is None:
self.low_limit_interpolator = WaryInterpolator(precision=10**(-self.precision_digits),
domain=self.interpolator_log_domain)
if self.fixed_upper_limit is None:
self.high_limit_interpolator = WaryInterpolator(precision=10**(-self.precision_digits),
domain=self.interpolator_log_domain)
# "Joints" of the interpolator must have better precision than required of the interpolator results
self.precision_digits += 1
# Dictionary holding "horizontal" intervals: interval on statistic for each precision and hypothesis.
self.cached_intervals = {}
def get_interval_on_statistic(self, hypothesis, precision_digits):
"""Returns the self.cl confidence level interval on self.statistic for the event rate hypothesis
The event rate here includes signal as well as identically distributed background.
Intervals are inclusive = closed.
"""
if self.use_interval_cache and (hypothesis, precision_digits) in self.cached_intervals:
return self.cached_intervals[(hypothesis, precision_digits)]
stat_values, likelihoods = self.statistic.get_values_and_likelihoods(hypothesis,
precision_digits=precision_digits)
likelihoods = likelihoods / np.sum(likelihoods)
# Score each statistic value (method-dependent)
stat_value_scores = self.score_stat_values(statistic_values=stat_values,
likelihoods=likelihoods,
hypothesis=hypothesis)
if self.method == 'threshold':
# Include all statistic values that score higher than some threshold
values_in_interval = stat_values[stat_value_scores > self.get_threshold()]
else:
# Include the values with highest score first, until we reach the desired confidence level
# TODO: wouldn't HIGHEST score first be more user-friendly?
ranks = np.argsort(stat_value_scores)
train_values_sorted = stat_values[ranks]
likelihoods_sorted = likelihoods[ranks]
# Find the last value to include
# (= first value that takes the included probability over the required confidence level)
sum_lhoods = np.cumsum(likelihoods_sorted)
last_index = np.where(sum_lhoods > self.cl)[0][0] # TODO: can fail?
values_in_interval = train_values_sorted[:last_index + 1]
# Limits = extreme values in the interval.
# This means we will be conservative if values_in_interval is not continuous.
low_lim, high_lim = values_in_interval.min(), values_in_interval.max()
# If we included all values given up until a boundary, don't set that boundary as a limit
if low_lim == np.min(stat_values):
low_lim = 0
if high_lim == np.max(stat_values):
high_lim = float('inf')
# Cache and return upper and lower limit on the statistic
if self.use_interval_cache:
self.cached_intervals[(hypothesis, precision_digits)] = low_lim, high_lim
return low_lim, high_lim
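    # Illustrative sketch (not part of plunc): the rank branch above can be exercised
    # on toy numbers to see which statistic values end up inside the interval, e.g.
    #
    #   import numpy as np
    #   stat_values = np.arange(5)
    #   likelihoods = np.array([0.05, 0.2, 0.5, 0.2, 0.05])
    #   order = np.argsort(likelihoods)[::-1]          # highest-probability first
    #   last = np.where(np.cumsum(likelihoods[order]) > 0.9)[0][0]
    #   stat_values[order][:last + 1]                  # -> four of the five values
    #
    # Note the module itself orders by `stat_value_scores` (see the TODO above), so
    # the ordering convention used here is only an assumption for illustration.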
def get_confidence_interval(self, value, precision_digits, search_region, debug=False):
"""Performs the Neynman construction to get confidence interval on event rate (mu),
if the statistic is observed to have value
"""
log_value = np.log10(value)
if self.wrap_interpolator:
# Try to interpolate the limit from limits computed earlier
self.log.debug("Trying to get values from interpolators")
try:
if self.fixed_lower_limit is None:
low_limit = 10**(self.low_limit_interpolator(log_value))
else:
low_limit = self.fixed_lower_limit
if self.fixed_upper_limit is None:
high_limit = 10**(self.high_limit_interpolator(log_value))
else:
high_limit = self.fixed_upper_limit
return low_limit, high_limit
except InsufficientPrecisionError:
self.log.debug("Insuffienct precision achieved by interpolators")
if log_value > self.interpolator_log_domain[1]:
self.log.debug("Too high value to dare to start Neyman construction... raising exception")
# It is not safe to do the Neyman construction: too high statistics
raise
self.log.debug("Log value %s is below interpolator log domain max %s "
"=> starting Neyman construction" % (log_value, self.interpolator_log_domain[1]))
except OutsideDomainError:
# The value is below the interpolator domain (e.g. 0 while the domain ends at 10**0 = 1)
pass
if self.forbid_exact_computation:
raise RuntimeError("Exact computation triggered")
def is_value_in(mu):
low_lim, high_lim = self.get_interval_on_statistic(mu + self.background,
precision_digits=precision_digits)
return low_lim <= value <= high_lim
# We first need one value in the interval to bound the limit searches
try:
true_point, low_search_bound, high_search_bound = search_true_instance(is_value_in,
*search_region,
precision_digits=precision_digits)
except SearchFailedException as e:
self.log.debug("Exploratory search could not find a single value in the interval! "
"This is probably a problem with search region, or simply a very extreme case."
"Original exception: %s" % str(e))
if is_value_in(0):
self.log.debug("Oh, ok, only zero is in the interval... Returning (0, 0)")
return 0, 0
return 0, float('inf')
self.log.debug(">>> Exploratory search completed: %s is in interval, "
"search for boundaries in [%s, %s]" % (true_point, low_search_bound, high_search_bound))
if self.fixed_lower_limit is not None:
low_limit = self.fixed_lower_limit
elif is_value_in(low_search_bound):
# If mu=0 can't be excluded, we're apparently only setting an upper limit (mu <= ..)
low_limit = 0
else:
low_limit = bisect_search(is_value_in, low_search_bound, true_point, precision_digits=precision_digits)
self.log.debug(">>> Low limit found at %s" % low_limit)
if self.fixed_upper_limit is not None:
low_limit = self.
|
serverdensity/sdbot
|
limbo/settings/__init__.py
|
Python
|
mit
| 431
| 0.00464
|
from ..utils import getif
def init_config():
config = {}
getif(config, "token", "SLACK_TOKEN")
getif(config, "loglevel", "LIMBO_LOGLEVEL")
getif(config, "logfile", "LIMBO_LOGFILE")
getif(config, "log
|
format", "LIMBO_LOGFORMAT
|
")
getif(config, "plugins", "LIMBO_PLUGINS")
getif(config, "heroku", "LIMBO_ON_HEROKU")
getif(config, "beepboop", "BEEPBOOP_TOKEN")
return config
CONFIG = init_config()
|
LucaBongiorni/openlte
|
LTE_fdd_dl_file_gen/python/LTE_fdd_dl_file_gen.py
|
Python
|
agpl-3.0
| 1,147
| 0.004359
|
#!/usr/bin/env python
from gnuradio import gr
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import LTE_fdd_dl_fg
import sys
class LTE_fdd_dl_file_gen (gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
usage = "usage: %prog [options] file N_frames N_ant N_id_cell bandwidth mcc mnc"
parser=OptionParser(option_class=eng_option, usage=usage)
# Add options here
(options, args) = parser.parse_args()
if len(args) != 7:
parser.print_help()
sys.exit(1)
output_filename = args[0]
N_frames = args[1]
N_ant = args[2]
N_id_cell = args[3]
bandwidth = args[4]
mcc = args[5]
mnc = args[6]
# Build flow graph
self.samp_buf = LTE_fdd_dl_fg.samp_buf(int(N_frames), int(N_ant), int(N_id_cell), float(bandwidth), mcc, mnc)
self.fsink = gr.file_sink(gr.sizeof_char, output_filename)
self.connect(self.samp_buf, self.fsink)
if __name__ == '__main__':
tb = LTE_fdd_dl_file_gen()
try:
tb.run()
except KeyboardInterrupt:
pass
|
werkt/bazel
|
src/test/py/bazel/bazel_windows_cpp_test.py
|
Python
|
apache-2.0
| 32,128
| 0.001276
|
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
import unittest
from src.test.py.bazel import test_base
class BazelWindowsCppTest(test_base.TestBase):
def createProjectFiles(self):
self.CreateWorkspaceWithDefaultRepos('WORKSPACE')
self.ScratchFile('BUILD', [
'package(',
' default_visibility = ["//visibility:public"],',
' features=["windows_export_all_symbols"]',
')',
'',
'cc_library(',
' name = "A",',
' srcs = ["a.cc"],',
' hdrs = ["a.h"],',
' copts = ["/DCOMPILING_A_DLL"],',
' features = ["no_windows_export_all_symbols"],',
')',
'',
'cc_library(',
' name = "B",',
' srcs = ["b.cc"],',
' hdrs = ["b.h"],',
' deps = [":A"],',
' copts = ["/DNO_DLLEXPORT"],',
')',
'',
'cc_binary(',
' name = "C",',
' srcs = ["c.cc"],',
' deps = [":A", ":B" ],',
' linkstatic = 0,',
')',
])
self.ScratchFile('a.cc', [
'#include <stdio.h>',
'#include "a.h"',
'int a = 0;',
'void hello_A() {',
' a++;',
' printf("Hello A, %d\\n", a);',
'}',
])
self.ScratchFile('b.cc', [
'#include <stdio.h>',
'#include "a.h"',
'#include "b.h"',
'void hello_B() {',
' hello_A();',
' printf("Hello B\\n");',
'}',
])
header_temp = [
'#ifndef %{name}_H',
'#define %{name}_H',
'',
'#if NO_DLLEXPORT',
' #define DLLEXPORT',
'#elif COMPILING_%{name}_DLL',
' #define DLLEXPORT __declspec(dllexport)',
'#else',
' #define DLLEXPORT __declspec(dllimport)',
'#endif',
'',
'DLLEXPORT void hello_%{name}();',
'',
'#endif',
]
self.ScratchFile('a.h',
[line.replace('%{name}', 'A') for line in header_temp])
self.ScratchFile('b.h',
[line.replace('%{name}', 'B') for line in header_temp])
c_cc_content = [
'#include <stdio.h>',
'#include "a.h"',
'#include "b.h"',
'',
'void hello_C() {',
' hello_A();',
' hello_B();',
' printf("Hello C\\n");',
'}',
'',
'int main() {',
' hello_C();',
' return 0;',
'}',
]
self.ScratchFile('c.cc', c_cc_content)
self.ScratchFile('lib/BUILD', [
'cc_library(',
' name = "A",',
' srcs = ["dummy.cc"],',
' features = ["windows_export_all_symbols"],',
' visibility = ["//visibility:public"],',
')',
])
self.ScratchFile('lib/dummy.cc', ['void dummy() {}'])
self.ScratchFile('main/main.cc', c_cc_content)
def getBazelInfo(self, info_key):
exit_code, stdout, stderr = self.RunBazel(['info', info_key])
self.AssertExitCode(exit_code, 0, stderr)
return stdout[0]
def testBuildDynamicLibraryWithUserExportedSymbol(self):
self.createProjectFiles()
bazel_bin = self.getBazelInfo('bazel-bin')
    # //:A exports symbols by itself using __declspec(dllexport), so it doesn't
# need Bazel to export symbols using DEF file.
exit_code, _, stderr = self.RunBazel(
['build', '//:A', '--output_groups=dynamic_library'])
self.AssertExitCode(exit_code, 0, stderr)
# TODO(pcloudy): change suffixes to .lib and .dll after making DLL
# extensions correct on Windows.
import_library = os.path.join(bazel_bin, 'A.if.lib')
shared_library = os.path.join(bazel_bin, 'A.dll')
empty_def_file = os.path.join(bazel_bin, 'A.gen.empty.def')
self.assertTrue(os.path.exists(import_library))
self.assertTrue(os.path.exists(shared_library))
# An empty DEF file should be generated for //:A
self.assertTrue(os.path.exists(empty_def_file))
def testBuildDynamicLibraryWithExportSymbolFeature(self):
self.createProjectFiles()
bazel_bin = self.getBazelInfo('bazel-bin')
    # //:B doesn't export symbols by itself, so it needs Bazel to export symbols
# using DEF file.
exit_code, _, stderr = self.RunBazel(
['build', '//:B', '--output_groups=dynamic_library'])
self.AssertExitCode(exit_code, 0, stderr)
# TODO(pcloudy): change suffixes to .lib and .dll after making DLL
# extensions correct on Windows.
import_library = os.path.join(bazel_bin, 'B.if.lib')
shared_library = os.path.join(bazel_bin, 'B.dll')
def_file = os.path.join(bazel_bin, 'B.gen.def')
self.assertTrue(os.path.exists(import_library))
self.assertTrue(os.path.exists(shared_library))
# DEF file should be generated for //:B
self.assertTrue(os.path.exists(def_file))
# Test build //:B if windows_export_all_symbols feature is disabled by
# no_windows_export_all_symbols.
exit_code, _, stderr = self.RunBazel([
'build', '//:B', '--output_groups=dynamic_library',
'--features=no_windows_export_all_symbols'
])
self.AssertExitCode(exit_code, 0, stderr)
import_library = os.path.join(bazel_bin, 'B.if.lib')
shared_library = os.path.join(bazel_bin, 'B.dll')
empty_def_file = os.path.join(bazel_bin, 'B.gen.empty.def')
self.assertTrue(os.path.exists(import_library))
self.assertTrue(os.path.exists(shared_library))
# An empty DEF file should be generated for //:B
self.assertTrue(os.path.exists(empty_def_file))
self.AssertFileContentNotContains(empty_def_file, 'hello_B')
def testBuildCcBinaryWithDependenciesDynamicallyLinked(self):
self.createProjectFiles()
bazel_bin = self.getBazelInfo('bazel-bin')
    # Since linkstatic=0 is specified for //:C, its dependencies should be
# dynamically linked.
exit_code, _, stderr = self.RunBazel(['build', '//:C'])
self.AssertExitCode(exit_code, 0, stderr)
# TODO(pcloudy): change suffixes to .lib and .dll after making DLL
# extensions correct on
# Windows.
# a_import_library
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'A.if.lib')))
# a_shared_library
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'A.dll')))
# a_def_file
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'A.gen.empty.def')))
# b_import_library
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'B.if.lib')))
# b_shared_library
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'B.dll')))
# b_def_file
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'B.gen.def')))
# c_exe
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'C.exe')))
def testBuildCcBinaryFromDifferentPackage(self):
self.createProjectFiles()
self.ScratchFile('main/BUILD', [
'cc_binary(',
' name = "main",',
' srcs = ["main.cc"],',
' deps = ["//:B"],',
' linkstatic = 0,'
')',
])
bazel_bin = self.getBazelInfo('bazel-bin')
exit_code, _, stderr = self.RunBazel(['build', '//main:main'])
self.AssertExitCode(exit_code, 0, stderr)
# Test if A.dll and B.dll are copied to the directory of main.exe
main_bin = os.path.join(bazel_bin, 'main/main.exe')
self.assertTrue(os.path.exists(main_bin))
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'main/A.dll')))
self.assertTrue(os.path.exists(os.path.join(bazel_bin, 'main/B.dll')))
# Run the binary to see if it runs successfully
exit_code, stdout, stderr = self.RunProgram([main_bin])
self.AssertExitCode(exit_code, 0, stderr)
self.assertEqual(
|
jlzirani/cache-storing
|
store.py
|
Python
|
gpl-3.0
| 1,439
| 0.039611
|
from datetime import datetime
class store:
def __init__(self, delta):
self.delta = delta
self.dataDict = {}
self.ordList = []
def store(self, data, info):
ob2store = [data,info]
if data in self.dataDict:
try:
self.ordList.remove(self.dataDict[data])
except ValueError:
pass
self.dataDict[data] = ob2store
self.ordList.append(ob2store)
def flush(self):
self.ordList = []
self.dataDict = {}
def getIndex(self, pivot):
endIndex = len(self.ordList)
if endIndex == 0:
return 0
(d0,i0) = self.ordList[0]
(dE,iE) = self.ordList[-1]
if i0 > pivot:
return 0
if iE < pivot:
return endIndex
return self.getIndexRec(pivot, 0, endIndex)
def getIndexRec(self, pivot, startIndex, endIndex):
if startIndex == endIndex:
return startIndex
        median = (endIndex - startIndex) // 2 + startIndex  # integer division keeps the index an int
(data, info) = self.ordList[median]
if info > pivot:
return self.getIndexRec( pivot, startIndex, max(0,median-1) )
if info < pivot:
return self.getIndexRec( pivot, min(median+1,endIndex), endIndex )
return median
def getData(self, ref):
self.ordList = self.ordList[self.getIndex(ref-self.delta):]
if len(self.ordList) == 0:
self.dataDict = {}
return [ x for (x,y) in self.ordList ]
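# Usage sketch (illustrative, not part of the original file): `info` is assumed to
# be a monotonically increasing key such as a timestamp, so getData(now) drops
# every entry older than now - delta.
if __name__ == "__main__":
    s = store(delta=10)
    for t, value in enumerate(["a", "b", "c"]):   # stored with info = 0, 1, 2
        s.store(value, t)
    print(s.getData(12))                          # keeps info >= 12 - 10, prints ['c']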
|
chengduoZH/Paddle
|
python/paddle/fluid/dygraph/parallel_helper.py
|
Python
|
apache-2.0
| 1,499
| 0
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ..layers import collective
from ..framework import Parameter
__parallel_ctx__clz__ = None
def _is_data_parallel_mode():
global __parallel_ctx__clz__
return __parallel_ctx__clz__ is not None and int(
os.getenv("PADDLE_TRAINERS_NUM", "1")) > 1
def _set_parallel_ctx(nccl_parallel_context):
global __parallel_ctx__clz__
assert __parallel_ctx__clz__ is None, \
"ParallelContext can only be initialize
|
d once."
__parallel_ctx__clz__ = nccl_parallel_context
def _init_parallel_ctx():
global __parallel_ctx__clz__
assert __parallel_ctx__clz__ is not None, \
"ParallelContext should be initialized."
__parallel_ctx__clz__.init()
def _broadcast_parameters(parameters):
for param in parameters:
if isinstance(param, Parameter) and param.trainable:
collective._broadcast(param, 0, sync_mode=True)
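# Illustrative note (not part of the module; variable names are hypothetical):
# data-parallel mode is reported only when a parallel context has been registered
# and more than one trainer is configured, e.g.
#
#   os.environ["PADDLE_TRAINERS_NUM"] = "2"
#   _set_parallel_ctx(nccl_ctx)        # nccl_ctx obtained from the dygraph parallel API
#   assert _is_data_parallel_mode()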
|
krull/docker-zenoss4
|
init_fs/usr/local/zenoss/ZenPacks/ZenPacks.zenoss.ZenJMX-3.12.1.egg/ZenPacks/zenoss/ZenJMX/zenjmx.py
|
Python
|
gpl-3.0
| 25,284
| 0.006051
|
#! /usr/bin/env python
##############################################################################
#
# Copyright (C) Zenoss, Inc. 2008, 2009, all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
##############################################################################
__doc__ = """Monitor Java Management eXtension (JMX) mbeans
Dispatches calls to a java server process to collect JMX values for a device.
"""
import logging
import sys
import os
import socket
import Globals
import zope
from twisted.internet.defer import Deferred
from twisted.web import xmlrpc
from twisted.internet.protocol import ProcessProtocol
from twisted.internet import defer, reactor, error
from Products.ZenCollector.daemon import CollectorDaemon
from Products.ZenCollector.interfaces import ICollectorPreferences,\
IDataService,\
IEventService,\
IScheduledTask
from Products.ZenCollector.tasks import SimpleTaskFactory,\
SimpleTaskSplitter,\
TaskStates
from Products.ZenEvents import Event
from Products.ZenHub.XmlRpcService import XmlRpcService
from Products.ZenUtils.NJobs import NJobs
from Products.ZenUtils.Utils import unused
from Products.ZenUtils.observable import ObservableMixin
import ZenPacks.zenoss.ZenJMX
from ZenPacks.zenoss.ZenJMX.services.ZenJMXConfigService import JMXDataSourceConfig
unused(JMXDataSourceConfig)
log = logging.getLogger( "zen.zenjmx" )
DEFAULT_HEARTBEAT_TIME = 5 * 60
WARNING_EVENT = dict(eventClass='/Status/JMX', component='JMX',
device=socket.getfqdn(), severity=Event.Warning)
class ZenJMXPreferences(object):
"""
Configuration values for the zenjmx daemon.
"""
zope.interface.implements(ICollectorPreferences)
def __init__(self):
"""
Construct a new ZenJMXPreferences instance and provide default
values for needed attributes.
"""
self.collectorName = "zenjmx"
self.defaultRRDCreateCommand = None
self.cycleInterval = 5 * 60 # seconds
self.configCycleInterval = 20 # minutes
self.options = None
# the configurationService attribute is the fully qualified class-name
# of our configuration service that runs within ZenHub
self.configurationService = 'ZenPacks.zenoss.ZenJMX.services.ZenJMXConfigService'
def buildOptions(self, parser):
parser.add_option('-j','--zenjmxjavaport',
dest='zenjmxjavaport',
default=9988,
type='int',
help='Port for zenjmxjava process; default 9988. '+\
'Tries 5 consecutive ports if there is a conflict',
)
parser.add_option('--concurrentJMXCalls',
dest='concurrentJMXCalls',
action='store_true', default=False,
help='Enable concurrent calls to a JMX server'
)
parser.add_option('--parallel', dest='parallel',
default=200, type='int',
help='Number of devices to collect from at one time'
)
parser.add_option('--cycleInterval', dest='cycleInterval',
default=300, type='int',
help='Cycle time, in seconds, to run collection'
)
parser.add_option('--portRange', dest='portRange',
default=5, type='int',
                          help='Number of ports to attempt when starting ' +
'Java jmx client')
parser.add_option('--javaheap',
dest="maxHeap",type="int", default=512,
help="Max heap, in MB, to use for java process")
def postStartup(self):
pass
def getJavaClientArgs(self):
args = None
if self.options.configfile:
args = ('--configfile', self.options.configfile)
if self.options.logseverity:
args = args + ('-v', str(self.options.logseverity))
if self.options.concurrentJMXCalls:
args = args + ('-concurrentJMXCalls', )
return args
def getStartingPort(self):
return self.options.zenjmxjavaport
def getAttemptedPortRange(self):
return self.options.portRange
class IZenJMXJavaClient(zope.interface.Interface):
listenPort = zope.interface.Attribute("listenPort")
class ZenJMXJavaClientImpl(ProcessProtocol):
"""
Protocol to control the zenjmxjava process
"""
zope.interface.implements(IZenJMXJavaClient)
def __init__(
self,
args,
cycle=True,
zenjmxjavaport=9988,
maxHeap=512
):
"""
Initializer
@param args: argument list for zenjmx
@type args: list of strings
@param cycle: whether to run once or repeat
@type cycle: boolean
@param zenjmxjavaport: port on which java process
will listen for queries
@type zenjmxjavaport: int
"""
self.deferred = Deferred()
self.stopCalled = False
self.process = None
self.outReceived = sys.stdout.write
self.errReceived = sys.stderr.write
self.log = logging.getLogger('zen.ZenJMXJavaClient')
self.args = args
self.cycle = cycle
self.listenPort = zenjmxjavaport
self._maxHeap = maxHeap
self.restartEnabled = False
self._eventService = zope.component.queryUtility(IEventService)
self._preferences = zope.component.queryUtility(ICollectorPreferences,
'zenjmx')
def processEnded(self, reason):
"""
Twisted reactor function called when the process ends.
@param reason: message from the process
@type reason: string
"""
self.process = None
if not self.stopCalled:
procEndEvent = {
'eventClass': '/Status/JMX',
'summary': 'zenjmxjava ended unexpectedly: %s'\
% reason.getErrorMessage(),
'severity': Event.Warning,
'component': 'zenjmx',
'device': self._preferences.options.monitor,
}
self._eventService.sendEvent(procEndEvent)
self.log.warn('processEnded():zenjmxjava process ended %s'
% reason)
if self.deferred:
msg = reason.getErrorMessage()
exitCode = reason.value.exitCode
if exitCode == 10:
msg = 'Could not start up Java web server, '+\
'possible port conflict'
self.deferred.callback((exitCode,msg))
self.deferred = None
elif self.restartEnabled:
self.log.info('processEnded():restarting zenjmxjava')
reactor.callLater(1, self.run)
def stop(self):
"""
Twisted reactor function called when we are shutting down.
"""
import signal
self.log.info('stop():stopping zenjmxjava')
self.stopCalled = True
if not self.process:
self.log.debug('stop():no zenjmxjava process to stop')
return
try:
self.process.signalProcess(signal.SIGKILL)
except error.ProcessExitedAlready:
self.log.info('stop():zenjmxjava process already exited')
pass
try:
self.process.loseConnection()
except Exception:
pass
self.process = None
def connecti
|
EvanBianco/pickr
|
main.py
|
Python
|
apache-2.0
| 13,664
| 0.009075
|
import webapp2
from jinja2 import Environment, FileSystemLoader
from os.path import dirname, join
import os
import json
import base64
import hashlib
import StringIO
from google.appengine.api import users
import numpy as np
if not os.environ.get('SERVER_SOFTWARE','').startswith('Development'):
import PIL
import matplotlib.pyplot as plt
import matplotlib.cm as cm
#from scipy.ndimage.morphology import grey_dilation
import Image
local = False
else:
local = True
from lib_db import SeismicObject, PickrParent
# Jinja2 environment to load templates.
env = Environment(loader=FileSystemLoader(join(dirname(__file__),
'templates')))
# Data store set up.
db_parent = PickrParent.all().get()
if not db_parent:
db_parent = PickrParent()
db_parent.put()
class CommentHandler(webapp2.RequestHandler):
def get(self):
index = int(self.request.get("index"))
        data = SeismicObject.all().ancestor(db_parent).order("-date")
data = data.fetch(1000)[index]
self.response.write(json.dumps(data.comments))
def post(self):
index = int(self.request.get("index"))
comment = int(self.request.get("comment"))
        data = SeismicObject.all().ancestor(db_parent).order("-date")
data = data.fetch(1000)[index]
comments = data.comments
comments.append(comment)
data.comments = comments
data.put()
self.response.write(comment)
class VoteHandler(webapp2.RequestHandler):
def get(self):
index = int(self.request.get("index"))
data = SeismicObject.all().ancestor(db_parent).order("-date")
data = data.fetch(1000)[index]
self.response.write(data.votes)
def post(self):
index = int(self.request.get("index"))
vote = int(self.request.get("vote"))
data = SeismicObject.all().ancestor(db_parent).order("-date")
data = data.fetch(1000)[index]
if vote > 0:
            vote = 1
else:
            vote = -1
data.votes += vote
data.put()
self.response.write(data.votes)
class MainPage(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if not user:
login_url = users.create_login_url('/')
template = env.get_template("main.html")
html = template.render(login_url=login_url)
self.response.out.write(html)
else:
logout_url = users.create_logout_url('/')
login_url = None
email_hash = hashlib.md5(user.email()).hexdigest()
self.redirect('/pickr')
class ResultsHandler(webapp2.RequestHandler):
def get(self):
        # connect the dots using one-dimensional linear interpolation: np.interp()
def regularize(xarr, yarr, pxi, pxf):
# connect the dots of the horizon spanning the image
# pxi : is the first x pos.
# pyi : is the first y pos., and so on
horx = np.arange(pxi,pxf+1)
hory = np.interp(horx, xarr, yarr)
return horx, hory
# append all horizons into one big file
all_picks_x = np.array([])
all_picks_y = np.array([])
data = SeismicObject().all().fetch(1000)
count = len(data)
if not local:
fig = plt.figure(figsize=(15,8))
ax = fig.add_axes([0,0,1,1])
# Load the image to a variable
im = Image.open('brazil_ang_unc.png')
px, py = im.size
# plot the seismic image first
# im = plt.imshow(im)
            # Make a modified version of the hot colormap with some transparency
# in the bottom of the colormap.
hot = cm.hot
hot.set_under(alpha = 0.0) #anything that has value less than 0.5 goes transparent
for user in data:
try:
picks = np.array(json.loads(user.picks))
hx, hy = regularize(picks[:,0], picks[:,1], pxi, pyf)
all_picks_x = np.concatenate((all_picks_x,hx))
all_picks_y = np.concatenate((all_picks_y,hy))
ax.plot(picks[:,0], picks[:,1], 'g-', alpha=0.5, lw=2)
m = 1
x1, x2 = np.amin(all_picks_x), np.amax(all_picks_x)
y1, y2 = np.amin(all_picks_y),np.amax(all_picks_y)
heat_extent_im = [x1,x2,y2,y1] #flip extents of heatmap for image plot
# do 2d histogram to display heatmap
binsizex = m
binsizey = m
heatmap, yedges, xedges = np.histogram2d(all_picks_y, all_picks_x,
bins= ((y2-y1)/binsizey,(x2-x1)/binsizex),
range =np.array([[y1, y2],[x1, x2]])
)
# do dilation of picks in heatmap
from mmorph import dilate
n = 3 #should be odd integer
B = np.array((n,n)).astype(int)
heatmap_dil = dilate(heatmap, B=B)
#fig = plt.figure(figsize=(15,8))
#ax = fig.add_axes([0, 0, 1, 1])
heatim = ax.imshow(heatmap_dil,
cmap=cm.hot,
extent=heat_extent_im,
alpha=0.75)
heatim.set_clim(0.5, np.amax(heatmap))
ax.set_ylim((py,0))
ax.set_xlim((0,px))
#ax.invert_yaxis()
ax.set_xticks([])
ax.set_yticks([])
ax.set_frame_on(False)
except:
pass
output = StringIO.StringIO()
plt.savefig(output)
image = base64.b64encode(output.getvalue())
user = users.get_current_user()
# User should exist, so this should fail otherwise.
logout_url = users.create_logout_url('/')
login_url = None
email_hash = hashlib.md5(user.email()).hexdigest()
template = env.get_template("results.html")
html = template.render(count=count,
logout_url=logout_url,
email_hash=email_hash,
image=image)
self.response.write(html)
else:
with open("alaska.b64", "r") as f:
image = f.read()
user = users.get_current_user()
# User should exist, so this should fail otherwise.
logout_url = users.create_logout_url('/')
login_url = None
email_hash = hashlib.md5(user.email()).hexdigest()
template = env.get_template("results.html")
html = template.render(count=count,
logout_url=logout_url,
email_hash=email_hash,
image=image)
self.response.write(html)
# Make composite image
class AboutHandler(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if user:
logout_url = users.create_logout_url('/')
login_url = None
email_hash = hashlib.md5(user.email()).hexdigest()
else:
logout_url = None
login_url = users.create_login_url('/')
email_hash = ''
# Write the page.
template = env.get_template('about.html')
html = template.render(logout_url=logout_url,
login_u
|
safwanrahman/kuma
|
kuma/search/tests/test_types.py
|
Python
|
mpl-2.0
| 1,629
| 0
|
from elasticsearch_dsl import query
from kuma.core.tests import eq_, ok_
from kuma.wiki.search import WikiDocumentType
from . import ElasticTestCase
class WikiDocumentTypeTests(ElasticTestCase):
fixtures = ElasticTestCase.fixtures + ['wiki/documents.json']
def test_get_excerpt_strips_html(self):
self.refresh()
results = WikiDocumentType.search().query('match', content='audio')
ok_(results.count() > 0)
for doc in results.execute():
excerpt = doc.get_excerpt()
ok_('audio' in excerpt)
ok_('<strong>' not in excerpt)
def test_current_locale_results(self):
self.refresh()
results = (WikiDocumentType.search()
.query(query.Match(title='article') |
query.Match(content='article'))
.filter('term', locale='en-US'))
for doc in results.execute():
eq_('en-US', doc.locale)
def test_get_excerpt_uses_summary(self):
self.refresh()
results = WikiDocumentType.search().query('match', content='audio')
ok_(results.count() > 0)
for doc in results.execute():
            excerpt = doc.get_excerpt()
ok_('the word for tough things' in excerpt)
ok_('extra content' not in excerpt)
def test_hidden_slugs_get_indexable(self):
self.refresh()
        title_list = WikiDocumentType.get_indexable().values_list('title',
flat=True)
ok_('User:jezdez' not in title_list)
|
thomasperrot/MTGTrader
|
mtg/config/urls.py
|
Python
|
mit
| 915
| 0
|
"""config URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
    url(r'^admin/', admin.site.urls),
url(r'^cards/', include('cards.urls')),
url(r'^tournaments/', include('tournaments.urls')),
url(r'^stats/', include('stats.urls'))
]
|
dwavesystems/dimod
|
dimod/bqm/construction.py
|
Python
|
apache-2.0
| 4,145
| 0.000241
|
# Copyright 2020 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Sequence, Set
from dimod.bqm.adjvectorbqm import AdjVectorBQM
from dimod.core.bqm import BQM
from dimod.vartypes import as_vartype
__all__ = ['as_bqm']
def as_bqm(*args, cls=None, copy=False):
"""Convert the input to a binary quadratic model.
Converts the following input formats to a binary quadratic model (BQM):
as_bqm(vartype)
Creates an empty binary quadratic model.
as_bqm(bqm)
Creates a BQM from another BQM. See `copy` and `cls` kwargs below.
as_bqm(bqm, vartype)
Creates a BQM from another BQM, changing to the appropriate
        `vartype` if necessary. See `copy` and `cls` kwargs below.
as_bqm(n, vartype)
Creates a BQM with `n` variables, indexed linearly from zero,
setting all biases to zero.
as_bqm(quadratic, vartype)
Creates a BQM from quadratic biases given as a square array_like_
        or a dictionary of the form `{(u, v): b, ...}`. Note that when
formed with SPIN-variables, biases on the diagonal are added to the
offset.
as_bqm(linear, quadratic, vartype)
Creates a BQM from linear and quadratic biases, where `linear` is a
one-dimensional array_like_ or a dictionary of the form
`{v: b, ...}`, and `quadratic` is a square array_like_ or a
dictionary of the form `{(u, v): b, ...}`. Note that when formed
with SPIN-variables, biases on the diagonal are added to the offset.
as_bqm(linear, quadratic, offset, vartype)
Creates a BQM from linear and quadratic biases, where `linear` is a
one-dimensional array_like_ or a dictionary of the form
`{v: b, ...}`, and `quadratic` is a square array_like_ or a
dictionary of the form `{(u, v): b, ...}`, and `offset` is a
numerical offset. Note that when formed with SPIN-variables, biases
on the diagonal are added to the offset.
Args:
*args:
See above.
cls (type/list, optional):
Class of the returned BQM. If given as a list,
the returned BQM is of one of the types in the list. Default is
:class:`.AdjVectorBQM`.
copy (bool, optional, default=False):
If False, a new BQM is only constructed when
necessary.
Returns:
A binary quadratic model.
.. _array_like: https://numpy.org/doc/stable/user/basics.creation.html
"""
if cls is None:
if isinstance(args[0], BQM):
cls = type(args[0])
else:
cls = AdjVectorBQM
elif isinstance(cls, (Sequence, Set)): # want Collection, but not in 3.5
classes = tuple(cls)
if not classes:
raise ValueError("cls kwarg should be a type or a list of types")
if type(args[0]) in classes:
cls = type(args[0])
else:
# otherwise just pick the first one
cls = classes[0]
if isinstance(args[0], cls) and not copy:
# this is the only case (currently) in which copy matters
if len(args) == 1:
return args[0]
elif len(args) == 2:
bqm, vartype = args
if bqm.vartype is as_vartype(vartype):
return bqm
# otherwise we're doing a copy
# otherwise we don't have a well-formed bqm input so pass off the check
# to cls(*args)
return cls(*args)
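# Usage sketch (illustrative, not part of the original module), following the call
# signatures documented in the docstring above:
if __name__ == "__main__":
    bqm = as_bqm({'a': -1.0, 'b': 1.0}, {('a', 'b'): 0.5}, 'SPIN')
    same = as_bqm(bqm)                # returned unchanged because copy=False
    forced = as_bqm(bqm, copy=True)   # a new object of the same class
    assert same is bqm and forced is not bqm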
|
luetgendorf/Espruino
|
boards/ESP32.py
|
Python
|
mpl-2.0
| 4,723
| 0.035994
|
#!/bin/false
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <gw@pur3.co.uk>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# This file contains information for a specific board - the available pins, and where LEDs,
# Buttons, and other in-built peripherals are. It is used to build documentation as well
# as various source and header files for Espruino.
# ----------------------------------------------------------------------------------------
import pinutils;
info = {
'name' : "ESP32",
'espruino_page_link' : 'ESP32',
'default_console' : "EV_SERIAL1",
'default_console_baudrate' : "115200",
'variables' : 5000,
'binary_name' : 'espruino_%v_esp32.bin',
'build' : {
'optimizeflags' : '-Og',
'libraries' : [
'ESP32',
'NET',
'GRAPHICS',
'CRYPTO',
'TLS',
'TELNET',
'NEOPIXEL',
'FILESYSTEM',
'FLASHFS'
],
'makefile' : [
'DEFINES+=-DESP_PLATFORM -DESP32=1'
]
}
};
chip = {
'part' : "ESP32",
'family' : "ESP32",
'package' : "",
'ram' : 512,
'flash' : 0,
'speed' : 160,
'usart' : 3,
'spi' : 2,
'i2c' : 2,
'adc' : 1,
'dac' : 0,
'saved_code' : {
'address' : 0x100000,
'page_size' : 4096,
'pages' : 16,
'flash_available' : 960, # firmware can be up to this size
},
};
devices = {
};
# left-right, or top-bottom order
board_esp32 = {
'top' : ['GND','D23','D22','D1','D3','D21','D20','D19','D18','D5','D17','D16','D4','D0'],
'bottom' : ['D12','D14','D27','D26','D25','D33','D32','D35','D34','D39','D36','EN','3V3','GND'],
'right' : [ 'GND','D13','D9','D10','D11','D6','D7','D8','D15','D2']
};
board_esp32["bottom"].reverse()
board_esp32["right"].reverse()
board_esp32["_css"] = """
#board {
width: 600px;
height: 435px;
left: 50px;
top: 170px;
background-image: url(img/ESP32.jpg);
}
#boardcontainer {
height: 700px;
}
#board #right {
top: 80px;
left: 600px;
}
#board #top {
bottom: 440px;
left: 155px;
}
#board #bottom {
top: 435px;
left: 155px;
}
#board .rightpin {
height: 28px;
}
#board .toppin, #board .bottompin {
width: 24px;
}
""";
boards = [ board_esp32 ];
def get_pins():
# { "name":"PD20", "sortingname":"D20", "port":"D", "num":"30", "functions":{ "I2C1_SDA":0 }, "csv":{} },
# pins = pinutils.generate_pins(0,5);
##6-11 are used by Flash chip
# pins.extend(pinutils.generate_pins(12,23));
# pins.extend(pinutils.generate_pins(25,27));
##32-33 are routed to rtc for xtal
# pins.extend(pinutils.generate_pins(34,39));
# pins = pinutils.fill_gaps_in_pin_list(pins);
pins = pinutils.generate_pins(0,39) # 40 General Purpose I/O Pins.
pinutils.findpin(pins, "PD36", True)["functions"]["ADC1_IN0"]=0;
pinutils.findpin(pins, "PD37", True)["functions"]["ADC1_IN1"]=0;
pinutils.findpin(pins, "PD38", True)["functions"]["ADC1_IN2"]=0;
pinutils.findpin(pins, "PD39", True)["functions"]["ADC1_IN3"]=0;
pinutils.findpin(pins, "PD32", True)["functions"]["ADC1_IN4"]=0;
pinutils.findpin(pins, "PD33", True)["functions"]["ADC1_IN5"]=0;
pinutils.findpin(pins, "PD34", True)["functions"]["ADC1_IN6"]=0;
pinutils.findpin(pins, "PD35", True)["functions"]["ADC1_IN7"]=0;
#ADC2 not supported yet, waiting for driver from espressif
pinutils.findpin(pins, "PD4", True)["functions"]["ADC2_IN0"]=0;
pinutils.findpin(pins, "PD0", True)["functions"]["ADC2_IN1"]=0;
pinutils.findpin(pins, "PD2", True)["functions"]["ADC2_IN2"]=0;
pinutils.findpin(pins, "PD15", True)["functions"]["ADC2_IN3"]=0;
pinutils.findpin(pins, "PD13", True)["functions"]["ADC2_IN4"]=0;
pinutils.findpin(pins, "PD12", True)["functions"]["ADC2_IN5"]=0;
pinutils.findpin(pins, "PD14", True)["functions"]["ADC2_IN6"]=0;
pinutils.findpin(pins, "PD27", True)["functions"]["ADC2_IN7"]=0;
pinutils.findpin(pins, "PD25", True)["functions"]["DAC_OUT
|
1"]=0;
pinutils.findpin(pins, "PD26", True)["functions"]["DAC_OUT2"]=0;
pinutils.findpin(pins, "PD0", True)["functions"]["LED_1"]=0;
pinutils.findpin(pins, "PD10", True)["functions"]["USART0_TX"]=0;
pinutils.findpin(pins, "PD16", True)["functions"]["USART2_RX"]=0;
pinutils.findpin(pins, "PD17", True)["functions"]["USART2_TX"]=0;
pinutils.findpin(pins, "PD32", True)["functions"]["USART0_RX"]
|
=0;
# everything is non-5v tolerant
#for pin in pins:
# pin["functions"]["3.3"]=0;
return pins
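# Illustrative note (not part of the board file; field values hypothetical): after
# the assignments above, the PD25 entry returned by get_pins() carries
# {"DAC_OUT1": 0} in its "functions" dictionary, following the field layout of the
# example comment at the top of get_pins().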
|
ERICUdL/ISTEX_MentalRotation
|
ids2docs_years.py
|
Python
|
bsd-3-clause
| 2,227
| 0.021105
|
# -*- coding: utf-8 -*-
#
# This file is part of Istex_Mental_Rotation.
# Copyright (C) 2016 3ST ERIC Laboratory.
#
# This is a free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
# Load and transform ISTEX and wiki articles into bag_of_words decomposed by SVD.
# co-author : Lucie Martinet <lucie.martinet@univ-lorraine.fr>
# co-author : Hussein AL-NATSHEH <hussein.al-natsheh@ish-lyon.cnrs.fr.>
# Affiliation: University of Lyon, ERIC Laboratory, Lyon2
# Thanks to ISTEX project for the funding
import os, argparse, pickle, json
import numpy as np
def get_article_by_istex_id(istex_ids, istex_dir):
size = len(istex_ids)
res = np.array(range(size), dtype=np.object)
i = 0
for fname in os.listdir(istex_dir):
for doc in json.load(open(os.path.join(istex_dir, fname))):
istex_id = doc["istex_id"]
if istex_id in istex_ids :
article = dict()
article['text'] = doc["title"] + " __ " + doc["abstract"]
article['publicationDate'] = doc["publicationDate"]
article["istex_id"] = doc["istex_id"]
res[i] = article
i += 1
res = res.tolist()
res = res[:i]
return res
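# Illustrative note (hypothetical example, not part of the script): each JSON file
# in istex_dir is expected to hold a list of documents providing at least the
# fields read above, e.g.
#   [{"istex_id": "...", "title": "...", "abstract": "...", "publicationDate": "1998"}]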
if __name__ == "__main__" :
parser = argparse.ArgumentParser()
parser.add_argument("--results_file", default='results/istex_mr_top10k_vec150results.pickle', type=str)
parser.add_argument("--istex_dir", default='sample_data/ISTEX/', type=str)
parser.add_argument("--out_file", default="chart_input.json", type=str) # name of the output file
parser.add_argument("-
|
-out_dir", default="results", type=str) # name of the output directory
args = parser.parse_args()
results_file = args.results_file
istex_dir = args.istex_dir
out_file = args.out_file
out_dir = args.out_dir
if not os.path.exists(out_dir):
os.makedirs(out_dir)
results = pickle.load(open(results_file,'rb'))
istex_ids = results.keys()
print "length of the results keys (istex_ids): ", len(istex_i
|
ds)
articles = get_article_by_istex_id(istex_ids, istex_dir)
json.dump(articles, open(os.path.join(out_dir, out_file), "w"), indent=2)
print 'length of response file: ', len(articles)
print 'response file could be found at: ', os.path.join(out_dir, out_file)
|
ulif/pulp
|
server/pulp/server/db/model/auth.py
|
Python
|
gpl-2.0
| 1,467
| 0.001363
|
# -*- coding: utf-8 -*-
from pulp.server.db.model.base import Model
class Role(Model):
"""
Represents a role and a set of permissions associated with that role.
Users that are added to this role will inherit all the permissions associated
with the role.
    @ivar id: role's id, must be unique for each role
@type id: str
@ivar display_name: user-readable name of the role
@type display_name: str
@ivar description: free form text used to describe the role
@type description: str
@ivar permissions: dictionary of resource: tuple of allowed operations
@type permissions: dict
"""
collection_name = 'roles'
unique_indices = ('id',)
def __init__(self, id, display_name=None, description=None, permissions=None):
super(Role, self).__init__()
self.id = id
self.display_name = display_name or id
self.description = description
self.permissions = permissions or {}
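# Usage sketch (illustrative, not part of the original module; operation names are
# hypothetical):
#   admin = Role('repo-admin', display_name='Repository admin',
#                permissions={'/repositories/': ('CREATE', 'READ', 'UPDATE', 'DELETE')})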
class Permission(Model):
"""
Represents the user permissions associated with a pulp resource.
@ivar resource: uri path of resource
@type resource: str
@ivar users: list of dictionaries of user logins and permissions
@type users: list
"""
collection_name = 'permissions'
unique_indices = ('resource',)
def __init__(self, resource, users=None):
super(Permission, self).__init__()
self.resource = resource
self.users = users or []
|
lemon24/intercessor
|
examples/book.py
|
Python
|
bsd-3-clause
| 78
| 0.038462
|
#: one
x = 1
print(x)
import time
time.sleep(10)
#: two
#x = 2
print(x)
|
daluu/AutoPyDriverServer
|
sample-code/webdriver_integration_demo.py
|
Python
|
apache-2.0
| 1,719
| 0.007563
|
from selenium import webdriver
from selenium.webdriver import ActionChains
from os import path
import time
#NOTE: this demo uses images under the images subfolder to locate elements by name.
# Be sure to configure AutoPyDriverServer to use that folder for images by name
# start up both Firefox & AutoPyDriver for demo
browser = webdriver.Firefox()
autopy_driver = webdriver.Remote( command_executor='http://127.0.0.1:4723/wd/hub', desired_capabilities={'browserName':'AutoPy','imageRecognitionToleranceValue':0.0})
print "Desi
|
red Capabilities returned by server:\n"
print autopy_driver.desired_capabilities
print ""
# launch browser to Drag & drop page for demo test
browser.get("http://html5demos.com/drag")
if len(browser.find_elements_by_tag_name("li")) != 5:
print "Drag & drop test page not in correct state for demo test"
time.sleep(5)
src = autopy_driver.find_element_by_name('drag_src_html5.png')
target = autopy_driver.find_element_by_name('drop_target_html5.png')
actions = ActionChains(autopy_driver)
actions.drag_and_drop(src,target).perform()
# check results, drag & drop reduced items by 1 from 5 to 4
result = len(browser.find_elements_by_tag_name('li'))
if result != 4:
print 'Drag & drop failed. There are %d items when there should be 4.\n' % result
else:
print 'Drag & drop success.\n'
browser.quit()
autopy_driver.quit()
# From this integration demo, imagine using AutoPy alongside the browser via
# WebDriver for things WebDriver alone cannot do: file downloads, HTTP
# authentication, or dragging an item from the desktop into the browser, without
# resorting to shell commands or other external tools. All of it can be driven
# through WebDriver APIs against two or more WebDriver instances.
|
0x7678/youtube-dl
|
youtube_dl/extractor/tvigle.py
|
Python
|
unlicense
| 2,889
| 0.001065
|
# encoding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
float_or_none,
parse_age_limit,
)
class TvigleIE(InfoExtractor):
IE_NAME = 'tvigle'
IE_DESC = 'Интернет-телевидение Tvigle.ru'
_VALID_URL = r'http://(?:www\.)?tvigle\.ru/(?:[^/]+/)+(?P<id>[^/]+)/$'
_TESTS = [
{
'url': 'http://www.tvigle.ru/video/sokrat/',
'md5': '36514aed3657d4f70b4b2cef8eb520cd',
'info_dict': {
'id': '1848932',
'display_id': 'sokrat',
'ext': 'flv',
'title': 'Сократ',
'description': 'md5:a05bd01be310074d5833efc6743be95e',
'duration': 6586,
'age_limit': 0,
},
},
{
'url': 'http://www.tvigle.ru/video/vladimir-vysotskii/vedushchii-teleprogrammy-60-minut-ssha-o-vladimire-vysotskom/',
'md5': 'd9012d7c7c598fe7a11d7fb46dc1f574',
'info_dict': {
'id': '5142516',
'ext': 'mp4',
'title': 'Ведущий телепрограммы «60 минут» (США) о Владимире Высоцком',
'description': 'md5:027f7dc872948f14c96d19b4178428a4',
'duration': 186.080,
'age_limit': 0,
},
},
]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
video_id = self._html_search_regex(
r'<li class="video-preview current_playing" id="(\d+)">', webpage, 'video id')
video_data = self._download_json(
            'http://cloud.tvigle.ru/api/play/video/%s/' % video_id, display_id)
item = video_data['playlist']['items'][0]
title = item['title']
description = item['description']
thumbnail = item['thumbnail']
        duration = float_or_none(item.get('durationMilliseconds'), 1000)
age_limit = parse_age_limit(item.get('ageRestrictions'))
formats = []
for vcodec, fmts in item['videos'].items():
for quality, video_url in fmts.items():
formats.append({
'url': video_url,
'format_id': '%s-%s' % (vcodec, quality),
'vcodec': vcodec,
'height': int(quality[:-1]),
'filesize': item['video_files_size'][vcodec][quality],
})
self._sort_formats(formats)
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'age_limit': age_limit,
'formats': formats,
}
|
ehouarn-perret/EhouarnPerret.Python.Kattis
|
Trivial/Peg.py
|
Python
|
mit
| 719
| 0.030598
|
def count_moves(a, x, y):
character = a[x][y]
if character == ".":
count = 0
rows = len(a)
columns = len(a[x])
# Top Check
if ((x - 2) >= 0) and (a[x - 1][y] == "o") and (a[x - 2][y] == "o"):
count += 1
# Bottom Check
        if ((x + 2) < rows) and (a[x + 1][y] == "o") and (a[x + 2][y] == "o"):
count += 1
# Left Check
        if ((y - 2) >= 0) and (a[x][y - 1] == "o") and (a[x][y - 2] == "o"):
count += 1
# Right Check
if ((y + 2) < columns) and (a[x][y + 1] == "o") and (a[x][y + 2] == "o"):
count += 1
return count
else:
return 0
moves = 0
size = 7
board = [input() for _ in range(size)]
for cx in range(size):
for cy in range(size):
moves += count_moves(board, cx, cy)
print(moves)
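# Illustrative check (not part of the original submission): a single empty cell
# with two pegs directly above it can be reached by exactly one jump, e.g.
#   toy = ["ooo", "ooo", "o.o"]
#   count_moves(toy, 2, 1) == 1   # the peg at (0,1) jumps over (1,1) into (2,1)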
|
lpakula/django-oscar-paypal
|
paypal/express_checkout/gateway.py
|
Python
|
bsd-3-clause
| 7,616
| 0.001576
|
from decimal import Decimal as D
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template.defaultfilters import striptags, truncatechars
from django.utils.translation import gettext_lazy as _
from paypalcheckoutsdk.core import LiveEnvironment, PayPalHttpClient, SandboxEnvironment
from paypalcheckoutsdk.orders import (
    OrdersAuthorizeRequest, OrdersCaptureRequest, OrdersCreateRequest, OrdersGetRequest)
from paypalcheckoutsdk.payments import AuthorizationsCaptureRequest, AuthorizationsVoidRequest, CapturesRefundRequest
INTENT_AUTHORIZE = 'AUTHORIZE'
INTENT_CAPTURE = 'CAPTURE'
INTENT_REQUEST_MAPPING = {
INTENT_AUTHORIZE: AuthorizationsCaptureRequest,
INTENT_CAPTURE: OrdersCaptureRequest,
}
LANDING_PAGE_LOGIN = 'LOGIN'
LANDING_PAGE_BILLING = 'BILLING'
LANDING_PAGE_NO_PREFERENCE = 'NO_PREFERENCE'
USER_ACTION_CONTINUE = 'CONTINUE'
USER_ACTION_PAY_NOW = 'PAY_NOW'
buyer_pays_on_paypal = lambda: getattr(settings, 'PAYPAL_BUYER_PAYS_ON_PAYPAL', False)
def format_description(description):
return truncatechars(striptags(description), 127) if description else ''
def format_amount(amount):
return str(amount.quantize(D('0.01')))
def get_landing_page():
landing_page = getattr(settings, 'PAYPAL_LANDING_PAGE', LANDING_PAGE_NO_PREFERENCE)
if landing_page not in (LANDING_PAGE_LOGIN, LANDING_PAGE_BILLING, LANDING_PAGE_NO_PREFERENCE):
message = _("'%s' is not a valid landing page") % landing_page
raise ImproperlyConfigured(message)
return landing_page
class PaymentProcessor:
client = None
def __init__(self):
credentials = {
'client_id': settings.PAYPAL_CLIENT_ID,
'client_secret': settings.PAYPAL_CLIENT_SECRET,
}
if getattr(settings, 'PAYPAL_SANDBOX_MODE', True):
environment = SandboxEnvironment(**credentials)
else:
environment = LiveEnvironment(**credentials)
self.client = PayPalHttpClient(environment)
def build_order_create_request_body(
self, basket, currency, return_url, cancel_url, order_total,
address=None, shipping_charge=None, intent=None,
):
application_context = {
'return_url': return_url,
'cancel_url': cancel_url,
'landing_page': get_landing_page(),
'shipping_preference': 'SET_PROVIDED_ADDRESS' if address is not None else 'NO_SHIPPING', # TODO: ???
'user_action': 'PAY_NOW' if buyer_pays_on_paypal() else 'CONTINUE',
}
if getattr(settings, 'PAYPAL_BRAND_NAME', None) is not None:
application_context['brand_name'] = settings.PAYPAL_BRAND_NAME
breakdown = {
'item_total': {
'currency_code': currency,
'value': format_amount(basket.total_incl_tax_excl_discounts),
},
'discount': {
'currency_code': currency,
'value': format_amount(sum([
discount['discount']
for discount
in basket.offer_discounts + basket.voucher_discounts
], D(0))),
},
'shipping_discount': {
'currency_code': currency,
'value': format_amount(sum([
discount['discount']
for discount
in basket.shipping_discounts
], D(0))),
}
}
if shipping_charge is not None:
breakdown['shipping'] = {
'currency_code': currency,
'value': format_amount(shipping_charge),
}
purchase_unit = {
'amount': {
'currency_code': currency,
'value': format_amount(order_total),
'breakdown': breakdown,
}
}
items = []
for line in basket.all_lines():
product = line.product
item = {
'name': product.get_title(),
'description': format_description(product.description),
'sku': product.upc if product.upc else '',
'unit_amount': {
'currency_code': currency,
'value': format_amount(line.unit_price_incl_tax)
},
'quantity': line.quantity,
'category': 'PHYSICAL_GOODS' if product.is_shipping_required else 'DIGITAL_GOODS'
}
items.append(item)
purchase_unit['items'] = items
if address is not None:
purchase_unit['shipping'] = {
'name': {
'full_name': address.name
},
'address': {
'address_line_1': address.line1,
'address_line_2': address.line2,
'admin_area_2': address.line4,
'admin_area_1': address.state,
'postal_code': address.postcode,
'country_code': address.country.iso_3166_1_a2
}
}
body = {
'intent': intent,
'application_context': application_context,
'purchase_units': [purchase_unit]
}
return body
def build_refund_order_request_body(self, amount, currency):
return {
'amount': {
'value': format_amount(amount),
'currency_code': currency
}
}
def create_order(
self, basket, currency, return_url, cancel_url, order_total,
address=None, shipping_charge=None, intent=None, preferred_response='minimal',
):
request = OrdersCreateRequest()
request.prefer(f'return={preferred_response}')
request.request_body(self.build_order_create_request_body(
basket=basket,
currency=currency,
return_url=return_url,
cancel_url=cancel_url,
order_total=order_total,
intent=intent,
address=address,
shipping_charge=shipping_charge,
))
response = self.client.execute(request)
return response.result
def get_order(self, token):
request = OrdersGetRequest(token)
response = self.client.execute(request)
return response.result
def get_authorize_request_body(self):
return {}
def authorize_order(self, order_id, preferred_response='minimal'):
request = OrdersAuthorizeRequest(order_id)
request.prefer(f'return={preferred_response}')
request.request_body(self.get_authorize_request_body())
response = self.client.execute(request)
return response.result
def void_authorized_order(self, authorization_id):
request = AuthorizationsVoidRequest(authorization_id)
self.client.execute(request)
def refund_order(self, capture_id, amount, currency, preferred_response='minimal'):
request = CapturesRefundRequest(capture_id)
request.prefer(f'return={preferred_response}')
request.request_body(self.build_refund_order_request_body(amount, currency))
response = self.client.execute(request)
return response.result
def capture_order(self, token, intent, preferred_response='minimal'):
capture_request = INTENT_REQUEST_MAPPING[intent]
request = capture_request(token)
request.prefer(f'return={preferred_response}')
response = self.client.execute(request)
return response.result
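# Usage sketch (illustrative, not part of the original module; variable values are
# hypothetical): a typical flow creates an order, sends the buyer to PayPal for
# approval, then captures it with the returned order id.
#
#   processor = PaymentProcessor()
#   order = processor.create_order(basket, 'USD', return_url, cancel_url,
#                                  order_total, intent=INTENT_CAPTURE)
#   # ...buyer approves on PayPal and is redirected back...
#   capture = processor.capture_order(order.id, INTENT_CAPTURE)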
|
kobejean/tensorflow
|
tensorflow/python/ops/numerics.py
|
Python
|
apache-2.0
| 4,096
| 0.004395
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Connects all half, float and double tensors to CheckNumericsOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export("verify_tensor_all_finite")
def verify_tensor_all_finite(t, msg, name=None):
"""Assert that the tensor does not contain any NaN's or Inf's.
Args:
t: Tensor to check.
msg: Message to log on failure.
    name: A name for this operation (optional).
Returns:
Same tensor as `t`.
"""
with ops.name_scope(name, "VerifyFinite", [t]) as name:
t = ops.convert_to_tensor(t, name="t")
with ops.colocate_with(t):
verify_input = array_ops.check_numerics(t, message=msg)
out = control_flow_ops.with_dependencies([verify_input], t)
return out
@tf_export("add_check_numerics_ops")
def add_check_numerics_ops():
"""Connect a `check_numerics` to every floating point tensor.
`check_numerics` operations themselves are added for each `half`, `float`,
or `double` tensor in the graph. For all ops in the graph, the
`check_numerics` op for all of its (`half`, `float`, or `double`) inputs
is guaranteed to run before the `check_numerics` op on any of its outputs.
Note: This API is not compatible with the use of `tf.cond` or
`tf.while_loop`, and will raise a `ValueError` if you attempt to call it
in such a graph.
Returns:
A `group` op depending on all `check_numerics` ops added.
Raises:
ValueError: If the graph contains any numeric operations in a control flow
structure.
RuntimeError: If called with eager execution enabled.
@compatibility(eager)
Not compatible with eager execution. To check for `Inf`s and `NaN`s under
eager execution, call tfe.seterr(inf_or_nan='raise') once before executing
the checked operations.
  @end_compatibility
"""
if context.executing_eagerly():
raise RuntimeError(
"add_check_numerics_ops() is not compatible with eager execution. "
"To check for Inf's and NaN's under eager execution, call "
"tfe.seterr(inf_or_nan='raise') once before executing the "
"checked operations.")
check_op = []
# This code relies on the ordering of ops in get_operations().
# The producer of a tensor always comes before that tensor's consumer in
# this list. This is true because get_operations() returns ops in the order
# added, and an op can only be added after its inputs are added.
for op in ops.get_default_graph().get_operations():
for output in op.outputs:
if output.dtype in [dtypes.float16, dtypes.float32, dtypes.float64]:
if op._get_control_flow_context() is not None: # pylint: disable=protected-access
raise ValueError("`tf.add_check_numerics_ops() is not compatible "
"with TensorFlow control flow operations such as "
"`tf.cond()` or `tf.while_loop()`.")
message = op.name + ":" + str(output.value_index)
with ops.control_dependencies(check_op):
check_op = [array_ops.check_numerics(output, message=message)]
return control_flow_ops.group(*check_op)
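# Usage sketch (illustrative, not part of the original module), assuming TF1-style
# graph execution:
#   x = tf.placeholder(tf.float32, name="x")
#   y = tf.verify_tensor_all_finite(x, msg="x contains NaN or Inf")
#   check = tf.add_check_numerics_ops()   # run alongside the training op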
|
vladimir-ipatov/ganeti
|
lib/outils.py
|
Python
|
gpl-2.0
| 4,426
| 0.005874
|
#
#
# Copyright (C) 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Module for object related utils."""
#: Supported container types for serialization/de-serialization (must be a
#: tuple as it's used as a parameter for C{isinstance})
_SEQU
|
ENCE_TYPES = (list, tuple, set, frozenset)
class AutoSlots(type):
"""Meta base class for __slots__ definitions.
"""
def __new__(mcs, n
|
ame, bases, attrs):
"""Called when a class should be created.
@param mcs: The meta class
@param name: Name of created class
@param bases: Base classes
@type attrs: dict
@param attrs: Class attributes
"""
assert "__slots__" not in attrs, \
"Class '%s' defines __slots__ when it should not" % name
attrs["__slots__"] = mcs._GetSlots(attrs)
return type.__new__(mcs, name, bases, attrs)
@classmethod
def _GetSlots(mcs, attrs):
"""Used to get the list of defined slots.
@param attrs: The attributes of the class
"""
raise NotImplementedError
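# Hedged sketch (illustrative, not part of ganeti): a concrete metaclass whose
# classes declare their slots through a hypothetical "SLOTS" attribute.
class _ExampleAutoSlots(AutoSlots):
  @classmethod
  def _GetSlots(mcs, attrs):
    return list(attrs.get("SLOTS", []))
# A class using it would look like (Python 2 metaclass syntax, as elsewhere in
# ganeti):
#   class _ExampleObject(object):
#     __metaclass__ = _ExampleAutoSlots
#     SLOTS = ["name", "value"]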
class ValidatedSlots(object):
"""Sets and validates slots.
"""
__slots__ = []
def __init__(self, **kwargs):
"""Constructor for BaseOpCode.
The constructor takes only keyword arguments and will set
attributes on this object based on the passed arguments. As such,
it means that you should not pass arguments which are not in the
__slots__ attribute for this class.
"""
slots = self.GetAllSlots()
for (key, value) in kwargs.items():
if key not in slots:
raise TypeError("Object %s doesn't support the parameter '%s'" %
(self.__class__.__name__, key))
setattr(self, key, value)
@classmethod
def GetAllSlots(cls):
"""Compute the list of all declared slots for a class.
"""
slots = []
for parent in cls.__mro__:
slots.extend(getattr(parent, "__slots__", []))
return slots
def Validate(self):
"""Validates the slots.
This method must be implemented by the child classes.
"""
raise NotImplementedError
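# Hedged sketch (illustrative, not part of ganeti): a minimal ValidatedSlots
# subclass; the constructor above only accepts keyword arguments whose names
# appear in the combined __slots__ of the class hierarchy.
class _ExampleFilterRule(ValidatedSlots):
  __slots__ = ["pattern", "priority"]
  def Validate(self):
    if not isinstance(getattr(self, "pattern", None), str):
      raise ValueError("pattern must be a string")
# _ExampleFilterRule(pattern="node-*", priority=1) is accepted, while
# _ExampleFilterRule(level=2) raises TypeError because "level" is not a slot.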
def ContainerToDicts(container):
"""Convert the elements of a container to standard Python types.
This method converts a container with elements to standard Python types. If
the input container is of the type C{dict}, only its values are touched.
Those values, as well as all elements of input sequences, must support a
C{ToDict} method returning a serialized version.
@type container: dict or sequence (see L{_SEQUENCE_TYPES})
"""
if isinstance(container, dict):
ret = dict([(k, v.ToDict()) for k, v in container.items()])
elif isinstance(container, _SEQUENCE_TYPES):
ret = [elem.ToDict() for elem in container]
else:
raise TypeError("Unknown container type '%s'" % type(container))
return ret
def ContainerFromDicts(source, c_type, e_type):
"""Convert a container from standard python types.
This method converts a container with standard Python types to objects. If
the container is a dict, we don't touch the keys, only the values.
@type source: None, dict or sequence (see L{_SEQUENCE_TYPES})
@param source: Input data
@type c_type: type class
@param c_type: Desired type for returned container
@type e_type: element type class
@param e_type: Item type for elements in returned container (must have a
C{FromDict} class method)
"""
if not isinstance(c_type, type):
raise TypeError("Container type '%s' is not a type" % type(c_type))
if source is None:
source = c_type()
if c_type is dict:
ret = dict([(k, e_type.FromDict(v)) for k, v in source.items()])
elif c_type in _SEQUENCE_TYPES:
ret = c_type(map(e_type.FromDict, source))
else:
raise TypeError("Unknown container type '%s'" % c_type)
return ret
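# Hedged usage sketch (illustrative, not part of ganeti): a minimal element
# type implementing the ToDict/FromDict protocol expected by the two helpers.
class _ExamplePoint(object):
  __slots__ = ["x", "y"]
  def __init__(self, x=0, y=0):
    self.x = x
    self.y = y
  def ToDict(self):
    return {"x": self.x, "y": self.y}
  @classmethod
  def FromDict(cls, data):
    return cls(data["x"], data["y"])
# ContainerToDicts([_ExamplePoint(1, 2)]) == [{"x": 1, "y": 2}] and
# ContainerFromDicts([{"x": 1, "y": 2}], list, _ExamplePoint) rebuilds the objects.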
|
DirtyUnicorns/android_external_chromium-org
|
tools/perf/benchmarks/media.py
|
Python
|
bsd-3-clause
| 1,514
| 0.015852
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
from measurements import media
from telemetry import test
class Media(test.Test):
"""Obtains media metrics for key user scenarios."""
test = media.Media
page_set = 'page_sets/tough_video_cases.json'
class MediaNetworkSimulation(test.Test):
"""Obtains media metrics under different network simulations."""
test = media.Media
enabled = not sys.platform.startswith('linux')
page_set = 'page_sets/media_cns_cases.json'
class MediaAndroid(test.Test):
"""Obtains media metrics for key user scenarios on Android."""
test = media.Media
tag = 'android'
page_set = 'page_sets/tough_video_cases.json'
# Exclude crowd* media files (50fps 2160p).
options = {
'page_filter_exclude': '.*crowd.*'
}
def CustomizeBrowserOptions(self, options):
# Needed to run media actions in JS in Android.
options.AppendExtraBrowserArgs(
'--disable-gesture-requirement-for-media-playback')
class MediaSourceExtensions(test.T
|
est):
"""Obtains media metrics for key media source extensions functions."""
test = media.Media
enabled = not sys.platform.startswith('l
|
inux')
page_set = 'page_sets/mse_cases.json'
def CustomizeBrowserOptions(self, options):
# Needed to allow XHR requests to return stream objects.
options.AppendExtraBrowserArgs(
'--enable-experimental-web-platform-features')
|
klingebj/regreg
|
code/regreg/problems/composite.py
|
Python
|
bsd-3-clause
| 14,743
| 0.005698
|
from numpy.linalg import norm
from numpy import zeros, array, any as npany
import new
from copy import copy
# local imports
from ..identity_quadratic import identity_quadratic as sq
from ..algorithms import FISTA
from ..objdoctemplates import
|
objective_doc_templater
from ..doctemplates import (doc_template_user, doc_template_provider)
@objective_doc_templater()
class composite(object):
"""
A generic way to specify a problem in composite form.
"""
objective_template = r"""f(%(var)s)"""
objective_vars = {'var': r'\beta', 'shape':'p', 'offset':r'\alpha'}
def __init__(self, shape, offset=None,
|
quadratic=None, initial=None):
self.offset = offset
if offset is not None:
self.offset = array(offset)
if type(shape) == type(1):
self.shape = (shape,)
else:
self.shape = shape
if quadratic is not None:
self.quadratic = quadratic
else:
self.quadratic = sq(0,0,0,0)
if initial is None:
self.coefs = zeros(self.shape)
else:
self.coefs = initial.copy()
def latexify(self, var=None, idx=''):
template_dict = self.objective_vars.copy()
template_dict['idx'] = idx
if var is not None:
template_dict['var'] = var
if hasattr(self, 'offset') and self.offset is not None and npany(self.offset != 0):
template_dict['var'] = var + (r' - %(offset)s_{%(idx)s}' % template_dict)
obj = self.objective_template % template_dict
template_dict['obj'] = obj
if not self.quadratic.iszero:
return ' + '.join([obj, self.quadratic.latexify(var=var, idx=idx)])
return obj
def _repr_latex_(self):
return self.latexify('x')
def nonsmooth_objective(self, x, check_feasibility=False):
return self.quadratic.objective(x, 'func')
def smooth_objective(self, x, mode='both', check_feasibility=False):
'''
The smooth_objective and the quadratic_objective combined.
'''
raise NotImplementedError
def objective(self, x, check_feasibility=False):
return self.smooth_objective(x,mode='func', check_feasibility=check_feasibility) + self.nonsmooth_objective(x, check_feasibility=check_feasibility)
@doc_template_provider
def proximal_optimum(self, quadratic):
r"""
Returns
.. math::
\inf_{x \in \mathbb{R}^p} Q(x)
+ h(x)
where $p$ = ``x.shape[0]``, $Q(x)=$ `self.quadratic` and
.. math::
h(%(var)s) = %(ns_objective)s
"""
argmin = self.proximal(quadratic)
        # evaluate the proximal objective at its minimizer
        if self.quadratic is None:
            return argmin, quadratic.objective(argmin, 'func') + self.nonsmooth_objective(argmin)
        else:
            return argmin, (quadratic.objective(argmin, 'func') +
                            self.nonsmooth_objective(argmin) +
                            self.quadratic.objective(argmin, 'func'))
def proximal_step(self, quadratic, prox_control=None):
"""
Compute the proximal optimization
prox_control: If not None, then a dictionary of parameters for the prox procedure
"""
# This seems like a null op -- if all proximals accept optional prox_control
if prox_control is None:
return self.proximal(quadratic)
else:
return self.proximal(quadratic, prox_control=prox_control)
def apply_offset(self, x):
"""
If self.offset is not None, return x-self.offset, else return x.
"""
if self.offset is not None:
return x - self.offset
return x
def set_quadratic(self, quadratic):
"""
Set the quadratic part of the composite.
"""
self._quadratic = quadratic
def get_quadratic(self):
"""
Get the quadratic part of the composite.
"""
if not hasattr(self, "_quadratic"):
self._quadratic = sq(None, None, None, None)
return self._quadratic
quadratic = property(get_quadratic, set_quadratic)
def smoothed(self, smoothing_quadratic):
'''
Add quadratic smoothing term
'''
conjugate_atom = copy(self.conjugate)
sq = smoothing_quadratic
if sq.coef in [None, 0]:
            raise ValueError('quadratic term of smoothing_quadratic must be nonzero')
total_q = sq
if conjugate_atom.quadratic is not None:
total_q = sq + conjugate_atom.quadratic
conjugate_atom.set_quadratic(total_q)
smoothed_atom = conjugate_atom.conjugate
return smoothed_atom
def solve(self, quadratic=None, return_optimum=False, **fit_args):
raise NotImplementedError('subclasses must implement their own solve methods')
class nonsmooth(composite):
"""
A composite subclass that explicitly returns 0
as smooth_objective.
"""
def smooth_objective(self, x, mode='both', check_feasibility=False):
if mode == 'both':
return 0., zeros(x.shape)
elif mode == 'func':
return 0.
elif mode == 'grad':
return zeros(x.shape)
raise ValueError("Mode not specified correctly")
def solve(self, quadratic=None, return_optimum=False, **fit_args):
if quadratic is None:
quadratic = sq(0,0,0,0)
self.coefs = self.proximal(quadratic)
if return_optimum:
return self.objective(self.coefs) + quadratic.objective(self.coefs, 'func'), self.coefs
else:
return self.coefs
class smooth(composite):
"""
A composite subclass that has 0 as
nonsmooth_objective and the proximal
is a null-op.
"""
objective_vars = composite.objective_vars.copy()
objective_vars['coef'] = 'C'
def get_lipschitz(self):
if hasattr(self, '_lipschitz'):
return self._lipschitz + self.quadratic.coef
return self.quadratic.coef
def set_lipschitz(self, value):
if value < 0:
raise ValueError('Lipschitz constant must be non-negative')
self._lipschitz = value
lipschitz = property(get_lipschitz, set_lipschitz)
def smooth_objective(self, x, mode='func', check_feasibility=False):
return self._smooth_objective(x, mode=mode, check_feasibility=check_feasibility)
def proximal(self, quadratic):
totalq = self.quadratic + quadratic
return -totalq.linear_term / totalq.coef
def solve(self, quadratic=None, return_optimum=False, **fit_args):
if quadratic is None:
quadratic = sq(0,0,0,0)
oldq, self.quadratic = self.quadratic, self.quadratic + quadratic
self.solver = FISTA(self)
self.solver.fit(**fit_args)
self.quadratic = oldq
if return_optimum:
return self.objective(self.coefs), self.coefs
else:
return self.coefs
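# Hedged sketch (illustrative only, not part of the original package): a toy
# least-squares loss f(x) = ||x - b||^2 / 2 written as a `smooth` subclass;
# the gradient is x - b, so its Lipschitz constant is 1.
class _example_least_squares(smooth):
    def __init__(self, b, **kwargs):
        b = array(b)
        smooth.__init__(self, b.shape, **kwargs)
        self.b = b
        self._lipschitz = 1.
    def _smooth_objective(self, x, mode='both', check_feasibility=False):
        resid = x - self.b
        f = (resid**2).sum() / 2.
        if mode == 'both':
            return f, resid
        elif mode == 'func':
            return f
        elif mode == 'grad':
            return resid
        raise ValueError("mode incorrectly specified")
# Usage sketch: _example_least_squares([1., 2., 3.]).solve() runs FISTA on the
# loss and should drive the coefficients towards [1., 2., 3.].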
class smooth_conjugate(smooth):
def __init__(self, atom, quadratic=None):
"""
Given an atom,
compute the conjugate of this atom plus
an identity_quadratic which will be
a smooth version of the conjugate of the atom.
should we have an argument "collapse" that makes a copy?
"""
# this holds a pointer to the original atom,
# but will be replaced later
self.atom = atom
if quadratic is None:
quadratic = sq(0,0,0,0)
self.smoothing_quadratic = quadratic
self.total_quadratic = self.atom.quadratic + self.smoothing_quadratic
if self.total_quadratic.coef in [0,None]:
            raise ValueError('the atom must have a non-zero quadratic term to ensure a smooth conjugate')
self.shape = atom.shape
# A smooth conjugate is the conjugate of some $f$ with an identity quadratic added to it, or
# $$
# h(u) = \sup_x \left( u^Tx - \frac{\kappa}{2} \|x\|^2_2 - \beta^Tx-c-f(x) \right).
# $$
# Suppose we add a quadratic to $h$ to get
# $$
# \tilde{h}(u) = \frac{r}{2} \|u\|^2_2 + u^T\gamma + a + h(u)$$
# and take the conjugate again:
# $$
# \beg
|
bigmlcom/python
|
bigml/api_handlers/evaluationhandler.py
|
Python
|
apache-2.0
| 3,626
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright 2014-2022 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for evaluations' REST calls
https://bigml.com/api/evaluations
"""
try:
import simplejson as json
except ImportError:
import json
from bigml.api_handlers.resourcehandler import ResourceHandlerMixin
from bigml.api_handlers.resourcehandler import check_resource_type
from bigml.constants import SUPERVISED_PATHS, TIME_SERIES_PATH, EVALUATION_PATH
class EvaluationHandlerMixin(ResourceHandlerMixin):
"""This class is used by the BigML class as
    a mixin that provides the REST calls for evaluations. It should not
be instantiated independently.
"""
def __init__(self):
"""Initializes the EvaluationHandler. This class is intended to be
used as a mixin on ResourceHandler, that inherits its
attributes and basic method from BigMLConnection, and must not be
instantiated independently.
"""
self.evaluation_url = self.url + EVALUATION_PATH
def create_evaluation(self, model, dataset,
args=None, wait_time=3, retries=10):
"""Creates a new evaluation.
"""
create_args = {}
if args is not None:
create_args.update(args)
model_types = SUPERVISED_PATHS[:]
model_types.append(TIME_SERIES_PATH)
origin_resources_checked = self.check_origins(
dataset, model, create_args, model_types=model_types,
wait_time=wait_time, retries=retries)
if origin_resources_ch
|
ecked:
body = json.dumps(create_args)
return self._create(self.evaluation_url, body)
return
def get_evaluation(self, evaluation, query_string=''):
|
"""Retrieves an evaluation.
The evaluation parameter should be a string containing the
evaluation id or the dict returned by create_evaluation.
As evaluation is an evolving object that is processed
until it reaches the FINISHED or FAULTY state, the function will
return a dict that encloses the evaluation values and state info
available at the time it is called.
"""
check_resource_type(evaluation, EVALUATION_PATH,
message="An evaluation id is needed.")
return self.get_resource(evaluation, query_string=query_string)
def list_evaluations(self, query_string=''):
"""Lists all your evaluations.
"""
return self._list(self.evaluation_url, query_string)
def update_evaluation(self, evaluation, changes):
"""Updates an evaluation.
"""
check_resource_type(evaluation, EVALUATION_PATH,
message="An evaluation id is needed.")
return self.update_resource(evaluation, changes)
def delete_evaluation(self, evaluation):
"""Deletes an evaluation.
"""
check_resource_type(evaluation, EVALUATION_PATH,
message="An evaluation id is needed.")
return self.delete_resource(evaluation)
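# Hedged usage sketch (illustrative; assumes the usual `bigml.api.BigML` client
# entry point and placeholder resource ids):
#   from bigml.api import BigML
#   api = BigML()
#   evaluation = api.create_evaluation("model/<model-id>", "dataset/<dataset-id>",
#                                      {"name": "my evaluation"})
#   evaluation = api.get_evaluation(evaluation)
#   api.delete_evaluation(evaluation)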
|
blakev/tappy
|
tap/i18n.py
|
Python
|
bsd-2-clause
| 229
| 0
|
# Copyright (c) 2015, Matt Layman
import gettext
import os
localedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locale')
tr
|
anslate = gettext.translation('tappy', localedir, fallback=Tru
|
e)
_ = translate.gettext
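# Hedged usage sketch (illustrative): other tappy modules import the `_`
# callable defined above and wrap user-facing strings with it, e.g.
#   from tap.i18n import _
#   message = _("Missing test plan.")
# which returns the translated string when a catalog exists, else the original.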
|
openworm/PyOpenWorm
|
owmeta/data_trans/common_data.py
|
Python
|
mit
| 496
| 0
|
from .. import BASE_SCHEMA_URL, BASE_DATA_URL
from rdflib.namespace import Namespace
TRANS_N
|
S = Namespace(BASE_SCHEMA_URL + '/translators/'
|
)
TRANS_DATA_NS = Namespace(BASE_DATA_URL + '/translators/')
DS_NS = Namespace(BASE_SCHEMA_URL + '/data_sources/')
DS_DATA_NS = Namespace(BASE_DATA_URL + '/data_sources/')
class DSMixin(object):
base_namespace = DS_NS
base_data_namespace = DS_DATA_NS
class DTMixin(object):
base_namespace = TRANS_NS
base_data_namespace = TRANS_DATA_NS
|
sai9/weewx-gitsvn
|
bin/weewx/drivers/ws28xx.py
|
Python
|
gpl-3.0
| 174,398
| 0.006061
|
# $Id$
# Copyright 2013 Matthew Wall
# See the file LICENSE.txt for your full rights.
#
# Thanks to Eddie De Pieri for the first Python implementation for WS-28xx.
# Eddie did the difficult work of decompiling HeavyWeather then converting
# and reverse engineering into a functional Python implementation. Eddie's
# work was based on reverse engineering of HeavyWeather 2800 v 1.54
#
# Thanks to Lucas Heijst for enumerating the console message types and for
# debugging the transceiver/console communication timing issues.
"""Classes and functions for interfacing with WS-28xx weather stations.
LaCrosse makes a number of stations in the 28xx series, including:
WS-2810, WS-2810U-IT
WS-2811, WS-2811SAL-IT, WS-2811BRN-IT, WS-2811OAK-IT
WS-2812, WS-2812U-IT
WS-2813
WS-2814, WS-2814U-IT
WS-2815, WS-2815U-IT
C86234
The station is also sold as the TFA Primus, TFA Opus, and TechnoLine.
HeavyWeather is the software provided by LaCrosse.
There are two versions of HeavyWeather for the WS-28xx series: 1.5.4 and 1.5.4b
Apparently there is a difference between TX59UN-1-IT and TX59U-IT models (this
identifier is printed on the thermo-hygro sensor).
HeavyWeather Version Firmware Version Thermo-Hygro Model
1.54 333 or 332 TX59UN-1-IT
1.54b 288, 262, 222 TX59U-IT
HeavyWeather provides the following weather station settings:
time display: 12|24 hour
temperature display: C|F
air pressure display: inhg|hpa
wind speed display: m/s|knots|bft|km/h|mph
rain display: mm|inch
recording interval: 1m
keep weather station in hi-speed communication mode: true/false
According to the HeavyWeatherPro User Manual (1.54, rev2), "Hi speed mode wears
down batteries on your display much faster, and similarly consumes more power
on the PC. We do not believe most users need to enable this setting. It was
provided at the request of users who prefer ultra-frequent uploads."
The HeavyWeatherPro 'CurrentWeather' view is updated as data arrive from the
console. The console sends current weather data approximately every 13
seconds.
Historical data are updated less frequently - every 2 hours in the default
HeavyWeatherPro configuration.
According to the User Manual, "The 2800 series weather station uses the
'original' wind chill calculation rather than the 2001 'North American'
formula because the original formula is international."
Apparently the station console determines when data will be sent, and, once
paired, the transceiver is always listening. The station console sends a
broadcast on the hour. If the transceiver responds, the station console may
continue to broadcast data, depending on the transceiver response and the
timing of the transceiver response.
According to the C86234 Operations Manual (Revision 7):
- Temperature and humidity data are sent to the console every 13 seconds.
- Wind data are sent to the temperature/humidity sensor every 17 seconds.
- Rain data are sent to the temperature/humidity sensor every 19 seconds.
- Air pressure is measured every 15 seconds.
Each tip of the rain bucket is 0.26 mm of rain.
The following information was obtained by logging messages from the ws28xx.py
driver in weewx and by capturing USB messages between Heavy Weather Pro for
ws2800 and the TFA Primus Weather Station via windows program USB sniffer
busdog64_v0.2.1.
Pairing
The transceiver must be paired with a console before it can receive data. Each
frame sent by the console includes the device identifier of the transceiver
with which it is paired.
Synchronizing
When the console and transceiver stop communicating, they can be synchronized
by one of the following methods:
- Push the SET button on the console
- Wait till the next full hour when the console sends a clock message
In each case a Request Time message is received by the transceiver from the
console. The 'Send Time to WS' message should be sent within ms (10 ms
typical). The transceiver should handle the 'Time SET' message then send a
'Time/Config written' message about 85 ms after the 'Send Time to WS' message.
When complete, the console and transceiver will have been synchronized.
Timing
Current Weather messages, History messages, getConfig/setConfig messages, and
setTime messages each have their own timing. Missed History messages - as a
result of bad timing - result in console and transceiver becoming out of synch.
Current Weather
The console periodically sends Current Weather messages, each with the latest
values from the sensors. The CommModeInterval determines how often the console
will send Current Weather messages.
History
The console records data periodically at an interval defined by the
HistoryInterval parameter. The factory default setting is 2 hours.
Each history record contains a timestamp. Timestamps use the time from the
console clock. The console can record up to 1797 history records.
Reading 1795 history records took about 110 minutes on a raspberry pi, for
an average of 3.6 seconds per history record.
Reading 1795 history records took 65 minutes o
|
n a synology ds209+ii, for
an average of 2.2 seconds per history record.
Reading 1750 history records took 19 minutes using HeavyWeatherPro on a
Windows 7 64-bit laptop.
Message Types
The first byte of a message determines the message type.
ID Type Length
01 ? 0x0f (15
|
)
d0 SetRX 0x15 (21)
d1 SetTX 0x15 (21)
d5 SetFrame 0x111 (273)
d6 GetFrame 0x111 (273)
d7 SetState 0x15 (21)
d8 SetPreamblePattern 0x15 (21)
d9 Execute 0x0f (15)
dc ReadConfigFlash< 0x15 (21)
dd ReadConfigFlash> 0x15 (21)
de GetState 0x0a (10)
f0 WriteReg 0x05 (5)
In the following sections, some messages are decomposed using the following
structure:
start position in message buffer
hi-lo data starts on first (hi) or second (lo) nibble
chars data length in characters (nibbles)
rem remark
name variable
-------------------------------------------------------------------------------
1. 01 message (15 bytes)
000: 01 15 00 0b 08 58 3f 53 00 00 00 00 ff 15 0b (detected via USB sniffer)
000: 01 15 00 57 01 92 3f 53 00 00 00 00 ff 15 0a (detected via USB sniffer)
00: messageID
02-15: ??
-------------------------------------------------------------------------------
2. SetRX message (21 bytes)
000: d0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
020: 00
00: messageID
01-20: 00
-------------------------------------------------------------------------------
3. SetTX message (21 bytes)
000: d1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
020: 00
00: messageID
01-20: 00
-------------------------------------------------------------------------------
4. SetFrame message (273 bytes)
Action:
00: rtGetHistory - Ask for History message
01: rtSetTime - Ask for Send Time to weather station message
02: rtSetConfig - Ask for Send Config to weather station message
03: rtGetConfig - Ask for Config message
05: rtGetCurrent - Ask for Current Weather message
c0: Send Time - Send Time to WS
40: Send Config - Send Config to WS
000: d5 00 09 DevID 00 CfgCS cIntThisAdr xx xx xx rtGetHistory
000: d5 00 09 DevID 01 CfgCS cIntThisAdr xx xx xx rtReqSetTime
000: d5 00 09 f0 f0 02 CfgCS cIntThisAdr xx xx xx rtReqFirstConfig
000: d5 00 09 DevID 02 CfgCS cIntThisAdr xx xx xx rtReqSetConfig
000: d5 00 09 DevID 03 CfgCS cIntThisAdr xx xx xx rtGetConfig
000: d5 00 09 DevID 05 CfgCS cIntThisAdr xx xx xx rtGetCurrent
000: d5 00 0c DevID c0 CfgCS [TimeData . .. .. .. Send Time
000: d5 00 30 DevID 40 CfgCS [ConfigData .. .. .. Send Config
All SetFrame messages:
00: messageID
01: 00
02: Message Length (starting with next byte)
03-04: DeviceID [DevID]
05: Action
06-07: Config checksum [CfgCS]
Additional bytes rtGetCurrent, rtGetHistory, rtSetTime messages:
08-09hi: ComInt [cINT] 1.5 bytes (high byte first)
09lo-11: ThisHistoryAddress [ThisAdr] 2.5 bytes (high byte first)
Additional byt
|
andrebellafronte/stoq
|
stoqlib/gui/dialogs/tillhistory.py
|
Python
|
gpl-2.0
| 4,857
| 0.003088
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHA
|
NTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should ha
|
ve received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
""" Implementation of classes related to till operations. """
import datetime
import gtk
from kiwi.currency import currency
from kiwi.ui.objectlist import Column, ColoredColumn
from stoqlib.api import api
from stoqlib.domain.till import TillEntry
from stoqlib.gui.search.searchdialog import SearchDialog
from stoqlib.gui.base.dialogs import run_dialog
from stoqlib.gui.editors.tilleditor import (CashAdvanceEditor, CashInEditor,
CashOutEditor)
from stoqlib.gui.stockicons import (STOQ_MONEY, STOQ_MONEY_ADD,
STOQ_MONEY_REMOVE)
from stoqlib.gui.search.searchcolumns import IdentifierColumn
from stoqlib.gui.search.searchfilters import DateSearchFilter
from stoqlib.gui.search.searchoptions import Today
from stoqlib.gui.utils.printing import print_report
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.lib.defaults import payment_value_colorize
from stoqlib.reporting.till import TillHistoryReport
_ = stoqlib_gettext
class TillHistoryDialog(SearchDialog):
size = (780, -1)
search_spec = TillEntry
selection_mode = gtk.SELECTION_MULTIPLE
searchbar_labels = _('Till Entries matching:')
title = _('Till history')
#
# SearchDialog
#
def get_columns(self, *args):
return [IdentifierColumn('identifier', title=_('Entry #'), sorted=True),
Column('date', _('Date'), data_type=datetime.date),
Column('time', _('Time'), data_type=datetime.time),
Column('description', _('Description'), data_type=str,
expand=True),
ColoredColumn('value', _('Value'), data_type=currency,
color='red', data_func=payment_value_colorize,
width=140)]
def create_filters(self):
self.set_text_field_columns(['description'])
self.date_filter = DateSearchFilter(_('Date:'))
self.date_filter.select(Today)
self.add_filter(self.date_filter, columns=['date'])
# add summary label
value_format = '<b>%s</b>'
total_label = '<b>%s</b>' % api.escape(_(u'Total:'))
self.search.set_summary_label('value', total_label, value_format)
def setup_widgets(self):
self.results.set_visible_rows(10)
self.results.connect('has-rows', self._has_rows)
self._add_editor_button(_('Cash _Add...'), CashAdvanceEditor,
STOQ_MONEY)
self._add_editor_button(_('Cash _In...'), CashInEditor,
STOQ_MONEY_ADD)
self._add_editor_button(_('Cash _Out...'), CashOutEditor,
STOQ_MONEY_REMOVE)
self.print_button = gtk.Button(None, gtk.STOCK_PRINT, True)
self.print_button.set_property("use-stock", True)
self.print_button.connect('clicked', self._print_button_clicked)
self.action_area.set_layout(gtk.BUTTONBOX_START)
self.action_area.pack_end(self.print_button, False, False, 6)
self.print_button.show()
self.print_button.set_sensitive(False)
#
# Private API
#
def _add_editor_button(self, name, editor_class, stock):
button = self.add_button(name, stock=stock)
button.connect('clicked', self._run_editor, editor_class)
button.show()
def _print_button_clicked(self, button):
print_report(TillHistoryReport, self.results, list(self.results),
filters=self.search.get_search_filters())
def _run_editor(self, button, editor_class):
with api.new_store() as store:
run_dialog(editor_class, self, store)
if store.committed:
self.search.refresh()
self.results.unselect_all()
if len(self.results):
self.results.select(self.results[-1])
def _has_rows(self, results, obj):
self.print_button.set_sensitive(obj)
|
jorge-marques/shoop
|
shoop/discount_pricing/admin_form_part.py
|
Python
|
agpl-3.0
| 2,820
| 0.000355
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from shoop.admin.form_part import FormPart, TemplatedFormDef
from shoop.core.models import Shop
from shoop.discount_pricing.models import DiscountedProductPrice
class DiscountPricingForm(forms.Form):
def __init__(self, **kwargs):
self.product = kwargs.pop("product")
super(DiscountPricingForm, self).__init__(**kwargs)
self.shops = []
if self.product:
self._build_fields()
def _build_fields(self):
self.shops = list(Shop.objects.all())
prices_by_shop_and_group = dict(
(shop_id, price)
for (shop_id, price)
in DiscountedProductPrice.objects.filter(product=self.product)
.values_list("shop_id", "price_value")
)
for shop in self.shops:
name = self._get_field_name(shop)
price = prices_by_shop_and_group.get(shop.id)
price_field = forms.DecimalField(
min_value=0, initial=price,
label=_("Price (%s)") % shop, required=False
)
self.fields[name] = price_field
def _get_
|
field_name(self, shop):
return "s_%d" % shop.id
def _process_single_save(self, shop):
name = self._get_field_name(shop)
value = self.cleaned_data.get(name)
clear = (value is None or value < 0)
if clear:
DiscountedProductPrice.objects.filter(product=self.product, shop=shop).delete()
else:
|
(spp, created) = DiscountedProductPrice.objects.get_or_create(
product=self.product, shop=shop,
defaults={'price_value': value})
if not created:
spp.price_value = value
spp.save()
def save(self):
if not self.has_changed(): # No changes, so no need to do anything.
return
for shop in self.shops:
self._process_single_save(shop)
def get_shop_field(self, shop):
name = self._get_field_name(shop)
return self[name]
class DiscountPricingFormPart(FormPart):
priority = 10
def get_form_defs(self):
yield TemplatedFormDef(
name="discount_pricing",
form_class=DiscountPricingForm,
template_name="shoop/admin/discount_pricing/form_part.jinja",
required=False,
kwargs={"product": self.object}
)
def form_valid(self, form):
form["discount_pricing"].save()
|
jly8866/archer
|
sql/data_masking.py
|
Python
|
apache-2.0
| 16,236
| 0.002478
|
# -*- coding:utf-8 -*-
from .inception import InceptionDao
from .models import DataMaskingRules, DataMaskingColumns
from simplejson import JSONDecodeError
import simplejson as json
import re
inceptionDao = InceptionDao()
class Masking(object):
    # Mask sensitive data in query results
def data_masking(self, cluster_name, db_name, sql, sql_result):
result = {'status': 0, 'msg': 'ok', 'data': []}
        # Obtain the syntax tree from inception and parse it
try:
print_info = self.query_tree(sql, cluster_name, db_name)
except Exception as msg:
result['status'] = 1
result['msg'] = str(msg)
return result
if print_info is None:
result['status'] = 1
result['msg'] = 'inception返回的结果集为空!可能是SQL语句有语法错误,无法完成脱敏校验,如果需要继续查询请关闭校验'
elif print_info['errlevel'] != 0:
result['status'] = 2
result['msg'] = 'inception返回异常,无法完成脱敏校验,如果需要继续查询请关闭校验:\n' + print_info['errmsg']
else:
query_tree = print_info['query_tree']
            # Get the columns that hit a masking rule
try:
table_hit_columns, hit_columns = self.analy_query_tree(query_tree, cluster_name)
except Exception as msg:
result['status'] = 2
result['msg'] = '解析inception语法树获取表信息出错,无法完成脱敏校验,如果需要继续查询请关闭校验:{}\nquery_tree:{}'.format(str(msg),
print_info)
return result
            # For queries with "select *", walk column_list to find the index of each hit column and append it to hit_columns
if table_hit_columns and sql_result.get('rows'):
column_list = sql_result['column_list']
table_hit_column = {}
for column_info in table_hit_columns:
table_hit_column_info = {}
rule_type = column_info['rule_type']
table_hit_column_info[column_info['column_name']] = rule_type
table_hit_column.update(table_hit_column_info)
for index, item in enumerate(column_list):
if item in table_hit_column.keys():
column = {}
column['column_name'] = item
column['index'] = index
column['rule_type'] = table_hit_column.get(item)
hit_columns.append(column)
            # Mask the data of the columns listed in hit_columns
            # Fetch all masking rules up front to avoid per-row queries and improve efficiency
DataMaskingRulesOb = DataMaskingRules.objects.all()
if hit_columns and sql_result.get('rows'):
rows = list(sql_result['rows'])
for column in hit_columns:
index = column['index']
for idx, item in enumerate(rows):
rows[idx] = list(item)
rows[idx][index] = self.regex(DataMaskingRulesOb, column['rule_type'], rows[idx][index])
sql_result['rows'] = rows
return result
    # Obtain the syntax tree from inception
def query_tree(self, sqlContent, cluster_name, dbName):
try:
print_info = inceptionDao.query_print(sqlContent, cluster_name, dbName)
except Exception as e:
raise Exception('通过inception获取语法树异常,请检查inception配置,并确保inception可以访问实例:' + str(e))
if print_info:
id = print_info[0][0]
statement = print_info[0][1]
            # A non-zero return value indicates an error: 1 is a warning that does not block execution, 2 is a severe error that must be fixed
errlevel = print_info[0][2]
query_tree = print_info[0][3]
errmsg = print_info[0][4]
            # Handle the case where the SQL submitted to inception has a syntax error
if errmsg == 'Global environment':
errlevel = 2
errmsg = 'Global environment: ' + query_tree
if errlevel == 0:
pass
# print(json.dumps(json.loads(query_tree), indent=4, sort_keys=False, ensure_ascii=False))
return {'id': id, 'statement': statement, 'errlevel': errlevel, 'query_tree': query_tree,
'errmsg': errmsg}
else:
return None
    # Parse the syntax tree to get the tables involved in the statement, used for query permission checks
def query_table_ref(self, sqlContent, cluster_name, dbName):
result = {'status': 0, 'msg': 'ok', 'data': []}
try:
print_info = self.query_tree(sqlContent, cluster_name, dbName)
except Exception as msg:
result['status'] = 1
result['msg'] = str(msg)
return result
if print_info is None:
result['status'] = 1
result['msg'] = 'inception返回的结果集为空!可能是SQL语句有语法错误,无法校验表权限,如果需要继续查询请关闭校验'
elif print_info['errlevel'] != 0:
result['status'] = 2
result['msg'] = 'inception返回异常,无法校验表权限,如果需要继续查询请关闭校验:\n' + print_info['errmsg']
else:
try:
table_ref = json.loads(print_info['query_tree'])['table_ref']
except Exception:
try:
table_ref = json.loads(print_info['query_tree'])['table_ref']
except JSONDecodeError:
try:
table_ref = json.loads(repair_json_str(print_info['query_tree']))['table_ref']
except JSONDecodeError as msg:
result['status'] = 2
result['msg'] = '通过inception语法树解析表信息出错,无法校验表权限,如果需要继续查询请关闭校验:{}\nquery_tree:{}'.format(str(msg),
print_info)
table_ref = ''
result['data'] = table_ref
return result
    # Parse query_tree to extract statement info and return the columns that hit masking rules
def analy_query_tree(self, query_tree, cluster_name):
try:
query_tree_dict = json.loads(query_tree)
except JSONDecodeError:
query_tree_dict = json.loads(repair_json_str(query_tree))
select_list = query_tree_dict.get('select_list')
table_ref = query_tree_dict.get('table_ref')
        # Fetch all masking column definitions up front to avoid per-column queries and improve efficiency
DataMaskingColumnsOb = DataMaskingColumns.objects.all()
        # Check whether any table involved in the statement has masking columns configured
is_exist = False
for table in table_ref:
if DataMaskingColumnsOb.filter(cluster_name=cluster_name,
table_schema=table['db'],
|
table_name=table['table'],
active=1).exists():
is_exist = True
        # Skip rule parsing entirely if no masking columns are configured
if is_exist:
            # Iterate over select_list
columns = []
            hit_columns = []  # columns that hit a masking rule
            table_hit_columns = []  # hit columns of the involved tables, only needed for "select *"
            # Check for syntax that masking does not support
for select_item in select_list:
if select_item['type'] not in ('FIELD_ITEM', 'aggregate'):
|
raise Exception('不支持该查询语句脱敏!')
if select_item['type'] == 'aggregate':
if select_item['aggregate'].get('type') not in ('FIELD_ITEM', 'INT_ITEM'):
raise Exception('不支持该查询语句脱敏!')
            # Extract select info; only entries whose type is FIELD_ITEM or aggregate are handled, e.g. [*], [*, column_a], [column_a, *], [column_a, a.*, column_b], [a.*, column_a, b.*]
select_index = [
select_item['field'] if select_item['type'] == 'FIELD_ITEM' else select_item['aggregate'].get('field')
for
select_item in select_list if select_item['type'] in ('FIELD_ITEM', 'aggregate')]
            # Normalize select_list into the uniform {'type': 'FIELD_ITEM', 'db': 'archer_master', 'table': 'sql_users', 'field': 'email'} format
select_list = [select_item if select_item['type'] == 'FIELD_ITEM' else select_item['aggregate'] for
select_item in select_list if select_item['type'] in ('FIELD_ITEM', 'aggregate')]
if select_index:
                # If field='*' is present, walk every table to find all the hit columns
if '*' in select_index:
                    # hit columns of the involved tables
for table in table_ref:
hit_columns_info = self.hit_table(DataMaskingColumnsOb, cluster_name, table['db'],
|
TAKEALOT/Diamond
|
src/collectors/docker_collector/test/testdocker_collector.py
|
Python
|
mit
| 5,993
| 0.001335
|
#!/usr/bin/python
# coding=utf-8
################################################################################
import os
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from mock import mock_open
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from docker import Client
except ImportError:
Client = None
from diamond.collector import Collector
from docker_collector import DockerCollector
dirname = os.path.dirname(__file__)
fixtures_path = os.path.join(dirname, 'fixtures/')
fixtures = []
for root, dirnames, filenames in os.walk(fixtures_path):
fixtures.append([root, dirnames, filenames])
docker_fixture = [
{u'Id': u'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5',
u'Names': [u'/testcontainer']},
{u'Id': u'0aec7f643ca1cb45f54d41dcabd8fcbcfcbc57170c3e6dd439af1a52761c2bed',
u'Names': [u'/testcontainer3']},
{u'Id': u'9c151939e20682b924d7299875e94a4aabbe946b30b407f89e276507432c625b',
u'Names': None}]
def run_only_if_docker_client_is_available(func):
try:
from docker import Client
except ImportError:
Client = None
pred = lambda: Client is not None
return run_only(func, pred)
class TestDockerCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('DockerCollector', {
'interval': 10,
'byte_unit': 'megabyte',
'memory_path': fixtures_path,
})
self.collector = DockerCollector(config, None)
def test_import(self):
self.assertTrue(DockerCollector)
def test_finds_linux_v2_memory_stat_path(self):
tid = 'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5'
path = self.collector._memory_stat_path(tid)
self.assertTrue(path is not None)
self.assertTrue(os.path.exists(path))
def test_finds_linux_v3_memory_stat_path(self):
tid = '0aec7f643ca1cb45f54d41dcabd8fcbcfcbc57170c3e6dd439af1a52761c2bed'
path = self.collector._memory_stat_path(tid)
self.assertTrue(path is not None)
self.assertTrue(os.path.exists(path))
def test_doesnt_find_bogus_memory_stat_path(self):
tid = 'testcontainer'
path = self.collector._memory_stat_path(tid)
self.assertTrue(path is None)
@patch('os.path.exists', Mock(return_value=True))
def test_default_memory_path(self):
read_data = "\n".join([
'none /selinux selinuxfs rw,relatime 0 0',
'cgroup /goofy/memory cgroup'
' rw,nosuid,nodev,noexec,relatime,devices 0 0',
'cgroup /mickeymouse/memory cgroup'
' rw,nosuid,nodev,noexec,relatime,memory 0 0',
'tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0',
'',
])
m = mock_open(read_data=read_data)
with patch('__builtin__.open', m, create=True):
self.assertEqual(self.collector._default_memory_path(),
'/mickeymouse/memory')
m.assert_called_once_with('/proc/mounts')
# @run_only_if_docker_client_is_available
# @patch.object(Collector, 'publish')
# @patch.object(Client, 'containers', Mock(return_value=[]))
# @patch.object(Client, 'images', Mock(return_value=[]))
# def test_collect_sunny_day(self, publish_mock):
# self.assertTrue(self.collector.collect())
# self.assertPublishedMany(publish_mock, {
# 'containers_running_count': 100,
# 'containers_stopped_count': 100,
# 'i
|
mages_count': 100,
# 'images_dangling_count': 100,
# })
# @run_only_if_docker_client_is_available
# @patch('__builtin__.open')
# @patch.object(Client, 'containers', Mock(return_value=[]))
# @patch.object(Collector, 'publish')
# def test_should_open_memory_stat(self, publish_mock, open_mock):
# # open_mock.side_effect = lambda x: StringIO('')
#
|
self.collector.collect()
# print open_mock.mock_calls
# open_mock.assert_any_call(fixtures_path +
# 'docker/c3341726a9b4235a35b'
# '390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5/memory.stat')
# # open_mock.assert_any_call(fixtures_path +
# 'lxc/testcontainer/memory.stat')
# # open_mock.assert_any_call(fixtures_path + 'lxc/memory.stat')
# # open_mock.assert_any_call(fixtures_path + 'memory.stat')
# @run_only_if_docker_client_is_available
# @patch('__builtin__.open')
# @patch.object(Client, 'containers')
# @patch.object(Collector, 'publish')
# def test_should_get_containers(self, publish_mock, containers_mock,
# open_mock):
# containers_mock.return_value = []
# open_mock.side_effect = lambda x: StringIO('')
# self.collector.collect()
# containers_mock.assert_any_call(all=True)
# @run_only_if_docker_client_is_available
# @patch.object(Collector, 'publish')
# @patch.object(Client, 'containers',
# Mock(return_value=docker_fixture))
# def test_should_work_with_real_data(self, publish_mock):
# self.collector.collect()
# self.assertPublishedMany(publish_mock, {
# 'lxc.testcontainer.cache': 1,
# 'lxc.testcontainer.rss': 1,
# 'lxc.testcontainer.swap': 1,
# 'lxc.cache': 1,
# 'lxc.rss': 1,
# 'lxc.swap': 1,
# 'system.cache': 1,
# 'system.rss': 1,
# 'system.swap': 1,
# 'docker.testcontainer.cache': 1,
# 'docker.testcontainer.rss': 1,
# 'docker.testcontainer.swap': 1,
# 'docker.cache': 1,
# 'docker.rss': 1,
# 'docker.swap': 1,
# })
if __name__ == "__main__":
unittest.main()
|
0xPoly/ooni-probe
|
ooni/otime.py
|
Python
|
bsd-2-clause
| 479
| 0.006263
|
from datetime import datetime
def prettyDat
|
eNow():
"""
Returns a good looking string for the local time.
"""
return datetime.now().ctime()
def prettyDat
|
eNowUTC():
"""
Returns a good looking string for utc time.
"""
return datetime.utcnow().ctime()
def timestampNowLongUTC():
"""
Returns a timestamp in the format of %Y-%m-%d %H:%M:%S in Universal Time
Coordinates.
"""
return datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
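# Hedged usage sketch (illustrative output values):
#   prettyDateNow()       -> 'Wed Jan  1 12:00:00 2014'   (local time, ctime format)
#   prettyDateNowUTC()    -> 'Wed Jan  1 11:00:00 2014'   (UTC, ctime format)
#   timestampNowLongUTC() -> '2014-01-01 11:00:00'        (UTC, fixed format)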
|
lah7/polychromatic
|
pylib/controller/preferences.py
|
Python
|
gpl-3.0
| 23,790
| 0.003153
|
#!/usr/bin/python3
#
# Polychromatic is licensed under the GPLv3.
# Copyright (C) 2020-2021 Luke Horwell <code@horwell.me>
#
"""
This module controls the 'Preferences' window of the Controller GUI.
"""
from .. import common
from .. import effects
from .. import locales
from .. import middleman
from .. import procpid
from .. import preferences as pref
from . import shared
import os
#import configparser # Imported on demand, OpenRazer only
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QIcon, QPixmap
from PyQt5.QtWidgets import QWidget, QPushButton, QTreeWidget, QLabel, \
QComboBox, QCheckBox, QDialog, QSpinBox, \
QDoubleSpinBox, QDialogButtonBox, QTabWidget, \
QMessageBox, QAction, QToolButton
class PreferencesWindow(shared.TabData):
"""
A window for adjusting the options of the application, viewing background
processes and getting more information about backends in use.
"""
def __init__(self, appdata):
super().__init__(appdata)
self.openrazer = OpenRazerPreferences(appdata)
self.dialog = None
self.pref_data = None
self.prompt_restart = False
self.restart_applet = False
self.options = [
# [group, item, <Qt object>, Qt object name, Inverted?]
# -- General
["controller", "system_qt_theme", QCheckBox, "UseSystemQtTheme", False],
["controller", "show_menu_bar", QCheckBox, "AlwaysHideMenuBar", True],
["controller", "landing_tab", QComboBox, "LandingTabCombo", False],
["controller", "window_behaviour", QComboBox, "WindowBehaviourCombo", False],
["controller", "toolbar_style", QComboBox, "ToolbarStyle", False],
# -- Tray
["tray", "autostart", QCheckBox, "TrayAutoStart", False],
["tray", "mode", QComboBox, "TrayModeCombo", False],
["tray", "autostart_delay", QSpinBox, "TrayDelaySpinner", 0],
# -- Customise
["custom", "use_dpi_stages", QCheckBox, "DPIStagesAuto", True],
["custom", "dpi_stage_1", QSpinBox, "DPIStage1", 0],
["custom", "dpi_stage_2", QSpinBox, "DPIStage2", 0],
["custom", "dpi_stage_3", QSpinBox, "DPIStage3", 0],
["custom", "dpi_stage_4", QSpinBox, "DPIStage4", 0],
["custom", "dpi_stage_5", QSpinBox, "DPIStage5", 0],
# -- Editor
["editor", "live_preview", QCheckBox, "LivePreview", False],
["editor", "hide_key_labels", QCheckBox, "HideKeyLabels", False],
["editor", "system_cursors", QCheckBox, "UseSystemCursors", False],
["editor", "suppress_confirm_dialog", QCheckBox, "SuppressConfirmDialog", False],
["editor", "show_saved_colour_shades", QCheckBox, "ShowSavedColourShades", False],
]
def open_window(self, open_tab=None):
"""
Opens the Preferences window to change Polychromatic's options.
Parameters:
open_tab (int) Optionally jump to this specific tab index.
"""
self.pref_data = pref.load_file(self.paths.preferences)
self.prompt_restart = False
self.restart_applet = False
self.dialog = shared.get_ui_widget(self.appdata, "preferences", QDialog)
self.dialog.findChild(QDialogButtonBox, "DialogButtons").accepted.connect(self._save_changes)
# Set icons for tabs
tabs = self.dialog.findChild(QTabWidget, "PreferencesTabs")
tabs.setTabIcon(0, self.widgets.get_icon_qt("general", "controller"))
tabs.setTabIcon(1, self.widgets.get_icon_qt("general", "tray-applet"))
tabs.setTabIcon(2, self.widgets.get_icon_qt("effects", "paint"))
tabs.setTabIcon(3, self.widgets.get_icon_qt("general", "matrix"))
tabs.setTabIcon(4, self.widgets.get_icon_qt("emblems", "software"))
# Set icons for controls
if not self.appdata.system_qt_theme:
self.dialog.findChild(QPushButton, "SavedColoursButton").setIcon(self.widgets.get_icon_qt("general", "edit"))
self.dialog.findChild(QPushButton, "SavedColoursReset").setIcon(self.widgets.get_icon_qt("general", "reset"))
self.dialog.findChild(QToolButton, "DPIStagesReset").setIcon(self.widgets.get_icon_qt("general", "reset"))
# Options
for option in self.options:
self._load_option(option[0], option[1], option[2], option[3], option[4])
self.dialog.findChild(QPushButton, "SavedColoursButton").clicked.connect(self.modify_colours)
self.dialog.findChild(QPushButton, "SavedColoursReset").clicked.connect(self.reset_colours)
self.dialog.findChild(QToolButton, "DPIStagesReset").clicked.connect(self._reset_dpi_stages_from_hardware)
# Create Icon Picker
def _set_new_tray_icon(new_icon):
self.dbg.stdout("New tray icon saved in memory: " + new_icon, self.dbg.debug, 1)
self.pref_data["tray"]["icon"] = new_icon
self.restart_applet = True
tray_icon_picker = self.widgets.create_icon_picker_control(_set_new_tray_icon, self.pref_data["tray"]["icon"], self._("Choose Tray Applet Icon"), shared.IconPicker.PURPOSE_TRAY_ONLY)
tray_icon_widget = self.dialog.findChild(QLabel, "TrayIconPickerPlaceholder")
tray_icon_widget.parentWidget().layout().replaceWidget(tray_icon_widget, tray_icon_picker)
# Backend Buttons
self.dialog.findChild(QPushButton, "OpenRazerSettings").clicked.connect(self.menubar.openrazer.configure)
self.dialog.findChild(QPushButton, "OpenRazerAbout").clicked.connect(self.menubar.openrazer.about)
self.dialog.findChild(QPushButton, "OpenRazerRestartDaemon").clicked.connect(self.menubar.openrazer.restart_daemon)
self.dialog.findChild(QPushButton, "OpenRazerTroubleshoot").clicked.connect(self.menubar.openrazer.troubleshoot)
# Buttons disguised as labels
view_log = self.dialog.findChild(QLabel, "OpenRazerLog")
def view_log_clicked(QMouseEvent):
if QMouseEvent.button() == Qt.LeftButton:
self.openrazer.open_log()
view_log.mouseReleaseEvent = view_log_clicked
if not self.appdata.system_qt_theme:
self.dialog.findChild(QPushButton, "OpenRazerSettings").setIcon(self.widgets.get_icon_qt("general", "preferences"))
self.dialog.findChild(QPushButton, "OpenRazerAbout").setIcon(self.widgets.get_icon_qt("general", "info"))
self.dialog.findChild(QPushButton, "OpenRazerRestartDaemon").setIcon(self.widgets.get_icon_qt("general", "refresh"))
self.dialog.findChild(QPushButton, "OpenRazerTroubleshoot").setIcon(self.widgets.get_icon_qt("emblems", "utility"))
# Drop custom icons when using native themes
if self.appdata.system_qt_theme:
combo = self.di
|
alog.findChild(QComboBox, "LandingTabCombo")
for i in range(0, combo.count()):
combo.setItemIcon(i, QIcon())
# Prompt for a restart after changing these options
def _cb_set_restart_flag():
self.prompt_restart = True
self.dialog.findChild(QCheckBox, "UseSystemQtTheme").stateChanged.connect(_cb_set_restart_flag)
# Restart the tray applet after changing these optio
|
ns
def _cb_set_applet_flag(i):
self.restart_applet = True
self.dialog.findChild(QComboBox, "TrayModeCombo").currentIndexChanged.connect(_cb_set_applet_flag)
self.dialog.findChild(QCheckBox, "DPIStagesAuto").stateChanged.connect(_cb_set_applet_flag)
self.dialog.findChild(QCheckBox, "DPIStagesAuto").stateChanged.connect(self._refresh_dpi_stages_state)
for i in range(1, 6):
self.dialog.findChild(QSpinBox, "DPIStage" + str(i)).valueChanged.connect(_cb_set_applet_flag)
# FIXME: Hide incomplete features
self.dialog.findChild(QComboBox, "LandingTabCombo").removeItem(3)
self.dialog.findChild(QComboBox, "LandingTabCombo").removeItem(2)
# Disable tray applet tab if not installed
if not procpid.Proc
|
revesansparole/oacontainer
|
setup.py
|
Python
|
mit
| 1,872
| 0.001603
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# {{pkglts pysetup,
from os import walk
from os.path import abspath, normpath
from os.path import join as pj
from setuptools import setup, find_packages
short_descr = "Set of data structures used in openalea such as : graph, grid, topomesh"
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
def parse_requirements(fname):
with open(fname, 'r') as f:
txt = f.read()
reqs = []
for line in txt.splitlines():
line = line.strip()
|
if len(line) > 0 and not line.startswith("#"):
reqs.append(line)
return reqs
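# Hedged example (illustrative): given a requirements.txt containing
#   numpy>=1.7
#   # optional extras
#   nose
# parse_requirements("requirements.txt") returns ['numpy>=1.7', 'nose'],
# skipping blank lines and comment lines.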
# find version number in /src/$pkg_pth/version.py
version = {}
with open("src/openalea/container/version.py") as fp:
exec(fp.read(), version)
setup(
name='openalea.container',
version=version["__version__"],
description=short_descr,
long_description=readme + '\n\n' + history,
author="rev
|
esansparole",
author_email='revesansparole@gmail.com',
url='',
license="mit",
zip_safe=False,
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=parse_requirements("requirements.txt"),
tests_require=parse_requirements("dvlpt_requirements.txt"),
entry_points={
# 'console_scripts': [
# 'fake_script = openalea.fakepackage.amodule:console_script', ],
# 'gui_scripts': [
# 'fake_gui = openalea.fakepackage.amodule:gui_script',],
# 'wralea': wralea_entry_points
},
keywords='',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2 :: Only',
'Programming Language :: Python :: 2.7'
],
test_suite='nose.collector',
)
# }}
|
StackPointCloud/libcloud
|
docs/examples/misc/twisted_create_node.py
|
Python
|
apache-2.0
| 898
| 0.001114
|
from __future__ import absolute_import
from pprint import pprint
# pylint: disable=import-error
from twisted.internet import defer, threads, reactor
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
@defer.inlineCallbacks
def create_node(name):
node = yield threads.deferToThread(_thread_create_node,
name=name)
pprint(node)
reactor.stop()
def _thread_create_node(name):
Driver = get_driver(Provider.RACKSPACE)
conn = Driver('username', 'api key')
image = conn.list_images()[0]
size = conn.list_sizes()[0]
node = conn.create_node(name=name, image=image, size=size)
return
|
node
def stop(*args, **kwargs):
reacto
|
r.stop()
d = create_node(name='my-lc-node')
d.addCallback(stop) # pylint: disable=no-member
d.addErrback(stop) # pylint: disable=no-member
reactor.run()
|
arozumenko/locust
|
tests/locust/tests_enable_network_adapters.py
|
Python
|
apache-2.0
| 8,175
| 0.000367
|
"""
Tests for locust api module
These tests requires locust installed
"""
#pylint: disable=W0403,C0103,too-many-public-methods
import unittest
from subprocess import call
from netifaces import interfaces
from locust.api import Agent
from time import time, sleep
STATUSES = {'success': 'success',
'error': 'error'}
MESSAGES = {'success': 'Network adapter is enabled',
'error': 'Network adapter is not enabled'}
HOSTNAME = 'google.com'
def is_network_enabled():
"""Ping a host to check if network is enabled """
cmd_ptrn = 'ping -c {packets} {hostname} '
cmd_ptrn = cmd_ptrn.format(packets=1, hostname=HOSTNAME)
result = not bool(call(cmd_ptrn, shell=True))
sleep(1)
return result
def wait_for_network_disabled(seconds=30):
"""Wait until network is disabled"""
then = time() + seconds
while then > time():
if not is_network_enabled():
return True
return False
def wait_for_network_enabled(seconds=30):
"""Wait until network is enabled"""
then = time() + seconds
while then > time():
if is_network_enabled():
return True
return False
def check_network_interface_is_up(interface_name):
"""Check if netiface is up using 'ip' console command"""
cmd_ptrn = "ip a|grep ': {interface}:.*state UP'"
cmd_ptrn = cmd_ptrn.format(interface=interface_name)
response = 0 == call(cmd_ptrn, shell=True)
return response
def get_active_adapter():
""" Returns first active adapter from the list of adapters"""
for adapter in interfaces():
if check_network_interface_is_up(adapter):
return adapter
def enable_network_adapters_if_disabled():
"""Enables all network adapters if disabled"""
if not wait_for_network_enabled():
Agent.enable_network_adapters()
wait_for_network_enabled()
def disable_adapters(adapters=None):
""" Disables specified adapter or list of adapters.
Disables all if no adapters provided.
Returns error message in case of error """
time_to_be_disabled = 0
result = Agent.disable_network_adapters(adapters, time_to_be_disabled)
if 0 == result['list'][0]['status'].find('error'):
return 'Error while disabling adapters. Result: {}'.format(result)
if not wait_for_network_disabled():
return 'Error while disabling adapters. Network is still enabled.' \
' Result: {}'.format(result)
if wait_for_network_enabled():
return 'Error while disabling adapters. Network was enabled. ' \
'But it should stay disabled. Result: {}'.format(result)
class EnableNetworkAdaptersApi(unittest.TestCase):
"""Implements unit tests
    for the enable_network_adapters method of locust.api."""
wrong_status = 'Expected status: {expected}. Current status: {actual}'
wrong_message = 'Expected message: {expected}. Current message: {actual}'
def test_enable_one_network_adapter(self):
"""Enables an active adapter, that was disabled previously"""
adap
|
ter = get_active_adapter()
disable_adapters_error = disable_adapters(adapter)
self.assertFalse(disable_adapters_error, msg=disable_adapters_error)
self.assertTrue(wait_for_network_disabled(),
'Initially Network is enabled.')
result = Agent.enable_network_adapters(adapter)
self.a
|
ssertEqual(type(result), dict, 'Returned result should be dict')
status_from_result = result['list'][0]['status']
message_from_result = result['list'][0]['message']
self.assertEqual(status_from_result, STATUSES['success'],
self.wrong_status.format(
expected=STATUSES['success'],
actual=status_from_result))
self.assertEqual(message_from_result, MESSAGES['success'],
self.wrong_message.format(
expected=MESSAGES['success'],
actual=message_from_result))
self.assertTrue(wait_for_network_enabled(),
'Network was not enabled')
def test_enable_all_network_adapters_empty_list(self):
"""Enables all adapters, that was disabled previously
List of adapters is empty """
adapter = None
disable_adapters_error = disable_adapters(adapter)
self.assertFalse(disable_adapters_error, msg=disable_adapters_error)
self.assertTrue(wait_for_network_disabled(),
'Initially Network is enabled.')
result = Agent.enable_network_adapters(adapter)
self.assertEqual(type(result), dict, 'Returned result should be dict')
status_from_result = result['list'][0]['status']
message_from_result = result['list'][0]['message']
self.assertEqual(status_from_result, STATUSES['success'],
self.wrong_status.format(
expected=STATUSES['success'],
actual=status_from_result))
self.assertEqual(message_from_result, MESSAGES['success'],
self.wrong_message.format(
expected=MESSAGES['success'],
actual=message_from_result))
self.assertTrue(wait_for_network_enabled(),
'Network was not enabled')
def test_enable_all_network_adapters(self):
"""Enables all adapters, that was disabled previously"""
adapter = interfaces()
disable_adapters_error = disable_adapters(adapter)
self.assertFalse(disable_adapters_error, msg=disable_adapters_error)
self.assertTrue(wait_for_network_disabled(),
'Initially Network is enabled.')
result = Agent.enable_network_adapters(adapter)
self.assertEqual(type(result), dict, 'Returned result should be dict')
status_from_result = result['list'][0]['status']
message_from_result = result['list'][0]['message']
self.assertEqual(status_from_result, STATUSES['success'],
self.wrong_status.format(
expected=STATUSES['success'],
actual=status_from_result))
self.assertEqual(message_from_result, MESSAGES['success'],
self.wrong_message.format(
expected=MESSAGES['success'],
actual=message_from_result))
self.assertTrue(wait_for_network_enabled(),
'Network was not enabled')
def test_enable_non_existing_network_adapters(self):
""" Trying to use adapter name that does not exist"""
adapter = 'this_adapter_does_not_exist'
disable_adapters_error = disable_adapters()
self.assertFalse(disable_adapters_error, msg=disable_adapters_error)
self.assertTrue(wait_for_network_disabled(),
'Initially Network is enabled.')
result = Agent.enable_network_adapters(adapter)
self.assertEqual(type(result), dict, 'Returned result should be dict')
status_from_result = result['list'][0]['status']
message_from_result = result['list'][0]['message']
self.assertEqual(status_from_result, STATUSES['error'],
self.wrong_status.format(
expected=STATUSES['error'],
actual=status_from_result))
self.assertEqual(message_from_result, MESSAGES['error'],
self.wrong_message.format(
expected=MESSAGES['error'],
actual=message_from_result))
self.assertFalse(wait_for_network_enabled(),
'Network was enabled. But it should stay disabled.')
enable_network_adapters_if_disabled()
def setUp(self):
enable_network_adapters_if_disabled()
@classmethod
def tearDownClass(cls):
enable_network_adapters_if_disabled()
def main():
"""method for invoking unit tests."""
unittest.main(verbosity=3)
if __name__ == '__main__':
main()
|
kaiocesar/simplemooc
|
simplemooc/core/views.py
|
Python
|
mit
| 192
| 0.026042
|
from django.shortcuts import render
from django.http import HttpResponse
def home(request):
return render(request, 'home.html')
def contact(request):
    return render(request, 'contact.html')
|
metabrainz/picard
|
picard/pluginmanager.py
|
Python
|
gpl-2.0
| 17,081
| 0.000995
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2007 Lukáš Lalinský
# Copyright (C) 2014 Shadab Zafar
# Copyright (C) 2015-2021 Laurent Monin
# Copyright (C) 2019 Wieland Hoffmann
# Copyright (C) 2019-2020 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from functools import partial
import imp
import importlib
import json
import os.path
import shutil
import tempfile
import zipfile
import zipimport
from PyQt5 import QtCore
from picard import log
from picard.const import (
PLUGINS_API,
USER_PLUGIN_DIR,
)
from picard.plugin import (
_PLUGIN_MODULE_PREFIX,
PluginData,
PluginWrapper,
_unregister_module_extensions,
)
import picard.plugins
from picard.version import (
Version,
VersionError,
)
_SUFFIXES = tuple(importlib.machinery.all_suffixes())
_PACKAGE_ENTRIES = ("__init__.py", "__init__.pyc", "__init__.pyo")
_PLUGIN_PACKAGE_SUFFIX = ".picard"
_PLUGIN_PACKAGE_SUFFIX_LEN = len(_PLUGIN_PACKAGE_SUFFIX)
_FILEEXTS = ('.py', '.pyc', '.pyo', '.zip')
_UPDATE_SUFFIX = '.update'
_UPDATE_SUFFIX_LEN = len(_UPDATE_SUFFIX)
_extension_points = []
def is_update(path):
return path.endswith(_UPDATE_SUFFIX)
def strip_update_suffix(path):
if not is_update(path):
return path
return path[:-_UPDATE_SUFFIX_LEN]
def is_zip(path):
return path.endswith('.zip')
def strip_zip_suffix(path):
if not is_zip(path):
return path
return path[:-4]
def is_package(path):
return path.endswith(_PLUGIN_PACKAGE_SUFFIX)
def strip_package_suffix(path):
if not is_package(path):
return path
return path[:-_PLUGIN_PACKAGE_SUFFIX_LEN]
def is_zipped_package(path):
return path.endswith(_PLUGIN_PACKAGE_SUFFIX + '.zip')
def _plugin_name_from_path(path):
path = os.path.normpath(path)
if is_zip(path):
name = os.path.basename(strip_zip_suffix(path))
if is_package(name):
return strip_package_suffix(name)
else:
return name
elif os.path.isdir(path):
for entry in _PACKAGE_ENTRIES:
if os.path.isfile(os.path.join(path, entry)):
return os.path.basename(path)
else:
file = os.path.basename(path)
if file in _PACKAGE_ENTRIES:
return None
name, ext = os.path.splitext(file)
if ext in _SUFFIXES:
return name
return None
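# Illustrative examples (not part of the original module) of how
# _plugin_name_from_path above derives plugin names; the paths are hypothetical:
#
#   'foo.py'          -> 'foo'
#   'foo.picard.zip'  -> 'foo'   (zipped plugin package, both suffixes stripped)
#   'foo/__init__.py' -> None    (package entry files themselves have no name)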
def load_manifest(archive_path):
archive = zipfile.ZipFile(archive_path)
manifest_data = None
with archive.open('MANIFEST.json') as f:
manifest_data = json.loads(str(f.read().decode()))
return manifest_data
def zip_import(path):
if (not is_zip(path) or not os.path.isfile(path)):
return (None, None, None)
try:
zip_importer = zipimport.zipimporter(path)
plugin_name = _plugin_name_from_path(path)
manifest_data = None
if is_zipped_package(path):
try:
manifest_data = load_manifest(path)
except Exception as why:
log.warning("Failed to load manifest data from json: %s", why)
return (zip_importer, plugin_name, manifest_data)
except zipimport.ZipImportError as why:
log.error("ZIP import error: %s", why)
return (None, None, None)
def _compatible_api_versions(api_versions):
versions = [Version.from_string(v) for v in list(api_versions)]
return set(versions) & set(picard.api_versions_tuple)
class PluginManager(QtCore.QObject):
plugin_installed = QtCore.pyqtSignal(PluginWrapper, bool)
plugin_updated = QtCore.pyqtSignal(str, bool)
plugin_removed = QtCore.pyqtSignal(str, bool)
plugin_errored = QtCore.pyqtSignal(str, str, bool)
def __init__(self, plugins_directory=None):
super().__init__()
self.plugins = []
self._available_plugins = None # None=never loaded, [] = empty
if plugins_directory is None:
plugins_directory = USER_PLUGIN_DIR
self.plugins_directory = os.path.normpath(plugins_directory)
@property
def available_plugins(self):
return self._available_plugins
def plugin_error(self, name, error, *args, **kwargs):
"""Log a plugin loading error for the plugin `name` and signal the
error via the `plugin_errored` signal.
A string consisting of all `args` interpolated into `error` will be
passed to the function given via the `log_func` keyword argument
(default: log.error) and as the error message to the `plugin_errored`
signal."""
error = error % args
log_func = kwargs.get('log_func', log.error)
log_func(error)
self.plugin_errored.emit(name, error, False)
def _marked_for_update(self):
for file in os.listdir(self.plugins_directory):
if file.endswith(_UPDATE_SUFFIX):
source_path = os.path.join(self.plugins_directory, file)
target_path = strip_update_suffix(source_path)
plugin_name = _plugin_name_from_path(target_path)
if plugin_name:
yield (source_path, target_path, plugin_name)
else:
log.error('Cannot get plugin name from %r', source_path)
def handle_plugin_updates(self):
for source_path, target_path, plugin_name in self._marked_for_update():
self._remove_plugin(plugin_name)
os.rename(source_path, target_path)
            log.debug('Updating plugin %r (%r)', plugin_name, target_path)
def load_plugins_from_directory(self, plugindir):
plugindir = os.path.normpath(plugindir)
if not os.path.isdir(plugindir):
log.info("Plugin directory %r doesn't exist", plugindir)
return
if plugindir == self.plugins_directory:
# .update trick is only for plugins installed through the Picard UI
# and only for plugins in plugins_directory (USER_PLUGIN_DIR by default)
self.handle_plugin_updates()
# now load found plugins
names = set()
for path in [os.path.join(plugindir, file) for file in os.listdir(plugindir)]:
name = _plugin_name_from_path(path)
if name:
names.add(name)
log.debug("Looking for plugins in directory %r, %d names found",
plugindir,
len(names))
for name in sorted(names):
try:
self._load_plugin_from_directory(name, plugindir)
except Exception:
self.plugin_error(name, _("Unable to load plugin '%s'"), name, log_func=log.exception)
def _get_plugin_index_by_name(self, name):
for index, plugin in enumerate(self.plugins):
if name == plugin.module_name:
return (plugin, index)
return (None, None)
def _load_plugin_from_directory(self, name, plugindir):
module_file = None
zipfilename = os.path.join(plugindir, name + '.zip')
(zip_importer, module_name, manifest_data) = zip_import(zipfilename)
if zip_importer:
name = module_name
if not zip_importer.find_module(name):
error = _("Failed loading zipped plugin %r from %r")
self.plugin_error(name, error, name, zipfilename)
return None
module_pathname = zip_importer.get_filename(name)
else:
try:
info = imp.find_module(
|
tomhenderson/ns-3-dev-git
|
src/olsr/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 542,189
| 0.014563
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.olsr', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::EventImpl> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector [class]
module.add_class('EventGarbageCollector', import_from_module='ns.core')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress', import_from_module='ns.network')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['0'])
## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['1'])
## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['2'])
## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['3'])
## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['4'])
## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['5'])
## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['6'])
    ## int-to-type.h (module 'core'): ns
|
gusai-francelabs/datafari
|
windows/python/Lib/warnings.py
|
Python
|
apache-2.0
| 14,748
| 0.001492
|
"""Python part of the warnings subsystem."""
# Note: function level imports should *not* be used
# in this module as it may cause import lock deadlock.
# See bug 683658.
import linecache
import sys
import types
__all__ = ["warn", "warn_explicit", "showwarning",
"formatwarning", "filterwarnings", "simplefilter",
"resetwarnings", "catch_warnings"]
def warnpy3k(message, category=None, stacklevel=1):
"""Issue a deprecation warning for Python 3.x related changes.
Warnings are omitted unless Python is started with the -3 option.
"""
if sys.py3kwarning:
if category is None:
category = DeprecationWarning
warn(message, category, stacklevel+1)
def _show_warning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
if file is None:
# sys.stderr is None - warnings get lost
return
try:
file.write(formatwarning(message, category, filename, lineno, line))
except (IOError, UnicodeError):
pass # the file (probably stderr) is invalid - this warning gets lost.
# Keep a working version around in case the deprecation of the old API is
# triggered.
showwarning = _show_warning
def formatwarning(message, category, filename, lineno, line=None):
"""Function to format a warning the standard way."""
try:
unicodetype = unicode
except NameError:
unicodetype = ()
try:
message = str(message)
except UnicodeEncodeError:
pass
s = "%s: %s: %s\n" % (lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
if isinstance(s, unicodetype) and isinstance(line, str):
line = unicode(line, 'latin1')
s += " %s\n" % line
if isinstance(s, unicodetype) and isinstance(filename, str):
enc = sys.getfilesystemencoding()
if enc:
try:
filename = unicode(filename, enc)
except UnicodeDecodeError:
pass
s = "%s:%s" % (filename, s)
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=0):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, basestring), "message must be a string"
assert isinstance(category, (type, types.ClassType)), \
"category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, basestring), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
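# Illustrative usage (not part of the original module): turn every
# DeprecationWarning raised from modules matching 'mypkg\.' into an error
# ('mypkg' is a hypothetical package name):
#
#   filterwarnings('error', category=DeprecationWarning, module=r'mypkg\.')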
def simplefilter(action, category=Warning, lineno=0, append=0):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'category' -- a class that the warning must be a subclass of
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError, msg:
print >>sys.stderr, "Invalid -W option ignored:", msg
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
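# Illustrative note (not part of the original module): a -W option string has up
# to five colon-separated fields, action:message:category:module:lineno, e.g.
# "ignore::DeprecationWarning:mypkg" silences DeprecationWarning from the
# hypothetical module 'mypkg'.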
# Helper for _setoption()
def _getaction(action):
if not action:
        return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warni
|
collinstocks/eventlet
|
setup.py
|
Python
|
mit
| 1,289
| 0
|
#!/usr/bin/env python
from setuptools import find_packages, setup
from eventlet import __version__
from os import path
setup(
name='eventlet',
version=__version__,
description='Highly concurrent networking library',
author='Linden Lab',
author_email='eventletdev@lists.secondlife.com',
url='http://eventlet.net',
packages=find_packages(exclude=['benchmarks', 'tests', 'tests.*']),
install_requires=(
'greenlet >= 0.3',
),
    zip_safe=False,
long_description=open(
path.join(
path.dirname(__file__),
'README.rst'
)
).read(),
test_suite='nose.collector',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
"Development Status :: 4 - Beta",
]
)
|
openilabs/falconlab
|
env/lib/python2.7/site-packages/talons/helpers.py
|
Python
|
mit
| 1,777
| 0
|
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Jay Pipes
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
import traceback
LOG = logging.getLogger(__name__)
def import_function(import_str):
"""
Attempts to import the specified class method or regular function,
and returns a callable.
:raises ImportError if the specified import_str cannot be found.
:raises TypeError if the specified import_str is found but is
not a callable.
"""
mod_str, _sep, class_str = import_str.rpartition('.')
try:
__import__(mod_str)
fn = getattr(sys.modules[mod_str], class_str)
if not callable(fn):
            msg = '{0} is not callable'.format(import_str)
LOG.error(msg)
raise TypeError(msg)
except (ValueError, AttributeError):
msg = 'Method or function {0} cannot be found.'.format(import_str)
        err_details = traceback.format_exception(*sys.exc_info())
        LOG.error(msg + ' Details: ({0})'.format(err_details))
raise ImportError(msg)
except ImportError:
msg = 'Module {0} cannot be found.'.format(import_str)
        err_details = traceback.format_exception(*sys.exc_info())
LOG.error(msg + ' Details: (%s)'.format(err_details))
raise
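# Illustrative usage (not part of the original module), assuming the standard
# library module 'os.path' is available in the target environment:
#
#   >>> fn = import_function('os.path.join')
#   >>> fn('a', 'b')
#   'a/b'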
|
VisualComputingInstitute/towards-reid-tracking
|
lib/models/lunet2.py
|
Python
|
mit
| 1,896
| 0.007384
|
import DeepFried2 as df
from .. import dfext
def mknet(mkbn=lambda chan: df.BatchNormalization(chan, 0.95)):
kw = dict(mkbn=mkbn)
net = df.Sequential(
# -> 128x48
df.SpatialConvolutionCUDNN(3, 64, (7,7), border='same', bias=None),
dfext.resblock(64, **kw),
df.PoolingCUDNN((2,2)), # -> 64x24
dfext.resblock(64, **kw),
dfext.resblock(64, **kw),
dfext.resblock(64, 96, **kw),
df.PoolingCUDNN((2,2)), # -> 32x12
dfext.resblock(96, **kw),
dfext.resblock(96, **kw),
df.PoolingCUDNN((2,2)), # -> 16x6
dfext.resblock(96, **kw),
dfext.resblock(96, **kw),
dfext.resblock(96, 128, **kw),
df.PoolingCUDNN((2,2)), # -> 8x3
dfext.resblock(128, **kw),
dfext.resblock(128, **kw),
df.PoolingCUDNN((2,3)), # -> 4x1
dfext.resblock(128, **kw),
# Eq. to flatten + linear
        df.SpatialConvolutionCUDNN(128, 256, (4,1), bias=None),
mkbn(256), df.ReLU(),
df.StoreOut(df.SpatialConvolutionCUDNN(256, 128, (1,1)))
)
net.emb_mod = net[-1]
    net.in_shape = (128, 48)
net.scale_factor = (2*2*2*2*2, 2*2*2*2*3)
print("Net has {:.2f}M params".format(df.utils.count_params(net)/1000/1000), flush=True)
return net
def add_piou(lunet2):
newnet = lunet2[:-1]
newnet.emb_mod = lunet2[-1]
newnet.iou_mod = df.StoreOut(df.Sequential(df.SpatialConvolutionCUDNN(256, 1, (1,1)), df.Sigmoid()))
newnet.add(df.RepeatInput(newnet.emb_mod, newnet.iou_mod))
newnet.embs_from_out = lambda out: out[0]
newnet.ious_from_out = lambda out: out[1][:,0] # Also remove the first size-1 dimension.
newnet.in_shape = lunet2.in_shape
newnet.scale_factor = lunet2.scale_factor
print("Added {:.2f}k params".format(df.utils.count_params(newnet.iou_mod)/1000), flush=True)
return newnet
|
bennymartinson/Oort
|
oort/__init__.py
|
Python
|
gpl-3.0
| 335
| 0.008955
|
from utilities import docs, ftom, mtof, scale_val
from dynamic_value import *
from behaviors import *
from schedule import now, wait, sprout
from rtcmix_import.commands import *
from instruments import *
import busses
#__all__ = ["rtcmix_import", "utilities", "abstract", "dynamic_value", "instruments", "behaviors", "schedule"]
|
harvardinformatics/jobTree
|
batchSystems/lsf.py
|
Python
|
mit
| 12,569
| 0.010661
|
#!/usr/bin/env python
#Copyright (C) 2013 by Thomas Keane (tk2@sanger.ac.uk)
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import os
import re
import subprocess
import time
import sys
import random
import string
from Queue import Queue, Empty
from threading import Thread
from datetime import date
from sonLib.bioio import logger
from sonLib.bioio import system
from jobTree.batchSystems.abstractBatchSystem import AbstractBatchSystem
from jobTree.src.master import getParasolResultsFileName
class MemoryString:
def __init__(self, string):
if string[-1] == 'K' or string[-1] == 'M' or string[-1] == 'G':
self.unit = string[-1]
self.val = float(string[:-1])
else:
self.unit = 'B'
self.val = float(string)
self.bytes = self.byteVal()
def __str__(self):
if self.unit != 'B':
return str(self.val) + self.unit
else:
return str(self.val)
def byteVal(self):
if self.unit == 'B':
return self.val
elif self.unit == 'K':
return self.val * 1000
elif self.unit == 'M':
return self.val * 1000000
elif self.unit == 'G':
return self.val * 1000000000
def __cmp__(self, other):
return cmp(self.bytes, other.bytes)
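# Illustrative sketch (not part of the original module): MemoryString parses
# strings such as '512M' into byte counts using decimal multipliers, so memory
# requests can be compared across units:
#
#   >>> MemoryString('512M').bytes
#   512000000.0
#   >>> MemoryString('1G') > MemoryString('900M')
#   True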
def prepareBsub(cpu, mem):
mem = '' if mem is None else '-R "select[type==X86_64 && mem > ' + str(int(mem/ 1000000)) + '] rusage[mem=' + str(int(mem/ 1000000)) + ']" -M' + str(int(mem/ 1000000)) + '000'
cpu = '' if cpu is None else '-n ' + str(int(cpu))
bsubline = ["bsub", mem, cpu,"-cwd", ".", "-o", "/dev/null", "-e", "/dev/null"]
return bsubline
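# Illustrative sketch (not part of the original module): prepareBsub turns a cpu
# count and a memory request in bytes into a bsub command line, e.g.
#
#   prepareBsub(cpu=2, mem=4000000000)
#   -> ['bsub',
#       '-R "select[type==X86_64 && mem > 4000] rusage[mem=4000]" -M4000000',
#       '-n 2', '-cwd', '.', '-o', '/dev/null', '-e', '/dev/null']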
def bsub(bsubline):
process = subprocess.Popen(" ".join(bsubline), shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
liney = process.stdout.readline()
logger.info("BSUB: " + liney)
result = int(liney.strip().split()[1].strip('<>'))
logger.debug("Got the job id: %s" % (str(result)))
return result
def getjobexitcode(lsfJobID):
job, task = lsfJobID
#first try bjobs to find out job state
args = ["bjobs", "-l", str(job)]
logger.info("Checking job exit code for job via bjobs: " + str(job))
process = subprocess.Popen(" ".join(args), shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
started = 0
for line in process.stdout:
if line.find("Done successfully") > -1:
logger.info("bjobs detected job completed for job: " + str(job))
return 0
elif line.find("Completed <exit>") > -1:
logger.info("bjobs detected job failed for job: " + str(job))
return 1
elif line.find("New job is waiting for scheduling") > -1:
logger.info("bjobs detected job pending scheduling for job: " + str(job))
return None
elif line.find("PENDING REASONS") > -1:
logger.info("bjobs detected job pending for job: " + str(job))
return None
elif line.find("Started on ") > -1:
started = 1
if started == 1:
logger.info("bjobs detected job started but not completed: " + str(job))
return None
#if not found in bjobs, then try bacct (slower than bjobs)
logger.info("bjobs failed to detect job - trying bacct: " + str(job))
args = ["bacct", "-l", str(job)]
logger.info("Checking job exit code for job via bacct:" + str(job))
process = subprocess.Popen(" ".join(args), shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
for line in process.stdout:
if line.find("Completed <done>") > -1:
logger.info("Detected job completed for job: " + str(job))
return 0
elif line.find("Completed <exit>") > -1:
logger.info("Detected job failed for job: " + str(job))
return 1
logger.info("Cant determine exit code for job or job still running: " + str(job))
return None
class Worker(Thread):
def __init__(self, newJobsQueue, updatedJobsQueue, boss):
Thread.__init__(self)
self.newJobsQueue = newJobsQueue
self.updatedJobsQueue = updatedJobsQueue
self.currentjobs = list()
self.runningjobs = set()
self.boss = boss
def run(self):
while True:
# Load new job ids:
while not self.newJobsQueue.empty():
self.currentjobs.append(self.newJobsQueue.get())
# Launch jobs as necessary:
while len(self.currentjobs) > 0:
jobID, bsubline = self.currentjobs.pop()
lsfJobID = bsub(bsubline)
self.boss.jobIDs[(lsfJobID, None)] = jobID
self.boss.lsfJobIDs[jobID] = (lsfJobID, None)
self.runningjobs.add((lsfJobID, None))
# Test known job list
for lsfJobID in list(self.runningjobs):
exit = getjobexitcode(lsfJobID)
if exit is not None:
self.updatedJobsQueue.put((lsfJobID, exit))
self.runningjobs.remove(lsfJobID)
time.sleep(10)
class LSFBatchSystem(AbstractBatchSystem):
"""The interface for running jobs on lsf, runs all the jobs you
give it as they come in, but in parallel.
"""
@classmethod
def getDisplayNames(cls):
"""
Names used to select this batch system.
"""
return ["lsf","LSF"]
def __init__(self, config, maxCpus, maxMemory):
AbstractBatchSystem.__init__(self, config, maxCpus, maxMemory) #Call the parent constructor
self.lsfResultsFile = getParasolResultsFileName(config.attrib["job_tree"])
#Reset the job queue and results (initially, we do this again once we've killed the jobs)
self.lsfResultsFileHandle = open(self.lsfResultsFile, 'w')
        self.lsfResultsFileHandle.close() #We lose any previous state in this file, and ensure the file's existence
self.currentjobs = set()
self.obtainSystemConstants()
self.jobIDs = dict()
self.lsfJobIDs = dict()
self.nextJobID = 0
self.newJobsQueue = Queue()
self.updatedJobsQueue = Queue()
self.worker = Worker(self.newJobsQueue, self.updatedJobsQueue, self)
self.worker.setDaemon(True)
self.worker.start()
    def __del__(self):
        #Closes the file handle associated with the results file.
        self.lsfResultsFileHandle.close() #Close the results file, because we're done.
def issueJob(self, command, memory, cpu):
jobID = self.nextJobID
self.nextJobID += 1
self.currentjobs.add(jobID)
bsubline = prepareBsub(cpu, memory) + [command]
self.newJobsQueue.put((jobID, bsubline))
logger.info("Issued the job command: %s with job id: %s " % (command, str(jobID)))
return jobID
def getLsfID(self, jobID):
if not jobID in self.lsfJobIDs:
RuntimeError("Unknown jobI
|
GHubgenius/clusterd
|
src/platform/weblogic/fingerprints/WL10s.py
|
Python
|
mit
| 510
| 0.001961
|
from src.platform.weblogic.interfaces import WINTERFACES, WLConsole
class FPrint(WLConsole):
""" WebLogic 10 is bugged when using Oracle's custom implementation of SSL.
Only if the default Java implementation is set will this work; otherwise,
Oracle sends an SSL23_GET_SERVER_HELLO and breaks OpenSSL.
"""
def __init__(self):
super(FPrint, self).__init__()
self.version = "10"
self.title = WINTERFACES.WLS
self.port = 9002
self.ssl = True
| |
rdhyee/waterbutler
|
tests/providers/onedrive/test_path.py
|
Python
|
apache-2.0
| 5,141
| 0.003307
|
import pytest
from waterbutler.providers.onedrive.path import OneDrivePath
from tests.providers.onedrive.fixtures import (path_fixtures,
root_provider_fixtures,
subfolder_provider_fixtures)
class TestApiIdentifier:
def test_api_identifier_none(self):
path = OneDrivePath('/foo', _ids=('root', None,))
assert path.api_identifier is None
def test_api_identifier_root(self):
path = OneDrivePath('/', _ids=('root',))
assert path.api_identifier == ('root',)
def test_api_identifier_folder_id(self):
path = OneDrivePath('/', _ids=('123456',))
assert path.api_identifier == ('items', '123456',)
def test_api_identifier_file_id(self):
path = OneDrivePath('/foo', _ids=('123456','7891011',))
assert path.api_identifier == ('items', '7891011',)
class TestNewFromResponseRootProvider:
def test_file_in_root(self, root_provider_fixtures):
od_path = OneDrivePath.new_from_response(root_provider_fixtures['file_metadata'], 'root')
assert od_path.identifier == root_provider_fixtures['file_id']
assert str(od_path) == '/toes.txt'
assert len(od_path.parts) == 2
ids = [x.identifier for x in od_path.parts]
        assert ids == ['root', root_provider_fixtures['file_id']]
    def test_folder_in_root(self, root_provider_fixtures):
od_path = OneDrivePath.new_from_response(root_provider_fixtures['folder_metadata'], 'root')
assert od_path.identifier == root_provider_fixtures['folder_id']
assert str(od_path) == '/teeth/'
assert len(od_path.parts) == 2
ids = [x.identifier for x in od_path.parts]
assert ids == ['root', root_provider_fixtures['folder_id']]
def test_file_in_subdir(self, root_provider_fixtures):
od_path = OneDrivePath.new_from_response(root_provider_fixtures['subfile_metadata'], 'root')
assert od_path.identifier == root_provider_fixtures['subfile_id']
assert str(od_path) == '/teeth/bicuspid.txt'
assert len(od_path.parts) == 3
ids = [x.identifier for x in od_path.parts]
assert ids == ['root',
root_provider_fixtures['folder_id'],
root_provider_fixtures['subfile_id']]
def test_fails_without_base_folder(self, root_provider_fixtures):
with pytest.raises(Exception):
od_path = OneDrivePath.new_from_response(root_provider_fixtures['file_metadata'])
def test_insert_zero_ids(self, path_fixtures):
file_metadata = path_fixtures['deeply_nested_file_metadata']
od_path = OneDrivePath.new_from_response(file_metadata, 'root')
file_id = path_fixtures['deeply_nested_file_id']
assert od_path.identifier == file_id
assert str(od_path) == '/deep/deeper/deepest/positively abyssyal/the kraken.txt'
assert len(od_path.parts) == 6
ids = [x.identifier for x in od_path.parts]
assert ids == ['root', None, None, None, 'F4D50E400DFE7D4E!298', file_id]
class TestNewFromResponseSubfolderProvider:
def test_file_in_root(self, subfolder_provider_fixtures):
od_path = OneDrivePath.new_from_response(subfolder_provider_fixtures['file_metadata'],
subfolder_provider_fixtures['root_id'])
assert od_path.identifier == subfolder_provider_fixtures['file_id']
assert str(od_path) == '/bicuspid.txt'
assert len(od_path.parts) == 2
ids = [x.identifier for x in od_path.parts]
assert ids == [subfolder_provider_fixtures['root_id'],
subfolder_provider_fixtures['file_id']]
def test_subfolder_base_is_folder(self, subfolder_provider_fixtures):
od_path = OneDrivePath.new_from_response(subfolder_provider_fixtures['folder_metadata'],
subfolder_provider_fixtures['root_id'])
assert od_path.identifier == subfolder_provider_fixtures['folder_id']
assert str(od_path) == '/crushers/'
assert len(od_path.parts) == 2
ids = [x.identifier for x in od_path.parts]
assert ids == [subfolder_provider_fixtures['root_id'],
subfolder_provider_fixtures['folder_id']]
def test_file_in_subdir(self, subfolder_provider_fixtures):
od_path = OneDrivePath.new_from_response(subfolder_provider_fixtures['subfile_metadata'],
subfolder_provider_fixtures['root_id'],
base_folder_metadata=subfolder_provider_fixtures['root_metadata'])
assert od_path.identifier == subfolder_provider_fixtures['subfile_id']
assert str(od_path) == '/crushers/molars.txt'
assert len(od_path.parts) == 3
ids = [x.identifier for x in od_path.parts]
assert ids == [subfolder_provider_fixtures['root_id'],
subfolder_provider_fixtures['folder_id'],
subfolder_provider_fixtures['subfile_id']]
|
bramfoo/solarstats
|
tests/__init__.py
|
Python
|
gpl-2.0
| 82
| 0
|
import logging
# Disable logging in unit tests
logging.disable(logging.CRITICAL)
|
hacpai/show-me-the-code
|
Python/0033/main.py
|
Python
|
gpl-2.0
| 540
| 0.038889
|
import math
def square_root ( a ):
"""Computes squar root of a
"""
    epsilon = 0.1e-11
x = a
while True:
y = ( x + a / x ) / 2.0
        if abs( y - x ) < epsilon:
return y
x = y
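# square_root above uses the Babylonian (Newton) iteration: each step replaces x
# with the average of x and a/x, which converges to sqrt(a) for a > 0.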
def test_square_root():
"""Compares custom square and math.sqrt.
"""
    a = 1.0
while a < 10.0:
print a, '{:<13}'.format( square_root( a ) ), \
              '{:<13}'.format( math.sqrt( a ) ), \
abs( square_root( a ) - math.sqrt( a ) )
a += 1
test_square_root()
|
pydicom/sendit
|
sendit/apps/main/management/commands/export_metrics.py
|
Python
|
mit
| 1,497
| 0.014028
|
from sendit.logger import bot
from sendit.apps.main.models import Batch
from django.core.management.base import (
BaseCommand
)
from sendit.apps.main.models import Batch
from sendit.apps.main.tasks import import_dicomdir
from sendit.apps.main.utils import ls_fullpath
from sendit.apps.api.utils import get_size
import sys
import os
import datetime
import pandas
class Command(BaseCommand):
help = '''export metrics about size and times to file'''
def handle(self,*args, **options):
df = pandas.DataFrame(columns=['batch_id','status','size_mb',
'start_time','finish_time',
'total_time_sec','total_time_min'])
output_file = 'sendit-process-time-%s.tsv' %datetime.datetime.today().strftime('%Y-%m-%d')
for batch in Batch.objects.all():
df.loc[batch.id,'batch_id'] = batch.id
df.loc[batch.id,'status'] = batch.status
if batch.status == "DONE":
df.loc[batch.id,'size_gb'] = get_size(batch)
df.loc[batch.id,'start_time'] = batch.qa['StartTime']
df.loc[batch.id,'finish_time'] = batch.qa['FinishTime']
time = batch.qa['FinishTime'] - batch.qa['StartTime']
df.loc[batch.id,'total_time_sec'] = time
df.loc[batch.id,'total_time_min'] = time/60.0
df.sort_values(by=['status'],inplace=True)
df.to_csv(output_file,sep='\t')
|
mattduan/proof
|
ProofResourceStrategy.py
|
Python
|
bsd-3-clause
| 946
| 0.004228
|
"""
ProofResourceStrategy is the real implementation of initializing the resource
data for ProofResource. There can be multiple strategies for one ProofResource.
Main considerations are a configuration file, a database (possibly via a
ProofInstance), and an XML file.
This is the base class with all interfaces needed by ProofResource. Each different
strategies can further extend this class to include strategy-specific
functions.
By separating the strategy process from resource interfaces, we can make
ProofInstance work both as a stand-alone and a shared persistent data layer.
It also makes testing easier by creating dummy strategy implementations.
"""
__version__='$Revision: 117 $'[11:-2]
__author__ = "Duan Guoqiang (mattgduan@gmail.com)"
import logging
class ProofResourceStrategy:
def __init__(self, logger=None):
pass
def __parseXMLDBMap(self, schema, xml):
pass
def getDatabaseMap(self, schema):
pass
|
sputnick-dev/weboob
|
modules/lutim/browser.py
|
Python
|
agpl-3.0
| 1,959
| 0.001021
|
# -*- coding: utf-8 -*-
# Copyright(C) 2015 Vincent A
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import math
from urlparse import urljoin
from StringIO import StringIO
from weboob.browser import PagesBrowser, URL
from .pages import ImagePage, UploadPage
class LutimBrowser(PagesBrowser):
BASEURL = 'https://lut.im'
VERIFY = False # XXX SNI is not supported
image_page = URL('/(?P<id>.+)', ImagePage)
upload_page = URL('/', UploadPage)
def __init__(self, base_url, *args, **kw):
PagesBrowser.__init__(self, *args, **kw)
self.base_url = self.BASEURL = base_url
def fetch(self, paste):
self.location(paste.id)
assert self.image_page.is_here()
paste.contents = unicode(self.page.contents.encode('base64'))
paste.title = self.page.filename
def post(self, paste, max_age=0):
bin = paste.contents.decode('base64')
name = paste.title or 'file' # filename is mandatory
filefield = {'file': (name, StringIO(bin))}
params = {'format': 'json'}
if max_age:
params['delete-day'] = math.ceil(max_age / 86400.)
self.location('/', data=params, files=filefield)
assert self.upload_page.is_here()
info = self.page.fetch_info()
paste.id = urljoin(self.base_url, info['short'])
|
jentjr/enviropy
|
enviropy/io/file.py
|
Python
|
mit
| 123
| 0.01626
|
import pandas as pd
from enviropy import Enviropy
def read_csv(fname):
    df = pd.read_csv(fname)
return Enviropy(df)
|
pythonvlc/PyConES-2015
|
pycones/blog/admin.py
|
Python
|
mit
| 336
| 0.002976
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import ModelForm, TextInput
from django.contrib import admin
from blog.models import Post
class PostAdmin(admin.ModelAdmin):
list_display = ['id', 'title', 'created', 'status']
list_filter = ('status', )
admin.site.register(Post, PostAdmin)
|
yosefk/heapprof
|
ok.py
|
Python
|
bsd-2-clause
| 823
| 0.010936
|
#!/usr/bin/python
'''test that the build and run results are OK'''
# threads.heapprof should have 2 call stacks allocating 1024 blocks each
import commands, sys
print 'testing that *.heapprof files contain the expected results...'
assert commands.getoutput("grep ' 1024 ' threads.heapprof | wc -l").strip() == '2'
second = open('second.heapprof').read()
if 'no heap blocks found' in second:
print "threads.heapprof is OK but second.heapprof is not - perhaps gdb's gcore command doesn't work? Is it gdb 7.2 and up?"
print "anyway, this test failed but presumably heapprof itself works correctly."
sys.exit()
assert '1048576 [1048576]' in second
assert '1048576 [131073, 131073, 131071, 131073, 131071, 131073, 131071, 131071]' in second
assert 'example_func' in second
assert 'another_func' in second
print 'ok.'
|
DevinDewitt/pyqt5
|
examples/quick/scenegraph/customgeometry/customgeometry.py
|
Python
|
gpl-3.0
| 5,911
| 0.005414
|
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Digia Plc and its Subsidiary(-ies) nor the names
## of its contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
##
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import pyqtProperty, pyqtSignal, QPointF, QUrl
from PyQt5.QtGui import QColor, QGuiApplication
from PyQt5.QtQml import qmlRegisterType
from PyQt5.QtQuick import (QQuickItem, QQuickView, QSGFlatColorMaterial,
QSGGeometry, QSGGeometryNode, QSGNode)
import customgeometry_rc
class BezierCurve(QQuickItem):
p1Changed = pyqtSignal(QPointF)
@pyqtProperty(QPointF, notify=p1Changed)
def p1(self):
return self._p1
@p1.setter
def p1(self, p):
if self._p1 != p:
self._p1 = QPointF(p)
self.p1Changed.emit(p)
self.update()
p2Changed = pyqtSignal(QPointF)
@pyqtProperty(QPointF, notify=p2Changed)
def p2(self):
return self._p2
@p2.setter
def p2(self, p):
if self._p2 != p:
self._p2 = QPointF(p)
self.p2Changed.emit(p)
self.update()
p3Changed = pyqtSignal(QPointF)
@pyqtProperty(QPointF, notify=p3Changed)
def p3(self):
return self._p3
@p3.setter
def p3(self, p):
if self._p3 != p:
self._p3 = QPointF(p)
self.p3Changed.emit(p)
self.update()
p4Changed = pyqtSignal(QPointF)
@pyqtProperty(QPointF, notify=p4Changed)
def p4(self):
return self._p4
@p4.setter
def p4(self, p):
if self._p4 != p:
self._p4 = QPointF(p)
self.p4Changed.emit(p)
self.update()
segmentCountChanged = pyqtSignal(int)
@pyqtProperty(int, notify=segmentCountChanged)
def segmentCount(self):
return self._segmentCount
@segmentCount.setter
def segmentCount(self, count):
if self._segmentCount != count:
self._segmentCount = count
self.segmentCountChanged.emit(count)
self.update()
def __init__(self, parent=None):
super(BezierCurve, self).__init__(parent)
self._p1 = QPointF(0, 0)
self._p2 = QPointF(1, 0)
self._p3 = QPointF(0, 1)
self._p4 = QPointF(1, 1)
self._segmentCount = 32
self._root_node = None
self.setFlag(QQuickItem.ItemHasContents, True)
def updatePaintNode(self, oldNode, nodeData):
if self._root_node is None:
self._root_node = QSGGeometryNode()
geometry = QSGGeometry(QSGGeometry.defaultAttributes_Point2D(),
self._segmentCount)
geometry.setLineWidth(2)
geometry.setDrawingMode(QSGGeometry.GL_LINE_STRIP)
self._root_node.setGeometry(geometry)
self._root_node.setFlag(QSGNode.OwnsGeometry)
material = QSGFlatColorMaterial()
material.setColor(QColor(255, 0, 0))
self._root_node.setMaterial(material)
self._root_node.setFlag(QSGNode.OwnsMaterial)
else:
geometry = self._root_node.geometry()
geometry.allocate(self._segmentCount)
w = self.width()
h = self.height()
vertices = geometry.vertexDataAsPoint2D()
for i in range(self._segmentCount):
t = i / float(self._segmentCount - 1)
invt = 1 - t
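            # Cubic Bezier in Bernstein form:
            # B(t) = (1-t)^3*P1 + 3(1-t)^2*t*P2 + 3(1-t)*t^2*P3 + t^3*P4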
pos = invt * invt * invt * self._p1 \
+ 3 * invt * invt * t * self._p2 \
+ 3 * invt * t * t * self._p3 \
+ t * t * t * self._p4
vertices[i].set(pos.x() * w, pos.y() * h)
self._root_node.markDirty(QSGNode.DirtyGeometry)
return self._root_node
if __name__ == '__main__':
import sys
app = QGuiApplication(sys.argv)
qmlRegisterType(BezierCurve, "CustomGeometry", 1, 0, "BezierCurve")
view = QQuickView()
format = view.format()
format.setSamples(16)
view.setFormat(format)
view.setSource(QUrl('qrc:///scenegraph/customgeometry/main.qml'))
view.show()
sys.exit(app.exec_())
|
achamely/omniwallet
|
api/debug.py
|
Python
|
agpl-3.0
| 530
| 0.032075
|
import os, sys, commands
def print_debug( msg, verbose ):
data_dir_root = os.environ.get('DATADIR')
    debug_level = int(os.environ.get('DEBUGLEVEL'))
    #print the message to the debug log if the debug variable is set
    #add 'from debug import *' to the header
    # call with print_debug("my message", 5)
    # outputs to DATADIR/debug.log if the number above is less than DEBUGLEVEL
if int(verbose) < debug_level:
        commands.getoutput('echo '+msg+' >> '+data_dir_root+'/debug.log')
return 1
return 0
|
dsweet04/rekall
|
rekall-core/rekall/plugins/renderers/xls.py
|
Python
|
gpl-2.0
| 9,365
| 0.000427
|
# Rekall Memory Forensics
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Authors:
# Michael Cohen <scudette@google.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""This file implements an xls renderer based on the openpyxl project.
We produce xls (Excel spreadsheet files) with the output from Rekall plugins.
"""
import time
import openpyxl
from openpyxl import styles
from openpyxl.styles import colors
from openpyxl.styles import fills
from rekall import utils
from rekall.ui import renderer
from rekall.ui import text
# pylint: disable=unexpected-keyword-arg,no-value-for-parameter
# pylint: disable=redefined-outer-name
HEADER_STYLE = styles.Style(font=styles.Font(bold=True))
SECTION_STYLE = styles.Style(
fill=styles.PatternFill(
fill_type=fills.FILL_SOLID, start_color=styles.Color(colors.RED)))
FORMAT_STYLE = styles.Style(
alignment=styles.Alignment(vertical="top", wrap_text=False))
class XLSObjectRenderer(renderer.ObjectRenderer):
"""By default the XLS renderer delegates to the text renderer."""
renders_type = "object"
renderers = ["XLSRenderer"]
STYLE = None
def _GetDelegateObjectRenderer(self, item):
return self.ForTarget(item, "TextRenderer")(
session=self.session, renderer=self.renderer.delegate_text_renderer)
def RenderHeader(self, worksheet, column):
cell = worksheet.cell(
row=worksheet.current_row, column=worksheet.current_column)
cell.value = column.name
cell.style = HEADER_STYLE
# Advance the pointer by 1 cell.
worksheet.current_column += 1
def RenderCell(self, value, worksheet, **options):
# By default just render a single value into the current cell.
cell = worksheet.cell(
row=worksheet.current_row, column=worksheet.current_column)
cell.value = self.GetData(value, **options)
if self.STYLE:
cell.style = self.STYLE
# Advance the pointer by 1 cell.
worksheet.current_column += 1
def GetData(self, value, **options):
if isinstance(value, (int, float, long)):
return value
return unicode(self._GetDelegateObjectRenderer(value).render_row(
value, **options))
class XLSColumn(text.TextColumn):
def __init__(self, type=None, table=None, renderer=None, session=None,
**options):
super(XLSColumn, self).__init__(table=table, renderer=renderer,
session=session, **options)
if type:
self.object_renderer = self.renderer.get_object_renderer(
type=type, target_renderer="XLSRenderer", **options)
class XLSTable(text.TextTable):
column_class = XLSColumn
def render_header(self):
current_ws = self.renderer.current_ws
for column in self.columns:
if column.object_renderer:
object_renderer = column.object_renderer
else:
object_renderer = XLSObjectRenderer(
session=self.session, renderer=self.renderer)
object_renderer.RenderHeader(self.renderer.current_ws, column)
current_ws.current_row += 1
current_ws.current_column = 1
def render_row(self, row=None, highlight=None, **options):
merged_opts = self.options.copy()
merged_opts.update(options)
# Get each column to write its own header.
current_ws = self.renderer.current_ws
for item in row:
# Get the object renderer for the item.
object_renderer = self.renderer.get_object_renderer(
target=item, type=merged_opts.get("type"), **merged_opts)
object_renderer.RenderCell(item, current_ws, **options)
current_ws.current_row += 1
current_ws.current_column = 1
class XLSRenderer(renderer.BaseRenderer):
"""A Renderer for xls files."""
name = "xls"
table_class = XLSTable
tablesep = ""
def __init__(self, output=None, **kwargs):
super(XLSRenderer, self).__init__(**kwargs)
# Make a single delegate text renderer for reuse. Most of the time we
# will just replicate the output from the TextRenderer inside the
# spreadsheet cell.
self.delegate_text_renderer = text.TextRenderer(session=self.session)
self.output = output or self.session.GetParameter("output")
        # If no output filename was given, just make a name based on the time
# stamp.
        if self.output is None:
self.output = "%s.xls" % time.ctime()
try:
self.wb = openpyxl.load_workbook(self.output)
self.current_ws = self.wb.create_sheet()
except IOError:
self.wb = openpyxl.Workbook()
self.current_ws = self.wb.active
def start(self, plugin_name=None, kwargs=None):
super(XLSRenderer, self).start(plugin_name=plugin_name, kwargs=kwargs)
# Make a new worksheet for this run.
if self.current_ws is None:
self.current_ws = self.wb.create_sheet()
ws = self.current_ws
ws.title = plugin_name or ""
ws.current_row = 1
ws.current_column = 1
return self
def flush(self):
super(XLSRenderer, self).flush()
self.current_ws = None
# Write the spreadsheet to a file.
self.wb.save(self.output)
def section(self, name=None, **_):
ws = self.current_ws
for i in range(10):
cell = ws.cell(row=ws.current_row, column=i + 1)
if i == 0:
cell.value = name
cell.style = SECTION_STYLE
ws.current_row += 1
ws.current_column = 1
def format(self, formatstring, *data):
worksheet = self.current_ws
if "%" in formatstring:
data = formatstring % data
else:
data = formatstring.format(*data)
cell = worksheet.cell(
row=worksheet.current_row, column=worksheet.current_column)
cell.value = data
cell.style = FORMAT_STYLE
worksheet.current_column += 1
if "\n" in data:
worksheet.current_row += 1
worksheet.current_column = 1
def table_header(self, *args, **options):
super(XLSRenderer, self).table_header(*args, **options)
self.table.render_header()
# Following here are object specific renderers.
class XLSEProcessRenderer(XLSObjectRenderer):
"""Expands an EPROCESS into three columns (address, name and PID)."""
renders_type = "_EPROCESS"
def RenderHeader(self, worksheet, column):
for heading in ["_EPROCESS", "Name", "PID"]:
cell = worksheet.cell(
row=worksheet.current_row, column=worksheet.current_column)
cell.value = heading
cell.style = HEADER_STYLE
worksheet.current_column += 1
def RenderCell(self, item, worksheet, **options):
for value in ["%#x" % item.obj_offset, item.name, item.pid]:
object_renderer = self.ForTarget(value, self.renderer)(
session=self.session, renderer=self.renderer, **options)
object_renderer.RenderCell(value, worksheet, **options)
class XLSStringRenderer(XLSObjectRenderer):
renders_type = "String"
def GetData(self, item, **_):
return utils.SmartStr(item)
class XLSStructRenderer(XLSObjectRenderer):
"""Hex format struct's offsets."""
renders_type = "Struct"
d
|
diekhans/ga4gh-server
|
ga4gh/datamodel/references.py
|
Python
|
apache-2.0
| 15,873
| 0.000126
|
"""
Module responsible for translating reference sequence data into GA4GH native
objects.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import hashlib
import json
import os
import random
import pysam
import ga4gh.datamodel as datamodel
import ga4gh.protocol as protocol
import ga4gh.exceptions as exceptions
DEFAULT_REFERENCESET_NAME = "Default"
"""
This is the name used for any reference set referred to in a BAM
file that does not provide the 'AS' tag in the @SQ header.
"""
class AbstractReferenceSet(datamodel.DatamodelObject):
"""
Class representing ReferenceSets. A ReferenceSet is a set of
References which typically comprise a reference assembly, such as
GRCh38.
"""
compoundIdClass = datamodel.ReferenceSetCompoundId
def __init__(self, localId):
super(AbstractReferenceSet, self).__init__(None, localId)
self._referenceIdMap = {}
self._referenceNameMap = {}
self._referenceIds = []
self._assemblyId = None
self._description = None
self._isDerived = False
self._ncbiTaxonId = None
self._sourceAccessions = []
self._sourceUri = None
def addReference(self, reference):
"""
Adds the specified reference to this ReferenceSet.
"""
id_ = reference.getId()
self._referenceIdMap[id_] = reference
self._referenceNameMap[reference.getLocalId()] = reference
self._referenceIds.append(id_)
def getReferences(self):
"""
Returns the References in this ReferenceSet.
"""
return [self._referenceIdMap[id_] for id_ in self._referenceIds]
def getNumReferences(self):
"""
        Returns the number of references in this ReferenceSet.
"""
return len(self._referenceIds)
def getReferenceByIndex(self, index):
"""
Returns the reference at the specified index in this ReferenceSet.
"""
return self._referenceIdMap[self._referenceIds[index]]
def getReferenceByName(self, name):
"""
Returns the reference with the specified name.
"""
        if name not in self._referenceNameMap:
raise exceptions.ReferenceNameNotFoundException(name)
return self._referenceNameMap[name]
def getReference(self, id_):
"""
Returns the Reference with the specified ID or raises a
ReferenceNotFoundException if it does not exist.
"""
if id_ not in self._referenceIdMap:
raise exceptions.ReferenceNotFoundException(id_)
return self._referenceIdMap[id_]
def getMd5Checksum(self):
"""
Returns the MD5 checksum for this reference set. This checksum is
calculated by making a list of `Reference.md5checksum` for all
`Reference`s in this set. We then sort this list, and take the
MD5 hash of all the strings concatenated together.
"""
references = sorted(
self.getReferences(),
key=lambda ref: ref.getMd5Checksum())
checksums = ''.join([ref.getMd5Checksum() for ref in references])
md5checksum = hashlib.md5(checksums).hexdigest()
return md5checksum
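    # Illustrative example (assumption, not part of the original code): for two
    # references whose md5checksums sort to "a1" and "b2", the set-level value
    # would be hashlib.md5("a1" + "b2").hexdigest().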
def getAssemblyId(self):
"""
Returns the assembly ID for this reference set.
This is the public id of this reference set, such as `GRCh37`
"""
return self._assemblyId
def getDescription(self):
"""
Returns the free text description of this reference set.
"""
return self._description
def getIsDerived(self):
"""
Returns True if this ReferenceSet is derived. A ReferenceSet
may be derived from a source if it contains additional sequences,
or some of the sequences within it are derived.
"""
return self._isDerived
def getSourceAccessions(self):
"""
Returns the list of source accession strings. These are all known
corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally
with a version number, e.g. `NC_000001.11`.
"""
return self._sourceAccessions
def getSourceUri(self):
"""
Returns the sourceURI for this ReferenceSet.
"""
return self._sourceUri
def getNcbiTaxonId(self):
"""
Returns the NCBI Taxon ID for this reference set. This is the
ID from http://www.ncbi.nlm.nih.gov/taxonomy (e.g. 9606->human)
indicating the species which this assembly is intended to model.
Note that contained `Reference`s may specify a different
`ncbiTaxonId`, as assemblies may contain reference sequences
which do not belong to the modeled species, e.g. EBV in a
human reference genome.
"""
return self._ncbiTaxonId
def toProtocolElement(self):
"""
Returns the GA4GH protocol representation of this ReferenceSet.
"""
ret = protocol.ReferenceSet()
ret.assemblyId = self.getAssemblyId()
ret.description = self.getDescription()
ret.id = self.getId()
ret.isDerived = self.getIsDerived()
ret.md5checksum = self.getMd5Checksum()
ret.ncbiTaxonId = self.getNcbiTaxonId()
ret.referenceIds = self._referenceIds
ret.sourceAccessions = self.getSourceAccessions()
ret.sourceURI = self.getSourceUri()
ret.name = self.getLocalId()
return ret
class AbstractReference(datamodel.DatamodelObject):
"""
Class representing References. A Reference is a canonical
assembled contig, intended to act as a reference coordinate space
for other genomic annotations. A single Reference might represent
the human chromosome 1, for instance.
"""
compoundIdClass = datamodel.ReferenceCompoundId
def __init__(self, parentContainer, localId):
super(AbstractReference, self).__init__(parentContainer, localId)
self._length = -1
self._md5checksum = ""
self._sourceUri = None
self._sourceAccessions = []
self._isDerived = False
self._sourceDivergence = None
self._ncbiTaxonId = None
def getLength(self):
"""
Returns the length of this reference's sequence string.
"""
return self._length
def getName(self):
"""
Returns the name of this reference, e.g., '22'.
"""
return self.getLocalId()
def getIsDerived(self):
"""
Returns True if this Reference is derived. A sequence X is said to be
derived from source sequence Y, if X and Y are of the same length and
the per-base sequence divergence at A/C/G/T bases is sufficiently
small. Two sequences derived from the same official sequence share the
same coordinates and annotations, and can be replaced with the official
sequence for certain use cases.
"""
return self._isDerived
def getSourceDivergence(self):
"""
Returns the source divergence for this reference. The sourceDivergence
is the fraction of non-indel bases that do not match the
reference this record was derived from.
"""
return self._sourceDivergence
def getSourceAccessions(self):
"""
Returns the list of source accession strings. These are all known
corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally
with a version number, e.g. `NC_000001.11`.
"""
return self._sourceAccessions
def getSourceUri(self):
"""
The URI from which the sequence was obtained. Specifies a FASTA format
file/string with one name, sequence pair.
"""
return self._sourceUri
def getNcbiTaxonId(self):
"""
Returns the NCBI Taxon ID for this reference. This is the
ID from http://www.ncbi.nlm.nih.gov/taxonomy (e.g. 9606->human)
indicating the species which this assembly is intended to model.
Note that contained `Reference`s may specify a different
`ncbiTaxonI
|
b-ritter/python-notes
|
concurrency/tests_integration/test_collection_times.py
|
Python
|
mit
| 1,528
| 0.003272
|
from concurrency.get_websites import get_number_of_links
import time
# Run get_number_of_links and compare it to a serial version
# stub out load_url with a sleep function so the time is always the same
# Show that the concurrent version takes less time than the serial
import unittest
from unittest.mock import patch, MagicMock
from bs4 import BeautifulSoup
from concurrency.get_websites import get_number_of_links, get_number_of_links_serial
class TestConcurrency(unittest.TestCase):
def setUp(self):
self.loadtime = 1
self.fake_urls = ['url1','url2', 'url3']
@patch('concurrency.get_websites.BeautifulSoup')
@patch('concurrency.get_websites.load_url')
def test_concurrent_slower_than_serial(self, mock_load_url, bs_mock):
""" Time the collection of data from websites """
bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>")
bs_mock.return_value = bs_data
        mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime)
concurrent_start = time.time()
list(get_number_of_links(self.fake_urls))
        concurrent_total = time.time() - concurrent_start
serial_start = time.time()
get_number_of_links_serial(self.fake_urls)
serial_total = time.time() - serial_start
print("Concurrent collection: {}".format(concurrent_total))
print("Serial collection: {}".format(serial_total))
self.assertLess(concurrent_total, serial_total)
if __name__ == "__main__":
unittest.main()
|
2Habibie/ctocpp
|
c2cpp/pmake.py
|
Python
|
gpl-2.0
| 1,606
| 0.009963
|
#!/usr/bin/env python
"""
C to C++ Translator
Convert a C program or whole project to C++
Copyright (C) 2001-2009 Denis Sureau
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
webmaster@scriptol.com
http://www.scriptol.com
PMAKE
Compile a list of sources
"""
import os
import string
import sys
# remove unwanted codes from lines
def chop(n):
while (len(n) > 1) & (n[-1] in ("\n", "\r")):
n = n[0:-1]
return n
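# Illustrative example (not part of the original script):
#   chop("main.c\r\n") -> "main.c"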
path = os.getcwd()
# read the list of files
fic = open("cdlist.prj","r")
liste = fic.readlines()
fic.close()
sortie = open("test", "w")
sys.stdout = sortie
# scan the list of sources and compile each .C one
for n in liste:
n = chop(n)
if os.path.isdir(n): continue
node, ext = os.path.splitext(n)
ext = string.upper(ext)
if ext in [ ".c", ".C" ]:
print "compiling " + n,
os.system("bcc32 -c " + node)
sortie.close()
|
webadmin87/midnight
|
midnight_catalog/services.py
|
Python
|
bsd-3-clause
| 1,282
| 0.00266
|
from django.conf import settings
from django.db.models import Prefetch
from midnight_catalog.models import Product, ParamValue
def get_all(slug=None):
"""
    Returns a QuerySet containing the selection of products
    :param slug: slug of the catalog category; if not given, products are selected regardless of category
:return:
"""
if slug is None:
q = Product.objects.published()
else:
q = Product.objects.published().filter(sections__slug=slug)
    if getattr(settings, 'MIDNIGHT_CATALOG_PREFETCH_PARAMS', False):
        q = q.prefetch_related(Prefetch("paramvalue_set", queryset=ParamValue.objects.published().order_by('sort').prefetch_related("param")))
q = q.prefetch_related('sections').order_by('sort', '-id')
return q.all()
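# Illustrative usage (assumption, not part of the original module; the slug is
# hypothetical and depends on the project's catalog sections):
#   phones = get_all(slug='phones')   # published products in one section
#   everything = get_all()            # published products in all sections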
def get_one(slug):
"""
    Returns a single product
    :param slug: slug of the product
:return:
"""
item = Product.objects.published()\
.prefetch_related(Prefetch("paramvalue_set", queryset=ParamValue.objects.published().order_by('sort').prefetch_related("param")))\
.filter(slug=slug).first()
return item
|
oeeagle/quantum
|
neutron/plugins/nicira/extensions/maclearning.py
|
Python
|
apache-2.0
| 1,838
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Nicira Networks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from neutron.api.v2 import attributes
MAC_LEARNING = 'mac_learning_enabled'
EXTENDED_ATTRIBUTES_2_0 = {
'ports': {
MAC_LEARNING: {'allow_post': True, 'allow_put': True,
'convert_to': attributes.convert_to_boolean,
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
}
}
class Maclearning(object):
"""Extension class supporting port mac learning."""
@classmethod
def get_name(cls):
return "MAC Learning"
    @classmethod
def get_alias(cls):
return "mac-learning"
@classmethod
def get_description(cls):
return "Provides mac learning capabilities"
@classmethod
def get_namespace(cls):
return "http://docs.openstack.org/ext/maclearning/api/v1.0"
@classmethod
def get_updated(cls):
return "2013-05-1T10:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
return []
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
|
Jannes123/inasafe
|
safe/gui/widgets/test/test_dock.py
|
Python
|
gpl-3.0
| 37,784
| 0.000106
|
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **GUI Test Cases.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'tim@kartoza.com'
__date__ = '10/01/2011'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import unittest
import sys
import os
import logging
import codecs
from os.path import join
from unittest import TestCase, skipIf
# this import required to enable PyQt API v2
# noinspection PyUnresolvedReferences
import qgis # pylint: disable=unused-import
from qgis.core import (
QgsVectorLayer,
QgsMapLayerRegistry,
QgsRectangle,
QgsCoordinateReferenceSystem)
from PyQt4 import QtCore
from safe.impact_functions import register_impact_functions
from safe.common.utilities import format_int, unique_filename
from safe.test.utilities import (
test_data_path,
load_standard_layers,
setup_scenario,
set_canvas_crs,
combos_to_string,
populate_dock,
canvas_list,
GEOCRS,
GOOGLECRS,
load_layer,
load_layers,
set_jakarta_extent,
set_jakarta_google_extent,
set_yogya_extent,
get_ui_state,
set_small_jakarta_extent,
get_qgis_app,
TESTDATA,
HAZDATA)
# AG: get_qgis_app() should be called before importing modules from
# safe.gui.widgets.dock
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app()
from safe.gui.widgets.dock import Dock
from safe.utilities.keyword_io import KeywordIO
from safe.utilities.styling import setRasterStyle
from safe.utilities.gis import read_impact_layer, qgis_version
LOGGER = logging.getLogger('InaSAFE')
DOCK = Dock(IFACE)
# noinspection PyArgumentList
class TestDock(TestCase):
"""Test the InaSAFE GUI."""
def setUp(self):
"""Fixture run before all tests"""
register_impact_functions()
DOCK.show_only_visible_layers_flag = True
load_standard_layers(DOCK)
DOCK.cboHazard.setCurrentIndex(0)
DOCK.cboExposure.setCurrentIndex(0)
DOCK.cboFunction.setCurrentIndex(0)
DOCK.run_in_thread_flag = False
DOCK.show_only_visible_layers_flag = False
DOCK.set_layer_from_title_flag = False
DOCK.zoom_to_impact_flag = False
DOCK.hide_exposure_flag = False
DOCK.show_intermediate_layers = False
DOCK.user_extent = None
DOCK.user_extent_crs = None
def tearDown(self):
"""Fixture run after each test"""
QgsMapLayerRegistry.instance().removeAllMapLayers()
DOCK.cboHazard.clear()
DOCK.cboExposure.clear()
# DOCK.cboAggregation.clear() #dont do this because the cboAggregation
# need to be able to react to the status changes of the other combos
def test_defaults(self):
"""Test the GUI in its default state"""
print combos_to_string(DOCK)
self.assertEqual(DOCK.cboHazard.currentIndex(), 0)
self.assertEqual(DOCK.cboExposure.currentIndex(), 0)
self.assertEqual(DOCK.cboFunction.currentIndex(), 0)
self.assertEqual(DOCK.cboAggregation.currentIndex(), 0)
def test_validate(self):
"""Validate function work as expected"""
self.tearDown()
# First check that we DONT validate a clear DOCK
flag, message = DOCK.validate()
self.assertTrue(message is not None, 'No reason for failure given')
message = 'Validation expected to fail on a cleared DOCK.'
self.assertEquals(flag, False, message)
# Now check we DO validate a populated DOCK
populate_dock(DOCK)
flag = DOCK.validate()
message = (
'Validation expected to pass on a populated dock with selections.')
self.assertTrue(flag, message)
def test_set_ok_button_status(self):
"""OK button changes properly according to DOCK validity"""
# First check that we ok ISNT enabled on a clear DOCK
self.tearDown()
flag, message = DOCK.validate()
self.assertTrue(message is not None, 'No reason for failure given')
message = 'Validation expected to fail on a cleared DOCK.'
self.assertEquals(flag, False, message)
# Now check OK IS enabled on a populated DOCK
populate_dock(DOCK)
flag = DOCK.validate()
message = (
'Validation expected to pass on a populated DOCK with selections.')
self.assertTrue(flag, message)
def test_insufficient_overlap(self):
"""Test Insufficient overlap errors are caught.
..note:: See https://github.com/AIFDR/inasafe/issues/372
"""
# Push OK with the left mouse button
button = DOCK.pbnRunStop
message = 'Run button was not enabled'
self.assertTrue(button.isEnabled(), message)
result, message = setup_scenario(
DOCK,
hazard='Continuous Flood',
exposure='Population',
function='Need evacuation',
function_id='FloodEvacuationRasterHazardFunction')
self.assertTrue(result, message)
# Enable on-the-fly reprojection
set_canvas_crs(GEOCRS, True)
# Zoom to an area where there is no overlap with layers
rectangle = QgsRectangle(
106.635434302702, -6.101567666986,
106.635434302817, -6.101567666888)
CANVAS.setExtent(rectangle)
crs = QgsCoordinateReferenceSystem('EPSG:4326')
DOCK.define_user_analysis_extent(rectangle, crs)
# Press RUN
DOCK.accept()
result = DOCK.wvResults.page_to_text()
# Check for an error containing InsufficientOverlapError
expected_string = 'InsufficientOverlapError'
message = 'Result not as expected %s not in: %s' % (
expected_string, result)
# This is the expected impact number
self.assertIn(expected_string, result, message)
# disabled this test until further coding
def xtest_print_map(self):
"""Test print map, especially on Windows."""
result, message = setup_scenario(
DOCK,
hazard='Flood in Jakarta',
exposure='Essential buildings',
function='Be affected',
function_id='Categorised Hazard Building Impact Function')
self.assertTrue(result, message)
# Enable on-the-fly reprojection
set_canvas_crs(GEOCRS, True)
set_jakarta_extent(DOCK)
# Press RUN
button = DOCK.pbnRunStop
# noinspection PyCallByClass,PyTypeChecker
button.click()
print_button = DOCK.pbnPrint
try:
# noinspection PyCallByClass,PyTypeChecker
print_button.click()
except OSError:
LOGGER.debug('OSError')
# pass
except Exception, e:
raise Exception('Exception is not expected, %s' % e)
def test_result_styling(self):
"""Test that ouputs from a model are correctly styled (colours and
opacity. """
# Push OK with the left mouse button
print '--------------------'
print combos_to_string(DOCK)
result, message = setup_scenario(
DOCK,
hazard='Continuous Flood',
exposure='Population',
function='Need evacuation',
function_id='FloodEvacuationRasterHazardFunction')
self.assertTrue(result, message)
# Enable on-the-fly reprojection
set_canvas_crs(GEOCRS, True)
set_jakarta_extent(DOCK)
DOCK.accept()
# DOCK.analysis.get_impact_layer()
safe_layer = DOCK.analysis.get_impact_layer()
qgis_layer = read_impact_layer(safe_layer)
style = safe_layer.get_style_info()
setRasterStyle(qgis_layer, style)
        # simple test for now - we could test explicitly for style state
# later if needed.
message = (
'Raster layer w
|
buhe/judge
|
sysinfo.py
|
Python
|
agpl-3.0
| 1,249
| 0.001601
|
import os
from multiprocessing import cpu_count
_cpu_count = cpu_count()
if hasattr(os, 'getloadavg'):
def load_fair():
return 'load', os.getloadavg()[0] / _cpu_count
else:
from winperfmon import PerformanceCounter
from threading import Thread
from collections import deque
from time import sleep
class SystemLoadThread(Thread):
def __init__(self):
super(SystemLoadThread, self).__init__()
self.daemon = True
self.samples = deque(maxlen=10)
self.load = 0.5
self.counter = PerformanceCounter(r'\System\Processor Queue Length', r'\Processor(_Total)\% Processor Time')
def run(self):
while True:
pql, pt = self.counter.query()
self.samples.append(pql)
if pt >= 100:
self.load = max(sum(self.samples) / len(self.samples) / _cpu_count, pt / 100.)
else:
self.load = pt / 100.
sleep(1)
_load_thread = SystemLoadThread()
_load_thread.start()
def load_fair():
return 'load', _load_thread.load
def cpu_count():
return 'cpu-count', _cpu_count
report_callbacks = [load_fair, cpu_count]
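# Illustrative usage (assumption, not part of the original module): each callback
# returns a (name, value) pair, so a report dict could be built with
#   dict(cb() for cb in report_callbacks)   # e.g. {'load': 0.42, 'cpu-count': 8}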
|
Tassemble/jewelry
|
polls/admin.py
|
Python
|
mit
| 475
| 0.023158
|
from django.contrib import admin
# Register your models here.
from polls.models import Question,Choice
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fields = ["question_text", "pub_date"]
inlines = [ChoiceInline]
    list_display = ('question_text', 'pub_date', 'was_published_recently')
search_fields = ['question_text']
list_filter = ['pub_date']
admin.site.register(Question, QuestionAdmin)
|
gdetrez/MyConf
|
apps/people/management/commands/addperson.py
|
Python
|
agpl-3.0
| 933
| 0.004287
|
from people.models import Person
from optparse import make_option
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
raise ImportError("Neither json or simplejson are available on your system")
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
class Command(BaseCommand):
args = 'NAME'
help = 'Export speakers from the database'
option_list = BaseCommand.option_list + (
make_option('--staff',
action='store_true',
        dest='staff',
default=False,
help='This person is staff'),
)
def handle(self, *args, **options):
name = u" ".join(map(lambda s: s.decode("utf8"),args))
print options
person = Person(name=name, staff=options['staff'])
person.save()
print person.pk
|
lfairchild/PmagPy
|
programs/core_depthplot.py
|
Python
|
bsd-3-clause
| 8,467
| 0.005551
|
#!/usr/bin/env pythonw
#from __future__ import print_function
import sys
import wx
import os
import matplotlib
if matplotlib.get_backend() != "WXAgg":
matplotlib.use("WXAgg")
import matplotlib.pyplot as plt
from pmagpy import pmagplotlib
import pmagpy.command_line_extractor as extractor
import pmagpy.ipmag as ipmag
import dialogs.pmag_widgets as pw
import dialogs.pmag_menu_dialogs as pmag_menu_dialogs
def main():
"""
NAME
core_depthplot.py
DESCRIPTION
plots various measurements versus core_depth or age. plots data flagged as 'FS-SS-C' as discrete samples.
SYNTAX
core_depthplot.py [command line options]
# or, for Anaconda users:
core_depthplot_anaconda [command line options]
OPTIONS
-h prints help message and quits
    -f FILE: specify input measurements format file
-fsum FILE: specify input LIMS database (IODP) core summary csv file
-fwig FILE: specify input depth,wiggle to plot, in magic format with sample_core_depth key for depth
-fsa FILE: specify input er_samples format file from magic for depth
-fa FILE: specify input ages format file from magic for age
NB: must have either -fsa OR -fa (not both)
-fsp FILE sym size: specify input zeq_specimen format file from magic, sym and size
NB: PCAs will have specified color, while fisher means will be white with specified color as the edgecolor
-fres FILE specify input pmag_results file from magic, sym and size
-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot
    -S do not plot blanket treatment data (if this is set, you don't need the -LP)
-sym SYM SIZE, symbol, size for continuous points (e.g., ro 5, bs 10, g^ 10 for red dot, blue square, green triangle), default is blue dot at 5 pt
-D do not plot declination
-M do not plot magnetization
-log plot magnetization on a log scale
-L do not connect dots with a line
-I do not plot inclination
    -d min max [in m] depth range to plot
-n normalize by weight in er_specimen table
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04, gts12]
-ds [mbsf,mcd] specify depth scale, mbsf default
-fmt [svg, eps, pdf, png] specify output format for plot (default: svg)
-sav save plot silently
DEFAULTS:
Measurements file: measurements.txt
Samples file: samples.txt
NRM step
Summary file: none
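    EXAMPLE
        (illustrative invocation only; the file names below are hypothetical)
        core_depthplot.py -f measurements.txt -fsa samples.txt -d 0 50 -sym bo 5 -fmt pdf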
"""
args = sys.argv
if '-h' in args:
print(main.__doc__)
sys.exit()
dataframe = extractor.command_line_dataframe([ ['f', False, 'measurements.txt'], ['fsum', False, ''],
['fwig', False, ''], ['fsa', False, ''],
['fa', False, ''], ['fsp', False, ''],
['fres', False, '' ], ['fmt', False, 'svg'],
['LP', False, ''], ['n', False, False],
['d', False, '-1 -1'], ['ts', False, ''],
['WD', False, '.'], ['L', False, True],
['S', False, True], ['D', False, True],
['I', False, True], ['M', False, True],
['log', False, 0],
['ds', False, 'sample_core_depth'],
['sym', False, 'bo 5'], ['ID', False, '.'],
['sav', False, False], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol, input_dir, save, data_model_num = extractor.get_vars(
['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym', 'ID', 'sav', 'DM'], checked_args)
# format some variables
# format symbol/size
try:
sym, size = symbol.split()
size = int(size)
except:
print('you should provide -sym in this format: ro 5')
print('using defaults instead')
sym, size = 'ro', 5
# format result file, symbol, size
if res_file:
try:
res_file, res_sym, res_size = res_file.split()
except:
print('you must provide -fres in this format: -fres filename symbol size')
print(
'could not parse {}, defaulting to using no result file'.format(res_file))
res_file, res_sym, res_size = '', '', 0
else:
res_file, res_sym, res_size = '', '', 0
# format specimen file, symbol, size
if spc_file:
try:
spc_file, spc_sym, spc_size = spc_file.split()
except:
print('you must provide -fsp in this format: -fsp filename symbol size')
print(
'could not parse {}, defaulting to using no specimen file'.format(spc_file))
spc_file, spc_sym, spc_size = '', '', 0
else:
spc_file, spc_sym, spc_size = '', '', 0
# format min/max depth
try:
dmin, dmax = depth.split()
except:
print('you must provide -d in this format: -d dmin dmax')
print('could not parse {}, defaulting to plotting all depths'.format(depth))
dmin, dmax = -1, -1
# format timescale, min/max time
if timescale:
try:
timescale, amin, amax = timescale.split()
pltTime = True
except:
print(
'you must provide -ts in this format: -ts timescale minimum_age maximum_age')
print(
'could not parse {}, defaulting to using no timescale'.format(timescale))
timescale, amin, amax = None, -1, -1
pltTime = False
else:
timescale, amin, amax = None, -1, -1
pltTime = False
# format norm and wt_file
if norm and not isinstance(norm, bool):
wt_file = norm
norm = True
else:
norm = False
wt_file = ''
    # format list of protocols and step
try:
method, step = meth.split()
except:
print(
'To use the -LP flag you must provide both the protocol and the step in this format:\n-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot')
print('Defaulting to using no protocol')
method, step = 'LT-NO', 0
# list of varnames
#['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym' ]
#meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol
fig, figname = ipmag.core_depthplot(input_dir, meas_file, spc_file, samp_file, age_file, sum_file, wt_file, depth_scale, dmin, dmax, sym, size,
spc_sym, spc_size, method, step, fmt, pltDec, pltInc, pltMag, pltLine, pltSus, logit, pltTime, timescale, amin, amax, norm, data_model_num)
if not pmagplotlib.isServer:
figname = figname.replace(':', '_')
if fig and save:
print('-I- Created plot: {}'.format(figname))
plt.savefig(figname)
return
app = wx.App(redirect=False)
if not fig:
pw.simple_warning(
'No plot was able to be created with the data you provided.\nMake sure you have given all the required information and try again')
return False
dpi = fig.get_dpi()
pixel_width = dpi * fig.get_figwidth()
pixel_height = dpi * fig.
|
cryptapus/electrum
|
electrum/gui/kivy/uix/dialogs/qr_scanner.py
|
Python
|
mit
| 1,153
| 0.001735
|
from kivy.app import App
from kivy.factory import Factory
from kivy.lang import Builder
Factory.register('QRScanner', module='electrum.gui.kivy.qr_scanner')
class QrScannerDialog(Factory.AnimatedPopup):
__events__ = ('on_complete', )
def on_symbols(self, instance, value):
instance.stop()
self.dismiss()
data = value[0].data
self.dispatch('on_complete', data)
def on_complete(self, x):
''' Default Handler for on_complete event.
'''
print(x)
Builder.load_string('''
<QrScannerDialog>
title:
_(\
'[size=18dp]Hold your QRCode up to the camera[/size][size=7dp]\\n[/size]')
title_size: '24sp'
border: 7, 7, 7, 7
size_hint: None, None
size: '340dp', '290dp'
pos_hint: {'center_y': .53}
#separator_color: .89, .89, .89, 1
#separator_height: '1.2dp'
    #title_color: .437, .437, .437, 1
    #background: 'atlas://electrum/gui/kivy/theming/light/dialog'
on_activate:
qrscr.start()
qrscr.size = self.size
on_deactivate: qrscr.stop()
QRScanner:
id: qrscr
on_symbols: root.on_symbols(*args)
''')
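# Illustrative usage (assumption, not part of the original module): a caller
# would typically bind on_complete to receive the decoded QR payload, e.g.
#   dialog = QrScannerDialog()
#   dialog.bind(on_complete=lambda dlg, data: print(data))
#   dialog.open()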
|
linyvxiang/tera
|
test/testcase/test_root.py
|
Python
|
bsd-3-clause
| 3,730
| 0.006702
|
"""
Copyright (c) 2015, Baidu.com, Inc. All Rights Reserved
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""
import common
from conf import const
def setUp():
"""
set env
"""
common.print_debug_msg(1, "setup()")
def test_create_user():
cmd = "./teracli user create z1 z1pwd --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 0)
cmd = "./teracli user show z1"
common.check_show_user_result(cmd, True, "z1")
# user already exists
cmd = "./teracli user create z1 z1pwd --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
def test_change_pwd():
cmd = "./teracli user changepwd root rootpassword --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 0)
#now, using old password can not get root permission
cmd = "./teracli user create dummy dummypwd --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
#update flag file
cmd = ("sed -i 's/^--tera_user_passcode=.*/--tera_user_passcode=rootpassword/' "
+ const.user_root_flag_path)
common.execute_and_check_returncode(cmd, 0)
#now, using new password should work
cmd = "./teracli user changep
|
wd root helloroot --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 0)
#restore the original root password in flag file
cmd = ("sed -i 's/^--tera_user_passcode=.*/--tera_user_passcode=helloroot/' "
+ const.user_root_flag_path)
common.execute_and_check_returncode(cmd, 0)
# user not found
cmd = "./teracli user changepwd
|
oops z1pw2 --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
def test_addtogroup():
cmd = "./teracli user addtogroup z1 z1g --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 0)
cmd = "./teracli user show z1"
common.check_show_user_result(cmd, True, "z1g")
common.execute_and_check_returncode(cmd, 0)
# user not found
cmd = "./teracli user addtogroup z2 z1g --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
# user already in group
cmd = "./teracli user addtogroup z1 z1g --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
def test_deletefromgroup():
cmd = "./teracli user deletefromgroup z1 z1g --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 0)
cmd = "./teracli user show z1"
common.check_show_user_result(cmd, False, "z1g")
# user not found
cmd = "./teracli user deletefromgroup z2 z1g --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
# user not in group
cmd = "./teracli user deletefromgroup z1 z1g --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
def test_delete_user():
cmd = "./teracli user delete z1 --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 0)
cmd = "./teracli user show z1"
common.check_show_user_result(cmd, False, "z1")
# can not delete root
cmd = "./teracli user delete root --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
cmd = "./teracli user show root"
common.check_show_user_result(cmd, True, "root")
# user not found
cmd = "./teracli user delete z1 --flagfile=" + const.user_root_flag_path
common.execute_and_check_returncode(cmd, 255)
def tearDown():
"""
tear down
"""
common.print_debug_msg(1, "teardown()")
|
ddico/odoo
|
addons/sale_product_configurator/__init__.py
|
Python
|
agpl-3.0
| 93
| 0
|
# -*- coding: utf-8 -*-
from . import models
from . import controllers
from . import wizard
|
jlaine/django-timegraph
|
timegraph/admin.py
|
Python
|
bsd-2-clause
| 2,072
| 0.005794
|
# -*- coding: utf-8 -*-
#
# django-timegraph - monitoring graphs for django
# Copyright (c) 2011-2012, Wifirst
# Copyright (c) 2013, Jeremy Lainé
# All rights reserved.
#
# See AUTHORS file for a full list of contributors.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from django.contrib import admin
from timegraph.models import Graph, Metric
class GraphAdmin(admin.ModelAdmin):
list_display = ('slug', 'title', 'is_visible')
list_filter = ('is_visible',)
    search_fields = ('slug', 'title')
class MetricAdmin(admin.ModelAdmin):
list_display = ('name', 'parameter', 'type', 'unit', 'rrd_enabled', 'graph_order')
list_filter = ('type', 'unit', 'rrd_enabled')
search_fields = ('name', 'parameter')
admin.site.register(Graph, GraphAdmin)
admin.site.register(Metric, MetricAdmin)
|
s-good/AutoQC
|
tests/EN_stability_check_validation.py
|
Python
|
mit
| 2,925
| 0.008547
|
import qctests.EN_stability_check
import util.testingProfile
import numpy
import util.main as main
##### EN_stability_check ----------------------------------------------
class TestClass():
parameters = {
"table": 'unit'
}
def setUp(self):
# this qc test will go looking for the profile in question in the db, needs to find something sensible
main.faketable('unit')
main.fakerow('unit')
def tearDown(self):
main.dbinteract('DROP TABLE unit;')
def test_mcdougallEOS(self):
'''
check the test values provided for the EOS in McDougall 2003
'''
eos = round(qctests.EN_stability_check.mcdougallEOS(35,25,2000), 6)
assert eos == 1031.654229, 'mcdougallEOS(35,25,2000) should be 1031.654229, instead got %f' % eos
eos = round(qctests.EN_stability_check.mcdougallEOS(20,20,1000), 6)
assert eos == 1017.726743, 'mcdougallEOS(20,20,1000) should be 1017.726743, instead got %f' % eos
eos = round(qctests.EN_stability_check.mcdougallEOS(40,12,8000), 6)
assert eos == 1062.928258, 'mcdougallEOS(40,12,8000) should be 1062.928258, instead got %f' % eos
def test_mcdougall_potential_temperature(self):
'''
check the test values provided for the potential temperature approximation in McDougall 2003
'''
pt = round(qctests.EN_stability_check.potentialTemperature(35, 20, 2000), 6)
        assert pt == 19.621967, 'potential temperature for S = 35 psu, T = 20C, p = 2000 db should be 19.621967, instead got %f' % pt
def test_EN_stability_check_padded(self):
'''
        check some behavior near the test values provided in McDougall
padded with the same level to avoid flagging the entire profile
'''
        p = util.testingProfile.fakeProfile([13.5, 25.5, 20.4, 13.5, 13.5, 13.5, 13.5, 13.5, 13.5], [0, 10, 20, 30, 40, 50, 60, 70, 80], salinities=[40, 35, 20, 40, 40, 40, 40, 40, 40], pressures=[8000, 2000, 1000, 8000, 8000, 8000, 8000, 8000, 8000], uid=8888)
qc = qctests.EN_stability_check.test(p, self.parameters)
truth = numpy.ma.array([False, True, True, False, False, False, False, False, False], mask=False)
assert numpy.array_equal(qc, truth), 'failed to flag padded stability example'
def test_EN_stability_check_unpadded(self):
'''
check same four levels as above,
but don't pad with extra levels, so that the whole profile ends up getting rejected at the last step.
'''
p = util.testingProfile.fakeProfile([13.5, 25.5, 20.4, 13.5], [0, 10, 20, 30], salinities=[40, 35, 20, 40], pressures=[8000, 2000, 1000, 8000], uid=8888)
qc = qctests.EN_stability_check.test(p, self.parameters)
truth = numpy.ma.array([True, True, True, True], mask=False)
assert numpy.array_equal(qc, truth), 'failed to flag unpadded stability example'
|
EnergyID/opengrid
|
scripts/job_cache_anonymous_houseprint.py
|
Python
|
gpl-2.0
| 730
| 0.005479
|
# -*- coding: utf-8 -*-
"""
Script to cache anonymous houseprint data into hp_anonymous.pkl
Created on 05/07/2014 by Roel De Coninck
"""
import os, sys
import inspect
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# add the path to opengrid to sys.path
sys.path.append(os.path.join(script_dir, os.pardir, os.pardir))
from opengrid.library.houseprint import Houseprint
##############################################################################
hp = Houseprint()
all_sensordata = hp.get_all_fluksosensors()
print('Sensor data fetched')
hp.save('/usr/local/src/opengrid/scripts/hp_anonymous.pkl')
hp.save('/var/www/private/hp_anonymous.pkl')
|
johnkastler/aws
|
get_account_urls.py
|
Python
|
gpl-3.0
| 456
| 0.013158
|
#!/usr/bin/env python
from keyring import get_password
from boto.iam.connection import IAMConnection
import lib.LoadBotoConfig as BotoConfig
from sys import exit
envs = ['dev', 'qa', 'staging', 'demo', 'prod']
for env in envs:
id = BotoConfig.config.get(env, 'aws_access_key_id')
key = get_password(BotoConfig.config.get(env, 'keyring'), id)
conn = IAMConnection(aws_access_key_id=id, aws_secret_access_key=key)
print(conn.get_signin_url())
|
jmbott/test-repo
|
motion_cam.py
|
Python
|
mit
| 1,069
| 0
|
import time
import picamera
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.OUT)
GPIO.output(18, False)
i = 0  # counter used to number the recorded video files
while True:
if(GPIO.input(23) == 1):
print "motion"
GPIO.output(18, True)
        with picamera.PiCamera() as camera:
# Turn the camera's LED off
camera.led = False
# Take video
camera.resolution = (1280, 720)
filename = 'video%02d.h264' % i
camera.start_recording(filename)
while(GPIO.input(23) == 1):
camera.wait_recording(5)
camera.stop_recording()
i = i + 1
elif(GPIO.input(23) == 0):
print "no motion"
GPIO.output(18, False)
#
#
# import time
# import picamera
# import RPi.GPIO as GPIO
#
# GPIO.setmode(GPIO.BCM)
# GPIO.setup(23, GPIO.IN)
# GPIO.setup(18, GPIO.OUT)
#
# while True:
# if(GPIO.input(23) == 1):
# print "motion"
# GPIO.output(18, True)
# elif(GPIO.input(23) == 0):
# print "no motion"
# GPIO.output(18, False)
#
#
|
enkidulan/enkiblog
|
src/enkiblog/core/deform/tempstorage.py
|
Python
|
apache-2.0
| 2,365
| 0
|
import os.path
from uuid import uuid4
import shutil
import logging
logger = logging.getLogger(__name__)
_MARKER = object()
class FileUploadTempStore(object):
session_storage_slug = 'websauna.tempstore'
def __init__(self, request):
self.tempdir = request.registry.settings['websauna.uploads_tempdir']
        if not os.path.isdir(self.tempdir):
            logger.warning("Creating dir: '%s'", self.tempdir)
        os.makedirs(self.tempdir, mode=0o777, exist_ok=True)
self.request = request
self.session = request.session
def preview_url(self, _uid):
# pylint: disable=no-self-use
return None
def __contains__(self, name):
return name in self.session.get(self.session_storage_slug, {})
def __setitem__(self, name, data):
newdata = data.copy()
stream = newdata.pop('fp', None)
if stream is not None:
newdata['randid'] = uuid4().hex
file_name = os.path.join(self.tempdir, newdata['randid'])
shutil.copyfileobj(stream, open(file_name, 'wb'))
self._tempstore_set(name, newdata)
    def _tempstore_set(self, name, data):
# cope with sessioning implementations that cant deal with
# in-place mutation of mutable values (temporarily?)
existing = self.session.get(self.session_storage_slug, {})
existing[name] = data
self.session[self.session_storage_slug] = existing
    def clear(self):
        # Drop everything stored under this store's session key and remove the
        # corresponding temporary files from disk.
        data = self.session.pop(self.session_storage_slug, {})
        for cookie in data.values():
            randid = cookie.get('randid')
file_name = os.path.join(self.tempdir, randid)
try:
os.remove(file_name)
except OSError:
pass
def get(self, name, default=None):
data = self.session.get(self.session_storage_slug, {}).get(name)
if data is None:
return default
newdata = data.copy()
randid = newdata.get('randid')
if randid is not None:
file_name = os.path.join(self.tempdir, randid)
try:
newdata['fp'] = open(file_name, 'rb')
except IOError:
pass
return newdata
def __getitem__(self, name):
data = self.get(name, _MARKER)
if data is _MARKER:
raise KeyError(name)
return data
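# Illustrative usage (assumption, not part of the original module): deform's
# file upload widget takes a temporary store such as this one, e.g.
#   tmpstore = FileUploadTempStore(request)
#   widget = deform.widget.FileUploadWidget(tmpstore)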
|
presidentielcoin/presidentielcoin
|
qa/rpc-tests/bip68-sequence.py
|
Python
|
mit
| 18,409
| 0.003477
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Presidentielcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test BIP68 implementation
#
from test_framework.test_framework import PresidentielcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.blocktools import *
SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
SEQUENCE_LOCKTIME_MASK = 0x0000ffff
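# Illustrative helper (assumption, not used by the test below): a relative
# time-based lock of roughly `seconds` seconds is encoded by shifting into
# 512-second granules and setting the type flag, as described in BIP68.
def example_time_lock_sequence(seconds):
    return SEQUENCE_LOCKTIME_TYPE_FLAG | ((seconds >> SEQUENCE_LOCKTIME_GRANULARITY) & SEQUENCE_LOCKTIME_MASK)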
# RPC error for non-BIP68 final transactions
NOT_FINAL_ERROR = "64: non-BIP68-final"
class BIP68Test(PresidentielcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 2
self.setup_clean_chain = False
def setup_network(self):
self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-blockprioritysize=0"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-blockprioritysize=0", "-acceptnonstdtxn=0"]))
self.is_network_split = False
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
connect_nodes(self.nodes[0], 1)
def run_test(self):
# Generate some coins
self.nodes[0].generate(110)
print("Running test disable flag")
self.test_disable_flag()
print("Running test sequence-lock-confirmed-inputs")
self.test_sequence_lock_confirmed_inputs()
print("Running test sequence-lock-unconfirmed-inputs")
self.test_sequence_lock_unconfirmed_inputs()
print("Running test BIP68 not consensus before versionbits activation")
self.test_bip68_not_consensus()
print("Verifying nVersion=2 transactions aren't standard")
self.test_version2_relay(before_activation=True)
print("Activating BIP68 (and 112/113)")
self.activateCSV()
print("Verifying nVersion=2 transactions are now standard")
self.test_version2_relay(before_activation=False)
print("Passed\n")
# Test that BIP68 is not in effect if tx version is 1, or if
# the first sequence bit is set.
def test_disable_flag(self):
# Create some unconfirmed inputs
new_addr = self.nodes[0].getnewaddress()
self.nodes[0].sendtoaddress(new_addr, 2) # send 2 PRC
utxos = self.nodes[0].listunspent(0, 0)
assert(len(utxos) > 0)
utxo = utxos[0]
tx1 = CTransaction()
value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN)
# Check that the disable flag disables relative locktime.
# If sequence locks were used, this would require 1 block for the
# input to mature.
sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
tx1.vout = [CTxOut(value, CScript([b'a']))]
tx1_signed = self.nodes[0].signrawtransaction(ToHex(tx1))["hex"]
tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
tx1_id = int(tx1_id, 16)
# This transaction will enable sequence-locks, so this transaction should
# fail
tx2 = CTransaction()
tx2.nVersion = 2
sequence_value = sequence_value & 0x7fffffff
tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
tx2.vout = [CTxOut(int(value-self.relayfee*COIN), CScript([b'a']))]
tx2.rehash()
try:
self.nodes[0].sendrawtransaction(ToHex(tx2))
except JSONRPCException as exp:
assert_equal(exp.error["message"], NOT_FINAL_ERROR)
else:
assert(False)
# Setting the version back down to 1 should disable the sequence lock,
# so this should be accepted.
tx2.nVersion = 1
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Calculate the median time past of a prior block ("confirmations" before
# the current tip).
def get_median_time_past(self, confirmations):
block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations)
return self.nodes[0].getblockheader(block_hash)["mediantime"]
# Test that sequence locks are respected for transactions spending confirmed inputs.
def test_sequence_lock_confirmed_inputs(self):
# Create lots of confirmed utxos, and use them to generate lots of random
# transactions.
max_outputs = 50
addresses = []
while len(addresses) < max_outputs:
addresses.append(self.nodes[0].getnewaddress())
while len(self.nodes[0].listunspent()) < 200:
import random
random.shuffle(addresses)
num_outputs = random.randint(1, max_outputs)
outputs = {}
for i in range(num_outputs):
outputs[addresses[i]] = random.randint(1, 20)*0.01
self.nodes[0].sendmany("", outputs)
self.nodes[0].generate(1)
utxos = self.nodes[0].listunspent()
# Try creating a lot of random transactions.
# Each time, choose a random number of inputs, and randomly set
# some of those inputs to be sequence locked (and randomly choose
# between height/time locking). Small random chance of making the locks
# all pass.
for i in range(400):
# Randomly choose up to 10 inputs
num_inputs = random.randint(1, 10)
random.shuffle(utxos)
# Track whether any sequence locks used should fail
should_pass = True
# Track whether this transaction was built with sequence locks
using_sequence_locks = False
tx = CTransaction()
tx.nVersion = 2
value = 0
for j in range(num_inputs):
sequence_value = 0xfffffffe # this disables sequence locks
# 50% chance we enable sequence locks
if random.randint(0,1):
using_sequence_locks = True
# 10% of the time, make the input sequence value pass
input_will_pass = (random.randint(1,10) == 1)
sequence_value = utxos[j]["confirmations"]
if not input_will_pass:
sequence_value += 1
should_pass = False
# Figure out what the median-time-past was for the confirmed input
# Note that if an input has N confirmations, we're going back N blocks
# from the tip so that we're looking up MTP of the block
# PRIOR to the one the input appears in, as per the BIP68 spec.
orig_time = self.get_median_time_past(utxos[j]["confirmations"])
cur_time = self.get_median_time_past(0) # MTP of the tip
# can only timelock this input if it's not too old -- otherwise use height
can_time_lock = True
if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
can_time_lock = False
# if time-lockable, then 50% chance we make this a time lock
if random.randint(0,1) and can_time_lock:
# Find first time-lock value that fails, or latest one that succeeds
time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
if input_will_pass and time_delta > cur_time - orig_time:
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
elif (not input_will_pass and time_delta <= cur_time - orig_time):
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx.vin.append(CTxIn(C
|
Jgarcia-IAS/SAT
|
openerp/addons-extra/odoo-pruebas/odoo-server/addons-extra/ifrs_report/wizard/ifrs_report_wizard.py
|
Python
|
agpl-3.0
| 7,032
| 0.002418
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.netsvc
class ifrs_report_wizard(osv.osv_memory):
""" Wizard que permite al usuario elegir que periodo quiere imprimir del año fiscal """
_name = 'ifrs.report.wizard'
_description = 'IFRS Report'
def onchange_company_id(self, cr, uid, ids, company_id, context=None):
context = context or {}
context['company_id'] = company_id
res = {'value': {}}
if not company_id:
return res
cur_id = self.pool.get('res.company').browse(
cr, uid, company_id, context=context).currency_id.id
fy_id = self.pool.get('account.fiscalyear').find(
cr, uid, context=context)
res['value'].update({'fiscalyear_id': fy_id})
res['value'].update({'currency_id': cur_id})
return res
_columns = {
'period': fields.many2one('account.period', 'Force period', help='Fiscal period to assign to the invoice. Keep empty to use the period of the current date.'),
'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscal Year', help='Fiscal Year'),
'company_id': fields.many2one('res.company', string='Company', ondelete='cascade', required=True, help='Company name'),
'currency_id': fields.many2one('res.currency', 'Currency', help="Currency at which this report will be expressed. If not selected will be used the one set in the company"),
'exchange_date': fields.date('Exchange Date', help='Date of change that will be printed in the report, with respect to the currency of the company'),
'report_type': fields.selection([
('all', 'All Fiscalyear'),
('per', 'Force Period')],
string='Type', required=True, help='Indicates if the report it will be printed for the entire fiscal year, or for a particular period'),
'columns': fields.selection([
('ifrs', 'Two Columns'),
('webkitaccount.ifrs_12', 'Twelve Columns'),
#('ifrs_12_partner_detail', 'With Partner Detail')
],
string='Number of Columns',
help='Number of columns that will be printed in the report:'
" -Two Colums(02),-Twelve Columns(12)"),
'target_move': fields.selection([('posted', 'All Posted Entries'),
('all', 'All Entries'),
], 'Target Moves', help='Print All Accounting Entries or just Posted Accounting Entries'),
'report_format' : fields.selection([
('pdf', 'PDF'),
('spreadsheet', 'Spreadsheet')], 'Report Format')
}
_defaults = {
'report_type': 'all',
'target_move': 'posted',
'company_id': lambda self, cr, uid, c: self.pool.get('ifrs.ifrs').browse(cr, uid, c.get('active_id')).company_id.id,
'fiscalyear_id': lambda self, cr, uid, c: self.pool.get('ifrs.ifrs').browse(cr, uid, c.get('active_id')).fiscalyear_id.id,
'exchange_date': fields.date.today,
'columns': 'ifrs',
'report_format' : 'pdf'
}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
res = super(ifrs_report_wizard, self).default_get(
cr, uid, fields, context=context)
# res.update({'uid_country':
# self._get_country_code(cr,uid,context=context)})
return res
def _get_period(self, cr, uid, context={}):
""" Return the current period id """
account_period_obj = self.pool.get('account.period')
        ids = account_period_obj.find(
cr, uid, time.strftime('%Y-%m-%d'), context=context)
period_id = ids[0]
return period_id
def _get_fiscalyear(self, cr, uid, context={}, period_id=False):
""" Return fiscalyear id for the period_id given.
If period_id is nor given then return the current fiscalyear """
if period_id:
period_obj = self.pool.get(
'account.period').browse(cr, uid, period_id)
fiscalyear_id = period_obj.fiscalyear_id.id
else:
fiscalyear_obj = self.pool.get('account.fiscalyear')
ids = fiscalyear_obj.find(cr, uid, time.strftime(
'%Y-%m-%d'), context=context)
fiscalyear_id = ids
return fiscalyear_id
def print_report(self, cr, uid, ids, context={}):
datas = {'ids': context.get('active_ids', [])}
wizard_ifrs = self.browse(cr, uid, ids, context=context)[0]
datas['report_type'] = str(wizard_ifrs.report_type)
datas['company'] = wizard_ifrs.company_id.id
datas['columns'] = str(wizard_ifrs.columns)
datas['target_move'] = wizard_ifrs.target_move
datas['exchange_date'] = wizard_ifrs.exchange_date
datas['currency_wizard'] = wizard_ifrs.currency_id.id
datas['currency_wizard_name'] = wizard_ifrs.currency_id.name
if datas['report_type'] == 'all':
datas['fiscalyear'] = wizard_ifrs.fiscalyear_id.id or self._get_fiscalyear(
cr, uid, context=context)
datas['period'] = False
else:
datas['columns'] = 'ifrs'
datas['period'] = wizard_ifrs.period.id or self._get_period(
cr, uid, context=context)
datas['fiscalyear'] = self._get_fiscalyear(
cr, uid, context=context, period_id=datas['period'])
if str(wizard_ifrs.columns) == 'webkitaccount.ifrs_12' and wizard_ifrs.report_format == 'spreadsheet':
datas['columns'] = 'webkitaccount.ifrs_12_html'
if str(wizard_ifrs.columns) == 'ifrs' and wizard_ifrs.report_format == 'spreadsheet':
datas['columns'] = 'ifrs_report_html'
return {
'type': 'ir.actions.report.xml',
'report_name': datas['columns'],
'datas': datas
}
ifrs_report_wizard()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
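# --- Illustrative sketch (not part of the original wizard) ----------------------
# The report-name routing at the end of print_report above can be read as a small
# pure function; the helper name below is invented for illustration only.
def _select_report_name(columns, report_format):
    """Spreadsheet output switches both layouts to their HTML report variants."""
    if columns == 'webkitaccount.ifrs_12' and report_format == 'spreadsheet':
        return 'webkitaccount.ifrs_12_html'
    if columns == 'ifrs' and report_format == 'spreadsheet':
        return 'ifrs_report_html'
    return columns

assert _select_report_name('ifrs', 'pdf') == 'ifrs'
assert _select_report_name('ifrs', 'spreadsheet') == 'ifrs_report_html'
assert _select_report_name('webkitaccount.ifrs_12', 'spreadsheet') == 'webkitaccount.ifrs_12_html'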
|
emacsen/changemonger
|
features.py
|
Python
|
agpl-3.0
| 10,844
| 0.003689
|
## Changemonger: An OpenStreetMap change analyzer
## Copyright (C) 2012 Serge Wroclawski
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Contains functions related to Changemonger features for a yaml backend"""
import inflect
import yaml
import os.path
import imp
inflection = inflect.engine()
class BaseFeature:
"""The base feature class"""
def __init__(self, name):
"Init the object"
self.name = name
self.types = []
self.categories = []
self.named = True
self.id = unicode(id(self))
self._prominence = 0
    def prominence(self, ele):
"""How important a feature is"""
score = 0
tags = ele['tags']
if len(ele['tags']) > 0:
score += 1
if ( tags.get('name') or tags.get('brand') or tags.get('operator') ):
score += 2
if tags.get('historical') or tags.get('wikipedia'):
score += 3
return score + self._prominence
def _typecheck(self, ele):
"Check that the element matches this feature's type"
if self.types:
if ele['type'] in self.types:
return True
else:
return False
else:
return True
def category(self, cat):
"Add a category to this feature"
self.categories.append(cat)
def match(self, element):
"Generic function"
# Never use this directly
return True
@property
def plural(self):
"Returns the plural version of the feature's name"
return inflection.plural(self.name)
@property
def precision(self):
"Returns the precision of the object. This should be set"
return 0
class SimpleFeature(BaseFeature):
"""A simple feature (most objects in the yaml files are SimpleFeatures"""
def __init__(self, name):
"Init simple feature"
self.tags = []
BaseFeature.__init__(self, name)
def tag(self, tg):
"Add a tag to object's tags"
self.tags.append(tg)
def match(self, element):
"Matches for simple features uses tags"
if self._typecheck(element):
for tag in self.tags:
if not tag in element['_tags']:
return False
else:
return True
else:
return False
@property
def precision(self):
"Simple features have a precision of 10 + # of tags by default"
return 10 + len(self.tags)
class Category(BaseFeature):
"Feature categories"
def __init__(self, name):
"Init a category"
self.features = []
BaseFeature.__init__(self, name)
def register(self, feature):
"Register a feature to this category"
self.features.append(feature)
def match(self, element):
"The category checks all features for matches"
for feature in self.features:
if feature.match(element):
return True
return False
@property
def precision(self):
"Cat
|
egories are precision 3 by default"
return 3
def compare_precision(a, b):
"""Compare the precision of two features"""
return b.precision - a.precision
class FeatureDB:
"""This is the abstraction against using the features"""
    def __init__(self, directory='features'):
"""Initialize feature database, use the argument as the directory"""
self._simple = []
self._magic = []
# We almost never iterate through categories, but we do call
# them by name a lot
self._categories = {}
# The index contains unique IDs for features
self._index = {}
# Now load the actual features
if not os.path.isabs(directory):
directory = os.path.abspath(directory)
# We're going to just assume the directory exists for now
if os.path.exists(os.path.join(directory, 'features.yaml')):
self._load_yaml_simple_features(
os.path.join(directory, 'simple.yaml'))
elif os.path.isdir(os.path.join(directory, 'simple')):
self._load_simple_directory(os.path.join(directory, 'simple'))
if os.path.exists(os.path.join(directory, 'categories.yaml')):
self._load_yaml_categories(os.path.join(directory,
'categories.yaml'))
if os.path.exists(os.path.join(directory, 'magic.py')):
self._load_magic_file(directory)
@property
def all(self):
"""Return all objects in the database"""
return self._simple + self._categories.values() + self._magic
def _load_magic_file(self, directory):
"""Load a magic (plain python) features file"""
fp, pathname, description = imp.find_module('magic', [directory])
try:
module = imp.load_module('magic', fp, pathname, description)
features = module.magic()
for feature in features:
self._magic.append(feature)
self._index[feature.id] = feature
finally:
if fp:
fp.close()
def _load_simple_directory(self, dirname):
"""Load a directory of feature files"""
for subdir, dirs, files in os.walk(dirname):
for fname in files:
name, ext = os.path.splitext(fname)
if ext == '.yaml' and name[0] != '.':
self._load_yaml_simple_features(
os.path.join(dirname, fname))
def _get_or_make_category(self, name):
"""Either retrieve a category or create one as necessary"""
category = self._categories.get(name)
if not category:
category = Category(name)
self._categories[name] = category
self._index[category.id] = category
return category
def _yaml_item_to_feature(self, item):
"""Takes a yaml item and returns a Feature object"""
feature = SimpleFeature(item['name'])
# type
if item.has_key('types'):
if isinstance(item['types'], basestring):
feature.types = item['types'].split(',')
else:
feature.types = item['types']
# id (virtually unused)
if item.has_key('id'):
feature.id = unicode(item['id'])
# tags
if isinstance(item['tags'], basestring):
tags = item['tags'].split(',')
else:
tags = item['tags']
for tag in tags:
feature.tag(tag)
# plural
if item.has_key('plural'):
feature.plural = item['plural']
# precision
if item.has_key('precision'):
feature.precision = int(item['precision'])
# categories
if item.has_key('categories'):
if isinstance(item['categories'], basestring):
categories = item['categories'].split(',')
else:
categories = item['categories']
for cat_name in categories:
category = self._get_or_make_category(cat_name)
category.register(feature)
feature.category(category)
# Named?
if item.has_key('named'):
feature.named = item['named']
# Prominence
        if item.has_key('prominence'):
            feature._prominence = item['prominence']
return feature
def _load_yaml_categories
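# --- Illustrative sketch (not from the repository) ------------------------------
# Shape of a simple-feature entry as consumed by _yaml_item_to_feature above;
# the keys mirror the accessors in that method, while the values and the tag
# syntax are invented for illustration.
_example_item = yaml.safe_load("""
name: coffee shop
types: node,way
tags: amenity=cafe
plural: coffee shops
categories: food,retail
named: true
""")
# FeatureDB()._yaml_item_to_feature(_example_item) would build a SimpleFeature
# named 'coffee shop' carrying the single tag 'amenity=cafe', registered under
# the 'food' and 'retail' categories.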
|
xaratustrah/pypiscope
|
zmq_listener.py
|
Python
|
gpl-3.0
| 1,215
| 0
|
"""
A client/server code for Raspberry Pi ADC input
Xaratustrah@GitHUB
2016
adapted from:
https://wiki.python.org/moin/PyQt/Writing%20a%20client%20for%20a%20zeromq%20service
"""
from PyQt5.QtCore import pyqtSignal, QThread
import zmq
class ZMQListener(QThread):
message = pyqtSignal(str)
err_msg = pyqtSignal(str)
def __init__(self, host, port, topic_filter):
QThread.__init__(self)
self.host = host
self.port = port
        self.topic_filter = topic_filter
self.running = True
context = zmq.Context()
try:
self.sock = context.socket(zmq.SUB)
self.sock.connect("tcp://{}:{}".format(self.host, self.port))
self.sock.setsockopt_string(zmq.SUBSCRIBE, self.topic_filter)
except(ConnectionRefusedError):
self.err_msg.emit('Server not running. Aborting...')
except(EOFError, KeyboardInterrupt):
            self.err_msg.emit('User input cancelled. Aborting...')
def loop(self):
while self.running:
ba = self.sock.recv()
self.message.emit(ba.decode("utf-8"))
def __del__(self):
self.terminate()
self.quit()
self.wait()
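# --- Illustrative sketch (not from the repository) ------------------------------
# Minimal publisher counterpart for this listener; the port number and topic
# prefix below are placeholders and must match the host/port/topic_filter that
# ZMQListener is given.
if __name__ == '__main__':
    import time
    context = zmq.Context()
    pub = context.socket(zmq.PUB)
    pub.bind("tcp://*:5556")
    while True:
        # The SUB side filters on a string prefix, so lead each message with the topic.
        pub.send_string("adc 0.731")
        time.sleep(0.1)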
|
GoogleCloudPlatform/analytics-componentized-patterns
|
retail/recommendation-system/bqml-scann/ann_grpc/match_pb2_grpc.py
|
Python
|
apache-2.0
| 4,364
| 0.008937
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import match_pb2 as match__pb2
class MatchServiceStub(object):
"""MatchService is a Google managed service for efficient vector similarity
search at scale.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Match = channel.unary_unary(
'/google.cloud.aiplatform.container.v1alpha1.MatchService/Match',
request_serializer=match__pb2.MatchRequest.SerializeToString,
response_deserializer=match__pb2.MatchResponse.FromString,
)
self.BatchMatch = channel.unary_unary(
'/google.cloud.aiplatform.container.v1alpha1.MatchService/BatchMatch',
request_serializer=match__pb2.BatchMatchRequest.SerializeToString,
response_deserializer=match__pb2.BatchMatchResponse.FromString,
)
class MatchServiceServicer(object):
"""MatchService is a Google managed service for efficient vector similarity
search at scale.
"""
def Match(self, request, context):
"""Returns the nearest neighbors for the query. If it is a sharded
deployment, calls the other shards and aggregates the responses.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def BatchMatch(self, request, context):
"""Returns the nearest neighbors for batch queries. If it is a sharded
deployment, calls the other shards and aggregates the responses.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_MatchServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Match': grpc.unary_unary_rpc_method_handler(
servicer.Match,
request_deserializer=match__pb2.MatchRequest.FromString,
response_serializer=match__pb2.MatchResponse.SerializeToString,
),
'BatchMatch': grpc.unary_unary_rpc_method_handler(
servicer.BatchMatch,
request_deserializer=match__pb2.BatchMatchRequest.FromString,
response_serializer=match__pb2.BatchMatchResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.cloud.aiplatform.container.v1alpha1.MatchService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class MatchService(object):
"""MatchService is a Google managed
|
service for efficient vector similarity
search at scale.
"""
@staticmethod
def Match(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.cloud.aiplatform.container.v1alpha1.MatchService/Match',
match__pb2.MatchRequest.SerializeToString,
match__pb2.MatchResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def BatchMatch(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/google.cloud.aiplatform.container.v1alpha1.MatchService/BatchMatch',
match__pb2.BatchMatchRequest.SerializeToString,
match__pb2.BatchMatchResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
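# --- Illustrative client usage (belongs in application code, not in this
# generated module; shown here only as a sketch). The endpoint address is a
# placeholder, and MatchRequest is built empty because its fields are defined
# in match_pb2, not here.
#
#     import grpc
#     import match_pb2
#     import match_pb2_grpc
#
#     channel = grpc.insecure_channel("10.0.0.1:10000")
#     stub = match_pb2_grpc.MatchServiceStub(channel)
#     response = stub.Match(match_pb2.MatchRequest())
#     print(response)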
|
PanDAWMS/panda-server
|
pandaserver/test/lsst/lsstSubmit.py
|
Python
|
apache-2.0
| 5,693
| 0.006499
|
import sys
import time
import uuid
import pandaserver.userinterface.Client as Client
from pandaserver.taskbuffer.JobSpec import JobSpec
from pandaserver.taskbuffer.FileSpec import FileSpec
aSrvID = None
prodUserNameDefault = 'unknown-user'
prodUserName = None
prodUserNameDP = None
prodUserNamePipeline = None
site = 'ANALY_BNL-LSST'
PIPELINE_TASK = None
PIPELINE_PROCESSINSTANCE = None
PIPELINE_EXECUTIONNUMBER = None
PIPELINE_STREAM = None
lsstJobParams = ""
for idx,argv in enumerate(sys.argv):
if argv == '--site':
try:
site = sys.argv[idx + 1]
except Exception:
site = 'ANALY_BNL-LSST'
if argv == '-DP_USER':
try:
prodUserNameDP = sys.argv[idx + 1]
if len(lsstJobParams):
lsstJobParams += "|"
lsstJobParams += "%(key)s=%(value)s" % \
{'key': 'DP_USER', \
'value': str(prodUserNameDP)}
except Exception:
prodUserNameDP = None
if argv == '-PIPELINE_USER':
try:
prodUserNamePipeline = sys.argv[idx + 1]
if len(lsstJobParams):
lsstJobParams += "|"
lsstJobParams += "%(key)s=%(value)s" % \
{'key': 'PIPELINE_USER', \
'value': str(prodUserNamePipeline)}
except Exception:
prodUserNamePipeline = None
if argv == '-PIPELINE_TASK':
try:
PIPELINE_TASK = sys.argv[idx + 1]
if len(lsstJobParams):
lsstJobParams += "|"
lsstJobParams += "%(key)s=%(value)s" % \
{'key': 'PIPELINE_TASK', \
'value': str(PIPELINE_TASK)}
except Exception:
PIPELINE_TASK = None
if argv == '-PIPELINE_PROCESSINSTANCE':
try:
PIPELINE_PROCESSINSTANCE = int(sys.argv[idx + 1])
if len(lsstJobParams):
lsstJobParams += "|"
lsstJobParams += "%(key)s=%(value)s" % \
{'key': 'PIPELINE_PROCESSINSTANCE', \
'value': str(PIPELINE_PROCESSINSTANCE)}
except Exception:
PIPELINE_PROCESSINSTANCE = None
if argv == '-PIPELINE_EXECUTIONNUMBER':
try:
PIPELINE_EXECUTIONNUMBER = int(sys.argv[idx + 1])
if len(lsstJobParams):
lsstJobParams += "|"
lsstJobParams += "%(key)s=%(value)s" % \
{'key': 'PIPELINE_EXECUTIONNUMBER', \
'value': str(PIPELINE_EXECUTIONNUMBER)}
except Exception:
PIPELINE_EXECUTIONNUMBER = None
if argv == '-PIPELINE_STREAM':
try:
PIPELINE_STREAM = int(sys.argv[idx + 1])
if len(lsstJobParams):
lsstJobParams += "|"
lsstJobParams += "%(key)s=%(value)s" % \
{'key': 'PIPELINE_STREAM', \
'value': str(PIPELINE_STREAM)}
except Exception:
PIPELINE_STREAM = None
if argv == '-s':
aSrvID = sys.argv[idx+1]
sys.argv = sys.argv[:idx]
break
### DP_USER and PIPELINE_USER preference
if prodUserNameDP is not None:
prodUserName = prodUserNameDP
elif prodUserNamePipeline is not None:
prodUserName = prodUserNamePipeline
#site = sys.argv[1]
#site = 'ANALY_BNL-LSST' #orig
#site = 'BNL-LSST'
#site = 'SWT2_CPB-LSST'
#site = 'UTA_SWT2-LSST'
#site = 'ANALY_SWT2_CPB-LSST'
destName = None
if prodUserName is not None \
and PIPELINE_TASK is not None \
and PIPELINE_PROCESSINSTANCE is not None:
datasetName = 'panda.lsst.user.%(PIPELINE_PROCESSINSTANCE)s.%(PIPELINE_TASK)s.%(prodUserName)s' % \
{'prodUserName': str(prodUserName), \
'PIPELINE_TASK': str(PIPELINE_TASK), \
'PIPELINE_PROCESSINSTANCE': str(PIPELINE_PROCESSINSTANCE) \
}
else:
datasetName = 'panda.lsst.user.jschovan.%s' % str(uuid.uuid4())
if prodUserName is not None \
and PIPELINE_TASK is not None \
and PIPELINE_EXECUTIONNUMBER is not None \
and PIPELINE_STREAM is not None:
jobName = 'job.%(PIPELINE_PROCESSINSTANCE)s.%(PIPELINE_TASK)s.%(PIPELINE_EXECUTIONNUMBER)s.%(prodUserName)s.%(PIPELINE_STREAM)s' % \
{'prodUserName': str(prodUserName), \
            'PIPELINE_TASK': str(PIPELINE_TASK), \
'PIPELINE_EXECUTIONNUMBER': str(PIPELINE_EXECUTIONNUMBER), \
'PIPELINE_STREAM': str(PIPELINE_STREAM), \
'PIPELINE_PROCESSINSTANCE': str(PIPELINE_PROCESSINSTANCE) \
}
else:
jobName = "%s" % str(uuid.uuid4())
if PIPELINE_STREAM is not None:
jobDefinitionID = PIPELINE_STREAM
else:
jobDefinitionID = int(time.time()) % 10000
job = JobSpec()
job.jobDefinitionID = jobDefinitionID
job.jobName = jobName
job.transformation = 'http://pandawms.org/pandawms-jobcache/lsst-trf.sh'
job.destinationDBlock = datasetName
job.destinationSE = 'local'
job.currentPriority = 1000
job.prodSourceLabel = 'panda'
job.jobParameters = ' --lsstJobParams="%s" ' % lsstJobParams
if prodUserName is not None:
job.prodUserName = prodUserName
else:
job.prodUserName = prodUserNameDefault
if PIPELINE_PROCESSINSTANCE is not None:
job.taskID = PIPELINE_PROCESSINSTANCE
if PIPELINE_EXECUTIONNUMBER is not None:
job.attemptNr = PIPELINE_EXECUTIONNUMBER
if PIPELINE_TASK is not None:
job.processingType = PIPELINE_TASK
job.computingSite = site
job.VO = "lsst"
fileOL = FileSpec()
fileOL.lfn = "%s.job.log.tgz" % job.jobName
fileOL.destinationDBlock = job.destinationDBlock
fileOL.destinationSE = job.destinationSE
fileOL.dataset = job.destinationDBlock
fileOL.type = 'log'
job.addFile(fileOL)
s,o = Client.submitJobs([job],srvID=aSrvID)
print(s)
for x in o:
print("PandaID=%s" % x[0])
|
globaltoken/globaltoken
|
test/functional/feature_bip9_softforks.py
|
Python
|
mit
| 12,919
| 0.004722
|
#!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP 9 soft forks.
Connect to a single node.
regtest lock-in with 108/144 block signalling
activation after a further 144 blocks
mine 2 block and save coinbases for later use
mine 141 blocks to transition from DEFINED to STARTED
mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
mine a further 143 blocks (LOCKED_IN)
test that enforcement has not triggered (which triggers ACTIVE)
test that enforcement has triggered
"""
from io import BytesIO
import shutil
import time
import itertools
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, network_thread_start
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
class BIP9SoftForksTest(ComparisonTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-whitelist=127.0.0.1']]
self.setup_clean_chain = True
def run_test(self):
self.test = TestManager(self, self.options.tmpdir)
self.test.add_all_connections(self.nodes)
network_thread_start()
self.test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(rawtx))
tx.deserialize(f)
tx.nVersion = 2
return tx
def sign_transaction(self, node, tx):
signresult = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
return tx
def generate_blocks(self, number, version, test_blocks = []):
for i in range(number):
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = version
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
return test_blocks
def get_bip9_status(self, key):
info = self.nodes[0].getblockchaininfo()
return info['bip9_softforks'][key]
def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature, bitno):
assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
assert_equal(self.get_bip9_status(bipName)['since'], 0)
# generate some coins for later
self.coinbase_blocks = self.nodes[0].generate(2)
self.height = 3 # height of the next block to build
self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = int(time.time())
assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
assert_equal(self.get_bip9_status(bipName)['since'], 0)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert(bipName not in tmpl['vbavailable'])
assert_equal(tmpl['vbrequired'], 0)
assert_equal(tmpl['version'], 0x20000000)
# Test 1
# Advance from DEFINED to STARTED
test_blocks = self.generate_blocks(141, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
assert_equal(self.get_bip9_status(bipName)['since'], 144)
assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert_equal(tmpl['vbavailable'][bipName], bitno)
assert_equal(tmpl['vbrequired'], 0)
assert(tmpl['version'] & activated_version)
# Test 1-A
# check stats after max number of "signalling not" blocks such that LOCKED_IN still possible this period
test_blocks = self.generate_blocks(36, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(10, activated_version) # 0x20000001 (signalling ready)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 46)
assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 10)
assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], True)
# Test 1-B
# check stats after one additional "signalling not" block -- LOCKED_IN no longer possible this period
test_blocks = self.generate_blocks(1, 4, test_blocks) # 0x00000004 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 47)
assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 10)
assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], False)
# Test 1-C
# finish period with "ready" blocks, but soft fork will still fail to advance to LOCKED_IN
test_blocks = self.generate_blocks(97, activated_version) # 0x20000001 (signalling ready)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], True)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
# Test 2
# Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
assert_equal(self.get_bip9_status(bipName)['since'], 144)
assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert_equal(tmpl['vbavailable'][bipName], bitno)
assert_equal(tmpl['vbrequired'], 0)
assert(tmpl['version'] & activated_version)
# Test 3
# 108 out of 144 signal bit 1 to achieve LOCKED_IN
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(57, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
# check counting stats and "possible" flag before last block of this pe
|
GAngelov5/Sportvendor
|
sportvendor/sportvendor/users/forms.py
|
Python
|
gpl-2.0
| 1,164
| 0
|
from django.forms import ModelForm
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class UserLogin(ModelForm):
class Meta:
model = User
fields = ['username', 'password']
class UserRegister(UserCreationForm):
email = forms.EmailField(required=True)
first_name = forms.CharField(required=True)
last_name = forms.CharField(required=True)
class Meta:
model = User
fields = ['username']
def save(self, commit=True):
user = super(UserRegister, self).save(commit=False)
user.email = self.cleaned_data['email']
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
if commit:
user.save()
return user
class UserProfile(ModelForm):
class Meta:
model = User
fields = ['username', 'email', 'first_name', 'last_name']
def __init__(self, *args, **kwargs):
super(UserProfile, self).__init__(*args, **kwargs)
self.fields["username"].disabled = True
self.fields["email"].disabled = True
|