repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
xeor/test_files | binary/noise/_script/utf-8_0-ffff.py | Python | unlicense | 119 | 0.033613 | #!/ | usr/bin/env python
open('utf-8_0-ffff', 'w').write(''.join([ unichr(i) for i in xrange(65535) ]).encode('utf-8'));
| |
ging/horizon | openstack_dashboard/api/nova.py | Python | apache-2.0 | 29,745 | 0 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings
from django.utils.functional import cached_property # noqa
from django.utils.translation import ugettext_lazy as _
from novaclient import exceptions as nova_exceptions
from novaclient.v1_1 import client as nova_client
from novaclient.v1_1.contrib import instance_action as nova_instance_action
from novaclient.v1_1.contrib import list_extensions as nova_list_extensions
from novaclient.v1_1 import security_group_rules as nova_rules
from novaclient.v1_1 import security_groups as nova_security_groups
from novaclient.v1_1 import servers as nova_servers
from horizon import conf
from horizon.utils import functions as utils
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
from openstack_dashboard.api import network_base
LOG = logging.getLogger(__name__)
# API static values
INSTANCE_ACTIVE_STATE = 'ACTIVE'
VOLUME_STATE_AVAILABLE = "available"
DEFAULT_QUOTA_NAME = 'default'
class VNCConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_vnc_console method.
"""
_attrs = ['url', 'type']
class SPICEConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_spice_console method.
"""
_attrs = ['url', 'type']
class RDPConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_rdp_console method.
"""
_attrs = ['url', 'type']
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server.
Preserves the request info so image name can later be retrieved.
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
'tenant_id', 'user_id', 'created', 'OS-EXT-STS:power_state',
'OS-EXT-STS:task_state', 'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:host', 'OS-EXT-AZ:availability_zone',
'OS-DCF:diskConfig']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
# TODO(gabriel): deprecate making a call to Glance as a fallback.
@property
def image_name(self):
import glanceclient.exc as glance_exceptions # noqa
from openstack_dashboard.api import glance # noqa
if not self.image:
return "-"
if hasattr(self.image, 'name'):
return self.image.name
if 'name' in self.image:
return self.image['name']
else:
try:
image = glance.image_get(self.request, self.image['id'])
return image.name
except glance_exceptions.ClientException:
return "-"
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
@property
def availability_zone(self):
return getattr(self, 'OS-EXT-AZ:availability_zone', "")
class Hypervisor(base.APIDictWrapper):
"""Simple wrapper around novaclient.hypervisors.Hypervisor."""
_attrs = ['manager', '_loaded', '_info', 'hypervisor_hostname', 'id',
'servers']
@property
def servers(self):
# if hypervisor doesn't have servers, the attribute is not present
servers = []
try:
servers = self._apidict.servers
except Exception:
pass
return servers
class NovaUsage(base.APIResourceWrapper):
"""Simple wrapper around contrib/simple_usage.py."""
_attrs = ['start', 'serv | er_usages', 'stop', 'tenant_id',
'total_local_gb_usage', 'total_memory_mb_usage',
'total_vcpus_usage', 'total_hours']
def get_summary(self):
return {'instances': self.total_active_instances,
'memory_mb': self.memory_mb,
'vcpus': getattr(self, "total_vcpus_usage", 0),
'vcpu_hours': self.vcpu_hours,
'local_gb': self.local_gb,
'disk_gb_hours': self.disk_gb_hours}
@property
def tot | al_active_instances(self):
return sum(1 for s in self.server_usages if s['ended_at'] is None)
@property
def vcpus(self):
return sum(s['vcpus'] for s in self.server_usages
if s['ended_at'] is None)
@property
def vcpu_hours(self):
return getattr(self, "total_hours", 0)
@property
def local_gb(self):
return sum(s['local_gb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def memory_mb(self):
return sum(s['memory_mb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def disk_gb_hours(self):
return getattr(self, "total_local_gb_usage", 0)
class SecurityGroup(base.APIResourceWrapper):
"""Wrapper around novaclient.security_groups.SecurityGroup.
Wraps its rules in SecurityGroupRule objects and allows access to them.
"""
_attrs = ['id', 'name', 'description', 'tenant_id']
@cached_property
def rules(self):
"""Wraps transmitted rule info in the novaclient rule class."""
manager = nova_rules.SecurityGroupRuleManager(None)
rule_objs = [nova_rules.SecurityGroupRule(manager, rule)
for rule in self._apiresource.rules]
return [SecurityGroupRule(rule) for rule in rule_objs]
class SecurityGroupRule(base.APIResourceWrapper):
"""Wrapper for individual rules in a SecurityGroup."""
_attrs = ['id', 'ip_protocol', 'from_port', 'to_port', 'ip_range', 'group']
def __unicode__(self):
if 'name' in self.group:
vals = {'from': self.from_port,
'to': self.to_port,
'group': self.group['name']}
return _('ALLOW %(from)s:%(to)s from %(group)s') % vals
else:
vals = {'from': self.from_port,
'to': self.to_port,
'cidr': self.ip_range['cidr']}
return _('ALLOW %(from)s:%(to)s from %(cidr)s') % vals
# The following attributes are defined to keep compatibility with Neutron
@property
def ethertype(self):
return None
@property
def direction(self):
return 'ingress'
class SecurityGroupManager(network_base.SecurityGroupManager):
backend = 'nova'
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list(self):
return [SecurityGroup(g) for g
in self.client.security_groups.list()]
def get(self, sg_id):
return SecurityGroup(self.client.security_groups.get(sg_id))
def create(self, name, desc):
return SecurityGroup(self.client.security_groups.create(name, desc))
def update(self, sg_id, name, desc):
return SecurityGroup(self.client.security_groups.update(sg_id,
name, desc))
def delete(self, security_group_id):
self.client.security_g |
RaD/django-tinymce | tinymce/widgets.py | Python | mit | 4,724 | 0.00127 | # Copyright (c) 2008 Joost Cassee
# Licensed under the terms of the MIT License (see LICENSE.txt)
"""
This TinyMCE widget was copied and extended from this code by John D'Agostino:
http://code.djangoproject.com/wiki/CustomWidgetsTinyMCE
"""
from django import forms
from django.conf import settings
from django.contrib.admin import widgets as admin_widgets
from django.core.urlresolvers import reverse
from django.forms.widgets import flatatt
from django.utils.encoding import smart_unicode
from django.utils.html import escape
from django.utils import simplejson
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.translation import get_language, ugettext as _
import tinymce.settings
class TinyMCE(forms.Textarea):
"""
TinyMCE widget. Set settings.TINYMCE_JS_URL to set the location of the
javascript file. Default is "MEDIA_URL + 'js/tiny_mce/tiny_mce.js'".
You can customize the configuration with the mce_attrs argument to the
constructor.
In addition to the standard configuration you can set the
'content_language' parameter. It takes the value of the 'language'
parameter by default.
In addition to the default settings from settings.TINYMCE_DEFAULT_CONFIG,
this widget sets the 'language', 'directionality' and
'spellchecker_languages' parameters by default. The first is derived from
the current Django language, the others from the 'content_language'
parameter.
"""
def __init__(self, content_language=None, attrs=None, mce_attrs={}):
super(TinyMCE, self).__init__(attrs)
self.mce_attrs = mce_attrs
if content_language is None:
content_language = mce_attrs.get('language', None)
self.content_language = content_language
def render(self, name, value, attrs=None):
if value is None: value = ''
value = smart_unicode(value)
final_attrs = self.build_attrs(attrs)
final_attrs['name'] = name
assert 'id' in final_attrs, "TinyMCE widget attributes must contain 'id'"
mce_config = tinymce.settings.DEFAULT_CONFIG.copy()
mce_config.update(get_language_config(self.content_language))
if tinymce.settings.USE_FILEBROWSER:
mce_config['file_browser_callback'] = "djangoFileBrowser"
mce_config.update(self.mce_attrs)
mce_config['mode'] = 'exact'
mce_config['elements'] = final_attrs['id']
mce_config['strict_loading_mode'] = 1
mce_json = simplejson.dumps(mce_config)
html = [u'<textarea%s>%s</textarea>' % (flatatt(final_attrs), escape(value))]
if tinymce.settings.USE_COMPRESSOR:
compressor_config = {
'plugins': mce_config.get('plugins', ''),
'themes': mce_c | onfig.get('theme', 'advanced'),
'languages': mce_config.get('language', ''),
'diskcache': True,
'debug': False,
}
compressor_json = simplejson.dumps(compressor_config)
html.append(u'<script type="text/javascript">tinyMCE_GZ.init(%s)</script>' % compressor_json)
html.append(u'<script type="text/javascript">tinyMCE.init(%s)</script>' % mce | _json)
return mark_safe(u'\n'.join(html))
def _media(self):
if tinymce.settings.USE_COMPRESSOR:
js = [reverse('tinymce-compressor')]
else:
js = [tinymce.settings.JS_URL]
if tinymce.settings.USE_FILEBROWSER:
js.append(reverse('tinymce-filebrowser'))
return forms.Media(js=js)
media = property(_media)
class AdminTinyMCE(admin_widgets.AdminTextareaWidget, TinyMCE):
pass
def get_language_config(content_language=None):
language = get_language()[:2]
if content_language:
content_language = content_language[:2]
else:
content_language = language
config = {}
config['language'] = language
lang_names = SortedDict()
for lang, name in settings.LANGUAGES:
if lang[:2] not in lang_names: lang_names[lang[:2]] = []
lang_names[lang[:2]].append(_(name))
sp_langs = []
for lang, names in lang_names.items():
if lang == content_language:
default = '+'
else:
default = ''
sp_langs.append(u'%s%s=%s' % (default, ' / '.join(names), lang))
config['spellchecker_languages'] = ','.join(sp_langs)
if content_language in settings.LANGUAGES_BIDI:
config['directionality'] = 'rtl'
else:
config['directionality'] = 'ltr'
if tinymce.settings.USE_SPELLCHECKER:
config['spellchecker_rpc_url'] = reverse('tinymce.views.spell_check')
return config
|
HuygensING/bioport-buildout | plone-buildout/bootstrap.py | Python | gpl-3.0 | 3,857 | 0.005445 | ##############################################################################
#
# Copyright (c) 2006 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
$Id: bootstrap.py 108946 2010-02-12 02:40:18Z yusei $
"""
import os, shutil, sys, tempfile, urllib2
from optparse import OptionParser
tmpeggs = tempfile.mkdtemp()
is_jython = sys.platform.startswith('java')
# parsing arguments
parser = OptionParser()
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
action="store_true", dest="distribute", default=False,
help="Use Distribute rather than Setuptools.")
parser.add_option("-c", None, action="store", dest="config_file",
help=("Specify the path to the buildout configuration "
"file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout' main function
if options.config_file is not None:
args += ['-c', options.config_file]
if options.version is not None:
| VERSION = '==%s' % options.version
else:
VERSION = ''
USE_DISTRIBUTE = options.distribute
args = args + ['bootstrap']
to_reload = False
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
to_reload = True
raise ImportError
except ImportError:
ez = {}
i | f USE_DISTRIBUTE:
exec urllib2.urlopen('http://python-distribute.org/distribute_setup.py'
).read() in ez
ez['use_setuptools'](to_dir=tmpeggs, download_delay=0, no_fake=True)
else:
exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py'
).read() in ez
ez['use_setuptools'](to_dir=tmpeggs, download_delay=0)
if to_reload:
reload(pkg_resources)
else:
import pkg_resources
if sys.platform == 'win32':
def quote(c):
if ' ' in c:
return '"%s"' % c # work around spawn lamosity on windows
else:
return c
else:
def quote (c):
return c
cmd = 'from setuptools.command.easy_install import main; main()'
ws = pkg_resources.working_set
if USE_DISTRIBUTE:
requirement = 'distribute'
else:
requirement = 'setuptools'
if is_jython:
import subprocess
assert subprocess.Popen([sys.executable] + ['-c', quote(cmd), '-mqNxd',
quote(tmpeggs), 'zc.buildout' + VERSION],
env=dict(os.environ,
PYTHONPATH=
ws.find(pkg_resources.Requirement.parse(requirement)).location
),
).wait() == 0
else:
assert os.spawnle(
os.P_WAIT, sys.executable, quote (sys.executable),
'-c', quote (cmd), '-mqNxd', quote (tmpeggs), 'zc.buildout' + VERSION,
dict(os.environ,
PYTHONPATH=
ws.find(pkg_resources.Requirement.parse(requirement)).location
),
) == 0
ws.add_entry(tmpeggs)
ws.require('zc.buildout' + VERSION)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
shutil.rmtree(tmpeggs)
|
Zlash65/erpnext | erpnext/selling/page/point_of_sale/point_of_sale.py | Python | gpl-3.0 | 4,834 | 0.03041 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.utils.nestedset import get_root_of
from frappe.utils import cint
from erpnext.accounts.doctype.pos_profile.pos_profile import get_item_groups
from six import string_types
@frappe.whitelist()
def get_items(start, page_length, price_list, item_group, search_value="", pos_profile=None):
data = dict()
warehouse = ""
display_items_in_stock = 0
if pos_profile:
warehouse, display_items_in_stock = frappe.db.get_value('POS Profile', pos_profile, ['warehouse', 'display_items_in_stock'])
if not frappe.db.exists('Item Group', item_group):
item_group = get_root_of('Item Group')
if search_value:
data = search_serial_or_batch_or_barcode_number(search_value)
item_code = data.get("item_code") if data.get("item_code") else search_value
serial_no = data.get("serial_no") if data.get("serial_no") else ""
batch_no = data.get("batch_no") if data.get("batch_no") else ""
barcode = data.get("barcode") if data.get("barcode") else ""
condition = get_conditions(item_code, serial_no, batch_no, barcode)
if pos_profile:
condition += get_item_group_condition(pos_profile)
lft, rgt = frappe.db.get_value('Item Group', item_group, ['lft', 'rgt'])
# locate function is used to sort by closest match from the beginning of the value
result = []
items_data = frappe.db.sql(""" SELECT name as item_code,
item_name, image as item_image, idx as idx,is_stock_item
FROM
`tabItem`
WHERE
disabled = 0 and has_variants = 0 and is_sales_item = 1
and item_group in (select name from `tabItem Group` where lft >= {lft} and rgt <= {rgt})
and {condition} order by idx desc limit {start}, {page_length}"""
.format(
start=start, page_length=page_length,
lft=lft, rgt=rgt,
condition=condition
), as_dict=1)
if items_data:
items = [d.item_code for d in items_data]
item_prices_data = frappe.get_all("Item Price",
fields = ["item_code", "price_list_rate", "currency"],
filters = {'price_list': price_list, 'item_code': ['in', items]})
item_prices, bin_data = {}, {}
for d in item_prices_data:
item_prices[d.item_code] = d
if display_items_in_stock:
filters = {'actual_qty': [">", 0], 'item_code': ['in', items]}
if warehouse:
filters['warehouse'] = warehouse
bin_data = frappe._dict(
frappe.get_all("Bin", fields = ["item_code", "sum(actual_qty) as actual_qty"],
filters = filters, group_by = "item_code")
)
for item in items_data:
row = {}
row.update(item)
item_price = item_prices.get(item.item_code) or {}
row.update({
'price_list_rate': item_price.get('price_list_rate'),
'currency': item_price.get('currency'),
'actual_qty': bin_data.get('actual_qty')
})
result.append(row)
res = {
'items': result
}
if serial_no:
res.update({
'serial_no': serial_no
})
if batch_no:
res.update({
'batch_no': batch_no
})
if barcode:
res.update({
'barcode': barcode
})
return res
@frappe.whitelist()
def search_serial_or_batch_or_barcode_number(search_value):
# search barcode no
barcode_data = frappe.db.get_value('Item Barcode', {'barcode': search_value}, ['barcode', 'parent as item_code'], as_dict=True)
if barcode_data:
return barcode_data
# search serial no
serial_no_data = frappe.db.get_value('Serial No', search_value, ['name as serial_no', 'item_code'], as_dict=True)
if serial_no_data:
return serial_no_data
# search batch no
batch_no_data = frappe.db.get_value('Batch', search_value, ['name as batch_no', 'item as item_code'], as_dict=True)
if batch_no_data:
return batch_no_data
return {}
def get_conditions(item_code, serial_no, batch_no, barcode):
if serial_no or batch_no or barcode:
return "name = {0}".format(frappe.db.escape(item_code))
return """(name like {item_code}
or item_name like {item_code})""".format(item_code = frappe.db.escape('%' + item_code + '%'))
def get_item_group_condition(pos_profile):
cond = "and 1=1"
item_groups = get_item_groups(pos_profile)
if item_groups:
cond = "and item_group in (%s)"%(', '.join(['%s']*len(item_groups)))
return cond % tuple(item_groups)
def item_group_query(doctype, txt, searchfie | ld, start, page_len, filters):
item_groups = []
cond = "1=1"
pos_profile= filters.get('pos_profile')
if pos_profile:
item_groups = get_item_groups(pos_profile)
if item_groups:
cond = "name in (%s)"%(', '.join(['%s']*len(item_groups)))
cond = cond % tuple(item_groups)
return frappe.db.sql(""" select distinct name from `tabItem Group`
where {condition} a | nd (name like %(txt)s) limit {start}, {page_len}"""
.format(condition = cond, start=start, page_len= page_len),
{'txt': '%%%s%%' % txt})
|
tbentropy/tilecutter | old/v.0.5/tc.py | Python | bsd-3-clause | 5,573 | 0.007177 | # TC Backend functions
# Hack to make PIL work with py2exe
import Image
import PngImagePlugin
import JpegImagePlugin
import GifImagePlugin
import BmpImagePlugin
Image._initialized=2
import wx
##import wx.lib.masked as masked
##import wx.lib.scrolledpanel as scrolled
##import wx.lib.hyperlink as hl
import sys, os
import pickle, copy
def Export(self, export_dat=1, export_png=1):
"""Exports the cut png image and dat file"""
output_png = "test-output.png"
output_dat = "test-output.dat"
dat_to_png = "test-output"
# Firstly find the path from dat file to png
# Check that both of these are filled out, if png only then
# don't export the dat file and throw Warning
# If dat only then export the dat only, and throw Warning
# If neither, than stop with Error
p = self.active.info.paksize
x_dims = self.active.info.xdims
y_dims = self.active.info.ydims
z_dims = self.active.info.zdims
view_dims = self.active.info.views
winter_dims = self.active.info.winter
front_dims = self.active.info.frontimage
frame_dims = len(self.active.frame)
unit = (xdims,ydims*zdims)
width = view_dims * (unit[0] + unit[0]*winter_dims)
height = frame_dims * (unit[1] + unit[1]*front_dims)
# Create the wxImage and PILImage for the output
img = Image.new("RGBA", (width*p,height*p), color=(231,255,255,0))
if winter_dims == 0:
if front_dims == 0:
ii = [0]
else:
ii = [0,2]
else:
if front_dims == 0:
ii = [0,1]
else:
ii = [0,1,2,3]
for f in range(len(self.active.frame)):
for d in range(self.active.info.views):
for i in ii:
# Make a temp image to copy from
im = self.active.frame[f].direction[d].image[i].image
# If offset is negative...
if self.active.frame[f].direction[d] in [0,2]:
# Normal dimensions
xx = len(self.active.info.xdims)
yy = len(self.active.info.ydims)
else:
# Reverse dimensions
xx = len(self.activ | e.info.ydims)
yy = len(self.active. | info.xdims)
zz = self.active.info.zdims
w = (xx + yy) * (p/2)
h = ((xx + yy) * (p/4)) + (p/2) + ((zz - 1) * p)
offset_x = self.active.frame[f].direction[d].image[i].offset_x
offset_y = self.active.frame[f].direction[d].image[i].offset_y
abs_off_x = abs(offset_x)
abs_off_y = abs(offset_y)
if offset_x < 0:
# Image must be moved...
image_offset_x = abs_off_x
else:
image_offset_x = 0
if offset_y < 0:
image_offset_y = abs_off_y
else:
image_offset_y = 0
# Now create a copy of the input image to us...
tempimg = Image.new("RGB", (max([w,im.size[0]])+abs_offx, max([h,im.size[1]])+abs_offy), color=(231,255,255,0))
# And paste this image into it at the right spot
# Paste the base image into the output
tempimg.paste(im,(image_offset_x,image_offset_y))
# Now copy from and mask each bit of the image
for z in range(zz):
for x in range(xx):
for y in range(yy):
# Complex thing to work out where to paste this particular square
if winter_dims == 0:
xpos = (d * unit[0]) + x
else:
# Winter image also
if i in [0,2]:
# If no winter image
xpos = (d * unit[0] * 2) + x
else:
# If winter image
xpos = (d * unit[0] * 2) + unit[0] + x
if front_dims == 0:
ypos = (f * unit[1]) + y
else:
# Front image also
if i in [0,1]:
# If no front image
ypos = (f * unit[1] * 2) + y
else:
# If front image
ypos = (f * unit[1] * 2) + unit[1] + y
img.paste(tempim,(xpos,ypos,xpos+p,ypos+p))
# Masking routine goes here...
img.save("test.png")
# Make image to take outputs from
# If exporting png:
# Frames are primary vertical, then direction horizontally,
# followed by front/back vertically and summer/winter horizontally
# Then the individual cut images
# Even if not exporting png:
# For each one paste into a temporary proto-dat file the image
# array information
# If exporting dat:
# Write out all the necessary file data
def ExportSmoke(self):
"""Exports a smoke object"""
def ExportCursor(self):
"""Exports the cursor/icon for a building"""
|
cbuntain/ncaa_football_predictor | Grabber/StatsClass.py | Python | bsd-2-clause | 7,312 | 0.00547 | #!/usr/bin/python
import StringIO
import urllib
import urllib2
from lxml import etree
class NcaaGrabber:
def __init__(self):
self.ncaaUrl = 'http://web1.ncaa.org'
self.ncaaStatsSite = self.ncaaUrl+'/football/exec/rankingSummary'
# self.ncaaTeamList2008 = self.ncaaUrl+'/mfb/%d/Internet/ranking_summary/DIVISIONB.HTML'
# self.ncaaWeeklyBase2008 = self.ncaaUrl+'/mfb/%d/Internet/worksheets'
# self.ncaaWeekly2008 = self.ncaaWeeklyBase2008+'/DIVISIONB.HTML'
self.ncaaTeamListBase = self.ncaaUrl+'/mfb/%d/Internet/ranking_summary'
self.ncaaWeeklyBase = self.ncaaUrl+'/mfb/%d/Internet/worksheets'
self.fbsDiv = '/DIVISIONB.HTML'
self.fcsDiv = '/DIVISIONC.HTML'
def getTeams(self, division, year):
fullUrl = self.ncaaTeamListBase % year
if ( division == 'fbs' ):
fullUrl = fullUrl + self.fbsDiv
else:
fullUrl = fullUrl + self.fcsDiv
response = urllib2.urlopen(fullUrl)
responseHtml = response.read()
htmlParser = etree.HTMLParser()
htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
mainTablePaths = htmlTree.xpath('//body/table')
linkPaths = mainTablePaths[0].xpath('.//td/a')
data = {}
for link in linkPaths:
team = link.text
org = -1
linkStr = link.get('href')
linkStrArr = linkStr.split('&')
for linkStrPart in linkStrArr:
if ( linkStrPart.startswith('org=') ):
linkStrPart = linkStrPart.replace('org=', '')
if ( linkStrPart.isdigit() ):
org = linkStrPart
data[team] = org
return data
# def getTeams(self, year):
# data = {}
# data['year'] = year
# data['org'] = 8
# data['week'] = 1
# getData = urllib.urlencode(data)
# fullUrl = self.ncaaStatsSite + '?' + getData
# response = urllib2.urlopen(fullUrl)
# responseHtml = response.read()
# htmlParser = etree.HTMLParser()
# htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
# optionRows = htmlTree.xpath('/html/body/span[@class="noprint"]/select[@name="teamSelection"]/option')
# teams = {}
# for teamOption in optionRows:
# teamName = teamOption.text
# teamValue = int(teamOption.get("value"))
# if ( teamValue > -1 ):
# teams[teamName] = teamValue
# return teams
def getStats(self, team, year, week):
data = {}
data['org'] = team
data['week'] = week
data['year'] = year
getData = urllib.urlencode(data)
fullUrl = self.ncaaStatsSite + '?' + getData
response = urllib2.urlopen(fullUrl)
responseHtml = response.read()
htmlParser = etree.HTMLParser()
htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
teamTableRows = htmlTree.xpath('//table[@id="teamRankings"]/tr[position()>4]')
stats = {}
for statRow in teamTableRows:
dataCells = statRow.xpath('./td')
if ( len(dataCells) < 1 ):
continue
category = dataCells[0].xpath('./a')[0].text
value = dataCells[2].text
rank = dataCells[1].text.lstrip('T-')
stats[category] = (value, rank)
return stats
def isHomeGame(self, team, year, week):
data = {}
data['org'] = team
data['week'] = week
data['year'] = year
getData = urllib.urlencode(data)
fullUrl = self.ncaaStatsSite + '?' + getData
print(fullUrl)
response = urllib2.urlopen(fullUrl)
responseHtml = response.read()
htmlParser = etree.HTMLParser()
htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
scheduleTableRows = htmlTree.xpath('//table[@id="schedule"]/tr/td[position()=1]/a/../../td[position()=2]')
| lastScheduleRow = scheduleTableRows[-1]
isHome = False
if ( lastScheduleRow is not None ):
if ( lastScheduleRow.text is None ):
linkElement = lastScheduleRow.xpath('./a')[0]
gameLocation = linkElement.text
if ( gameLocation.isupper() and gameLocation.find("@") < 0 ):
if ( gameLocation.find("^") < 0 ):
isHome = 1
else:
| isHome = 2
else:
isHome = 0
else:
gameLocation = lastScheduleRow.text
if ( gameLocation.isupper() and gameLocation.find("@") < 0 ):
if ( gameLocation.find("^") < 0 ):
isHome = 1
else:
isHome = 2
else:
isHome = 0
return isHome
def getNumWeeks(self, division, year):
fullUrl = self.ncaaWeeklyBase % year
if ( division == 'fbs' ):
fullUrl = fullUrl + self.fbsDiv
else:
fullUrl = fullUrl + self.fcsDiv
response = urllib2.urlopen(fullUrl)
responseHtml = response.read()
htmlParser = etree.HTMLParser()
htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
tableRowArr = htmlTree.xpath('//body/table/tr')
count = len(tableRowArr) - 1
return count
def processWeekly(self, year, week, teams):
return self.processWeekly("fbs", year, week, team)
def processWeekly(self, division, year, week, teams):
schedule = []
week = week - 1
fullUrl = self.ncaaWeeklyBase % year
if ( division == 'fbs' ):
fullUrl = fullUrl + self.fbsDiv
else:
fullUrl = fullUrl + self.fcsDiv
response = urllib2.urlopen(fullUrl)
responseHtml = response.read()
htmlParser = etree.HTMLParser()
htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
tableRowArr = htmlTree.xpath('//body/table/tr')
weekRow = tableRowArr[week+1]
weekLinkCol = weekRow.find('td')
weekLink = weekLinkCol.find('a')
weekUrl = (self.ncaaWeeklyBase + '/' + weekLink.values()[0]) % year
response = urllib2.urlopen(weekUrl)
responseHtml = response.read()
htmlTree = etree.parse(StringIO.StringIO(responseHtml), htmlParser)
trList = htmlTree.xpath('//body/table[@width="80%"]/tr')
for tr in trList[1:]:
tds = tr.findall('td')
if(len(tds) > 2):
team1 = tds[0].find('a').text
team2 = tds[1].text
result = ""
if ( len(tds) > 3 ):
result = tds[3].text
if ( team1 not in teams ):
continue
org1 = teams[team1]
org2 = None
if ( team2 in teams ):
org2 = teams[team2]
schedule.append((org1, org2, result))
return schedule
|
khchine5/book | docs/dev/newbies/4.py | Python | bsd-2-clause | 280 | 0.017857 | # generator functions
def f1():
l = []
for i in range(1000):
l.append(i)
retur | n l
def f2():
for i in range(1000):
yield i # yield ulatama
# for i in f1():
# print i
# # to itera | te =
# for i in f2():
# print i
print f1()
print f2()
|
yajiedesign/mxnet | example/automatic-mixed-precision/amp_model_conversion.py | Python | apache-2.0 | 8,765 | 0.002738 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import logging
import argparse
import mxnet as mx
from common import modelzoo
import gluoncv
from gluoncv.model_zoo import get_model
from mxnet import amp
import numpy as np
def save_symbol(fname, sym, logger=None):
if logger is not None:
logger.info('Saving symbol into file at {}'.format(fname))
sym.save(fname, remove_amp_cast=False)
def save_params(fname, arg_params, aux_params, logger=None):
if logger is not None:
logger.info('Saving params into file at {}'.format(fname))
save_dict = {('arg:%s' % k): v.as_in_context(mx.cpu()) for k, v in arg_params.items()}
save_dict.update({('aux:%s' % k): v.as_in_context(mx.cpu()) for k, v in aux_params.items()})
mx.nd.save(fname, save_dict)
if __name__ == '__main__':
# Faster RCNN and Mask RCNN commented because of model loading issues
# https://github.com/dmlc/gluon-cv/issues/1034
gluon_models = [#'faster_rcnn_fpn_resnet50_v1b_coco',
'mobilenetv2_0.75',
'cifar_resnet56_v1',
'mobilenet0.25',
'mobilenet1.0',
#'mask_rcnn_fpn_resnet50_v1b_coco',
'simple_pose_resnet152_v1b',
'ssd_512_resnet50_v1_voc',
#'faster_rcnn_resnet50_v1b_voc',
'cifar_resnet20_v1',
'yolo3_darknet53_voc',
'resnet101_v1c',
'simple_pose_resnet18_v1b',
#'mask_rcnn_resnet50_v1b_coco',
'ssd_512_mobilenet1.0_coco',
'vgg19_bn',
#'faster_rcnn_resnet50_v1b_coco',
'cifar_resnet110_v1',
'yolo3_mobilenet1.0_voc',
'cifar_resnext29_16x64d',
'resnet34_v1',
'densenet121',
#'mask_rcnn_fpn_resnet101_v1d_coco',
'vgg13_bn',
'vgg19',
'resnet152_v1d',
'resnet152_v1s',
'densenet201',
'alexnet',
'se_resnext50_32x4d',
'resnet50_v1d_0.86',
'resnet18_v1b_0.89',
'yolo3_darknet53_coco',
'resnet152_v1',
'resnext101_64x4d',
'vgg13',
'resnet101_v1d_0.76',
'simple_pose_resnet50_v1d',
'senet_154',
'resnet50_v1',
'se_resnext101_32x4d',
'fcn_resnet101_voc',
'resnet152_v2',
#'mask_rcnn_resnet101_v1d_coco',
'squeezenet1.1',
'mobilenet0.5',
'resnet34_v2',
'resnet18_v1',
'resnet152_v1b',
'resnet101_v2',
'cifar_resnet56_v2',
'ssd_512_resnet101_v2_voc',
'resnet50_v1d_0.37',
'mobilenetv2_0.5',
#'faster_rcnn_fpn_bn_resnet50_v1b_coco',
'resnet50_v1c',
'densenet161',
'simple_pose_resnet50_v1b',
'resnet18_v1b',
'darknet53',
'fcn_resnet50_ade',
'cifar_wideresnet28_10',
'simple_pose_resnet101_v1d',
'vgg16',
'ssd_512_resnet50_v1_coco',
'resnet101_v1d_0.73',
'squeezenet1.0',
'resnet50_v1b',
#'faster_rcnn_resnet101_v1d_coco',
'ssd_512_mobilenet1.0_voc',
'cifar_wideresnet40_8',
'cifar_wideresnet16_10',
'cifar_resnet110_v2',
'resnet101_v1s',
'mobilenetv2_0.25',
'resnet152_v1c',
'se_resnext101_64x4d',
#'faster_rcnn_fpn_resnet101_v1d_coco',
'resnet50_v1d',
'densenet169',
'resnet34_v1b',
'resnext50_32x4d',
'resnet101_v1',
'resnet101_v1b',
'resnet50_v1s',
'mobilenet0.75',
'cifar_resnet20_v2',
'resnet101_v1d',
'vgg11_bn',
'resnet18_v2',
'vgg11',
'simple_pose_resnet101_v1b',
'resnext101_32x4d',
'resnet50_v2',
'vgg16_bn',
'mobilenetv2_1.0',
'resnet50_v1d_0.48',
'resnet50_v1d_0.11',
'fcn_resnet101_ade',
'simple_pose_resnet152_v1d',
'yolo3_mobilenet1.0_coco',
'fcn_resnet101_coco']
# TODO(anisub): add support for other models from gluoncv
# Not supported today mostly because of broken net.forward calls
segmentation_models = ['deeplab_resnet50_ade',
'psp_resnet101_voc',
'deeplab_resnet152_voc',
'deeplab_resnet101_ade',
'deeplab_resnet152_coco',
'psp_resnet101_ade',
'deeplab_resnet101_coco',
'psp_resnet101_citys',
'psp_resnet50_ade',
'psp_resnet101_coco',
'deeplab_resnet101_voc']
calib_ssd_models = ["ssd_512_vgg16_atrous_voc",
"ssd_300_vgg16_atrous_voc",
"ssd_300_vgg16_atrous_coco"]
calib_inception_models = ["inceptionv3"]
gluon_models = gluon_models + segmentation_models + \
calib_ssd_models + calib_inception_models
models = gluon_models
parser = argparse.ArgumentParser(description='Convert a provided FP32 model to a mixed precision model')
parser.add_argument('--model', type=str, choices=models)
parser.add_argument('--run-dummy-inference', action='store_true', default=False,
help='Will generate random input of shape (1, 3, | 224, 224) '
'and run a dummy inference forward pass')
parser.add_argument('--cast-optional-params', action='store_true', default=False,
help='If enabled, will try to cast params to target dtype wherever possible')
args = parser.parse_args()
logging.basicConfig()
logger = logging.getLogger('logger')
| logger.setLevel(logging.INFO)
assert args.model in gluon_models, "Please choose one of the available gluon models: {}".format(gluon_models)
shape = None
if args.model in segmentation_models:
shape = (1, 3, 480, 480)
elif args.model in calib_ssd_models:
shape = (1, 3, 512, 544)
elif args.model in calib_inception_models:
shape = (1, 3, 299, 299)
else:
shape = (1, 3, 224, 224)
net = gluoncv.model_zoo.get_model(args.model, pretrained=True)
net.hybridize()
result_before1 = net.forward(mx.nd.random.uniform(shape=shape))
|
Zlash65/erpnext | erpnext/patches/v11_0/move_item_defaults_to_child_table_for_multicompany.py | Python | gpl-3.0 | 3,389 | 0.027147 | # Copyright (c) 2018, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
'''
Fields to move from the item to item defaults child table
[ default_warehouse, buying_cost_center, expense_account, selling_cost_center, income_account ]
'''
if not frappe.db.has_column('Item', 'default_warehouse'):
return
frappe.reload_doc('stock', 'doctype', 'item_default')
frappe.reload_doc('stock', 'doctype', 'item')
companies = frappe.get_all("Company")
if len(companies) == 1 and not frappe.get_all("Item Default", limit=1):
try:
frappe.db.sql('''
INSERT INTO `tabItem Default`
(name, parent, parenttype, parentfield, idx, company, default_warehouse,
buying_cost_center, selling_cost_center, expense_account, income_account, default_supplier)
SELECT
SUBSTRING(SHA2(name,224), 1, 10) as name, name as parent, 'Item' as parenttype,
'item_defaults' as parentfield, 1 as idx, %s as company, default_warehouse,
buying_cost_center, selling_cost_center, expense_account, income_account, default_supplier
FROM `tabItem`;
''', companies[0].name)
except:
pass
else:
item_details = frappe.db.sql(""" SELECT name, default_warehouse,
buying_cost_center, expense_account, selling_cost_center, income_account
FROM tabItem
WHERE
name not in (select distinct parent from `tabItem Default`) and ifnull(disabled, 0) = 0"""
, as_dict=1)
items_default_data = {}
for item_data in item_details:
for d in [["default_warehouse", "Warehouse"], ["expense_account", "Account"],
["income_account", "Account"], ["buying_cost_center", "Cost Center"],
["selling_cost_center", "Cost Center"]]:
if item_data.get(d[0]):
company = frappe.get_value(d[1], item_data.get(d[0]), "company", cache=True)
if item_data.name not in items_default_data:
items_default_data[item_data.name] = {}
company_wise_data = items_default_data[item_data.name]
if company not in company_wise_data:
company_wise_data[company] = | {}
default_data = company_wise_data[company]
default_data[d[0]] = item_data.get(d[0])
to_insert_data = []
# items_default_data data structure will be as follow
# {
# 'item_code 1': {'company 1': {'default_warehouse': 'Test Warehouse 1'} | },
# 'item_code 2': {
# 'company 1': {'default_warehouse': 'Test Warehouse 1'},
# 'company 2': {'default_warehouse': 'Test Warehouse 1'}
# }
# }
for item_code, companywise_item_data in items_default_data.items():
for company, item_default_data in companywise_item_data.items():
to_insert_data.append((
frappe.generate_hash("", 10),
item_code,
'Item',
'item_defaults',
company,
item_default_data.get('default_warehouse'),
item_default_data.get('expense_account'),
item_default_data.get('income_account'),
item_default_data.get('buying_cost_center'),
item_default_data.get('selling_cost_center'),
))
if to_insert_data:
frappe.db.sql('''
INSERT INTO `tabItem Default`
(
`name`, `parent`, `parenttype`, `parentfield`, `company`, `default_warehouse`,
`expense_account`, `income_account`, `buying_cost_center`, `selling_cost_center`
)
VALUES {}
'''.format(', '.join(['%s'] * len(to_insert_data))), tuple(to_insert_data)) |
loads/loads-broker | loadsbroker/tests/test_db.py | Python | apache-2.0 | 908 | 0 | import unittest
from loadsbroker.db import Project, Plan, Step, Database
class DatabaseTest(unittest.TestCase):
def setUp(self):
self.db = Database('sqlite:///:memory:')
def test_project(self):
session = self.db. | session()
| # a project is defined by a name, a repo and strategies
project = Project(
name='simplepush',
home_page='https://services.mozilla.com')
session.add(project)
plan = Plan(name='s1', enabled=True)
project.plans.append(plan)
# Attach a container set to the strategy
cset = Step(
name="Awesome load-tester",
instance_type="t2.micro",
instance_count=5,
container_name="bbangert/simpletest:latest",
additional_command_args="--target=svc.dev.mozilla.com"
)
plan.steps.append(cset)
session.commit()
|
gotlium/WebPlayer2LocalPlayer | setup.py | Python | gpl-2.0 | 929 | 0 | from setuptools import setup
from glob import glob
from WebPlayer2LocalPlayer import __version__
APP = ['WebPlayer2LocalPlayer.py']
DATA_FILES = [
('images', glob('images/*.png')),
]
OPTIONS = {
'argv_emulation': True,
'includes': [
'sip',
'PyQt5', 'PyQt5.QtGui', 'PyQt5.QtPrintSupport',
'PyQt5.QtCore', 'PyQt5.QtWebKitWidgets',
'PyQt5.QtWidgets', 'PyQt5.QtNetwork', 'PyQt5.QtWebKit',
],
'semi_standalone': 'False',
'compressed': True,
"optimize": 2,
"iconfile": | 'images/app_icon.icns',
"qt_plugins": ["imageformats", "platforms"],
"plist": dict(
LSMinimumSystemVersion='10.8.0',
LSEnvironment=dict(
PATH='./../Resources:/usr/local/bin:/usr/bin:/bin'
)
)
}
setup(
name="WP2LP",
version=__version__,
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS | },
setup_requires=['py2app'],
)
|
msimacek/koschei | koschei/locks.py | Python | gpl-2.0 | 3,496 | 0.000572 | # Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Author: Michael Simacek <msimacek@redhat.com>
from contextlib import contextmanager
from sqlalchemy.sql import func
LOCK_REPO_RESOLVER = 1
LOCK_BUILD_RESOLVER = 2
class Locked(Exception):
pass
def pg_lock(db, namespace, key, block=True, transaction=False, shared=False):
"""
Lock an arbitrary resource identified by an integer key using PostgreSQL
advisory lock.
:param: db Database session
:param: namespace Integer namespace identifier. Should use one of the constants
defined in this module.
:param: key Integer identifier of the lock.
:param: block Whether to block waiting for the lock.
:param: transaction Whether the lock should be scoped by the transaction
(unlocks when the transaction ends). Otherwise scoped by the
session.
:param: shared Whether the lock should be shared. Otherwise it is
exclusive.
:raises: Locked if in non blocking mode and failed to obtain the lock.
Exact semantics are described in PostgreSQL documentation:
https://www.postgresql.org/docs/9.2/static/explicit-locking.html#ADVISORY-LOCKS
https://www.postgresql.org/docs/9.2/static/functions-admin.html#FUNCTIONS-ADVISORY-LOCKS
"""
fn_name = 'pg_'
if not block:
fn_name += 'try_'
fn_name += 'advisory_'
if transaction:
fn_name += 'xact_'
fn_name += 'lock'
if shared:
fn_name += '_shared'
function = getattr(func, fn_name)
res = db.query(function(namespace, key)).scalar()
if not block and not res:
raise Locked()
def pg_unlock(db, namespace, key, shared=False, ignore_exceptions=False):
"""
Unlocks given advisory session lock. Arguments have the same meaning as in pg_lock
"""
try:
fn_name = 'pg_advisory_unlock'
if shared:
fn_name += '_shared'
function = getattr(func, fn_name)
db.query(function(namespace, key)).one()
except Exception as e:
| try:
db.close_connection()
except Exception:
pass
if not ignore_exceptions:
raise e
def pg_unlock_all(db):
"""
Unlocks advisory session locks.
"""
db.query(func.pg_advisory_unlock_all()).one()
@contextmanager
def pg_session_lock(db, namespace, key, block=True, shared=False):
"""
| Context manager for obtaining a session lock.
With block=True (default) blocks until the resource is locked.
"""
pg_lock(db, namespace, key, block=block, shared=shared)
try:
yield
pg_unlock(db, namespace, key, shared=shared, ignore_exceptions=False)
except Exception as e:
pg_unlock(db, namespace, key, shared=shared, ignore_exceptions=True)
raise e
|
t-mertz/slurmCompanion | django-web/sshcomm/migrations/0001_initial.py | Python | mit | 1,536 | 0.003255 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-06 12:34
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.for | ms.widgets
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='RemoteServer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=Fals | e, verbose_name='ID')),
('server_url', models.CharField(max_length=50)),
('server_name', models.CharField(max_length=20)),
('date_added', models.DateField()),
],
),
migrations.CreateModel(
name='UserData',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user_id', models.IntegerField()),
('user_name', models.CharField(max_length=20)),
('user_password', models.CharField(max_length=20, verbose_name=django.forms.widgets.PasswordInput)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('server', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sshcomm.RemoteServer')),
],
),
]
|
bostonlink/nwmaltego | nw_phrase_2_threat.py | Python | gpl-3.0 | 1,962 | 0.003568 | #!/usr/bin/env python
# Copyright (C) 2012 nwmaltego Developer.
# This file is part of nwmaltego - https://github.com/bostonlink/nwmaltego
# See the file 'LICENSE' for copying permission.
# Maltego Phrase to NW Threat
# Author: David Bressler (@bostonlink)
import sys
import urllib2, urllib, json
from datetime import datetime, timedelta
from lib import nwmodule
# Maltego XML Header
trans_header = """<MaltegoMessage>
<MaltegoTransformResponseMessage>
<Entities>"""
# BASIC HTTP Authentication to NWD
nwmodule.nw_http_auth()
# NW REST API Query amd results
risk_phrase = sys.argv[1]
date_t = datetime.today()
tdelta = timedelta(days=1)
diff = date_t - tdelta
diff = "'" + diff.strftime('%Y-%b-%d %H:%M:%S') + "'-'" + date_t.strftime('%Y-%b-%d %H:%M:%S') + "'"
threat_ip_dst = 'sel | ect risk.warning where (time=%s) && risk.warning contains %s' % (diff, risk_phrase)
json_data = json.loads(nwmodule.nwQuery(0, 0, threat_ip_dst, 'application/json', 25))
ip_list = []
print trans_header
for d in json_data['results']['fields']:
value = d['value'].decode('ascii')
if value in ip_list:
conti | nue
else:
# Kind of a hack but hey it works!
print """ <Entity Type="netwitness.NWThreatNOIP">
<Value>%s</Value>
<AdditionalFields>
<Field Name="phrase" DisplayName="Phrase">%s</Field>
<Field Name="metaid1" DisplayName="Meta id1">%s</Field>
<Field Name="metaid2" DisplayName="Meta id2">%s</Field>
<Field Name="type" DisplayName="Type">%s</Field>
<Field Name="count" DisplayName="Count">%s</Field>
</AdditionalFields>
</Entity>""" % (value, risk_phrase, d['id1'], d['id2'], d['type'], d['count'])
ip_list.append(value)
# Maltego transform XML footer
trans_footer = """ </Entities>
</MaltegoTransformResponseMessage>
</MaltegoMessage> """
print trans_footer |
garybake/microbake | fruity_notes.py | Python | mit | 547 | 0.001828 | from microbit im | port *
import music
# Plays a different note when pressing different fruits
# Put a buzzer between pin0 and Gnd
# Attach a fruit to Pin1 and Pin2
# Have a loose wire from Gnd
# Hold the Gnd wire
# Press your other hand on the fruits to hear notes
note_low = ["C4:4"]
note_high = ["C5:4"]
while True:
if pin1.is_touched():
music.play(note_low)
display.show(Image.HAPPY)
elif pin2.is_touched( | ):
music.play(note_high)
display.show(Image.SURPRISED)
else:
display.show(Image.SAD) |
fsfrazao/Scarab-IBM | sim2.py | Python | gpl-2.0 | 9,111 | 0.035122 | '''
This program uses Scarab_IBM to implement Simulation 1.
Copyright (C) 2015 Fabio Soares Frazao
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from sys import argv
from scarab_classes import *
from graphics import *
from time import sleep
import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
def load_spp_par(filename,world):
#sp_par=[]
with open(filename,'r') as f:
for line in f:
sp_par=eval(line)
distribute_beetles(sp_par,world)
def distribute_beetles(sp_par,world):
areas=world.grid.areas
n=sp_par['n']
args=sp_par['args']
args['sex']=np.random.choice(["M","F"])
habitat_probs=args['habitat_probs']
h_max=max(habitat_probs.values())
hp=0
for k in habitat_probs:
if habitat_probs[k]==h_max:
hp=k
habitat_area=[]
for a in areas.values():
if a['habitat']==hp:
habitat_area+=a['area']
p=world.random_positions(n=n,area=habitat_area)
world.create_agents(agent_type=Beetle, n=n, pos=p, **args)
grid_size=(200,200)
#traps_file='/home/fabio/Desktop/monografia/spillover/sampling-2'
#run=1
#days=2
#sampling_design=traps_file.split('/')
#sampling_design=sampling_design[len(sampling_design)-1]
#habitat_layer=load_habitats('/home/fabio/Desktop/monografia/spillover/land-2')
surface=np.zeros((2,grid_size[0],grid_size[1]))
#fragment1=[k for k in habitat_layer.keys() if habitat_layer[k]==54]
#fragment2=[k for k in habitat_layer.keys() if habitat_layer[k]==84]
#matrix=[k for k in habitat_layer.keys() if habitat_layer[k]==0]
areas={}
areas[0]={'habitat':1, 'area':[(x,y) for x in xrange(grid_size[0]) for y in xrange(grid_size[1])]}
#areas[1]={'habitat':1,'area':fragment1}
#areas[2]={'habitat':1,'area':fragment2}
#areas[3]={'habitat':0,'area':matrix}
habitat_array=fill_habitat_array(areas, size=(200,200))
#surface[0]=habitat_array
grid=Rectangular_Grid(x_max=200,y_max=200,areas=areas,ndim=2,dim_names=("Habitat","Cues"))
grid.load_surface(surface,('Habitat','Cues'))
world=World(grid,steps_in_day=1)
#trap_args={'grid':grid,'world':world,
#'dim':'Cues','reach': 8,'radius':5.0}
#trap_pos=load_trap_pos(traps_file)
#trap_pos=[(100,100)]
#world.create_agents(agent_type=Trap, n=len(trap_pos), pos=trap_pos, **trap_args)
#dung_pos=world.random_positions(n=5,area=areas[1]['area'])
#dung_pos+=world.random_positions(n=5,area=areas[2]['area'])
#dung_pos+=world.random_positions(n=15,area=areas[3]['area'])
dung_pos=world.random_positions(n=15,area=areas[0]['area'])
#dung_pos=[(100,100),(50,50),(100,90),(38,50)]
dung_args={'grid':grid,'world':world,
'dim':'Cues','reach': 10,'radius':5.0,'amount':2,'max_age':5}
world.create_agents(agent_type=Dung, n=len(dung_pos), pos=dung_pos, **dung_args)
#load_spp_par('/home/fabio/Desktop/monografia/spillover/spp_par',world)
active_days=range(0,180)+range(366,366+180)
#active_days=range(180,730)
breeding_days=range(90,180)+range(366+90,366+180)
sp1_args={'grid':grid,'sp':'sp.1','sex':'F',
'world':world,'habitat_probs':{0:0.999,1:0.001},
'dist_par':{'mean':5,'sd':1,'max_dist':3000},
'energy_par':{'hungry':15, 'max_energy':30,'initial_energy':25,
'rest':0.02, 'move':0.5, 'breed':4}, 'min_age':60,'age':0, 'max_age':1000,
'activity':{0:False,1:True,2:True,3:True,4:True,5:True,6:True,
7:True,8:True,9:True,10:True,11:True,12:True,13:True,14:True,
15:True,16:True,17:True,18:True,19:True,20:True,21:False,22:False,
23:False,24:False},'active_days':active_days,'breeding_days':breeding_days, 'perception_radius':5}
p=world.random_positions(n=100,area=areas[0]['area'])
#p=[(93,93),(100,95),(40,50),(70,50)]
world.create_agents(agent_type=Beetle, n=len(p), pos=p, **sp1_args)
for b in Beetle.Instances.values():
b.sex=np.random.choice(["F","M"])
b.age=np.random.randint(365)
b.energy=np.random.randint(10,30)
#b1=Beetle.Instances.values()[0]
#p=b1.grid.circle(center=b1.what_cell(b1.position),radius=b1.perception_radius)
#partner=b1.suitable_partner(p)
#b1.breed(partner)
N=[]
F1=[]
F2=[]
#####GUI#####
def main():
win = GraphWin("Sim 2",grid.x_max, grid.y_max)
for x in xrange(grid.x_max):
for y in xrange(grid.y_max):
if grid.surface[0][x,y]==1:
win.plot(x,y,"green")
for t in Trap.Instances.values():
c = Circle(Point(t.position[0],t.position[1]), 1)
c.setFill('black')
c.draw(win)
'''
Dung_dots={}
for d in Dung.Instances.values():
c = Circle(Point(d.position[0],d.position[1]), 1)
c.setFill('red')
c.setOutline('red')
c.draw(win)
Dung_dots[d.id]=c
'''
Beetle_dots={}
for b in Beetle.Instances.values():
c = Circle(Point(int(b.position[0]),int(b.position[1])), 1)
c.setFill('blue')
c.setOutline('blue')
c.draw(win)
#print b.position
#c.move_to(b.position[0],b.position[1])
Beetle_dots[b.id]=c
#world.step=25
Dung_dots={}
for i in xrange(365*2):
print world.day, Beetle.PopulationSize(), world.report_functions()[0].values()[0]
#dung_pos=world.random_positions(n=1,area=areas[1]['area'])
#dung_pos+=world.random_positions(n=1,area=areas[2]['area'])
#dung_pos+=world.random_positions(n=5,area=areas[3]['area'])
dung_pos=world.random_positions(n=3,area=areas[0]['area'])
dung_args={'grid':grid,'world':world,
'dim':'Cues','reach': 10,'radius':5.0,'amount':1.8,'max_age':7}
world.create_agents(agent_type=Dung, n=len(dung_pos), pos=dung_pos, **dung_args)
for d in Dung.Instances.values():
if d.id not in Dung_dots.keys():
c = Circle(Point(d.position[0],d.position[1]), 1)
c.setFill('red')
c.setOutline('red')
c.draw(win)
Dung_dots[d.id]=c
for b in Beetle.Instances.values():
if b.id not in Beetle_dots.keys():
c = Circle(Point(d.position[0],d.position[1]), 1)
c.setFill('blue')
c.setOutline('blue')
c.draw(win)
Beetle_dots[b.id]=c
shuffled_instances=Beetle.Instances.values()
np.random.shuffle(shuffled_instances)
for b in shuffled_instances:
#b.orientation=angle_cells(b.position,(100,100))%360
#b.move(1, angle=b.orientation)
#sleep(.05)
b.action()
#print b.id, b.position, b.energy
#sleep(0.5)
#print b.energy
#b.wiggle(1,sd=80)
if b.age<b.min_age:
Beetle_dots[b.id].setFill('yellow')
Beetle_dots[b.id].setOutline('yellow')
else:
Beetle_dots[b.id].setFill('blue')
Beetle_dots[b.id].setOutline('blue')
Beetle_dots[b.id].move_to(int(b.position[0]),int(b.position[1]))
for bd in Beetle_dots.keys():
if bd not in Beetle.Instances. | keys(): Beetle_dots[bd].undraw()
for dd in Dung_dots.keys():
if dd not in Dung.Instances.keys(): Dung_dots[dd].undraw()
n=Beetle.PopulationSize()
N.append(n)
F1 | .append(world.r |
FOSSRIT/PyCut | game/objects/pizza.py | Python | mpl-2.0 | 13,934 | 0.007751 | import pygame
from pygame import gfxdraw
from .rangable import Rangable
import random
class Pizza(Rangable):
"""docstring for Pizza"""
def __init__(self, context):
Rangable.__init__(self)
self.context = context
self.pizza = self.context.plain_pizza
self.trashed = False
self.perfected = False
self.trashing = False
self.trash_can = None
self.trash_pos = None
self.slices = None
self.offset = random.randint(0,4)
self.color=(0,0,0)
self.x = 100
self.y = 400 # 5=> margin between top and pizza
self.location = (self.x,self.y)
self.width = 150
self.height = 150
self.toppings = [0, 0, 0, 0]
self.requirements = []
self.potentalClues = []
sel | f.drawing = None
self.draw()
"""
update the button drawing surface.
"""
def draw(self):
surf = pygame.Surface((self.width, self.height), pygame.SRCALPHA)
pizza_img = pygame.transform.scale(self.context.plain_pizza, (self.width, self.height))
surf.blit(pizza_img, (0,0))
for i in range(0, len(self.toppings)):
if self.toppings[i] > 0:
self.drawTopping(surf, i, | 0)
#gfxdraw.filled_ellipse(surf, self.width//2,self.height//2, self.width/2, self.height/2, (219,162,74))#pizza
#pygame.draw.arc(surf, (225,216,0), [0, 0, self.width, self.height], 0, 360, 2)#crust
#draw slices on here afterwards
self.drawing = surf
self.dirty = False
"""
draw on a surface
"""
def drawOn(self, screen=None):
S = 8 #speed towards trash can
A = 9.8 #acceleration towards trash can
if self.trashing:
if self.touches(self.trash_can):
self.trashed = True
self.trashing = False
else:
self.setLocation(self.trash_pos[0] + 50, self.y + ((S)*A) )
if screen:
if self.dirty:
self.draw()
screen.blit(self.drawing, self.location)
else:
print("Error: drawOn was called on Button object but no screen argument was passed")
"""
return topping drawing
"""
def drawTopping(self, surf, i, pad=0):
#needs serious refactoring
topping_img = pygame.transform.scale(self.context.game_toppings[i], (self.width/4, self.height/4))
if self.context.difficulty == "Advanced":
amount = self.context.fractions[self.toppings[i]]
else:
amount = self.toppings[i]
#center portion
surf.blit(topping_img, ( (surf.get_width()/2) - (topping_img.get_width()/2), (surf.get_height()/2) - (topping_img.get_height()/2)))
#top portion
w,h = (surf.get_width()/6) + pad, surf.get_height()/6
if amount > 0:
surf.blit( pygame.transform.rotate(topping_img, 45), ( w, h ))
if amount > 0.25:
surf.blit( pygame.transform.rotate(topping_img, 45), ( 3*w , h ))
#bottom portion
if amount > 0.5:
surf.blit( pygame.transform.rotate(topping_img, 45), ( w, 3*h ))
if amount > 0.75:
surf.blit( pygame.transform.rotate(topping_img, 45), ( 3*w , 3*h ))
return surf
"""
draw on a surface
"""
def moveToTrash(self, trash_pos=None, trash_can=None):
if not(self.trashing or self.trashed):
if trash_pos and trash_can:
self.trash_pos = trash_pos
self.trash_can = pygame.Rect((trash_pos[0], trash_pos[1]+self.height), (trash_can.get_width(), trash_can.get_height()))
self.trashing = True
self.setLocation(trash_pos[0] + 50, 200)
else:
print("Error: expected a trash_pos, trash_can got {}, {}".format(trash_pos, trash_can))
"""
Add topping
"""
def addTopping(self, index):
if self.toppings[index] == 0:
self.toppings[index] = 1
else:
self.toppings[index] = 0
self.dirty = True
"""
Change Topping
"""
def changeTopping(self, index, amount):
self.toppings[index] = amount
self.dirty = True
"""
set Costumer hidden Pizza requirements
"""
def setRequirements(self, requirements):
self.requirements = requirements
"""
Checks if Pizza meets customer requirements.
Currently only support topping requirements
returns a tuple, boolean indicating whether it met the requirement
or not. (Boolean, Message)
"""
def checkRequirements(self):
if self.context.difficulty == "Easy":
message = []
metRequirement = False
notwanted = 0
missing = 0
for i in range(0, len(self.toppings)):
if self.toppings[i] > 0 and self.requirements[i] == 0:
notwanted += 1
elif self.toppings[i] == 0 and self.requirements[i] > 0:
missing += 1
if missing > 0:
message += ["There aren't enough toppings on the pizza. :(".format(notwanted)]
elif missing < 0:
message += ["There are more toppings on the pizza than I wanted. :(".format(notwanted)]
if notwanted > 0:
message += ["There {} {} {} on the pizza I don't like. :(".format(
'is' if notwanted == 1 else 'are', notwanted, 'topping' if notwanted == 1 else 'toppings'
)]
if not(notwanted) and missing == 0:
metRequirement = True
message += ["Thank you, that was the perfect pizza I was looking for! :)\n"]
return (metRequirement, message)
elif self.context.difficulty == "Advanced":
metRequirement = True
messages = []
names = ["Cheese", "Pepperoni", "Mushroom", "Pineapple"]
# calculate full pizza requirements
totalRequirements = [0 for i in range(0, len(self.toppings))]
for arr in self.requirements:
for i in range(0, len(arr)):
totalRequirements[i] += arr[i]
# check if pizza matches requirements
for i in range(0, len(self.toppings)):
topping = self.context.fractions[self.toppings[i]]
if topping > totalRequirements[i] or topping < totalRequirements[i]:
metRequirement = False
# set up person-specific messages
for personPreference in self.requirements:
message = []
notwanted = 0
missing = 0
for i in range(0, len(self.toppings)):
toppingAmount = self.context.fractions[self.toppings[i]]
if personPreference[i] == 0 and toppingAmount > totalRequirements[i]:
notwanted += 1
elif personPreference[i] > 0 and toppingAmount < totalRequirements[i]:
missing += 1
if notwanted == 1:
message += ["I want less of one topping"]
elif notwanted > 1:
message += ["I want less of {} toppings".format(notwanted)]
if missing == 1:
message += ["I want more of one topping"]
elif missing > 1:
message += ["I want more of {} toppings".format(missing)]
messages.append(message)
# Unique person messages
personSpecificMessages = []
# Wrong / correct pizza
if metRequirement:
personSpecificMessages.append(["The is the correct pizza!"])
else:
personSpecificMessages.append(["This is not the pizza I want."])
# Gather some potental 'simple' clues
potentialCluesMuch = []
potentialCluesLittle = []
for i in range(0, len(self.toppings)):
|
jamwt/diesel-pmxbot | pmxbot/karma.py | Python | bsd-3-clause | 9,256 | 0.029602 | # vim:ts=4:sw=4:noexpandtab
from __future__ import print_function, absolute_import
import itertools
import re
import random
import pmxbot
from . import storage
from .core import command
class SameName(ValueError): pass
class AlreadyLinked(ValueError): pass
class Karma(storage.SelectableStorage):
@classmethod
def initialize(cls):
cls.store = cls.from_URI(pmxbot.config.database)
cls._finalizers.append(cls.finalize)
@classmethod
def finalize(cls):
del cls.store
class SQLiteKarma(Karma, storage.SQLiteStorage):
    """SQLite-backed karma store.

    ``karma_values`` holds one row per karma identity and ``karma_keys``
    maps one or more names onto that identity, so linked names share a
    single value.  ``karma_log`` is created but not written here.
    """

    def init_tables(self):
        """Create the backing tables if they do not already exist."""
        CREATE_KARMA_VALUES_TABLE = '''
        CREATE TABLE IF NOT EXISTS karma_values (karmaid INTEGER NOT NULL, karmavalue INTEGER, primary key (karmaid))
        '''
        CREATE_KARMA_KEYS_TABLE = '''
        CREATE TABLE IF NOT EXISTS karma_keys (karmakey varchar, karmaid INTEGER, primary key (karmakey))
        '''
        CREATE_KARMA_LOG_TABLE = '''
        CREATE TABLE IF NOT EXISTS karma_log (karmakey varchar, logid INTEGER, change INTEGER)
        '''
        self.db.execute(CREATE_KARMA_VALUES_TABLE)
        self.db.execute(CREATE_KARMA_KEYS_TABLE)
        self.db.execute(CREATE_KARMA_LOG_TABLE)
        self.db.commit()

    def lookup(self, thing):
        """Return the karma value for ``thing`` (0 if unknown)."""
        thing = thing.strip().lower()
        LOOKUP_SQL = 'SELECT karmavalue from karma_keys k join karma_values v on k.karmaid = v.karmaid where k.karmakey = ?'
        try:
            karma = self.db.execute(LOOKUP_SQL, [thing]).fetchone()[0]
        except TypeError:
            # fetchone() returned None: no such key.  (Previously a bare
            # except, which also hid real database errors.)
            karma = 0
        if karma is None:
            karma = 0
        return karma

    def set(self, thing, value):
        """Set the karma for ``thing`` to ``value``, creating it if needed."""
        thing = thing.strip().lower()
        value = int(value)
        UPDATE_SQL = 'UPDATE karma_values SET karmavalue = ? where karmaid = (select karmaid from karma_keys where karmakey = ?)'
        res = self.db.execute(UPDATE_SQL, (value, thing))
        if res.rowcount == 0:
            self._insert_new(thing, value)
        self.db.commit()

    def change(self, thing, change):
        """Add ``change`` to the karma for ``thing``, creating it if needed."""
        thing = thing.strip().lower()
        value = int(self.lookup(thing)) + int(change)
        UPDATE_SQL = 'UPDATE karma_values SET karmavalue = ? where karmaid = (select karmaid from karma_keys where karmakey = ?)'
        res = self.db.execute(UPDATE_SQL, (value, thing))
        if res.rowcount == 0:
            self._insert_new(thing, value)
        self.db.commit()

    def _insert_new(self, thing, value):
        # Create a fresh identity row, then point the key at it.  Shared by
        # set() and change(), which previously duplicated this logic.
        INSERT_VALUE_SQL = 'INSERT INTO karma_values (karmavalue) VALUES (?)'
        INSERT_KEY_SQL = 'INSERT INTO karma_keys (karmakey, karmaid) VALUES (?, ?)'
        ins = self.db.execute(INSERT_VALUE_SQL, [value])
        self.db.execute(INSERT_KEY_SQL, (thing, ins.lastrowid))

    def list(self, select=0):
        """Return [(names, value), ...] sorted by value, descending.

        ``select`` > 0 returns the top N, < 0 the bottom N, 0 everything.
        """
        KARMIC_VALUES_SQL = 'SELECT karmaid, karmavalue from karma_values order by karmavalue desc'
        KARMA_KEYS_SQL = 'SELECT karmakey from karma_keys where karmaid = ?'
        karmalist = self.db.execute(KARMIC_VALUES_SQL).fetchall()
        karmalist.sort(key=lambda x: int(x[1]), reverse=True)
        if select > 0:
            selected = karmalist[:select]
        elif select < 0:
            selected = karmalist[select:]
        else:
            selected = karmalist
        keysandkarma = []
        for karmaid, value in selected:
            keys = [x[0] for x in self.db.execute(KARMA_KEYS_SQL, [karmaid])]
            keysandkarma.append((keys, value))
        return keysandkarma

    def link(self, thing1, thing2):
        """Merge the karma of ``thing2`` into ``thing1`` so both names share
        one value.

        Raises SameName, KeyError (unknown name) or AlreadyLinked.
        """
        if thing1 == thing2:
            raise SameName("Attempted to link two of the same name")
        GET_KARMAID_SQL = 'SELECT karmaid FROM karma_keys WHERE karmakey = ?'
        try:
            t1id = self.db.execute(GET_KARMAID_SQL, [thing1]).fetchone()[0]
        except TypeError:
            raise KeyError(thing1)
        t1value = self.lookup(thing1)
        try:
            t2id = self.db.execute(GET_KARMAID_SQL, [thing2]).fetchone()[0]
        except TypeError:
            raise KeyError(thing2)
        if t1id == t2id:
            raise AlreadyLinked("Those two are already linked")
        t2value = self.lookup(thing2)
        newvalue = t1value + t2value
        # update the keys so t2 points to t1s value
        self.db.execute('UPDATE karma_keys SET karmaid = ? where karmaid = ?',
                        (t1id, t2id))
        # drop the old value row for neatness
        self.db.execute('DELETE FROM karma_values WHERE karmaid = ?', (t2id,))
        # set the new combined value
        self.db.execute('UPDATE karma_values SET karmavalue = ? where karmaid = ?',
                        (newvalue, t1id))
        self.db.commit()

    def _get(self, karmaid):
        """
        Return (keys, value) for the given karma id.
        """
        VALUE_SQL = "SELECT karmavalue from karma_values where karmaid = ?"
        KEYS_SQL = "SELECT karmakey from karma_keys where karmaid = ?"
        value = self.db.execute(VALUE_SQL, [karmaid]).fetchall()[0][0]
        keys_cur = self.db.execute(KEYS_SQL, [karmaid]).fetchall()
        keys = sorted(x[0] for x in keys_cur)
        return keys, value

    def search(self, term):
        """Yield (names, value) pairs for every identity whose key contains
        ``term``."""
        query = "SELECT distinct karmaid from karma_keys where karmakey like ?"
        # BUG FIXES: parameters must be passed as a sequence (a bare string
        # binds one parameter per character), and the per-id helper defined
        # on this class is _get, not _lookup.
        matches = (karmaid for (karmaid,) in self.db.execute(query, ['%' + term + '%']))
        return (self._get(karmaid) for karmaid in matches)

    def export_all(self):
        """Export every record as (names, value) pairs."""
        return self.list()
class MongoDBKarma(Karma, storage.MongoDBStorage):
    """MongoDB-backed karma store.

    Each document is ``{'names': [...], 'value': int}``; ``link`` merges
    documents so several names can share one value.
    """
    collection_name = 'karma'

    def lookup(self, thing):
        """Return the karma value for ``thing`` (0 if unknown)."""
        thing = thing.strip().lower()
        res = self.db.find_one({'names': thing})
        return res['value'] if res else 0

    def set(self, thing, value):
        """Set the karma for ``thing`` to ``value``, creating it if needed."""
        thing = thing.strip().lower()
        value = int(value)
        query = {'names': {'$in': [thing]}}
        oper = {'$set': {'value': value}, '$addToSet': {'names': thing}}
        self.db.update(query, oper, upsert=True)

    def change(self, thing, change):
        """Add ``change`` to the karma for ``thing``, creating it if needed."""
        thing = thing.strip().lower()
        change = int(change)
        query = {'names': {'$in': [thing]}}
        oper = {'$inc': {'value': change}, '$addToSet': {'names': thing}}
        self.db.update(query, oper, upsert=True)

    def list(self, select=0):
        """Return [(names, value), ...] sorted by value, descending.

        ``select`` > 0 returns the top N, < 0 the bottom N, 0 everything.
        """
        res = list(self.db.find().sort('value', storage.pymongo.DESCENDING))
        if select > 0:
            selected = res[:select]
        elif select < 0:
            selected = res[select:]
        else:
            selected = res
        aslist = lambda val: val if isinstance(val, list) else [val]
        return [
            (aslist(rec['names']), rec['value'])
            for rec in selected
        ]

    def link(self, thing1, thing2):
        """Merge the karma of ``thing2`` into ``thing1`` so both names share
        one value.

        Raises SameName, KeyError (unknown name) or AlreadyLinked.
        """
        thing1 = thing1.strip().lower()
        thing2 = thing2.strip().lower()
        if thing1 == thing2:
            raise SameName("Attempted to link two of the same name")
        rec = self.db.find_one({'names': thing2})
        # BUG FIX: the existence check must come before any access to rec;
        # previously a missing thing2 raised TypeError instead of KeyError.
        if not rec:
            raise KeyError(thing2)
        if thing1 in rec['names']:
            raise AlreadyLinked("Those two are already linked")
        try:
            query = {'names': thing1}
            update = {
                '$inc': {'value': rec['value']},
                '$pushAll': {'names': rec['names']},
            }
            # NOTE(review): if thing1 does not exist this update matches
            # nothing without raising, so KeyError(thing1) may never fire --
            # confirm against pymongo's update semantics.
            self.db.update(query, update, safe=True)
        except Exception:
            raise KeyError(thing1)
        self.db.remove(rec)

    def search(self, term):
        """Yield (names, value) pairs for documents whose names contain
        ``term``."""
        pattern = re.compile('.*' + re.escape(term) + '.*')
        return (
            (rec['names'], rec['value'])
            for rec in self.db.find({'names': pattern})
        )

    def import_(self, item):
        """Insert one exported (names, value) record."""
        names, value = item
        self.db.insert(dict(
            names=names,
            value=value,
        ))

    def _all_names(self):
        # Flatten every document's name list into one set.
        return set(itertools.chain.from_iterable(
            names
            for names, value in self.search('')
        ))

    def repair_duplicate_names(self):
        """
        Prior to 1101.1.1, pmxbot would incorrectly create new karma records
        for individuals with multiple names.

        This routine corrects those records by folding duplicates into the
        first document found for each name.
        """
        for name in self._all_names():
            cur = self.db.find({'names': name})
            main_doc = next(cur)
            for duplicate in cur:
                query = {'_id': main_doc['_id']}
                update = {
                    '$inc': {'value': duplicate['value']},
                    '$pushAll': {'names': duplicate['names']},
                }
                self.db.update(query, update, safe=True)
                self.db.remove(duplicate)
@command("karma", aliases=("k",), doc="Return or change the karma value for "
"some(one|thing)")
def karma(client, event, channel, nick, rest):
karmee = rest.strip('++').strip('--').strip('~~')
if '++' in rest:
Karma.store.change(karmee, 1)
elif '--' in rest:
Karma.store.change(karmee, -1)
elif '~~' in rest:
change = random.choice([-1, 0, 1])
Karma.store.change(karmee, change)
if change == 1:
return "%s karma++" % karmee
elif change == 0:
return "%s karma shall remain the same" % karmee
elif change == -1:
return "%s karma--" % karmee
elif '==' in rest:
t1, t2 = rest.split('==')
try:
Karma.store.link(t1, t2)
except SameName:
Karma.store.change(nick, -1)
return "Don't |
mnjy/critters | CrittersProto/generator/hummingloop_algorithm.py | Python | mit | 3,992 | 0.007766 | #####################################################################
#
# hummingloop_algorithm.py
#
# Copyright (c) 2015, Nick Benson
# Modifications by benchan
#
# Released under the MIT License (http://opensource.org/licenses/MIT)
#
#####################################################################
import random
# MIDI values
C = 60  # MIDI note number for middle C

# Chord types, expressed as semitone offsets from the root.
# The "_S" variants are wider note pools (presumably scale-like -- MAJOR_S
# is the major scale; MINOR_S's intervals are unusual, verify intent).
MAJOR_S = [0, 2, 4, 5, 7, 9, 11]
MINOR_S = [-3, -1, 0, 2, 4, 5, 8]
MAJOR = [0, 4, 7]
MINOR = [-3, 0, 4]
MAJOR_7 = [0, 4, 7, 11]
MINOR_7 = [-3, 0, 4, 8]
MAJOR_SUS4 = [0, 4, 5, 7]
MINOR_SUS4 = [-3, 0, 2, 4]
MAJOR_7_SUS4 = [0, 4, 5, 7, 11]
MINOR_7_SUS4 = [-3, 0, 2, 4, 8]
MAJOR_ADD9 = [0, 4, 7, 14]
MINOR_ADD9 = [-3, 1, 4, 11]
MAJOR_7_ADD9 = [0, 4, 7, 11, 14]
MINOR_7_ADD9 = [-3, 1, 4, 8, 11]
# Master chord list
CHORDS = [MAJOR_S, MINOR_S,\
        MAJOR, MINOR,\
        MAJOR_7, MINOR_7,\
        MAJOR_SUS4, MINOR_SUS4,\
        MAJOR_7_SUS4, MINOR_7_SUS4,\
        MAJOR_ADD9, MINOR_ADD9,\
        MAJOR_7_ADD9, MINOR_7_ADD9]

# Silence probability lists: choose_notes() draws one element uniformly,
# so the fraction of 1s is the probability of ending a subpattern in silence.
HALF_SILENCE = [0, 1]
THIRD_SILENCE = [0, 0, 1]
TWOTHIRD_SILENCE = [0, 1, 1]
# Master silence probability list
SILENCE_PROB_LISTS = [HALF_SILENCE, THIRD_SILENCE,\
                    TWOTHIRD_SILENCE]

# Sustain probability lists: same sampling scheme, used to decide whether a
# rest becomes a sustain of the previous note.
HALF_SUSTAIN = [0, 1]
THIRD_SUSTAIN = [0, 0, 1]
TWOTHIRD_SUSTAIN = [0, 1, 1]
# Master sustain probability list
SUSTAIN_PROB_LISTS = [HALF_SUSTAIN, THIRD_SUSTAIN,\
                    TWOTHIRD_SUSTAIN]

# Patterns: each entry is a (subpattern label, note count) pair; repeated
# labels reuse the same generated subpattern (see choose_notes()).
PATTERN_1 = [("A", 8), ("B", 8), ("A", 8), ("C", 8)]
PATTERN_2 = [("A", 8), ("A", 8), ("B", 8), ("B", 8)]
PATTERN_3 = [("A", 8), ("B", 8), ("C", 8), ("B", 8)]
PATTERN_4 = [("A", 8), ("A", 8), ("B", 8), ("C", 8)]
PATTERN_5 = [("A", 8), ("B", 8), ("B", 8), ("A", 8)]
PATTERN_6 =\
    [("A", 4), ("A", 4), ("B", 8), ("C", 8), ("A", 4), ("C", 4)]
PATTERN_7 =\
    [("A", 4), ("B", 4), ("A", 4), ("B", 4),\
    ("A", 4), ("C", 4), ("A", 4), ("C", 4)]
# Master pattern list
PATTERNS = [PATTERN_1,\
            PATTERN_2, PATTERN_3,\
            PATTERN_4, PATTERN_5,\
            PATTERN_6, PATTERN_7]

# Interaction configuration
FOCUS_SPEED = 100 # velocity ints / second
MAX_VEL = 50
MIN_VEL = 50
NOTE_VELOCITY_MULT = 0.5
def choose_notes(key, chord):
notes_init = [key + chord[x]\
for x in range(len(chord))]
notes_span = notes_init + [note + 12 for note in notes_init]
# Generate notes
notes = []
subpat_ | dict = {}
pattern = random.choice(PATTERNS)
silence_prob_list = random.choice(SILENCE_PROB_LISTS)
for subpat in pattern:
if subpat[0] not in subpat_dict:
# Generate new subpattern
new_subpat = []
for i in range(subpat[1]):
new_subpat += [random.choice(notes_span)]
# Silence
if random.choice(silence_prob_list) is 1:
new_subpat[-1] = -1
subpat_dict[subpat[0]] = new_subpat
# Add each subpattern's notes according to pattern
notes += subpat_dict[subpat[0]]
# Sustain processing of notes.
# Add potential sustains instead of silences
# (0 instead of -1)
sustain_possible = False
sustain_prob_list = random.choice(SUSTAIN_PROB_LISTS)
for i in range(len(notes)):
if notes[i] is not -1:
# A note can be sustained.
sustain_possible = True
if notes[i] is -1 and sustain_possible:
if random.choice(sustain_prob_list) is 1:
notes[i] = 0 if i < len(notes)-1 else -1
else:
# A note-off event will happen,
# sustain is no longer possible
sustain_possible = False
# Octave-jump-removal processing.
# Removes jumps of larger than an octave in the middle
# of a melody. Should help create more
# melodic structures.
last_note = -1
for i in range(len(notes)):
cur_note = notes[i]
if last_note - cur_note > 12 and last_note is not -1:
notes[i] += 12
elif last_note - cur_note < -12 and last_note is not -1:
notes[i] -= 12
print "Notes: " + str(notes)
return notes |
suutari-ai/shoop | shuup_tests/notify/notification_test_urls.py | Python | agpl-3.0 | 364 | 0 | # -*- coding: utf-8 -*-
# This file is part o | f Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.conf.urls import url
u | rlpatterns = [
url('test/(?P<arg>.+)/$', (lambda: 0), name="test"),
]
|
rsnakamura/oldape | tests/testunits/testtools/testnetwork/test_network.py | Python | apache-2.0 | 1,234 | 0.005673 | """
A set of tests that check different cases for the network tester
"""
#python
#third-party
from mock import MagicMock
import nose
#from apetools.tools import networkcheck
from apetools.tools import timetorecovery as ttr
from apetools.tools import networktester
from apetools.commons import errors
def test_case_1():
    """
    :description: dut and tpc ping each other
    :assert: Nothing happens
    """
    # Both time-to-recovery checks succeed, so run() must complete
    # without raising.
    to_tpc = MagicMock()
    to_tpc.run.return_value = ttr.TTRData(5, '0.98')
    to_dut = MagicMock()
    to_dut.run.return_value = ttr.TTRData(10, '9.2')
    tester = networktester.NetworkTester([to_tpc, to_dut])
    tester.run()
    assert True
    return
@nose.tools.raises(errors.ConnectionError)
def test_case_2():
    """
    :description: dut pings tpc, tpc fails to ping dut
    :assert: Raises ConnectionError
    """
    # The tpc-side check returns None (no recovery), which the tester
    # must surface as a ConnectionError.
    failing_check = MagicMock()
    failing_check.run.return_value = None
    passing_check = MagicMock()
    passing_check.run.return_value = ttr.TTRData(2, '0.9')
    networktester.NetworkTester([passing_check, failing_check]).run()
    return
if __name__ == "__main__":
    # Run the failure case directly.  The "import pudb; pudb.set_trace()"
    # call that used to live here was a debugging leftover and has been
    # removed so the module can be executed non-interactively.
    test_case_2()
|
kawamon/hue | desktop/core/ext-py/prometheus_client-0.7.1/prometheus_client/metrics.py | Python | apache-2.0 | 21,319 | 0.001782 | import sys
from threading import Lock
import time
import types
from . import values # retain this import style for testability
from .context_managers import ExceptionCounter, InprogressTracker, Timer
from .metrics_core import (
Metric, METRIC_LABEL_NAME_RE, METRIC_NAME_RE,
RESERVED_METRIC_LABEL_NAME_RE,
)
from .registry import REGISTRY
from .utils import floatToGoString, INF
if sys.version_info > (3,):
    # Python 3: str is already unicode, and MethodType binds with two args.
    unicode = str
    create_bound_method = types.MethodType
else:
    def create_bound_method(func, obj):
        # Python 2's MethodType additionally requires the class.
        return types.MethodType(func, obj, obj.__class__)
def _build_full_name(metric_type, name, namespace, subsystem, unit):
full_name = ''
if namespace:
full_name += namespace + '_'
if subsystem:
full_name += subsystem + '_'
full_name += name
if unit and not full_name.endswith("_" + unit):
full_name += "_" + unit
if unit and metric_type in ('info', 'stateset'):
raise ValueError('Metric name is of a type that cannot have a unit: ' + full_name)
if metric_type == 'counter' and full_name.endswith('_total'):
full_name = full_name[:-6] # Munge to OpenMetrics.
return full_name
def _validate_labelnames(cls, labelnames):
    """Validate label names against the metric naming rules.

    Rejects syntactically invalid names, names reserved by the exposition
    format, and names reserved by the metric class itself.  Returns the
    names as a tuple.
    """
    names = tuple(labelnames)
    for name in names:
        if not METRIC_LABEL_NAME_RE.match(name):
            raise ValueError('Invalid label metric name: ' + name)
        if RESERVED_METRIC_LABEL_NAME_RE.match(name):
            raise ValueError('Reserved label metric name: ' + name)
        if name in cls._reserved_labelnames:
            raise ValueError('Reserved label metric name: ' + name)
    return names
class MetricWrapperBase(object):
    """Common base class for the user-facing metric types.

    A metric constructed with label names acts as a "parent": it holds no
    value itself but manages one child instance per observed label-value
    combination (see ``labels()``).  A metric without label names -- or a
    child whose label values are bound -- is directly observable.
    """
    _type = None
    _reserved_labelnames = ()
    def _is_observable(self):
        # Whether this metric is observable, i.e.
        # * a metric without label names and values, or
        # * the child of a labelled metric.
        return not self._labelnames or (self._labelnames and self._labelvalues)
    def _is_parent(self):
        # A parent carries label names but no bound label values.
        return self._labelnames and not self._labelvalues
    def _get_metric(self):
        # Build the core Metric container that samples get attached to.
        return Metric(self._name, self._documentation, self._type, self._unit)
    def describe(self):
        """Return the metric's metadata without collecting any samples."""
        return [self._get_metric()]
    def collect(self):
        """Collect this metric's current samples into a Metric object."""
        metric = self._get_metric()
        for suffix, labels, value in self._samples():
            metric.add_sample(self._name + suffix, labels, value)
        return [metric]
    def __init__(self,
                 name,
                 documentation,
                 labelnames=(),
                 namespace='',
                 subsystem='',
                 unit='',
                 registry=REGISTRY,
                 labelvalues=None,
                 ):
        """Validate the name/labels, initialize parent or child state, and
        register with ``registry`` (parents and label-less metrics only;
        ``labelvalues`` is set internally when creating children)."""
        self._name = _build_full_name(self._type, name, namespace, subsystem, unit)
        self._labelnames = _validate_labelnames(self, labelnames)
        self._labelvalues = tuple(labelvalues or ())
        self._kwargs = {}
        self._documentation = documentation
        self._unit = unit
        if not METRIC_NAME_RE.match(self._name):
            raise ValueError('Invalid metric name: ' + self._name)
        if self._is_parent():
            # Prepare the fields needed for child metrics.
            self._lock = Lock()
            self._metrics = {}
        if self._is_observable():
            self._metric_init()
        if not self._labelvalues:
            # Register the multi-wrapper parent metric, or if a label-less metric, the whole shebang.
            if registry:
                registry.register(self)
    def labels(self, *labelvalues, **labelkwargs):
        """Return the child for the given labelset.

        All metrics can have labels, allowing grouping of related time series.
        Taking a counter as an example:

            from prometheus_client import Counter

            c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint'])
            c.labels('get', '/').inc()
            c.labels('post', '/submit').inc()

        Labels can also be provided as keyword arguments:

            from prometheus_client import Counter

            c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint'])
            c.labels(method='get', endpoint='/').inc()
            c.labels(method='post', endpoint='/submit').inc()

        See the best practices on [naming](http://prometheus.io/docs/practices/naming/)
        and [labels](http://prometheus.io/docs/practices/instrumentation/#use-labels).
        """
        if not self._labelnames:
            raise ValueError('No label names were set when constructing %s' % self)
        if self._labelvalues:
            raise ValueError('%s already has labels set (%s); can not chain calls to .labels()' % (
                self,
                dict(zip(self._labelnames, self._labelvalues))
            ))
        if labelvalues and labelkwargs:
            raise ValueError("Can't pass both *args and **kwargs")
        if labelkwargs:
            if sorted(labelkwargs) != sorted(self._labelnames):
                raise ValueError('Incorrect label names')
            labelvalues = tuple(unicode(labelkwargs[l]) for l in self._labelnames)
        else:
            if len(labelvalues) != len(self._labelnames):
                raise ValueError('Incorrect label count')
            labelvalues = tuple(unicode(l) for l in labelvalues)
        with self._lock:
            if labelvalues not in self._metrics:
                # Create the child lazily on first observation of this labelset.
                self._metrics[labelvalues] = self.__class__(
                    self._name,
                    documentation=self._documentation,
                    labelnames=self._labelnames,
                    unit=self._unit,
                    labelvalues=labelvalues,
                    **self._kwargs
                )
            return self._metrics[labelvalues]
    def remove(self, *labelvalues):
        """Remove the given labelset from the metric."""
        if not self._labelnames:
            raise ValueError('No label names were set when constructing %s' % self)
        if len(labelvalues) != len(self._labelnames):
            raise ValueError('Incorrect label count (expected %d, got %s)' % (len(self._labelnames), labelvalues))
        labelvalues = tuple(unicode(l) for l in labelvalues)
        with self._lock:
            del self._metrics[labelvalues]
    def _samples(self):
        # Parents aggregate their children; children report their own value.
        if self._is_parent():
            return self._multi_samples()
        else:
            return self._child_samples()
    def _multi_samples(self):
        """Yield every child's samples, merged with its label values."""
        with self._lock:
            metrics = self._metrics.copy()
        for labels, metric in metrics.items():
            series_labels = list(zip(self._labelnames, labels))
            for suffix, sample_labels, value in metric._samples():
                yield (suffix, dict(series_labels + list(sample_labels.items())), value)
    def _child_samples(self):  # pragma: no cover
        raise NotImplementedError('_child_samples() must be implemented by %r' % self)
    def _metric_init(self):  # pragma: no cover
        """
        Initialize the metric object as a child, i.e. when it has labels (if any) set.

        This is factored as a separate function to allow for deferred initialization.
        """
        raise NotImplementedError('_metric_init() must be implemented by %r' % self)
class Counter(MetricWrapperBase):
"""A Counter tracks counts of events or running totals.
Example use cases for Counters:
- Number of requests processed
- Number of items that were inserted into a queue
- Total amount of data that a system has processed
Counters can only go up (and be reset when the process restarts). If your use case can go down,
you should use a Gauge instead.
An example for a Counter:
from prometheus_client import Counter
c = Counter('my_failures_total', 'Description of counter')
c.inc() # Increment by 1
c.inc(1.6) # Increment by given value
There are utilities to count exceptions raised:
@c.count_exceptions()
def f():
pass
with c.count_exceptions():
pass
# Count on |
typesupply/extractor | Lib/extractor/stream.py | Python | mit | 9,611 | 0 | # -*- coding: utf-8 -*-
fro | m fontTools.misc.textTools import num2binary
from fontTools.ttLib.t | ables.ttProgram import streamOpcodeDict, opcodeDict
from io import BytesIO
class InstructionStream(object):
"""
:param program_bytes: The program bytecode.
:type program_bytes: bytes
The instruction stream.
"""
    def __init__(self, instruction_processor=None, program_bytes=b"") -> None:
        # Keep a reference to the owning instruction processor (may be None)
        # and wrap the program bytecode in a seekable byte stream.
        self.ip = instruction_processor
        self.io = BytesIO(program_bytes)
        # Cache the program length; BytesIO does not expose it directly.
        self._num_bytes = len(program_bytes)
    def __len__(self):
        """Return the length of the program bytecode in bytes."""
        return self._num_bytes
    def __repr__(self) -> str:
        """
        Return the full disassembly of the bytecode in the default ("ttx")
        dialect; equivalent to get_assembly(), which rewinds first.
        """
        return self.get_assembly()
    def __str__(self) -> str:
        """
        Return the full disassembly of the bytecode in the default ("ttx")
        dialect; identical to __repr__.
        """
        return self.get_assembly()
    def move_instruction_pointer(self, bytes_offset: int) -> None:
        """
        :param bytes_offset: The offset in bytes. May be positive or negative.
        :type bytes_offset: int

        Move the instruction pointer inside the current stream, relative to the
        current pointer position.  No bounds checking is performed.
        """
        self.io.seek(bytes_offset, 1)  # 1 = relative to current position
def read_byte(self):
"""
Read a byte from the instruction stream and advance the instruction
pointer. Returns the value as a tuple of (byte, int).
"""
b = self.io.read(1)
if not b:
return False
return b, int.from_bytes(b, byteorder="big", signed=False)
def read_word(self):
"""
Read a word from the instruction stream and advance the instruction
pointer. Returns the value as a tuple of (word, int).
"""
w = self.io.read(2)
if not w:
return False
return w, int.from_bytes(w, byteorder="big", signed=True)
    def rewind(self) -> None:
        """
        Rewind the instruction pointer to the beginning of the stream.
        """
        # seek() defaults to absolute positioning (whence=0).
        self.io.seek(0)
    # Getting the assembly code
    @property
    def vtt_assembly(self) -> str:
        """
        Return the instructions from the bytecode in the current stream as
        VTT assembly code, one instruction per line.
        """
        return self.get_assembly(dialect="vtt", end="\n")
def get_assembly(self, dialect="ttx", end="\n") -> str:
"""
Return the instructions from the bytecode in the current stream as
assembly code in the specified dialect, "ttx" or "vtt".
"""
vtt = dialect == "vtt"
ttx = dialect == "ttx"
self.rewind()
asm = ""
indent = 0
while True:
opcode = self.io.read(1)
if not opcode:
asm = asm.strip()
if ttx:
return asm
elif vtt:
if asm:
return f"#PUSHOFF{end}" + asm.strip() + f"{end}#PUSHON"
return ""
else:
# Unknown dialect
raise NotImplementedError
opcode = int.from_bytes(opcode, byteorder="big", signed=False)
cmd_info = streamOpcodeDict.get(opcode, None)
if cmd_info is None:
cmd_info = opcodeDict.get(opcode, None)
if cmd_info is None:
print(
asm + "\n"
"Illegal opcode 0x%02x at offset 0x%04x."
% (int(opcode), self.io.tell(),)
)
raise KeyError
cmd_name, arg_bits, base_opcode, name = cmd_info
args = []
if cmd_name in ("EIF", "ELSE", "ENDF"):
indent -= 1
if cmd_name in ("NPUSHB", "NPUSHW", "PUSHB", "PUSHW"):
# PUSH instructions read their arguments from the stream
if cmd_name.startswith("PUSH"):
# Take number of arguments from the opcode
num_args = opcode - base_opcode + 1
else:
# Take number of arguments from the stream
_, num_args = self.read_byte()
if cmd_name.endswith("B"):
for n in range(num_args):
_, i = self.read_byte()
args.append(str(i))
else:
for n in range(num_args):
_, i = self.read_word()
args.append(str(i))
arg_bits = 0 # Don't output bits for push instructions
if arg_bits == 0:
if ttx:
arg_bitstring = " "
else:
arg_bitstring = ""
else:
if ttx:
arg_bitstring = num2binary(opcode - base_opcode, arg_bits)
elif vtt:
arg_bitstring = self.bitstring_to_mnemonic(
cmd_name, num2binary(opcode - base_opcode, arg_bits)
)
else:
# Unknown dialect
raise NotImplementedError
if ttx:
if cmd_name in ("NPUSHB", "NPUSHW", "PUSHB", "PUSHW"):
num_args = len(args)
val = "value" if num_args == 1 else "values"
asm += (
f"\n{' ' * indent}{cmd_name}[{arg_bitstring}]"
f"\t/* {num_args} {val} pushed */"
)
else:
asm += (
f"\n{' ' * indent}{cmd_name}[{arg_bitstring}]"
f"\t/* {name} */"
)
if args:
asm += f"\n{' ' * indent}{' '.join(args)}"
elif vtt:
if cmd_name in ("NPUSHB", "NPUSHW", "PUSHB", "PUSHW"):
# Format as generic #PUSH for VTT assembly output
cmd_name = "#PUSH"
asm += f"{end}{' ' * indent}{cmd_name}, {', '.join(args)}"
elif cmd_name in ("JMPR", "JROF"):
# Special formatting for jump instructions
if cmd_name == "JPMR":
args = ("*",)
elif cmd_name == "JROF":
args = ("*", "*")
asm += f"{end}#PUSHON"
asm += f"{end}{' ' * indent}{cmd_name}, {', '.join(args)}"
asm += f"{end}#PUSHOFF"
else:
asm += (
f"{end}{' ' * indent}{cmd_name}[{arg_bitstring}]"
f"\t/* {name} */"
)
else:
# Unknown dialect
raise NotImplementedError
if cmd_name in ("ELSE", "FDEF", "IF"):
indent += 1
def bitstring_to_mnemonic(self, cmd_name: str, bitstring: str) -> str:
"""
Return VTT mnemonics for a bit string
"""
if cmd_name in ("SVTCA", "SPVTCA", "SFVTCA", "IUP"):
# Direction
if bitstring == "0":
return "Y" # Y axis
return "X" # X axis
elif cmd_name in ("SPVTL", "SFVTL", "SDPVTL"):
# Line relation
if bitstring == "0":
return "r" # parallel to line
return "R" # perpendicular to line
elif cmd_name in ("MDAP", "MIAP"):
# Rounding
if bitstring == "0":
return "r" # do not round distance
return "R" # round distance
elif cmd_name in ("SHP", "SHC", "SHZ"):
# Reference Point Usage
if bitstring == "0":
return "2" # Use rp2
return "1" # Use rp1
elif cmd_name in ("MSIRP",):
# Reference Point Autoset
if bitstring == "0":
return "m" # Do not set rp0
return "M" # Set rp0 to poin |
mattljc/LaminateTools | oldSauce/RevB/text_outputs.py | Python | apache-2.0 | 765 | 0.035294 | import numpy as np
def FullOut(outfile='results.txt', analysis=None, envelopes=None, index=None):
    """Write laminate analysis results to a plain-text report.

    outfile   -- path of the report file to (over)write
    analysis  -- mapping of laminate-constant names to values
    envelopes -- mapping of envelope names to {key: value} mappings
    index     -- mapping of failure-index names to values

    BUG FIX: the mutable default arguments ({}) were replaced with None to
    avoid sharing one dict object across calls.
    """
    analysis = {} if analysis is None else analysis
    envelopes = {} if envelopes is None else envelopes
    index = {} if index is None else index
    # Set global output options
    np.set_printoptions(precision=5, linewidth=1000)
    contents = outfile + '\n\n'
    contents += '==LAMINATE CONSTANTS==\n'
    for key in analysis:
        contents += key + '\n'
        contents += str(analysis[key])
        contents += '\n'
    contents += '==LAMINATE FAILURE INDICES==\n'
    for key in index:
        contents += key + '\n'
        contents += str(index[key])
        contents += '\n'
    contents += '==LAMINATE FAILURE ENVELOPES==\n'
    for key in envelopes:
        contents += key + '\n'
        for subkey in envelopes[key]:
            contents += subkey + ' = ' + str(envelopes[key][subkey])
            contents += '\n'
        contents += '\n'
    # Context manager guarantees the handle is closed even on write errors,
    # and avoids shadowing the old builtin name "file".
    with open(outfile, 'w') as out:
        out.write(contents)
|
hashbrowncipher/pushmanager | pushmanager/servlets/delayrequest.py | Python | apache-2.0 | 2,656 | 0.00113 | import sqlalchemy as SA
import pushmanager.core.db as db
from pushmanager.core.mail import MailQueue
from pushmanager.core.requesthandler import RequestHandler
import pushmanager.core.util
class DelayRequestServlet(RequestHandler):
    """Mark a push request as 'delayed' so it is excluded from pushes until
    its owner re-requests it, then notify the owner and watchers by email."""
    def post(self):
        # Only authenticated users may delay requests.
        if not self.current_user:
            return self.send_error(403)
        self.requestid = pushmanager.core.util.get_int_arg(self.request, 'id')
        # Move the request out of the 'requested'/'pickme' states.
        update_query = db.push_requests.update().where(SA.and_(
            db.push_requests.c.id == self.requestid,
            db.push_requests.c.state.in_(('requested', 'pickme')),
        )).values({
            'state': 'delayed',
        })
        # Detach every delayed request from any push contents it is part of.
        delete_query = db.push_pushcontents.delete().where(
            SA.exists([1], SA.and_(
                db.push_pushcontents.c.request == db.push_requests.c.id,
                db.push_requests.c.state == 'delayed',
            )))
        # Re-read the request so the callback can see the resulting state.
        select_query = db.push_requests.select().where(
            db.push_requests.c.id == self.requestid,
        )
        db.execute_transaction_cb([update_query, delete_query, select_query], self.on_db_complete)
    # GET is handled identically to POST.
    get = post
    def on_db_complete(self, success, db_results):
        """Transaction callback: if the state change took effect, email the
        request's owner and watchers; otherwise just redirect back."""
        self.check_db_results(success, db_results)
        _, _, req = db_results
        req = req.first()
        if req['state'] != 'delayed':
            # We didn't actually discard the record, for whatever reason
            return self.redirect("/requests?user=%s" % self.current_user)
        if req['watchers']:
            # Owner plus comma-separated watchers all get notified.
            user_string = '%s (%s)' % (req['user'], req['watchers'])
            users = [req['user']] + req['watchers'].split(',')
        else:
            user_string = req['user']
            users = [req['user']]
        msg = (
            """
    <p>
        Request for %(user)s has been marked as delayed by %(pushmaster)s, and will not be accepted into pushes until you
        mark it as requested again:
    </p>
    <p>
        <strong>%(user)s - %(title)s</strong><br />
        <em>%(repo)s/%(branch)s</em>
    </p>
    <p>
        Regards,<br />
        PushManager
    </p>"""
            ) % pushmanager.core.util.EscapedDict({
                'pushmaster': self.current_user,
                'user': user_string,
                'title': req['title'],
                'repo': req['repo'],
                'branch': req['branch'],
            })
        subject = "[push] %s - %s" % (user_string, req['title'])
        MailQueue.enqueue_user_email(users, msg, subject)
        self.redirect("/requests?user=%s" % self.current_user)
|
ganeshrn/ansible | lib/ansible/vars/clean.py | Python | gpl-3.0 | 6,161 | 0.001785 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils import six
from ansible.module_utils._text import to_text
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.plugins.loader import connection_loader
from ansible.utils.display import Display
display = Display()
def module_response_deepcopy(v):
    """Function to create a deep copy of module response data

    Designed for use inside the Ansible "engine" as a faster alternative
    to ``copy.deepcopy`` for data that has round-tripped through a
    serialization such as JSON.  Only ``dict`` and ``list`` are copied;
    every other type is returned as-is, so do not use this as a general
    ``deepcopy`` replacement.

    Backwards compatibility is only promised for the existing engine call
    sites:

    * ``ansible.executor.task_result.TaskResult.clean_copy``
    * ``ansible.vars.clean.clean_facts``
    * ``ansible.vars.namespace_facts``
    """
    if isinstance(v, dict):
        copied = v.copy()
        pairs = six.iteritems(copied)
    elif isinstance(v, list):
        copied = list(v)
        pairs = enumerate(copied)
    else:
        # Scalars (and any other type) are shared, not copied.
        return v
    for key, value in pairs:
        copied[key] = (
            module_response_deepcopy(value)
            if isinstance(value, (dict, list))
            else value
        )
    return copied
def strip_internal_keys(dirty, exceptions=None):
    """Recursively remove string keys starting with '_ansible_' from
    ``dirty`` (a mutable mapping, or a mutable sequence of them), mutating
    it in place.

    ``exceptions`` is an optional iterable of key names to preserve.
    Returns the (mutated) ``dirty`` for convenience; raises AnsibleError
    for any other input type.
    """
    # All keys starting with _ansible_ are internal, so change the 'dirty' mapping and remove them.
    if exceptions is None:
        exceptions = tuple()
    if isinstance(dirty, MutableSequence):
        for element in dirty:
            if isinstance(element, (MutableMapping, MutableSequence)):
                strip_internal_keys(element, exceptions=exceptions)
    elif isinstance(dirty, MutableMapping):
        # listify to avoid updating dict while iterating over it
        for k in list(dirty.keys()):
            if isinstance(k, six.string_types):
                if k.startswith('_ansible_') and k not in exceptions:
                    del dirty[k]
                    continue
            # Recurse into nested containers kept under non-internal keys.
            if isinstance(dirty[k], (MutableMapping, MutableSequence)):
                strip_internal_keys(dirty[k], exceptions=exceptions)
    else:
        raise AnsibleError("Cannot strip invalid keys from %s" % type(dirty))
    return dirty
def remove_internal_keys(data):
    '''
    More nuanced version of strip_internal_keys: warns on and removes
    internal/reserved result keys, drops empty 'warnings'/'deprecations'
    entries, and strips interpreter-discovery facts that only actions may
    set.  Mutates ``data`` in place.
    '''
    for key in list(data.keys()):
        if (key.startswith('_ansible_') and key != '_ansible_parsed') or key in C.INTERNAL_RESULT_KEYS:
            display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
            del data[key]
    # remove bad/empty internal keys
    for key in ['warnings', 'deprecations']:
        if key in data and not data[key]:
            del data[key]
    # cleanse fact values that are allowed from actions but not modules
    for key in list(data.get('ansible_facts', {}).keys()):
        if key.startswith('discovered_interpreter_') or key.startswith('ansible_discovered_interpreter_'):
            del data['ansible_facts'][key]
def clean_facts(facts):
    '''
    Remove facts that can override internal keys or are otherwise deemed
    unsafe: magic-variable names, connection variables (common and
    per-connection-plugin), restricted/internal result keys, and
    interpreter settings.  Operates on a copy; returns the cleaned copy
    with internal '_ansible_' keys also stripped.
    '''
    data = module_response_deepcopy(facts)
    remove_keys = set()
    fact_keys = set(data.keys())
    # first we add all of our magic variable names to the set of
    # keys we want to remove from facts
    # NOTE: these will eventually disappear in favor of others below
    for magic_var in C.MAGIC_VARIABLE_MAPPING:
        remove_keys.update(fact_keys.intersection(C.MAGIC_VARIABLE_MAPPING[magic_var]))
    # remove common connection vars
    remove_keys.update(fact_keys.intersection(C.COMMON_CONNECTION_VARS))
    # next we remove any connection plugin specific vars
    for conn_path in connection_loader.all(path_only=True):
        conn_name = os.path.splitext(os.path.basename(conn_path))[0]
        re_key = re.compile('^ansible_%s_' % re.escape(conn_name))
        for fact_key in fact_keys:
            # most lightweight VM or container tech creates devices with this pattern, this avoids filtering them out
            if (re_key.match(fact_key) and not fact_key.endswith(('_bridge', '_gwbridge'))) or fact_key.startswith('ansible_become_'):
                remove_keys.add(fact_key)
    # remove some KNOWN keys
    for hard in C.RESTRICTED_RESULT_KEYS + C.INTERNAL_RESULT_KEYS:
        if hard in fact_keys:
            remove_keys.add(hard)
    # finally, we search for interpreter keys to remove
    re_interp = re.compile('^ansible_.*_interpreter$')
    for fact_key in fact_keys:
        if re_interp.match(fact_key):
            remove_keys.add(fact_key)
    # then we remove them (except for ssh host keys)
    for r_key in remove_keys:
        if not r_key.startswith('ansible_ssh_host_key_'):
            display.warning("Removed restricted key from module data: %s" % (r_key))
            del data[r_key]
    return strip_internal_keys(data)
def namespace_facts(facts):
    ''' return all facts inside 'ansible_facts' w/o an ansible_ prefix '''
    stripped = {}
    for key in facts:
        # 'ansible_local' keeps its prefix; everything else loses 'ansible_'.
        if key.startswith('ansible_') and key != 'ansible_local':
            new_key = key[8:]
        else:
            new_key = key
        stripped[new_key] = module_response_deepcopy(facts[key])
    return {'ansible_facts': stripped}
|
ImageMarkup/isic-archive | isic_archive/models/annotation.py | Python | apache-2.0 | 10,692 | 0.000561 | from bson import ObjectId
import jsonschema
import numpy
from girder.exceptions import ValidationException
from girder.models.file import File
from girder.models.model_base import Model
from girder.models.upload import Upload
from girder.utility.acl_mixin import AccessControlMixin
from .image import Image
from .segmentation_helpers import ScikitSegmentationHelper
from .study import Study
from .user import User
class Annotation(AccessControlMixin, Model):
    def initialize(self):
        # Girder model configuration: collection name and indexed fields.
        self.name = 'annotation'
        self.ensureIndices(['studyId', 'imageId', 'userId'])
        # TODO: resourceColl should be ['study', 'isic_archive'], but upstream support is unclear
        self.resourceColl = 'folder'
        self.resourceParent = 'studyId'
def createAnnotation(self, study, image, user):
annotation = self.save({
'studyId': study['_id'],
'imageId': image['_id'],
'userId': user['_id'],
'startTime': None,
'stopTime': None,
'status': None,
'log': [],
'responses': {},
'markups': {},
})
return annotation
def getState(self, annotation):
return (Study().State.COMPLETE
if annotation['stopTime'] is not None
else Study().State.ACTIVE)
def _superpixelsToMasks(self, superpixelValues, image):
possibleSuperpixelNums = numpy.array([
superpixelNum
for superpixelNum, featureValue
in enumerate(superpixelValues)
if featureValue == 0.5
])
definiteSuperpixelNums = numpy.array([
superpixelNum
for superpixelNum, featureValue
in enumerate(superpixelValues)
if featureValue == 1.0
])
superpixelsLabelData = Image().superpixelsData(image)
possibleMask = numpy.in1d(
superpixelsLabelData.flat,
possibleSuperpixelNums
).reshape(superpixelsLabelData.shape)
possibleMask = possibleMask.astype(numpy.bool_)
definiteMask = numpy.in1d(
superpixelsLabelData.flat,
definiteSuperpixelNums
).reshape(superpixelsLabelData.shape)
definiteMask = definiteMask.astype(numpy.bool_)
return possibleMask, definiteMask
def _superpixelsToMaskMarkup(self, superpixelValues, image):
possibleMask, definiteMask = self._superpixelsToMasks(superpixelValues, image)
markupMask = numpy.zeros(possibleMask.shape, dtype=numpy.uint8)
markupMask[possibleMask] = 128
markupMask[definiteMask] = 255
return markupMask
def saveSuperpixelMarkup(self, annotation, featureId, superpixelValues):
image = Image().load(annotation['imageId'], force=True, exc=True)
annotator = User().load(annotation['userId'], force=True, exc=True)
markupMask = self._superpixelsToMaskMarkup(superpixelValues, image)
markupMaskEncodedStream = ScikitSegmentationHelper.writeImage(markupMask, 'png')
markupFile = Upload().uploadFromFile(
obj=markupMaskEncodedStream,
size=len(markupMaskEncodedStream.getvalue()),
name='annotation_%s_%s.png' % (
annotation['_id'],
# Rename features to ensure the file is downloadable on Windows
featureId.replace(' : ', ' ; ').replace('/', ',')
),
# TODO: change this once a bug in upstream Girder is fixed
parentType='annotation',
parent=annotation,
attachParent=True,
user=annotator,
mimeType='image/png'
)
markupFile['superpixels'] = superpixelValues
# TODO: remove this once a bug in upstream Girder is fixed
markupFile['attachedToType'] = ['annotation', 'isic_archive']
markupFile = File().save(markupFile)
annotation['markups'][featureId] = {
'fileId': markupFile['_id'],
'present': bool(markupMask.any())
}
return Annotation().save(annotation)
def getMarkupFile(self, annotation, featureId, includeSuperpixels=False):
if featureId in annotation['markups']:
markupFile = File().load(
annotation['markups'][featureId]['fileId'],
force=True,
exc=True,
fields={'superpixels': includeSuperpixels}
)
return markupFile
else:
return None
def renderMarkup(self, annotation, featureId):
image = Image().load(annotation['imageId'], force=True, exc=True)
renderData = Image().imageData(image)
markupFile = Annotation().getMarkupFile(annotation, featureId)
if markupFile:
markupMask = Image()._decodeDataFromFile(markupFile)
else:
image = Image().load(annotation['imageId'], force=True, exc=True)
markupMask = numpy.zeros(
(
image['meta']['acquisition']['pixelsY'],
image['meta']['acquisition']['pixelsX']
),
dtype=numpy.uint8
)
possibleMask = markupMask == 128
definiteMask = markupMask == 255
POSSIBLE_OVERLAY_COLOR = numpy.array([250, 250, 0])
DEFINITE_OVERLAY_COLOR = numpy.array([0, 0, 255])
renderData[possibleMask] = POSSIBLE_OVERLAY_COLOR
renderData[definiteMask] = DEFINITE_OVERLAY_COLOR
return renderData
def filter(self, annotation, user=None, additionalKeys=None):
output = {
'_id': annotation['_id'],
'_modelType': 'annotation',
'studyId': annotation['studyId'],
'image': Image().filterSummary(
Image().load(annotation['imageId'], force=True, exc=True),
user),
'user': User().filterSummary(
user=User().load(annotation['userId'], force=True, exc=True),
accessorUser=user),
'state': Annotation().getState(annotation)
}
if Annotation().getState(annotation) == Study().State.COMPLETE:
output.update({
'status': annotation['status'],
'startTime': annotation['startTime'],
'stopTime': annotation['stopTime'],
'responses': annotation['responses'],
'markups': {
featureId: markup['present']
for featureId, markup
in annotation['markups'].items()
},
'log': annotation.get('log', [])
})
return output
def filterSummary(self, annotation, user=None):
return {
'_id': annotation['_id'],
'studyId': annotation['studyId'],
'userId': annotation['userId'],
'imageId': annotation['imageId'],
'state': self.getState(annotation)
}
def remove(self, annotation, **kwargs):
for featureId in annotation['markups'].keys():
File().remove(self.getMarkupFile(annotation, featureId))
return super(Annotation, self).remove(annotation)
def validate(self, doc): # noqa C901
for field in ['studyId', 'userId', 'imageId']:
if not isinstance(doc.get(field), ObjectId):
raise ValidationException(f'Annotation field "{field}" must be an ObjectId')
study = Study().load(doc['studyId'], force=True, exc=False)
if not study:
raise ValidationException(
'Annotation field "studyId" must reference an existing Study.')
# If annotation is complete
| if doc.get('stopTime'):
schema = {
# '$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'annotation',
'type': 'object',
'properties': {
'_id': {
# TODO
},
'studyId': {
# | TODO
},
'imageId': {
|
Slack06/yadg | manage.py | Python | mit | 1,359 | 0.001472 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Slack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS | PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.develop")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv) |
sidnarayanan/BAdNet | train/pf/adv/plot.py | Python | mit | 8,432 | 0.025498 | #!/usr/local/bin/python2.7
from sys import exit
from os import environ, system
environ['KERAS_BACKEND'] = 'tensorflow'
environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
environ["CUDA_VISIBLE_DEVICES"] = ""
import numpy as np
import utils
import adversarial
import obj
import config
#config.DEBUG = True
#config.n_truth = 5
#config.truth = 'resonanceType'
n_batches = 200
#n_batches = 1
partition = 'test'
p = utils.Plotter()
r = utils.Roccer()
APOSTLE = 'panda_3'
OUTPUT = environ['BADNET_FIGSDIR'] + '/' + APOSTLE + '/'
system('mkdir -p %s'%OUTPUT)
#components=['singletons', 'inclusive', 'nn1', 'nn2']
components=['singletons', APOSTLE+'_conv', 'panda_3_shallow']
def make_coll(fpath):
coll = obj.PFSVCollection()
coll.add_categories(components, fpath)
return coll
basedir = '/fastscratch/snarayan/pandaarrays/v1//PARTITION/'
colls = {
't' : make_coll(basedir + '/ZpTT_*_CATEGORY.npy'),
'q' : make_coll(basedir + '/QCD_*_CATEGORY.npy'),
}
'''
colls = {
# 't' : make_coll('/fastscratch/snarayan/baconarrays/v12_repro/PARTITION/ZprimeToTTJet_4_*_CATEGORY.npy'),
# 'q' : make_coll('/fastscratch/snarayan/baconarrays/v12_repro/PARTITION/QCD_0_*_CATEGORY.npy')
't' : make_coll('/fastscratch/snarayan/baconarrays/v13_repro/PARTITION/ZprimeToTTJet_3_*_CATEGORY.npy'),
# 'h' : make_coll('/fastscratch/snarayan/baconarrays/v13_repro/PARTITION/ZprimeToA0hToA0chichihbb_2_*_CATEGORY.npy'),
'q' : make_coll('/fastscratch/snarayan/baconarrays/v13_repro/PARTITION/QCD_1_*_CATEGORY.npy')
}
'''
# run DNN
def predict(data, model=0):
return data['panda_3_shallow']
def predict_conv(data, model):
#return data[APOSTLE+'_conv'][:,model]
return data[APOSTLE+'_conv']
f_vars = {
'tau32' : (lambda x : x['singletons'][:,obj.singletons['tau32']], np.arange(0,1.2,0.01), r'$\tau_{32}$'),
'tau21' : (lambda x : x['singletons'][:,obj.singletons['tau21']], np.arange(0,1.2,0.01), r'$\tau_{21}$'),
'tau32SD' : (lambda x : x['singletons'][:,obj.singletons['tau32SD']], np.arange(0,1.2,0.01), r'$\tau_{32}^\mathrm{SD}$'),
'tau21SD' : (lambda x : x['singletons'][:,obj.singletons['tau21SD']], np.arange(0,1.2,0.01), r'$\tau_{21}^\mathrm{SD}$'),
'partonM' : (lambda x : x['singletons'][:,obj.singletons['partonM']], np.arange(0,400,5), 'Parton mass [GeV]'),
'msd' : (lambda x : x['singletons'][:,obj.singletons['msd']], np.arange(0.,400.,20.), r'$m_\mathrm{SD}$ [GeV]'),
'pt' : (lambda x : x['singletons'][:,obj.singletons['pt']], np.arange(250.,1000.,50.), r'$p_\mathrm{T}$ [GeV]'),
'top_ecf_bdt' : (lambda x : x['singletons'][:,obj.singletons['top_ecf_bdt']], np.arange(-1.2,1,0.05), 'ECF classifier'),
'shallow_t' : (lambda x : predict(x, 0), np.arange(0,1.2,0.001), 'Shallow classifier'),
# 'shallow_h' : (lambda x : predict(x, 1), np.arange(0,1.2,0.001), 'Shallow classifier'),
'classifier_conv_t' : (lambda x : predict_conv(x, 0), np.arange(0,1.2,0.001), 'CLSTM'),
# 'classifier_conv_h' : (lambda x : predict_conv(x, 1), np.arange(0,1.2,0.001), 'CLSTM'),
'regularized_conv_t' : (lambda x : predict_conv(x, 1), np.arange(0,1.2,0.001), r'$m_\mathrm{SD}$-ecorr. CLSTM'),
# 'regularized_conv_h' : (lambda x : predict_conv(x, 3), np.arange(0,1.2,0.001), 'Decorrelated CLSTM'),
}
# f_vars2d = {
# 'correlation_reg' : (lambda x : (x['singletons'][:,obj.singletons['msd']], predict_conv(x, 2)),
# np.arange(40,400,10.),
# np. | arange(0,1,0.01)),
# 'correlation_class' : (lambda x : (x['singletons'][:,obj.singletons['msd']], predict_conv(x, 0)),
# np.arange(40,400,10.),
# np.arange(0,1,0.01)),
# }
# unmasked first
hists = {}
for k,v in colls.iteritems():
hists[k | ] = v.draw(components=components,
f_vars=f_vars,
n_batches=n_batches, partition=partition)
#hists2d['q']['correlation_reg'].scale()
#hists2d['q']['correlation_class'].scale()
#hists2d['q']['correlation_reg'].plot(xlabel=r'$m_{SD}$', ylabel='Regularized NN',
# output=OUTPUT+'correlation_reg', norm=utils.lognorm)
#hists2d['q']['correlation_class'].plot(xlabel=r'$m_{SD}$', ylabel='Classifier NN',
# output=OUTPUT+'correlation_class', norm=utils.lognorm)
for k in hists['t']:
ht = hists['t'][k]
hq = hists['q'][k]
# hh = hists['h'][k]
for h in [ht, hq]:
# for h in [ht, hq, hh]:
h.scale()
p.clear()
p.add_hist(ht, '3-prong top', 'r')
# p.add_hist(hh, '2-prong Higgs', 'b')
p.add_hist(hq, '1-prong QCD', 'k')
p.plot(output=OUTPUT+'unmasked_'+k, xlabel=f_vars[k][2])
r.clear()
r.add_vars(hists['t'],
hists['q'],
{'tau32':r'$\tau_{32}$', 'tau32SD':r'$\tau_{32}^\mathrm{SD}$',
'tau21':r'$\tau_{21}$', 'tau21SD':r'$\tau_{21}^\mathrm{SD}$',
'classifier_t':'classifier',
'regularized_t':'regularized', 'msd':r'$m_\mathrm{SD}$',
'top_ecf_bdt':'ECF classifier',
'classifier_conv_t':'CLSTM',
'regularized_conv_t':r'$m_\mathrm{SD}$-decorr. CLSTM',
'shallow_t':r'Shallow NN'},
)
r.plot(**{'output':OUTPUT+'unmasked_top_roc'})
# get the cuts
thresholds = [0, 0.5, 0.75, 0.9, 0.99, 0.999]
def sculpting(name, f_pred):
h = hists['q'][name]
tmp_hists = {t:{} for t in thresholds}
f_vars2d = {
'msd' : (lambda x : (x['singletons'][:,obj.singletons['msd']], f_pred(x)),
np.arange(40,400,20.),
np.arange(0,1,0.001)),
'pt' : (lambda x : (x['singletons'][:,obj.singletons['pt']], f_pred(x)),
np.arange(400,1000,50.),
np.arange(0,1,0.001)),
'partonM' : (lambda x : (x['singletons'][:,obj.singletons['partonM']], f_pred(x)),
np.arange(0,400,20.),
np.arange(0,1,0.001)),
}
h2d = colls['q'].draw(components=components,
f_vars={}, f_vars2d=f_vars2d,
n_batches=n_batches, partition=partition)
for t in thresholds:
cut = 0
for ib in xrange(h.bins.shape[0]):
frac = h.integral(lo=0, hi=ib) / h.integral()
if frac >= t:
cut = h.bins[ib]
break
print 'For classifier=%s, threshold=%.3f reached at cut=%.3f'%(name, t, cut )
for k,h2 in h2d.iteritems():
tmp_hists[t][k] = h2.project_onto_x(min_cut=cut)
colors = utils.pl.cm.tab10(np.linspace(0,1,len(thresholds)))
for k in tmp_hists[thresholds[0]]:
p.clear()
for i,t in enumerate(thresholds):
p.add_hist(tmp_hists[t][k], 'Acceptance=%.3f'%(1-t), colors[i])
p.plot(output=OUTPUT+name+'_progression_'+k, xlabel=f_vars[k][2], logy=True)
sculpting('regularized_conv_t', f_pred = lambda d : predict_conv(d, 1))
sculpting('shallow_t', f_pred = predict)
sculpting('classifier_conv_t', f_pred = lambda d : predict_conv(d, 0))
scuplting('tau32', f_pred = lambda x : x['singletons'][:,obj.singletons['tau32']])
# mask the top mass
def f_mask(data):
mass = data['singletons'][:,obj.singletons['msd']]
return (mass > 110) & (mass < 210)
hists = {}
for k,v in colls.iteritems():
hists[k] = v.draw(components=components,
f_vars=f_vars, n_batches=n_batches, partition=partition, f_mask=f_mask)
for k in hists['t']:
ht = hists['t'][k]
hq = hists['q'][k]
# hh = hists['h'][k]
for h in [ht, hq]:
h.scale()
p.clear()
p.add_hist(ht, '3-prong top', 'r')
# p.add_hist(hh, '3-prong Higgs', 'b')
p.add_hist(hq, '1-prong QCD', 'k')
p.plot(output=OUTPUT+'topmass_'+k, xlabel=f_vars[k][2])
r.clear()
r.add_vars(hists['t'],
hists['q'],
{'tau32':r'$\tau_{32}$', 'tau32SD':r'$\tau_{32}^\mathrm{SD}$',
'tau21':r'$\tau_{21}$', 'tau21SD':r'$\tau_{21}^\mathrm{SD}$',
'classifier_t':'classifier',
'regularized_t':'regularized', 'msd':r'$m_\mathrm{SD}$', |
stephanie-wang/ray | rllib/optimizers/aso_minibatch_buffer.py | Python | apache-2.0 | 1,622 | 0 | """Helper class for AsyncSamplesOptimizer."""
class MinibatchBuffer:
"""Ring buffer of recent data batches for minibatch SGD.
This is for use with AsyncSamplesOptimizer.
"""
def __init__(self, inqueue, size, timeout, num_passes, init_num_passes=1):
"""Initialize a minibatch buffer.
Arguments:
inqueue: Queue to populate the internal ring buffer from.
size: Max number of data items to buffer.
timeout: Queue timeout
num_passes: Max num times each data item should be emitted.
init_num_passes: Initial max passes for each data item
"""
self.inqueue = inqueue
self.size = size
self.timeout = timeout
self.max_ttl = num_passes
self.cur_max_ttl = init_num_passes
self.buffers = [None] * size
self.ttl = [0] * size
self.idx = 0
def get(self):
"""Get a new batch from the internal ring buffer.
Returns:
buf: Data item saved from inqueue.
released: True if the item is now removed from the ring buffer.
"""
if self.ttl[self.idx] <= 0:
self.buffers[self.idx] = self.inqueue.get(timeout=self.timeout)
self.ttl[self.idx] = self.cur_max_ttl
if self.cur_max_ttl < self.max_ttl:
self.cur_max_ttl += 1
buf = self.buffers[self.idx]
self.ttl[self.idx] -= 1
| released = self.ttl[self.idx] <= 0
if released:
self.buffers[self.idx] = None
self.idx = (self.idx + 1) % len(self.buffers)
return buf, re | leased
|
josephhardinee/PyDisdrometer | pydsd/tests/test_expfit.py | Python | lgpl-2.1 | 2,668 | 0.003373 | from ..utility import expfit
from unittest import TestCase
import numpy as np
class Test_expfit(TestCase):
""" Tests for the expfit module. """
def test_expfit_returns_correct_relationship(self):
"""
Test whether or not expfit can model a simple one variable exponential relationship.
"""
a = 2
b = 3
x = [1, 2, 3]
y = a * np.power(x, b)
fit = expfit.expfit(x, y)[0]
self.assertAlmostEqual(fit[0], a, 7, "Fit of Scale Parameter Failed for expfit")
self.assertAlmostEqual(
fit[1], b, 7, "Fit of Exponent Parameter Failed for expfit"
)
def test_expfit_handles_nan(self):
""" Test whether expfit correctly handles not a number in input array."""
a = 2
b = 3
x = [1, 2, 3, np.nan]
y = a * np.power(x, b)
fit = expfit.expfit(x, y)[0]
self.assertAlmostEqual(
fit[0], a, 7, "Fit of Scale Parameter Failed for expfit with nan data"
)
self.assertAlmostEqual(
fit[1], b, 7, "Fit of Exponent Parameter Failed for expfit with nan data"
)
def test_expfit2_returns_correct_relationship(self):
"""
Test whether or not expfit2 can model a simple two variable exponential relationship.
"""
a = 1.5
b = 2.5
c = 3.5
x1 = np.array([1, 2, 3, 4, 5])
x2 | = 2 * np.array([1, 3, 5, 7, 9])
y = a * np.power(x1, b) * np.power(x2, c)
fit = expfit.expfit2([x1, x2], y)[0]
self.assertAlmostEqual(fit[0], a, 7, "Fit of Scale Parameter Failed for expfit")
self.assertAlmostEqual(
fit[1], b, 7, "Fit of First Exponent Parameter Failed for expfit2"
)
self.assertAlmostEqual(
| fit[2], c, 7, "Fit of Second Exponent Parameter Failed for expfit2"
)
def test_expfit2_handles_nan(self):
"""
Test whether or not expfit2 can model a simple two variable exponential relationship in
the presence of nans.
"""
a = 1.5
b = 2.5
c = 3.5
x1 = np.array([1, 2, 3, np.nan, 5, 7, 9, 11, 12])
x2 = 2 * np.array([1, 3, 5, 7, np.nan, 9, 11, 12, 1])
y = a * np.power(x1, b) * np.power(x2, c)
fit = expfit.expfit2([x1, x2], y)[0]
self.assertAlmostEqual(fit[0], a, 7, "Fit of Scale Parameter Failed for expfit")
self.assertAlmostEqual(
fit[1], b, 7, "Fit of First Exponent Parameter Failed for expfit2"
)
self.assertAlmostEqual(
fit[2], c, 7, "Fit of Second Exponent Parameter Failed for expfit2"
)
|
tecnicatura-villa-el-libertador/CentroAsistencialH3 | CentroAsist/migrations/0007_profesional_user.py | Python | bsd-3-clause | 729 | 0.001372 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-10-29 23:47
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration( | migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('CentroAsist', '0006_auto_20161029_2341'),
]
operations = [
migrations.AddField(
model_name='profesional', |
name='user',
field=models.OneToOneField(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='profesional', to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
]
|
Black-Cog/BCnukeTools | core/__init__.py | Python | bsd-3-clause | 23 | 0 |
imp | ort renderm | anScene
|
jonmuckell/Motion-Tracking-Data-Parser | XYZ.py | Python | gpl-3.0 | 2,996 | 0.013365 | '''
Created on May 26, 2016
@author: Jonathan Muckell, Ph.D.
@license: GNU General Public License v3.0
Users are encouraged to use, modify and extend this work under the GNU GPLv3 license.
Please cite the following paper to provide credit to this work:
Jonathan Muckell, Yuchi Young, and Mitch Leventhal. 2017.
A Wearable Motion Tracking System to Reduce Direct Care Worker Injuries: An Exploratory Study.
In Proceedings of DH ’17, London, United Kingdom, July 02-05, 2017, 5 pages.
DOI: hp://dx.doi.org/10.1145/3079452.3079493
-------------
'''
import math
class XYZ:
def __init__(self,l):
self.x = float(l[0])
self.y = float(l[1])
self.z = float(l[2])
@staticmethod
def getDistanceBetweenPoints(p1, p2):
distance = math.sqrt( (p1.x - p2.x)**2 + (p1.y - p2.y)**2 + (p1.z - p2.z)**2 )
return distance
# https://www.mathsisfun.com/algebra/trig-cosine-law.html
@staticmethod
def getAngleOfTriangle(a,b,c):
#math.acos(0) # returns in radians
numerator = c**2 - a**2 - b**2
denominator = -2*a*b
# print("numerator: ",numerator)
# print("denominator: ",denominator)
radians = math.acos(numerator / denominator)
degrees = math.degrees(radians)
#print("degrees",degrees)
return degrees
@staticmethod
def getVector(pt1, pt2):
# FROM: http://tutorial.math.lamar.edu/Classes/CalcII/Vectors_Basics.aspx
x = pt2.x - pt1.x
y = pt2.y - pt1.y
z = pt2.z - pt1.z
vector = XYZ([x, y, z])
# print("get Vector = ", x, " | ", y, " | ", z)
return vector
@staticmethod
def crossPro | duct(a, b):
# Equation from # http://tutorial.math.lamar.edu/Classes/CalcII/CrossProduct.aspx#Vectors_CrossProd_Ex2
x = (a.y * b.z) - (a.z * b.y)
y = (a.z * b.x) - (a.x * b.z)
z = (a.x * b.y) - (a.y * b.x)
vector = XYZ([x, y, z])
# print("cross product = ", x, " | ", y, " | ", z)
return vector
@staticmethod
def getPlaneNormal(P, | Q,R):
# http://tutorial.math.lamar.edu/Classes/CalcIII/EqnsOfPlanes.aspx
PQ = XYZ.getVector(P,Q)
PR = XYZ.getVector(P,R)
normal = XYZ.crossProduct(PQ, PR)
return normal
@staticmethod
def getMidpoint(pt1, pt2):
x = (pt1.x + pt2.x) / 2
y = (pt1.y + pt2.y) / 2
z = (pt1.z + pt2.z) / 2
return XYZ([x, y, z])
@staticmethod
def testNormal():
P = XYZ([1, -2, 0])
Q = XYZ([3, 1, 4])
R = XYZ([0, -1, 2])
normal = XYZ.getPlaneNormal(P, Q, R)
# print("normal = ", normal.x, " | ", normal.y, " | ", normal.z)
a = XYZ([2, 1, -1])
b = XYZ([-3, 4, 1])
XYZ.crossProduct(a, b)
XYZ.testNormal()
|
rackerlabs/sloggingo | test_slogging/unit/test_compressing_file_reader.py | Python | apache-2.0 | 1,258 | 0 | # Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in co | mpliance with the License.
# You | may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.common.compressing_file_reader """
import unittest
import cStringIO
from slogging.compressing_file_reader import CompressingFileReader
class TestCompressingFileReader(unittest.TestCase):
def test_read(self):
plain = 'obj\ndata'
s = cStringIO.StringIO(plain)
expected = '\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\xcaO\xca\xe2JI,'\
'I\x04\x00\x00\x00\xff\xff\x03\x00P(\xa8\x1f\x08\x00\x00'\
'\x00'
x = CompressingFileReader(s)
compressed = ''.join(iter(lambda: x.read(), ''))
self.assertEquals(compressed, expected)
self.assertEquals(x.read(), '')
|
dbbhattacharya/kitsune | vendor/packages/translate-toolkit/translate/filters/helpers.py | Python | bsd-3-clause | 2,818 | 0.007097 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2004-2006 Zuza Software Foundation
#
# This | file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your optio | n) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""a set of helper functions for filters..."""
import operator
def countmatch(str1, str2, countstr):
"""checks whether countstr occurs the same number of times in str1 and str2"""
return str1.count(countstr) == str2.count(countstr)
def funcmatch(str1, str2, func, *args):
"""returns whether the result of func is the same for str1 and str2"""
return func(str1, *args) == func(str2, *args)
def countsmatch(str1, str2, countlist):
"""checks whether each element in countlist occurs the same number of times in str1 and str2"""
return reduce(operator.and_, [countmatch(str1, str2, countstr) for countstr in countlist], True)
def funcsmatch(str1, str2, funclist):
"""checks whether the results of each func in funclist match for str1 and str2"""
return reduce(operator.and_, [funcmatch(str1, str2, funcstr) for funcstr in funclist], True)
def filtercount(str1, func):
"""returns the number of characters in str1 that pass func"""
return len(filter(func, str1))
def filtertestmethod(testmethod, strfilter):
"""returns a version of the testmethod that operates on filtered strings using strfilter"""
def filteredmethod(str1, str2):
return testmethod(strfilter(str1), strfilter(str2))
filteredmethod.__doc__ = testmethod.__doc__
filteredmethod.name = getattr(testmethod, 'name', testmethod.__name__)
return filteredmethod
def multifilter(str1, strfilters, *args):
"""passes str1 through a list of filters"""
for strfilter in strfilters:
str1 = strfilter(str1, *args)
return str1
def multifiltertestmethod(testmethod, strfilters):
"""returns a version of the testmethod that operates on filtered strings using strfilter"""
def filteredmethod(str1, str2):
return testmethod(multifilter(str1, strfilters), multifilter(str2, strfilters))
filteredmethod.__doc__ = testmethod.__doc__
filteredmethod.name = getattr(testmethod, 'name', testmethod.__name__)
return filteredmethod
|
RNAcentral/rnacentral-webcode | rnacentral/portal/rfam_matches.py | Python | apache-2.0 | 14,300 | 0.00021 | """
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This module contains code to detect certain types of problems with Rfam matches
to sequences. That is it can find if the sequence and Rfam domain conflict, or
if the sequence is only a partial sequence
"""
import json
import six
from django.urls import reverse
import attr
from attr.validators import instance_of as is_a
from portal.models import Accession
from portal.models.rfam import RfamModel
@attr.s()
class RfamMatchStatus(object):
"""
This represents implied problems from a match between an Rfam family and an
Rna sequence. Problems are detected by various objects and this simply
records which ones have found issues as well as some data about the issues.
This serves as a simple way to organize many possible issues that could be
detected.
"""
has_issue = attr.ib(validator=is_a(bool))
upi = attr.ib(validator=is_a(six.string_types))
taxid = attr.ib()
finders = attr.ib(validator=is_a(list))
messages = attr.ib(validator=is_a(list))
@classmethod
def with_issue(cls, upi, taxid, finder, msg):
"""
Create a new instance that indicates that the given finder has found an
issue specified in the given message.
"""
return cls(has_issue=True, upi=upi, taxid=taxid, finders=[finder], messages=[msg])
@classmethod
def no_issues(cls, upi, taxid):
"""
Create a new instance that indicates there are no issues.
"""
return cls(has_issue=False, upi=upi, taxid=taxid, finders=[], messages=[])
@property
def names(self):
"""
Get the names of all finders that have found issues.
"""
return sorted([finder.name for finder in self.finders])
def merge(self, status):
"""
Merge the given status with this one. This will update the issues found
if any.
"""
if status.upi != self.upi and self.taxid == status.taxid:
raise ValueError("Can only merge MatchStatus from the same RNA.")
self.finders.extend(status.finders)
self.messages.extend(status.messages)
self.has_issue = (self.has_issue or status.has_issue)
return self
def as_simple_data(self):
"""
Create a simplified dict representation of this data. This is useful
for storage.
"""
return {
'has_issue': self.has_issue,
'problems': [{'name': n, 'message': self.messages[i]} for i, n in enumerate(self.names)],
}
def | as_json(self):
"""
Create a JSON representation of the simplified data.
"""
return json.dumps(self.as_simple_data())
class DomainProblem(object):
"""
This detects if there is a mismatch between the domains of the matched
models and the sequence that has been run. For example if a bacterial model
only matches a mouse sequence then t | here is some sort of problem, likely
contamination, with the sequence.
"""
name = 'possible_contamination'
def message(self, model, rna, taxid=None):
"""
Get a message that indicates a problem.
"""
names = ''
is_common_name = False
if taxid is None:
names = ', '.join(rna.get_domains())
else:
names, is_common_name = rna.get_organism_name(taxid=taxid)
if not is_common_name:
names = '<i>%s</i>' % names
return (
'This {sequence_name} sequence matches a {match_domain} '
'Rfam model (<a href="{model_url}">{model_name}</a>). '
'<a href="{help_url}">Learn more →</a>'.format(
sequence_name=names,
match_domain=model.domain,
model_url=model.url,
model_name=model.short_name,
help_url=reverse('help-qc'),
)
)
def is_ignorable_mito_conflict(self, rna, hits, taxid=None):
"""
This can ignore any conflict where the sequence probably comes from a
mitochondria but it matches a bacterial rRNA. In that case we do not
warn since this is expected from evolution.
"""
has_mito_organelle = bool(Accession.objects.filter(
xrefs__upi=rna.upi,
xrefs__taxid=taxid,
organelle__istartswith='mitochondrion',
).count())
possible_mito = has_mito_organelle or \
'mitochondri' in rna.get_description(taxid=taxid).lower()
return possible_mito and \
rna.get_rna_type(taxid=taxid) == 'rRNA' and \
hits[0].rfam_model_id in set([
'RF00177', # Bacterial small subunit ribosomal RNA
'RF02541', # Bacterial large subunit ribosomal RNA
'RF01959', # Archaeal small subunit ribosomal RNA
'RF02540', # Archaeal large subunit ribosomal RNA
])
def __call__(self, rna, taxid=None):
hits = rna.get_rfam_hits()
if not hits or len(hits) > 1:
return RfamMatchStatus.no_issues(rna.upi, taxid)
model = hits[0].rfam_model
found = model.domain
if not found:
return RfamMatchStatus.no_issues(rna.upi, taxid)
rna_domains = rna.get_domains(
taxid=taxid,
ignore_synthetic=True,
ignore_unclassified=True
)
if not rna_domains:
return RfamMatchStatus.no_issues(rna.upi, taxid)
if found not in rna_domains and \
not self.is_ignorable_mito_conflict(rna, hits, taxid=taxid):
msg = self.message(model, rna, taxid=taxid)
return RfamMatchStatus.with_issue(rna.upi, taxid, self, msg)
return RfamMatchStatus.no_issues(rna.upi, taxid)
class IncompleteSequence(object):
"""
This checks if a sequence is considered incomplete according to it's
Rfam match. This is detected if the at least 90% sequence matches the
model but less 50% of the model is matched by the sequence. In
addition, we require that it only have one match. This will only work for
hits that are part of a selected set of families.
"""
name = 'incomplete_sequence'
def message(self, hit):
"""
Get a message that indicates a problem.
"""
return 'Potential <a href="{url}">{name}</a> fragment'.format(
name=hit.rfam_model.long_name,
url=hit.rfam_model.url
)
def allowed_families(self):
"""
Get the set of families we will check for incomplete sequences. We
don't want to do all families yet, as we aren't sure if this will
be too senestive. The selected families are well known for having
partial sequences.
"""
return set([
'RF00001', # 5S ribosomal RNA
'RF00002', # 5.8S ribosomal RNA
'RF00005', # tRNA
'RF00177', # Bacterial small subunit ribosomal RNA
'RF01959', # Archaeal small subunit ribosomal RNA
'RF01960', # Eukaryotic small subunit ribosomal RNA
'RF02540', # Archaeal large subunit ribosomal RNA
'RF02541', # Bacterial large subunit ribosomal RNA
'RF02542', # Microsporidia small subunit ribosomal RNA
'RF02543', # Eukaryotic large subunit ribosomal RNA
])
def __call__(self, rna, taxid=None):
hits = rna.get_rfam_hits()
if len(hits) != 1:
return RfamMatchStatus.no_issues(rna.upi, taxid)
if hits[0].rfam_model_id not in self.a |
alisaifee/flask-limiter | tests/test_flask_ext.py | Python | mit | 20,218 | 0.000692 | """
"""
import logging
import time
import hiro
import mock
from flask import Flask, request
from werkzeug.exceptions import BadRequest
from flask_limiter.extension import C, Limiter
from flask_limiter.util import get_remote_address
def test_reset(extension_factory):
    """Limiter.reset() wipes all stored hit counts."""
    app, limiter = extension_factory({C.DEFAULT_LIMITS: "1 per day"})

    @app.route("/")
    def null():
        return "Hello Reset"

    with app.test_client() as cli:
        # Use up the single daily request and confirm the breach message.
        cli.get("/")
        breach = cli.get("/").data.decode()
        assert "1 per 1 day" in breach
        # After a reset the endpoint responds normally again ...
        limiter.reset()
        assert cli.get("/").data.decode() == "Hello Reset"
        # ... and the daily limit is enforced afresh.
        assert "1 per 1 day" in cli.get("/").data.decode()
def test_reset_unsupported(extension_factory, memcached_connection):
    # Memcached cannot enumerate its keys, so Limiter.reset() is a documented
    # no-op for that backend; it must not raise.
    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per day", C.STORAGE_URI: "memcached://localhost:31211"}
    )
    @app.route("/")
    def null():
        return "Hello Reset"
    with app.test_client() as cli:
        # Exhaust the daily limit and verify the breach response.
        cli.get("/")
        assert "1 per 1 day" in cli.get("/").data.decode()
        # no op with memcached but no error raised
        limiter.reset()
        # The limit is still in effect because nothing was cleared.
        assert "1 per 1 day" in cli.get("/").data.decode()
def test_combined_rate_limits(extension_factory):
    """Route-level limits replace the combined application defaults."""
    app, limiter = extension_factory({C.DEFAULT_LIMITS: "1 per hour; 10 per day"})

    @app.route("/t1")
    @limiter.limit("100 per hour;10/minute")
    def t1():
        return "t1"

    @app.route("/t2")
    def t2():
        return "t2"

    with hiro.Timeline().freeze(), app.test_client() as cli:
        # /t1 uses its own (generous) decorated limits.
        assert cli.get("/t1").status_code == 200
        # /t2 falls back to the defaults and breaches on the second hit.
        assert cli.get("/t2").status_code == 200
        assert cli.get("/t2").status_code == 429
def test_defaults_per_method(extension_factory):
    """With per-method defaults, GET and POST are limited independently."""
    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per hour", C.DEFAULT_LIMITS_PER_METHOD: True}
    )

    @app.route("/t1", methods=["GET", "POST"])
    def t1():
        return "t1"

    with hiro.Timeline().freeze(), app.test_client() as cli:
        # Each HTTP method gets its own one-per-hour bucket.
        for do_request in (cli.get, cli.post):
            assert do_request("/t1").status_code == 200
            assert do_request("/t1").status_code == 429
def test_default_limit_with_exemption(extension_factory):
    # Requests carrying the backdoor header bypass the default limit entirely.
    def is_backdoor():
        return request.headers.get("backdoor") == "true"
    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per hour", C.DEFAULT_LIMITS_EXEMPT_WHEN: is_backdoor}
    )
    @app.route("/t1")
    def t1():
        return "test"
    with hiro.Timeline() as timeline:
        with app.test_client() as cli:
            # Exempt requests never consume the limit.
            assert cli.get("/t1", headers={"backdoor": "true"}).status_code == 200
            assert cli.get("/t1", headers={"backdoor": "true"}).status_code == 200
            # Non-exempt requests still get exactly one per hour ...
            assert cli.get("/t1").status_code == 200
            assert cli.get("/t1").status_code == 429
            # ... replenished once the hour elapses.
            timeline.forward(3600)
            assert cli.get("/t1").status_code == 200
def test_default_limit_with_conditional_deduction(extension_factory):
    # Only failed (non-200) responses are deducted from the default limit.
    def failed_request(response):
        return response.status_code != 200
    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per hour", C.DEFAULT_LIMITS_DEDUCT_WHEN: failed_request}
    )
    @app.route("/t1/<path:path>")
    def t1(path):
        if path != "1":
            raise BadRequest()
        return path
    with hiro.Timeline() as timeline:
        with app.test_client() as cli:
            # Successful requests are free of charge.
            assert cli.get("/t1/1").status_code == 200
            assert cli.get("/t1/1").status_code == 200
            # The first failure consumes the single hourly slot ...
            assert cli.get("/t1/2").status_code == 400
            # ... after which every request is throttled.
            assert cli.get("/t1/1").status_code == 429
            assert cli.get("/t1/2").status_code == 429
            # The window resets an hour later.
            timeline.forward(3600)
            assert cli.get("/t1/1").status_code == 200
            assert cli.get("/t1/2").status_code == 400
def test_key_func(extension_factory):
    """An explicit key function overrides address-based bucketing."""
    app, limiter = extension_factory()

    @app.route("/t1")
    @limiter.limit("100 per minute", lambda: "test")
    def t1():
        return "test"

    with hiro.Timeline().freeze(), app.test_client() as cli:
        # The static key lumps every client together, so the forwarded
        # address is irrelevant: 100 requests pass, the 101st breaches.
        spoofed = {"X_FORWARDED_FOR": "127.0.0.2"}
        for _ in range(100):
            assert cli.get("/t1", headers=spoofed).status_code == 200
        assert cli.get("/t1").status_code == 429
def test_logging(caplog):
    """Breaching a limit logs exactly one WARNING record."""
    app = Flask(__name__)
    limiter = Limiter(app, key_func=get_remote_address)

    @app.route("/t1")
    @limiter.limit("1/minute")
    def t1():
        return "test"

    with app.test_client() as cli:
        assert cli.get("/t1").status_code == 200
        assert cli.get("/t1").status_code == 429
    assert len(caplog.records) == 1
    assert caplog.records[0].levelname == "WARNING"
def test_reuse_logging():
    # The limiter's logger can share handlers with the application logger;
    # a breach is then reported through the app's handler exactly once.
    app = Flask(__name__)
    app_handler = mock.Mock()
    app_handler.level = logging.INFO
    app.logger.addHandler(app_handler)
    limiter = Limiter(app, key_func=get_remote_address)
    for handler in app.logger.handlers:
        limiter.logger.addHandler(handler)
    @app.route("/t1")
    @limiter.limit("1/minute")
    def t1():
        return "42"
    with app.test_client() as cli:
        cli.get("/t1")
        cli.get("/t1")
    # Only the second (throttled) request should have produced a log record.
    assert app_handler.handle.call_count == 1
def test_disabled_flag(extension_factory):
    """When the extension is disabled, no limit is ever enforced."""
    app, limiter = extension_factory(
        config={C.ENABLED: False}, default_limits=["1/minute"]
    )

    @app.route("/t1")
    def t1():
        return "test"

    @app.route("/t2")
    @limiter.limit("10 per minute")
    def t2():
        return "test"

    with app.test_client() as cli:
        # Default limit (1/minute) is ignored.
        assert cli.get("/t1").status_code == 200
        assert cli.get("/t1").status_code == 200
        # Decorated limit (10/minute) is ignored too: 12 requests all pass.
        for _ in range(12):
            assert cli.get("/t2").status_code == 200
def test_multiple_apps():
    """One Limiter instance can be shared between several Flask apps, with
    limits tracked per route registration on each app.

    Note: lines in this block were corrupted by extraction artifacts
    (``app2.test | _client`` / ``sta | tus_code``) and are reconstructed here.
    """
    app1 = Flask(__name__)
    app2 = Flask(__name__)
    limiter = Limiter(default_limits=["1/second"], key_func=get_remote_address)
    limiter.init_app(app1)
    limiter.init_app(app2)

    @app1.route("/ping")
    def ping():
        return "PONG"

    @app1.route("/slowping")
    @limiter.limit("1/minute")
    def slow_ping():
        return "PONG"

    @app2.route("/ping")
    @limiter.limit("2/second")
    def ping_2():
        return "PONG"

    @app2.route("/slowping")
    @limiter.limit("2/minute")
    def slow_ping_2():
        return "PONG"

    with hiro.Timeline().freeze() as timeline:
        with app1.test_client() as cli:
            # app1 /ping uses the 1/second default.
            assert cli.get("/ping").status_code == 200
            assert cli.get("/ping").status_code == 429
            timeline.forward(1)
            assert cli.get("/ping").status_code == 200
            assert cli.get("/slowping").status_code == 200
            timeline.forward(59)
            assert cli.get("/slowping").status_code == 429
            timeline.forward(1)
            assert cli.get("/slowping").status_code == 200
        with app2.test_client() as cli:
            # app2 /ping allows two requests per second.
            assert cli.get("/ping").status_code == 200
            assert cli.get("/ping").status_code == 200
            assert cli.get("/ping").status_code == 429
            timeline.forward(1)
            assert cli.get("/ping").status_code == 200
            assert cli.get("/slowping").status_code == 200
            timeline.forward(59)
            assert cli.get("/slowping").status_code == 200
            assert cli.get("/slowping").status_code == 429
            timeline.forward(1)
            assert cli.get("/slowping").status_code == 200
def test_headers_no_breach():
app = Flask(__name__)
limiter = Limiter(
app,
default_limits=["10/minute"],
headers_enabled=True,
key_func=get_remote_address,
)
@app.route("/t1")
def t1():
return "test"
@app.route("/t2")
@limiter.limit("2/second; 5 per minute; 10/hour")
def t2():
return "test"
with hiro.Timeline().freeze():
with app.test_client() as cli:
resp = c |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/aio/operations/_express_route_gateways_operations.py | Python | mit | 22,493 | 0.005202 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteGatewaysOperations:
"""ExpressRouteGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Store the shared pipeline client, configuration and (de)serializers
        # supplied by the service client that instantiates this group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def list_by_subscription(
        self,
        **kwargs: Any
    ) -> "_models.ExpressRouteGatewayList":
        """Lists ExpressRoute gateways under a given subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteGatewayList, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_02_01.models.ExpressRouteGatewayList
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteGatewayList"]
        # Map common failure statuses to the standard azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        # Construct URL
        url = self.list_by_subscription.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        # Single (non-paged) GET; the service returns the full gateway list.
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ExpressRouteGatewayList', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteGateways'}  # type: ignore
    async def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs: Any
    ) -> "_models.ExpressRouteGatewayList":
        """Lists ExpressRoute gateways in a given resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteGatewayList, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_02_01.models.ExpressRouteGatewayList
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteGatewayList"]
        # Map common failure statuses to the standard azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        # Construct URL
        url = self.list_by_resource_group.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        # Single (non-paged) GET; the service returns the full gateway list.
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ExpressRouteGatewayList', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways'}  # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
express_route_gateway_name: str,
put_express_route_gateway_parameters: "_models.ExpressRouteGateway",
**kwargs: Any
) -> "_models.ExpressRouteGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "applicati | on/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._s | erialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_argume |
Hasimir/pyjs | examples/jsobject/TestRect.py | Python | apache-2.0 | 1,626 | 0.005535 | from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui.TextBox import TextBox
from pyjamas.ui.HTML import HTML
from pyjamas.ui.Button import Button
from __pyjamas__ import JS
class Rect:
    """Thin Python wrapper around a native JavaScript ``rectobj`` instance.
    pyjamas' ``JS()`` splices raw JavaScript into the compiled output, so the
    constructor string below must stay exactly as written."""
    def __init__(self, x, y):
        JS("""@{{self}}.rect = new @{{!rectobj}}();""")
        self.rect.init(x, y)
    def add(self, r):
        # Delegate to the underlying JS object; note it unwraps r.rect.
        self.rect.add(r.rect)
    def area(self):
        return self.rect.area()
    def get_x(self):
        return self.rect.x
    def get_y(self):
        return self.rect.y
class TestRect:
    def onModuleLoad(self):
        # Build the demo UI: two coordinate inputs, an "add" button, and the
        # current rectangle state rendered as HTML at the page root.
        self.r = Rect(0.0, 0.0)
        self.xbox = TextBox()
        self.ybox = TextBox()
        self.addbutton = Button("Click to add x and y to Rectangle")
        # This object handles its own button clicks (see onClick below).
        self.addbutton.addClickListener(self)
        self.xbox.setText("2")
        self.ybox.setText("5")
        RootPanel().add(HTML("X Value:"))
        RootPanel().add(self.xbox)
        RootPanel().add(HTML("Y Value:"))
        RootPanel().add(self.ybox)
        RootPanel().add(self.addbutton)
        RootPanel().add(HTML("Current value: %d %d" % ( self.r.get_x(), self.r.get_y())))
def onClick(self, sender):
x = int(self.xbox.getText())
y = int(self.ybox.getText())
r = Rect(x, y)
self.r.add(r)
RootPanel().add(HTML("New value: %d" % ( self.r.get_x())))
RootPanel().add(HTML("New value: %d" % ( self.r.get_y())))
RootPanel().add(HTML("New value: %d %d" % ( self.r.get_x(), self.r.get_y()) | ))
Roo | tPanel().add(HTML("New Area: %d" % self.r.area()))
if __name__ == '__main__':
    # Entry point when run directly (e.g. under pyjd for desktop testing).
    app = TestRect()
    app.onModuleLoad()
|
wuub/python_lcd | lcd/pyb_gpio_lcd_test8.py | Python | mit | 2,352 | 0.00085 | """Implements a character based lcd connected via PCF8574 on i2c."""
from pyb import Pin
from pyb import delay, millis
from pyb_gpio_lcd import GpioLcd
# Wiring used for this example:
#
# 1 - Vss (aka Ground) - Connect to one of the ground pins on you pyboard.
# 2 - VDD - I connected to VIN which is 5 volts when your pyboard is powerd vi USB
# 3 - VE (Contrast voltage) - I'll discuss this below
# 4 - RS (Register Select) connect to Y12 (as per call to GpioLcd)
# 5 - RW (Read/Write) - connect to ground
# 6 - EN (Enable) connect to Y11 (as per call to GpioLcd)
# 7 - D0 - connect to Y1 (as per call to GpioLcd)
# 8 - D1 - connect to Y2 (as per call to GpioLcd)
# 9 - D2 - connect to Y3 (as per call to GpioLcd)
# 10 - D3 - connect to Y4 (as per call to GpioLcd)
# 11 - D4 - connect to Y5 (as per call to GpioLcd)
# 12 - D5 - connect to Y6 (as per call to GpioLcd)
# 13 - D6 - connect to Y7 (as per call to GpioLcd)
# 14 - D7 - connect to Y8 (as per call to GpioLcd)
# 15 - A (BackLight Anode) - Connect to VIN
# 16 - K (Backlight Cathode) - Connect to Ground
#
# On 14-pin LCDs, there is no backlight, so pins 15 & 16 don't exist.
#
# The Contrast line (pin 3) typically connects to the center tap of a
# 10K potentiometer, and the other 2 legs of the 10K potentiometer are
# connected to pins 1 and 2 (Ground and VDD)
#
# The wiring diagram on the followig page shows a typical "base" wiring:
# http://www.instructables.com/id/How-to-drive-a-character-LCD-displays-using-DIP-sw/step2/HD44780-pinout/
# Add to that the EN, RS, and D0-D7 lines.
def test_main():
    """Test function for verifying basic functionality.

    Drives a 4x20 character LCD wired to the pyboard in 8-bit mode
    (all of D0-D7 are connected; see the wiring table above).
    Runs forever, so it is meant to be interrupted manually.
    """
    print("Running test_main")
    lcd = GpioLcd(rs_pin=Pin.board.Y12,
                  enable_pin=Pin.board.Y11,
                  d0_pin=Pin.board.Y1,
                  d1_pin=Pin.board.Y2,
                  d2_pin=Pin.board.Y3,
                  d3_pin=Pin.board.Y4,
                  d4_pin=Pin.board.Y5,
                  d5_pin=Pin.board.Y6,
                  d6_pin=Pin.board.Y7,
                  d7_pin=Pin.board.Y8,
                  num_lines=4, num_columns=20)
    lcd.putstr("It Works!\nSecond Line\nThird Line\nFourth Line")
    delay(3000)
    lcd.clear()
    count = 0
    # Show the uptime (in seconds) refreshed once per second, forever.
    while True:
        lcd.move_to(0, 0)
        lcd.putstr("%7d" % (millis() // 1000))
        delay(1000)
        count += 1  # NOTE(review): count is never read — confirm it is needed
|
Alshak/clowdflows | workflows/cforange/interaction_views.py | Python | mit | 5,590 | 0.013775 | from django.shortcuts import render
import json
def cforange_filter_integers(request, input_dict, output_dict, widget):
    """Render the integer-filtering interaction view for a widget."""
    context = {'widget': widget, 'intList': input_dict['intList']}
    return render(request, 'interactions/cforange_filter_integers.html', context)
def cforange_hierarchical_clustering(request,input_dict,output_dict,widget):
import Orange, orange, sys
from library import Clustering
sys.setrecursionlimit(10000)
| ##try:
#iris = Orange.data.Table("vehicle")
##iris = Orange.data.Table("iris")
#print len(iris)
#m = Orange.misc.SymMatrix(len(iris))
#m = Orange.distance.distance_matrix(iris, Orange.distance.Euclidean)
#matrix = m
######################
matrix = input_dic | t['dm']
#print matrix
linkage = int(input_dict['linkage'])
root = Clustering.hierarchical_clustering(linkage, matrix)
dm_examples = True
######################
try:
#attributes = [x.name for x in matrix.items.domain]
attributes = [x.name for x in matrix.items.domain] + [m.name for m in matrix.items.domain.getmetas().values()]
except:
attributes = ['attribute']
dm_examples = False
"""
def build_hierarchy(node, root=False):
if dm_examples:
values_dict = dict([(x,matrix.items[node.first][x].value) for x in attributes]) if node.branches != None and (not node.branches) else {}
else:
try:
values_dict = dict([(x,matrix.items[node.first].name) for x in attributes]) if node.branches != None and (not node.branches) else {}
except AttributeError as e:
print e
print sys.exc_info()[0]
for attribute in values_dict.keys():
if type(values_dict[attribute]) == float:
values_dict[attribute]="%.3f" % values_dict[attribute]
try:
ret = {
'name' : 'root' if root else '',
'id' : node.first if not node.branches else -1,
'height' : node.height if node.branches else 0,
'children' : [build_hierarchy(node.left), build_hierarchy(node.right)] if node.branches else [],
'values' : values_dict,
'leaf' : True if (not node.branches is None) and not node.branches else False
}
print "returning"
except:
print sys.exc_info()[0]
ret = {}
return ret
hierarchy = json.dumps(build_hierarchy(root, root=True))
"""
def build_hierarchy2(node, position, root=False):
if dm_examples:
values_dict = dict([(x,matrix.items[node.first][x].value) for x in attributes]) if node.branches != None and (not node.branches) else {}
else:
try:
values_dict = dict([(x,matrix.items[node.first].name) for x in attributes]) if node.branches != None and (not node.branches) else {}
except AttributeError as e:
print e, sys.exc_info()[0]
for attribute in values_dict.keys():
if type(values_dict[attribute]) == float:
values_dict[attribute]="%.3f" % values_dict[attribute]
ret = {
'name' : 'root' if root else '',
'id' : node.first if not node.branches else -1,
'height' : node.height if node.branches else 0,
#'children' : [build_hierarchy(node.left), build_hierarchy(node.right)] if node.branches else [],
'parent' : position,
'children' : [position+1, position+2] if node.branches else [],
'values' : values_dict,
'leaf' : True if (node.branches is None) else False
}
#print ret
return ret
hierarchy = [build_hierarchy2(root, 0, root=True)] #json.dumps(build_hierarchy(root, root=True))
stack = [root.left, root.right];
#i = 2 # position in the hierarchy
while stack != []:
node = stack.pop(0)
#print node, node.branches
if node.branches:
# inseatd of saving tree in breadth first manner
# we are going to save it in dynamic manner,(we won't save empty nodes)
# current nodes children are in position (len(hierarchy)+len(stack)+1))+1, left child
# and (len(hierarchy)+len(stack)+1))+2, right child
hierarchy.append(build_hierarchy2(node, len(hierarchy)+len(stack)))
stack.append(node.left)
stack.append(node.right)
else:
# if not, it has an empty list
hierarchy.append(build_hierarchy2(node, len(hierarchy)+len(stack)))
hierarchy[0]['parent'] = -1;
for e in xrange(len(hierarchy)):
print hierarchy[e]
if hierarchy[e]['children'] != []:
hierarchy[hierarchy[e]['children'][0]]['parent'] = e;
hierarchy[hierarchy[e]['children'][1]]['parent'] = e;
hierarchy = json.dumps(hierarchy)
return render(request, 'interactions/cforange_hierarchical_clustering.html', {'widget' : widget, 'hierarchy' : hierarchy, 'attributes':attributes,'vizualization':input_dict['visualization']})
def filter_table(request, input_dict, output_dict, widget):
    """Render the interactive table-filtering view for an Orange data table."""
    from workflows.visualization_views import orng_table_to_dict
    context = {
        'widget': widget,
        'input_dict': input_dict,
        'output_dict': orng_table_to_dict(input_dict['data']),
    }
    return render(request, 'interactions/cforange_filter_table.html', context)
|
pombredanne/anvil | anvil/ini_parser.py | Python | apache-2.0 | 9,960 | 0.000502 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ConfigParser
from ConfigParser import NoOptionError
from ConfigParser import NoSectionError
from StringIO import StringIO
import iniparse
import re
from anvil import log as logging
from anvil import utils
from iniparse import ini
LOG = logging.getLogger(__name__)
class StringiferMixin(object):
    """Mixin adding string serialization on top of a ConfigParser-style
    ``write(fp)`` method."""
    def __init__(self):
        pass
    def stringify(self, fn=None):
        """Render this parser's contents, prefixed with the standard anvil
        header for file *fn*, and return the result as a string."""
        buf = StringIO()
        self.write(buf)
        return utils.add_header(fn, buf.getvalue())
class ConfigHelperMixin(object):
    """Mixin that softens ConfigParser semantics: typed getters return a
    default instead of raising for missing sections/options, and values can
    optionally be written out as ``$(TEMPLATE)`` placeholders."""
    # Defaults returned by the typed getters when an option is absent.
    DEF_INT = 0
    DEF_FLOAT = 0.0
    DEF_BOOLEAN = False
    DEF_BASE = None
    def __init__(self, templatize_values=False):
        # When true, set() stores a template placeholder instead of the value.
        self.templatize_values = templatize_values
    def get(self, section, option):
        # Missing section or option yields DEF_BASE rather than an exception.
        value = self.DEF_BASE
        try:
            value = super(ConfigHelperMixin, self).get(section, option)
        except NoSectionError:
            pass
        except NoOptionError:
            pass
        return value
    def _template_value(self, option, value):
        # Turn "my-option" into "$(MY_OPTION) # <escaped original value>".
        if not self.templatize_values:
            return value
        tpl_value = StringIO()
        safe_value = str(option)
        # Replace characters that are not valid in a template variable name.
        for c in ['-', ' ', '\t', ':', '$', '%', '(', ')']:
            safe_value = safe_value.replace(c, '_')
        tpl_value.write("$(%s)" % (safe_value.upper().strip()))
        # Keep the original value visible as a trailing comment.
        comment_value = str(value).strip().encode('string_escape')
        for c in ['(', ')', '$']:
            comment_value = comment_value.replace(c, '')
        comment_value = comment_value.strip()
        tpl_value.write(" # %s" % (comment_value))
        return tpl_value.getvalue()
    def set(self, section, option, value):
        # Auto-create missing sections ('DEFAULT' is implicit and never added).
        if not self.has_section(section) and section.lower() != 'default':
            self.add_section(section)
        value = self._template_value(option, value)
        super(ConfigHelperMixin, self).set(section, option, value)
    def remove_option(self, section, option):
        # Silently ignore removal of options that do not exist.
        if self.has_option(section, option):
            super(ConfigHelperMixin, self).remove_option(section, option)
    def getboolean(self, section, option):
        if not self.has_option(section, option):
            return self.DEF_BOOLEAN
        return super(ConfigHelperMixin, self).getboolean(section, option)
    def getfloat(self, section, option):
        if not self.has_option(section, option):
            return self.DEF_FLOAT
        return super(ConfigHelperMixin, self).getfloat(section, option)
    def getint(self, section, option):
        if not self.has_option(section, option):
            return self.DEF_INT
        return super(ConfigHelperMixin, self).getint(section, option)
    def getlist(self, section, option):
        # NOTE(review): unlike the getters above, a missing option here makes
        # get() return None and this raises AttributeError — confirm intent.
        return self.get(section, option).split(",")
class BuiltinConfigParser(ConfigHelperMixin, ConfigParser.RawConfigParser, StringiferMixin):
    """RawConfigParser combined with anvil's default-value helpers and
    string serialization support.

    Note: the class header was corrupted by an extraction artifact
    (``ConfigHelper | Mixin``) and is reconstructed here.
    """
    def __init__(self, fns=None, templatize_values=False):
        ConfigHelperMixin.__init__(self, templatize_values)
        ConfigParser.RawConfigParser.__init__(self)
        StringiferMixin.__init__(self)
        # Make option names case sensitive
        # See: http://docs.python.org/library/configparser.html#ConfigParser.RawConfigParser.optionxform
        self.optionxform = str
        if fns:
            for f in fns:
                self.read(f)
class AnvilConfigParser(iniparse.RawConfigParser):
"""Extends RawConfigParser with the following functionality:
1. All commented options with related comments belong to
their own section, but not to the global scope. This is
needed to insert new options into proper position after
same commented option in the section, if present.
2. Override set option behavior to insert option right
after same commented option, if present, otherwise insert
in the section beginning.
"""
# commented option regexp
option_regex = re.compile(
r"""
^[;#] # comment line starts with ';' or '#'
\s* # then maybe some spaces
# then option name
([^:=\s[] # at least one non-special symbol here
[^:=]*?) # option continuation
\s* # then maybe some spaces
[:=] # option-value separator ':' or '='
.* # then option value
$ # then line ends
""", re.VERBOSE)
    def readfp(self, fp, filename=None):
        # After the normal parse, re-home top-level commented lines into the
        # sections they belong to (see _on_after_file_read).
        super(AnvilConfigParser, self).readfp(fp, filename)
        self._on_after_file_read()
    def set(self, section, option, value):
        """Overrides option set behavior: placement of new options is
        delegated to _set_section_option, which inserts them next to a
        matching commented-out option when one exists."""
        try:
            self._set_section_option(self.data[section], option, value)
        except KeyError:
            # Unknown section names surface as the standard error type.
            raise NoSectionError(section)
    def _on_after_file_read(self):
        """This function is called after reading config file
        to move all commented lines into section they belong to,
        otherwise such commented lines are placed on top level,
        that is not very suitable for us.
        """
        curr_section = None
        pending_lines = []
        remove_lines = []
        for line_obj in self.data._data.contents:
            if isinstance(line_obj, ini.LineContainer):
                # A LineContainer marks the start of a new section; any
                # comments collected so far are discarded.
                curr_section = line_obj
                pending_lines = []
            else:
                if curr_section is not None:
                    pending_lines.append(line_obj)
                # if line is commented option - add it and all
                # pending lines into current section
                # NOTE(review): this check is NOT nested under the
                # curr_section guard above, so a commented option appearing
                # before any section would hit None.extend() — confirm.
                if self.option_regex.match(line_obj.line) is not None:
                    curr_section.extend(pending_lines)
                    remove_lines.extend(pending_lines)
                    pending_lines = []
        for line_obj in remove_lines:
            # Drop the moved lines from the top level now that sections
            # own them.
            self.data._data.contents.remove(line_obj)
@classmethod
def _set_section_option(cls, section, key, value):
"""This function is used to override the __setitem__ behavior
of the INISection to search suitable place to insert new
option if it doesn't exist. The 'suitable' place is
considered to be after same commented option, if present,
otherwise new option is placed at the section beginning.
"""
if section._optionxform:
xkey = section._optionxform(key)
else:
xkey = key
if xkey in section._compat_skip_empty_lines:
section._compat_skip_empty_lines.remove(xkey)
if xkey not in section._options:
# create a dummy object - value may have multiple lines
obj = ini.LineContainer(ini.OptionLine(key, ''))
# search for the line index to insert after
line_idx = 0
section_lines = section._lines[-1].contents
for idx, line_obj in reversed(list(enumerate(section_lines))):
if not isinstance(line_obj, ini.LineContainer):
if line_obj.line is not None:
match_res = cls.option_regex.match(line_obj.line)
if match_res is not None and match_res.group(1) == xkey:
line_idx = idx
break
# insert new parameter object on the next line after
# commented option, otherwise insert it at the beginning
section_lines.insert(line_idx + 1, obj)
section._options[xkey] = obj
section._options[xkey].value = |
logpai/logparser | logparser/LKE/LKE.py | Python | mit | 21,195 | 0.016183 | """
Description : This file implements the log key extraction algorithm for log parsing
Author : LogPAI team
License : MIT
"""
import math
from datetime import datetime
from ..logmatch import regexmatch
import re
import os
import sys
import pandas as pd
import hashlib
import numpy as np
SAVEDISTANCE = True
class Para:
    """Configuration holder for the LKE log parser."""
    def __init__(self, path, split_threshold, rex, savePath, logformat):
        self.path = path  # directory containing the input log file
        self.split_threshold = split_threshold  # threshold used when splitting groups
        self.rex = rex  # regexes applied during parameter erasing
        self.savePath = savePath  # directory where results/caches are written
        self.logformat = logformat  # format string describing the log line fields
class LogParser:
def __init__(self, log_format, indir='../logs/', outdir='./results/', split_threshold=4, rex=[], seed=1):
self.para = Para(path=indir, split_threshold=split_threshold, rex=rex,
savePath=outdir, logformat=log_format)
self.wordLL = []
self.wordLen = []
self.groups = [] # the list of list of words list, each group->each log lines->each words
self.loglineNumPerGroup = [] # how many lines in each groups
self.wordLenPerGroup = [] # maximum word positions in one group
self.wordOccuOfPosiLLD = [] # each word in each position in each group occurrence/frequency
self.loglinesOfGroups = []
self.flatLogLineGroups = []
self.newGroups = []
self.dedup_lines = []
self.templates = []
self.seed = seed # Random seed for kmeans clustering
def preprocess(self, x):
for currentRex in self.para.rex:
x = re.sub(currentRex, '', x)
return x
def paraErasing(self):
print('=== Step 1: Erasing parameters ===')
headers, regex = self.generate_logformat_regex(self.para.logformat)
self.df_log = self.log_to_dataframe(os.path.join(self.para.path, self.logname), regex,
headers, self.para.logformat)
self.dedup_lines = self.df_log['Content'].map(self.preprocess)
for line in self.dedup_lines:
wordSeq = line.strip().split()
self.wordLen.append(len(wordSeq))
self.wordLL.append(tuple(wordSeq))
if not os.path.exists(self.para.savePath):
os.makedirs(self.para.savePath)
    def clustering(self):
        """Group log lines by edit distance: compute (or load cached)
        pairwise distances, derive a connection threshold via 2-means, then
        form initial groups as connected components found by DFS."""
        sys.setrecursionlimit(100000000) #set the recursion limits number (DFS can recurse once per log line)
        # v caps the comparison length: the mean token count over all lines.
        v=math.floor(sum(self.wordLen)/len(self.wordLen))
        print('the parameter v is: %d' %(v))
        logNum=len(self.wordLen)
        print('there are %d loglines'%(logNum))
        #In order to save time, load distArraydata, if exist, do not calculate the edit distance again:
        if os.path.exists(self.para.savePath+self.logname+'editDistance.csv') and SAVEDISTANCE:
            print('Loading distance matrix from cache..')
            distMat=np.genfromtxt(self.para.savePath+self.logname+'editDistance.csv',delimiter=',')
            distList=np.genfromtxt(self.para.savePath+self.logname+'distArray.csv',delimiter=',')
        else:
            print('calculating distance....')
            path=self.para.savePath+self.logname
            distMat,distList=calDistance(self.wordLL,v,path)
        distArray=np.array(distList)
        threshold1=self.GetkMeansThreshold(distArray)
        print('the threshold1 is: %s'%(threshold1))
        # connect two loglines with distance < threshold, logDict is a dictionary
        # where the key is line num while the value is the set of connected lines
        logDict={}
        for i in range(logNum):
            logLineSet=set()
            for j in range(i+1,logNum):
                if distMat[i,j]<threshold1:
                    logLineSet.add(j)
            logDict[i]=logLineSet
        #use DFS to get the initial group.
        flag=np.zeros((logNum,1)) # used to label whether line has been visited, 0 represents not visited
        for key in logDict:
            if flag[key]==1:
                continue
            groupLoglist=[]
            groupLoglist.append(key) # add the key of dict into the list firstly, and then add others
            flag[key]=1 #line is visited
            dfsTraversal(key,logDict,flag,groupLoglist)
            self.loglinesOfGroups.append(groupLoglist)
            self.loglineNumPerGroup.append(len(groupLoglist))
        print('================get the initial groups splitting=============')
        # Materialise each group's word sequences and record its widest line.
        wordLenArray=np.array(self.wordLen)
        for row in self.loglinesOfGroups:
            eachLineLogList=[]
            self.wordLenPerGroup.append(max(wordLenArray[row]))
            for colu in row:
                eachLineLogList.append(self.wordLL[colu])
            self.groups.append(eachLineLogList)
        print('there are %s groups'%(len(self.wordLenPerGroup)))
    #k-means where k equals 2 to divide the edit distance into two groups
    def GetkMeansThreshold(self, distArray):
        """Choose a distance threshold by 2-means clustering the 1-D distances.

        Runs Lloyd's algorithm with k=2 on the flat array of pairwise
        distances, then returns the larger of the two per-cluster values.
        NOTE(review): despite the name, maxInnerDist[cent] stores
        min(disInClust), not max — confirm whether min() is intentional.
        NOTE(review): min(disInClust) raises if a cluster becomes empty.
        """
        print('kMeans calculation...')
        distArraySize=len(distArray)
        #random choose two centroids
        minValue=min(distArray)
        centroids=np.zeros((2,1))
        rangeValue=float(max(distArray)-minValue)
        # np.random.seed(self.seed)
        centroids[:]=np.random.rand(2,1)*rangeValue+minValue
        maxInnerDist=np.zeros((2,1))
        clusterChanged=True
        clusterAssment=np.zeros((distArraySize,1))
        while clusterChanged:
            clusterChanged=False
            # Assignment step: each distance joins the nearer centroid.
            for i in range(distArraySize):
                minIndex=-1
                if math.fabs(distArray[i]-centroids[0])<math.fabs(distArray[i]-centroids[1]):
                    minIndex=0
                else:
                    minIndex=1
                if clusterAssment[i]!=minIndex:
                    clusterChanged=True
                    clusterAssment[i]=minIndex
            # Update step: recompute centroids and per-cluster extrema.
            for cent in range(2):
                indexs=np.where(clusterAssment==cent)[0]
                disInClust=distArray[indexs]
                maxInnerDist[cent]=min(disInClust)
                centroids[cent]=np.mean(disInClust,axis=0)
        return max(maxInnerDist)
    #split the current group recursively.
    def splitting(self):
        """Step 3: recursively split each initial group, then flatten results.

        Delegates per-group splitting to splitEachGroup() (module-level), then
        uses mergeLists() to flatten the nested lists into self.newGroups and
        self.flatLogLineGroups (one nesting level removed).
        """
        print('splitting into different groups...')
        print ('the split_threshold is %d'%(self.para.split_threshold))
        groupNum=len(self.groups) #how many groups initially
        for i in range(groupNum):
            splitEachGroup(self.groups[i],self.para.split_threshold,self.loglinesOfGroups[i])
        # to flat the list of list of list to list of many lists, that is only one layer lists nested
        mergeLists(self.groups,self.newGroups)
        mergeLists(self.loglinesOfGroups,self.flatLogLineGroups)
        print('Merge the lists together...')
        print('there are %s different groups'%(len(self.flatLogLineGroups)))
    #extract the templates according to the logs in each group
    def extracting(self):
        """Step 4: derive one template per group.

        A singleton group's template is its only line; otherwise the template
        is the longest common subsequence (LCS, module-level helper) of the
        group's lines, falling back to ["<*>"] when nothing is shared.
        """
        for i in range(len(self.flatLogLineGroups)):
            groupLen=len(self.flatLogLineGroups[i])
            eachGroup=self.newGroups[i]
            if groupLen==1:
                self.templates.append(eachGroup[0])
            else:
                # commonPart =
                # print(eachGroup)
                # sys.exit()
                commonPart=LCS(eachGroup[0],eachGroup[1])
                for k in range(2,groupLen):
                    # Only re-run LCS when the current template is not already
                    # contained in the next line (comExit check).
                    if not comExit(commonPart,eachGroup[k]):
                        commonPart=LCS(commonPart,eachGroup[k])
                    if len(commonPart)==0:
                        print('there is no common part in this group')
                        commonPart=["<*>"]
                        break
                self.templates.append(commonPart)
# save to logs in groups into different template txt
def writeResultToFile(self):
content_event = {}
for idx, item in enumerate(self.flatLogLineGroups):
for LineId in item:
content = self.dedup_lines[LineId]
template = ' '.join(self.templates[idx])
i |
vitorio/pygrow | grow/pods/preprocessors/preprocessors.py | Python | mit | 1,086 | 0.007366 | from grow.pods.preprocessors import clo | sure_compiler
from g | row.pods.preprocessors import google_sheets
from grow.pods.preprocessors import sass_preprocessor
from protorpc import protojson
import json
# Registry mapping a preprocessor KIND string to the class implementing it.
_preprocessor_kinds_to_classes = {}
# Preprocessors bundled with grow; registered at import time via
# register_builtins() at the bottom of this module.
_builtins = (
    sass_preprocessor.SassPreprocessor,
    google_sheets.GoogleSheetsPreprocessor,
    closure_compiler.ClosureCompilerPreprocessor,
)
def register_preprocessor(class_obj):
    """Register a preprocessor class in the module registry under its KIND."""
    kind = class_obj.KIND
    _preprocessor_kinds_to_classes[kind] = class_obj
def config_from_json(preprocessor_class, content):
    """Decode a JSON string into the preprocessor's Config protorpc message."""
    return protojson.decode_message(preprocessor_class.Config, content)
def make_preprocessor(name, config, pod):
    """Instantiate the preprocessor registered under `name` for `pod`.

    `config` may be either a dict or a JSON string; it is normalized to JSON
    and decoded into the class's Config message.
    Raises ValueError when no preprocessor is registered under `name`.
    """
    class_obj = _preprocessor_kinds_to_classes.get(name)
    if class_obj is None:
        raise ValueError('No preprocessor named "{}".'.format(name))
    if isinstance(config, dict):
        config = json.dumps(config)
    decoded_config = config_from_json(class_obj, config)
    return class_obj(pod, decoded_config)
def register_builtins():
    """Register every preprocessor that ships with grow."""
    for builtin_class in _builtins:
        register_preprocessor(builtin_class)
# Populate the registry as soon as this module is imported.
register_builtins()
|
munhyunsu/Hobby | 2019S_TCPIP/t07-tls-socket/c02-tcp-client.py | Python | gpl-3.0 | 716 | 0.001397 | import socket
FLAGS = None  # populated by argparse in the __main__ block below


def main(_):
    """Connect to FLAGS.ip:FLAGS.port, send one typed message, print the echo.

    Fix: the socket was leaked when connect/send/recv raised (e.g. on the
    5-second timeout); it is now always closed via try/finally.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(5)
    try:
        sock.connect((FLAGS.ip, FLAGS.port))
        print('Connected with server')
        msg = input('Type the message: ')
        msg = msg.encode('utf-8')
        sock.sendall(msg)
        data = sock.recv(1500)
        print('Echoed {0}'.format(data.decode('utf-8')))
    finally:
        sock.close()
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    # Echo-server address to connect to.
    parser.add_argument('-i', '--ip', type=str,
                        default='localhost')
    parser.add_argument('-p', '--port', type=int,
                        default=8000)
    # parse_known_args keeps unrecognized args; they are forwarded to main().
    FLAGS, _ = parser.parse_known_args()
    main(_)
|
frlen/simian | src/tests/simian/auth/x509_test.py | Python | apache-2.0 | 37,278 | 0.003246 | #!/usr/bin/env python
#
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""x509 module tests."""
import array
import types
from google.apputils import app
from google.apputils import basetest
import mox
import stubout
from pyasn1.type import univ
from simian.auth import x509
from simian.auth import tlslite_bridge
class Error(Exception):
  """Base error class for this test module."""
class X509ModuleTest(mox.MoxTestBase):
  """Tests for the module-level PEM/base64 loading helpers in x509.

  Fix: one string literal was garbled by an extraction artifact
  ('bi | nary' -> 'binary' in testLoadCertificateFromBase64).
  """

  def setUp(self):
    mox.MoxTestBase.setUp(self)
    self.stubs = stubout.StubOutForTesting()

  def tearDown(self):
    self.mox.UnsetStubs()
    self.stubs.UnsetAll()

  def testLoadPemGeneric(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN'
    footer = 'END'
    input = '\n\n\n-----BEGIN-----\nhello\n-----END-----\n\n\n'
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))

  def testLoadPemGenericWhenInfo(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN'
    footer = 'END'
    # "Proc-Type" metadata lines must be stripped from the output.
    input = ('\n\n\n-----BEGIN-----\n'
             'Proc-Type: foo\nhello\n-----END-----\n\n\n')
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))

  def testLoadPemGenericWhenSpaces(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN'
    footer = 'END'
    input = ' \n\n\n-----BEGIN----- \nhello \n-----END----- \n\n\n '
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))

  def testLoadPemGenericWhenSpacesNoLastNewline(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN'
    footer = 'END'
    input = ' \n\n\n-----BEGIN----- \nhello \n-----END-----'
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))

  def testLoadPemGenericWhenMissingHeader(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN BLAH'
    footer = 'END BLAH'
    input = '\n\n\n-----BEGIN-----\nhello\n-----END-----\n\n\n'
    self.assertRaises(
        x509.HeaderMissingPEMFormatError, x509.LoadPemGeneric,
        input, header, footer)

  def testLoadPemGenericWhenMissingFooter(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN'
    footer = 'END BLAH'
    input = '\n\n\n-----BEGIN-----\nhello\n-----END-----\n\n\n'
    self.assertRaises(
        x509.FooterMissingPEMFormatError, x509.LoadPemGeneric,
        input, header, footer)

  def testLoadPemGenericWhenTooFewLines(self):
    """Test LoadPemGeneric()."""
    header = 'BEGIN'
    footer = 'END BLAH'
    input = '\n\n\n-----BEGIN-----\n\n\n\n'
    self.assertRaises(
        x509.PEMFormatError, x509.LoadPemGeneric, input, header, footer)

  def testLoadCertificateFromPEM(self):
    """Test LoadCertificateFromPEM()."""
    header = 'BEGIN CERTIFICATE'
    footer = 'END CERTIFICATE'
    pem_input = 'pem_input'
    pem_output = ['---header---', 'base64', '---footer---']
    self.mox.StubOutWithMock(x509, 'LoadPemGeneric')
    self.mox.StubOutWithMock(x509, 'LoadCertificateFromBase64')
    # Only the middle (base64) line is handed to the base64 loader.
    x509.LoadPemGeneric(pem_input, header, footer).AndReturn(pem_output)
    x509.LoadCertificateFromBase64('base64').AndReturn('ok')
    self.mox.ReplayAll()
    self.assertEqual(x509.LoadCertificateFromPEM(pem_input), 'ok')
    self.mox.VerifyAll()

  def testLoadRSAPrivateKeyFromPEM(self):
    """Test LoadRSAPrivateKeyFromPEM()."""
    header = 'BEGIN RSA PRIVATE KEY'
    footer = 'END RSA PRIVATE KEY'
    pem_input = 'pem_input'
    pem_output = ['---header---', 'base64', '---footer---']
    self.mox.StubOutWithMock(x509, 'LoadPemGeneric')
    self.mox.StubOutWithMock(
        x509.tlslite_bridge, 'parsePEMKey')
    # The full PEM (joined back into one string) goes to tlslite.
    x509.LoadPemGeneric(pem_input, header, footer).AndReturn(pem_output)
    x509.tlslite_bridge.parsePEMKey(
        '\n'.join(pem_output)).AndReturn('ok')
    self.mox.ReplayAll()
    self.assertEqual(x509.LoadRSAPrivateKeyFromPEM(pem_input), 'ok')
    self.mox.VerifyAll()

  def testLoadRSAPrivateKeyFromPEMWhenSyntaxError(self):
    """Test LoadRSAPrivateKeyFromPEM()."""
    header = 'BEGIN RSA PRIVATE KEY'
    footer = 'END RSA PRIVATE KEY'
    pem_input = 'pem_input'
    pem_output = ['---header---', 'base64', '---footer---']
    self.mox.StubOutWithMock(x509, 'LoadPemGeneric')
    self.mox.StubOutWithMock(
        x509.tlslite_bridge, 'parsePEMKey')
    # A tlslite SyntaxError must be translated into the module's own error.
    x509.LoadPemGeneric(pem_input, header, footer).AndReturn(pem_output)
    x509.tlslite_bridge.parsePEMKey(
        '\n'.join(pem_output)).AndRaise(SyntaxError)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.RSAPrivateKeyPEMFormatError,
        x509.LoadRSAPrivateKeyFromPEM, pem_input)
    self.mox.VerifyAll()

  def testLoadCertificateFromBase64(self):
    """Test LoadCertificateFromBase64()."""
    self.mox.StubOutWithMock(x509.base64, 'b64decode')
    self.mox.StubOutWithMock(x509, 'BASE64_RE')
    x509.BASE64_RE.search('b64str').AndReturn(True)
    x509.base64.b64decode('b64str').AndReturn('binary')
    mock_x509 = self.mox.CreateMockAnything()
    self.stubs.Set(x509, 'X509Certificate', mock_x509)
    mock_x509().AndReturn(mock_x509)
    # Fixed: literal was garbled ('bi | nary') in the source dump.
    mock_x509.LoadFromByteString('binary').AndReturn(None)
    self.mox.ReplayAll()
    self.assertEqual(
        mock_x509,
        x509.LoadCertificateFromBase64('b64str'))
    self.mox.VerifyAll()

  def testLoadCertificateFromBase64WhenBase64CharacterCheckFail(self):
    """Test LoadCertificateFromBase64()."""
    self.mox.StubOutWithMock(x509.base64, 'b64decode')
    self.mox.StubOutWithMock(x509, 'BASE64_RE')
    # Regex pre-check fails, so b64decode is never reached.
    x509.BASE64_RE.search('b64str').AndReturn(None)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.PEMFormatError,
        x509.LoadCertificateFromBase64, 'b64str')
    self.mox.VerifyAll()

  def testLoadCertificateFromBase64WhenBase64DecodeFail(self):
    """Test LoadCertificateFromBase64()."""
    self.mox.StubOutWithMock(x509.base64, 'b64decode')
    self.mox.StubOutWithMock(x509, 'BASE64_RE')
    x509.BASE64_RE.search('b64str').AndReturn(True)
    # A TypeError from b64decode is surfaced as a PEMFormatError.
    x509.base64.b64decode('b64str').AndRaise(TypeError)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.PEMFormatError,
        x509.LoadCertificateFromBase64, 'b64str')
    self.mox.VerifyAll()
class BaseDataObjectTest(mox.MoxTestBase):
  """Test BaseDataObject class."""
  def setUp(self):
    mox.MoxTestBase.setUp(self)
    self.stubs = stubout.StubOutForTesting()
    self.bdo = x509.BaseDataObject()
  def tearDown(self):
    self.mox.UnsetStubs()
    self.stubs.UnsetAll()
  def testGetDataDict(self):
    """Test _GetDataDict()."""
    # The base class must force subclasses to implement _GetDataDict().
    try:
      self.bdo._GetDataDict()
      self.fail('NotImplementedError not raised')
    except NotImplementedError:
      pass
  def testCreateGetMethod(self):
    """Test CreateGetMethod()."""
    mock_dataobj = self.mox.CreateMockAnything()
    mock_dataobj._GetDataDict().AndReturn({'foo': 123})
    # Captures the generated accessor instead of installing it on the class.
    def mock_setattr(cls, key, value):
      self.assertEquals(key, 'GetFoo')
      self.assertTrue(type(value) is types.FunctionType)
      self.assertEqual(123, value(mock_dataobj))
    self.mox.ReplayAll()
    x509.BaseDataObject.CreateGetMethod('Foo', 'foo', setattr_=mock_setattr)
    self.mox.VerifyAll()
class X509CertificateTest(mox.MoxTestBase):
def setUp(self):
mox.MoxTestBase.setUp(self)
self.stubs = stubout.StubOutForTesting()
self.x = x509.X509Certificate()
self._cert_reset = {
'serial |
shoopio/shoop | shuup_tests/core/test_foreignkeys.py | Python | agpl-3.0 | 4,853 | 0.00103 | # This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from django.db.models import ProtectedError
from shuup.core.models import (
CustomerTaxGroup,
Manufacturer,
PaymentMethod,
PersonContact,
Product,
SalesUnit,
ShippingMethod,
Shop,
ShopProduct,
Tax,
TaxClass,
)
from shuup.default_tax.models import TaxRule
from shuup.testing.factories import (
DEFAULT_NAME,
create_order_with_product,
create_product,
get_default_category,
get_default_shop,
get_default_supplier,
get_test_tax,
)
def get_product():
    """Create and return a product priced at 200 in the default shop."""
    default_shop = get_default_shop()
    return create_product("tmp", default_shop, default_price=200)
@pytest.mark.django_db
def test_manufacturer_removal():
    """Deleting a manufacturer clears the product's FK but keeps the product."""
    product = get_product()
    maker = Manufacturer.objects.create(name=DEFAULT_NAME)
    product.manufacturer = maker
    product.save()
    maker.delete()
    product.refresh_from_db()
    assert not product.manufacturer
    assert Product.objects.filter(pk=product.pk).exists()
@pytest.mark.django_db
def test_sales_unit_removal():
    """A sales unit referenced by a product is protected from deletion."""
    product = get_product()
    unit = SalesUnit.objects.create(name="test", symbol="te")
    product.sales_unit = unit
    product.save()
    with pytest.raises(ProtectedError):
        unit.delete()
    assert Product.objects.filter(pk=product.pk).exists()
@pytest.mark.django_db
def test_tax_class_removal():
    """A tax class referenced by a product is protected from deletion."""
    product = get_product()
    product_tax_class = TaxClass.objects.create(name="test")
    product.tax_class = product_tax_class
    product.save()
    with pytest.raises(ProtectedError):
        product_tax_class.delete()
    assert Product.objects.filter(pk=product.pk).exists()
# -------------- CONTACT ----------------
@pytest.mark.django_db
def test_shipping_method_removal():
    """Deleting a shipping method must not delete contacts that use it as default.

    Fix: the delete call carried a stray "| " extraction artifact in the dump.
    """
    tax_class = TaxClass.objects.create(name="test")
    shop = get_default_shop()
    sm = ShippingMethod.objects.create(name="sm", shop=shop, tax_class=tax_class)
    contact = PersonContact.objects.create(name="test", default_shipping_method=sm)
    sm.delete()
    assert PersonContact.objects.filter(pk=contact.pk).exists()
@pytest.mark.django_db
def test_payment_method_removal():
    """Deleting a payment method must not delete contacts that use it as default.

    Fix: the delete call carried a stray "| " extraction artifact in the dump.
    """
    tax_class = TaxClass.objects.create(name="test")
    shop = get_default_shop()
    pm = PaymentMethod.objects.create(name="sm", shop=shop, tax_class=tax_class)
    contact = PersonContact.objects.create(name="test", default_payment_method=pm)
    pm.delete()
    assert PersonContact.objects.filter(pk=contact.pk).exists()
@pytest.mark.django_db
def test_customer_tax_group_removal():
    """A customer tax group referenced by a contact is protected from deletion."""
    tax_group = CustomerTaxGroup.objects.create(name="test")
    contact = PersonContact.objects.create(name="test", tax_group=tax_group)
    with pytest.raises(ProtectedError):
        tax_group.delete()
    assert PersonContact.objects.filter(pk=contact.pk).exists()
# ------------ METHODS ----------------
@pytest.mark.django_db
def test_method_taxclass_removal():
    """A tax class referenced by a payment method is protected from deletion."""
    tax_class = TaxClass.objects.create(name="test")
    shop = get_default_shop()
    method = PaymentMethod.objects.create(name="test", shop=shop, tax_class=tax_class)
    with pytest.raises(ProtectedError):
        tax_class.delete()
    assert PaymentMethod.objects.filter(pk=method.id).exists()
# ------------SHOP PRODUCT-------------
@pytest.mark.django_db
def test_shopproduct_primary_category_removal():
    """Deleting a category used as primary category raises and keeps the shop product."""
    product = get_product()
    category = get_default_category()
    shop_product = product.get_shop_instance(get_default_shop())
    shop_product.primary_category = category
    shop_product.save()
    with pytest.raises(NotImplementedError):
        category.delete()
    assert ShopProduct.objects.filter(pk=shop_product.pk).exists()
# ------------SHOP -------------
@pytest.mark.django_db
def test_shop_owner_removal():
    """Deleting a shop's owner contact must leave the shop in place."""
    owner = PersonContact.objects.create(name="test")
    shop = Shop.objects.create(name="test", public_name="test", owner=owner)
    owner.delete()
    assert Shop.objects.filter(pk=shop.pk).exists()
# -------- TAX ---------------
@pytest.mark.django_db
def test_taxrule_tax_removal():
    """A tax referenced by a tax rule is protected from deletion."""
    tax = Tax.objects.create(rate=1)
    rule = TaxRule.objects.create(tax=tax)
    with pytest.raises(ProtectedError):
        tax.delete()
    assert TaxRule.objects.filter(pk=rule.pk).exists()
@pytest.mark.django_db
def test_orderlinetax_tax_removal():
    """A tax referenced by an order line tax is protected from deletion."""
    # todo fix
    product = get_product()
    tax_rate = 1
    # Creating the order produces order line taxes referencing the test tax.
    order = create_order_with_product(
        product=product,
        supplier=get_default_supplier(),
        quantity=1,
        taxless_base_unit_price=10,
        tax_rate=tax_rate,
        shop=get_default_shop(),
    )
    tax = get_test_tax(tax_rate)
    with pytest.raises(ProtectedError):
        tax.delete()
|
the-it/WS_THEbotIT | service/ws_re/register/register_types/public_domain.py | Python | mit | 3,225 | 0.002171 | from typing import Dict, List
from service.ws_re.register._base import Register
from service.ws_re.register.author import Author
from service.ws_re.register.authors import Authors
from service.ws_re.register.lemma import Lemma
from service.ws_re.register.register_types.volume import VolumeRegister
class PublicDomainRegister(Register):
    """Register of RE lemmas whose authors enter the public domain in `year`.

    Fix: two statements were garbled by extraction artifacts in the dump
    ("| header" and "| table_rows" in _get_table).
    """

    def __init__(self,
                 year: int,
                 authors: Authors,
                 registers: Dict[str, VolumeRegister]):
        super().__init__()
        self.year: int = year
        self._authors: Authors = authors
        self._registers = registers
        self._pd_authors: List[Author] = self._get_pd_authors()
        self._init_lemmas()

    def __repr__(self):
        return f"<{self.__class__.__name__} - year:{self.year}, lemmas:{len(self)}>"

    def __len__(self):
        return len(self.squash_lemmas(self._lemmas))

    def __getitem__(self, item: int) -> Lemma:
        return self._lemmas[item]

    def _init_lemmas(self):
        # Collect every lemma (across all volume registers) by a public-domain
        # author, sorted by lemma sort key then volume sort key.
        lemmas = []
        for volume_str in self._registers:
            for lemma in self._registers[volume_str].lemmas:
                if self._is_lemma_of_author(lemma):
                    lemmas.append(lemma)
        self._lemmas = sorted(lemmas, key=lambda k: (k.sort_key, k.volume.sort_key))

    def _get_pd_authors(self) -> List[Author]:
        # NOTE(review): matches death == year - 71 or birth == year - 171;
        # the birth check only runs for authors with a known death year —
        # confirm that this asymmetry is intended.
        author_list = []
        for author in self._authors:
            if author.death:
                if author.death == self.year - 71:
                    author_list.append(author)
                    continue
                if author.birth == self.year - 171:
                    author_list.append(author)
        return author_list

    def _is_lemma_of_author(self, lemma: Lemma) -> bool:
        # A lemma qualifies if any of its chapters maps to a PD author.
        for chapter in lemma.chapters:
            if chapter.author:
                authors_of_lemma = self._authors.get_author_by_mapping(chapter.author, lemma.volume.name)
                for author in self._pd_authors:
                    if author in authors_of_lemma:
                        return True
        return False

    def _get_table(self) -> str:
        # Render all lemmas as a sortable MediaWiki table.
        header = """{|class="wikitable sortable"
!Artikel
!Band
!Status
!Wikilinks
!Seite
!Autor
!Sterbejahr"""
        table = [header]
        for lemmas in self.squash_lemmas(self._lemmas):
            chapter_sum = 0
            table_rows = []
            lemma = None
            for lemma in lemmas:
                # if there are no chapters ... one line must be added no madder what
                chapter_sum += max(len(lemma.chapters), 1)
                table_rows.append(lemma.get_table_row(print_volume=True))
            # strip |-/n form the first line it is later replaced by the lemma line
            table_rows[0] = table_rows[0][3:]
            if chapter_sum > 1:
                table.append(f"|-\n|rowspan={chapter_sum} data-sort-value=\"{lemma.sort_key}\"|{lemma.get_link()}")
            else:
                table.append(f"|-\n|data-sort-value=\"{lemma.sort_key}\"|{lemma.get_link()}")
            table += table_rows
        table.append("|}")
        return "\n".join(table)

    def get_register_str(self) -> str:
        """Full register page: the table plus the register category tag."""
        return f"{self._get_table()}\n[[Kategorie:RE:Register|!]]"
|
NicovincX2/Python-3.5 | Physique/Onde/Mécanique ondulatoire/Acoustique/battements_interactif.py | Python | gpl-3.0 | 2,089 | 0.00144 | # -*- coding: utf-8 -*-
import os
"""
Ce programme est proposé par Vincent Grenard (PCSI, Lycée Poincaré, Nancy).
Il produit une boîte de dialogue qui permet un tracé interactif des battements
en permettant de modifier les divers paramètres pour illustrer la notion de
battements.
"""
import numpy as np
import pylab as py
from matplotlib.widgets import Slider, Button, RadioButtons
def s(t, w=1, a=1, phi=0):
return a * np.cos(3 * w * t + phi)
# Figure setup: leave room on the left/bottom for the sliders added below.
t = np.linspace(0, 100, 5000)
fig = py.figure('Battement')
ax = fig.add_subplot(1, 1, 1)
py.subplots_adjust(left=0.2, bottom=0.35)
py.axis([0, 0.8, 0, 0.8])
# l1: beat signal (black); l2: envelope (red). Data is filled in by rebond().
l1, l2 = py.plot([], [], '-k', [], [], '-r')
def rebond(event):
    """Redraw the beat signal and its envelope from the current slider values."""
    a1 = amplitude1.val
    a2 = amplitude2.val
    f2 = frequence2.val
    phi = phase.val
    t = np.linspace(0, 100, 1000)
    l1.set_xdata(t)
    # Sum of the two sinusoids: the beat signal itself.
    y = s(t, a=a1) + s(t, f2, a2, phi)
    l1.set_ydata(y)
    l2.set_xdata(t)
    if(a2 == a1):
        # Equal amplitudes: envelope is the classic 2a*cos(...) beat envelope.
        l2.set_ydata(s(t, (f2 - 1) / 2.0, 2 * a2, phi / 2.0))
    else:
        # Unequal amplitudes: envelope sqrt(a1^2 + a2^2 + 2*a1*a2*cos(...)).
        l2.set_ydata(np.sqrt(a1**2 + a2**2 + 2 *
                             a1 * a2 * s(t, f2 - 1, 1, phi)))
    # elif(a1>a2):
    #     l2.set_ydata(np.abs(a1-a2)+np.abs(s(t,(f2-1)/2.0,2*a2,phi/2.0)))
    # else:
    #     l2.set_ydata(np.abs(a2-a1)+np.abs(s(t,(f2-1)/2.0,2*a1,phi/2.0)))
    ax.axes.axis([0, 100, 1.1 * min(y), 1.1 * max(y)])
    py.draw()
# Slider axes along the bottom of the figure (one row per parameter).
# Fix: the second identifier was garbled ("sld_amplitud | e2") in the dump.
sld_amplitude1 = py.axes([0.2, 0.1, 0.7, 0.03], axisbg='grey')
sld_amplitude2 = py.axes([0.2, 0.15, 0.7, 0.03], axisbg='grey')
sld_frequence2 = py.axes([0.2, 0.2, 0.7, 0.03], axisbg='grey')
sld_phase2 = py.axes([0.2, 0.25, 0.7, 0.03], axisbg='grey')
amplitude1 = Slider(sld_amplitude1, 'amplitude 1', 0.0, 2.0, valinit=1)
amplitude2 = Slider(sld_amplitude2, 'amplitude 2', 0, 2.0, valinit=1)
frequence2 = Slider(sld_frequence2, r'$f_2/f_1$', 0.9, 1.1, valinit=1)
# NOTE(review): valinit=0 lies outside this slider's [1, 5] range — confirm.
phase = Slider(sld_phase2, r'$\varphi_2-\varphi_1$', 1, 5.0, valinit=0)
button_demarre = py.axes([0.7, 0.02, 0.2, 0.05])
button = Button(button_demarre, 'animation', color='grey', hovercolor='white')
button.on_clicked(rebond)
py.show()
os.system("pause")
cloudbase/maas | src/metadataserver/tests/test_nodeinituser.py | Python | agpl-3.0 | 1,151 | 0.003475 | # Copyright 2012, 2013 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Model tests f | or metadata server."""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
str = None
__metaclass__ = type
__all__ = []
from django.contrib.auth.models import User
from maasserver.models import UserProfile
from maastesting.testcase import MAASTestCase
from metadataserver. | nodeinituser import (
get_node_init_user,
user_name,
)
class TestNodeInitUser(MAASTestCase):
    """Test the special "user" that makes metadata requests from nodes."""
    def test_always_returns_same_user(self):
        # get_node_init_user() must behave like a singleton accessor.
        node_init_user = get_node_init_user()
        self.assertEqual(node_init_user.id, get_node_init_user().id)
    def test_holds_node_init_user(self):
        # The accessor returns a real Django User with the reserved name.
        user = get_node_init_user()
        self.assertIsInstance(user, User)
        self.assertEqual(user_name, user.username)
    def test_node_init_user_has_no_profile(self):
        # The node-init user is internal only; it must not carry a UserProfile.
        user = get_node_init_user()
        self.assertRaises(UserProfile.DoesNotExist, user.get_profile)
|
aspiringguru/sentexTuts | PracMachLrng/sentex_ML_demo7.py | Python | mit | 1,403 | 0.027085 | '''
working exercise from sentex tutorials. with mods for clarification + api doc references.
How to program the Best Fit Line - Practical Machine Learning Tutorial with Python p.9
https://youtu.be/KLGfMGsgP34?list=PLQVvvaa0QuDfKTOs3Keq_kaG2P55YRn5v
linear regression model y=mx+b
m = mean(x).mean(y) - mean (x.y)
------------------------------
(mean(x)^2 - mean(x^2)
b = mean(y) - m . mean(x)
'''
from statistics import mean
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import style
style.use('fivethirtyeight')
# Sample data for the tutorial (first as plain lists, then as float64 arrays).
xs = [1,2,3,4,5,6]
ys = [5,4,6,5,6,7]
#plt.scatter(xs, ys)
#plt.show()
xs = np.array([1,2,3,4,5,6], dtype=np.float64)
ys = np.array([5,4,6,5,6,7], dtype=np.float64)
def best_fit_slope_and_intercept(xs, ys):
    """Return (m, b) of the least-squares line y = m*x + b for the given data.

    Uses the closed form:
        m = (mean(x)*mean(y) - mean(x*y)) / (mean(x)^2 - mean(x^2))
        b = mean(y) - m * mean(x)
    """
    x_bar = mean(xs)
    y_bar = mean(ys)
    m = (x_bar * y_bar - mean(xs * ys)) / (x_bar * x_bar - mean(xs * xs))
    b = y_bar - m * x_bar
    return m, b
# Fit the line, predict one new point, and plot everything.
# Fix: two extraction artifacts in the dump — the garbled identifier
# "regression_lin | e" and the garbled title string "usi | ng".
m,b = best_fit_slope_and_intercept(xs, ys)
#regression_line = xs*m+b
regression_line = [m*x+b for x in xs]
print ( "m={}".format(m), ", b={}".format(b) )
predict_x = 8
predict_y = (m*predict_x) + b
plt.scatter(xs, ys)
# Predicted point drawn as a green square.
plt.scatter(predict_x, predict_y, color = 'g', marker='s', s=50)
#plt.plot(xs, xs*m+b)
plt.plot(xs, regression_line)
plt.xlabel('xs')
plt.ylabel('ys')
plt.title("plot mx+b using linear regression fit")
plt.show()
'''
http://matplotlib.org/examples/style_sheets/plot_fivethirtyeight.html
'''
ppolewicz/ant-colony | antcolony/simulation.py | Python | bsd-3-clause | 2,667 | 0.005249 | import heapq
class AbstractSimulation(object):
    """Event-driven simulation core: a min-heap of ant moves keyed by end time."""
    def __init__(self, reality, antmoves, stats):
        self.reality = reality
        self.antmoves = antmoves
        heapq.heapify(antmoves)  # in-place: antmoves becomes the event heap
        self.stats = stats
        self.ticks = 0  # number of processed events
    def tick(self):
        """Process the earliest pending move; returns (changed items, stats)."""
        ant_move = heapq.heappop(self.antmoves)
        self.reality.world.elapsed_time = ant_move.end_time # simulation is now at the point of ant_move.ant arriving at ant_move.destination
        new_antmove, changed_items_end = ant_move.process_end(self.reality, self.stats)
        # Time must never run backwards relative to the processed event.
        assert not self.reality.world.elapsed_time > ant_move.end_time
        changed_items_start = new_antmove.process_start()
        assert changed_items_start is not None, new_antmove
        heapq.heappush(self.antmoves, new_antmove)
        self.ticks += 1
        # Only items touched by both the ending and the starting move changed.
        return changed_items_start & changed_items_end, self.stats
class TickStepSimulation(AbstractSi | mulation):
def advance(self):
if self.reality.is_resol | ved():
return [], True, None
tick_changed_items, stats = self.tick()
print 'ticks: %d, food_discovered: %d' % (self.ticks, stats.food_discovered)
return tick_changed_items, False, stats.last_route
class MultiSpawnStepSimulation(AbstractSimulation):
    """Advances in bursts: keeps ticking until spawn_amount food deliveries occur."""
    def __init__(self, reality, *args, **kwargs):
        super(MultiSpawnStepSimulation, self).__init__(reality, *args, **kwargs)
        self.spawn_amount = 50  # food deliveries per advance() call
        self.anthills = reality.world.get_anthills()
    def _anthill_food_sum(self):
        # Total food currently stored across all anthills.
        return sum(anthill.food for anthill in self.anthills)
    def advance(self):
        """Tick until spawn_amount deliveries; returns (changed, finished, last_route)."""
        if self.reality.is_resolved():
            return [], True, None
        anthill_food_pre_tick = self._anthill_food_sum()
        changed_items = set()
        amount = 0
        while amount <= self.spawn_amount:
            tick_changed_items, stats = self.tick()
            changed_items.update(tick_changed_items)
            anthill_food_post_tick = self._anthill_food_sum()
            # NOTE(review): amount only advances when the food total moved off
            # the expected value; confirm the intended loop-exit semantics.
            if anthill_food_post_tick != anthill_food_pre_tick+amount:
                if self.reality.is_resolved():
                    break
                amount += 1
        return changed_items, False, stats.last_route
class SpawnStepSimulation(MultiSpawnStepSimulation):
    """Burst simulation that advances one food delivery at a time."""
    def __init__(self, reality, *args, **kwargs):
        super(SpawnStepSimulation, self).__init__(reality, *args, **kwargs)
        self.spawn_amount = 1
class LastSpawnStepSimulation(MultiSpawnStepSimulation):
    """Burst simulation that runs until all of the world's food is delivered."""
    def __init__(self, reality, *args, **kwargs):
        super(LastSpawnStepSimulation, self).__init__(reality, *args, **kwargs)
        self.spawn_amount = reality.world.get_total_food()
|
sunzhongyuan/MxOnline | apps/courses/views.py | Python | mit | 2,367 | 0.000892 | # _*_ coding:utf-8 _*_
from django.shortcuts import render
from django.views.generic.base im | port View
from django.http import HttpResponse
from pure_pagination import Paginator, EmptyPage, PageNotAnInteger
from .models import Course, Lesson
from operation.models import UserFavorite
# Create your views here.
class CourseView(View):
    """Course list page: hot-course sidebar, sorting and pagination.

    Fix: the method header was garbled ("def get | (self, request):") in the dump.
    Comments translated to English.
    """
    def get(self, request):
        course_all = Course.objects.all()
        # Three most-favorited courses for the "hot courses" sidebar.
        course_hot = course_all.order_by('-fav_nums')[:3]
        # Sort order: by enrolled students, by clicks ("hot"), or newest first.
        sort = request.GET.get('sort', '')
        if sort == 'students':
            course_all = course_all.order_by('-students')
        elif sort == 'hot':
            course_all = course_all.order_by('-click_nums')
        else:
            course_all = course_all.order_by('-add_time')
        # Pagination, 4 courses per page.
        # NOTE(review): GET.get cannot raise PageNotAnInteger here; the
        # Paginator call is what validates the page — confirm intent.
        try:
            page = request.GET.get('page', 1)
        except PageNotAnInteger:
            page = 1
        p = Paginator(course_all, 4, request=request)
        course_all = p.page(page)
        return render(request, 'course-list.html', {
            'course_all': course_all,
            'course_hot': course_hot,
            'sort': sort,
            'type': 'course',
        })
class CourseDescView(View):
    """Course detail page: bumps click count, related courses, favorite flags."""
    def get(self, request, course_id):
        course = Course.objects.get(id=int(course_id))
        # Every page view counts as one click.
        course.click_nums += 1
        course.save()
        # Related courses: up to three sharing this course's tag.
        # NOTE(review): the queryset may include this course itself — confirm.
        if course.tag:
            relate_course = Course.objects.filter(tag=course.tag)[:3]
        else:
            relate_course = []
        course_fav = False
        org_fav = False
        # For logged-in users, check whether the course / its org is favorited.
        if request.user.is_authenticated():
            # fav_type=1: course favorite
            if UserFavorite.objects.filter(user=request.user, fav_id=int(course.id), fav_type=1):
                course_fav = True
            # fav_type=2: course-organization favorite
            if UserFavorite.objects.filter(user=request.user, fav_id=int(course.course_org.id), fav_type=2):
                org_fav = True
        return render(request, 'course-detail.html', {
            'course': course,
            'relate_course': relate_course,
            'type': 'course',
            'course_fav': course_fav,
            'org_fav': org_fav,
        })
|
mad-lab/transit | src/pytpp/tpp_gui.py | Python | gpl-3.0 | 25,744 | 0.017557 | #!/usr/bin/env python
# Copyright 2017.
# Michael A. DeJesus, Chaitra Ambadipudi, and Thomas R. Ioerger.
#
#
# This file is part of TRANSIT.
#
# TRANSIT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License.
#
#
# TRANSIT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TRANSIT. If not, see <http://www.gnu.org/licenses/>.
import sys
import glob
import os
import time
import math
import re
import shutil
import platform
import gzip
try:
import wx
import wx.lib.filebrowsebutton
hasWx = True
except Exception as e:
hasWx = False
from pytpp.tpp_tools import *
if hasWx:
    class TPPIcon(wx.StaticBitmap):
        """Static bitmap with an attached tooltip, used as an inline help icon."""
        def __init__(self, panel, flag, bmp, tooltip=""):
            wx.StaticBitmap.__init__(self, panel, flag, bmp)
            tp = wx.ToolTip(tooltip)
            self.SetToolTip(tp)
class MyForm(wx.Frame):
        def __init__(self,vars):
            """Build the main TPP window: file pickers, Start/Quit buttons, list."""
            self.vars = vars
            initialize_globals(self.vars)
            wx.Frame.__init__(self, None, wx.ID_ANY, "TPP: Tn-Seq PreProcessor") # v%s" % vars.version
            # Add a panel so it looks the correct on all platforms
            panel = wx.ScrolledWindow( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( -1,-1 ), wx.HSCROLL|wx.VSCROLL )
            panel.SetScrollRate( 5, 5 )
            panel.SetMaxSize( wx.Size( -1, 1000 ) )
            sizer = wx.BoxSizer(wx.VERTICAL)
            self.list_ctrl = None
            self.InitMenu()
            # File-picker rows (reference genome, reads, etc.).
            self.InitFiles(panel,sizer)
            # Start/Quit button row under the pickers.
            buttonrow = wx.BoxSizer(wx.HORIZONTAL)
            btn = wx.Button(panel, label="Start")
            btn.Bind(wx.EVT_BUTTON, self.map_reads)
            buttonrow.Add(btn,0,0,0,10)
            btn = wx.Button(panel, label="Quit")
            btn.Bind(wx.EVT_BUTTON, self.OnQuit)
            buttonrow.Add(btn,0,0,0,10)
            sizer.Add(buttonrow,0,0,0)
            self.InitList(panel,sizer)
            panel.SetSizer(sizer)
            # self.SetSize((1305, 700))
            self.SetSize((900, 750))
            #self.SetTitle('Simple menu')
            self.Centre()
            #self.Show(True)
            self.pid = None  # PID of the spawned mapping process, if any
#
def InitFiles(self,panel,sizer):
vars = self.vars
# Define
bmp = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_OTHER, (16, 16))
# REFERENCE
sizer3 = wx.BoxSizer(wx.HORIZONTAL)
label3 = wx.StaticText(panel, label='Choose a reference genome (FASTA) (REQUIRED):',size=(330,-1))
sizer3.Add(label3,0,wx.ALIGN_CENTER_VERTICAL,0)
self.picker3 = wx.lib.filebrowsebutton.FileBrowseButton(panel, id=wx.ID_ANY, dialogTitle='Please select the reference genome', fileMode=wx.FD_OPEN, fileMask='*.fna;*.fasta;*.fa', size=(400,30), startDirectory=os.path.dirname(vars.ref), initialValue=vars.ref, labelText='')
sizer3.Add(self.picker3, proportion=1, flag=wx.EXPAND|wx.ALL, border=5)
sizer3.Add(TPPIcon(panel, wx.ID_ANY, bmp, "Select a reference genome in FASTA format (can be a multi-contig fasta file)."), flag=wx.CENTER, border=0)
sizer3.Add((10, 1), 0, wx.EXPAND)
sizer.Add(sizer3,0,wx.EXPAND,0)
# REPLICON ID NAMES
sizer_replicon_ids = wx.BoxSizer(wx.HORIZONTAL)
label_replicon_ids = wx.StaticText(panel, label='ID names for each replicon: \n(if genome has multiple contigs)',size=(340,-1))
sizer_replicon_ids.Add(label_replicon_ids,0,wx.ALIGN_CENTER_VERTICAL,0)
self.replicon_ids = wx.TextCtrl(panel,value=vars.replicon_ids,size=(400,30))
sizer_replicon_ids.Add(self.replicon_ids, proportion=1.0, flag=wx.EXPAND|wx.ALL, border=5)
sizer_replicon_ids.Add(TPPIcon(panel, wx.ID_ANY, bmp, "Specify names of each contig within the reference genome separated by commas (if using wig_gb_to_csv.py you must use the contig names in the Genbank file). Only required if there are multiple contigs; can leave blank if there is just one sequence.\nEnter 'auto' for autogenerated ids."), flag=wx.CENTER, border=0)
sizer_replicon_ids.Add((10, 1), 0, wx.EXPAND)
sizer.Add(sizer_replicon_ids,0,wx.EXPAND,0)
# READS 1
sizer1 = wx.BoxSizer(wx.HORIZONTAL)
label1 = wx.StaticText(panel, label='Choose the Fastq file for read 1 (REQUIRED):',size=(330,-1))
sizer1.Add(label1,0,wx.ALIGN_CENTER_VERTICAL,0)
self.picker1 = wx.lib.filebrowsebutton.FileBrowseButton(panel, id=wx.ID_ANY, dialogTitle='Please select the .fastq file for read 1', fileMode=wx.FD_OPEN, fileMask='*.fastq;*.fq;*.reads;*.fasta;*.fa;*.fastq.gz', size=(400,30), startDirectory=os.path.dirname(vars.fq1), initialValue=vars.fq1, labelText='',changeCallback=self.OnChanged2)
sizer1.Add(self.picker1, proportion=1, flag=wx.EXPAND|wx.ALL, border=5)
sizer1.Add(TPPIcon(panel, wx.ID_ANY, bmp, "Select a file containing the reads in .FASTQ (or compressed FASTQ) format."), flag=wx.CENTER, border=0)
sizer1.Add((10, 1), 0, wx.EXPAND)
sizer.Add(sizer1,0,wx.EXPAND,0)
# READS 2
sizer2 = wx.BoxSizer(wx.HORIZONTAL)
label2 = wx.StaticText(panel, label='Choose the Fastq file for read 2:',size=(330,-1))
sizer2.Add(label2,0,wx.ALIGN_CENTER_VERTICAL,0)
self.picker2 = wx.lib.filebrowsebutton.FileBrowseButton(panel, id=wx.ID_ANY, dialogTitle='Please select the .fastq file for read 2', fileMode=wx.FD_OPEN, fileMask='*.fastq;*.fq;*.reads;*.fasta;*.fa;*.fastq.gz', size=(400,30), startDirectory=os.path.dirname(vars.fq2), initialValue=vars.fq2, labelText='', changeCallback=self.OnChanged2)
sizer2.Add(self.picker2, proportion=1, flag=wx.EXPAND|wx.ALL, border=5)
sizer2.Add(TPPIcon(panel, wx.ID_ANY, bmp, "Select a file containing the pair-end reads in .FASTQ (or compressed FASTQ) format. Optional."), flag=wx.CENTER, border=0)
sizer2.Add((10, 1), 0, wx.EXPAND)
sizer.Add(sizer2,0,wx.EXPAND,0)
# OUTPUT PREFIX
sizer5 = wx.BoxSizer(wx.HORIZONTAL)
label5 = wx.StaticText(panel, label='Prefix to use for output filenames (REQUIRED):',size=(340,-1))
sizer5.Add(label5,0,wx.ALIGN_CENTER_VERTICAL,0)
self.base = wx.TextCtrl(panel,value=vars.base,size=(400,30))
sizer5.Add(self.base, proportion=1.0, flag=wx.EXPAND|wx.ALL, border=5)
sizer5.Add(TPPIcon(panel, wx.ID_ANY, bmp, "Select a prefix that will be used when writing output files"), flag=wx.CENTER, border=0)
sizer5.Add((10, 1), 0, wx.EXPAND)
sizer.Add(sizer5,0,wx.EXPAND,0)
# PROTOCOL
sizer_protocol = wx.BoxSizer(wx.HORIZONTAL)
label_protocol = wx.StaticText(panel, label='Protocol used:',size=(340,-1))
sizer_protocol.Add(label_protocol,0,wx.ALIGN_CENTER_VERTICAL,0)
self.protocol = wx.ComboBox(p | anel,choices=['Sassetti','Mme1', 'Tn5'],size=(400,30))
self.protocol.SetStringSelection(vars.protocol)
sizer_protocol.Add(self.protocol, proportion=1, flag=wx.EXPAND|wx.ALL, border=5)
protocol_tooltip_text = """Select which protocol used to prepare the sequencing samples. Default values will populate the other fields | .
The Sassetti protocol generally assumes the reads include the primer prefix and part of the transposon sequence, followed by genomic sequence. It also assumes reads are sequenced in the forward direction. Barcodes are in read 2, along with genomic DNA from the other end of the fragment.
The Mme1 protocol generally assumes reads do NOT include the prim |
PetrDlouhy/django-fluent-comments | example/article/tests.py | Python | apache-2.0 | 2,825 | 0.002479 | # -*- coding: utf-8 -*-
# Author: Petr Dlouhý <petr.dlouhy@auto-mat.cz>
#
# Copyright (C) 2015 o.s. Auto*Mat
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from fluent_comments.compat import CommentForm
from freezegun import freeze_time
from article.models import Article
class CommentsTests(TestCase):
fixtures = ["data", ]
def setUp(self):
self.admin = User.objects.create_superuser('superuser', 'myemail@test.com', 'secret')
def test_admin_comments_access(self):
self.client.login(username=self.admin.username, password='secret')
response = self.client.get(reverse('admin:fluent_comments_fluentcomment_changelist'))
self.assertContains(response, "Comment", status_code=200)
def test_get_article_with_comment(self):
response = self.client.get(reverse('article-details', kwargs={"slug": "testing-article"}))
self.assertContains(response, "Comment", status_code=200)
def test_get_article_with_comment(self):
response = self.client.get(reverse('article-details', kwargs={"slug": "testing-article"}))
self.assertContains(response, "Comment", status_code=200)
@freeze_time("2016-01-04 17:00:00")
def test_comment_post(self):
content_type = "article.article"
object_pk = "1"
timestamp = "1451919617"
form = CommentForm(Article())
security_hash = form.generate_security_hash(content_type, object_pk, timestamp)
post_data = {
"content_type": content_type,
"object_pk": | object_pk,
"name": "Testing name",
"email": "test@email.com",
"comment": "Testing comment",
"timestamp": timestamp,
"security_hash": security_hash,
}
response = self.client.post(reverse("comments-post-comment-ajax"), post_data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertContains(response, "Testing comment", status_code=200)
self.assertEqual(response.status_code, 200, re | sponse.content.decode("utf-8"))
|
fmichea/lpbm | setup.py | Python | bsd-3-clause | 2,729 | 0.000733 | import os
import shlex
import sys
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
_ROOT = os.path.abspath(os.path.dirname(__file__))
class PyTest(TestCommand):
user_options = [
('args=', 'a', 'Additional arguments to pass to py.test'),
('debug=', 'D', 'Enable debugging of test suite (on, first, off)'),
('coverage=', 'C', 'Enable coverage of the test project (on, keep-result, off)'),
('exec-only=', 'k', 'Filter tests by test name or filename'),
]
def initialize_options(self):
TestCommand.initialize_options(self)
self.args, self.debug, self.coverage, self.exec_only = [], 'off', 'on', ''
def run(self):
import pytest
args = []
if self.debug in ['first', 'on']:
if self.debug == 'first':
args.append('-x')
args.extend(['--pdb', '-vv', '-s'])
if self.coverage in ['on', 'keep-result']:
args.extend([
'--cov-config', os.path.join(_ROOT, '.coveragerc'),
'--cov', 'lpbm',
'--cov-report', 'term-missing',
'--no-cov-on-fail',
])
if self.exec_only:
args.append('-k{}'.format(self.exec_only))
if self.args:
args.extend(shlex.split(self.args))
args.append(os.path.join(_ | ROOT, 'tests'))
print('execute: py.test', ' '.join(shlex.quote(arg) for arg in args))
try:
errno = pytest.main(args)
finally:
cov_file = os.path.join(_ROOT, '.coverage')
if self.coverage != 'keep-result' and os.path.exists(cov_file):
os.unlink(cov_file)
sys.exit(errno)
setup(
# General information.
name='lpbm',
description='Lightweight personal blog maker | ',
url='http://github.com/fmichea/lpbm',
# Version information.
license='BSD',
version='3.0.0',
# Author.
author='Franck Michea',
author_email='franck.michea@gmail.com',
# File information.
install_requires=open('requirements/command.txt').readlines(),
packages=find_packages(exclude=['test', 'doc']),
package_data={'': ['*.css', '*.html']},
include_package_data=True,
entry_points={
'console_scripts': [
'lpbm = lpbm.main:main',
],
},
cmdclass={
'test': PyTest,
},
# Categories
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
PhasesResearchLab/ESPEI | tests/testing_data.py | Python | mit | 49,302 | 0.001765 | """Databases and datasets used in common tests"""
import yaml
YAML_LOADER = yaml.FullLoader
A_B_REGULAR_SOLUTION_TDB = """
ELEMENT A ALPHA 0.0 0.0 0.0 !
ELEMENT B ALPHA 0.0 0.0 0.0 !
TYPE_DEFINITION % SEQ *!
PHASE ALPHA % 1 1 !
CONSTITUENT ALPHA : A B : !
$ Default the lattice stability and regular solution parameter to zero
FUNCTION G_ALPHA_A 1 0.0; 10000 N !
FUNCTION G_ALPHA_B 1 0.0; 10000 N !
FUNCTION L_ALPHA 1 0.0; 10000 N !
$ Set the ALPHA lattice stability to zero
PARAMETER G(ALPHA,A;0) 1 G_ALPHA_A; 10000 N !
PARAMETER G(ALPHA,B;0) 1 G_ALPHA_B; 10000 N !
PARAMETER L(ALPHA,A,B;0) 1 L_ALPHA; 10000 N !
"""
CU_MG_TDB = """$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
$ Date: 2017-09-27 21:10
$ Components: CU, MG, VA
$ Phases: CUMG2, FCC_A1, HCP_A3, LAVES_C15, LIQUID
$ Generated by brandon (pycalphad 0.5.2.post1)
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
ELEMENT CU BLANK 0 0 0 !
ELEMENT MG BLANK 0 0 0 !
ELEMENT VA BLANK 0 0 0 !
FUNCTION GFCCCU 298.15 GHSERCU#; 3200.0 N !
FUNCTION GFCCMG 298.15 -0.9*T + GHSERMG# + 2600; 3000.0 N !
FUNCTION GHCPCU 298.15 0.2*T + GHSERCU# + 600; 3200.0 N !
FUNCTION GHCPMG 298.15 GHSERMG#; 3000.0 N !
FUNCTION GHSERCU 298.15 1.29223E-7*T**3 - 0.00265684*T**2 - 24.112392*T*LN(T)
+ 130.485235*T - 7770.458 + 52478*T**(-1); 1357.77 Y -31.38*T*LN(T) +
183.803828*T - 13542.026 + 3.64167E+29*T**(-9); 3200.0 N !
FUNCTION GHSERMG 298.15 -1.393669E-6*T**3 + 0.0004858*T**2 -
26.1849782*T*LN(T) + 143.675547*T - 8367.34 + 78950*T**(-1); 923.0 Y
-34.3088*T*LN(T) + 204.716215*T - 14130.185 + 1.038192E+28*T**(-9); 3000.0
N !
FUNCTION GHSERVA 1 0; 10000 N !
FUNCTION GLAVCU 298.15 3.87669E-7*T**3 - 0.00797052*T**2 - 72.337176*T*LN(T)
+ 391.455705*T - 8311.374 + 157434*T**(-1); 1357.77 Y -94.14*T*LN(T) +
551.411484*T - 25626.078 + 1.092501E+30*T**(-9); 3200.0 N !
FUNCTION GLAVMG 298.15 -4.181007E-6*T**3 + 0.0014574*T**2 -
78.5549346*T*LN(T) + 431.026641*T - 10102.02 + 236850*T**(-1); 923.0 Y
-102.9264*T*LN(T) + 614.148645*T - 27390.555 + 3.11458E+28*T**(-9); 3000.0
N !
FUNCTION GLIQCU 298.15 -5.8489E-21*T**7 - 9.511904*T + GHSERCU# + 12964.735;
1357.77 Y -31.38*T*LN(T) + 173.881484*T - 46.545; 3200.0 N !
FUNCTION GLIQMG 298.15 -8.0176E-20*T**7 - 8.83693*T + GHSERMG# + 8202.243;
923.0 Y -34.3088*T*LN(T) + 195.324057*T - 5439.869; 3000.0 N !
FUNCTION VV0000 1 -32429.6; 10000 N !
FUNCTION VV0001 1 -4.12896; 10000 N !
FUNCTION VV0002 1 8.2363; 10000 N !
FUNCTION VV0003 1 -14.0865; 10000 N !
FUNCTION VV0004 1 -11.2723; 10000 N !
FUNCTION VV0005 1 11.1114; 10000 N !
FUNCTION VV0006 1 -8.29125; 10000 N !
FUNCTION VV0007 1 -14.9845; 10000 N !
FUNCTION VV0008 1 -40470.2; 10000 N !
FUNCTION VV0009 1 104160.0; 10000 N !
FUNCTION VV0010 1 17766.4; 10000 N !
FUNCTION VV0011 1 150325.0; 10000 N !
FUNCTION VV0012 1 21243.0; 10000 N !
FUNCTION VV0013 1 214671.0; 10000 N !
FUNCTION VV0014 1 14321.1; 10000 N !
FUNCTION VV0015 1 -4923.18; 10000 N !
FUNCTION VV0016 1 -1962.8; 10000 N !
FUNCTION VV0017 1 -31626.6; 10000 N !
TYPE_DEFINITION % SEQ * !
DEFINE_SYSTEM_DEFAULT ELEMENT 2 !
DEFAULT_COMMAND DEFINE_SYSTEM_ELEMENT VA !
PHASE CUMG2 % 2 1 2 !
CONSTITUENT CUMG2 :CU:MG: !
PHASE FCC_A1 % 2 1 1 !
CONSTITUENT FCC_A1 :CU,MG:VA: !
PHASE HCP_A3 % 2 1 0.5 !
CONSTITUENT HCP_A3 :CU,MG:VA: !
PHASE LAVES_C15 % 2 2 1 !
CONSTITUENT LAVES_C15 :CU,MG:CU,MG: !
PHASE LIQUID % 1 1 !
CONSTITUENT LIQUID :CU,MG: !
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
$ CU $
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
PARAMETER G(FCC_A1,CU:VA;0) 1 GFCCCU#; 10000 N !
PARAMETER G(HCP_A3,CU:VA;0) 1 GHCPCU#; 10000 N !
PARAMETER G(LAVES_C15,CU:CU;0) 1 GLAVCU#; 10000 N !
PARAMETER G(LIQUID,CU;0) 1 GLIQCU#; 10000 N !
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
$ MG $
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
PARAMETER G(FCC_A1,MG:VA;0) 1 GFCCMG#; 10000 N !
PARAMETER G(HCP_A3,MG:VA;0) 1 GHCPMG#; 10000 N !
PARAMETER G(LAVES_C15,MG:MG;0) 1 GLAVMG#; 10000 N !
PARAMETER G(LIQUID,MG;0) 1 GLIQMG#; 10000 N !
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
$ CU-MG $
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
PARAMETER G(CUMG2,CU:MG;0) 1 GHSERCU# + 2*GHSERMG# + VV0000#; 10000 N !
PARAMETER L(FCC_A1,CU,MG:VA;0) 1 VV0003#; 10000 N !
PARAMETER L(FCC_A1,CU,MG:VA;1) 1 VV0002#; 10000 N !
PARAMETER L(FCC_A1,CU,MG:VA;2) 1 VV0001#; 10000 N !
PARAMETER L(HCP_A3,CU,MG:VA;0) 1 VV0007#; 10000 N !
PARAMETER L(HCP_A3,CU,MG:VA;1) 1 VV0006#; 10000 N !
PARAMETER L(HCP_A3,CU,MG:VA;2) 1 VV0005#; 10000 N !
PARAMETER L(HCP_A3,CU,MG:VA;3) 1 VV0004#; 10000 N !
PARAMETER G(LAVES_C15,CU:MG;0) 1 2*GHSERCU# + GHSERMG# + VV0008#; 10000 N !
PARAMETER G(LAVES_C15,MG:CU;0) 1 GHSERCU# + 2*GHSERMG# + VV0009#; 10000 N !
PARAMETER L(LAVES_C15,CU:CU,MG;0) 1 VV0010#; 10000 N !
PARAMETER L(LAVES_C15,CU,MG:CU;0) 1 VV0011#; 10000 N !
PARAMETER L(LAVES_C15,CU,MG:MG;0) 1 VV0012#; 10000 N !
PARAMETER L(LAVES_C15,MG:CU,MG;0) 1 VV0013#; 10000 N !
PARAMETER L(LIQUID,CU,MG;0) 1 VV0017#; 10000 N !
PARAMETER L(LIQUID,CU,MG;1) 1 VV0016#; 10000 N !
PARAMETER L(LIQUID,CU,MG;2) 1 VV0015#; 10000 N !
PARAMETER L(LIQUID,CU,MG;3) 1 VV0014#; 10000 N !
"""
CU_MG_TDB_FCC_ONLY = """$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
$ Cu-Mg, FCC only
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
ELEMENT CU BLANK 0 0 0 !
ELEMENT MG BLANK 0 0 0 !
ELEMENT VA BLANK 0 0 0 !
FUNCTION GFCCCU 298.15 GHSERCU#; 3200.0 N !
FUNCTION GFCCMG 298.15 -0.9*T + GHSERMG# + 2600; 3000.0 N !
FUNCTION GHSERCU 298.15 1.29223E-7*T**3 - 0.00265684*T**2 - 24.112392*T*LN(T)
+ 130.485235*T - 7770.458 + 52478*T**(-1); 1357.77 Y -31.38*T*LN(T) +
183.803828*T - 13542.026 + 3.64167E+29*T**(-9); 3200.0 N !
FUNCTION GHSERMG 298.15 -1.393669E-6*T**3 + 0.0004858*T**2 -
26.1849782*T*LN(T) + 143.675547*T - 8367.34 + 78950*T**(-1); 923.0 Y
-34.3088*T*LN(T) + 204.716215*T - 14130.185 + 1.038192E+28*T**(-9); 3000.0
N !
FUNCTION GHSERVA 1 0; 10000 N !
FUNCTION VV0001 1 -4.12896; 10000 N !
FUNCTION VV0002 1 8.2363; 10000 N !
FUNCTION VV0003 1 -14.0865; 10000 N !
TYPE_DEFINITION % SEQ * !
DEFINE_SYSTEM_DEFAULT ELEMENT 2 !
DEFAULT_COMMAN | D DEFINE_SYSTEM_ELEMENT VA !
PHASE FCC_A1 % 2 1 1 !
CONSTITUENT FCC_A1 :CU,MG:VA: !
PARAMETER G(FCC_A1,CU:VA;0) 1 GFCCCU#; 10000 N !
PARAMETER G(FCC_A1,MG:VA;0) 1 GFCCMG#; 10000 N !
PARAMETER L(FCC_A1,CU,MG:VA;0) 1 VV0003#; 10000 N !
PARAMETER L(FCC_A1,CU, | MG:VA;1) 1 VV0002#; 10000 N !
PARAMETER L(FCC_A1,CU,MG:VA;2) 1 VV0001#; 10000 N !
"""
CU_MG_TDB_ASSOC = """$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
$ ASSOC TDB FROM CU_MG
$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
ELEMENT CU BLANK 0 0 0 !
ELEMENT MG BLANK 0 0 0 !
ELEMENT VA BLANK 0 0 0 !
SPECIES COMPA CU1.0 !
SPECIES COMPB CU1MG2.0 !
SPECIES COMPC MG1.0 !
FUNCTION GFCCCU 298.15 GHSERCU#; 3200.0 N !
FUNCTION GFCCMG 298.15 -0.9*T + GHSERMG# + 2600; 3000.0 N !
FUNCTION GHCPCU 298.15 0.2*T + GHSERCU# + 600; 3200.0 N !
FUNCTION GHCPMG 298.15 GHSERMG#; 3000.0 N !
FUNCTION GHSERCU 298.15 1.29223E-7*T**3 - 0.00265684*T**2 - 24.112392*T*LN(T)
+ 130.485235*T - 7770.458 + 52478*T**(-1); 1357.77 Y -31.38*T*LN(T) +
183.803828*T - 13542.026 + 3.64167E+29*T**(-9); 3200.0 N !
FUNCTION GHSERMG 298.15 -1.393669E-6*T**3 + 0.0004858*T**2 -
26.1849782*T*LN(T) + 143.675547*T - 8367.34 + 78950*T**(-1); 923.0 Y
-34.3088*T*LN(T) + 204.716215*T - 14130.185 + 1.038192E+28*T**(-9); 3000.0
N !
FUNCTION GHSERVA 1 0; 10000 N !
FUNCTION GLAVCU 298.15 3.87669E-7*T**3 - 0.00797052*T**2 - 72.337176*T*LN(T)
+ 391.455705*T - 8311.374 + 157434*T**(-1); 1357.77 Y -94.14*T*LN(T) +
551.411484*T - 25626.078 + 1.09 |
cbertinato/pandas | pandas/tests/io/conftest.py | Python | bsd-3-clause | 2,699 | 0 | from distutils.version import LooseVersion
import os
import pytest
import pandas.util.testing as tm
from pandas.io.parsers import read_csv
@pytest.fixture
def tips_file(datapath):
"""Path to the tips dataset"""
return datapath('io', 'parser', 'data', 'tips.csv')
@pytest.fixture
def jsonl_file(datapath):
"""Path a JSONL dataset"""
return datapath('io', 'parser', 'data', 'items.jsonl')
@ | pytest.fixture
def salaries_table(datapath):
"""DataFrame with the salaries dataset"""
return read_csv(datapath('io', 'parser', 'data', 'salaries.csv'), sep='\t')
@pytest.fixture
def s3_resource(tips_file, jsonl_file):
"""Fixture for mocking S3 interaction.
The primary bucket name is "pandas-test". The follow | ing datasets
are loaded.
- tips.csv
- tips.csv.gz
- tips.csv.bz2
- items.jsonl
A private bucket "cant_get_it" is also created. The boto3 s3 resource
is yielded by the fixture.
"""
pytest.importorskip('s3fs')
boto3 = pytest.importorskip('boto3')
botocore = pytest.importorskip('botocore')
if LooseVersion(botocore.__version__) < LooseVersion("1.11.0"):
# botocore leaks an uncatchable ResourceWarning before 1.11.0;
# see GH 23731 and https://github.com/boto/botocore/issues/1464
pytest.skip("botocore is leaking resources before 1.11.0")
with tm.ensure_safe_environment_variables():
# temporary workaround as moto fails for botocore >= 1.11 otherwise,
# see https://github.com/spulec/moto/issues/1924 & 1952
os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
moto = pytest.importorskip('moto')
test_s3_files = [
('tips#1.csv', tips_file),
('tips.csv', tips_file),
('tips.csv.gz', tips_file + '.gz'),
('tips.csv.bz2', tips_file + '.bz2'),
('items.jsonl', jsonl_file),
]
def add_tips_files(bucket_name):
for s3_key, file_name in test_s3_files:
with open(file_name, 'rb') as f:
conn.Bucket(bucket_name).put_object(
Key=s3_key,
Body=f)
try:
s3 = moto.mock_s3()
s3.start()
# see gh-16135
bucket = 'pandas-test'
conn = boto3.resource("s3", region_name="us-east-1")
conn.create_bucket(Bucket=bucket)
add_tips_files(bucket)
conn.create_bucket(Bucket='cant_get_it', ACL='private')
add_tips_files('cant_get_it')
yield conn
finally:
s3.stop()
|
Donkyhotay/MoonPy | zope/testing/testrunner-ex/sample1/sampletests/test112.py | Python | gpl-3.0 | 3,574 | 0.005596 | ##############################################################################
#
# Copyright (c) 2003 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
from zope.testing import doctest
import samplelayers
layername = 'samplelayers.Layer112'
layer = samplelayers.Layer112
x=0
y=0
z=0
class TestA(unittest.TestCase):
layer = layername
def setUp(self):
global x
x = 1
def tearDown(self):
global x
x = 0
def test_x1(self):
self.assertEqual(x, 1)
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
def test_y0(self):
self.assertEqual(y, 0)
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
def test_z0(self):
self.assertEqual(z, 0)
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
class TestB(unittest.TestCase):
layer = layername
def setUp(self):
global y
y = 1
def tearDown(self):
global y
y = 0
def test_y1(self):
self.assertEqual(y, 1)
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
def test_x0(self):
self.assertEqual(x, 0)
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
def test_z0(self):
self.assertEqual(z, 0)
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
class TestNotMuch(unittest.TestCase):
layer = layername
def test_1(self):
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
def test_2(self):
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layerx)
def test_3(self):
self.assertEqual(samplelayers.layer, layer.layer)
self.assertEqual(samplelayers.layerx, layer.layer | x)
def setUp(test):
test.globs['z'] = 1
test.globs['layer'] = layer.layer
test.globs['layerx'] = layer.layerx
def test_y0(self):
"""
>>> y
0
>>> (layer == samplelayers.layer), (layerx | == samplelayers.layerx)
(True, True)
"""
def test_x0(self):
"""
>>> x
0
>>> (layer == samplelayers.layer), (layerx == samplelayers.layerx)
(True, True)
"""
def test_z1(self):
"""
>>> z
1
>>> (layer == samplelayers.layer), (layerx == samplelayers.layerx)
(True, True)
"""
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestA))
suite.addTest(unittest.makeSuite(TestB))
suite.addTest(unittest.makeSuite(TestNotMuch))
s = doctest.DocTestSuite(setUp=setUp)
s.layer = layer
suite.addTest(s)
s = doctest.DocFileSuite('../../sampletestsl.txt', setUp=setUp)
s.layer = layer
suite.addTest(s)
return suite
|
plotly/plotly.py | packages/python/plotly/plotly/validators/layout/xaxis/_ticktextsrc.py | Python | mit | 410 | 0.002439 | import _p | lotly_utils.basevalidators
class TicktextsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="ticktextsrc", parent_name="layout.xaxis", **kwargs):
super(TicktextsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
| edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
|
ryanofsky/bitcoin | test/functional/feature_fee_estimation.py | Python | mit | 11,247 | 0.003201 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test fee estimation code."""
from decimal import Decimal
import random
from test_framework.mininode import CTransaction, CTxIn, CTxOut, COutPoint, ToHex, COIN
from test_framework.script import CScript, OP_1, OP_DROP, OP_2, OP_HASH160, OP_EQUAL, hash160, OP_TRUE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
connect_nodes,
satoshi_round,
sync_blocks,
sync_mempools,
)
# Construct 2 trivial P2SH's and the ScriptSigs that spend them
# So we can create many transactions without needing to spend
# time signing.
REDEEM_SCRIPT_1 = CScript([OP_1, OP_DROP])
REDEEM_SCRIPT_2 = CScript([OP_2, OP_DROP])
P2SH_1 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_1), OP_EQUAL])
P2SH_2 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_2), OP_EQUAL])
# Associated ScriptSig's to spend satisfy P2SH_1 and P2SH_2
SCRIPT_SIG = [CScript([OP_TRUE, REDEEM_SCRIPT_1]), CScript([OP_TRUE, REDEEM_SCRIPT_2])]
def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
"""Create and send a transaction with a random fee.
The transaction pays to a trivial P2SH script, and assumes that its inputs
are of the same form.
The function takes a list of confirmed outputs and unconfirmed outputs
and attempts to use the confirmed list first for its inputs.
It adds the newly created outputs to the unconfirmed list.
Returns (raw transaction, fee)."""
# It's best to exponentially distribute our random fees
# because the buckets are exponentially spaced.
# Exponentially distributed from 1-128 * fee_increment
rand_fee = float(fee_increment) * (1.1892 ** random.randint(0, 28))
# Total fee ranges from min_fee to min_fee + 127*fee_increment
fee = min_fee - fee_increment + satoshi_round(rand_fee)
tx = CTransaction()
total_in = Decimal("0.00000000")
while total_in <= (amount + fee) and len(conflist) > 0:
t = conflist.pop(0)
total_in += t["amount"]
tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
if total_in <= amount + fee:
while total_in <= (amount + fee) and len(unconflist) > 0:
t = unconflist.pop(0)
total_in += t["amount"]
tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
if total_in <= amount + fee:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount + fee, total_in))
tx.vout.append(CTxOut(int((total_in - amount - fee) * COIN), P2SH_1))
tx.vout.append(CTxOut(int(amount * COIN), P2SH_2))
# These transactions don't need to be signed, but we still have to insert
# the ScriptSig that will satisfy the ScriptPubKey.
for inp in tx.vin:
inp.scriptSig = SCRIPT_SIG[inp.prevout.n]
txid = from_node.sendrawtransaction(ToHex(tx), True)
unconflist.append({"txid": txid, "vout": 0, "amount": total_in - amount - fee})
unconflist.append({"txid": txid, "vout": 1, "amount": amount})
return (ToHex(tx), fee)
def split_inputs(from_node, txins, txouts, initial_split=False):
"""Generate a lot of inputs so we can generate a ton of transactions.
This function takes an input from txins, and creates and sends a transaction
which splits the value into 2 outputs which are appended to txouts.
Previously this was designed to be small inputs so they wouldn't have
a high coin age when the notion of priority still existed."""
prevtxout = txins.pop()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(prevtxout["txid"], 16), prevtxout["vout"]), b""))
half_change = satoshi_round(prevtxout["amount"] / 2)
rem_change = prevtxout["amount"] - half_change - Decimal("0.00001000")
tx.vout.append(CTxOut(int(half_change * COIN), P2SH_1))
tx.vout.append(CTxOut(int(rem_change * COIN), P2SH_2))
# If this is the initial split we actually need to sign the transaction
# Otherwise we just need to insert the proper ScriptSig
if (initial_split):
completetx = from_node.signrawtransaction(ToHex(tx))["hex"]
else:
tx.vin[0].scriptSig = SCRIPT_SIG[prevtxout["vout"]]
completetx = ToHex(tx)
txid = from_node.sendrawtransaction(completetx, True)
txouts.ap | pend({"txid": txid, "vout": 0, "amount": half_change})
txouts.append({"txid": txid, "vout": 1, "amount": rem_change})
def check_estimates(node, fees_seen, max_invalid):
"""Call estimatesmartfee and verify that the estimates meet certain invariants."""
de | lta = 1.0e-6 # account for rounding error
last_feerate = float(max(fees_seen))
all_smart_estimates = [node.estimatesmartfee(i) for i in range(1, 26)]
for i, e in enumerate(all_smart_estimates): # estimate is for i+1
feerate = float(e["feerate"])
assert_greater_than(feerate, 0)
if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen):
raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
% (feerate, min(fees_seen), max(fees_seen)))
if feerate - delta > last_feerate:
raise AssertionError("Estimated fee (%f) larger than last fee (%f) for lower number of confirms"
% (feerate, last_feerate))
last_feerate = feerate
if i == 0:
assert_equal(e["blocks"], 2)
else:
assert_greater_than_or_equal(i + 1, e["blocks"])
class EstimateFeeTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
def setup_network(self):
"""
We'll setup the network to have 3 nodes that all mine with different parameters.
But first we need to use one node to create a lot of outputs
which we will use to generate our transactions.
"""
self.add_nodes(3, extra_args=[["-maxorphantx=1000", "-whitelist=127.0.0.1"],
["-blockmaxsize=17000", "-maxorphantx=1000"],
["-blockmaxsize=8000", "-maxorphantx=1000"]])
# Use node0 to mine blocks for input splitting
# Node1 mines small blocks but that are bigger than the expected transaction rate.
# NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes,
# (17k is room enough for 110 or so transactions)
# Node2 is a stingy miner, that
# produces too small blocks (room for only 55 or so transactions)
def transact_and_mine(self, numblocks, mining_node):
min_fee = Decimal("0.00001")
# We will now mine numblocks blocks generating on average 100 transactions between each block
# We shuffle our confirmed txout set before each set of transactions
# small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible
# resorting to tx's that depend on the mempool when those run out
for i in range(numblocks):
random.shuffle(self.confutxo)
for j in range(random.randrange(100 - 50, 100 + 50)):
from_index = random.randint(1, 2)
(txhex, fee) = small_txpuzzle_randfee(self.nodes[from_index], self.confutxo,
self.memutxo, Decimal("0.005"), min_fee, min_fee)
tx_kbytes = (len(txhex) // 2) / 1000.0
self.fees_per_kb.append(float(fee) / tx_kbytes)
sync_mempools(self.nodes[0:3], wait=.1)
mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"]
sync_blocks(self.nodes[0:3], wait=.1)
# update which txouts are confirmed
newmem = []
for utx in self.memutxo:
if utx["txid"] in mined:
self.confutxo.append(utx)
else:
newmem.append(utx)
|
devilry/devilry-django | devilry/devilry_import_v2database/migrations/0002_auto_20190624_1238.py | Python | bsd-3-clause | 463 | 0 | # Generated by Django 2.2.1 on 2019-06-24 10:38
import django.contrib.postgres.fields.jsonb
from django.db import | migrations
class Migration(migrations.Migration):
dependencies = [
('devilry_import_v2database', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='imp | ortedmodel',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
]
|
jhole89/convet-image-classifier | test/test_dataset.py | Python | gpl-3.0 | 1,572 | 0.000636 | from main.dataset import DataSet
import numpy as np
def test_dataset(load_image_data, image_size):
images, labels, ids, cls, _ = load_image_data
dataset = DataSet(images, labels, ids, cls)
assert sorted(list(dataset.cls)) == sorted(['cat', 'dog'] * 10)
assert dataset.cls.shape == (20,)
assert dataset.epochs_completed == 0
assert dataset.ids.shape == (20,)
assert dataset.images.shape == (20, image_size, image_ | size, 3)
assert dataset.images.dtype == np.float32
assert dataset.images.min() == float(0)
assert dataset.images.max() == float(1)
assert np.array_equal(dataset.labels, [[1., 0.]] * 10 + [[0., 1.]] * 10)
assert dataset.labels.shape == (20, 2)
assert dataset.labels.dtype == np.float64
assert dataset.labels.min() == float(0)
assert dataset.labels.max() == float(1)
assert dataset.num_examples | == 20
def test_next_batch(load_image_data, image_size):
images, labels, ids, cls, _ = load_image_data
dataset = DataSet(images, labels, ids, cls)
image_batch, label_batch, id_batch, cls_batch = dataset.next_batch(batch_size=2)
assert list(cls_batch) == ['cat', 'cat']
assert cls_batch.shape == (2,)
assert image_batch.shape == (2, image_size, image_size, 3)
assert image_batch.dtype == np.float32
assert np.array_equal(label_batch, [[1., 0.], [1., 0.]])
assert label_batch.shape == (2, 2)
assert label_batch.dtype == np.float64
assert label_batch.min() == float(0)
assert label_batch.max() == float(1)
assert id_batch.shape == (2,)
|
hzj123/56th | pombola/core/admin.py | Python | agpl-3.0 | 8,525 | 0.001525 | from django import forms
from django.contrib import admin
from django.contrib.gis import db
from django.contrib.contenttypes.generic import GenericTabularInline
from django.core.exceptions import ValidationError
from ajax_select import make_ajax_form
from ajax_select.admin import AjaxSelectAdmin
from pombola.core import models
from pombola.scorecards import models as scorecard_models
from pombola.images.admin import ImageAdminInline
from pombola.slug_helpers.admin import StricterSlugFieldMixin
def create_admin_link_for(obj, link_text):
return u'<a href="%s">%s</a>' % (obj.get_admin_url(), link_text)
class ContentTypeModelAdmin(admin.ModelAdmin):
def show_foreign(self, obj):
return create_admin_link_for(
obj.content_object, unicode(obj.content_object))
show_foreign.allow_tags = True
class ContactKindAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ["name"]}
search_fields = ['name']
class AlternativePersonNameInlineAdmin(admin.TabularInline):
model = models.AlternativePersonName
extra = 0
class InformationSourceAdmin(ContentTypeModelAdmin):
list_display = ['source', 'show_foreign', 'entered']
list_filter = ['entered']
search_fields = ['source']
class InformationSourceInlineAdmin(GenericTabularInline):
model = models.InformationSource
| extra = 0
can_delete = False
fields = ['source', 'note', 'entered']
formfield_overrides = {
db.models.TextField: {
'widget': forms.Textarea(attrs={'rows':2, 'cols':40}),
},
}
class ContactAdmin(ContentTypeModelAdmin):
list_display = ['kind', 'value', 'show_foreign']
| search_fields = ['value']
inlines = [InformationSourceInlineAdmin]
class ContactInlineAdmin(GenericTabularInline):
model = models.Contact
extra = 0
can_delete = True
fields = ['kind', 'value', 'source', 'note']
formfield_overrides = {
db.models.TextField: {
'widget': forms.Textarea(attrs={'rows':2, 'cols':20}),
},
}
class IdentifierAdmin(ContentTypeModelAdmin):
list_display = ['scheme', 'identifier', 'show_foreign']
search_fields = ['identifier']
inlines = [InformationSourceInlineAdmin]
class IdentifierInlineAdmin(GenericTabularInline):
model = models.Identifier
extra = 0
can_delete = False
fields = ['scheme', 'identifier']
class PositionAdmin(AjaxSelectAdmin):
list_display = [
'id',
'show_person',
'show_organisation',
'show_place',
'show_title',
'start_date',
'end_date',
]
search_fields = ['person__legal_name', 'organisation__name', 'title__name']
list_filter = ['title__name']
inlines = [InformationSourceInlineAdmin]
readonly_fields = ['sorting_start_date', 'sorting_end_date']
form = make_ajax_form(
models.Position,
{
'organisation': 'organisation_name',
'place': 'place_name',
'person': 'person_name',
'title': 'title_name',
}
)
def show_person(self, obj):
return create_admin_link_for(obj.person, obj.person.name)
show_person.allow_tags = True
def show_organisation(self, obj):
return create_admin_link_for(obj.organisation, obj.organisation.name)
show_organisation.allow_tags = True
def show_place(self, obj):
return create_admin_link_for(obj.place, obj.place.name)
show_place.allow_tags = True
def show_title(self, obj):
return create_admin_link_for(obj.title, obj.title.name)
show_title.allow_tags = True
class PositionInlineAdmin(admin.TabularInline):
model = models.Position
extra = 3 # do not set to zero as the autocomplete does not work in inlines
can_delete = True
fields = [
'person',
'organisation',
'place',
'title',
'subtitle',
'category',
'start_date',
'end_date',
]
form = make_ajax_form(
models.Position,
{
'organisation': 'organisation_name',
'place': 'place_name',
'person': 'person_name',
'title': 'title_name',
},
)
class ScorecardInlineAdmin(GenericTabularInline):
model = scorecard_models.Entry
fields = ('date', 'score', 'disabled')
readonly_fields = ('date', 'score')
extra = 0
can_delete = False
class PersonAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ["legal_name"]}
inlines = [
AlternativePersonNameInlineAdmin,
PositionInlineAdmin,
ContactInlineAdmin,
InformationSourceInlineAdmin,
ImageAdminInline,
ScorecardInlineAdmin,
IdentifierInlineAdmin,
]
list_display = ['slug', 'name', 'date_of_birth']
list_filter = ['can_be_featured']
search_fields = ['legal_name']
class PlaceAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('slug', 'name', 'kind', 'show_organisation')
list_filter = ('kind',)
search_fields = ('name', 'organisation__name')
inlines = (
InformationSourceInlineAdmin,
ScorecardInlineAdmin,
IdentifierInlineAdmin,
)
def show_organisation(self, obj):
if obj.organisation:
return create_admin_link_for(
obj.organisation, obj.organisation.name)
else:
return '-'
show_organisation.allow_tags = True
class PlaceInlineAdmin(admin.TabularInline):
model = models.Place
extra = 0
can_delete = False
fields = ['name', 'slug', 'kind']
class OrganisationAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = [
PlaceInlineAdmin,
PositionInlineAdmin,
ContactInlineAdmin,
InformationSourceInlineAdmin,
IdentifierInlineAdmin,
ImageAdminInline,
]
list_display = ['slug', 'name', 'kind']
list_filter = ['kind']
search_fields = ['name']
class OrganisationKindAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
search_fields = ['name']
class PlaceKindAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ['slug', 'name']
search_fields = ['name']
class PositionTitleAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
search_fields = ['name']
# Add these to the admin
admin.site.register(models.Contact, ContactAdmin)
admin.site.register(models.ContactKind, ContactKindAdmin)
admin.site.register(models.Identifier, IdentifierAdmin)
admin.site.register(models.InformationSource, InformationSourceAdmin)
admin.site.register(models.Organisation, OrganisationAdmin)
admin.site.register(models.OrganisationKind, OrganisationKindAdmin)
admin.site.register(models.Person, PersonAdmin)
admin.site.register(models.Place, PlaceAdmin)
admin.site.register(models.PlaceKind, PlaceKindAdmin)
admin.site.register(models.Position, PositionAdmin)
admin.site.register(models.PositionTitle, PositionTitleAdmin)
class LogAdmin(admin.ModelAdmin):
"""Create an admin view of the history/log table"""
list_display = (
'action_time',
'user',
'content_type',
'change_message',
'is_addition',
'is_change',
'is_deletion',
)
list_filter = ['action_time', 'user', 'content_type']
ordering = ('-action_time',)
readonly_fields = [
'user',
'content_type',
'object_id',
'object_repr',
'action_flag',
'change_message',
]
date_hierarchy = 'action_time'
#We don't want people changing this historical record:
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
#returning false causes table to not show up in admin page :-(
#I guess we have to allow changing for now
return True
def has_delete_permission(self, |
nickstenning/honcho | tests/integration/test_main.py | Python | mit | 382 | 0 | import pytest
i | mport textwrap
@pytest.mark.parametrize('testenv', [{
'Procfile': textwrap.dedent("""
foo: python web.py
bar: ruby worker.rb
""")
}], indirect=True)
def test_main(testenv):
result = testenv.run(['python', '-m', 'honcho', 'check'], check=True)
assert 'Valid procfile detected' in resul | t.stderr
assert 'foo, bar' in result.stderr
|
apsmi/PyTanks | server_dispatcher.py | Python | gpl-2.0 | 4,595 | 0.007973 | # -*- coding: utf-8 -*-
import asyncore
import socket
import struct
import pickle
from server_tank import Tank, Tank_config
from server_player import Game_Client
# сокет, принимающий соединение от клиентов
class Game_Server_UDP(asyncore.dispatcher):
# инициализация
def __init__(self, host, port, BLOCK_SIZE, LEVEL_W, LEVEL_H, players_green, players_yellow,
total_level_width, total_level_height, level_width, level_height, BLOCK_DEMAGE, FRAME_RATE, blocks):
asyncore.dispatcher.__init__(self)
self.BLOCK_SIZE, self.LEVEL_W, self.LEVEL_H, self.players_green, self.players_yellow = \
BLOCK_SIZE, LEVEL_W, LEVEL_H, playe | rs_green, players_yellow
self.total_level_width, self.total_level_height, self.level_width, self.level_height, self.BLOCK_DEMAGE, self.FRAME_RATE, self.blocks = \
total_level_width, total_level_height, level_width, level_height, BLOCK_DEMAGE, FRAME_RATE, blocks
self.create_socket(socket.AF_INET, socket.SOCK_DGRAM)
self.set_reuse_addr()
self.bind( | (host, port))
self.player_count = 0 # количество подключенных клиентов
self.players = [] # список пдключенных клиентов
# подготовка пакета к передаче
def pack_data(self, data):
tmp = pickle.dumps(data)
l = len(tmp)
return struct.pack('L', l) + tmp
# входящее соедение - создается новый клиент
def handle_read(self):
# получем номер клиентского порта
data, addr = self.recvfrom(4)
client_port = struct.unpack('L',data)[0]
# создаем UPD сокет для клиента
socket_udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
socket_udp.bind(('', 0))
server_port = socket_udp.getsockname()[1]
# отправляем порт созданного сокета клиенту
buf = struct.pack('L', server_port)
socket_udp.sendto(buf, addr)
# создаем нового клиента
addr = (addr[0], client_port)
self.player_count += 1
player = Game_Client(socket_udp, addr)
# создаем спрайт нового клиента
#x = random.randint(self.BLOCK_SIZE, (self.LEVEL_W - 2) * self.BLOCK_SIZE)
#y = random.randint(self.BLOCK_SIZE, (self.LEVEL_H - 2) * self.BLOCK_SIZE)
player_config = Tank_config(min_x=self.BLOCK_SIZE, max_x=(self.LEVEL_W - 2) * self.BLOCK_SIZE,
min_y=self.BLOCK_SIZE, max_y=(self.LEVEL_H - 2) * self.BLOCK_SIZE,
speed=2, lifes=1, dead_count=15)
player_sprite = Tank(player_config)
player.sprite = player_sprite
# идентификатор создаваемого игрока
player.id = server_port
# определяем команду нового клиента
if (self.player_count % 2) == 0 :
player.team = "green"
self.players_green.add(player_sprite)
else:
player.team = "yellow"
self.players_yellow.add(player_sprite)
self.players.append(player)
# отправить текущую конфигурацию уровня
dataframe = {}
# формируем список параметров
dataframe['params'] = {'total_width': self.total_level_width, 'total_height': self.total_level_height, 'width': self.level_width,
'height': self.level_height, 'block_demage': self.BLOCK_DEMAGE, 'frame_rate': self.FRAME_RATE}
#блоки
dataframe['blocks'] = []
for b in self.blocks.sprites():
data = {'id' : b.id, 'x' : b.rect.x, 'y' : b.rect.y, 'type' : b.type, 'hits': b.hits}
dataframe['blocks'].append(data)
#игроки
dataframe['players'] = []
for gamer in self.players:
data = {'id' : gamer.id, 'x' : gamer.sprite.rect.x, 'y' : gamer.sprite.rect.y, 'team' : gamer.team, 'dead_count': gamer.sprite.config.dead_count}
dataframe['players'].append(data)
# упаковываем данные
message = self.pack_data(dataframe)
# отправляем
player.obuffer += message
player.ready = True
print("Connected client %s:%d, team: %s" % (addr[0], addr[1], player.team)) |
aperigault/ansible | test/units/module_utils/basic/test_tmpdir.py | Python | gpl-3.0 | 4,159 | 0.001443 | # -*- coding: utf-8 -*-
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import json
import os
import shutil
import tempfile
import pytest
from units.compat.mock import patch, MagicMock
from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
class TestAnsibleModuleTmpDir:
DATA = (
(
{
"_ansible_tmpdir": "/path/to/dir",
"_ansible_remote_tmp": "/path/tmpdir",
"_ansible_keep_remote_files": False,
},
True,
"/path/to/dir"
),
(
{
"_ansible_tmpdir": None,
"_ansible_remote_tmp": "/path/tmpdir",
"_ansible_keep_remote_files": False
},
False,
"/path/tmpdir/ansible-moduletmp-42-"
),
(
{
"_ansible_tmpdir": None,
"_ansible_remote_tmp": "/path/tmpdir",
"_ansible_keep_remote_files": False
},
True,
"/path/tmpdir/ansible-moduletmp-42-"
),
(
{
"_ansible_tmpdir": None,
"_ansible_remote_tmp": "$HOME/.test",
"_ansible_keep_remote_files": False
},
False,
os.path.join(os.environ['HOME'], ".test/ansible-moduletmp-42-")
),
)
# pylint bug: https://github.com/PyCQA/pylint/issues/511
# pylint: disable=undefined-variable
@pytest.mark.parametrize('args, expected, stat_exists', ((s, e, t) for s, t, e in DATA))
def test_tmpdir_property(self, monkeypatch, args, expected, stat_exists):
makedirs | = {'called': False} |
def mock_mkdtemp(prefix, dir):
return os.path.join(dir, prefix)
def mock_makedirs(path, mode):
makedirs['called'] = True
makedirs['path'] = path
makedirs['mode'] = mode
return
monkeypatch.setattr(tempfile, 'mkdtemp', mock_mkdtemp)
monkeypatch.setattr(os.path, 'exists', lambda x: stat_exists)
monkeypatch.setattr(os, 'makedirs', mock_makedirs)
monkeypatch.setattr(shutil, 'rmtree', lambda x: None)
monkeypatch.setattr(basic, '_ANSIBLE_ARGS', to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': args})))
with patch('time.time', return_value=42):
am = basic.AnsibleModule(argument_spec={})
actual_tmpdir = am.tmpdir
assert actual_tmpdir == expected
# verify subsequent calls always produces the same tmpdir
assert am.tmpdir == actual_tmpdir
if not stat_exists:
assert makedirs['called']
expected = os.path.expanduser(os.path.expandvars(am._remote_tmp))
assert makedirs['path'] == expected
assert makedirs['mode'] == 0o700
@pytest.mark.parametrize('stdin', ({"_ansible_tmpdir": None,
"_ansible_remote_tmp": "$HOME/.test",
"_ansible_keep_remote_files": True},),
indirect=['stdin'])
def test_tmpdir_makedirs_failure(self, am, monkeypatch):
mock_mkdtemp = MagicMock(return_value="/tmp/path")
mock_makedirs = MagicMock(side_effect=OSError("Some OS Error here"))
monkeypatch.setattr(tempfile, 'mkdtemp', mock_mkdtemp)
monkeypatch.setattr(os.path, 'exists', lambda x: False)
monkeypatch.setattr(os, 'makedirs', mock_makedirs)
actual = am.tmpdir
assert actual == "/tmp/path"
assert mock_makedirs.call_args[0] == (os.path.expanduser(os.path.expandvars("$HOME/.test")),)
assert mock_makedirs.call_args[1] == {"mode": 0o700}
# because makedirs failed the dir should be None so it uses the System tmp
assert mock_mkdtemp.call_args[1]['dir'] is None
assert mock_mkdtemp.call_args[1]['prefix'].startswith("ansible-moduletmp-")
|
sander76/home-assistant | homeassistant/components/demo/sensor.py | Python | apache-2.0 | 3,738 | 0.000268 | """Demo platform that has a couple of fake sensors."""
from __future__ import annotations
from typing import Any
from homeassistant.components.sensor import STATE_CLASS_MEASUREMENT, SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
CONCENTRATION_PARTS_PER_MILLION,
DEVICE_CLASS_CO,
DEVICE_CLASS_CO2,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_TEMPERATURE,
ENERGY_KILO_WATT_HOUR,
PERCENTAGE,
POWER_WATT,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, StateType
from . import DOMAIN
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: dict[str, Any] | None = None,
) -> None:
"""Set up the Demo sensors."""
async_add_entities(
[
DemoSensor(
"sensor_1",
"Outside Temperature",
15.6,
DEVICE_CLASS_TEMPERATURE,
STATE_CLASS_MEASUREMENT,
TEMP_CELSIUS,
12,
),
DemoSensor(
"sensor_2",
"Outside Humidity",
54,
DEVICE_CLASS_HUMIDITY,
STATE_CLASS_MEASUREMENT,
PERCENTAGE,
None,
),
DemoSensor(
"sensor_3",
"Carbon monoxide",
54,
DEVICE_CLASS_CO,
STATE_CLASS_MEASUREMENT,
CONCENTRATION_PARTS_PER_MILLION,
None,
),
DemoSensor(
"sensor_4",
"Carbon dioxide",
54,
DEVICE_CLASS_CO2,
STATE_CLASS_MEASUREMENT,
CONCENTRATION_PARTS_PER_MILLION,
14,
),
| DemoSensor(
"sensor_5",
"Power consumption",
100,
DEVICE_CLASS_POWER,
STATE_CLASS_MEASUREMENT,
POWER_WATT,
None,
),
DemoSensor(
"sensor_6",
| "Today energy",
15,
DEVICE_CLASS_ENERGY,
STATE_CLASS_MEASUREMENT,
ENERGY_KILO_WATT_HOUR,
None,
),
]
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoSensor(SensorEntity):
"""Representation of a Demo sensor."""
_attr_should_poll = False
def __init__(
self,
unique_id: str,
name: str,
state: StateType,
device_class: str | None,
state_class: str | None,
unit_of_measurement: str | None,
battery: StateType,
) -> None:
"""Initialize the sensor."""
self._attr_device_class = device_class
self._attr_name = name
self._attr_native_unit_of_measurement = unit_of_measurement
self._attr_native_value = state
self._attr_state_class = state_class
self._attr_unique_id = unique_id
self._attr_device_info = {
"identifiers": {(DOMAIN, unique_id)},
"name": name,
}
if battery:
self._attr_extra_state_attributes = {ATTR_BATTERY_LEVEL: battery}
|
ocket8888/slackbot | slackbot/modules/patch/patch.py | Python | gpl-3.0 | 1,502 | 0.023302 | """
This module checks for the name of a supported game in the arguments to the api command, and
if found it reads the latest patch, printing it to the channel in which it was requested.
"""
import os.path
import typing
#I should probably find a way to utilize these
dota_synonymns=["dota", "Dota", "dota2", "Dota2"]
ow_synonymns=["ow", "overwatch", "Overwatch"]
def patchFetch(args: typing.List[str], slackAPI: object, channel: object, users: list) -> str:
"""
Fetches th epatch notes for the appropriate game, and posts it to the channel.
"""
|
# If there isn't a game name, print and log an error
if len(args) == 0:
slackAPI.chat.post_message(channel['name'], "Not eno | ugh arguments to `!patch`. Usage: `!patch <game>`")
return "WW: patch: not enough arguments"
# Read in the patch notes from the file if it exists
datfile = "/etc/slackbot/modules/patch/" + args[0]
if os.path.isfile(datfile):
patchnotes = open('/etc/slackbot/modules/patch/'+args[0]).read()
slackAPI.chat.post_message(channel['name'], patchnotes, as_user=True, username="patchbot")
return "II: patch: posted patch notes for "+args[0]
# If patch notes are not found, post and log an error message
slackAPI.chat.post_message(channel['name'], "Couldn't find patch notes for '%s'."\
"Either this isn't a real game or"\
"@roogz is fucking memeing again." % args[0])
return "WW: patch: patch notes not found for "+args[0]
|
prometheanfire/openstack-guest-agents-unix | commands/kms.py | Python | apache-2.0 | 1,790 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); yo | u may
# not use this file exce | pt in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
JSON KMS activation
"""
import os
import platform
import commands
import redhat.kms
class ActivateCommand(commands.CommandBase):
def __init__(self, *args, **kwargs):
pass
@staticmethod
def detect_os():
"""
Return the Linux Distribution or other OS name
"""
translations = {"redhat": redhat}
system = os.uname()[0]
if system == "Linux":
system = platform.linux_distribution(full_distribution_name=0)[0]
# Arch Linux returns None for platform.linux_distribution()
if not system and os.path.exists('/etc/arch-release'):
system = 'arch'
if not system:
return None
system = system.lower()
global DEFAULT_HOSTNAME
DEFAULT_HOSTNAME = system
return translations.get(system)
@commands.command_add('kmsactivate')
def activate_cmd(self, data):
os_mod = self.detect_os()
if not os_mod:
raise SystemError("KMS not supported on this OS")
return os_mod.kms.kms_activate(data)
|
uclouvain/OSIS-Louvain | base/migrations/0017_person_language.py | Python | agpl-3.0 | 520 | 0.001927 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-25 11:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bas | e', '0016_auto_20160224_1039'),
]
operations = [
migrations.AddField(
model_name='person',
name='language',
field=models.CharField(choices=[('FR', 'Français'), ('EN', 'English')], default=' | FR', max_length=30, null=True),
),
]
|
Tapo4ek/django-banzai | banzai/migrations/0001_initial.py | Python | mit | 4,368 | 0.008013 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Package'
db.create_table('banzai_package', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('file', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('status', self.gf('django.db.models.fields.CharField')(max_length=4, blank=True)),
('pack_id', self.gf('django.db.models.fields.CharField')(max_length=100)),
('emails_all', self.gf('django.db.models.fields.PositiveIntegerField')()),
('emails_correct', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('description', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
))
db.send_create_signal('banzai', ['Package'])
# Adding model 'Report'
db.create_table('banzai_report', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('package', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['banzai.Package'])),
('status', self.gf('django.db.models.fields.CharField')(max_length=4, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
('reject_code', self.gf('django.db.models.fields.CharField')(max_length=250)),
('reject_message', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('banzai', ['Report'])
# Adding model 'ReportFBL'
db.create_table('banzai_reportfbl', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('package', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['banzai.Package'])),
('status', self.gf('django.db.models.fields.CharField')(max_length=4, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
))
db.send_create_signal('banzai', ['ReportFBL'])
def backwards(self, orm):
# Deleting model 'Package'
db.delete_table('banzai_package')
# Deleting model 'Report'
db.delete_table('banzai_report')
# Deleting model 'ReportFBL'
db.delete_table('banzai_reportfbl')
models = {
'banzai.package': {
'Meta': {'object_name': 'Package'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'emails_all': ('django.db.models.fields.PositiveIntegerField', [], {}),
'emails_correct': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pack_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'status': ('django.db.models.fie | lds.CharField', [], {'max_length': '4', 'blank': 'True'})
},
'banzai.report': {
'Meta': {'object_name': 'Report'},
'email': ('django.db.models.fields.EmailField', [], {'ma | x_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['banzai.Package']"}),
'reject_code': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'reject_message': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
},
'banzai.reportfbl': {
'Meta': {'object_name': 'ReportFBL'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['banzai.Package']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
}
}
complete_apps = ['banzai'] |
lucasa/landell_gst-gengui | sltv/gstmanager/sbins/source.py | Python | gpl-2.0 | 601 | 0.009983 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class AVSource(object):
def __init__(self, sbin_conten | t):
self.tags = ["a_src", "v_src"]
#self.sbin = "%s ! queue ! tee name=%s_tee" %(sbin_content, self.tags[0])
self.sbin = sbin_content
class AudioSource(object):
def __init__(self, sbin_content):
self.tags = ["a_src"]
self.sbin = "%s ! tee name=%s_tee" %(sbin_content, self.tags[0])
class VideoSource(object):
def __init__(self, sbin_content):
self.tags = ["v_src"]
| self.sbin = "%s ! tee name=%s_tee" %(sbin_content, self.tags[0])
|
wordsforthewise/shinyei-ppd42ns-arduino | plotdustlive.py | Python | mit | 995 | 0.047236 | import pylab as plt
import pickle, csv, matplotlib, time
import numpy as np
import matplotlib.dates as mdates
import pandas as pd
import matplotlib.animation as animation
| def get_data(file):
with open(file,'rb') as f:
data=pickle.load(f)
f.close()
counter=0
dates=[]
dustdata=[]
for each in data:
if counter%2==0:
dates.append(each)
else:
dustdata.append(str(each).split(',')[2])
counter+=1
return dates, dustdata
def animate(i):
da | tes,dustdata=get_data('dustdata')
dates = matplotlib.dates.date2num(dates)
dustdata=np.array(dustdata)
ax.plot_date(dates, dustdata,c='grey')
ax.plot_date(dates, pd.rolling_mean(dustdata,20),'w-',linewidth=4)
return
fig = plt.figure(facecolor='k')
ax = fig.add_subplot(111,axisbg='k')
ax.xaxis.set_major_formatter(mdates.DateFormatter('%I:%M'))
ax.tick_params(color='w', labelcolor='w')
for spine in ax.spines.values():
spine.set_edgecolor('w')
ani = animation.FuncAnimation(fig, animate, interval=10000)
plt.show() |
nish10z/CONCUSS | lib/coloring/basic/trans_frater_augmentation.py | Python | bsd-3-clause | 1,365 | 0.000733 | #!/usr/bin/python
#
# This file is part of CONCUSS, https://github.com/theoryinpractice/concuss/,
# | and is Copyright (C) North Carolina State University, 2015. It is licensed
# under the three-clause BSD license; see LICENSE.
#
from lib.util.memori | zed import memorized
from lib.graph.graph import Graph
# Calculate one transitive-fraternal-augmentation-step and
# result a tuple (newgraph, transedges, fratedges)
@memorized(['orig', 'step'])
def trans_frater_augmentation(orig, g, trans, frat, col,
nodes, step, td, ldoFunc):
fratGraph = Graph()
newTrans = {}
for v in g:
for x, y, _, in g.trans_trips(v):
newTrans[(x, y)] = step
assert (not g.adjacent(x, y)), \
"{0} {1} transitive but adjacent".format(x, y)
for x, y, _ in g.frat_trips(v):
fratGraph.add_edge(x, y)
assert (not g.adjacent(x, y)), \
"{0} {1} fraternal but adjacent".format(x, y)
for (s, t) in newTrans.keys():
g.add_arc(s, t, 1)
fratGraph.remove_edge(s, t)
# TODO: support dict to see current in-degree...
fratDigraph = ldoFunc(fratGraph)
# calculate result
trans.update(newTrans)
for s, t, _ in fratDigraph.arcs():
frat[(s, t)] = step
g.add_arc(s, t, 1)
return (g, trans, frat)
# end def
|
shoopio/shoop | shuup/admin/modules/products/views/edit_parent.py | Python | agpl-3.0 | 3,373 | 0.001779 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from django.http import HttpResponseRedirect
from django.views.generic import UpdateView
from shuup.admin.base import MenuEntry
from shuup.admin.form_part import FormPart, FormPartsViewMixin, TemplatedFormDef
from shuup.admin.toolbar import Toolbar, get_default_edit_toolbar
from shuup.admin.utils.urls import get_model_url
from shuup.core.models import Product
from shuup.utils.django_compat import reverse
class ProductChildrenBaseFormPart(FormPart):
invalid_modes = []
priority = 0
form_name = None
def get_form_defs(self, form, template_name):
yield TemplatedFormDef(
"children",
form,
template_name=template_name,
required=False,
kwargs={"parent_product": self.object, "request": self.request},
)
def form_valid(self, form):
try:
children_formset = form["children"]
except KeyError:
return
children_formset.save()
class ProductParentBaseToolbar(Toolbar):
def __init__(self, view):
super(ProductParentBaseToolbar, self).__init__()
self.view = view
self.parent_product = view.object
self.request = view.request
get_default_edit_toolbar(self.view, "product_form", with_split_save=False, toolbar=self)
class ProductParentBaseView(FormPartsViewMixin, UpdateView):
model = Product
context_object_name = "product"
form_class = forms.Form
form_part_classes = []
toolbar_class = None
def dispatch(self, request, *args, **kwargs):
self.object = self.get_object()
parent = self.object.get_all_package_parents().first()
if parent:
# By default, redirect to the first parent
return HttpResponseRedirect(reverse("shuup_admin:shop_product.edit_package", kwa | rgs={"pk": parent.id}))
return super(ProductParentBaseView, self).dispatch(request, *args, **kwargs)
def get_breadcrumb_parents(self):
return [MenuEntry(text=self.object, url=get_model_url(self.object, shop=self.request.shop))]
def post(self, reque | st, *args, **kwargs):
command = request.POST.get("command")
if command:
return self.dispatch_command(request, command)
return super(ProductParentBaseView, self).post(request, *args, **kwargs)
def get_form_part_classes(self):
for form_part_class in self.form_part_classes:
yield form_part_class
def get_context_data(self, **kwargs):
context = super(ProductParentBaseView, self).get_context_data(**kwargs)
if self.toolbar_class:
context["toolbar"] = self.toolbar_class(self)
return context
def form_valid(self, form):
form_parts = self.get_form_parts(self.object)
for form_part in form_parts:
form_part.form_valid(form)
self.object.verify_mode()
self.object.save()
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return self.request.path
def dispatch_command(self, request, command):
pass
|
ivanamihalek/blender | texture_shadow_no_bg/cube_to_png.py | Python | gpl-2.0 | 10,222 | 0.007826 | #!bpy
"""
to run:
(aAtually I have not been able to run this from command line - the thing
exits without rendering. If I add bpy.ops.render, it alwys renders layers, rather
then compositing output)
blender -b --python this_fnm.py
"""
import bpy
from math import radians
import fnmatch
import os
###################################
def delete_old_stuff():
# escape edit mode
if bpy.ops.object.mode_set.poll():
bpy.ops.object.mode_set(mode='OBJECT')
# delete all mesh objects
bpy.ops.object.select_by_type(type='MESH')
bpy.ops.object.delete()
# delete all lamps
bpy.ops.object.select_by_type(type='LAMP')
bpy.ops.object.delete()
# delete all font objects
bpy.ops.object.select_by_type(type='FONT')
bpy.ops.object.delete()
# delete all render layers but one
render_layers = bpy.context.scene.render.layers
for active_index in range (1,len(render_layers)):
render_layers.active_index = active_index
render_layers.remove(render_layers.active)
# delete all materials
for i in bpy.data.materials.values():
bpy.data.materials.remove(i)
# delete all textures
for i in bpy.data.textures.values():
bpy.data.textures.remove(i)
#####################################################################
def makeGlossyTextured (object, image_loaded, material_name):
material = bpy.data.materials.new(material_name)
# as soon as we do this we have Diffuse BSDF and Material Output nodes, linked:
material.use_nodes = True
nodes = material.node_tree.nodes
links = material.node_tree.links
# uv map node
uv_node = nodes.new('ShaderNodeUVMap')
uv_node.uv_map = object.data.uv_textures.active.name
# image texture node
image_texture_node = nodes.new(type='ShaderNodeTexImage')
image_texture_node.image = image_loaded
links.new(uv_node.outputs['UV'], image_texture_node.inputs['Vector'])
# diffuse node and Material output are already generated, and linked
# so we just need to pipe in the testure into Diffuse BSDF node
diffuse_node = nodes.get("Diffuse BSDF")
links.new(image_texture_node.outputs[0], diffuse_node.inputs[0])
# add a glossy BSDF
glossy_node = nodes.new(type='ShaderNodeBsdfGlossy')
glossy_node.inputs["Color"].default_value = [1.0, 1.0, 1.0, 1.0]
glossy_node.inputs["Roughness"].default_value = 0.0
# add a mix node
mix_node = nodes.new(type='ShaderNodeMixShader')
links.new(diffuse_node.outputs[0], mix_node.inputs[1]) # whats mix.inputs[0]?
links.new(glossy_node.outputs[0], mix_node.inputs[2])
# output of the mix node into Material Output
mat_output = nodes.get("Material Output")
links.new(mix_node.outputs[0], mat_output.inputs[0])
return material
#####################################################################
def makeEmission (material_name):
material = bpy.data.materials.new(material_name)
# as soon as we do this we]'' have Diffuse BSDF and Material Output nodes, linked:
material.use_nodes = True
nodes = material.node_tree.nodes
links = material.node_tree.links
# add an emission node
emission_node = nodes.new(type='ShaderNodeEmission')
emission_node.inputs["Color"].default_value = [0.335, 0.583, 0.8, 1.0]
emission_node.inputs["Strength"].default_value = 20.0
mat_output = nodes.get("Material Output")
links.new(emission_node.outputs[0], mat_output.inputs[0])
return material
#####################################################################
def set_camera(scene):
# Set camera rotation in euler angles
scene.camera.rotation_mode = 'XYZ'
scene.camera.rotation_euler[0] = radians(12)
scene.camera.rotation_euler[1] = 0.0
scene.camera.rotation_euler[2] = 0.0
# Set camera translation
scene.camera.location.x = 0.34
scene.camera.location.y = -1.2
scene.camera.location.z = 6.7
#####################################################################
def set_lights(scene):
# ambient
scene.world.light_settings.use_ambient_occlusion = True
scene.world.light_settings.distance = 2
# spotlight - sun
bpy.ops.object.lamp_add(type='SUN', location=(-2.0, 0.32, 6.5), rotation=(radians(-21), radians(-5), radians(69)))
lamp = bpy.context.object
#lamp.color = (0.43, 0.78,1.0,1.0) # the code does not complain, but not sure if it does anything
# lamp does not have strength, but the associated rendering node does (sigh)
# lamp.strength = 5.0 # this does not work
lamp.data.node_tree.nodes['Emission'].inputs['Strength'].default_value= 5.0
lamp.data.node_tree.nodes['Emission'].inputs['Color'].default_value= (0.43, 0.78,1.0,1.0)
lamp.cycles_visibility.shadow = False
# light emmission plane
bpy.ops.mesh.primitive_plane_add(location=(-10.0, 3.5, 12.0),
rotation=(radians(-104), radians(-98), radians(80)))
emission_plane = bpy.context.object
emission_plane.scale = (3.3, -5.5, -28.3)
emission_plane.name = "emission plane"
emission_plane.data.materials.append (makeEmission ("emission mat"))
emission_plane.cycles_visibility.shadow = False
#####################################################################
def create_object():
bpy.ops.mesh.primitive_cube_add(location=(0.0, 0.0, 0.7), enter_editmode=True, layers= [l==0 for l in range(20)])
bpy.ops.mesh.subdivide(number_cuts=4)
obj = bpy.context.object
obj.name = "cube"
obj.location.z += obj.dimensions.z/4
obj.modifiers.new("cube_subsurf", "SUBSURF")
obj.modifiers["cube_subsurf"].subdivision_type = 'CATMULL_CLARK'
obj.modifiers["cube_subsurf"].render_levels = 4
mesh = obj.data
bpy.ops.object.editmode_toggle()
# show mesh as smooth
for p in mesh.polygons:
p.use_smooth = True
# texture layer: Smart projection
bpy.ops.mesh.uv_texture_add()
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.uv.cube_project(cube_size=0.1*obj.dimensions.x)
bpy.ops.object.mode_set(mode='OBJECT')
return obj
#####################################################################
def compositing(scene, outdir, outfile_base_name):
# let's try to work in some layers
render_layers = scene.render.layers
render_layers.active.name = | "main"
render_layers.new("shadow")
render_layers["main"].layers = [l==0 for l in range(20)]
render_layers["shadow"].layers = [l==1 for l in range(20)]
render_layers["shadow"].use_pass_s | hadow = True
scene.layers[0] = True
scene.layers[1] = True
# and now ... compositing!
# I'll want transaprent background
scene.cycles.film_transparent = True
# switch on nodes and get reference
scene.use_nodes = True
tree = scene.node_tree
# the default nodes are Composite and RenderLayers, that contains out main layer already
# I better remove them if I am going to run this script repeatedly
for node in tree.nodes:
tree.nodes.remove(node)
links = tree.links
main_layer_node = tree.nodes.new('CompositorNodeRLayers')
main_layer_node.layer = "main"
main_layer_node.location = 200, -100
shadow_layer_node = tree.nodes.new('CompositorNodeRLayers')
shadow_layer_node.layer = "shadow"
shadow_layer_node.location = -400, 100
# note here: mix, not math
subtract_node = tree.nodes.new('CompositorNodeMixRGB')
subtract_node.blend_type = "SUBTRACT" # the default is add
subtract_node.location = -200, 200
# inputs[0] here is 'Fac' (?)
links.new(shadow_layer_node.outputs['Alpha'], subtract_node.inputs[1])
links.new(shadow_layer_node.outputs['Shadow'], subtract_node.inputs[2])
set_alpha_node = tree.nodes.new('CompositorNodeSetAlpha')
set_alpha_node.location = 0, 200
links.new(subtract_node.outputs['Image'], set_alpha_node.inputs['Alpha'])
blur_node = tree.nodes.new('CompositorNodeBlur')
blur_node.filter_type = 'FAST_GAUSS'
blur_node.size_x = 5
blur_node.size_y = 5
blur_node.location = 200, 200
links.new(set_alpha_node.outputs['Image'], blur_node.inputs['Image'])
alph |
michaupl/materialsapp | finishes/admin.py | Python | apache-2.0 | 1,110 | 0.003604 | # coding: utf-8
from __future__ import unicode_literals
from django.contrib import admin
from core.admin import SubcategoryAdmin, DetailAdmin
from .models import FinishDetail, FinishSubcategory
class FinishSubcategoryAdmin(SubcategoryAdmin):
def get_form(self, request, obj=None, **kwargs):
from | . import DETAIL_TYPE
form = super(Fi | nishSubcategoryAdmin, self).get_form(request, obj, **kwargs)
if 'category' in form.base_fields:
field = form.base_fields['category']
field.queryset = field.queryset.filter(type=DETAIL_TYPE)
return form
admin.site.register(FinishSubcategory, FinishSubcategoryAdmin)
class FinishDetailAdmin(DetailAdmin):
def get_form(self, request, obj=None, **kwargs):
from . import DETAIL_TYPE
form = super(FinishDetailAdmin, self).get_form(request, obj, **kwargs)
if 'subcategory' in form.base_fields:
field = form.base_fields['subcategory']
field.queryset = field.queryset.filter(type=DETAIL_TYPE)
return form
admin.site.register(FinishDetail, FinishDetailAdmin) |
apy2017/Anaconda | technobot/__init__.py | Python | mit | 6,849 | 0.001752 | from technobot import api
from technobot import utils
from threading import Timer
from functools import wraps
import re
import copy
import logging
logging.basicConfig(level = logging.DEBUG)
class TechnoBot:
def __init__(self, token, exit_button=None, ignore_old=False, timeout=3):
self.token = token
self._exit_reply_button = exit_button
self._ignore_old = ignore_old
self._timeout = timeout
self._last_update_id = ignore_old
self._updates = {}
self._conversations = {}
# Pre Handlers for conversation coroutines
self._pre_handlers = []
self._handlers = []
self._thread = Timer(self._timeout, self._process_updates)
self.logger = logging.getLogger("ex")
def _receive_updates(self):
self._updates = {}
updates = api.get_updates(self.token, self._last_update_id)
for update in updates:
new_update = utils.Update(update)
if self._ignore_old:
self._last_update_id = new_update.update_id
continue
if self._last_update_id is not None and new_update.update_id > self._last_update_id:
self._updates[new_update.update_id] = new_update
self._last_update_id = new_update.update_id
elif self._last_update_id is None:
self._updates[new_update.update_id] = new_update
self._ignore_old = False
def receive_updates(self, updates):
self._updates = {}
for update in updates:
new_update = utils.Update(update)
if self._ignore_old:
self._last_update_id = new_update.update_id
continue
if self._last_update_id is not None and new_update.update_id > self._last_update_id:
self._updates[new_update.update_id] = new_update
self._last_update_id = new_update.update_id
elif self._last_update_id is None:
self._updates[new_update.update_id] = new_update
self._ignore_old = False
def process_webhook_update(self, update):
new_update = utils.Update(update)
self.logger.debug('Updates code {code} recieved'.format(code=new_update.update_id))
message = new_update.message
|
if message is None: return
self._process_conversation(message)
self._process_handlers(message)
def _process_updat | es(self):
self._receive_updates()
self._thread.cancel()
for update_id, update in self._updates.items():
message = update.message
if message is None: break
self._process_conversation(message)
self._process_handlers(message)
self._thread = Timer(self._timeout, self._process_updates)
self._thread.start()
def _process_conversation(self, message):
if message.text == self._exit_reply_button.text:
self._default_exit_handler(message)
if message.chat.id in self._conversations:
if self._conversations[message.chat.id].send(message):
return None
else:
del (self._conversations[message.chat.id])
elif len(self._pre_handlers)>0:
handler_func = self._match_pre_handler(message)
if handler_func is not None:
started_coroutine = handler_func(message)
self._conversations[message.chat.id] = started_coroutine
return None
def _process_handlers(self, message):
if message.chat.id not in self._conversations:
handler_func = self._match_handler(message)
if handler_func is not None:
handler_func(message)
def _match_handler(self, message):
for handler, handler_func in self._handlers:
if re.match(handler, message.text):
return handler_func
return None
def _match_pre_handler(self, message):
for handler_obj in self._pre_handlers:
handler, handler_func = handler_obj
if re.match(handler, message.text):
return copy.copy(handler_func)
return None
def _default_exit_handler(self, message):
if message.chat.id in self._conversations:
del self._conversations[message.chat.id]
def conversation(self, handler=None):
def _outer_decorator(func):
def _decorator(*args, **kwargs):
result = func(*args, **kwargs)
return result
re_handler = re.compile(handler)
self._pre_handlers.append((re_handler, Conversation(func)))
return wraps(func)(_decorator)
return _outer_decorator
def handler(self, handler=None):
def _outer_decorator(func):
def _decorator(*args, **kwargs):
result = func(*args, **kwargs)
return result
re_handler = re.compile(handler)
self._handlers.append((re_handler, func))
return wraps(func)(_decorator)
return _outer_decorator
def set_exit_button(self, exit_button):
self._exit_reply_button = exit_button
def send_message(self, chat_id, text, reply_markup=None):
if chat_id in self._conversations:
new_reply_markup = utils.ReplyKeyboardMarkup()
new_reply_markup.append_buttons(self._exit_reply_button)
api.send_message(self.token, chat_id=chat_id, text=text, reply_markup=new_reply_markup)
elif reply_markup is not None:
reply_markup.append_buttons(self._exit_reply_button)
api.send_message(self.token, chat_id=chat_id, text=text, reply_markup=reply_markup)
else:
api.send_message(self.token, chat_id=chat_id, text=text)
def set_webhook(self, url=None, certificate=None):
self.logger.debug('Webhook set in url {url}'.format(url=url))
return api.set_webhook(self.token, url, certificate)
def delete_webhook(self):
self.logger.debug('Webhook deleted')
return api.delete_webhook(self.token)
def polling(self):
self._process_updates()
class Conversation:
def __init__(self, coroutine):
self._coroutine = coroutine
self._initialized = False
def send(self, message):
try:
self._coroutine.send(message)
return True
except StopIteration:
return False
def __call__(self, *args, **kwargs):
logging.debug('Started conversation')
self._coroutine = self._coroutine(*args, **kwargs)
next(self._coroutine)
return self
|
paninetworks/neutron | neutron/extensions/quotasv2.py | Python | apache-2.0 | 5,149 | 0 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_utils import importutils
import webob
from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.api.v2 import base
from neutron.api.v2 import resource
from neutron.common import constants as const
from neutron.common import exceptions as n_exc
from neutron import manager
from neutron import quota
from neutron import wsgi
RESOURCE_NAME = 'quota'
RESOURCE_COLLECTION = RESOURCE_NAME + "s"
QUOTAS = quota.QUOTAS
DB_QUOTA_DRIVER = 'neutron.db.quota_db.DbQuotaDriver'
EXTENDED_ATTRIBUTES_2_0 = {
RESOURCE_COLLECTION: {}
}
class QuotaSetsController(wsgi.Controller):
def __init__(self, plugin):
self._resource_name = RESOURCE_NAME
self._plugin = plugin
self._driver = importutils.import_class(
cfg.CONF.QUOTAS.quota_driver
)
self._update_extended_attributes = True
def _update_attributes(self):
for quota_resource in QUOTAS.resources.keys():
attr_dict = EXTENDED_ATTRIBUTES_2_0[RESOURCE_COLLECTION]
attr_dict[quota_resource] = {
'allow_post': False,
'allow_put': True,
'convert_to': attributes.convert_to_int,
'validate': {'type:range': [-1, const.DB_INTEGER_MAX_VALUE]},
'is_visible': True}
self._update_extended_attributes = False
def _get_quotas(self, request, tenant_id):
return self._driver.get_tenant_quotas(
request.context, QUOTAS.resources, tenant_id)
def create(self, request, body=None):
msg = _('POST requests are not supported on this resource.')
raise webob.exc.HTTPNotImplemented(msg)
def index(self, request):
context = request.context
self._ | check_admin(context)
return {self._resource_name + "s":
self._driver.get_all_quotas(context, QUOTAS.resources)}
def tenant(self, request):
"""Retrieve the tenant info in context."""
context = request.context
if not context.tenant_id:
raise n_e | xc.QuotaMissingTenant()
return {'tenant': {'tenant_id': context.tenant_id}}
def show(self, request, id):
if id != request.context.tenant_id:
self._check_admin(request.context,
reason=_("Only admin is authorized "
"to access quotas for another tenant"))
return {self._resource_name: self._get_quotas(request, id)}
def _check_admin(self, context,
reason=_("Only admin can view or configure quota")):
if not context.is_admin:
raise n_exc.AdminRequired(reason=reason)
def delete(self, request, id):
self._check_admin(request.context)
self._driver.delete_tenant_quota(request.context, id)
def update(self, request, id, body=None):
self._check_admin(request.context)
if self._update_extended_attributes:
self._update_attributes()
body = base.Controller.prepare_request_body(
request.context, body, False, self._resource_name,
EXTENDED_ATTRIBUTES_2_0[RESOURCE_COLLECTION])
for key, value in body[self._resource_name].items():
self._driver.update_quota_limit(request.context, id, key, value)
return {self._resource_name: self._get_quotas(request, id)}
class Quotasv2(extensions.ExtensionDescriptor):
"""Quotas management support."""
@classmethod
def get_name(cls):
return "Quota management support"
@classmethod
def get_alias(cls):
return RESOURCE_COLLECTION
@classmethod
def get_description(cls):
description = 'Expose functions for quotas management'
if cfg.CONF.QUOTAS.quota_driver == DB_QUOTA_DRIVER:
description += ' per tenant'
return description
@classmethod
def get_updated(cls):
return "2012-07-29T10:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
controller = resource.Resource(
QuotaSetsController(manager.NeutronManager.get_plugin()),
faults=base.FAULT_MAP)
return [extensions.ResourceExtension(
Quotasv2.get_alias(),
controller,
collection_actions={'tenant': 'GET'})]
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
|
dnacreative/jam-py | tests/server.py | Python | bsd-3-clause | 232 | 0 | #!/usr/bin/env python
# - | *- coding: utf-8 -*-
if __name__ == '__main__':
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
im | port jam.webserver
from jam.server import server
jam.webserver.run(server)
|
gratefulfrog/ArduGuitar | Ardu2/design/POC-3_MAX395/pyboard/DraftDevt/dbh.py | Python | gpl-2.0 | 761 | 0.027595 | #!/usr/local/bin/python3.4
# dbh.ph debounce hardware
"""
Pyboard:
Switch pins: Y1 or X19
usage:
>>> init()
>>> loop()
"""
from pyb import ExtInt,Pin
# declare the pin id
pinId = 'X19' # interrupt 0 'Y1' # interrupt 6
# itnerrupt mechanics and debounce globals
flag= False
interCount=0
eObj = None
# define ISR
def callback(line):
global flag
flag += 1
def init():
global eObj
eObj=ExtInt(pinId, ExtInt.IRQ_FALLING, Pin.PULL_UP, callback)
def doFlag ():
global flag,interCount
print('Flag:',flag,'\tInterCount: ',interCount)
flag=0
interCount +=1
def loop():
| try:
while True:
if flag>0:
doFlag()
except KeyboardInterrupt:
print('Test ended!\nBye . | ..')
|
sbobovyc/GameTools | TSW/src/rdbdata.py | Python | gpl-3.0 | 1,836 | 0.006536 | """
Copyright (C) 2013 Stanislav Bobovych
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
class RDBDATA_data_entry:
def __init__(self, offset, file_pointer):
old_offset = file | _pointer.tell()
file_pointer.seek(offset)
self.data_type, = struct.unpack("<I", file_pointer.read(4) | )
self.RDB_id, = = struct.unpack("<I", file_pointer.read(4))
self.data_length, = struct.unpack("<I", file_pointer.read(4))
self.unknown, = struct.unpack("<I", file_pointer.read(4))
self.data = file_pointer.read(self.data_length)
file_pointer.seek(old_offset)
class RDBDATA_file:
def __init__(self, filepath=None):
self.filepath = filepath
self.header = None #RDB0
self.data = None
if self.filepath != None:
self.open(filepath)
def open(self, filepath=None):
if filepath == None and self.filepath == None:
print "File path is empty"
return
if self.filepath == None:
self.filepath = filepath
def dump(self, dest_filepath=os.getcwd(), verbose=False):
with open(self.filepath, "rb") as f:
self.header = struct.unpack("IIII", f.read(4))
self.data = f.read()
|
teamfx/openjfx-8u-dev-rt | modules/web/src/main/native/Source/JavaScriptCore/Scripts/builtins/builtins_generate_wrapper_implementation.py | Python | gpl-2.0 | 2,485 | 0.000402 | #!/usr/bin/env python
#
# Copyright (c) 2016 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain | the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNE | SS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import logging
from string import Template
from builtins_generator import BuiltinsGenerator
from builtins_templates import BuiltinsGeneratorTemplates as Templates
log = logging.getLogger('global')
class BuiltinsWrapperImplementationGenerator(BuiltinsGenerator):
def __init__(self, model):
BuiltinsGenerator.__init__(self, model)
def output_filename(self):
return "%sJSBuiltins.cpp" % self.model().framework.setting('namespace')
def generate_output(self):
args = {
'namespace': self.model().framework.setting('namespace'),
}
sections = []
sections.append(self.generate_license())
sections.append(Template(Templates.DoNotEditWarning).substitute(args))
sections.append(self.generate_section_for_object())
return "\n\n".join(sections)
def generate_section_for_object(self):
header_includes = []
for object in self.model().objects:
header_includes.append((["WebCore"], ("WebCore", object.object_name + "Builtins.cpp")))
return '\n'.join(self.generate_includes_from_entries(header_includes))
|
sharoonthomas/clearstate | clearstate/public/views.py | Python | bsd-3-clause | 2,490 | 0 | # -*- coding: utf-8 -*-
'''Public section, including homepage and signup.'''
from flask import (
Blueprint, request, render_template, flash, url_for,
redirect, session
)
from flask.ext.login import login_user, login_required, logout_user
from clearstate.extensions import login_manager
from clearstate.user.models import User, users_exist
from clearstate.page.models import Page
from clearstate.public.forms import LoginForm
from clearstate.user.forms import RegisterForm
from clearstate.utils import flash_errors
from clearstate.database import db
blueprint = Blueprint('public', __name__, static_folder="../static")
@login_manager.user_loader
def load_user(id):
return User.get_by_id(int(id))
@blueprint.route("/", methods=["GET"])
def home():
"""
The home page by default is expected to serve the status page, identified
from the host name.
"""
# TODO: Find status page from host n | ame
# If there are no matches, are there any pages or users at all ?
if not users_exist():
return redirect(url_for('user.initial_setup'))
else:
return redirect(url_for('pages.pages'))
@blueprint.route("/login", methods=["GET", "POST" | ])
def login():
form = LoginForm(request.form)
# Handle logging in
if request.method == 'POST':
if form.validate_on_submit():
login_user(form.user)
flash("You are logged in.", 'success')
redirect_url = request.args.get("next") or \
url_for("pages.pages")
return redirect(redirect_url)
else:
flash_errors(form)
return render_template("public/login.html", form=form)
@blueprint.route('/logout/')
@login_required
def logout():
logout_user()
flash('You are logged out.', 'info')
return redirect(url_for('public.home'))
@blueprint.route("/register/", methods=['GET', 'POST'])
def register():
form = RegisterForm(request.form, csrf_enabled=False)
if form.validate_on_submit():
new_user = User.create(
email=form.email.data,
password=form.password.data,
active=True
)
flash("Thank you for registering. You can now log in.", 'success')
return redirect(url_for('public.home'))
else:
flash_errors(form)
return render_template('public/register.html', form=form)
@blueprint.route("/about/")
def about():
form = LoginForm(request.form)
return render_template("public/about.html", form=form)
|
DominikDitoIvosevic/Uni | AI/lab2/ghostAgents.py | Python | mit | 3,390 | 0.019174 | # ghostAgents.py
# --------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
from game import Agent
from game import Actions
from game import Directions
import random
from util import manhattanDistance
import util
class GhostAgent( Agent ):
def __init__( self, index ):
self.index = index
def getAction( self, state ):
dist = self.getDistribution(state)
if len(dist) == 0:
return Directions.STOP
else:
return util.chooseFromDistribution( dist )
def getDistribution(self, state):
"Returns a Counter encoding a distribution over actions from the provided state."
util.raiseNotDefined()
class WumpusGhost( GhostAgent ):
"A ghost that does not move, but smells!"
def __init__ ( self, index ):
self.index = index
def getAction( self, state ):
return Directions.STOP
def getDistribution(self, state):
util.raiseNotDefined()
class RandomGhost( GhostAgent ):
"A ghost that chooses a legal action uniformly at random."
def getDistribution( self, state ):
dist = util.Counter()
for a in state.getLegalActions( self.index ): dist[a] = 1.0
dist.normalize()
return dist
class DirectionalGhost( GhostAgent ):
"A ghost that prefers to rush Pacman, or flee when scared."
def __init__( self, index, prob_attack=0.8, prob_scaredFlee=0.8 ):
self.index = index
self.prob_attack = prob_attack
self.prob_scaredFlee = prob_scaredFlee
def getDistribution( self, state ):
# Read variables from state
ghostState = state.getGhostS | tate( self.index )
legalActions = state.getLegalActions( self.index )
pos = state.getGhostPosition( self.index )
isScared = ghostState.scaredTimer > 0
speed = 1
if isScared: speed = 0.5
actionVectors = [Actions.directionToVector( a, speed ) for a in legalActions]
newPositions = [( pos[0]+a[0], pos[1]+a[1] ) for a in actionVectors]
pacma | nPosition = state.getPacmanPosition()
# Select best actions given the state
distancesToPacman = [manhattanDistance( pos, pacmanPosition ) for pos in newPositions]
if isScared:
bestScore = max( distancesToPacman )
bestProb = self.prob_scaredFlee
else:
bestScore = min( distancesToPacman )
bestProb = self.prob_attack
bestActions = [action for action, distance in zip( legalActions, distancesToPacman ) if distance == bestScore]
# Construct distribution
dist = util.Counter()
for a in bestActions: dist[a] = bestProb / len(bestActions)
for a in legalActions: dist[a] += ( 1-bestProb ) / len(legalActions)
dist.normalize()
return dist
|
vitay/ANNarchy | ANNarchy/parser/report/LatexParser.py | Python | gpl-2.0 | 16,861 | 0.007532 | from sympy import *
from sympy.parsing.sympy_parser import parse_expr, standard_transformations, convert_xor, auto_number
from sympy.printing.latex import LatexPrinter
import re
import ANNarchy.core.Global as Global
from ANNarchy.core.Random import RandomDistribution
from ..Extraction import *
from ANNarchy.parser.AnalyseSynapse import analyse_synapse
##################################
### Process individual equations
##################################
def _process_random(val):
"Transforms a connector attribute (weights, delays) into a string representation"
if isinstance(val, RandomDistribution):
return val.latex()
else:
return str(val)
# Really crappy...
# When target has a number (ff1), sympy thinks the 1 is a number
# the target is replaced by a text to avoid this
target_replacements = [
'firsttarget',
'secondtarget',
'thirdta | rget',
'fourthtarget',
'fifthtarget',
'si | xthtarget',
'seventhtarget',
'eighthtarget',
'ninthtarget',
'tenthtarget',
]
def _process_neuron_equations(neuron):
code = ""
# Extract parameters and variables
parameters = extract_parameters(neuron.parameters, neuron.extra_values)
variables = extract_variables(neuron.equations)
variable_names = [var['name'] for var in variables]
attributes, local_var, semiglobal_var, global_var = get_attributes(parameters, variables, neuron=True)
# Create a dictionary for parsing
local_dict = {
'g_target': Symbol('g_{\\text{target}}'),
'dt': Symbol('\Delta t'),
't_pre': Symbol('t_{\\text{pre}}'),
't_post': Symbol('t_{\\text{pos}}'),
'Uniform': Function('\mathcal{U}'),
'Normal': Function('\mathcal{N}'),
'ite': Function('ite', nargs=3)
}
for att in attributes:
local_dict[att] = Symbol(_latexify_name(att, variable_names))
tex_dict = {}
for key, val in local_dict.items():
tex_dict[val] = str(val)
for var in variables:
# Retrieve the equation
eq = var['eq']
# Extract sum(target)
targets = []
target_list = re.findall('(?P<pre>[^\w.])sum\(\s*([^()]+)\s*\)', eq)
for l, t in target_list:
if t.strip() == '':
continue
replacement = target_replacements[len(targets)]
targets.append((t.strip(), replacement))
local_dict[replacement] = Symbol(replacement)
tex_dict[replacement] = replacement
for target, repl in targets:
eq = eq.replace('sum('+target+')', repl)
# Parse the equation
ode = re.findall(r'([^\w]*)d([\w]+)/dt', eq)
if len(ode) > 0:
name = ode[0][1]
eq = eq.replace('d'+name+'/dt', '_grad_'+name)
grad_symbol = Symbol('\\frac{d'+_latexify_name(name, variable_names)+'}{dt}')
local_dict['_grad_'+name] = grad_symbol
tex_dict[grad_symbol] = '\\frac{d'+_latexify_name(name, variable_names)+'}{dt}'
var_code = _analyse_equation(var['eq'], eq, local_dict, tex_dict)
# Replace the targets
for target, repl in targets:
target = target.replace("_","\_")
var_code = var_code.replace(repl, '\\sum_{\\text{'+target+'}} w \cdot r^{\\text{pre}}(t-d)')
# Add the code
var['latex'] = var_code
var['ode'] = len(ode) > 0
if not neuron.spike: # rate-code, no spike
return variables, "", []
# Additional code for spiking neurons
spike_condition = _analyse_part(neuron.spike, local_dict, tex_dict)
# Reset
spike_reset = []
reset_vars = extract_variables(neuron.reset)
for var in reset_vars:
eq = var['eq']
spike_reset.append(_analyse_equation(var['eq'], eq, local_dict, tex_dict))
return variables, spike_condition, spike_reset
def _process_synapse_equations(synapse):
psp = ""
code = ""
pre_event = []
post_event = []
# Extract parameters and variables
parameters = extract_parameters(synapse.parameters)
variables = extract_variables(synapse.equations)
variable_names = [var['name'] for var in variables]
attributes, local_var, semiglobal_var, global_var = get_attributes(parameters, variables, neuron=False)
# Create a dictionary for parsing
local_dict = {
'w': Symbol('w(t)'),
'dt': Symbol('\Delta t'),
'g_target': Symbol('g_{\\text{target}(t)}'),
't_pre': Symbol('t_{\\text{pre}}'),
't_post': Symbol('t_{\\text{pos}}'),
'Uniform': Function('\mathcal{U}'),
'Normal': Function('\mathcal{N}'),
'ite': Function('ite', nargs=3)
}
for att in attributes:
local_dict[att] = Symbol(_latexify_name(att, variable_names))
tex_dict = {}
for key, val in local_dict.items():
tex_dict[val] = str(val)
# PSP
if synapse.psp:
psp, untouched_var, dependencies = extract_prepost('psp', synapse.psp.strip(), synapse.description)
for dep in dependencies['post']:
local_dict['_post_'+dep+'__'] = Symbol("{" + dep + "^{\\text{post}}}(t)")
for dep in dependencies['pre']:
local_dict['_pre_'+dep+'__'] = Symbol("{" + dep + "^{\\text{pre}}}(t-d)")
if synapse.type == 'rate':
psp = _analyse_part(psp, local_dict, tex_dict)
else:
psp = "g_\\text{target}(t) \mathrel{+}= " + _analyse_part(psp, local_dict, tex_dict)
else:
if synapse.type == 'rate':
psp = "w(t) \cdot r^{\\text{pre}}(t)"
else:
psp = ""
# Variables
for var in variables:
# Retrieve the equation
eq = var['eq']
# pre/post variables
targets=[]
eq, untouched_var, dependencies = extract_prepost(var['name'], eq, synapse.description)
for dep in dependencies['post']:
if dep.startswith('sum('):
target = re.findall(r'sum\(([\w]+)\)', dep)[0]
targets.append(target)
local_dict['_post_sum_'+target] = Symbol('PostSum'+target)
else:
local_dict['_post_'+dep+'__'] = Symbol("{{" + _latexify_name(dep, variable_names) + "}^{\\text{post}}}(t)")
for dep in dependencies['pre']:
if dep.startswith('sum('):
target = re.findall(r'sum\(([\w]+)\)', dep)[0]
targets.append(target)
local_dict['_pre_sum_'+target] = Symbol('PreSum'+target)
else:
local_dict['_pre_'+dep+'__'] = Symbol("{" + dep + "^{\\text{pre}}}(t-d)")
# Parse the equation
#eq = eq.replace(' ', '') # supress spaces
ode = re.findall(r'([^\w]*)d([\w]+)/dt', eq)
if len(ode) > 0:
name = ode[0][1]
eq = eq.replace('d'+name+'/dt', '_grad_'+name)
grad_symbol = Symbol('\\frac{d'+_latexify_name(name, variable_names)+'}{dt}')
local_dict['_grad_'+name] = grad_symbol
tex_dict[grad_symbol] = '\\frac{d'+_latexify_name(name, variable_names)+'}{dt}'
# Analyse
var_code = _analyse_equation(var['eq'], eq, local_dict, tex_dict)
# replace targets
for target in targets:
var_code = var_code.replace('PostSum'+target, "(\\sum_{\\text{" + target + "}} \\text{psp}(t))^{\\text{post}}")
var_code = var_code.replace('PreSum'+target, "(\\sum_{\\text{" + target + "}} \\text{psp}(t))^{\\text{pre}}")
# Add the code
var['latex'] = var_code
var['ode'] = len(ode) > 0
# Pre-event
if synapse.type == 'spike':
desc = analyse_synapse(synapse)
for var in extract_pre_spike_variable(desc):
eq = var['eq']
# pre/post variables
eq, untouched_var, dependencies = extract_prepost(var['name'], eq, desc)
for dep in dependencies['post']:
local_dict['_post_'+dep+'__'] = Symbol("{" + dep + "^{\\text{post}}}(t)")
for dep in dependencies['pre']:
local_dict['_pre_'+dep+'__'] = Symbol("{" + dep + "^{\\text{pre}}}(t)")
pre_event.append(_analyse_equation(v |
TylerTemp/tomorrow | lib/ui/error.py | Python | gpl-3.0 | 4,288 | 0.000233 | import logging
import os
import base64
try:
from io import BytesIO
except ImportError:
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
import tornado.web
import cairocffi as cairo
from lib.config.tomorrow import Config
from lib.tool import b64
# from lib.hdlr.utility.whoops import WoopseHandler
logger = logging.getLogger('tomorrow.ui.editor')
config = Config()
class ErrorImageModule(tornado.web.UIModule):
    """UI module rendering an HTTP error code as a swinging "puzzle" image.

    The numeric code is composited onto a background PNG with cairo and
    embedded into the page as a base64 data URL, so the error image needs
    no extra HTTP request.

    Fix: removed stray ``|`` extraction artifacts that had corrupted two
    lines of ``mk_img`` (the ``set_source_surface`` call and the return).
    """
    # Directory holding puzzle.png and the per-digit PNGs (e.g. "4.png").
    base = os.path.normpath(
        os.path.join(__file__, '..', '..', '..',
                     'static', 'img', 'error'))
    # Background image is read once at class-definition time and kept in
    # memory; mk_img() rewinds and re-reads this shared buffer per call.
    with open(os.path.join(base, 'puzzle.png'), 'rb') as f:
        puzzle_png = BytesIO(f.read())

    def render(self, code):
        """Return the HTML snippet showing the error ``code`` (e.g. 404)."""
        img = self.mk_img(str(code))
        data_url = b64.gen_data_url(img, is_byte=True)
        return (
            '<div class="am-cf">'
            '<img src="{src}" '
            'class="swing am-img-responsive am-center am-padding">'
            '</div>'
        ).format(src=data_url)

    def _cache(func):
        """Memoize ``func`` by its ``nums`` argument (unbounded cache).

        Note: used as a decorator at class-body time, so it takes the
        undecorated function directly (no ``self``).
        """
        _cache_result = {}

        def wrapper(self, nums):
            if nums not in _cache_result:
                _cache_result[nums] = func(self, nums)
            return _cache_result[nums]
        return wrapper

    @_cache
    def mk_img(self, nums):
        """Composite the digit string ``nums`` onto the puzzle background.

        Returns the resulting image as PNG bytes.
        """
        source_png = self.puzzle_png
        source_png.seek(0)  # shared class-level buffer; rewind before reuse
        source = cairo.ImageSurface.create_from_png(source_png)
        bg_context = cairo.Context(source)
        num_surface = cairo.ImageSurface.create_from_png(
            BytesIO(self.get_num_data(nums)))
        x, y = self.find_position(num_surface.get_width(),
                                  num_surface.get_height(),
                                  source.get_width(),
                                  source.get_height())
        bg_context.set_source_surface(num_surface, x, y)
        bg_context.paint()
        result = BytesIO()
        source.write_to_png(result)
        return result.getvalue()

    def find_position(self, s_width, s_height, b_width, b_height):
        """Return (left, top) centering a small surface on a big one."""
        left = (b_width - s_width) / 2.0
        top = (b_height - s_height) / 2.0
        return (left, top)

    def get_num_file(self, num):
        """Return a fresh BytesIO with the PNG for the single digit ``num``."""
        with open(os.path.join(self.base, '%s.png' % num), 'rb') as f:
            return BytesIO(f.read())

    def get_num_data(self, nums):
        """Lay the digit PNGs of ``nums`` side by side; return PNG bytes."""
        surfaces = []
        for each in nums:
            surfaces.append(
                cairo.ImageSurface.create_from_png(
                    self.get_num_file(each)))
        width = 0
        height = 0
        gap = 10  # horizontal spacing between digits, in pixels
        for each in surfaces:
            width += (each.get_width() + gap)
            height = max(height, each.get_height())
        width -= gap  # no trailing gap after the last digit
        surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
        surface_context = cairo.Context(surface)
        surface_context.set_source_rgba(0, 0, 0, 1)
        current_x = 0
        for each_surface in surfaces:
            surface_context.set_source_surface(each_surface, current_x)
            surface_context.paint()
            current_x += (each_surface.get_width() + gap)
        surface_context.paint()
        result = BytesIO()
        surface.write_to_png(result)
        return result.getvalue()

    def embedded_css(self):
        """CSS for the swing animation applied to the rendered image."""
        return '''
        .swing {
            -moz-animation: 3s ease 0s normal none infinite swing;
            -moz-transform-origin: center top;
            -webkit-animation:swing 3s infinite ease-in-out;
            -webkit-transform-origin:top;
            max-width: 500px;
        }
        @-moz-keyframes swing{
            0%{-moz-transform:rotate(-7deg)}
            50%{-moz-transform:rotate(7deg)}
            100%{-moz-transform:rotate(-7deg)}
        }
        @-webkit-keyframes swing{
            0%{-webkit-transform:rotate(-7deg)}
            50%{-webkit-transform:rotate(7deg)}
            100%{-webkit-transform:rotate(-7deg)}
        }'''
|
simbs/edx-platform | lms/djangoapps/instructor_task/tests/test_api.py | Python | agpl-3.0 | 13,801 | 0.003623 | """
Test for LMS instructor background task queue management
"""
from mock import patch, Mock
from bulk_email.models import CourseEmail, SEND_TO_ALL
from courseware.tests.factories import UserFactory
from xmodule.modulestore.exceptions import ItemNotFoundError
from instructor_task.api import (
get_running_instructor_tasks,
get_instructor_task_history,
submit_rescore_problem_for_all_students,
submit_rescore_problem_for_student,
submit_reset_problem_attempts_for_all_students,
submit_delete_problem_state_for_all_students,
submit_bulk_course_email,
submit_calculate_problem_responses_csv,
submit_calculate_students_features_csv,
submit_cohort_students,
submit_detailed_enrollment_features_csv,
submit_calculate_may_enroll_csv,
submit_executive_summary_report,
submit_course_survey_report,
generate_certificates_for_students,
regenerate_certificates
)
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import InstructorTask, PROGRESS
from instructor_task.tests.test_base import (InstructorTaskTestCase,
InstructorTaskCourseTestCase,
InstructorTaskModuleTestCase,
TestReportMixin,
TEST_COURSE_KEY)
from certificates.models import CertificateStatuses, CertificateGenerationHistory
class InstructorTaskReportTest(InstructorTaskTestCase):
    """
    Tests API methods that involve the reporting of status for background tasks.
    """
    def test_get_running_instructor_tasks(self):
        # Only tasks still in progress should be reported as "running";
        # failed and succeeded entries must be filtered out.
        for _ in range(4):
            self._create_failure_entry()
            self._create_success_entry()
        expected = {self._create_progress_entry().task_id for _ in range(4)}
        found = {task.task_id
                 for task in get_running_instructor_tasks(TEST_COURSE_KEY)}
        self.assertEquals(found, expected)

    def test_get_instructor_task_history(self):
        # History returns every task for the location, running or finished.
        expected = set()
        for _ in range(4):
            expected.add(self._create_failure_entry().task_id)
            expected.add(self._create_success_entry().task_id)
            expected.add(self._create_progress_entry().task_id)

        def history_ids(**kwargs):
            """Collect task ids from a history query for our problem."""
            return set(task.task_id for task in
                       get_instructor_task_history(TEST_COURSE_KEY,
                                                   usage_key=self.problem_url,
                                                   **kwargs))

        self.assertEquals(history_ids(), expected)
        # Same result when the task type is named explicitly ...
        self.assertEquals(history_ids(task_type='rescore_problem'), expected)
        # ... and nothing at all for a type that was never submitted.
        self.assertEquals(history_ids(task_type='dummy_type'), set())
class InstructorTaskModuleSubmitTest(InstructorTaskModuleTestCase):
    """Tests API methods that involve the submission of module-based background tasks.

    Fix: removed stray ``|`` extraction artifacts that had corrupted two
    lines of ``_test_submit_task`` (the ``problem_location`` call and the
    ``instructor_task`` assignment).
    """
    def setUp(self):
        super(InstructorTaskModuleSubmitTest, self).setUp()
        self.initialize_course()
        self.student = UserFactory.create(username="student", email="student@edx.org")
        self.instructor = UserFactory.create(username="instructor", email="instructor@edx.org")

    def test_submit_nonexistent_modules(self):
        # confirm that a rescore of a non-existent module returns an exception
        problem_url = InstructorTaskModuleTestCase.problem_location("NonexistentProblem")
        request = None
        with self.assertRaises(ItemNotFoundError):
            submit_rescore_problem_for_student(request, problem_url, self.student)
        with self.assertRaises(ItemNotFoundError):
            submit_rescore_problem_for_all_students(request, problem_url)
        with self.assertRaises(ItemNotFoundError):
            submit_reset_problem_attempts_for_all_students(request, problem_url)
        with self.assertRaises(ItemNotFoundError):
            submit_delete_problem_state_for_all_students(request, problem_url)

    def test_submit_nonrescorable_modules(self):
        # confirm that a rescore of an existent but unscorable module returns an exception
        # (Note that it is easier to test a scoreable but non-rescorable module in test_tasks,
        # where we are creating real modules.)
        problem_url = self.problem_section.location
        request = None
        with self.assertRaises(NotImplementedError):
            submit_rescore_problem_for_student(request, problem_url, self.student)
        with self.assertRaises(NotImplementedError):
            submit_rescore_problem_for_all_students(request, problem_url)

    def _test_submit_with_long_url(self, task_function, student=None):
        """Submitting a task against an over-long (255-char) url must fail."""
        problem_url_name = 'x' * 255
        self.define_option_problem(problem_url_name)
        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
        with self.assertRaises(ValueError):
            if student is not None:
                task_function(self.create_task_request(self.instructor), location, student)
            else:
                task_function(self.create_task_request(self.instructor), location)

    def test_submit_rescore_all_with_long_url(self):
        self._test_submit_with_long_url(submit_rescore_problem_for_all_students)

    def test_submit_rescore_student_with_long_url(self):
        self._test_submit_with_long_url(submit_rescore_problem_for_student, self.student)

    def test_submit_reset_all_with_long_url(self):
        self._test_submit_with_long_url(submit_reset_problem_attempts_for_all_students)

    def test_submit_delete_all_with_long_url(self):
        self._test_submit_with_long_url(submit_delete_problem_state_for_all_students)

    def _test_submit_task(self, task_function, student=None):
        """Submit a task, then check an identical resubmission is rejected.

        tests submit, and then tests a second identical submission.
        """
        problem_url_name = 'H1P1'
        self.define_option_problem(problem_url_name)
        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
        if student is not None:
            instructor_task = task_function(self.create_task_request(self.instructor), location, student)
        else:
            instructor_task = task_function(self.create_task_request(self.instructor), location)
        # test resubmitting, by updating the existing record:
        instructor_task = InstructorTask.objects.get(id=instructor_task.id)
        instructor_task.task_state = PROGRESS
        instructor_task.save()
        with self.assertRaises(AlreadyRunningError):
            if student is not None:
                task_function(self.create_task_request(self.instructor), location, student)
            else:
                task_function(self.create_task_request(self.instructor), location)

    def test_submit_rescore_all(self):
        self._test_submit_task(submit_rescore_problem_for_all_students)

    def test_submit_rescore_student(self):
        self._test_submit_task(submit_rescore_problem_for_student, self.student)

    def test_submit_reset_all(self):
        self._test_submit_task(submit_reset_problem_attempts_for_all_students)

    def test_submit_delete_all(self):
        self._test_submit_task(submit_delete_problem_state_for_all_students)
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message'))
class InstructorTaskCourseSubmitTest(TestReportMixin, InstructorTaskCourseTestCase):
"""Test |
ljx0305/ice | scripts/tests/Ice/properties.py | Python | gpl-2.0 | 1,004 | 0.005976 | # -*- coding: utf-8 -*-
# **********************************************************************
#
# Copyright (c) 2003-2017 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
class PropertiesTestSuite(TestSuite):
    """Test suite exercising property/config loading with a non-ASCII path.

    Fix: removed stray ``|`` extraction artifacts that had corrupted the
    ``createFile`` argument list.
    """
    def setup(self, current):
        # File name contains the characters U+4E2D U+56FD ("中国"); on
        # Python 2 the UTF-8 byte escapes are used instead of code points.
        name = "\xe4\xb8\xad\xe5\x9b\xbd_client.config" if isPython2 else "\u4e2d\u56fd_client.config"
        current.createFile("./config/" + name,
                           ["# Automatically generated by Ice test driver.",
                            "Ice.Trace.Protocol=1",
                            "Ice.Trace.Network=1",
                            "Ice.ProgramName=PropertiesClient",
                            "Config.Path=./config/" + name],
                           "utf-8")

# Register the suite; the client receives the test directory as its argument.
PropertiesTestSuite(__name__, [ ClientTestCase(client=Client(args=["{testdir}"])) ])
|
memsharded/conan | conans/test/integration/install_outdated_test.py | Python | mit | 8,169 | 0.002938 | import time
import unittest
from collections import OrderedDict
from conans.model.ref import ConanFileReference
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.test.utils.tools import TestClient, TestServer, TurboTestClient, GenConanfile
from conans.util.env_reader import get_env
from conans.util.files import rmdir
class InstallOutdatedPackagesTest(unittest.TestCase):
    """Integration tests for ``conan install --build outdated``.

    Fix: removed stray ``|`` extraction artifacts that had corrupted the
    "Package is up to date" assertion string and a
    ``cpp_hello_conan_files`` call in ``install_outdated_test``.
    """
    def setUp(self):
        # One server, two clients sharing it; Hello0/0.1 is built and
        # uploaded so later tests can compare local vs. remote state.
        test_server = TestServer()
        self.servers = {"default": test_server}
        self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
        self.new_client = TestClient(servers=self.servers,
                                     users={"default": [("lasote", "mypass")]})
        self.ref = ConanFileReference.loads("Hello0/0.1@lasote/stable")
        files = cpp_hello_conan_files("Hello0", "0.1", build=False)
        self.client.save(files)
        self.client.run("export . lasote/stable")
        self.client.run("install Hello0/0.1@lasote/stable --build missing")
        self.client.run("upload Hello0/0.1@lasote/stable --all")

    @unittest.skipIf(get_env("TESTING_REVISIONS_ENABLED", False), "No sense with revs")
    def install_outdated_test(self):
        # If we try to install the same package with --build outdated it's already ok
        self.client.run("install Hello0/0.1@lasote/stable --build outdated")
        self.assertIn("Hello0/0.1@lasote/stable: Package is up to date", self.client.user_io.out)
        # Then we can export a modified recipe and try to install without --build outdated
        files = cpp_hello_conan_files("Hello0", "0.1", build=False)
        files["conanfile.py"] += "\n#Otherline"
        self.client.save(files)
        self.client.run("export . lasote/stable")
        self.client.run("install Hello0/0.1@lasote/stable")
        self.assertIn("Hello0/0.1@lasote/stable: Already installed!", self.client.user_io.out)
        self.assertNotIn("Package is up to date", self.client.user_io.out)
        self.assertNotIn("Outdated package!", self.client.user_io.out)
        # Try now with the --build outdated
        self.client.run("install Hello0/0.1@lasote/stable --build outdated")
        self.assertNotIn("Package is up to date", self.client.user_io.out)
        self.assertIn("Outdated package!", self.client.user_io.out)
        self.assertIn("Building your package", self.client.user_io.out)
        # Remove all local references, export again (the modified version not uploaded)
        # and try to install, it will discard the remote package too
        self.client.run("remove Hello0* -f")
        self.client.save(files)
        self.client.run("export . lasote/stable")
        self.client.run("remote add_ref Hello0/0.1@lasote/stable default")
        self.client.run("install Hello0/0.1@lasote/stable --build outdated")
        self.assertNotIn("Hello0/0.1@lasote/stable: Already installed!", self.client.user_io.out)
        self.assertNotIn("Package is up to date", self.client.user_io.out)
        self.assertIn("Building your package", self.client.user_io.out)

    def install_outdated_dep_test(self):
        """An outdated *dependency* binary is only rebuilt once the local
        recipe is also out of date."""
        # A new recipe that depends on Hello0/0.1
        new_client = TestClient(servers=self.servers,
                                users={"default": [("lasote", "mypass")]})
        files = cpp_hello_conan_files("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], build=False)
        new_client.save(files)
        new_client.run("export . lasote/stable")
        self.assertIn("A new conanfile.py version was exported", new_client.user_io.out)
        # It will retrieve from the remote Hello0 and build Hello1
        new_client.run("install Hello1/0.1@lasote/stable --build missing")
        # Then modify REMOTE Hello0 recipe files (WITH THE OTHER CLIENT)
        files = cpp_hello_conan_files("Hello0", "0.1", build=False)
        files["conanfile.py"] += "\n#MODIFIED RECIPE"
        self.client.save(files)
        self.client.run("export . lasote/stable")
        self.assertIn("A new conanfile.py version was exported", self.client.user_io.out)
        self.client.run("install Hello0/0.1@lasote/stable --build missing")
        # Upload only the recipe, so the package is outdated in the server
        self.client.run("upload Hello0/0.1@lasote/stable")
        # Now, with the new_client, remove only the binary package from Hello0
        rmdir(new_client.cache.package_layout(self.ref).packages())
        # And try to install Hello1 again, should not complain because the remote
        # binary is in the "same version" than local cached Hello0
        new_client.run("install Hello1/0.1@lasote/stable --build outdated")
        self.assertIn("Downloading conan_package.tgz", new_client.user_io.out)
        self.assertIn("Hello0/0.1@lasote/stable: Package is up to date", new_client.user_io.out)
        # With revisions makes no sense, it won't download an outdated package, it belongs to
        # a different recipe
        if not new_client.cache.config.revisions_enabled:
            # But if we remove the full Hello0 local package, will retrieve the updated
            # recipe and the outdated package
            new_client.run("remove Hello0* -f")
            new_client.run("install Hello1/0.1@lasote/stable --build outdated")
            self.assertIn("Hello0/0.1@lasote/stable: Outdated package!", new_client.user_io.out)
            self.assertIn("Hello0/0.1@lasote/stable: Building your package", new_client.user_io.out)

    def install_outdated_and_dep_test(self):
        """Combining ``--build outdated`` with a forced ``--build <ref>``.

        regression test for https://github.com/conan-io/conan/issues/1053
        """
        # A new recipe that depends on Hello0/0.1
        new_client = TestClient(servers=self.servers,
                                users={"default": [("lasote", "mypass")]})
        files = cpp_hello_conan_files("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], build=False)
        new_client.save(files)
        new_client.run("export . lasote/stable")
        self.assertIn("A new conanfile.py version was exported", new_client.user_io.out)
        # It will retrieve from the remote Hello0 and build Hello1
        new_client.run("install Hello1/0.1@lasote/stable --build missing")
        # Then modify REMOTE Hello0 recipe files (WITH THE OTHER CLIENT)
        files = cpp_hello_conan_files("Hello0", "0.1", build=False)
        files["conanfile.py"] += "\n#MODIFIED RECIPE"
        self.client.save(files)
        self.client.run("export . lasote/stable")
        self.assertIn("A new conanfile.py version was exported", self.client.user_io.out)
        self.client.run("install Hello0/0.1@lasote/stable --build missing")
        # Upload only the recipe, so the package is outdated in the server
        self.client.run("upload Hello0/0.1@lasote/stable")
        # Now, with the new_client, remove only the binary package from Hello0
        rmdir(new_client.cache.package_layout(self.ref).packages())
        # And try to install Hello1 again, should not complain because the remote
        # binary is in the "same version" than local cached Hello0
        new_client.run("install Hello1/0.1@lasote/stable --build outdated --build Hello1")
        self.assertIn("Downloading conan_package.tgz", new_client.user_io.out)
        self.assertIn("Hello1/0.1@lasote/stable: Forced build from source",
                      new_client.user_io.out)

    def install_outdated_checking_updates_test(self):
        """``install -u -b outdated`` must rebuild a binary left behind by
        a newer remote recipe."""
        server = TestServer()
        servers = OrderedDict([("default", server)])
        client = TurboTestClient(servers=servers)
        client2 = TurboTestClient(servers=servers)
        ref = ConanFileReference.loads("lib/1.0@conan/testing")
        client.create(ref)
        client.upload_all(ref)
        # Generate a new recipe, the binary becomes outdated
        time.sleep(1)
        client2.create(ref, conanfile=GenConanfile().with_build_msg("Some modified stuff"))
        client2.run("upload {} -r default".format(ref))
        # Update, building the outdated
        client.run("install -u -b outdated {}".format(ref))
        # The outdated is built
        self.assertIn("Some modified stuff", client.out)
|
IPMITMO/statan | coala-bears/bears/css/StyleLintBear.py | Python | mit | 7,474 | 0 | import json
from coalib.bearlib.abstractions.Linter import linter
from dependency_management.requirements.NpmRequirement import NpmRequirement
@linter(executable='stylelint',
output_format='regex',
output_regex=r'\s*(?P<filename>.+)\s*(?P<line>\d+):(?P<column>\d+)\s*'
r'\D\s*(?P<message>.+)',
config_suffix='.json')
class StyleLintBear:
    """
    Checks the code with stylelint. This will run stylelint over each file
    separately.
    Detect errors and potential problems in CSS code and to enforce
    appropriate coding conventions. For example, problems like syntax errors,
    invalid color codes etc can be detected.
    For more information on the analysis visit <http://stylelint.io/>

    Fix: removed stray ``|`` extraction artifacts that had corrupted the
    ``declaration-block-no-duplicate-properties`` option string and the
    ``declaration-empty-line-before`` option dict in ``generate_config``.
    """
    LANGUAGES = {'CSS', 'SCSS'}
    REQUIREMENTS = {NpmRequirement('stylelint', '7')}
    AUTHORS = {'The coala developers'}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'
    CAN_DETECT = {'Syntax', 'Unused Code', 'Formatting'}

    @staticmethod
    def generate_config(filename, file):
        """Build the stylelint JSON config enforcing the standard rules."""
        # Use standard stylelint rules
        rules = {
            'at-rule-name-case': 'lower',
            'at-rule-name-space-after': 'always-single-line',
            'at-rule-semicolon-newline-after': 'always',
            'block-closing-brace-empty-line-before': 'never',
            'block-closing-brace-newline-after': 'always',
            'block-closing-brace-newline-before': 'always-multi-line',
            'block-closing-brace-space-before': 'always-single-line',
            'block-no-empty': True,
            'block-opening-brace-newline-after': 'always-multi-line',
            'block-opening-brace-space-after': 'always-single-line',
            'block-opening-brace-space-before': 'always',
            'color-hex-case': 'lower',
            'color-hex-length': 'short',
            'color-no-invalid-hex': True,
            'comment-no-empty': True,
            'comment-whitespace-inside': 'always',
            'declaration-bang-space-after': 'never',
            'declaration-bang-space-before': 'always',
            'declaration-block-no-redundant-longhand-properties': True,
            'declaration-block-no-shorthand-property-overrides': True,
            'declaration-block-semicolon-newline-after': 'always-multi-line',
            'declaration-block-semicolon-space-after': 'always-single-line',
            'declaration-block-semicolon-space-before': 'never',
            'declaration-block-single-line-max-declarations': 1,
            'declaration-block-trailing-semicolon': 'always',
            'declaration-colon-newline-after': 'always-multi-line',
            'declaration-colon-space-after': 'always-single-line',
            'declaration-colon-space-before': 'never',
            'font-family-no-duplicate-names': True,
            'function-calc-no-unspaced-operator': True,
            'function-comma-newline-after': 'always-multi-line',
            'function-comma-space-after': 'always-single-line',
            'function-comma-space-before': 'never',
            'function-linear-gradient-no-nonstandard-direction': True,
            'function-max-empty-lines': 0,
            'function-name-case': 'lower',
            'function-parentheses-newline-inside': 'always-multi-line',
            'function-parentheses-space-inside': 'never-single-line',
            'function-whitespace-after': 'always',
            'indentation': 2,
            'keyframe-declaration-no-important': True,
            'length-zero-no-unit': True,
            'max-empty-lines': 1,
            'media-feature-colon-space-after': 'always',
            'media-feature-colon-space-before': 'never',
            'media-feature-name-case': 'lower',
            'media-feature-name-no-unknown': True,
            'media-feature-parentheses-space-inside': 'never',
            'media-feature-range-operator-space-after': 'always',
            'media-feature-range-operator-space-before': 'always',
            'media-query-list-comma-newline-after': 'always-multi-line',
            'media-query-list-comma-space-after': 'always-single-line',
            'media-query-list-comma-space-before': 'never',
            'no-empty-source': True,
            'no-eol-whitespace': True,
            'no-extra-semicolons': True,
            'no-invalid-double-slash-comments': True,
            'no-missing-end-of-source-newline': True,
            'number-leading-zero': 'always',
            'number-no-trailing-zeros': True,
            'property-case': 'lower',
            'property-no-unknown': True,
            'selector-attribute-brackets-space-inside': 'never',
            'selector-attribute-operator-space-after': 'never',
            'selector-attribute-operator-space-before': 'never',
            'selector-combinator-space-after': 'always',
            'selector-combinator-space-before': 'always',
            'selector-descendant-combinator-no-non-space': True,
            'selector-list-comma-newline-after': 'always',
            'selector-list-comma-space-before': 'never',
            'selector-max-empty-lines': 0,
            'selector-pseudo-class-case': 'lower',
            'selector-pseudo-class-no-unknown': True,
            'selector-pseudo-class-parentheses-space-inside': 'never',
            'selector-pseudo-element-case': 'lower',
            'selector-pseudo-element-colon-notation': 'double',
            'selector-pseudo-element-no-unknown': True,
            'selector-type-case': 'lower',
            'selector-type-no-unknown': True,
            'shorthand-property-no-redundant-values': True,
            'string-no-newline': True,
            'unit-case': 'lower',
            'unit-no-unknown': True,
            'value-list-comma-newline-after': 'always-multi-line',
            'value-list-comma-space-after': 'always-single-line',
            'value-list-comma-space-before': 'never',
            'value-list-max-empty-lines': 0
        }
        # Rules whose value is a [setting, options-dict] pair.
        rules['at-rule-empty-line-before'] = [
            'always',
            {'except': ['blockless-after-same-name-blockless',
                        'first-nested'],
             'ignore': ['after-comment']}
        ]
        rules['comment-empty-line-before'] = [
            'always',
            {'except': ['first-nested'], 'ignore': ['stylelint-commands']}
        ]
        rules['custom-property-empty-line-before'] = [
            'always',
            {'except': ['after-custom-property', 'first-nested'],
             'ignore': ['after-comment', 'inside-single-line-block']}
        ]
        rules['declaration-block-no-duplicate-properties'] = [
            True,
            {'ignore': ['consecutive-duplicates-with-different-values']}
        ]
        rules['declaration-empty-line-before'] = [
            'always',
            {'except': ['after-declaration', 'first-nested'],
             'ignore': ['after-comment', 'inside-single-line-block']}
        ]
        rules['rule-nested-empty-line-before'] = [
            'always-multi-line',
            {'except': ['first-nested'], 'ignore': ['after-comment']}
        ]
        rules['rule-non-nested-empty-line-before'] = [
            'always-multi-line',
            {'ignore': ['after-comment']}
        ]
        default_config = {'rules': rules}
        return json.dumps(default_config)

    @staticmethod
    def create_arguments(filename, file, config_file):
        """Invoke stylelint on ``filename`` using the generated config."""
        return filename, '--config=' + config_file
|
griffy/sikwidgets | sikwidgets/window.py | Python | mit | 3,281 | 0.003048 | import types
from sikwidgets.region_group import RegionGroup
from sikwidgets.util import to_snakecase
from sikwidgets.widgets import *
def gen_widget_method(widget_class):
    """Return a function suitable for use as an instance method.

    Calling the returned function creates (and registers) an instance of
    ``widget_class`` via the host object's ``create_widget`` factory.
    """
    def widget(self, *args, **kwargs):
        # Delegate construction and bookkeeping to the host window.
        return self.create_widget(widget_class, *args, **kwargs)

    return widget
class Window(RegionGroup):
    """A screen region containing widgets and (optionally) child windows.

    Widget accessor methods (one per instantiable widget class) are
    attached dynamically in ``add_widget_methods``.

    Fix: removed stray ``|`` extraction artifacts that had corrupted the
    ``self.region`` assignment and the ``create_image_folder()`` call.
    """
    def __init__(self, region, parent=None):
        # FIXME: this is hacky
        RegionGroup.__init__(self, parent)
        # manually set the region to the given one rather
        # than the region from the parent
        self.search_region = region
        self.region = region
        self.widgets = []
        self.windows = []
        self.add_widget_methods()
        self.contains()

    # FIXME: str() shouldn't return a URI.. use image_folder() method for this
    def __str__(self):
        # NOTE(review): `os` is not imported in this module directly; it
        # presumably arrives via the star import from sikwidgets.widgets
        # -- confirm, or add an explicit `import os`.
        uri = to_snakecase(self.__class__.__name__)
        if self.parent:
            uri = os.path.join(str(self.parent), uri)
        return uri

    def create_image_folders(self):
        """Recursively create the screenshot folders for every widget."""
        for widget in self.widgets:
            widget.create_image_folder()
        for window in self.windows:
            window.create_image_folders()

    def capture_screenshots(self):
        """Recursively capture screenshots for every widget and window."""
        for widget in self.widgets:
            widget.capture_screenshots()
        for window in self.windows:
            window.capture_screenshots()

    def contains(self):
        """Hook for subclasses to declare their widgets; default is none."""
        pass

    # TODO: use some basic statistics to decide
    # if we see the window or not
    def exists(self):
        """Heuristically decide whether the window is visible on screen.

        NOTE(review): both threshold checks run inside the loop, so a
        single visible widget (seen >= unseen) returns True immediately.
        """
        seen_widgets = 0
        unseen_widgets = 0
        for widget in self.widgets:
            if seen_widgets >= 10:
                # we're confident enough it exists
                return True
            if widget.exists():
                seen_widgets += 1
            else:
                unseen_widgets += 1
            if seen_widgets > 2 * unseen_widgets + 1:
                return True
            if seen_widgets >= unseen_widgets:
                return True
        return False

    def create_widget(self, widget_class, *args, **kwargs):
        """Instantiate ``widget_class`` on this window and register it."""
        widget = widget_class(self, *args, **kwargs)
        self.widgets.append(widget)
        return widget

    def add_widget_methods(self):
        """Attach one accessor method per instantiable widget class."""
        for class_name in instantiable_widget_class_names:
            # eval() over a trusted, module-defined list of class names
            widget_class = eval(class_name)
            method = types.MethodType(gen_widget_method(widget_class), self, self.__class__)
            # take the class, get its name in string form, and convert to snake case
            method_name = to_snakecase(widget_class.__name__)
            setattr(self, method_name, method)

    def menu(self, menu_class, *args, **kwargs):
        return self.create_widget(menu_class, *args, **kwargs)

    def page(self, page_class, *args, **kwargs):
        return self.create_widget(page_class, *args, **kwargs)

    def window(self, window_class):
        # since the region for a child window may actually be larger than
        # the region for this window, we should default to passing the
        # entire screen
        window = window_class(self.region.getScreen(), self)
        self.windows.append(window)
        return window
|
shedskin/shedskin | shedskin/graph.py | Python | gpl-3.0 | 71,909 | 0.002336 | '''
*** SHED SKIN Python-to-C++ Compiler ***
Copyright 2005-2013 Mark Dufour; License GNU GPL version 3 (See LICENSE)
graph.py: build constraint graph used in dataflow analysis
constraint graph: graph along which possible types 'flow' during an 'abstract execution' of a program (a dataflow analysis). consider the assignment statement 'a = b'. it follows that the set of possible types of b is smaller than or equal to that of a (a constraint). we can determine possible types of a, by 'flowing' the types from b to a, in other words, along the constraint.
constraint graph nodes are stored in gx.cnode, and the set of types of for each node in gx.types. nodes are identified by an AST Node, and two integers. the integers are used in py to duplicate parts of the constraint graph along two dimensions. in the initial constraint graph, these integers are always 0.
class ModuleVisitor: inherits visitor pattern from compiler.visitor.ASTVisitor, to recursively generate constraints for each syntactical Python construct. for example, the visitFor method is called in case of a for-loop. temporary variables are introduced in many places, to enable translation to a lower-level language.
parse_module(): locate module by name (e.g. 'os.path'), and use ModuleVisitor if not cached
'''
import copy
import os
import re
import sys
from compiler.ast import Const, AssTuple, AssList, From, Add, ListCompFor, \
UnaryAdd, Import, Bitand, Stmt, Assign, FloorDiv, Not, Mod, AssAttr, \
Keyword, GenExpr, LeftShift, AssName, Div, Or, Lambda, And, CallFunc, \
Global, Slice, RightShift, Sub, Getattr, Dict, Ellipsis, Mul, \
Subscript, Function as FunctionNode, Return, Power, Bitxor, Class as ClassNode, Name, List, \
Discard, Sliceobj, Tuple, Pass, UnarySub, Bitor, ListComp, TryExcept, With
from compiler.visitor import ASTVisitor
from error import error
from infer import inode, in_out, CNode, default_var, register_temp_var
from python import StaticClass, lookup_func, Function, is_zip2, \
lookup_class, is_method, is_literal, is_enum, lookup_var, assign_rec, \
Class, is_property_setter, is_fastfor, aug_msg, is_isinstance, \
Module, def_class, parse_file, find_module
# --- global variable mv: the module visitor currently being processed
_mv = None


def setmv(mv):
    """Install *mv* as the current module visitor and return it."""
    global _mv
    _mv = mv
    return _mv


def getmv():
    """Return the module visitor most recently installed via setmv()."""
    return _mv
class FakeGetattr3(Getattr):
    """Marker subclass of Getattr; distinguished only by type.

    NOTE(review): the consumers that isinstance-check these marker classes
    are elsewhere in the compiler — their exact purpose is not visible here.
    """
    pass
class FakeGetattr2(Getattr):
    """Marker subclass of Getattr; distinguished only by type (see above note)."""
    pass
class FakeGetattr(Getattr):
    """Marker subclass of Getattr; distinguished only by type (see above note)."""
    pass # XXX ugly
def check_redef(gx, node, s=None, onlybuiltins=False):  # XXX to modvisitor, rewrite
    """Raise a compile error if *node* redefines an already-known function/class.

    s -- optional explicit name to check instead of node.name
    onlybuiltins -- when True, only check against builtin/extension namespaces
    """
    mv = getmv()
    if mv.module.builtin:
        # redefinitions inside builtin modules are tolerated
        return
    namespaces = [mv.ext_classes, mv.ext_funcs]
    if not onlybuiltins:
        namespaces += [mv.classes, mv.funcs]
    name = s if s is not None else node.name
    for namespace in namespaces:
        if name in namespace:
            error("function/class redefinition is not supported", gx, node, mv=mv)
# --- maintain inheritance relations between copied AST nodes
def inherit_rec(gx, original, copy, mv):
    """Recursively register *copy* as the inherited duplicate of *original*.

    Records the relation in gx.inheritance_relations / gx.inherited /
    gx.parent_nodes, then walks both subtrees in lockstep.
    """
    gx.inheritance_relations.setdefault(original, []).append(copy)
    gx.inherited.add(copy)
    gx.parent_nodes[copy] = original
    for orig_child, copy_child in zip(original.getChildNodes(), copy.getChildNodes()):
        inherit_rec(gx, orig_child, copy_child, mv)
def register_node(node, func):
    """Append *node* to func.registered; silently no-op when func is falsy."""
    if not func:
        return
    func.registered.append(node)
def slice_nums(nodes):
    """Normalize slice components for code generation.

    Missing components (falsy or Const(None)) are replaced with Const(0);
    a leading Const bitmask records which positions were actually supplied
    (bit i set => component i was given).
    """
    mask = 0
    normalized = []
    for position, component in enumerate(nodes):
        if not component or (isinstance(component, Const) and component.value is None):
            normalized.append(Const(0))
        else:
            normalized.append(component)
            mask |= (1 << position)
    return [Const(mask)] + normalized
# --- module visitor; analyze program, build constraint graph
class ModuleVisitor(ASTVisitor):
    def __init__(self, module, gx):
        """Set up per-module bookkeeping for constraint-graph construction.

        module -- the Module being analyzed
        gx -- the global compiler state shared across all modules
        """
        ASTVisitor.__init__(self)
        self.module = module
        self.gx = gx
        # name -> definition maps local to this module
        self.classes = {}
        self.funcs = {}
        self.globals = {}
        self.exc_names = {}
        self.current_with_vars = []
        self.lambdas = {}
        self.imports = {}
        self.fake_imports = {}
        # definitions pulled in from builtin/extension modules
        self.ext_classes = {}
        self.ext_funcs = {}
        self.lambdaname = {}
        self.lwrapper = {}
        # shared counter used to generate unique temporary-variable names
        self.tempcount = self.gx.tempcount
        self.callfuncs = []
        self.for_in_iters = []
        self.listcomps = []
        self.defaults = {}
        self.importnodes = []
def dispatch(self, node, *args):
if (node, 0, 0) not in self.gx.cnode:
ASTVisitor.dispatch(self, node, *args)
def fake_func(self, node, objexpr, attrname, args, func):
if (node, 0, 0) in self.gx.cnode: # XXX
newnode = self.gx.cnode[node, 0, 0]
else:
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.type | s[newnode] = set()
fakefunc = CallFunc(Getattr(objexpr, attrname), args)
fakefunc.lineno = objexpr.lineno
self.visit(fakefunc, func)
self.add_constraint((inode(self.gx, fakefunc), newnode), func)
inode(self.gx, objexpr).fakefunc = fakefun | c
return fakefunc
# simple heuristic for initial list split: count nesting depth, first constant child type
def list_type(self, node):
count = 0
child = node
while isinstance(child, (List, ListComp)):
if not child.getChildNodes():
return None
child = child.getChildNodes()[0]
count += 1
if isinstance(child, (UnarySub, UnaryAdd)):
child = child.expr
if isinstance(child, CallFunc) and isinstance(child.node, Name):
map = {'int': int, 'str': str, 'float': float}
if child.node.name in ('range'): # ,'xrange'):
count, child = count + 1, int
elif child.node.name in map:
child = map[child.node.name]
elif child.node.name in (cl.ident for cl in self.gx.allclasses) or child.node.name in getmv().classes: # XXX getmv().classes
child = child.node.name
else:
if count == 1:
return None
child = None
elif isinstance(child, Const):
child = type(child.value)
elif isinstance(child, Name) and child.name in ('True', 'False'):
child = bool
elif isinstance(child, Tuple):
child = tuple
elif isinstance(child, Dict):
child = dict
else:
if count == 1:
return None
child = None
self.gx.list_types.setdefault((count, child), len(self.gx.list_types) + 2)
# print 'listtype', node, self.gx.list_types[count, child]
return self.gx.list_types[count, child]
    def instance(self, node, cl, func=None):
        """Create (or reuse) the constraint-graph node for *node* and seed its
        type set with an instance of class *cl*.

        func -- enclosing function, used as the CNode's parent.
        """
        if (node, 0, 0) in self.gx.cnode: # XXX to create_node() func
            newnode = self.gx.cnode[node, 0, 0]
        else:
            newnode = CNode(self.gx, node, parent=func, mv=getmv())
        newnode.constructor = True
        if cl.ident in ['int_', 'float_', 'str_', 'none', 'class_', 'bool_']:
            # scalar-like builtins use dcpa - 1 rather than dcpa
            # (see the module docstring on duplicated graph parts)
            self.gx.types[newnode] = set([(cl, cl.dcpa - 1)])
        else:
            if cl.ident == 'list' and self.list_type(node):
                # lists are further split by the list_type() heuristic
                self.gx.types[newnode] = set([(cl, self.list_type(node))])
            else:
                self.gx.types[newnode] = set([(cl, cl.dcpa)])
def constructor(self, node, classname, func):
cl = def_class(self.gx, classname)
self.instance(node, cl, func)
default_var(self.gx, 'unit', cl)
if classname in ['list', 'tuple'] and not node.nodes:
self.gx.empty_constructors.add(node) # ifa disables those that flow to instance variable assignments
# --- internally flow binary tuples
if cl.ident == 'tuple2':
default_var(self.gx, 'first', cl)
default_var(self.gx, 'second', cl)
elem0, elem1 = node.nodes
self.visit(elem0, func)
self.visit(elem1, func)
self.add_dynamic_constraint(node, elem0, 'unit', func)
self.add_dynamic_c |
google-research/data-driven-advection | datadrivenpdes/core/readers_test.py | Python | apache-2.0 | 6,924 | 0.001155 | # python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for dataset readers.py."""
import os.path
from absl import flags
from absl.testing import flagsaver
import apache_beam as beam
import numpy as np
from datadrivenpdes.advection import equations as advection_equations
from datadrivenpdes.core import readers
from datadrivenpdes.pipelines import create_training_data
import tensorflow as tf
from tensorflow.io import gfile
from absl.testing import absltest
# dataset writing needs to be happen in eager mode
tf.enable_eager_execution()
FLAGS = flags.FLAGS
class WriteReadDataTest(absltest.TestCase):
  """Round-trip tests for the training-data writer and the readers module.

  BUG FIX: repaired stray ' | ' extraction artifacts that had split
  'readers.get_output_grid' and 'low_res_grid' mid-token.
  """

  def test_shapes_and_exceptions(self):
    """Dataset writer and reader test, checks shapes and exceptions."""
    output_path = FLAGS.test_tmpdir
    output_name = 'temp'
    equation_name = 'advection_diffusion'
    discretization = 'finite_volume'
    dataset_type = 'all_derivatives'
    high_resolution = 125
    low_resolution = 25
    shards = 2
    example_num_time_steps = 3
    batch_size = 4
    diffusion_coefficient = 0.3
    expected_equation = advection_equations.FiniteVolumeAdvectionDiffusion(
        diffusion_coefficient=diffusion_coefficient)
    # create a temporary dataset
    with flagsaver.flagsaver(
        dataset_path=output_path,
        dataset_name=output_name,
        equation_name=equation_name,
        discretization=discretization,
        simulation_grid_size=high_resolution,
        output_grid_size=low_resolution,
        equation_kwargs=str(dict(diffusion_coefficient=diffusion_coefficient)),
        dataset_type=dataset_type,
        num_shards=shards,
        total_time_steps=10,
        example_num_time_steps=example_num_time_steps,
        time_step_interval=5,
        num_seeds=4,
    ):
      create_training_data.main([], runner=beam.runners.DirectRunner())
    metadata_path = os.path.join(output_path, output_name + '.metadata.json')
    self.assertTrue(gfile.exists(metadata_path))
    dataset_metadata = readers.load_metadata(metadata_path)
    low_res_grid = readers.get_output_grid(dataset_metadata)
    high_res_grid = readers.get_simulation_grid(dataset_metadata)
    equation = readers.get_equation(dataset_metadata)
    # grids and equation parameters must round-trip through the metadata
    self.assertEqual(low_res_grid.size_x, low_resolution)
    self.assertEqual(low_res_grid.size_y, low_resolution)
    self.assertEqual(high_res_grid.size_x, high_resolution)
    self.assertEqual(high_res_grid.size_y, high_resolution)
    self.assertAlmostEqual(high_res_grid.step, 2 * np.pi / high_resolution)
    self.assertAlmostEqual(
        equation.diffusion_coefficient, diffusion_coefficient)
    self.assertIs(type(equation), type(expected_equation))
    state_keys = expected_equation.key_definitions
    # NOTE(review): 'concentration_y_edge_y' looks inconsistent with
    # 'concentration_edge_x' — confirm against key_definitions.
    valid_data_keys = ((state_keys['concentration'].exact(),),
                       (state_keys['concentration_edge_x'].exact(),
                        state_keys['concentration_y_edge_y'].exact()))
    invalid_data_keys = ((state_keys['concentration'],
                          state_keys['concentration_edge_x']),
                         (state_keys['concentration_edge_x'],))
    valid_data_grids = (low_res_grid, low_res_grid)
    invalid_data_grids = (low_res_grid, high_res_grid)
    # any invalid keys/grids combination must be rejected
    with self.assertRaises(ValueError):
      readers.initialize_dataset(
          dataset_metadata, invalid_data_keys, valid_data_grids)
    with self.assertRaises(ValueError):
      readers.initialize_dataset(
          dataset_metadata, valid_data_keys, invalid_data_grids)
    with self.assertRaises(ValueError):
      readers.initialize_dataset(
          dataset_metadata, invalid_data_keys, invalid_data_grids)
    dataset = readers.initialize_dataset(
        dataset_metadata, valid_data_keys, valid_data_grids)
    dataset = dataset.repeat()
    dataset = dataset.batch(batch_size)
    [(first_state, second_state)] = dataset.take(1)
    self.assertEqual(set(first_state.keys()), set(valid_data_keys[0]))
    self.assertEqual(set(second_state.keys()), set(valid_data_keys[1]))
    first_state_shape = np.shape(first_state[valid_data_keys[0][0]])
    second_state_shape = np.shape(second_state[valid_data_keys[1][0]])
    expected_shape = (
        batch_size, example_num_time_steps, low_resolution, low_resolution)
    self.assertEqual(first_state_shape, expected_shape)
    self.assertEqual(second_state_shape, expected_shape)

  def test_statistics(self):
    """Dataset writer and reader test, checks statistics computations."""
    output_path = FLAGS.test_tmpdir
    output_name = 'temp'
    equation_name = 'advection'
    discretization = 'finite_volume'
    # create a temporary dataset
    with flagsaver.flagsaver(
        dataset_path=output_path,
        dataset_name=output_name,
        equation_name=equation_name,
        discretization=discretization,
        simulation_grid_size=256,
        output_grid_size=32,
        dataset_type='all_derivatives',
        total_time_steps=10,
        example_num_time_steps=3,
        time_step_interval=5,
        num_seeds=4,
    ):
      create_training_data.main([], runner=beam.runners.DirectRunner())
    metadata_path = os.path.join(output_path, output_name + '.metadata.json')
    dataset_metadata = readers.load_metadata(metadata_path)
    low_res_grid = readers.get_output_grid(dataset_metadata)
    equation = advection_equations.FiniteVolumeAdvectionDiffusion(
        diffusion_coefficient=0.1)
    data_key = equation.key_definitions['concentration'].exact()
    dataset = readers.initialize_dataset(
        dataset_metadata, ((data_key,),), (low_res_grid,))
    dataset = dataset.repeat(1)
    dataset = dataset.batch(1)
    # recompute mean/variance from the raw data and compare against the
    # statistics stored in the metadata
    all_data = np.concatenate(
        [np.ravel(data[0][data_key]) for data in dataset])
    expected_mean = np.mean(all_data)
    expected_variance = np.var(all_data, ddof=1)
    keys = readers.data_component_keys(dataset_metadata['components'])
    components_dict = dict(zip(keys, dataset_metadata['components']))
    component = components_dict[data_key, low_res_grid]
    metadata_mean = component['statistics']['mean']
    metadata_variance = component['statistics']['variance']
    np.testing.assert_allclose(metadata_mean, expected_mean, atol=1e-3)
    np.testing.assert_allclose(metadata_variance, expected_variance, atol=1e-3)
# Standard absltest entry point.
if __name__ == '__main__':
  absltest.main()
|
dan-cristian/haiot | main/persistence/__init__.py | Python | gpl-2.0 | 5,411 | 0.005729 | from common import Constant
from common import utils
from main.logger_helper import L
__author__ = 'Dan Cristian <dan.cristian@gmail.com>'
# saves record to cloud database
def save_to_history_cloud(obj):
    """Validate and log a historical record destined for the cloud backend.

    *obj* is a dict of published fields; the Constant.JSON_PUBLISH_* keys name
    the graphable fields (x axis, legend, shape, y axes) and the fields whose
    values changed. Only changed graphable y fields are considered. The actual
    plotly upload is commented out below, so currently this only validates,
    logs, and builds field_pairs locally.
    """
    try:
        L.l.debug('Trying to save historical record to cloud {}'.format(obj))
        if Constant.JSON_PUBLISH_GRAPH_X in obj:
            # name of x field
            axis_x_field = obj[Constant.JSON_PUBLISH_GRAPH_X]
            graph_id_field = obj[Constant.JSON_PUBLISH_GRAPH_ID]
            graph_legend_field = obj[Constant.JSON_PUBLISH_GRAPH_LEGEND]
            graph_shape_fields = obj[Constant.JSON_PUBLISH_GRAPH_SHAPE]
            graph_y_fields = obj[Constant.JSON_PUBLISH_GRAPH_Y]
            # names of fields that have value changed to record smallest amount of data
            changed_fields = obj[Constant.JSON_PUBLISH_FIELDS_CHANGED]
            # intersect lists and get only graphable fields that had values changed
            list_axis_y = list(set(graph_y_fields) & set(changed_fields))
            if len(list_axis_y) == 0:
                L.l.info('Ignoring record save graph={} changed fields={} obj={}'.format(graph_y_fields,
                                                                                         changed_fields, obj))
            else:
                L.l.debug('Trying to save y axis {}'.format(list_axis_y))
                if axis_x_field in obj and graph_id_field in obj:
                    table = obj[Constant.JSON_PUBLISH_TABLE]
                    # NOTE(review): trace_unique_id is assigned but never used below
                    trace_unique_id = obj[graph_id_field]  # unique record/trace identifier
                    x_val = obj[axis_x_field]
                    graph_legend_item_name = obj[graph_legend_field]  # unique key for legend
                    x_val = utils.parse_to_date(x_val)
                    x = x_val
                    index = 0
                    field_pairs = [[axis_x_field, x], [graph_legend_field, graph_legend_item_name],
                                   [Constant.JSON_PUBLISH_RECORD_UUID, obj[Constant.JSON_PUBLISH_RECORD_UUID]],
                                   [Constant.JSON_PUBLISH_SOURCE_HOST, obj[Constant.JSON_PUBLISH_SOURCE_HOST]]]
                    for axis_y in list_axis_y:
                        if axis_y in obj:
                            # NOTE(review): trace_list is created but never populated
                            trace_list = []
                            y = obj[axis_y]
                            # add multiple y values for later save in db as a single record
                            field_pairs.append([axis_y, y])
                            # upload to cloud if plotly is initialised
                            #from cloud import graph_plotly
                            #if graph_plotly.initialised:
                            #    from cloud.graph_plotly import graph_plotly_run
                            #    Log.logger.info('Uploading to cloud field {}'.format(graph_legend_field))
                            # shape visual type for this trace
                            #    shape = graph_shape_fields[index]
                            # unique name used for grid on upload
                            #    grid_base_name = str(table)
                            #    graph_plotly_run.add_grid_data(grid_unique_name=grid_base_name, x=x, y=y,
                            #                                   axis_x_name=axis_x_field, axis_y_name=axis_y,
                            #                                   record_unique_id_name=graph_legend_field,
                            #                                   record_unique_id_value=graph_legend_item_name)
                            #Log.logger.debug('Skip upload to cloud, plotly not init')
                            index += 1
                    else:
                        L.l.critical('Missing history axis_x [{}], graph_id [{}], in obj {}'.format(
                            axis_x_field, graph_id_field, obj))
        else:
            L.l.critical('Missing history axis X field {}'.format(Constant.JSON_PUBLISH_GRAPH_X))
    except Exception as ex:
        L.l.exception('General error saving historical cloud record, err {} obj={}'.format(ex, obj))
# saves record to the local history database
def save_to_history_db(obj):
    """Persist *obj* to the local history table named '<table>History'.

    Looks up the history model class by name on storage.sqalc.models, copies
    every matching field from *obj* (except the primary key 'id') onto a new
    record, and commits it. Failures are logged, never raised.

    BUG FIX: repaired a stray ' | ' extraction artifact that had split
    'hasattr(new_record, field)' mid-call.
    """
    try:
        table = obj[Constant.JSON_PUBLISH_TABLE]
        # L.l.debug('Trying to save historical record to db={}'.format(table))
        # save to local history DB, append 'History' to the source table name
        dest_table = str(table) + 'History'
        # L.l.debug('Saving to local db table {} obj={}'.format(dest_table, obj))
        from storage.sqalc import models
        # http://stackoverflow.com/questions/4030982/initialise-class-object-by-name
        try:
            class_table = getattr(models, dest_table)
            new_record = class_table()
            # copy every matching field except the primary key
            for field in obj:
                if hasattr(new_record, field) and field != "id":
                    setattr(new_record, field, obj[field])
            if new_record.add_commit_record_to_db():
                # L.l.debug('Saved OK to local db table {} obj={}'.format(dest_table, new_record))
                pass
            else:
                L.l.critical("Cannot save history db record={}".format(obj))
        except Exception as ex:
            L.l.critical("Cannot save history db err={} record={}".format(ex, obj))
    except Exception as ex:
        L.l.exception('General error saving historical db record, err {} obj={}'.format(ex, obj))
|
Affirm/cabot | cabot/cabotapp/utils.py | Python | mit | 2,425 | 0.006186 | from django.conf import settings
from mock import Mock
from cabot.cabotapp import defs
from datetime import datetime
def build_absolute_url(relative_url):
    """Prefix *relative_url* with scheme://host — handy for links in emails."""
    base = '{}://{}'.format(settings.WWW_SCHEME, settings.WWW_HTTP_HOST)
    return base + relative_url
def create_failing_service_mock():
    """
    Create a Mock object mimicking a critical service, with a single (also mocked) failing check.
    Note that not all attributes are mocked (notably hipchat_instance, mattermost_instance).
    Primary keys/IDs are mocked to be 0. Functions that return querysets in reality (like active_status_checks)
    will return hard-coded lists.
    This is typically called by an AlertPlugin.send_test_alert() implementation, and further configured by calling
    service_mock.configure_mock(attr=value, ...) to add any plugin-specific attributes (like mattermost_instance).
    :return: Mock emulating a service with 1 failing check
    """
    # BUG FIX: repaired stray ' | ' extraction artifacts that had split
    # 'service_mock' and 'ERROR_STATUS=defs.ERROR_STATUS' mid-token.
    check_mock = Mock()
    check_mock.configure_mock(id=0, pk=0, name='Alert Testing Check', active=True,
                              get_status_image=lambda: None, check_category=lambda: "Mock Check",
                              get_importance_display=lambda: "Critical")
    service_mock = Mock()
    service_mock.configure_mock(id=0, pk=0, name='Alert Testing Service', alerts_enabled=True,
                                # plugins use service.CRITICAL_STATUS etc, so we mock these constants too
                                CRITICAL_STATUS=defs.CRITICAL_STATUS, PASSING_STATUS=defs.PASSING_STATUS,
                                WARNING_STATUS=defs.WARNING_STATUS, ERROR_STATUS=defs.ERROR_STATUS,
                                status_checks=[check_mock], recent_snapshots=[],
                                overall_status=defs.CRITICAL_STATUS,
                                active_status_checks=lambda: [check_mock],
                                all_passing_checks=lambda: [], all_failing_checks=lambda: [check_mock])
    return service_mock
def format_datetime(dt):
    """Render *dt* as 'YYYY-MM-DD HH:MM:SS'; None becomes the empty string.

    Used primarily for datetimes in API responses, whereas format_timestamp
    produces the human-readable format shown on the web.
    """
    if dt is None:
        return ''
    return dt.strftime('%Y-%m-%d %H:%M:%S')
|
bhpayne/domino_tile_floor | transitions/domino_tiles_transition_graph_breadth_first_multiprocessing.py | Python | apache-2.0 | 5,268 | 0.019172 | #!/usr/bin/env python3
# http://sebastianraschka.com/Articles/2014_multiprocessing.html
import multiprocessing as mp
import yaml
# Define an output queue
output = mp.Queue()
def create_transition_dic(width, height):
    """Build the adjacency map of a width x height grid.

    Cells are numbered 1..width*height in row-major order. Each cell maps to
    the list of its orthogonal neighbours, in the fixed order
    left, right, top, bottom (omitting those off the board).
    """
    num_tiles = width * height
    neighbors = {}
    for cell in range(1, num_tiles + 1):
        adjacent = []
        if (cell - 1) % width != 0:    # not in the first column -> left
            adjacent.append(cell - 1)
        if cell % width != 0:          # not in the last column -> right
            adjacent.append(cell + 1)
        if cell > width:               # not in the top row -> up
            adjacent.append(cell - width)
        if cell <= num_tiles - width:  # not in the bottom row -> down
            adjacent.append(cell + width)
        neighbors[cell] = adjacent
    return neighbors
def print_transition_dic(transition_dic):
    # Debug helper: print each start cell with its neighbour list.
    # NOTE(review): dict.iteritems() makes this Python 2 only, which matches
    # the Python 2 syntax used in the module-level code below.
    for key,value in transition_dic.iteritems():
        print("start = "+str(key) +"; neighbors = "+ str(value))
    return
def transition_2x3():
    """Hand-coded adjacency map for a 3-wide, 2-high grid (cells 1..6)."""
    return {
        1: [2, 4],
        2: [1, 5, 3],
        3: [2, 6],
        4: [1, 5],
        5: [4, 2, 6],
        6: [3, 5],
    }
def print_list_of_transitions(list_of_transitions):
    """Debug helper: print every transition path, one per line."""
    for path in list_of_transitions:
        print(path)
    return
def append_next_value(transition_dic, list_of_transitions, number_of_tiles_to_fill, print_status):
    """Advance every partial path by one step along the adjacency map.

    Each path shorter than number_of_tiles_to_fill is extended by every
    unvisited neighbour of its last cell, producing one new path per
    neighbour. Paths that already reached full length are dropped (the
    caller tracks completion separately). Returns the new frontier.
    """
    extended = []
    for path in list_of_transitions:
        if print_status:
            print("\nthis list = ")
            print(path)
        if len(path) >= number_of_tiles_to_fill:
            continue  # path already complete; not carried forward
        tail = path[-1]
        if print_status:
            print("last value = " + str(tail))
        for candidate in transition_dic[tail]:
            if print_status:
                print("next value = " + str(candidate))
            if candidate not in path:
                if print_status:
                    print("adding next value to list")
                grown = path + [candidate]
                if print_status:
                    print(grown)
                extended.append(grown)
    return extended
def get_transitions_for_seed(starting_value, print_status,
                             number_of_tiles_to_fill, transition_dic, list_of_transitions):
    """Run the breadth-first path search seeded at *starting_value*.

    Progress (step, total steps, frontier size) is appended to a per-seed
    record file; the final list of paths is pushed onto the module-level
    `output` queue for the parent process to collect.

    NOTE(review): reads the module-level globals `width`, `height` and
    `output` — confirm they are set before the worker processes start.

    BUG FIX: repaired a stray ' | ' extraction artifact inside the f.write()
    call; switched to a with-statement so the file closes on exceptions too.
    """
    with open('record_'+str(width)+'x'+str(height)+'_'+str(starting_value)+'.dat', 'w') as f:
        if print_status: print("\nseed:")
        this_transition = [starting_value]
        list_of_transitions.append(this_transition)
        if print_status:
            print("list of transitions:")
            print_list_of_transitions(list_of_transitions)
        for loop_indx in range(number_of_tiles_to_fill-1):
            print("\nstep "+str(loop_indx) + " of "+str(number_of_tiles_to_fill)+" for seed "+str(starting_value))
            list_of_transitions = append_next_value(transition_dic, list_of_transitions, number_of_tiles_to_fill, print_status)
            print("number of searches = "+str(len(list_of_transitions)))
            f.write(str(loop_indx+1)+" "+str(number_of_tiles_to_fill)+" "+str(len(list_of_transitions))+"\n")
    # print("list of transitions:")
    # print_list_of_transitions(list_of_transitions)
    output.put(list_of_transitions)
# these values get over-written by the config.yaml values
width=4
height=4
#starting_value=1
number_of_processes=4
# NOTE(review): Python 2 only syntax below ("except ..., exc", print
# statement, file()); yaml.load on a file is also unsafe for untrusted input
# (prefer yaml.safe_load).
try:
    config = yaml.load(file('config.yaml', 'r'))
except yaml.YAMLError, exc:
    print "Error in configuration file:", exc
# NOTE(review): the config is loaded a second time here, discarding the
# guarded result above — a YAML error at this point raises uncaught.
config=yaml.load(file('config.yaml','r'))
width=config['width']
height=config['height']
number_of_processes=config['num_proc']
number_of_tiles_to_fill=width*height
transition_dic = create_transition_dic(width,height)
#print_transition_dic(transition_dic)
print_status=False
list_of_transitions=[]
# one worker process per seed cell 1..number_of_processes
processes = [mp.Process(target=get_transitions_for_seed, args=(starting_value,
                                                               print_status,number_of_tiles_to_fill,
                                                               transition_dic,list_of_transitions))
             for starting_value in range(1,number_of_processes+1)]
# Run processes
for p in processes:
    p.start()
# Exit the completed processes
for p in processes:
    p.join()
# Get process results from the output queue
results = [output.get() for p in processes]
print("list of transitions:")
for this_list_of_transitions in results:
    print_list_of_transitions(this_list_of_transitions)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.