text stringlengths 4 1.02M | meta dict |
|---|---|
import time
import random
class Sensor(object):
    """Simulated sensor that produces random integer readings in [0, 100]."""

    def __init__(self, name):
        self.id = name
        self.value = 0
        # Initialize the timestamp up front: the original only created this
        # attribute inside readvalue(), so reading it before the first call
        # raised AttributeError.
        self.timestamp = 0.0

    def readvalue(self):
        """Take a new simulated reading.

        Stores the reading in self.value, records the read time in
        self.timestamp, and returns the new value.
        """
        # We simulate a sensor by generating random values
        self.value = random.randint(0, 100)
        self.timestamp = time.time()
        return self.value
if __name__ == '__main__':
    import rticonnextdds_connector as rti
    import time
    import argparse

    cliparser = argparse.ArgumentParser(description="Connect Sensor simulator")
    cliparser.add_argument("--id", help="sensor identifier", default=0)
    args = cliparser.parse_args()

    # Create the DDS connector and get the writer used to publish samples.
    connector = rti.Connector("MyParticipantLibrary::Sensor", 'Tutorial.xml')
    writer = connector.getOutput("TempPublisher::TempWriter")

    # Create a sensor
    sensor = Sensor("SENSOR#{}".format(args.id))

    # Publish a fresh reading once per second, forever.
    while True:
        sensor.readvalue()
        writer.instance.setString('id', sensor.id)
        writer.instance.setNumber('value', sensor.value)
        writer.instance.setNumber('timestamp', sensor.timestamp)
        # print() call form: the original used the Python-2-only print
        # statement, which is a SyntaxError on Python 3. A single
        # parenthesized argument works on both interpreters.
        print("[{ts}] Updating sensor {id} value: {value}".format(
            ts=sensor.timestamp, id=sensor.id, value=sensor.value))
        writer.write()
        time.sleep(1)
| {
"content_hash": "d13a3f2b99058dc624fcbb35f431ea89",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 119,
"avg_line_length": 26.934782608695652,
"alnum_prop": 0.6351896690879741,
"repo_name": "gianpiero/dds-firststeps",
"id": "c3c94db5fd047df9bd9a7ae0c54547326cabde02",
"size": "1263",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ex4-durability/sensor.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "13412"
},
{
"name": "Python",
"bytes": "10093"
},
{
"name": "Shell",
"bytes": "42"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
# URL configuration for the block explorer.
# NOTE(review): patterns() with dotted-string view references is the
# pre-Django-1.8 style (removed in 1.10); confirm the project's Django
# version before modernizing this file.
urlpatterns = patterns('',
    # Logging Test
    url(r'^fail500/$', 'homepage.views.fail500', name='fail500'),
    # Login
    url(r'^signup/?$', 'users.views.signup', name='signup'),
    url(r'^login/?$', 'users.views.user_login', name='user_login'),
    url(r'^logout/?$', 'users.views.logout_request', name='logout_request'),
    url(r'^confirm/(?P<verif_code>[-\w]+)/$', 'users.views.confirm_subscription', name='confirm_subscription'),
    url(r'^unconfirmed-email/?$', 'users.views.unconfirmed_email', name='unconfirmed_email'),
    url(r'^confirm-pw-reset/(?P<email_address>[-\w@.+]+)?$', 'users.views.confirm_pw_reset', name='confirm_pw_reset'),
    url(r'^set-password/?$', 'users.views.password_upsell', name='password_upsell'),
    url(r'^change-password/?$', 'users.views.change_password', name='change_password'),
    url(r'^forgot-password/?$', 'users.views.forgot_password', name='forgot_password'),
    url(r'^reset-pw/(?P<verif_code>[-\w@.+]+)?$', 'users.views.reset_pw', name='reset_pw'),
    # Address subscription / forwarding management
    url(r'^unsubscribe/(?P<unsub_code>[-\w]+)/$', 'addresses.views.unsubscribe_address', name='unsubscribe_address'),
    url(r'^remove-subscription/(?P<address_subscription_id>[-\w]+)/$', 'addresses.views.user_unsubscribe_address', name='user_unsubscribe_address'),
    url(r'^archive-forwarding-address/(?P<address_forwarding_id>[-\w]+)/$', 'addresses.views.user_archive_forwarding_address', name='user_archive_forwarding_address'),
    url(r'^dashboard/?$', 'users.views.dashboard', name='dashboard'),
    # Webhooks:
    url(r'^address-webhook/(?P<secret_key>[-\w]+)/(?P<ignored_key>[-\w]+)?$', 'addresses.views.address_webhook', name='address_webhook'),
    # Admin
    url(r'^admin/', include(admin.site.urls)),
    # App pages
    url(r'^$', 'homepage.views.home', name='home'),
    url(r'^(?P<coin_symbol>[-\w]+)/forwarding/$', 'addresses.views.setup_address_forwarding', name='setup_address_forwarding'),
    url(r'^(?P<coin_symbol>[-\w]+)/subscribe/$', 'addresses.views.subscribe_address', name='subscribe_address'),
    url(r'^(?P<coin_symbol>[-\w]+)/address/(?P<address>[-\w]+)/$', 'addresses.views.address_overview', name='address_overview'),
    url(r'^(?P<coin_symbol>[-\w]+)/address/(?P<address>[-\w]+)/(?P<wallet_name>[-\w\.]+)/$', 'addresses.views.address_overview', name='address_overview'),
    url(r'^(?P<coin_symbol>[-\w]+)/tx/(?P<tx_hash>[-\w]+)/$', 'transactions.views.transaction_overview', name='transaction_overview'),
    url(r'^(?P<coin_symbol>[-\w]+)/tx-confidence/(?P<tx_hash>[-\w]+)/$', 'transactions.views.poll_confidence', name='poll_confidence'),
    url(r'^(?P<coin_symbol>[-\w]+)/block/(?P<block_representation>[-\w]+)/$', 'blocks.views.block_overview', name='block_overview'),
    url(r'^(?P<coin_symbol>[-\w]+)/pushtx/$', 'transactions.views.push_tx', name='push_tx'),
    url(r'^(?P<coin_symbol>[-\w]+)/decodetx/$', 'transactions.views.decode_tx', name='decode_tx'),
    url(r'^highlights/$', 'homepage.views.highlights', name='highlights'),
    # Widget
    url(r'^widgets/(?P<coin_symbol>[-\w]+)/?$', 'addresses.views.search_widgets', name='search_widgets'),
    url(r'^show-widgets/(?P<coin_symbol>[-\w]+)/(?P<address>[-\w]+)/$', 'addresses.views.widgets_overview', name='widgets_overview'),
    url(r'^widget/(?P<coin_symbol>[-\w]+)/(?P<address>[-\w]+)/balance/$', 'addresses.views.render_balance_widget', name='render_balance_widget'),
    url(r'^widget/(?P<coin_symbol>[-\w]+)/(?P<address>[-\w]+)/received/$', 'addresses.views.render_received_widget', name='render_received_widget'),
    url(r'^widgets/$', 'addresses.views.widget_forwarding', name='widget_forwarding'),
    # Forwarding Pages (URL hacks)
    url(r'^forwarding/$', 'addresses.views.forward_forwarding', name='forward_forwarding'),  # awesome name
    url(r'^subscribe/$', 'addresses.views.subscribe_forwarding', name='subscribe_forwarding'),
    url(r'^pushtx/$', 'transactions.views.pushtx_forwarding', name='pushtx_forwarding'),
    url(r'^decodetx/$', 'transactions.views.decodetx_forwarding', name='decodetx_forwarding'),
    url(r'^latest-block/$', 'blocks.views.latest_block_forwarding', name='latest_block_forwarding'),
    url(r'^(?P<coin_symbol>[-\w]+)/latest-block/$', 'blocks.views.latest_block', name='latest_block'),
    url(r'^latest-unconfirmed-tx/$', 'transactions.views.latest_unconfirmed_tx_forwarding', name='latest_unconfirmed_tx_forwarding'),
    url(r'^(?P<coin_symbol>[-\w]+)/latest-unconfirmed-tx/$', 'transactions.views.latest_unconfirmed_tx', name='latest_unconfirmed_tx'),
    # So broad it must be last
    url(r'^(?P<coin_symbol>[-\w]+)/$', 'homepage.views.coin_overview', name='coin_overview'),
)
| {
"content_hash": "1410052dc7a9ce51eae7910c1cbbcba3",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 167,
"avg_line_length": 74.109375,
"alnum_prop": 0.6554923044486611,
"repo_name": "linuxmahara/blockcypher-explorer",
"id": "d357bc385e13f8031a1f41f9a655ee39e8137546",
"size": "4743",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blockexplorer/urls.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8356"
},
{
"name": "HTML",
"bytes": "85988"
},
{
"name": "JavaScript",
"bytes": "5588"
},
{
"name": "Python",
"bytes": "139805"
}
],
"symlink_target": ""
} |
__author__ = 'stachern'
from teamcity import underTeamcity as is_running_under_teamcity
from teamcity.unittestpy import TeamcityTestRunner
from django.test.simple import DjangoTestSuiteRunner
class TeamCityTestRunner(DjangoTestSuiteRunner):
    """Django test-suite runner that reports through TeamCity service
    messages when running under a TeamCity build agent, and falls back
    to the plain unittest text runner otherwise.

    NOTE(review): the module imports ``underTeamcity`` from the teamcity
    package; the current teamcity-messages API exposes
    ``is_running_under_teamcity`` — verify against the pinned version.
    """

    def run_suite(self, suite, **kwargs):
        """Run *suite* and return the unittest result object."""
        if is_running_under_teamcity():
            return TeamcityTestRunner().run(suite)
        # Local import fixes a NameError: the module never imported
        # unittest, so the original crashed on this fallback path.
        import unittest
        return unittest.TextTestRunner(
            verbosity=self.verbosity, failfast=self.failfast).run(suite)
"content_hash": "44cde86df807f3f53a8efb0eed4b3418",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 76,
"avg_line_length": 34.13333333333333,
"alnum_prop": 0.72265625,
"repo_name": "stachern/django_teamcity_test_runner",
"id": "dc7384cc2ae021f23c453ef700fd1e077877cd38",
"size": "512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "teamcity_runner/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1057"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from . import views, list_views
# URL configuration for the article app.
urlpatterns = [
    # Column (category) management
    url(r'^article-column/$', views.article_column, name='article_column'),
    url(r'^rename-column/$', views.rename_article_column, name='rename_article_column'),
    # Authoring: create, list, view, delete, re-edit
    url(r'^article-post/$', views.article_post, name='article_post'),
    url(r'^article-list/$', views.article_list, name='article_list'),
    url(r'^article-detail/(?P<id>\d+)/(?P<slug>[-\w]+)/$', views.article_detail, name='article_detail'),
    url(r'^del-article/$', views.del_article, name='del_article'),
    url(r'^redit-article/(?P<article_id>\d+)/$', views.redit_article, name='redit_article'),
    # Public reading views (list_views module)
    url(r'^list-article-titles/$', list_views.article_titles, name='article_titles'),
    url(r'^list-article-detail/(?P<id>\d+)/(?P<slug>[-\w]+)/$', list_views.article_detail, name='list_article_detail'),
    url(r'^list-article-titles/(?P<username>[-\w]+)/$', list_views.article_titles, name='author_articles'),
    url(r'^like-article/$', list_views.like_article, name='like_article'),
    # Tag management
    url(r'^article-tag/$', views.article_tag, name='article_tag'),
    url(r'^del-article-tag/$', views.del_article_tag, name='del_article_tag'),
]
"content_hash": "f702558b0d652b26ed5765d4714bfeda",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 119,
"avg_line_length": 62.578947368421055,
"alnum_prop": 0.6593776282590412,
"repo_name": "glon/django_test",
"id": "4e70118430f83e616a97cb7f0cd3fd3736da8c83",
"size": "1189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "article/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "191988"
},
{
"name": "HTML",
"bytes": "121545"
},
{
"name": "JavaScript",
"bytes": "584736"
},
{
"name": "Python",
"bytes": "67192"
}
],
"symlink_target": ""
} |
import json
import argparse
import boto3
from sys import getsizeof
### Read config file
def readConfigFile(filename):
    """Parse the command line for the argument named *filename* and load
    the JSON configuration file it points to.

    Exits the process with status 1 when the file cannot be opened or does
    not contain valid JSON.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(filename)
    args = parser.parse_args()
    # argparse stores the value under the argument name with leading dashes
    # stripped and '-' mapped to '_'. The original hard-coded
    # 'args.filename', which raised AttributeError for any other name.
    dest = filename.lstrip('-').replace('-', '_')
    config_path = getattr(args, dest)
    try:
        with open(config_path) as config_file:
            return json.load(config_file)
    except ValueError:
        print("ERROR: Unable to read your JSON, please check your configuration file.")
        exit(1)
    except IOError as err:
        # Robustness: a missing/unreadable file previously produced a raw
        # traceback instead of a clean error.
        print("ERROR: Unable to open configuration file: %s" % err)
        exit(1)
## initialize the ec2 boto3 base on what you have in your config file,
def ini_ec2_Boto(data):
    """Build and return a boto3 EC2 resource from the BotoInit section
    of the parsed config (profile and region are both optional)."""
    boto_init = data['BotoInit']
    if boto_init['ProfileName']:
        boto3.setup_default_session(profile_name=boto_init['ProfileName'])
    region = boto_init['RegionName']
    if region:
        return boto3.resource('ec2', region_name=region)
    return boto3.resource('ec2')
## Look for the VPC matching your VpcName from the config file
def getVPC(data):
    """Return the single VPC selected by BotoFilterInfo.VpcName (or the
    only VPC in the account when the name is empty).

    Exits the process with status 1 when zero or more than one VPC match.
    """
    boto_ec2 = ini_ec2_Boto(data)
    ### Creating the filter to find the VPC if name provided in config file
    vpcName = data['BotoFilterInfo']['VpcName']
    try:
        if vpcName:
            vpcfilters = [{'Name': 'tag:Name', 'Values': [vpcName]}]
            VPCs = list(boto_ec2.vpcs.filter(Filters=vpcfilters))
        else:
            VPCs = list(boto_ec2.vpcs.all())
        if len(VPCs) == 1:
            return VPCs[0]
        if len(VPCs) == 0:
            # Fixed message: original read "Cloud not found any VPCs".
            print("ERROR: Could not find any VPCs matching \"" + vpcName + "\"")
            exit(1)
        # Fixed message: original omitted what was found N of.
        print("ERROR: Found \"" + str(len(VPCs)) + "\" VPCs, please update the "
              "BotoFilterInfo.VpcName from your config file")
        exit(1)
    except IndexError as e:
        print("ERROR: Boto can't find the " + vpcName + ". Is it there? [%s]" % e)
        exit(1)
### get Security attached to this vpc with group_name of data.BotoFilterInfo.SecurityGroupName
def getSecurityGroups(vpc, data):
    """Return the security groups of *vpc* whose group-name matches
    BotoFilterInfo.SecurityGroupName (the VPC's 'default' group when the
    config value is empty); exits with status 1 when none match."""
    group_name = data['BotoFilterInfo']['SecurityGroupName'] or "default"
    name_filter = [{"Name": "group-name", "Values": [group_name]}]
    matches = list(vpc.security_groups.filter(Filters=name_filter))
    if not matches:
        print("ERROR: Unable to find any SecurityGroup for BotoFilterInfo.SecurityGroupName: " + group_name)
        exit(1)
    return matches
### getSubnets(vpc,data):
def getSubnets(vpc, data):
    """Return the subnets of *vpc* tagged Network=BotoFilterInfo.SubnetName
    (all subnets of the VPC when the config value is empty); exits with
    status 1 when nothing is found."""
    subnet_name = data['BotoFilterInfo']['SubnetName']
    try:
        if subnet_name:
            name_filter = [{'Name': 'tag:Network', 'Values': [subnet_name]}]
            found = list(vpc.subnets.filter(Filters=name_filter))
        else:
            found = list(vpc.subnets.all())
        if found:
            return found
        print("ERROR: Unable to find any subnets with Name: " + subnet_name)
        exit(1)
    except IndexError as e:
        print("ERROR: Unexpected error: %s" % e)
        exit(1)
"content_hash": "e2e2904794ac1b3921870bc80d2f0944",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 120,
"avg_line_length": 34.79775280898876,
"alnum_prop": 0.6283500161446561,
"repo_name": "thibeault/CloudFormation",
"id": "35ca4dd85c9e4568b9b2ac66693db232b0c403c5",
"size": "3097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ECS/functions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "22377"
}
],
"symlink_target": ""
} |
from .iq_unregister import UnregisterIqProtocolEntity
from .iq_status_set import SetStatusIqProtocolEntity
from .iq_picture_get import GetPictureIqProtocolEntity
from .iq_picture_get_result import ResultGetPictureIqProtocolEntity
from .iq_pictures_list import ListPicturesIqProtocolEntity
from .iq_picture_set import SetPictureIqProtocolEntity
from .iq_privacy_set import SetPrivacyIqProtocolEntity
from .iq_privacy_get import GetPrivacyIqProtocolEntity
from .iq_privacy_result import ResultPrivacyIqProtocolEntity
| {
"content_hash": "47705cf391c0d6b88b1f7aa1e790b027",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 67,
"avg_line_length": 57.22222222222222,
"alnum_prop": 0.8776699029126214,
"repo_name": "ongair/yowsup",
"id": "427ef6b6cff2eabaa38efb15df1f4d656054ab91",
"size": "515",
"binary": false,
"copies": "27",
"ref": "refs/heads/master",
"path": "yowsup/layers/protocol_profiles/protocolentities/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "216657"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Second migration of the 'main' app: alter TaskEstimate.is_na to a
    non-null BooleanField defaulting to False."""

    # Must run after the app's initial schema migration.
    dependencies = [
        ('main', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='taskestimate',
            name='is_na',
            field=models.BooleanField(default=False),
            # Keep the model default after the migration is applied.
            preserve_default=True,
        ),
    ]
| {
"content_hash": "dc90126513c68d4993c0c49d3f744c79",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 53,
"avg_line_length": 21.42105263157895,
"alnum_prop": 0.5847665847665847,
"repo_name": "mfwarren/projector",
"id": "9728cbea50b42dab05f2fec366302a76cf0dadb0",
"size": "431",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projector/main/migrations/0002_auto_20141111_1500.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1212"
},
{
"name": "JavaScript",
"bytes": "22553"
},
{
"name": "Makefile",
"bytes": "5576"
},
{
"name": "Python",
"bytes": "55790"
},
{
"name": "Shell",
"bytes": "5102"
}
],
"symlink_target": ""
} |
from docutils import nodes
from packaging.version import Version
from sphinx import __version__ as sphinx_version
from os import listdir
from os import environ
from os.path import basename
from os.path import dirname
from os.path import join as path_join
from sys import argv, stderr
import configparser
# Use the ReadTheDocs theme when available; otherwise keep the Sphinx
# default theme and print an install hint.
try:
    import sphinx_rtd_theme

    html_theme = "sphinx_rtd_theme"
except ImportError:
    # Narrowed from a bare 'except:' which would also have swallowed
    # KeyboardInterrupt and unrelated errors; the dead 'pass' is removed.
    print('Install the sphinx ReadTheDocs theme for improved html documentation '
          'layout: https://sphinx-rtd-theme.readthedocs.io/',
          file=stderr)
# Treat generator warnings as fatal when sphinx was invoked with -W.
stop_on_error = ('-W' in argv)

project = 'Data Plane Development Kit'

html_logo = '../logo/DPDK_logo_vertical_rev_small.png'
# The permalink setting was renamed in Sphinx 3.5.
if Version(sphinx_version) >= Version('3.5'):
    html_permalinks = False
else:
    html_add_permalinks = ""
html_show_copyright = False
highlight_language = 'none'

# Release/version are taken from the environment ("None" when unset).
release = environ.setdefault('DPDK_VERSION', "None")
version = release

master_doc = 'index'

# Maximum feature description string length
feature_str_len = 30

# Figures, tables and code-blocks automatically numbered if they have caption
numfig = True

# Configuration for man pages: (source doc, man page name, description,
# authors, manual section).
man_pages = [("testpmd_app_ug/run_app", "testpmd",
              "tests for dpdk pmds", "", 1),
             ("tools/pdump", "dpdk-pdump",
              "enable packet capture on dpdk ports", "", 1),
             ("tools/proc_info", "dpdk-procinfo",
              "access dpdk port stats and memory info", "", 1),
             ("tools/pmdinfo", "dpdk-pmdinfo",
              "dump a PMDs hardware support info", "", 1),
             ("tools/devbind", "dpdk-devbind",
              "check device status and bind/unbind them from drivers", "", 8)]
# ####### :numref: fallback ########
# The following hook functions add some simple handling for the :numref:
# directive for Sphinx versions prior to 1.3.1. The functions replace the
# :numref: reference with a link to the target (for all Sphinx doc types).
# It doesn't try to label figures/tables.
def numref_role(reftype, rawtext, text, lineno, inliner):
    """
    Sphinx role standing in for :numref: on old Sphinx versions.

    The doctree is not built yet when the role runs, so the target cannot
    be resolved here; instead the reference is tagged with a distinctive
    refuri prefix that process_numref() later rewrites into a real link.
    """
    placeholder = nodes.reference('',
                                  '',
                                  refuri='_local_numref_#%s' % text,
                                  internal=True)
    # A role returns (nodes, system_messages); no messages are produced.
    return [placeholder], []
def process_numref(app, doctree, from_docname):
    """
    Process the numref nodes once the doctree has been built and prior to
    writing the files. The processing involves replacing the numref with a
    link plus text to indicate if it is a Figure or Table link.
    """
    # Iterate over the reference nodes in the doctree.
    for node in doctree.traverse(nodes.reference):
        target = node.get('refuri', '')
        # Look for numref nodes (tagged by numref_role() with this prefix).
        if target.startswith('_local_numref_#'):
            target = target.replace('_local_numref_#', '')
            # Get the target label and link information from the Sphinx env.
            data = app.builder.env.domains['std'].data
            docname, label, _ = data['labels'].get(target, ('', '', ''))
            relative_url = app.builder.get_relative_uri(from_docname, docname)
            # Add a text label to the link, based on the target's label name.
            if target.startswith('figure'):
                caption = 'Figure'
            elif target.startswith('table'):
                caption = 'Table'
            else:
                caption = 'Link'
            # New reference node with the updated link information;
            # replace_self() swaps it into the doctree in place.
            newnode = nodes.reference('',
                                      caption,
                                      refuri='%s#%s' % (relative_url, label),
                                      internal=True)
            node.replace_self(newnode)
def generate_overview_table(output_filename, table_id, section, table_name, title):
    """
    Generate the Overview Table from the ini files that define the features
    for each driver.

    The default features for the table and their order are defined by the
    'default.ini' file; each driver ini file is validated against it.
    The finished RST table is written to *output_filename*.
    """
    # Default warning string.
    warning = 'Warning generate_overview_table()'

    # Get the default features and order from the 'default.ini' file.
    ini_path = path_join(dirname(output_filename), 'features')
    config = configparser.ConfigParser()
    config.optionxform = str  # keep option names case-sensitive
    config.read(path_join(ini_path, 'default.ini'))
    default_features = config.items(section)

    # Create a dict of the valid features to validate the other ini files.
    # (The unused max_feature_length computation was dropped.)
    valid_features = {}
    for feature_name, _ in default_features:
        valid_features[feature_name] = ' '

    # Get a sorted list of driver ini files, excluding 'default.ini'.
    # (The original shadowed the 'file' builtin in this comprehension.)
    ini_files = sorted(basename(path) for path in listdir(ini_path)
                       if path.endswith('.ini') and path != 'default.ini')

    # Build up a list of the table header names from the ini filenames.
    pmd_names = []
    for ini_filename in ini_files:
        name = ini_filename[:-4]
        name = name.replace('_vf', 'vf')
        pmd_names.append(name)
    if not pmd_names:
        # Add an empty column if table is empty (required by RST syntax)
        pmd_names.append(' ')

    # Pad the table header names to a common length.
    max_header_len = len(max(pmd_names, key=len))
    header_names = []
    for name in pmd_names:
        if '_vec' in name:
            pmd, vec = name.split('_')
            name = '{0:{fill}{align}{width}}vec'.format(pmd,
                   fill='.', align='<', width=max_header_len-3)
        else:
            name = '{0:{fill}{align}{width}}'.format(name,
                   fill=' ', align='<', width=max_header_len)
        header_names.append(name)

    # Create a dict of the defined features for each driver from the ini files.
    ini_data = {}
    for ini_filename in ini_files:
        config = configparser.ConfigParser()
        config.optionxform = str
        config.read(path_join(ini_path, ini_filename))

        # Initialize the dict with the default.ini values.
        ini_data[ini_filename] = valid_features.copy()

        # Check for a section.
        if not config.has_section(section):
            continue

        # Check for valid feature names.
        for name, value in config.items(section):
            if name not in valid_features:
                print("{}: Unknown feature '{}' in '{}'".format(warning,
                                                                name,
                                                                ini_filename),
                      file=stderr)
                if stop_on_error:
                    raise Exception('Warning is treated as a failure')
                continue
            if value:
                # Get the first letter only.
                ini_data[ini_filename][name] = value[0]

    # Print out the RST Driver Overview table from the ini file data.
    # A context manager guarantees the file is closed; the original opened
    # it with open() and never closed it (resource leak).
    with open(output_filename, 'w') as outfile:
        num_cols = len(header_names)
        print_table_css(outfile, table_id)
        print('.. table:: ' + table_name + '\n', file=outfile)
        print_table_header(outfile, num_cols, header_names, title)
        print_table_body(outfile, num_cols, ini_files, ini_data, default_features)
def print_table_header(outfile, num_cols, header_names, title):
    """Print the RST table header. The driver names are rendered
    vertically, one character per row; only the first row carries the
    column title."""
    print_table_divider(outfile, num_cols)

    for row_idx in range(len(header_names[0])):
        cells = ''.join(' ' + name[row_idx] for name in header_names)
        row_title = title if row_idx == 0 else ''
        print_table_row(outfile, row_title, cells)

    print_table_divider(outfile, num_cols)
def print_table_body(outfile, num_cols, ini_files, ini_data, default_features):
    """Print the table body: one row per default feature, one column per
    driver ini file, followed by the closing divider."""
    for feature, _ in default_features:
        cells = ''.join(' ' + ini_data[ini_name][feature]
                        for ini_name in ini_files)
        print_table_row(outfile, feature, cells)
    print_table_divider(outfile, num_cols)
def print_table_row(outfile, feature, line):
    """Write one table row: the feature name left-aligned in a fixed-width
    column (feature_str_len), followed by the right-trimmed cell text."""
    cells = line.rstrip()
    print('   {:<{}}{}'.format(feature, feature_str_len, cells), file=outfile)
def print_table_divider(outfile, num_cols):
    """Print the RST table divider line: an '='-filled first column, then
    one single '=' cell per driver column."""
    divider_cells = ' ' + ' '.join('=' for _ in range(num_cols))
    print_table_row(outfile, '=' * feature_str_len, divider_cells)
def print_table_css(outfile, table_id):
    """Write an RST raw-HTML block of CSS that styles overview table
    *table_id*: vertical wrapped headers, striped rows, and full-column
    hover highlighting. The 'idx' placeholder is replaced by 'id<N>'.

    NOTE(review): the template's internal indentation was reconstructed
    (the raw directive content must be indented under '.. raw:: html');
    confirm against the original file.
    """
    template = """
.. raw:: html

   <style>
      .wy-nav-content {
         opacity: .99;
      }
      table#idx {
         cursor: default;
         overflow: hidden;
      }
      table#idx p {
         margin: 0;
         line-height: inherit;
      }
      table#idx th, table#idx td {
         text-align: center;
         border: solid 1px #ddd;
      }
      table#idx th {
         padding: 0.5em 0;
      }
      table#idx th, table#idx th p {
         font-size: 11px;
         white-space: pre-wrap;
         vertical-align: top;
         min-width: 0.9em;
      }
      table#idx col:first-child {
         width: 0;
      }
      table#idx th:first-child {
         vertical-align: bottom;
      }
      table#idx td {
         padding: 1px;
      }
      table#idx td, table#idx td p {
         font-size: 11px;
      }
      table#idx td:first-child {
         padding-left: 1em;
         text-align: left;
      }
      table#idx tr:nth-child(2n-1) td {
         background-color: rgba(210, 210, 210, 0.2);
      }
      table#idx th:not(:first-child):hover,
      table#idx td:not(:first-child):hover {
         position: relative;
      }
      table#idx th:not(:first-child):hover::after,
      table#idx td:not(:first-child):hover::after {
         content: '';
         height: 6000px;
         top: -3000px;
         width: 100%;
         left: 0;
         position: absolute;
         z-index: -1;
         background-color: #ffb;
      }
      table#idx tr:hover td {
         background-color: #ffb;
      }
   </style>
"""
    print(template.replace("idx", "id%d" % (table_id)), file=outfile)
def setup(app):
    """Sphinx setup hook.

    Generates all driver overview tables from their feature ini files,
    registers the :numref: fallback on Sphinx versions older than 1.3.1,
    and registers the custom CSS file.
    """
    doc_dir = dirname(__file__)

    # One entry per generated table:
    # (relative output path, table id, ini section, table caption, first-column title).
    # Replaces 21 copy-pasted generate_overview_table() calls.
    table_specs = [
        ('nics/overview_table.txt', 1,
         'Features', 'Features availability in networking drivers', 'Feature'),
        ('nics/rte_flow_items_table.txt', 2,
         'rte_flow items', 'rte_flow items availability in networking drivers', 'Item'),
        ('nics/rte_flow_actions_table.txt', 3,
         'rte_flow actions', 'rte_flow actions availability in networking drivers', 'Action'),
        ('cryptodevs/overview_feature_table.txt', 1,
         'Features', 'Features availability in crypto drivers', 'Feature'),
        ('cryptodevs/overview_cipher_table.txt', 2,
         'Cipher', 'Cipher algorithms in crypto drivers', 'Cipher algorithm'),
        ('cryptodevs/overview_auth_table.txt', 3,
         'Auth', 'Authentication algorithms in crypto drivers', 'Authentication algorithm'),
        ('cryptodevs/overview_aead_table.txt', 4,
         'AEAD', 'AEAD algorithms in crypto drivers', 'AEAD algorithm'),
        ('cryptodevs/overview_asym_table.txt', 5,
         'Asymmetric', 'Asymmetric algorithms in crypto drivers', 'Asymmetric algorithm'),
        ('cryptodevs/overview_os_table.txt', 6,
         'OS', 'Operating systems support for crypto drivers', 'Operating system'),
        ('compressdevs/overview_feature_table.txt', 1,
         'Features', 'Features availability in compression drivers', 'Feature'),
        ('regexdevs/overview_feature_table.txt', 1,
         'Features', 'Features availability in regex drivers', 'Feature'),
        ('vdpadevs/overview_feature_table.txt', 1,
         'Features', 'Features availability in vDPA drivers', 'Feature'),
        ('bbdevs/overview_feature_table.txt', 1,
         'Features', 'Features availability in bbdev drivers', 'Feature'),
        ('gpus/overview_feature_table.txt', 1,
         'Features', 'Features availability in GPU drivers', 'Feature'),
        ('eventdevs/overview_feature_table.txt', 1,
         'Scheduling Features', 'Features availability in eventdev drivers', 'Feature'),
        ('eventdevs/overview_rx_adptr_feature_table.txt', 2,
         'Eth Rx adapter Features', 'Features availability for Ethdev Rx adapters', 'Feature'),
        ('eventdevs/overview_tx_adptr_feature_table.txt', 3,
         'Eth Tx adapter Features', 'Features availability for Ethdev Tx adapters', 'Feature'),
        ('eventdevs/overview_crypto_adptr_feature_table.txt', 4,
         'Crypto adapter Features', 'Features availability for Crypto adapters', 'Feature'),
        ('eventdevs/overview_timer_adptr_feature_table.txt', 5,
         'Timer adapter Features', 'Features availability for Timer adapters', 'Feature'),
    ]
    for rel_path, table_id, section, table_name, title in table_specs:
        generate_overview_table(path_join(doc_dir, rel_path), table_id,
                                section, table_name, title)

    if Version(sphinx_version) < Version('1.3.1'):
        print('Upgrade sphinx to version >= 1.3.1 for '
              'improved Figure/Table number handling.',
              file=stderr)
        # Add a role to handle :numref: references.
        app.add_role('numref', numref_role)
        # Process the numref references once the doctree has been created.
        app.connect('doctree-resolved', process_numref)

    try:
        # add_css_file() is new in Sphinx 1.8; older versions only have
        # add_stylesheet(). Narrowed from a bare 'except:' so unrelated
        # errors are no longer swallowed.
        app.add_css_file('css/custom.css')
    except AttributeError:
        app.add_stylesheet('css/custom.css')
| {
"content_hash": "6cbddae45be7db7e484cacab7b8058fd",
"timestamp": "",
"source": "github",
"line_count": 439,
"max_line_length": 89,
"avg_line_length": 37.533029612756266,
"alnum_prop": 0.5554409176427748,
"repo_name": "john-mcnamara-intel/dpdk",
"id": "a55ce38800d59133a30ff09a5891f48165fc5368",
"size": "16584",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "doc/guides/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "1623"
},
{
"name": "C",
"bytes": "39269990"
},
{
"name": "C++",
"bytes": "860345"
},
{
"name": "Makefile",
"bytes": "342834"
},
{
"name": "Meson",
"bytes": "144875"
},
{
"name": "Objective-C",
"bytes": "224248"
},
{
"name": "Python",
"bytes": "115929"
},
{
"name": "Shell",
"bytes": "77250"
},
{
"name": "SmPL",
"bytes": "2074"
}
],
"symlink_target": ""
} |
""" Module for easy KMC from python. """
# Copyright (c) 2013 Mikael Leetmaa
#
# This file is part of the KMCLib project distributed under the terms of the
# GNU General Public License version 3, see <http://www.gnu.org/licenses/>.
#
from CoreComponents.KMCLocalConfiguration import KMCLocalConfiguration
from CoreComponents.KMCInteractions import KMCInteractions
from CoreComponents.KMCProcess import KMCProcess
from CoreComponents.KMCConfiguration import KMCConfiguration
from CoreComponents.KMCLattice import KMCLattice
from CoreComponents.KMCLatticeModel import KMCLatticeModel
from CoreComponents.KMCUnitCell import KMCUnitCell
from CoreComponents.KMCControlParameters import KMCControlParameters
from Analysis.OnTheFlyMSD import OnTheFlyMSD
from Analysis.TimeStepDistribution import TimeStepDistribution
from Utilities.SaveAndReadUtilities import KMCInteractionsFromScript
from Utilities.SaveAndReadUtilities import KMCConfigurationFromScript
from PluginInterfaces.KMCRateCalculatorPlugin import KMCRateCalculatorPlugin
from PluginInterfaces.KMCAnalysisPlugin import KMCAnalysisPlugin
from Backend.Backend import MPICommons
from KMCLib.Utilities.Trajectory.XYZTrajectory import XYZTrajectory
from KMCLib.Utilities.Trajectory.Trajectory import Trajectory
from KMCLib.Utilities.Trajectory.CFGTrajectory import CFGTrajectory
__all__ = ['KMCLocalConfiguration', 'KMCInteractions', 'KMCConfiguration',
'KMCLattice', 'KMCLatticeModel', 'KMCUnitCell',
'KMCControlParameters', 'KMCInteractionsFromScript',
'KMCConfigurationFromScript', 'KMCRateCalculatorPlugin',
'KMCAnalysisPlugin', 'KMCProcess', 'OnTheFlyMSD',
'TimeStepDistribution', 'MPICommons', 'XYZTrajectory', 'Trajectory', 'CFGTrajectory']
# Trick to initialize and finalize MPI only once.
MPICommons.init()

# Print the header when the module is loaded.
# The original called Utilities.PrintUtilities.printHeader() although only
# names *from* the Utilities package were imported, so 'Utilities' was not
# bound and the call raised NameError at import time. Import the submodule
# explicitly instead. (Module path inferred from the original dotted call —
# confirm Utilities.PrintUtilities exposes printHeader.)
from Utilities.PrintUtilities import printHeader
printHeader()

# Make sure to finalize MPI on exit.
def killme():
    """atexit hook: finalize MPI exactly once at interpreter shutdown."""
    MPICommons.finalize()

import atexit
atexit.register(killme)
| {
"content_hash": "0c2bca3f0d7eacf70afc586f010dbbf6",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 96,
"avg_line_length": 39.13461538461539,
"alnum_prop": 0.8176904176904177,
"repo_name": "txd283/FeCu-Irradiation-KMCLib",
"id": "5b69772f3a689307b822bab3c067d9c4f7fedd4a",
"size": "2035",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "KMCLib/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5690"
},
{
"name": "Python",
"bytes": "280662"
}
],
"symlink_target": ""
} |
'''
Created by auto_sdk on 2014-12-17 17:22:51
'''
from top.api.base import RestApi
class ItemImgDeleteRequest(RestApi):
    """Taobao TOP request wrapper for the 'taobao.item.img.delete' API.

    The caller fills in the request parameters below before sending the
    request through the RestApi machinery.
    """

    def __init__(self, domain='gw.api.taobao.com', port=80):
        RestApi.__init__(self, domain, port)
        # Request parameters (set by the caller):
        self.id = None       # image id to delete
        self.num_iid = None  # numeric item id the image belongs to

    def getapiname(self):
        """Return the TOP API method name for this request."""
        return 'taobao.item.img.delete'
| {
"content_hash": "447800435c87a9c1cefd9ba73c07f4f4",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 55,
"avg_line_length": 27,
"alnum_prop": 0.6759259259259259,
"repo_name": "CooperLuan/devops.notes",
"id": "4d9ca40f51c306dca71398056742802a01352b98",
"size": "324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taobao/top/api/rest/ItemImgDeleteRequest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1505"
},
{
"name": "JavaScript",
"bytes": "29"
},
{
"name": "Python",
"bytes": "211546"
},
{
"name": "Shell",
"bytes": "150"
}
],
"symlink_target": ""
} |
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.behaviors import ButtonBehavior
from kivy.properties import ObjectProperty
from yahtr.ui.hex_widget import HexWidget
from yahtr.game import game_instance
from yahtr.data.actions import ActionType
from yahtr.core.hex_lib import Layout
from yahtr.utils import Color
class UnitActionTile(ButtonBehavior, HexWidget):
    """A clickable hex tile representing one selectable unit action."""

    action_key = ObjectProperty(None)
    action_name = ObjectProperty(None)

    # Tile background color per action category.
    colors = {
        ActionType.move: Color.action_move_rotate,
        ActionType.undo_move: Color.action_move_rotate,
        ActionType.rotate: Color.action_move_rotate,
        ActionType.weapon: Color.action_weapon,
        ActionType.skill: Color.action_skill,
        ActionType.end_turn: Color.action_endturn,
    }

    def __init__(self, index, action_type, text, rk_skill=None, **kwargs):
        super(UnitActionTile, self).__init__(color=self.colors[action_type], **kwargs)
        # action_index must be assigned before unselect(), which renders it.
        self.action_index = index
        self.action_type = action_type
        self.rk_skill = rk_skill
        self.action_name.text = text
        self.unselect()

    def select(self):
        """Highlight this tile as the currently active action."""
        self.action_key.text = ''
        self.action_name.color = [1, 1, 1, 1]

    def unselect(self):
        """Return the tile to its dimmed state, showing its hotkey number."""
        self.action_name.color = [0.3, 0.3, 0.3, 1]
        self.action_key.text = f'[b]{self.action_index}[/b]'
class ActionsBar(RelativeLayout):
    """Hex-tile toolbar listing the actions available to the active unit.

    ``__Layouts__[k]`` gives the (q, r) hex offsets used when there are
    ``k`` regular actions; the last entry of each layout is reserved for
    the mandatory End Turn tile.
    """
    __Layouts__ = [[(0, -1)],
                   [(-1, 0), (0, -1)],
                   [(-1, 0), (0, 0), (0, -1)],
                   [(-1, 1), (0, 0), (-1, 0), (0, -1)],
                   [(-1, 1), (0, 0), (-1, 0), (0, -1), (-1, -1)],
                   [(-2, 1), (-1, 1), (0, 0), (-2, 0), (-1, 0), (0, -1)],
                   [(-2, 1), (-1, 1), (0, 0), (-2, 0), (-1, 0), (0, -1), (-1, -1)]]

    def __init__(self, **kwargs):
        super(ActionsBar, self).__init__(**kwargs)
        # Flat-top hex layout anchored at this widget's position.
        self.hex_layout = Layout(origin=self.pos, size=40, flat=True, margin=1)
        self.last_hovered_child = None
        self.selected_button = None

    def create(self):
        """Subscribe to battle events; call once the battle exists."""
        # game_instance.battle.on_new_turn += lambda unit: self.on_new_action(unit, None, unit.actions_tree)
        game_instance.battle.on_new_actions += self.on_new_actions

    def create_action_widget(self, q, r, index, action_type, text, rk_skill=None):
        """Create one UnitActionTile at hex (q, r) and add it as a child."""
        new_widget = UnitActionTile(index, action_type, text, rk_skill, q=q, r=r, layout=self.hex_layout)
        self.add_widget(new_widget)

    def on_new_actions(self, unit, action_node):
        """Rebuild the bar for ``unit``'s currently available actions."""
        self.clear_widgets()
        if unit.ai_controlled:
            # AI units are not player-driven; show no action bar.
            return
        widget_data = []
        index = 1
        for a in action_node:
            if a.data in [ActionType.weapon, ActionType.skill]:
                # One tile per ranked skill of this category.
                for rk_skill in unit.get_skills(a.data):
                    widget_data.append((index, a.data, str(rk_skill.skill.name), rk_skill))
                    index += 1
            elif a.data != ActionType.end_turn:
                widget_data.append((index, a.data, str(a.data), None))
                index += 1
        count = len(widget_data)  # not including the mandatory End Turn!
        assert count < len(ActionsBar.__Layouts__)
        for i, (index, action_type, text, rk_skill) in enumerate(widget_data):
            q, r = ActionsBar.__Layouts__[count][i]
            self.create_action_widget(q, r, index, action_type, text, rk_skill)
        # End Turn always occupies the final slot of the chosen layout
        # and is bound to hotkey 0.
        q, r = ActionsBar.__Layouts__[count][count]
        self.create_action_widget(q, r, 0, ActionType.end_turn, str(ActionType.end_turn))

    def _on_action_selected(self, index=None, button=None):
        """Select an action tile, by widget (mouse) or by index (keyboard)."""
        if not button:
            for child in self.children:
                if child.action_index == index:
                    button = child
                    break
        if button:
            if button is self.selected_button:
                # Repeat press on the active tile: only undo-move and
                # end-turn act on a second activation.
                if button.action_type in [ActionType.undo_move, ActionType.end_turn]:
                    game_instance.battle.notify_action_end(button.action_type)
            else:
                if self.selected_button:
                    self.selected_button.unselect()
                self.selected_button = button
                self.selected_button.select()
                game_instance.battle.notify_action_change(button.action_type, button.rk_skill)

    def on_key_pressed(self, code, key):
        # Number keys map directly to tile indices (0 = End Turn).
        self._on_action_selected(index=int(key))

    def on_touch_up(self, touch):
        local_pos = self.to_local(*touch.pos)
        hover_hex = self.hex_layout.pixel_to_hex(local_pos)
        for child in self.children:
            if child.hex_coords == hover_hex:
                self._on_action_selected(button=child)
                return True
        # NOTE(review): this delegates to super().on_touch_down from an
        # on_touch_up handler — looks like a typo for on_touch_up; confirm.
        return super(ActionsBar, self).on_touch_down(touch)

    def on_mouse_pos(self, __, pos):
        """Track hover: show the selector on the tile under the cursor."""
        local_pos = self.to_local(*pos)
        hover_hex = self.hex_layout.pixel_to_hex(local_pos)
        for child in self.children:
            if child.hex_coords == hover_hex:
                if self.last_hovered_child != child:
                    if self.last_hovered_child:
                        self.last_hovered_child.selector.hide()
                    child.selector.show()
                    self.last_hovered_child = child
                return True
        self.on_no_mouse_pos()
        return False

    def on_no_mouse_pos(self):
        """Clear any hover highlight when the cursor leaves the bar."""
        if self.last_hovered_child:
            self.last_hovered_child.selector.hide()
            self.last_hovered_child = None
| {
"content_hash": "8b54262e14b2d3c19175ed0705c61a08",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 108,
"avg_line_length": 39.07857142857143,
"alnum_prop": 0.5700968744288064,
"repo_name": "fp12/yahtr",
"id": "16f2125e29eedce2f85196baacfb46e4f58b9b12",
"size": "5471",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yahtr/ui/actions_bar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "794"
},
{
"name": "GLSL",
"bytes": "1120"
},
{
"name": "Python",
"bytes": "169305"
},
{
"name": "Shell",
"bytes": "91"
}
],
"symlink_target": ""
} |
import os
class Path:
    """A path template with named, optionally-defaulted arguments.

    Instances behave as immutable value objects: equality and hashing
    are defined over ``(path, arg_names, defaults)``.
    """

    def __init__(self, path, arg_names=None, defaults=None):
        self._path = path
        self._arg_names = () if arg_names is None else tuple(arg_names)
        self._defaults = () if defaults is None else tuple(defaults)

    @property
    def path(self):
        """The raw, uninterpolated path template."""
        return self._path

    @property
    def arg_names(self):
        """Tuple of argument names, in template order."""
        return self._arg_names

    @property
    def defaults(self):
        """Tuple of per-argument defaults; ``None`` marks a required arg."""
        return self._defaults

    def __eq__(self, other):
        return ((self.path, self.arg_names, self.defaults) ==
                (other.path, other.arg_names, other.defaults))

    def __hash__(self):
        return hash((self._path,) + self._arg_names + self._defaults)

    def resolve(self, *args, **kwargs):
        """
        Resolve the path for use including variable interpolation.

        Arguments are matched against ``arg_names`` left to right: a
        keyword argument wins, then the next positional argument, then
        the argument's default. When fewer arguments are supplied than
        there are names, every name prefixed with an underscore is
        forced to its default. A missing value whose default is ``None``
        or any leftover argument raises ``TypeError`` (just like calling
        a function with a bad argument list would).

        :param args: positional arguments for interpolation
        :param kwargs: keyword-based arguments for interpolation
        :return: a normalized path string
        """
        names = self.arg_names
        if not names:
            # No interpolation needed; return the template untouched.
            return self._path

        use_default = set()
        if len(args) + len(kwargs) < len(names):
            # Under-supplied: underscore-prefixed names fall back to
            # their defaults. A name may be given as [name, ...].
            flat_names = (n[0] if isinstance(n, list) else n for n in names)
            use_default = {n for n in flat_names if n.startswith('_')}

        remaining = list(args)
        resolved = []
        for name, default in zip(names, self.defaults):
            if name not in use_default and name in kwargs:
                resolved.append(kwargs.pop(name))
            elif name not in use_default and remaining:
                resolved.append(remaining.pop(0))
            elif default is None:
                expected = ", ".join(names)
                raise TypeError("Expected args: {}".format(expected))
            else:
                resolved.append(default)

        if remaining or kwargs:
            expected = ", ".join(names)
            raise TypeError("Too many args. Expected: {}".format(expected))
        return os.path.normpath(self._path.format(*resolved))
| {
"content_hash": "7580b93e82257637048b9bddf692b1f3",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 34.35443037974684,
"alnum_prop": 0.5832719233603537,
"repo_name": "jbn/pathsjson",
"id": "693efe2e0af491d4cba63427f2d0702e335ac0e1",
"size": "2714",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pathsjson/path.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35126"
},
{
"name": "Shell",
"bytes": "3066"
}
],
"symlink_target": ""
} |
'''
Many people initially have difficulty formating their Python code so it
looks good in posts to the Pythonisa Forum. http://omz-forums.appspot.com
This script attempts to properly format your code with appropriate carrage
returns and backticks so that it is ready to post to the forum.
'''
import clipboard, console, ui
fmt = '\n\n```{}\n{}\n```\n' # .format(language, post_text)
def text_is_python(in_text):
    """Heuristically decide whether *in_text* looks like Python source.

    Returns True if any line (after stripping trailing comments) is an
    ``import`` or ``from ... import`` statement, or opens a ``class`` /
    ``def`` block; False otherwise.
    """
    for line in in_text.splitlines():
        # Drop any trailing comment; substitute '.' so split()[0]
        # below never raises IndexError on a blank line.
        line = line.partition('#')[0].strip() or '.'
        words = line.split()
        first_word = words[0]
        if first_word == 'import':
            return True
        # Also recognize "from X import Y" (missed by a bare-'import' check).
        if first_word == 'from' and 'import' in words[1:]:
            return True
        if first_word in ('class', 'def') and line.endswith('):'):
            return True
    return False
class ThreeBackticksView(ui.View):
    """Pythonista UI view that wraps clipboard text in forum code fences."""

    def __init__(self):
        # present() runs first so self.width/self.bounds are valid when
        # the subviews below compute their geometry.
        self.present()
        self.add_subview(self.make_button())
        self.add_subview(self.make_label())
        self.add_subview(self.make_switch())
        self.add_subview(self.make_text_view())
        # Prime the "contains Python" switch from the initial clipboard text.
        self.textview_did_change(self['text_view'])

    def make_button(self):
        """Build the 'Format the post' button wired to do_it()."""
        button = ui.Button(title='Format the post')
        button.action = lambda sender: self.do_it()
        button.center = self.width * .75, 50
        button.tint_color = 'steelblue'
        return button

    def make_label(self):
        """Build the static label next to the Python-code switch."""
        label = ui.Label(name='label')
        label.width = 234
        label.text = 'Post contains Python code?'
        label.text_color = 'steelblue'
        label.center = self.width * .20, 50
        return label

    def make_switch(self):
        """Build the switch indicating whether the post contains Python."""
        switch = ui.Switch(name='python_code',
                           title='Post contains Python code?')
        switch.center = self.width * .30, 50
        # Align the switch to the right edge of the label.
        switch.x = self['label'].x + self['label'].width
        return switch

    def make_text_view(self):
        """Build the editable text view, seeded from the clipboard."""
        text_view = ui.TextView(frame=self.bounds, name='text_view')
        text_view.y += 100
        text_view.height -= 100
        text_view.delegate = self
        text_view.text = clipboard.get()
        # Disable auto-correction features that would mangle source code.
        text_view.autocapitalization_type = ui.AUTOCAPITALIZE_NONE
        text_view.autocorrection_type = False
        text_view.spellchecking_type = False
        return text_view

    def textview_did_change(self, textview):
        # ui.TextView delegate hook: re-detect Python whenever text changes.
        self['python_code'].value = text_is_python(textview.text)

    def do_it(self):
        """Wrap the current text in backticks and copy it to the clipboard."""
        text = self['text_view'].text.rstrip()
        if text:
            lang = 'python' if self['python_code'].value else ''
            text = fmt.format(lang, text)
            clipboard.set(text)
            self['text_view'].text = text
            console.hud_alert('The post is now on your clipboard.')
            print(text)
        else:
            print('No user text.')
ThreeBackticksView()
| {
"content_hash": "ef5c07e70b925df25f2bad33f5d4180b",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 74,
"avg_line_length": 34.607594936708864,
"alnum_prop": 0.5994879297732261,
"repo_name": "cclauss/Pythonista_ui",
"id": "6dba46318eb5320f9ce4314eac894aadc0951ea3",
"size": "2734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ThreeBackticks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "47876"
}
],
"symlink_target": ""
} |
import os
# Default host for the Personal Data Store service.
pdsDefaultLocation = "pds.linkedpersonaldata.org"

# Filesystem locations: server upload area and this project's root dir.
SERVER_UPLOAD_DIR = '/var/www/trustframework/'
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))

DEBUG = False
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        # supported db backends are 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'
        #'ENGINE': 'django.db.backends.mysql',
        'ENGINE': 'django.db.backends.sqlite3',
        # NOTE(review): absolute sqlite path is deployment-specific.
        'NAME': '/var/www/trustframework/registryEnv/OMS-RegistryServer/test.db',
        #'NAME': 'test.db',
        'USER': 'default',
        'PASSWORD': 'default',
        'HOST': '',
        'PORT': '',
        # 'OPTIONS': {
        #     'read_default_file': '/etc/mysql/my.cnf',
        # },
    }
}

# where can we find db fixtures?
FIXTURE_DIRS = (
    os.path.join(PROJECT_DIR, 'apps/account/fixtures'),
)
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'

LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# when auth is required, django will redirect here
LOGIN_URL = "/Shibboleth.sso/Login"

# after a successful login, django will redirect here
LOGIN_REDIRECT_URL = "/"

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_DIR, 'static_collection')

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

ADMIN_MEDIA_PREFIX = '/static/admin/'

# Additional locations of static files
# Don't forget to use absolute paths, not relative paths, and use forward slashes
STATICFILES_DIRS = (os.path.join(PROJECT_DIR, 'static'),)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

# Make this unique, and don't share it with anybody.
# XXX - we'll need to figure out a sensible way to regenerate this on deployment
# SECURITY NOTE(review): secret key is hard-coded in source; move it to an
# environment variable or secrets store before production use.
SECRET_KEY = 'shfkjs894fFerER#5h346&25hjkfbc2=23_6817A1lh[dfjg3=_-89j'
# Authentication: standard model backend plus Shibboleth remote-user SSO.
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    # 'django.contrib.auth.backends.RemoteUserBackend',
    'shibboleth.backends.ShibbolethRemoteUserBackend',
)

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.request',
    'django.contrib.auth.context_processors.auth',
    "allauth.account.context_processors.account",
    "allauth.socialaccount.context_processors.socialaccount",
)

# where to look for templates
# Don't forget to use absolute paths, not relative paths, and use forward slashes
TEMPLATE_DIRS = (
    os.path.join(PROJECT_DIR, 'templates'),
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    #'django.middleware.csrf.CsrfViewMiddleware', #Currently CSRF forgery protection is turned off
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'shibboleth.middleware.ShibbolethRemoteUserMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

# Python dotted path to the WSGI application used by Django's runserver.
#WSGI_APPLICATION = 'registryServer.wsgi.application'

ROOT_URLCONF = 'urls'

# Define user profile associated with a User
AUTH_PROFILE_MODULE = 'account.Profile'

# Maps incoming Shibboleth HTTP headers to User attributes.
# Format: header name -> (required, user_attribute).
SHIBBOLETH_ATTRIBUTE_MAP = {
    "HTTP_REMOTE_USER": (True, "username"),
    # "Shibboleth-givenName": (False, "first_name"),
    # "Shibboleth-sn": (False, "last_name"),
    "HTTP_MAIL": (True, "email"),
}
INSTALLED_APPS = (
    # Django contrib apps
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.sites',
    # Project apps
    'apps.base',
    'apps.client',
    'apps.account',
    'apps.oauth2',
    'apps.questions',
    # Third-party apps
    'uni_form',
    'oauth2app',
    'django_extensions',
    'lib',
    # 'shibboleth',
)
# Module-level logging setup: attach a DEBUG-level stderr handler to the
# root logger. NOTE(review): configuring the root logger at settings
# import time is unusual for Django (the LOGGING dict is the convention)
# and can add duplicate handlers if settings are imported more than once.
import logging
import sys

logger = logging.getLogger('')
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

# 'regisryServer.apps.oauth2',
# XXX - look up to confirm this is correct
# TTL for an OAUTH2 access token, in seconds (presumably)
OAUTH2_ACCESS_TOKEN_EXPIRATION = 36000000
| {
"content_hash": "63bcf84ec734243fb949c2e178e1eaeb",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 98,
"avg_line_length": 31.73404255319149,
"alnum_prop": 0.7184042909822327,
"repo_name": "HumanDynamics/openPDS-RegistryServer",
"id": "d8db5305f0a9fefb880e5661282e3a14f084c02c",
"size": "6018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "registryServer/settings.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47821"
},
{
"name": "JavaScript",
"bytes": "200605"
},
{
"name": "Python",
"bytes": "85615"
},
{
"name": "Shell",
"bytes": "1107"
}
],
"symlink_target": ""
} |
"""
viewaction
"""
from __future__ import absolute_import, division, print_function
import logging
from PySide import QtGui, QtCore
from PySide.QtCore import Qt
from mceditlib.util.lazyprop import weakrefprop
from mcedit2.util.settings import Settings
log = logging.getLogger(__name__)
class ViewAction(QtCore.QObject):
    """Base class for actions bound to a key press and/or mouse button.

    Subclasses set the class-level binding attributes and override the
    event handlers; persisted bindings are loaded from ``Settings`` under
    ``keybindings/<settingsKey>`` when a settingsKey is provided.
    """
    button = Qt.NoButton
    modifiers = Qt.NoModifier
    key = 0
    labelText = "Unknown Action"
    hidden = False  # Hide from configuration
    settingsKey = NotImplemented  # subclasses set a string, or None to disable persistence
    acceptsMouseWheel = False

    # Pseudo-button masks for wheel bindings (outside Qt's MouseButton values).
    WHEEL_UP = 0x100
    WHEEL_DOWN = 0x200

    _buttonNames = None  # lazily-built [(mask, label)] list shared by all instances

    def __init__(self):
        """
        An action that can be bound to a keypress or mouse button click, drag, or movement with the bound key or button held.
        """
        super(ViewAction, self).__init__()
        if self.settingsKey is not None:
            settings = Settings()
            prefix = "keybindings/"
            try:
                modifiers = int(settings.value(prefix + self.settingsKey + "/modifiers", self.modifiers))
                button = int(settings.value(prefix + self.settingsKey + "/button", self.button))
                key = int(settings.value(prefix + self.settingsKey + "/key", self.key))
            except Exception as e:
                log.error("Error while reading key binding:")
            else:
                # Only overwrite the class defaults if all three reads succeeded.
                self.modifiers = modifiers
                self.button = button
                self.key = key

    def __repr__(self):
        return "%s(button=%s, key=%s, modifiers=%s)" % (self.__class__.__name__, self.button, self.key, self.modifiers)

    def setBinding(self, button, key, modifiers):
        """Update the in-memory binding and persist it when settingsKey is set."""
        self.button = button
        self.key = key
        self.modifiers = modifiers
        if self.settingsKey is not None:
            settings = Settings()
            prefix = "keybindings/"
            settings.setValue(prefix + self.settingsKey + "/button", self.button)
            settings.setValue(prefix + self.settingsKey + "/key", self.key)
            settings.setValue(prefix + self.settingsKey + "/modifiers", int(self.modifiers))

    def matchKeyEvent(self, event):
        """Return True when this action's key+modifier binding matches *event*."""
        key = event.key()
        modifiers = event.modifiers()
        if key in (Qt.Key_Shift, Qt.Key_Control, Qt.Key_Alt, Qt.Key_Meta):
            modifiers = self.modifiers  # pressing modifier key by itself has modifiers set, but releasing modifiers does not
        return self.key == key and (self.modifiers & modifiers or self.modifiers == modifiers)

    def mouseMoveEvent(self, event):
        """
        Called when the mouse moves while the bound keys or buttons are pressed.

        :type event: QtGui.QMouseEvent
        """

    def mousePressEvent(self, event):
        """
        Called when the bound mouse button is pressed. By default, calls buttonPressEvent.

        :type event: QtGui.QMouseEvent
        """
        self.buttonPressEvent(event)

    def mouseReleaseEvent(self, event):
        """
        Called when the bound mouse button is released. By default, calls buttonReleaseEvent

        :type event: QtGui.QMouseEvent
        """
        self.buttonReleaseEvent(event)

    def keyPressEvent(self, event):
        """
        Called when the bound key is pressed. By default, calls buttonPressEvent.

        :type event: QtGui.QKeyEvent
        """
        self.buttonPressEvent(event)

    def keyReleaseEvent(self, event):
        """
        Called when the bound key is released. By default, calls buttonReleaseEvent

        :type event: QtGui.QKeyEvent
        """
        self.buttonReleaseEvent(event)

    def buttonPressEvent(self, event):
        """
        Called by mousePressEvent and keyPressEvent.

        Implement this to handle button-press events if it doesn't matter whether the action is bound to a key or
        mouse button.

        :type event: QtGui.QEvent
        """

    def buttonReleaseEvent(self, event):
        """
        Called by mouseReleaseEvent and keyReleaseEvent.

        Implement this to handle button-release events if it doesn't matter whether the action is bound to a key or
        mouse button.

        :type event: QtGui.QEvent
        """

    def buttonName(self, buttons):
        """Return a human-readable "+"-joined name for a button mask."""
        if ViewAction._buttonNames is None:
            ViewAction._buttonNames = [
                (Qt.LeftButton, self.tr("Left Button")),
                (Qt.RightButton, self.tr("Right Button")),
                (Qt.MiddleButton, self.tr("Middle Button")),
                (ViewAction.WHEEL_UP, self.tr("Mousewheel Up")),
                (ViewAction.WHEEL_DOWN, self.tr("Mousewheel Down")),
            ]
        parts = [name for mask, name in self._buttonNames if buttons & mask]
        return "+".join(parts)

    def describeKeys(self):
        """Return a display string for the bound key/modifier/button combo."""
        modifierKeyNames = {
            Qt.Key_Shift: self.tr("Shift"),
            Qt.Key_Control: self.tr("Control"),
            Qt.Key_Alt: self.tr("Alt"),
            Qt.Key_Meta: self.tr("Meta"),
        }
        s = modifierKeyNames.get(self.key)  # QKeySequence returns weird strings when only a modifier is pressed
        if s is None:
            try:
                s = QtGui.QKeySequence(self.key | self.modifiers).toString()
            except TypeError:
                log.error("KEY: %r MOD: %r", self.key, self.modifiers)
                raise
            if self.key == 0:
                # NOTE(review): appears to trim a trailing artifact from
                # QKeySequence's string when no key is bound — confirm.
                s = s[:-2]
        if self.button != Qt.NoButton:
            if len(s):
                s += "+"
            s += self.buttonName(self.button)
        return s
class UseToolMouseAction(ViewAction):
    """Left-button action that forwards mouse events to the editor session's
    current tool. Hidden from configuration; its binding must not change."""
    button = Qt.LeftButton
    labelText = "Use Tool (Don't change!)"
    hidden = True
    settingsKey = None

    # Weak reference to avoid a reference cycle with the owning tab.
    editorTab = weakrefprop()

    def __init__(self, editorTab):
        super(UseToolMouseAction, self).__init__()
        self.editorTab = editorTab

    def mousePressEvent(self, event):
        # Forward to the session, then repaint the view.
        self.editorTab.editorSession.viewMousePress(event)
        event.view.update()

    def mouseMoveEvent(self, event):
        self.editorTab.editorSession.viewMouseDrag(event)
        event.view.update()

    def mouseReleaseEvent(self, event):
        self.editorTab.editorSession.viewMouseRelease(event)
        event.view.update()
class TrackingMouseAction(ViewAction):
    """No-button action that forwards plain mouse movement to the editor
    session for hover tracking. Hidden from configuration."""
    button = Qt.NoButton
    hidden = True
    labelText = "Mouse Tracking (Don't change!)"
    settingsKey = None

    # Weak reference to avoid a reference cycle with the owning tab.
    editorTab = weakrefprop()

    def __init__(self, editorTab):
        super(TrackingMouseAction, self).__init__()
        self.editorTab = editorTab

    def mouseMoveEvent(self, event):
        self.editorTab.editorSession.viewMouseMove(event)
class MoveViewMouseAction(ViewAction):
    """Right-button drag pans the view, keeping the grabbed world point
    (unprojected at height 0) under the cursor."""
    button = Qt.RightButton
    labelText = "Pan View"
    settingsKey = "worldview/general/holdToMove"

    def buttonPressEvent(self, event):
        x, y = event.x(), event.y()
        # Remember the world-space point grabbed at the start of the drag.
        self.dragStart = event.view.unprojectAtHeight(x, y, 0)
        self.startOffset = event.view.centerPoint
        log.debug("Drag start %s", self.dragStart)
        event.view.update()

    def mouseMoveEvent(self, event):
        x = event.x()
        y = event.y()
        log.debug("mouseMoveEvent %s", (x, y))
        if self.dragStart:
            # Shift the view center so the grabbed point stays under the cursor.
            d = event.view.unprojectAtHeight(x, y, 0) - self.dragStart
            event.view.centerPoint -= d
            log.debug("Drag continue delta %s", d)
            event.view.update()

    def buttonReleaseEvent(self, event):
        x, y = event.x(), event.y()
        self.dragStart = None
        log.debug("Drag end")
        event.view.update()
class ZoomWheelAction(ViewAction):
    """Mouse-wheel zoom stepping through a fixed list of power-of-two scales,
    anchored so the world point under the cursor stays put."""
    _zooms = None  # lazily-built list of scale steps, cached per instance
    labelText = "Zoom View"
    maxScale = 16.
    minScale = 1. / 64
    settingsKey = None

    @property
    def zooms(self):
        """List of zoom scales from minScale up to (excluding) maxScale,
        doubling each step; built on first access."""
        if self._zooms:
            return self._zooms
        zooms = []
        _i = self.minScale
        while _i < self.maxScale:
            zooms.append(_i)
            _i *= 2.0
        self._zooms = zooms
        return zooms

    def wheelEvent(self, event):
        # Wheel down -> next (larger) scale step, wheel up -> previous.
        d = event.delta()
        mousePos = (event.x(), event.y())
        if d < 0:
            i = self.zooms.index(event.view.scale)
            if i < len(self.zooms) - 1:
                self.zoom(event.view, self.zooms[i + 1], mousePos)
        elif d > 0:
            i = self.zooms.index(event.view.scale)
            if i > 0:
                self.zoom(event.view, self.zooms[i - 1], mousePos)

    # Python 2 tuple-parameter unpacking; this module targets Python 2.
    def zoom(self, view, scale, (mx, my)):
        # Get mouse position in world coordinates
        worldPos = view.unprojectAtHeight(mx, my, 0)
        if scale != view.scale:
            view.scale = scale
            # Get the new position under the mouse, find its distance from the old position,
            # and shift the centerPoint by that amount.
            newWorldPos = view.unprojectAtHeight(mx, my, 0)
            delta = newWorldPos - worldPos
            view.centerPoint = view.centerPoint - delta
            log.debug("zoom offset %s, pos %s, delta %s, scale %s", view.centerPoint, (mx, my), delta, view.scale)
| {
"content_hash": "6f3d3e8290ae614de5a175788f61d60f",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 125,
"avg_line_length": 31.041095890410958,
"alnum_prop": 0.59454986760812,
"repo_name": "vorburger/mcedit2",
"id": "a58161d79cde7482b00ba0c4db4b0b4e329001e6",
"size": "9064",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/mcedit2/worldview/viewaction.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "8578"
},
{
"name": "Makefile",
"bytes": "156"
},
{
"name": "Python",
"bytes": "1639144"
}
],
"symlink_target": ""
} |
import os, sublime, sublime_plugin, subprocess
# List of required Homebrew packages; an empty list triggers conf_error.
packages = [
]
def conf_error(me):
    """Show an error dialog when no Brew packages are configured.

    ``me`` (this plugin's package name) is accepted for symmetry with
    brew_error but is not currently used in the message.
    """
    sublime.error_message("No packages specified in config")
def brew_error(me):
    """Report a failed brew install and offer to open the package's website,
    then stop via sys.exit().

    NOTE(review): indentation reconstructed — sys.exit() assumed to run
    unconditionally after the dialog; confirm against the original file.
    """
    import sys, webbrowser
    if sublime.ok_cancel_dialog("Some package dependencies could not be installed automatically. Please refer to the installation guide to resolve this problem.\n\nDo you want to open the website for this package?", "Open website"):
        webbrowser.open("https://packagecontrol.io/packages/"+me)
    sys.exit()
def plugin_loaded():
    """Sublime Text hook: on install/upgrade of this plugin, brew-install
    every configured package; report configuration or install failures.

    NOTE(review): indentation reconstructed from a whitespace-stripped
    dump — the final else is assumed to pair with the install/upgrade
    check; confirm against the original file.
    """
    from package_control import events
    from subprocess import check_call

    # Name of this package, derived from its directory on disk.
    me = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
    if events.install(me) or events.post_upgrade(me):
        # Get absolute package path
        package_path = os.path.join(sublime.packages_path(), me)
        for package in packages:
            if package:
                try:
                    os.chdir(package_path)
                    sublime.status_message("[%s] brew install %s" % ( me, package))
                    check_call(['brew', 'install', package])
                    sublime.status_message("[%s] Completed" % me)
                except subprocess.CalledProcessError:
                    brew_error(me)
            else:
                # Falsy entry (e.g. empty string) in the packages list.
                conf_error(me)
    else:
        conf_error(me)
"content_hash": "2e88fc2c7f6ca33275c58ae45c314b91",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 232,
"avg_line_length": 32.06818181818182,
"alnum_prop": 0.6137491141034727,
"repo_name": "idleberg/sublime-developer-scripts",
"id": "00a4a868da4224f2e3bc2f2ec0edf105efc4ff20",
"size": "1490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/brew-install.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8044"
}
],
"symlink_target": ""
} |
import cython
@cython.profile(False)
def my_often_called_function():
pass
| {
"content_hash": "6f5e490950959e8cd44ca20bddb16b60",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 31,
"avg_line_length": 15.8,
"alnum_prop": 0.7341772151898734,
"repo_name": "cython/cython",
"id": "15197cb97210e38e1d523bacea8641139040ba0a",
"size": "79",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "docs/examples/tutorial/profiling_tutorial/often_called.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1429"
},
{
"name": "C",
"bytes": "786161"
},
{
"name": "C++",
"bytes": "32603"
},
{
"name": "Cython",
"bytes": "3391513"
},
{
"name": "Emacs Lisp",
"bytes": "12379"
},
{
"name": "Makefile",
"bytes": "3184"
},
{
"name": "PowerShell",
"bytes": "4022"
},
{
"name": "Python",
"bytes": "4081204"
},
{
"name": "Shell",
"bytes": "6371"
},
{
"name": "Smalltalk",
"bytes": "618"
},
{
"name": "Starlark",
"bytes": "3341"
},
{
"name": "sed",
"bytes": "807"
}
],
"symlink_target": ""
} |
"""
Record types for representing deployment models.
"""
from characteristic import attributes
@attributes(["repository", "tag"], defaults=dict(tag=u'latest'))
class DockerImage(object):
"""
An image that can be used to run an application using Docker.
:ivar unicode repository: eg ``u"hybridcluster/flocker"``
:ivar unicode tag: eg ``u"release-14.0"``
:ivar unicode full_name: A readonly property which combines the repository
and tag in a format that can be passed to `docker run`.
"""
@property
def full_name(self):
return "{repository}:{tag}".format(
repository=self.repository, tag=self.tag)
@classmethod
def from_string(cls, input):
"""
Given a Docker image name, return a :class:`DockerImage`.
:param unicode input: A Docker image name in the format
``repository[:tag]``.
:raises ValueError: If Docker image name is not in a valid format.
:returns: A ``DockerImage`` instance.
"""
kwargs = {}
parts = input.rsplit(u':', 1)
repository = parts[0]
if not repository:
raise ValueError("Docker image names must have format "
"'repository[:tag]'. Found '{image_name}'."
.format(image_name=input))
kwargs['repository'] = repository
if len(parts) == 2:
kwargs['tag'] = parts[1]
return cls(**kwargs)
@attributes(["name", "mountpoint"])
class AttachedVolume(object):
"""
A volume attached to an application to be deployed.
:ivar unicode name: A short, human-readable identifier for this
volume. For now this is always the same as the name of the
application it is attached to (see
https://github.com/ClusterHQ/flocker/issues/49).
:ivar FilePath mountpoint: The path within the container where this
volume should be mounted, or ``None`` if unknown
(see https://github.com/ClusterHQ/flocker/issues/289).
"""
@attributes(["name", "image", "ports", "volume", "environment"],
defaults=dict(
image=None, ports=frozenset(), volume=None, environment=None)
)
class Application(object):
"""
A single `application <http://12factor.net/>`_ to be deployed.
XXX: The image and ports attributes defaults to `None` until we have a way
to interrogate geard for the docker images associated with its containers.
See https://github.com/ClusterHQ/flocker/issues/207
XXX: Only the name is compared in equality checks. See
https://github.com/ClusterHQ/flocker/issues/267
:ivar unicode name: A short, human-readable identifier for this
application. For example, ``u"site-example.com"`` or
``u"pgsql-payroll"``.
:ivar DockerImage image: An image that can be used to run this
containerized application.
:ivar frozenset ports: A ``frozenset`` of ``Port`` instances that
should be exposed to the outside world.
:ivar volume: ``None`` if there is no volume, otherwise an
``AttachedVolume`` instance.
"""
@attributes(["hostname", "applications"])
class Node(object):
"""
A single node on which applications will be managed (deployed,
reconfigured, destroyed, etc).
:ivar unicode hostname: The hostname of the node. This must be a
resolveable name so that Flocker can connect to the node. This may be
a literal IP address instead of a proper hostname.
:ivar frozenset applications: A ``frozenset`` of ``Application`` instances
describing the applications which are to run on this ``Node``.
"""
@attributes(["nodes"])
class Deployment(object):
"""
A ``Deployment`` describes the configuration of a number of applications on
a number of cooperating nodes. This might describe the real state of an
existing deployment or be used to represent a desired future state.
:ivar frozenset nodes: A ``frozenset`` containing ``Node`` instances
describing the configuration of each cooperating node.
"""
@attributes(['internal_port', 'external_port'])
class Port(object):
"""
A record representing the mapping between a port exposed internally by an
application and the corresponding port exposed to the outside world.
:ivar int internal_port: The port number exposed by the application.
:ivar int external_port: The port number exposed to the outside world.
"""
@attributes(["volume", "hostname"])
class VolumeHandoff(object):
"""
A record representing a volume handoff that needs to be performed from this
node.
See :cls:`flocker.volume.service.VolumeService.handoff`` for more details.
:ivar AttachedVolume volume: The volume to hand off.
:ivar bytes hostname: The hostname of the node to which the volume is
meant to be handed off.
"""
@attributes(["going", "coming", "creating"])
class VolumeChanges(object):
    """
    ``VolumeChanges`` describes the volume-related changes necessary to change
    the current state to the desired state.

    :ivar frozenset going: The ``VolumeHandoff``\ s necessary to let other
        nodes take over hosting of any volume-having applications being moved
        away from a node.  These must be handed off.

    :ivar frozenset coming: The ``AttachedVolume``\ s necessary to let this
        node take over hosting of any volume-having applications being moved to
        this node.  These must be acquired.

    :ivar frozenset creating: The ``AttachedVolume``\ s necessary to let this
        node create any new volume-having applications meant to be hosted on
        this node.  These must be created.
    """
| {
"content_hash": "9c98c0fa6c31d6a1e1d9f269cc6d1188",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 79,
"avg_line_length": 35.0609756097561,
"alnum_prop": 0.664,
"repo_name": "beni55/flocker",
"id": "0761745869928fa690ddbb3af6643562000ec034",
"size": "5867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flocker/node/_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "540895"
},
{
"name": "Ruby",
"bytes": "797"
},
{
"name": "Shell",
"bytes": "3744"
}
],
"symlink_target": ""
} |
import threading
import Queue
import time
import logging
import sys
sys.path.insert(0, "../lib/Adafruit_MotorHAT_mod")
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor, Adafruit_StepperMotor
import Adafruit_PWM_Servo_Driver
def main():
    """
    Configure the root logger to emit DEBUG-level records on stdout.

    The record format shows the logger name and the worker thread name,
    which is useful because the motor commands execute on a background
    thread (see ``StepperController.async_motor``).
    """
    record_format = "(%(name)s %(threadName)-9s) %(message)s"
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG,
                        format=record_format)
class StepperController:
    """
    Controller for a single stepper motor on an Adafruit MotorHAT.

    Commands are lists pushed onto ``self.motorq`` (e.g.
    ``["fast", position, direction]``) and are consumed by ``async_motor``,
    which is meant to run in its own thread (see the ``__main__`` section of
    this module).  Position bookkeeping is done in steps via ``motor_pos``
    (current estimate) and ``motor_pos_new`` (target).
    """
    def __init__(self, num, logger=None):
        # num: MotorHAT stepper port, 1 or 2 (validated further below).
        self._number = num
        self._ppr = 96  # pulses per revolution for stepper motor
        self.mh = Adafruit_MotorHAT()  # create a default object, no changes to I2C address or frequency
        self.myStepper = self.mh.getStepper(self._ppr, self._number)  # 96 steps per revolution. Stepper port num.
        self.mypwm = Adafruit_PWM_Servo_Driver.PWM(0x60, debug=False)  # I2C address 0x60
        self.logger = logger or logging.getLogger(__name__)
        self.speed_pps = 20  # Speed in pulses per second
        self.steps = 0
        self.step_type = 0  # SINGLE = 1 DOUBLE = 2 INTERLEAVE = 3 MICROSTEP = 4
        self.motorq = Queue.Queue(5)  # bounded command queue (put blocks when full)
        self.motor_pos = 0       # current position estimate, in steps
        self.motor_pos_new = 0   # requested target position, in steps
        self.motor_speed = 0     # last requested speed, in RPM
        self.motor_half_step = True
        self.motor_running = False
        self.inv_dir = True  # inverts the rotation sense (wiring dependent)
        # Constants from motorhat library
        num -= 1
        self.FORWARD = 1
        self.BACKWARD = 2
        self.BRAKE = 3
        self.RELEASE = 4
        # PWM channel numbers of the two H-bridge coil pairs for each port.
        if (num == 0):
            self.PWMA = 8
            self.AIN2 = 9
            self.AIN1 = 10
            self.PWMB = 13
            self.BIN2 = 12
            self.BIN1 = 11
        elif (num == 1):
            self.PWMA = 2
            self.AIN2 = 3
            self.AIN1 = 4
            self.PWMB = 7
            self.BIN2 = 6
            self.BIN1 = 5
        else:
            self.logger.error("MotorHAT Stepper must be between 1 and 2 inclusive")

    def write_motorq(self, command_tuple):  # Tuple: command, speed, direction, motor.handle
        # Enqueue a raw command for async_motor; blocks if the queue is full.
        self.motorq.put(command_tuple)
        self.logger.debug("Putting " + str(command_tuple) + " : " + str(self.motorq.qsize()) + " items in queue")

    # Disable motors/ no force on poles!
    def turn_off_all(self ):
        self.mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
        self.mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
        self.mh.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
        self.mh.getMotor(4).run(Adafruit_MotorHAT.RELEASE)

    def motor_set_speed(self, speed):
        # speed is in RPM; speed_pps is the derived pulse rate in pulses/s.
        self.motor_speed = speed
        self.speed_pps = float(speed)/60*self._ppr
        self.logger.info("Speed: {} RPM".format(speed))
        if self.speed_pps > 95 and self.speed_pps < 193:
            # Fast range: stepping is driven by the PWM waveform itself.
            self.mypwm.setPWMFreq( self.speed_pps/4)  # Speed is 4 steps per frequency puls
        elif self.speed_pps < 96:
            # Slow range: library stepping at a fixed carrier frequency.
            self.mypwm.setPWMFreq(1600)
            self.myStepper.setSpeed(speed)
        else:
            self.logger.error("Speed {} rpm is to high or otherwise wrong".format(speed))
        return

    def motor_set_step_type(self, half_step):
        # Map the boolean onto the MotorHAT stepping constants:
        # INTERLEAVE (3) for half steps, DOUBLE (2) for full steps.
        self.motor_half_step = half_step
        if half_step:
            self.step_type = 3
        else:
            self.step_type = 2
        return

    def start_motor (self, position=None):
        # Queue a move to ``position`` (in steps) at the current speed.
        if self.step_type != 2 and self.step_type != 3:
            self.motor_set_step_type(self.motor_half_step)
        if not position == None:
            self.motor_pos_new = position
        direction = self.FORWARD if self.motor_pos_new > self.motor_pos else self.BACKWARD  # check direction (forward is True)
        if self.speed_pps > 95 and self.speed_pps < 193:  # High speed stepping
            self.motorq.put(["fast", position, direction])
            self.logger.debug("Putting " + str(["fast", position, direction]) + " : " + str(self.motorq.qsize()) + " items in queue")
        elif self.speed_pps < 96:
            self.motorq.put(["slow", position, direction])
            self.logger.debug("speed_pps: {}".format(self.speed_pps))
            self.logger.debug("Putting " + str(["slow", position, direction]) + " : " + str(self.motorq.qsize()) + " items in queue")
        else:
            self.logger.error("Speed {} rpm is to high or otherwise wrong".format(self.motor_speed))
        return

    def release_motor (self):
        # De-energise all four coil pins of this port (no holding torque).
        self.mh.setPin(self.AIN2, 0)
        self.mh.setPin(self.BIN1, 0)
        self.mh.setPin(self.AIN1, 0)
        self.mh.setPin(self.BIN2, 0)
        '''if self._number == 2:
            self.mh.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
            self.mh.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
        else :
            self.mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
            self.mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)'''

    def stop_motor (self):
        # NOTE(review): async_motor's "stop" branch reads ``motor_dir``,
        # which is only bound after a previous 3-element command has been
        # consumed; a bare stop as the very first command would raise a
        # NameError in the worker thread -- confirm intended usage.
        self.motorq.put(["stop", 0])
        self.myStepper.stop_stepper = True
        return

    def async_stop(self):
        # Abort any in-progress slow stepping and ask async_motor to exit.
        self.myStepper.stop_stepper = True
        self.motorq.put(["exit", 0])
        return

    def calc_pos(self, direction, start_time, run_type, step_count):
        # Update motor_pos after a move.  NOTE(review): ``direction`` is
        # unused; the sign is inferred from motor_pos_new vs motor_pos.
        if run_type == "slow":
            #if step_count != None:
            if self.motor_pos_new > self.motor_pos:
                self.motor_pos = self.motor_pos + step_count
            else:
                self.motor_pos = self.motor_pos - step_count
        elif run_type == "fast":
            # Fast moves are open loop: estimate steps from elapsed time.
            if self.motor_pos_new > self.motor_pos:
                self.motor_pos =self.motor_pos + int((time.time() - start_time)*self.speed_pps * (2 if self.motor_half_step else 1))
            else:
                self.motor_pos =self.motor_pos - int((time.time() - start_time)*self.speed_pps * (2 if self.motor_half_step else 1))

    def async_motor (self):
        """
        Queue-consumer loop; run this in a dedicated thread.

        Pops command lists from ``motorq`` and executes them: ``fast``
        (direct PWM waveforms on the coil pins), ``slow`` (library
        stepping), ``DCMotor``, ``stop`` and ``exit``.
        """
        steps = 0
        start_time = 0
        run_time = 0
        step_count = 0
        running_type = "fast"
        while True:
            if not self.motorq.empty():
                #Get items from Queue
                item = self.motorq.get()
                command = item[0]
                value = item[1]
                if len(item) == 3:
                    motor_dir = item[2]
                elif len(item) > 3:
                    motor_dir = item[2]
                    motor_handle = item[3]
                self.logger.info("Getting " + str(item) + " : " + str(self.motorq.qsize()) + " items in queue")
                #direction = self.motor_pos_new > self.motor_pos #True = going out.
                steps = int(abs(self.motor_pos_new - self.motor_pos)) #Add steps if going out
                if command == "exit":
                    self.turn_off_all()
                    break
                elif command == "fast":
                    running_type = command
                    try:
                        if self.motor_half_step:
                            #t= steps/pulses per second
                            run_time = (float(abs(steps))/float(self.speed_pps))/2
                        else:
                            run_time = (float(abs(steps))/float(self.speed_pps))
                        if run_time > 0.1:
                            # Program phase-shifted PWM waveforms on the four
                            # coil pins; the pin order encodes the direction.
                            if self.step_type == 2:
                                if (motor_dir != self.inv_dir):
                                    self.mypwm.setPWM(self.AIN2, 0, 2048)
                                    self.mypwm.setPWM(self.BIN1, 1024, 3072)
                                    self.mypwm.setPWM(self.AIN1, 2048, 4095)
                                    self.mypwm.setPWM(self.BIN2, 3072, 512)
                                else:
                                    self.mypwm.setPWM(self.AIN2, 3072, 512)
                                    self.mypwm.setPWM(self.BIN1, 2048, 4095)
                                    self.mypwm.setPWM(self.AIN1, 1024, 3072)
                                    self.mypwm.setPWM(self.BIN2, 0, 2048)
                            elif self.step_type == 3:
                                if (motor_dir != self.inv_dir):
                                    self.mypwm.setPWM(self.AIN2, 0, 1536)
                                    self.mypwm.setPWM(self.BIN1, 1024, 2560)
                                    self.mypwm.setPWM(self.AIN1, 2048, 3584)
                                    self.mypwm.setPWM(self.BIN2, 3072, 512)
                                else:
                                    self.mypwm.setPWM(self.AIN2, 3072, 512)
                                    self.mypwm.setPWM(self.BIN1, 2048, 3584)
                                    self.mypwm.setPWM(self.AIN1, 1024, 2560)
                                    self.mypwm.setPWM(self.BIN2, 0, 1536)
                            else:
                                self.logger.error("Other step modes currently not supported")
                            start_time = time.time()
                            # Enable both H-bridges at full duty to start.
                            self.mypwm.setPWM(self.PWMA, 0, 4095)
                            self.mypwm.setPWM(self.PWMB, 0, 4095)
                            self.motor_running = True
                            self.logger.info("Going to position {}".format(self.motor_pos_new))
                        else:
                            pass
                    except:
                        # NOTE(review): bare except hides the real error.
                        self.logger.error("Start fast fault")
                elif command == "slow":
                    running_type = command
                    try:
                        if self.step_type == 3:
                            #t= steps/pulses per second
                            run_time = (float(abs(steps))/float(self.speed_pps))/2
                        else:
                            run_time = (float(abs(steps))/float(self.speed_pps))
                        start_time = time.time()
                        self.motor_running = True
                        self.logger.info("Going to position {}".format(self.motor_pos_new))
                        # Blocking call; returns the number of steps taken.
                        step_count = self.myStepper.step(steps, (motor_dir != self.inv_dir), self.step_type)
                        self.release_motor()
                    except:
                        self.logger.exception("Start slow fault")
                elif command == "DCMotor":
                    motor_handle.setSpeed(value)
                    motor_handle.run(motor_dir)
                elif command == "stop" and self.motor_running:
                    self.release_motor()
                    self.motor_running = False
                    self.calc_pos(motor_dir, start_time, running_type, step_count)
                    run_time = 0
                    self.logger.info("Reached position {}, {}".format(self.motor_pos, motor_dir))
                else:
                    pass
            # Catch the time to stop the execution of fast moving motor.
            if self.motor_running and time.time() - start_time >= run_time:
                stop_time = time.time()
                self.logger.debug("Running time: {}, expected time: {}".format(stop_time - start_time, run_time))
                run_time = 0
                self.motor_running = False
                if running_type == "fast":
                    # Kill the drive PWM, then account for the steps made.
                    self.mypwm.setPWM(self.PWMA, 0, 0)
                    self.mypwm.setPWM(self.PWMB, 0, 0)
                    self.calc_pos(motor_dir, start_time, running_type, step_count)
                elif running_type == "slow":
                    self.calc_pos(motor_dir, start_time, running_type, step_count)
                self.logger.info("Reached position: {}, {}".format(self.motor_pos, motor_dir))
            time.sleep(0.05)
        return
class DCMotorController:
    """
    Thin wrapper around a single DC-motor channel of an Adafruit MotorHAT.
    """
    def __init__(self, num):
        """
        :param num: MotorHAT DC motor port number, passed straight to
            ``Adafruit_MotorHAT.getMotor``.
        """
        self._number = num
        self.mh = Adafruit_MotorHAT()  # Open motor hat instance (default I2C address)
        self.motor = self.mh.getMotor(self._number)

    def stop_motor(self):
        """Release the motor so no holding force remains on the coils."""
        # Bug fix: previously referenced the undefined name ``MtrHat``
        # (guaranteed NameError at runtime); this module imports
        # Adafruit_MotorHAT directly.
        self.motor.run(Adafruit_MotorHAT.RELEASE)

    def start_motor(self, contr_handle, duty_cycle, motor_dir):
        """
        Drive the motor.

        :param contr_handle: unused; kept for backward compatibility with
            callers that pass a StepperController handle.
        :param duty_cycle: PWM duty cycle (0-255) for ``setSpeed``.
        :param motor_dir: MotorHAT direction constant (FORWARD/BACKWARD).
        """
        self.motor.setSpeed(duty_cycle)
        self.motor.run(motor_dir)
        logging.info("Heating at: {}% Duty Cycle".format(duty_cycle))
if __name__ == '__main__':
    # Demo / smoke test: drive stepper port 2 from a background thread.
    main()
    controller = StepperController(2)
    motor_thread = threading.Thread(target=controller.async_motor)
    #read_thread.setDaemon(True)
    motor_thread.start()
    #controller.motorq.put(["exit", 0])
    time.sleep(2)
    while True:
        try:
            controller.motor_set_speed(25)
            controller.motor_set_step_type(True)
            time.sleep(2)
            controller.start_motor(400)
            time.sleep(10)
        except KeyboardInterrupt:
            # Ctrl-C: ask the worker thread to exit and de-energise coils.
            controller.async_stop()
            controller.turn_off_all()
            break
| {
"content_hash": "6982c54d35ce4a78d8d33a31c17c4d92",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 133,
"avg_line_length": 42.35197368421053,
"alnum_prop": 0.5087378640776699,
"repo_name": "Zimcoding/Raspy-Telescope",
"id": "7d0e65f1b30db3b9c862fe9a453364d5f25fc8df",
"size": "12894",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Raspy-Telescope/motor_controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "72753"
}
],
"symlink_target": ""
} |
import StringIO, os, sys
from zope.interface import implements
from zope.interface.verify import verifyObject
from twisted.trial.itrial import IReporter, ITestCase
from twisted.trial import unittest, runner, reporter, util
from twisted.python import failure, log, reflect, filepath
from twisted.scripts import trial
from twisted.plugins import twisted_trial
from twisted import plugin
from twisted.internet import defer
pyunit = __import__('unittest')
class CapturingDebugger(object):
    """
    Stand-in debugger that records every ``runcall`` invocation and then
    calls the wrapped callable directly, without any actual debugging.
    """

    def __init__(self):
        # Ordered log of the debugger actions performed.
        self._calls = []

    def runcall(self, *args, **kwargs):
        """
        Record the call, then apply the first positional argument (a
        callable) to the remaining positional and keyword arguments.
        """
        self._calls.append('runcall')
        func = args[0]
        func(*args[1:], **kwargs)
class CapturingReporter(object):
    """
    Reporter that keeps a log of all actions performed on it.
    """
    implements(IReporter)

    # Attributes required by the IReporter interface; unused by these tests.
    stream = None
    tbformat = None
    args = None
    separator = None
    testsRun = None

    def __init__(self, stream=None, tbformat=None, rterrors=None,
                 publisher=None):
        """
        Create a capturing reporter.
        """
        self._calls = []  # ordered names of the reporter events received
        self.shouldStop = False
        self._stream = stream
        self._tbformat = tbformat
        self._rterrors = rterrors
        self._publisher = publisher

    def startTest(self, method):
        """
        Report the beginning of a run of a single test method
        @param method: an object that is adaptable to ITestMethod
        """
        self._calls.append('startTest')

    def stopTest(self, method):
        """
        Report the status of a single test method
        @param method: an object that is adaptable to ITestMethod
        """
        self._calls.append('stopTest')

    def cleanupErrors(self, errs):
        """called when the reactor has been left in a 'dirty' state
        @param errs: a list of L{twisted.python.failure.Failure}s
        """
        self._calls.append('cleanupError')

    def addSuccess(self, test):
        # Record a passing test.
        self._calls.append('addSuccess')

    def done(self):
        """
        Do nothing. These tests don't care about done.
        """
class TrialRunnerTestsMixin:
    """
    Mixin defining tests for L{runner.TrialRunner}.
    """
    def tearDown(self):
        # run() opens a log file; make sure it is closed after each test.
        self.runner._tearDownLogFile()

    def test_empty(self):
        """
        Empty test method, used by the other tests.
        """

    def _getObservers(self):
        # Snapshot of the global log observers; used to detect leaks.
        return log.theLogPublisher.observers

    def test_addObservers(self):
        """
        Any log system observers L{TrialRunner.run} adds are removed by the
        time it returns.
        """
        originalCount = len(self._getObservers())
        self.runner.run(self.test)
        newCount = len(self._getObservers())
        self.assertEqual(newCount, originalCount)

    def test_logFileAlwaysActive(self):
        """
        Test that a new file is opened on each run.
        """
        oldSetUpLogFile = self.runner._setUpLogFile
        l = []
        def setUpLogFile():
            # Wrap the original hook to capture each observer created.
            oldSetUpLogFile()
            l.append(self.runner._logFileObserver)
        self.runner._setUpLogFile = setUpLogFile
        self.runner.run(self.test)
        self.runner.run(self.test)
        self.failUnlessEqual(len(l), 2)
        self.failIf(l[0] is l[1], "Should have created a new file observer")

    def test_logFileGetsClosed(self):
        """
        Test that file created is closed during the run.
        """
        oldSetUpLogFile = self.runner._setUpLogFile
        l = []
        def setUpLogFile():
            # Capture the file object so we can check it afterwards.
            oldSetUpLogFile()
            l.append(self.runner._logFileObject)
        self.runner._setUpLogFile = setUpLogFile
        self.runner.run(self.test)
        self.failUnlessEqual(len(l), 1)
        self.failUnless(l[0].closed)
class TestTrialRunner(TrialRunnerTestsMixin, unittest.TestCase):
    """
    Tests for L{runner.TrialRunner} with the feature to turn unclean errors
    into warnings disabled.
    """
    def setUp(self):
        # CapturingReporter records reporter events without writing output.
        self.stream = StringIO.StringIO()
        self.runner = runner.TrialRunner(CapturingReporter, stream=self.stream)
        self.test = TestTrialRunner('test_empty')

    def test_publisher(self):
        """
        The reporter constructed by L{runner.TrialRunner} is passed
        L{twisted.python.log} as the value for the C{publisher} parameter.
        """
        result = self.runner._makeResult()
        self.assertIdentical(result._publisher, log)
class TrialRunnerWithUncleanWarningsReporter(TrialRunnerTestsMixin,
                                             unittest.TestCase):
    """
    Tests for the TrialRunner's interaction with an unclean-error suppressing
    reporter.
    """
    def setUp(self):
        # Same fixtures as TestTrialRunner, but with unclean-reactor errors
        # converted into warnings.
        self.stream = StringIO.StringIO()
        self.runner = runner.TrialRunner(CapturingReporter, stream=self.stream,
                                         uncleanWarnings=True)
        self.test = TestTrialRunner('test_empty')
class DryRunMixin(object):
    """
    Mixin defining tests for the dry-run (report-only, no execution) mode
    of L{runner.TrialRunner}; subclasses provide the test fixtures.
    """
    # Silence the deprecated test-visitor warning emitted during dry runs.
    suppress = [util.suppress(
        category=DeprecationWarning,
        message="Test visitors deprecated in Twisted 8.0")]

    def setUp(self):
        self.log = []
        self.stream = StringIO.StringIO()
        self.runner = runner.TrialRunner(CapturingReporter,
                                         runner.TrialRunner.DRY_RUN,
                                         stream=self.stream)
        self.makeTestFixtures()

    def makeTestFixtures(self):
        """
        Set C{self.test} and C{self.suite}, where C{self.suite} is an empty
        TestSuite.
        """

    def test_empty(self):
        """
        If there are no tests, the reporter should not receive any events to
        report.
        """
        result = self.runner.run(runner.TestSuite())
        self.assertEqual(result._calls, [])

    def test_singleCaseReporting(self):
        """
        If we are running a single test, check the reporter starts, passes and
        then stops the test during a dry run.
        """
        result = self.runner.run(self.test)
        self.assertEqual(result._calls, ['startTest', 'addSuccess', 'stopTest'])

    def test_testsNotRun(self):
        """
        When we are doing a dry run, the tests should not actually be run.
        """
        self.runner.run(self.test)
        self.assertEqual(self.log, [])
class DryRunTest(DryRunMixin, unittest.TestCase):
    """
    Check that 'dry run' mode works well with Trial tests.
    """
    def makeTestFixtures(self):
        class MockTest(unittest.TestCase):
            def test_foo(test):
                # Would record a run; must never fire during a dry run.
                self.log.append('test_foo')
        self.test = MockTest('test_foo')
        self.suite = runner.TestSuite()
class PyUnitDryRunTest(DryRunMixin, unittest.TestCase):
    """
    Check that 'dry run' mode works well with stdlib unittest tests.
    """
    def makeTestFixtures(self):
        class PyunitCase(pyunit.TestCase):
            def test_foo(self):
                pass
        self.test = PyunitCase('test_foo')
        self.suite = pyunit.TestSuite()
class TestRunner(unittest.TestCase):
    """
    Tests for how the trial command line constructs and uses
    L{runner.TrialRunner}: reporter selection, working directories and
    safe removal of the temporary test directory.
    """
    def setUp(self):
        self.config = trial.Options()
        # whitebox hack a reporter in, because plugins are CACHED and will
        # only reload if the FILE gets changed.
        parts = reflect.qual(CapturingReporter).split('.')
        package = '.'.join(parts[:-1])
        klass = parts[-1]
        plugins = [twisted_trial._Reporter(
            "Test Helper Reporter",
            package,
            description="Utility for unit testing.",
            longOpt="capturing",
            shortOpt=None,
            klass=klass)]
        # XXX There should really be a general way to hook the plugin system
        # for tests.
        def getPlugins(iface, *a, **kw):
            # Prepend our fake reporter plugin to the real plugin list.
            self.assertEqual(iface, IReporter)
            return plugins + list(self.original(iface, *a, **kw))
        self.original = plugin.getPlugins
        plugin.getPlugins = getPlugins
        # Expected reporter event sequence for seven passing tests.
        self.standardReport = ['startTest', 'addSuccess', 'stopTest',
                               'startTest', 'addSuccess', 'stopTest',
                               'startTest', 'addSuccess', 'stopTest',
                               'startTest', 'addSuccess', 'stopTest',
                               'startTest', 'addSuccess', 'stopTest',
                               'startTest', 'addSuccess', 'stopTest',
                               'startTest', 'addSuccess', 'stopTest']

    def tearDown(self):
        # Restore the real plugin lookup patched in setUp.
        plugin.getPlugins = self.original

    def parseOptions(self, args):
        # Shorthand for feeding command line args into the trial Options.
        self.config.parseOptions(args)

    def getRunner(self):
        # Build a runner from self.config with its output captured.
        r = trial._makeRunner(self.config)
        r.stream = StringIO.StringIO()
        # XXX The runner should always take care of cleaning this up itself.
        # It's not clear why this is necessary.  The runner always tears down
        # its log file.
        self.addCleanup(r._tearDownLogFile)
        # XXX The runner should always take care of cleaning this up itself as
        # well.  It's necessary because TrialRunner._setUpTestdir might raise
        # an exception preventing Reporter.done from being run, leaving the
        # observer added by Reporter.__init__ still present in the system.
        # Something better needs to happen inside
        # TrialRunner._runWithoutDecoration to remove the need for this cludge.
        r._log = log.LogPublisher()
        return r

    def test_runner_can_get_reporter(self):
        """
        The reporter class configured on the command line is the class of
        the result the runner constructs.
        """
        self.parseOptions([])
        result = self.config['reporter']
        runner = self.getRunner()
        self.assertEqual(result, runner._makeResult().__class__)

    def test_runner_get_result(self):
        """
        The runner's result is an instance of the configured reporter class.
        """
        self.parseOptions([])
        runner = self.getRunner()
        result = runner._makeResult()
        self.assertEqual(result.__class__, self.config['reporter'])

    def test_uncleanWarningsOffByDefault(self):
        """
        By default Trial sets the 'uncleanWarnings' option on the runner to
        False. This means that dirty reactor errors will be reported as
        errors. See L{test_reporter.TestDirtyReactor}.
        """
        self.parseOptions([])
        runner = self.getRunner()
        self.assertNotIsInstance(runner._makeResult(),
                                 reporter.UncleanWarningsReporterWrapper)

    def test_getsUncleanWarnings(self):
        """
        Specifying '--unclean-warnings' on the trial command line will cause
        reporters to be wrapped in a device which converts unclean errors to
        warnings.  See L{test_reporter.TestDirtyReactor} for implications.
        """
        self.parseOptions(['--unclean-warnings'])
        runner = self.getRunner()
        self.assertIsInstance(runner._makeResult(),
                              reporter.UncleanWarningsReporterWrapper)

    def test_runner_working_directory(self):
        """
        '--temp-directory' sets the runner's working directory.
        """
        self.parseOptions(['--temp-directory', 'some_path'])
        runner = self.getRunner()
        self.assertEquals(runner.workingDirectory, 'some_path')

    def test_concurrentImplicitWorkingDirectory(self):
        """
        If no working directory is explicitly specified and the default
        working directory is in use by another runner, L{TrialRunner.run}
        selects a different default working directory to use.
        """
        self.parseOptions([])
        initialDirectory = os.getcwd()
        self.addCleanup(os.chdir, initialDirectory)
        firstRunner = self.getRunner()
        secondRunner = self.getRunner()
        where = {}
        class ConcurrentCase(unittest.TestCase):
            def test_first(self):
                """
                Start a second test run which will have a default working
                directory which is the same as the working directory of the
                test run already in progress.
                """
                # Change the working directory to the value it had before this
                # test suite was started.
                where['concurrent'] = subsequentDirectory = os.getcwd()
                os.chdir(initialDirectory)
                self.addCleanup(os.chdir, subsequentDirectory)
                secondRunner.run(ConcurrentCase('test_second'))
            def test_second(self):
                """
                Record the working directory for later analysis.
                """
                where['record'] = os.getcwd()
        result = firstRunner.run(ConcurrentCase('test_first'))
        bad = result.errors + result.failures
        if bad:
            self.fail(bad[0][1])
        self.assertEqual(
            where, {
                'concurrent': os.path.join(initialDirectory, '_trial_temp'),
                'record': os.path.join(initialDirectory, '_trial_temp-1')})

    def test_concurrentExplicitWorkingDirectory(self):
        """
        If a working directory which is already in use is explicitly specified,
        L{TrialRunner.run} raises L{_WorkingDirectoryBusy}.
        """
        self.parseOptions(['--temp-directory', os.path.abspath(self.mktemp())])
        initialDirectory = os.getcwd()
        self.addCleanup(os.chdir, initialDirectory)
        firstRunner = self.getRunner()
        secondRunner = self.getRunner()
        class ConcurrentCase(unittest.TestCase):
            def test_concurrent(self):
                """
                Try to start another runner in the same working directory and
                assert that it raises L{_WorkingDirectoryBusy}.
                """
                self.assertRaises(
                    runner._WorkingDirectoryBusy,
                    secondRunner.run, ConcurrentCase('test_failure'))
            def test_failure(self):
                """
                Should not be called, always fails.
                """
                self.fail("test_failure should never be called.")
        result = firstRunner.run(ConcurrentCase('test_concurrent'))
        bad = result.errors + result.failures
        if bad:
            self.fail(bad[0][1])

    def test_runner_normal(self):
        """
        Running the sample suite produces the standard event sequence on
        the capturing reporter.
        """
        self.parseOptions(['--temp-directory', self.mktemp(),
                           '--reporter', 'capturing',
                           'twisted.trial.test.sample'])
        my_runner = self.getRunner()
        loader = runner.TestLoader()
        suite = loader.loadByName('twisted.trial.test.sample', True)
        result = my_runner.run(suite)
        self.assertEqual(self.standardReport, result._calls)

    def test_runner_debug(self):
        """
        With '--debug', every test is executed through the debugger's
        runcall and still produces the standard event sequence.
        """
        self.parseOptions(['--reporter', 'capturing',
                           '--debug', 'twisted.trial.test.sample'])
        my_runner = self.getRunner()
        debugger = CapturingDebugger()
        def get_debugger():
            return debugger
        my_runner._getDebugger = get_debugger
        loader = runner.TestLoader()
        suite = loader.loadByName('twisted.trial.test.sample', True)
        result = my_runner.run(suite)
        self.assertEqual(self.standardReport, result._calls)
        self.assertEqual(['runcall'], debugger._calls)

    def test_removeSafelyNoTrialMarker(self):
        """
        If a path doesn't contain a node named C{"_trial_marker"}, that path is
        not removed by L{runner._removeSafely} and a L{runner._NoTrialMarker}
        exception is raised instead.
        """
        directory = self.mktemp()
        os.mkdir(directory)
        dirPath = filepath.FilePath(directory)
        self.parseOptions([])
        myRunner = self.getRunner()
        self.assertRaises(runner._NoTrialMarker,
                          myRunner._removeSafely, dirPath)

    def test_removeSafelyRemoveFailsMoveSucceeds(self):
        """
        If an L{OSError} is raised while removing a path in
        L{runner._removeSafely}, an attempt is made to move the path to a new
        name.
        """
        def dummyRemove():
            """
            Raise an C{OSError} to emulate the branch of L{runner._removeSafely}
            in which path removal fails.
            """
            raise OSError()
        # Patch stdout so we can check the print statements in _removeSafely
        out = StringIO.StringIO()
        stdout = self.patch(sys, 'stdout', out)
        # Set up a trial directory with a _trial_marker
        directory = self.mktemp()
        os.mkdir(directory)
        dirPath = filepath.FilePath(directory)
        dirPath.child('_trial_marker').touch()
        # Ensure that path.remove() raises an OSError
        dirPath.remove = dummyRemove
        self.parseOptions([])
        myRunner = self.getRunner()
        myRunner._removeSafely(dirPath)
        self.assertIn("could not remove FilePath", out.getvalue())

    def test_removeSafelyRemoveFailsMoveFails(self):
        """
        If an L{OSError} is raised while removing a path in
        L{runner._removeSafely}, an attempt is made to move the path to a new
        name. If that attempt fails, the L{OSError} is re-raised.
        """
        def dummyRemove():
            """
            Raise an C{OSError} to emulate the branch of L{runner._removeSafely}
            in which path removal fails.
            """
            raise OSError("path removal failed")
        def dummyMoveTo(path):
            """
            Raise an C{OSError} to emulate the branch of L{runner._removeSafely}
            in which path movement fails.
            """
            raise OSError("path movement failed")
        # Patch stdout so we can check the print statements in _removeSafely
        out = StringIO.StringIO()
        stdout = self.patch(sys, 'stdout', out)
        # Set up a trial directory with a _trial_marker
        directory = self.mktemp()
        os.mkdir(directory)
        dirPath = filepath.FilePath(directory)
        dirPath.child('_trial_marker').touch()
        # Ensure that path.remove() and path.moveTo() both raise OSErrors
        dirPath.remove = dummyRemove
        dirPath.moveTo = dummyMoveTo
        self.parseOptions([])
        myRunner = self.getRunner()
        error = self.assertRaises(OSError, myRunner._removeSafely, dirPath)
        self.assertEquals(str(error), "path movement failed")
        self.assertIn("could not remove FilePath", out.getvalue())
class TestTrialSuite(unittest.TestCase):
    """
    Tests (currently just importability) for L{runner.TrialSuite}.
    """
    def test_imports(self):
        # FIXME, HTF do you test the reactor can be cleaned up ?!!!
        from twisted.trial.runner import TrialSuite
        # silence pyflakes warning
        silencePyflakes = TrialSuite
class TestUntilFailure(unittest.TestCase):
    """
    Tests for the run-until-failure feature of L{runner.TrialRunner}.
    """
    class FailAfter(unittest.TestCase):
        """
        A test case that fails when run 3 times in a row.
        """
        # Class-level counter shared across instances; reset in setUp below.
        count = []
        def test_foo(self):
            self.count.append(None)
            if len(self.count) == 3:
                self.fail('Count reached 3')

    def setUp(self):
        # Reset the shared counter so each test starts from zero runs.
        TestUntilFailure.FailAfter.count = []
        self.test = TestUntilFailure.FailAfter('test_foo')
        self.stream = StringIO.StringIO()
        self.runner = runner.TrialRunner(reporter.Reporter, stream=self.stream)

    def test_runUntilFailure(self):
        """
        Test that the runUntilFailure method of the runner actually fail after
        a few runs.
        """
        result = self.runner.runUntilFailure(self.test)
        self.failUnlessEqual(result.testsRun, 1)
        self.failIf(result.wasSuccessful())
        self.assertEquals(self._getFailures(result), 1)

    def _getFailures(self, result):
        """
        Get the number of failures that were reported to a result.
        """
        return len(result.failures)

    def test_runUntilFailureDecorate(self):
        """
        C{runUntilFailure} doesn't decorate the tests uselessly: it does it one
        time when run starts, but not at each turn.
        """
        decorated = []
        def decorate(test, interface):
            # Record each decoration, passing the test through unchanged.
            decorated.append((test, interface))
            return test
        self.patch(unittest, "decorate", decorate)
        result = self.runner.runUntilFailure(self.test)
        self.failUnlessEqual(result.testsRun, 1)
        self.assertEquals(len(decorated), 1)
        self.assertEquals(decorated, [(self.test, ITestCase)])

    def test_runUntilFailureForceGCDecorate(self):
        """
        C{runUntilFailure} applies the force-gc decoration after the standard
        L{ITestCase} decoration, but only one time.
        """
        decorated = []
        def decorate(test, interface):
            decorated.append((test, interface))
            return test
        self.patch(unittest, "decorate", decorate)
        self.runner._forceGarbageCollection = True
        result = self.runner.runUntilFailure(self.test)
        self.failUnlessEqual(result.testsRun, 1)
        self.assertEquals(len(decorated), 2)
        self.assertEquals(decorated,
                          [(self.test, ITestCase),
                           (self.test, unittest._ForceGarbageCollectionDecorator)])
class UncleanUntilFailureTests(TestUntilFailure):
    """
    Test that the run-until-failure feature works correctly with the unclean
    error suppressor.
    """
    def setUp(self):
        TestUntilFailure.setUp(self)
        # Replace the runner so unclean-reactor errors become warnings.
        self.runner = runner.TrialRunner(reporter.Reporter, stream=self.stream,
                                         uncleanWarnings=True)

    def _getFailures(self, result):
        """
        Get the number of failures that were reported to a result that
        is wrapped in an UncleanFailureWrapper.
        """
        return len(result._originalReporter.failures)
class BreakingSuite(runner.TestSuite):
    """
    A L{TestSuite} that logs an error when it is run.
    """
    def run(self, result):
        try:
            raise RuntimeError("error that occurs outside of a test")
        except RuntimeError, e:
            # NOTE(review): ``e`` is unused; log.err(failure.Failure())
            # captures the exception currently being handled.
            log.err(failure.Failure())
class TestLoggedErrors(unittest.TestCase):
    """
    It is possible for an error generated by a test to be logged I{outside} of
    any test. The log observers constructed by L{TestCase} won't catch these
    errors. Here we try to generate such errors and ensure they are reported to
    a L{TestResult} object.
    """
    def tearDown(self):
        # Drain the RuntimeErrors logged by BreakingSuite so they do not
        # fail this test case itself.
        self.flushLoggedErrors(RuntimeError)

    def test_construct(self):
        """
        Check that we can construct a L{runner.LoggedSuite} and that it
        starts empty.
        """
        suite = runner.LoggedSuite()
        self.assertEqual(suite.countTestCases(), 0)

    def test_capturesError(self):
        """
        Check that a L{LoggedSuite} reports any logged errors to its result.
        """
        result = reporter.TestResult()
        suite = runner.LoggedSuite([BreakingSuite()])
        suite.run(result)
        self.assertEqual(len(result.errors), 1)
        self.assertEqual(result.errors[0][0].id(), runner.NOT_IN_TEST)
        self.failUnless(result.errors[0][1].check(RuntimeError))
class TestTestHolder(unittest.TestCase):
    """
    Tests for L{runner.TestHolder}, a placeholder test identified only by
    a description.
    """
    def setUp(self):
        self.description = "description"
        self.holder = runner.TestHolder(self.description)

    def test_holder(self):
        """
        Check that L{runner.TestHolder} takes a description as a parameter
        and that this description is returned by the C{id} and
        C{shortDescription} methods.
        """
        self.assertEqual(self.holder.id(), self.description)
        self.assertEqual(self.holder.shortDescription(), self.description)

    def test_holderImplementsITestCase(self):
        """
        L{runner.TestHolder} implements L{ITestCase}.
        """
        self.assertIdentical(self.holder, ITestCase(self.holder))
        self.assertTrue(
            verifyObject(ITestCase, self.holder),
            "%r claims to provide %r but does not do so correctly."
            % (self.holder, ITestCase))

    def test_runsWithStandardResult(self):
        """
        A L{runner.TestHolder} can run against the standard Python
        C{TestResult}.
        """
        result = pyunit.TestResult()
        self.holder.run(result)
        self.assertTrue(result.wasSuccessful())
        self.assertEquals(1, result.testsRun)
class TestErrorHolder(TestTestHolder):
    """
    Test L{runner.ErrorHolder} shares behaviour with L{runner.TestHolder}.
    """
    def setUp(self):
        self.description = "description"
        # make a real Failure so we can construct ErrorHolder()
        try:
            1/0
        except ZeroDivisionError:
            error = failure.Failure()
        self.holder = runner.ErrorHolder(self.description, error)

    def test_runsWithStandardResult(self):
        """
        A L{runner.ErrorHolder} can run against the standard Python
        C{TestResult}.
        """
        result = pyunit.TestResult()
        self.holder.run(result)
        # An ErrorHolder reports its stored failure, so the run fails.
        self.assertFalse(result.wasSuccessful())
        self.assertEquals(1, result.testsRun)
class TestMalformedMethod(unittest.TestCase):
    """
    Test that trial manages when test methods don't have correct signatures.
    """
    class ContainMalformed(unittest.TestCase):
        """
        This TestCase holds malformed test methods that trial should handle.
        """
        # Extra (useless) positional argument.
        def test_foo(self, blah):
            pass
        # Missing even the ``self`` argument.
        def test_bar():
            pass
        # Decorating the zero-argument method; also malformed.
        test_spam = defer.deferredGenerator(test_bar)

    def _test(self, method):
        """
        Wrapper for one of the test method of L{ContainMalformed}.
        """
        stream = StringIO.StringIO()
        trialRunner = runner.TrialRunner(reporter.Reporter, stream=stream)
        test = TestMalformedMethod.ContainMalformed(method)
        result = trialRunner.run(test)
        self.failUnlessEqual(result.testsRun, 1)
        self.failIf(result.wasSuccessful())
        self.failUnlessEqual(len(result.errors), 1)

    def test_extraArg(self):
        """
        Test when the method has extra (useless) arguments.
        """
        self._test('test_foo')

    def test_noArg(self):
        """
        Test when the method doesn't have even self as argument.
        """
        self._test('test_bar')

    def test_decorated(self):
        """
        Test a decorated method also fails.
        """
        self._test('test_spam')
class DestructiveTestSuiteTestCase(unittest.TestCase):
    """
    Test for L{runner.DestructiveTestSuite}.
    """
    def test_basic(self):
        """
        The destructive test suite should run the tests normally.
        """
        called = []
        class MockTest(unittest.TestCase):
            def test_foo(test):
                called.append(True)
        test = MockTest('test_foo')
        result = reporter.TestResult()
        suite = runner.DestructiveTestSuite([test])
        self.assertEquals(called, [])
        suite.run(result)
        self.assertEquals(called, [True])
        # Destructive suites discard each test after running it.
        self.assertEquals(suite.countTestCases(), 0)

    def test_shouldStop(self):
        """
        Test the C{shouldStop} management: raising a C{KeyboardInterrupt} must
        interrupt the suite.
        """
        called = []
        class MockTest(unittest.TestCase):
            def test_foo1(test):
                called.append(1)
            def test_foo2(test):
                raise KeyboardInterrupt()
            def test_foo3(test):
                called.append(2)
        result = reporter.TestResult()
        loader = runner.TestLoader()
        loader.suiteFactory = runner.DestructiveTestSuite
        suite = loader.loadClass(MockTest)
        self.assertEquals(called, [])
        suite.run(result)
        self.assertEquals(called, [1])
        # The last test shouldn't have been run
        self.assertEquals(suite.countTestCases(), 1)

    def test_cleanup(self):
        """
        Checks that the test suite cleans up its tests during the run, so that
        it ends empty.
        """
        class MockTest(unittest.TestCase):
            def test_foo(test):
                pass
        test = MockTest('test_foo')
        result = reporter.TestResult()
        suite = runner.DestructiveTestSuite([test])
        self.assertEquals(suite.countTestCases(), 1)
        suite.run(result)
        self.assertEquals(suite.countTestCases(), 0)
class TestRunnerDeprecation(unittest.TestCase):
    class FakeReporter(reporter.Reporter):
        """
        Fake reporter that does *not* implement done() but *does* implement
        printErrors, separator, printSummary, stream, write and writeln
        without deprecations.
        """
        done = None
        separator = None
        stream = None
        def printErrors(self, *args):
            pass
        def printSummary(self, *args):
            pass
        def write(self, *args):
            pass
        def writeln(self, *args):
            pass
    def test_reporterDeprecations(self):
        """
        The runner emits a warning if it is using a result that doesn't
        implement 'done'.
        """
        trialRunner = runner.TrialRunner(None)
        result = self.FakeReporter()
        # Force the runner to use our done()-less reporter.
        trialRunner._makeResult = lambda: result
        def f():
            # We have to use a pyunit test, otherwise we'll get deprecation
            # warnings about using iterate() in a test.
            trialRunner.run(pyunit.TestCase('id'))
        self.assertWarns(
            DeprecationWarning,
            "%s should implement done() but doesn't. Falling back to "
            "printErrors() and friends." % reflect.qual(result.__class__),
            __file__, f)
| {
"content_hash": "49fee0046286648a7b262c3b9965804f",
"timestamp": "",
"source": "github",
"line_count": 934,
"max_line_length": 80,
"avg_line_length": 31.61884368308351,
"alnum_prop": 0.6016185832317487,
"repo_name": "GetSomeBlocks/ServerStatus",
"id": "3f36e956063a55bb43636d6c8b1ac719b8091f13",
"size": "29672",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "resources/lib/twisted/twisted/trial/test/test_runner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "930"
},
{
"name": "C",
"bytes": "293000"
},
{
"name": "C#",
"bytes": "9664"
},
{
"name": "CSS",
"bytes": "24716"
},
{
"name": "D",
"bytes": "542"
},
{
"name": "HTML",
"bytes": "374176"
},
{
"name": "Java",
"bytes": "206"
},
{
"name": "Objective-C",
"bytes": "9421"
},
{
"name": "Python",
"bytes": "8744725"
},
{
"name": "Ruby",
"bytes": "6773"
},
{
"name": "Shell",
"bytes": "13600"
}
],
"symlink_target": ""
} |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 7, transform = "Integration", sigma = 0.0, exog_count = 100, ar_order = 12); | {
"content_hash": "04c802d85ac7199aae1c8d3598fd54df",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 172,
"avg_line_length": 38.857142857142854,
"alnum_prop": 0.7132352941176471,
"repo_name": "antoinecarme/pyaf",
"id": "333c9d868d46976848a164e36fe0340cabcc06c7",
"size": "272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_Integration/trend_ConstantTrend/cycle_7/ar_12/test_artificial_128_Integration_ConstantTrend_7_12_100.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
import numpy as np
import csv
from config import FLAGS
from sklearn.cross_validation import train_test_split
class DataSet(object):
    """In-memory data-set serving (examples, labels) mini-batches.

    Labels are optional; when absent, batches contain examples only.
    The data is reshuffled every time an epoch boundary is crossed.
    """
    def __init__(self, examples, labels=None):
        if labels is not None:
            assert len(examples) == len(labels), (
                'examples.shape: %s labels.shape: %s'
                % (examples.shape, labels.shape))
        self._examples = examples
        self._labels = labels
        self._num_examples = examples.shape[0]
        self._epochs_completed = 0
        self._index_in_epoch = 0
    @property
    def examples(self):
        return self._examples
    @property
    def labels(self):
        return self._labels
    @property
    def num_examples(self):
        return self._num_examples
    @property
    def epochs_completed(self):
        return self._epochs_completed
    @property
    def index_in_epoch(self):
        return self._index_in_epoch
    def next_batch(self, batch_size):
        """Return the next `batch_size` examples from this data set."""
        begin = self._index_in_epoch
        self._index_in_epoch += batch_size
        if self._index_in_epoch > self._num_examples:
            # Epoch exhausted: reshuffle (examples and labels in lockstep)
            # and restart from the beginning.
            self._epochs_completed += 1
            order = np.arange(self._num_examples)
            np.random.shuffle(order)
            self._examples = self._examples[order]
            if self._labels is not None:
                self._labels = self._labels[order]
            begin = 0
            self._index_in_epoch = batch_size
            assert batch_size <= self._num_examples
        end = self._index_in_epoch
        if self._labels is None:
            return self._examples[begin:end]
        return self._examples[begin:end], self._labels[begin:end]
class DataSetPreTraining(object):
    """Unlabeled data-set used for layer-wise pre-training.

    On construction the values are clamped into the configured
    [FLAGS.zero_bound, FLAGS.one_bound] range; mini-batches are served
    with a reshuffle at every epoch boundary.
    """
    def __init__(self, examples):
        self._num_examples = examples.shape[0]
        self._examples = examples
        # Clamp the data into the configured activation bounds (in place).
        self._examples[self._examples < FLAGS.zero_bound] = FLAGS.zero_bound
        self._examples[self._examples > FLAGS.one_bound] = FLAGS.one_bound
        self._epochs_completed = 0
        self._index_in_epoch = 0
    @property
    def examples(self):
        return self._examples
    @property
    def num_examples(self):
        return self._num_examples
    @property
    def num_batches(self):
        # NOTE(review): this is true division under Python 3 — callers that
        # expect an integer batch count should confirm.
        return self.num_examples / FLAGS.batch_size
    @property
    def epochs_completed(self):
        return self._epochs_completed
    @property
    def index_in_epoch(self):
        return self._index_in_epoch
    # """ TODO: Under implementation """
    # def same_batch(self):
    #     pass
    def next_batch(self, batch_size):
        """Return the next `batch_size` examples from this data set."""
        start = self._index_in_epoch
        self._index_in_epoch += batch_size
        if self._index_in_epoch > self._num_examples:
            # Finished epoch
            self._epochs_completed += 1
            # Shuffle the data.
            perm = np.arange(self._num_examples)
            np.random.shuffle(perm)
            # BUG FIX: the shuffled data used to be assigned to
            # 'self._images' (a copy/paste slip from an image data-set),
            # so the examples were never actually reshuffled.
            self._examples = self._examples[perm]
            # Start next epoch
            start = 0
            self._index_in_epoch = batch_size
            assert batch_size <= self._num_examples
        end = self._index_in_epoch
        return self._examples[start:end]
def load_data_sets(input_data, labels, split_only=True, valid_set=False):
    """Split a labeled data-set into DataSet objects for fine-tuning.

    Args:
        input_data: array of examples.
        labels: matching array of labels.
        split_only: when False, additionally expose the full data-set
            as the `.all` attribute.
        valid_set: when True, use an 80/20 train/test split and carve a
            5% validation set off the training part; otherwise 70/30
            with `.validation` set to None.

    Returns:
        An object with `.train`, `.test`, `.validation` (possibly None)
        and, when split_only is False, `.all` DataSet attributes.
    """
    class DataSets(object):
        pass
    bundle = DataSets()
    print("\nSplitting to Train & Test sets for Finetuning")
    if valid_set:
        train_x, test_x, train_y, test_y = \
            train_test_split(input_data, labels, test_size=0.2)
        train_x, valid_x, train_y, valid_y = \
            train_test_split(train_x, train_y, test_size=0.05)
        bundle.validation = DataSet(valid_x, valid_y)
    else:
        train_x, test_x, train_y, test_y = \
            train_test_split(input_data, labels, test_size=0.3)
        bundle.validation = None
    bundle.train = DataSet(train_x, train_y)
    bundle.test = DataSet(test_x, test_y)
    if not split_only:
        bundle.all = DataSet(input_data, labels)
    return bundle
def load_data_sets_pretraining(input_data, split_only=True, valid_set=False):
    """Split an unlabeled data-set into DataSetPreTraining objects.

    Pre-training data carries no labels: the input is split into train,
    test and (optionally) validation subsets, each wrapped in a
    DataSetPreTraining so it can be served in batches.

    Args:
        input_data: the data-set to be split.
        split_only: when False, also expose the whole input as `.all`.
        valid_set: when True, use an 80/20 train/test split plus a 5%
            validation carve-out; otherwise 70/30 with `.validation`
            set to None.
    """
    class DataSets(object):
        pass
    bundle = DataSets()
    print("\nSplitting to Train & Test sets for pre-training")
    if valid_set:
        train_x, test_x = train_test_split(input_data, test_size=0.20)
        train_x, valid_x = train_test_split(train_x, test_size=0.05)
        bundle.validation = DataSetPreTraining(valid_x)
    else:
        train_x, test_x = train_test_split(input_data, test_size=0.3)
        bundle.validation = None
    if not split_only:
        bundle.all = DataSetPreTraining(input_data)
    bundle.train = DataSetPreTraining(train_x)
    bundle.test = DataSetPreTraining(test_x)
    return bundle
'''
""" TODO: ADD more noise functions such as Gaussian noise etc. """
def _add_noise(x, ratio, n_type='MN'):
""" Noise adding (or input corruption)
This function adds noise to the given dataset.
Args:
x : The input dataset for the noise to be applied (numpy array)
ratio: The percentage of the data affected by the noise addition
n_type: The type of noise to be applied.
Choices: MN (masking noise), SP (salt-and-pepper noise)
"""
'''
def fill_feed_dict_dae(data_set, input_pl, batch_size=None):
    """Build the feed_dict for one denoising-autoencoder training step.

    Maps the input placeholder to the next batch of examples, falling
    back to FLAGS.batch_size when no explicit batch_size is given.
    """
    if batch_size is None:
        batch_size = FLAGS.batch_size
    return {input_pl: data_set.next_batch(batch_size)}
def fill_feed_dict(data_set, input_pl, labels_pl, batch_size=None):
    """Fill the feed_dict for one supervised training step.

    A feed_dict takes the form of:
    feed_dict = {
        <placeholder>: <tensor of values to be passed for placeholder>,
        ....
    }

    Args:
        data_set: the set of examples and labels.
        input_pl: the examples placeholder.
        labels_pl: the labels placeholder.
        batch_size: optional override; defaults to FLAGS.batch_size.

    Returns:
        The feed dictionary mapping from placeholders to values.
    """
    if batch_size is None:
        batch_size = FLAGS.batch_size
    examples, labels = data_set.next_batch(batch_size)
    return {input_pl: examples, labels_pl: labels}
def normalize_data(x, transpose=False):
    """Min-max normalize each feature (row) of x into [0, 1].

    Args:
        x: 2-D array-like; each row is treated as one feature (after the
            optional transpose).
        transpose: when True, transpose x first so that input columns
            become the normalized features.

    Returns:
        numpy array of the normalized features, transposed back
        (samples as rows, features as columns).
    """
    x_norm = []
    if transpose:
        x = np.transpose(x)
        print("\nData Transposed.")
    # FIX: this was a Python 2 `print` statement (a SyntaxError under
    # Python 3) in a module that otherwise uses print() calls.
    print("\nNormalizing", len(x), "Features...")
    for i in range(len(x)):
        # Per-feature min-max scaling; NaNs appear when a feature is constant
        # (max == min), so warn about them.
        x_norm.append((x[i] - np.min(x[i])) / np.float32(np.max(x[i]) - np.min(x[i])))
        if np.isnan(x_norm[i]).any():
            print("NAN at:", i)
    # Alternative (norm='l1' or 'l2' or 'max'):
    #   from sklearn.preprocessing import normalize
    #   x_norm = normalize(input_data, axis=??, norm='??')
    print("Normalization: Done. Transposing...")
    return np.asarray(np.transpose(x_norm))
def label_metadata(label_matrix, label_col):
    """Map the classes of a label column to consecutive integer ids.

    Args:
        label_matrix: pandas DataFrame holding the labels.
        label_col: column name, or an integer index into
            label_matrix.columns.

    Returns:
        (mapped_labels, label_map): a 1-D numpy array of integer class
        ids, and a numpy array pairing each original class with its id.
    """
    # Check whether the column value is given as index (number) or name (string)
    try:
        label_col = int(label_col)
        # If given as number, take the name of the column out of it
        label_col = label_matrix.columns[label_col]
    except ValueError:
        pass
    import pandas as pd
    # Get the unique classes in the given column, and how many of them are there
    unique_classes = pd.unique(label_matrix[label_col].ravel())
    #num_classes = unique_classes.shape[0]
    # Map the unique n classes with a number from 0 to n
    label_map = pd.DataFrame({label_col: unique_classes, label_col+'_id':range(len(unique_classes))})
    # Replace the given column's values with the mapped equivalent
    # NOTE(review): DataFrame.replace with nested to-replace/value lists
    # relies on pandas' list-to-list replacement semantics — verify this
    # still behaves as intended on the pinned pandas version.
    mapped_labels = label_matrix.replace(label_map[[0]].values.tolist(), label_map[[1]].values.tolist())
    # Return the mapped labels as numpy list and the label map (unique classes and number can be obtained from map)
    return np.reshape(mapped_labels[[label_col]].values, (mapped_labels.shape[0],)), np.asarray(label_map) #, unique_classes, num_classes
def write_csv(filename, data, sep='\t'):
    """Write rows of `data` to `filename` as delimited text.

    Args:
        filename: destination file path.
        data: iterable of rows, each row an iterable of fields.
        sep: field delimiter. FIX: this parameter used to be ignored —
            the delimiter was hard-coded to a tab; it is now honored.
    """
    with open(filename, 'w') as fp:
        writer = csv.writer(fp, delimiter=sep)
        writer.writerows(data)
| {
"content_hash": "2ffc1cafc0caeb9b78a6133f0693e750",
"timestamp": "",
"source": "github",
"line_count": 313,
"max_line_length": 137,
"avg_line_length": 33.36741214057508,
"alnum_prop": 0.6115472998851015,
"repo_name": "glrs/StackedDAE",
"id": "bb1db8863c5582f45e866b7731ebb360ad5ea4ee",
"size": "10444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Train_SDAE/tools/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "103363"
},
{
"name": "R",
"bytes": "8944"
}
],
"symlink_target": ""
} |
import socket
import re
import sys
from multiprocessing import Process
# 设置静态文件根目录
HTML_ROOT_DIR = "./html"
# 设置脚本文件根目录
WSGI_PYTHON_DIR = "./wsgipython"
class HTTPServer(object):
    """A tiny multi-process HTTP server serving static files from
    HTML_ROOT_DIR and WSGI-style Python scripts (one process per client).
    """
    def __init__(self):
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without "address already in use" errors.
        self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    def start(self):
        """Accept clients forever, handling each one in a child process."""
        self.server_socket.listen(128)
        while True:
            client_socket, client_address = self.server_socket.accept()
            # "用户连接上了" == "a user connected" (runtime string kept as-is).
            print("[%s, %s]用户连接上了" % client_address)
            handle_client_process = Process(target=self.handle_client, args=(client_socket,))
            handle_client_process.start()
            # The child owns its duplicated socket; close the parent's copy.
            client_socket.close()
    def start_response(self, status, headers):
        """WSGI-style start_response callback passed to script applications.

        Example arguments:
        status = "200 OK"
        headers = [
            ("Content-Type", "text/plain")
        ]
        Stores the rendered header block on self.response_headers.
        """
        response_headers = "HTTP/1.1 " + status + "\r\n"
        for header in headers:
            response_headers += "%s: %s\r\n" % header
        self.response_headers = response_headers
    def handle_client(self, client_socket):
        """Handle a single client request (runs in the child process)."""
        # Receive the client's request data.
        request_data = client_socket.recv(1024)
        print("request data:", request_data)
        request_lines = request_data.splitlines()
        for line in request_lines:
            print(line)
        # Parse the request message; the start line looks like
        # 'GET / HTTP/1.1'
        request_start_line = request_lines[0]
        # Extract the requested file name and HTTP method.
        print("*" * 10)
        print(request_start_line.decode("utf-8"))
        file_name = re.match(r"\w+ +(/[^ ]*) ", request_start_line.decode("utf-8")).group(1)
        method = re.match(r"(\w+) +/[^ ]* ", request_start_line.decode("utf-8")).group(1)
        # Dynamic requests, e.g. "/ctime.py" or "/sayhello.py":
        if file_name.endswith(".py"):
            try:
                # Strip the leading '/' and trailing '.py' to get a module name.
                m = __import__(file_name[1:-3])
            except Exception:
                self.response_headers = "HTTP/1.1 404 Not Found\r\n"
                response_body = "not found"
            else:
                env = {
                    "PATH_INFO": file_name,
                    "METHOD": method
                }
                # The script fills self.response_headers via start_response.
                response_body = m.application(env, self.start_response)
            response = self.response_headers + "\r\n" + response_body
        else:
            if "/" == file_name:
                file_name = "/index.html"
            # Open the static file and read its contents.
            try:
                file = open(HTML_ROOT_DIR + file_name, "rb")
            except IOError:
                response_start_line = "HTTP/1.1 404 Not Found\r\n"
                response_headers = "Server:Python server\r\n"
                response_body = "The file is not found!"
            else:
                file_data = file.read()
                file.close()
                # Build the response data.
                response_start_line = "HTTP/1.1 200 OK\r\n"
                response_headers = "Server:Python server\r\n"
                response_body = file_data.decode("utf-8")
            response = response_start_line + response_headers + "\r\n" + response_body
        print("response data:", response)
        # Send the response back to the client.
        client_socket.send(bytes(response, "utf-8"))
        # Close the client connection.
        client_socket.close()
    def bind(self, port):
        # Bind on all interfaces at the given port.
        self.server_socket.bind(("", port))
def main():
    """Entry point: expose the WSGI script directory and serve on port 8000."""
    # Insert near the front of sys.path so the scripts shadow
    # same-named standard-library modules.
    sys.path.insert(1, WSGI_PYTHON_DIR)
    http_server = HTTPServer()
    http_server.bind(8000)
    http_server.start()
if __name__ == "__main__":
main()
| {
"content_hash": "d2e99ea00095b7e1c7bd053578b38321",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 93,
"avg_line_length": 31.194915254237287,
"alnum_prop": 0.5281173594132029,
"repo_name": "Ztiany/CodeRepository",
"id": "61e2fd64ec2615160df1a94d2ae1342ee639e336",
"size": "3901",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Python/Python3-Base/13_Network/HttpServer/04_dynamic_web_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "104421"
},
{
"name": "C++",
"bytes": "43060"
},
{
"name": "CMake",
"bytes": "7529"
},
{
"name": "CSS",
"bytes": "9794"
},
{
"name": "Groovy",
"bytes": "193822"
},
{
"name": "HTML",
"bytes": "239910"
},
{
"name": "Java",
"bytes": "3587367"
},
{
"name": "JavaScript",
"bytes": "294734"
},
{
"name": "Kotlin",
"bytes": "203000"
},
{
"name": "Makefile",
"bytes": "15406"
},
{
"name": "Python",
"bytes": "17218"
},
{
"name": "Shell",
"bytes": "1356"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from mock import patch
import sure # noqa
from moto.server import main, create_backend_app, DomainDispatcherApplication
def test_wrong_arguments():
    """main() must exit when given the wrong number of arguments."""
    try:
        main(["name", "test1", "test2", "test3"])
    except SystemExit:
        pass
    else:
        raise AssertionError("main() when called with the incorrect number of args"
                             " should raise a system exit")
@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
    """With only a service given, the default host and port are used."""
    main(["s3"])
    host, port = run_simple.call_args[0][:2]
    host.should.equal("127.0.0.1")
    port.should.equal(5000)
@patch('moto.server.run_simple')
def test_port_argument(run_simple):
    """--port overrides the default listening port."""
    main(["s3", "--port", "8080"])
    host, port = run_simple.call_args[0][:2]
    host.should.equal("127.0.0.1")
    port.should.equal(8080)
def test_domain_dispatched():
    """The dispatcher picks the backend app based on the Host header."""
    dispatcher = DomainDispatcherApplication(create_backend_app)
    backend_app = dispatcher.get_application(
        {"HTTP_HOST": "email.us-east1.amazonaws.com"})
    view_names = list(backend_app.view_functions.keys())
    view_names[0].should.equal('EmailResponse.dispatch')
def test_domain_dispatched_with_service():
    # If we pass a particular service, always return that.
    dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
    backend_app = dispatcher.get_application(
        {"HTTP_HOST": "s3.us-east1.amazonaws.com"})
    view_names = set(backend_app.view_functions)
    view_names.should.contain('ResponseObject.key_response')
| {
"content_hash": "4eb171375ba5ae614f3cb92916264c47",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 78,
"avg_line_length": 32.744680851063826,
"alnum_prop": 0.6770630279402209,
"repo_name": "whummer/moto",
"id": "bd00b17c34d4b02d5b85d461c3a7ff316ae006ed",
"size": "1539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_core/test_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "443"
},
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "1148"
},
{
"name": "Python",
"bytes": "6015085"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Scala",
"bytes": "782"
},
{
"name": "Shell",
"bytes": "797"
}
],
"symlink_target": ""
} |
from pathlib import Path
from tempfile import TemporaryDirectory
from unittest.mock import patch
import pytest
from pythonfmu.builder import FmuBuilder
from pythonfmu.deploy import deploy
PYTHON_SLAVE = Path(__file__).parent / "slaves/pythonslave.py"
@pytest.mark.parametrize("test_manager", [None, "pip", "conda"])
@pytest.mark.parametrize(
    "requirements, test_requirements, expected", [
        (None, None, ValueError),
        (None, "requirements.txt", ValueError),
        (None, "environment.yaml", ValueError),
        ("requirements.txt", None, "pip"),
        ("environment.yaml", None, "conda"),
        ("environment.yml", None, "conda"),
        ("req.txt", "req.txt", "pip"),
        ("env.yml", "env.yml", "conda"),
    ]
)
def test_deploy(tmp_path, test_manager, requirements, test_requirements, expected):
    """deploy() must infer the package manager from the bundled
    environment file, and fail when the FMU carries none."""
    dummy_requirements = """numpy=1.16
scipy
"""
    if requirements is None:
        fmu = FmuBuilder.build_FMU(PYTHON_SLAVE, dest=tmp_path)
    else:
        with TemporaryDirectory() as tmp_dir:
            req_file = Path(tmp_dir) / requirements
            req_file.write_text(dummy_requirements)
            fmu = FmuBuilder.build_FMU(PYTHON_SLAVE, dest=tmp_path, project_files=[req_file, ])
    assert fmu.exists()
    expects_failure = isinstance(expected, type) and issubclass(expected, Exception)
    with patch("subprocess.run") as run:
        if expects_failure:
            with pytest.raises(expected):
                deploy(fmu, environment=test_requirements, package_manager=test_manager)
            run.assert_not_called()
        else:
            deploy(fmu, environment=test_requirements, package_manager=test_manager)
            run.assert_called_once()
            assert (test_manager or expected) in " ".join(run.call_args[0][0])
| {
"content_hash": "8b20dc10d7572220a9f04cace463e8d1",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 104,
"avg_line_length": 37.104166666666664,
"alnum_prop": 0.6513194834362718,
"repo_name": "joshua-cogliati-inl/raven",
"id": "01f46a022a0ad79ec6a96c95688aa857f94ed3fe",
"size": "1781",
"binary": false,
"copies": "2",
"ref": "refs/heads/devel",
"path": "ravenframework/contrib/PythonFMU/pythonfmu/tests/test_deploy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1556080"
},
{
"name": "Batchfile",
"bytes": "1095"
},
{
"name": "C",
"bytes": "148504"
},
{
"name": "C++",
"bytes": "48279546"
},
{
"name": "CMake",
"bytes": "9998"
},
{
"name": "Jupyter Notebook",
"bytes": "84202"
},
{
"name": "MATLAB",
"bytes": "202335"
},
{
"name": "Makefile",
"bytes": "2399"
},
{
"name": "Perl",
"bytes": "1297"
},
{
"name": "Python",
"bytes": "6952659"
},
{
"name": "R",
"bytes": "67"
},
{
"name": "SWIG",
"bytes": "8574"
},
{
"name": "Shell",
"bytes": "124279"
},
{
"name": "TeX",
"bytes": "479725"
}
],
"symlink_target": ""
} |
"""
cloak.py - Simple file encryption/compression utility
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
import sys
import zlib
from optparse import OptionError
from optparse import OptionParser
def hideAscii(data):
    """Return `data` with every 7-bit character XOR-ed with 127.

    Characters with ordinal >= 128 pass through untouched. The transform
    is its own inverse, so the same function both masks and unmasks.
    """
    # FIX: collect pieces in a list and join once instead of the original
    # index-based loop concatenating onto a string (non-idiomatic, worst-case
    # quadratic, and it relied on the Python-2-only xrange()).
    pieces = []
    for ch in data:
        if ord(ch) < 128:
            pieces.append(chr(ord(ch) ^ 127))
        else:
            pieces.append(ch)
    return "".join(pieces)
def cloak(inputFile=None, data=None):
    """Compress `data` (or the contents of `inputFile`) and mask it."""
    if data is None:
        with open(inputFile, "rb") as f:
            data = f.read()
    compressed = zlib.compress(data)
    return hideAscii(compressed)
def decloak(inputFile=None, data=None):
if data is None:
with open(inputFile, "rb") as f:
data = f.read()
try:
data = zlib.decompress(hideAscii(data))
except:
print 'ERROR: the provided input file \'%s\' does not contain valid cloaked content' % inputFile
sys.exit(1)
finally:
f.close()
return data
def main():
    """Command-line entry point: cloak or decloak the given input file."""
    usage = '%s [-d] -i <input file> [-o <output file>]' % sys.argv[0]
    parser = OptionParser(usage=usage, version='0.1')
    try:
        parser.add_option('-d', dest='decrypt', action="store_true", help='Decrypt')
        parser.add_option('-i', dest='inputFile', help='Input file')
        parser.add_option('-o', dest='outputFile', help='Output file')
        (args, _) = parser.parse_args()
        if not args.inputFile:
            parser.error('Missing the input file, -h for help')
    except (OptionError, TypeError), e:
        parser.error(e)
    if not os.path.isfile(args.inputFile):
        print 'ERROR: the provided input file \'%s\' is non existent' % args.inputFile
        sys.exit(1)
    if not args.decrypt:
        data = cloak(args.inputFile)
    else:
        data = decloak(args.inputFile)
    # Default output name: append '_' when cloaking, strip it when decloaking.
    if not args.outputFile:
        if not args.decrypt:
            args.outputFile = args.inputFile + '_'
        else:
            args.outputFile = args.inputFile[:-1]
    f = open(args.outputFile, 'wb')
    f.write(data)
    f.close()
if __name__ == '__main__':
    main()
| {
"content_hash": "ce1cf663c3aeb83ed9663d5d616f5cc3",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 104,
"avg_line_length": 25.927710843373493,
"alnum_prop": 0.595724907063197,
"repo_name": "michaelhidalgo/7WCSQ",
"id": "b93583711259471743f03d8ef1a0be3673beb974",
"size": "2175",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Tools/SQLMap/sqlmap/extra/cloak/cloak.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "13307"
},
{
"name": "C++",
"bytes": "1641"
},
{
"name": "Objective-C",
"bytes": "516"
},
{
"name": "PLpgSQL",
"bytes": "536"
},
{
"name": "Perl",
"bytes": "2136"
},
{
"name": "Python",
"bytes": "1630594"
},
{
"name": "Shell",
"bytes": "9683"
}
],
"symlink_target": ""
} |
"""
Pickling of python instances
"""
from concurrent.core.components.component import Component, implements
from concurrent.core.config.config import IntItem, ConfigItem
from concurrent.core.application.api import IPickler
from concurrent.core.util.cryptohelper import CryptoHelper
from concurrent.core.transport.pyjsonrpc.rpcerror import JsonRpcError, jsonrpcerrors
from concurrent.core.exceptions.baseerror import ErrorCodeMap
# Import cPickle over pickle (python 2.7+ and 3+ compatible)
try: import cPickle as pickle
except ImportError: import pickle
import gzip
import base64
class PickleException(JsonRpcError):
    """JSON-RPC error raised when serializing (pickling) an object fails."""
    code = ErrorCodeMap.PickleException
    message = u"Failed to pickle a remote object"
# Register the error so the JSON-RPC layer can map its code back to the class.
jsonrpcerrors[PickleException.code] = PickleException
class UnpickleException(JsonRpcError):
    """JSON-RPC error raised when deserializing (unpickling) an object fails."""
    code = ErrorCodeMap.UnpickleException
    message = u"Failed to unpickle a remote object"
# Register the error so the JSON-RPC layer can map its code back to the class.
jsonrpcerrors[UnpickleException.code] = UnpickleException
class Pickler(Component):
    implements(IPickler)
    """
    Class responsible for pickling and unpickling objects: gzip-compressed
    files, plain strings, and base64-wrapped AES-encrypted strings.
    """
    pickle_protocol = IntItem('pickler', 'protocol', pickle.HIGHEST_PROTOCOL,
        """Protocol used when pickling, by default pickle.HIGHEST_PROTOCOL""")
    secret = ConfigItem('pickler', 'secret', 'JhTv535Vg385V',
        """Default salt used on decrypting encrypting a pickle""")
    # salt size in bytes
    salt_size = IntItem('pickler', 'salt_size', 16,
        """Size of the salt used in the encryption process""")
    # number of iterations in the key generation
    num_iterations = IntItem('pickler', 'num_iterations', 20,
        """Number of iterations used in the key generation""")
    # the size multiple required for AES
    aes_padding = IntItem('pickler', 'aes_padding', 16,
        """Padding used for AES encryption""")
    def __init__(self):
        super(Pickler, self).__init__()
        self.crypto_helper = CryptoHelper(self.salt_size, self.num_iterations, self.aes_padding)
        if self.secret == Pickler.secret.default.decode('utf-8'):
            self.log.warn("Pickler using default secret, please setup you own to avoid security vulnerabilities!")
    def pickle_f(self, fname, obj):
        """
        Pickle an object into a gzip-compressed file.
        """
        # FIX: close the gzip handle even when dump() raises (the old code
        # leaked it), and narrow the bare 'except' so KeyboardInterrupt /
        # SystemExit are not swallowed.
        try:
            with gzip.open(fname, "wb") as f:
                pickle.dump(obj, f, protocol=self.pickle_protocol)
        except Exception:
            raise PickleException()
    def unpickle_f(self, fname):
        """
        Unpickle an object from a gzip-compressed file.
        """
        try:
            with gzip.open(fname, "rb") as f:
                return pickle.load(f)
        except Exception:
            raise UnpickleException()
    def pickle_s(self, obj):
        """
        Pickle an object and return the pickled string.
        """
        try:
            return pickle.dumps(obj, protocol=self.pickle_protocol)
        except Exception:
            raise PickleException()
    def pickle_encode_s(self, obj):
        """
        Pickle an object, encrypt it with the configured secret and return
        it base64-encoded.
        """
        try:
            return base64.b64encode(self.crypto_helper.encrypt(self.pickle_s(obj), self.secret))
        except Exception:
            raise PickleException()
    def unpickle_s(self, pickle_string):
        """
        Unpickle a string and return the object.
        """
        try:
            return pickle.loads(pickle_string)
        except Exception:
            raise UnpickleException()
    def unpickle_decode_s(self, pickle_string):
        """
        Base64-decode and decrypt a string, then unpickle the object.
        """
        try:
            return self.unpickle_s(self.crypto_helper.decrypt(base64.b64decode(pickle_string), self.secret))
        except Exception:
            raise UnpickleException()
| {
"content_hash": "7f1073c6ee0e4f68ee58a3e877a8ac83",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 114,
"avg_line_length": 33.16814159292036,
"alnum_prop": 0.6424759871931697,
"repo_name": "moritz-wundke/Concurrent",
"id": "3f3492a41a88269612317c945779c86aff2294f8",
"size": "3772",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "concurrent/core/application/pickler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "93243"
},
{
"name": "Python",
"bytes": "511887"
},
{
"name": "Shell",
"bytes": "7035"
}
],
"symlink_target": ""
} |
"""
Demonstration of accessing wrapped functions for testing.
"""
from __future__ import print_function
from crochet import setup, run_in_reactor
setup()
@run_in_reactor
def add(x, y):
    """Add two values in the reactor thread.

    Because of @run_in_reactor, callers receive a crochet EventualResult
    wrapping the sum rather than the sum itself.
    """
    return x + y
if __name__ == '__main__':
print("add() returns EventualResult:")
print(" ", add(1, 2))
print("add.wrapped_function() returns result of underlying function:")
print(" ", add.wrapped_function(1, 2))
| {
"content_hash": "d3266767128bee2c08d34b7a75fc0660",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 22.473684210526315,
"alnum_prop": 0.6463700234192038,
"repo_name": "wrmsr/crochet",
"id": "cded7dce569864347514b9fe261569927931cb89",
"size": "445",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/testing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "128275"
}
],
"symlink_target": ""
} |
import functools
import re
from nova import availability_zones
from nova import context
from nova import db
from nova import exception
from nova.network import model as network_model
from nova.openstack.common import log as logging
from nova.openstack.common import memorycache
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
LOG = logging.getLogger(__name__)
# NOTE(vish): cache mapping for one week
_CACHE_TIME = 7 * 24 * 60 * 60
_CACHE = None
def memoize(func):
    """Cache func(context, reqid) results in the shared memory cache.

    The cache key combines the wrapped function's name with the request
    id; entries live for _CACHE_TIME seconds. The cache client is created
    lazily on first use.
    """
    @functools.wraps(func)
    def memoizer(context, reqid):
        global _CACHE
        if not _CACHE:
            _CACHE = memorycache.get_client()
        cache_key = "%s:%s" % (func.__name__, reqid)
        cached = _CACHE.get(cache_key)
        if cached is not None:
            return cached
        fresh = func(context, reqid)
        _CACHE.set(cache_key, fresh, time=_CACHE_TIME)
        return fresh
    return memoizer
def reset_cache():
    """Drop the module-level cache client so it is rebuilt on next use."""
    global _CACHE
    _CACHE = None
def image_type(image_type):
    """Converts to a three letter image type.

    aki, kernel => aki
    ari, ramdisk => ari
    anything else => ami
    """
    if image_type in ('aki', 'ari'):
        return image_type
    aliases = {'kernel': 'aki', 'ramdisk': 'ari'}
    return aliases.get(image_type, 'ami')
def resource_type_from_id(context, resource_id):
    """Get resource type by ID

    Returns a string representation of the Amazon resource type, if known.
    Returns None on failure.

    :param context: context under which the method is called
    :param resource_id: resource_id to evaluate
    """
    prefix = resource_id.split('-')[0]
    return {
        'i': 'instance',
        'r': 'reservation',
        'vol': 'volume',
        'snap': 'snapshot',
        'ami': 'image',
        'aki': 'image',
        'ari': 'image',
    }.get(prefix)
@memoize
def id_to_glance_id(context, image_id):
    """Convert an internal (db) id to a glance id."""
    # Results are cached for _CACHE_TIME via @memoize.
    return db.s3_image_get(context, image_id)['uuid']
@memoize
def glance_id_to_id(context, glance_id):
    """Convert a glance id to an internal (db) id."""
    if glance_id is None:
        return
    try:
        return db.s3_image_get_by_uuid(context, glance_id)['id']
    except exception.NotFound:
        # First time this glance id is seen: create the mapping row.
        return db.s3_image_create(context, glance_id)['id']
def ec2_id_to_glance_id(context, ec2_id):
    """Convert an ec2 image id (e.g. 'ami-...') to a glance image uuid."""
    image_id = ec2_id_to_id(ec2_id)
    return id_to_glance_id(context, image_id)
def glance_id_to_ec2_id(context, glance_id, image_type='ami'):
    """Convert a glance image uuid to an ec2 image id of the given type."""
    # NOTE: the 'image_type' parameter shadows the module-level image_type()
    # function inside this body (the function is not needed here).
    image_id = glance_id_to_id(context, glance_id)
    return image_ec2_id(image_id, image_type=image_type)
def ec2_id_to_id(ec2_id):
    """Convert an ec2 ID (i-[base 16 number]) to an instance id (int)."""
    hex_part = ec2_id.split('-')[-1]
    try:
        return int(hex_part, 16)
    except ValueError:
        raise exception.InvalidEc2Id(ec2_id=ec2_id)
def image_ec2_id(image_id, image_type='ami'):
    """Returns image ec2_id using id and three letter type."""
    fmt = '%s-%%08x' % image_type
    return id_to_ec2_id(image_id, template=fmt)
def get_ip_info_for_instance_from_nw_info(nw_info):
    """Summarize fixed v4/v6 and floating addresses from network info."""
    fixed = nw_info.fixed_ips()
    return {
        'fixed_ips': [ip['address'] for ip in fixed if ip['version'] == 4],
        'fixed_ip6s': [ip['address'] for ip in fixed if ip['version'] == 6],
        'floating_ips': [ip['address'] for ip in nw_info.floating_ips()],
    }
def get_ip_info_for_instance(context, instance):
    """Return a dictionary of IP information for an instance."""
    info_cache = instance['info_cache'] or {}
    # An empty/missing cache entry must hydrate from [] rather than None.
    cached_nwinfo = info_cache.get('network_info') or []
    nw_info = network_model.NetworkInfo.hydrate(cached_nwinfo)
    return get_ip_info_for_instance_from_nw_info(nw_info)
def get_availability_zone_by_host(host, conductor_api=None):
    """Look up the availability zone for *host* under an admin context."""
    admin_ctxt = context.get_admin_context()
    return availability_zones.get_host_availability_zone(
        admin_ctxt, host, conductor_api)
def id_to_ec2_id(instance_id, template='i-%08x'):
    """Convert an instance ID (int) to an ec2 ID (i-[base 16 number])."""
    numeric_id = int(instance_id)
    return template % numeric_id
def id_to_ec2_inst_id(instance_id):
    """Get or create an ec2 instance ID (i-[base 16 number]) from uuid."""
    if instance_id is None:
        return None
    # Plain integers format directly; uuids need the db-backed mapping.
    if not uuidutils.is_uuid_like(instance_id):
        return id_to_ec2_id(instance_id)
    ctxt = context.get_admin_context()
    int_id = get_int_id_from_instance_uuid(ctxt, instance_id)
    return id_to_ec2_id(int_id)
def ec2_inst_id_to_uuid(context, ec2_id):
    """Convert an ec2 instance id to the instance uuid."""
    return get_instance_uuid_from_int_id(context, ec2_id_to_id(ec2_id))
@memoize
def get_instance_uuid_from_int_id(context, int_id):
    """Map an integer ec2 instance id back to the instance uuid (cached)."""
    return db.get_instance_uuid_by_ec2_id(context, int_id)
def id_to_ec2_snap_id(snapshot_id):
    """Get or create an ec2 snapshot ID (snap-[base 16 number]) from uuid.

    Non-uuid ids are assumed to already be integers and are formatted
    directly.
    """
    # NOTE: the docstring previously claimed this produced a volume
    # (vol-) id; it has always produced snapshot (snap-) ids.
    if uuidutils.is_uuid_like(snapshot_id):
        ctxt = context.get_admin_context()
        int_id = get_int_id_from_snapshot_uuid(ctxt, snapshot_id)
        return id_to_ec2_id(int_id, 'snap-%08x')
    else:
        return id_to_ec2_id(snapshot_id, 'snap-%08x')
def id_to_ec2_vol_id(volume_id):
    """Get or create an ec2 volume ID (vol-[base 16 number]) from uuid."""
    template = 'vol-%08x'
    # Plain integers format directly; uuids need the db-backed mapping.
    if not uuidutils.is_uuid_like(volume_id):
        return id_to_ec2_id(volume_id, template)
    ctxt = context.get_admin_context()
    int_id = get_int_id_from_volume_uuid(ctxt, volume_id)
    return id_to_ec2_id(int_id, template)
def ec2_vol_id_to_uuid(ec2_id):
    """Get the corresponding UUID for the given ec2-id."""
    # NOTE(jgriffith) first strip prefix to get just the numeric part
    int_id = ec2_id_to_id(ec2_id)
    return get_volume_uuid_from_int_id(context.get_admin_context(), int_id)
_ms_time_regex = re.compile('^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3,6}Z$')
def is_ec2_timestamp_expired(request, expires=None):
    """Checks the timestamp or expiry time included in an EC2 request
    and returns true if the request is expired.

    :param request: mapping of EC2 query parameters; at most one of
        'Timestamp' or 'Expires' may be present.
    :param expires: allowed clock skew in seconds for 'Timestamp'
        requests; None disables the skew check.
    :returns: True when the request is expired (or its timestamp is
        malformed), False otherwise.
    :raises exception.InvalidRequest: if both Timestamp and Expires are
        supplied.
    """
    query_time = None
    timestamp = request.get('Timestamp')
    expiry_time = request.get('Expires')

    def parse_strtime(strtime):
        # Pick the format by probing for fractional seconds.
        if _ms_time_regex.match(strtime):
            # NOTE(MotoKen): time format for aws-sdk-java contains millisecond
            time_format = "%Y-%m-%dT%H:%M:%S.%fZ"
        else:
            time_format = "%Y-%m-%dT%H:%M:%SZ"
        return timeutils.parse_strtime(strtime, time_format)
    try:
        if timestamp and expiry_time:
            msg = _("Request must include either Timestamp or Expires,"
                    " but cannot contain both")
            LOG.error(msg)
            raise exception.InvalidRequest(msg)
        elif expiry_time:
            # An explicit expiry is simply compared against "now".
            query_time = parse_strtime(expiry_time)
            return timeutils.is_older_than(query_time, -1)
        elif timestamp:
            query_time = parse_strtime(timestamp)
            # Check if the difference between the timestamp in the request
            # and the time on our servers is larger than 5 minutes, the
            # request is too old (or too new).
            if query_time and expires:
                return timeutils.is_older_than(query_time, expires) or \
                       timeutils.is_newer_than(query_time, expires)
        return False
    except ValueError:
        # Malformed timestamps are treated as expired rather than raising.
        LOG.audit(_("Timestamp is invalid."))
        return True
@memoize
def get_int_id_from_instance_uuid(context, instance_uuid):
    """Get or create the integer ec2 id for *instance_uuid* (cached)."""
    if instance_uuid is None:
        return None
    try:
        int_id = db.get_ec2_instance_id_by_uuid(context, instance_uuid)
    except exception.NotFound:
        int_id = db.ec2_instance_create(context, instance_uuid)['id']
    return int_id
@memoize
def get_int_id_from_volume_uuid(context, volume_uuid):
    """Get or create the integer ec2 id for *volume_uuid* (cached)."""
    if volume_uuid is None:
        return None
    try:
        int_id = db.get_ec2_volume_id_by_uuid(context, volume_uuid)
    except exception.NotFound:
        int_id = db.ec2_volume_create(context, volume_uuid)['id']
    return int_id
@memoize
def get_volume_uuid_from_int_id(context, int_id):
    """Map an integer ec2 volume id back to the volume uuid (cached)."""
    return db.get_volume_uuid_by_ec2_id(context, int_id)
def ec2_snap_id_to_uuid(ec2_id):
    """Get the corresponding UUID for the given ec2-id."""
    # NOTE(jgriffith) first strip prefix to get just the numeric part
    int_id = ec2_id_to_id(ec2_id)
    return get_snapshot_uuid_from_int_id(context.get_admin_context(), int_id)
@memoize
def get_int_id_from_snapshot_uuid(context, snapshot_uuid):
    """Get or create the integer ec2 id for *snapshot_uuid* (cached)."""
    if snapshot_uuid is None:
        return None
    try:
        int_id = db.get_ec2_snapshot_id_by_uuid(context, snapshot_uuid)
    except exception.NotFound:
        int_id = db.ec2_snapshot_create(context, snapshot_uuid)['id']
    return int_id
@memoize
def get_snapshot_uuid_from_int_id(context, int_id):
    """Map an integer ec2 snapshot id back to the snapshot uuid (cached)."""
    return db.get_snapshot_uuid_by_ec2_id(context, int_id)
# Marks each CamelCase word start: an upper-case letter preceded by a
# lower-case one, or an upper-case letter beginning a new word (i.e. not
# followed by another upper-case letter or end of string).
_c2u = re.compile('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')


def camelcase_to_underscore(str):
    """Convert a CamelCase name to its underscore_separated equivalent."""
    underscored = _c2u.sub(r'_\1', str)
    return underscored.lower().strip('_')
def _try_convert(value):
"""Return a non-string from a string or unicode, if possible.
============= =====================================================
When value is returns
============= =====================================================
zero-length ''
'None' None
'True' True case insensitive
'False' False case insensitive
'0', '-0' 0
0xN, -0xN int from hex (positive) (N is any number)
0bN, -0bN int from binary (positive) (N is any number)
* try conversion to int, float, complex, fallback value
"""
def _negative_zero(value):
epsilon = 1e-7
return 0 if abs(value) < epsilon else value
if len(value) == 0:
return ''
if value == 'None':
return None
lowered_value = value.lower()
if lowered_value == 'true':
return True
if lowered_value == 'false':
return False
for prefix, base in [('0x', 16), ('0b', 2), ('0', 8), ('', 10)]:
try:
if lowered_value.startswith((prefix, "-" + prefix)):
return int(lowered_value, base)
except ValueError:
pass
try:
return _negative_zero(float(value))
except ValueError:
return value
def dict_from_dotted_str(items):
    """Parse multi dot-separated argument into a nested dict.

    EBS boot uses multi dot-separated arguments like
    BlockDeviceMapping.1.DeviceName=snap-id
    Convert the above into
    {'block_device_mapping': {'1': {'device_name': snap-id}}}

    :param items: iterable of (key, value) pairs; keys are dot-separated
        CamelCase paths.
    :returns: dict (possibly nested) keyed by underscored names.
    """
    args = {}
    for key, value in items:
        parts = key.split(".")
        # Only the first path component names the top-level entry.
        key = str(camelcase_to_underscore(parts[0]))
        # NOTE(review): 'unicode' exists only on Python 2 — this module
        # predates a py3 port; on py3 a non-str value would raise NameError.
        if isinstance(value, str) or isinstance(value, unicode):
            # NOTE(vish): Automatically convert strings back
            #             into their respective values
            value = _try_convert(value)
        if len(parts) > 1:
            # Walk/create the chain of nested dicts for the intermediate
            # components, then store the value at the leaf component.
            d = args.get(key, {})
            args[key] = d
            for k in parts[1:-1]:
                k = camelcase_to_underscore(k)
                v = d.get(k, {})
                d[k] = v
                d = v
            d[camelcase_to_underscore(parts[-1])] = value
        else:
            args[key] = value
    return args
def search_opts_from_filters(filters):
    """Translate EC2-style filters into a search-options dict.

    Each filter is a dict with a 'name' and a 'value' dict whose first
    entry is keyed '1'.  Dashes in names become underscores; filters with
    a falsy first value are dropped.  Returns {} when *filters* is falsy.
    """
    if not filters:
        return {}
    opts = {}
    for flt in filters:
        value = flt['value']['1']
        if value:
            opts[flt['name'].replace('-', '_')] = value
    return opts
| {
"content_hash": "738b5e9a52322075adc36883bfb7bb4d",
"timestamp": "",
"source": "github",
"line_count": 382,
"max_line_length": 78,
"avg_line_length": 30.76439790575916,
"alnum_prop": 0.5999829816201497,
"repo_name": "zestrada/nova-cs498cc",
"id": "660c0e8d44970f614827c57620a45fda6077b2d6",
"size": "12529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/api/ec2/ec2utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "9215416"
},
{
"name": "Shell",
"bytes": "17117"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
import types
# Dependency imports
import numpy as np
import tensorflow as tf
import tensorflow.compat.v1 as tf1
import tensorflow_probability as tfp
from odin.backend import parse_activation
from odin.backend.maths import softplus1
from odin.bay.distributions import NegativeBinomialDisp, ZeroInflated
from six import string_types
from tensorflow.python.keras.utils import tf_utils as keras_tf_utils
# By importing `distributions` as `tfd`, docstrings will show
# `tfd.Distribution`. We import `bijectors` the same way, for consistency.
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python import layers as tfl
from tensorflow_probability.python.internal import \
distribution_util as dist_util
from tensorflow_probability.python.layers.distribution_layer import _event_size
from tensorflow_probability.python.layers.internal import \
distribution_tensor_coercible as dtc
# Public API of this module.
__all__ = [
    'DistributionLambda',
    'MultivariateNormalLayer',
    'GammaLayer',
    'BetaLayer',
    'DirichletLayer',
    'GaussianLayer',
    'NormalLayer',
    'LogNormalLayer',
    'LogisticLayer',
    'update_convert_to_tensor_fn',
]
# Re-export tensorflow-probability layers under local names.
DistributionLambda = tfl.DistributionLambda
LogisticLayer = tfl.IndependentLogistic
# ===========================================================================
# Helper
# ===========================================================================
def update_convert_to_tensor_fn(dist, fn):
  r"""Replace the tensor-coercion function of a DistributionLambda output.

  Arguments:
    dist: distribution instance produced by a `tfd.DistributionLambda`
      layer (i.e. a `_TensorCoercible`).
    fn: callable mapping a `tfd.Distribution` to a `tf.Tensor`-like
      object, or a `property` wrapping such a callable.

  Returns:
    `dist`, with its cached concrete value cleared so the new function
    takes effect on the next coercion.
  """
  assert isinstance(dist, dtc._TensorCoercible), \
    "dist must be output from tfd.DistributionLambda"
  # Unwrap a property *before* the callable check: property objects are
  # not callable, so with the previous order a property argument always
  # failed the assertion and the unwrapping branch was unreachable.
  if isinstance(fn, property):
    fn = fn.fget
  assert callable(fn), "fn must be callable"
  dist._concrete_value = None
  dist._convert_to_tensor_fn = fn
  return dist
def _preprocess_eventshape(params, event_shape, n_dims=1):
  r"""Resolve event_shape='auto' to the trailing `n_dims` of params.shape."""
  if not isinstance(event_shape, string_types):
    return event_shape
  if event_shape.lower().strip() != 'auto':
    raise ValueError("Not support for event_shape='%s'" % event_shape)
  return params.shape[-n_dims:]
# ===========================================================================
# Simple distribution
# ===========================================================================
class BetaLayer(DistributionLambda):
  r"""An independent Beta Keras layer.

  Arguments:
    event_shape: integer vector `Tensor` representing the shape of single
      draw from this distribution.
    concentration_activation : activation function for `concentration1`
      (alpha), must return only positive values.
    beta_activation : activation function for `concentration0` (beta),
      must return only positive values.
    clip_for_stable : Python `bool` (default `True`). When `True`, both
      concentrations are clipped into [1e-3, 1e3] for numerical stability.
    convert_to_tensor_fn: Python `callable` that takes a `tfd.Distribution`
      instance and returns a `tf.Tensor`-like object.
      Default value: `tfd.Distribution.sample`.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
      Default value: `False`.
    **kwargs: Additional keyword arguments passed to `tf.keras.Layer`.
  """

  def __init__(self,
               event_shape=(),
               concentration_activation='softplus1',
               beta_activation='softplus1',
               clip_for_stable=True,
               convert_to_tensor_fn=tfd.Distribution.sample,
               validate_args=False,
               **kwargs):
    super(BetaLayer, self).__init__(
        lambda t: type(self).
        new(t, event_shape, concentration_activation, beta_activation,
            clip_for_stable, validate_args), convert_to_tensor_fn, **kwargs)

  @staticmethod
  def new(params,
          event_shape=(),
          concentration_activation=softplus1,
          beta_activation=softplus1,
          clip_for_stable=True,
          validate_args=False,
          name="BetaLayer"):
    r"""Create the distribution instance from a `params` vector."""
    params = tf.convert_to_tensor(value=params, name='params')
    concentration_activation = parse_activation(concentration_activation, 'tf')
    beta_activation = parse_activation(beta_activation, 'tf')
    event_shape = dist_util.expand_to_vector(
        tf.convert_to_tensor(value=event_shape,
                             name='event_shape',
                             dtype=tf.int32),
        tensor_name='event_shape',
    )
    # Batch dims come from params; the last axis is split into the event.
    output_shape = tf.concat((tf.shape(input=params)[:-1], event_shape), axis=0)
    # Split the parameter vector into alpha (concentration1) and
    # beta (concentration0) halves.
    concentration1, concentration0 = tf.split(params, 2, axis=-1)
    # alpha: reshape to the event shape, then map to positive values.
    concentration1 = tf.reshape(concentration1, output_shape)
    concentration1 = concentration_activation(concentration1)
    # beta: same treatment.
    concentration0 = tf.reshape(concentration0, output_shape)
    concentration0 = beta_activation(concentration0)
    if clip_for_stable:
      concentration0 = tf.clip_by_value(concentration0, 1e-3, 1e3)
      concentration1 = tf.clip_by_value(concentration1, 1e-3, 1e3)
    return tfd.Independent(
        tfd.Beta(concentration1=concentration1,
                 concentration0=concentration0,
                 validate_args=validate_args),
        reinterpreted_batch_ndims=tf.size(input=event_shape),
        name=name,
    )

  @staticmethod
  def params_size(event_shape=(), name='BetaLayer_params_size'):
    r"""The number of `params` needed to create a single distribution."""
    return 2 * _event_size(event_shape, name=name)
class DirichletLayer(DistributionLambda):
  r""" The Dirichlet distribution is defined over the
  [`(k-1)`-simplex](https://en.wikipedia.org/wiki/Simplex) using a positive,
  length-`k` vector `concentration` (`k > 1`). The Dirichlet is identically the
  Beta distribution when `k = 2`.

  Arguments:
    event_shape: integer vector `Tensor` representing the shape of a
      single draw from this distribution.
    concentration_activation: activation function return positive
      floating-point `Tensor` indicating mean number of class
      occurrences; aka "alpha"
    concentration_clip : bool (default: True)
      clipping the concentration into range [1e-3, 1e3] for stability
    convert_to_tensor_fn: Python `callable` that takes a
      `tfd.Distribution` instance and returns a `tf.Tensor`-like object.
    validate_args: Python `bool`, default `False`.
  """

  def __init__(self,
               event_shape=(),
               concentration_activation='softplus1',
               concentration_clip=True,
               convert_to_tensor_fn=tfd.Distribution.sample,
               validate_args=False,
               **kwargs):
    super(DirichletLayer, self).__init__(
        lambda t: type(self).new(t, event_shape, concentration_activation,
                                 concentration_clip, validate_args),
        convert_to_tensor_fn, **kwargs)

  @staticmethod
  def new(params,
          event_shape=(),
          concentration_activation=softplus1,
          concentration_clip=True,
          validate_args=False,
          name="DirichletLayer"):
    r"""Create the distribution instance from a `params` vector."""
    params = tf.convert_to_tensor(value=params, name='params')
    # Map params to positive concentrations, then (optionally) clip them
    # to the numerically stable KL region [1e-3, 1e3].
    concentration_activation = parse_activation(concentration_activation, 'tf')
    params = concentration_activation(params)
    if concentration_clip:
      params = tf.clip_by_value(params, 1e-3, 1e3)
    return tfd.Dirichlet(concentration=params,
                         validate_args=validate_args,
                         name=name)

  @staticmethod
  def params_size(event_shape=(), name='DirichletLayer_params_size'):
    r"""The number of `params` needed to create a single distribution."""
    return _event_size(event_shape, name=name)
class GaussianLayer(DistributionLambda):
  r"""An independent normal Keras layer.

  Arguments:
    event_shape: integer vector `Tensor` representing the shape of single
      draw from this distribution.
    loc_activation : activation function for the location (mean),
      default: 'linear'.
    scale_activation : activation function for scale parameters, default:
      `softplus1(x) = softplus(x) + softplus_inverse(1.0)`
    convert_to_tensor_fn: Python `callable` that takes a `tfd.Distribution`
      instance and returns a `tf.Tensor`-like object.
      Default value: `tfd.Distribution.sample`.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs. Default value: `False`.
    **kwargs: Additional keyword arguments passed to `tf.keras.Layer`.
  """

  def __init__(self,
               event_shape=(),
               loc_activation='linear',
               scale_activation='softplus1',
               convert_to_tensor_fn=tfd.Distribution.sample,
               validate_args=False,
               **kwargs):
    super(GaussianLayer, self).__init__(
        lambda t: type(self).new(
            t, event_shape, parse_activation(loc_activation, self),
            parse_activation(scale_activation, self), validate_args),
        convert_to_tensor_fn, **kwargs)

  @staticmethod
  def new(params,
          event_shape,
          loc_activation,
          scale_activation,
          validate_args,
          name="GaussianLayer"):
    """Create the distribution instance from a `params` vector."""
    params = tf.convert_to_tensor(value=params, name='params')
    event_shape = dist_util.expand_to_vector(
        tf.convert_to_tensor(value=event_shape,
                             name='event_shape',
                             dtype=tf.int32),
        tensor_name='event_shape',
    )
    # Batch dims come from params; the last axis is split into the event.
    output_shape = tf.concat(
        [tf.shape(input=params)[:-1], event_shape],
        axis=0,
    )
    # First half of the parameter vector is loc, second half is scale.
    loc_params, scale_params = tf.split(params, 2, axis=-1)
    loc_params = tf.reshape(loc_activation(loc_params), output_shape)
    scale_params = tf.reshape(scale_activation(scale_params), output_shape)
    return tfd.Independent(
        tfd.Normal(loc=loc_params,
                   scale=scale_params,
                   validate_args=validate_args),
        reinterpreted_batch_ndims=tf.size(input=event_shape),
        name=name,
    )

  @staticmethod
  def params_size(event_shape=(), name="GaussianLayer_params_size"):
    r"""The number of `params` needed to create a single distribution."""
    return 2 * _event_size(event_shape, name=name)
class LogNormalLayer(DistributionLambda):
  r"""An independent LogNormal Keras layer.

  Arguments:
    event_shape: integer vector `Tensor` representing the shape of single
      draw from this distribution.
    loc_activation : activation function for the (log-space) location,
      default: 'linear'.
    scale_activation : activation function for scale parameters, default:
      `softplus1(x) = softplus(x) + softplus_inverse(1.0)`
    convert_to_tensor_fn: Python `callable` that takes a `tfd.Distribution`
      instance and returns a `tf.Tensor`-like object.
      Default value: `tfd.Distribution.sample`.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
      Default value: `False`.
    **kwargs: Additional keyword arguments passed to `tf.keras.Layer`.
  """

  def __init__(self,
               event_shape=(),
               loc_activation='linear',
               scale_activation='softplus1',
               convert_to_tensor_fn=tfd.Distribution.sample,
               validate_args=False,
               **kwargs):
    super(LogNormalLayer, self).__init__(
        lambda t: type(self).new(
            t, event_shape, parse_activation(loc_activation, self),
            parse_activation(scale_activation, self), validate_args),
        convert_to_tensor_fn, **kwargs)

  @staticmethod
  def new(params,
          event_shape,
          loc_activation,
          scale_activation,
          validate_args=False,
          name="LogNormalLayer"):
    """Create the distribution instance from a `params` vector."""
    params = tf.convert_to_tensor(value=params, name='params')
    event_shape = dist_util.expand_to_vector(
        tf.convert_to_tensor(value=event_shape,
                             name='event_shape',
                             dtype=tf.int32),
        tensor_name='event_shape',
    )
    # Batch dims come from params; the last axis is split into the event.
    output_shape = tf.concat(
        [tf.shape(input=params)[:-1], event_shape],
        axis=0,
    )
    # First half of the parameter vector is loc, second half is scale.
    loc_params, scale_params = tf.split(params, 2, axis=-1)
    loc_params = tf.reshape(loc_activation(loc_params), output_shape)
    scale_params = tf.reshape(scale_activation(scale_params), output_shape)
    return tfd.Independent(
        tfd.LogNormal(loc=loc_params,
                      scale=scale_params,
                      validate_args=validate_args),
        reinterpreted_batch_ndims=tf.size(input=event_shape),
        name=name,
    )

  @staticmethod
  def params_size(event_shape=(), name="LogNormal_params_size"):
    r"""The number of `params` needed to create a single distribution."""
    return 2 * _event_size(event_shape, name=name)
class GammaLayer(DistributionLambda):
  r"""An independent Gamma Keras layer.

  Arguments:
    event_shape: integer vector `Tensor` representing the shape of single
      draw from this distribution.
    convert_to_tensor_fn: Python `callable` that takes a `tfd.Distribution`
      instance and returns a `tf.Tensor`-like object.
      Default value: `tfd.Distribution.sample`.
    concentration_activation : activation function return positive values.
    rate_activation : activation function return positive values.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
      Default value: `False`.
    **kwargs: Additional keyword arguments passed to `tf.keras.Layer`.
  """

  def __init__(self,
               event_shape=(),
               convert_to_tensor_fn=tfd.Distribution.sample,
               concentration_activation='softplus1',
               rate_activation='softplus1',
               validate_args=False,
               **kwargs):
    super(GammaLayer, self).__init__(
        lambda t: type(self).new(
            t, event_shape, parse_activation(concentration_activation, self),
            parse_activation(rate_activation, self), validate_args),
        convert_to_tensor_fn, **kwargs)

  @staticmethod
  def new(params,
          event_shape,
          concentration_activation=softplus1,
          rate_activation=softplus1,
          validate_args=False,
          name="GammaLayer"):
    """Create the distribution instance from a `params` vector."""
    params = tf.convert_to_tensor(value=params, name='params')
    event_shape = dist_util.expand_to_vector(
        tf.convert_to_tensor(value=event_shape,
                             name='event_shape',
                             dtype=tf.int32),
        tensor_name='event_shape',
    )
    # Batch dims come from params; the last axis is split into the event.
    output_shape = tf.concat((tf.shape(input=params)[:-1], event_shape), axis=0)
    # First half of the parameter vector is concentration, second is rate;
    # both are reshaped to the event shape and mapped to positive values.
    concentration, rate = tf.split(params, 2, axis=-1)
    concentration = tf.reshape(concentration, output_shape)
    concentration = concentration_activation(concentration)
    rate = tf.reshape(rate, output_shape)
    rate = rate_activation(rate)
    return tfd.Independent(
        tfd.Gamma(concentration=concentration,
                  rate=rate,
                  validate_args=validate_args),
        reinterpreted_batch_ndims=tf.size(input=event_shape),
        name=name,
    )

  @staticmethod
  def params_size(event_shape=(), name="GammaLayer_params_size"):
    r"""The number of `params` needed to create a single distribution."""
    return 2 * _event_size(event_shape, name=name)
# ===========================================================================
# Multivariate distribution
# ===========================================================================
class MultivariateNormalLayer(DistributionLambda):
  r"""A `d`-variate Multivariate Normal distribution Keras layer:

  Different covariance mode:
   - tril (lower triangle): `d + d * (d + 1) // 2` params.
   - diag (diagonal) : `d + d` params.
   - full (full) : `d + d * d` params (deprecated, raises).

  Arguments:
    event_shape: Scalar `int` representing the size of single draw from this
      distribution.
    covariance : {'diag', 'tril', 'full'}
    loc_activation : activation function for loc (a.k.a mean); `None`
      (the default) leaves loc unchanged (identity).
    scale_activation : activation function for scale; `None` (the
      default) means `tf.nn.softplus` for 'diag' and the
      `tfb.FillScaleTriL` default diagonal bijector for 'tril'.
    convert_to_tensor_fn: Python `callable` that takes a `tfd.Distribution`
      instance and returns a `tf.Tensor`-like object. For examples, see
      `class` docstring.
      Default value: `tfd.Distribution.sample`.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
      Default value: `False`.
    **kwargs: Additional keyword arguments passed to `tf.keras.Layer`.
  """

  def __init__(self,
               event_shape,
               covariance='diag',
               loc_activation=None,
               scale_activation=None,
               convert_to_tensor_fn=tfd.Distribution.sample,
               validate_args=False,
               **kwargs):
    super(MultivariateNormalLayer, self).__init__(
        lambda t: type(self).new(t, event_shape, covariance, loc_activation,
                                 scale_activation, validate_args),
        convert_to_tensor_fn, **kwargs)

  @staticmethod
  def new(params,
          event_shape,
          covariance,
          loc_activation=None,
          scale_activation=None,
          validate_args=False,
          name=None):
    r"""Create the distribution instance from a `params` vector."""
    covariance = str(covariance).lower().strip()
    event_size = tf.reduce_prod(event_shape)
    assert covariance in ('full', 'tril', 'diag'), \
      f"No support for given covariance: '{covariance}'"
    if name is None:
      name = f"MultivariateNormal{covariance.capitalize()}"
    # The leading event_size entries of params are loc; the remainder
    # parameterizes the scale for the chosen covariance mode.
    params = tf.convert_to_tensor(value=params, name='params')
    loc = params[..., :event_size]
    if loc_activation is not None:
      loc = loc_activation(loc)
    scale = params[..., event_size:]
    ### the distribution
    if covariance == 'tril':
      # Fill a lower-triangular matrix from the flat scale vector; the
      # small diag_shift keeps the diagonal strictly positive.
      scale_tril = tfb.FillScaleTriL(
          diag_bijector=scale_activation,
          diag_shift=np.array(1e-5, params.dtype.as_numpy_dtype()),
          validate_args=validate_args,
      )
      return tfd.MultivariateNormalTriL(loc=loc,
                                        scale_tril=scale_tril(scale),
                                        validate_args=validate_args,
                                        name=name)
    elif covariance == 'diag':
      # NOTE: never forget to use activation softplus for the scale,
      # or you will suffer
      if scale_activation is None:
        scale_activation = tf.nn.softplus
      return tfd.MultivariateNormalDiag(loc=loc,
                                        scale_diag=scale_activation(scale),
                                        validate_args=validate_args,
                                        name=name)
    elif covariance == 'full':
      raise NotImplementedError(
          'MVN full covariance is deprecated, '
          'use `scale_tril=tf.linalg.cholesky(covariance_matrix)` instead')

  @staticmethod
  def params_size(event_size, covariance='diag', name=None):
    """The number of `params` needed to create a single distribution."""
    covariance = str(covariance).lower().strip()
    assert covariance in ('full', 'tril', 'diag'), \
      "No support for given covariance: '%s'" % covariance
    if covariance == 'tril':
      # loc plus the lower-triangular scale entries.
      return event_size + event_size * (event_size + 1) // 2
    elif covariance == 'diag':
      # loc plus a per-dimension scale.
      return event_size + event_size
    elif covariance == 'full':
      raise NotImplementedError(
          'MVN full covariance is deprecated, '
          'use `scale_tril=tf.linalg.cholesky(covariance_matrix)` instead')
# ===========================================================================
# Shortcut
# ===========================================================================
# Alias: NormalLayer is GaussianLayer under its conventional name.
NormalLayer = GaussianLayer
| {
"content_hash": "c806866cd1291dd941ff1c49a1b8a4e0",
"timestamp": "",
"source": "github",
"line_count": 508,
"max_line_length": 89,
"avg_line_length": 40.15551181102362,
"alnum_prop": 0.6268444531594686,
"repo_name": "imito/odin",
"id": "29d6294755942d675d9647ec721f6048135a3f66",
"size": "20399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "odin/bay/layers/continuous.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1516670"
}
],
"symlink_target": ""
} |
"""Extending the template engine variable namespace to always include certain values is quite easy.
There are two places you can extend:
* The 'web' variable can be extended by assigning to the request context.
* Adding additional top-level variables is accomplished by assigning to the 'namespace' attribute of the context.
Note that both of these sources are omitted from serialized rendering. To add variables for serialization use the
__after__ handler within your controllers. Pro tip:
class MyController(object):
def __init__(self, context):
self.ctx = context
def index(self):
return 'json:', dict(hello="world")
def __after__(self, result):
result[1]['meaning'] = 42
"""
class SampleExtension(object):
    """Minimal extension showing how to inject template variables."""

    uses = []  # soft dependencies (may be omitted)
    needs = []  # hard dependencies (may be omitted)
    always = True  # self-authored extensions are usually always-on
    provides = ['sample']  # may be omitted when always is True

    def __init__(self, config):
        """Executed to configure the extension."""
        super(SampleExtension, self).__init__()

    def prepare(self, context):
        """Executed during request set-up."""
        # The request context doubles as the 'web' template variable.
        context.foo = 27
        # Extend the top-level template namespace.
        context.namespace.bar = 42
| {
"content_hash": "713e14fcb1b2b12c0fe46ca906d612a7",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 114,
"avg_line_length": 36.725,
"alnum_prop": 0.6494213750850919,
"repo_name": "marrow/WebCore",
"id": "194501019b5ad703e09db462638581ed3470bb0d",
"size": "1488",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "docs/old/recipes/extend_template_namespace.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "747"
},
{
"name": "Makefile",
"bytes": "901"
},
{
"name": "Python",
"bytes": "133793"
}
],
"symlink_target": ""
} |
import re
import json
import sys
import os
movie_ratings_pattern = re.compile(r"""(?P<distribution>[\d\.\*]{10}?)\t # distribution of votes
(?P<votes>[\d]*?)\t # number of votes
(?P<rating>\d0?\.\d?)\t # rating
(?P<title>.+?)\t # movie title
(?P<year>[\d]{4})""", re.VERBOSE) # year
actor_pattern = re.compile(r"""(?P<actor>.+?)\t # distribution of votes
(?P<title>.+?)\t # movie title
(?P<year>[\d]{4})""", re.VERBOSE) # year
def extract_movie_ratings_data(s):
    """Parse one movie-ratings line; return the matched groups or None."""
    match = movie_ratings_pattern.search(s)
    if match is None:
        print("ERROR while parsing the following line from movie rating data file\n" + s)
        return None
    return match.groups()
def extract_actor_data(s):
    """Parse one actor line; return the matched groups or None."""
    match = actor_pattern.search(s)
    if match is None:
        print("ERROR while parsing the following line from actor data file\n" + s)
        return None
    return match.groups()
# Script body: read a movie-ratings TSV and a folder of actor TSVs, then
# write per-year "streaming" files under <output folder>/movie_ratings
# and <output folder>/actor_data (directories must already exist).
if len(sys.argv) != 4:
    print("ERROR\nusage: prepare_streaming_data.py <movie rating tsv file> <actor tsv file> <output folder>\n")
    sys.exit(1)
known_movies = set()
actor_data_by_year = dict()
movie_rating_data_by_year = dict()
movie_num = 0
# Pass 1: group rating tuples by year and remember every "<title>_<year>".
for line in open(sys.argv[1], "r", encoding="utf-8"):
    if len(line.strip()) == 0:
        continue
    movie_data = extract_movie_ratings_data(line)
    if not movie_data:
        continue
    # groups are (distribution, votes, rating, title, year)
    year = int(movie_data[4])
    title = movie_data[3]
    known_movies.add("%s_%d" % (title, year))
    movie_num += 1
    if year not in movie_rating_data_by_year:
        movie_rating_data_by_year[year] = []
    movie_rating_data_by_year[year].append(movie_data)
print("movies found: %d" % movie_num)
# Pass 2: expand each rating distribution into individual vote lines,
# one output file per year (also echoed to stdout).
for year in movie_rating_data_by_year:
    out_file = open("%s/movie_ratings/movie_ratings_%d.tsv" % (sys.argv[3], year), "w", encoding="utf-8")
    for movie_data in movie_rating_data_by_year[year]:
        distribution_sum = 0
        # NOTE(review): the regex allows '.' and '*' in the distribution
        # field; int() will raise ValueError on those characters — confirm
        # the input files only ever contain digits here.
        for digit in range(10):
            distribution_sum += int(movie_data[0][digit])
        # Downscale vote counts by 1000 so the emitted files stay small.
        bucket_value = (int(movie_data[1]) / distribution_sum) / 1000
        for digit in range(10):
            number_of_votes = int(bucket_value * int(movie_data[0][digit]))
            to_print = "\t".join([str(digit+1), movie_data[3], movie_data[4]])
            for i in range(number_of_votes):
                print(to_print, file = out_file)
                print(to_print)
    out_file.close()
# Pass 3: collect actor rows for known movies, de-duplicated per year.
for actor_file in os.listdir(sys.argv[2]):
    for line in open("%s/%s" % (sys.argv[2], actor_file), "r", encoding="utf-8"):
        if len(line.strip()) == 0:
            continue
        actor_data = extract_actor_data(line)
        if not actor_data:
            continue
        # groups are (actor, title, year)
        year = int(actor_data[2])
        title = actor_data[1]
        if "%s_%d" % (title, year) in known_movies:
            if year not in actor_data_by_year:
                actor_data_by_year[year] = set()
            actor_data_by_year[year].add("\t".join(actor_data))
# Pass 4: write the per-year actor files (also echoed to stdout).
for year in actor_data_by_year:
    out_file = open("%s/actor_data/actor_data_%d.tsv" % (sys.argv[3], year), "w", encoding="utf-8")
    for actor_data in actor_data_by_year[year]:
        print(actor_data, file = out_file)
        print(actor_data)
    out_file.close()
| {
"content_hash": "d77dff92f071ed479d6fc806a612c52a",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 111,
"avg_line_length": 34.29126213592233,
"alnum_prop": 0.5407701019252548,
"repo_name": "symat/spark-api-comparison",
"id": "a2ba1f8f712b5d411fb988ee21d425e634758b2e",
"size": "3548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/prepare_streaming_data.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "8552"
},
{
"name": "Scala",
"bytes": "25674"
}
],
"symlink_target": ""
} |
"""Module loader file for jobstamps tests."""
| {
"content_hash": "28b1d391d9f177718cd4170efe13c163",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 45,
"avg_line_length": 46,
"alnum_prop": 0.717391304347826,
"repo_name": "polysquare/jobstamps",
"id": "23f836df2c28955694e0265d4c14154fdf63404d",
"size": "156",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40341"
}
],
"symlink_target": ""
} |
"""Test the PackagesPlugin.
"""
import pytest
import enaml
from atom.api import Atom, Bool, Value, Str
from exopy.testing.util import handle_dialog
with enaml.imports():
from enaml.workbench.core.core_manifest import CoreManifest
from exopy.app.app_manifest import AppManifest
from exopy.app.errors.manifest import ErrorsManifest
from exopy.app.packages.manifest import PackagesManifest
from .packages_utils import Manifest1, Manifest2
APP_ID = 'exopy.app'
PACKAGES_ID = 'exopy.app.packages'
@pytest.fixture
def pack_workbench(workbench):
    """Create a workbench and register basic manifests.
    """
    # Registration order matters: core first, then app-level manifests.
    for manifest in (CoreManifest(), AppManifest(),
                     ErrorsManifest(), PackagesManifest()):
        workbench.register(manifest)
    return workbench
def patch_pkg(monkey, answer):
    """Patch the pkg_resources.iter_entry_points function.
    """
    from exopy.app.packages.plugin import pkg_resources

    def fake_iter_entry_points(group):
        return answer

    monkey.setattr(pkg_resources, 'iter_entry_points',
                   fake_iter_entry_points)
class FalseEntryPoint(Atom):
    """Configurable stand-in for a setuptools entry point.

    The packages plugin only calls require() and load(), so this fake
    reproduces just those two methods.
    """

    #: Identifier of the fake entry point.
    name = Str()

    #: When True, require() raises to simulate an unmet requirement.
    missing_require = Bool()

    #: Manifests returned by the callable produced by load().
    manifests = Value()

    def require(self):
        if not self.missing_require:
            return True
        raise Exception()

    def load(self):
        def _loader():
            return self.manifests
        return _loader
def test_collecting_registering_and_stopping(monkeypatch, pack_workbench,
                                             exopy_qtbot):
    """Check that the PackagesPlugin collects, registers and unregisters
    extension-package manifests.
    """
    entry_points = [
        FalseEntryPoint(name='test', manifests=[Manifest1, Manifest2]),
        FalseEntryPoint(name='test2', manifests=[]),
    ]
    patch_pkg(monkeypatch, entry_points)
    pack_workbench.get_plugin(APP_ID).run_app_startup(object())

    def assert_registered():
        packages = pack_workbench.get_plugin(PACKAGES_ID).packages
        assert 'test' in packages

    # Registration happens asynchronously during app startup.
    exopy_qtbot.wait_until(assert_registered)
    plugin = pack_workbench.get_plugin(PACKAGES_ID)
    assert 'test2' in plugin.packages
    for manifest_id in ('exopy.test1', 'exopy.test2'):
        assert manifest_id in plugin.packages['test']
    assert (100, 0, 'exopy.test1') in plugin._registered
    assert (0, 1, 'exopy.test2') in plugin._registered
    assert pack_workbench.get_plugin('exopy.test1')
    assert pack_workbench.get_plugin('exopy.test2')
    # Unregistering the packages plugin must remove the contributed plugins.
    pack_workbench.unregister(PACKAGES_ID)
    for manifest_id in ('exopy.test1', 'exopy.test2'):
        with pytest.raises(ValueError):
            pack_workbench.get_plugin(manifest_id)
def test_unmet_requirement(monkeypatch, pack_workbench, exopy_qtbot):
    """An entry point whose requirements are unmet is reported, not loaded."""
    broken = FalseEntryPoint(name='test', missing_require=True)
    empty = FalseEntryPoint(name='test2', manifests=[])
    patch_pkg(monkeypatch, [broken, empty])
    app = pack_workbench.get_plugin(APP_ID)
    # Startup pops an error dialog which must be dismissed.
    with handle_dialog(exopy_qtbot):
        app.run_app_startup(object())
    plugin = pack_workbench.get_plugin(PACKAGES_ID)
    for key in ('test', 'test2'):
        assert key in plugin.packages
    assert 'load' in plugin.packages['test']
    assert not plugin._registered
def test_wrong_return_type(monkeypatch, pack_workbench, exopy_qtbot):
    """A loader returning something other than a list is reported."""
    bad = FalseEntryPoint(name='test', manifests=Manifest1)
    good = FalseEntryPoint(name='test2', manifests=[])
    patch_pkg(monkeypatch, [bad, good])
    app = pack_workbench.get_plugin(APP_ID)
    # Startup pops an error dialog which must be dismissed.
    with handle_dialog(exopy_qtbot):
        app.run_app_startup(object())
    plugin = pack_workbench.get_plugin(PACKAGES_ID)
    for key in ('test', 'test2'):
        assert key in plugin.packages
    assert 'list' in plugin.packages['test']
    assert not plugin._registered
def test_non_manifest(monkeypatch, pack_workbench, exopy_qtbot):
    """A manifest list containing a non-PluginManifest entry is reported."""
    mixed = FalseEntryPoint(name='test', manifests=[Manifest1, object])
    empty = FalseEntryPoint(name='test2', manifests=[])
    patch_pkg(monkeypatch, [mixed, empty])
    app = pack_workbench.get_plugin(APP_ID)
    # Startup pops an error dialog which must be dismissed.
    with handle_dialog(exopy_qtbot):
        app.run_app_startup(object())
    plugin = pack_workbench.get_plugin(PACKAGES_ID)
    for key in ('test', 'test2'):
        assert key in plugin.packages
    assert 'PluginManifests' in plugin.packages['test']
    assert not plugin._registered
def test_registering_issue(monkeypatch, pack_workbench, exopy_qtbot):
    """Registering the same manifest twice is handled and reported."""
    duplicated = FalseEntryPoint(name='test',
                                 manifests=[Manifest1, Manifest1])
    empty = FalseEntryPoint(name='test2', manifests=[])
    patch_pkg(monkeypatch, [duplicated, empty])
    app = pack_workbench.get_plugin(APP_ID)
    # Startup pops an error dialog which must be dismissed.
    with handle_dialog(exopy_qtbot):
        app.run_app_startup(object())
    plugin = pack_workbench.get_plugin(PACKAGES_ID)
    for key in ('test', 'test2'):
        assert key in plugin.packages
    # Only the first registration of Manifest1 sticks.
    assert 'exopy.test1' in plugin.packages['test']
    assert len(plugin.packages['test']) == 1
def test_reporting_single_package_error(pack_workbench):
    """The 'package' error handler accepts a single error description."""
    errors_plugin = pack_workbench.get_plugin('exopy.app.errors')
    handler = errors_plugin._errors_handlers.contributions['package']
    report = {'id': 'test', 'message': 'test'}
    assert handler.handle(pack_workbench, report)
    # A report missing the mandatory keys should raise.
    with pytest.raises(Exception):
        handler.handle(pack_workbench, {})
def test_reporting_multiple_package_error(pack_workbench):
    """The 'package' error handler also accepts a list of error reports."""
    errors_plugin = pack_workbench.get_plugin('exopy.app.errors')
    handler = errors_plugin._errors_handlers.contributions['package']
    reports = [{'id': 'test', 'message': 'test'}]
    assert handler.handle(pack_workbench, reports)
    # A report missing the mandatory keys should raise.
    with pytest.raises(Exception):
        handler.handle(pack_workbench, {})
| {
"content_hash": "d05a803fd09960f980b354e7dfd60835",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 79,
"avg_line_length": 31.79126213592233,
"alnum_prop": 0.6387234692319438,
"repo_name": "Ecpy/ecpy",
"id": "ede6ca5b3352cd8726bd3f306c71b795c4d43ebd",
"size": "6932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/app/packages/test_plugin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "162"
},
{
"name": "Python",
"bytes": "1344669"
},
{
"name": "Shell",
"bytes": "420"
}
],
"symlink_target": ""
} |
from flask import Flask
from flask import render_template
from pymongo import MongoClient
import json
import os
app = Flask(__name__)
# connection settings
# MONGO_URI/DBS_NAME fall back to a local MongoDB instance and the
# 'avengers' database when the environment variables are not set.
MONGO_URI = os.getenv('MONGODB_URI', 'mongodb://localhost:27017')
DBS_NAME = os.getenv('MONGO_DB_NAME', 'avengers')
# Name of the collection holding the Avengers documents.
COLLECTION_NAME = 'avengers'
#
@app.route('/')
def dashboard():
    """
    Serve the dashboard single page (templates/index.html).
    :return: the rendered HTML of the dashboard page
    """
    return render_template("index.html")
# routing to the database info
@app.route("/avengers/avengers_project")
def avengers_projects():
    """
    A flask view to serve the project data from
    MongoDB in JSON format.
    :return: the database data as a JSON string
    """
    # Fields to project from each document; the MongoDB _id is excluded
    # because ObjectId is not JSON serialisable.
    wanted_fields = {
        '_id': False, 'Name': True, 'Current': True,
        'Gender': True, 'Year': True,
        'Honorary': True, 'Appearances': True, 'URL': True,
    }
    # The 'with' statement closes the MongoDB connection as soon as the
    # block is left, even on error.
    with MongoClient(MONGO_URI) as connection:
        collection = connection[DBS_NAME][COLLECTION_NAME]
        # Cap the result set at 20000 documents.
        documents = collection.find(projection=wanted_fields, limit=20000)
        return json.dumps(list(documents))
# FOR DEBUGGING USE ONLY [COMMENT THIS OUT]
# Running this module directly starts Flask's development server with the
# interactive debugger enabled; do not deploy with debug=True.
if __name__ == '__main__':
    app.run(debug=True)
""" [UNCOMMENT THIS]
if __name__ == '__main__':
    app.run()
"""
| {
"content_hash": "f7f3fb9e92887a3a7c400ec1b51c24f0",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 82,
"avg_line_length": 27.983333333333334,
"alnum_prop": 0.6569386539606908,
"repo_name": "GunnerJnr/_CodeInstitute",
"id": "60ec3765a6eeb83fb85452e71d4ea6db9db2c691",
"size": "1679",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Stream-2/Back-End-Development/Final-Project/Avengers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "336"
},
{
"name": "CSS",
"bytes": "2545480"
},
{
"name": "HTML",
"bytes": "708226"
},
{
"name": "JavaScript",
"bytes": "1984479"
},
{
"name": "Python",
"bytes": "1727585"
},
{
"name": "Shell",
"bytes": "75780"
},
{
"name": "TSQL",
"bytes": "642"
}
],
"symlink_target": ""
} |
# Base class for RPC testing
import logging
import optparse
import os
import sys
import shutil
import tempfile
import traceback
from .util import (
initialize_chain,
start_nodes,
connect_nodes_bi,
sync_blocks,
sync_mempools,
stop_nodes,
stop_node,
wait_bitcoinds,
enable_coverage,
check_json_precision,
initialize_chain_clean,
PortSeed,
)
from .authproxy import JSONRPCException
class BitcoinTestFramework(object):
    """Base class for RPC functional tests.

    Subclasses override run_test() (and optionally add_options(),
    setup_chain(), setup_network()) and then call main(), which drives
    option parsing, chain/network setup, test execution and cleanup.
    """
    def __init__(self):
        # Number of bitcoind instances launched for this test.
        self.num_nodes = 4
        # When True, start from empty datadirs instead of the cached chain.
        self.setup_clean_chain = False
        # RPC proxies for the running nodes; populated by setup_network().
        self.nodes = None
    def run_test(self):
        """Run the test body; must be overridden by subclasses."""
        raise NotImplementedError
    def add_options(self, parser):
        """Hook for subclasses to register extra command-line options."""
        pass
    def setup_chain(self):
        """Initialize the node datadirs, either clean or from the cache."""
        print("Initializing test directory "+self.options.tmpdir)
        if self.setup_clean_chain:
            initialize_chain_clean(self.options.tmpdir, self.num_nodes)
        else:
            initialize_chain(self.options.tmpdir, self.num_nodes, self.options.cachedir)
    def stop_node(self, num_node):
        """Stop a single node identified by its index."""
        stop_node(self.nodes[num_node], num_node)
    def setup_nodes(self):
        """Start all nodes and return their RPC proxies."""
        return start_nodes(self.num_nodes, self.options.tmpdir)
    def setup_network(self, split = False):
        """Start the nodes and wire them up; optionally leave the network
        split between nodes 1 and 2.
        """
        self.nodes = self.setup_nodes()
        # Connect the nodes as a "chain". This allows us
        # to split the network between nodes 1 and 2 to get
        # two halves that can work on competing chains.
        # If we joined network halves, connect the nodes from the joint
        # on outward. This ensures that chains are properly reorganised.
        if not split:
            connect_nodes_bi(self.nodes, 1, 2)
            sync_blocks(self.nodes[1:3])
            sync_mempools(self.nodes[1:3])
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 2, 3)
        self.is_network_split = split
        self.sync_all()
    def split_network(self):
        """
        Split the network of four nodes into nodes 0/1 and 2/3.
        """
        assert not self.is_network_split
        # Restart all nodes so they come back with the split topology.
        stop_nodes(self.nodes)
        wait_bitcoinds()
        self.setup_network(True)
    def sync_all(self):
        """Synchronize blocks and mempools within each connected half."""
        if self.is_network_split:
            sync_blocks(self.nodes[:2])
            sync_blocks(self.nodes[2:])
            sync_mempools(self.nodes[:2])
            sync_mempools(self.nodes[2:])
        else:
            sync_blocks(self.nodes)
            sync_mempools(self.nodes)
    def join_network(self):
        """
        Join the (previously split) network halves together.
        """
        assert self.is_network_split
        stop_nodes(self.nodes)
        wait_bitcoinds()
        self.setup_network(False)
    def main(self):
        """Parse options, run the test, then shut down and clean up."""
        parser = optparse.OptionParser(usage="%prog [options]")
        parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                          help="Leave bitcoinds and test.* datadir on exit or error")
        parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
                          help="Don't stop bitcoinds after the test execution")
        parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
                          help="Source directory containing bitcoind/bitcoin-cli (default: %default)")
        parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"),
                          help="Directory for caching pregenerated datadirs")
        parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                          help="Root directory for datadirs")
        parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
                          help="Print out all RPC calls as they are made")
        parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int',
                          help="The seed to use for assigning port numbers (default: current process id)")
        parser.add_option("--coveragedir", dest="coveragedir",
                          help="Write tested RPC commands into this directory")
        self.add_options(parser)
        (self.options, self.args) = parser.parse_args()
        # backup dir variable for removal at cleanup
        self.options.root, self.options.tmpdir = self.options.tmpdir, self.options.tmpdir + '/' + str(self.options.port_seed)
        if self.options.trace_rpc:
            logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
        if self.options.coveragedir:
            enable_coverage(self.options.coveragedir)
        PortSeed.n = self.options.port_seed
        # Put the built binaries (and Qt variants) first on PATH.
        os.environ['PATH'] = self.options.srcdir+":"+self.options.srcdir+"/qt:"+os.environ['PATH']
        check_json_precision()
        success = False
        try:
            if not os.path.isdir(self.options.tmpdir):
                os.makedirs(self.options.tmpdir)
            self.setup_chain()
            self.setup_network()
            self.run_test()
            success = True
        except JSONRPCException as e:
            print("JSONRPC error: "+e.error['message'])
            traceback.print_tb(sys.exc_info()[2])
        except AssertionError as e:
            print("Assertion failed: " + str(e))
            traceback.print_tb(sys.exc_info()[2])
        except KeyError as e:
            print("key not found: "+ str(e))
            traceback.print_tb(sys.exc_info()[2])
        except Exception as e:
            print("Unexpected exception caught during testing: " + repr(e))
            traceback.print_tb(sys.exc_info()[2])
        except KeyboardInterrupt as e:
            print("Exiting after " + repr(e))
        # Shutdown and cleanup are best-effort and honour the CLI flags.
        if not self.options.noshutdown:
            print("Stopping nodes")
            stop_nodes(self.nodes)
            wait_bitcoinds()
        else:
            print("Note: bitcoinds were not stopped and may still be running")
        if not self.options.nocleanup and not self.options.noshutdown and success:
            print("Cleaning up")
            shutil.rmtree(self.options.tmpdir)
            if not os.listdir(self.options.root):
                os.rmdir(self.options.root)
        else:
            print("Not cleaning up dir %s" % self.options.tmpdir)
        if success:
            print("Tests successful")
            sys.exit(0)
        else:
            print("Failed")
            sys.exit(1)
# Test framework for doing p2p comparison testing, which sets up some bitcoind
# binaries:
# 1 binary: test binary
# 2 binaries: 1 test binary, 1 ref binary
# n>2 binaries: 1 test binary, n-1 ref binaries
class ComparisonTestFramework(BitcoinTestFramework):
    """Framework for p2p comparison tests: one test binary plus
    num_nodes - 1 reference binaries, all started on a clean chain.
    """

    def __init__(self):
        super().__init__()
        self.num_nodes = 2
        self.setup_clean_chain = True

    def add_options(self, parser):
        """Register the binaries under comparison on the option parser."""
        default_binary = os.getenv("BITCOIND", "bitcoind")
        parser.add_option("--testbinary", dest="testbinary",
                          default=default_binary,
                          help="bitcoind binary to test")
        parser.add_option("--refbinary", dest="refbinary",
                          default=default_binary,
                          help="bitcoind binary to use for reference nodes (if any)")

    def setup_network(self):
        """Start the test node followed by the reference nodes."""
        shared_args = ['-debug', '-whitelist=127.0.0.1']
        binaries = [self.options.testbinary]
        binaries += [self.options.refbinary] * (self.num_nodes - 1)
        self.nodes = start_nodes(
            self.num_nodes, self.options.tmpdir,
            extra_args=[shared_args] * self.num_nodes,
            binary=binaries)
| {
"content_hash": "4260fed27f6541daf91e7e8573431a3e",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 142,
"avg_line_length": 36.56744186046512,
"alnum_prop": 0.5775883998982447,
"repo_name": "realzzt/BitCoin2013",
"id": "d5d26368439ae154713f4bb15f32b3513bbdc424",
"size": "8080",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/test_framework/test_framework.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "29375"
},
{
"name": "C",
"bytes": "703206"
},
{
"name": "C++",
"bytes": "4667168"
},
{
"name": "CSS",
"bytes": "1216"
},
{
"name": "HTML",
"bytes": "51842"
},
{
"name": "Java",
"bytes": "33209"
},
{
"name": "M4",
"bytes": "189542"
},
{
"name": "Makefile",
"bytes": "102451"
},
{
"name": "Objective-C",
"bytes": "4081"
},
{
"name": "Objective-C++",
"bytes": "7465"
},
{
"name": "Protocol Buffer",
"bytes": "2376"
},
{
"name": "Python",
"bytes": "983598"
},
{
"name": "QMake",
"bytes": "4108"
},
{
"name": "Shell",
"bytes": "50752"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the Intangible template for the space-station item schematic."""
    schematic = Intangible()
    schematic.template = "object/draft_schematic/item/shared_item_space_station.iff"
    # -1 means no attribute template is attached.
    schematic.attribute_template_id = -1
    schematic.stfName("string_id_table","")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return schematic
"content_hash": "81fe79f1c69ff8eb316f456b923895c7",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 78,
"avg_line_length": 23.615384615384617,
"alnum_prop": 0.6938110749185668,
"repo_name": "anhstudios/swganh",
"id": "53304a24602532896ff757157095d7b0e4fd366e",
"size": "452",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/draft_schematic/item/shared_item_space_station.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
from copy import deepcopy
from typing import Any, Dict, List, Tuple
import torch
import pytest
from allennlp.common import Lazy, Params
from allennlp.common.checks import ConfigurationError
from allennlp.common.testing import AllenNlpTestCase
from allennlp.data.data_loaders import SimpleDataLoader
from allennlp.training import Trainer
from allennlp.training.learning_rate_schedulers import LearningRateScheduler, SlantedTriangular
from allennlp.training.optimizers import Optimizer
def is_hat_shaped(learning_rates: List[float]):
    """
    Return True when the sequence is "hat" shaped: it strictly increases
    (by more than 1e-8 per step) at least once, then strictly decreases at
    least once, and never increases again after starting to decrease.
    Steps within +/-1e-8 count as flat and are ignored.
    """
    rising_seen = False
    falling_seen = False
    for prev, curr in zip(learning_rates, learning_rates[1:]):
        step = curr - prev
        if step > 1e-8:
            # An increase after the peak breaks the hat shape.
            if falling_seen:
                return False
            rising_seen = True
        elif step < -1e-8:
            # A decrease before any increase cannot be a hat.
            if not rising_seen:
                return False
            falling_seen = True
        # else: flat step, ignored
    return rising_seen and falling_seen
class SlantedTriangularTest(AllenNlpTestCase):
    """Tests for the SlantedTriangular learning-rate scheduler."""
    def setup_method(self):
        super().setup_method()
        # Two named linear layers, so parameter groups (and gradual
        # unfreezing) can address "layers" individually.
        self.model = torch.nn.Sequential(
            OrderedDict([("lin1", torch.nn.Linear(10, 10)), ("lin2", torch.nn.Linear(10, 10))])
        )
    def _get_optimizer(self, lr: float = 1.0):
        """Build an SGD optimizer with one parameter group per module."""
        optimizer_params = Params({"type": "sgd", "lr": lr})
        optimizer_params["parameter_groups"] = [[[f"^{m}"], {}] for m in self.model._modules]
        return Optimizer.from_params(
            model_parameters=self.model.named_parameters(), params=optimizer_params
        )
    def _run_scheduler_get_lrs(self, params, num_steps_per_epoch):
        """Simulate a training loop and return the per-batch learning rates
        of the first two parameter groups (0.0 while a group is frozen).
        """
        optimizer = self._get_optimizer()
        params["type"] = "slanted_triangular"
        scheduler = LearningRateScheduler.from_params(
            optimizer=optimizer, params=Params(deepcopy(params))
        )
        lrs = []
        batch_num_total = 0
        for epoch in range(params["num_epochs"]):
            for _ in range(num_steps_per_epoch):
                batch_num_total += 1
                # allennlp trainer calls step_batch after updating parameters
                # so collect lr at time of parameter update
                lrs.append(
                    [
                        param_group["lr"] * float(param_group["params"][0].requires_grad)
                        for param_group in optimizer.param_groups[:2]
                    ]
                )
                scheduler.step_batch(batch_num_total)
            if params.get("gradual_unfreezing") and epoch == 0:
                assert scheduler.freezing_current
            # step() takes two arguments: validation metric and epoch
            scheduler.step(None)
        return lrs
    def test_is_hat_shaped(self):
        """Sanity-check the is_hat_shaped helper used by the other tests."""
        assert not is_hat_shaped([0.0] * 10)
        assert not is_hat_shaped([float(k) for k in range(10)])
        assert not is_hat_shaped([float(10 - k) for k in range(10)])
        assert is_hat_shaped([float(k) for k in range(10)] + [float(10 - k) for k in range(10)])
        assert not is_hat_shaped(
            [float(k) for k in range(10)]
            + [float(10 - k) for k in range(10)]
            + [float(k) for k in range(10)]
        )
    def test_from_params_in_trainer(self):
        # This is more of an integration test, making sure that a bunch of pieces fit together
        # correctly, but it matters most for this learning rate scheduler, so we're testing it here.
        params = Params(
            {
                "num_epochs": 5,
                "learning_rate_scheduler": {
                    "type": "slanted_triangular",
                    "gradual_unfreezing": True,
                    "discriminative_fine_tuning": True,
                    "decay_factor": 0.5,
                },
            }
        )
        # The method called in the logic below only checks the length of this list, not its
        # contents, so this should be safe.
        instances = [1] * 40
        optim = self._get_optimizer()
        trainer = Trainer.from_params(
            model=self.model,
            optimizer=Lazy(lambda **kwargs: optim),
            serialization_dir=self.TEST_DIR,
            params=params,
            data_loader=SimpleDataLoader(instances, batch_size=10),
        )
        assert isinstance(trainer._learning_rate_scheduler, SlantedTriangular)
        # This is what we wrote this test for: to be sure that num_epochs is passed correctly, and
        # that num_steps_per_epoch is computed and passed correctly. This logic happens inside of
        # `Trainer.from_partial_objects`.
        assert trainer._learning_rate_scheduler.num_epochs == 5
        assert trainer._learning_rate_scheduler.num_steps_per_epoch == 4
        # And we'll do one more to make sure that we can override num_epochs in the scheduler if we
        # really want to. Not sure why you would ever want to in this case; this is just testing
        # the functionality.
        params = Params(
            {
                "num_epochs": 5,
                "learning_rate_scheduler": {
                    "type": "slanted_triangular",
                    "num_epochs": 3,
                    "gradual_unfreezing": True,
                    "discriminative_fine_tuning": True,
                    "decay_factor": 0.5,
                },
            }
        )
        trainer = Trainer.from_params(
            model=self.model,
            optimizer=Lazy(lambda **kwargs: optim),
            serialization_dir=self.TEST_DIR,
            params=params,
            data_loader=SimpleDataLoader(instances, batch_size=10),
        )
        assert trainer._learning_rate_scheduler.num_epochs == 3
    def test_from_params(self):
        """Check construction from Params and required-argument validation."""
        optim = self._get_optimizer()
        sched = LearningRateScheduler.from_params(
            optimizer=optim,
            params=Params(
                {
                    "type": "slanted_triangular",
                    "num_epochs": 5,
                    "num_steps_per_epoch": 10,
                    "gradual_unfreezing": True,
                    "discriminative_fine_tuning": True,
                    "decay_factor": 0.5,
                }
            ),
        )
        assert sched.num_epochs == 5
        assert sched.num_steps_per_epoch == 10
        assert sched.gradual_unfreezing is True
        assert sched.freezing_current is True
        assert len(optim.param_groups) == 3
        # The default parameter group in the Optimizer is empty
        assert not optim.param_groups[-1]["params"]
        # Discriminative fine-tuning scales the base lr by decay_factor per layer.
        assert optim.param_groups[-2]["lr"] == 1.0 / sched.ratio
        assert optim.param_groups[-3]["lr"] == 0.5 / sched.ratio
        with pytest.raises(ConfigurationError):
            # num_epochs and num_steps_per_epoch are required
            LearningRateScheduler.from_params(
                optimizer=optim, params=Params({"type": "slanted_triangular", "num_epochs": 5})
            )
            LearningRateScheduler.from_params(
                optimizer=optim,
                params=Params({"type": "slanted_triangular", "num_steps_epochs": 10}),
            )
    def test_schedules(self):
        """Spot-check learning rates at specific iterations for several configs."""
        slanted_triangular_cases: List[Tuple[Dict[str, Any], List[Tuple[int, int, float]]]] = [
            (
                {
                    "num_epochs": 5,
                    "num_steps_per_epoch": 10,
                    "gradual_unfreezing": True,
                },  # parameters
                [
                    (0, 1, 0.03125),  # iteration, layer, learning rate
                    (0, 0, 0.0),
                    (1, 1, 1.0),
                    (1, 0, 0.0),
                    (9, 1, 0.138888),
                    (9, 0, 0.0),  # end of the first epoch
                    (10, 1, 0.03125),
                    (10, 0, 0.03125),
                    (14, 1, 1.0),
                    (14, 0, 1.0),
                    (49, 1, 0.05815972),
                    (49, 0, 0.05815972),
                ],
            ),
            (
                {
                    "num_epochs": 5,
                    "num_steps_per_epoch": 10,
                    "discriminative_fine_tuning": True,
                    "decay_factor": 0.5,
                },  # parameters
                [
                    (0, 1, 0.03125),  # iteration, layer, learning rate
                    (0, 0, 0.015625),
                    (5, 1, 1.0),
                    (5, 0, 0.5),
                    (49, 1, 0.052777),
                    (49, 0, 0.026388),
                ],
            ),
            (
                {
                    "num_epochs": 5,
                    "num_steps_per_epoch": 10,
                    "gradual_unfreezing": True,
                    "discriminative_fine_tuning": True,
                    "decay_factor": 0.5,
                },  # parameters
                [
                    (0, 1, 0.03125),  # iteration, layer, learning rate
                    (0, 0, 0.0),
                    (1, 1, 1.0),
                    (1, 0, 0.0),
                    (9, 1, 0.138888),
                    (9, 0, 0.0),  # end of the first epoch
                    (10, 1, 0.03125),
                    (10, 0, 0.015625),
                    (14, 1, 1.0),
                    (14, 0, 0.5),
                    (49, 1, 0.0581597222),
                    (49, 0, 0.0290798611),
                ],
            ),
        ]
        for params, lr_checks in slanted_triangular_cases:
            lrs = self._run_scheduler_get_lrs(params, params["num_steps_per_epoch"])
            for it, layer, lr in lr_checks:
                lr_check = round(lr, 5)
                lr = round(lrs[it][layer], 5)
                assert (
                    lr == lr_check
                ), f"Learning rate {lr} at iteration {it} at layer {layer} != {lr_check}."
    def test_schedules_num_steps_per_epoch(self):
        # ensure the learning rate schedule still maintains hat shape
        # if number of actual batches differs from parameter provided
        # in constructor
        for gradual_unfreezing in [True, False]:
            for discriminative_fine_tuning in [True, False]:
                for num_actual_steps_per_epoch in [7, 11]:
                    params = {
                        "num_epochs": 5,
                        "num_steps_per_epoch": 10,
                        "gradual_unfreezing": gradual_unfreezing,
                        "discriminative_fine_tuning": discriminative_fine_tuning,
                    }
                    lrs = self._run_scheduler_get_lrs(params, num_actual_steps_per_epoch)
                    first_layer_lrs = [rates[0] for rates in lrs]
                    second_layer_lrs = [rates[1] for rates in lrs]
                    if gradual_unfreezing:
                        # First layer stays frozen (lr == 0) for epoch one,
                        # then follows a hat-shaped schedule.
                        assert max(first_layer_lrs[:num_actual_steps_per_epoch]) < 1e-8
                        assert min(first_layer_lrs[:num_actual_steps_per_epoch]) > -1e-8
                        assert is_hat_shaped(first_layer_lrs[num_actual_steps_per_epoch:])
                        assert is_hat_shaped(second_layer_lrs[:num_actual_steps_per_epoch])
                        assert is_hat_shaped(second_layer_lrs[num_actual_steps_per_epoch:])
                    else:
                        assert is_hat_shaped(first_layer_lrs)
                        assert is_hat_shaped(second_layer_lrs)
| {
"content_hash": "ed97db207a5b191aa2f1686c1edc1c32",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 100,
"avg_line_length": 40.99657534246575,
"alnum_prop": 0.5130732603792498,
"repo_name": "allenai/allennlp",
"id": "5280970a34a9ca0eafc6982495e572b78d81f671",
"size": "11971",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/training/learning_rate_schedulers/slanted_triangular_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "39870"
},
{
"name": "Dockerfile",
"bytes": "1190"
},
{
"name": "Jsonnet",
"bytes": "4469"
},
{
"name": "Makefile",
"bytes": "5306"
},
{
"name": "Perl",
"bytes": "101"
},
{
"name": "Python",
"bytes": "3575059"
},
{
"name": "Scilab",
"bytes": "4085"
},
{
"name": "Shell",
"bytes": "2092"
}
],
"symlink_target": ""
} |
"""Module containing classes related to GCE VM networking.
The Firewall class provides a way of opening VM ports. The Network class allows
VMs to communicate via internal ips and isolates PerfKitBenchmarker VMs from
others in the
same project. See https://developers.google.com/compute/docs/networking for
more information about GCE VM networking.
"""
import threading
from perfkitbenchmarker import flags
from perfkitbenchmarker import network
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.gcp import util
# Module-level handle on the global command-line flag values object.
FLAGS = flags.FLAGS
class GceFirewallRule(resource.BaseResource):
  """A single GCE firewall rule opening one port for both TCP and UDP.

  Attributes:
    name: string name of the firewall rule.
    project: GCP project the rule is created under.
    port: integer port number the rule opens.
  """

  def __init__(self, name, project, port):
    super(GceFirewallRule, self).__init__()
    self.name = name
    self.project = project
    self.port = port

  def __eq__(self, other):
    """Two rules are equal when name, port and project all match."""
    return (self.name == other.name
            and self.port == other.port
            and self.project == other.project)

  def _Create(self):
    """Creates the rule via 'gcloud compute firewall-rules create'."""
    cmd = [FLAGS.gcloud_path, 'compute', 'firewall-rules', 'create',
           self.name,
           '--allow', 'tcp:%d' % self.port, 'udp:%d' % self.port]
    cmd.extend(util.GetDefaultGcloudFlags(self))
    vm_util.IssueCommand(cmd)

  def _Delete(self):
    """Deletes the rule via 'gcloud compute firewall-rules delete'."""
    cmd = [FLAGS.gcloud_path, 'compute', 'firewall-rules', 'delete',
           self.name]
    cmd.extend(util.GetDefaultGcloudFlags(self))
    vm_util.IssueCommand(cmd)

  def _Exists(self):
    """Returns True if the firewall rule exists (describe succeeds)."""
    cmd = [FLAGS.gcloud_path, 'compute', 'firewall-rules', 'describe',
           self.name]
    cmd.extend(util.GetDefaultGcloudFlags(self))
    _, _, retcode = vm_util.IssueCommand(cmd, suppress_warning=True)
    return not retcode
class GceFirewall(network.BaseFirewall):
  """An object representing the GCE Firewall."""

  def __init__(self, project):
    """Initialize GCE firewall class.

    Args:
      project: The GCP project name under which firewall is created.
    """
    # Serializes rule creation so duplicate checks are race-free.
    self._lock = threading.Lock()
    self.firewall_rules = []
    self.project = project

  def AllowPort(self, vm, port):
    """Opens a port on the firewall.

    Args:
      vm: The BaseVirtualMachine object to open the port for.
      port: The local port to open.
    """
    # Static VMs are not provisioned by PerfKit; leave their firewall alone.
    if vm.is_static:
      return
    with self._lock:
      rule_name = ('perfkit-firewall-%s-%d' %
                   (FLAGS.run_uri, port))
      rule = GceFirewallRule(rule_name, self.project, port)
      # Skip creation when an identical rule was already provisioned.
      if rule in self.firewall_rules:
        return
      self.firewall_rules.append(rule)
      rule.Create()

  def DisallowAllPorts(self):
    """Closes all ports on the firewall."""
    for rule in self.firewall_rules:
      rule.Delete()
class GceNetwork(network.BaseNetwork):
  """Object representing a GCE Network."""
  def Create(self):
    """Creates the actual network."""
    # Intentional no-op: no dedicated network resource is provisioned here.
    pass
  def Delete(self):
    """Deletes the actual network."""
    # Intentional no-op: Create() provisions nothing, so there is nothing
    # to tear down.
    pass
| {
"content_hash": "21eafe4b90340045b5c70a31fdee80ef",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 79,
"avg_line_length": 29.127118644067796,
"alnum_prop": 0.6342740762292697,
"repo_name": "lleszczu/PerfKitBenchmarker",
"id": "0c51e91ecd01a313833e3f87a57fa91b802f5a75",
"size": "4031",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "perfkitbenchmarker/gcp/gce_network.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "976252"
},
{
"name": "Shell",
"bytes": "25179"
}
],
"symlink_target": ""
} |
"""
This is part of shot detector.
Produced by w495 at 2017.05.04 04:18:27
"""
from __future__ import absolute_import, division, print_function
import logging
import time
import types
from functools import wraps, partial
import six
class LogMeta(type):
"""
Metaclass for logging every call
of every method of target class.
"""
__default_logger = logging.getLogger(__name__)
__default_log_level = logging.DEBUG
@staticmethod
def log_settings_configure(**kwargs):
"""
:param kwargs:
:return:
"""
from shot_detector.utils.log_settings import LogSetting
log_setting = LogSetting(**kwargs)
conf = log_setting.configure()
return conf
@staticmethod
def ignore_method_call(func):
"""
:param func:
:return:
"""
func.ignore_log_meta = True
return func
@staticmethod
def should_be_overloaded(func):
"""
:param func:
:return:
"""
func.should_be_overloaded = True
return func
@classmethod
def log_method_call(mcs, func):
"""
:param func:
:return:
"""
logger = mcs.__default_logger
level = mcs.__default_log_level
return mcs.decorate(logger, level, str(), func)
@classmethod
def log_method_call_with(mcs, level=None, logger=None):
"""
:param level:
:param logger:
:return:
"""
if not logger:
logger = mcs.__default_logger
if not level:
level = mcs.__default_log_level
def _log_call(func):
return mcs.decorate(logger, level, str(), func)
return _log_call
@classmethod
def log_dummy_call(mcs, func):
"""
:param func:
:return:
"""
logger = mcs.__default_logger
level = mcs.__default_log_level
func = mcs.should_be_overloaded(func)
return mcs.decorate(logger, level, str(), func)
def __new__(mcs,
class_name=None,
bases=None,
attr_dict=None,
*args,
**kwargs):
"""
:param class_name:
:param bases:
:param attr_dict:
:param args:
:param kwargs:
:return:
"""
logger = attr_dict.get('meta_logger',
mcs.__default_logger)
log_level = attr_dict.get('meta_log_level',
mcs.__default_log_level)
if logger.isEnabledFor(log_level):
for key, value in six.iteritems(attr_dict):
if (isinstance(value, types.FunctionType) or
isinstance(value, types.LambdaType) or
isinstance(value, types.MethodType)):
new_value = mcs.decorate(
logger,
log_level,
class_name,
value
)
attr_dict[key] = new_value
return super(LogMeta, mcs).__new__(
mcs,
class_name,
bases,
attr_dict
)
@classmethod
def decorate(mcs, logger, level, class_name, func):
"""
Decorate method `func`.
Every call of `func` will be reported to logger.
This func (decorate) calls only one time
— at target class construction,
:param logging.Logger logger: logger object
:param int level: logger level
:param string class_name:
the name of target class
:param func | method | lambda | frame func:
input method for decoration
:return: wrapped(func)
decorated version of input func
"""
if hasattr(func, 'ignore_log_meta'):
return func
if hasattr(func, 'should_be_overloaded'):
pre_call = partial(mcs.dummy_pre_call,
logger,
level,
class_name,
func)
@wraps(func)
def dummy_wrapper(self, *args, **kwargs):
"""
:param self:
:param args:
:param kwargs:
:return:
"""
pre_call()
res = func(self, *args, **kwargs)
return res
return dummy_wrapper
pre_call = partial(mcs.pre_call,
logger,
level,
class_name,
func)
post_call = partial(mcs.post_call,
logger,
level,
class_name,
func)
@wraps(func)
def call_wrapper(self, *args, **kwargs):
"""
:param self:
:param args:
:param kwargs:
:return:
"""
pre_call()
res = func(self, *args, **kwargs)
post_call()
return res
return call_wrapper
@classmethod
def pre_call(mcs, logger, level, class_name, func):
"""
:param logger:
:param level:
:param class_name:
:param func:
:return:
"""
func = mcs.add_pre_call_attrs(func)
logger.log(level,
"[{num}] {mod}.{cls} {fun}".format(
num=func.call_number,
mod=func.__module__,
cls=class_name,
fun=func.__name__,
))
return func
    @classmethod
    def post_call(mcs, logger, level, class_name, func):
        """
        Log the end of a call to `func`, including elapsed wall time.

        Relies on `func.start_time` having been set by `pre_call`;
        `delta_time` is the wall-clock seconds between the two stamps.

        :param logger: destination logger.
        :param level: logging level for the message.
        :param class_name: name of the class the method belongs to.
        :param func: the original (unwrapped) function object.
        :return: `func`, with stop_time/delta_time attributes set.
        """
        func = mcs.add_post_call_attrs(func)
        logger.log(level,
                   "[{num}] {mod}.{cls} {fun} ({time:f})".format(
                       num=func.call_number,
                       mod=func.__module__,
                       cls=class_name,
                       fun=func.__name__,
                       time=func.delta_time,
                   ))
        return func
    @classmethod
    def dummy_pre_call(mcs, logger, level, class_name, func):
        """
        Log that a placeholder ("should be overloaded") method was called.

        NOTE(review): the message format "{mod}.{cls}{fun}" has no separator
        between the class and function names; "{cls}.{fun}" looks intended --
        confirm before changing the emitted log text.

        :param logger: destination logger.
        :param level: logging level for the message.
        :param class_name: name of the class the method belongs to.
        :param func: the placeholder function object.
        :return: `func`, unchanged.
        """
        logger.log(level,
                   "{mod}.{cls}{fun}: "
                   "dummy method: "
                   "should be overloaded".format(
                       mod=func.__module__,
                       cls=class_name,
                       fun=func.__name__,
                   ))
        return func
@classmethod
def add_pre_call_attrs(mcs, func):
"""
:param func:
:return:
"""
if not hasattr(func, 'call_number'):
func.call_number = 0
func.call_number += 1
func.start_time = time.time()
return func
@classmethod
def add_post_call_attrs(mcs, func):
"""
:param func:
:return:
"""
func.stop_time = time.time()
func.delta_time = func.stop_time - func.start_time
return func
# Module-level aliases so client code can use these markers/decorators
# without referencing LogMeta directly.
ignore_log_meta = LogMeta.ignore_method_call  # marker: decorate() leaves such methods unwrapped
log_method_call = LogMeta.log_method_call  # NOTE(review): defined above this view -- confirm semantics
log_method_call_with = LogMeta.log_method_call_with
log_dummy_call = LogMeta.log_dummy_call
should_be_overloaded = LogMeta.should_be_overloaded  # marker: decorate() logs "should be overloaded"
| {
"content_hash": "db1763cfed8a73e8a7353fe6c3f47c27",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 65,
"avg_line_length": 26.042345276872965,
"alnum_prop": 0.4544090056285178,
"repo_name": "w495/python-video-shot-detector",
"id": "26c33ddc10f10289e1df32036bc2e91cfc3b96fb",
"size": "8021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shot_detector/utils/log_meta.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "51"
},
{
"name": "Makefile",
"bytes": "1751"
},
{
"name": "Python",
"bytes": "599048"
},
{
"name": "Shell",
"bytes": "89"
}
],
"symlink_target": ""
} |
from .models import PollingLocation, PollingLocationManager
from .controllers import filter_polling_locations_structured_json_for_local_duplicates, \
import_and_save_all_polling_locations_data, polling_locations_import_from_structured_json
from admin_tools.views import redirect_to_sign_in_page
from ballot.models import BallotReturnedListManager
from config.base import get_environment_variable
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.messages import get_messages
from django.db.models import Q
from django.shortcuts import render
from exception.models import handle_record_found_more_than_one_exception
from voter.models import voter_has_authority
from wevote_functions.functions import convert_state_code_to_state_text, convert_to_float, convert_to_int, \
positive_value_exists, process_request_from_master
import wevote_functions.admin
from django.http import HttpResponse
import json
# Environment-driven configuration (see WeVoteServer/config/environment_variables.json)
WE_VOTE_API_KEY = get_environment_variable("WE_VOTE_API_KEY")
POLLING_LOCATIONS_SYNC_URL = get_environment_variable("POLLING_LOCATIONS_SYNC_URL")  # pollingLocationsSyncOut
WE_VOTE_SERVER_ROOT_URL = get_environment_variable("WE_VOTE_SERVER_ROOT_URL")
logger = wevote_functions.admin.get_logger(__name__)
# These are states for which we have polling location data.
# Commented-out entries (territories, 'NA') have no data available.
STATE_LIST_IMPORT = {
    'AK': 'Alaska',
    'AL': 'Alabama',
    'AR': 'Arkansas',
    # 'AS': 'American Samoa',
    'AZ': 'Arizona',
    'CA': 'California',
    'CO': 'Colorado',
    'CT': 'Connecticut',
    'DC': 'District of Columbia',
    'DE': 'Delaware',
    'FL': 'Florida',
    'GA': 'Georgia',
    # 'GU': 'Guam',
    'HI': 'Hawaii',
    'IA': 'Iowa',
    'ID': 'Idaho',
    'IL': 'Illinois',
    'IN': 'Indiana',
    'KS': 'Kansas',
    'KY': 'Kentucky',
    'LA': 'Louisiana',
    'MA': 'Massachusetts',
    'MD': 'Maryland',
    'ME': 'Maine',
    'MI': 'Michigan',
    'MN': 'Minnesota',
    'MO': 'Missouri',
    # 'MP': 'Northern Mariana Islands',
    'MS': 'Mississippi',
    'MT': 'Montana',
    # 'NA': 'National',
    'NC': 'North Carolina',
    'ND': 'North Dakota',
    'NE': 'Nebraska',
    'NH': 'New Hampshire',
    'NJ': 'New Jersey',
    'NM': 'New Mexico',
    'NV': 'Nevada',
    'NY': 'New York',
    'OH': 'Ohio',
    'OK': 'Oklahoma',
    'OR': 'Oregon',
    'PA': 'Pennsylvania',
    # 'PR': 'Puerto Rico',
    'RI': 'Rhode Island',
    'SC': 'South Carolina',
    'SD': 'South Dakota',
    'TN': 'Tennessee',
    'TX': 'Texas',
    'UT': 'Utah',
    'VA': 'Virginia',
    # 'VI': 'Virgin Islands',
    'VT': 'Vermont',
    'WA': 'Washington',
    'WI': 'Wisconsin',
    'WV': 'West Virginia',
    'WY': 'Wyoming'
}
# Module-level progress text, written by the import view and read by the
# status view below while an import is running.
polling_locations_import_status_string = ""
# This page does not need to be protected.
def polling_locations_sync_out_view(request):  # pollingLocationsSyncOut
    """
    Export polling locations as a JSON list so another We Vote server can sync.

    Reads from the 'readonly' database. Optional GET param `state` filters by
    (case-insensitive) state code. On any failure, or when no rows match,
    returns {'success': False, 'status': 'POLLING_LOCATION_LIST_MISSING'}.
    :param request: Django HttpRequest
    :return: HttpResponse with JSON body
    """
    state = request.GET.get('state', '')
    try:
        polling_location_list = PollingLocation.objects.using('readonly').all()
        if positive_value_exists(state):
            polling_location_list = polling_location_list.filter(state__iexact=state)
        polling_location_list_dict = polling_location_list.values(
            'we_vote_id', 'city', 'directions_text', 'latitude', 'longitude',
            'line1', 'line2', 'location_name', 'polling_hours_text',
            'polling_location_id', 'state', 'use_for_bulk_retrieve',
            'polling_location_deleted', 'zip_long')
        if polling_location_list_dict:
            polling_location_list_json = list(polling_location_list_dict)
            return HttpResponse(json.dumps(polling_location_list_json), content_type='application/json')
    except Exception as e:
        # Previously swallowed silently; keep the best-effort fall-through to
        # the error payload, but log so export failures are diagnosable.
        logger.error("polling_locations_sync_out_view failed: %s", e)
    json_data = {
        'success': False,
        'status': 'POLLING_LOCATION_LIST_MISSING'
    }
    return HttpResponse(json.dumps(json_data), content_type='application/json')
@login_required
def polling_locations_import_from_master_server_view(request):
    """
    Pull polling locations from the master server and import them locally.

    This view reaches out to the master servers configured in
    WeVoteServer/config/environment_variables.json, de-duplicates against the
    local database, then saves/updates rows. Progress is reported through the
    module-level `polling_locations_import_status_string`, which the status
    view below serves to the admin UI.
    :param request: Django HttpRequest
    :return: redirect to the sync dashboard
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'admin'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    # Refuse to sync a server against itself.
    if WE_VOTE_SERVER_ROOT_URL in POLLING_LOCATIONS_SYNC_URL:
        messages.add_message(request, messages.ERROR, "Cannot sync with Master We Vote Server -- "
                                                      "this is the Master We Vote Server.")
        return HttpResponseRedirect(reverse('admin_tools:admin_home', args=()))
    global polling_locations_import_status_string
    status = ""
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    state_code = request.GET.get('state_code', '')
    # results = polling_locations_import_from_master_server(request, state_code)
    import_results, structured_json = process_request_from_master(
        request, "Loading Polling Locations from We Vote Master servers",
        POLLING_LOCATIONS_SYNC_URL, {
            "key": WE_VOTE_API_KEY,  # This comes from an environment variable
            "state": state_code,
        }
    )
    duplicates_removed = 0
    json_retrieved = False
    saved = 0
    updated = 0
    not_processed = 0
    if import_results['success']:
        status += import_results['status']
        json_retrieved = True
        polling_locations_import_status_string = "Checking for duplicate Polling locations. "
        results = filter_polling_locations_structured_json_for_local_duplicates(structured_json)
        filtered_structured_json = results['structured_json']
        duplicates_removed = results['duplicates_removed']
        polling_locations_import_status_string = "Importing Polling locations."
        import_results = polling_locations_import_from_structured_json(filtered_structured_json)
        saved = import_results['saved']
        updated = import_results['updated']
        not_processed = import_results['not_processed']
    else:
        polling_locations_import_status_string = "Not able to retrieve filtered_structured_json from Master Server. "
        status += polling_locations_import_status_string + import_results['status']
    if not json_retrieved:
        messages.add_message(request, messages.ERROR, status)
    else:
        messages.add_message(request, messages.INFO, 'Polling Locations import completed. '
                                                     'Saved: {saved}, Updated: {updated}, '
                                                     'Duplicates skipped: '
                                                     '{duplicates_removed}, '
                                                     'Not processed: {not_processed}'
                                                     ''.format(saved=saved,
                                                               updated=updated,
                                                               duplicates_removed=duplicates_removed,
                                                               not_processed=not_processed))
    return HttpResponseRedirect(reverse('admin_tools:sync_dashboard', args=()) + "?google_civic_election_id=" +
                                str(google_civic_election_id) + "&state_code=" + str(state_code))
@login_required
def polling_locations_import_from_master_server_status_view(request):
    """
    Report progress of a running polling-locations import as JSON.

    Polled by the admin UI while the import view runs; the status text lives
    in the module-level `polling_locations_import_status_string`.
    """
    global polling_locations_import_status_string
    if 'polling_locations_import_status_string' not in globals():
        polling_locations_import_status_string = ""
    payload = {'text': polling_locations_import_status_string}
    return HttpResponse(json.dumps(payload), content_type='application/json')
@login_required
def import_polling_locations_process_view(request):
    """
    Import polling location data for one state from local VIP xml files.

    This view imports the polling location data from xml files from VIP
    (http://data.votinginfoproject.org). A `state_code` GET parameter is
    required; without it the user is redirected back to the list page.
    :param request: Django HttpRequest
    :return: redirect to the polling location list for the state
    """
    authority_required = {'admin'}  # admin, verified_volunteer
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    state_code = request.GET.get('state_code', '')
    # state_code = 'mo' # State code for Missouri
    if not positive_value_exists(state_code):
        messages.add_message(request, messages.INFO,
                             'State code required to run import_polling_locations_process.')
        return HttpResponseRedirect(reverse('polling_location:polling_location_list',
                                            args=()) + "?state_code={var}".format(
            var=state_code))
    # File naming convention is lower-case state codes.
    results = import_and_save_all_polling_locations_data(state_code.lower())
    messages.add_message(request, messages.INFO,
                         'Polling locations retrieved from file. '
                         '({saved} added, {updated} updated, {not_processed} not_processed)'.format(
                             saved=results['saved'],
                             updated=results['updated'],
                             not_processed=results['not_processed'],))
    return HttpResponseRedirect(reverse('polling_location:polling_location_list',
                                        args=()) + "?state_code={var}".format(
        var=state_code))
@login_required
def polling_location_edit_process_view(request):
    """
    Process the new or edit polling_location forms.

    Creates or updates a PollingLocation from POSTed form fields, fills in
    missing latitude/longitude via the manager, then propagates the lat/long
    onto any BallotReturned rows tied to this polling location.
    :param request: Django HttpRequest (POST)
    :return: redirect to the summary page (when a we_vote_id exists) or list page
    """
    authority_required = {'verified_volunteer'}  # admin, verified_volunteer
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    status = ""
    google_civic_election_id = request.POST.get('google_civic_election_id', 0)
    state_code = request.POST.get('state_code', "")
    # NOTE(review): direct subscript raises KeyError when the field is absent,
    # unlike the .get() calls around it -- confirm the form always sends it.
    polling_location_id = convert_to_int(request.POST['polling_location_id'])
    location_name = request.POST.get('location_name', "")
    line1 = request.POST.get('line1', "")
    line2 = request.POST.get('line2', "")
    city = request.POST.get('city', "")
    zip_long_raw = request.POST.get('zip_long', "")
    zip_long = zip_long_raw.strip()
    latitude = convert_to_float(request.POST.get('latitude', 0))
    longitude = convert_to_float(request.POST.get('longitude', 0))
    use_for_bulk_retrieve = request.POST.get('use_for_bulk_retrieve', False)
    polling_location_deleted = request.POST.get('polling_location_deleted', False)
    # Check to see if this polling_location is already being used anywhere
    polling_location_on_stage_found = False
    polling_location_on_stage = PollingLocation()
    polling_location_manager = PollingLocationManager()
    polling_location_we_vote_id = ""
    try:
        polling_location_query = PollingLocation.objects.filter(id=polling_location_id)
        if len(polling_location_query):
            polling_location_on_stage = polling_location_query[0]
            polling_location_on_stage_found = True
    except Exception as e:
        # Lookup failure falls through to the "create new" path below.
        pass
    try:
        if not polling_location_on_stage_found:
            # Create new
            polling_location_on_stage = PollingLocation.objects.create(
                state=state_code,
                zip_long=zip_long,
            )
        # Apply form values for both the create and update paths.
        polling_location_on_stage.location_name = location_name
        polling_location_on_stage.state = state_code
        polling_location_on_stage.line1 = line1
        polling_location_on_stage.line2 = line2
        polling_location_on_stage.city = city
        polling_location_on_stage.zip_long = zip_long
        polling_location_on_stage.latitude = latitude
        polling_location_on_stage.longitude = longitude
        polling_location_on_stage.use_for_bulk_retrieve = positive_value_exists(use_for_bulk_retrieve)
        polling_location_on_stage.polling_location_deleted = positive_value_exists(polling_location_deleted)
        polling_location_on_stage.save()
        polling_location_id = polling_location_on_stage.id
        polling_location_we_vote_id = polling_location_on_stage.we_vote_id
        if not zip_long or not latitude or not longitude:
            # Geocode when the address/coordinates are incomplete.
            lat_long_results = polling_location_manager.populate_latitude_and_longitude_for_polling_location(
                polling_location_on_stage)
            status += lat_long_results['status']
            latitude = lat_long_results['latitude']
            longitude = lat_long_results['longitude']
        if polling_location_on_stage_found:
            # Update
            messages.add_message(request, messages.INFO, 'Polling location updated. ' + status)
        else:
            # Create new
            messages.add_message(request, messages.INFO, 'Polling location created. ' + status)
    except Exception as e:
        messages.add_message(request, messages.ERROR, 'Could not save polling_location. ' + status)
    # Now update ballot returned with lat/long
    try:
        if latitude and longitude:
            ballot_returned_list_manager = BallotReturnedListManager()
            results = ballot_returned_list_manager.retrieve_ballot_returned_list(
                google_civic_election_id, polling_location_we_vote_id)
            if results['ballot_returned_list_found']:
                ballot_returned_list = results['ballot_returned_list']
                for one_ballot_returned in ballot_returned_list:
                    one_ballot_returned.latitude = latitude
                    one_ballot_returned.longitude = longitude
                    one_ballot_returned.save()
    except Exception as e:
        messages.add_message(request, messages.ERROR, 'Could not update ballot_returned. ' + status)
    url_variables = "?google_civic_election_id=" + str(google_civic_election_id) + \
                    "&state_code=" + str(state_code)
    if positive_value_exists(polling_location_we_vote_id):
        return HttpResponseRedirect(reverse('polling_location:polling_location_summary_by_we_vote_id',
                                            args=(polling_location_we_vote_id,)) + url_variables)
    else:
        return HttpResponseRedirect(reverse('polling_location:polling_location_list', args=()) + url_variables)
@login_required
def polling_location_edit_view(request, polling_location_local_id=0, polling_location_we_vote_id=""):
    """
    Render the new/edit form for a polling location.

    The record is looked up by local id first, then by we_vote_id; if neither
    matches, the form renders empty so a new record can be created.
    """
    authority_required = {'verified_volunteer'}  # admin, verified_volunteer
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = request.GET.get('google_civic_election_id', 0)
    state_code = request.GET.get('state_code', "")
    messages_on_stage = get_messages(request)
    polling_location_local_id = convert_to_int(polling_location_local_id)
    existing_polling_location = None
    try:
        if positive_value_exists(polling_location_local_id):
            existing_polling_location = PollingLocation.objects.get(id=polling_location_local_id)
        elif positive_value_exists(polling_location_we_vote_id):
            existing_polling_location = PollingLocation.objects.get(we_vote_id=polling_location_we_vote_id)
    except PollingLocation.MultipleObjectsReturned as e:
        handle_record_found_more_than_one_exception(e, logger=logger)
    except PollingLocation.DoesNotExist:
        # This is fine -- the form will create a new record.
        pass
    template_values = {
        'google_civic_election_id': google_civic_election_id,
        'messages_on_stage': messages_on_stage,
        'polling_location_id': 0,
        'state_code': state_code,
    }
    if existing_polling_location is not None:
        template_values['polling_location'] = existing_polling_location
        template_values['polling_location_id'] = existing_polling_location.id
    return render(request, 'polling_location/polling_location_edit.html', template_values)
@login_required
def polling_location_list_view(request):
    """
    List polling locations with optional state, bulk-retrieve and text filters.

    Two parallel querysets are built: one for the total count and one
    (limited) for display. When a state is chosen, a third count of rows
    missing latitude/longitude is also reported. Multi-word searches AND
    together per-word ORs across several text columns.
    :param request: Django HttpRequest
    :return: rendered polling_location_list.html
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'partner_organization', 'verified_volunteer'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    limit = convert_to_int(request.GET.get('limit', 100))
    show_bulk_retrieve = request.GET.get('show_bulk_retrieve', 0)
    state_code = request.GET.get('state_code', '')
    polling_location_search = request.GET.get('polling_location_search', '')
    polling_location_count_query = PollingLocation.objects.all()
    polling_location_count_query = polling_location_count_query.filter(polling_location_deleted=False)
    polling_location_without_latitude_count = 0
    polling_location_query = PollingLocation.objects.all()
    polling_location_query = polling_location_query.filter(polling_location_deleted=False)
    if positive_value_exists(show_bulk_retrieve):
        polling_location_count_query = polling_location_count_query.filter(use_for_bulk_retrieve=True)
        polling_location_query = polling_location_query.filter(use_for_bulk_retrieve=True)
    if positive_value_exists(state_code):
        polling_location_count_query = polling_location_count_query.filter(state__iexact=state_code)
        polling_location_query = polling_location_query.filter(state__iexact=state_code)
        # Separate count of rows in this state still missing lat/long.
        polling_location_without_latitude_count_query = PollingLocation.objects.all()
        polling_location_without_latitude_count_query = \
            polling_location_without_latitude_count_query.filter(state__iexact=state_code)
        polling_location_without_latitude_count_query = \
            polling_location_without_latitude_count_query.filter(polling_location_deleted=False)
        if positive_value_exists(show_bulk_retrieve):
            polling_location_without_latitude_count_query = \
                polling_location_without_latitude_count_query.filter(use_for_bulk_retrieve=True)
        polling_location_without_latitude_count_query = \
            polling_location_without_latitude_count_query.filter(Q(latitude__isnull=True) | Q(latitude__exact=0.0))
        polling_location_without_latitude_count = polling_location_without_latitude_count_query.count()
    if positive_value_exists(polling_location_search):
        search_words = polling_location_search.split()
        # Each word ORs across columns; successive words AND together because
        # the queryset is filtered once per word.
        for one_word in search_words:
            filters = []
            new_filter = Q(we_vote_id__icontains=one_word)
            filters.append(new_filter)
            new_filter = Q(location_name__icontains=one_word)
            filters.append(new_filter)
            new_filter = Q(directions_text__icontains=one_word)
            filters.append(new_filter)
            new_filter = Q(city__icontains=one_word)
            filters.append(new_filter)
            new_filter = Q(zip_long__icontains=one_word)
            filters.append(new_filter)
            new_filter = Q(line1__icontains=one_word)
            filters.append(new_filter)
            new_filter = Q(line2__icontains=one_word)
            filters.append(new_filter)
            # Add the first query
            if len(filters):
                final_filters = filters.pop()
                # ...and "OR" the remaining items in the list
                for item in filters:
                    final_filters |= item
                polling_location_count_query = polling_location_count_query.filter(final_filters)
                polling_location_query = polling_location_query.filter(final_filters)
    polling_location_count = polling_location_count_query.count()
    info_message = '{polling_location_count} polling locations found.'.format(
        polling_location_count=polling_location_count)
    if positive_value_exists(polling_location_without_latitude_count):
        info_message += ' {polling_location_without_latitude_count} polling locations without lat/long.'.format(
            polling_location_without_latitude_count=polling_location_without_latitude_count)
    messages.add_message(request, messages.INFO, info_message)
    polling_location_list = polling_location_query.order_by('location_name')[:limit]
    state_list = STATE_LIST_IMPORT
    sorted_state_list = sorted(state_list.items())
    messages_on_stage = get_messages(request)
    template_values = {
        'messages_on_stage': messages_on_stage,
        'google_civic_election_id': google_civic_election_id,
        'polling_location_list': polling_location_list,
        'polling_location_count': polling_location_count,
        'polling_location_search': polling_location_search,
        'show_bulk_retrieve': show_bulk_retrieve,
        'state_code': state_code,
        'state_name': convert_state_code_to_state_text(state_code),
        'state_list': sorted_state_list,
    }
    return render(request, 'polling_location/polling_location_list.html', template_values)
@login_required
def polling_locations_add_latitude_and_longitude_view(request):
    """
    Find polling location entries that don't have latitude/longitude (up to a limit), and update them.

    With `refresh_all` set, re-geocodes every row in the state instead of
    only the ones missing coordinates.
    :param request: Django HttpRequest
    :return: redirect back to the polling location list
    """
    authority_required = {'verified_volunteer'}  # admin, verified_volunteer
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    status = ""
    limit = request.GET.get('limit', 1000)
    state_code = request.GET.get('state_code', "")
    refresh_all = request.GET.get('refresh_all', "")
    google_civic_election_id = request.GET.get('google_civic_election_id', "")
    if not positive_value_exists(state_code):
        messages.add_message(request, messages.ERROR, 'State code required.')
        return HttpResponseRedirect(reverse('polling_location:polling_location_list', args=()) +
                                    "?google_civic_election_id=" + str(google_civic_election_id) +
                                    "&state_code=" + str(state_code))
    polling_location_manager = PollingLocationManager()
    # NOTE(review): this stays "" for the whole function, so the
    # summary-redirect branch at the bottom is currently unreachable.
    polling_location_we_vote_id = ""
    polling_location_list = []
    polling_locations_saved = 0
    polling_locations_not_saved = 0
    try:
        # Find all polling locations with an empty latitude (with limit)
        polling_location_query = PollingLocation.objects.all()
        if positive_value_exists(refresh_all):
            # Do not restrict to entries without lat/long
            pass
        else:
            polling_location_query = polling_location_query.filter(Q(latitude__isnull=True) | Q(latitude__exact=0.0))
        polling_location_query = polling_location_query.filter(state__iexact=state_code)
        polling_location_query = polling_location_query.order_by('location_name')[:limit]
        polling_location_list = list(polling_location_query)
    except Exception as e:
        messages.add_message(request, messages.ERROR, 'No polling locations found that need lat/long: ' + str(e))
    for polling_location_on_stage in polling_location_list:
        try:
            lat_long_results = polling_location_manager.populate_latitude_and_longitude_for_polling_location(
                polling_location_on_stage)
            status += lat_long_results['status']
            if lat_long_results['success']:
                polling_locations_saved += 1
            else:
                polling_locations_not_saved += 1
        except Exception as e:
            # One bad row should not abort the whole batch.
            polling_locations_not_saved += 1
    messages.add_message(request, messages.INFO, 'Polling locations saved: ' + str(polling_locations_saved) +
                         ", not saved: " + str(polling_locations_not_saved))
    url_variables = "?google_civic_election_id=" + str(google_civic_election_id) + \
                    "&state_code=" + str(state_code)
    if positive_value_exists(polling_location_we_vote_id):
        return HttpResponseRedirect(reverse('polling_location:polling_location_summary_by_we_vote_id',
                                            args=(polling_location_we_vote_id,)) + url_variables)
    else:
        return HttpResponseRedirect(reverse('polling_location:polling_location_list', args=()) + url_variables)
@login_required
def polling_location_summary_view(request, polling_location_local_id):
    """
    Show the summary page for one polling location, looked up by local id.

    If the id does not match a record, the template is rendered with an
    empty PollingLocation placeholder.
    :param request: Django HttpRequest
    :param polling_location_local_id: primary key of the PollingLocation row
    :return: rendered polling_location_summary.html
    """
    # admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer
    authority_required = {'partner_organization', 'verified_volunteer'}
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    messages_on_stage = get_messages(request)
    polling_location_local_id = convert_to_int(polling_location_local_id)
    polling_location_on_stage = PollingLocation()
    try:
        polling_location_on_stage = PollingLocation.objects.get(id=polling_location_local_id)
    except PollingLocation.MultipleObjectsReturned as e:
        handle_record_found_more_than_one_exception(e, logger=logger)
    except PollingLocation.DoesNotExist:
        # This is fine -- render with the empty placeholder created above.
        # (The write-only `..._found` flag the original set here was unused
        # and has been removed.)
        pass
    template_values = {
        'google_civic_election_id': google_civic_election_id,
        'messages_on_stage': messages_on_stage,
        'polling_location': polling_location_on_stage,
    }
    return render(request, 'polling_location/polling_location_summary.html', template_values)
@login_required
def polling_location_summary_by_we_vote_id_view(request, polling_location_we_vote_id):
    """
    Show the summary page for one polling location, looked up by we_vote_id.

    If the id does not match a record, the template is rendered with an
    empty PollingLocation placeholder.
    :param request: Django HttpRequest
    :param polling_location_we_vote_id: the We Vote identifier string
    :return: rendered polling_location_summary.html
    """
    authority_required = {'verified_volunteer'}  # admin, verified_volunteer
    if not voter_has_authority(request, authority_required):
        return redirect_to_sign_in_page(request, authority_required)
    google_civic_election_id = convert_to_int(request.GET.get('google_civic_election_id', 0))
    messages_on_stage = get_messages(request)
    polling_location_on_stage = PollingLocation()
    try:
        polling_location_on_stage = PollingLocation.objects.get(we_vote_id=polling_location_we_vote_id)
    except PollingLocation.MultipleObjectsReturned as e:
        handle_record_found_more_than_one_exception(e, logger=logger)
    except PollingLocation.DoesNotExist:
        # This is fine -- render with the empty placeholder created above.
        # (The write-only `..._found` flag the original set here was unused
        # and has been removed.)
        pass
    template_values = {
        'google_civic_election_id': google_civic_election_id,
        'messages_on_stage': messages_on_stage,
        'polling_location': polling_location_on_stage,
    }
    return render(request, 'polling_location/polling_location_summary.html', template_values)
| {
"content_hash": "cc431f98db6ff774809518c2a6567278",
"timestamp": "",
"source": "github",
"line_count": 619,
"max_line_length": 117,
"avg_line_length": 45.41518578352181,
"alnum_prop": 0.6369877632327832,
"repo_name": "jainanisha90/WeVoteServer",
"id": "c99051b7895537c15cd9074821060a726ac87329",
"size": "28209",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "polling_location/views_admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3612"
},
{
"name": "HTML",
"bytes": "1003027"
},
{
"name": "Python",
"bytes": "7489854"
},
{
"name": "Shell",
"bytes": "611"
}
],
"symlink_target": ""
} |
"""The tests for Alarm control panel device triggers."""
from datetime import timedelta
import pytest
from homeassistant.components.alarm_control_panel import DOMAIN
import homeassistant.components.automation as automation
from homeassistant.components.device_automation import DeviceAutomationType
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_VACATION,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
)
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_fire_time_changed,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
@pytest.fixture
def device_reg(hass):
    """Return an empty, loaded, device registry backed by `hass`."""
    return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
    """Return an empty, loaded, entity registry backed by `hass`."""
    return mock_registry(hass)
@pytest.fixture
def calls(hass):
    """Track calls to a mock `test.automation` service."""
    return async_mock_service(hass, "test", "automation")
@pytest.mark.parametrize(
    "set_state,features_reg,features_state,expected_trigger_types",
    # Supported features can come from the entity registry (features_reg) or
    # from a live state's attributes (features_state, used when set_state).
    # 47 enables the arm-related triggers -- presumably the bitwise OR of the
    # arm/trigger feature flags; confirm against AlarmControlPanelEntityFeature.
    [
        (False, 0, 0, ["triggered", "disarmed", "arming"]),
        (
            False,
            47,
            0,
            [
                "triggered",
                "disarmed",
                "arming",
                "armed_home",
                "armed_away",
                "armed_night",
                "armed_vacation",
            ],
        ),
        (True, 0, 0, ["triggered", "disarmed", "arming"]),
        (
            True,
            0,
            47,
            [
                "triggered",
                "disarmed",
                "arming",
                "armed_home",
                "armed_away",
                "armed_night",
                "armed_vacation",
            ],
        ),
    ],
)
async def test_get_triggers(
    hass,
    device_reg,
    entity_reg,
    set_state,
    features_reg,
    features_state,
    expected_trigger_types,
):
    """Test we get the expected triggers from an alarm_control_panel."""
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    entity_reg.async_get_or_create(
        DOMAIN,
        "test",
        "5678",
        device_id=device_entry.id,
        supported_features=features_reg,
    )
    if set_state:
        # Override the registry features with state-attribute features.
        hass.states.async_set(
            "alarm_control_panel.test_5678",
            "attributes",
            {"supported_features": features_state},
        )
    expected_triggers = []
    expected_triggers += [
        {
            "platform": "device",
            "domain": DOMAIN,
            "type": trigger,
            "device_id": device_entry.id,
            "entity_id": f"{DOMAIN}.test_5678",
        }
        for trigger in expected_trigger_types
    ]
    triggers = await async_get_device_automations(
        hass, DeviceAutomationType.TRIGGER, device_entry.id
    )
    assert_lists_same(triggers, expected_triggers)
async def test_get_trigger_capabilities(hass, device_reg, entity_reg):
    """Test we get the expected capabilities from an alarm_control_panel.

    Every trigger should expose a single optional "for" duration field.
    """
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
    hass.states.async_set(
        "alarm_control_panel.test_5678", "attributes", {"supported_features": 15}
    )
    triggers = await async_get_device_automations(
        hass, DeviceAutomationType.TRIGGER, device_entry.id
    )
    assert len(triggers) == 6
    for trigger in triggers:
        capabilities = await async_get_device_automation_capabilities(
            hass, DeviceAutomationType.TRIGGER, trigger
        )
        assert capabilities == {
            "extra_fields": [
                {"name": "for", "optional": True, "type": "positive_time_period_dict"}
            ]
        }
async def test_if_fires_on_state_change(hass, calls):
    """Test that each alarm device trigger fires on the matching state change.

    The six automations differ only in the trigger type and the message
    prefix, so the configuration is generated from one template instead of
    being spelled out six times (the produced config is identical).
    """
    hass.states.async_set("alarm_control_panel.entity", STATE_ALARM_PENDING)

    trigger_types = [
        "triggered",
        "disarmed",
        "armed_home",
        "armed_away",
        "armed_night",
        "armed_vacation",
    ]
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": "alarm_control_panel.entity",
                        "type": trigger_type,
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            # Concatenation keeps the rendered template
                            # byte-identical to the original literals.
                            "some": (
                                trigger_type + " - {{ trigger.platform}} - "
                                "{{ trigger.entity_id}} - {{ trigger.from_state.state}} - "
                                "{{ trigger.to_state.state}} - {{ trigger.for }}"
                            )
                        },
                    },
                }
                for trigger_type in trigger_types
            ]
        },
    )

    # Walk the panel through each state; every transition should fire exactly
    # one more automation, with the message reflecting from/to states.
    expected = [
        (STATE_ALARM_TRIGGERED,
         "triggered - device - alarm_control_panel.entity - pending - triggered - None"),
        (STATE_ALARM_DISARMED,
         "disarmed - device - alarm_control_panel.entity - triggered - disarmed - None"),
        (STATE_ALARM_ARMED_HOME,
         "armed_home - device - alarm_control_panel.entity - disarmed - armed_home - None"),
        (STATE_ALARM_ARMED_AWAY,
         "armed_away - device - alarm_control_panel.entity - armed_home - armed_away - None"),
        (STATE_ALARM_ARMED_NIGHT,
         "armed_night - device - alarm_control_panel.entity - armed_away - armed_night - None"),
        (STATE_ALARM_ARMED_VACATION,
         "armed_vacation - device - alarm_control_panel.entity - armed_night - armed_vacation - None"),
    ]
    for count, (state, message) in enumerate(expected, start=1):
        hass.states.async_set("alarm_control_panel.entity", state)
        await hass.async_block_till_done()
        assert len(calls) == count
        assert calls[count - 1].data["some"] == message
async def test_if_fires_on_state_change_with_for(hass, calls):
    """Test that a device trigger with a `for:` delay only fires after the delay.

    Flow: arm the trigger on the "triggered" alarm state with for=5s, flip the
    entity to triggered, verify nothing fires immediately, then advance time
    past the delay and verify exactly one call with the rendered template data.
    """
    entity_id = f"{DOMAIN}.entity"
    hass.states.async_set(entity_id, STATE_ALARM_DISARMED)

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": entity_id,
                        "type": "triggered",
                        # The trigger must hold for 5 seconds before firing.
                        "for": {"seconds": 5},
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            # Builds "turn_off {{ trigger.platform }} - {{ trigger.entity_id }} - ..."
                            # by joining the field names with template delimiters.
                            "some": "turn_off {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                }
            ]
        },
    )
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED
    assert len(calls) == 0

    # State changes to "triggered", but the 5s hold time has not elapsed yet.
    hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED)
    await hass.async_block_till_done()
    assert len(calls) == 0
    # Jump 10 seconds into the future, past the 5s hold time.
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
    await hass.async_block_till_done()
    assert len(calls) == 1
    await hass.async_block_till_done()
    assert (
        calls[0].data["some"]
        == f"turn_off device - {entity_id} - disarmed - triggered - 0:00:05"
    )
| {
"content_hash": "0d3b540cbbefa8575d2732b28c122bf5",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 103,
"avg_line_length": 35.130982367758186,
"alnum_prop": 0.45701584570158454,
"repo_name": "home-assistant/home-assistant",
"id": "c8082e415e0ca85dad302523c3100b6609871e5d",
"size": "13947",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/alarm_control_panel/test_device_trigger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20557383"
},
{
"name": "Shell",
"bytes": "6671"
}
],
"symlink_target": ""
} |
import logging

try:
    from configparser import ConfigParser
except ImportError:
    # Python 2 support
    from ConfigParser import ConfigParser

# Package-level logger for the site; everything at DEBUG to a stream handler.
# NOTE(review): "packges" looks like a typo for "packages" -- kept as-is for
# compatibility, since other modules may look the logger up by this name.
logger = logging.getLogger("packges.knightos.org")
logger.setLevel(logging.DEBUG)
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
sh.setFormatter(formatter)
logger.addHandler(sh)

# scss logger: share the same stream handler so SCSS output is visible too.
logging.getLogger("scss").addHandler(sh)

config = ConfigParser()
# Fix: close the config file after parsing (the original leaked the handle),
# and prefer read_file() where available -- readfp() was removed in Python 3.12.
_read_config = getattr(config, 'read_file', None) or config.readfp
with open('config.ini') as _config_file:
    _read_config(_config_file)

# Active environment section of config.ini.
env = 'dev'


def _cfg(k):
    """Return the raw string value for key *k* from the active env section."""
    return config.get(env, k)


def _cfgi(k):
    """Return the value for key *k* from the active env section as an int."""
    return int(_cfg(k))
| {
"content_hash": "3e2a71932d340ce518a569cf6c32f9b5",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 85,
"avg_line_length": 22.925925925925927,
"alnum_prop": 0.7334410339256866,
"repo_name": "KnightOS/packages.knightos.org",
"id": "ac0a98a95fb282311e40e80cfdbfd6bd96d2e2f0",
"size": "619",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "packages/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1192"
},
{
"name": "HTML",
"bytes": "35847"
},
{
"name": "JavaScript",
"bytes": "2228"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "47910"
}
],
"symlink_target": ""
} |
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ExpressMailServiceRequest(Choreography):
    """Temboo Choreo wrapper for the USPS ExpressMailServiceRequest service."""

    def __init__(self, temboo_session):
        """
        Create a new instance of the ExpressMailServiceRequest Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        choreo_path = '/Library/USPS/DeliveryInformationAPI/ExpressMailServiceRequest'
        super(ExpressMailServiceRequest, self).__init__(temboo_session, choreo_path)

    def new_input_set(self):
        """Return an empty input set tailored to this Choreo."""
        return ExpressMailServiceRequestInputSet()

    def _make_result_set(self, result, path):
        # Factory hook used by the Choreography base class to wrap raw results.
        return ExpressMailServiceRequestResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Factory hook used by the Choreography base class for async executions.
        return ExpressMailServiceRequestChoreographyExecution(session, exec_id, path)
class ExpressMailServiceRequestInputSet(InputSet):
    """
    InputSet for the ExpressMailServiceRequest Choreo: provides typed setters
    for each input parameter accepted when executing the Choreo.
    """

    def set_Date(self, value):
        """Date input. (required, date) Shipping date in 'MM/DD/YYYY' form."""
        super(ExpressMailServiceRequestInputSet, self)._set_input('Date', value)

    def set_DestinationZip(self, value):
        """DestinationZip input. (required, integer) Five digit zip code."""
        super(ExpressMailServiceRequestInputSet, self)._set_input('DestinationZip', value)

    def set_Endpoint(self, value):
        """Endpoint input. (optional, string) 'production' for the production server; defaults to 'testing' (sandbox)."""
        super(ExpressMailServiceRequestInputSet, self)._set_input('Endpoint', value)

    def set_OriginZip(self, value):
        """OriginZip input. (required, integer) Three or five digit zip code."""
        super(ExpressMailServiceRequestInputSet, self)._set_input('OriginZip', value)

    def set_Password(self, value):
        """Password input. (required, password) The password assigned by USPS."""
        super(ExpressMailServiceRequestInputSet, self)._set_input('Password', value)

    def set_UserId(self, value):
        """UserId input. (required, string) Alphanumeric ID assigned by USPS."""
        super(ExpressMailServiceRequestInputSet, self)._set_input('UserId', value)
class ExpressMailServiceRequestResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the ExpressMailServiceRequest Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, json_str):
        """Parse *json_str* (a JSON document) and return the Python objects.

        Fix: the parameter was formerly named ``str``, shadowing the builtin;
        renamed to ``json_str``. Positional callers are unaffected.
        """
        return json.loads(json_str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((xml) The response from USPS Web Service)
        """
        return self._output.get('Response', None)
class ExpressMailServiceRequestChoreographyExecution(ChoreographyExecution):
    """Represents a single (possibly asynchronous) execution of the Choreo."""

    def _make_result_set(self, response, path):
        # Wrap the raw response in the Choreo-specific result set.
        return ExpressMailServiceRequestResultSet(response, path)
| {
"content_hash": "f444ecce59849278d3eb3bfb255f8eac",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 216,
"avg_line_length": 45.08641975308642,
"alnum_prop": 0.7061883899233297,
"repo_name": "jordanemedlock/psychtruths",
"id": "07d5bdd720425f713c08dd319581467bf9f783da",
"size": "4565",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "temboo/Library/USPS/DeliveryInformationAPI/ExpressMailServiceRequest.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18544"
},
{
"name": "HTML",
"bytes": "34650"
},
{
"name": "JavaScript",
"bytes": "423"
},
{
"name": "PHP",
"bytes": "1097"
},
{
"name": "Python",
"bytes": "23444578"
}
],
"symlink_target": ""
} |
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.fields import StreamField
from wagtail.wagtailsearch import index
from wagtail.wagtailadmin.edit_handlers import StreamFieldPanel
from ..fields import BodyStreamBlock
class StaticPage(Page):
    """A Wagtail page whose content is a single StreamField body."""

    # Free-form streamed content; available block types come from BodyStreamBlock.
    body = StreamField(BodyStreamBlock())

    # Index the body so page content is found by Wagtail search.
    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    # Editor UI: expose the stream field in the admin content tab.
    content_panels = Page.content_panels + [
        StreamFieldPanel('body'),
    ]
| {
"content_hash": "cd8b0d82e03ad0689e7a667a4caddf68",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 63,
"avg_line_length": 25.105263157894736,
"alnum_prop": 0.740041928721174,
"repo_name": "palazzem/wagtail-nesting-box",
"id": "d0415eb1102f995e062069752fdd8365d31506ec",
"size": "477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wagtail_box/pages/models.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "5480"
},
{
"name": "Python",
"bytes": "33049"
}
],
"symlink_target": ""
} |
import requests
import urllib
import lxml.html
import time
class Web:
    """Test the functionality of a web application.

    Keeps a requests Session plus a cached snapshot of the current page
    (status, headers, encoding, text, json accessor, links) that is refreshed
    via __reload(). NOTE: this module is Python 2 code (urllib.urlopen,
    print statements elsewhere in the file).
    """

    def __init__(self, web_url, ignore_validity=False):
        """
        :param web_url: Full URL of website.
        :param ignore_validity: When true, connection failures leave the cached
            state empty instead of terminating the process.
        :return: n/a
        """
        # Base will never be modified
        self.base = web_url
        # Includes routes, will be modified
        self.url = web_url
        self.ignore_valid = ignore_validity
        self.session = requests.Session()
        # At construction time self.url == self.base, so __reload() probes the
        # base URL. Fix: the original duplicated __reload()'s body here and
        # issued five separate GET requests for the five cached attributes.
        self.__reload()

    def __get_links(self):
        """
        Gets hyperlinks found in current page.
        :return: List of hyperlink targets (href values) in current url.
        """
        connection = urllib.urlopen(self.url)
        dom = lxml.html.fromstring(connection.read())
        return list(dom.xpath('//a/@href'))

    def __reload(self):
        """
        Refresh the cached page state (status, headers, encoding, text, json,
        links) for the current URL.
        :return: n/a
        """
        # Request will crash when the connection is refused
        try:
            # Fix: one GET instead of five. The original issued a separate
            # request per attribute, which was slow and could cache mutually
            # inconsistent state if the page changed between requests.
            response = self.session.get(self.url)
            self.__status = response.status_code
            self.__headers = response.headers
            self.__encoding = response.encoding
            self.__text = response.text
            # Stores the bound json() *method* (not its result), matching the
            # original behaviour; callers must invoke it to parse the body.
            self.__json = response.json
            self.__links = self.__get_links()
        except Exception:
            if not self.ignore_valid:
                print("Connection to %s couldn't be established" % self.base)
                exit(1)
            self.__status = ''
            self.__headers = ''
            self.__encoding = ''
            self.__text = ''
            self.__json = ''
            self.__links = ''

    def _probe(self, url):
        """Return True when a GET against *url* succeeds with a non-error status."""
        # Request will crash when the connection is refused
        try:
            status = self.session.get(url).status_code
        except Exception:
            return False
        return not (399 < status < 600)

    def check_route(self, route=""):
        """
        Check if route is valid. It differs from exists() in that it will always
        append the route to the base url.
        :param route: Route to page.
        :return: True if the route is up and valid; False, otherwise.
        """
        return self._probe(self.base + route)

    def exists(self, route=""):
        """
        Check if route is valid. It differs from check_route() in that it will
        always append the route to the *current* url.
        :param route: Route to page.
        :return: True if the route is up and valid; False, otherwise.
        """
        return self._probe(self.url + route)

    # TODO: change the substring lookup to a regex
    def wait_for(self, status_code=0, html='', time_in_seconds=120):
        """
        Waits for the current page to have a given status and/or html snippet,
        raising AssertionError after *time_in_seconds*.
        :param status_code: Status code to wait for.
        :param html: Html text to wait for. Example "<b>I'm here</b>"
        :return: n/a
        """
        start = time.time()
        if status_code:
            while self.__status != status_code:
                elapsed_time = time.time() - start
                assert elapsed_time <= time_in_seconds, 'Wait_for() %s timeout' % self.url
                self.__reload()
        if html:
            while html not in self.__text:
                elapsed_time = time.time() - start
                assert elapsed_time <= time_in_seconds, 'Wait_for() %s timeout' % self.url
                self.__reload()

    def go_to(self, route=""):
        """
        Append a route to the base URL and navigate there when it is valid.
        :param route: Route to page.
        :return: True if the route is valid; False, otherwise.
        """
        valid_url = self.check_route(route)
        if valid_url:
            self.url = self.base + route
            self.__reload()
        return valid_url

    # TODO: change the substring lookup to a regex
    def has(self, html):
        """
        Checks if current URL contains specific html
        :param html: Html to look for.
        :return: True if it contains the html; False, otherwise.
        """
        return html in self.__text

    # TODO: Automate/simplify this step. As it is, it requires too much from the user.
    def login(self, action_route, login_data):
        """
        Attempt a login post request.
        :param action_route: Login action route.
        :param login_data: Dictionary of <input> login parameters.
        :return: Response body text, or "empty" when the connection failed and
            ignore_validity was set.
        """
        try:
            url = self.base + action_route
            self.session.get(url)
            # Jenkins will not accept requests without a header
            response = self.session.post(url, data=login_data, headers={"Referer": "http://ci-jenkins.org/"})
        except Exception:
            if not self.ignore_valid:
                print("Connection to %s couldn't be established" % self.base)
                exit(1)
            # Fix: the original fell through to `html.text` on the plain string
            # "empty", raising AttributeError whenever ignore_validity was set.
            return "empty"
        return response.text
| {
"content_hash": "337897c8b45a44a5f0ba8b82ac56f8f2",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 105,
"avg_line_length": 32.774358974358975,
"alnum_prop": 0.5420122046628071,
"repo_name": "gmatoshp/jenkins-tomcat-nginx",
"id": "d2e9c60101d222ef9cd7d97c134f06d270cd7e98",
"size": "6391",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/jtests/functionality/Web.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "603"
},
{
"name": "Nginx",
"bytes": "2880"
},
{
"name": "Python",
"bytes": "16313"
},
{
"name": "Shell",
"bytes": "21251"
}
],
"symlink_target": ""
} |
"""fontTools.ttLib -- a package for dealing with TrueType fonts.
This package offers translators to convert TrueType fonts to Python
objects and vice versa, and additionally from Python to TTX (an XML-based
text format) and vice versa.
Example interactive session:
Python 1.5.2c1 (#43, Mar 9 1999, 13:06:43) [CW PPC w/GUSI w/MSL]
Copyright 1991-1995 Stichting Mathematisch Centrum, Amsterdam
>>> from kiva.fonttools.fontTools import ttLib
>>> tt = ttLib.TTFont("afont.ttf")
>>> tt['maxp'].numGlyphs
242
>>> tt['OS/2'].achVendID
'B&H\000'
>>> tt['head'].unitsPerEm
2048
>>> tt.saveXML("afont.ttx")
Dumping 'LTSH' table...
Dumping 'OS/2' table...
Dumping 'VDMX' table...
Dumping 'cmap' table...
Dumping 'cvt ' table...
Dumping 'fpgm' table...
Dumping 'glyf' table...
Dumping 'hdmx' table...
Dumping 'head' table...
Dumping 'hhea' table...
Dumping 'hmtx' table...
Dumping 'loca' table...
Dumping 'maxp' table...
Dumping 'name' table...
Dumping 'post' table...
Dumping 'prep' table...
>>> tt2 = ttLib.TTFont()
>>> tt2.importXML("afont.ttx")
>>> tt2['maxp'].numGlyphs
242
>>>
"""
#
# $Id: __init__.py,v 1.36 2002/07/23 16:43:55 jvr Exp $
#
import os
import string
import types
class TTLibError(Exception):
    """Exception raised for TrueType/ttLib specific errors."""
class TTFont:

    """The main font object. It manages file input and output, and offers
    a convenient way of accessing tables.
    Tables will be only decompiled when necessary, i.e. when they're actually
    accessed. This means that simple operations can be extremely fast.

    NOTE(review): this class is Python 2 code (basestring, backtick repr,
    ``raise E, arg``, ``<>``, a string exception); it will not run unmodified
    on Python 3.
    """

    def __init__(self, file=None, res_name_or_index=None,
            sfntVersion="\000\001\000\000", checkChecksums=0,
            verbose=0, recalcBBoxes=1):
        """The constructor can be called with a few different arguments.
        When reading a font from disk, 'file' should be either a pathname
        pointing to a file, or a readable file object.

        If we're running on a Macintosh, 'res_name_or_index' may be an sfnt
        resource name or an sfnt resource index number or zero. The latter
        case will cause TTLib to autodetect whether the file is a flat file
        or a suitcase. (If it's a suitcase, only the first 'sfnt' resource
        will be read!)

        The 'checkChecksums' argument is used to specify how sfnt
        checksums are treated upon reading a file from disk:
            - 0: don't check (default)
            - 1: check, print warnings if a wrong checksum is found
            - 2: check, raise an exception if a wrong checksum is found.

        The TTFont constructor can also be called without a 'file'
        argument: this is the way to create a new empty font.
        In this case you can optionally supply the 'sfntVersion' argument.

        If the recalcBBoxes argument is false, a number of things will *not*
        be recalculated upon save/compile:
            1. glyph bounding boxes
            2. maxp font bounding box
            3. hhea min/max values
        (1) is needed for certain kinds of CJK fonts (ask Werner Lemberg ;-).
        Additionally, upon importing an TTX file, this option cause glyphs
        to be compiled right away. This should reduce memory consumption
        greatly, and therefore should have some impact on the time needed
        to parse/compile large fonts.
        """
        # Implicit-relative import of the sfnt module (Python 2 semantics).
        import sfnt
        self.verbose = verbose
        self.recalcBBoxes = recalcBBoxes
        # Cache of decompiled table objects, keyed by 4-char table tag.
        self.tables = {}
        self.reader = None
        if not file:
            # New empty font: nothing to read from disk.
            self.sfntVersion = sfntVersion
            return
        if isinstance(file, basestring):
            if os.name == "mac" and res_name_or_index is not None:
                # on the mac, we deal with sfnt resources as well as flat files
                import macUtils
                if res_name_or_index == 0:
                    if macUtils.getSFNTResIndices(file):
                        # get the first available sfnt font.
                        file = macUtils.SFNTResourceReader(file, 1)
                    else:
                        file = open(file, "rb")
                else:
                    file = macUtils.SFNTResourceReader(file, res_name_or_index)
            else:
                file = open(file, "rb")
        else:
            pass  # assume "file" is a readable file object
        self.reader = sfnt.SFNTReader(file, checkChecksums)
        self.sfntVersion = self.reader.sfntVersion

    def close(self):
        """If we still have a reader object, close it."""
        if self.reader is not None:
            self.reader.close()

    def save(self, file, makeSuitcase=0):
        """Save the font to disk. Similarly to the constructor,
        the 'file' argument can be either a pathname or a writable
        file object.

        On the Mac, if makeSuitcase is true, a suitcase (resource fork)
        file will we made instead of a flat .ttf file.
        """
        from kiva.fonttools.fontTools.ttLib import sfnt
        if isinstance(file, basestring):
            closeStream = 1
            if os.name == "mac" and makeSuitcase:
                import macUtils
                file = macUtils.SFNTResourceWriter(file, self)
            else:
                file = open(file, "wb")
                if os.name == "mac":
                    # Set Mac creator/type metadata on the new file.
                    import macfs
                    fss = macfs.FSSpec(file.name)
                    fss.SetCreatorType('mdos', 'BINA')
        else:
            # assume "file" is a writable file object
            closeStream = 0
        # "GlyphOrder" is a pseudo table and is never written to disk.
        tags = self.keys()
        tags.remove("GlyphOrder")
        numTables = len(tags)
        writer = sfnt.SFNTWriter(file, numTables, self.sfntVersion)
        done = []
        for tag in tags:
            # _writeTable handles inter-table dependencies via 'done'.
            self._writeTable(tag, writer, done)
        writer.close(closeStream)

    def saveXML(self, fileOrPath, progress=None,
            tables=None, skipTables=None, splitTables=0, disassembleInstructions=1):
        """Export the font as TTX (an XML-based text file), or as a series of text
        files when splitTables is true. In the latter case, the 'fileOrPath'
        argument should be a path to a directory.
        The 'tables' argument must either be false (dump all tables) or a
        list of tables to dump. The 'skipTables' argument may be a list of tables
        to skip, but only when the 'tables' argument is false.
        """
        from kiva.fonttools.fontTools import version
        import xmlWriter
        self.disassembleInstructions = disassembleInstructions
        if not tables:
            tables = self.keys()
            if skipTables:
                for tag in skipTables:
                    if tag in tables:
                        tables.remove(tag)
        numTables = len(tables)
        # NOTE(review): numGlyphs is assigned but never used below; it also
        # forces the 'maxp' table to be decompiled as a side effect.
        numGlyphs = self['maxp'].numGlyphs
        if progress:
            progress.set(0, numTables)
            idlefunc = getattr(progress, "idle", None)
        else:
            idlefunc = None
        writer = xmlWriter.XMLWriter(fileOrPath, idlefunc=idlefunc)
        # Backtick repr (Python 2); [1:-1] strips the surrounding quotes.
        writer.begintag("ttFont", sfntVersion=`self.sfntVersion`[1:-1],
                ttLibVersion=version)
        writer.newline()
        if not splitTables:
            writer.newline()
        else:
            # 'fileOrPath' must now be a path
            path, ext = os.path.splitext(fileOrPath)
            fileNameTemplate = path + ".%s" + ext
        for i in range(numTables):
            if progress:
                progress.set(i)
            tag = tables[i]
            if splitTables:
                # Each table goes to its own file; the main file only
                # references it via a 'src' attribute.
                tablePath = fileNameTemplate % tagToIdentifier(tag)
                tableWriter = xmlWriter.XMLWriter(tablePath, idlefunc=idlefunc)
                tableWriter.begintag("ttFont", ttLibVersion=version)
                tableWriter.newline()
                tableWriter.newline()
                writer.simpletag(tagToXML(tag), src=os.path.basename(tablePath))
                writer.newline()
            else:
                tableWriter = writer
            self._tableToXML(tableWriter, tag, progress)
            if splitTables:
                tableWriter.endtag("ttFont")
                tableWriter.newline()
                tableWriter.close()
            if progress:
                progress.set((i + 1))
        writer.endtag("ttFont")
        writer.newline()
        writer.close()
        if self.verbose:
            debugmsg("Done dumping TTX")

    def _tableToXML(self, writer, tag, progress):
        # Dump a single table as XML onto *writer*, reporting status through
        # *progress*, or self.verbose/stdout when no progress object is given.
        if self.has_key(tag):
            table = self[tag]
            report = "Dumping '%s' table..." % tag
        else:
            report = "No '%s' table found." % tag
        if progress:
            progress.setLabel(report)
        elif self.verbose:
            debugmsg(report)
        else:
            print report
        if not self.has_key(tag):
            return
        xmlTag = tagToXML(tag)
        if hasattr(table, "ERROR"):
            # Decompilation failed earlier; flag it in the output.
            writer.begintag(xmlTag, ERROR="decompilation error")
        else:
            writer.begintag(xmlTag)
        writer.newline()
        if tag in ("glyf", "CFF "):
            # These (potentially huge) tables accept a progress object.
            table.toXML(writer, self, progress)
        else:
            table.toXML(writer, self)
        writer.endtag(xmlTag)
        writer.newline()
        writer.newline()

    def importXML(self, file, progress=None):
        """Import a TTX file (an XML-based text format), so as to recreate
        a font object.
        """
        if self.has_key("maxp") and self.has_key("post"):
            # Make sure the glyph order is loaded, as it otherwise gets
            # lost if the XML doesn't contain the glyph order, yet does
            # contain the table which was originally used to extract the
            # glyph names from (ie. 'post', 'cmap' or 'CFF ').
            self.getGlyphOrder()
        import xmlImport
        xmlImport.importXML(self, file, progress)

    def isLoaded(self, tag):
        """Return true if the table identified by 'tag' has been
        decompiled and loaded into memory."""
        return self.tables.has_key(tag)

    def has_key(self, tag):
        # A tag is present if it is loaded, available from the on-disk
        # reader, or is the synthetic "GlyphOrder" pseudo table.
        if self.isLoaded(tag):
            return 1
        elif self.reader and self.reader.has_key(tag):
            return 1
        elif tag == "GlyphOrder":
            return 1
        else:
            return 0

    def keys(self):
        """Return all available table tags, sorted, with "GlyphOrder" first."""
        keys = self.tables.keys()
        if self.reader:
            for key in self.reader.keys():
                if key not in keys:
                    keys.append(key)
        keys.sort()
        if "GlyphOrder" in keys:
            keys.remove("GlyphOrder")
        return ["GlyphOrder"] + keys

    def __len__(self):
        # Number of available tables (loaded plus on-disk plus GlyphOrder).
        return len(self.keys())

    def __getitem__(self, tag):
        """Return the (lazily decompiled) table object for 'tag'."""
        try:
            return self.tables[tag]
        except KeyError:
            if tag == "GlyphOrder":
                # Synthesize the pseudo table on first access.
                table = GlyphOrder(tag)
                self.tables[tag] = table
                return table
            if self.reader is not None:
                import traceback
                if self.verbose:
                    debugmsg("Reading '%s' table from disk" % tag)
                data = self.reader[tag]
                tableClass = getTableClass(tag)
                table = tableClass(tag)
                self.tables[tag] = table
                if self.verbose:
                    debugmsg("Decompiling '%s' table" % tag)
                try:
                    table.decompile(data, self)
                # A Python 2 *string* exception that nothing raises: this
                # deliberately disables the fallback error handling below
                # so decompilation errors propagate during development.
                except "_ _ F O O _ _": # dummy exception to disable exception catching
                    print "An exception occurred during the decompilation of the '%s' table" % tag
                    from tables.DefaultTable import DefaultTable
                    import StringIO
                    file = StringIO.StringIO()
                    traceback.print_exc(file=file)
                    table = DefaultTable(tag)
                    table.ERROR = file.getvalue()
                    self.tables[tag] = table
                    table.decompile(data, self)
                return table
            else:
                raise KeyError, "'%s' table not found" % tag

    def __setitem__(self, tag, table):
        # Install (or replace) a table object under 'tag'.
        self.tables[tag] = table

    def __delitem__(self, tag):
        # Remove 'tag' from both the in-memory cache and the on-disk reader.
        if not self.has_key(tag):
            raise KeyError, "'%s' table not found" % tag
        if self.tables.has_key(tag):
            del self.tables[tag]
        if self.reader and self.reader.has_key(tag):
            del self.reader[tag]

    def setGlyphOrder(self, glyphOrder):
        """Set the list of glyph names, in font (glyphID) order."""
        self.glyphOrder = glyphOrder

    def getGlyphOrder(self):
        """Return the list of glyph names in glyphID order, computing it
        from 'CFF ', 'post' or the cmap (plus the AGL) on first access."""
        try:
            # Already computed (or explicitly set).
            return self.glyphOrder
        except AttributeError:
            pass
        if self.has_key('CFF '):
            cff = self['CFF ']
            if cff.haveGlyphNames():
                self.glyphOrder = cff.getGlyphOrder()
            else:
                # CID-keyed font, use cmap
                self._getGlyphNamesFromCmap()
        elif self.has_key('post'):
            # TrueType font
            glyphOrder = self['post'].getGlyphOrder()
            if glyphOrder is None:
                #
                # No names found in the 'post' table.
                # Try to create glyph names from the unicode cmap (if available)
                # in combination with the Adobe Glyph List (AGL).
                #
                self._getGlyphNamesFromCmap()
            else:
                self.glyphOrder = glyphOrder
        else:
            self._getGlyphNamesFromCmap()
        return self.glyphOrder

    def _getGlyphNamesFromCmap(self):
        #
        # This is rather convoluted, but then again, it's an interesting problem:
        # - we need to use the unicode values found in the cmap table to
        #   build glyph names (eg. because there is only a minimal post table,
        #   or none at all).
        # - but the cmap parser also needs glyph names to work with...
        # So here's what we do:
        # - make up glyph names based on glyphID
        # - load a temporary cmap table based on those names
        # - extract the unicode values, build the "real" glyph names
        # - unload the temporary cmap table
        #
        if self.isLoaded("cmap"):
            # Bootstrapping: we're getting called by the cmap parser
            # itself. This means self.tables['cmap'] contains a partially
            # loaded cmap, making it impossible to get at a unicode
            # subtable here. We remove the partially loaded cmap and
            # restore it later.
            # This only happens if the cmap table is loaded before any
            # other table that does f.getGlyphOrder()  or f.getGlyphName().
            cmapLoading = self.tables['cmap']
            del self.tables['cmap']
        else:
            cmapLoading = None
        # Make up glyph names based on glyphID, which will be used by the
        # temporary cmap and by the real cmap in case we don't find a unicode
        # cmap.
        numGlyphs = int(self['maxp'].numGlyphs)
        glyphOrder = [None] * numGlyphs
        glyphOrder[0] = ".notdef"
        for i in range(1, numGlyphs):
            glyphOrder[i] = "glyph%.5d" % i
        # Set the glyph order, so the cmap parser has something
        # to work with (so we don't get called recursively).
        self.glyphOrder = glyphOrder
        # Get a (new) temporary cmap (based on the just invented names)
        # (3, 1) = Windows platform, Unicode BMP encoding.
        tempcmap = self['cmap'].getcmap(3, 1)
        if tempcmap is not None:
            # we have a unicode cmap
            from kiva.fonttools.fontTools import agl
            cmap = tempcmap.cmap
            # create a reverse cmap dict
            reversecmap = {}
            for unicode, name in cmap.items():
                reversecmap[name] = unicode
            allNames = {}
            for i in range(numGlyphs):
                tempName = glyphOrder[i]
                if reversecmap.has_key(tempName):
                    unicode = reversecmap[tempName]
                    if agl.UV2AGL.has_key(unicode):
                        # get name from the Adobe Glyph List
                        glyphName = agl.UV2AGL[unicode]
                    else:
                        # create uni<CODE> name
                        glyphName = "uni" + string.upper(string.zfill(
                                hex(unicode)[2:], 4))
                    # Disambiguate duplicate names with a "#n" suffix.
                    tempName = glyphName
                    n = 1
                    while allNames.has_key(tempName):
                        tempName = glyphName + "#" + `n`
                        n = n + 1
                    glyphOrder[i] = tempName
                    allNames[tempName] = 1
            # Delete the temporary cmap table from the cache, so it can
            # be parsed again with the right names.
            del self.tables['cmap']
        else:
            pass # no unicode cmap available, stick with the invented names
        self.glyphOrder = glyphOrder
        if cmapLoading:
            # restore partially loaded cmap, so it can continue loading
            # using the proper names.
            self.tables['cmap'] = cmapLoading

    def getGlyphNames(self):
        """Get a list of glyph names, sorted alphabetically."""
        glyphNames = self.getGlyphOrder()[:]
        glyphNames.sort()
        return glyphNames

    def getGlyphNames2(self):
        """Get a list of glyph names, sorted alphabetically,
        but not case sensitive.
        """
        from kiva.fonttools.fontTools.misc import textTools
        return textTools.caselessSort(self.getGlyphOrder())

    def getGlyphName(self, glyphID):
        """Return the glyph name for *glyphID*, inventing one when out of range."""
        try:
            return self.getGlyphOrder()[glyphID]
        except IndexError:
            # XXX The ??.W8.otf font that ships with OSX uses higher glyphIDs in
            # the cmap table than there are glyphs. I don't think it's legal...
            return "glyph%.5d" % glyphID

    def getGlyphID(self, glyphName):
        """Return the glyphID for *glyphName*, rebuilding the reverse map
        when it has gone stale (glyph order changed behind our back)."""
        if not hasattr(self, "_reverseGlyphOrderDict"):
            self._buildReverseGlyphOrderDict()
        glyphOrder = self.getGlyphOrder()
        d = self._reverseGlyphOrderDict
        if not d.has_key(glyphName):
            if glyphName in glyphOrder:
                # the glyph order changed: rebuild and retry
                self._buildReverseGlyphOrderDict()
                return self.getGlyphID(glyphName)
            else:
                raise KeyError, glyphName
        glyphID = d[glyphName]
        if glyphName <> glyphOrder[glyphID]:
            # stale mapping: rebuild and retry
            self._buildReverseGlyphOrderDict()
            return self.getGlyphID(glyphName)
        return glyphID

    def _buildReverseGlyphOrderDict(self):
        # (Re)build the name -> glyphID lookup from the current glyph order.
        self._reverseGlyphOrderDict = d = {}
        glyphOrder = self.getGlyphOrder()
        for glyphID in range(len(glyphOrder)):
            d[glyphOrder[glyphID]] = glyphID

    def _writeTable(self, tag, writer, done):
        """Internal helper function for self.save(). Keeps track of
        inter-table dependencies.
        """
        if tag in done:
            return
        tableClass = getTableClass(tag)
        for masterTable in tableClass.dependencies:
            # Write dependency tables first so their compiled data is final.
            if masterTable not in done:
                if self.has_key(masterTable):
                    self._writeTable(masterTable, writer, done)
                else:
                    done.append(masterTable)
        tabledata = self.getTableData(tag)
        if self.verbose:
            debugmsg("writing '%s' table to disk" % tag)
        writer[tag] = tabledata
        done.append(tag)

    def getTableData(self, tag):
        """Returns raw table data, whether compiled or directly read from disk.
        """
        if self.isLoaded(tag):
            if self.verbose:
                debugmsg("compiling '%s' table" % tag)
            return self.tables[tag].compile(self)
        elif self.reader and self.reader.has_key(tag):
            if self.verbose:
                debugmsg("Reading '%s' table from disk" % tag)
            return self.reader[tag]
        else:
            raise KeyError, tag
class GlyphOrder:

    """A pseudo table. The glyph order isn't in the font as a separate
    table, but it's nice to present it as such in the TTX format.
    """

    def __init__(self, tag):
        pass

    def toXML(self, writer, ttFont):
        """Write the font's glyph order as a sequence of <GlyphID> elements."""
        glyphOrder = ttFont.getGlyphOrder()
        writer.comment("The 'id' attribute is only for humans; "
                "it is ignored when parsed.")
        writer.newline()
        for i in range(len(glyphOrder)):
            glyphName = glyphOrder[i]
            writer.simpletag("GlyphID", id=i, name=glyphName)
            writer.newline()

    def fromXML(self, element, ttFont):
        """Accumulate glyph names from parsed <GlyphID> elements.

        *element* is a ``(name, attrs, content)`` tuple. Fix: the original
        signature used Python 2 tuple parameter unpacking
        (``def fromXML(self, (name, attrs, content), ttFont)``), which is a
        syntax error on Python 3; unpacking inside the body is equivalent
        and keeps the call signature identical for callers.
        """
        name, attrs, content = element
        if not hasattr(self, "glyphOrder"):
            self.glyphOrder = []
            # Register our (still growing) list with the font right away.
            ttFont.setGlyphOrder(self.glyphOrder)
        if name == "GlyphID":
            self.glyphOrder.append(attrs["name"])
def _test_endianness():
"""Test the endianness of the machine. This is crucial to know
since TrueType data is always big endian, even on little endian
machines. There are quite a few situations where we explicitly
need to swap some bytes.
"""
import struct
data = struct.pack("h", 0x01)
if data == "\000\001":
return "big"
elif data == "\001\000":
return "little"
else:
assert 0, "endian confusion!"
endian = _test_endianness()
def getTableModule(tag):
    """Fetch the packer/unpacker module for a table.
    Return None when no module is found.
    """
    # Implicit-relative import of the ttLib.tables package (Python 2 semantics).
    import tables
    pyTag = tagToIdentifier(tag)
    try:
        # __import__ returns the *top-level* package, so the actual table
        # module is fetched via getattr on the 'tables' package below.
        module = __import__("kiva.fonttools.fontTools.ttLib.tables." + pyTag)
    except ImportError:
        return None
    else:
        return getattr(tables, pyTag)
def getTableClass(tag):
    """Fetch the packer/unpacker class for a table.

    NOTE(review): despite the original docstring's claim of returning None,
    unknown tags actually fall back to the generic DefaultTable class.
    """
    module = getTableModule(tag)
    if module is None:
        # No dedicated module for this tag: use the opaque default handler.
        from tables.DefaultTable import DefaultTable
        return DefaultTable
    pyTag = tagToIdentifier(tag)
    # By convention each table module defines a class named "table_<pyTag>".
    tableClass = getattr(module, "table_" + pyTag)
    return tableClass
def newTable(tag):
    """Instantiate and return an empty table object for *tag*."""
    return getTableClass(tag)(tag)
def _escapechar(c):
"""Helper function for tagToIdentifier()"""
import re
if re.match("[a-z0-9]", c):
return "_" + c
elif re.match("[A-Z]", c):
return c + "_"
else:
return hex(ord(c))[2:]
def tagToIdentifier(tag):
    """Convert a table tag to a valid (but UGLY) python identifier,
    as well as a filename that's guaranteed to be unique even on a
    caseless file system. Each character is mapped to two characters.
    Lowercase letters get an underscore before the letter, uppercase
    letters get an underscore after the letter. Trailing spaces are
    trimmed. Illegal characters are escaped as two hex bytes. If the
    result starts with a number (as the result of a hex escape), an
    extra underscore is prepended. Examples::

        'glyf' -> '_g_l_y_f'
        'cvt ' -> '_c_v_t'
        'OS/2' -> 'O_S_2f_2'
    """
    import re
    if tag == "GlyphOrder":
        # The pseudo table keeps its name as-is.
        return tag
    assert len(tag) == 4, "tag should be 4 characters long"
    # Drop trailing spaces, but always keep at least one character.
    while len(tag) > 1 and tag[-1] == ' ':
        tag = tag[:-1]
    ident = "".join([_escapechar(c) for c in tag])
    if re.match("[0-9]", ident):
        # A leading hex escape may start with a digit; keep it identifier-safe.
        ident = "_" + ident
    return ident
def identifierToTag(ident):
    """The opposite of tagToIdentifier(): recover the original 4-char tag."""
    if ident == "GlyphOrder":
        return ident
    if len(ident) % 2 and ident[0] == "_":
        # An odd length means the extra leading underscore that
        # tagToIdentifier() prepends when the result starts with a digit.
        ident = ident[1:]
    assert not (len(ident) % 2)
    tag = ""
    for i in range(0, len(ident), 2):
        if ident[i] == "_":
            tag = tag + ident[i+1]
        elif ident[i+1] == "_":
            tag = tag + ident[i]
        else:
            # assume hex. Fix: int(..., 16) replaces string.atoi(), which
            # does not exist on Python 3 (and was long deprecated on 2).
            tag = tag + chr(int(ident[i:i+2], 16))
    # append trailing spaces
    tag = tag + (4 - len(tag)) * ' '
    return tag
def tagToXML(tag):
    """Similarly to tagToIdentifier(), this converts a TT tag
    to a valid XML element name. Since XML element names are
    case sensitive, this is a fairly simple/readable translation.

    Special cases: 'OS/2' maps to 'OS_2'; 'GlyphOrder' passes through.
    """
    import re
    if tag == "OS/2":
        return "OS_2"
    elif tag == "GlyphOrder":
        return "GlyphOrder"
    if re.match("[A-Za-z_][A-Za-z_0-9]* *$", tag):
        # Fix: use the str method; string.strip(tag) relied on the function
        # form of the string module, which was removed in Python 3.
        return tag.strip()
    else:
        return tagToIdentifier(tag)
def xmlToTag(tag):
    """The opposite of tagToXML(): recover the TT tag from an XML name.

    An 8-character name is an escaped identifier (decoded via
    identifierToTag()); otherwise the name is padded with spaces back
    to 4 characters.  (The unreachable trailing ``return tag`` that
    followed the if/else has been removed.)
    """
    if tag == "OS_2":
        return "OS/2"
    if len(tag) == 8:
        return identifierToTag(tag)
    else:
        return tag + " " * (4 - len(tag))
def debugmsg(msg):
    # Print *msg* followed by a " (HH:MM:SS)" wall-clock timestamp.
    # Debugging aid only; writes to stdout (Python 2 print statement).
    import time
    print msg + time.strftime(" (%H:%M:%S)", time.localtime(time.time()))
| {
"content_hash": "0359afb57a99a0349683b75bd2ac5a28",
"timestamp": "",
"source": "github",
"line_count": 704,
"max_line_length": 118,
"avg_line_length": 43.07954545454545,
"alnum_prop": 0.46396069638617776,
"repo_name": "tommy-u/enable",
"id": "e2aa3e82cd4cf99a08c0164e4efba41485141dfd",
"size": "30328",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kiva/fonttools/fontTools/ttLib/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "240"
},
{
"name": "C",
"bytes": "5526949"
},
{
"name": "C++",
"bytes": "3058044"
},
{
"name": "DIGITAL Command Language",
"bytes": "35819"
},
{
"name": "Groff",
"bytes": "236"
},
{
"name": "Makefile",
"bytes": "58238"
},
{
"name": "Objective-C",
"bytes": "16551"
},
{
"name": "Python",
"bytes": "2202660"
},
{
"name": "Shell",
"bytes": "6286"
}
],
"symlink_target": ""
} |
import sys
import os
import java.lang.System
import imp
from Screen import Screen
from org.sikuli.script import Env
from org.sikuli.script import Debug
import Sikuli
def _stripPackagePrefix(module_name):
pdot = module_name.rfind('.')
if pdot >= 0:
return module_name[pdot+1:]
return module_name
class SikuliImporter:
    """sys.meta_path importer that loads ``<name>.sikuli`` bundles as modules.

    Runs under Jython (uses ``java.lang.System``); Python 2 syntax.
    """

    class SikuliLoader:
        """Loader returned by find_module(); imports from one .sikuli folder."""

        def __init__(self, path):
            # Directory of the .sikuli bundle this loader imports from.
            self.path = path

        def _load_module(self, fullname):
            # Standard imp-based load; the file handle from find_module must
            # always be closed, hence the finally clause.
            (file, pathname, desc) = imp.find_module(fullname)
            try:
                return imp.load_module(fullname, file, pathname, desc)
            except Exception,e:
                raise ImportError( "Errors in loading sikuli module: %s\n%s\n - HOW TO FIX? Try adding \"from sikuli import *\" in the module.\n" %(fullname, e) )
            finally:
                if file:
                    file.close()

        def load_module(self, module_name):
            #print "SikuliLoader.load_module", module_name
            module_name = _stripPackagePrefix(module_name)
            # Make the bundle importable and append its directory to the
            # SIKULI_IMAGE_PATH property so images inside it can be found.
            sys.path.append(self.path)
            img_path = java.lang.System.getProperty("SIKULI_IMAGE_PATH")
            if not img_path:
                img_path = ""
            elif img_path[-1] != Env.getSeparator():
                img_path += Env.getSeparator()
            img_path += self.path
            java.lang.System.setProperty("SIKULI_IMAGE_PATH", img_path)
            return self._load_module(module_name)

    def _find_module(self, module_name, fullpath):
        # A module "foo" is backed by a directory "foo.sikuli".
        fullpath = fullpath + "/" + module_name + ".sikuli"
        if os.path.exists(fullpath):
            #print "SikuliImporter found", fullpath
            return self.SikuliLoader(fullpath)
        return None

    def find_module(self, module_name, package_path):
        #print "SikuliImporter.find_module", module_name, package_path
        module_name = _stripPackagePrefix(module_name)
        if package_path:
            paths = package_path
        else:
            paths = sys.path
        if not "." in paths:
            paths.append(".")
        for path in paths:
            mod = self._find_module(module_name, path)
            if mod:
                return mod
        # Fall back to a jar of the same name; loading it is a side effect
        # only -- None is returned either way so normal import continues.
        if Sikuli.load(module_name +".jar"):
            Debug.info(module_name + ".jar loaded")
            return None
        return None
# Install the importer on Python's import machinery, then delete the class
# name so only the registered instance remains visible from this module.
sys.meta_path.append(SikuliImporter())
del SikuliImporter
| {
"content_hash": "dd6e4b53a31f833da8b907f9134ce9cd",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 159,
"avg_line_length": 30.68,
"alnum_prop": 0.6114732724902217,
"repo_name": "bx5974/sikuli",
"id": "70a12aae51a81824bc6dce03443fc1abc657d957",
"size": "2369",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "sikuli-script/src/main/python/sikuli/SikuliImporter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "687"
},
{
"name": "C",
"bytes": "4033"
},
{
"name": "C++",
"bytes": "198803"
},
{
"name": "CMake",
"bytes": "54481"
},
{
"name": "CSS",
"bytes": "9821"
},
{
"name": "Groff",
"bytes": "3041"
},
{
"name": "HTML",
"bytes": "3477218"
},
{
"name": "Java",
"bytes": "1571495"
},
{
"name": "JavaScript",
"bytes": "63045"
},
{
"name": "Makefile",
"bytes": "831"
},
{
"name": "Objective-C",
"bytes": "6872"
},
{
"name": "Python",
"bytes": "109800"
},
{
"name": "Shell",
"bytes": "2533"
}
],
"symlink_target": ""
} |
import json
from .compat import six
def extra_as_internal(obj):
    """Normalize ``obj.extra`` in place, decoding a JSON string if present.

    Returns ``{}`` when *obj* has no ``extra`` attribute; otherwise returns
    ``obj.extra`` (left unchanged when it is not a string or fails to decode).
    """
    _missing = object()
    extra = getattr(obj, 'extra', _missing)
    if extra is _missing:
        return {}
    if isinstance(extra, six.string_types):
        try:
            obj.extra = json.loads(extra)
        except (TypeError, ValueError):
            pass
    return obj.extra
def get_extra(obj, attr_name, default=None):
    """Look up *attr_name* inside ``obj.extra``, decoding a JSON string first.

    Returns *default* when ``extra`` is absent, cannot be decoded, or is falsy.
    """
    _missing = object()
    extra = getattr(obj, 'extra', _missing)
    if extra is _missing:
        return default
    if isinstance(extra, six.string_types):
        try:
            obj.extra = json.loads(extra)
        except (TypeError, ValueError):
            return default
    return obj.extra.get(attr_name, default) if obj.extra else default
| {
"content_hash": "c2c2e04010e80db62f17f8c06360e0d4",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 70,
"avg_line_length": 27.166666666666668,
"alnum_prop": 0.6134969325153374,
"repo_name": "djaodjin/djaodjin-survey",
"id": "24835f5b066db68368697bb0fe4c3b146ce69e78",
"size": "1996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "survey/helpers.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "296"
},
{
"name": "HTML",
"bytes": "29371"
},
{
"name": "JavaScript",
"bytes": "111241"
},
{
"name": "Makefile",
"bytes": "4583"
},
{
"name": "Python",
"bytes": "344027"
}
],
"symlink_target": ""
} |
from pyfaces import pyfaces
import sys,time
if __name__ == "__main__":
    # CLI entry point: pyfacesdemo <imgname> <dirname> <numofeigenfaces> <threshold>
    try:
        start = time.time()
        argsnum=len(sys.argv)
        #print "args:",argsnum
        if(argsnum<5):
            print "usage:python pyfacesdemo imgname dirname numofeigenfaces threshold "
            sys.exit(2)
        imgname=sys.argv[1]
        dirname=sys.argv[2]
        egfaces=int(sys.argv[3])
        thrshld=float(sys.argv[4])
        # Recognition runs as a side effect of constructing PyFaces.
        pyf=pyfaces.PyFaces(imgname,dirname,egfaces,thrshld)
        end = time.time()
        #print 'took :',(end-start),'secs'
    except Exception,detail:
        # Any failure (bad numeric args, missing files, SystemExit is NOT
        # caught by this clause) falls through to the usage text.
        print detail.args
        print "usage:python pyfacesdemo imgname dirname numofeigenfaces threshold "
| {
"content_hash": "a838256ed00fb637167b0e5606cb840c",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 87,
"avg_line_length": 35,
"alnum_prop": 0.5782312925170068,
"repo_name": "anandmv/face-recognition",
"id": "8b17534ef4cf1009ffb23394ff9d58c5b3d989e4",
"size": "735",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/pyfaces/__main__.py",
"mode": "33261",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""
Author: Frederik Muller, xmulle20@stud.fit.vutbr.cz
Date: 04/2017
"""
import bcrypt
from flask import request
from bson import json_util, ObjectId
import pymongo
from slugify import slugify
from api import auth, db
from api.module import Module
from api.models.models import Product, ProductException, Review, ReviewException, Customer, ProductProperty, TypeProperty, TypePropertyException, Category, CategoryException
from api.role import Role
products = Module('products', __name__, url_prefix='/products', no_version=True)
def count_products():
    """Number of product documents in the Mongo collection.

    FIXME: not used anywhere, remove.
    """
    collection = db.products
    return collection.count()
def get_products():
    """Return every product, each with its categories, as a JSON array."""
    def serialize(product):
        data = product.to_dict()
        data["categories"] = product.categories_dict()
        return data
    payload = [serialize(p) for p in Product.query.all()]
    return json_util.dumps(payload)
@auth.required(Role.admin)
def add_product():
    """Create a product from the JSON request body (admin only).

    The body's "categories" list is resolved to Category rows; any
    unknown category aborts the request.  Returns the stored product
    (with categories) as JSON.
    """
    payload = request.get_json()
    try:
        category_entries = payload.pop("categories", [])
        product = Product.from_dict(payload)
        for entry in category_entries:
            category = Category.query.get(entry.get("id", None))
            if category is None:
                raise CategoryException("Missing category")
            product.categories.append(category)
    except Exception as e:
        print(e)
        raise ProductException("Could not convert dictionary to Product")
    try:
        product.slug = slugify(product.name, to_lower=True)
        db.db.session.add(product)
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise ProductException("Could not add product to database")
    inserted = Product.query.get_or_404(product.id)
    result = inserted.to_dict()
    result["categories"] = inserted.categories_dict()
    return json_util.dumps(result)
@auth.required(Role.admin)
def remove_product(product_id):
    """Delete the product with *product_id* (admin only); return its dict."""
    product = Product.query.get_or_404(product_id)
    try:
        db.db.session.delete(product)
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise ProductException("Could not remove product")
    return json_util.dumps(product.to_dict())
@auth.required(Role.admin)
def edit_product(product_id):
    """Update an existing product from the JSON body (admin only).

    Only non-empty fields present in the body are applied; a new name
    also regenerates the slug (which an explicit "slug" field may then
    override).  The category list is rebuilt from scratch.
    """
    payload = request.get_json()
    product = Product.query.get_or_404(product_id)
    if "name" in payload and payload["name"] != "":
        product.name = payload["name"]
        product.slug = slugify(product.name, to_lower=True)
    for field in ("price", "slug", "description", "image", "in_stock", "hidden"):
        if field in payload and payload[field] != "":
            setattr(product, field, payload[field])
    # Clear product categories, then add all requested ones again.
    product.categories = []
    try:
        for entry in payload.pop("categories", []):
            category = Category.query.get(entry.get("id", None))
            if category is None:
                raise CategoryException("Missing category")
            product.categories.append(category)
    except Exception as e:
        print(e)
        raise ProductException("Could not edit product's categories")
    # Update the product and return updated document
    try:
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise ProductException("Could not edit product")
    result = product.to_dict()
    result["categories"] = product.categories_dict()
    return json_util.dumps(result)
def get_product(product_id):
    """Return a single product (with its categories) as JSON, or 404."""
    product = Product.query.get_or_404(product_id)
    data = product.to_dict()
    data["categories"] = product.categories_dict()
    return json_util.dumps(data)
@auth.required()
def add_review(product_id):
    """Create a review for *product_id* by the customer in the session.

    Expects the review fields in the JSON body; product and customer
    are resolved server-side from the URL and the Authorization header.
    """
    session_id = request.headers.get('Authorization', None)
    if not session_id:
        # NOTE(review): SessionException is never imported in this module,
        # so this raise (and the except below) would fail with NameError at
        # runtime -- confirm where SessionException should be imported from.
        raise SessionException("Header field 'Authorization' not found.")
    try:
        session = auth.lookup(session_id)
    except SessionException:
        raise SessionException("Session not found")
    r = request.get_json()
    r["product"] = Product.query.get_or_404(product_id)
    r["customer"] = Customer.query.get_or_404(session["user"].id)
    try:
        review = Review.from_dict(r)
    except Exception as e:
        print(e)
        raise ReviewException("Could not convert dictionary to Review")
    try:
        db.db.session.add(review)
        res = db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise ReviewException("Could not add review to database")
    # Re-query to return the row as stored (one review per product/customer).
    inserted = db.db.session.query(Review).\
        filter_by(product_id = product_id, customer_id = session["user"].id).first()
    review = inserted.to_dict()
    return(json_util.dumps(review))
def get_reviews(product_id):
    """Return all reviews for a product, each annotated with reviewer info."""
    annotated = []
    reviews = db.db.session.query(Review).filter_by(product_id = product_id).all()
    for review in reviews:
        customer = review.customer.to_dict()
        entry = review.to_dict()
        # Expose only the public subset of the customer record.
        entry["customer"] = {
            "username": customer["username"],
            "first_name": customer["first_name"],
            "last_name": customer["last_name"],
        }
        annotated.append(entry)
    return json_util.dumps(annotated)
def get_properties(product_id):
    """Return the product's type properties merged with their base property.

    Keys from the base ProductProperty overwrite TypeProperty keys.
    """
    merged = []
    type_properties = db.db.session.query(TypeProperty).filter_by(product_id = product_id).all()
    for tp in type_properties:
        entry = tp.to_dict()
        entry.update(tp.product_property.to_dict())
        merged.append(entry)
    return json_util.dumps(merged)
@auth.required(Role.admin)
def add_property(product_id, property_id):
    """Attach a value for property *property_id* to product *product_id* (admin only)."""
    payload = request.get_json()
    payload["product"] = Product.query.get_or_404(product_id)
    payload["product_property"] = ProductProperty.query.get_or_404(property_id)
    try:
        type_property = TypeProperty.from_dict(payload)
    except Exception as e:
        print(e)
        raise TypePropertyException("Could not convert dictionary to TypeProperty")
    try:
        db.db.session.add(type_property)
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise TypePropertyException("Could not add type property to database")
    inserted = db.db.session.query(TypeProperty).\
        filter_by(product_id=product_id, product_property_id=property_id).first()
    return json_util.dumps(inserted.to_dict())
@auth.required(Role.admin)
def edit_property(product_id, property_id):
    """Update the value of a product's type property (admin only)."""
    payload = request.get_json()
    type_property = db.db.session.query(TypeProperty).\
        filter_by(product_id=product_id, product_property_id=property_id).first()
    # Only a non-empty "value" field is applied.
    if "value" in payload and payload["value"] != "":
        type_property.value = payload["value"]
    try:
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise TypePropertyException("Could not edit type property")
    refreshed = db.db.session.query(TypeProperty).\
        filter_by(product_id=product_id, product_property_id=property_id).first()
    return json_util.dumps(refreshed.to_dict())
@auth.required(Role.admin)
def remove_property(product_id, property_id):
    """Detach property *property_id* from product *product_id* (admin only)."""
    type_property = db.db.session.query(TypeProperty).\
        filter_by(product_id=product_id, product_property_id=property_id).first()
    try:
        db.db.session.delete(type_property)
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise TypePropertyException("Could not remove type property")
    return json_util.dumps(type_property.to_dict())
@auth.required(Role.admin)
def add_category(product_id, category_id):
    """Put product *product_id* into category *category_id* (admin only)."""
    product = Product.query.get_or_404(product_id)
    category = Category.query.get_or_404(category_id)
    category.products.append(product)
    try:
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise CategoryException("Could not add category to product")
    # Re-fetch both sides and confirm the association actually exists.
    product = Product.query.get_or_404(product_id)
    category = Category.query.get_or_404(category_id)
    if product not in category.products:
        raise CategoryException("Not found", status_code=404)
    return json_util.dumps(product.to_dict())
def get_categories(product_id):
    """List the categories assigned to product *product_id* as JSON."""
    product = Product.query.get_or_404(product_id)
    return json_util.dumps([c.to_dict() for c in product.categories])
@auth.required(Role.admin)
def remove_category(product_id, category_id):
    """Take product *product_id* out of category *category_id* (admin only)."""
    product = Product.query.get_or_404(product_id)
    category = Category.query.get_or_404(category_id)
    if category not in product.categories:
        raise CategoryException("There is not such a category", status_code=404)
    product.categories.remove(category)
    try:
        db.db.session.commit()
    except Exception as e:
        db.db.session.rollback()
        print(e)
        raise CategoryException("Could not remove category from product")
    # Verify the removal took effect before reporting success.
    product = Product.query.get_or_404(product_id)
    if category in product.categories:
        raise CategoryException("Failed to delete", status_code=404)
    return json_util.dumps(category.to_dict())
# Route table: bind the view functions above to the products blueprint.
products.add_url_rule('', view_func=get_products, methods=['GET'])
products.add_url_rule('', view_func=add_product, methods=['POST'])
products.add_url_rule('/<string:product_id>', view_func=get_product, methods=['GET'])
products.add_url_rule('/<string:product_id>', view_func=edit_product, methods=['PUT'])
products.add_url_rule('/<string:product_id>', view_func=remove_product, methods=['DELETE'])
products.add_url_rule('/<string:product_id>/reviews', view_func=add_review, methods=['POST'])
products.add_url_rule('/<string:product_id>/reviews', view_func=get_reviews, methods=['GET'])
products.add_url_rule('/<string:product_id>/properties', view_func=get_properties, methods=['GET'])
products.add_url_rule('/<string:product_id>/properties/<string:property_id>', view_func=add_property, methods=['POST'])
products.add_url_rule('/<string:product_id>/properties/<string:property_id>', view_func=edit_property, methods=['PUT'])
products.add_url_rule('/<string:product_id>/properties/<string:property_id>', view_func=remove_property, methods=['DELETE'])
products.add_url_rule('/<string:product_id>/categories/<string:category_id>', view_func=add_category, methods=['POST'])
products.add_url_rule('/<string:product_id>/categories', view_func=get_categories, methods=['GET'])
products.add_url_rule('/<string:product_id>/categories/<string:category_id>', view_func=remove_category, methods=['DELETE'])
| {
"content_hash": "a3ecfbd2709ee9789da1afd70981ae43",
"timestamp": "",
"source": "github",
"line_count": 356,
"max_line_length": 173,
"avg_line_length": 30.691011235955056,
"alnum_prop": 0.7250594911220941,
"repo_name": "petrstehlik/pyngShop",
"id": "2ff33e19add1bcca83ef6825502425e52fe85205",
"size": "10926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/modules/products.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "253"
},
{
"name": "CSS",
"bytes": "15263"
},
{
"name": "HTML",
"bytes": "79492"
},
{
"name": "JavaScript",
"bytes": "26720"
},
{
"name": "PLSQL",
"bytes": "20567"
},
{
"name": "Python",
"bytes": "94376"
},
{
"name": "TypeScript",
"bytes": "96175"
}
],
"symlink_target": ""
} |
def none_to_zero(integer):
    """Return *integer*, substituting 0 when it is None."""
    return 0 if integer is None else integer
| {
"content_hash": "d43f2deef183846146cd9d1b306d2de0",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 26,
"avg_line_length": 20.2,
"alnum_prop": 0.5841584158415841,
"repo_name": "RedBulli/Django_SnookerStats",
"id": "f809f345328eb96a07cfc4282c9919dfecaebfe7",
"size": "101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Snooker/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "359363"
},
{
"name": "Python",
"bytes": "43108"
}
],
"symlink_target": ""
} |
import logging
import os
import re
import time
from datetime import datetime
from jinja2 import Environment, FileSystemLoader
import eitu.constants as constants
import eitu.formaters as formaters
import eitu.time_edit as timeEdit
from eitu.wifi import retrieve, write_database, read_database, stale_database
def fake_room(room):
    """True when *room* matches one of the configured fake-room patterns."""
    for pattern in constants.FAKES:
        if re.search(pattern, room, re.IGNORECASE):
            return True
    return False
def fetch_schedules():
    """Fetch TimeEdit events and return a {room: merged schedule} dict.

    Refreshes the local TimeEdit database when stale, de-duplicates events
    by uid, groups them per (non-fake) room and merges adjacent/overlapping
    events inside each room's schedule.
    """
    if timeEdit.stale_database(time.time()):
        timeEdit.write_database()
    events = timeEdit.get_events()
    # Remove duplicate events (last occurrence of each uid wins)
    events = {e['uid']: e for e in events}.values()
    # Establish schedules of events for each room
    logging.info('Establishing schedules')
    schedules = {}
    for event in events:
        for room in event['location']:
            if room not in schedules:
                schedules[room] = []
            schedules[room].append(event)
    schedules = {key: s for key, s in schedules.items() if not fake_room(key)}
    # Merge adjacent and overlapping events in each schedule.
    # BUG FIX: the previous code assigned the merged list to the loop
    # variable ("schedule = merged"), which silently discarded the merge;
    # the result is now written back into the dict.
    logging.info('Merging events')
    for room, schedule in schedules.items():
        schedule.sort(key=lambda event: event['start'])
        merged = []
        for event in schedule:
            if merged and merged[-1]['end'] >= event['start']:
                merged[-1]['end'] = event['end']
            else:
                merged.append(event)
        schedules[room] = merged
    return schedules
def fetch_wifi():
    """Return per-room WiFi occupancy, refreshing the database when stale.

    Returns {} on any failure so the caller can still render without
    WiFi data.
    """
    try:
        if stale_database(time.time()):
            # could be cleaned up so that retrieved data is converted to the
            # correct format without writing to the database and re-reading it
            write_database(retrieve())
        return read_database()
    except Exception:
        # Narrowed from a bare ``except`` so SystemExit/KeyboardInterrupt
        # still propagate; log the full traceback instead of just a message.
        logging.exception('Failed to fetch WiFi data')
        return {}
def render(schedules, wifi):
    """Render index.html with the current timestamp and WiFi occupancy."""
    # Establish present time
    now = datetime.now(constants.TZ)
    logging.info('Determining status of rooms')
    logging.info('Rendering index.html')
    template_dir = os.path.join(os.path.dirname(__file__), 'templates')
    env = Environment(loader=FileSystemLoader(template_dir))
    template = env.get_template('index.html')
    return template.render(
        updated=formaters.format_date(now),
        wifi=wifi,
    )
| {
"content_hash": "47d61a014118633aa9cab3e42ad9e320",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 100,
"avg_line_length": 30.666666666666668,
"alnum_prop": 0.6473429951690821,
"repo_name": "christianknu/eitu",
"id": "b2573bdbd24d7ff99e965ab484f69c1f8a204481",
"size": "2532",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "eitu/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2348"
},
{
"name": "JavaScript",
"bytes": "11198"
},
{
"name": "Python",
"bytes": "22187"
}
],
"symlink_target": ""
} |
import copy
from collections.abc import MutableMapping
from functools import partial
from typing import List
from flask import Response, render_template_string, request
from flask.views import View
from graphql import specified_rules
from graphql.error import GraphQLError
from graphql.type.schema import GraphQLSchema
from graphql_server import (
GraphQLParams,
HttpQueryError,
encode_execution_results,
format_error_default,
json_encode,
load_json_body,
run_http_query,
)
from graphql_server.render_graphiql import (
GraphiQLConfig,
GraphiQLData,
GraphiQLOptions,
render_graphiql_sync,
)
class GraphQLView(View):
    """Flask ``View`` serving GraphQL over HTTP.

    Configure via ``GraphQLView.as_view(name, schema=..., **options)``:
    any keyword matching a class attribute below overrides it.  Supports
    batched queries, pretty-printed JSON and the GraphiQL in-browser IDE.
    """

    # --- configuration attributes (overridable through as_view kwargs) ---
    schema = None                  # required: a graphql-core GraphQLSchema
    root_value = None              # root object handed to resolvers
    context = None                 # mapping copied into the per-request context
    pretty = False                 # pretty-print JSON responses
    graphiql = False               # serve GraphiQL on browser GET requests
    graphiql_version = None
    graphiql_template = None
    graphiql_html_title = None
    middleware = None
    validation_rules = None        # None -> graphql-core specified_rules
    batch = False                  # accept a JSON list of queries
    subscriptions = None           # subscription endpoint URL for GraphiQL
    headers = None
    default_query = None
    header_editor_enabled = None
    should_persist_headers = None

    methods = ["GET", "POST", "PUT", "DELETE"]

    format_error = staticmethod(format_error_default)
    encode = staticmethod(json_encode)

    def __init__(self, **kwargs):
        """Apply keyword overrides and validate that a GraphQLSchema is set."""
        super(GraphQLView, self).__init__()
        for key, value in kwargs.items():
            if hasattr(self, key):
                setattr(self, key, value)
        if not isinstance(self.schema, GraphQLSchema):
            # maybe the GraphQL schema is wrapped in a Graphene schema
            self.schema = getattr(self.schema, "graphql_schema", None)
            if not isinstance(self.schema, GraphQLSchema):
                raise TypeError("A Schema is required to be provided to GraphQLView.")

    def get_root_value(self):
        """Root value passed to the executor; override for custom roots."""
        return self.root_value

    def get_context(self):
        """Per-request context: a copy of ``self.context`` plus the Flask request."""
        context = (
            copy.copy(self.context)
            if self.context and isinstance(self.context, MutableMapping)
            else {}
        )
        if isinstance(context, MutableMapping) and "request" not in context:
            context.update({"request": request})
        return context

    def get_middleware(self):
        """Middleware chain handed to graphql-core (may be None)."""
        return self.middleware

    def get_validation_rules(self):
        """Validation rules; defaults to graphql-core's specified_rules."""
        if self.validation_rules is None:
            return specified_rules
        return self.validation_rules

    def dispatch_request(self):
        """Handle one HTTP request: parse, execute, encode, then respond.

        Renders GraphiQL for browser GET requests when enabled; otherwise
        returns an application/json Response.  Transport-level errors
        (HttpQueryError) become JSON error payloads with their own status.
        """
        try:
            request_method = request.method.lower()
            data = self.parse_body()

            # GraphiQL is only served for plain GET requests from a browser;
            # in that mode execution errors are caught and shown in the UI.
            show_graphiql = request_method == "get" and self.should_display_graphiql()
            catch = show_graphiql

            pretty = self.pretty or show_graphiql or request.args.get("pretty")

            all_params: List[GraphQLParams]
            execution_results, all_params = run_http_query(
                self.schema,
                request_method,
                data,
                query_data=request.args,
                batch_enabled=self.batch,
                catch=catch,
                # Execute options
                root_value=self.get_root_value(),
                context_value=self.get_context(),
                middleware=self.get_middleware(),
                validation_rules=self.get_validation_rules(),
            )
            result, status_code = encode_execution_results(
                execution_results,
                is_batch=isinstance(data, list),
                format_error=self.format_error,
                encode=partial(self.encode, pretty=pretty),  # noqa
            )

            if show_graphiql:
                # Render the GraphiQL page with the (first) query pre-filled.
                graphiql_data = GraphiQLData(
                    result=result,
                    query=getattr(all_params[0], "query"),
                    variables=getattr(all_params[0], "variables"),
                    operation_name=getattr(all_params[0], "operation_name"),
                    subscription_url=self.subscriptions,
                    headers=self.headers,
                )
                graphiql_config = GraphiQLConfig(
                    graphiql_version=self.graphiql_version,
                    graphiql_template=self.graphiql_template,
                    graphiql_html_title=self.graphiql_html_title,
                    jinja_env=None,
                )
                graphiql_options = GraphiQLOptions(
                    default_query=self.default_query,
                    header_editor_enabled=self.header_editor_enabled,
                    should_persist_headers=self.should_persist_headers,
                )
                source = render_graphiql_sync(
                    data=graphiql_data, config=graphiql_config, options=graphiql_options
                )
                return render_template_string(source)

            return Response(result, status=status_code, content_type="application/json")

        except HttpQueryError as e:
            parsed_error = GraphQLError(e.message)
            return Response(
                self.encode(dict(errors=[self.format_error(parsed_error)])),
                status=e.status_code,
                headers=e.headers,
                content_type="application/json",
            )

    @staticmethod
    def parse_body():
        """Decode the request body according to its mimetype; {} if unknown."""
        # We use mimetype here since we don't need the other
        # information provided by content_type
        content_type = request.mimetype
        if content_type == "application/graphql":
            return {"query": request.data.decode("utf8")}

        elif content_type == "application/json":
            return load_json_body(request.data.decode("utf8"))

        elif content_type in (
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            return request.form

        return {}

    def should_display_graphiql(self):
        """True when GraphiQL is enabled, not suppressed via ?raw, and HTML is wanted."""
        if not self.graphiql or "raw" in request.args:
            return False
        return self.request_wants_html()

    @staticmethod
    def request_wants_html():
        """True when the Accept header prefers text/html over application/json."""
        best = request.accept_mimetypes.best_match(["application/json", "text/html"])
        return (
            best == "text/html"
            and request.accept_mimetypes[best]
            > request.accept_mimetypes["application/json"]
        )
| {
"content_hash": "ba00631866843fdee98be529ff07e774",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 88,
"avg_line_length": 33.63586956521739,
"alnum_prop": 0.5845855550169656,
"repo_name": "graphql-python/graphql-server-core",
"id": "063a67a2271d1b6d60ecac3076e2feed4c00240d",
"size": "6189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphql_server/flask/graphqlview.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "218628"
},
{
"name": "Shell",
"bytes": "329"
}
],
"symlink_target": ""
} |
import StringIO
from PIL import Image
class Modifier(object):
    """Base class for image transformations.

    Subclasses override :meth:`run`, mutate ``self.image`` in place and
    return ``self`` so modifiers can be chained.
    """
    def __init__(self, image, params):
        self.params = params
        self.image = image

    def run(self):
        """No-op in the base class; return self for chaining."""
        return self
class SizeModifier(Modifier):
    """
    Modifies the physical dimensions of an image. Keeps the image ratio if
    either `width` or `height` is provided; when both are provided the image
    is scaled to cover the target box and then center-cropped (the ratio is
    preserved by cropping rather than warping -- see run()).
    Params:
        - width: integer value indicating the desired width, in pixels
        - height: integer value indicating the desired height, in pixels
    """
    def run(self):
        # No-op when neither dimension was requested.
        if self.params.get('width') or self.params.get('height'):
            image_ratio = self.image.size[0] / float(self.image.size[1]) # width / height
            # determine the width and height values
            if self.params.get('width') and self.params.get('height'):
                desired_width = int(float(self.params['width']))
                desired_height = int(float(self.params['height']))
                # Smallest ratio-preserving size covering the target box...
                width, height = self.calculate_minimum_height_width(self.image.size[0], self.image.size[1], desired_width, desired_height)
                # ...and the centered crop window inside it.
                offset_x, offset_y = self.calculate_x_y_crop_offsets(width, height, desired_width, desired_height)
                self.image = self.image.resize((width, height), Image.ANTIALIAS)
                self.image = self.image.crop((offset_x, offset_y, offset_x + desired_width, offset_y + desired_height))
            elif self.params.get('width'):
                width = int(float(self.params['width']))
                height = int(width / image_ratio)
                # apply new width and height values
                self.image = self.image.resize((width, height), Image.ANTIALIAS)
            elif self.params.get('height'):
                height = int(float(self.params['height']))
                width = int(height * image_ratio)
                # apply new width and height values
                self.image = self.image.resize((width, height), Image.ANTIALIAS)
            else:
                raise Exception('Internal Error. Something really strange happened')
        return self

    @staticmethod
    def calculate_minimum_height_width(image_width, image_height, desired_width, desired_height):
        """
        Takes a current image width & height, as well as a desired width & height
        Returns a (width, height) tuple that returns the minimum width & height values
        that will work for the desired_width & desired_height, while maintaining image ratio
        """
        image_width, image_height = float(image_width), float(image_height)
        desired_width, desired_height = float(desired_width), float(desired_height)
        # resize the width and height to match the desired height, while maintaining ratio
        scaled_width = desired_height / image_height * image_width
        scaled_height = desired_height
        # if the new width is below the desired width, scale up to match width
        if scaled_width < desired_width:
            scaled_height = desired_width / scaled_width * scaled_height
            scaled_width = desired_width
        # truncation (not rounding) matches the crop arithmetic below
        scaled_width, scaled_height = int(scaled_width), int(scaled_height)
        return scaled_width, scaled_height

    @staticmethod
    def calculate_x_y_crop_offsets(image_width, image_height, desired_width, desired_height):
        """
        Takes a current image width & height, as well as a desired width & height
        Returns the offset x & y positions needed when cropping an image down the center
        """
        offset_x = float(image_width - desired_width) / 2
        offset_y = float(image_height - desired_height) / 2
        offset_x, offset_y = int(offset_x), int(offset_y)
        return offset_x, offset_y
class QualityModifier(Modifier):
    """
    Modifies the quality (and file size) of the image
    Params:
        - quality: 0-100 integer value representing the desired quality
    """
    def run(self):
        # Skip when no quality was given, or when it is >= 100 (no reduction).
        if self.params.get('quality') and not float(self.params.get('quality')) >= 100:
            quality = int(float(self.params['quality']))
            # PIL can only apply a quality change during the save method, so we need to
            # save the modified image to a string buffer and then re-initialize it. This
            # is kind of stupid and slow, so there may be a better way
            quality_modified_data = StringIO.StringIO()
            self.image.save(quality_modified_data, 'jpeg', quality=quality)
            quality_modified_data.seek(0)
            self.image = Image.open(quality_modified_data)
        return self
| {
"content_hash": "1aae4d36936e886330f27c2fcb98f153",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 138,
"avg_line_length": 44.59615384615385,
"alnum_prop": 0.6304441569642087,
"repo_name": "jskopek/frame",
"id": "0411f29abdde5611790e5d5a9d1491dcd88a8e30",
"size": "4638",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "images/modifiers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2537"
},
{
"name": "Python",
"bytes": "35547"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Item table.
    # Kept byte-stable on purpose -- do not hand-edit applied migrations.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_text', models.CharField(default='', max_length=200)),
                ('desc_text', models.TextField()),
                ('impact_text', models.TextField()),
                ('start_date', models.DateTimeField()),
                ('due_date', models.DateTimeField()),
                # NOTE(review): default is the string '-1' on a FloatField;
                # it appears to act as an "unset" sentinel -- confirm intent.
                ('priority', models.FloatField(default='-1')),
                ('complete', models.BooleanField(default=False)),
            ],
        ),
    ]
| {
"content_hash": "a4bc5769478a18544caca77b6cc3a3af",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 114,
"avg_line_length": 31,
"alnum_prop": 0.5424133811230586,
"repo_name": "noah-dev/todo_django",
"id": "bb537f0d3c9f8b073701df692c35ef4c774f6a74",
"size": "910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "todo/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "138847"
},
{
"name": "HTML",
"bytes": "120293"
},
{
"name": "JavaScript",
"bytes": "366216"
},
{
"name": "Python",
"bytes": "20795"
}
],
"symlink_target": ""
} |
import xml.dom.minidom as dom
from math import *
class Element:
    """A 2-D grid cell with value-based equality on its (x, y) pair."""

    # Class-level defaults; every instance overrides them in __init__.
    x = 0
    y = 0

    def __init__(self, xpos, ypos):
        self.x = xpos
        self.y = ypos

    def __eq__(self, other):
        # Defer to other operand types instead of guessing.
        if not isinstance(other, Element):
            return NotImplemented
        return (self.x, self.y) == (other.x, other.y)

    def __ne__(self, other):
        if not isinstance(other, Element):
            return NotImplemented
        return (self.x, self.y) != (other.x, other.y)
class Device:
    """Floor-plan model of an FPGA board read from an XML description.

    Tracks the board size, the occupied cells (obstacles and units), and
    the free cells derived from gaps between occupied ones, and can emit
    <thermometer> elements into an output XML document.
    """

    def __init__(self):
        # State previously lived in shared class attributes; keep it
        # per-instance so two Device objects never alias the same lists.
        self.__columns = 0
        self.__rows = 0
        self.__occupiedSpace = list()
        self.__freeSpace = list()
        self.__firstUnit = 0

    def getColumns(self):
        """Return the column count read by setDeviceSizeFromFile (0 before)."""
        return self.__columns

    def getRows(self):
        """Return the row count read by setDeviceSizeFromFile (0 before)."""
        return self.__rows

    def getOccupiedSpace(self):
        """Return the occupied cells, sorted by (x, y)."""
        return self.__occupiedSpace

    def getFreeSpace(self):
        """Return the free cells, sorted by (x, y)."""
        return self.__freeSpace

    def setFreeSpaceFromFile(self, xmlDocument):
        """Derive the free cells from the occupied ones.

        A cell is free when its y lies strictly between two consecutive
        entries of the sorted occupied list.
        """
        self.setOccupiedSpaceFromFile(xmlDocument)
        occ = self.getOccupiedSpace()
        oldY = occ[0].y
        freeList = list()
        for element in occ:
            diff = element.y - oldY
            if(diff > 1):
                for i in range(1,diff):
                    newElement = Element(element.x, oldY + i)
                    freeList.append(newElement)
            oldY = element.y
        sortedFreeList = sorted(freeList, key= lambda obj: (obj.x, obj.y))
        self.__freeSpace = sortedFreeList

    def setDeviceSizeFromFile(self,xmlDocument):
        """Read the board size from the first <size> tag.

        NOTE(review): getAttribute returns strings, so columns/rows are
        stored as str, not int -- confirm callers expect that.
        """
        size = xmlDocument.getElementsByTagName("size")
        size = size[0]
        self.__columns = size.getAttribute("cols")
        self.__rows = size.getAttribute("rows")

    def setOccupiedSpaceFromFile(self,xmlDocument):
        """Collect all <obstacle> and <unit> cells as the occupied space,
        sorted by (x, y)."""
        obstacles = xmlDocument.getElementsByTagName("obstacle")
        units = xmlDocument.getElementsByTagName("unit")
        self.getFirstUnitOccurence(units)
        occupied = obstacles + units
        occ = list()
        for element in occupied:
            x = element.getAttribute("x")
            y = element.getAttribute("y")
            newElement = Element(int(x),int(y))
            occ.append(newElement)
        sortedOccupied = sorted(occ, key= lambda obj: (obj.x, obj.y))
        self.__occupiedSpace = sortedOccupied

    def generateLinearThermometers(self,xmlOutputDocument, thermNumber):
        """Append up to ``thermNumber`` <thermometer> nodes to <board>,
        one per free cell found while scanning the occupied list."""
        root = xmlOutputDocument.getElementsByTagName("board")
        root = root[0]
        oldY = 0
        thermID = 0
        occList = self.getOccupiedSpace()
        for occ in occList:
            col = occ.x
            row = occ.y
            diff = row - oldY
            if(diff > 1):
                for i in range(1,diff):
                    newTherm = xmlOutputDocument.createElement("thermometer")
                    newTherm.setAttribute("name", "t{}".format(thermID))
                    newTherm.setAttribute("type", "RO7")
                    newTherm.setAttribute("col", str(col))
                    newTherm.setAttribute("row", str(oldY + i))
                    root.appendChild(newTherm)
                    thermID = thermID + 1
                    # Stop as soon as the requested number is placed.
                    if(thermID > int(thermNumber) - 1):
                        return xmlOutputDocument
            oldY = row
        return xmlOutputDocument

    def getFreeRowLenList(self,freeList):
        """Return the length of every free row.

        NOTE(review): the ``freeList`` parameter is ignored; the method
        always re-reads self.getFreeSpace(). Kept for compatibility.
        """
        rowsLen = list()
        freeList = self.getFreeSpace()
        oldRowLen = freeList[0].x
        # make a list of row lengths: emit one entry whenever x wraps
        # around (a new row starts), plus one for the final element.
        for element in freeList:
            diff = element.x - oldRowLen
            if(diff < 0):
                rowsLen.append(int(oldRowLen + 1))
            elif(freeList[-1] is element):
                rowsLen.append(int(element.x + 1))
            oldRowLen = element.x
        return rowsLen

    def getFirstUnitOccurence(self,units):
        """Remember (and print) the coordinates of the first <unit>."""
        unitsList = list()
        for unit in units:
            x = unit.getAttribute("x")
            y = unit.getAttribute("y")
            newElement = Element(int(x),int(y))
            unitsList.append(newElement)
        # BUG FIX: index 1 returned the *second* unit (and raised
        # IndexError when only one unit existed); the first is index 0.
        firstElement = unitsList[0]
        self.__firstUnit = firstElement
        print("First Unit x: {} y: {}".format(firstElement.x,firstElement.y))

    def getFreeColumnLenList(self,freeList):
        """Return the length of every free column (mirror of
        getFreeRowLenList, walking the y coordinate)."""
        colsLen = list()
        oldColLen = freeList[0].y
        for element in freeList:
            diff = element.y - oldColLen
            if(diff < 0):
                colsLen.append(int(oldColLen + 1))
            elif(freeList[-1] is element):
                colsLen.append(int(element.y + 1))
            # BUG FIX: oldColLen was never advanced, so every diff was
            # taken against the first element; track the previous y just
            # like getFreeRowLenList tracks the previous x.
            oldColLen = element.y
        return colsLen

    def getFreeRowLen(self,sortedFreeList):
        """Return the length of the strictly increasing x prefix,
        sampling one element per column."""
        maximum = -1
        l = 0
        listLen = len(sortedFreeList)
        colLen = self.getFreeColLen(sortedFreeList)
        for i in range(0,listLen,colLen):
            # BUG FIX: the original compared the Element object itself
            # with an int (TypeError on Python 3); compare the x
            # coordinate, which is also what gets assigned to maximum.
            if(sortedFreeList[i].x > maximum):
                maximum = sortedFreeList[i].x
                l = l + 1
            else:
                break
        return l

    def getFreeColLen(self,sortedFreeList):
        """Return the length of the strictly increasing y prefix."""
        maximum = -1
        l = 0
        for i in sortedFreeList:
            if(i.y > maximum):
                maximum = i.y
                l = l + 1
            else:
                break
        return l

    def getFreeSingleRow(self,freeList,index):
        """Return the x coordinates of all cells whose y == index."""
        singleColumnList = list()
        for item in freeList:
            if(item.y == index):
                singleColumnList.append(item.x)
        return singleColumnList

    def getFreeSingleColumn(self, freeList, index):
        """Return the y coordinates of all cells whose x == index.

        Assumes ``freeList`` is sorted by x, so the scan stops early.
        """
        singleRowList = list()
        for item in freeList:
            if(item.x == index):
                singleRowList.append(item.y)
            elif(item.x > index):
                break
        return singleRowList

    def generateCoords(self, coordList, termNumber):
        """Pick up to ``termNumber`` coordinates from ``coordList``.

        Returns the first ``termNumber`` entries (stride 1), or [] when
        ``coordList`` is too short for an exact pick.

        NOTE(review): the ``return`` sits inside the stride loop, so
        strides > 1 are never tried and a list shorter than 2 returns
        None -- confirm whether spreading over larger strides was the
        original intent before changing this.
        """
        coordLen = len(coordList)
        posList = list()
        for i in range(1,coordLen):
            termsLeft = termNumber
            newList = list()
            for item in range(0,coordLen,i):
                newList.append(coordList[item])
                termsLeft = termsLeft - 1
                if(termsLeft < 0 or termsLeft == 0):
                    break
            if(termsLeft == 0):
                posList = newList
            return posList

    def generateThermometersNet(self, xmlOutDocument,thermsInRow, rowsNumber):
        """Lay a thermsInRow x rowsNumber grid of <thermometer> nodes over
        free row 6 / free column 38 and append them to <board>."""
        xmlList = xmlOutDocument.getElementsByTagName("board")
        root = xmlList[0]
        freeList = self.getFreeSpace()
        row = self.getFreeSingleRow(freeList,6)
        column = self.getFreeSingleColumn(freeList,38)
        colsCoords = self.generateCoords(row,int(thermsInRow))
        rowsCoords = self.generateCoords(column, int(rowsNumber))
        thermID = 0
        for row in rowsCoords:
            for col in colsCoords:
                newElement = xmlOutDocument.createElement("thermometer")
                newElement.setAttribute("type","RO7")
                newElement.setAttribute("name","T{}".format(str(thermID)))
                thermID = thermID + 1
                newElement.setAttribute("col",str(col))
                newElement.setAttribute("row",str(row))
                root.appendChild(newElement)
        return xmlOutDocument

    def generateXmlHeader(self, xmlOutputDocument, ncdFile):
        """Create the <board> root plus <input>/<output> children; the
        output name is derived from ``ncdFile`` as <stem>_new.ncd."""
        root = xmlOutputDocument.createElement("board")
        root.setAttribute("device", "Virtex5")
        root.setAttribute("mode", "emulation")
        root.setAttribute("version", "0.1")
        xmlOutputDocument.appendChild(root)
        inputComponent = xmlOutputDocument.createElement("input")
        outputComponent = xmlOutputDocument.createElement("output")
        inputComponent.setAttribute("name", str(ncdFile))
        ncdName = str(ncdFile).rsplit(".")
        ncdName = ncdName[0]
        outputComponent.setAttribute("name", "{}_new.ncd".format(ncdName))
        root.appendChild(inputComponent)
        root.appendChild(outputComponent)
        return xmlOutputDocument
| {
"content_hash": "b5212f766816d2229410ced2551875c0",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 85,
"avg_line_length": 24.888157894736842,
"alnum_prop": 0.5971451229183188,
"repo_name": "ComputerArchitectureGroupPWr/Floorplan-Maker",
"id": "8a763993638f47899f4c14238f61b16e635c2f46",
"size": "7566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/device.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "156423"
}
],
"symlink_target": ""
} |
from oslo.config import cfg
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from sqlalchemy.orm import joinedload
from neutron.common import constants
from neutron.common import utils
from neutron.db import agents_db
from neutron.db import model_base
from neutron.extensions import dhcpagentscheduler
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
AGENTS_SCHEDULER_OPTS = [
cfg.StrOpt('network_scheduler_driver',
default='neutron.scheduler.'
'dhcp_agent_scheduler.ChanceScheduler',
help=_('Driver to use for scheduling network to DHCP agent')),
cfg.BoolOpt('network_auto_schedule', default=True,
help=_('Allow auto scheduling networks to DHCP agent.')),
cfg.IntOpt('dhcp_agents_per_network', default=1,
help=_('Number of DHCP agents scheduled to host a network.')),
]
cfg.CONF.register_opts(AGENTS_SCHEDULER_OPTS)
class NetworkDhcpAgentBinding(model_base.BASEV2):
    """Represents binding between neutron networks and DHCP agents."""

    # Composite primary key (network_id, dhcp_agent_id): one row per
    # network/agent pair. ON DELETE CASCADE removes the binding when
    # either the network or the agent row is deleted.
    network_id = sa.Column(sa.String(36),
                           sa.ForeignKey("networks.id", ondelete='CASCADE'),
                           primary_key=True)
    # ORM relationship to the bound agent row.
    dhcp_agent = orm.relation(agents_db.Agent)
    dhcp_agent_id = sa.Column(sa.String(36),
                              sa.ForeignKey("agents.id",
                                            ondelete='CASCADE'),
                              primary_key=True)
class AgentSchedulerDbMixin(agents_db.AgentDbMixin):
    """Common class for agent scheduler mixins."""

    # agent notifiers to handle agent update operations;
    # should be updated by plugins;
    agent_notifiers = {
        constants.AGENT_TYPE_DHCP: None,
        constants.AGENT_TYPE_L3: None,
        constants.AGENT_TYPE_LOADBALANCER: None,
    }

    @staticmethod
    def is_eligible_agent(active, agent):
        """Return True if the agent may be scheduled.

        When ``active`` is None the activeness filter is disabled and
        every agent is eligible; otherwise only agents with a recent
        heartbeat are eligible -- regardless of whether ``active`` is
        True or False (see the inline note below).
        """
        if active is None:
            # filtering by activeness is disabled, all agents are eligible
            return True
        else:
            # note(rpodolyaka): original behaviour is saved here: if active
            #                   filter is set, only agents which are 'up'
            #                   (i.e. have a recent heartbeat timestamp)
            #                   are eligible, even if active is False
            return not agents_db.AgentDbMixin.is_agent_down(
                agent['heartbeat_timestamp'])

    def update_agent(self, context, id, agent):
        """Update an agent row; notify the per-type notifier only when
        admin_state_up actually changed."""
        original_agent = self.get_agent(context, id)
        result = super(AgentSchedulerDbMixin, self).update_agent(
            context, id, agent)
        agent_data = agent['agent']
        agent_notifier = self.agent_notifiers.get(original_agent['agent_type'])
        if (agent_notifier and
            'admin_state_up' in agent_data and
            original_agent['admin_state_up'] != agent_data['admin_state_up']):
            agent_notifier.agent_updated(context,
                                         agent_data['admin_state_up'],
                                         original_agent['host'])
        return result
class DhcpAgentSchedulerDbMixin(dhcpagentscheduler
                                .DhcpAgentSchedulerPluginBase,
                                AgentSchedulerDbMixin):
    """Mixin class to add DHCP agent scheduler extension to db_base_plugin_v2.
    """

    network_scheduler = None

    def get_dhcp_agents_hosting_networks(
        self, context, network_ids, active=None):
        """Return the DHCP agents hosting any of ``network_ids``.

        When ``active`` is not None, agents are additionally filtered
        through AgentSchedulerDbMixin.is_eligible_agent.
        """
        if not network_ids:
            return []
        query = context.session.query(NetworkDhcpAgentBinding)
        query = query.options(joinedload('dhcp_agent'))
        if len(network_ids) == 1:
            query = query.filter(
                NetworkDhcpAgentBinding.network_id == network_ids[0])
        elif network_ids:
            # BUG FIX: the Python ``in`` operator cannot build a SQL IN
            # clause from a SQLAlchemy column (truth-testing the clause
            # raises TypeError); use the column's in_() operator instead.
            query = query.filter(
                NetworkDhcpAgentBinding.network_id.in_(network_ids))
        if active is not None:
            query = (query.filter(agents_db.Agent.admin_state_up == active))
        return [binding.dhcp_agent
                for binding in query
                if AgentSchedulerDbMixin.is_eligible_agent(active,
                                                           binding.dhcp_agent)]

    def add_network_to_dhcp_agent(self, context, id, network_id):
        """Bind ``network_id`` to DHCP agent ``id``.

        Raises InvalidDHCPAgent for a non-DHCP or administratively down
        agent, and NetworkHostedByDHCPAgent when the binding already
        exists. Notifies the DHCP agent notifier on success.
        """
        self._get_network(context, network_id)
        with context.session.begin(subtransactions=True):
            agent_db = self._get_agent(context, id)
            if (agent_db['agent_type'] != constants.AGENT_TYPE_DHCP or
                not agent_db['admin_state_up']):
                raise dhcpagentscheduler.InvalidDHCPAgent(id=id)
            dhcp_agents = self.get_dhcp_agents_hosting_networks(
                context, [network_id])
            for dhcp_agent in dhcp_agents:
                if id == dhcp_agent.id:
                    raise dhcpagentscheduler.NetworkHostedByDHCPAgent(
                        network_id=network_id, agent_id=id)
            binding = NetworkDhcpAgentBinding()
            binding.dhcp_agent_id = id
            binding.network_id = network_id
            context.session.add(binding)
        dhcp_notifier = self.agent_notifiers.get(constants.AGENT_TYPE_DHCP)
        if dhcp_notifier:
            dhcp_notifier.network_added_to_agent(
                context, network_id, agent_db.host)

    def remove_network_from_dhcp_agent(self, context, id, network_id):
        """Unbind ``network_id`` from DHCP agent ``id``.

        Raises NetworkNotHostedByDhcpAgent when no binding exists. The
        agent's DHCP ports are not deleted; their device_id is reset to
        the reserved marker so the IP is reused on a subsequent add.
        """
        agent = self._get_agent(context, id)
        with context.session.begin(subtransactions=True):
            try:
                query = context.session.query(NetworkDhcpAgentBinding)
                binding = query.filter(
                    NetworkDhcpAgentBinding.network_id == network_id,
                    NetworkDhcpAgentBinding.dhcp_agent_id == id).one()
            except exc.NoResultFound:
                raise dhcpagentscheduler.NetworkNotHostedByDhcpAgent(
                    network_id=network_id, agent_id=id)
            # reserve the port, so the ip is reused on a subsequent add
            device_id = utils.get_dhcp_agent_device_id(network_id,
                                                       agent['host'])
            filters = dict(device_id=[device_id])
            ports = self.get_ports(context, filters=filters)
            for port in ports:
                port['device_id'] = constants.DEVICE_ID_RESERVED_DHCP_PORT
                self.update_port(context, port['id'], dict(port=port))
            context.session.delete(binding)
        dhcp_notifier = self.agent_notifiers.get(constants.AGENT_TYPE_DHCP)
        if dhcp_notifier:
            dhcp_notifier.network_removed_from_agent(
                context, network_id, agent.host)

    def list_networks_on_dhcp_agent(self, context, id):
        """Return {'networks': [...]} hosted by agent ``id``."""
        query = context.session.query(NetworkDhcpAgentBinding.network_id)
        query = query.filter(NetworkDhcpAgentBinding.dhcp_agent_id == id)
        net_ids = [item[0] for item in query]
        if net_ids:
            return {'networks':
                    self.get_networks(context, filters={'id': net_ids})}
        else:
            return {'networks': []}

    def list_active_networks_on_active_dhcp_agent(self, context, host):
        """Return the admin-up networks bound to the enabled DHCP agent
        on ``host`` (empty list when the agent is administratively down)."""
        agent = self._get_agent_by_type_and_host(
            context, constants.AGENT_TYPE_DHCP, host)
        if not agent.admin_state_up:
            return []
        query = context.session.query(NetworkDhcpAgentBinding.network_id)
        query = query.filter(NetworkDhcpAgentBinding.dhcp_agent_id == agent.id)
        net_ids = [item[0] for item in query]
        if net_ids:
            return self.get_networks(
                context,
                filters={'id': net_ids, 'admin_state_up': [True]}
            )
        else:
            return []

    def list_dhcp_agents_hosting_network(self, context, network_id):
        """Return {'agents': [...]} hosting ``network_id``."""
        dhcp_agents = self.get_dhcp_agents_hosting_networks(
            context, [network_id])
        agent_ids = [dhcp_agent.id for dhcp_agent in dhcp_agents]
        if agent_ids:
            return {
                'agents': self.get_agents(context, filters={'id': agent_ids})}
        else:
            return {'agents': []}

    def schedule_network(self, context, created_network):
        """Delegate scheduling of a new network to the configured
        network_scheduler, if any."""
        if self.network_scheduler:
            return self.network_scheduler.schedule(
                self, context, created_network)

    def auto_schedule_networks(self, context, host):
        """Ask the scheduler to bind unhosted networks to the DHCP agent
        on ``host``, if a scheduler is configured."""
        if self.network_scheduler:
            self.network_scheduler.auto_schedule_networks(self, context, host)
| {
"content_hash": "1c8c689a98d9cab6e053eefaf969d341",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 79,
"avg_line_length": 42.03414634146341,
"alnum_prop": 0.5909249158639898,
"repo_name": "subramani95/neutron",
"id": "aa8dfadb63bbc6228648e42cbdb79dc25e97f043",
"size": "9258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/db/agentschedulers_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""
Turns raw statistics about soccer matches into features we use
for prediction. Combines a number of games of history to compute
aggregates that can be used to predict the next game.
"""
from pandas.io import gbq
import match_stats
# Games that have stats available. Not all games in the match_games table
# will have stats (e.g. they might be in the future).
MATCH_GAME_WITH_STATS = """
SELECT * FROM (%(match_games)s)
WHERE matchid in (
SELECT matchid FROM (%(stats_table)s) GROUP BY matchid)
""" % {'match_games': match_stats.match_games_table(),
'stats_table': match_stats.team_game_summary_query()}
# Combines statistics from both teams in a match.
# For each two records matching the pattern (m, t1, <stats1>) and
# (m, t2, <stats2>) where m is the match id, t1 and t2 are the two teams,
# stats1 and stats2 are the statistics for those two teams, combines them
# into a single row (m, t1, t2, <stats1>, <stats2>) where all of the
# t2 field names are decorated with the op_ prefix. For example, teamid becomes
# op_teamid, and pass_70 becomes op_pass_70.
_TEAM_GAME_OP_SUMMARY = """
SELECT cur.matchid as matchid,
cur.teamid as teamid,
cur.passes as passes,
cur.bad_passes as bad_passes,
cur.pass_ratio as pass_ratio,
cur.corners as corners,
cur.fouls as fouls,
cur.cards as cards,
cur.goals as goals,
cur.shots as shots,
cur.is_home as is_home,
cur.team_name as team_name,
cur.pass_80 as pass_80,
cur.pass_70 as pass_70,
cur.expected_goals as expected_goals,
cur.on_target as on_target,
cur.length as length,
opp.teamid as op_teamid,
opp.passes as op_passes,
opp.bad_passes as op_bad_passes,
opp.pass_ratio as op_pass_ratio,
opp.corners as op_corners,
opp.fouls as op_fouls,
opp.cards as op_cards,
opp.goals as op_goals,
opp.shots as op_shots,
opp.team_name as op_team_name,
opp.pass_80 as op_pass_80,
opp.pass_70 as op_pass_70,
opp.expected_goals as op_expected_goals,
opp.on_target as op_on_target,
cur.competitionid as competitionid,
cur.seasonid as seasonid,
if (opp.shots > 0, cur.shots / opp.shots, cur.shots * 1.0)
as shots_op_ratio,
if (opp.goals > 0, cur.goals / opp.goals, cur.goals * 1.0)
as goals_op_ratio,
if (opp.pass_ratio > 0, cur.pass_ratio / opp.pass_ratio, 1.0)
as pass_op_ratio,
if (cur.goals > opp.goals, 3,
if (cur.goals == opp.goals, 1, 0)) as points,
cur.timestamp as timestamp,
FROM (%(team_game_summary)s) cur
JOIN (%(team_game_summary)s) opp
ON cur.matchid = opp.matchid
WHERE cur.teamid != opp.teamid
ORDER BY cur.matchid, cur.teamid
""" % {'team_game_summary': match_stats.team_game_summary_query()}
def get_match_history(history_size):
    """ For each team t in each game g, computes the N previous game
        ids where team t played, where N is the history_size (number
        of games of history we use for prediction). The statistics of
        the N previous games will be used to predict the outcome of
        game g.

        Only matches with a complete history window (both the most
        recent and the Nth previous match exist) are returned.
    """
    # BUG FIX: nth_last_matchid was computed with LEAD over *timestamp*
    # instead of matchid. The column is not referenced downstream, but
    # it now matches its name (parallel to last_matchid above).
    return """
SELECT h.teamid as teamid, h.matchid as matchid,
  h.timestamp as timestamp,
  m1.timestamp as previous_timestamp,
  m1.matchid as previous_match
FROM (
SELECT teamid, matchid, timestamp,
  LEAD(matchid, 1) OVER (
    PARTITION BY teamid ORDER BY timestamp DESC)
    as last_matchid,
  LEAD(timestamp, 1) OVER (
    PARTITION BY teamid ORDER BY timestamp DESC)
    as last_match_timestamp,
  LEAD(matchid, %(history_size)d) OVER (
    PARTITION BY teamid ORDER BY timestamp DESC)
    as nth_last_matchid,
  LEAD(timestamp, %(history_size)d) OVER (
    PARTITION BY teamid ORDER BY timestamp DESC)
    as nth_last_match_timestamp,
FROM (%(match_games)s)
) h
JOIN (%(match_games_with_stats)s) m1
ON h.teamid = m1.teamid
WHERE
h.nth_last_match_timestamp is not NULL AND
h.last_match_timestamp IS NOT NULL AND
m1.timestamp >= h.nth_last_match_timestamp AND
m1.timestamp <= h.last_match_timestamp
""" % {'history_size': history_size,
       'match_games_with_stats': MATCH_GAME_WITH_STATS,
       'match_games': match_stats.match_games_table()}
def get_history_query(history_size):
    """ Computes summary statistics for the N preceding matches.

        Averages the per-game stats of each team's last ``history_size``
        games (via get_match_history) and joins them back onto the match
        being predicted. No outcome columns are included.

        NOTE(review): match '442291' is excluded explicitly -- presumably
        bad source data; confirm before removing that filter.
    """
    return """
SELECT
  summary.matchid as matchid,
  pts.teamid as teamid,
  pts.op_teamid as op_teamid,
  pts.competitionid as competitionid,
  pts.seasonid as seasonid,
  pts.is_home as is_home,
  pts.team_name as team_name,
  pts.op_team_name as op_team_name,
  pts.timestamp as timestamp,
  summary.avg_points as avg_points,
  summary.avg_goals as avg_goals,
  summary.op_avg_goals as op_avg_goals,
  summary.pass_70 as pass_70,
  summary.pass_80 as pass_80,
  summary.op_pass_70 as op_pass_70,
  summary.op_pass_80 as op_pass_80,
  summary.expected_goals as expected_goals,
  summary.op_expected_goals as op_expected_goals,
  summary.passes as passes,
  summary.bad_passes as bad_passes,
  summary.pass_ratio as pass_ratio,
  summary.corners as corners,
  summary.fouls as fouls,
  summary.cards as cards,
  summary.shots as shots,
  summary.op_passes as op_passes,
  summary.op_bad_passes as op_bad_passes,
  summary.op_corners as op_corners,
  summary.op_fouls as op_fouls,
  summary.op_cards as op_cards,
  summary.op_shots as op_shots,
  summary.goals_op_ratio as goals_op_ratio,
  summary.shots_op_ratio as shots_op_ratio,
  summary.pass_op_ratio as pass_op_ratio,
FROM (
SELECT hist.matchid as matchid,
  hist.teamid as teamid,
  AVG(games.pass_70) as pass_70,
  AVG(games.pass_80) as pass_80,
  AVG(games.op_pass_70) as op_pass_70,
  AVG(games.op_pass_80) as op_pass_80,
  AVG(games.expected_goals) as expected_goals,
  AVG(games.op_expected_goals) as op_expected_goals,
  AVG(games.passes) as passes,
  AVG(games.bad_passes) as bad_passes,
  AVG(games.pass_ratio) as pass_ratio,
  AVG(games.corners) as corners,
  AVG(games.fouls) as fouls,
  AVG(games.cards) as cards,
  AVG(games.goals) as avg_goals,
  AVG(games.points) as avg_points,
  AVG(games.shots) as shots,
  AVG(games.op_passes) as op_passes,
  AVG(games.op_bad_passes) as op_bad_passes,
  AVG(games.op_corners) as op_corners,
  AVG(games.op_fouls) as op_fouls,
  AVG(games.op_cards) as op_cards,
  AVG(games.op_shots) as op_shots,
  AVG(games.op_goals) as op_avg_goals,
  AVG(games.goals_op_ratio) as goals_op_ratio,
  AVG(games.shots_op_ratio) as shots_op_ratio,
  AVG(games.pass_op_ratio) as pass_op_ratio,
FROM (%(match_history)s) hist
JOIN (%(team_game_op_summary)s) games
  ON hist.previous_match = games.matchid and
     hist.teamid = games.teamid
GROUP BY matchid, teamid
) as summary
JOIN (%(match_games)s) pts on summary.matchid = pts.matchid
  and summary.teamid = pts.teamid
WHERE summary.matchid <> '442291'
ORDER BY matchid, is_home DESC
""" % {'team_game_op_summary': _TEAM_GAME_OP_SUMMARY,
       'match_games': match_stats.match_games_table(),
       'match_history': get_match_history(history_size)}
def get_history_query_with_goals(history_size):
    """ Expands the history_query, which summarizes statistics from past games,
        with the result of who won the current game. This information will not
        be available for future games that we want to predict, but it will be
        available for past games. We can then use this information to train our
        models.
    """
    return """
SELECT
  h.matchid as matchid,
  h.teamid as teamid,
  h.op_teamid as op_teamid,
  h.competitionid as competitionid,
  h.seasonid as seasonid,
  h.is_home as is_home,
  h.team_name as team_name,
  h.op_team_name as op_team_name,
  h.timestamp as timestamp,
  g.goals as goals,
  op.goals as op_goals,
  if (g.goals > op.goals, 3,
      if (g.goals == op.goals, 1, 0)) as points,
  h.avg_points as avg_points,
  h.avg_goals as avg_goals,
  h.op_avg_goals as op_avg_goals,
  h.pass_70 as pass_70,
  h.pass_80 as pass_80,
  h.op_pass_70 as op_pass_70,
  h.op_pass_80 as op_pass_80,
  h.expected_goals as expected_goals,
  h.op_expected_goals as op_expected_goals,
  h.passes as passes,
  h.bad_passes as bad_passes,
  h.pass_ratio as pass_ratio,
  h.corners as corners,
  h.fouls as fouls,
  h.cards as cards,
  h.shots as shots,
  h.op_passes as op_passes,
  h.op_bad_passes as op_bad_passes,
  h.op_corners as op_corners,
  h.op_fouls as op_fouls,
  h.op_cards as op_cards,
  h.op_shots as op_shots,
  h.goals_op_ratio as goals_op_ratio,
  h.shots_op_ratio as shots_op_ratio,
  h.pass_op_ratio as pass_op_ratio,
FROM (%(history_query)s) h
JOIN (%(match_goals)s) g
  ON h.matchid = g.matchid and h.teamid = g.teamid
JOIN (%(match_goals)s) op
  ON h.matchid = op.matchid and h.op_teamid = op.teamid
ORDER BY timestamp DESC, matchid, is_home
""" % {'history_query': get_history_query(history_size),
       'match_goals': match_stats.match_goals_table()}
def get_wc_history_query(history_size):
    """ Identical to the history_query (which, remember, does not have
        outcomes) but gets history for world-cup games.

        NOTE(review): competitionid = 4 is taken to identify World Cup
        matches in this dataset -- confirm against the source schema.
    """
    return """
SELECT * FROM (%(history_query)s) WHERE competitionid = 4
ORDER BY timestamp DESC, matchid, is_home
""" % {'history_query': get_history_query(history_size)}
def get_wc_features(history_size):
    """Fetch World Cup prediction features from BigQuery.

    Builds the world-cup history query for the given history window and
    returns the query result.
    """
    query = get_wc_history_query(history_size)
    return gbq.read_gbq(query)
def get_features(history_size):
    """Fetch training features (history stats plus match outcomes) from
    BigQuery for the given history window."""
    query = get_history_query_with_goals(history_size)
    return gbq.read_gbq(query)
def get_game_summaries():
    """Fetch per-team, per-match summary rows (with opponent stats) from
    BigQuery, newest first."""
    query = """
SELECT * FROM (%(team_game_op_summary)s)
ORDER BY timestamp DESC, matchid, is_home
""" % {'team_game_op_summary': _TEAM_GAME_OP_SUMMARY}
    return gbq.read_gbq(query)
| {
"content_hash": "5fdcf221cc8411a25136f05a3472a284",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 79,
"avg_line_length": 39.358108108108105,
"alnum_prop": 0.5842918454935623,
"repo_name": "Taketrung/ipython-soccer-predictions",
"id": "3429d923137a8b9ff3da96a4b6ebe350a2ca6092",
"size": "12219",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "predict/features.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "44803"
}
],
"symlink_target": ""
} |
"""
:codeauthor: Rahul Handay <rahulha@saltstack.com>
"""
import os
import pytest
import salt.modules.win_path as win_path
import salt.utils.stringutils
import salt.utils.win_reg as reg_util
from tests.support.mock import MagicMock, patch
pytestmark = [pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows]
"""
Test cases for salt.modules.win_path.
"""
@pytest.fixture()
def pathsep():
    """The Windows PATH separator used to join and split PATH entries."""
    return ";"
@pytest.fixture
def configure_loader_modules():
    """Provide the minimal salt loader dunders win_path needs.

    reg.read_value is backed by the real win_reg util; individual tests
    patch it (or get_path/set_value) as required.
    """
    return {
        win_path: {
            "__opts__": {"test": False},
            "__salt__": {},
            "__utils__": {"reg.read_value": reg_util.read_value},
        },
    }
def test_get_path():
    """
    Test to return the system path.

    reg.read_value is mocked, so only the splitting of the registry
    vdata into a list of entries is exercised.
    """
    mock = MagicMock(return_value={"vdata": "C:\\Salt"})
    with patch.dict(win_path.__utils__, {"reg.read_value": mock}):
        assert win_path.get_path() == ["C:\\Salt"]
def test_exists():
    """
    Test to check if the directory is configured.

    get_path is mocked, so only the case-insensitive membership logic of
    win_path.exists is exercised.
    """
    mock = MagicMock(return_value=["C:\\Foo", "C:\\Bar"])
    with patch.object(win_path, "get_path", mock):
        # Ensure case insensitivity respected
        assert (win_path.exists("C:\\FOO")) is True
        assert (win_path.exists("c:\\foo")) is True
        assert (win_path.exists("c:\\mystuff")) is False
def test_util_reg():
    """
    Test to check if registry comes back clean when get_path is called.

    An empty vdata string must yield an empty list, not [''].
    """
    mock = MagicMock(return_value={"vdata": ""})
    with patch.dict(win_path.__utils__, {"reg.read_value": mock}):
        assert win_path.get_path() == []
def test_add(pathsep):
    """
    Test to add the directory to the SYSTEM path.

    The helper ``_run`` mocks get_path / reg.set_value / rehash and
    os.environ, then returns (win_path.add result, env dict, set mock)
    so each scenario below can assert on the return value and on the
    resulting PATH environment string.
    """
    orig_path = ("C:\\Foo", "C:\\Bar")

    # Helper function to make the env var easier to reuse
    def _env(path):
        return {"PATH": salt.utils.stringutils.to_str(pathsep.join(path))}

    # Helper function to make the run call easier to reuse
    def _run(name, index=None, retval=True, path=None):
        if path is None:
            path = orig_path
        env = _env(path)
        # Mock getters and setters
        mock_get = MagicMock(return_value=list(path))
        mock_set = MagicMock(return_value=retval)
        # Mock individual calls that would occur during normal usage
        patch_sep = patch.object(win_path, "PATHSEP", pathsep)
        patch_path = patch.object(win_path, "get_path", mock_get)
        patch_env = patch.object(os, "environ", env)
        patch_dict = patch.dict(win_path.__utils__, {"reg.set_value": mock_set})
        patch_rehash = patch.object(win_path, "rehash", MagicMock(return_value=True))
        with patch_sep, patch_path, patch_env, patch_dict, patch_rehash:
            return win_path.add(name, index), env, mock_set

    # Expected PATH value for a given entry tuple.
    def _path_matches(path):
        return salt.utils.stringutils.to_str(pathsep.join(path))

    # Test an empty reg update
    ret, env, mock_set = _run("")
    assert ret is False

    # Test a successful reg update
    ret, env, mock_set = _run("c:\\salt", retval=True)
    new_path = ("C:\\Foo", "C:\\Bar", "c:\\salt")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test an unsuccessful reg update
    ret, env, mock_set = _run("c:\\salt", retval=False)
    new_path = ("C:\\Foo", "C:\\Bar", "c:\\salt")
    assert ret is False
    assert env["PATH"] == _path_matches(new_path)

    # Test adding with a custom index
    ret, env, mock_set = _run("c:\\salt", index=1, retval=True)
    new_path = ("C:\\Foo", "c:\\salt", "C:\\Bar")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test adding with a custom index of 0
    ret, env, mock_set = _run("c:\\salt", index=0, retval=True)
    new_path = ("c:\\salt", "C:\\Foo", "C:\\Bar")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test adding path with a case-insensitive match already present, and
    # no index provided. The path should remain unchanged and we should not
    # update the registry.
    ret, env, mock_set = _run("c:\\foo", retval=True)
    assert ret is True
    assert env["PATH"] == _path_matches(orig_path)

    # Test adding path with a case-insensitive match already present, and a
    # negative index provided which does not match the current index. The
    # match should be removed, and the path should be added to the end of
    # the list.
    ret, env, mock_set = _run("c:\\foo", index=-1, retval=True)
    new_path = ("C:\\Bar", "c:\\foo")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test adding path with a case-insensitive match already present, and a
    # negative index provided which matches the current index. No changes
    # should be made.
    ret, env, mock_set = _run("c:\\foo", index=-2, retval=True)
    assert ret is True
    assert env["PATH"] == _path_matches(orig_path)

    # Test adding path with a case-insensitive match already present, and a
    # negative index provided which is larger than the size of the list. No
    # changes should be made, since in these cases we assume an index of 0,
    # and the case-insensitive match is also at index 0.
    ret, env, mock_set = _run("c:\\foo", index=-5, retval=True)
    assert ret is True
    assert env["PATH"] == _path_matches(orig_path)

    # Test adding path with a case-insensitive match already present, and a
    # negative index provided which is larger than the size of the list.
    # The match should be removed from its current location and inserted at
    # the beginning, since when a negative index is larger than the list,
    # we put it at the beginning of the list.
    ret, env, mock_set = _run("c:\\bar", index=-5, retval=True)
    new_path = ("c:\\bar", "C:\\Foo")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test adding path with a case-insensitive match already present, and a
    # negative index provided which matches the current index. The path
    # should remain unchanged and we should not update the registry.
    ret, env, mock_set = _run("c:\\bar", index=-1, retval=True)
    assert ret is True
    assert env["PATH"] == _path_matches(orig_path)

    # Test adding path with a case-insensitive match already present, and
    # an index provided which does not match the current index, and is also
    # larger than the size of the PATH list. The match should be removed,
    # and the path should be added to the end of the list.
    ret, env, mock_set = _run("c:\\foo", index=5, retval=True)
    new_path = ("C:\\Bar", "c:\\foo")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)
def test_remove(pathsep):
    """
    Test win_path.remove.

    The helper ``_run`` mocks get_path / reg.set_value / rehash and
    os.environ, then returns (win_path.remove result, env dict, set mock)
    so each scenario can assert on the return value and the PATH string.
    """
    orig_path = ("C:\\Foo", "C:\\Bar", "C:\\Baz")

    # Helper function to make the env var easier to reuse
    def _env(path):
        return {"PATH": salt.utils.stringutils.to_str(pathsep.join(path))}

    def _run(name="c:\\salt", retval=True, path=None):
        if path is None:
            path = orig_path
        env = _env(path)
        # Mock getters and setters
        mock_get = MagicMock(return_value=list(path))
        mock_set = MagicMock(return_value=retval)
        patch_path_sep = patch.object(win_path, "PATHSEP", pathsep)
        patch_path = patch.object(win_path, "get_path", mock_get)
        patch_env = patch.object(os, "environ", env)
        patch_dict = patch.dict(win_path.__utils__, {"reg.set_value": mock_set})
        patch_rehash = patch.object(win_path, "rehash", MagicMock(return_value=True))
        with patch_path_sep, patch_path, patch_env, patch_dict, patch_rehash:
            return win_path.remove(name), env, mock_set

    # Expected PATH value for a given entry tuple.
    def _path_matches(path):
        return salt.utils.stringutils.to_str(pathsep.join(path))

    # Test a successful reg update
    ret, env, mock_set = _run("C:\\Bar", retval=True)
    new_path = ("C:\\Foo", "C:\\Baz")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test a successful reg update with a case-insensitive match
    ret, env, mock_set = _run("c:\\bar", retval=True)
    new_path = ("C:\\Foo", "C:\\Baz")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test a successful reg update with multiple case-insensitive matches.
    # All matches should be removed.
    old_path = orig_path + ("C:\\BAR",)
    # BUG FIX: old_path was built but never used -- the previous code
    # called _run without it, so the multiple-match scenario was never
    # actually exercised. Pass it as the starting PATH.
    ret, env, mock_set = _run("c:\\bar", retval=True, path=old_path)
    new_path = ("C:\\Foo", "C:\\Baz")
    assert ret is True
    assert env["PATH"] == _path_matches(new_path)

    # Test an unsuccessful reg update
    ret, env, mock_set = _run("c:\\bar", retval=False)
    new_path = ("C:\\Foo", "C:\\Baz")
    assert ret is False
    # The local path should still have been modified even
    # though reg.set_value failed.
    assert env["PATH"] == _path_matches(new_path)

    # Test when no match found
    ret, env, mock_set = _run("C:\\NotThere", retval=True)
    assert ret is True
    assert env["PATH"] == _path_matches(orig_path)
| {
"content_hash": "86a2273fc04231d0ded88e606f23d22e",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 85,
"avg_line_length": 36.41869918699187,
"alnum_prop": 0.6252930014510548,
"repo_name": "saltstack/salt",
"id": "7dad024a8783dda3f40683abbc15dfdf77cbdb34",
"size": "8959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/pytests/unit/modules/test_win_path.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
"""
This module contains implementations of IFilePath for zip files.
See the constructor for ZipArchive for use.
"""
__metaclass__ = type
import os
import time
import errno
# Python 2.6 includes support for incremental unzipping of zipfiles, and
# thus obviates the need for ChunkingZipFile.
import sys
if sys.version_info[:2] >= (2, 6):
_USE_ZIPFILE = True
from zipfile import ZipFile
else:
_USE_ZIPFILE = False
from twisted.python.zipstream import ChunkingZipFile
from twisted.python.filepath import IFilePath, FilePath, AbstractFilePath
from zope.interface import implements
# using FilePath here exclusively rather than os to make sure that we don't do
# anything OS-path-specific here.
ZIP_PATH_SEP = '/' # In zipfiles, "/" is universally used as the
# path separator, regardless of platform.
class ZipPath(AbstractFilePath):
    """
    I represent a file or directory contained within a zip file.
    """
    implements(IFilePath)
    # Path separator inside the archive; always '/' regardless of platform.
    sep = ZIP_PATH_SEP
    def __init__(self, archive, pathInArchive):
        """
        Don't construct me directly. Use ZipArchive.child().
        @param archive: a ZipArchive instance.
        @param pathInArchive: a ZIP_PATH_SEP-separated string.
        """
        self.archive = archive
        self.pathInArchive = pathInArchive
        # self.path pretends to be os-specific because that's the way the
        # 'zipimport' module does it.
        self.path = os.path.join(archive.zipfile.filename,
                                 *(self.pathInArchive.split(ZIP_PATH_SEP)))
    def __cmp__(self, other):
        # Order by (archive, in-archive path); comparisons against anything
        # that is not a ZipPath are unsupported.
        if not isinstance(other, ZipPath):
            return NotImplemented
        return cmp((self.archive, self.pathInArchive),
                   (other.archive, other.pathInArchive))
    def __repr__(self):
        # Render as an OS-style path rooted at the archive file on disk.
        parts = [os.path.abspath(self.archive.path)]
        parts.extend(self.pathInArchive.split(ZIP_PATH_SEP))
        path = os.sep.join(parts)
        return "ZipPath('%s')" % (path.encode('string-escape'),)
    def parent(self):
        """Return this path's parent: another ZipPath, or the archive itself
        when this path is a top-level entry."""
        splitup = self.pathInArchive.split(ZIP_PATH_SEP)
        if len(splitup) == 1:
            return self.archive
        return ZipPath(self.archive, ZIP_PATH_SEP.join(splitup[:-1]))
    def child(self, path):
        """
        Return a new ZipPath representing a path in C{self.archive} which is
        a child of this path.
        @note: Requesting the C{".."} (or other special name) child will not
            cause L{InsecurePath} to be raised since these names do not have
            any special meaning inside a zip archive. Be particularly
            careful with the C{path} attribute (if you absolutely must use
            it) as this means it may include special names with special
            meaning outside of the context of a zip archive.
        """
        return ZipPath(self.archive, ZIP_PATH_SEP.join([self.pathInArchive, path]))
    def sibling(self, path):
        """Return the child of my parent named C{path}."""
        return self.parent().child(path)
    # preauthChild = child
    def exists(self):
        """A path exists if it names a file entry or a directory."""
        return self.isdir() or self.isfile()
    def isdir(self):
        # Zip files store no explicit directory entries here; a directory
        # exists iff some entry lists this path as a parent in childmap.
        return self.pathInArchive in self.archive.childmap
    def isfile(self):
        # Files correspond directly to named entries in the archive.
        return self.pathInArchive in self.archive.zipfile.NameToInfo
    def islink(self):
        # Symbolic links are not represented inside zip archives.
        return False
    def listdir(self):
        """Return the names of this directory's children.
        @raise OSError: ENOTDIR if this path is a file, ENOENT if it does
            not exist in the archive.
        """
        if self.exists():
            if self.isdir():
                return self.archive.childmap[self.pathInArchive].keys()
            else:
                raise OSError(errno.ENOTDIR, "Leaf zip entry listed")
        else:
            raise OSError(errno.ENOENT, "Non-existent zip entry listed")
    def splitext(self):
        """
        Return a value similar to that returned by os.path.splitext.
        """
        # This happens to work out because of the fact that we use OS-specific
        # path separators in the constructor to construct our fake 'path'
        # attribute.
        return os.path.splitext(self.path)
    def basename(self):
        """Return the final component of the in-archive path."""
        return self.pathInArchive.split(ZIP_PATH_SEP)[-1]
    def dirname(self):
        # XXX NOTE: This API isn't a very good idea on filepath, but it's even
        # less meaningful here.
        return self.parent().path
    def open(self, mode="r"):
        """Open this entry and return a file-like object for its contents."""
        if _USE_ZIPFILE:
            return self.archive.zipfile.open(self.pathInArchive, mode=mode)
        else:
            # XXX oh man, is this too much hax?
            self.archive.zipfile.mode = mode
            return self.archive.zipfile.readfile(self.pathInArchive)
    def changed(self):
        # No per-path state is cached, so invalidation is a no-op.
        pass
    def getsize(self):
        """
        Retrieve this file's size.
        @return: file size, in bytes
        """
        return self.archive.zipfile.NameToInfo[self.pathInArchive].file_size
    def getAccessTime(self):
        """
        Retrieve this file's last access time. This is the same as the last access
        time for the archive.
        @return: a number of seconds since the epoch
        """
        return self.archive.getAccessTime()
    def getModificationTime(self):
        """
        Retrieve this file's last modification time. This is the time of
        modification recorded in the zipfile.
        @return: a number of seconds since the epoch.
        """
        # ZipInfo.date_time is a 6-tuple (y, m, d, h, min, s); pad with
        # (0, 0, 0) to build the 9-tuple that time.mktime requires.
        return time.mktime(
            self.archive.zipfile.NameToInfo[self.pathInArchive].date_time
            + (0, 0, 0))
    def getStatusChangeTime(self):
        """
        Retrieve this file's last modification time. This name is provided for
        compatibility, and returns the same value as getmtime.
        @return: a number of seconds since the epoch.
        """
        return self.getModificationTime()
class ZipArchive(ZipPath):
    """ I am a FilePath-like object which can wrap a zip archive as if it were a
    directory.
    """
    # The archive is its own root: ZipPath.archive resolves back to self.
    archive = property(lambda self: self)
    def __init__(self, archivePathname):
        """Create a ZipArchive, treating the archive at archivePathname as a zip file.
        @param archivePathname: a str, naming a path in the filesystem.
        """
        if _USE_ZIPFILE:
            self.zipfile = ZipFile(archivePathname)
        else:
            self.zipfile = ChunkingZipFile(archivePathname)
        self.path = archivePathname
        # The archive itself is the root, i.e. the empty in-archive path.
        self.pathInArchive = ''
        # zipfile is already wasting O(N) memory on cached ZipInfo instances,
        # so there's no sense in trying to do this lazily or intelligently
        self.childmap = {} # map parent: list of children
        # Build a parent -> {child: 1} map for every prefix of every entry,
        # so isdir()/listdir() work for implicit intermediate directories.
        for name in self.zipfile.namelist():
            name = name.split(ZIP_PATH_SEP)
            for x in range(len(name)):
                child = name[-x]
                parent = ZIP_PATH_SEP.join(name[:-x])
                if parent not in self.childmap:
                    self.childmap[parent] = {}
                self.childmap[parent][child] = 1
            parent = ''
    def child(self, path):
        """
        Create a ZipPath pointing at a path within the archive.
        @param path: a str with no path separators in it, either '/' or the
            system path separator, if it's different.
        """
        return ZipPath(self, path)
    def exists(self):
        """
        Returns true if the underlying archive exists.
        """
        return FilePath(self.zipfile.filename).exists()
    def getAccessTime(self):
        """
        Return the archive file's last access time.
        """
        return FilePath(self.zipfile.filename).getAccessTime()
    def getModificationTime(self):
        """
        Return the archive file's modification time.
        """
        return FilePath(self.zipfile.filename).getModificationTime()
    def getStatusChangeTime(self):
        """
        Return the archive file's status change time.
        """
        return FilePath(self.zipfile.filename).getStatusChangeTime()
    def __repr__(self):
        return 'ZipArchive(%r)' % (os.path.abspath(self.path),)
__all__ = ['ZipArchive', 'ZipPath']
| {
"content_hash": "4091d182ec65cf224108737c2e0df9e9",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 86,
"avg_line_length": 31.328301886792453,
"alnum_prop": 0.5944350758853288,
"repo_name": "hlzz/dotfiles",
"id": "b8f62c380ba1058237c48327524b21c139ac1335",
"size": "8447",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/python/zippath.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "1240"
},
{
"name": "Arc",
"bytes": "38"
},
{
"name": "Assembly",
"bytes": "449468"
},
{
"name": "Batchfile",
"bytes": "16152"
},
{
"name": "C",
"bytes": "102303195"
},
{
"name": "C++",
"bytes": "155056606"
},
{
"name": "CMake",
"bytes": "7200627"
},
{
"name": "CSS",
"bytes": "179330"
},
{
"name": "Cuda",
"bytes": "30026"
},
{
"name": "D",
"bytes": "2152"
},
{
"name": "Emacs Lisp",
"bytes": "14892"
},
{
"name": "FORTRAN",
"bytes": "5276"
},
{
"name": "Forth",
"bytes": "3637"
},
{
"name": "GAP",
"bytes": "14495"
},
{
"name": "GLSL",
"bytes": "438205"
},
{
"name": "Gnuplot",
"bytes": "327"
},
{
"name": "Groff",
"bytes": "518260"
},
{
"name": "HLSL",
"bytes": "965"
},
{
"name": "HTML",
"bytes": "2003175"
},
{
"name": "Haskell",
"bytes": "10370"
},
{
"name": "IDL",
"bytes": "2466"
},
{
"name": "Java",
"bytes": "219109"
},
{
"name": "JavaScript",
"bytes": "1618007"
},
{
"name": "Lex",
"bytes": "119058"
},
{
"name": "Lua",
"bytes": "23167"
},
{
"name": "M",
"bytes": "1080"
},
{
"name": "M4",
"bytes": "292475"
},
{
"name": "Makefile",
"bytes": "7112810"
},
{
"name": "Matlab",
"bytes": "1582"
},
{
"name": "NSIS",
"bytes": "34176"
},
{
"name": "Objective-C",
"bytes": "65312"
},
{
"name": "Objective-C++",
"bytes": "269995"
},
{
"name": "PAWN",
"bytes": "4107117"
},
{
"name": "PHP",
"bytes": "2690"
},
{
"name": "Pascal",
"bytes": "5054"
},
{
"name": "Perl",
"bytes": "485508"
},
{
"name": "Pike",
"bytes": "1338"
},
{
"name": "Prolog",
"bytes": "5284"
},
{
"name": "Python",
"bytes": "16799659"
},
{
"name": "QMake",
"bytes": "89858"
},
{
"name": "Rebol",
"bytes": "291"
},
{
"name": "Ruby",
"bytes": "21590"
},
{
"name": "Scilab",
"bytes": "120244"
},
{
"name": "Shell",
"bytes": "2266191"
},
{
"name": "Slash",
"bytes": "1536"
},
{
"name": "Smarty",
"bytes": "1368"
},
{
"name": "Swift",
"bytes": "331"
},
{
"name": "Tcl",
"bytes": "1911873"
},
{
"name": "TeX",
"bytes": "11981"
},
{
"name": "Verilog",
"bytes": "3893"
},
{
"name": "VimL",
"bytes": "595114"
},
{
"name": "XSLT",
"bytes": "62675"
},
{
"name": "Yacc",
"bytes": "307000"
},
{
"name": "eC",
"bytes": "366863"
}
],
"symlink_target": ""
} |
from werkzeug import check_password_hash, generate_password_hash
from app import app
from app.modules.auth.models import User
# Hash of the empty password, computed once at import time.  It is used as the
# comparison target when the username is unknown, so check_password_hash is
# always executed -- presumably to keep response timing uniform and prevent
# username enumeration (NOTE(review): confirm intent).
_default_comparison = generate_password_hash("", method=app.config['PW_HASH_SETTINGS'], salt_length=app.config['PW_SALT_LENGTH'])
def check(credentials):
    """Validate a username/password pair against the user database.

    ``credentials`` is a mapping with 'username' and 'password' keys.
    Returns True when the password matches the stored hash, else False.
    """
    user = User.query.filter_by(username=credentials['username']).first()
    if user is None:
        # Unknown user: compare against the precomputed default hash so the
        # hashing work is always performed.
        stored_hash = _default_comparison
    else:
        stored_hash = user.password
    return check_password_hash(stored_hash, credentials['password'])
| {
"content_hash": "2f33f7ff46533cf1f6432cad25abe228",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 129,
"avg_line_length": 42.416666666666664,
"alnum_prop": 0.756385068762279,
"repo_name": "awm/minventory",
"id": "c07b6848e4d45aa9c9a98bd844c8e36cd33365f1",
"size": "509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/plugins/auth/db.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "20452"
}
],
"symlink_target": ""
} |
from django import forms
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from pycon.models import (PyConProposalCategory, PyConSponsorTutorialProposal,
PyConTalkProposal, PyConTutorialProposal,
PyConPosterProposal, PyConLightningTalkProposal,
PyConOpenSpaceProposal)
class ProposalMarkEditAdmin(MarkEditAdmin):
    """Base admin for proposals: MarkEdit editor on the long-text fields."""
    class MarkEdit:
        # Fields rendered with the MarkEdit widget, previewed below the input.
        fields = ['abstract', 'additional_notes', 'outline', 'more_info']
        options = {
            'preview': 'below'
        }
class TalkAdmin(ProposalMarkEditAdmin):
    """Change-list admin for talk proposals."""
    list_display = [
        'title',
        'kind',
        'status',
        'duration',
        'submitted',
        'speaker',
        'category',
        'audience_level',
        'cancelled',
    ]
class TutorialAdmin(ProposalMarkEditAdmin):
    """Change-list admin for tutorial proposals.

    Adds the review result status and a colour-coded registration count
    to the list view.
    """
    list_display = [
        'title',
        'kind',
        'overall_status',
        'status', # result.status
        'submitted',
        'speaker',
        'category',
        'audience_level',
        'domain_level',
        '_registration_count',
        'cancelled',
    ]
    list_filter = ['result__status', 'cancelled', 'category']
    list_select_related = True
    search_fields = ['title']

    def status(self, obj):
        """Review status of the proposal, or 'undecided' when no result exists."""
        try:
            return obj.result.status
        except Exception:
            # A proposal without a review raises the related-object
            # DoesNotExist on ``obj.result``.  The previous bare ``except:``
            # also swallowed KeyboardInterrupt/SystemExit; catching
            # Exception keeps the fallback while letting those propagate.
            return "undecided"
    status.admin_order_field = 'result__status'

    def _registration_count(self, obj):
        """Registration count, blue when exactly full and red when over capacity."""
        kwargs = {'count': obj.registration_count, 'max': obj.max_attendees}
        if obj.max_attendees:
            if obj.registration_count == obj.max_attendees:
                return '<div style="color: blue;">{count} of {max}</div>'.format(**kwargs)
            elif obj.registration_count > obj.max_attendees:
                return '<div style="color: red;">{count} of {max}</div>'.format(**kwargs)
            else:
                return '{count} of {max}'.format(**kwargs)
        else:
            return '{count}'.format(**kwargs)
    _registration_count.allow_tags = True
class LightningTalkAdminForm(forms.ModelForm):
    """ModelForm that pre-fills category and audience level for lightning talks."""
    def __init__(self, *args, **kwargs):
        super(LightningTalkAdminForm, self).__init__(*args, **kwargs)
        # TODO: This is a hack to populate the field...
        # Uses the first category in the table as the default.
        self.fields['category'].initial = PyConProposalCategory.objects.all()[0]
        self.fields['audience_level'].initial = PyConLightningTalkProposal.AUDIENCE_LEVEL_NOVICE
    class Meta:
        model = PyConLightningTalkProposal
        # Lightning talks have no abstract; the field is hidden entirely.
        exclude = ['abstract']
class LightningTalkAdmin(MarkEditAdmin):
    """Admin for lightning talks: MarkEdit only on additional notes."""
    class MarkEdit:
        fields = ['additional_notes']
        options = {
            'preview': 'below'
        }
    form = LightningTalkAdminForm
    list_display = [
        'title',
        'kind',
        'status',
        'submitted',
        'speaker',
        'cancelled',
    ]
class PosterAdmin(ProposalMarkEditAdmin):
    """Change-list admin for poster proposals."""
    list_display = [
        'title',
        'kind',
        'status',
        'submitted',
        'speaker',
        'category',
        'audience_level',
        'cancelled',
    ]
class OpenSpaceAdmin(ProposalMarkEditAdmin):
    """Change-list admin for open-space proposals."""
    list_display = [
        'title',
        'kind',
        'status',
        'submitted',
        'speaker',
        'cancelled'
    ]
class SponsorTutorialAdmin(ProposalMarkEditAdmin):
    """Change-list admin for sponsor tutorial proposals."""
    list_display = [
        'title',
        'kind',
        'status',
        'submitted',
        'speaker',
        'cancelled',
    ]
# Wire each proposal model to its customised admin class.
admin.site.register(PyConProposalCategory)
admin.site.register(PyConTalkProposal, TalkAdmin)
admin.site.register(PyConTutorialProposal, TutorialAdmin)
admin.site.register(PyConPosterProposal, PosterAdmin)
admin.site.register(PyConOpenSpaceProposal, OpenSpaceAdmin)
admin.site.register(PyConSponsorTutorialProposal, SponsorTutorialAdmin)
admin.site.register(PyConLightningTalkProposal, LightningTalkAdmin)
from account.models import Account, EmailAddress
from django.contrib.auth.models import User
from django.contrib.auth.admin import UserAdmin
class AccountInline(admin.StackedInline):
    """Inline editor for the Account related to a User."""
    model = Account
    extra = 0
class EmailAddressInline(admin.StackedInline):
    """Inline editor for the EmailAddress records related to a User."""
    model = EmailAddress
    extra = 0
class HasAccountListFilter(admin.SimpleListFilter):
    """Admin list filter: restrict Users by presence of a related Account."""
    title = "has associated Account"
    parameter_name = "has_account"

    def lookups(self, request, model_admin):
        # Yes/No choices rendered in the filter sidebar.
        return (
            (1, "Yes"),
            (0, "No"),
        )

    def queryset(self, request, queryset):
        """Apply the selected choice; unset/empty leaves the queryset as-is."""
        choice = self.value()
        if choice == "1":
            return queryset.exclude(account=None)
        if choice == "0":
            return queryset.filter(account=None)
        return queryset
class HasEmailAddressListFilter(admin.SimpleListFilter):
    """Admin list filter: restrict Users by presence of a related EmailAddress."""
    title = "has associated EmailAddress"
    parameter_name = "has_emailaddress"

    def lookups(self, request, model_admin):
        # Yes/No choices rendered in the filter sidebar.
        return (
            (1, "Yes"),
            (0, "No"),
        )

    def queryset(self, request, queryset):
        """Apply the selected choice; unset/empty leaves the queryset as-is."""
        choice = self.value()
        if choice == "1":
            return queryset.exclude(emailaddress=None)
        if choice == "0":
            return queryset.filter(emailaddress=None)
        return queryset
class PyConUserAdmin(UserAdmin):
    """Stock UserAdmin extended with account/email inlines, email search and
    presence filters."""
    inlines = list(UserAdmin.inlines) + [AccountInline, EmailAddressInline]
    search_fields = list(UserAdmin.search_fields) + ['emailaddress__email']
    list_filter = list(UserAdmin.list_filter) + [HasAccountListFilter, HasEmailAddressListFilter]
# Replace the stock User admin with the extended version defined above.
admin.site.unregister(User)
admin.site.register(User, PyConUserAdmin)
# HACK HACK - monkey patch User because the username field is useless
# when using django-user-accounts
def user_unicode(self):
    """Display a User as their full name, falling back to their email."""
    full_name = self.get_full_name()
    if full_name:
        return full_name
    return self.email
# Install the friendlier display name on the User model.
User.__unicode__ = user_unicode
# Also monkey patch the sort order
User._meta.ordering = ['last_name', 'first_name']
| {
"content_hash": "2ca6421818bfe4eb7c4e2cc47248edbf",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 97,
"avg_line_length": 27.304545454545455,
"alnum_prop": 0.6129515565173964,
"repo_name": "Diwahars/pycon",
"id": "299bf7af73862d995a9171e4ed47205fc45570d2",
"size": "6007",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pycon/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "96460"
},
{
"name": "HTML",
"bytes": "291993"
},
{
"name": "JavaScript",
"bytes": "741348"
},
{
"name": "Makefile",
"bytes": "457"
},
{
"name": "Python",
"bytes": "663510"
},
{
"name": "Shell",
"bytes": "1975"
}
],
"symlink_target": ""
} |
__revision__ = "src/engine/SCons/Tool/MSCommon/arch.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
__doc__ = """Module to define supported Windows chip architectures.
"""
import os
class ArchDefinition(object):
    """
    A class for defining architecture-specific settings and logic.

    arch is the canonical architecture name (e.g. 'x86_64'); synonyms is an
    optional iterable of alternative names mapping to the same architecture.
    """
    def __init__(self, arch, synonyms=None):
        self.arch = arch
        # A fresh list per instance.  The previous ``synonyms=[]`` mutable
        # default would have been shared by every instance constructed
        # without an explicit synonym list.
        self.synonyms = list(synonyms) if synonyms is not None else []
# BUG FIX: this list previously instantiated ``ArchitectureDefinition``, a
# name that does not exist in this module (NameError at import time); the
# class is declared above as ``ArchDefinition``.
SupportedArchitectureList = [
    ArchDefinition(
        'x86',
        ['i386', 'i486', 'i586', 'i686'],
    ),
    ArchDefinition(
        'x86_64',
        ['AMD64', 'amd64', 'em64t', 'EM64T', 'x86_64'],
    ),
    ArchDefinition(
        'ia64',
        ['IA64'],
    ),
    ArchDefinition(
        'arm',
        ['ARM'],
    ),
]
# Index supported architectures by canonical name and by every synonym, so
# lookups accept any spelling (e.g. 'AMD64' -> the x86_64 definition).
SupportedArchitectureMap = {}
for a in SupportedArchitectureList:
    SupportedArchitectureMap[a.arch] = a
    for s in a.synonyms:
        SupportedArchitectureMap[s] = a
| {
"content_hash": "9647ee7373d1f2f51774393fa6f52876",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 112,
"avg_line_length": 21.90909090909091,
"alnum_prop": 0.5985477178423236,
"repo_name": "pzajda/eloquence",
"id": "f7d28daf48d073e348fc8d3fa4668d2d0146ed0e",
"size": "2077",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scons-local-2.5.0/SCons/Tool/MSCommon/arch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1927564"
},
{
"name": "Smarty",
"bytes": "226"
}
],
"symlink_target": ""
} |
__author__ = 'serginuez'
#msbee.py
#dependences: python-pcapy
import argparse
import datetime
import re
import socket
import sys
import traceback
from struct import *

import pcapy
def main(argv):
    """Open a capture device and sniff SIP (UDP/5060) traffic until interrupted.

    Flags: -i/--interface selects the device (default 'eth0');
    -f/--file names a configuration file (default '/etc/msbee.conf',
    currently read nowhere in this module).
    """
    try:
        parser = argparse.ArgumentParser(description='mSbee')
        parser.add_argument('-i', '--interface', help='interface name', required=False)
        parser.add_argument('-f', '--file', help='Configuration file name', required=False)
        args = parser.parse_args()
        dev = 'eth0'
        if args.interface is not None:
            dev = args.interface
        conffile = '/etc/msbee.conf'
        if args.file is not None:
            conffile = args.file
        devices = pcapy.findalldevs()
        if dev not in devices:
            print("Unkown device " + dev)
            sys.exit(0)
        print("Sniffing device " + dev)
        # snaplen 65536 bytes, promiscuous mode on, no read timeout.
        cap = pcapy.open_live(dev, 65536, 1, 0)
        # Sniff packets forever; each frame is decoded by parse_packet.
        while 1:
            (header, packet) = cap.next()
            parse_packet(packet)
    except KeyboardInterrupt:
        print("Shutdown requested...exiting")
    except Exception:
        # BUG FIX: 'traceback' was used here without ever being imported,
        # so this handler itself raised NameError; it is now imported at
        # module level.
        traceback.print_exc(file=sys.stdout)
    sys.exit(0)
#Convert a string of 6 characters of ethernet address into a colon separated hex string
def eth_addr(a):
    """Format the first six bytes of *a* as a colon-separated hex MAC address."""
    octets = ["%.2x" % ord(a[i]) for i in range(6)]
    return ":".join(octets)
#function to parse a packet
def parse_packet(packet) :
    """Decode one captured Ethernet frame and print SIP INVITE/REGISTER payloads.

    Only IPv4 frames carrying UDP datagrams addressed to port 5060 are
    inspected further; everything else is silently ignored.
    """
    #parse ethernet header
    eth_length = 14
    eth_header = packet[:eth_length]
    # '!6s6sH': destination MAC, source MAC, EtherType (network byte order).
    eth = unpack('!6s6sH' , eth_header)
    eth_protocol = socket.ntohs(eth[2])
    #print 'Destination MAC : ' + eth_addr(packet[0:6]) + ' Source MAC : ' + eth_addr(packet[6:12]) + ' Protocol : ' + str(eth_protocol)
    #Parse IP packets, IP Protocol number = 8
    if eth_protocol == 8 :
        #Parse IP header
        #take first 20 characters for the ip header
        ip_header = packet[eth_length:20+eth_length]
        #now unpack them :)
        iph = unpack('!BBHHHBBH4s4s' , ip_header)
        version_ihl = iph[0]
        version = version_ihl >> 4
        ihl = version_ihl & 0xF
        # IHL counts 32-bit words, so multiply by 4 to get header bytes.
        iph_length = ihl * 4
        ttl = iph[5]
        protocol = iph[6]
        s_addr = socket.inet_ntoa(iph[8]);
        d_addr = socket.inet_ntoa(iph[9]);
        #print 'Version : ' + str(version) + ' IP Header Length : ' + str(ihl) + ' TTL : ' + str(ttl) + ' Protocol : ' + str(protocol) + ' Source Address : ' + str(s_addr) + ' Destination Address : ' + str(d_addr)
        #UDP packets (IP protocol number 17)
        if protocol == 17 :
            u = iph_length + eth_length
            udph_length = 8
            udp_header = packet[u:u+8]
            #now unpack them :)
            udph = unpack('!HHHH' , udp_header)
            source_port = udph[0]
            dest_port = udph[1]
            length = udph[2]
            checksum = udph[3]
            # 5060 is the standard SIP signalling port.
            if dest_port == 5060 :
                print 'Source Port : ' + str(source_port) + ' Dest Port : ' + str(dest_port) + ' Length : ' + str(length) + ' Checksum : ' + str(checksum)
                h_size = eth_length + iph_length + udph_length
                data_size = len(packet) - h_size
                #get data from the packet
                data = packet[h_size:]
                #handle INVITE requests
                if re.match(r'^INVITE', data):
                    print 'Data : '
                    print data
                #handle REGISTER requests
                if re.match(r'^REGISTER', data):
                    print 'Data : '
                    print data
# Script entry point.
if __name__ == "__main__":
    main(sys.argv)
| {
"content_hash": "a54a1d2acdafe91cb7133658a6f50938",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 213,
"avg_line_length": 32.46666666666667,
"alnum_prop": 0.5369609856262834,
"repo_name": "serginuez/msbee",
"id": "891f3c1db22b1e00477868f0412ca44ebe937f19",
"size": "3914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "msbee.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "3914"
}
],
"symlink_target": ""
} |
from multiprocessing import Pool
import numpy as np
from gtapps_mp.utils import pyfits
import tempfile
import os
import subprocess
import sys
from gt_apps import filter,expCube
def ltcube(times):
    '''This is the atomic function that actually runs in the seperate
    threads. It takes a list as input where the first element is
    tmin, second is tmax, third is spacecraft file, fourth is the
    event file and fifth is the zmax parameter. It first uses
    gtselect with wide open cuts to divide up the event file then it
    runs gtltcube on that event file. The temporary event file is
    deleted automatically. The function returns the name of the
    created ltcube file which can be combined with other files and/or
    deleted later.'''
    # times layout: (tmin, tmax, scfile, evfile, zmax).
    print "Starting calculation on interval {} to {}".format(times[0],times[1])
    # If an event file is supplied, cut it down to this time interval with
    # gtselect; all other cuts are left wide open.
    if times[3] != '':
        evfile = tempfile.NamedTemporaryFile(suffix=".fits")
        filter['ra']="INDEF"
        filter['dec']="INDEF"
        filter['rad'] = "INDEF"
        filter['evclass'] = 0
        filter['evtype'] = "INDEF"
        filter['infile'] = times[3]
        filter['outfile'] = evfile.name
        # NOTE(review): 'ra'/'dec' are assigned twice in this block;
        # harmless duplication.
        filter['ra'] = "INDEF"
        filter['dec'] = "INDEF"
        filter['tmin'] = times[0]
        filter['tmax'] = times[1]
        filter['emin'] = 0.0
        filter['emax'] = 1000000.0
        filter['zmin'] = 0
        filter['zmax'] = 180
        filter['convtype'] = -1
        filter['chatter'] = 0
        filter.run(print_command=True)
    # Output livetime cube goes to a named temp file returned to the caller.
    osfilehandle,outfilename = tempfile.mkstemp(suffix=".fits")
    if times[3] != '':
        expCube['evfile'] = evfile.name
    else:
        expCube['evfile'] = ""
    expCube['scfile'] = times[2]
    expCube['outfile'] = outfilename
    expCube['tmin'] = times[0]
    expCube['tmax'] = times[1]
    expCube['dcostheta'] = 0.025
    expCube['binsz'] = 1
    expCube['phibins'] = 0
    expCube['zmax'] = times[4]
    expCube['chatter'] = 0
    expCube.run(print_command=True)
    print "Completed calculation on interval {} to {}".format(times[0],times[1])
    return outfilename
def ltsum(filenames, Outfile, SaveTemp):
    '''This function takes a list of livetime cubes and sums them up using
    gtltsum. It first checks to see if there's only one temporary
    file. If so, it just copies that to the output file. If not, it
    creates a temporary file that lists the individual ltcube files
    and operates gtltsum on them.'''
    # NOTE(review): assumes filenames is non-empty; an empty list would
    # raise IndexError on filenames[0].
    if len(filenames) <= 1:
        subprocess.call(["cp", filenames[0], Outfile])
    else:
        # gtltsum accepts a "@listfile" argument naming one cube per line.
        fileListfile = tempfile.NamedTemporaryFile()
        for filename in filenames:
            fileListfile.file.write(filename + "\n")
        fileListfile.flush()
        subprocess.call(["gtltsum",
                         "infile1=@"+fileListfile.name,
                         "outfile="+Outfile])
    if SaveTemp:
        print "Did not delete the following temporary files:"
        print filenames
    else:
        print "Deleting temporary files..."
        for filename in filenames:
            os.remove(filename)
def gtltcube_mp(bins, SCFile, EVFile, OutFile, SaveTemp, zmax, tmin, tmax):
    '''This functions looks at a spacecraft file and splits the time into
    chunks that match the bin edges in the spacecraft file. It then
    submits jobs based upon those start and stop times. This is to
    make the resulting files as close to the original as possible.
    Note that this assumes you are using the full time period in your
    spacecraft file.'''
    verbose = False
    # With an event file, its GTI extension is used to validate the slices.
    if EVFile != "":
        evfile = pyfits.open(EVFile, mode='readonly')
        gti_data = evfile[2].data
    if tmin == 0:
        print "Determining start and stop times from the event file..."
        tstart = evfile[0].header['TSTART']
        tstop = evfile[0].header['TSTOP']
    else:
        print "Using user defined tmin and tmax..."
        tstart = tmin
        tstop = tmax
    print "Opening SC file to determine break points..."
    hdulist = pyfits.open(SCFile, mode='readonly')
    scdata = hdulist[1].data
    hdulist.close()
    scstart = scdata.field('START')
    scstop = scdata.field('STOP')
    # Keep only spacecraft bins fully inside the requested time range.
    time_filter = (tstart <= scstart) & (scstop <= tstop)
    redo = True
    if EVFile !="":
        print "Checking for good times in the event file..."
    # Reduce the number of slices until every slice overlaps at least one
    # GTI (a slice without good times would make gtltcube fail).
    while redo:
        redo = False
        scstartssplit = np.array_split(scstart[time_filter],int(bins))
        scstopssplit = np.array_split(scstop[time_filter],bins)
        #Explicitly set the first and last point to the values in the evfile header
        scstartssplit[0][0] = tstart
        scstopssplit[-1][-1] = tstop
        starts = [st[0] for st in scstartssplit]
        stops = [st[-1] for st in scstopssplit]
        if EVFile != "":
            for interval in zip(starts,stops):
                if verbose: print "Looking at interval",interval[0],"to",interval[1]
                good_times = False
                #grrrr. some bug in pyfits doesn't let me do this the python way...
                for gti_i in range(len(gti_data)):
                    if(not good_times):
                        if verbose: print " Checking gti",gti_data[gti_i]['START'],"to",gti_data[gti_i]['STOP']
                        gti_starts = interval[0] <= gti_data[gti_i]['START'] <= interval[1]
                        gti_stops = interval[0] <= gti_data[gti_i]['STOP'] <= interval[1]
                        if verbose: print " Does this gti start inside this interval? ", gti_starts
                        if verbose: print " Does this gti stop inside this interval? ", gti_stops
                        good_times = gti_starts or gti_stops
                if verbose: print
                if verbose: print " Are there good times inside this interval? ", good_times
                if not good_times:
                    redo = True
                if verbose: print
        if redo:
            if bins <= 1:
                print "No good time intervals found. Bailing..."
                sys.exit(1)
            print "One (or more) of the slices doesn't have a GTI."
            print "Reducing the number of threads from ",bins,"to",bins-1
            bins -= 1
    # One job per slice; each worker gets (tmin, tmax, scfile, evfile, zmax).
    scfiles = [SCFile for st in scstartssplit]
    evfiles = [EVFile for st in scstartssplit]
    print "EVFiles:",evfiles
    zmaxes = [zmax for st in scstartssplit]
    pool = Pool(processes=bins)
    times = np.array([starts,stops,scfiles,evfiles,zmaxes])
    print "Spawning {} jobs...".format(bins)
    tempfilenames = pool.map(ltcube,times.transpose())
    print "Combining temporary files..."
    ltsum(tempfilenames, OutFile, SaveTemp)
def cli():
    """Command-line entry point: parse arguments and invoke gtltcube_mp."""
    helpString = "Submits the gtltcube program as sperate threads via python and\
                  joins up the resulting temporary exposure cubes at the end\
                  resulting in a single exposure cube for the input event file.\
                  This greatly reduces the running time. For more details on \
                  gtltcube see the gtltcube help file."
    import argparse
    parser = argparse.ArgumentParser(description=helpString)
    parser.add_argument("jobs", type=int, help="The number of jobs you wish to spawn (usually the number of cores on your machine).")
    parser.add_argument("sfile", help="The spacecraft data file. See gtltcube help for more information.")
    parser.add_argument("evfile", help="Input event file. See gtltcube help for more information.")
    parser.add_argument("outfile", help="Output file name.")
    parser.add_argument("--savetmp", default = False, help="Save the temporary files (default is False).")
    parser.add_argument("--zmax", type=int, default = 180, help="zmax parameter for gtltcube (default is 180)")
    parser.add_argument("--tmin", type=float, default=0, help="start time (if not given, will derive from evfile)")
    parser.add_argument("--tmax", type=float, default=0, help="stop time (if not given, will derive from evfile)")
    args = parser.parse_args()
    gtltcube_mp(args.jobs, args.sfile, args.evfile, args.outfile, args.savetmp, args.zmax, args.tmin, args.tmax)
# Script entry point.
if __name__ == '__main__': cli()
| {
"content_hash": "7b5e9eb747415ddc9234ccda458770ba",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 133,
"avg_line_length": 39.52403846153846,
"alnum_prop": 0.6146454202651745,
"repo_name": "fermiPy/gtapps_mp",
"id": "29c5654e49d9736864daa3a4a5c0eb4c019dee24",
"size": "8244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gtapps_mp/gtltcube_mp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44337"
}
],
"symlink_target": ""
} |
import logging
from flask import current_app, Flask, redirect, url_for
def create_app(config, debug=False, testing=False, config_overrides=None):
    """Application factory: build and configure the bookshelf Flask app.

    *config* is an object/module for ``app.config.from_object``; an optional
    *config_overrides* mapping is applied last and wins.
    """
    app = Flask(__name__)
    app.config.from_object(config)
    app.debug = debug
    app.testing = testing
    if config_overrides:
        app.config.update(config_overrides)
    # Configure logging
    if not app.testing:
        logging.basicConfig(level=logging.INFO)
    # Setup the data model.
    # init_app needs an application context because get_model reads
    # current_app.config.
    with app.app_context():
        model = get_model()
        model.init_app(app)
    # Register the Bookshelf CRUD blueprint.
    from .crud import crud
    app.register_blueprint(crud, url_prefix='/books')
    # Add a default root route.
    @app.route("/")
    def index():
        return redirect(url_for('crud.list'))
    # Add an error handler. This is useful for debugging the live application,
    # however, you should disable the output of the exception for production
    # applications.
    @app.errorhandler(500)
    def server_error(e):
        return """
        An internal error occurred: <pre>{}</pre>
        See logs for full stacktrace.
        """.format(e), 500
    return app
def get_model():
    """Return the storage backend module named by the app's DATA_BACKEND config.

    Raises ValueError for any backend other than 'cloudsql', 'datastore'
    or 'mongodb'.  Must run inside an application context.
    """
    backend = current_app.config['DATA_BACKEND']
    if backend == 'cloudsql':
        from . import model_cloudsql as model
    elif backend == 'datastore':
        from . import model_datastore as model
    elif backend == 'mongodb':
        from . import model_mongodb as model
    else:
        raise ValueError(
            "No appropriate databackend configured. "
            "Please specify datastore, cloudsql, or mongodb")
    return model
| {
"content_hash": "5789b31da2f5d142c725e816850e81cf",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 78,
"avg_line_length": 27.317460317460316,
"alnum_prop": 0.6374201045903545,
"repo_name": "yafraorg/yafra-toroam",
"id": "419c524cb607ad2eb63cfc578edad15ed8e124e9",
"size": "2297",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "com.toroam.appengine2/bookshelf/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "367"
},
{
"name": "HTML",
"bytes": "87961"
},
{
"name": "Python",
"bytes": "67200"
}
],
"symlink_target": ""
} |
from twisted.trial import unittest
from nagcat import plugin
class PluginTest(unittest.TestCase):
    """Exercise nagcat's plugin discovery machinery."""
    def testSearch(self):
        # At least one INagcatPlugin provider should be discoverable.
        plugins = plugin.search(plugin.INagcatPlugin)
        self.assert_(plugins)
| {
"content_hash": "53a21624ed5a4d1367ac1a043f9d42f5",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 53,
"avg_line_length": 26.25,
"alnum_prop": 0.7380952380952381,
"repo_name": "marineam/nagcat",
"id": "69ac65c8a6c8e1f5c03f857414e7d27f579f62d3",
"size": "793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/nagcat/unittests/test_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "119709"
},
{
"name": "Python",
"bytes": "572702"
},
{
"name": "Shell",
"bytes": "3443"
}
],
"symlink_target": ""
} |
"""Fichier contenant la classe Commentaire, détaillée plus bas."""
from .instruction import Instruction
class Commentaire(Instruction):
    """Class defining a comment.
    A comment is an instruction beginning with a '#' that is
    not interpreted by Python.
    """
    def __init__(self):
        """Build an empty comment."""
        Instruction.__init__(self)
        self.commentaire = ""
    def __str__(self):
        # |grf| / |ff| appear to be display colour-format markers used by
        # the surrounding framework -- confirm against its docs.
        return "|grf|#" + self.commentaire + "|ff|"
    @classmethod
    def peut_interpreter(cls, chaine):
        """Can the string be interpreted by the Commentaire class?"""
        return chaine.startswith("#")
    @classmethod
    def construire(cls, chaine):
        """Build the instruction from its textual form."""
        commentaire = chaine[1:]
        # Normalise: ensure one space between '#' and the comment text.
        if commentaire and commentaire[0] != " ":
            commentaire = " " + commentaire
        ins = Commentaire()
        ins.commentaire = commentaire
        return ins
    @property
    def code_python(self):
        """Return the Python code associated with the instruction."""
        py_code = "#" + self.commentaire
        return py_code
| {
"content_hash": "72fe4a3018f01852d233aa7f8514da66",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 78,
"avg_line_length": 27.651162790697676,
"alnum_prop": 0.5853658536585366,
"repo_name": "vlegoff/tsunami",
"id": "1643fdca055f049a1ef1ec0329b878759dc678f4",
"size": "2771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/primaires/scripting/commentaire.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7930908"
},
{
"name": "Ruby",
"bytes": "373"
}
],
"symlink_target": ""
} |
from .constants import *
from .geometry import *
from .pdbIO import *
from .graph import *
| {
"content_hash": "3ad396d29d15e20cd21d5d6689e817dd",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 24,
"avg_line_length": 18.4,
"alnum_prop": 0.7282608695652174,
"repo_name": "BIOS-IMASL/bomeba0",
"id": "7debb86746c05b9ea1d0690d63fc043ee6f9b4bc",
"size": "92",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bomeba0/utils/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "966268"
},
{
"name": "Python",
"bytes": "136301"
}
],
"symlink_target": ""
} |
from recipe_engine.engine_types import ResourceCost
# This recipe runs under both Python 2 and Python 3.
PYTHON_VERSION_COMPATIBILITY = 'PY2+3'

# Recipe-engine modules this recipe depends on.
DEPS = [
  'step',
]
def RunSteps(api):
  """Runs steps with a variety of `cost` values; GenTests below asserts
  the ResourceCost each one ends up with."""
  # A nested parent step runs no command; its cost is expected to be None.
  with api.step.nest('parent step'):
    pass
  # A step with an empty command also carries no cost (None).
  api.step('null step', [])
  # cost=None explicitly requests a zero ResourceCost.
  api.step('zero step', ['echo', 'hi'], cost=None)
  # Omitting `cost` uses the default ResourceCost().
  api.step('default step', ['echo', 'hi'])
  api.step('max cpu step', ['echo', 'hi'],
           cost=api.step.ResourceCost(cpu=api.step.MAX_CPU))
  api.step('max memory step', ['echo', 'hi'],
           cost=api.step.ResourceCost(memory=api.step.MAX_MEMORY))
  # Requests beyond the maxima; GenTests expects them clamped to the maxima.
  api.step('over-max step', ['echo', 'hi'],
           cost=api.step.ResourceCost(
             cpu=api.step.MAX_CPU*2, memory=api.step.MAX_MEMORY*2))
def GenTests(api):
  """Checks the ResourceCost recorded for each step run by RunSteps.

  Per the assertions below, MAX_CPU corresponds to 8 * 1000 (millicores)
  and MAX_MEMORY to 16384; the over-max request is clamped to exactly
  those maxima.
  """
  yield (
      api.test('basic')
      + api.post_check(lambda check, steps: check(
          steps['parent step'].cost is None
      ))
      + api.post_check(lambda check, steps: check(
          steps['null step'].cost is None
      ))
      + api.post_check(lambda check, steps: check(
          steps['zero step'].cost == ResourceCost.zero()
      ))
      + api.post_check(lambda check, steps: check(
          steps['default step'].cost == ResourceCost()
      ))
      + api.post_check(lambda check, steps: check(
          steps['max cpu step'].cost == ResourceCost(cpu=8 * 1000)
      ))
      + api.post_check(lambda check, steps: check(
          steps['max memory step'].cost == ResourceCost(memory=16384)
      ))
      + api.post_check(lambda check, steps: check(
          steps['over-max step'].cost == ResourceCost(cpu=8 * 1000, memory=16384)
      ))
  )
| {
"content_hash": "0928bbb27bc524948ac0d7f0c9532a05",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 79,
"avg_line_length": 30.52,
"alnum_prop": 0.6055045871559633,
"repo_name": "luci/recipes-py",
"id": "14b2a3769814bb4778a921eada074c139102e9a3",
"size": "1700",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "recipe_modules/step/tests/step_cost.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "26"
},
{
"name": "Python",
"bytes": "900422"
},
{
"name": "Shell",
"bytes": "5746"
}
],
"symlink_target": ""
} |
"""
Class and functions for dealing with credentials in UNC connections on Windows.
"""
from win_unc.errors import InvalidUsernameError
from win_unc.cleaners import clean_username
from win_unc.validators import is_valid_username
class UncCredentials(object):
    """
    Represents a set of credentials to be used with a UNC connection. Credentials include a
    username and a password.
    """

    def __init__(self, username=None, password=None):
        """
        Returns a new `UncCredentials` object. Both `username` and `password` are optional.
        If neither are provided, the new object will mean that credentials are unnecessary.
        `username` must be a string representing a Windows username (logon). Windows usernames
        may include a domain prefix (i.e. "domain\username"). If `username` cannot be
        construed as a valid Windows username, then this will raise an
        `InvalidUsernameError`.
        Note: UNC connections that require authentication will use the username of the
              currently logged in Windows user unless specifically provided another
              username.
        Note: Providing `None` and `''` (the empty string) have very different meanings.
              Usernames cannot be empty.
        `password` must be a string representing a password.
        Note: Providing `None` and `''` (the empty string) have very different meanings.
              The empty string is a meaningful, legitimate password.
        If only the first positional argument is provided and it is already an instance of the
        `UncCredentials` class (either directly or by inheritance), this constructor will clone
        it and create a new `UncCredentials` object with the same properties.
        """
        # Copy-constructor path: a lone UncCredentials argument is cloned.
        if password is None and isinstance(username, self.__class__):
            new_username = username._username
            new_password = username._password
        else:
            new_username = username
            new_password = password
        # `None` must survive untouched — it means "no username", which is
        # distinct from any cleaned string.
        cleaned_username = clean_username(new_username) if new_username is not None else None
        if cleaned_username is None or is_valid_username(cleaned_username):
            self._username = cleaned_username
            self._password = new_password
        else:
            raise InvalidUsernameError(new_username)

    def get_username(self):
        """
        Returns the username of this `UncCredentials` object or `None` if no username was provided.
        """
        return self._username

    def get_password(self):
        """
        Returns the password of this `UncCredentials` object or `None` if no password was provided.
        """
        return self._password

    def is_empty(self):
        """
        Returns `True` if this `UncCredentials` object does not contain any meaningful credentials.
        """
        return self._username is None and self._password is None

    def get_auth_string(self):
        """
        Returns a standard representation of these credentials as a string. The string mimics
        the HTTP Basic Authentication scheme.
        """
        # A non-None password always forces the "user:pass" form, even when
        # the username is absent or the password is the empty string.
        if self._password is not None:
            return '{0}:{1}'.format(self._username or '', self._password)
        elif self._username:
            return self._username
        else:
            return ''

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self._username == other._username and self._password == other._password)
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # No __str__ is defined, so str(self) falls back to __repr__, which
        # embeds get_auth_string() — equal credentials hash equally.
        return hash(str(self))

    def __repr__(self):
        return '<{cls}: "{str}">'.format(cls=self.__class__.__name__, str=self.get_auth_string())
def get_creds_from_string(string):
    """
    Parses a standardized string (the format produced by `UncCredentials.get_auth_string`)
    into a new `UncCredentials` object and returns it. Whatever errors can be raised by
    `UncCredentials`'s constructor can also be raised by this function.
    """
    if ':' in string:
        # Split only on the FIRST ':' so a password containing ':' survives.
        username, _, password = string.partition(':')
    else:
        username, password = string, None
    return UncCredentials(username or None, password)  # Usernames cannot be `''`, but passwords can.
| {
"content_hash": "1a0236350cf82a2a703f38fdf161742f",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 103,
"avg_line_length": 40.598214285714285,
"alnum_prop": 0.6217286122718276,
"repo_name": "CovenantEyes/py_win_unc",
"id": "1e9bc578c8a386741ee7ea599ccf5201a7a31a12",
"size": "4547",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "win_unc/unc_credentials.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "70467"
}
],
"symlink_target": ""
} |
import six
from .node import Node
from .utils import parse_date
@six.python_2_unicode_compatible
class News(Node):
    """Represents a News item on CrunchBase."""
    KNOWN_PROPERTIES = [
        "title",
        "author",
        "posted_on",
        "url",
        "created_at",
        "updated_at",
    ]

    def _coerce_values(self):
        # Only 'posted_on' needs coercion from string to a date value.
        for field in ('posted_on',):
            raw = getattr(self, field, None)
            if raw:
                setattr(self, field, parse_date(raw))

    def __str__(self):
        return u'{title} by {author} on {posted_on}'.format(
            title=self.title,
            author=self.author,
            posted_on=self.posted_on,
        )

    def __repr__(self):
        return self.__str__()
| {
"content_hash": "bd1fb8f88b8bfc08a426e0d1ab32c5ad",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 68,
"avg_line_length": 22.242424242424242,
"alnum_prop": 0.5299727520435967,
"repo_name": "alabid/pycrunchbase",
"id": "67242ca520fe227c4dc5b1285fa4919f577e6495",
"size": "734",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/pycrunchbase/resource/news.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1489"
},
{
"name": "PowerShell",
"bytes": "2986"
},
{
"name": "Python",
"bytes": "107738"
}
],
"symlink_target": ""
} |
from sms.tests.config import *
from django.test import TestCase
from unittest.mock import patch
import sms.routes.contact as routes
from contact.models import Contact
# Dotted import paths used as @patch targets throughout these tests.
SAMPLE = 'sms.routes.contact.Contact.objects.sample'
INFORM_ALL = 'sms.routes.contact.Contact.objects.inform_all'
GET_TEMPLATER = 'sms.routes.contact.get_templater'
FORWARD_TO_ME = 'sms.routes.contact.forward_message_to_me'
class InformTestCase(TestCase):
    """Tests for the inform() route."""

    def setUp(self):
        # One not-yet-informed contact for inform() to pick up.
        self.c = Contact.objects.create(phone_number="666",
                                        informed=False)

    @patch(INFORM_ALL)
    def testInformCallsInformAll(self, mock):
        routes.inform()
        mock.assert_called_with()

    def testInformReturnsDict(self):
        self.assertIsInstance(routes.inform(), dict)

    def testInformKeyIsNumber(self):
        # The returned dict is keyed by contact phone number.
        self.assertIn(self.c.phone_number, routes.inform().keys())

    @patch(GET_TEMPLATER)
    def testInformGetsInformMessage(self, mock):
        # inform() fetches the 'inform' message template.
        routes.inform()
        mock.assert_called_with('inform')
class PanicTestCase(TestCase):
    """Tests for the panic() route."""

    @patch(SAMPLE)
    def testPanicCallsSampleWithDefaultCount(self, mock):
        routes.panic()
        mock.assert_called_with(routes.DEFAULT_MESSAGE_COUNT)

    @patch(SAMPLE)
    def testPanicCallsSampleWithCount(self, mock):
        routes.panic(10)
        mock.assert_called_with(10)

    @patch(GET_TEMPLATER)
    def testPanicGetsPanicMessage(self, mock):
        routes.panic()
        mock.assert_called_with('panic')

    @patch(FORWARD_TO_ME)
    def testPanicBypassesMessageSaving(self, mock):
        # After panic(), an incoming message is forwarded to me even though
        # a tag is configured (which would normally save it instead).
        routes.config('tag', 'abc')
        routes.panic()
        routes.process_outside_message('1234', 'this is tagged')
        mock.assert_called_with('1234', 'this is tagged')
class SayTestCase(TestCase):
    """Tests for the say() route."""

    # NOTE: @patch decorators apply bottom-up, so the SAMPLE mock is the
    # first injected argument and GET_TEMPLATER the second.
    @patch(GET_TEMPLATER)
    @patch(SAMPLE)
    def testSayHasDefaultParams(self, mock_sample, mock_template):
        routes.say()
        mock_sample.assert_called_with(routes.DEFAULT_MESSAGE_COUNT)
        mock_template.assert_called_with('talk')

    @patch(GET_TEMPLATER)
    @patch(SAMPLE)
    def testSayHasCallableParams(self, mock_sample, mock_template):
        # The template name is lower-cased from the caller's argument.
        routes.say('YESOCH', 10)
        mock_sample.assert_called_with(10)
        mock_template.assert_called_with('yesoch')
class OutsideMessageTestCase(TestCase):
    """Tests for process_outside_message() routing: forward vs. save."""

    @patch(FORWARD_TO_ME)
    def testCallsForwardIfNoTagCache(self, mock):
        # With no tag configured, messages are forwarded to me.
        routes.process_outside_message("123", "Hail YESOCH")
        mock.assert_called_with("123", "Hail YESOCH")

    @patch('sms.models.save_tagged_message')
    def testCallsStoreIfTagCache(self, mock):
        # With a tag configured, messages are saved under that tag instead.
        routes.config('tag', 'elephant')
        routes.process_outside_message("123", "Hail YESOCH")
        mock.assert_called_with("elephant", '123', "Hail YESOCH")
class ForwardTestCase(TestCase):
    """Tests for forward_message_to_me()'s sender identification."""

    def setUp(self):
        # A known contact so forwarding can resolve a sender's name.
        self.contact = Contact.objects.create(
            phone_number="123", first_name="A", last_name="B", informed=False)

    def testGetsNameIfInDatabase(self):
        forwarded = routes.forward_message_to_me("123", "My name's YESOCH")
        self.assertIn("A", forwarded)
        self.assertIn("B", forwarded)

    def testGetsNumberIfNotInDatabase(self):
        forwarded = routes.forward_message_to_me("1234", "My name's YESOCH")
        self.assertIn("1234", forwarded)

    def testGetsMessage(self):
        forwarded = routes.forward_message_to_me("123", "My name's YESOCH")
        self.assertIn("My name's YESOCH", forwarded)
class ListenTestCase(TestCase):
    """Tests for the listen() route."""

    @patch(FORWARD_TO_ME)
    def testGetsMessage(self, mock):
        routes.config('tag', 'abc')
        routes.process_outside_message('1234', 'this is tagged')
        # NOTE(review): `output` is assigned but never asserted on — the
        # test only checks that listen('abc') forwards the saved message.
        output = routes.listen('abc')
        mock.assert_called_with('1234', 'this is tagged')
| {
"content_hash": "96855297efc3017b4def11d8ff21f1cb",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 75,
"avg_line_length": 31.94214876033058,
"alnum_prop": 0.6507115135834411,
"repo_name": "hwayne/safehouse",
"id": "2d4ca28cacfea44a6de90fcefae71825346079e0",
"size": "3865",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sms/tests/routes/test_contact.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "60555"
}
],
"symlink_target": ""
} |
__author__ = 'alisonbnt'
import os
import urllib
import ConfigParser
def setup():
    """Interactively install the GCM commit-msg hook and its config file.

    Downloads the hook into .git/hooks/commit-msg and writes the GitHub
    owner/repo into .git/hooks/gcm.cfg, prompting the user throughout.
    Must be run from the repository root. (Python 2 script: uses
    raw_input, urllib.URLopener and ConfigParser.)
    """
    print('-- GCM REPOSITORY SETUP --')
    print('Checking setup')
    already_installed_hook = False
    git_hook_path = '.git/hooks/commit-msg'
    cfg_file_path = '.git/hooks/gcm.cfg'
    if os.path.isfile(git_hook_path):
        already_installed_hook = True
    already_installed_config = False
    if os.path.isfile(cfg_file_path):
        already_installed_config = True
    # If both artifacts exist, offer to replace them; otherwise keep going
    # and only install whatever is missing.
    if already_installed_hook and already_installed_config:
        print('Hook and Config files already exists.')
        rerun = yes_no_dialog('Would you like to replace then?')
        if not rerun:
            print('Quitting...')
            exit()
        else:
            # Trailing comma suppresses the newline under Python 2.
            print('Removing old files... '),
            os.remove(git_hook_path)
            os.remove(cfg_file_path)
            already_installed_config = False
            already_installed_hook = False
            print('DONE')

    print('Running script... ')
    print('Current directory')
    print(os.getcwd())
    print('Make sure this script is running at the repository root')
    running_root_dir = yes_no_dialog('Running in root dir?')
    if running_root_dir:
        print('Running setup')
        if not already_installed_hook:
            # Fetch the hook script from the project's GitHub master branch.
            print('Downloading hook script... '),
            testfile = urllib.URLopener()
            testfile.retrieve(
                "https://raw.githubusercontent.com/alisonbnt/gcm-commit-msg-hook/master/commit-msg",
                ".git/hooks/commit-msg"
            )
            print('DONE')
            print('Hook retrieved successfully')
        else:
            print('Git hook already downloaded.. skipping')
        if not already_installed_config:
            correct_config = False
            config = ConfigParser.RawConfigParser()
            config.add_section('GCM')
            owner = None
            repository = None
            print('Creating config file')
            # Re-prompt until the user confirms the owner/repo pair.
            while not correct_config:
                print('Repository access details')
                owner = raw_input('Enter the repository owner: ')
                repository = raw_input('Enter the repository name: ')
                print('')
                print('Please verify the given data')
                print('Repository owner: ' + owner)
                print('Repository name: ' + repository)
                correct_config = yes_no_dialog('Is this correct?')
            config.set('GCM', 'repo', repository)
            config.set('GCM', 'owner', owner)
            with open(cfg_file_path, 'wb') as configfile:
                print('Writing data to file... '),
                config.write(configfile)
                print('DONE')
        else:
            print('Config file already set... Skipping')
        print('')
        print('Setup complete')
        # Hooks must be executable for git to run them.
        print('Remember to give execution rights for downloaded hook (Use the command below)')
        print('chmod +x .git/hooks/commit-msg')
    else:
        print('Quitting...')
        exit()
def yes_no_dialog(prompt):
    """Ask a yes/no question on stdin and return the answer as a bool.

    Returns False for 'n'/'N'. Returns True for 'y'/'Y' or an empty answer
    (yes is the default). Any other input is invalid and aborts the
    program via exit().
    """
    answer = raw_input(prompt + ' (Y/n) ')
    if answer.lower() == "n":
        return False
    # Bug fix: the original condition was `answer is True and ...`, which is
    # never true for a string, so the invalid-input branch was dead and any
    # garbage answer was silently treated as "yes".
    if answer != "" and answer.lower() != "y":
        print('Invalid option - quitting')
        exit()
    return True
# Run the interactive installer only when invoked as a script.
if __name__ == '__main__':
    setup()
| {
"content_hash": "b99fca0274eefce7a31be549b7daa4da",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 100,
"avg_line_length": 31.30841121495327,
"alnum_prop": 0.5585074626865671,
"repo_name": "alisonbnt/gcm-commit-msg-hook",
"id": "a0822f10c62c4079ab957478a740fa91366372db",
"size": "3370",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gcm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5685"
}
],
"symlink_target": ""
} |
import os, glob, datetime,sys,getopt
from GeoData.GeoData import GeoData
import matplotlib.pyplot as plt
import numpy as np
from GeoData.utilityfuncs import readIonofiles, readAllskyFITS,readSRI_h5
from PlottingClass import str2posix
from matplotlib.dates import YearLocator, MonthLocator, DateFormatter,MinuteLocator, HourLocator,MinuteLocator
def PlotTECdiff(gpsloc,timelist,satnum,sublist,pname='mahdiff.png',tectype = 'TEC'):
    """Plot the TEC difference between two GPS receiver sites.

    Data from both receivers is filtered to a single satellite, linearly
    interpolated onto a common 180-point time grid, subtracted
    (first receiver minus second), and the difference saved as a figure.

    Inputs
        gpsloc - The directory holding the *.iono ionofiles.
        timelist - List of strings [date1, time1, date2, time2] bounding
            the interpolation period (parsed by str2posix).
        satnum - The satellite number as an int or float.
        sublist - Two receiver names whose values will be compared.
        pname - File name of the saved plot (default 'mahdiff.png').
        tectype - Either 'vTEC' or 'TEC'; selects which data key is compared.
    """
    # Figure out location or receivers: receiver name is the part of the
    # file name before the first '-', lower-cased.
    flist1 = glob.glob(os.path.join(gpsloc,'*.iono'))
    fnames = np.array([os.path.splitext(os.path.split(i)[1])[0].lower().split('-')[0] for i in flist1])
    [f0,f1] = [np.argwhere(i.lower()==fnames)[0][0] for i in sublist]
    # read in the receivers and filter out by sat number
    mah0str = flist1[f0]
    mah1str = flist1[f1]
    mah0 = GeoData(readIonofiles,(mah0str,))
    mah1 = GeoData(readIonofiles,(mah1str,))
    sat230 = mah0.data['satnum']==satnum
    sat231 = mah1.data['satnum']==satnum
    timemah0 = mah0.times[sat230]
    timemah1 = mah1.times[sat231]
    TEC0 = mah0.data[tectype][sat230]
    TEC1 = mah1.data[tectype][sat231]
    # Interpolation onto a common 180-point grid between the period bounds.
    xends = str2posix(timelist)
    xint = np.linspace(xends[0],xends[1],180)
    mah0int = np.interp(xint,timemah0[:,0],TEC0)
    mah1int = np.interp(xint,timemah1[:,0],TEC1)
    mahdif = mah0int-mah1int
    #plotting: x axis as UTC datetimes with hour major / 15-min minor ticks.
    dts = map(datetime.datetime.utcfromtimestamp, xint)
    fig, axmat = plt.subplots(1,1,dpi=300)
    lines = axmat.plot(dts,mahdif)
    axmat.set_title(sublist[0]+' - '+sublist[1] +' '+ timelist[0])
    dtfmt = DateFormatter('%H:%M:%S')
    axmat.xaxis.set_major_locator(HourLocator())
    axmat.xaxis.set_major_formatter(dtfmt)
    axmat.xaxis.set_minor_locator(MinuteLocator(interval=15))
    axmat.set_xlabel('Time UT')
    axmat.set_ylabel(tectype)
    plt.savefig(pname)
def plottecvstime(TECGD,satnum,fig,ax):
    """Plot vertical TEC versus time for one satellite on a given axis.

    Inputs
        TECGD - A GeoData instance already filtered by time to the desired period.
        satnum - The number of the satellite to plot.
        fig - The figure handle (unused here but kept for the caller's API).
        ax - The axis handle to draw on.
    Outputs
        lines - The handle list for the line plot.
    """
    keep = TECGD.data['satnum']==satnum
    times = TECGD.times[:,0][keep]
    vtec = TECGD.data['vTEC'][keep]
    # Convert POSIX timestamps to UTC datetimes for the time axis.
    dts = map(datetime.datetime.utcfromtimestamp, times)
    dtfmt = DateFormatter('%H:%M:%S')
    lines = ax.plot(dts,vtec)
    # Quarter-hour major ticks.
    ax.xaxis.set_major_locator(MinuteLocator(byminute = [0,15,30,45]))#interval=15))
    ax.xaxis.set_major_formatter(dtfmt)
    ax.set_ylabel('TEC')
    ax.set_ylim([-10.,30.])
    ax.set_title('Data From Sat {0:d}'.format(satnum))
    return lines
def plotalltecvstime(TEClist1,flist1,satnum,pfname='TECMaps'):
    """Plot TEC vs time for one satellite across many receivers, one subplot each.

    Inputs
        TEClist1 - List of GeoData objects, already filtered by time to the desired period.
        flist1 - File names the GeoData objects came from (used as subplot titles).
        satnum - The number of the satellite to plot.
        pfname - The name of the saved plot (default 'TECMaps').
    """
    # Keep only the receivers that actually observed this satellite.
    flist=[]
    TEClist = []
    for i,j in enumerate(TEClist1):
        if np.any(j.data['satnum']==satnum):
            TEClist.append(j)
            flist.append(flist1[i])
    # Two-column grid of shared-axis subplots sized to the receiver count.
    col = 2.
    numr = np.ceil(len(flist)/col)
    dxs = 4.0
    dys = 2.
    fig, axmat = plt.subplots(int(numr),int(col),dpi=300,sharex=True,sharey=True,figsize=(dxs*col,dys*(numr+1)))
    plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
    axvec= axmat.flatten()
    # Subplot titles are the bare file names without path or extension.
    dnames = [os.path.splitext(os.path.split(i)[-1])[0] for i in flist]
    for i,iGD in enumerate(TEClist):
        lines = plottecvstime(iGD,satnum,fig,axvec[i])
        axvec[i].set_title(dnames[i])
    plt.suptitle('Data from Sat: {0:d}'.format(satnum))
    plt.subplots_adjust(top=0.95)
    plt.savefig(pfname)
# Command-line entry point: parse flags and run the two-receiver comparison.
if __name__== '__main__':
    argv = sys.argv[1:]

    outstr = '''
             Usage: mahlidiff.py -i <ionofile dir>, -s <sat number>, -b <begining time>, -e <endtime>, -p <plotname> -m <first receiver> -n <second receiver>
             or
             python plotdata.py -h

             This script will run comparethe TEC in two mahali recievers

             Optional arguments

             -i The directory that holds all of the TEC data in ionofile formats.
             -s Satlilite number
             -b begining time.
             -e endtime
             -p plotdir
             -m first receiver
             -n seocnd receiver
             '''
    try:
        opts, args = getopt.gnu_getopt(argv,"hs:i:b:e:p:m:n:")
    except getopt.GetoptError:
        print(outstr)
        sys.exit(2)

    gpsloc=''
    ASloc=''
    ISRloc=''
    # NOTE(review): ASloc, ISRloc, inifile and plotdir are set but never
    # used below.
    inifile=None
    sublist = ['','']
    for opt, arg in opts:
        if opt == '-h':
            print(outstr)
            sys.exit()
        elif opt in ("-i", "--ifile"):
            gpsloc = os.path.expanduser(arg)
        elif opt in ("-s", "--sat"):
            # Accept either int or float text for the satellite number.
            satnum=int(float(arg))
        elif opt in ("-b", "--btime"):
            btime=os.path.expanduser(arg)
        elif opt in ("-e", "--etime"):
            endtime = os.path.expanduser(arg)
        elif opt in ('-p','--pdir'):
            plotdir=os.path.expanduser(arg)
        elif opt in ('-m','--minu'):
            sublist[0] = arg
        elif opt in ('-n','--ninu'):
            sublist[1] = arg

    # Build [date1, time1, date2, time2] from the two "date time" strings.
    timelist = btime.split(' ') +endtime.split(' ')
    PlotTECdiff(gpsloc,timelist,satnum,sublist)
"content_hash": "9842b81b231542d1e1950acc5c2ae883",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 158,
"avg_line_length": 34.743169398907106,
"alnum_prop": 0.6005033029254483,
"repo_name": "jswoboda/MahaliPlotting",
"id": "a6da6392046d9e43f47e1502387947e37e4b4321",
"size": "6380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mahalidiff.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "87400"
},
{
"name": "Shell",
"bytes": "1033"
}
],
"symlink_target": ""
} |
import os
import bdb
import types
from Tkinter import *
from WindowList import ListedToplevel
from ScrolledList import ScrolledList
import macosxSupport
class Idb(bdb.Bdb):
    """bdb.Bdb subclass that reports debugger events to the IDLE GUI."""

    def __init__(self, gui):
        self.gui = gui
        bdb.Bdb.__init__(self)

    def user_line(self, frame):
        # Step straight through IDLE's own RPC plumbing.
        if self.in_rpc_code(frame):
            self.set_step()
            return
        self.gui.interaction(self.__frame2message(frame), frame)

    def user_exception(self, frame, info):
        if self.in_rpc_code(frame):
            self.set_step()
            return
        self.gui.interaction(self.__frame2message(frame), frame, info)

    def in_rpc_code(self, frame):
        """Return True when the frame (or a caller) is inside rpc.py."""
        if frame.f_code.co_filename.count('rpc.py'):
            return True
        caller = frame.f_back
        if caller.f_code.co_filename.count('Debugger.py'):
            # (that test will catch both Debugger.py and RemoteDebugger.py)
            return False
        return self.in_rpc_code(caller)

    def __frame2message(self, frame):
        """Format a frame as 'basename:lineno[: funcname()]'."""
        code = frame.f_code
        message = "%s:%s" % (os.path.basename(code.co_filename), frame.f_lineno)
        if code.co_name != "?":
            message = "%s: %s()" % (message, code.co_name)
        return message
class Debugger:
vstack = vsource = vlocals = vglobals = None
def __init__(self, pyshell, idb=None):
if idb is None:
idb = Idb(self)
self.pyshell = pyshell
self.idb = idb
self.frame = None
self.make_gui()
self.interacting = 0
def run(self, *args):
try:
self.interacting = 1
return self.idb.run(*args)
finally:
self.interacting = 0
def close(self, event=None):
if self.interacting:
self.top.bell()
return
if self.stackviewer:
self.stackviewer.close(); self.stackviewer = None
# Clean up pyshell if user clicked debugger control close widget.
# (Causes a harmless extra cycle through close_debugger() if user
# toggled debugger from pyshell Debug menu)
self.pyshell.close_debugger()
# Now close the debugger control window....
self.top.destroy()
def make_gui(self):
pyshell = self.pyshell
self.flist = pyshell.flist
self.root = root = pyshell.root
self.top = top = ListedToplevel(root)
self.top.wm_title("Debug Control")
self.top.wm_iconname("Debug")
top.wm_protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<Escape>", self.close)
#
self.bframe = bframe = Frame(top)
self.bframe.pack(anchor="w")
self.buttons = bl = []
#
self.bcont = b = Button(bframe, text="Go", command=self.cont)
bl.append(b)
self.bstep = b = Button(bframe, text="Step", command=self.step)
bl.append(b)
self.bnext = b = Button(bframe, text="Over", command=self.next)
bl.append(b)
self.bret = b = Button(bframe, text="Out", command=self.ret)
bl.append(b)
self.bret = b = Button(bframe, text="Quit", command=self.quit)
bl.append(b)
#
for b in bl:
b.configure(state="disabled")
b.pack(side="left")
#
self.cframe = cframe = Frame(bframe)
self.cframe.pack(side="left")
#
if not self.vstack:
self.__class__.vstack = BooleanVar(top)
self.vstack.set(1)
self.bstack = Checkbutton(cframe,
text="Stack", command=self.show_stack, variable=self.vstack)
self.bstack.grid(row=0, column=0)
if not self.vsource:
self.__class__.vsource = BooleanVar(top)
self.bsource = Checkbutton(cframe,
text="Source", command=self.show_source, variable=self.vsource)
self.bsource.grid(row=0, column=1)
if not self.vlocals:
self.__class__.vlocals = BooleanVar(top)
self.vlocals.set(1)
self.blocals = Checkbutton(cframe,
text="Locals", command=self.show_locals, variable=self.vlocals)
self.blocals.grid(row=1, column=0)
if not self.vglobals:
self.__class__.vglobals = BooleanVar(top)
self.bglobals = Checkbutton(cframe,
text="Globals", command=self.show_globals, variable=self.vglobals)
self.bglobals.grid(row=1, column=1)
#
self.status = Label(top, anchor="w")
self.status.pack(anchor="w")
self.error = Label(top, anchor="w")
self.error.pack(anchor="w", fill="x")
self.errorbg = self.error.cget("background")
#
self.fstack = Frame(top, height=1)
self.fstack.pack(expand=1, fill="both")
self.flocals = Frame(top)
self.flocals.pack(expand=1, fill="both")
self.fglobals = Frame(top, height=1)
self.fglobals.pack(expand=1, fill="both")
#
if self.vstack.get():
self.show_stack()
if self.vlocals.get():
self.show_locals()
if self.vglobals.get():
self.show_globals()
def interaction(self, message, frame, info=None):
self.frame = frame
self.status.configure(text=message)
#
if info:
type, value, tb = info
try:
m1 = type.__name__
except AttributeError:
m1 = "%s" % str(type)
if value is not None:
try:
m1 = "%s: %s" % (m1, str(value))
except:
pass
bg = "yellow"
else:
m1 = ""
tb = None
bg = self.errorbg
self.error.configure(text=m1, background=bg)
#
sv = self.stackviewer
if sv:
stack, i = self.idb.get_stack(self.frame, tb)
sv.load_stack(stack, i)
#
self.show_variables(1)
#
if self.vsource.get():
self.sync_source_line()
#
for b in self.buttons:
b.configure(state="normal")
#
self.top.wakeup()
self.root.mainloop()
#
for b in self.buttons:
b.configure(state="disabled")
self.status.configure(text="")
self.error.configure(text="", background=self.errorbg)
self.frame = None
def sync_source_line(self):
frame = self.frame
if not frame:
return
filename, lineno = self.__frame2fileline(frame)
if filename[:1] + filename[-1:] != "<>" and os.path.exists(filename):
self.flist.gotofileline(filename, lineno)
def __frame2fileline(self, frame):
code = frame.f_code
filename = code.co_filename
lineno = frame.f_lineno
return filename, lineno
def cont(self):
self.idb.set_continue()
self.root.quit()
def step(self):
self.idb.set_step()
self.root.quit()
def next(self):
self.idb.set_next(self.frame)
self.root.quit()
def ret(self):
self.idb.set_return(self.frame)
self.root.quit()
def quit(self):
self.idb.set_quit()
self.root.quit()
stackviewer = None
def show_stack(self):
if not self.stackviewer and self.vstack.get():
self.stackviewer = sv = StackViewer(self.fstack, self.flist, self)
if self.frame:
stack, i = self.idb.get_stack(self.frame, None)
sv.load_stack(stack, i)
else:
sv = self.stackviewer
if sv and not self.vstack.get():
self.stackviewer = None
sv.close()
self.fstack['height'] = 1
def show_source(self):
if self.vsource.get():
self.sync_source_line()
def show_frame(self, (frame, lineno)):
self.frame = frame
self.show_variables()
localsviewer = None
globalsviewer = None
def show_locals(self):
lv = self.localsviewer
if self.vlocals.get():
if not lv:
self.localsviewer = NamespaceViewer(self.flocals, "Locals")
else:
if lv:
self.localsviewer = None
lv.close()
self.flocals['height'] = 1
self.show_variables()
def show_globals(self):
gv = self.globalsviewer
if self.vglobals.get():
if not gv:
self.globalsviewer = NamespaceViewer(self.fglobals, "Globals")
else:
if gv:
self.globalsviewer = None
gv.close()
self.fglobals['height'] = 1
self.show_variables()
def show_variables(self, force=0):
lv = self.localsviewer
gv = self.globalsviewer
frame = self.frame
if not frame:
ldict = gdict = None
else:
ldict = frame.f_locals
gdict = frame.f_globals
if lv and gv and ldict is gdict:
ldict = None
if lv:
lv.load_dict(ldict, force, self.pyshell.interp.rpcclt)
if gv:
gv.load_dict(gdict, force, self.pyshell.interp.rpcclt)
def set_breakpoint_here(self, filename, lineno):
self.idb.set_break(filename, lineno)
def clear_breakpoint_here(self, filename, lineno):
self.idb.clear_break(filename, lineno)
def clear_file_breaks(self, filename):
self.idb.clear_all_file_breaks(filename)
def load_breakpoints(self):
"Load PyShellEditorWindow breakpoints into subprocess debugger"
pyshell_edit_windows = self.pyshell.flist.inversedict.keys()
for editwin in pyshell_edit_windows:
filename = editwin.io.filename
try:
for lineno in editwin.breakpoints:
self.set_breakpoint_here(filename, lineno)
except AttributeError:
continue
class StackViewer(ScrolledList):
    """Scrolled list of stack frames shown in the Debug Control window."""

    def __init__(self, master, flist, gui):
        if macosxSupport.runningAsOSXApp():
            # At least on with the stock AquaTk version on OSX 10.4 you'll
            # get an shaking GUI that eventually kills IDLE if the width
            # argument is specified.
            ScrolledList.__init__(self, master)
        else:
            ScrolledList.__init__(self, master, width=80)
        self.flist = flist
        self.gui = gui
        self.stack = []

    def load_stack(self, stack, index=None):
        """Fill the list with (frame, lineno) pairs; mark `index` with '> '."""
        self.stack = stack
        self.clear()
        for i in range(len(stack)):
            frame, lineno = stack[i]
            try:
                modname = frame.f_globals["__name__"]
            except:
                modname = "?"
            code = frame.f_code
            filename = code.co_filename
            funcname = code.co_name
            import linecache
            sourceline = linecache.getline(filename, lineno)
            import string
            sourceline = string.strip(sourceline)
            # Module-level code has no useful function name.
            if funcname in ("?", "", None):
                item = "%s, line %d: %s" % (modname, lineno, sourceline)
            else:
                item = "%s.%s(), line %d: %s" % (modname, funcname,
                                                 lineno, sourceline)
            if i == index:
                item = "> " + item
            self.append(item)
        if index is not None:
            self.select(index)

    def popup_event(self, event):
        "override base method"
        # Suppress the context menu when there is no stack to act on.
        if self.stack:
            return ScrolledList.popup_event(self, event)

    def fill_menu(self):
        "override base method"
        menu = self.menu
        menu.add_command(label="Go to source line",
                         command=self.goto_source_line)
        menu.add_command(label="Show stack frame",
                         command=self.show_stack_frame)

    def on_select(self, index):
        "override base method"
        if 0 <= index < len(self.stack):
            self.gui.show_frame(self.stack[index])

    def on_double(self, index):
        "override base method"
        self.show_source(index)

    def goto_source_line(self):
        index = self.listbox.index("active")
        self.show_source(index)

    def show_stack_frame(self):
        index = self.listbox.index("active")
        if 0 <= index < len(self.stack):
            self.gui.show_frame(self.stack[index])

    def show_source(self, index):
        """Open an editor window at the selected frame's source line."""
        if not (0 <= index < len(self.stack)):
            return
        frame, lineno = self.stack[index]
        code = frame.f_code
        filename = code.co_filename
        if os.path.isfile(filename):
            edit = self.flist.open(filename)
            if edit:
                edit.gotoline(lineno)
class NamespaceViewer:
    """Scrollable name/value grid for a namespace dict (Locals/Globals pane)."""

    def __init__(self, master, title, dict=None):
        # NOTE: `dict` (the builtin) is shadowed throughout this class.
        width = 0
        height = 40
        if dict:
            height = 20*len(dict) # XXX 20 == observed height of Entry widget
        self.master = master
        self.title = title
        import repr
        self.repr = repr.Repr()
        self.repr.maxstring = 60
        self.repr.maxother = 60
        self.frame = frame = Frame(master)
        self.frame.pack(expand=1, fill="both")
        self.label = Label(frame, text=title, borderwidth=2, relief="groove")
        self.label.pack(fill="x")
        self.vbar = vbar = Scrollbar(frame, name="vbar")
        vbar.pack(side="right", fill="y")
        self.canvas = canvas = Canvas(frame,
                                      height=min(300, max(40, height)),
                                      scrollregion=(0, 0, width, height))
        canvas.pack(side="left", fill="both", expand=1)
        vbar["command"] = canvas.yview
        canvas["yscrollcommand"] = vbar.set
        self.subframe = subframe = Frame(canvas)
        self.sfid = canvas.create_window(0, 0, window=subframe, anchor="nw")
        self.load_dict(dict)

    # Sentinel meaning "no dict loaded yet"; distinct from any real dict
    # (and from None) so the first load_dict() call never short-circuits.
    dict = -1

    def load_dict(self, dict, force=0, rpc_client=None):
        """Rebuild the grid from `dict`; skipped if unchanged unless `force`."""
        if dict is self.dict and not force:
            return
        subframe = self.subframe
        frame = self.frame
        # Tear down the previous grid of widgets before rebuilding.
        for c in subframe.children.values():
            c.destroy()
        self.dict = None
        if not dict:
            l = Label(subframe, text="None")
            l.grid(row=0, column=0)
        else:
            names = dict.keys()
            names.sort()
            row = 0
            for name in names:
                value = dict[name]
                svalue = self.repr.repr(value) # repr(value)
                # Strip extra quotes caused by calling repr on the (already)
                # repr'd value sent across the RPC interface:
                if rpc_client:
                    svalue = svalue[1:-1]
                l = Label(subframe, text=name)
                l.grid(row=row, column=0, sticky="nw")
                # A flat Entry is used so the value text is selectable.
                l = Entry(subframe, width=0, borderwidth=0)
                l.insert(0, svalue)
                l.grid(row=row, column=1, sticky="nw")
                row = row+1
        self.dict = dict
        # XXX Could we use a <Configure> callback for the following?
        subframe.update_idletasks() # Alas!
        width = subframe.winfo_reqwidth()
        height = subframe.winfo_reqheight()
        canvas = self.canvas
        self.canvas["scrollregion"] = (0, 0, width, height)
        if height > 300:
            canvas["height"] = 300
            frame.pack(expand=1)
        else:
            canvas["height"] = height
            frame.pack(expand=0)

    def close(self):
        self.frame.destroy()
| {
"content_hash": "b11df6b7f70868411302a3362c3be53c",
"timestamp": "",
"source": "github",
"line_count": 481,
"max_line_length": 79,
"avg_line_length": 33.82536382536382,
"alnum_prop": 0.5232944068838353,
"repo_name": "ericlink/adms-server",
"id": "c4af5b016764a93b3e53ad15d1989ce1699d4820",
"size": "16270",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "playframework-dist/1.1-src/python/Lib/idlelib/Debugger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "408"
},
{
"name": "C",
"bytes": "152256"
},
{
"name": "CSS",
"bytes": "97486"
},
{
"name": "HTML",
"bytes": "553901"
},
{
"name": "Java",
"bytes": "3086962"
},
{
"name": "JavaScript",
"bytes": "736134"
},
{
"name": "Python",
"bytes": "15750302"
},
{
"name": "SQLPL",
"bytes": "10111"
},
{
"name": "Scala",
"bytes": "1432"
},
{
"name": "Shell",
"bytes": "1369"
}
],
"symlink_target": ""
} |
from .client import APIClient
from interage.api import managers
class InterageAPI(object):
    """Facade over the Interage REST API.

    Owns a single APIClient and exposes one manager per resource type
    (interacoes, medicamentos, principios_ativos) as read-only properties.
    """

    def __init__(self, **args):
        super(InterageAPI, self).__init__()
        self.__client = APIClient(**args)
        self.__init_managers()

    def __init_managers(self):
        """Build the per-resource managers, all sharing the same client."""
        client = self.__client
        self.__managers = {
            'interacoes': managers.InteracaoAPIManager(client=client),
            'medicamentos': managers.MedicamentoAPIManager(client=client),
            'principios_ativos': managers.PrincipioAtivoAPIManager(client=client),
        }

    @property
    def interacoes(self):
        """Manager for drug-interaction lookups."""
        return self.__managers['interacoes']

    @property
    def medicamentos(self):
        """Manager for medication lookups."""
        return self.__managers['medicamentos']

    @property
    def principios_ativos(self):
        """Manager for active-ingredient lookups."""
        return self.__managers['principios_ativos']
| {
"content_hash": "6b70c2aaf565caba6a00531a8359d2b9",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 104,
"avg_line_length": 33.61538461538461,
"alnum_prop": 0.6521739130434783,
"repo_name": "IntMed/interage_python_sdk",
"id": "e045d297057d5fcefa1fa0717b48802004983637",
"size": "874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "interage/api/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21616"
}
],
"symlink_target": ""
} |
try:
import unittest2 as unittest
except ImportError:
import unittest
import requests
import responses
from pkg_resources import resource_string
import json
import phabricator
phabricator.ARCRC = {} # overwrite any arcrc that might be read

# Canned Conduit responses keyed by API method name, loaded from the
# package's bundled test resources.
RESPONSES = json.loads(
    resource_string(
        'phabricator.tests.resources',
        'responses.json'
    ).decode('utf8')
)

# Conduit certificate fixture used by the hashing/connect tests below.
CERTIFICATE = resource_string(
    'phabricator.tests.resources',
    'certificate.txt'
).decode('utf8').strip()

# Protect against local user's .arcrc interference.
# NOTE(review): duplicates the identical assignment at the top of this
# module; harmless, but one of the two could probably be removed.
phabricator.ARCRC = {}
class PhabricatorTest(unittest.TestCase):
    """Exercises the phabricator client against mocked Conduit endpoints.

    HTTP traffic is intercepted with the `responses` library; canned
    payloads come from the module-level RESPONSES fixture.
    """

    def _make_api(self):
        """Return a fresh client pointed at the local test endpoint.

        Centralizes the constructor call that was previously repeated
        verbatim in almost every test method.
        """
        return phabricator.Phabricator(
            username='test',
            certificate='test',
            host='http://localhost/api/'
        )

    def setUp(self):
        self.api = self._make_api()
        # The hashing test needs the real fixture certificate.
        self.api.certificate = CERTIFICATE

    def test_generate_hash(self):
        token = '12345678'
        hashed = self.api.generate_hash(token)
        self.assertEqual(hashed, 'f8d3bea4e58a2b2967d93d5b307bfa7c693b2e7f')

    @responses.activate
    def test_connect(self):
        responses.add('POST', 'http://localhost/api/conduit.connect',
                      body=RESPONSES['conduit.connect'], status=200)

        api = self._make_api()
        api.connect()
        keys = api._conduit.keys()
        self.assertIn('sessionKey', keys)
        self.assertIn('connectionID', keys)
        assert len(responses.calls) == 1

    @responses.activate
    def test_user_whoami(self):
        responses.add('POST', 'http://localhost/api/user.whoami',
                      body=RESPONSES['user.whoami'], status=200)

        api = self._make_api()
        # Pretend we are already connected so the call goes straight out.
        api._conduit = True

        self.assertEqual(api.user.whoami()['userName'], 'testaccount')

    def test_classic_resources(self):
        api = self._make_api()

        self.assertEqual(api.user.whoami.method, 'user')
        self.assertEqual(api.user.whoami.endpoint, 'whoami')

    def test_nested_resources(self):
        api = self._make_api()

        self.assertEqual(api.diffusion.repository.edit.method, 'diffusion')
        self.assertEqual(
            api.diffusion.repository.edit.endpoint, 'repository.edit')

    @responses.activate
    def test_bad_status(self):
        responses.add(
            'POST', 'http://localhost/api/conduit.connect', status=400)

        api = self._make_api()
        with self.assertRaises(requests.exceptions.HTTPError):
            api.user.whoami()

        assert len(responses.calls) == 1

    @responses.activate
    def test_maniphest_find(self):
        responses.add('POST', 'http://localhost/api/maniphest.find',
                      body=RESPONSES['maniphest.find'], status=200)

        api = self._make_api()
        api._conduit = True

        result = api.maniphest.find(
            ownerphids=['PHID-USER-5022a9389121884ab9db']
        )
        self.assertEqual(len(result), 1)

        # Test iteration
        self.assertIsInstance([x for x in result], list)

        # Test getattr
        self.assertEqual(
            result['PHID-TASK-4cgpskv6zzys6rp5rvrc']['status'],
            '3'
        )

    def test_validation(self):
        self.api._conduit = True

        # Calls with missing/incorrectly-typed parameters must be rejected
        # before any network traffic happens.
        self.assertRaises(ValueError, self.api.differential.find)
        with self.assertRaises(ValueError):
            self.api.differential.find(query=1)
        with self.assertRaises(ValueError):
            self.api.differential.find(query='1')
        with self.assertRaises(ValueError):
            self.api.differential.find(query='1', guids='1')

    def test_map_param_type(self):
        uint = 'uint'
        self.assertEqual(phabricator.map_param_type(uint), int)

        list_bool = 'list<bool>'
        self.assertEqual(phabricator.map_param_type(list_bool), [bool])

        list_pair = 'list<pair<callsign, path>>'
        self.assertEqual(phabricator.map_param_type(list_pair), [tuple])

        complex_list_pair = 'list<pair<string-constant<"gtcm">, string>>'
        self.assertEqual(phabricator.map_param_type(
            complex_list_pair), [tuple])

    def test_endpoint_shadowing(self):
        # Conduit endpoint names must not collide with attributes of the
        # client instance itself, or they would become unreachable.
        shadowed_endpoints = [e for e in self.api._interface.keys() if e in self.api.__dict__]
        self.assertEqual(
            shadowed_endpoints,
            [],
            "The following endpoints are shadowed: {}".format(shadowed_endpoints)
        )
# Allow running the suite directly: `python test_phabricator.py`.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "e4357c99d2dd984d50e9e23e48f849b3",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 94,
"avg_line_length": 29.56896551724138,
"alnum_prop": 0.599028182701652,
"repo_name": "disqus/python-phabricator",
"id": "e12b2859b8f66030bd7ee7f96e782f73b838d7df",
"size": "5145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phabricator/tests/test_phabricator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18861"
},
{
"name": "Shell",
"bytes": "82"
}
],
"symlink_target": ""
} |
"""
Services module forms
"""
from django import forms
from django.db.models import Q
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.html import strip_tags
from maker.core.conf import settings
from maker.identities.models import Contact
from maker.core.decorators import preprocess_form
from maker.core.mail import SystemEmail
from maker.core.models import Object, ModuleSetting
from maker.core.rendering import get_template_source
from maker.messaging.models import Message
from maker.messaging.emails import EmailMessage
from maker.services.models import Ticket, TicketRecord, ServiceAgent, TicketStatus, Service
from maker.services.models import ServiceLevelAgreement, TicketQueue
preprocess_form()
class SettingsForm(forms.Form):
    """ Administration settings form

    Edits the module-wide defaults for maker.services: default ticket
    status and queue, whether callers are notified by e-mail, and the
    notification template.  Values are persisted as ModuleSetting entries.
    """
    default_ticket_status = forms.ModelChoiceField(label='Default Ticket Status', queryset=[])
    default_ticket_queue = forms.ModelChoiceField(label='Default Queue', queryset=[])
    send_email_to_caller = forms.ChoiceField(label="Notify Caller By E-mail", choices=((True, _('Yes')),
                                                                                      (False, _('No'))), required=False)
    send_email_template = forms.CharField(label="E-mail Template", widget=forms.Textarea, required=False)

    def __init__(self, user, *args, **kwargs):
        "Sets choices and initial value"
        super(SettingsForm, self).__init__(*args, **kwargs)

        # Translate
        self.fields['default_ticket_status'].label = _('Default Ticket Status')
        self.fields['default_ticket_queue'].label = _('Default Queue')
        self.fields['send_email_to_caller'].label = _("Notify Caller By E-mail")
        self.fields['send_email_template'].label = _("E-mail Template")

        # Only objects the user may execute on ('x') are selectable.
        self.fields['default_ticket_status'].queryset = Object.filter_permitted(user, TicketStatus.objects, mode='x')
        self.fields['default_ticket_queue'].queryset = Object.filter_permitted(user, TicketQueue.objects, mode='x')

        # Each setting may be absent (never saved) or point at a deleted
        # object; in either case the lookup raises and the field keeps its
        # built-in default.
        try:
            conf = ModuleSetting.get_for_module('maker.services', 'default_ticket_status')[0]
            default_ticket_status = TicketStatus.objects.get(pk=long(conf.value))
            self.fields['default_ticket_status'].initial = default_ticket_status.id
        except Exception:
            pass

        try:
            conf = ModuleSetting.get_for_module('maker.services', 'default_ticket_queue')[0]
            default_ticket_queue = TicketQueue.objects.get(pk=long(conf.value))
            self.fields['default_ticket_queue'].initial = default_ticket_queue.id
        except Exception:
            pass

        try:
            conf = ModuleSetting.get_for_module('maker.services', 'send_email_to_caller')[0]
            self.fields['send_email_to_caller'].initial = conf.value
        except Exception:
            # Narrowed from a bare 'except:' (which would also swallow
            # SystemExit/KeyboardInterrupt); fall back to the global default.
            self.fields['send_email_to_caller'].initial = settings.MAKER_SEND_EMAIL_TO_CALLER

        # notification template
        try:
            conf = ModuleSetting.get_for_module('maker.services', 'send_email_template')[0]
            self.fields['send_email_template'].initial = conf.value
        except Exception:
            self.fields['send_email_template'].initial = get_template_source('services/emails/notify_caller.html')

    def save(self):
        """Persist all settings; returns True on success, False on failure."""
        try:
            ModuleSetting.set_for_module('default_ticket_status',
                                         self.cleaned_data['default_ticket_status'].id,
                                         'maker.services')
            ModuleSetting.set_for_module('default_ticket_queue',
                                         self.cleaned_data['default_ticket_queue'].id,
                                         'maker.services')
            ModuleSetting.set_for_module('send_email_to_caller',
                                         self.cleaned_data['send_email_to_caller'],
                                         'maker.services')
            ModuleSetting.set_for_module('send_email_template',
                                         self.cleaned_data['send_email_template'],
                                         'maker.services')
            return True
        except Exception:
            # The caller only needs a boolean outcome.
            return False
class MassActionForm(forms.Form):
    """
    Mass action form for Tickets

    The view builds one of these forms per selected ticket (passed in as
    'instance') and calls save() to apply the chosen updates/deletion.
    """
    status = forms.ModelChoiceField(queryset=[], required=False)
    service = forms.ModelChoiceField(queryset=[], required=False)
    queue = forms.ModelChoiceField(queryset=[], required=False)
    delete = forms.ChoiceField(label=_("Delete"), choices=(('', '-----'), ('delete', _('Delete Completely')),
                                                           ('trash', _('Move to Trash'))), required=False)

    instance = None

    def __init__(self, user, *args, **kwargs):
        "Sets allowed values"
        if 'instance' in kwargs:
            self.instance = kwargs['instance']
            del kwargs['instance']

        super(MassActionForm, self).__init__(*args, **kwargs)

        # Only objects the user may execute on ('x') are selectable.
        self.fields['status'].queryset = Object.filter_permitted(user, TicketStatus.objects, mode='x')
        self.fields['status'].label = _("Status")
        self.fields['service'].queryset = Object.filter_permitted(user, Service.objects, mode='x')
        self.fields['service'].label = _("Service")
        self.fields['queue'].queryset = Object.filter_permitted(user, TicketQueue.objects, mode='x')
        self.fields['queue'].label = _("Queue")
        # NOTE(review): re-creates the 'delete' field already declared on the
        # class with identical options; looks redundant -- confirm before
        # removing.
        self.fields['delete'] = forms.ChoiceField(label=_("Delete"), choices=(('', '-----'),
                                                                              ('delete', _('Delete Completely')),
                                                                              ('trash', _('Move to Trash'))), required=False)

    def save(self, *args, **kwargs):
        "Process form"
        # Apply the selected status/service/queue changes, then the
        # requested deletion mode, to the bound ticket (if any).
        if self.instance:
            if self.is_valid():
                if self.cleaned_data['service']:
                    self.instance.service = self.cleaned_data['service']
                if self.cleaned_data['status']:
                    self.instance.status = self.cleaned_data['status']
                if self.cleaned_data['queue']:
                    self.instance.queue = self.cleaned_data['queue']
                self.instance.save()
                if self.cleaned_data['delete']:
                    if self.cleaned_data['delete'] == 'delete':
                        self.instance.delete()
                    if self.cleaned_data['delete'] == 'trash':
                        self.instance.trash = True
                        self.instance.save()
class TicketForm(forms.ModelForm):
    """
    Ticket form

    Creation/edit form for Tickets.  All selectors are limited to objects
    the requesting *user* may execute on.  When creating (no 'instance' in
    kwargs) default values come from the selected *queue*, or failing that
    from the module-level settings.  Non-agent users get a reduced form:
    internal fields (caller, status, queue, SLA, assignment, resolution,
    reference, priority) are removed.
    """
    name = forms.CharField(label='Title', widget=forms.TextInput(attrs={'size':'50'}))

    def __init__(self, user, queue, agent, *args, **kwargs):
        "Sets allowed values"
        super(TicketForm, self).__init__(*args, **kwargs)

        # Filter allowed selections for TicketForm
        self.fields['reference'].required = False
        self.fields['reference'].label = _("Reference")

        self.fields['caller'].queryset = Object.filter_permitted(user, Contact.objects)
        self.fields['caller'].widget.attrs.update({'class': 'autocomplete',
                                                   'callback': reverse('identities_ajax_contact_lookup')})
        self.fields['caller'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
        self.fields['caller'].label = _("Caller")

        self.fields['assigned'].queryset = Object.filter_permitted(user, ServiceAgent.objects, mode='x')
        self.fields['assigned'].label = _("Assigned to")
        self.fields['assigned'].help_text = ""
        self.fields['assigned'].widget.attrs.update({'class': 'multicomplete',
                                                     'callback': reverse('services_ajax_agent_lookup')})
        self.fields['assigned'].widget.attrs.update({'popuplink': reverse('services_agent_add')})

        self.fields['status'].queryset = Object.filter_permitted(user, TicketStatus.objects, mode='x')
        self.fields['status'].label = _("Status")

        self.fields['service'].queryset = Object.filter_permitted(user, Service.objects, mode='x')
        self.fields['service'].label = _("Service")

        self.fields['queue'].queryset = Object.filter_permitted(user, TicketQueue.objects, mode='x')
        self.fields['queue'].label = _("Queue")

        self.fields['sla'].queryset = Object.filter_permitted(user, ServiceLevelAgreement.objects, mode='x')
        self.fields['sla'].label = _("Service Level Agreement")

        self.fields['resolution'].label = _("Resolution")

        # Set default values if not editing
        if 'instance' not in kwargs:
            # Missing settings/objects simply leave the defaults unset, so
            # the lookups below intentionally swallow Exception (narrowed
            # from the previous bare 'except:' clauses).
            try:
                self.fields['caller'].initial = user.get_contact().id
            except Exception:
                pass
            if queue:
                self.fields['queue'].initial = queue.id
                if queue.default_ticket_status and queue.default_ticket_status in self.fields['status'].queryset:
                    self.fields['status'].initial = queue.default_ticket_status_id
                else:
                    # Fall back to the module-wide default status, if set.
                    try:
                        conf = ModuleSetting.get_for_module('maker.services', 'default_ticket_status')[0]
                        self.fields['status'].initial = long(conf.value)
                    except Exception:
                        pass
                if queue.default_ticket_priority:
                    self.fields['priority'].initial = queue.default_ticket_priority
                if queue.default_service:
                    self.fields['service'].initial = queue.default_service_id
                    try:
                        default_sla = ServiceLevelAgreement.objects.get(service=queue.default_service, default=True)
                        if default_sla:
                            self.fields['sla'].initial = default_sla.id
                    except Exception:
                        pass
            else:
                # No queue selected: use the module-wide defaults, if any.
                try:
                    conf = ModuleSetting.get_for_module('maker.services', 'default_ticket_status')[0]
                    self.fields['status'].initial = long(conf.value)
                except Exception:
                    pass
                try:
                    conf = ModuleSetting.get_for_module('maker.services', 'default_ticket_queue')[0]
                    self.fields['queue'].initial = long(conf.value)
                except Exception:
                    pass

        self.fields['name'].label = _("Name")
        self.fields['name'].widget.attrs.update({'class': 'duplicates',
                                                 'callback': reverse('services_ajax_ticket_lookup')})
        self.fields['priority'].label = _("Priority")
        self.fields['priority'].choices = ((5, _('Highest')), (4, _('High')), (3, _('Normal')), (2, _('Low')), (1, _('Lowest')))
        self.fields['urgency'].label = _("Urgency")
        self.fields['urgency'].choices = ((5, _('Highest')), (4, _('High')), (3, _('Normal')), (2, _('Low')), (1, _('Lowest')))
        self.fields['details'].label = _("Details")

        # Callers who are not agents only see the public subset of fields.
        if not agent:
            del self.fields['caller']
            del self.fields['reference']
            del self.fields['priority']
            del self.fields['status']
            del self.fields['queue']
            del self.fields['sla']
            del self.fields['assigned']
            del self.fields['resolution']

    class Meta:
        "Ticket specified as model"
        model = Ticket
        fields = ('name', 'reference', 'caller', 'assigned', 'urgency', 'priority',
                  'status', 'service', 'sla', 'queue', 'details', 'resolution')
class TicketStatusForm(forms.ModelForm):
    """ TicketStatus form """
    name = forms.CharField(widget=forms.TextInput(attrs={'size':'30'}))

    def __init__(self, user, *args, **kwargs):
        "Sets allowed values"
        # *user* is accepted for signature parity with the other forms in
        # this module; no per-user filtering is currently applied here.
        super(TicketStatusForm, self).__init__(*args, **kwargs)

    class Meta:
        "TicketStatus specified as model"
        model = TicketStatus
        fields = ('name', 'active', 'hidden', 'details')
class TicketRecordForm(forms.ModelForm):
    """
    TicketRecord form

    Adds an update (comment) to *ticket*.  Agents additionally get the
    'notify' checkbox and a 'resolution' flag; for non-agents those fields
    are removed entirely.
    """

    def __init__(self, agent, ticket, *args, **kwargs):
        super(TicketRecordForm, self).__init__(*args, **kwargs)
        self.ticket = ticket
        # NOTE(review): label is the lowercase string "body" -- possibly
        # intended to be "Body"; left unchanged here.
        self.fields['body'].label = _("body")
        self.fields['body'].required = True
        self.fields['notify'].label = _("Notify caller")
        self.fields['resolution'] = forms.BooleanField(label=_("Set as Resolution"), required=False)
        if not agent:
            # Non-agents may only post plain updates.
            del self.fields['notify']
            del self.fields['resolution']

    def save(self, *args, **kwargs):
        "Set Resolution if selected"
        instance = super(TicketRecordForm, self).save(*args, **kwargs)
        ticket = self.ticket
        # Optionally promote this update's body to the ticket resolution.
        if 'resolution' in self.cleaned_data and self.cleaned_data['resolution']:
            ticket.resolution = self.cleaned_data['body']
            ticket.save()
        # Send update if notify clicked
        if 'notify' in self.cleaned_data and self.cleaned_data['notify'] and ticket.caller:
            toaddr = ticket.caller.get_email()
            if ticket.message or toaddr:
                # Reply within the ticket's existing message thread if there
                # is one; otherwise start a new thread on the queue's stream.
                reply = Message()
                reply.author = instance.sender
                reply.body = instance.body
                reply.auto_notify = False
                if ticket.message:
                    reply.stream = ticket.message.stream
                    reply.reply_to = ticket.message
                else:
                    reply.stream = ticket.queue.message_stream if ticket.queue else None
                    reply.title = "[#%s] %s" % (ticket.reference, ticket.name)
                reply.save()
                if not ticket.message:
                    # Remember the thread so later updates reply to it.
                    ticket.message = reply
                reply.recipients.add(ticket.caller)
                email = EmailMessage(reply)
                email.send_email()
        return instance

    class Meta:
        "TicketRecord specified as model"
        model = TicketRecord
        fields = ['body', 'notify']
class QueueForm(forms.ModelForm):
    """Create/edit form for a TicketQueue.

    Selectors are limited to objects the user may execute on, and a queue
    being edited is excluded from its own 'parent' choices.
    """
    name = forms.CharField(widget=forms.TextInput(attrs={'size':'50'}))

    def __init__(self, user, *args, **kwargs):
        "Sets allowed values"
        super(QueueForm, self).__init__(*args, **kwargs)

        parent_choices = TicketQueue.objects
        if 'instance' in kwargs:
            instance = kwargs['instance']
            # A queue must never be offered as its own parent.
            parent_choices = parent_choices.exclude(Q(parent=instance) & Q(pk=instance.id))

        for field_name, queryset in (('parent', parent_choices),
                                     ('default_service', Service.objects),
                                     ('next_queue', TicketQueue.objects)):
            self.fields[field_name].queryset = Object.filter_permitted(user, queryset, mode='x')

        self.fields['waiting_time'].help_text = "seconds"
        self.fields['message_stream'].widget.attrs.update({'popuplink': reverse('messaging_stream_add')})

        for field_name, label in (('name', 'Name'),
                                  ('active', 'Active'),
                                  ('parent', 'Parent'),
                                  ('default_ticket_status', 'Default ticket status'),
                                  ('default_ticket_priority', 'Default ticket priority'),
                                  ('default_service', 'Default service'),
                                  ('waiting_time', 'Waiting time'),
                                  ('next_queue', 'Next queue'),
                                  ('ticket_code', 'Ticket code'),
                                  ('message_stream', 'Message stream'),
                                  ('details', 'Details')):
            self.fields[field_name].label = _(label)

    class Meta:
        "TicketQueue specified as model"
        model = TicketQueue
        fields = ('name', 'active', 'parent', 'default_ticket_status',
                  'default_ticket_priority', 'default_service', 'waiting_time',
                  'next_queue', 'ticket_code', 'message_stream', 'details')
class ServiceForm(forms.ModelForm):
    """Create/edit form for a Service, with permission-aware parent choices."""
    name = forms.CharField(widget=forms.TextInput(attrs={'size':'50'}))

    def __init__(self, user, *args, **kwargs):
        "Sets allowed values"
        super(ServiceForm, self).__init__(*args, **kwargs)
        service_choices = Service.objects
        if 'instance' in kwargs:
            instance = kwargs['instance']
            # A service cannot be its own parent.
            service_choices = service_choices.exclude(Q(parent=instance) & Q(pk=instance.id))
        self.fields['parent'].queryset = Object.filter_permitted(user, service_choices, mode='x')
        for field_name, label in (('name', 'Name'),
                                  ('parent', 'Parent'),
                                  ('details', 'Details')):
            self.fields[field_name].label = _(label)

    class Meta:
        "Service specified as model"
        model = Service
        fields = ('name', 'parent', 'details')
class ServiceLevelAgreementForm(forms.ModelForm):
    """
    ServiceLevelAgreement form

    Client and provider are Contact autocompletes; the provider defaults to
    the contact of the user's default group, when one exists.
    """
    name = forms.CharField(widget=forms.TextInput(attrs={'size':'50'}))

    def __init__(self, user, *args, **kwargs):
        "Sets allowed values"
        super(ServiceLevelAgreementForm, self).__init__(*args, **kwargs)
        self.fields['name'].label = _("Name")
        self.fields['response_time'].help_text = 'minutes'
        self.fields['response_time'].widget.attrs.update({'size': 10})
        self.fields['response_time'].label = _("Response time")
        self.fields['uptime_rate'].help_text = 'percent'
        self.fields['uptime_rate'].widget.attrs.update({'size': 5})
        self.fields['uptime_rate'].label = _("Uptime rate")
        # Only objects the user may execute on ('x') are selectable.
        self.fields['service'].queryset = Object.filter_permitted(user, Service.objects, mode='x')
        self.fields['service'].label = _("Service")
        self.fields['client'].queryset = Object.filter_permitted(user, Contact.objects, mode='x')
        self.fields['client'].widget.attrs.update({'class': 'autocomplete',
                                                   'callback': reverse('identities_ajax_contact_lookup')})
        self.fields['client'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
        self.fields['client'].label = _("Client")
        self.fields['provider'].queryset = Object.filter_permitted(user, Contact.objects, mode='x')
        self.fields['provider'].widget.attrs.update({'class': 'autocomplete',
                                                     'callback': reverse('identities_ajax_contact_lookup')})
        self.fields['provider'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
        self.fields['provider'].label = _("Provider")
        # Default availability window: standard business hours.
        self.fields['available_from'].initial = "09:00"
        self.fields['available_from'].widget.attrs.update({'size': 10})
        self.fields['available_from'].label = _("Available from")
        self.fields['available_to'].initial = "18:00"
        self.fields['available_to'].widget.attrs.update({'size': 10})
        self.fields['available_to'].label = _("Available to")
        # Default the provider to the current user's group contact.
        contact = user.default_group.get_contact()
        if contact:
            self.fields['provider'].initial = contact.id

    class Meta:
        "ServiceLevelAgreement specified as model"
        model = ServiceLevelAgreement
        fields = ('name', 'service', 'client', 'provider', 'response_time', 'uptime_rate', 'available_from',
                  'available_to')
class AgentForm(forms.ModelForm):
    """Create/edit form for a ServiceAgent."""

    def __init__(self, user, *args, **kwargs):
        "Sets allowed values"
        super(AgentForm, self).__init__(*args, **kwargs)
        # Wire the user picker up to the AJAX autocomplete endpoint.
        self.fields['related_user'].widget.attrs.update(
            {'class': 'autocomplete',
             'callback': reverse('identities_ajax_user_lookup')})
        for field_name, label in (('related_user', 'Related user'),
                                  ('active', 'Active'),
                                  ('occupied', 'Occupied'),
                                  ('available_from', 'Available from'),
                                  ('available_to', 'Available to')):
            self.fields[field_name].label = _(label)

    class Meta:
        "Agent specified as model"
        model = ServiceAgent
        fields = ('related_user', 'active', 'occupied', 'available_from', 'available_to')
class FilterForm(forms.ModelForm):
    """
    Ticket Filters definition

    Field names listed in *skip* are removed from the form; the remaining
    selectors are limited to objects the user may execute on.
    """

    def __init__(self, user, skip=None, *args, **kwargs):
        "Sets allowed values"
        # The default was previously the mutable literal [], which is shared
        # between calls; use None and normalize instead.
        if skip is None:
            skip = []
        super(FilterForm, self).__init__(*args, **kwargs)

        if 'caller' in skip:
            del self.fields['caller']
        else:
            self.fields['caller'].queryset = Object.filter_permitted(user, Contact.objects, mode='x')
            self.fields['caller'].widget.attrs.update({'class': 'autocomplete',
                                                       'callback': reverse('identities_ajax_contact_lookup')})
            self.fields['caller'].label = _("Caller")

        if 'status' in skip:
            del self.fields['status']
        else:
            self.fields['status'].queryset = Object.filter_permitted(user, TicketStatus.objects, mode='x')
            self.fields['status'].required = False
            self.fields['status'].label = _("Status")

        self.fields['service'].queryset = Object.filter_permitted(user, Service.objects, mode='x')
        self.fields['service'].label = _("Service")

        self.fields['sla'].queryset = Object.filter_permitted(user, ServiceLevelAgreement.objects, mode='x')
        self.fields['sla'].label = _("SLA")

        if 'queue' in skip:
            del self.fields['queue']
        else:
            self.fields['queue'].queryset = Object.filter_permitted(user, TicketQueue.objects, mode='x')
            self.fields['queue'].label = _("Queue")

        if 'assigned' in skip:
            del self.fields['assigned']
        else:
            self.fields['assigned'].queryset = Object.filter_permitted(user, ServiceAgent.objects, mode='x')
            self.fields['assigned'].widget.attrs.update({'class': 'multicomplete',
                                                         'callback': reverse('services_ajax_agent_lookup')})
            self.fields['assigned'].label = _("Assigned to")
            self.fields['assigned'].help_text = ""

    class Meta:
        "Ticket specified as model"
        model = Ticket
        fields = ('caller', 'status', 'service', 'sla', 'queue', 'assigned')
class SLAFilterForm(forms.ModelForm):
    """ SLA Filters definition

    Filter form for ServiceLevelAgreements; every selector is optional and
    limited to objects the user may execute on.
    """

    def __init__(self, user, skip=None, *args, **kwargs):
        "Sets allowed values"
        # *skip* is accepted for signature parity with FilterForm but is not
        # used here; the mutable [] default was replaced with None.
        if skip is None:
            skip = []
        super(SLAFilterForm, self).__init__(*args, **kwargs)
        self.fields['client'].queryset = Object.filter_permitted(user, Contact.objects, mode='x')
        self.fields['client'].required = False
        self.fields['client'].widget.attrs.update({'class': 'autocomplete',
                                                   'callback': reverse('identities_ajax_contact_lookup')})
        self.fields['client'].label = _("Client")
        self.fields['provider'].queryset = Object.filter_permitted(user, Contact.objects, mode='x')
        self.fields['provider'].required = False
        self.fields['provider'].widget.attrs.update({'class': 'autocomplete',
                                                     'callback': reverse('identities_ajax_contact_lookup')})
        self.fields['provider'].label = _("Provider")
        self.fields['service'].queryset = Object.filter_permitted(user, Service.objects, mode='x')
        self.fields['service'].required = False
        self.fields['service'].label = _("Service")

    class Meta:
        "ServiceLevelAgreement specified as model"
        model = ServiceLevelAgreement
        fields = ('service', 'client', 'provider')
class AgentFilterForm(forms.ModelForm):
    """
    Agent Filters definition

    Filter form for ServiceAgents; the user selector is optional.
    """

    def __init__(self, user, skip=None, *args, **kwargs):
        "Sets allowed values"
        # *skip* is accepted for signature parity with FilterForm but is not
        # used here; the mutable [] default was replaced with None.
        if skip is None:
            skip = []
        super(AgentFilterForm, self).__init__(*args, **kwargs)
        self.fields['related_user'].required = False
        self.fields['related_user'].label = _("Related user")

    class Meta:
        "ServiceAgent specified as model"
        model = ServiceAgent
        fields = ['related_user']
| {
"content_hash": "45691da17c1cb1054e22e12b57be3462",
"timestamp": "",
"source": "github",
"line_count": 555,
"max_line_length": 128,
"avg_line_length": 44.983783783783785,
"alnum_prop": 0.5622847072017945,
"repo_name": "alejo8591/maker",
"id": "2dd115c0a8504fcae6e88ffb4d31222079ff75b2",
"size": "25018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "services/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1578070"
},
{
"name": "Perl",
"bytes": "164"
},
{
"name": "Python",
"bytes": "2863599"
},
{
"name": "Shell",
"bytes": "3561"
}
],
"symlink_target": ""
} |
"""Get stats about your activity.
Example:
- my_activity.py for stats for the current week (last week on mondays).
- my_activity.py -Q for stats for last quarter.
- my_activity.py -Y for stats for this year.
- my_activity.py -b 4/24/19 for stats since April 24th 2019.
- my_activity.py -b 4/24/19 -e 6/16/19 stats between April 24th and June 16th.
- my_activity.py -jd to output stats for the week to json with deltas data.
To add additional gerrit instances, one can pass a JSON file as parameter:
- my_activity.py -F config.json
{
"gerrit_instances": {
"team-internal-review.googlesource.com": {
"shorturl": "go/teamcl",
"short_url_protocol": "http"
},
"team-external-review.googlesource.com": {}
}
}
"""
# These services typically only provide a created time and a last modified time
# for each item for general queries. This is not enough to determine if there
# was activity in a given time period. So, we first query for all things created
# before end and modified after begin. Then, we get the details of each item and
# check those details to determine if there was activity in the given period.
# This means that query time scales mostly with (today() - begin).
from __future__ import print_function
import collections
import contextlib
from datetime import datetime
from datetime import timedelta
import httplib2
import itertools
import json
import logging
from multiprocessing.pool import ThreadPool
import optparse
import os
import subprocess
from string import Formatter
import sys
import urllib
import re
import auth
import fix_encoding
import gclient_utils
import gerrit_util
if sys.version_info.major == 2:
logging.critical(
'Python 2 is not supported. Run my_activity.py using vpython3.')
try:
import dateutil # pylint: disable=import-error
import dateutil.parser
from dateutil.relativedelta import relativedelta
except ImportError:
logging.error('python-dateutil package required')
sys.exit(1)
class DefaultFormatter(Formatter):
  """string.Formatter that substitutes *default* for missing keyword fields.

  Positional fields and keywords that were actually supplied behave exactly
  like the standard Formatter.
  """
  def __init__(self, default=''):
    super(DefaultFormatter, self).__init__()
    self.default = default

  def get_value(self, key, args, kwargs):
    # Numeric (positional) keys and present keywords take the normal path.
    if not isinstance(key, str) or key in kwargs:
      return Formatter.get_value(self, key, args, kwargs)
    return self.default
# Gerrit hosts queried by default.  'shorturl' and 'short_url_protocol' are
# optional and control how change links are abbreviated in the output.
gerrit_instances = [
  {
    'url': 'android-review.googlesource.com',
    'shorturl': 'r.android.com',
    'short_url_protocol': 'https',
  },
  {
    'url': 'gerrit-review.googlesource.com',
  },
  {
    'url': 'chrome-internal-review.googlesource.com',
    'shorturl': 'crrev.com/i',
    'short_url_protocol': 'https',
  },
  {
    'url': 'chromium-review.googlesource.com',
    'shorturl': 'crrev.com/c',
    'short_url_protocol': 'https',
  },
  {
    'url': 'dawn-review.googlesource.com',
  },
  {
    'url': 'pdfium-review.googlesource.com',
  },
  {
    'url': 'skia-review.googlesource.com',
  },
  {
    'url': 'review.coreboot.org',
  },
]

# Monorail projects searched for issue activity, keyed by project name.
# The optional 'shorturl'/'short_url_protocol' entries abbreviate issue links.
monorail_projects = {
  'angleproject': {
    'shorturl': 'anglebug.com',
    'short_url_protocol': 'http',
  },
  'chromium': {
    'shorturl': 'crbug.com',
    'short_url_protocol': 'https',
  },
  'dawn': {},
  'google-breakpad': {},
  'gyp': {},
  'pdfium': {
    'shorturl': 'crbug.com/pdfium',
    'short_url_protocol': 'https',
  },
  'skia': {},
  'tint': {},
  'v8': {
    'shorturl': 'crbug.com/v8',
    'short_url_protocol': 'https',
  },
}
def username(email):
  """Return the local part of *email*; falsy inputs pass through unchanged."""
  if not email:
    return email
  return email.split('@', 1)[0]
def datetime_to_midnight(date):
  """Return *date* with the time-of-day fields zeroed (midnight, same day)."""
  return date.replace(hour=0, minute=0, second=0, microsecond=0)
def get_quarter_of(date):
  """Return (begin, end) datetimes bracketing the calendar quarter of *date*.

  *begin* is midnight on the first day of the quarter; *end* is midnight on
  the first day of the following quarter.

  The previous relativedelta-based arithmetic mis-computed the quarter start
  for some month-end dates (e.g. May 31: the intermediate month subtraction
  clamps to Apr 30, and subtracting the day offset then lands on Mar 31, the
  wrong quarter).  Computing the boundaries with replace() avoids clamping
  entirely.
  """
  begin = date.replace(month=date.month - (date.month - 1) % 3, day=1,
                       hour=0, minute=0, second=0, microsecond=0)
  # begin.month is one of 1, 4, 7, 10; only Q4 wraps into the next year.
  if begin.month == 10:
    end = begin.replace(year=begin.year + 1, month=1)
  else:
    end = begin.replace(month=begin.month + 3)
  return begin, end
def get_year_of(date):
  """Return (begin, end): Jan 1 midnight of *date*'s year and of the next.

  Computes the boundaries directly with replace(); this yields exactly the
  same results as the previous relativedelta arithmetic (subtracting
  month-1 months always lands in January, where any day value fits), but is
  simpler and needs no third-party arithmetic at this site.
  """
  begin = date.replace(month=1, day=1, hour=0, minute=0, second=0,
                       microsecond=0)
  return begin, begin.replace(year=begin.year + 1)
def get_week_of(date):
  """Return (monday_midnight, next_monday_midnight) bracketing *date*'s week."""
  week_start = datetime_to_midnight(date) - timedelta(days=date.weekday())
  week_end = week_start + timedelta(weeks=1)
  return week_start, week_end
def get_yes_or_no(msg):
  """Prompt until the user answers yes or no; empty input means no."""
  prompt = msg + ' yes/no [no] '
  while True:
    answer = gclient_utils.AskForData(prompt)
    if answer in ('y', 'yes'):
      return True
    if not answer or answer in ('n', 'no'):
      return False
def datetime_from_gerrit(date_string):
  """Parse a Gerrit timestamp ('YYYY-MM-DD HH:MM:SS.<us>000') into a datetime."""
  fmt = '%Y-%m-%d %H:%M:%S.%f000'
  return datetime.strptime(date_string, fmt)
def datetime_from_monorail(date_string):
  """Parse a Monorail ISO-8601 timestamp ('YYYY-MM-DDTHH:MM:SS') to datetime."""
  monorail_format = '%Y-%m-%dT%H:%M:%S'
  return datetime.strptime(date_string, monorail_format)
def extract_bug_numbers_from_description(issue):
  """Return sorted unique 'project:id' bug references from a change.

  Reads the current revision's commit message out of a Gerrit REST issue
  dict and recognizes both "Bug: 123"/"Fixed: 123" and "BUG=123" notations;
  several bugs may appear on one line, comma-separated.  Bare numbers are
  qualified with the default 'chromium:' prefix.
  """
  current = issue['revisions'][issue['current_revision']]
  description = current['commit']['message']
  bug_pattern = re.compile(
      r'^(BUG=|(Bug|Fixed):\s*)((((?:[a-zA-Z0-9-]+:)?\d+)(,\s?)?)+)',
      re.IGNORECASE | re.MULTILINE)
  bugs = []
  for match in bug_pattern.findall(description):
    # Group 3 (index 2) holds the comma-separated list of bug ids.
    bugs.extend(match[2].replace(' ', '').split(','))
  qualified = [bug if ':' in bug else 'chromium:%s' % bug for bug in bugs]
  return sorted(set(qualified))
class MyActivity(object):
  """Collects and prints one user's Gerrit and Monorail activity.

  Queries every server in gerrit_instances for changes/reviews and every
  project in monorail_projects for issues touched by `options.user` within
  the [options.begin, options.end) window, then prints the results in the
  configured text format or dumps them as JSON.
  """
  def __init__(self, options):
    # Parsed optparse options; begin/end bound the reporting time window.
    self.options = options
    self.modified_after = options.begin
    self.modified_before = options.end
    self.user = options.user
    # Results populated by get_changes()/get_reviews()/get_issues().
    self.changes = []
    self.reviews = []
    self.issues = []
    self.referenced_issues = []
    self.google_code_auth_token = None
    # Deduplicated error strings from failed Gerrit lookups.
    self.access_errors = set()
    self.skip_servers = (options.skip_servers.split(','))
  def show_progress(self, how='.'):
    """Write a progress marker, but only when stdout is a terminal."""
    if sys.stdout.isatty():
      sys.stdout.write(how)
      sys.stdout.flush()
  def gerrit_changes_over_rest(self, instance, filters):
    """Query one Gerrit instance over REST; returns [] and records errors."""
    # Convert the "key:value" filter to a list of (key, value) pairs.
    req = list(f.split(':', 1) for f in filters)
    try:
      # Instantiate the generator to force all the requests now and catch the
      # errors here.
      return list(gerrit_util.GenerateAllChanges(instance['url'], req,
          o_params=['MESSAGES', 'LABELS', 'DETAILED_ACCOUNTS',
                    'CURRENT_REVISION', 'CURRENT_COMMIT']))
    except gerrit_util.GerritError as e:
      error_message = 'Looking up %r: %s' % (instance['url'], e)
      if error_message not in self.access_errors:
        self.access_errors.add(error_message)
      return []
  def gerrit_search(self, instance, owner=None, reviewer=None):
    """Search one instance for changes owned by or reviewed by the user.

    Exactly one of `owner` / `reviewer` must be given.  Results are
    processed, filtered to the time window, and sorted newest-first.
    """
    if instance['url'] in self.skip_servers:
      return []
    max_age = datetime.today() - self.modified_after
    filters = ['-age:%ss' % (max_age.days * 24 * 3600 + max_age.seconds)]
    if owner:
      assert not reviewer
      filters.append('owner:%s' % owner)
    else:
      filters.extend(('-owner:%s' % reviewer, 'reviewer:%s' % reviewer))
    # TODO(cjhopman): Should abandoned changes be filtered out when
    # merged_only is not enabled?
    if self.options.merged_only:
      filters.append('status:merged')
    issues = self.gerrit_changes_over_rest(instance, filters)
    self.show_progress()
    issues = [self.process_gerrit_issue(instance, issue)
              for issue in issues]
    issues = filter(self.filter_issue, issues)
    issues = sorted(issues, key=lambda i: i['modified'], reverse=True)
    return issues
  def process_gerrit_issue(self, instance, issue):
    """Normalize a raw Gerrit REST issue into this script's result dict."""
    ret = {}
    if self.options.deltas:
      ret['delta'] = DefaultFormatter().format(
          '+{insertions},-{deletions}',
          **issue)
    ret['status'] = issue['status']
    # Prefer the instance's short link form when one is configured.
    if 'shorturl' in instance:
      protocol = instance.get('short_url_protocol', 'http')
      url = instance['shorturl']
    else:
      protocol = 'https'
      url = instance['url']
    ret['review_url'] = '%s://%s/%s' % (protocol, url, issue['_number'])
    ret['header'] = issue['subject']
    ret['owner'] = issue['owner'].get('email', '')
    ret['author'] = ret['owner']
    ret['created'] = datetime_from_gerrit(issue['created'])
    ret['modified'] = datetime_from_gerrit(issue['updated'])
    if 'messages' in issue:
      ret['replies'] = self.process_gerrit_issue_replies(issue['messages'])
    else:
      ret['replies'] = []
    ret['reviewers'] = set(r['author'] for r in ret['replies'])
    ret['reviewers'].discard(ret['author'])
    ret['bugs'] = extract_bug_numbers_from_description(issue)
    return ret
  @staticmethod
  def process_gerrit_issue_replies(replies):
    """Normalize Gerrit messages to {author, created, content} dicts."""
    ret = []
    replies = filter(lambda r: 'author' in r and 'email' in r['author'],
                     replies)
    for reply in replies:
      ret.append({
        'author': reply['author']['email'],
        'created': datetime_from_gerrit(reply['date']),
        'content': reply['message'],
      })
    return ret
  def monorail_get_auth_http(self):
    """Return an authorized httplib2.Http for the Monorail API."""
    # Manually use a long timeout (10m); for some users who have a
    # long history on the issue tracker, whatever the default timeout
    # is is reached.
    return auth.Authenticator().authorize(httplib2.Http(timeout=600))
  def filter_modified_monorail_issue(self, issue):
    """Precisely checks if an issue has been modified in the time range.
    This fetches all issue comments to check if the issue has been modified in
    the time range specified by user. This is needed because monorail only
    allows filtering by last updated and published dates, which is not
    sufficient to tell whether a given issue has been modified at some specific
    time range. Any update to the issue is a reported as comment on Monorail.
    Args:
      issue: Issue dict as returned by monorail_query_issues method. In
          particular, must have a key 'uid' formatted as 'project:issue_id'.
    Returns:
      Passed issue if modified, None otherwise.
    """
    http = self.monorail_get_auth_http()
    project, issue_id = issue['uid'].split(':')
    url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
           '/%s/issues/%s/comments?maxResults=10000') % (project, issue_id)
    _, body = http.request(url)
    self.show_progress()
    content = json.loads(body)
    if not content:
      logging.error('Unable to parse %s response from monorail.', project)
      # Keep the issue on parse failure rather than silently dropping it.
      return issue
    for item in content.get('items', []):
      comment_published = datetime_from_monorail(item['published'])
      if self.filter_modified(comment_published):
        return issue
    return None
  def monorail_query_issues(self, project, query):
    """Run one Monorail issues query; returns normalized issue dicts."""
    http = self.monorail_get_auth_http()
    url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
           '/%s/issues') % project
    query_data = urllib.parse.urlencode(query)
    url = url + '?' + query_data
    _, body = http.request(url)
    self.show_progress()
    content = json.loads(body)
    if not content:
      logging.error('Unable to parse %s response from monorail.', project)
      return []
    issues = []
    project_config = monorail_projects.get(project, {})
    for item in content.get('items', []):
      if project_config.get('shorturl'):
        protocol = project_config.get('short_url_protocol', 'http')
        item_url = '%s://%s/%d' % (
            protocol, project_config['shorturl'], item['id'])
      else:
        item_url = 'https://bugs.chromium.org/p/%s/issues/detail?id=%d' % (
            project, item['id'])
      issue = {
        'uid': '%s:%s' % (project, item['id']),
        'header': item['title'],
        'created': datetime_from_monorail(item['published']),
        'modified': datetime_from_monorail(item['updated']),
        'author': item['author']['name'],
        'url': item_url,
        'comments': [],
        'status': item['status'],
        'labels': [],
        'components': []
      }
      if 'owner' in item:
        issue['owner'] = item['owner']['name']
      else:
        issue['owner'] = 'None'
      if 'labels' in item:
        issue['labels'] = item['labels']
      if 'components' in item:
        issue['components'] = item['components']
      issues.append(issue)
    return issues
  def monorail_issue_search(self, project):
    """Search one Monorail project for issues involving the user."""
    epoch = datetime.utcfromtimestamp(0)
    # Defaults to @chromium.org email if one wasn't provided on -u option.
    user_str = (self.options.email if self.options.email.find('@') >= 0
                else '%s@chromium.org' % self.user)
    issues = self.monorail_query_issues(project, {
        'maxResults': 10000,
        'q': user_str,
        'publishedMax': '%d' % (self.modified_before - epoch).total_seconds(),
        'updatedMin': '%d' % (self.modified_after - epoch).total_seconds(),
    })
    if self.options.completed_issues:
      return [
          issue for issue in issues
          if (self.match(issue['owner']) and
              issue['status'].lower() in ('verified', 'fixed'))
      ]
    return [
        issue for issue in issues
        if user_str in (issue['author'], issue['owner'])]
  def monorail_get_issues(self, project, issue_ids):
    """Fetch specific Monorail issues by id list."""
    return self.monorail_query_issues(project, {
        'maxResults': 10000,
        'q': 'id:%s' % ','.join(issue_ids)
    })
  def print_heading(self, heading):
    """Print a section heading using the configured heading format."""
    print()
    print(self.options.output_format_heading.format(heading=heading))
  def match(self, author):
    """Return True if `author` is the user (exact email or local part)."""
    if '@' in self.user:
      return author == self.user
    return author.startswith(self.user + '@')
  def print_change(self, change):
    """Print one Gerrit change using the changes output format."""
    activity = len([
        reply
        for reply in change['replies']
        if self.match(reply['author'])
    ])
    optional_values = {
        'created': change['created'].date().isoformat(),
        'modified': change['modified'].date().isoformat(),
        'reviewers': ', '.join(change['reviewers']),
        'status': change['status'],
        'activity': activity,
    }
    if self.options.deltas:
      optional_values['delta'] = change['delta']
    self.print_generic(self.options.output_format,
                       self.options.output_format_changes,
                       change['header'],
                       change['review_url'],
                       change['author'],
                       change['created'],
                       change['modified'],
                       optional_values)
  def print_issue(self, issue):
    """Print one Monorail issue using the issues output format."""
    optional_values = {
        'created': issue['created'].date().isoformat(),
        'modified': issue['modified'].date().isoformat(),
        'owner': issue['owner'],
        'status': issue['status'],
    }
    self.print_generic(self.options.output_format,
                       self.options.output_format_issues,
                       issue['header'],
                       issue['url'],
                       issue['author'],
                       issue['created'],
                       issue['modified'],
                       optional_values)
  def print_review(self, review):
    """Print one reviewed change using the reviews output format."""
    activity = len([
        reply
        for reply in review['replies']
        if self.match(reply['author'])
    ])
    optional_values = {
        'created': review['created'].date().isoformat(),
        'modified': review['modified'].date().isoformat(),
        'status': review['status'],
        'activity': activity,
    }
    if self.options.deltas:
      optional_values['delta'] = review['delta']
    self.print_generic(self.options.output_format,
                       self.options.output_format_reviews,
                       review['header'],
                       review['review_url'],
                       review['author'],
                       review['created'],
                       review['modified'],
                       optional_values)
  @staticmethod
  def print_generic(default_fmt, specific_fmt,
                    title, url, author, created, modified,
                    optional_values=None):
    """Format and print one record; specific_fmt overrides default_fmt."""
    output_format = specific_fmt if specific_fmt is not None else default_fmt
    values = {
        'title': title,
        'url': url,
        'author': author,
        'created': created,
        'modified': modified,
    }
    if optional_values is not None:
      values.update(optional_values)
    print(DefaultFormatter().format(output_format, **values))
  def filter_issue(self, issue, should_filter_by_user=True):
    """Return True if the issue shows user activity inside the time window."""
    def maybe_filter_username(email):
      return not should_filter_by_user or username(email) == self.user
    if (maybe_filter_username(issue['author']) and
        self.filter_modified(issue['created'])):
      return True
    if (maybe_filter_username(issue['owner']) and
        (self.filter_modified(issue['created']) or
         self.filter_modified(issue['modified']))):
      return True
    # for/else: True only when some in-window reply matches (break taken).
    for reply in issue['replies']:
      if self.filter_modified(reply['created']):
        if not should_filter_by_user:
          break
        if (username(reply['author']) == self.user
            or (self.user + '@') in reply['content']):
          break
    else:
      return False
    return True
  def filter_modified(self, modified):
    """Return True if `modified` lies strictly inside the time window."""
    return self.modified_after < modified < self.modified_before
  def auth_for_changes(self):
    #TODO(cjhopman): Move authentication check for getting changes here.
    pass
  def auth_for_reviews(self):
    # Reviews use all the same instances as changes so no authentication is
    # required.
    pass
  def get_changes(self):
    """Fetch the user's owned changes from all Gerrit instances in parallel."""
    num_instances = len(gerrit_instances)
    with contextlib.closing(ThreadPool(num_instances)) as pool:
      gerrit_changes = pool.map_async(
          lambda instance: self.gerrit_search(instance, owner=self.user),
          gerrit_instances)
      gerrit_changes = itertools.chain.from_iterable(gerrit_changes.get())
      self.changes = list(gerrit_changes)
  def print_changes(self):
    """Print the Changes section, if any changes were found."""
    if self.changes:
      self.print_heading('Changes')
      for change in self.changes:
        self.print_change(change)
  def print_access_errors(self):
    """Log any Gerrit access errors accumulated while fetching."""
    if self.access_errors:
      logging.error('Access Errors:')
      for error in self.access_errors:
        logging.error(error.rstrip())
  def get_reviews(self):
    """Fetch changes the user reviewed from all Gerrit instances in parallel."""
    num_instances = len(gerrit_instances)
    with contextlib.closing(ThreadPool(num_instances)) as pool:
      gerrit_reviews = pool.map_async(
          lambda instance: self.gerrit_search(instance, reviewer=self.user),
          gerrit_instances)
      gerrit_reviews = itertools.chain.from_iterable(gerrit_reviews.get())
      self.reviews = list(gerrit_reviews)
  def print_reviews(self):
    """Print the Reviews section, if any reviews were found."""
    if self.reviews:
      self.print_heading('Reviews')
      for review in self.reviews:
        self.print_review(review)
  def get_issues(self):
    """Fetch the user's Monorail issues, then precisely filter by window."""
    with contextlib.closing(ThreadPool(len(monorail_projects))) as pool:
      monorail_issues = pool.map(
          self.monorail_issue_search, monorail_projects.keys())
    monorail_issues = list(itertools.chain.from_iterable(monorail_issues))
    if not monorail_issues:
      return
    with contextlib.closing(ThreadPool(len(monorail_issues))) as pool:
      filtered_issues = pool.map(
          self.filter_modified_monorail_issue, monorail_issues)
      self.issues = [issue for issue in filtered_issues if issue]
  def get_referenced_issues(self):
    """Fetch issues referenced by owned changes but not already fetched."""
    if not self.issues:
      self.get_issues()
    if not self.changes:
      self.get_changes()
    referenced_issue_uids = set(itertools.chain.from_iterable(
        change['bugs'] for change in self.changes))
    fetched_issue_uids = set(issue['uid'] for issue in self.issues)
    missing_issue_uids = referenced_issue_uids - fetched_issue_uids
    missing_issues_by_project = collections.defaultdict(list)
    for issue_uid in missing_issue_uids:
      project, issue_id = issue_uid.split(':')
      missing_issues_by_project[project].append(issue_id)
    for project, issue_ids in missing_issues_by_project.items():
      self.referenced_issues += self.monorail_get_issues(project, issue_ids)
  def print_issues(self):
    """Print the Issues section, if any issues were found."""
    if self.issues:
      self.print_heading('Issues')
      for issue in self.issues:
        self.print_issue(issue)
  def print_changes_by_issue(self, skip_empty_own):
    """Print changes grouped under the issue(s) they reference."""
    if not self.issues or not self.changes:
      return
    self.print_heading('Changes by referenced issue(s)')
    issues = {issue['uid']: issue for issue in self.issues}
    ref_issues = {issue['uid']: issue for issue in self.referenced_issues}
    changes_by_issue_uid = collections.defaultdict(list)
    changes_by_ref_issue_uid = collections.defaultdict(list)
    changes_without_issue = []
    for change in self.changes:
      added = False
      for issue_uid in change['bugs']:
        if issue_uid in issues:
          changes_by_issue_uid[issue_uid].append(change)
          added = True
        if issue_uid in ref_issues:
          changes_by_ref_issue_uid[issue_uid].append(change)
          added = True
      if not added:
        changes_without_issue.append(change)
    # Changes referencing own issues.
    for issue_uid in issues:
      if changes_by_issue_uid[issue_uid] or not skip_empty_own:
        self.print_issue(issues[issue_uid])
      if changes_by_issue_uid[issue_uid]:
        print()
        for change in changes_by_issue_uid[issue_uid]:
          print(' ', end='')  # this prints no newline
          self.print_change(change)
        print()
    # Changes referencing others' issues.
    for issue_uid in ref_issues:
      assert changes_by_ref_issue_uid[issue_uid]
      self.print_issue(ref_issues[issue_uid])
      for change in changes_by_ref_issue_uid[issue_uid]:
        print('', end=' ')  # this prints one space due to comma, but no newline
        self.print_change(change)
    # Changes referencing no issues.
    if changes_without_issue:
      print(self.options.output_format_no_url.format(title='Other changes'))
      for change in changes_without_issue:
        print('', end=' ')  # this prints one space due to comma, but no newline
        self.print_change(change)
  def print_activity(self):
    """Print the three default sections: changes, reviews, issues."""
    self.print_changes()
    self.print_reviews()
    self.print_issues()
  def dump_json(self, ignore_keys=None):
    """Dump reviews, changes and issues as one JSON object keyed by URL."""
    if ignore_keys is None:
      ignore_keys = ['replies']
    def format_for_json_dump(in_array):
      output = {}
      for item in in_array:
        url = item.get('url') or item.get('review_url')
        if not url:
          raise Exception('Dumped item %s does not specify url' % item)
        output[url] = dict(
            (k, v) for k,v in item.items() if k not in ignore_keys)
      return output
    class PythonObjectEncoder(json.JSONEncoder):
      # Serialize datetimes as ISO strings and sets as lists.
      def default(self, o):  # pylint: disable=method-hidden
        if isinstance(o, datetime):
          return o.isoformat()
        if isinstance(o, set):
          return list(o)
        return json.JSONEncoder.default(self, o)
    output = {
        'reviews': format_for_json_dump(self.reviews),
        'changes': format_for_json_dump(self.changes),
        'issues': format_for_json_dump(self.issues)
    }
    print(json.dumps(output, indent=2, cls=PythonObjectEncoder))
def main():
  """Parse command-line options, fetch the user's activity, and print it.

  Returns 0 on success; 1 on an invalid date range or a bad --config_file
  entry.  Output goes to stdout unless -o/--output names a file.
  """
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>',
      # Look for USER and USERNAME (Windows) environment variables.
      default=os.environ.get('USER', os.environ.get('USERNAME')),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date (mm/dd/yy)')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date (mm/dd/yy)')
  quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                              relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
          quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date (mm/dd/yy)')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth',
      action='store_true',
      help='Ask to authenticate for instances with no auth cookie')
  parser.add_option(
      '-d', '--deltas',
      action='store_true',
      help='Fetch deltas for changes.')
  parser.add_option(
      '--no-referenced-issues',
      action='store_true',
      help='Do not fetch issues referenced by owned changes. Useful in '
           'combination with --changes-by-issue when you only want to list '
           'issues that have also been modified in the same time period.')
  parser.add_option(
      '--skip_servers',
      action='store',
      default='',
      help='A comma separated list of gerrit and rietveld servers to ignore')
  parser.add_option(
      '--skip-own-issues-without-changes',
      action='store_true',
      help='Skips listing own issues without changes when showing changes '
           'grouped by referenced issue(s). See --changes-by-issue for more '
           'details.')
  parser.add_option(
      '-F', '--config_file', metavar='<config_file>',
      help='Configuration file in JSON format, used to add additional gerrit '
           'instances (see source code for an example).')
  activity_types_group = optparse.OptionGroup(parser, 'Activity Types',
                                              'By default, all activity will be looked up and '
                                              'printed. If any of these are specified, only '
                                              'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes',
      action='store_true',
      help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues',
      action='store_true',
      help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews',
      action='store_true',
      help='Show reviews.')
  activity_types_group.add_option(
      '--changes-by-issue', action='store_true',
      help='Show changes grouped by referenced issue(s).')
  parser.add_option_group(activity_types_group)
  output_format_group = optparse.OptionGroup(parser, 'Output Format',
                                             'By default, all activity will be printed in the '
                                             'following format: {url} {title}. This can be '
                                             'changed for either all activity types or '
                                             'individually for each activity type. The format '
                                             'is defined as documented for '
                                             'string.format(...). The variables available for '
                                             'all activity types are url, title, author, '
                                             'created and modified. Format options for '
                                             'specific activity types will override the '
                                             'generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>',
      default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing changes. Supports the '
           'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>',
      default=u'{heading}:',
      help='Specifies the format to use when printing headings. '
           'Supports the variable {heading}.')
  output_format_group.add_option(
      '--output-format-no-url', default='{title}',
      help='Specifies the format to use when printing activity without url.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  output_format_group.add_option(
      '-j', '--json', action='store_true',
      help='Output json data (overrides other format options)')
  parser.add_option_group(output_format_group)
  parser.add_option(
      '-v', '--verbose',
      action='store_const',
      dest='verbosity',
      default=logging.WARN,
      const=logging.INFO,
      help='Output extra informational messages.'
  )
  parser.add_option(
      '-q', '--quiet',
      action='store_const',
      dest='verbosity',
      const=logging.ERROR,
      help='Suppress non-error messages.'
  )
  parser.add_option(
      '-M', '--merged-only',
      action='store_true',
      dest='merged_only',
      default=False,
      help='Shows only changes that have been merged.')
  parser.add_option(
      '-C', '--completed-issues',
      action='store_true',
      dest='completed_issues',
      default=False,
      help='Shows only monorail issues that have completed (Fixed|Verified) '
           'by the user.')
  parser.add_option(
      '-o', '--output', metavar='<file>',
      help='Where to output the results. By default prints to stdout.')
  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member
  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER/USERNAME is not set, please use -u')
  # Retains the original -u option as the email address.
  options.email = options.user
  options.user = username(options.email)
  logging.basicConfig(level=options.verbosity)
  # python-keyring provides easy access to the system keyring.
  try:
    import keyring  # pylint: disable=unused-import,unused-variable,F0401
  except ImportError:
    logging.warning('Consider installing python-keyring')
  # Resolve the reporting window from whichever date option was given.
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (get_week_of(datetime.today() -
                                timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = dateutil.parser.parse(options.begin)
    if options.end:
      end = dateutil.parser.parse(options.end)
    else:
      end = datetime.today()
  options.begin, options.end = begin, end
  if begin >= end:
    # The queries fail in peculiar ways when the begin date is in the future.
    # Give a descriptive error message instead.
    # NOTE: lazy %-args instead of eager % formatting, per logging convention.
    logging.error('Start date (%s) is the same or later than end date (%s)',
                  begin, end)
    return 1
  if options.markdown:
    options.output_format_heading = '### {heading}\n'
    options.output_format = '  * [{title}]({url})'
    options.output_format_no_url = '  * {title}'
  logging.info('Searching for activity by %s', options.user)
  logging.info('Using range %s to %s', options.begin, options.end)
  if options.config_file:
    with open(options.config_file) as f:
      config = json.load(f)
      for item, entries in config.items():
        if item == 'gerrit_instances':
          for repo, dic in entries.items():
            # Use property name as URL
            dic['url'] = repo
            gerrit_instances.append(dic)
        elif item == 'monorail_projects':
          # monorail_projects is a dict, so merge the configured entries into
          # it.  (dict has no .append(); using it raised AttributeError.)
          monorail_projects.update(entries)
        else:
          logging.error('Invalid entry in config file.')
          return 1
  my_activity = MyActivity(options)
  my_activity.show_progress('Loading data')
  if not (options.changes or options.reviews or options.issues or
          options.changes_by_issue):
    options.changes = True
    options.issues = True
    options.reviews = True
  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes or options.changes_by_issue:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()
  logging.info('Looking up activity.....')
  try:
    if options.changes or options.changes_by_issue:
      my_activity.get_changes()
    if options.reviews:
      my_activity.get_reviews()
    if options.issues or options.changes_by_issue:
      my_activity.get_issues()
    if not options.no_referenced_issues:
      my_activity.get_referenced_issues()
  except auth.LoginRequiredError as e:
    logging.error('auth.LoginRequiredError: %s', e)
  my_activity.show_progress('\n')
  my_activity.print_access_errors()
  output_file = None
  try:
    if options.output:
      output_file = open(options.output, 'w')
      logging.info('Printing output to "%s"', options.output)
      sys.stdout = output_file
  except (IOError, OSError) as e:
    logging.error('Unable to write output: %s', e)
  else:
    if options.json:
      my_activity.dump_json()
    else:
      if options.changes:
        my_activity.print_changes()
      if options.reviews:
        my_activity.print_reviews()
      if options.issues:
        my_activity.print_issues()
      if options.changes_by_issue:
        my_activity.print_changes_by_issue(
            options.skip_own_issues_without_changes)
  finally:
    if output_file:
      logging.info('Done printing to file.')
      sys.stdout = sys.__stdout__
      output_file.close()
  return 0
if __name__ == '__main__':
  # Fix encoding to support non-ascii issue titles.
  fix_encoding.fix_encoding()
  # Exit with main()'s status; a Ctrl-C exits with status 1 and no traceback.
  try:
    sys.exit(main())
  except KeyboardInterrupt:
    sys.stderr.write('interrupted\n')
    sys.exit(1)
| {
"content_hash": "09bff1bfafa01d43f6bfe2be3a301e94",
"timestamp": "",
"source": "github",
"line_count": 1001,
"max_line_length": 80,
"avg_line_length": 34.33766233766234,
"alnum_prop": 0.6232398463865937,
"repo_name": "CoherentLabs/depot_tools",
"id": "7fb0f1cf62bc3297c08f823b7ecb282a030d833a",
"size": "34563",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "my_activity.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "27896"
},
{
"name": "PowerShell",
"bytes": "5337"
},
{
"name": "Python",
"bytes": "2549026"
},
{
"name": "Roff",
"bytes": "5283"
},
{
"name": "Shell",
"bytes": "64165"
}
],
"symlink_target": ""
} |
import contextlib
import threading
import time
import weakref
from dogpile.cache import api
from dogpile.cache import proxy
from dogpile.cache import region
from dogpile.cache import util as dogpile_util
from dogpile.core import nameregistry
from oslo_config import cfg
from oslo_log import log
from oslo_utils import importutils
from oslo_utils import reflection
from keystone import exception
from keystone.i18n import _
from keystone.i18n import _LI
from keystone.i18n import _LW
# Public API of this module.
__all__ = ('KeyValueStore', 'KeyValueStoreLock', 'LockTimeout',
           'get_key_value_store')
# Set to True once _register_backends() has run; guards against registering
# the dogpile backends twice.
BACKENDS_REGISTERED = False
CONF = cfg.CONF
# Weak-valued registry of KeyValueStore objects so regions can be shared
# without keeping unused stores alive.
KEY_VALUE_STORE_REGISTRY = weakref.WeakValueDictionary()
# Extra seconds of slack applied around lock timeouts.
LOCK_WINDOW = 1
LOG = log.getLogger(__name__)
# Sentinel dogpile returns for "no value stored for this key".
NO_VALUE = api.NO_VALUE
def _register_backends():
    """Register the configured dogpile backends exactly once.

    Deferred until configuration is loaded (CONF.kvs.backends must be
    available), and idempotent via the BACKENDS_REGISTERED module flag.
    Each backend is registered under the short name 'openstack.kvs.<Class>'.
    """
    global BACKENDS_REGISTERED
    if BACKENDS_REGISTERED:
        return
    name_template = 'openstack.kvs.%s'
    for backend_path in CONF.kvs.backends:
        module_name, class_name = backend_path.rsplit('.', 1)
        short_name = name_template % class_name
        LOG.debug(('Registering Dogpile Backend %(backend_path)s as '
                   '%(backend_name)s'),
                  {'backend_path': backend_path,
                   'backend_name': short_name})
        region.register_backend(short_name, module_name, class_name)
    BACKENDS_REGISTERED = True
def sha1_mangle_key(key):
    """Wrapper for dogpile's sha1_mangle_key.

    Taken from oslo_cache.core._sha1_mangle_key

    dogpile's sha1_mangle_key function expects an encoded string, so we
    should take steps to properly handle multiple inputs before passing
    the key through.
    """
    encoded = key
    try:
        encoded = key.encode('utf-8', errors='xmlcharrefreplace')
    except (UnicodeError, AttributeError):  # nosec
        # Keys that are not strings (or refuse to encode) are passed
        # through unchanged for dogpile to handle.
        pass
    return dogpile_util.sha1_mangle_key(encoded)
class LockTimeout(exception.UnexpectedError):
    """Raised when a KVS lock cannot be acquired before its timeout expires."""
    debug_message_format = _('Lock Timeout occurred for key, %(target)s')
class KeyValueStore(object):
"""Basic KVS manager object to support Keystone Key-Value-Store systems.
This manager also supports the concept of locking a given key resource to
allow for a guaranteed atomic transaction to the backend.
"""
    def __init__(self, kvs_region):
        """Wrap a dogpile.cache region; call configure() before use."""
        # Whether key-locking is enabled for this store (configure() may
        # disable it).
        self.locking = True
        # Seconds before an acquired lock expires; 0 until configure() sets it.
        self._lock_timeout = 0
        self._region = kvs_region
        self._security_strategy = None
        self._secret_key = None
        # Registry handing out one mutex per lock key, created lazily via
        # self._create_mutex.
        self._lock_registry = nameregistry.NameRegistry(self._create_mutex)
    def configure(self, backing_store, key_mangler=None, proxy_list=None,
                  locking=True, **region_config_args):
        """Configure the KeyValueStore instance.

        :param backing_store: dogpile.cache short name of the region backend
        :param key_mangler: key_mangler function
        :param proxy_list: list of proxy classes to apply to the region
        :param locking: boolean that allows disabling of locking mechanism for
                        this instantiation
        :param region_config_args: key-word args passed to the dogpile.cache
                                   backend for configuration
        :raises RuntimeError: if this store's region is already configured
        """
        if self.is_configured:
            # NOTE(morganfainberg): It is a bad idea to reconfigure a backend,
            # there are a lot of pitfalls and potential memory leaks that could
            # occur. By far the best approach is to re-create the KVS object
            # with the new configuration.
            raise RuntimeError(_('KVS region %s is already configured. '
                                 'Cannot reconfigure.') % self._region.name)
        self.locking = locking
        self._lock_timeout = region_config_args.pop(
            'lock_timeout', CONF.kvs.default_lock_timeout)
        # Order matters: the backend must exist before the key mangler and
        # proxies are installed on it.
        self._configure_region(backing_store, **region_config_args)
        self._set_key_mangler(key_mangler)
        self._apply_region_proxy(proxy_list)
@property
def is_configured(self):
return 'backend' in self._region.__dict__
def _apply_region_proxy(self, proxy_list):
if isinstance(proxy_list, list):
proxies = []
for item in proxy_list:
if isinstance(item, str):
LOG.debug('Importing class %s as KVS proxy.', item)
pxy = importutils.import_class(item)
else:
pxy = item
if issubclass(pxy, proxy.ProxyBackend):
proxies.append(pxy)
else:
pxy_cls_name = reflection.get_class_name(
pxy, fully_qualified=False)
LOG.warning(_LW('%s is not a dogpile.proxy.ProxyBackend'),
pxy_cls_name)
for proxy_cls in reversed(proxies):
proxy_cls_name = reflection.get_class_name(
proxy_cls, fully_qualified=False)
LOG.info(_LI('Adding proxy \'%(proxy)s\' to KVS %(name)s.'),
{'proxy': proxy_cls_name,
'name': self._region.name})
self._region.wrap(proxy_cls)
def _assert_configured(self):
if'backend' not in self._region.__dict__:
raise exception.UnexpectedError(_('Key Value Store not '
'configured: %s'),
self._region.name)
def _set_keymangler_on_backend(self, key_mangler):
try:
self._region.backend.key_mangler = key_mangler
except Exception as e:
# NOTE(morganfainberg): The setting of the key_mangler on the
# backend is used to allow the backend to
# calculate a hashed key value as needed. Not all backends
# require the ability to calculate hashed keys. If the
# backend does not support/require this feature log a
# debug line and move on otherwise raise the proper exception.
# Support of the feature is implied by the existence of the
# 'raw_no_expiry_keys' attribute.
if not hasattr(self._region.backend, 'raw_no_expiry_keys'):
LOG.debug(('Non-expiring keys not supported/required by '
'%(region)s backend; unable to set '
'key_mangler for backend: %(err)s'),
{'region': self._region.name, 'err': e})
else:
raise
def _set_key_mangler(self, key_mangler):
    """Select and install the key_mangler for this region.

    Selection order: the explicitly passed mangler (or the backend's own
    when the backend sets `use_backend_key_mangler`), then the default
    sha1_mangle_key. Mangling can be disabled entirely via
    CONF.kvs.enable_key_mangler.

    :raises keystone.exception.ValidationError: if the chosen mangler is
        not callable.
    """
    # Set the key_mangler that is appropriate for the given region being
    # configured here. The key_mangler function is called prior to storing
    # the value(s) in the backend. This is to help prevent collisions and
    # limit issues such as memcache's limited cache_key size.
    use_backend_key_mangler = getattr(self._region.backend,
                                      'use_backend_key_mangler', False)
    if ((key_mangler is None or use_backend_key_mangler) and
            (self._region.backend.key_mangler is not None)):
        # NOTE(morganfainberg): Use the configured key_mangler as a first
        # choice. Second choice would be the key_mangler defined by the
        # backend itself. Finally, fall back to the defaults. The one
        # exception is if the backend defines `use_backend_key_mangler`
        # as True, which indicates the backend's key_mangler should be
        # the first choice.
        key_mangler = self._region.backend.key_mangler

    if CONF.kvs.enable_key_mangler:
        if key_mangler is not None:
            msg = _LI('Using %(func)s as KVS region %(name)s key_mangler')
            if callable(key_mangler):
                self._region.key_mangler = key_mangler
                LOG.info(msg, {'func': key_mangler.__name__,
                               'name': self._region.name})
            else:
                # NOTE(morganfainberg): We failed to set the key_mangler,
                # we should error out here to ensure we aren't causing
                # key-length or collision issues.
                raise exception.ValidationError(
                    _('`key_mangler` option must be a function reference'))
        else:
            msg = _LI('Using default keystone.common.kvs.sha1_mangle_key '
                      'as KVS region %s key_mangler')
            LOG.info(msg, self._region.name)
            # NOTE(morganfainberg): Use 'default' keymangler to ensure
            # that unless explicitly changed, we mangle keys. This helps
            # to limit unintended cases of exceeding cache-key in backends
            # such as memcache.
            self._region.key_mangler = sha1_mangle_key
        self._set_keymangler_on_backend(self._region.key_mangler)
    else:
        LOG.info(_LI('KVS region %s key_mangler disabled.'),
                 self._region.name)
        self._set_keymangler_on_backend(None)
def _configure_region(self, backend, **config_args):
    """Configure the dogpile region from keyword arguments.

    Builds the flat ``<prefix>.*`` configuration dictionary expected by
    dogpile's ``configure_from_config`` and applies it to the region.

    :param backend: dotted path of the dogpile backend to use.
    :param config_args: backend arguments; ``distributed_lock`` defaults
        to True and ``lock_timeout`` is always overridden with this
        store's configured timeout.
    """
    prefix = CONF.kvs.config_prefix
    config_args.setdefault('distributed_lock', True)
    config_args['lock_timeout'] = self._lock_timeout
    # NOTE(morganfainberg): To mitigate race conditions on comparing
    # the timeout and current time on the lock mutex, a static 1-second
    # overlap (LOCK_WINDOW) is built in so the lock remains valid in the
    # backend slightly longer than the context manager considers it
    # held; memcache's store has only 1-second resolution. A
    # `lock_timeout` of 0 means locks never expire.
    if config_args['lock_timeout'] > 0:
        config_args['lock_timeout'] += LOCK_WINDOW
    conf_dict = {'%s.backend' % prefix: backend}
    for arg_name, arg_value in config_args.items():
        conf_dict['.'.join([prefix, 'arguments', arg_name])] = arg_value
    LOG.debug('KVS region configuration for %(name)s: %(config)r',
              {'name': self._region.name, 'config': conf_dict})
    self._region.configure_from_config(conf_dict, '%s.' % prefix)
def _mutex(self, key):
    # Return the (possibly shared) mutex registered for `key`; the
    # registry handles creating/reusing entries per key.
    return self._lock_registry.get(key)
def _create_mutex(self, key):
mutex = self._region.backend.get_mutex(key)
if mutex is not None:
return mutex
else:
return self._LockWrapper(lock_timeout=self._lock_timeout)
class _LockWrapper(object):
"""weakref-capable threading.Lock wrapper."""
def __init__(self, lock_timeout):
self.lock = threading.Lock()
self.lock_timeout = lock_timeout
def acquire(self, wait=True):
return self.lock.acquire(wait)
def release(self):
self.lock.release()
def get(self, key):
    """Return the value stored at ``key``.

    :raises keystone.exception.NotFound: if the key is absent.
    """
    self._assert_configured()
    stored = self._region.get(key)
    if stored is NO_VALUE:
        # dogpile's internal NO_VALUE sentinel must never leak to callers.
        raise exception.NotFound(target=key)
    return stored
def get_multi(self, keys):
    """Get multiple values in a single call from the KVS backend.

    :raises keystone.exception.NotFound: if any requested key is
        missing, mirroring the behavior of :meth:`get`.
    """
    self._assert_configured()
    values = self._region.get_multi(keys)
    # NOTE(morganfainberg): mask dogpile's internal NO_VALUE sentinel
    # from consumers of the KeyValueStore by turning any miss into
    # NotFound, exactly as .get() does.
    missing = [key for key, value in zip(keys, values)
               if value is NO_VALUE]
    if missing:
        raise exception.NotFound(target=missing)
    return values
def set(self, key, value, lock=None):
    """Set a single value in the KVS backend.

    :param lock: optional KeyValueStoreLock for `key`; validated by
        _action_with_lock before the write occurs.
    """
    self._assert_configured()
    with self._action_with_lock(key, lock):
        self._region.set(key, value)
def set_multi(self, mapping):
    """Set multiple key/value pairs in the KVS backend at once.

    Like delete_multi, this call does not serialize through the
    KeyValueStoreLock mechanism (locking cannot occur on more than one
    key in a given context without significant deadlock potential).

    :param mapping: dict of key -> value to store.
    """
    self._assert_configured()
    self._region.set_multi(mapping)
def delete(self, key, lock=None):
    """Delete a single key from the KVS backend.

    This method will raise NotFound if the key doesn't exist. The get and
    delete are done in a single transaction (via KeyValueStoreLock
    mechanism).
    """
    self._assert_configured()
    with self._action_with_lock(key, lock):
        # get() raises NotFound for an absent key, so deleting a missing
        # key surfaces as NotFound instead of a silent no-op.
        self.get(key)
        self._region.delete(key)
def delete_multi(self, keys):
    """Delete multiple keys from the KVS backend in a single call.

    Like set_multi, this call does not serialize through the
    KeyValueStoreLock mechanism (locking cannot occur on more than one
    key in a given context without significant deadlock potential).
    """
    self._assert_configured()
    # Unlike delete(), no existence check is performed for each key.
    self._region.delete_multi(keys)
def get_lock(self, key):
    """Get a write lock on the KVS value referenced by `key`.

    The ability to get a context manager to pass into the set/delete
    methods allows for a single-transaction to occur while guaranteeing the
    backing store will not change between the start of the 'lock' and the
    end. Lock timeout is fixed to the KeyValueStore configured lock
    timeout.

    :returns: a KeyValueStoreLock wrapping the registered mutex for `key`.
    """
    self._assert_configured()
    return KeyValueStoreLock(self._mutex(key), key, self.locking,
                             self._lock_timeout)
@contextlib.contextmanager
def _action_with_lock(self, key, lock=None):
    """Wrapper context manager.

    Validates and handles the lock and lock timeout if passed in.

    :param key: target key the surrounding set/delete acts on.
    :param lock: optional lock object; only a KeyValueStoreLock is
        honored, and it must be for the same key, currently held
        (active) and not expired.
    :raises ValueError: if the lock is for a different key.
    :raises keystone.exception.ValidationError: if the lock is not
        currently held.
    :raises LockTimeout: if the lock's timeout has elapsed.
    """
    if not isinstance(lock, KeyValueStoreLock):
        # NOTE(morganfainberg): Locking only matters if a lock is passed in
        # to this method. If lock isn't a KeyValueStoreLock, treat this as
        # if no locking needs to occur.
        yield
    else:
        if not lock.key == key:
            raise ValueError(_('Lock key must match target key: %(lock)s '
                               '!= %(target)s') %
                             {'lock': lock.key, 'target': key})
        if not lock.active:
            raise exception.ValidationError(_('Must be called within an '
                                              'active lock context.'))
        if not lock.expired:
            yield
        else:
            raise LockTimeout(target=key)
class KeyValueStoreLock(object):
    """Basic KeyValueStoreLock context manager.

    Hooks into the dogpile.cache backend mutex allowing for distributed
    locking on resources. This is only a write lock, and will not prevent
    reads from occurring.
    """

    def __init__(self, mutex, key, locking_enabled=True, lock_timeout=0):
        self.mutex = mutex
        self.key = key
        self.enabled = locking_enabled
        self.lock_timeout = lock_timeout
        # `active` flips to True on acquire() even when locking is
        # disabled, so _action_with_lock() validation still passes.
        self.active = False
        self.acquire_time = 0

    def acquire(self):
        if self.enabled:
            self.mutex.acquire()
            LOG.debug('KVS lock acquired for: %s', self.key)
        self.active = True
        self.acquire_time = time.time()
        return self

    __enter__ = acquire

    @property
    def expired(self):
        """True when the timeout (plus the LOCK_WINDOW overlap) has passed."""
        if not self.lock_timeout:
            # A timeout of 0 means the lock never expires.
            return False
        held_for = time.time() - self.acquire_time + LOCK_WINDOW
        return held_for > self.lock_timeout

    def release(self):
        if self.enabled:
            self.mutex.release()
            if self.expired:
                LOG.warning(_LW('KVS lock released (timeout reached) for: %s'),
                            self.key)
            else:
                LOG.debug('KVS lock released for: %s', self.key)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()
def get_key_value_store(name, kvs_region=None):
    """Retrieve key value store.

    Instantiate a new :class:`.KeyValueStore` or return a previous
    instantiation that has the same name.

    :param name: registry name for the store.
    :param kvs_region: optional pre-built dogpile region; a fresh one is
        created when omitted and no cached store exists.
    """
    global KEY_VALUE_STORE_REGISTRY
    _register_backends()
    try:
        return KEY_VALUE_STORE_REGISTRY[name]
    except KeyError:
        if kvs_region is None:
            kvs_region = region.make_region(name=name)
        store = KeyValueStore(kvs_region)
        KEY_VALUE_STORE_REGISTRY[name] = store
        return store
| {
"content_hash": "bea1500ecc776ab55e17c4babac47be9",
"timestamp": "",
"source": "github",
"line_count": 436,
"max_line_length": 79,
"avg_line_length": 40.4954128440367,
"alnum_prop": 0.5975305845038514,
"repo_name": "klmitch/keystone",
"id": "064825f81361ffce56e9b38ee00d150f7d2c0595",
"size": "18237",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keystone/common/kvs/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Python",
"bytes": "4405298"
}
],
"symlink_target": ""
} |
"""Toolset to run multiple Swarming tasks in parallel."""
import getpass
import json
import os
import subprocess
import sys
import tempfile
import time
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.insert(0, ROOT_DIR)
import auth
import isolateserver
from utils import threading_utils
from utils import tools
def task_to_name(name, dimensions, isolated_hash):
  """Returns a task name the same way swarming.py generates them.

  Arguments:
    name: base task name.
    dimensions: dict of dimension key -> value; encoded sorted by key so
        the generated name is deterministic.
    isolated_hash: hash of the .isolated file the task runs.
  """
  # Use .items() instead of the Python-2-only .iteritems() so this helper
  # also works under Python 3; sorted() normalizes the ordering either way.
  return '%s/%s/%s' % (
      name,
      '_'.join('%s=%s' % (k, v) for k, v in sorted(dimensions.items())),
      isolated_hash)
def capture(cmd):
  """Runs a swarming_client script and captures its stdout.

  Returns:
    tuple(returncode, stdout, duration in seconds).
  """
  assert all(isinstance(arg, basestring) for arg in cmd), cmd
  started = time.time()
  proc = subprocess.Popen(
      [sys.executable] + cmd, cwd=ROOT_DIR, stdout=subprocess.PIPE)
  stdout = proc.communicate()[0]
  return proc.returncode, stdout, time.time() - started
def trigger(swarming_server, isolate_server, task_name, isolated_hash, args):
  """Triggers a specified .isolated file.

  Returns:
    tuple(returncode, stdout, duration, task_id), where task_id is read
    back from the JSON file written by 'swarming.py trigger'.
  """
  handle, json_path = tempfile.mkstemp(prefix=u'swarming')
  os.close(handle)
  try:
    base_cmd = [
        'swarming.py', 'trigger',
        '--swarming', swarming_server,
        '--isolate-server', isolate_server,
        '--task-name', task_name,
        '--dump-json', json_path,
        isolated_hash,
    ]
    returncode, out, duration = capture(base_cmd + args)
    with open(json_path) as f:
      data = json.load(f)
    task_id = str(data['tasks'][task_name]['task_id'])
    return returncode, out, duration, task_id
  finally:
    # Always clean up the temporary JSON dump file.
    os.remove(json_path)
def collect(swarming_server, task_id):
  """Collects results of a swarming task."""
  return capture(
      ['swarming.py', 'collect', '--swarming', swarming_server, task_id])
class Runner(object):
  """Runners runs tasks in parallel on Swarming."""

  def __init__(
      self, swarming_server, isolate_server, add_task, progress,
      extra_trigger_args):
    # add_task is the thread pool's add_task; it is used to chain the
    # collect step after a successful trigger.
    self.swarming_server = swarming_server
    self.isolate_server = isolate_server
    self.add_task = add_task
    self.progress = progress
    self.extra_trigger_args = extra_trigger_args

  def trigger(self, task_name, isolated_hash, dimensions):
    """Triggers one task and, on success, queues its collection."""
    args = sum((['--dimension', k, v] for k, v in dimensions.iteritems()), [])
    returncode, stdout, duration, task_id = trigger(
        self.swarming_server,
        self.isolate_server,
        task_name,
        isolated_hash,
        self.extra_trigger_args + args)
    step_name = '%s (%3.2fs)' % (task_name, duration)
    if returncode:
      line = 'Failed to trigger %s\n%s' % (step_name, stdout)
      self.progress.update_item(line, index=1)
      return
    self.progress.update_item('Triggered %s' % step_name, index=1)
    # Queue the collect step on the same pool at priority 0.
    self.add_task(0, self.collect, task_name, task_id, dimensions)

  def collect(self, task_name, task_id, dimensions):
    """Waits for one task; returns its details only when it failed."""
    returncode, stdout, duration = collect(self.swarming_server, task_id)
    step_name = '%s (%3.2fs)' % (task_name, duration)
    if returncode:
      # Only print the output for failures, successes are unexciting.
      self.progress.update_item(
          'Failed %s:\n%s' % (step_name, stdout), index=1)
      return (task_name, dimensions, stdout)
    self.progress.update_item('Passed %s' % step_name, index=1)
def run_swarming_tasks_parallel(
    swarming_server, isolate_server, extra_trigger_args, tasks):
  """Triggers swarming tasks in parallel and gets results.

  This is done by using one thread per task and shelling out swarming.py.

  Arguments:
    extra_trigger_args: list of additional flags to pass down to
        'swarming.py trigger'
    tasks: list of tuple(task_name, isolated_hash, dimensions) where dimension
        are --dimension flags to provide when triggering the task.

  Yields:
    tuple(name, dimensions, stdout) for the tasks that failed.
  """
  runs = len(tasks)
  # Each task contributes two progress steps: trigger + collect.
  total = 2 * runs
  failed_tasks = []
  progress = threading_utils.Progress([('index', 0), ('size', total)])
  progress.use_cr_only = False
  start = time.time()
  with threading_utils.ThreadPoolWithProgress(
      progress, runs, runs, total) as pool:
    runner = Runner(
        swarming_server, isolate_server, pool.add_task, progress,
        extra_trigger_args)
    for task_name, isolated_hash, dimensions in tasks:
      pool.add_task(0, runner.trigger, task_name, isolated_hash, dimensions)
    # Runner.collect() only return task failures.
    for failed_task in pool.iter_results():
      task_name, dimensions, stdout = failed_task
      yield task_name, dimensions, stdout
      failed_tasks.append(task_name)
  duration = time.time() - start
  print('\nCompleted in %3.2fs' % duration)
  if failed_tasks:
    print('Detected the following failures:')
    for task in sorted(failed_tasks):
      print(' %s' % task)
class OptionParser(tools.OptionParserWithLogging):
  """Option parser with logging, auth and Swarming/Isolate server options."""

  def __init__(self, **kwargs):
    tools.OptionParserWithLogging.__init__(self, **kwargs)
    self.server_group = tools.optparse.OptionGroup(self, 'Server')
    self.server_group.add_option(
        '-S', '--swarming',
        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
        help='Swarming server to use')
    isolateserver.add_isolate_server_options(self.server_group)
    self.add_option_group(self.server_group)
    auth.add_auth_options(self)
    self.add_option(
        '-d', '--dimension', default=[], action='append', nargs=2,
        dest='dimensions', metavar='FOO bar',
        help='dimension to filter on')
    self.add_option(
        '--priority', type='int',
        help='The lower value, the more important the task is. It may be '
             'important to specify a higher priority since the default value '
             'will make the task to be triggered only when the bots are idle.')
    self.add_option(
        '--deadline', type='int', default=6*60*60,
        help='Seconds to allow the task to be pending for a bot to run before '
             'this task request expires.')

  def parse_args(self, *args, **kwargs):
    options, args = tools.OptionParserWithLogging.parse_args(
        self, *args, **kwargs)
    options.swarming = options.swarming.rstrip('/')
    if not options.swarming:
      self.error('--swarming is required.')
    auth.process_auth_options(self, options)
    isolateserver.process_isolate_server_options(self, options, False)
    # optparse yields a list of (key, value) pairs for nargs=2 options.
    options.dimensions = dict(options.dimensions)
    return options, args

  def format_description(self, _):
    # Description is pre-formatted; prevent optparse from re-wrapping it.
    return self.description
| {
"content_hash": "e8e5044cc55197cb91048ec38666ae33",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 79,
"avg_line_length": 34.63101604278075,
"alnum_prop": 0.6641445336627548,
"repo_name": "guorendong/iridium-browser-ubuntu",
"id": "3ada588f4447e173ae5d99bb14c1fd9a3de94622",
"size": "6654",
"binary": false,
"copies": "1",
"ref": "refs/heads/ubuntu/precise",
"path": "tools/swarming_client/tools/parallel_execution.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "8402"
},
{
"name": "Assembly",
"bytes": "256197"
},
{
"name": "Batchfile",
"bytes": "34966"
},
{
"name": "C",
"bytes": "15445429"
},
{
"name": "C++",
"bytes": "276628399"
},
{
"name": "CMake",
"bytes": "27829"
},
{
"name": "CSS",
"bytes": "867238"
},
{
"name": "Emacs Lisp",
"bytes": "3348"
},
{
"name": "Go",
"bytes": "13628"
},
{
"name": "Groff",
"bytes": "7777"
},
{
"name": "HTML",
"bytes": "20250399"
},
{
"name": "Java",
"bytes": "9950308"
},
{
"name": "JavaScript",
"bytes": "13873772"
},
{
"name": "LLVM",
"bytes": "1169"
},
{
"name": "Logos",
"bytes": "6893"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "Makefile",
"bytes": "179129"
},
{
"name": "Objective-C",
"bytes": "1871766"
},
{
"name": "Objective-C++",
"bytes": "9674498"
},
{
"name": "PHP",
"bytes": "42038"
},
{
"name": "PLpgSQL",
"bytes": "163248"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "474121"
},
{
"name": "Python",
"bytes": "11646662"
},
{
"name": "Ragel in Ruby Host",
"bytes": "104923"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "1151673"
},
{
"name": "Standard ML",
"bytes": "5034"
},
{
"name": "VimL",
"bytes": "4075"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
from Child import Child
from Node import Node # noqa: I201
# Syntax node definitions for Swift generics: where clauses, requirements,
# and generic parameter lists/clauses. Consumed by the gyb syntax-support
# code generators.
GENERIC_NODES = [
    # generic-where-clause -> 'where' requirement-list
    Node('GenericWhereClause', kind='Syntax',
         children=[
             Child('WhereKeyword', kind='WhereToken'),
             Child('RequirementList', kind='GenericRequirementList',
                   collection_element_name='Requirement'),
         ]),

    Node('GenericRequirementList', kind='SyntaxCollection',
         element='GenericRequirement',
         element_name='GenericRequirement'),

    # generic-requirement ->
    #     (same-type-requirement|conformance-requirement) ','?
    Node('GenericRequirement', kind='Syntax',
         traits=['WithTrailingComma'],
         children=[
             Child('Body', kind='Syntax',
                   node_choices=[
                       Child('SameTypeRequirement',
                             kind='SameTypeRequirement'),
                       Child('ConformanceRequirement',
                             kind='ConformanceRequirement'),
                   ]),
             Child('TrailingComma', kind='CommaToken',
                   is_optional=True),
         ]),

    # same-type-requirement -> type-identifier == type
    Node('SameTypeRequirement', kind='Syntax',
         children=[
             Child('LeftTypeIdentifier', kind='Type'),
             Child('EqualityToken', kind='Token',
                   token_choices=[
                       'SpacedBinaryOperatorToken',
                       'UnspacedBinaryOperatorToken',
                       'PrefixOperatorToken',
                       'PostfixOperatorToken',
                   ]),
             Child('RightTypeIdentifier', kind='Type'),
         ]),

    Node('GenericParameterList', kind='SyntaxCollection',
         element='GenericParameter'),

    # generic-parameter -> type-name
    #                    | type-name : type-identifier
    #                    | type-name : protocol-composition-type
    Node('GenericParameter', kind='Syntax',
         traits=['WithTrailingComma'],
         children=[
             Child('Attributes', kind='AttributeList',
                   collection_element_name='Attribute', is_optional=True),
             Child('Name', kind='IdentifierToken'),
             Child('Colon', kind='ColonToken',
                   is_optional=True),
             Child('InheritedType', kind='Type',
                   is_optional=True),
             Child('TrailingComma', kind='CommaToken',
                   is_optional=True),
         ]),

    # generic-parameter-clause -> '<' generic-parameter-list '>'
    Node('GenericParameterClause', kind='Syntax',
         children=[
             Child('LeftAngleBracket', kind='LeftAngleToken'),
             Child('GenericParameterList', kind='GenericParameterList',
                   collection_element_name='GenericParameter'),
             Child('RightAngleBracket', kind='RightAngleToken'),
         ]),

    # conformance-requirement -> type-identifier : type-identifier
    Node('ConformanceRequirement', kind='Syntax',
         children=[
             Child('LeftTypeIdentifier', kind='Type'),
             Child('Colon', kind='ColonToken'),
             Child('RightTypeIdentifier', kind='Type'),
         ]),
]
| {
"content_hash": "b514cf121d0300143eb0bd8d19163b5c",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 74,
"avg_line_length": 39.34939759036145,
"alnum_prop": 0.5447030006123699,
"repo_name": "harlanhaskins/swift",
"id": "57ab1c5e67c4ca1adfa523789058d67d3f5c29cf",
"size": "3266",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "utils/gyb_syntax_support/GenericNodes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13516"
},
{
"name": "C",
"bytes": "259045"
},
{
"name": "C++",
"bytes": "37905780"
},
{
"name": "CMake",
"bytes": "587211"
},
{
"name": "D",
"bytes": "1107"
},
{
"name": "DTrace",
"bytes": "2593"
},
{
"name": "Emacs Lisp",
"bytes": "57295"
},
{
"name": "LLVM",
"bytes": "70652"
},
{
"name": "MATLAB",
"bytes": "2576"
},
{
"name": "Makefile",
"bytes": "1841"
},
{
"name": "Objective-C",
"bytes": "447802"
},
{
"name": "Objective-C++",
"bytes": "251142"
},
{
"name": "Python",
"bytes": "1727772"
},
{
"name": "Roff",
"bytes": "3495"
},
{
"name": "Ruby",
"bytes": "2117"
},
{
"name": "Shell",
"bytes": "176578"
},
{
"name": "Swift",
"bytes": "34255988"
},
{
"name": "Vim script",
"bytes": "19683"
},
{
"name": "sed",
"bytes": "1050"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
    """Send the start notification for every survey that still needs one.

    Eligible surveys are active, have notifications enabled, and have not
    yet had their start notification sent. Runs every 30 minutes.
    """
    criteria = (Survey.is_active,
                ~Survey.start_notification_sent,
                Survey.notifications_enabled)
    for pending_survey in Survey.find_all(*criteria):
        pending_survey.send_start_notification()
    db.session.commit()
| {
"content_hash": "8121b0fc7a2f8873a5215f48594c148b",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 117,
"avg_line_length": 36.86666666666667,
"alnum_prop": 0.7721518987341772,
"repo_name": "mic4ael/indico",
"id": "47ed535a1e40e955a0bc7c499d24d67709994517",
"size": "767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indico/modules/events/surveys/tasks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "553825"
},
{
"name": "HTML",
"bytes": "1375160"
},
{
"name": "JavaScript",
"bytes": "1852830"
},
{
"name": "Mako",
"bytes": "1340"
},
{
"name": "Python",
"bytes": "4612709"
},
{
"name": "Shell",
"bytes": "2665"
},
{
"name": "TeX",
"bytes": "23292"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
} |
"""Checks WebKit style for text files."""
from blinkpy.style.checkers.common import TabChecker
class TextChecker(object):
    """Processes text lines for checking style."""

    def __init__(self, file_path, handle_style_error):
        self.handle_style_error = handle_style_error
        self.file_path = file_path
        # All actual checking is delegated to the shared TabChecker.
        self._tab_checker = TabChecker(file_path, handle_style_error)

    def check(self, lines):
        """Run the tab check over the given lines."""
        self._tab_checker.check(lines)
# FIXME: Remove this function (requires refactoring unit tests).
def process_file_data(filename, lines, error):
    """Check text lines for style; thin wrapper kept for unit tests."""
    TextChecker(filename, error).check(lines)
| {
"content_hash": "376b6843a0345faf9519392d4c1d40e7",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 69,
"avg_line_length": 30.476190476190474,
"alnum_prop": 0.696875,
"repo_name": "chromium/chromium",
"id": "bc995b9040c3115e185955bd0e32afb21cb15c7b",
"size": "2228",
"binary": false,
"copies": "9",
"ref": "refs/heads/main",
"path": "third_party/blink/tools/blinkpy/style/checkers/text.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from django.conf.urls import patterns, url
from django.views.generic import View
urlpatterns = patterns(
    '',
    # Catch-all route mapping every URL to a bare class-based View;
    # used only as a minimal URLconf fixture for the test suite.
    url('', View.as_view()),
)
| {
"content_hash": "bbb979c3e1c32daba2c29ccb5e7107d3",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 20.714285714285715,
"alnum_prop": 0.6827586206896552,
"repo_name": "dheeraj1991/django-after-response",
"id": "bb248abe7d8da0b3de57447f9bf6da7190d81b4e",
"size": "146",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test_urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4478"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'urleater.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    # NOTE(review): this pattern is unanchored (no ^/$), so it matches any
    # URL containing 'api/data' — confirm that is intentional.
    url(r'api/data', 'app.views.data'),
    url(r'^$', 'app.views.index'),
    # Per-customer page addressed by slug.
    url(r'^customers/(?P<slug>[-\w]+)/$', 'app.views.customer_page'),
    url(r'^generate_csv$', 'app.views.get_csv'),
    url(r'^export_zabbix$', 'app.views.export_zabbix'),
    url(r'^admin/', include(admin.site.urls)),
)
| {
"content_hash": "78f2334f5f2c7c89fe4a24b97ef9bfdd",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 69,
"avg_line_length": 34.25,
"alnum_prop": 0.6186131386861314,
"repo_name": "renanvicente/urleater-server",
"id": "97787ecc08ad807abe56e75ff3e8970f554bfc2f",
"size": "548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "urleater/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8727"
}
],
"symlink_target": ""
} |
'''
Database installation script.
It uses the configuration settings from $COG_CONFIG_DIR/cog_settings.cfg
'''
import os
import cog
from cog.constants import SECTION_EMAIL
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from cog.models import Project
from cog.models import UserProfile
from cog.models import PeerSite
from cog.views.views_project import initProject
from cog.installation.constants import (DEFAULT_PROJECT_SHORT_NAME, ESGF_ROOTADMIN_PASSWORD_FILE,
DEFAULT_ROOTADMIN_PWD, ROOTADMIN_USERNAME, ROOTADMIN_GROUP,
ROOTADMIN_ROLE)
from cog.services.registration.registration_impl import esgfRegistrationServiceImpl
from cog.plugins.esgf.security import esgfDatabaseManager
from django_openid_auth.models import UserOpenID
from django.core.exceptions import ObjectDoesNotExist
from django.core import management
import sqlalchemy
import datetime
import logging
from cog.site_manager import SiteManager
class CoGInstall(object):
    '''
    Class that initializes, populates and upgrades the CoG database.
    '''

    def __init__(self):
        # read cog_settings.cfg
        self.siteManager = SiteManager()

    def install(self):
        '''Driver method.'''
        self._upgradeCog()
        self._createObjects()

    def _upgradeCog(self):
        '''Method to run the necessary Django management commands to upgrade the CoG installation.'''

        #cogpath = os.path.dirname(cog.__file__)

        # create database if not existing already
        dbtype = self.siteManager.get('DJANGO_DATABASE')
        if dbtype=='sqllite3':
            pass # database will be created automatically
        elif dbtype=='postgres':
            self._createPostgresDB()
        else:
            raise Exception("Unknow database type: %s" % dbtype)

        # django management commands
        #management.call_command("syncdb", interactive=False)
        #management.call_command("migrate","--fake-initial")
        management.call_command("migrate", interactive=False)
        management.call_command("collectstatic", interactive=False, verbosity=0)

        # custom management commands
        management.call_command("init_site")
        management.call_command("sync_sites") # updates list of CoG peers from sites.xml

    def _createPostgresDB(self):
        '''Method to create the Postgres database if not existing already.'''

        dbname = self.siteManager.get('DATABASE_NAME')
        dbuser = self.siteManager.get('DATABASE_USER')
        dbpassword = self.siteManager.get('DATABASE_PASSWORD')
        dbhost = self.siteManager.get('DATABASE_HOST')
        dbport = self.siteManager.get('DATABASE_PORT')
        # connect to the administrative 'postgres' database to issue
        # CREATE DATABASE (cannot create a DB from within itself)
        dburl = 'postgresql://%s:%s@%s:%s/postgres' % (dbuser, dbpassword, dbhost, dbport)

        # connect to the 'postgres' database
        engine = sqlalchemy.create_engine(dburl)
        conn = engine.connect()

        # must end current transaction before creating a new database
        conn.execute("commit")

        # create new database if not existing already
        try:
            conn.execute("create database %s with owner=%s" % (dbname, dbuser))
            logging.info("Postgres database: %s created" % dbname)
        except sqlalchemy.exc.ProgrammingError as e:
            # ProgrammingError here is treated as "database already exists"
            logging.warn(e)
            logging.info("Postgres database: %s already exists" % dbname)

        conn.close()

    def _createObjects(self):
        '''Method to populate the database with some initial objects.'''

        # Site: reuse default site 'example.com'
        site = Site.objects.get(pk=1)
        logging.info("Updating site: %s" % site)
        site.name = self.siteManager.get('SITE_NAME')
        site.domain = self.siteManager.get('SITE_DOMAIN')
        site.save()

        # Test project
        #if not Project.objects.filter(short_name=DEFAULT_PROJECT_SHORT_NAME).exists():
        if Project.objects.count() == 0:
            logging.info("Creating project: %s" % DEFAULT_PROJECT_SHORT_NAME)
            project = Project.objects.create(short_name=DEFAULT_PROJECT_SHORT_NAME,
                                             long_name='Test Project',
                                             description='This is a test project',
                                             site=site, active=True)
            initProject(project)
            project.save()

        # create Administrator user - one time only
        if User.objects.count() == 0:

            # create User object
            logging.info("Creating admin user")
            user = User(first_name='Admin', last_name='User',
                        username=ROOTADMIN_USERNAME,
                        email=self.siteManager.get('EMAIL_SENDER', section=SECTION_EMAIL),
                        is_staff=True, is_superuser=True)
            if settings.ESGF_CONFIG:
                password = self._getRootAdminPassword()
            else:
                password = DEFAULT_ROOTADMIN_PWD
            user.set_password(password)
            user.save()

            # create UserProfile object
            userp = UserProfile(user=user, institution='Institution', city='City', state='State', country='Country',
                                site=site, last_password_update=datetime.datetime.now())
            userp.clearTextPwd=password # needed by esgfDatabaseManager, NOT saved as clear text in any database
            userp.save()

            # ESGF database setup
            if settings.ESGF_CONFIG:

                # create rootAdmin openid: https://<ESGF_HOSTNAME>/esgf-idp/openid/rootAdmin
                openid = esgfDatabaseManager.buildOpenid(ROOTADMIN_USERNAME)
                UserOpenID.objects.create(user=user, claimed_id=openid, display_id=openid)
                logging.info("Created openid:%s for CoG administrator: %s" % (openid, user.username) )

                # insert rootAdmin user in ESGF database
                logging.info("Inserting CoG administrator: %s in ESGF database" % user.username)
                esgfDatabaseManager.insertEsgfUser(user.profile)
                esgfRegistrationServiceImpl.subscribe(openid, ROOTADMIN_GROUP, ROOTADMIN_ROLE)
                esgfRegistrationServiceImpl.process(openid, ROOTADMIN_GROUP, ROOTADMIN_ROLE, True)

                # must create and enable 'esgf.idp.peer" as federated CoG peer
                if settings.IDP_REDIRECT is not None and settings.IDP_REDIRECT.strip() != '':
                    idpHostname = settings.IDP_REDIRECT.lower().replace('http://','').replace('https://','')
                    try:
                        idpPeerSite = PeerSite.objects.get(site__domain=idpHostname)
                        idpPeerSite.enabled=True
                        idpPeerSite.save()
                    except ObjectDoesNotExist:
                        site = Site.objects.create(name=idpHostname, domain=idpHostname)
                        idpPeerSite = PeerSite.objects.create(site=site, enabled=True)
                        print '\tCreated IdP Peer site: %s with enabled=%s' % (idpPeerSite, idpPeerSite.enabled)

    def _getRootAdminPassword(self):
        '''Tries to read the rootAdmin password from the ESGF standard location '/esg/config/.esgf_pass',
        if not found it uses 'changeit'.'''

        # /esg/config/.esgf_pass
        try:
            with open(ESGF_ROOTADMIN_PASSWORD_FILE, 'r') as f:
                password = f.read().strip()
                logging.info("Read ESGF administrator password from file: %s" % ESGF_ROOTADMIN_PASSWORD_FILE)
                return password
        except IOError:
            # file not found
            logging.warn("ESGF administrator password file: %s could not found or could not be read" % ESGF_ROOTADMIN_PASSWORD_FILE)
            logging.warn("Using standard administrator password, please change it right away.")
            return DEFAULT_ROOTADMIN_PWD
if __name__ == '__main__':
    # Configure basic console logging, then run the full install sequence.
    logging.basicConfig(level=logging.INFO)
    installer = CoGInstall()
    installer.install()
| {
"content_hash": "fc49fbdeae8a54c0d9162fe51fd308f2",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 133,
"avg_line_length": 42.676923076923075,
"alnum_prop": 0.6092285508291276,
"repo_name": "EarthSystemCoG/COG",
"id": "0fef6e910008fabc266330d36b9e40f167dbb1f5",
"size": "8322",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cog/installation/install.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "CSS",
"bytes": "893678"
},
{
"name": "Classic ASP",
"bytes": "48011"
},
{
"name": "HTML",
"bytes": "96546078"
},
{
"name": "Java",
"bytes": "483882"
},
{
"name": "JavaScript",
"bytes": "13287152"
},
{
"name": "MATLAB",
"bytes": "30087"
},
{
"name": "PHP",
"bytes": "80287"
},
{
"name": "Python",
"bytes": "852780"
},
{
"name": "Rich Text Format",
"bytes": "6112"
},
{
"name": "Shell",
"bytes": "10602"
}
],
"symlink_target": ""
} |
from setuptools import find_packages, setup

# Third-party packages required at install time (none at the moment).
REQUIRED_PACKAGES = []

setup(
    name='fashionmodel',
    version='0.1',
    description='Image model in Cloud ML Engine',
    author='Google',
    author_email='training-feedback@cloud.google.com',
    packages=find_packages(),
    include_package_data=True,
    install_requires=REQUIRED_PACKAGES,
    requires=[],
)
| {
"content_hash": "31cf8de258953983136e63a082e35b32",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 56,
"avg_line_length": 23.058823529411764,
"alnum_prop": 0.6964285714285714,
"repo_name": "turbomanage/training-data-analyst",
"id": "3cc5bb4e4157a27669ee248e6490977bcda43bc4",
"size": "989",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bootcamps/imagereco/fashionmodel/setup.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "19768"
},
{
"name": "C++",
"bytes": "30926"
},
{
"name": "CSS",
"bytes": "13208"
},
{
"name": "Dockerfile",
"bytes": "35682"
},
{
"name": "HTML",
"bytes": "2069111"
},
{
"name": "Java",
"bytes": "1539437"
},
{
"name": "JavaScript",
"bytes": "2540305"
},
{
"name": "Jsonnet",
"bytes": "5696"
},
{
"name": "Jupyter Notebook",
"bytes": "61371931"
},
{
"name": "Makefile",
"bytes": "4118"
},
{
"name": "PLpgSQL",
"bytes": "5868"
},
{
"name": "PigLatin",
"bytes": "393"
},
{
"name": "Python",
"bytes": "9553863"
},
{
"name": "R",
"bytes": "68"
},
{
"name": "Shell",
"bytes": "390786"
},
{
"name": "TSQL",
"bytes": "34160"
}
],
"symlink_target": ""
} |
from typing import Callable
from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY
from azure.ai.ml.entities._builders import Command
from azure.ai.ml.entities._job.pipeline._component_translatable import ComponentTranslatableMixin
# pylint: disable=unused-argument
def to_component(*, job: ComponentTranslatableMixin, **kwargs) -> Callable[..., Command]:
    """Translate a job object into a component function.

    The provided job must be translatable to a component. For example:

    .. code-block:: python

        # Load a local command job to a component function.
        my_job = load_job("my_job.yaml")
        component_func = dsl.to_component(my_job)

        # Load a remote command job component to a component function.
        my_job = ml_client.jobs.get("my_job")
        component_func = dsl.to_component(my_job)

        # Consuming the component func
        component = component_func(param1=xxx, param2=xxx)

    :param job: Job load from local or remote.
    :type job: ~azure.ai.ml.entities._job.pipeline._component_translatable.ComponentTranslatableMixin
    :param kwargs: A dictionary of additional configuration parameters.
    :type kwargs: dict
    :return: Wrapped function call.
    :rtype: typing.Callable[..., ~azure.ai.ml.entities._builders.command.Command]
    """
    from pathlib import Path

    # Default the base path to "./": if the code path is relative and the
    # base path is None, resolving the arm id of the Code asset fails.
    default_context = {BASE_PATH_CONTEXT_KEY: Path("./")}
    return job._to_component(context=default_context)
| {
"content_hash": "3806dad5dbacd33274814424d2e884c8",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 119,
"avg_line_length": 40.23076923076923,
"alnum_prop": 0.7055449330783938,
"repo_name": "Azure/azure-sdk-for-python",
"id": "b111ace632ef1537d88ccffca71fa190e69cfcef",
"size": "1786",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_load_import.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
__author__ = 'chengbin.wang'
from django.conf.urls import patterns, url
from .views import ProductList, ProductDetail
# URL routes for the product ("work") app: the list page at the app root and
# a detail page addressed by primary key (captured into the ``pk`` kwarg,
# which Django's DetailView uses to look up the object).
urlpatterns = patterns('',
    url(r'^$', ProductList.as_view(), name="work"),
    url(r'^detail/(?P<pk>\S+)/$', ProductDetail.as_view(), name="work_detail"),
)
| {
"content_hash": "69ccb1a4a3d92f0927a6410ba516e0e6",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 79,
"avg_line_length": 28.2,
"alnum_prop": 0.6631205673758865,
"repo_name": "bin112/GreenShoots-Studio",
"id": "b462458215b1eaf4560e110488792ec869c5917c",
"size": "421",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "work/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "379077"
},
{
"name": "HTML",
"bytes": "179122"
},
{
"name": "JavaScript",
"bytes": "1514937"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "37632"
},
{
"name": "XML",
"bytes": "6794"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Images, Document
# Register the app's models so they are manageable through the Django admin.
admin.site.register(Images)
admin.site.register(Document)
"content_hash": "461848dd9e2a65d41f8688cb5374077c",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 36,
"avg_line_length": 21.5,
"alnum_prop": 0.8217054263565892,
"repo_name": "calmhandtitan/Smush-It",
"id": "ba0eba37f9e6b6a08ce40019d94d57ca6940e5d2",
"size": "129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smush/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3321"
},
{
"name": "Python",
"bytes": "17012"
}
],
"symlink_target": ""
} |
import os
import sys
import sphinx
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# Read the Docs sets the READTHEDOCS environment variable in its builders;
# use it to pick an appropriate theme below.
on_rtd = os.getenv('READTHEDOCS') == 'True'
if on_rtd:
    html_theme = 'default'
#
# class QtWidgets(object):
#
# class QWidget(object):
# pass
#
# class QAbstractButton(object):
# pass
#
# class QToolButton(object):
# pass
#
# class QDialog(object):
# exec_ = None
#
# class QTableWidget(object):
# pass
#
# class QTreeWidget(object):
# pass
#
# class QMainWindow(object):
# pass
#
# class QMenu(object):
# pass
#
# class QLineEdit(object):
# Normal = 0
#
# class QDoubleSpinBox(object):
# pass
#
# class QSpinBox(object):
# pass
#
# class QApplication(object):
# exec_ = None
#
# class QtCore(object):
#
# __version__ = ''
# __version_info__ = tuple()
#
# class Qt(object):
# KeepAspectRatio = 1
#
# class QThread(object):
# pass
#
# class QObject(object):
# pass
#
# @staticmethod
# def pyqtSignal(*args, **kwargs):
# pass
#
# @staticmethod
# def pyqtSlot(*args, **kwargs):
# pass
#
# @staticmethod
# def Signal(*args, **kwargs):
# pass
#
# @staticmethod
# def Slot(*args, **kwargs):
# pass
#
# class QtGui(object):
#
# class QValidator(object):
# pass
#
# class QtSvg(object):
# pass
#
# class PySide6(object):
# __version__ = ''
# __version_info__ = tuple()
#
# sys.modules['PySide6'] = PySide6
# sys.modules['PySide6.QtGui'] = QtGui
# sys.modules['PySide6.QtCore'] = QtCore
# sys.modules['PySide6.QtWidgets'] = QtWidgets
# sys.modules['PySide6.QtSvg'] = QtSvg
else:
    # Local (non-RTD) builds: use the Read the Docs theme and import the
    # package so the project metadata below can be read from it.
    html_theme = 'sphinx_rtd_theme'
    from msl import qt
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
]
# autodoc options
# Sphinx 1.8 replaced autodoc_default_flags with autodoc_default_options,
# so choose the spelling that matches the installed Sphinx version.
if sphinx.version_info[:2] < (1, 8):
    # 'alphabetical', 'bysource', 'groupwise'
    autodoc_member_order = 'bysource'
    # 'members', 'undoc-members', 'private-members', 'special-members', 'inherited-members', 'show-inheritance'
    autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
else:
    autodoc_default_options = {
        'members': None,
        'member-order': 'bysource',
        'undoc-members': None,
        'show-inheritance': None,
    }
# Generate autodoc stubs with summaries from code
autosummary_generate = True
# include both class docstring and __init__
autoclass_content = 'both'
# Napoleon settings (numpydoc-style docstrings, not Google-style)
napoleon_google_docstring = False
napoleon_numpy_docstring = True
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'MSL-Qt'
# Slice drops the first character of the package's copyright string
# (presumably a leading marker character -- confirm against msl.qt).
copyright = qt.__copyright__[1:]
author = qt.__author__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = qt.__version__
# The full version, including alpha/beta/rc tags.
release = qt.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, to-do and to-doList produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'MSL-Qtdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    'papersize': 'a4paper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    'pointsize': '11pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'MSL-Qt.tex', 'MSL-Qt Documentation',
     'Measurement Standards Laboratory of New Zealand', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'MSL-Qt', 'MSL-Qt Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'MSL-Qt', 'MSL-Qt Documentation',
     author, 'MSL-Qt', 'MSL-Qt',
     'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
    # Empty URI: resolve PySide6 cross-references from the local inventory
    # file only (the online alternatives are kept below, commented out).
    'PySide6': ('', 'PySide6-aliases.inv'),
    # 'PySide6': ('https://doc.qt.io/qtforpython/', None),
    # 'PyQt6': ('https://www.riverbankcomputing.com/static/Docs/PyQt6/', None),
    # 'PyQt6': ('', 'PyQt6-modified-objects.inv'),
}
# show all the Qt linking warnings
nitpicky = True
# known bad links
nitpick_ignore = [
    ('py:class', '_ctypes.Structure'),
]
| {
"content_hash": "429f35f71698618883c64e35d3c50ca0",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 111,
"avg_line_length": 27.194174757281555,
"alnum_prop": 0.6152564560276091,
"repo_name": "MSLNZ/msl-qt",
"id": "c057db676abe51317ab53dc5b68697bb56ca7278",
"size": "8403",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "268127"
}
],
"symlink_target": ""
} |
import os
import tempfile
import requirements
def format_toml(data):
    """Pretty-format a TOML string by inserting a blank line before every
    section header ('[...]') other than one on the very first line."""
    lines = data.split('\n')
    formatted = []
    for index, line in enumerate(lines):
        if index and line.startswith('['):
            formatted.append('\n{0}'.format(line))
        else:
            formatted.append(line)
    return '\n'.join(formatted)
def multi_split(s, split):
    """Split *s* on every character in *split*, discarding empty pieces."""
    # Normalize all separators to '|' first, then split once.
    for separator in split:
        s = s.replace(separator, '|')
    return [piece for piece in s.split('|') if piece]
def convert_deps_from_pip(dep):
    """Convert a pip-formatted dependency string into a Pipfile-formatted dict."""
    translated = {}
    parsed = [r for r in requirements.parse(dep)]
    req = parsed[0]

    if req.specs:
        # Comparison operators: e.g. Django>1.10 -> {'Django': '>1.10'}.
        pieces = multi_split(dep, '=<>')
        translated[req.name] = dep[len(pieces[0]):]
    elif req.extras:
        # Extras: e.g. requests[socks].
        translated[req.name] = {'extras': req.extras}
    elif req.vcs:
        # VCS installs: crop off the 'git+' (etc.) scheme prefix from the URI.
        translated[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}

        # Add --editable, if it's there.
        if req.editable:
            translated[req.name]['editable'] = True

        # Add the revision specifier, if it was provided.
        if req.revision:
            translated[req.name]['ref'] = req.revision
    else:
        # Bare dependencies: e.g. requests.
        translated[dep] = '*'

    return translated
def convert_deps_to_pip(deps, r=True):
    """Converts Pipfile-formatted dependencies to pip-formatted ones.

    :param deps: mapping of package name to Pipfile specifier -- either a
        version string (e.g. ``'>1.10'`` or ``'*'``) or a dict of options
        (``version``, ``extras``, ``hash``, ``editable``, a VCS key, ``ref``).
    :param r: when True (the default), write the converted requirements to a
        temporary requirements file and return its path; when False, return
        the list of requirement strings directly.
    """
    dependencies = []

    for name, spec in deps.items():
        # Trailing specifier appended after the package name
        # (e.g. '>1.10', '[socks]', ' --hash=...').
        extra = '' if isinstance(spec, dict) else spec
        version = ''

        # '*' means "any version": no specifier at all.
        if extra == '*':
            extra = ''

        if 'hash' in spec:
            extra = ' --hash={0}'.format(spec['hash'])

        # Support for extras (e.g. requests[socks]).
        if 'extras' in spec:
            extra = '[{0}]'.format(spec['extras'][0])

        if 'version' in spec:
            version = spec['version']

        # Support for version control requirements.
        maybe_vcs = [vcs for vcs in ('git', 'svn', 'hg', 'bzr') if vcs in spec]
        vcs = maybe_vcs[0] if maybe_vcs else None

        if vcs:
            extra = '{0}+{1}'.format(vcs, spec[vcs])

            # Support for @refs.
            if 'ref' in spec:
                extra += '@{0}'.format(spec['ref'])

            # For VCS requirements the package name travels in the #egg
            # fragment, so it must not also prefix the line.
            extra += '#egg={0}'.format(name)
            name = ''

        # Support for editable installs.
        prefix = '-e ' if 'editable' in spec else ''

        # BUG FIX: the package name used to be dropped for all non-VCS
        # requirements, producing lines like '>1.10' instead of 'Django>1.10'
        # (and '' for bare '*' deps). Include the name explicitly.
        dependencies.append('{0}{1}{2}{3}'.format(prefix, name, version, extra))

    if not r:
        return dependencies

    # Write requirements.txt to the tmp directory and return its path.
    # delete=False (and an explicit close, so the content is flushed and the
    # file is reopenable by name on all platforms) lets the caller read it.
    f = tempfile.NamedTemporaryFile(suffix='-requirements.txt', delete=False)
    try:
        f.write('\n'.join(dependencies).encode('utf-8'))
    finally:
        f.close()
    return f.name
def mkdir_p(newdir):
    """works the way a good mkdir should :)

    - already exists, silently complete
    - regular file in the way, raise an exception
    - parent directory(ies) does not exist, make them as well

    From: http://code.activestate.com/recipes/82465-a-friendly-mkdir/
    """
    if os.path.isdir(newdir):
        # Already there: nothing to do.
        return
    if os.path.isfile(newdir):
        raise OSError("a file with the same name as the desired dir, '{0}', already exists.".format(newdir))
    # Create any missing ancestors first, then the leaf directory itself.
    parent, leaf = os.path.split(newdir)
    if parent and not os.path.isdir(parent):
        mkdir_p(parent)
    if leaf:
        os.mkdir(newdir)
def is_required_version(version, specified_version):
    """Check whether *version* satisfies a hard '==' pin given in the
    Pipfile; any other kind of specifier is treated as satisfied."""
    # Certain packages may be defined with multiple values (a dict of
    # options); the pin then lives under the 'version' key.
    if isinstance(specified_version, dict):
        specified_version = specified_version.get('version', '')
    if not specified_version.startswith('=='):
        return True
    pinned = specified_version.split('==')[1].strip()
    return version.strip() == pinned
| {
"content_hash": "be0120b819af143c02b4ca2d22617200",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 108,
"avg_line_length": 28.910344827586208,
"alnum_prop": 0.5593988549618321,
"repo_name": "adrianliaw/pipenv",
"id": "8c50a1077d6af33c93639609fc9e4cf27208d101",
"size": "4216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pipenv/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "135"
},
{
"name": "Python",
"bytes": "51231"
}
],
"symlink_target": ""
} |
def main():
import os
import argparse
from qap.script_utils import csv_to_pandas_df, read_txt_file, \
qap_csv_correlations
parser = argparse.ArgumentParser()
parser.add_argument("old_csv", type=str, \
help="path to the QAP CSV output file from a " \
"previous run or version")
parser.add_argument("new_csv", type=str, \
help="path to the QAP CSV output file from the " \
"current run or version")
parser.add_argument("--replacements", type=str, \
help="text file containing column name pairs " \
"to be updated/renamed in the old CSV - " \
"ex. 'subject,Participant' on one line to " \
"rename the 'subject' column to " \
"'Participant'")
args = parser.parse_args()
# run it!
old_data = csv_to_pandas_df(args.old_csv)
new_data = csv_to_pandas_df(args.new_csv)
if args.replacements:
replacements = read_txt_file(args.replacements)
else:
replacements = None
correlations_dict = qap_csv_correlations(old_data, new_data, replacements)
print "\nQAP Results Correlations (Pearson's r)"
print "Previous dataset: %s" % os.path.abspath(args.old_csv)
print "Current dataset: %s\n" % os.path.abspath(args.new_csv)
for key in sorted(correlations_dict.keys()):
print "%s: %f" % (key, correlations_dict[key][0])
print "\n"
if __name__ == "__main__":
main()
| {
"content_hash": "05557e48f550bf079e63512a64f0eeec",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 78,
"avg_line_length": 34.816326530612244,
"alnum_prop": 0.5216881594372802,
"repo_name": "preprocessed-connectomes-project/quality-assessment-protocol",
"id": "7e8f234e7f35b532c322fd9b401031659f613b3e",
"size": "1729",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/qap_test_correlations.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "14051"
},
{
"name": "OpenEdge ABL",
"bytes": "3211681"
},
{
"name": "Python",
"bytes": "429408"
},
{
"name": "R",
"bytes": "7072"
},
{
"name": "Ruby",
"bytes": "1699"
},
{
"name": "Shell",
"bytes": "4639"
}
],
"symlink_target": ""
} |
from django.forms import ModelForm
from django.core.exceptions import ValidationError
from django import forms
from apps.titulos.models import TituloNacional, EstadoTituloNacional, NormativaNacional, EstadoNormativaNacional
class TituloNacionalForm(forms.ModelForm):
nombre = forms.CharField(max_length=255, required=True)
normativa_nacional = forms.ModelChoiceField(queryset=NormativaNacional.objects.filter(estado__nombre=EstadoNormativaNacional.VIGENTE).order_by('numero'), label='Normativa')
observaciones = forms.CharField(widget=forms.Textarea, required=False)
estado = forms.ModelChoiceField(queryset=EstadoTituloNacional.objects.all().order_by('nombre'), required=False, empty_label=None)
fecha_alta = forms.DateField(required=False, widget=forms.HiddenInput())
class Meta:
model = TituloNacional
exclude = ('carreras')
| {
"content_hash": "7c086149e538a3162de8a9cc3f3cc8dd",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 173,
"avg_line_length": 52.5625,
"alnum_prop": 0.8180737217598097,
"repo_name": "MERegistro/meregistro",
"id": "68992c5c438bb3189a7bb5265a7d8b84d121c242",
"size": "865",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meregistro/apps/titulos/forms/TituloNacionalForm.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "79500"
},
{
"name": "HTML",
"bytes": "782188"
},
{
"name": "JavaScript",
"bytes": "106755"
},
{
"name": "PLpgSQL",
"bytes": "515442"
},
{
"name": "Python",
"bytes": "7190737"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
} |
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from Queue import Queue
from unittest import TestCase
from ambari_agent.LiveStatus import LiveStatus
from ambari_agent.ActionQueue import ActionQueue
from ambari_agent.AmbariConfig import AmbariConfig
import os, errno, time, pprint, tempfile, threading
import sys
from threading import Thread
import copy
import signal
from mock.mock import patch, MagicMock, call
from ambari_agent.CustomServiceOrchestrator import CustomServiceOrchestrator
from ambari_agent.PythonExecutor import PythonExecutor
from ambari_agent.ActualConfigHandler import ActualConfigHandler
from ambari_agent.RecoveryManager import RecoveryManager
from ambari_commons import OSCheck
from only_for_platform import not_for_platform, os_distro_value, PLATFORM_WINDOWS, PLATFORM_LINUX
import logging
class TestActionQueue(TestCase):
  def setUp(self):
    # Save a reference to the builtin open() so tests that patch it can
    # still reach the original implementation.
    self.original_open = open
  def tearDown(self):
    # Restore stdout in case a test redirected or replaced it.
    sys.stdout = sys.__stdout__
logger = logging.getLogger()
datanode_install_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'INSTALL',
'commandId': '1-1',
'taskId': 3,
'clusterName': u'cc',
'serviceName': u'HDFS',
'hostLevelParams': {},
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v1' }},
'commandParams': {
'command_retry_enabled': 'true'
}
}
datanode_install_no_retry_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'INSTALL',
'commandId': '1-1',
'taskId': 3,
'clusterName': u'cc',
'serviceName': u'HDFS',
'hostLevelParams': {},
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v1' }},
'commandParams': {
'command_retry_enabled': 'false'
}
}
datanode_auto_start_command = {
'commandType': 'AUTO_EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'START',
'commandId': '1-1',
'taskId': 3,
'clusterName': u'cc',
'serviceName': u'HDFS',
'hostLevelParams': {},
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v1' }}
}
datanode_upgrade_command = {
'commandId': 17,
'role' : "role",
'taskId' : "taskId",
'clusterName' : "clusterName",
'serviceName' : "serviceName",
'roleCommand' : 'UPGRADE',
'hostname' : "localhost.localdomain",
'hostLevelParams': {},
'clusterHostInfo': "clusterHostInfo",
'commandType': "EXECUTION_COMMAND",
'configurations':{'global' : {}},
'roleParams': {},
'commandParams' : {
'source_stack_version' : 'HDP-1.2.1',
'target_stack_version' : 'HDP-1.3.0'
}
}
namenode_install_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'NAMENODE',
'roleCommand': u'INSTALL',
'commandId': '1-1',
'taskId': 4,
'clusterName': u'cc',
'serviceName': u'HDFS',
'hostLevelParams': {}
}
snamenode_install_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'SECONDARY_NAMENODE',
'roleCommand': u'INSTALL',
'commandId': '1-1',
'taskId': 5,
'clusterName': u'cc',
'serviceName': u'HDFS',
'hostLevelParams': {}
}
hbase_install_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'HBASE',
'roleCommand': u'INSTALL',
'commandId': '1-1',
'taskId': 7,
'clusterName': u'cc',
'serviceName': u'HDFS',
'hostLevelParams': {},
'commandParams': {
'command_retry_enabled': 'true'
}
}
status_command = {
"serviceName" : 'HDFS',
"commandType" : "STATUS_COMMAND",
"clusterName" : "",
"componentName" : "DATANODE",
'configurations':{},
'hostLevelParams': {}
}
datanode_restart_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'CUSTOM_COMMAND',
'commandId': '1-1',
'taskId': 9,
'clusterName': u'cc',
'serviceName': u'HDFS',
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v123' }},
'hostLevelParams':{'custom_command': 'RESTART', 'clientsToUpdateConfigs': []}
}
datanode_restart_command_no_logging = {
'commandType': 'EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'CUSTOM_COMMAND',
'commandId': '1-1',
'taskId': 9,
'clusterName': u'cc',
'serviceName': u'HDFS',
'configurations': {'global': {}},
'configurationTags': {'global': {'tag': 'v123'}},
'commandParams': {
'log_output': 'false'
},
'hostLevelParams': {'custom_command': 'RESTART', 'clientsToUpdateConfigs': []}
}
datanode_restart_command_no_clients_update = {
'commandType': 'EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'CUSTOM_COMMAND',
'commandId': '1-1',
'taskId': 9,
'clusterName': u'cc',
'serviceName': u'HDFS',
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v123' }},
'hostLevelParams':{'custom_command': 'RESTART'}
}
datanode_start_custom_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'DATANODE',
'roleCommand': u'CUSTOM_COMMAND',
'commandId': '1-1',
'taskId': 9,
'clusterName': u'cc',
'serviceName': u'HDFS',
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v123' }},
'hostLevelParams':{'custom_command': 'START'}
}
yarn_refresh_queues_custom_command = {
'commandType': 'EXECUTION_COMMAND',
'role': u'RESOURCEMANAGER',
'roleCommand': u'CUSTOM_COMMAND',
'commandId': '1-1',
'taskId': 9,
'clusterName': u'cc',
'serviceName': u'YARN',
'commandParams' : {'forceRefreshConfigTags' : 'capacity-scheduler'},
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v123' }, 'capacity-scheduler' : {'tag': 'v123'}},
'hostLevelParams':{'custom_command': 'REFRESHQUEUES'}
}
status_command_for_alerts = {
"serviceName" : 'FLUME',
"commandType" : "STATUS_COMMAND",
"clusterName" : "",
"componentName" : "FLUME_HANDLER",
'configurations':{},
'hostLevelParams': {}
}
retryable_command = {
'commandType': 'EXECUTION_COMMAND',
'role': 'NAMENODE',
'roleCommand': 'INSTALL',
'commandId': '1-1',
'taskId': 19,
'clusterName': 'c1',
'serviceName': 'HDFS',
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v123' }},
'commandParams' : {
'script_type' : 'PYTHON',
'script' : 'script.py',
'command_timeout' : '600',
'jdk_location' : '.',
'service_package_folder' : '.',
'command_retry_enabled' : 'true',
'max_duration_for_retries' : '5'
},
'hostLevelParams' : {}
}
background_command = {
'commandType': 'BACKGROUND_EXECUTION_COMMAND',
'role': 'NAMENODE',
'roleCommand': 'CUSTOM_COMMAND',
'commandId': '1-1',
'taskId': 19,
'clusterName': 'c1',
'serviceName': 'HDFS',
'configurations':{'global' : {}},
'configurationTags':{'global' : { 'tag': 'v123' }},
'hostLevelParams':{'custom_command': 'REBALANCE_HDFS'},
'commandParams' : {
'script_type' : 'PYTHON',
'script' : 'script.py',
'command_timeout' : '600',
'jdk_location' : '.',
'service_package_folder' : '.'
}
}
cancel_background_command = {
'commandType': 'EXECUTION_COMMAND',
'role': 'NAMENODE',
'roleCommand': 'ACTIONEXECUTE',
'commandId': '1-1',
'taskId': 20,
'clusterName': 'c1',
'serviceName': 'HDFS',
'configurations':{'global' : {}},
'configurationTags':{'global' : {}},
'hostLevelParams':{},
'commandParams' : {
'script_type' : 'PYTHON',
'script' : 'cancel_background_task.py',
'before_system_hook_function' : 'fetch_bg_pid_by_taskid',
'jdk_location' : '.',
'command_timeout' : '600',
'service_package_folder' : '.',
'cancel_policy': 'SIGKILL',
'cancel_task_id': "19",
}
}
  @patch.object(AmbariConfig, "get_parallel_exec_option")
  @patch.object(ActionQueue, "process_command")
  @patch.object(Queue, "get")
  @patch.object(CustomServiceOrchestrator, "__init__")
  def test_ActionQueueStartStop(self, CustomServiceOrchestrator_mock,
                                get_mock, process_command_mock, get_parallel_exec_option_mock):
    """Start/stop mechanics: the queue thread must loop (calling
    process_command repeatedly) while running and report stopped after
    stop()+join(). Queue.get and process_command are mocked so the loop
    spins without doing any real work."""
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    config = MagicMock()
    # Parallel execution disabled: commands are processed sequentially.
    get_parallel_exec_option_mock.return_value = 0
    config.get_parallel_exec_option = get_parallel_exec_option_mock
    actionQueue = ActionQueue(config, dummy_controller)
    actionQueue.start()
    time.sleep(0.1)
    actionQueue.stop()
    actionQueue.join()
    self.assertEqual(actionQueue.stopped(), True, 'Action queue is not stopped.')
    # More than one call proves the worker loop actually iterated.
    self.assertTrue(process_command_mock.call_count > 1)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("logging.RootLogger.exception")
@patch.object(ActionQueue, "execute_command")
def test_process_command(self, execute_command_mock, log_exc_mock):
dummy_controller = MagicMock()
config = AmbariConfig()
config.set('agent', 'tolerate_download_failures', "true")
actionQueue = ActionQueue(config, dummy_controller)
execution_command = {
'commandType' : ActionQueue.EXECUTION_COMMAND,
}
status_command = {
'commandType' : ActionQueue.STATUS_COMMAND,
}
wrong_command = {
'commandType' : "SOME_WRONG_COMMAND",
}
# Try wrong command
actionQueue.process_command(wrong_command)
self.assertFalse(execute_command_mock.called)
self.assertFalse(log_exc_mock.called)
execute_command_mock.reset_mock()
log_exc_mock.reset_mock()
# Try normal execution
actionQueue.process_command(execution_command)
self.assertTrue(execute_command_mock.called)
self.assertFalse(log_exc_mock.called)
execute_command_mock.reset_mock()
log_exc_mock.reset_mock()
execute_command_mock.reset_mock()
log_exc_mock.reset_mock()
# Try exception to check proper logging
def side_effect(self):
raise Exception("TerribleException")
execute_command_mock.side_effect = side_effect
actionQueue.process_command(execution_command)
self.assertTrue(log_exc_mock.called)
log_exc_mock.reset_mock()
actionQueue.process_command(execution_command)
self.assertTrue(log_exc_mock.called)
  @patch.object(ActionQueue, "log_command_output")
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(CustomServiceOrchestrator, "runCommand")
  @patch("CommandStatusDict.CommandStatusDict")
  @patch.object(ActionQueue, "status_update_callback")
  def test_log_execution_commands(self, status_update_callback_mock,
                                  command_status_dict_mock,
                                  cso_runCommand_mock, mock_log_command_output):
    """With log_command_executes enabled, execute_command() must forward
    the command's stdout and stderr to log_command_output and produce a
    COMPLETED report carrying the cached configurationTags."""
    custom_service_orchestrator_execution_result_dict = {
      'stdout': 'out',
      'stderr': 'stderr',
      'structuredOut' : '',
      'exitcode' : 0
    }
    cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict
    config = AmbariConfig()
    tempdir = tempfile.gettempdir()
    config.set('agent', 'prefix', tempdir)
    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
    config.set('agent', 'tolerate_download_failures', "true")
    # Enable logging of command execution output -- the behavior under test.
    config.set('logging', 'log_command_executes', 1)
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(config, dummy_controller)
    actionQueue.execute_command(self.datanode_restart_command)
    report = actionQueue.result()
    expected = {'status': 'COMPLETED',
                'configurationTags': {'global': {'tag': 'v123'}},
                'stderr': 'stderr',
                'stdout': 'out\n\nCommand completed successfully!\n',
                'clusterName': u'cc',
                'structuredOut': '""',
                'roleCommand': u'CUSTOM_COMMAND',
                'serviceName': u'HDFS',
                'role': u'DATANODE',
                'actionId': '1-1',
                'taskId': 9,
                'customCommand': 'RESTART',
                'exitCode': 0}
    # Agent caches configurationTags if custom_command RESTART completed
    mock_log_command_output.assert_has_calls([call("out\n\nCommand completed successfully!\n", "9"), call("stderr", "9")], any_order=True)
    self.assertEqual(len(report['reports']), 1)
    self.assertEqual(expected, report['reports'][0])
@patch.object(ActionQueue, "log_command_output")
@patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value))
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch("CommandStatusDict.CommandStatusDict")
@patch.object(ActionQueue, "status_update_callback")
def test_do_not_log_execution_commands(self, status_update_callback_mock,
                                       command_status_dict_mock,
                                       cso_runCommand_mock, mock_log_command_output):
  """Even with 'logging.log_command_executes' globally enabled, a command
  that opts out of output logging must not have its stdout/stderr passed
  to log_command_output()."""
  custom_service_orchestrator_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut': '',
    'exitcode': 0
  }
  cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict

  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  # Global switch is ON; the command fixture itself disables logging.
  config.set('logging', 'log_command_executes', 1)
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(config, dummy_controller)
  actionQueue.execute_command(self.datanode_restart_command_no_logging)
  report = actionQueue.result()
  expected = {'status': 'COMPLETED',
              'configurationTags': {'global': {'tag': 'v123'}},
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n',
              'clusterName': u'cc',
              'structuredOut': '""',
              'roleCommand': u'CUSTOM_COMMAND',
              'serviceName': u'HDFS',
              'role': u'DATANODE',
              'actionId': '1-1',
              'taskId': 9,
              'customCommand': 'RESTART',
              'exitCode': 0}
  # Agent caches configurationTags if custom_command RESTART completed
  # BUG FIX: Mock.assert_not_called() takes no arguments.  The previous code
  # passed a call list plus any_order=True, which either raised TypeError
  # (modern mock) or, on mock versions predating assert_not_called, silently
  # auto-created a child-mock attribute and verified nothing at all.
  mock_log_command_output.assert_not_called()
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(expected, report['reports'][0])
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("__builtin__.open")
@patch.object(ActionQueue, "status_update_callback")
def test_auto_execute_command(self, status_update_callback_mock, open_mock):
  """Auto-start (recovery) commands are processed in the background but
  never surface reports to the server: the report list stays empty for
  both a successful and a failed auto-start run."""
  # Make file read calls visible
  def open_side_effect(file, mode):
    if mode == 'r':
      file_mock = MagicMock()
      file_mock.read.return_value = "Read from " + str(file)
      return file_mock
    else:
      return self.original_open(file, mode)
  open_mock.side_effect = open_side_effect
  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  dummy_controller = MagicMock()
  # Recovery manager with auto-start enabled so the auto command is accepted.
  dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp())
  dummy_controller.recovery_manager.update_config(5, 5, 1, 11, True, False, False, "", -1)
  actionQueue = ActionQueue(config, dummy_controller)
  # The executor blocks on this event so we can observe the in-progress state.
  unfreeze_flag = threading.Event()
  python_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut' : ''
  }
  def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
    unfreeze_flag.wait()
    return python_execution_result_dict
  def patched_aq_execute_command(command):
    # We have to perform patching for separate thread in the same thread
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
      runCommand_mock.side_effect = side_effect
      actionQueue.process_command(command)
  python_execution_result_dict['status'] = 'COMPLETE'
  python_execution_result_dict['exitcode'] = 0
  self.assertFalse(actionQueue.tasks_in_progress_or_pending())
  # We call method in a separate thread
  execution_thread = Thread(target = patched_aq_execute_command ,
                            args = (self.datanode_auto_start_command, ))
  execution_thread.start()
  # check in progress report
  # wait until ready
  while True:
    time.sleep(0.1)
    if actionQueue.tasks_in_progress_or_pending():
      break
  # Continue command execution
  unfreeze_flag.set()
  # wait until ready
  check_queue = True
  while check_queue:
    report = actionQueue.result()
    if not actionQueue.tasks_in_progress_or_pending():
      break
    time.sleep(0.1)
  # Successful auto-start: no report must be published.
  self.assertEqual(len(report['reports']), 0)

  ## Test failed execution
  python_execution_result_dict['status'] = 'FAILED'
  python_execution_result_dict['exitcode'] = 13
  # We call method in a separate thread
  execution_thread = Thread(target = patched_aq_execute_command ,
                            args = (self.datanode_auto_start_command, ))
  execution_thread.start()
  unfreeze_flag.set()
  # check in progress report
  # wait until ready
  while check_queue:
    report = actionQueue.result()
    if not actionQueue.tasks_in_progress_or_pending():
      break
    time.sleep(0.1)
  # Failed auto-start: still no report published.
  self.assertEqual(len(report['reports']), 0)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("__builtin__.open")
@patch.object(ActionQueue, "status_update_callback")
def test_execute_command(self, status_update_callback_mock, open_mock):
  """End-to-end check of execute_command(): IN_PROGRESS reporting while a
  command runs, COMPLETED/FAILED reports afterwards, config.json caching
  on successful INSTALL, report cleanup once results are read, and the
  legacy UPGRADE command path."""
  # Make file read calls visible
  def open_side_effect(file, mode):
    if mode == 'r':
      file_mock = MagicMock()
      file_mock.read.return_value = "Read from " + str(file)
      return file_mock
    else:
      return self.original_open(file, mode)
  open_mock.side_effect = open_side_effect
  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(config, dummy_controller)
  # The executor blocks on this event so the IN_PROGRESS state is observable.
  unfreeze_flag = threading.Event()
  python_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut' : ''
  }
  def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
    unfreeze_flag.wait()
    return python_execution_result_dict
  def patched_aq_execute_command(command):
    # We have to perform patching for separate thread in the same thread
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
      runCommand_mock.side_effect = side_effect
      actionQueue.execute_command(command)
  ### Test install/start/stop command ###
  ## Test successful execution with configuration tags
  python_execution_result_dict['status'] = 'COMPLETE'
  python_execution_result_dict['exitcode'] = 0
  # We call method in a separate thread
  execution_thread = Thread(target = patched_aq_execute_command ,
                            args = (self.datanode_install_command, ))
  execution_thread.start()
  # check in progress report
  # wait until ready
  while True:
    time.sleep(0.1)
    report = actionQueue.result()
    if len(report['reports']) != 0:
      break
  # While running, stdout/stderr/structuredOut come from the tmp output
  # files (see open_side_effect) and exitCode is the 777 "in progress" marker.
  expected = {'status': 'IN_PROGRESS',
              'stderr': 'Read from {0}'.format(os.path.join(tempdir, "errors-3.txt")),
              'stdout': 'Read from {0}'.format(os.path.join(tempdir, "output-3.txt")),
              'structuredOut' : 'Read from {0}'.format(os.path.join(tempdir, "structured-out-3.json")),
              'clusterName': u'cc',
              'roleCommand': u'INSTALL',
              'serviceName': u'HDFS',
              'role': u'DATANODE',
              'actionId': '1-1',
              'taskId': 3,
              'exitCode': 777}
  self.assertEqual(report['reports'][0], expected)
  self.assertTrue(actionQueue.tasks_in_progress_or_pending())
  # Continue command execution
  unfreeze_flag.set()
  # wait until ready
  while report['reports'][0]['status'] == 'IN_PROGRESS':
    time.sleep(0.1)
    report = actionQueue.result()
  # check report
  configname = os.path.join(tempdir, 'config.json')
  expected = {'status': 'COMPLETED',
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n',
              'clusterName': u'cc',
              'structuredOut': '""',
              'roleCommand': u'INSTALL',
              'serviceName': u'HDFS',
              'role': u'DATANODE',
              'actionId': '1-1',
              'taskId': 3,
              'configurationTags': {'global': {'tag': 'v1'}},
              'exitCode': 0}
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(report['reports'][0], expected)
  # A successful INSTALL must also cache the config as config.json.
  self.assertTrue(os.path.isfile(configname))
  # Check that we had 2 status update calls ( IN_PROGRESS and COMPLETE)
  self.assertEqual(status_update_callback_mock.call_count, 2)
  os.remove(configname)
  # now should not have reports (read complete/failed reports are deleted)
  report = actionQueue.result()
  self.assertEqual(len(report['reports']), 0)
  ## Test failed execution
  python_execution_result_dict['status'] = 'FAILED'
  python_execution_result_dict['exitcode'] = 13
  # We call method in a separate thread
  execution_thread = Thread(target = patched_aq_execute_command ,
                            args = (self.datanode_install_command, ))
  execution_thread.start()
  unfreeze_flag.set()
  # check in progress report
  # wait until ready
  report = actionQueue.result()
  while len(report['reports']) == 0 or \
        report['reports'][0]['status'] == 'IN_PROGRESS':
    time.sleep(0.1)
    report = actionQueue.result()
  # check report
  expected = {'status': 'FAILED',
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n\n\nCommand failed after 1 tries\n',
              'clusterName': u'cc',
              'structuredOut': '""',
              'roleCommand': u'INSTALL',
              'serviceName': u'HDFS',
              'role': u'DATANODE',
              'actionId': '1-1',
              'taskId': 3,
              'exitCode': 13}
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(report['reports'][0], expected)
  # now should not have reports (read complete/failed reports are deleted)
  report = actionQueue.result()
  self.assertEqual(len(report['reports']), 0)
  ### Test upgrade command ###
  python_execution_result_dict['status'] = 'COMPLETE'
  python_execution_result_dict['exitcode'] = 0
  execution_thread = Thread(target = patched_aq_execute_command ,
                            args = (self.datanode_upgrade_command, ))
  execution_thread.start()
  unfreeze_flag.set()
  # wait until ready
  report = actionQueue.result()
  while len(report['reports']) == 0 or \
        report['reports'][0]['status'] == 'IN_PROGRESS':
    time.sleep(0.1)
    report = actionQueue.result()
  # check report
  # NOTE: stdout accumulates across the runs above because the same tmp
  # output file names are reused for this task.
  expected = {'status': 'COMPLETED',
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n\n\nCommand failed after 1 tries\n\n\nCommand completed successfully!\n',
              'clusterName': 'clusterName',
              'structuredOut': '""',
              'roleCommand': 'UPGRADE',
              'serviceName': 'serviceName',
              'role': 'role',
              'actionId': 17,
              'taskId': 'taskId',
              'exitCode': 0}
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(report['reports'][0], expected)
  # now should not have reports (read complete/failed reports are deleted)
  report = actionQueue.result()
  self.assertEqual(len(report['reports']), 0)
def test_cancel_with_reschedule_command(self):
  """A command whose executor exits with -SIGTERM (i.e. it was cancelled)
  is rescheduled instead of reported, so the report list ends up empty."""
  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(config, dummy_controller)
  # The executor blocks on this event until we release it below.
  unfreeze_flag = threading.Event()
  # Simulated result: process terminated by SIGTERM (cancellation).
  python_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut' : '',
    'status' : '',
    'exitcode' : -signal.SIGTERM
  }
  def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
    unfreeze_flag.wait()
    return python_execution_result_dict
  def patched_aq_execute_command(command):
    # We have to perform patching for separate thread in the same thread
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
      runCommand_mock.side_effect = side_effect
      actionQueue.execute_command(command)
  # We call method in a separate thread
  execution_thread = Thread(target = patched_aq_execute_command ,
                            args = (self.datanode_install_command, ))
  execution_thread.start()
  # check in progress report
  # wait until ready
  while True:
    time.sleep(0.1)
    report = actionQueue.result()
    if len(report['reports']) != 0:
      break
  unfreeze_flag.set()
  # wait until ready
  while len(report['reports']) != 0:
    time.sleep(0.1)
    report = actionQueue.result()
  # check report: the cancelled command leaves nothing to report.
  self.assertEqual(len(report['reports']), 0)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch("CommandStatusDict.CommandStatusDict")
@patch.object(ActionQueue, "status_update_callback")
def test_store_configuration_tags(self, status_update_callback_mock,
                                  command_status_dict_mock,
                                  cso_runCommand_mock):
  """A completed RESTART custom command must yield a COMPLETED report that
  carries the command's configurationTags (the agent caches them)."""
  # The orchestrator pretends the command ran successfully.
  cso_runCommand_mock.return_value = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut': '',
    'exitcode': 0
  }

  cfg = AmbariConfig()
  work_dir = tempfile.gettempdir()
  for option, value in (('prefix', work_dir),
                        ('cache_dir', "/var/lib/ambari-agent/cache"),
                        ('tolerate_download_failures', "true")):
    cfg.set('agent', option, value)

  queue = ActionQueue(cfg, MagicMock())
  queue.execute_command(self.datanode_restart_command)
  report = queue.result()

  expected = {
    'status': 'COMPLETED',
    'configurationTags': {'global': {'tag': 'v123'}},
    'stderr': 'stderr',
    'stdout': 'out\n\nCommand completed successfully!\n',
    'clusterName': u'cc',
    'structuredOut': '""',
    'roleCommand': u'CUSTOM_COMMAND',
    'serviceName': u'HDFS',
    'role': u'DATANODE',
    'actionId': '1-1',
    'taskId': 9,
    'customCommand': 'RESTART',
    'exitCode': 0
  }
  # Agent caches configurationTags if custom_command RESTART completed.
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(expected, report['reports'][0])
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActualConfigHandler, "write_client_components")
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch("CommandStatusDict.CommandStatusDict")
@patch.object(ActionQueue, "status_update_callback")
def test_store_configuration_tags_no_clients(self, status_update_callback_mock,
                                             command_status_dict_mock,
                                             cso_runCommand_mock, write_client_components_mock):
  """A RESTART command flagged to skip the client update must complete and
  cache its configurationTags WITHOUT calling write_client_components()."""
  # Canned successful orchestrator result.
  custom_service_orchestrator_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut' : '',
    'exitcode' : 0
  }
  cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict

  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(config, dummy_controller)
  actionQueue.execute_command(self.datanode_restart_command_no_clients_update)
  report = actionQueue.result()
  expected = {'status': 'COMPLETED',
              'configurationTags': {'global': {'tag': 'v123'}},
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n',
              'clusterName': u'cc',
              'structuredOut': '""',
              'roleCommand': u'CUSTOM_COMMAND',
              'serviceName': u'HDFS',
              'role': u'DATANODE',
              'actionId': '1-1',
              'taskId': 9,
              'customCommand': 'RESTART',
              'exitCode': 0}
  # Agent caches configurationTags if custom_command RESTART completed
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(expected, report['reports'][0])
  # The "no clients update" variant must leave client components untouched.
  self.assertFalse(write_client_components_mock.called)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActualConfigHandler, "write_client_components")
@patch.object(ActualConfigHandler, "write_actual_component")
@patch.object(ActualConfigHandler, "update_component_tag")
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch("CommandStatusDict.CommandStatusDict")
@patch.object(ActionQueue, "status_update_callback")
def test_refresh_queues_custom_command(self, status_update_callback_mock,
                                       command_status_dict_mock,
                                       cso_runCommand_mock, update_component_tag, write_actual_component_mock, write_client_components_mock):
  """A REFRESHQUEUES-style custom command on the ResourceManager completes
  with no configurationTags in the report but still updates the cached
  component tag via ActualConfigHandler.update_component_tag()."""
  # Canned successful orchestrator result.
  custom_service_orchestrator_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut' : '',
    'exitcode' : 0
  }
  cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict

  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(config, dummy_controller)
  actionQueue.execute_command(self.yarn_refresh_queues_custom_command)
  report = actionQueue.result()
  # Note: configurationTags is None for this command variant.
  expected = {'status': 'COMPLETED',
              'configurationTags': None,
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n',
              'clusterName': u'cc',
              'structuredOut': '""',
              'roleCommand': u'CUSTOM_COMMAND',
              'serviceName': u'YARN',
              'role': u'RESOURCEMANAGER',
              'actionId': '1-1',
              'taskId': 9,
              'customCommand': 'RESTART',
              'exitCode': 0}
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(expected, report['reports'][0])
  # Configuration tags should be updated
  self.assertTrue(update_component_tag.called)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActualConfigHandler, "write_client_components")
@patch.object(ActualConfigHandler, "write_actual_component")
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch("CommandStatusDict.CommandStatusDict")
@patch.object(ActionQueue, "status_update_callback")
def test_store_configuration_tags_on_custom_start_command(self, status_update_callback_mock,
                                                          command_status_dict_mock,
                                                          cso_runCommand_mock, write_actual_component_mock, write_client_components_mock):
  """A custom START command that completes must report COMPLETED with its
  configurationTags and persist them via write_actual_component()."""
  # Canned successful orchestrator result.
  custom_service_orchestrator_execution_result_dict = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut' : '',
    'exitcode' : 0
  }
  cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict

  config = AmbariConfig()
  tempdir = tempfile.gettempdir()
  config.set('agent', 'prefix', tempdir)
  config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
  config.set('agent', 'tolerate_download_failures', "true")
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(config, dummy_controller)
  actionQueue.execute_command(self.datanode_start_custom_command)
  report = actionQueue.result()
  expected = {'status': 'COMPLETED',
              'configurationTags': {'global': {'tag': 'v123'}},
              'stderr': 'stderr',
              'stdout': 'out\n\nCommand completed successfully!\n',
              'clusterName': u'cc',
              'structuredOut': '""',
              'roleCommand': u'CUSTOM_COMMAND',
              'serviceName': u'HDFS',
              'role': u'DATANODE',
              'actionId': '1-1',
              'taskId': 9,
              'customCommand': 'START',
              'exitCode': 0}
  self.assertEqual(len(report['reports']), 1)
  self.assertEqual(expected, report['reports'][0])
  # Configuration tags should be updated on custom start command
  self.assertTrue(write_actual_component_mock.called)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActualConfigHandler, "write_actual_component")
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch("CommandStatusDict.CommandStatusDict")
@patch.object(ActionQueue, "status_update_callback")
def test_store_config_tags_on_install_client_command(self, status_update_callback_mock,
                                                     command_status_dict_mock,
                                                     cso_runCommand_mock, write_actual_component_mock):
  """Installing a client component must persist the actual configuration
  tags through ActualConfigHandler.write_actual_component()."""
  # The orchestrator pretends the INSTALL ran successfully.
  cso_runCommand_mock.return_value = {
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut': '',
    'exitcode': 0
  }

  # Minimal INSTALL execution command for the TEZ client component.
  tez_client_install_command = {
    'commandType': 'EXECUTION_COMMAND',
    'role': u'TEZ_CLIENT',
    'roleCommand': u'INSTALL',
    'commandId': '1-1',
    'taskId': 9,
    'clusterName': u'cc',
    'serviceName': u'TEZ',
    'configurations': {'global' : {}},
    'configurationTags': {'global' : { 'tag': 'v123' }},
    'hostLevelParams': {}
  }

  # Register TEZ_CLIENT as a known client component.
  LiveStatus.CLIENT_COMPONENTS = ({'serviceName': 'TEZ', 'componentName': 'TEZ_CLIENT'}, )

  cfg = AmbariConfig()
  for option, value in (('prefix', tempfile.gettempdir()),
                        ('cache_dir', "/var/lib/ambari-agent/cache"),
                        ('tolerate_download_failures', "true")):
    cfg.set('agent', option, value)

  queue = ActionQueue(cfg, MagicMock())
  queue.execute_command(tez_client_install_command)

  # Configuration tags should be updated on install client command.
  self.assertTrue(write_actual_component_mock.called)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActionQueue, "status_update_callback")
@patch.object(ActionQueue, "execute_command")
@patch.object(LiveStatus, "build")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_execute_status_command(self, CustomServiceOrchestrator_mock,
                                build_mock, execute_command_mock,
                                status_update_callback):
  """A processed status-command result surfaces the LiveStatus report in
  the queue's componentStatus list unchanged."""
  CustomServiceOrchestrator_mock.return_value = None
  controller = MagicMock()
  queue = ActionQueue(AmbariConfig(), controller)
  # LiveStatus.build is stubbed to return a recognizable marker report.
  build_mock.return_value = {'dummy report': '' }
  controller.recovery_manager = RecoveryManager(tempfile.mktemp())

  queue.process_status_command_result((self.status_command, {'exitcode': 0 }))

  report = queue.result()
  self.assertEqual(len(report['componentStatus']), 1)
  self.assertEqual(report['componentStatus'][0], {'dummy report': ''})
@patch.object(RecoveryManager, "command_exists")
@patch.object(RecoveryManager, "requires_recovery")
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActionQueue, "status_update_callback")
@patch.object(ActionQueue, "execute_command")
@patch.object(LiveStatus, "build")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_process_status_command_result_recovery(self, CustomServiceOrchestrator_mock,
                                                build_mock, execute_command_mock,
                                                status_update_callback, requires_recovery_mock,
                                                command_exists_mock):
  """When a component requires recovery, its status report carries a
  'sendExecCmdDet' flag: 'True' if no execution command is already queued
  for it, 'False' if one already exists."""
  CustomServiceOrchestrator_mock.return_value = None
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
  build_mock.return_value = {'dummy report': '' }
  # Recovery needed and no command pending -> agent should ask for one.
  requires_recovery_mock.return_value = True
  command_exists_mock.return_value = False
  dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp(), True, False)

  result = (self.status_command, {'exitcode': 0 })

  actionQueue.process_status_command_result(result)
  report = actionQueue.result()
  expected = {'dummy report': '',
              'sendExecCmdDet': 'True'}
  self.assertEqual(len(report['componentStatus']), 1)
  self.assertEqual(report['componentStatus'][0], expected)

  # Recovery needed but a command is already queued -> do not request another.
  requires_recovery_mock.return_value = True
  command_exists_mock.return_value = True
  result = (self.status_command, {'exitcode': 0 })
  actionQueue.process_status_command_result(result)
  report = actionQueue.result()
  expected = {'dummy report': '',
              'sendExecCmdDet': 'False'}
  self.assertEqual(len(report['componentStatus']), 1)
  self.assertEqual(report['componentStatus'][0], expected)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(ActionQueue, "status_update_callback")
@patch.object(ActionQueue, "execute_command")
@patch.object(LiveStatus, "build")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_process_status_command_result_with_alerts(self, CustomServiceOrchestrator_mock,
                                                   build_mock, execute_command_mock,
                                                   status_update_callback):
  """Alerts found in a status command's structured output must be merged
  into the component status report."""
  CustomServiceOrchestrator_mock.return_value = None
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
  # Status command output carrying an 'alerts' section.
  command_return_value = {
    'exitcode': 0,
    'stdout': 'out',
    'stderr': 'err',
    'structuredOut': {'alerts': [ {'name': 'flume_alert'} ] }
  }

  result = (self.status_command_for_alerts, command_return_value)

  build_mock.return_value = {'somestatusresult': 'aresult'}

  actionQueue.process_status_command_result(result)

  report = actionQueue.result()

  self.assertEqual(len(report['componentStatus']), 1)
  # IDIOM FIX: dict.has_key() is Python-2-only (removed in Python 3);
  # assertIn uses the 'in' membership test, which works on both and gives
  # a clearer failure message.
  self.assertIn('alerts', report['componentStatus'][0])
@patch.object(AmbariConfig, "get_parallel_exec_option")
@patch.object(ActionQueue, "process_command")
@patch.object(Queue, "get")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_reset_queue(self, CustomServiceOrchestrator_mock,
                     get_mock, process_command_mock, gpeo_mock):
  """reset() must drain every queued command and clear the pending state."""
  CustomServiceOrchestrator_mock.return_value = None
  controller = MagicMock()
  controller.recovery_manager = RecoveryManager(tempfile.mktemp())
  cfg = MagicMock()
  gpeo_mock.return_value = 0          # serial execution mode
  cfg.get_parallel_exec_option = gpeo_mock
  queue = ActionQueue(cfg, controller)
  queue.start()

  queue.put([self.datanode_install_command, self.hbase_install_command])
  self.assertEqual(2, queue.commandQueue.qsize())
  self.assertTrue(queue.tasks_in_progress_or_pending())

  queue.reset()

  # Both the backing queue and the in-progress bookkeeping are emptied.
  self.assertTrue(queue.commandQueue.empty())
  self.assertFalse(queue.tasks_in_progress_or_pending())

  time.sleep(0.1)
  queue.stop()
  queue.join()
  self.assertEqual(queue.stopped(), True, 'Action queue is not stopped.')
@patch.object(AmbariConfig, "get_parallel_exec_option")
@patch.object(ActionQueue, "process_command")
@patch.object(Queue, "get")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_cancel(self, CustomServiceOrchestrator_mock,
                get_mock, process_command_mock, gpeo_mock):
  """reset() empties the command queue and the queue thread can still be
  stopped and joined cleanly afterwards."""
  CustomServiceOrchestrator_mock.return_value = None
  controller = MagicMock()
  cfg = MagicMock()
  gpeo_mock.return_value = 0          # serial execution mode
  cfg.get_parallel_exec_option = gpeo_mock
  queue = ActionQueue(cfg, controller)
  queue.start()

  queue.put([self.datanode_install_command, self.hbase_install_command])
  self.assertEqual(2, queue.commandQueue.qsize())

  queue.reset()

  self.assertTrue(queue.commandQueue.empty())

  time.sleep(0.1)
  queue.stop()
  queue.join()
  self.assertEqual(queue.stopped(), True, 'Action queue is not stopped.')
@patch.object(AmbariConfig, "get_parallel_exec_option")
@patch.object(ActionQueue, "process_command")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_parallel_exec(self, CustomServiceOrchestrator_mock,
                       process_command_mock, gpeo_mock):
  """With parallel execution enabled, both queued commands are dispatched
  to process_command()."""
  CustomServiceOrchestrator_mock.return_value = None
  dummy_controller = MagicMock()
  config = MagicMock()
  gpeo_mock.return_value = 1          # enable parallel command execution
  config.get_parallel_exec_option = gpeo_mock
  actionQueue = ActionQueue(config, dummy_controller)
  actionQueue.put([self.datanode_install_command, self.hbase_install_command])
  self.assertEqual(2, actionQueue.commandQueue.qsize())
  actionQueue.start()
  time.sleep(1)
  actionQueue.stop()
  actionQueue.join()
  self.assertEqual(actionQueue.stopped(), True, 'Action queue is not stopped.')
  self.assertEqual(2, process_command_mock.call_count)
  # BUG FIX: Mock has no assert_any_calls() method; calling it merely
  # auto-created a child-mock attribute and verified nothing.
  # assert_has_calls() is the real API for checking both dispatches.
  process_command_mock.assert_has_calls(
      [call(self.datanode_install_command), call(self.hbase_install_command)],
      any_order=True)
@patch("threading.Thread")
@patch.object(AmbariConfig, "get_parallel_exec_option")
@patch.object(ActionQueue, "process_command")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_parallel_exec_no_retry(self, CustomServiceOrchestrator_mock,
                                process_command_mock, gpeo_mock, threading_mock):
  """Commands marked non-retriable are processed inline even in parallel
  mode: both are handled, and no extra worker thread is spawned."""
  CustomServiceOrchestrator_mock.return_value = None
  dummy_controller = MagicMock()
  config = MagicMock()
  gpeo_mock.return_value = 1          # parallel mode requested
  config.get_parallel_exec_option = gpeo_mock
  actionQueue = ActionQueue(config, dummy_controller)
  actionQueue.put([self.datanode_install_no_retry_command, self.snamenode_install_command])
  self.assertEqual(2, actionQueue.commandQueue.qsize())
  actionQueue.start()
  time.sleep(1)
  actionQueue.stop()
  actionQueue.join()
  self.assertEqual(actionQueue.stopped(), True, 'Action queue is not stopped.')
  self.assertEqual(2, process_command_mock.call_count)
  # No new threading.Thread was created for these commands.
  self.assertEqual(0, threading_mock.call_count)
  # BUG FIX: the original used the nonexistent assert_any_calls() (a silent
  # no-op on a mock) and, on top of that, referenced commands copy-pasted
  # from another test.  Verify the two commands actually queued here.
  process_command_mock.assert_has_calls(
      [call(self.datanode_install_no_retry_command),
       call(self.snamenode_install_command)],
      any_order=True)
@not_for_platform(PLATFORM_LINUX)
@patch("time.sleep")
@patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value))
@patch.object(CustomServiceOrchestrator, "__init__")
def test_execute_retryable_command(self, CustomServiceOrchestrator_mock,
                                   sleep_mock
                                   ):
  """A command that keeps failing is retried (three attempts with this
  fixture's retry settings), with back-off sleeps of 2s then 3s between
  attempts; the first attempt overwrites the output files, retries append."""
  CustomServiceOrchestrator_mock.return_value = None
  dummy_controller = MagicMock()
  actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
  # Every attempt reports a failure, forcing the full retry path.
  python_execution_result_dict = {
    'exitcode': 1,
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut': '',
    'status': 'FAILED'
  }

  def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
    return python_execution_result_dict
  command = copy.deepcopy(self.retryable_command)
  with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
    runCommand_mock.side_effect = side_effect
    actionQueue.execute_command(command)

  #assert that python executor start
  self.assertTrue(runCommand_mock.called)
  self.assertEqual(3, runCommand_mock.call_count)
  self.assertEqual(2, sleep_mock.call_count)
  sleep_mock.assert_has_calls([call(2), call(3)], False)
  runCommand_mock.assert_has_calls([
    call(command, os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
         os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt', override_output_files=True, retry=False),
    call(command, os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
         os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt', override_output_files=False, retry=True),
    call(command, os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
         os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt', override_output_files=False, retry=True)])
@patch("time.time")
@patch("time.sleep")
@patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value))
@patch.object(CustomServiceOrchestrator, "__init__")
def test_execute_retryable_command_with_time_lapse(self, CustomServiceOrchestrator_mock,
                                                   sleep_mock, time_mock
                                                   ):
  """When the mocked clock jumps forward between attempts, the retry loop
  runs out of its time budget early: only two attempts and a single
  1-second sleep happen instead of the full retry schedule."""
  CustomServiceOrchestrator_mock.return_value = None
  dummy_controller = MagicMock()
  dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp())
  actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
  # Every attempt fails, so only the clock limits the retries.
  python_execution_result_dict = {
    'exitcode': 1,
    'stdout': 'out',
    'stderr': 'stderr',
    'structuredOut': '',
    'status': 'FAILED'
  }

  times_arr = [8, 10, 14, 18, 22, 26, 30, 34]
  if self.logger.isEnabledFor(logging.INFO):
    # presumably an extra time.time() call happens on the INFO logging
    # path, consuming one more value — TODO confirm against ActionQueue
    times_arr.insert(0, 4)
  time_mock.side_effect = times_arr

  def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
    return python_execution_result_dict
  command = copy.deepcopy(self.retryable_command)
  with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
    runCommand_mock.side_effect = side_effect
    actionQueue.execute_command(command)

  #assert that python executor start
  self.assertTrue(runCommand_mock.called)
  self.assertEqual(2, runCommand_mock.call_count)
  self.assertEqual(1, sleep_mock.call_count)
  sleep_mock.assert_has_calls([call(1)], False)
  runCommand_mock.assert_has_calls([
    call(command, os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
         os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt', override_output_files=True, retry=False),
    call(command, os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
         os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt', override_output_files=False, retry=True)])
# retryable_command
@not_for_platform(PLATFORM_LINUX)
@patch("time.sleep")
@patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value))
@patch.object(CustomServiceOrchestrator, "__init__")
def test_execute_retryable_command_fail_and_succeed(self, CustomServiceOrchestrator_mock,
                                                    sleep_mock):
    """First attempt FAILED, second COMPLETED: expect exactly one retry, one
    retry sleep, and the 'commandBeingRetried' marker set on the command."""
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
    execution_result_fail_dict = {
        'exitcode': 1,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'FAILED'
    }
    execution_result_succ_dict = {
        'exitcode': 0,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'COMPLETED'
    }
    command = copy.deepcopy(self.retryable_command)
    # Marker must not be present before execution.
    self.assertFalse('commandBeingRetried' in command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        # Fail once, then succeed.
        runCommand_mock.side_effect = [execution_result_fail_dict, execution_result_succ_dict]
        actionQueue.execute_command(command)
        # assert that the python executor was started
        self.assertTrue(runCommand_mock.called)
        self.assertEqual(2, runCommand_mock.call_count)
        self.assertEqual(1, sleep_mock.call_count)
        self.assertEqual(command['commandBeingRetried'], "true")
        # sleep(2) -- presumably the retry interval from the command's retry
        # configuration; verify against retryable_command fixture.
        sleep_mock.assert_any_call(2)
@not_for_platform(PLATFORM_LINUX)
@patch("time.sleep")
@patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value))
@patch.object(CustomServiceOrchestrator, "__init__")
def test_execute_retryable_command_succeed(self, CustomServiceOrchestrator_mock,
                                           sleep_mock):
    """A retryable command that succeeds on the first attempt must run exactly
    once and never trigger the retry sleep."""
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
    success_result = {
        'exitcode': 0,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'COMPLETED'
    }
    command = copy.deepcopy(self.retryable_command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as run_mock:
        run_mock.side_effect = [success_result]
        actionQueue.execute_command(command)
        # Exactly one execution and no retry delay.
        self.assertTrue(run_mock.called)
        self.assertFalse(sleep_mock.called)
        self.assertEqual(1, run_mock.call_count)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(CustomServiceOrchestrator, "runCommand")
@patch.object(CustomServiceOrchestrator, "__init__")
def test_execute_background_command(self, CustomServiceOrchestrator_mock,
                                    runCommand_mock,
                                    ):
    """A background command must start executing, stay in IN_PROGRESS state,
    and still produce exactly one report."""
    CustomServiceOrchestrator_mock.return_value = None
    # Configures the patched class attribute directly; equivalent to setting
    # runCommand_mock.return_value since runCommand is patched above.
    CustomServiceOrchestrator.runCommand.return_value = {'exitcode' : 0,
                                                         'stdout': 'out-11',
                                                         'stderr' : 'err-13'}
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
    execute_command = copy.deepcopy(self.background_command)
    actionQueue.put([execute_command])
    actionQueue.processBackgroundQueueSafeEmpty();
    actionQueue.controller.statusCommandExecutor.process_results();
    # assert that the python executor was started
    self.assertTrue(runCommand_mock.called)
    # The command must still be tracked as running.
    runningCommand = actionQueue.commandStatuses.current_state.get(execute_command['taskId'])
    self.assertTrue(runningCommand is not None)
    self.assertEqual(runningCommand[1]['status'], ActionQueue.IN_PROGRESS_STATUS)
    report = actionQueue.result()
    self.assertEqual(len(report['reports']), 1)
@patch.object(CustomServiceOrchestrator, "get_py_executor")
@patch.object(CustomServiceOrchestrator, "resolve_script_path")
def test_execute_python_executor(self, resolve_script_path_mock,
                                 get_py_executor_mock):
    """End-to-end background command through a PythonExecutor whose
    process-level hooks are stubbed by patch_output_file(); verifies the
    completion-callback payload and the final report."""
    dummy_controller = MagicMock()
    cfg = AmbariConfig()
    cfg.set('agent', 'tolerate_download_failures', 'true')
    cfg.set('agent', 'prefix', '.')
    cfg.set('agent', 'cache_dir', 'background_tasks')
    actionQueue = ActionQueue(cfg, dummy_controller)
    pyex = PythonExecutor(actionQueue.customServiceOrchestrator.tmp_dir, actionQueue.customServiceOrchestrator.config)
    # Replace subprocess launching/reading with canned 'process_out'/'process_err'.
    patch_output_file(pyex)
    get_py_executor_mock.return_value = pyex
    actionQueue.customServiceOrchestrator.dump_command_to_json = MagicMock()
    result = {}
    lock = threading.RLock()
    complete_done = threading.Condition(lock)

    def command_complete_w(process_condensed_result, handle):
        # Runs on the executor thread: snapshot completion data under the lock
        # and wake the test thread waiting on complete_done.
        with lock:
            result['command_complete'] = {'condensed_result' : copy.copy(process_condensed_result),
                                          'handle' : copy.copy(handle),
                                          'command_status' : actionQueue.commandStatuses.get_command_status(handle.command['taskId'])
                                          }
            complete_done.notifyAll()

    # Wrap the real callback so the test is notified after it runs.
    actionQueue.on_background_command_complete_callback = wraped(actionQueue.on_background_command_complete_callback,
                                                                 None, command_complete_w)
    actionQueue.put([self.background_command])
    actionQueue.processBackgroundQueueSafeEmpty();
    actionQueue.controller.statusCommandExecutor.process_results();
    with lock:
        complete_done.wait(0.1)
        finished_status = result['command_complete']['command_status']
        self.assertEqual(finished_status['status'], ActionQueue.COMPLETED_STATUS)
        self.assertEqual(finished_status['stdout'], 'process_out')
        self.assertEqual(finished_status['stderr'], 'process_err')
        self.assertEqual(finished_status['exitCode'], 0)
    runningCommand = actionQueue.commandStatuses.current_state.get(self.background_command['taskId'])
    self.assertTrue(runningCommand is not None)
    report = actionQueue.result()
    self.assertEqual(len(report['reports']), 1)
    self.assertEqual(report['reports'][0]['stdout'], 'process_out')
    # self.assertEqual(report['reports'][0]['structuredOut'],'{"a": "b."}')
# Template CANCEL_COMMAND fixture: instructs the agent to ABORT the tasks
# listed in roleParams.cancelTaskIdTargets.
cancel_background_command = {
    "commandType":"CANCEL_COMMAND",
    "role":"AMBARI_SERVER_ACTION",
    "roleCommand":"ABORT",
    "commandId":"2--1",
    "taskId":20,
    "clusterName":"c1",
    "serviceName":"",
    "hostname":"c6401",
    "roleParams":{
        "cancelTaskIdTargets":"13,14"
    },
}
def patch_output_file(pythonExecutor):
    """Stub the given PythonExecutor's process-facing hooks so no real
    subprocess is spawned and canned output is reported instead."""

    def fake_launch(command, tmpout, tmperr):
        # Pretend a subprocess ran and exited cleanly, writing canned output.
        process = MagicMock()
        process.pid = 33
        process.returncode = 0
        with tmpout:
            tmpout.write('process_out')
        with tmperr:
            tmperr.write('process_err')
        return process

    def fake_open_files(fout, ferr, f):
        return MagicMock(), MagicMock()

    def fake_read_result(out_path, err_path, structured_out_path):
        return 'process_out', 'process_err', '{"a": "b."}'

    pythonExecutor.launch_python_subprocess = fake_launch
    pythonExecutor.open_subprocess_files = fake_open_files
    pythonExecutor.read_result_from_files = fake_read_result
def wraped(func, before = None, after = None):
    """Return *func* wrapped so that *before* and *after* (when given) are
    invoked with the same arguments around each call; func's return value is
    passed through unchanged."""

    def wrapper(*args, **kwargs):
        if before is not None:
            before(*args, **kwargs)
        result = func(*args, **kwargs)
        if after is not None:
            after(*args, **kwargs)
        return result

    return wrapper
| {
"content_hash": "64edd0bdd0ecde05b84fede49bec7b75",
"timestamp": "",
"source": "github",
"line_count": 1436,
"max_line_length": 162,
"avg_line_length": 39.74442896935933,
"alnum_prop": 0.640145077357069,
"repo_name": "radicalbit/ambari",
"id": "faa9b81f3f7e94a0e840fda8c67304d0a6658b39",
"size": "57096",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "ambari-agent/src/test/python/ambari_agent/TestActionQueue.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "42212"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "182799"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "1287531"
},
{
"name": "CoffeeScript",
"bytes": "4323"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Groovy",
"bytes": "88056"
},
{
"name": "HTML",
"bytes": "5098825"
},
{
"name": "Java",
"bytes": "29006663"
},
{
"name": "JavaScript",
"bytes": "17274453"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLSQL",
"bytes": "2160"
},
{
"name": "PLpgSQL",
"bytes": "314333"
},
{
"name": "PowerShell",
"bytes": "2087991"
},
{
"name": "Python",
"bytes": "14584206"
},
{
"name": "R",
"bytes": "1457"
},
{
"name": "Roff",
"bytes": "13935"
},
{
"name": "Ruby",
"bytes": "14478"
},
{
"name": "SQLPL",
"bytes": "2117"
},
{
"name": "Shell",
"bytes": "741459"
},
{
"name": "Vim script",
"bytes": "5813"
}
],
"symlink_target": ""
} |
"""
Module which groups all the aggregated precomputed information in order to
save computational power.
"""
import pandas as pd
from FirmsLocations.Preprocess.preprocess_cols import cp2str
def read_agg(filepath):
    """Load the ';'-separated aggregated-info CSV at *filepath* and normalise
    its postal-code column via cp2str before returning the DataFrame."""
    frame = pd.read_csv(filepath, sep=';')
    return cp2str(frame)
def read_aggregation(filepath, typevars):
    """Read the aggregated table and split it into (locations, features)
    using the column groups declared in *typevars* ('loc_vars', 'feat_vars')."""
    # TODO: original carried an unfinished-work marker here; scope unclear.
    table = read_agg(filepath)
    features = table[typevars['feat_vars']]
    locations = table[typevars['loc_vars']]
    return locations, features
| {
"content_hash": "710bfff4f543f87096a5dc286295bd4c",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 74,
"avg_line_length": 25.272727272727273,
"alnum_prop": 0.7194244604316546,
"repo_name": "tgquintela/Mscthesis",
"id": "c99fcbb55477fc82ff789938f0e3527d43e5571d",
"size": "557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FirmsLocations/IO/io_aggfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "257118"
},
{
"name": "TeX",
"bytes": "1277"
}
],
"symlink_target": ""
} |
import sys
import time
import hashlib
import random
import base64
import urllib
import os
import binascii
from urlparse import urlparse, urlunparse
from urllib2 import HTTPError, URLError
from types import ListType, DictType
from traceback import print_exc
from xml.dom.minidom import parseString
from xml.dom import expatbuilder
from threading import Thread
try:
import json
except:
import simplejson as json
from M2Crypto import RSA, BIO
from ACEStream.version import VERSION
from ACEStream.Core.simpledefs import *
from ACEStream.Core.TorrentDef import *
from ACEStream.Core.Utilities.timeouturlopen import urlOpenTimeout
from ACEStream.Core.Utilities.logger import log, log_exc
from ACEStream.Core.TS.domutils import domutils
DEBUG = False
class BadResponseException(Exception):
    """Raised when an ad-server response is missing, malformed, or fails
    validation while being parsed."""
class AdBlockDetectedException(Exception):
    """Raised when repeated network failures suggest an ad blocker is
    intercepting requests to the ad servers."""
class AdManager:
    """Fetches, parses, formats and tracks VAST advertisements."""
    # AdSystem name identifying TorrentStream's own ad server in VAST responses.
    TS_ADSYSTEM = 'TS_ADS'
    # Shared secret mixed into sha1 request/tracking signatures.
    REQUEST_SECRET = 'q\\\'X!;UL0J_<R*z#GBTL(9mCeRJbm/;L.oi9.`\\"iETli9GD]`t&xlT(]MhJ{NVN,Q.)r~(6+9Bt(G,O%2c/g@sPi]<c[i\\\\ga]fkbHgwH:->ok4w8><y]^:Lw465+W4a(:'
    # Seconds allowed for a single ad-server request.
    AD_REQUEST_TIMEOUT = 5
    # Maximum VAST wrapper redirect depth.
    MAX_VAST_REDIRECTS = 3
    # Consecutive network failures per host before assuming an ad blocker.
    AD_BLOCK_MAX_NETWORK_ERRORS = 10
    # RSA public key used to verify signed ad-server responses.
    RESPONSE_PUBKEY = '-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAymBcELt1ju/RIS/pWpeE\ncj7HzeCXxwmAyeYY3IIqtQgRFGMj+aMoJBVPIJwhN4Q+SqfNStmYTpCKUm1nyxi4\nNyP/+x/7eaaWzGSrShRXUOOE/gA23LKWKrugL7+y+XhEe11cyjN6qRqvO+uoCFZy\nwOIz+eK+nnK8LR/v9acFHdwXtFQyqP9uGgNkjytvPU2xLa9Ye2M7zMaq7wfmiqgQ\nEeQQkL3/aCMsEg3LnLGLA3F9zQ0JSc5hcbisGkijVA7cPgSVJ9Q1I6P8G5Ha4Bvm\n6qh325LuShD6PGK5ad1/TmbMIeeYEutRZsDqK81ww9gLvq6LCxBgkm5E+VgJoiMr\nUQIDAQAB\n-----END PUBLIC KEY-----'
    # Candidate ad-server hosts, tried in order until one answers.
    AD_SERVERS = ['a1.torrentstream.org',
                  'a1.torrentstream.net',
                  'a1.torrentstream.info',
                  'a2.torrentstream.org',
                  'a2.torrentstream.net',
                  'a2.torrentstream.info',
                  'a3.torrentstream.org',
                  'a3.torrentstream.net',
                  'a3.torrentstream.info']
def __init__(self, baseapp, cookie_jar = None):
self.baseapp = baseapp
self.ad_first_count = {}
self.network_errors = {}
self.cookie_jar = cookie_jar
def get_ads(self, device_id, user_login, user_level, content_type, content_id, content_ext, content_duration, affiliate_id = 0, zone_id = 0, developer_id = 0, include_interruptable_ads = True, is_live = False, provider_key = None, provider_content_id = None, user_profile = None):
    """Request the ad block for a piece of content from the ad servers.

    NOTE(review): the unconditional ``return []`` below makes everything after
    it unreachable -- advertising appears to be switched off at the source.
    Confirm this kill-switch is intentional before removing it.

    Dead-code path (kept for reference): builds a signed query from the
    device/user/content parameters, tries each host in AD_SERVERS until one
    returns a parseable VAST document, and returns the formatted ad list;
    returns False when every server fails.
    """
    return []
    random_number = random.randint(1, sys.maxint)
    params = []
    # Query parameters: d=device, u=user, ul=user level; ct/cid/cext/dur
    # describe the content; a/z/did are affiliate/zone/developer ids;
    # t/r are timestamp+nonce; i=allow interruptable; l=live; v=version.
    params.append('d=' + device_id)
    params.append('u=' + user_login)
    params.append('ul=' + str(user_level))
    params.append('ct=' + str(content_type))
    params.append('cid=' + content_id)
    params.append('cext=' + content_ext)
    params.append('dur=' + str(content_duration))
    params.append('a=' + str(affiliate_id))
    params.append('z=' + str(zone_id))
    params.append('did=' + str(developer_id))
    params.append('t=' + str(int(time.time())))
    params.append('r=' + str(random_number))
    params.append('i=' + str(1 if include_interruptable_ads else 0))
    params.append('l=' + str(1 if is_live else 0))
    params.append('v=' + VERSION)
    if provider_key is not None:
        params.append('p=' + provider_key)
    if provider_content_id is not None:
        params.append('pc=' + provider_content_id)
    if user_profile is not None:
        params.append('gender=' + str(user_profile.get_gender_id()))
        params.append('age=' + str(user_profile.get_age_id()))
    # Request signature: sha1 over the sorted '#'-joined params + shared secret.
    data = '#'.join(sorted(params))
    sig = hashlib.sha1(data + self.REQUEST_SECRET).hexdigest()
    p = []
    for param in params:
        p.append(urllib.quote_plus(param, '='))
    query = '/get?' + '&'.join(p) + '&s=' + sig
    got_success = False
    for ad_server in self.AD_SERVERS:
        try:
            url = 'http://' + ad_server + query
            if DEBUG:
                log('AdManager::get_ads: send request: url', url)
            # request_data carries the overall deadline into wrapper handling.
            request_data = {'start_time': time.time(),
                            'timeout': self.AD_REQUEST_TIMEOUT}
            _t = time.time()
            stream = urlOpenTimeout(url, timeout=self.AD_REQUEST_TIMEOUT)
            _t_open = time.time() - _t
            _t = time.time()
            response = stream.read()
            _t_read = time.time() - _t
            _t = time.time()
            stream.close()
            _t_close = time.time() - _t
            if DEBUG:
                log('admanager::get_ads: request time: open', _t_open, 'read', _t_read, 'close', _t_close)
            vast_ads, ad_settings = self.parse_vast_response(url, response, random_number, request_data, self.MAX_VAST_REDIRECTS)
            ads = self.format_vast_ads(vast_ads, ad_settings, include_interruptable_ads)
            got_success = True
            break
        except BadResponseException as e:
            if DEBUG:
                log('AdManager::get_ads: exc: ' + str(e))
        except HTTPError as e:
            if DEBUG:
                log('AdManager::get_ads: http error: ' + str(e))
        except URLError as e:
            if DEBUG:
                log('AdManager::get_ads: url error: ' + str(e))
        except AdBlockDetectedException:
            if DEBUG:
                log('AdManager::get_ads: ad block detected')
        except:
            if DEBUG:
                print_exc()
    if not got_success:
        # All servers failed -- callers receive False, not an empty list.
        return False
    return ads
def get_preload_ads(self, deviceid, user_login, include_interruptable_ads = True, user_profile = None):
    """Request ads to pre-download before playback.

    NOTE(review): like get_ads(), the unconditional ``return []`` disables the
    whole method; everything below is dead code kept for reference.
    """
    return []
    random_number = random.randint(1, sys.maxint)
    params = []
    params.append('d=' + deviceid)
    params.append('u=' + user_login)
    if include_interruptable_ads:
        flag = 1
    else:
        flag = 0
    params.append('i=' + str(flag))
    params.append('t=' + str(int(time.time())))
    params.append('r=' + str(random_number))
    params.append('v=' + VERSION)
    if user_profile is not None:
        params.append('gender=' + str(user_profile.get_gender_id()))
        params.append('age=' + str(user_profile.get_age_id()))
    # Same signing scheme as get_ads(): sha1 over sorted params + secret.
    data = '#'.join(sorted(params))
    sig = hashlib.sha1(data + self.REQUEST_SECRET).hexdigest()
    p = []
    for param in params:
        p.append(urllib.quote_plus(param, '='))
    query = '/preload?' + '&'.join(p) + '&s=' + sig
    got_success = False
    for ad_server in self.AD_SERVERS:
        try:
            url = 'http://' + ad_server + query
            if DEBUG:
                log('AdManager::get_preload_ads: send request: url', url)
            stream = urlOpenTimeout(url, timeout=5)
            response = stream.read()
            stream.close()
            ads = self.parse_preload_ad_response(response, random_number)
            got_success = True
            break
        except BadResponseException as e:
            if DEBUG:
                log('AdManager::get_preload_ads: exc: ' + str(e))
        except HTTPError as e:
            if DEBUG:
                log('AdManager::get_preload_ads: http error: ' + str(e))
        except URLError as e:
            if DEBUG:
                log('AdManager::get_preload_ads: url error: ' + str(e))
        except:
            log_exc()
    if not got_success:
        # All servers failed -- False, not an empty list.
        return False
    return ads
def send_event(self, tracking_url_list, add_sign):
    """Fire each tracking URL in *tracking_url_list* (best effort, failures
    swallowed).

    When *add_sign* is true, a nonce (r), timestamp (t) and sha1 signature
    (s) over the sorted unquoted query parameters + REQUEST_SECRET are
    appended to each URL's query string before the request.
    """
    for url in tracking_url_list:
        if DEBUG:
            log('AdManager::send_event: url', url, 'add_sign', add_sign)
        if add_sign:
            urldata = list(urlparse(url))
            params = []
            query = urldata[4]
            if len(query) > 0:
                params = query.split('&')
            random_number = random.randint(1, sys.maxint)
            params.append('r=' + str(random_number))
            params.append('t=' + str(long(time.time())))
            payload = []
            for param in params:
                # NOTE(review): assumes each parameter is exactly 'name=value';
                # a second '=' in a value would raise ValueError here.
                name, value = param.split('=')
                payload.append(name + '=' + urllib.unquote_plus(value))
            # Signature is computed over the sorted, unquoted parameters.
            payload = '#'.join(sorted(payload))
            sig = hashlib.sha1(payload + self.REQUEST_SECRET).hexdigest()
            params.append('s=' + sig)
            query = '&'.join(params)
            urldata[4] = query
            url = urlunparse(urldata)
            if DEBUG:
                log('admanager::send_event: added request signature: params', params, 'payload', payload, 'url', url)
        try:
            stream = urlOpenTimeout(url, timeout=30, cookiejar=self.cookie_jar)
            response = stream.read()
            stream.close()
        except:
            # Tracking failures are non-fatal by design.
            if DEBUG:
                print_exc()
def send_error(self, tracking_url_list, error_code, error_description, delayed = False):
    """Fire VAST error-tracking URLs, substituting the [ERRORCODE] and
    [ERRORDESCRIPTION] macros.

    With delayed=True the work is re-scheduled on the baseapp's delayed
    queue and this call returns immediately.
    """
    if delayed:
        if DEBUG:
            log('admanager::send_error: schedule delayed execution')
        self.baseapp.run_delayed(self.send_error, args=[tracking_url_list,
                                                        error_code,
                                                        error_description,
                                                        False])
        return
    for url in tracking_url_list:
        if DEBUG:
            log('AdManager::send_error: url', url, 'error_code', error_code, 'error_description', error_description)
        try:
            # NOTE(review): quote_plus is re-applied on every iteration, so
            # the description is double-encoded for the 2nd+ URL -- looks
            # unintended; confirm before relying on the emitted value.
            error_description = urllib.quote_plus(str(error_description))
        except:
            if DEBUG:
                print_exc()
        try:
            url = url.replace('[ERRORCODE]', str(error_code))
            url = url.replace('[ERRORDESCRIPTION]', error_description)
            stream = urlOpenTimeout(url, timeout=30, cookiejar=self.cookie_jar)
            response = stream.read()
            stream.close()
        except:
            # Error tracking is best effort.
            if DEBUG:
                print_exc()
def parse_vast_response(self, ad_server_url, response, request_random, request_data, max_redirects, redirects = 0):
    """Parse a VAST 1.0/2.0 XML response into inline ad dicts.

    Wrapper ads are fetched recursively (via start_all_wrapper_requests) up
    to *max_redirects* levels and within request_data['timeout'] seconds of
    request_data['start_time'].  The primary response (redirects == 0) must
    come from TS_ADSYSTEM and supplies ad_settings (min/max ads duration);
    deeper responses inherit settings recorded in request_data.

    Returns (inline_ads, ad_settings); raises BadResponseException on any
    structural problem and AdBlockDetectedException (from wrapper handling)
    when too many network errors accumulate.
    """
    if DEBUG:
        log('admanager::parse_vast_response: request_data', request_data, 'max_redirects', max_redirects, 'redirects', redirects, 'response', response)
    if len(response) == 0:
        raise BadResponseException('Empty response')
    doc = parseString(response)
    root = doc.documentElement
    # Accept <VAST version="1.0|2.0"> or the legacy VAST-1 root element.
    if root.tagName == 'VAST':
        ver = root.getAttribute('version')
        if len(ver) == 0:
            raise BadResponseException, 'Missing vast version'
        if ver == '2.0':
            vast_version = 2
        elif ver == '1.0':
            vast_version = 1
        else:
            raise BadResponseException, 'Unsupported vast version ' + ver
    elif root.tagName == 'VideoAdServingTemplate':
        vast_version = 1
    else:
        raise BadResponseException, 'Bad response tagname: ' + root.tagName
    if redirects == 0:
        primary_response = True
    else:
        primary_response = False
    first_ad = True
    ad_settings = {}
    inline_ads = []
    wrapper_ads = []
    for e_ad in domutils.get_children_by_tag_name(root, 'Ad'):
        # Each <Ad> must contain exactly one <InLine> XOR one <Wrapper>.
        inline = domutils.get_children_by_tag_name(e_ad, 'InLine')
        wrapper = domutils.get_children_by_tag_name(e_ad, 'Wrapper')
        if len(inline) == 0 and len(wrapper) == 0:
            raise BadResponseException, 'InLine or Wrapper expected'
        if len(inline) > 0 and len(wrapper) > 0:
            raise BadResponseException, 'Single InLine or Wrapper expected'
        data = {}
        ad_root = None
        if len(inline) > 0:
            if len(inline) > 1:
                raise BadResponseException, 'Single InLine expected'
            data['type'] = 'inline'
            ad_root = inline[0]
            data['ad_server_url'] = ad_server_url
            data['adsystem'] = self.vast_parse_adsystem(vast_version, ad_root)
            data['impressions'] = self.vast_parse_impressions(vast_version, ad_root)
            data['errors'] = self.vast_parse_errors(vast_version, ad_root)
            data['creatives'] = self.vast_parse_creatives(vast_version, ad_root, is_wrapper=False, adsystem=data['adsystem']['name'])
        elif len(wrapper) > 0:
            if len(wrapper) > 1:
                raise BadResponseException, 'Single Wrapper expected'
            data['type'] = 'wrapper'
            data['max_redirects'] = max_redirects
            ad_root = wrapper[0]
            data['vast_redirect_url'] = self.vast_parse_redirect(vast_version, ad_root)
            data['adsystem'] = self.vast_parse_adsystem(vast_version, ad_root)
            data['impressions'] = self.vast_parse_impressions(vast_version, ad_root)
            data['errors'] = self.vast_parse_errors(vast_version, ad_root)
            data['creatives'] = self.vast_parse_creatives(vast_version, ad_root, is_wrapper=True, adsystem=data['adsystem']['name'])
        # TorrentStream-specific <Extensions> payload (durations, timeouts,
        # redirect limits, signature material).
        ext_data = self.vast_parse_ts_extension(ad_root)
        if primary_response:
            if data['adsystem']['name'] != self.TS_ADSYSTEM:
                raise BadResponseException, 'Bad AdSystem for the primary response'
            if first_ad:
                # The first ad of the primary response must define the ad
                # block duration window and may tighten the request timeout.
                if ext_data['min_ads_duration'] is None:
                    raise BadResponseException, 'Missing MinAdsDuration'
                if ext_data['max_ads_duration'] is None:
                    raise BadResponseException, 'Missing MaxAdsDuration'
                ad_settings['min_ads_duration'] = ext_data['min_ads_duration']
                ad_settings['max_ads_duration'] = ext_data['max_ads_duration']
                if ext_data['request_timeout'] is not None:
                    request_data['timeout'] = ext_data['request_timeout']
                    if DEBUG:
                        log('admanager::parse_vast_response: update from ext_data: request_timeout', request_data['timeout'])
            if data['type'] == 'wrapper':
                # Per-wrapper overrides carried down from the primary response.
                if ext_data['max_redirects'] is not None:
                    data['max_redirects'] = ext_data['max_redirects']
                    if DEBUG:
                        log('admanager::parse_vast_response: update from ext_data: max_redirects', data['max_redirects'])
                if ext_data['check_ts_id'] == 'no':
                    data['check_ts_id'] = False
                else:
                    data['check_ts_id'] = True
                if ext_data['predownload'] == 'yes':
                    data['predownload'] = True
                else:
                    data['predownload'] = False
                try:
                    priority = int(ext_data.get('priority', 1000))
                except:
                    priority = 1000
                data['priority'] = priority
                data['check_duration'] = ext_data['check_duration']
        else:
            # Non-primary responses inherit the settings the primary response
            # stashed into request_data.
            data['check_ts_id'] = request_data['check_ts_id']
            data['predownload'] = request_data['predownload']
            data['priority'] = request_data['priority']
            data['check_duration'] = request_data['check_duration']
            if DEBUG:
                log('admanager::parse_vast_response: inherit settings from primary response: check_ts_id', data['check_ts_id'], 'predownload', data['predownload'], 'priority', data['priority'], 'check_duration', data['check_duration'])
        check_signature = False
        if data['adsystem']['name'] == self.TS_ADSYSTEM:
            check_signature = True
        if first_ad:
            first_ad = False
            # Only the first TS ad carries the response signature to verify.
            if check_signature:
                self.vast_check_signature(response, request_random, ext_data['response_random'], ext_data['response_sig'])
        if data['type'] == 'inline':
            inline_ads.append(data)
        else:
            wrapper_ads.append(data)
    if DEBUG:
        log('admanager::parse_vast_response: inline_ads', inline_ads, 'wrapper_ads', wrapper_ads)
    if len(wrapper_ads):
        # Resolve wrapper redirects (possibly in parallel threads) and merge
        # the resulting inline ads into this response's list.
        wrapper_inline_ads = self.start_all_wrapper_requests(wrapper_ads, request_random, request_data, redirects)
        if wrapper_inline_ads is not None:
            inline_ads.extend(wrapper_inline_ads)
    return (inline_ads, ad_settings)
def start_all_wrapper_requests(self, wrapper_ads, request_random, request_data, redirects):
    """Fetch all wrapper redirects in parallel, grouped by priority.

    Wrappers are sorted by 'priority' and bucketed into levels (one bucket
    per distinct ascending priority).  All threads start at once, but levels
    are *consumed* in order: the first finished wrapper of the lowest level
    that yields ads wins.  Repeated HTTP/URL errors feed the ad-block
    heuristic and may raise AdBlockDetectedException.  Returns the winner's
    inline ads (with the wrapper's impressions/errors/tracking merged in) or
    None when nothing succeeded.
    """
    wrapper_ads.sort(key=lambda wrapper: wrapper['priority'])
    threads = [[]]
    level = 0
    prev_priority = -1
    for wrapper in wrapper_ads:
        if redirects >= wrapper['max_redirects']:
            if DEBUG:
                log('admanager::start_all_wrapper_requests: max redirects reached: url', wrapper['vast_redirect_url'], 'redirects', redirects, 'max', wrapper['max_redirects'])
            self.send_error(wrapper['errors'], 302, 'max_redirects_reached', True)
            continue
        # Remaining budget of the overall ad request.
        t = time.time() - request_data['start_time']
        if t >= request_data['timeout']:
            if DEBUG:
                log('admanager::start_all_wrapper_requests: request timed out, stop: time', t, 'timeout', request_data['timeout'])
            self.send_error(wrapper['errors'], 301, 'wrapper_timeout', True)
            break
        time_left = int(request_data['timeout'] - t)
        if time_left == 0:
            time_left = 1
        url = wrapper['vast_redirect_url']
        if DEBUG:
            log('admanager::start_all_wrapper_requests: fetch wrapper url: url', url, 'time_left', time_left, 'max_redirects', wrapper['max_redirects'])
        if redirects == 0:
            # First hop: copy request_data and record this wrapper's settings
            # so deeper redirects inherit them.
            request_data = request_data.copy()
            request_data['check_ts_id'] = wrapper['check_ts_id']
            request_data['predownload'] = wrapper['predownload']
            request_data['priority'] = wrapper['priority']
            request_data['check_duration'] = wrapper['check_duration']
            if DEBUG:
                log('admanager::start_all_wrapper_requests: set inherited settings: url', url, 'request_data', request_data)
        thread, retval = self.start_single_wrapper_request_thread(wrapper, url, time_left, request_random, request_data, wrapper['max_redirects'], redirects)
        # Open a new level whenever priority strictly increases.
        if prev_priority != -1 and prev_priority < wrapper['priority']:
            threads.append([])
            level = len(threads) - 1
            if DEBUG:
                log('admanager::start_all_wrapper_requests: create new priority level: prev_priority', prev_priority, 'priority', wrapper['priority'], 'level', level)
        prev_priority = wrapper['priority']
        if DEBUG:
            log('admanager::start_all_wrapper_requests: append wrapper: priority', prev_priority, 'level', level)
        threads[level].append((thread, retval, wrapper))
    for t in threads:
        for thread, retval, wrapper in t:
            if DEBUG:
                log('admanager::start_all_wrapper_requests: start wrapper thread: url', wrapper['vast_redirect_url'], 'thread', thread.name)
            thread.start()
    wrapper_inline_ads = None
    selected_wrapper = None
    level = 0
    # Poll levels in priority order; stay on a level until all its threads
    # finish or one of them produces ads.
    while level < len(threads):
        if DEBUG:
            log('admanager::start_all_wrapper_requests: wait for threads at level', level)
        got_unfinished_thread = False
        for thread, retval, wrapper in threads[level]:
            if DEBUG:
                log('admanager::start_all_wrapper_requests: wait for thread', thread.name)
            if thread.is_alive():
                if DEBUG:
                    log('admanager::start_all_wrapper_requests: thread is not finished, check next: thread', thread.name)
                got_unfinished_thread = True
                continue
            if retval.get('finished', False):
                # Already consumed in an earlier poll pass.
                if DEBUG:
                    log('admanager::start_all_wrapper_requests: thread is already finished, do not process: thread', thread.name)
                continue
            retval['finished'] = True
            if DEBUG:
                log('admanager::start_all_wrapper_requests: thread finished: thread', thread.name, 'retval', retval)
            if retval['ads'] is not None:
                if DEBUG:
                    log('admanager::start_all_wrapper_requests: got ads, select wrapper: thread', thread.name)
                wrapper_inline_ads = retval['ads']
                selected_wrapper = wrapper
                break
            elif isinstance(retval['error'], HTTPError) or isinstance(retval['error'], URLError):
                if DEBUG:
                    log('admanager::start_all_wrapper_requests: got network error: thread', thread.name)
                # Too many network errors on this host => assume ad blocker.
                if not self.check_network_errors(wrapper['vast_redirect_url']):
                    raise AdBlockDetectedException
            elif DEBUG:
                log('admanager::start_all_wrapper_requests: got error: thread', thread.name)
        if selected_wrapper is not None:
            if DEBUG:
                log('admanager::start_all_wrapper_requests: got selected wrapper, break: thread', thread.name)
            break
        if got_unfinished_thread:
            if DEBUG:
                log('admanager::start_all_wrapper_requests: got unfinished thread, wait')
            time.sleep(0.1)
            continue
        elif DEBUG:
            log('admanager::start_all_wrapper_requests: no unfinished threads, check next level')
        level += 1
    if wrapper_inline_ads is not None:
        # Merge the winning wrapper's tracking into every returned inline ad.
        for ad in wrapper_inline_ads:
            ad['impressions'].extend(selected_wrapper['impressions'])
            ad['errors'].extend(selected_wrapper['errors'])
            for creative in ad['creatives']:
                tracking = self.vast_get_wrapper_tracking(selected_wrapper, creative['type'], creative['adid'])
                if len(tracking) > 0:
                    creative['tracking'].extend(tracking)
    return wrapper_inline_ads
def start_single_wrapper_request_thread(self, wrapper, url, timeout, request_random, request_data, max_redirects, redirects):
retval = {'ads': None,
'error': None}
t = Thread(target=self.start_single_wrapper_request, args=[wrapper,
retval,
url,
timeout,
request_random,
request_data,
max_redirects,
redirects])
t.name = 'WrapperRequest' + t.name
t.daemon = True
return (t, retval)
def start_single_wrapper_request(self, wrapper, retval, url, timeout, request_random, request_data, max_redirects, redirects):
    """Thread body: fetch one wrapper URL, parse it recursively, and report
    the outcome into *retval* ('ads' on success, 'error' on failure).

    Never raises: parse failures fire VAST error code 100, network/other
    failures fire 301; HTTP/URL errors additionally bump the per-host
    network-error counter (ad-block heuristic), any other outcome resets it.
    """
    got_network_error = False
    try:
        if DEBUG:
            # 't' is only defined when DEBUG is on; the timing logs below are
            # all guarded by the same flag.
            t = time.time()
            log('admanager::start_single_wrapper_request: start request to wrapper: url', url, 'timeout', timeout, 'cookies', self.cookie_jar)
        stream = urlOpenTimeout(url, timeout, cookiejar=self.cookie_jar)
        response = stream.read()
        stream.close()
        if DEBUG:
            log('admanager::start_single_wrapper_request: finished request to wrapper: url', url, 'cookies', self.cookie_jar, 'time', time.time() - t)
        wrapper_inline_ads, _ = self.parse_vast_response(url, response, request_random, request_data, max_redirects, redirects + 1)
        retval['ads'] = wrapper_inline_ads
    except BadResponseException as e:
        try:
            errmsg = str(e)
        except:
            errmsg = 'unknown error'
        if DEBUG:
            log('admanager::start_single_wrapper_request: failed to parse wrapper response: url', url, 'time', time.time() - t, 'err', errmsg)
            print_exc()
        self.send_error(wrapper['errors'], 100, errmsg, True)
        retval['error'] = e
    except Exception as e:
        try:
            errmsg = str(e)
        except:
            errmsg = 'unknown error'
        if DEBUG:
            log('admanager::start_single_wrapper_request: failed request to wrapper: url', url, 'time', time.time() - t, 'err', errmsg)
            print_exc()
        if isinstance(e, (HTTPError, URLError)):
            got_network_error = True
        self.send_error(wrapper['errors'], 301, errmsg, True)
        retval['error'] = e
    if got_network_error:
        if DEBUG:
            log('admanager::start_single_wrapper_request: got network error: url', url)
        self.inc_network_errors(url)
    else:
        if DEBUG:
            log('admanager::start_single_wrapper_request: reset network error: url', url)
        self.reset_network_errors(url)
def get_network_errors(self):
return self.network_errors
def check_network_errors(self, url):
urldata = urlparse(url)
host = urldata.hostname
errors = self.network_errors.get(host, 0)
if errors >= self.AD_BLOCK_MAX_NETWORK_ERRORS:
if DEBUG:
log('admanager::check_network_errors: too much errors: host', host, 'errors', errors)
return False
return True
def inc_network_errors(self, url):
    """Bump the consecutive-failure counter for *url*'s host; never raises."""
    try:
        host = urlparse(url).hostname
        self.network_errors[host] = self.network_errors.get(host, 0) + 1
        if DEBUG:
            log('admanager::inc_network_errors: host', host, 'errors', self.network_errors[host])
    except:
        if DEBUG:
            print_exc()
def reset_network_errors(self, url):
    """Zero the consecutive-failure counter for *url*'s host; never raises."""
    try:
        host = urlparse(url).hostname
        self.network_errors[host] = 0
        if DEBUG:
            log('admanager::reset_network_errors: host', host, 'errors', self.network_errors[host])
    except:
        if DEBUG:
            print_exc()
def format_vast_ads(self, vast_ads, ad_settings, include_interruptable_ads = True):
    """Turn parsed inline VAST ads into the player's flat ad-dict format.

    Per creative: keeps only linear creatives with a usable media file,
    rewrites TS tracking URIs for foreign ad systems, resolves torrent vs
    direct download, then orders the result: a non-interruptable "main block"
    filling [min_ads_duration, max_ads_duration], then overflow ads (forced
    interruptable), then interruptable ads.  Also maintains ad_first_count so
    creatives rotate in first position across requests.
    """
    formatted_ads = []
    for ad in vast_ads:
        if ad['type'] != 'inline':
            raise ValueError, 'Inline ad expected'
        impressions = []
        for impression in ad['impressions']:
            uri = impression['uri']
            # TS-issued impression URIs served through a foreign ad system
            # get the wrapper adsystem stamped into them.
            if impression['id'] == self.TS_ADSYSTEM and ad['adsystem']['name'] != self.TS_ADSYSTEM:
                uri = self.update_wrapper_tracking_uri(uri, ad['adsystem']['name'], None)
            impressions.append(uri)
        first_creative = True
        for creative in ad['creatives']:
            skip_ad = False
            events = {'error': ad['errors'],
                      'creativeView': [],
                      'start': [],
                      'firstQuartile': [],
                      'midpoint': [],
                      'thirdQuartile': [],
                      'complete': []}
            if creative['type'] != 'linear':
                if DEBUG:
                    log('admanager::format_vast_ads: skip creative: type', creative['type'])
                continue
            if creative['interruptable'] is not None and creative['interruptable'] == 'yes':
                interruptable = True
            else:
                interruptable = False
            if interruptable and not include_interruptable_ads:
                if DEBUG:
                    log('admanager::format_vast_ads: skip interruptable ad')
                continue
            if creative['wait_preload'] is not None and creative['wait_preload'] == 'yes':
                wait_preload = True
            else:
                wait_preload = False
            mediafile = self.select_media_file(creative['files'])
            if mediafile is None:
                if DEBUG:
                    log('admanager::format_vast_ads:format: skip creative with no suitable files')
                continue
            # Bucket tracking URIs by event name, rewriting TS URIs like above.
            for tracking in creative['tracking']:
                if events.has_key(tracking['event']):
                    uri = tracking['uri']
                    if len(uri) == 0:
                        continue
                    if tracking['adsystem'] == self.TS_ADSYSTEM and ad['adsystem']['name'] != self.TS_ADSYSTEM:
                        uri = self.update_wrapper_tracking_uri(uri, ad['adsystem']['name'], creative['adid'])
                    events[tracking['event']].append(uri)
                    if DEBUG:
                        log('admanager::format_vast_ads: set event handler: event', tracking['event'], 'uri', uri)
                elif DEBUG:
                    log('admanager::format_vast_ads: unknown event', tracking['event'])
            if creative['placement'] is None:
                placement = 'preroll'
            else:
                placement = creative['placement']
            try:
                sequence = int(creative['sequence'])
            except:
                sequence = 1000
            if ad['check_duration']:
                duration = self.duration_from_string(creative['duration'])
                if DEBUG:
                    log('admanager::format_vast_ads: use vast duration:', duration)
            else:
                if DEBUG:
                    log('admanager::format_vast_ads: no duration check, set zero duration')
                duration = 0
            formatted_ad = {'adsystem': ad['adsystem']['name'],
                            'ad_server_url': ad['ad_server_url'],
                            'adsystem_version': ad['adsystem']['version'],
                            'duration': duration,
                            'click_through': creative.get('click_through', None),
                            'placement': placement,
                            'interruptable': interruptable,
                            'wait_preload': wait_preload,
                            'predownload': ad.get('predownload', False),
                            'tracking': {},
                            'sequence': sequence}
            # Impressions are attached to the first creative of each ad only.
            if first_creative:
                formatted_ad['tracking']['impression'] = impressions
                first_creative = False
            formatted_ad['tracking'].update(events)
            if creative['ts_ad_id'] is not None:
                # TS ad id present: the creative is delivered as a torrent.
                formatted_ad['dltype'] = DLTYPE_TORRENT
                formatted_ad['ad_id'] = creative['ts_ad_id']
                tdef = self.baseapp.get_torrent_from_adid(creative['ts_ad_id'])
                if tdef is None:
                    if DEBUG:
                        log('admanager::format_vast_ads: failed to get torrent from ad-id: ad_id', creative['ts_ad_id'])
                    skip_ad = True
                else:
                    if DEBUG:
                        log('admanager::format_vast_ads: got torrent from ad-id: ad_id', creative['ts_ad_id'], 'infohash', binascii.hexlify(tdef.get_infohash()))
                    formatted_ad['tdef'] = tdef
            else:
                # Direct-download fallback; skipped when the ad system
                # requires a TS ad id (check_ts_id).
                if ad['check_ts_id']:
                    if DEBUG:
                        log('admanager::format_vast_ads: missing mandatory ts-ad-id, skip ad')
                    skip_ad = True
                formatted_ad['dltype'] = DLTYPE_DIRECT
                formatted_ad['url'] = mediafile['uri']
            if formatted_ad.has_key('ad_id'):
                creative_id = formatted_ad['ad_id']
            else:
                creative_id = formatted_ad['url']
            formatted_ad['creative_id'] = hashlib.sha1(creative_id).hexdigest()
            formatted_ad['count_first'] = self.ad_first_count.get(formatted_ad['creative_id'], 0)
            if not skip_ad:
                formatted_ads.append(formatted_ad)
    if DEBUG:
        s = '\n'
        for ad in formatted_ads:
            s += ad['adsystem'] + '|d=' + str(ad['duration']) + '|t=' + str(ad['dltype']) + '|i=' + str(ad['interruptable']) + '|w=' + str(ad['wait_preload']) + '|seq=' + str(ad['sequence']) + '|first=' + str(ad['count_first']) + '|'
            if ad['dltype'] == DLTYPE_DIRECT:
                s += ad['url']
            else:
                s += binascii.hexlify(ad['tdef'].get_infohash())
            s += '\n'
        log('admanager::format_vast_ads: unsorted formatted ads:', s)
    # Sort by sequence, then rotate ads served first less often to the front;
    # random tiebreak shuffles equals.
    formatted_ads.sort(key=lambda ad: (ad['sequence'], ad['count_first'], random.randint(1, sys.maxint)))
    main_ads = []
    additional_ads = []
    interruptable_ads = []
    main_block_duration = 0
    for ad in formatted_ads:
        if ad['interruptable']:
            interruptable_ads.append(ad)
        else:
            # Fill the main block until min_ads_duration is reached without
            # exceeding max_ads_duration; overflow ads become interruptable.
            if main_block_duration < ad_settings['min_ads_duration'] and main_block_duration + ad['duration'] <= ad_settings['max_ads_duration']:
                add_to_main_block = True
            else:
                add_to_main_block = False
            if DEBUG:
                log('admanager::format_vast_ads:sort: duration', ad['duration'], 'total', main_block_duration, 'min', ad_settings['min_ads_duration'], 'max', ad_settings['max_ads_duration'], 'add', add_to_main_block)
            if add_to_main_block:
                main_ads.append(ad)
                main_block_duration += ad['duration']
            else:
                ad['interruptable'] = True
                additional_ads.append(ad)
    formatted_ads = []
    formatted_ads.extend(main_ads)
    formatted_ads.extend(additional_ads)
    formatted_ads.extend(interruptable_ads)
    if len(formatted_ads):
        # Remember who played first so the rotation key above advances.
        _id = formatted_ads[0]['creative_id']
        self.ad_first_count.setdefault(_id, 0)
        self.ad_first_count[_id] += 1
    if DEBUG:
        s = '\n'
        for ad in formatted_ads:
            s += ad['adsystem'] + '|d=' + str(ad['duration']) + '|t=' + str(ad['dltype']) + '|i=' + str(ad['interruptable']) + '|seq=' + str(ad['sequence']) + '|first=' + str(ad['count_first']) + '|'
            if ad['dltype'] == DLTYPE_DIRECT:
                s += ad['url']
            else:
                s += binascii.hexlify(ad['tdef'].get_infohash())
            s += '\n'
        log('admanager::format_vast_ads: sorted formatted ads:', s)
    return formatted_ads
def select_media_file(self, files):
    """Pick the best media file from a VAST <MediaFiles> list.

    Only files whose delivery mode is 'progressive' are considered;
    among those the highest numeric bitrate wins.  A missing or
    malformed 'bitrate' attribute is treated as 0.

    Returns the selected file dict, or None when the list is empty or
    contains no progressive file (callers must handle None).
    """
    if len(files) == 0:
        return
    if len(files) == 1:
        # Single candidate: returned as-is, even if not progressive
        # (preserves historical behavior).
        return files[0]
    selected_file = None
    max_bitrate = 0
    for f in files:
        if f['delivery'] != 'progressive':
            continue
        try:
            bitrate = int(f['bitrate'])
        except Exception:
            bitrate = 0
        # BUGFIX: previously the first selected file's bitrate was never
        # stored in max_bitrate, so a later file with a lower (but
        # positive) bitrate could displace a higher-bitrate first file.
        if selected_file is None or bitrate > max_bitrate:
            selected_file = f
            max_bitrate = bitrate
    if DEBUG:
        log('admanager::select_media_file: files', files, 'selected_file', selected_file)
    return selected_file
def vast_parse_redirect(self, vast_version, root):
    """Extract the wrapper redirect URI from a VAST wrapper ad.

    VAST 2.0 carries the next ad tag in <VASTAdTagURI>; VAST 1.0 nests
    it as <VASTAdTagURL><URL>.  Surrounding whitespace is stripped.

    Raises BadResponseException for unsupported VAST versions.
    """
    if vast_version == 2:
        e = domutils.get_single_element(root, 'VASTAdTagURI')
        redirect_uri = domutils.get_node_text(e)
    elif vast_version == 1:
        e = domutils.get_single_element(root, 'VASTAdTagURL')
        e = domutils.get_single_element(e, 'URL')
        redirect_uri = domutils.get_node_text(e)
    else:
        # Call-style raise: valid on both Python 2 and 3
        # (was the py2-only "raise Exc, 'msg'" form).
        raise BadResponseException('Unknown vast version: ' + str(vast_version))
    redirect_uri = redirect_uri.strip(' \t\r\n')
    if DEBUG:
        # Was log('%%%%', ...) — a leftover placeholder marker; now a
        # descriptive message consistent with the rest of the module.
        log('admanager::vast_parse_redirect: redirect_uri', redirect_uri)
    return redirect_uri
def vast_parse_adsystem(self, vast_version, root):
    """Return the ad server identity declared in the <AdSystem> element.

    Result is a dict with 'name' (element text) and 'version'
    (the element's 'version' attribute, '' when absent).
    """
    adsystem_node = domutils.get_single_element(root, 'AdSystem')
    name = domutils.get_node_text(adsystem_node)
    version = adsystem_node.getAttribute('version')
    return {'name': name, 'version': version}
def vast_parse_impressions(self, vast_version, root):
    """Collect impression-tracking URIs from a VAST ad element.

    VAST 2.0 lists <Impression> elements directly under the ad; VAST 1.0
    wraps <URL> elements inside a single optional <Impression> element.
    Entries whose text is empty are skipped.

    Returns a list of {'uri': ..., 'id': ...} dicts ('id' is the
    element's id attribute, '' when absent).
    Raises BadResponseException for unsupported VAST versions.
    """
    # Select the per-version URI-bearing elements once, then run a single
    # shared loop (the two branches previously duplicated the append logic).
    if vast_version == 2:
        uri_elements = domutils.get_children_by_tag_name(root, 'Impression')
    elif vast_version == 1:
        e_impression = domutils.get_single_element(root, 'Impression', False)
        if e_impression is None:
            uri_elements = []
        else:
            uri_elements = domutils.get_children_by_tag_name(e_impression, 'URL')
    else:
        # Call-style raise: valid on both Python 2 and 3.
        raise BadResponseException('Unknown vast version: ' + str(vast_version))
    impressions = []
    for e in uri_elements:
        uri = domutils.get_node_text(e)
        if uri:
            impressions.append({'uri': uri,
                                'id': e.getAttribute('id')})
    return impressions
def vast_parse_errors(self, vast_version, root):
    """Collect error-tracking URLs from a VAST ad element.

    VAST 2.0 lists <Error> elements directly under the ad; VAST 1.0
    wraps <URL> elements inside a single optional <Error> element.
    Empty URLs are skipped.

    Returns a list of URL strings.
    Raises BadResponseException for unsupported VAST versions.
    """
    # Mirror of vast_parse_impressions: pick the per-version elements,
    # then share one extraction loop (removes the duplicated branches).
    if vast_version == 2:
        url_elements = domutils.get_children_by_tag_name(root, 'Error')
    elif vast_version == 1:
        e_error = domutils.get_single_element(root, 'Error', False)
        if e_error is None:
            url_elements = []
        else:
            url_elements = domutils.get_children_by_tag_name(e_error, 'URL')
    else:
        # Call-style raise: valid on both Python 2 and 3.
        raise BadResponseException('Unknown vast version: ' + str(vast_version))
    errors = []
    for e in url_elements:
        url = domutils.get_node_text(e)
        if url:
            errors.append(url)
    return errors
def vast_parse_ts_extension(self, root):
    """Parse the TS-specific <Extension> block of a VAST response.

    Walks <Extensions>/<Extension type=TS_ADSYSTEM> and fills ext_data
    with any values found; keys keep their defaults when an element is
    absent, empty, or (for integer fields) non-numeric.  When several
    matching Extension elements exist, later ones overwrite earlier
    values, as before.

    Returns the ext_data dict.
    """
    ext_data = {'response_random': None,
                'response_sig': None,
                'max_redirects': None,
                'request_timeout': None,
                'check_ts_id': '',
                'predownload': 'no',
                'min_ads_duration': None,
                'max_ads_duration': None,
                'priority': 1000,
                'check_duration': False}

    def parse_int_child(parent, tag, key):
        # Store int(text) of an optional child element into ext_data[key].
        # Silently keeps the current value when the element is missing,
        # its text is empty, or the text is not an integer (same
        # semantics as the previous copy-pasted try/except blocks).
        e = domutils.get_single_element(parent, tag, False)
        if e is not None:
            value = domutils.get_node_text(e)
            if len(value) > 0:
                try:
                    ext_data[key] = int(value)
                except (TypeError, ValueError):
                    pass

    e_extensions = domutils.get_single_element(root, 'Extensions', False)
    if e_extensions is not None:
        for e_extension in domutils.get_children_by_tag_name(e_extensions, 'Extension'):
            if e_extension.getAttribute('type') != self.TS_ADSYSTEM:
                continue
            e_response_data = domutils.get_single_element(e_extension, 'ResponseData', False)
            if e_response_data is not None:
                value = domutils.get_node_text(e_response_data)
                if len(value) > 0:
                    ext_data['response_random'] = value
                value = e_response_data.getAttribute('sig')
                if len(value) > 0:
                    ext_data['response_sig'] = value
            parse_int_child(e_extension, 'MaxRedirects', 'max_redirects')
            parse_int_child(e_extension, 'RequestTimeout', 'request_timeout')
            parse_int_child(e_extension, 'MinAdsDuration', 'min_ads_duration')
            parse_int_child(e_extension, 'MaxAdsDuration', 'max_ads_duration')
            e_check_id = domutils.get_single_element(e_extension, 'CheckID', False)
            if e_check_id is not None:
                ext_data['check_ts_id'] = domutils.get_node_text(e_check_id)
            e_predownload = domutils.get_single_element(e_extension, 'Predownload', False)
            if e_predownload is not None:
                ext_data['predownload'] = domutils.get_node_text(e_predownload)
            parse_int_child(e_extension, 'Priority', 'priority')
            e_check_duration = domutils.get_single_element(e_extension, 'CheckDuration', False)
            if e_check_duration is not None:
                # Anything other than the literal string 'yes' means False.
                ext_data['check_duration'] = bool(domutils.get_node_text(e_check_duration) == 'yes')
    if DEBUG:
        log('admanager:vast_parse_ts_extension: ext_data', ext_data)
    return ext_data
def vast_check_signature(self, response, request_random, response_random, response_sig):
    """Verify the RSA/SHA1 signature and anti-replay random of a VAST response.

    The signature covers the raw response text with the sig attribute
    removed from <ResponseData>; the public key is self.RESPONSE_PUBKEY.

    Returns True on success; raises BadResponseException on any failure
    (missing fields, random mismatch, bad signature).
    """
    if DEBUG:
        log('admanager::vast_check_signature: response data: response_random', response_random, 'response_sig', response_sig)
    if response_random is None:
        raise BadResponseException('Missing response random')
    if response_sig is None:
        raise BadResponseException('Missing response sig')
    try:
        response_random = int(response_random)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures expected here.
        raise BadResponseException('Non-integer response random')
    if response_random != request_random:
        if DEBUG:
            log('admanager::vast_check_signature: bad response random: response_random', response_random, 'request_random', request_random)
        raise BadResponseException('Bad response random')
    try:
        # Reconstruct the signed payload: the response with the sig
        # attribute stripped from the <ResponseData> opening tag.
        payload = response.replace('<ResponseData sig="' + response_sig + '"', '<ResponseData')
        bio = BIO.MemoryBuffer(self.RESPONSE_PUBKEY)
        pubkey = RSA.load_pub_key_bio(bio)
        signature = base64.b64decode(response_sig)
        sign_ok = pubkey.verify(hashlib.sha1(payload).digest(), signature)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are not swallowed; any crypto/decoding error is a bad response.
        if DEBUG:
            print_exc()
        raise BadResponseException('Failed to verify data')
    if not sign_ok:
        raise BadResponseException('Failed to verify data')
    return True
def vast_get_wrapper_tracking(self, wrapper, creative_type, creative_ad_id = None):
    """Collect tracking entries from a wrapper's creatives.

    Only creatives of the requested type contribute.  A creative that
    declares an 'adid' applies solely to the matching creative_ad_id;
    creatives without an 'adid' apply unconditionally.
    """
    tracking = []
    for creative in wrapper['creatives']:
        if creative['type'] != creative_type:
            continue
        adid = creative['adid']
        # An adid-qualified wrapper creative is skipped unless the caller
        # supplied exactly that ad id (a None creative_ad_id never matches).
        if adid is not None and adid != creative_ad_id:
            continue
        tracking.extend(creative['tracking'])
    if DEBUG:
        log('admanager::vast_get_wrapper_tracking: wrapper', wrapper, 'creative_type', creative_type, 'creative_ad_id', creative_ad_id, 'tracking', tracking)
    return tracking
def update_wrapper_tracking_uri(self, uri, adsystem, ad_id):
    """Append adsystem/adid query parameters to a tracking URI.

    Returns the URI unchanged when there is nothing to append.  Adds a
    '?' when the URI has no query string yet, or a '&' separator when it
    has one and does not already end with '&'.
    """
    if not adsystem and ad_id is None:
        return uri
    if '?' not in uri:
        separator = '?'
    elif uri.endswith('&'):
        separator = ''
    else:
        separator = '&'
    params = []
    if adsystem:
        params.append('adsystem=' + urllib.quote_plus(adsystem))
    if ad_id is not None:
        params.append('adid=' + urllib.quote_plus(ad_id))
    return uri + separator + '&'.join(params)
def vast_parse_creatives(self, vast_version, root, is_wrapper, adsystem):
    """Dispatch creative parsing to the version-specific implementation.

    Delegates to vast_parse_creatives_vast_1 / _vast_2 depending on
    vast_version; raises BadResponseException for any other version.
    """
    if vast_version == 1:
        return self.vast_parse_creatives_vast_1(root, is_wrapper, adsystem)
    if vast_version == 2:
        return self.vast_parse_creatives_vast_2(root, is_wrapper, adsystem)
    # Call-style raise: valid on both Python 2 and 3
    # (was the py2-only "raise Exc, 'msg'" form).
    raise BadResponseException('Unknown vast version: ' + str(vast_version))
def vast_parse_creatives_vast_1(self, root, is_wrapper, adsystem):
    """Parse the creatives of a VAST 1.0 InLine or Wrapper ad.

    Tracking events are read from <TrackingEvents>/<Tracking>/<URL> and
    tagged with the originating adsystem.  For a wrapper only a single
    tracking-carrying linear creative is produced; for an inline ad the
    <Video> element (if present) yields a full linear creative with
    duration, media files and TS-specific <AdParameters>.

    Returns a list of creative dicts (possibly empty).
    """
    creatives = []
    tracking = []
    e_tracking_events = domutils.get_single_element(root, 'TrackingEvents', False)
    if e_tracking_events is not None:
        for e_tracking in domutils.get_children_by_tag_name(e_tracking_events, 'Tracking'):
            # VAST 1.0 nests one or more <URL> elements inside each <Tracking>.
            for e_url in domutils.get_children_by_tag_name(e_tracking, 'URL'):
                tracking.append({'event': e_tracking.getAttribute('event'),
                                 'uri': domutils.get_node_text(e_url),
                                 'adsystem': adsystem})
    if is_wrapper:
        # Wrappers carry no media; only the tracking URIs matter.
        creatives.append({'type': 'linear',
                          'adid': None,
                          'tracking': tracking})
    else:
        e_video = domutils.get_single_element(root, 'Video', False)
        if e_video is not None:
            files = []
            placement = None
            interruptable = None
            ts_ad_id = None
            e_duration = domutils.get_single_element(e_video, 'Duration')
            e_ad_id = domutils.get_single_element(e_video, 'AdID', False)
            # Empty or missing <AdID> is normalized to None.
            if e_ad_id is None:
                ad_id = None
            else:
                ad_id = domutils.get_node_text(e_ad_id)
                if len(ad_id) == 0:
                    ad_id = None
            e_media_files = domutils.get_single_element(e_video, 'MediaFiles')
            for e_media_file in domutils.get_children_by_tag_name(e_media_files, 'MediaFile'):
                # VAST 1.0 puts the location in a nested <URL> element.
                e_url = domutils.get_single_element(e_media_file, 'URL')
                files.append({'uri': domutils.get_node_text(e_url),
                              'id': e_media_file.getAttribute('id'),
                              'delivery': e_media_file.getAttribute('delivery'),
                              'type': e_media_file.getAttribute('type'),
                              'bitrate': e_media_file.getAttribute('bitrate'),
                              'width': e_media_file.getAttribute('width'),
                              'height': e_media_file.getAttribute('height'),
                              'apiFramework': e_media_file.getAttribute('apiFramework')})
            # TS-specific extensions carried inside <AdParameters>.
            e_ad_parameters = domutils.get_single_element(e_video, 'AdParameters', False)
            if e_ad_parameters is not None:
                e_placement = domutils.get_single_element(e_ad_parameters, 'Placement', False)
                e_interruptable = domutils.get_single_element(e_ad_parameters, 'Interruptable', False)
                e_ts_ad_id = domutils.get_single_element(e_ad_parameters, 'TSAdID', False)
                if e_placement is not None:
                    placement = domutils.get_node_text(e_placement)
                if e_interruptable is not None:
                    interruptable = domutils.get_node_text(e_interruptable)
                if e_ts_ad_id is not None:
                    ts_ad_id = domutils.get_node_text(e_ts_ad_id)
                    # Empty <TSAdID> is normalized to None, like <AdID>.
                    if len(ts_ad_id) == 0:
                        ts_ad_id = None
            creatives.append({'type': 'linear',
                              'duration': domutils.get_node_text(e_duration),
                              'adid': ad_id,
                              'tracking': tracking,
                              'files': files,
                              'placement': placement,
                              'interruptable': interruptable,
                              'ts_ad_id': ts_ad_id})
    return creatives
def vast_parse_creatives_vast_2(self, root, is_wrapper, adsystem):
    """Parse the creatives of a VAST 2.0 InLine or Wrapper ad.

    Iterates <Creatives>/<Creative> and handles only <Linear> creatives.
    Wrapper creatives contribute just their tracking events (tagged with
    the originating adsystem); inline creatives additionally yield
    duration, click-through, media files and TS-specific <AdParameters>.

    Returns a list of creative dicts (possibly empty).
    """
    creatives = []
    e_creatives = domutils.get_single_element(root, 'Creatives')
    for e_creative in domutils.get_children_by_tag_name(e_creatives, 'Creative'):
        # Empty AdID attribute is normalized to None.
        ad_id = e_creative.getAttribute('AdID')
        if len(ad_id) == 0:
            ad_id = None
        e_linear = domutils.get_single_element(e_creative, 'Linear', False)
        if e_linear is not None:
            duration = None
            tracking = []
            files = []
            placement = None
            interruptable = None
            ts_ad_id = None
            wait_preload = None
            click_through = None
            if not is_wrapper:
                # Duration is mandatory for inline linear creatives.
                e_duration = domutils.get_single_element(e_linear, 'Duration')
                duration = domutils.get_node_text(e_duration)
                e_video_clicks = domutils.get_single_element(e_linear, 'VideoClicks', False)
                if e_video_clicks is not None:
                    e_click_through = domutils.get_single_element(e_video_clicks, 'ClickThrough', False)
                    if e_click_through is not None:
                        click_through = domutils.get_node_text(e_click_through)
            # Tracking events are collected for wrappers and inline ads alike;
            # in VAST 2.0 the URI is the <Tracking> element's own text.
            e_tracking_events = domutils.get_single_element(e_linear, 'TrackingEvents', False)
            if e_tracking_events is not None:
                a = domutils.get_children_by_tag_name(e_tracking_events, 'Tracking')
                for e_tracking in a:
                    uri = domutils.get_node_text(e_tracking)
                    if uri:
                        tracking.append({'event': e_tracking.getAttribute('event'),
                                         'uri': uri,
                                         'adsystem': adsystem})
            if not is_wrapper:
                # TS-specific extensions carried inside <AdParameters>.
                e_ad_parameters = domutils.get_single_element(e_linear, 'AdParameters', False)
                if e_ad_parameters is not None:
                    e_placement = domutils.get_single_element(e_ad_parameters, 'Placement', False)
                    e_interruptable = domutils.get_single_element(e_ad_parameters, 'Interruptable', False)
                    e_ts_ad_id = domutils.get_single_element(e_ad_parameters, 'TSAdID', False)
                    e_wait_preload = domutils.get_single_element(e_ad_parameters, 'WaitPreload', False)
                    if e_placement is not None:
                        placement = domutils.get_node_text(e_placement)
                    if e_interruptable is not None:
                        interruptable = domutils.get_node_text(e_interruptable)
                    if e_wait_preload is not None:
                        wait_preload = domutils.get_node_text(e_wait_preload)
                    if e_ts_ad_id is not None:
                        ts_ad_id = domutils.get_node_text(e_ts_ad_id)
                        # Empty <TSAdID> is normalized to None, like AdID.
                        if len(ts_ad_id) == 0:
                            ts_ad_id = None
            if not is_wrapper:
                e_media_files = domutils.get_single_element(e_linear, 'MediaFiles')
                for e_media_file in domutils.get_children_by_tag_name(e_media_files, 'MediaFile'):
                    # In VAST 2.0 the location is the <MediaFile> element's own text.
                    files.append({'uri': domutils.get_node_text(e_media_file),
                                  'id': e_media_file.getAttribute('id'),
                                  'delivery': e_media_file.getAttribute('delivery'),
                                  'type': e_media_file.getAttribute('type'),
                                  'bitrate': e_media_file.getAttribute('bitrate'),
                                  'width': e_media_file.getAttribute('width'),
                                  'height': e_media_file.getAttribute('height'),
                                  'apiFramework': e_media_file.getAttribute('apiFramework')})
            if is_wrapper:
                # Wrappers carry no media; only the tracking URIs matter.
                creatives.append({'type': 'linear',
                                  'adid': ad_id,
                                  'tracking': tracking})
            else:
                creatives.append({'type': 'linear',
                                  'id': e_creative.getAttribute('id'),
                                  'sequence': e_creative.getAttribute('sequence'),
                                  'adid': ad_id,
                                  'duration': duration,
                                  'tracking': tracking,
                                  'click_through': click_through,
                                  'files': files,
                                  'placement': placement,
                                  'interruptable': interruptable,
                                  'wait_preload': wait_preload,
                                  'ts_ad_id': ts_ad_id})
    return creatives
def parse_preload_ad_response(self, response, request_random):
    """Validate and decode a signed preload-ads response.

    Wire format: 8 base64 chars encoding the hex signature length,
    followed by a base64 RSA/SHA1 signature and a base64 JSON payload.
    The signature is checked against self.RESPONSE_PUBKEY and the
    payload's 'r' field must echo request_random (anti-replay).

    Each payload 'data' item must name a torrent already known to
    self.baseapp (looked up by infohash); unknown infohashes are
    skipped, mismatching ones abort the parse.

    Returns a list of ad dicts: {'tdef', 'dltype', 'priority', 'trackers'}.
    Raises BadResponseException on any validation failure.
    """
    if len(response) < 8:
        raise BadResponseException('response too small')
    try:
        # First 8 base64 characters decode to the signature length in hex.
        sig_len = int(base64.b64decode(response[0:8]), 16)
    except:
        if DEBUG:
            print_exc()
        raise BadResponseException('cannot get sign length')
    signature = response[8:8 + sig_len]
    data = response[8 + sig_len:]
    try:
        signature = base64.b64decode(signature)
    except:
        if DEBUG:
            print_exc()
        raise BadResponseException('failed to decode signature')
    try:
        data = base64.b64decode(data)
    except:
        if DEBUG:
            print_exc()
        raise BadResponseException('failed to decode data')
    try:
        # RSA/SHA1 verification against the baked-in response public key.
        bio = BIO.MemoryBuffer(self.RESPONSE_PUBKEY)
        pubkey = RSA.load_pub_key_bio(bio)
        sign_ok = pubkey.verify(hashlib.sha1(data).digest(), signature)
    except:
        if DEBUG:
            print_exc()
        raise BadResponseException('failed to verify data')
    if not sign_ok:
        raise BadResponseException('failed to verify data')
    response = json.loads(data)
    if type(response) != DictType:
        raise BadResponseException('response is not a dict')
    if 'r' not in response:
        raise BadResponseException('missing random in response')
    try:
        response_random = int(response['r'])
    except:
        raise BadResponseException('non-int random in response')
    # Anti-replay check: response must echo the random we sent.
    if response_random != request_random:
        raise BadResponseException('bad random: response=%s request=%s' % (response_random, request_random))
    if DEBUG:
        log('AdManager::parse_preload_ad_response: got success:', response)
    if 'data' not in response:
        raise BadResponseException('missing data')
    if type(response['data']) != ListType:
        raise BadResponseException('data is not a list')
    ads = []
    for a in response['data']:
        if type(a) != DictType:
            raise BadResponseException('data item is not a dict')
        mandatory_fields = ['infohash', 'id']
        for field in mandatory_fields:
            if field not in a:
                raise BadResponseException('missing ' + field)
        # Optional priority; anything non-numeric falls back to 0.
        try:
            priority = int(a['priority'])
        except:
            priority = 0
        tdef = None
        try:
            infohash = binascii.unhexlify(a['infohash'])
            ret = self.baseapp.get_torrent_by_infohash(infohash)
            if ret is not None:
                tdef = ret['tdef']
        except:
            if DEBUG:
                print_exc()
        if tdef is None:
            # Torrent not known locally: skip this ad, keep the rest.
            if DEBUG:
                log('AdManager::parse_preload_ad_response: cannot get torrent by infohash: infohash', binascii.hexlify(infohash))
            continue
        # Sanity check: the stored torrent must match the announced infohash.
        if binascii.hexlify(tdef.get_infohash()) != a['infohash']:
            if DEBUG:
                log('AdManager::parse_preload_ad_response: infohash does not match: response_infohash', a['infohash'], 'tdef_infohash', binascii.hexlify(tdef.get_infohash()))
            raise BadResponseException('infohash does not match')
        try:
            # Persist the ad-id -> infohash mapping (best effort).
            if DEBUG:
                log('admanager::parse_preload_ad_response: save ad-id: id', a['id'], 'infohash', binascii.hexlify(infohash))
            self.baseapp.save_adid2infohash_db(a['id'], infohash)
        except:
            if DEBUG:
                print_exc()
        trackers = a.get('trackers', None)
        if DEBUG:
            log('admanager::parse_preload_ad_response: trackers: infohash', binascii.hexlify(infohash), 'trackers', trackers)
        ad = {'tdef': tdef,
              'dltype': DLTYPE_TORRENT,
              'priority': priority,
              'trackers': trackers}
        ads.append(ad)
    return ads
def duration_from_string(self, string):
    """Convert an 'HH:MM:SS' duration string to a total number of seconds.

    Raises ValueError when the string does not have exactly three
    colon-separated components, or when any component is not an integer.
    """
    parts = string.split(':')
    if len(parts) != 3:
        # Call-style raise: valid on both Python 2 and 3
        # (was the py2-only "raise ValueError, 'msg'" form).
        raise ValueError('Bad string duration ' + string)
    try:
        hours = int(parts[0])
        minutes = int(parts[1])
        seconds = int(parts[2])
    except ValueError:
        # Narrowed from a bare except: split() yields strings, so only
        # ValueError can occur here.
        raise ValueError('Malformat string duration ' + string)
    return hours * 3600 + minutes * 60 + seconds
| {
"content_hash": "f120f1eb474f0de0d2efceae45274470",
"timestamp": "",
"source": "github",
"line_count": 1250,
"max_line_length": 482,
"avg_line_length": 47.0256,
"alnum_prop": 0.5218264094450682,
"repo_name": "aplicatii-romanesti/allinclusive-kodi-pi",
"id": "cd8b2eb7a4a6843d7ccec60e70f0b14f758f18d3",
"size": "58834",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".kodi/userdata/addon_data/plugin.video.p2p-streams/acestream/ace/ACEStream/Core/Ads/Manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6178"
},
{
"name": "Python",
"bytes": "8657978"
},
{
"name": "Shell",
"bytes": "198"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.